Merge branch 'main' into dev

ValueRaider 2023-08-13 12:40:11 +01:00 committed by GitHub
commit c9dd582dd8
20 changed files with 356 additions and 102 deletions


@ -1,44 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
# IMPORTANT
# Read and follow these instructions carefully. Help us help you.
### Are you up-to-date?
Upgrade to the latest version and confirm the issue/bug is still there.
`$ pip install yfinance --upgrade --no-cache-dir`
Confirm by running:
`import yfinance as yf ; print(yf.__version__)`
and comparing against [PIP](https://pypi.org/project/yfinance/#history).
### Does Yahoo actually have the data?
Are you spelling the symbol *exactly* the same as Yahoo does?
Then visit `finance.yahoo.com` and confirm they have the data you want. Maybe your symbol was delisted, or your expectations of `yfinance` are wrong.
### Are you spamming Yahoo?
Yahoo Finance's free service has rate-limiting that depends on request type - roughly 60/minute for prices, 10/minute for info. Once the limit is hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.
### Still think it's a bug?
**Delete these instructions** and replace with your bug report, providing the following as best you can:
- Simple code that reproduces your problem, that we can copy-paste-run.
- Run code with [debug logging enabled](https://github.com/ranaroussi/yfinance#logging) and post the full output.
- If you think `yfinance` is returning bad data, give us proof.
- `yfinance` version and Python version.
- Operating system type.
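
Both the old template above and its replacement below ask reporters to run their code with debug logging enabled. A minimal sketch of doing that with the standard `logging` module, following the README's logging instructions (the `Ticker` call is just a placeholder for the code being reported):

```python
import logging

import yfinance as yf

# Route yfinance's logger output to the console at DEBUG level,
# then run the code that reproduces the problem and copy the full output.
logging.basicConfig(level=logging.INFO)
logging.getLogger("yfinance").setLevel(logging.DEBUG)

yf.Ticker("MSFT").history(period="5d")
```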


@ -0,0 +1,88 @@
name: Bug report
description: Report a bug in our project
labels: ["bug"]
body:
  - type: markdown
    attributes:
      value: |
        # IMPORTANT - Read and follow these instructions carefully. Help us help you.
        ### Does the issue already exist?
        Use the search tool. Don't annoy everyone by duplicating existing Issues.
        ### Are you up-to-date?
        Upgrade to the latest version and confirm the issue/bug is still there.
        `$ pip install yfinance --upgrade --no-cache-dir`
        Confirm by running:
        `import yfinance as yf ; print(yf.__version__)`
        and comparing against [PIP](https://pypi.org/project/yfinance/#history).
        ### Does Yahoo actually have the data?
        Are you spelling the symbol *exactly* the same as Yahoo does?
        Then visit `finance.yahoo.com` and confirm they have the data you want. Maybe your symbol was delisted, or your expectations of `yfinance` are wrong.
        ### Are you spamming Yahoo?
        Yahoo Finance's free service is rate-limited: https://github.com/ranaroussi/yfinance/discussions/1513. Once the limit is hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.
  - type: markdown
    attributes:
      value: |
        ---
        ## Still think it's a bug?
        Provide the following as best you can:
  - type: textarea
    id: code
    attributes:
      label: "Simple code that reproduces your problem"
      description: "Provide a snippet of code that we can copy-paste-run. Wrap code in Python Markdown code blocks for proper formatting (```` ```python ... ``` ````)."
    validations:
      required: true
  - type: textarea
    id: debug-log
    attributes:
      label: "Debug log"
      description: "Run your code with debug logging enabled and post the full output. Instructions: https://github.com/ranaroussi/yfinance/tree/main#logging"
    validations:
      required: true
  - type: textarea
    id: bad-data-proof
    attributes:
      label: "Bad data proof"
      description: "If you think `yfinance` is returning bad data, provide your proof here."
    validations:
      required: false
  - type: input
    id: version-yfinance
    attributes:
      label: "`yfinance` version"
    validations:
      required: true
  - type: input
    id: version-python
    attributes:
      label: "Python version"
    validations:
      required: false
  - type: input
    id: os
    attributes:
      label: "Operating system"
    validations:
      required: false
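
The "Are you spamming Yahoo?" section links to a rate-limiting discussion. One common mitigation, loosely following the caching/rate-limiting pattern in the yfinance README (the class name and the 2-requests-per-5-seconds limit here are illustrative, not part of this diff):

```python
from pyrate_limiter import Duration, Limiter, RequestRate
from requests import Session
from requests_cache import CacheMixin, SQLiteCache
from requests_ratelimiter import LimiterMixin, MemoryQueueBucket

import yfinance as yf


class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
    """Session that caches responses and throttles outgoing requests."""


session = CachedLimiterSession(
    limiter=Limiter(RequestRate(2, Duration.SECOND * 5)),  # at most 2 requests per 5 seconds
    bucket_class=MemoryQueueBucket,
    backend=SQLiteCache("yfinance.cache"),
)
ticker = yf.Ticker("MSFT", session=session)
print(ticker.history(period="1mo").tail())
```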


@ -1,6 +1,23 @@
Change Log
===========
0.2.27
------
Bug fixes:
- fix merging 1d-prices with out-of-range divs/splits #1635
- fix multithread error 'tz already in cache' #1648
0.2.26
------
Proxy improvements
- bug fixes #1371
- security fix #1625
0.2.25
------
Fix single ISIN as ticker #1611
Fix 'Only 100 years allowed' error #1576
0.2.24
------
Fix info[] missing values #1603
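
For context, #1648 above concerns the timezone cache raising "tz already in cache" when several download threads stored the same ticker's timezone; the utils.py change later in this diff replaces the raise with an overwrite. A minimal sketch of the kind of call that exercises it (tickers chosen arbitrarily):

```python
import yfinance as yf

# Threaded multi-ticker download; workers may resolve and cache timezones
# concurrently, which previously could raise "Tkr ... tz already in cache".
data = yf.download(["MSFT", "AAPL", "GOOG"], period="1mo", threads=True)
print(data["Close"].tail())
```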


@ -251,14 +251,13 @@ To install `yfinance` using `conda`, see
- [Python](https://www.python.org) \>= 2.7, 3.4+
- [Pandas](https://github.com/pydata/pandas) \>= 1.3.0
- [Numpy](http://www.numpy.org) \>= 1.16.5
- [requests](http://docs.python-requests.org/en/master) \>= 2.26
- [requests](http://docs.python-requests.org/en/master) \>= 2.31
- [lxml](https://pypi.org/project/lxml) \>= 4.9.1
- [appdirs](https://pypi.org/project/appdirs) \>= 1.4.4
- [pytz](https://pypi.org/project/pytz) \>=2022.5
- [frozendict](https://pypi.org/project/frozendict) \>= 2.3.4
- [beautifulsoup4](https://pypi.org/project/beautifulsoup4) \>= 4.11.1
- [html5lib](https://pypi.org/project/html5lib) \>= 1.1
- [cryptography](https://pypi.org/project/cryptography) \>= 3.3.2
#### Optional (if you want to use `pandas_datareader`)


@ -1,5 +1,5 @@
{% set name = "yfinance" %}
{% set version = "0.2.24" %}
{% set version = "0.2.27" %}
package:
name: "{{ name|lower }}"
@ -18,7 +18,7 @@ requirements:
host:
- pandas >=1.3.0
- numpy >=1.16.5
- requests >=2.26
- requests >=2.31
- multitasking >=0.0.7
- lxml >=4.9.1
- appdirs >=1.4.4
@ -27,14 +27,13 @@ requirements:
- beautifulsoup4 >=4.11.1
- html5lib >=1.1
# - pycryptodome >=3.6.6
- cryptography >=3.3.2
- pip
- python
run:
- pandas >=1.3.0
- numpy >=1.16.5
- requests >=2.26
- requests >=2.31
- multitasking >=0.0.7
- lxml >=4.9.1
- appdirs >=1.4.4
@ -43,7 +42,6 @@ requirements:
- beautifulsoup4 >=4.11.1
- html5lib >=1.1
# - pycryptodome >=3.6.6
- cryptography >=3.3.2
- python
test:


@ -1,6 +1,6 @@
pandas>=1.3.0
numpy>=1.16.5
requests>=2.26
requests>=2.31
multitasking>=0.0.7
lxml>=4.9.1
appdirs>=1.4.4
@ -8,4 +8,3 @@ pytz>=2022.5
frozendict>=2.3.4
beautifulsoup4>=4.11.1
html5lib>=1.1
cryptography>=3.3.2


@ -60,7 +60,7 @@ setup(
keywords='pandas, yahoo finance, pandas datareader',
packages=find_packages(exclude=['contrib', 'docs', 'tests', 'examples']),
install_requires=['pandas>=1.3.0', 'numpy>=1.16.5',
'requests>=2.26', 'multitasking>=0.0.7',
'requests>=2.31', 'multitasking>=0.0.7',
'lxml>=4.9.1', 'appdirs>=1.4.4', 'pytz>=2022.5',
'frozendict>=2.3.4',
'beautifulsoup4>=4.11.1', 'html5lib>=1.1'],


@ -224,6 +224,15 @@ class TestPriceHistory(unittest.TestCase):
print("{}-without-events missing these dates: {}".format(tkr, missing_from_df2))
raise
# Reproduce issue #1634 - 1d dividend out-of-range, should be prepended to prices
div_dt = _pd.Timestamp(2022, 7, 21).tz_localize("America/New_York")
df_dividends = _pd.DataFrame(data={"Dividends":[1.0]}, index=[div_dt])
df_prices = _pd.DataFrame(data={c:[1.0] for c in yf.const.price_colnames}|{'Volume':0}, index=[div_dt+_dt.timedelta(days=1)])
df_merged = yf.utils.safe_merge_dfs(df_prices, df_dividends, '1d')
self.assertEqual(df_merged.shape[0], 2)
self.assertTrue(df_merged[df_prices.columns].iloc[1:].equals(df_prices))
self.assertEqual(df_merged.index[0], div_dt)
def test_intraDayWithEvents(self):
tkrs = ["BHP.AX", "IMP.JO", "BP.L", "PNL.L", "INTC"]
test_run = False


@ -25,6 +25,8 @@ class TestTicker(unittest.TestCase):
def setUpClass(cls):
cls.session = session_gbl
cls.proxy = None
@classmethod
def tearDownClass(cls):
if cls.session is not None:
@ -144,6 +146,129 @@ class TestTicker(unittest.TestCase):
# dat.earnings_trend
# dat.earnings_forecasts
def test_goodTicker_withProxy(self):
# that yfinance works when full api is called on same instance of ticker
tkr = "IBM"
dat = yf.Ticker(tkr, session=self.session)
dat._fetch_ticker_tz(proxy=self.proxy, timeout=5, debug_mode=False, raise_errors=False)
dat._get_ticker_tz(proxy=self.proxy, timeout=5, debug_mode=False, raise_errors=False)
dat.history(period="1wk", proxy=self.proxy)
v = dat.stats(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(len(v) > 0)
v = dat.get_recommendations(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_calendar(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_major_holders(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_institutional_holders(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_mutualfund_holders(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_info(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(len(v) > 0)
v = dat.get_sustainability(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_recommendations_summary(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_analyst_price_target(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_rev_forecast(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_earnings_forecast(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_trend_details(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_earnings_trend(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_earnings(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_income_stmt(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_incomestmt(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_financials(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_balance_sheet(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_balancesheet(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_cash_flow(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_cashflow(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_shares(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_shares_full(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_isin(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(v != "")
v = dat.get_news(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(len(v) > 0)
v = dat.get_earnings_dates(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
# TODO: enable after merge
# dat.get_history_metadata(proxy=self.proxy)
# self.assertIsNotNone(v)
# self.assertTrue(len(v) > 0)
class TestTickerHistory(unittest.TestCase):
session = None

tests/utils.py (new file, mode 100644, 51 lines added)

@ -0,0 +1,51 @@
"""
Tests for utils
To run all tests in suite from commandline:
python -m unittest tests.utils
Specific test class:
python -m unittest tests.utils.TestTicker
"""
# import pandas as pd
# import numpy as np
from .context import yfinance as yf
from .context import session_gbl
import unittest
# import requests_cache
import tempfile
class TestUtils(unittest.TestCase):
session = None
@classmethod
def setUpClass(cls):
cls.tempCacheDir = tempfile.TemporaryDirectory()
yf.set_tz_cache_location(cls.tempCacheDir.name)
@classmethod
def tearDownClass(cls):
cls.tempCacheDir.cleanup()
def test_storeTzNoRaise(self):
# storing TZ to cache should never raise exception
tkr = 'AMZN'
tz1 = "America/New_York"
tz2 = "London/Europe"
cache = yf.utils.get_tz_cache()
cache.store(tkr, tz1)
cache.store(tkr, tz2)
def suite():
suite = unittest.TestSuite()
suite.addTest(TestUtils('Test utils'))
return suite
if __name__ == '__main__':
unittest.main()


@ -181,12 +181,6 @@ class TickerBase:
if params["interval"] == "30m":
params["interval"] = "15m"
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
# if the ticker is MUTUALFUND or ETF, then get capitalGains events
params["events"] = "div,splits,capitalGains"
@ -212,6 +206,7 @@ class TickerBase:
data = get_fn(
url=url,
params=params,
proxy=proxy,
timeout=timeout
)
if "Will be right back" in data.text or data is None:
@ -1684,9 +1679,9 @@ class TickerBase:
return data
@property
def fast_info(self):
def get_fast_info(self, proxy=None):
if self._fast_info is None:
self._fast_info = FastInfo(self)
self._fast_info = FastInfo(self, proxy=proxy)
return self._fast_info
@property
@ -1905,7 +1900,7 @@ class TickerBase:
logger = utils.get_yf_logger()
# Process dates
tz = self._get_ticker_tz(proxy=None, timeout=10)
tz = self._get_ticker_tz(proxy=proxy, timeout=10)
dt_now = pd.Timestamp.utcnow().tz_convert(tz)
if start is not None:
start_ts = utils._parse_user_dt(start, tz)
@ -1929,8 +1924,8 @@ class TickerBase:
ts_url_base = f"https://query2.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{self.ticker}?symbol={self.ticker}"
shares_url = f"{ts_url_base}&period1={int(start.timestamp())}&period2={int(end.timestamp())}"
try:
json_str = self._data.cache_get(shares_url).text
json_data = _json.loads(json_str)
json_data = self._data.cache_get(url=shares_url, proxy=proxy)
json_data = json_data.json()
except (_json.JSONDecodeError, requests.exceptions.RequestException):
logger.error(f"{self.ticker}: Yahoo web request for share count failed")
return None
@ -2099,10 +2094,10 @@ class TickerBase:
return dates
def get_history_metadata(self) -> dict:
def get_history_metadata(self, proxy=None) -> dict:
if self._history_metadata is None:
# Request intraday data, because then Yahoo returns exchange schedule.
self.history(period="1wk", interval="1h", prepost=True)
self.history(period="1wk", interval="1h", prepost=True, proxy=proxy)
if self._history_metadata_formatted is False:
self._history_metadata = utils.format_history_metadata(self._history_metadata)
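
The net effect of these changes is that a caller-supplied proxy is now passed through to the timezone lookup, the share-count request, and history metadata, and `fast_info` can be built with a proxy via the new `get_fast_info`. A rough usage sketch (the proxy URL is a placeholder, not something defined in this diff):

```python
import yfinance as yf

proxy = "http://127.0.0.1:8080"  # placeholder - substitute your own proxy, or None

dat = yf.Ticker("IBM")
# Price history and its metadata now share the same proxy setting.
dat.history(period="1wk", proxy=proxy)
md = dat.get_history_metadata(proxy=proxy)

# get_fast_info() threads the proxy into FastInfo's internal history calls.
fi = dat.get_fast_info(proxy=proxy)
print(md.get("exchangeName"), fi.last_price)
```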


@ -115,5 +115,4 @@ fundamentals_keys = {
"PaymentstoSuppliersforGoodsandServices", "ClassesofCashReceiptsfromOperatingActivities",
"OtherCashReceiptsfromOperatingActivities", "ReceiptsfromGovernmentGrants", "ReceiptsfromCustomers"]}
price_colnames = ['Open', 'High', 'Low', 'Close', 'Adj Close']


@ -65,7 +65,7 @@ class TickerData:
def _get_proxy(self, proxy):
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
if isinstance(proxy, (dict, frozendict)) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
return proxy


@ -209,7 +209,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
if len(tickers) == 1:
ticker = tickers[0]
return shared._DFS[shared._ISINS.get(ticker, ticker)]
return shared._DFS[ticker]
try:
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
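
This hunk touches how the result of a single-ticker download is keyed when the ticker was given as an ISIN (see `shared._ISINS`; related to the ISIN-as-ticker fix #1611 noted in the change log). A small sketch of that call pattern, with the ISIN purely illustrative:

```python
import yfinance as yf

# Downloading by ISIN: yfinance resolves it to the underlying symbol internally.
df = yf.download("US0378331005", period="1mo")  # illustrative ISIN
print(df.tail())
```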


@ -50,19 +50,19 @@ class Financials:
def get_income_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
res = self._income_time_series
if freq not in res:
res[freq] = self._fetch_time_series("income", freq, proxy=None)
res[freq] = self._fetch_time_series("income", freq, proxy)
return res[freq]
def get_balance_sheet_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
res = self._balance_sheet_time_series
if freq not in res:
res[freq] = self._fetch_time_series("balance-sheet", freq, proxy=None)
res[freq] = self._fetch_time_series("balance-sheet", freq, proxy)
return res[freq]
def get_cash_flow_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
res = self._cash_flow_time_series
if freq not in res:
res[freq] = self._fetch_time_series("cash-flow", freq, proxy=None)
res[freq] = self._fetch_time_series("cash-flow", freq, proxy)
return res[freq]
@utils.log_indent_decorator


@ -35,7 +35,7 @@ class Holders:
def _scrape(self, proxy):
ticker_url = f"{self._SCRAPE_URL_}/{self._data.ticker}"
try:
resp = self._data.cache_get(ticker_url + '/holders', proxy)
resp = self._data.cache_get(ticker_url + '/holders', proxy=proxy)
holders = pd.read_html(resp.text)
except Exception:
holders = []


@ -78,8 +78,9 @@ class InfoDictWrapper(MutableMapping):
class FastInfo:
# Contain small subset of info[] items that can be fetched faster elsewhere.
# Imitates a dict.
def __init__(self, tickerBaseObject):
def __init__(self, tickerBaseObject, proxy=None):
self._tkr = tickerBaseObject
self.proxy = proxy
self._prices_1y = None
self._prices_1wk_1h_prepost = None
@ -182,9 +183,9 @@ class FastInfo:
if self._prices_1y is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1y = self._tkr.history(period="380d", auto_adjust=False, keepna=True)
self._prices_1y = self._tkr.history(period="380d", auto_adjust=False, keepna=True, proxy=self.proxy)
logging.disable(logging.NOTSET)
self._md = self._tkr.get_history_metadata()
self._md = self._tkr.get_history_metadata(proxy=self.proxy)
try:
ctp = self._md["currentTradingPeriod"]
self._today_open = pd.to_datetime(ctp["regular"]["start"], unit='s', utc=True).tz_convert(self.timezone)
@ -211,7 +212,7 @@ class FastInfo:
if self._prices_1wk_1h_prepost is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1wk_1h_prepost = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=True)
self._prices_1wk_1h_prepost = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=True, proxy=self.proxy)
logging.disable(logging.NOTSET)
return self._prices_1wk_1h_prepost
@ -219,7 +220,7 @@ class FastInfo:
if self._prices_1wk_1h_reg is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1wk_1h_reg = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=False)
self._prices_1wk_1h_reg = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=False, proxy=self.proxy)
logging.disable(logging.NOTSET)
return self._prices_1wk_1h_reg
@ -228,7 +229,7 @@ class FastInfo:
return self._md
self._get_1y_prices()
self._md = self._tkr.get_history_metadata()
self._md = self._tkr.get_history_metadata(proxy=self.proxy)
return self._md
def _exchange_open_now(self):
@ -261,7 +262,7 @@ class FastInfo:
if self._tkr._history_metadata is None:
self._get_1y_prices()
md = self._tkr.get_history_metadata()
md = self._tkr.get_history_metadata(proxy=self.proxy)
self._currency = md["currency"]
return self._currency
@ -272,7 +273,7 @@ class FastInfo:
if self._tkr._history_metadata is None:
self._get_1y_prices()
md = self._tkr.get_history_metadata()
md = self._tkr.get_history_metadata(proxy=self.proxy)
self._quote_type = md["instrumentType"]
return self._quote_type
@ -297,7 +298,7 @@ class FastInfo:
if self._shares is not None:
return self._shares
shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date()-pd.Timedelta(days=548))
shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date()-pd.Timedelta(days=548), proxy=self.proxy)
# if shares is None:
# # Requesting 18 months failed, so fallback to shares which should include last year
# shares = self._tkr.get_shares()


@ -141,6 +141,10 @@ class Ticker(TickerBase):
def info(self) -> dict:
return self.get_info()
@property
def fast_info(self):
return self.get_fast_info()
@property
def calendar(self) -> _pd.DataFrame:
return self.get_calendar()
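
With the new `fast_info` property simply delegating to `get_fast_info()`, existing attribute-style access keeps working. A brief sketch (symbol chosen arbitrarily):

```python
import yfinance as yf

fi = yf.Ticker("MSFT").fast_info
# FastInfo imitates a dict of the cheaper info[] items and lazily fetches prices.
print(fi.last_price, fi.currency, fi.timezone)
```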


@ -22,6 +22,7 @@
from __future__ import print_function
import atexit as _atexit
import datetime as _datetime
import logging
import os as _os
@ -43,6 +44,7 @@ import requests as _requests
from dateutil.relativedelta import relativedelta
from pytz import UnknownTimeZoneError
from yfinance import const
from .const import _BASE_URL_
try:
@ -682,21 +684,28 @@ def safe_merge_dfs(df_main, df_sub, interval):
f_outOfRange = indices == -1
if f_outOfRange.any() and not intraday:
# If dividend is occurring in next interval after last price row,
# add a new row of NaNs
last_dt = df_main.index[-1]
next_interval_start_dt = last_dt + td
empty_row_data = {c:[_np.nan] for c in const.price_colnames}|{'Volume':[0]}
if interval == '1d':
# Allow for weekends & holidays
next_interval_end_dt = last_dt + 7 * _pd.Timedelta(days=7)
else:
next_interval_end_dt = next_interval_start_dt + td
for i in _np.where(f_outOfRange)[0]:
dt = df_sub.index[i]
if next_interval_start_dt <= dt < next_interval_end_dt:
new_dt = dt if interval == '1d' else next_interval_start_dt
# For 1d, add all out-of-range event dates
for i in _np.where(f_outOfRange)[0]:
dt = df_sub.index[i]
get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
df_main.loc[new_dt] = _np.nan
empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
df_main = _pd.concat([df_main, empty_row], sort=True)
else:
# Else, only add out-of-range event dates if occurring in interval
# immediately after last price row
last_dt = df_main.index[-1]
next_interval_start_dt = last_dt + td
next_interval_end_dt = next_interval_start_dt + td
for i in _np.where(f_outOfRange)[0]:
dt = df_sub.index[i]
if next_interval_start_dt <= dt < next_interval_end_dt:
new_dt = next_interval_start_dt
get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
df_main = _pd.concat([df_main, empty_row], sort=True)
df_main = df_main.sort_index()
# Re-calculate indices
indices = _np.searchsorted(_np.append(df_main.index, df_main.index[-1] + td), df_sub.index, side='right')
@ -711,7 +720,7 @@ def safe_merge_dfs(df_main, df_sub, interval):
f_outOfRange = indices == -1
if f_outOfRange.any():
if intraday or interval in ['1d', '1wk']:
raise Exception(f"The following '{data_col}' events are out-of-range, did not expect with interval {interval}: {df_sub.index}")
raise Exception(f"The following '{data_col}' events are out-of-range, did not expect with interval {interval}: {df_sub.index[f_outOfRange]}")
get_yf_logger().debug(f'Discarding these {data_col} events:' + '\n' + str(df_sub[f_outOfRange]))
df_sub = df_sub[~f_outOfRange].copy()
indices = indices[~f_outOfRange]
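
The branch above handles daily events dated outside the price range by appending a NaN price row per event. A sketch mirroring the new unit test for #1634/#1635 earlier in this diff:

```python
import datetime as dt

import pandas as pd
import yfinance as yf

div_dt = pd.Timestamp(2022, 7, 21).tz_localize("America/New_York")
dividends = pd.DataFrame({"Dividends": [1.0]}, index=[div_dt])
prices = pd.DataFrame({c: [1.0] for c in yf.const.price_colnames} | {"Volume": 0},
                      index=[div_dt + dt.timedelta(days=1)])

# The out-of-range dividend is prepended as a new row of NaN prices.
merged = yf.utils.safe_merge_dfs(prices, dividends, "1d")
print(merged)
```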
@ -969,10 +978,14 @@ class _TzCache:
def store(self, tkr, tz):
if tz is None:
self.tz_db.delete(tkr)
elif self.tz_db.get(tkr) is not None:
raise Exception(f"Tkr {tkr} tz already in cache")
else:
self.tz_db.set(tkr, tz)
tz_db = self.tz_db.get(tkr)
if tz_db is not None:
if tz != tz_db:
get_yf_logger().debug(f'{tkr}: Overwriting cached TZ "{tz_db}" with different TZ "{tz}"')
self.tz_db.set(tkr, tz)
else:
self.tz_db.set(tkr, tz)
@property
def _db_dir(self):
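
The `store` change above replaces the old "tz already in cache" exception with a debug log plus overwrite, which is what the new tests/utils.py test exercises. A tiny sketch of the behaviour (timezone strings are arbitrary):

```python
import yfinance as yf

cache = yf.utils.get_tz_cache()
cache.store("AMZN", "America/New_York")
# A second store with a different value no longer raises; it logs a debug
# message and overwrites the cached timezone.
cache.store("AMZN", "Europe/London")
```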


@ -1 +1 @@
version = "0.2.25b1"
version = "0.2.27"