Replace prints with logging module

pull/1423/head
Flávio Veloso Soares 2023-02-15 16:43:42 -08:00
parent a0046439d1
commit 972547ca8c
8 changed files with 43 additions and 24 deletions

View File

@ -15,6 +15,9 @@ Sanity check for most common library uses all working
import yfinance as yf
import unittest
import logging
logging.basicConfig(level=logging.DEBUG)
symbols = ['MSFT', 'IWO', 'VFINX', '^GSPC', 'BTC-USD']
tickers = [yf.Ticker(symbol) for symbol in symbols]

View File

@ -21,6 +21,7 @@
from __future__ import print_function
import logging
import time as _time
import datetime as _datetime
import dateutil as _dateutil
@ -47,6 +48,7 @@ _BASE_URL_ = 'https://query2.finance.yahoo.com'
_SCRAPE_URL_ = 'https://finance.yahoo.com/quote'
_ROOT_URL_ = 'https://finance.yahoo.com'
logger = logging.getLogger(__name__)
class FastInfo:
# Contain small subset of info[] items that can be fetched faster elsewhere.
@ -899,7 +901,7 @@ class TickerBase:
sub_interval = nexts[interval]
td_range = itds[interval]
else:
print("WARNING: Have not implemented repair for '{}' interval. Contact developers".format(interval))
logger.critical("Have not implemented repair for '%s' interval. Contact developers", interval)
raise Exception("why here")
return df
@ -1035,7 +1037,7 @@ class TickerBase:
df_fine = self.history(start=fetch_start, end=fetch_end, interval=sub_interval, auto_adjust=False, actions=False, prepost=prepost, repair=r, keepna=True)
if df_fine is None or df_fine.empty:
if not silent:
print("YF: WARNING: Cannot reconstruct because Yahoo not returning data in interval")
logger.warning("Cannot reconstruct because Yahoo not returning data in interval")
continue
# Discard the buffer
df_fine = df_fine.loc[g[0] : g[-1]+itds[sub_interval]-_datetime.timedelta(milliseconds=1)]
@ -1273,7 +1275,7 @@ class TickerBase:
if n_fixed_crudely > 0:
report_msg += f"({n_fixed_crudely} crudely) "
report_msg += f"in {interval} price data"
print(report_msg)
logger.info('%s', report_msg)
# Restore original values where repair failed
f = df2_tagged
@ -1366,7 +1368,7 @@ class TickerBase:
if n_fixed < 4:
dts_repaired = sorted(list(set(dts_tagged).difference(dts_not_repaired)))
msg += f": {dts_repaired}"
print(msg)
logger.info('%s', msg)
if df2_reserve is not None:
df3 = _pd.concat([df3, df2_reserve]).sort_index()
@ -1729,7 +1731,7 @@ class TickerBase:
if start is None:
start = end - _pd.Timedelta(days=548) # 18 months
if start >= end:
print("ERROR: start date must be before end")
logger.error("Start date must be before end")
return None
start = start.floor("D")
end = end.ceil("D")
@ -1741,14 +1743,14 @@ class TickerBase:
json_str = self._data.cache_get(shares_url).text
json_data = _json.loads(json_str)
except:
print(f"{self.ticker}: Yahoo web request for share count failed")
logger.error("%s: Yahoo web request for share count failed", self.ticker)
return None
try:
fail = json_data["finance"]["error"]["code"] == "Bad Request"
except:
fail = False
if fail:
print(f"{self.ticker}: Yahoo web request for share count failed")
logger.error("%s: Yahoo web request for share count failed", self.ticker)
return None
shares_data = json_data["timeseries"]["result"]
@ -1757,7 +1759,7 @@ class TickerBase:
try:
df = _pd.Series(shares_data[0]["shares_out"], index=_pd.to_datetime(shares_data[0]["timestamp"], unit="s"))
except Exception as e:
print(f"{self.ticker}: Failed to parse shares count data: "+str(e))
logger.error("%s: Failed to parse shares count data: %s", self.ticker, e)
return None
df.index = df.index.tz_localize(tz)
@ -1872,7 +1874,7 @@ class TickerBase:
if dates is None or dates.shape[0] == 0:
err_msg = "No earnings dates found, symbol may be delisted"
print('- %s: %s' % (self.ticker, err_msg))
logger.error('%s: %s', self.ticker, err_msg)
return None
dates = dates.reset_index(drop=True)

View File

@ -1,6 +1,7 @@
import functools
from functools import lru_cache
import logging
import hashlib
from base64 import b64decode
usePycryptodome = False # slightly faster
@ -25,6 +26,8 @@ except ImportError:
cache_maxsize = 64
logger = logging.getLogger(__name__)
def lru_cache_freezeargs(func):
"""
@ -294,7 +297,7 @@ class TickerData:
msg = "No decryption keys could be extracted from JS file."
if "requests_cache" in str(type(response)):
msg += " Try flushing your 'requests_cache', probably parsing old JS."
print("WARNING: " + msg + " Falling back to backup decrypt methods.")
logger.warning("%s Falling back to backup decrypt methods.", msg)
if len(keys) == 0:
keys = []
try:

View File

@ -21,6 +21,7 @@
from __future__ import print_function
import logging
import time as _time
import multitasking as _multitasking
import pandas as _pd
@ -28,6 +29,7 @@ import pandas as _pd
from . import Ticker, utils
from . import shared
logger = logging.getLogger(__name__)
def download(tickers, start=None, end=None, actions=False, threads=True, ignore_tz=None,
group_by='column', auto_adjust=False, back_adjust=False, repair=False, keepna=False,
@ -144,12 +146,16 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
if progress:
shared._PROGRESS_BAR.completed()
if shared._ERRORS and show_errors:
print('\n%.f Failed download%s:' % (
len(shared._ERRORS), 's' if len(shared._ERRORS) > 1 else ''))
# print(shared._ERRORS)
print("\n".join(['- %s: %s' %
v for v in list(shared._ERRORS.items())]))
if shared._ERRORS:
if show_errors:
print('\n%.f Failed download%s:' % (
len(shared._ERRORS), 's' if len(shared._ERRORS) > 1 else ''))
# print(shared._ERRORS)
print("\n".join(['- %s: %s' %
v for v in list(shared._ERRORS.items())]))
else:
logger.error('%d failed downloads: %s',
len(shared._ERRORS), shared._ERRORS)
if ignore_tz:
for tkr in shared._DFS.keys():

View File

@ -58,7 +58,7 @@ class Analysis:
analysis_data = analysis_data['QuoteSummaryStore']
except KeyError as e:
err_msg = "No analysis data found, symbol may be delisted"
print('- %s: %s' % (self._data.ticker, err_msg))
logger.error('%s: %s', self._data.ticker, err_msg)
return
if isinstance(analysis_data.get('earningsTrend'), dict):

View File

@ -1,4 +1,5 @@
import datetime
import logging
import json
import pandas as pd
@ -8,6 +9,7 @@ from yfinance import utils
from yfinance.data import TickerData
from yfinance.exceptions import YFinanceDataException, YFinanceException
logger = logging.getLogger(__name__)
class Fundamentals:
@ -50,7 +52,7 @@ class Fundamentals:
self._fin_data_quote = self._financials_data['QuoteSummaryStore']
except KeyError:
err_msg = "No financials data found, symbol may be delisted"
print('- %s: %s' % (self._data.ticker, err_msg))
logger.error('%s: %s', self._data.ticker, err_msg)
return None
def _scrape_earnings(self, proxy):
@ -144,7 +146,7 @@ class Financials:
if statement is not None:
return statement
except YFinanceException as e:
print(f"- {self._data.ticker}: Failed to create {name} financials table for reason: {repr(e)}")
logger.error("%s: Failed to create %s financials table for reason: %r", self._data.ticker, name, e)
return pd.DataFrame()
def _create_financials_table(self, name, timescale, proxy):
@ -267,7 +269,7 @@ class Financials:
if statement is not None:
return statement
except YFinanceException as e:
print(f"- {self._data.ticker}: Failed to create financials table for {name} reason: {repr(e)}")
logger.error("%s: Failed to create financials table for %s reason: %r", self._data.ticker, name, e)
return pd.DataFrame()
def _create_financials_table_old(self, name, timescale, proxy):

View File

@ -1,4 +1,5 @@
import datetime
import logging
import json
import pandas as pd
@ -6,6 +7,7 @@ import pandas as pd
from yfinance import utils
from yfinance.data import TickerData
logger = logging.getLogger(__name__)
info_retired_keys_price = {"currentPrice", "dayHigh", "dayLow", "open", "previousClose", "volume", "volume24Hr"}
info_retired_keys_price.update({"regularMarket"+s for s in ["DayHigh", "DayLow", "Open", "PreviousClose", "Price", "Volume"]})
@ -126,7 +128,7 @@ class Quote:
quote_summary_store = json_data['QuoteSummaryStore']
except KeyError:
err_msg = "No summary info found, symbol may be delisted"
print('- %s: %s' % (self._data.ticker, err_msg))
logger.error('%s: %s', self._data.ticker, err_msg)
return None
# sustainability

View File

@ -936,9 +936,10 @@ def get_tz_cache():
try:
_tz_cache = _TzCache()
except _TzCacheException as err:
print("Failed to create TzCache, reason: {}".format(err))
print("TzCache will not be used.")
print("Tip: You can direct cache to use a different location with 'set_tz_cache_location(mylocation)'")
logger.error("Failed to create TzCache, reason: %s. "
"TzCache will not be used. "
"Tip: You can direct cache to use a different location with 'set_tz_cache_location(mylocation)'",
err)
_tz_cache = _TzCacheDummy()
return _tz_cache