Improve logging behaviour, particularly download()

- Use same logger across all files
- download():
  - write tracebacks to DEBUG
  - deprecate 'show_errors' argument
pull/1493/head
ValueRaider 2023-04-15 17:29:07 +01:00
parent 4937c933a2
commit d3e2e71a6e
7 changed files with 40 additions and 28 deletions

View File

@ -21,7 +21,6 @@
from __future__ import print_function
import logging
import warnings
import time as _time
import datetime as _datetime
@ -49,7 +48,7 @@ _BASE_URL_ = 'https://query2.finance.yahoo.com'
_SCRAPE_URL_ = 'https://finance.yahoo.com/quote'
_ROOT_URL_ = 'https://finance.yahoo.com'
logger = logging.getLogger(__name__)
logger = utils.get_yf_logger()
class TickerBase:
def __init__(self, ticker, session=None):
@ -145,10 +144,10 @@ class TickerBase:
if debug is not None:
if debug:
utils.print_once(f"yfinance: Ticker.history(debug={debug}) argument is deprecated and will be removed in future version. Do this instead: logging.getLogger('yfinance').setLevel(logging.ERROR)")
logging.getLogger('yfinance').setLevel(logging.ERROR)
logger.setLevel(logging.ERROR)
else:
utils.print_once(f"yfinance: Ticker.history(debug={debug}) argument is deprecated and will be removed in future version. Do this instead to suppress error messages: logging.getLogger('yfinance').setLevel(logging.CRITICAL)")
logging.getLogger('yfinance').setLevel(logging.CRITICAL)
logger.setLevel(logging.CRITICAL)
if start or period is None or period.lower() == "max":
# Check can get TZ. Fail => probably delisted

View File

@ -26,9 +26,11 @@ try:
except ImportError:
import json as json
from . import utils
cache_maxsize = 64
logger = logging.getLogger(__name__)
logger = utils.get_yf_logger()
def lru_cache_freezeargs(func):

View File

@ -30,11 +30,9 @@ import pandas as _pd
from . import Ticker, utils
from . import shared
logger = logging.getLogger(__name__)
def download(tickers, start=None, end=None, actions=False, threads=True, ignore_tz=None,
group_by='column', auto_adjust=False, back_adjust=False, repair=False, keepna=False,
progress=True, period="max", show_errors=True, interval="1d", prepost=False,
progress=True, period="max", show_errors=None, interval="1d", prepost=False,
proxy=None, rounding=False, timeout=10):
"""Download yahoo tickers
:Parameters:
@ -80,11 +78,20 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
Optional. Round values to 2 decimal places?
show_errors: bool
Optional. Doesn't print errors if False
DEPRECATED, will be removed in future version
timeout: None or float
If not None stops waiting for a response after given number of
seconds. (Can also be a fraction of a second e.g. 0.01)
"""
if show_errors is not None:
if show_errors:
utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead: logging.getLogger('yfinance').setLevel(logging.ERROR)")
logging.getLogger('yfinance').setLevel(logging.ERROR)
else:
utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead to suppress error messages: logging.getLogger('yfinance').setLevel(logging.CRITICAL)")
logging.getLogger('yfinance').setLevel(logging.CRITICAL)
if ignore_tz is None:
# Set default value depending on interval
if interval[1:] in ['m', 'h']:
@ -117,11 +124,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
# reset shared._DFS
shared._DFS = {}
shared._ERRORS = {}
# temporarily disable error reporting while downloading
yf_logger = logging.getLogger('yfinance')
yf_lvl = yf_logger.level
yf_logger.setLevel(logging.CRITICAL)
shared._TRACEBACKS = {}
# download using threads
if threads:
@ -154,19 +157,13 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
if progress:
shared._PROGRESS_BAR.completed()
# restore error reporting
yf_logger.setLevel(yf_lvl)
if shared._ERRORS:
if show_errors:
print('\n%.f Failed download%s:' % (
len(shared._ERRORS), 's' if len(shared._ERRORS) > 1 else ''))
# print(shared._ERRORS)
print("\n".join(['- %s: %s' %
v for v in list(shared._ERRORS.items())]))
else:
logger.error('%d failed downloads: %s',
len(shared._ERRORS), shared._ERRORS)
logger = utils.get_yf_logger()
logger.error('\n%.f Failed download%s:' % (
len(shared._ERRORS), 's' if len(shared._ERRORS) > 1 else ''))
for ticker in shared._ERRORS:
logger.error(f'- {ticker}: {shared._ERRORS[ticker]}')
logger.debug(f'{ticker}: ' + shared._TRACEBACKS[ticker])
if ignore_tz:
for tkr in shared._DFS.keys():
@ -230,8 +227,9 @@ def _download_one_threaded(ticker, start=None, end=None,
keepna, timeout)
except Exception as e:
# global try/except needed as the current thread implementation breaks if an exception is raised.
shared._TRACEBACKS[ticker] = traceback.format_exc()
shared._DFS[ticker] = utils.empty_df()
shared._ERRORS[ticker] = traceback.format_exc()
shared._ERRORS[ticker] = repr(e)
else:
shared._DFS[ticker.upper()] = data
if progress:

View File

@ -9,7 +9,7 @@ from yfinance import utils
from yfinance.data import TickerData
from yfinance.exceptions import YFinanceDataException, YFinanceException
logger = logging.getLogger(__name__)
logger = utils.get_yf_logger()
class Fundamentals:

View File

@ -8,7 +8,7 @@ import pandas as pd
from yfinance import utils
from yfinance.data import TickerData
logger = logging.getLogger(__name__)
logger = utils.get_yf_logger()
info_retired_keys_price = {"currentPrice", "dayHigh", "dayLow", "open", "previousClose", "volume", "volume24Hr"}
info_retired_keys_price.update({"regularMarket"+s for s in ["DayHigh", "DayLow", "Open", "PreviousClose", "Price", "Volume"]})

View File

@ -22,4 +22,5 @@
_DFS = {}
_PROGRESS_BAR = None
_ERRORS = {}
_TRACEBACKS = {}
_ISINS = {}

View File

@ -36,6 +36,7 @@ import appdirs as _ad
import sqlite3 as _sqlite3
import atexit as _atexit
from functools import lru_cache
import logging
from threading import Lock
@ -69,6 +70,17 @@ def print_once(msg):
print(msg)
yf_logger = None
def get_yf_logger():
    """Return the shared 'yfinance' logger, creating it on first use.

    Lazily initialises the module-level ``yf_logger`` singleton so every
    yfinance module logs through the same logger. A basic StreamHandler
    is attached only when the user has not already configured a handler,
    so default output is visible without clobbering user setup.

    :return: the ``logging.Logger`` named "yfinance"
    """
    global yf_logger
    if yf_logger is None:
        yf_logger = logging.getLogger("yfinance")
    # Logger.handlers is always a list (never None), so an emptiness
    # check alone is sufficient here.
    if not yf_logger.handlers:
        # Only add a basic stream handler if user has not already added one
        yf_logger.addHandler(logging.StreamHandler())
    return yf_logger
def is_isin(string):
    """Return True if *string* looks like an ISIN.

    Format checked: two uppercase letters (country code), nine
    alphanumeric characters (NSIN), and a single final check digit.
    """
    isin_pattern = "^([A-Z]{2})([A-Z0-9]{9})([0-9]{1})$"
    return _re.match(isin_pattern, string) is not None