Add unittest for duplication fix

pull/1070/head
ValueRaider 2022-10-14 23:15:13 +01:00
parent 5c0b2bbaa3
commit c679551faa
4 changed files with 87 additions and 5 deletions


@@ -0,0 +1 @@
#!/usr/bin/env python

tests/context.py 100644 (+9)

@@ -0,0 +1,9 @@
# -*- coding: utf-8 -*-
import sys
import os
_parent_dp = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
_src_dp = _parent_dp
sys.path.insert(0, _src_dp)
import yfinance
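(Note: this shim puts the repository root at the front of sys.path so the test modules import the in-tree yfinance rather than any installed copy; tests/prices.py below picks it up via from .context import yfinance as yf.)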

tests/prices.py 100644 (+73)

@@ -0,0 +1,73 @@
from .context import yfinance as yf

import unittest

import datetime as _dt
import pytz as _tz


class TestPriceHistory(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_duplicatingDaily(self):
        tkrs = []
        tkrs.append("IMP.JO")
        tkrs.append("BHG.JO")
        tkrs.append("SSW.JO")
        tkrs.append("BP.L")
        tkrs.append("INTC")
        test_run = False
        for tkr in tkrs:
            dat = yf.Ticker(tkr)
            tz = dat._get_ticker_tz()

            # The daily duplication only appears after the exchange close, when
            # Yahoo returns today's data as an extra row, so skip tickers whose
            # local time is still earlier in the trading day.
            dt_utc = _tz.timezone("UTC").localize(_dt.datetime.utcnow())
            dt = dt_utc.astimezone(_tz.timezone(tz))
            if dt.time() < _dt.time(17, 0):
                continue
            test_run = True

            df = dat.history(start=dt.date() - _dt.timedelta(days=7), interval="1d")
            dt0 = df.index[-2]
            dt1 = df.index[-1]
            try:
                self.assertNotEqual(dt0, dt1)
            except:
                print("Ticker = ", tkr)
                raise

        if not test_run:
            self.skipTest("Skipping test_duplicatingDaily() because only expected to fail just after market close")

    def test_duplicatingWeekly(self):
        tkrs = ['MSFT', 'IWO', 'VFINX', '^GSPC', 'BTC-USD']
        test_run = False
        for tkr in tkrs:
            dat = yf.Ticker(tkr)
            tz = dat._get_ticker_tz()

            # The weekly duplication can only show up mid-week (Tue-Fri), so
            # skip Monday and weekends.
            dt = _tz.timezone(tz).localize(_dt.datetime.now())
            if dt.date().weekday() not in [1, 2, 3, 4]:
                continue
            test_run = True

            df = dat.history(start=dt.date() - _dt.timedelta(days=7), interval="1wk")
            dt0 = df.index[-2]
            dt1 = df.index[-1]
            try:
                self.assertNotEqual(dt0.week, dt1.week)
            except:
                print("Ticker={}: Last two rows within same week:".format(tkr))
                print(df.iloc[df.shape[0]-2:])
                raise

        if not test_run:
            self.skipTest("Skipping test_duplicatingWeekly() because not possible to fail Monday/weekend")


if __name__ == '__main__':
    unittest.main()
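For context, the condition these tests assert can also be checked outside the unittest harness. A minimal sketch, assuming a local yfinance install and network access (the ticker and period are arbitrary choices, not part of the commit):

    import yfinance as yf

    # With the duplication bug, the last two weekly rows could fall in the same
    # ISO week just after a live trading day; with the fix they should not.
    df = yf.Ticker("MSFT").history(period="3mo", interval="1wk")
    weeks = [ts.isocalendar()[1] for ts in df.index]
    assert len(weeks) < 2 or weeks[-1] != weeks[-2], "last two rows share a week"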


@@ -285,16 +285,15 @@ def fix_Yahoo_returning_live_separate(quotes, interval, tz_exchange):
                if "Adj Close" in quotes.columns:
                    quotes.loc[idx2,"Adj Close"] = quotes["Adj Close"][n-1]
                quotes.loc[idx2,"Volume"] += quotes["Volume"][n-1]
                quotes = quotes.iloc[0:n-1]
                n = quotes.shape[0]
                quotes = quotes.drop(quotes.index[n-1])
        # Similar bug in daily data except most data is simply duplicated
        # - exception is volume, *slightly* different on final row (and matches website)
        # - exception is volume, *slightly* greater on final row (and matches website)
        elif interval=="1d":
            if dt1.date() == dt2.date():
                # Last two rows are on same day. Drop second-to-last row
                quotes = quotes.drop(quotes.index[n-2])
                n = quotes.shape[0]

    return quotes
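As an aside, the merge-then-drop pattern this hunk settles on can be illustrated standalone. A minimal pandas sketch with toy data (not the library's code); the frame mimics a completed weekly bar plus a separate "live" row for the current day:

    import pandas as pd

    idx = pd.to_datetime(["2022-10-03", "2022-10-10", "2022-10-14"])
    quotes = pd.DataFrame({"Close": [10.0, 11.0, 11.5],
                           "Volume": [1000, 800, 50]}, index=idx)

    n = quotes.shape[0]
    idx2 = quotes.index[n-2]
    # Fold the live row's values into the prior bar...
    quotes.loc[idx2, "Close"] = quotes["Close"].iloc[n-1]
    quotes.loc[idx2, "Volume"] += quotes["Volume"].iloc[n-1]
    # ...then remove the live row by label rather than slicing by position.
    quotes = quotes.drop(quotes.index[n-1])

Dropping by label keeps the operation tied to the specific duplicated row, which appears to be the intent of the change in this hunk.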