Compare commits
11 Commits
fix/cache-
...
0.1.85
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
66af3080dd | ||
|
|
9d396b9559 | ||
|
|
23b6ad12c1 | ||
|
|
22131e9fc7 | ||
|
|
e99e61f95a | ||
|
|
a3fe95ea27 | ||
|
|
000cb70bcb | ||
|
|
c8d9d06e75 | ||
|
|
a5e07a0375 | ||
|
|
a0a12bcf4c | ||
|
|
42e5751705 |
@@ -1,6 +1,19 @@
|
||||
Change Log
|
||||
===========
|
||||
|
||||
0.1.85
|
||||
------
|
||||
- Fix info['logo_url'] #1062
|
||||
- Fix handling delisted ticker #1137
|
||||
|
||||
0.1.84
|
||||
------
|
||||
- Make tz-cache thread-safe
|
||||
|
||||
0.1.83
|
||||
------
|
||||
- Reduce spam-effect of tz-fetch
|
||||
|
||||
0.1.81
|
||||
------
|
||||
- Fix unhandled tz-cache exception #1107
|
||||
|
||||
@@ -28,6 +28,9 @@ class TestTicker(unittest.TestCase):
|
||||
history = ticker.history(period="max")
|
||||
assert(history.empty is False and history is not None)
|
||||
|
||||
histories = yf.download(symbols, period="1yr")
|
||||
assert(histories.empty is False and histories is not None)
|
||||
|
||||
def test_attributes(self):
|
||||
for ticker in tickers:
|
||||
ticker.isin
|
||||
|
||||
@@ -150,17 +150,14 @@ class TickerBase():
|
||||
|
||||
if start or period is None or period.lower() == "max":
|
||||
# Check can get TZ. Fail => probably delisted
|
||||
try:
|
||||
tz = self._get_ticker_tz()
|
||||
except KeyError as e:
|
||||
if "exchangeTimezoneName" in str(e):
|
||||
shared._DFS[self.ticker] = utils.empty_df()
|
||||
shared._ERRORS[self.ticker] = err_msg
|
||||
if "many" not in kwargs and debug_mode:
|
||||
print('- %s: %s' % (self.ticker, err_msg))
|
||||
return utils.empty_df()
|
||||
else:
|
||||
raise
|
||||
tz = self._get_ticker_tz(debug_mode, proxy, timeout)
|
||||
if tz is None:
|
||||
# Every valid ticker has a timezone. Missing = problem
|
||||
shared._DFS[self.ticker] = utils.empty_df()
|
||||
shared._ERRORS[self.ticker] = err_msg
|
||||
if "many" not in kwargs and debug_mode:
|
||||
print('- %s: %s' % (self.ticker, err_msg))
|
||||
return utils.empty_df()
|
||||
|
||||
if end is None:
|
||||
end = int(_time.time())
|
||||
@@ -331,14 +328,27 @@ class TickerBase():
|
||||
|
||||
# ------------------------
|
||||
|
||||
def _get_ticker_tz(self):
|
||||
def _get_ticker_tz(self, debug_mode, proxy, timeout):
|
||||
if not self._tz is None:
|
||||
return self._tz
|
||||
|
||||
tkr_tz = utils.cache_lookup_tkr_tz(self.ticker)
|
||||
|
||||
if tkr_tz is not None:
|
||||
invalid_value = isinstance(tkr_tz, str)
|
||||
if not invalid_value:
|
||||
try:
|
||||
_tz.timezone(tz)
|
||||
except:
|
||||
invalid_value = True
|
||||
if invalid_value:
|
||||
# Clear from cache and force re-fetch
|
||||
utils.cache_store_tkr_tz(self.ticker, None)
|
||||
tkr_tz = None
|
||||
|
||||
if tkr_tz is None:
|
||||
tkr_tz = self.info["exchangeTimezoneName"]
|
||||
# info fetch is relatively slow so cache timezone
|
||||
tkr_tz = self._fetch_ticker_tz(debug_mode, proxy, timeout)
|
||||
|
||||
try:
|
||||
utils.cache_store_tkr_tz(self.ticker, tkr_tz)
|
||||
except PermissionError:
|
||||
@@ -348,6 +358,48 @@ class TickerBase():
|
||||
self._tz = tkr_tz
|
||||
return tkr_tz
|
||||
|
||||
|
||||
def _fetch_ticker_tz(self, debug_mode, proxy, timeout):
|
||||
# Query Yahoo for basic price data just to get returned timezone
|
||||
|
||||
params = {"range":"1d", "interval":"1d"}
|
||||
|
||||
# setup proxy in requests format
|
||||
if proxy is not None:
|
||||
if isinstance(proxy, dict) and "https" in proxy:
|
||||
proxy = proxy["https"]
|
||||
proxy = {"https": proxy}
|
||||
|
||||
# Getting data from json
|
||||
url = "{}/v8/finance/chart/{}".format(self._base_url, self.ticker)
|
||||
|
||||
session = self.session or _requests
|
||||
try:
|
||||
data = session.get(url=url, params=params, proxies=proxy, headers=utils.user_agent_headers, timeout=timeout)
|
||||
data = data.json()
|
||||
except Exception as e:
|
||||
if debug_mode:
|
||||
print("Failed to get ticker '{}' reason: {}".format(self.ticker, e))
|
||||
return None
|
||||
else:
|
||||
error = data.get('chart', {}).get('error', None)
|
||||
if error:
|
||||
# explicit error from yahoo API
|
||||
if debug_mode:
|
||||
print("Got error from yahoo api for ticker {}, Error: {}".format(self.ticker, error))
|
||||
else:
|
||||
try:
|
||||
return data["chart"]["result"][0]["meta"]["exchangeTimezoneName"]
|
||||
except Exception as err:
|
||||
if debug_mode:
|
||||
print("Could not get exchangeTimezoneName for ticker '{}' reason: {}".format(self.ticker, err))
|
||||
print("Got response: ")
|
||||
print("-------------")
|
||||
print(" {}".format(data))
|
||||
print("-------------")
|
||||
return None
|
||||
|
||||
|
||||
def _get_info(self, proxy=None):
|
||||
# setup proxy in requests format
|
||||
if proxy is not None:
|
||||
@@ -425,9 +477,12 @@ class TickerBase():
|
||||
|
||||
self._info['logo_url'] = ""
|
||||
try:
|
||||
domain = self._info['website'].split(
|
||||
'://')[1].split('/')[0].replace('www.', '')
|
||||
self._info['logo_url'] = 'https://logo.clearbit.com/%s' % domain
|
||||
if not 'website' in self._info:
|
||||
self._info['logo_url'] = 'https://logo.clearbit.com/%s.com' % self._info['shortName'].split(' ')[0].split(',')[0]
|
||||
else:
|
||||
domain = self._info['website'].split(
|
||||
'://')[1].split('/')[0].replace('www.', '')
|
||||
self._info['logo_url'] = 'https://logo.clearbit.com/%s' % domain
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
@@ -31,6 +31,9 @@ import sys as _sys
|
||||
import os as _os
|
||||
import appdirs as _ad
|
||||
|
||||
from threading import Lock
|
||||
mutex = Lock()
|
||||
|
||||
try:
|
||||
import ujson as _json
|
||||
except ImportError:
|
||||
@@ -332,27 +335,36 @@ def cache_lookup_tkr_tz(tkr):
|
||||
if not _os.path.isfile(fp):
|
||||
return None
|
||||
|
||||
df = _pd.read_csv(fp)
|
||||
f = df["Ticker"] == tkr
|
||||
if sum(f) == 0:
|
||||
mutex.acquire()
|
||||
df = _pd.read_csv(fp, index_col="Ticker")
|
||||
mutex.release()
|
||||
if tkr in df.index:
|
||||
return df.loc[tkr,"Tz"]
|
||||
else:
|
||||
return None
|
||||
|
||||
return df["Tz"][f].iloc[0]
|
||||
def cache_store_tkr_tz(tkr,tz):
|
||||
df = _pd.DataFrame({"Ticker":[tkr], "Tz":[tz]})
|
||||
|
||||
dp = get_cache_dirpath()
|
||||
fp = _os.path.join(dp, "tkr-tz.csv")
|
||||
mutex.acquire()
|
||||
if not _os.path.isdir(dp):
|
||||
_os.makedirs(dp)
|
||||
fp = _os.path.join(dp, "tkr-tz.csv")
|
||||
if not _os.path.isfile(fp):
|
||||
df.to_csv(fp, index=False)
|
||||
return
|
||||
if (not _os.path.isfile(fp)) and (tz is not None):
|
||||
df = _pd.DataFrame({"Tz":[tz]}, index=[tkr])
|
||||
df.index.name = "Ticker"
|
||||
df.to_csv(fp)
|
||||
|
||||
df_all = _pd.read_csv(fp)
|
||||
f = df_all["Ticker"]==tkr
|
||||
if sum(f) > 0:
|
||||
raise Exception("Tkr {} tz already in cache".format(tkr))
|
||||
|
||||
_pd.concat([df_all,df]).to_csv(fp, index=False)
|
||||
else:
|
||||
df = _pd.read_csv(fp, index_col="Ticker")
|
||||
if tz is None:
|
||||
# Delete if in cache:
|
||||
if tkr in df.index:
|
||||
df.drop(tkr).to_csv(fp)
|
||||
else:
|
||||
if tkr in df.index:
|
||||
raise Exception("Tkr {} tz already in cache".format(tkr))
|
||||
df.loc[tkr,"Tz"] = tz
|
||||
df.to_csv(fp)
|
||||
|
||||
mutex.release()
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
version = "0.1.81"
|
||||
version = "0.1.85"
|
||||
|
||||
Reference in New Issue
Block a user