Compare commits

...

51 Commits

Author SHA1 Message Date
Value Raider
5b0cb60cf5 Bump version to 0.2.27 2023-08-03 21:24:07 +01:00
ValueRaider
1a97c22874 Merge pull request #1635 from ranaroussi/hotfix/prices-events-merge
Fix merging 1d-prices with out-of-range divs/splits
2023-08-03 21:20:29 +01:00
ValueRaider
b0de31da63 Merge pull request #1648 from ranaroussi/hotfix/tkr-tz-already-in-cache
Fix multithread error 'tz already in cache'
2023-08-03 18:02:51 +01:00
Value Raider
cc87608824 Fix multithread error 'tz already in cache' 2023-08-02 19:29:06 +01:00
ValueRaider
d0b2070036 Fix merging 1d-prices with out-of-range divs/splits 2023-07-23 15:20:57 +01:00
ValueRaider
ddf0cf19cd Bump version to 0.2.26 2023-07-21 12:56:10 +01:00
ValueRaider
1bd819ac4d Merge pull request #1371 from ranaroussi/hotfix/proxy
Fix proxy arg passthrough
2023-07-21 01:01:14 +01:00
ValueRaider
1b9fc5f12f Merge pull request #1625 from ricardoprins/main
Bump requests to 2.31 and removes cryptography.
2023-07-21 00:59:46 +01:00
Ricardo Prins
274f309052 Bump requests to 2.31 and removes cryptography. 2023-07-20 17:17:44 -06:00
ValueRaider
edac283a60 Merge pull request #1623 from ranaroussi/bug-report-yaml
Fix yaml issue rendering
2023-07-19 18:21:52 +01:00
ValueRaider
781fad501f Merge branch 'main' into bug-report-yaml 2023-07-19 18:21:44 +01:00
ValueRaider
39527d24d4 Fix yaml issue template rendering 2023-07-19 18:21:04 +01:00
ValueRaider
45f1c88460 yaml issue template - escape some backticks 2023-07-19 18:09:20 +01:00
ValueRaider
7d638e1040 Merge pull request #1613 from ranaroussi/bug-report-yaml
Convert issue template to yaml
2023-07-19 18:05:40 +01:00
ValueRaider
97b13dfa8c Convert issue template to yaml + improve 2023-07-19 18:01:47 +01:00
ValueRaider
693565a85b Bump version to 0.2.25 2023-07-18 13:45:55 +01:00
ValueRaider
957051e0e8 Merge pull request #1605 from ranaroussi/dev
sync dev -> main
2023-07-18 12:02:19 +01:00
ValueRaider
bd81ebb4e9 Merge pull request #1611 from ricardoprins/main
[BUG] Fix failure when using single ISIN as a ticker (#1525)
2023-07-18 10:55:18 +01:00
ValueRaider
46f53f9957 Port proxy fix to relocated 'FastInfo' 2023-07-17 18:34:00 +01:00
ValueRaider
056b84d8fe Merge branch 'main' into hotfix/proxy 2023-07-17 18:29:04 +01:00
Ricardo Prins
835dbd9629 Fix failure when using single ISIN as a ticker 2023-07-17 08:49:39 -06:00
ValueRaider
07a4594455 Dev version 0.2.25b1 2023-07-14 21:55:29 +01:00
ValueRaider
adfa2e9beb Merge pull request #1604 from ranaroussi/main
sync main -> dev
2023-07-14 20:11:45 +01:00
ValueRaider
b286797e8c Bump version to 0.2.24 2023-07-14 15:52:33 +01:00
ValueRaider
b306bef350 Merge pull request #1603 from ranaroussi/hotfix/info-missing-values
Fix info[] missing values
2023-07-14 15:51:23 +01:00
ValueRaider
61c89660df Optimise info fetch, improve test 2023-07-14 15:29:55 +01:00
Value Raider
31af2ab1d5 Fix recently-fixed info[] missing data 2023-07-13 22:20:42 +01:00
Value Raider
21c380fa61 Bump version to 0.2.23 2023-07-13 20:54:56 +01:00
ValueRaider
e0000cd787 Merge pull request #1595 from signifer-geo/bug20230714
Update quote.py
2023-07-13 20:51:33 +01:00
signifer-geo
11d43eb1a1 Update quote.py
dead code deleted
2023-07-14 04:29:59 +09:00
signifer-geo
509a109f29 Update quote.py
It fixes the error: unauthorized, invalid crumb
2023-07-14 03:11:24 +09:00
ValueRaider
b0639409a3 Merge pull request #1586 from ranaroussi/improve-readme
Emphasise API on Wiki
2023-07-10 15:26:31 +01:00
ValueRaider
ed10feee9a Merge pull request #1584 from lucas03/lukas/start-date-docs
update start parameter docstring
2023-07-06 21:20:17 +01:00
ValueRaider
aba81eedc2 Emphasise API on Wiki
More emphasis that user should review the Wiki for the full API for download() and Ticker.history()
2023-07-06 21:15:41 +01:00
Lukas Vojt
d424d027ac update docstrings for start parameter
requested here
https://github.com/ranaroussi/yfinance/pull/1576#issuecomment-1616599633
2023-07-06 08:17:07 +00:00
ValueRaider
9268fcfa76 Merge pull request #1545 from SnoozeFreddo/main
fix: Readme cache-ratelimit. Limiter parenthesis was never closed
2023-06-27 13:54:08 +01:00
ValueRaider
711e1138d3 Merge pull request #1576 from lucas03/lukas/start-date
fix start date on history
2023-06-27 12:32:17 +01:00
Lukas Vojt
0789b690a4 fix: start year on history
timestamp of 1900 is older than 100 years,
so yahoo responds with error:

GDEVW: 1d data not available for startTime=-2208994789 and
endTime=1687780922. Only 100 years worth of day granularity data are
allowed to be fetched per request.

this should fix it,
something similar was proposed here:
https://github.com/ranaroussi/yfinance/pull/648

 # Please enter the commit message for
your changes. Lines starting
2023-06-26 18:43:35 +02:00
Konstantinos Ftikas
762abd8bba fix: Readme cache-ratelimit. Limiter parenthesis was never closed
The example in the docs will not work out of the box due to a syntax error.
2023-06-03 14:19:31 +02:00
ValueRaider
f8aab533ba Merge branch 'main' into hotfix/proxy 2023-02-08 13:52:34 +00:00
ValueRaider
5cdc78f479 Merge pull request #1398 from vidalmarco/patch-1
get_shares_full does not work with proxy
2023-02-05 10:59:33 +00:00
Marco Vidal
ba634fad0e get_shares_full does not work with proxy
Error: "Yahoo web request for share count failed" 
updated cache_get call by adding proxy parameter and by calling it by keyword
2023-02-05 09:17:22 +01:00
ValueRaider
8a5ca71f52 Fix holders.py proxy pass-through 2023-02-05 00:06:49 +00:00
ValueRaider
141ce7e471 Fix proxy + cache_get. Improve error propagation 2023-02-01 21:19:54 +00:00
ValueRaider
4eae728a06 Potential fix for proxy - enable #2 2023-02-01 19:17:18 +00:00
ValueRaider
2d6b6b26ed Potential fix for proxy - enable 2023-02-01 19:04:47 +00:00
ValueRaider
ec3dfaf305 Potential fix for proxy - revert 2023-02-01 18:10:45 +00:00
ValueRaider
e89d390824 Potential fix for proxy 2023-02-01 18:09:51 +00:00
ValueRaider
563a1a3448 Add Ticker test for proxy 2023-02-01 17:28:57 +00:00
ValueRaider
2e6d3d0e60 Fix proxy in 'history()' 2023-02-01 17:06:23 +00:00
ValueRaider
553bc5965a Fix proxy arg passthrough 2023-01-28 23:07:19 +00:00
21 changed files with 383 additions and 123 deletions

View File

@@ -1,44 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
# IMPORTANT
# Read and follow these instructions carefully. Help us help you.
### Are you up-to-date?
Upgrade to the latest version and confirm the issue/bug is still there.
`$ pip install yfinance --upgrade --no-cache-dir`
Confirm by running:
`import yfinance as yf ; print(yf.__version__)`
and comparing against [PIP](https://pypi.org/project/yfinance/#history).
### Does Yahoo actually have the data?
Are you spelling symbol *exactly* same as Yahoo?
Then visit `finance.yahoo.com` and confirm they have the data you want. Maybe your symbol was delisted, or your expectations of `yfinance` are wrong.
### Are you spamming Yahoo?
Yahoo Finance free service has rate-limiting depending on request type - roughly 60/minute for prices, 10/minute for info. Once limit hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.
### Still think it's a bug?
**Delete these instructions** and replace with your bug report, providing the following as best you can:
- Simple code that reproduces your problem, that we can copy-paste-run.
- Run code with [debug logging enabled](https://github.com/ranaroussi/yfinance#logging) and post the full output.
- If you think `yfinance` returning bad data, give us proof.
- `yfinance` version and Python version.
- Operating system type.

88
.github/ISSUE_TEMPLATE/bug_report.yaml vendored Normal file
View File

@@ -0,0 +1,88 @@
name: Bug report
description: Report a bug in our project
labels: ["bug"]
body:
- type: markdown
attributes:
value: |
# IMPORTANT - Read and follow these instructions carefully. Help us help you.
### Does issue already exist?
Use the search tool. Don't annoy everyone by duplicating existing Issues.
### Are you up-to-date?
Upgrade to the latest version and confirm the issue/bug is still there.
`$ pip install yfinance --upgrade --no-cache-dir`
Confirm by running:
`import yfinance as yf ; print(yf.__version__)`
and comparing against [PIP](https://pypi.org/project/yfinance/#history).
### Does Yahoo actually have the data?
Are you spelling symbol *exactly* same as Yahoo?
Then visit `finance.yahoo.com` and confirm they have the data you want. Maybe your symbol was delisted, or your expectations of `yfinance` are wrong.
### Are you spamming Yahoo?
Yahoo Finance free service has rate-limiting https://github.com/ranaroussi/yfinance/discussions/1513. Once limit hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.
- type: markdown
attributes:
value: |
---
## Still think it's a bug?
Provide the following as best you can:
- type: textarea
id: code
attributes:
label: "Simple code that reproduces your problem"
description: "Provide a snippet of code that we can copy-paste-run. Wrap code in Python Markdown code blocks for proper formatting (```` ```python ... ``` ````)."
validations:
required: true
- type: textarea
id: debug-log
attributes:
label: "Debug log"
description: "Run code with debug logging enabled and post the full output. Instructions: https://github.com/ranaroussi/yfinance/tree/main#logging"
validations:
required: true
- type: textarea
id: bad-data-proof
attributes:
label: "Bad data proof"
description: "If you think `yfinance` returning bad data, provide your proof here."
validations:
required: false
- type: input
id: version-yfinance
attributes:
label: "`yfinance` version"
validations:
required: true
- type: input
id: version-python
attributes:
label: "Python version"
validations:
required: false
- type: input
id: os
attributes:
label: "Operating system"
validations:
required: false

1
.gitignore vendored
View File

@@ -4,6 +4,7 @@ dist
yfinance.egg-info
*.pyc
.coverage
.idea/
.vscode/
build/
*.html

View File

@@ -1,6 +1,31 @@
Change Log
===========
0.2.27
------
Bug fixes:
- fix merging 1d-prices with out-of-range divs/splits #1635
- fix multithread error 'tz already in cache' #1648
0.2.26
------
Proxy improvements
- bug fixes #1371
- security fix #1625
0.2.25
------
Fix single ISIN as ticker #1611
Fix 'Only 100 years allowed' error #1576
0.2.24
------
Fix info[] missing values #1603
0.2.23
------
Fix 'Unauthorized' error #1595
0.2.22
------
Fix unhandled 'sqlite3.DatabaseError' #1574

View File

@@ -144,20 +144,10 @@ To download price history into one table:
```python
import yfinance as yf
data = yf.download("SPY AAPL", start="2017-01-01", end="2017-04-30")
data = yf.download("SPY AAPL", period="1mo")
```
`yf.download()` and `Ticker.history()` have many options for configuring fetching and processing, e.g.:
```python
yf.download(tickers = "SPY AAPL", # list of tickers
period = "1y", # time period
interval = "1d", # trading interval
prepost = False, # download pre/post market hours data?
repair = True) # repair obvious price errors e.g. 100x?
```
Review the [Wiki](https://github.com/ranaroussi/yfinance/wiki) for more options and detail.
#### `yf.download()` and `Ticker.history()` have many options for configuring fetching and processing. [Review the Wiki](https://github.com/ranaroussi/yfinance/wiki) for more options and detail.
### Logging
@@ -188,7 +178,7 @@ class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
pass
session = CachedLimiterSession(
limiter=Limiter(RequestRate(2, Duration.SECOND*5), # max 2 requests per 5 seconds
limiter=Limiter(RequestRate(2, Duration.SECOND*5)), # max 2 requests per 5 seconds
bucket_class=MemoryQueueBucket,
backend=SQLiteCache("yfinance.cache"),
)
@@ -261,14 +251,13 @@ To install `yfinance` using `conda`, see
- [Python](https://www.python.org) \>= 2.7, 3.4+
- [Pandas](https://github.com/pydata/pandas) \>= 1.3.0
- [Numpy](http://www.numpy.org) \>= 1.16.5
- [requests](http://docs.python-requests.org/en/master) \>= 2.26
- [requests](http://docs.python-requests.org/en/master) \>= 2.31
- [lxml](https://pypi.org/project/lxml) \>= 4.9.1
- [appdirs](https://pypi.org/project/appdirs) \>= 1.4.4
- [pytz](https://pypi.org/project/pytz) \>=2022.5
- [frozendict](https://pypi.org/project/frozendict) \>= 2.3.4
- [beautifulsoup4](https://pypi.org/project/beautifulsoup4) \>= 4.11.1
- [html5lib](https://pypi.org/project/html5lib) \>= 1.1
- [cryptography](https://pypi.org/project/cryptography) \>= 3.3.2
#### Optional (if you want to use `pandas_datareader`)

View File

@@ -1,5 +1,5 @@
{% set name = "yfinance" %}
{% set version = "0.2.22" %}
{% set version = "0.2.27" %}
package:
name: "{{ name|lower }}"
@@ -18,7 +18,7 @@ requirements:
host:
- pandas >=1.3.0
- numpy >=1.16.5
- requests >=2.26
- requests >=2.31
- multitasking >=0.0.7
- lxml >=4.9.1
- appdirs >=1.4.4
@@ -27,14 +27,13 @@ requirements:
- beautifulsoup4 >=4.11.1
- html5lib >=1.1
# - pycryptodome >=3.6.6
- cryptography >=3.3.2
- pip
- python
run:
- pandas >=1.3.0
- numpy >=1.16.5
- requests >=2.26
- requests >=2.31
- multitasking >=0.0.7
- lxml >=4.9.1
- appdirs >=1.4.4
@@ -43,7 +42,6 @@ requirements:
- beautifulsoup4 >=4.11.1
- html5lib >=1.1
# - pycryptodome >=3.6.6
- cryptography >=3.3.2
- python
test:

View File

@@ -1,6 +1,6 @@
pandas>=1.3.0
numpy>=1.16.5
requests>=2.26
requests>=2.31
multitasking>=0.0.7
lxml>=4.9.1
appdirs>=1.4.4
@@ -8,4 +8,3 @@ pytz>=2022.5
frozendict>=2.3.4
beautifulsoup4>=4.11.1
html5lib>=1.1
cryptography>=3.3.2

View File

@@ -60,7 +60,7 @@ setup(
keywords='pandas, yahoo finance, pandas datareader',
packages=find_packages(exclude=['contrib', 'docs', 'tests', 'examples']),
install_requires=['pandas>=1.3.0', 'numpy>=1.16.5',
'requests>=2.26', 'multitasking>=0.0.7',
'requests>=2.31', 'multitasking>=0.0.7',
'lxml>=4.9.1', 'appdirs>=1.4.4', 'pytz>=2022.5',
'frozendict>=2.3.4',
'beautifulsoup4>=4.11.1', 'html5lib>=1.1'],

View File

@@ -223,6 +223,15 @@ class TestPriceHistory(unittest.TestCase):
print("{}-without-events missing these dates: {}".format(tkr, missing_from_df2))
raise
# Reproduce issue #1634 - 1d dividend out-of-range, should be prepended to prices
div_dt = _pd.Timestamp(2022, 7, 21).tz_localize("America/New_York")
df_dividends = _pd.DataFrame(data={"Dividends":[1.0]}, index=[div_dt])
df_prices = _pd.DataFrame(data={c:[1.0] for c in yf.const.price_colnames}|{'Volume':0}, index=[div_dt+_dt.timedelta(days=1)])
df_merged = yf.utils.safe_merge_dfs(df_prices, df_dividends, '1d')
self.assertEqual(df_merged.shape[0], 2)
self.assertTrue(df_merged[df_prices.columns].iloc[1:].equals(df_prices))
self.assertEqual(df_merged.index[0], div_dt)
def test_intraDayWithEvents(self):
tkrs = ["BHP.AX", "IMP.JO", "BP.L", "PNL.L", "INTC"]
test_run = False

View File

@@ -25,6 +25,8 @@ class TestTicker(unittest.TestCase):
def setUpClass(cls):
cls.session = session_gbl
cls.proxy = None
@classmethod
def tearDownClass(cls):
if cls.session is not None:
@@ -144,6 +146,129 @@ class TestTicker(unittest.TestCase):
# dat.earnings_trend
# dat.earnings_forecasts
def test_goodTicker_withProxy(self):
# that yfinance works when full api is called on same instance of ticker
tkr = "IBM"
dat = yf.Ticker(tkr, session=self.session)
dat._fetch_ticker_tz(proxy=self.proxy, timeout=5, debug_mode=False, raise_errors=False)
dat._get_ticker_tz(proxy=self.proxy, timeout=5, debug_mode=False, raise_errors=False)
dat.history(period="1wk", proxy=self.proxy)
v = dat.stats(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(len(v) > 0)
v = dat.get_recommendations(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_calendar(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_major_holders(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_institutional_holders(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_mutualfund_holders(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_info(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(len(v) > 0)
v = dat.get_sustainability(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_recommendations_summary(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_analyst_price_target(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_rev_forecast(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_earnings_forecast(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_trend_details(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_earnings_trend(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_earnings(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_income_stmt(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_incomestmt(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_financials(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_balance_sheet(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_balancesheet(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_cash_flow(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_cashflow(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_shares(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_shares_full(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
v = dat.get_isin(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(v != "")
v = dat.get_news(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertTrue(len(v) > 0)
v = dat.get_earnings_dates(proxy=self.proxy)
self.assertIsNotNone(v)
self.assertFalse(v.empty)
# TODO: enable after merge
# dat.get_history_metadata(proxy=self.proxy)
# self.assertIsNotNone(v)
# self.assertTrue(len(v) > 0)
class TestTickerHistory(unittest.TestCase):
session = None
@@ -692,7 +817,10 @@ class TestTickerInfo(unittest.TestCase):
def test_info(self):
data = self.tickers[0].info
self.assertIsInstance(data, dict, "data has wrong type")
self.assertIn("symbol", data.keys(), "Did not find expected key in info dict")
expected_keys = ['industry', 'currentPrice', 'exchange', 'floatShares', 'companyOfficers', 'bid']
for k in expected_keys:
print(k)
self.assertIn("symbol", data.keys(), f"Did not find expected key '{k}' in info dict")
self.assertEqual(self.symbols[0], data["symbol"], "Wrong symbol value in info dict")
# def test_fast_info_matches_info(self):

51
tests/utils.py Normal file
View File

@@ -0,0 +1,51 @@
"""
Tests for utils
To run all tests in suite from commandline:
python -m unittest tests.utils
Specific test class:
python -m unittest tests.utils.TestTicker
"""
# import pandas as pd
# import numpy as np
from .context import yfinance as yf
from .context import session_gbl
import unittest
# import requests_cache
import tempfile
class TestUtils(unittest.TestCase):
session = None
@classmethod
def setUpClass(cls):
cls.tempCacheDir = tempfile.TemporaryDirectory()
yf.set_tz_cache_location(cls.tempCacheDir.name)
@classmethod
def tearDownClass(cls):
cls.tempCacheDir.cleanup()
def test_storeTzNoRaise(self):
# storing TZ to cache should never raise exception
tkr = 'AMZN'
tz1 = "America/New_York"
tz2 = "London/Europe"
cache = yf.utils.get_tz_cache()
cache.store(tkr, tz1)
cache.store(tkr, tz2)
def suite():
suite = unittest.TestSuite()
suite.addTest(TestUtils('Test utils'))
return suite
if __name__ == '__main__':
unittest.main()

View File

@@ -101,7 +101,7 @@ class TickerBase:
Intraday data cannot extend last 60 days
start: str
Download start date string (YYYY-MM-DD) or _datetime, inclusive.
Default is 1900-01-01
Default is 99 years ago
E.g. for start="2020-01-01", the first data point will be on "2020-01-01"
end: str
Download end date string (YYYY-MM-DD) or _datetime, exclusive.
@@ -170,8 +170,8 @@ class TickerBase:
if interval == "1m":
start = end - 604800 # Subtract 7 days
else:
_UNIX_TIMESTAMP_1900 = -2208994789
start = _UNIX_TIMESTAMP_1900
max_start_datetime = pd.Timestamp.utcnow().floor("D") - _datetime.timedelta(days=99 * 365)
start = int(max_start_datetime.timestamp())
else:
start = utils._parse_user_dt(start, tz)
params = {"period1": start, "period2": end}
@@ -186,12 +186,6 @@ class TickerBase:
if params["interval"] == "30m":
params["interval"] = "15m"
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
#if the ticker is MUTUALFUND or ETF, then get capitalGains events
params["events"] = "div,splits,capitalGains"
@@ -217,6 +211,7 @@ class TickerBase:
data = get_fn(
url=url,
params=params,
proxy=proxy,
timeout=timeout
)
if "Will be right back" in data.text or data is None:
@@ -1502,9 +1497,9 @@ class TickerBase:
return data
@property
def fast_info(self):
def get_fast_info(self, proxy=None):
if self._fast_info is None:
self._fast_info = FastInfo(self)
self._fast_info = FastInfo(self, proxy=proxy)
return self._fast_info
@property
@@ -1723,7 +1718,7 @@ class TickerBase:
logger = utils.get_yf_logger()
# Process dates
tz = self._get_ticker_tz(proxy=None, timeout=10)
tz = self._get_ticker_tz(proxy=proxy, timeout=10)
dt_now = _pd.Timestamp.utcnow().tz_convert(tz)
if start is not None:
start_ts = utils._parse_user_dt(start, tz)
@@ -1747,8 +1742,8 @@ class TickerBase:
ts_url_base = "https://query2.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{0}?symbol={0}".format(self.ticker)
shares_url = ts_url_base + "&period1={}&period2={}".format(int(start.timestamp()), int(end.timestamp()))
try:
json_str = self._data.cache_get(shares_url).text
json_data = _json.loads(json_str)
json_data = self._data.cache_get(url=shares_url, proxy=proxy)
json_data = json_data.json()
except:
logger.error("%s: Yahoo web request for share count failed", self.ticker)
return None
@@ -1921,10 +1916,10 @@ class TickerBase:
return dates
def get_history_metadata(self) -> dict:
def get_history_metadata(self, proxy=None) -> dict:
if self._history_metadata is None:
# Request intraday data, because then Yahoo returns exchange schedule.
self.history(period="1wk", interval="1h", prepost=True)
self.history(period="1wk", interval="1h", prepost=True, proxy=proxy)
if self._history_metadata_formatted is False:
self._history_metadata = utils.format_history_metadata(self._history_metadata)

View File

@@ -6,3 +6,5 @@ fundamentals_keys['financials'] = ["TaxEffectOfUnusualItems","TaxRateForCalcs","
fundamentals_keys['balance-sheet'] = ["TreasurySharesNumber","PreferredSharesNumber","OrdinarySharesNumber","ShareIssued","NetDebt","TotalDebt","TangibleBookValue","InvestedCapital","WorkingCapital","NetTangibleAssets","CapitalLeaseObligations","CommonStockEquity","PreferredStockEquity","TotalCapitalization","TotalEquityGrossMinorityInterest","MinorityInterest","StockholdersEquity","OtherEquityInterest","GainsLossesNotAffectingRetainedEarnings","OtherEquityAdjustments","FixedAssetsRevaluationReserve","ForeignCurrencyTranslationAdjustments","MinimumPensionLiabilities","UnrealizedGainLoss","TreasuryStock","RetainedEarnings","AdditionalPaidInCapital","CapitalStock","OtherCapitalStock","CommonStock","PreferredStock","TotalPartnershipCapital","GeneralPartnershipCapital","LimitedPartnershipCapital","TotalLiabilitiesNetMinorityInterest","TotalNonCurrentLiabilitiesNetMinorityInterest","OtherNonCurrentLiabilities","LiabilitiesHeldforSaleNonCurrent","RestrictedCommonStock","PreferredSecuritiesOutsideStockEquity","DerivativeProductLiabilities","EmployeeBenefits","NonCurrentPensionAndOtherPostretirementBenefitPlans","NonCurrentAccruedExpenses","DuetoRelatedPartiesNonCurrent","TradeandOtherPayablesNonCurrent","NonCurrentDeferredLiabilities","NonCurrentDeferredRevenue","NonCurrentDeferredTaxesLiabilities","LongTermDebtAndCapitalLeaseObligation","LongTermCapitalLeaseObligation","LongTermDebt","LongTermProvisions","CurrentLiabilities","OtherCurrentLiabilities","CurrentDeferredLiabilities","CurrentDeferredRevenue","CurrentDeferredTaxesLiabilities","CurrentDebtAndCapitalLeaseObligation","CurrentCapitalLeaseObligation","CurrentDebt","OtherCurrentBorrowings","LineOfCredit","CommercialPaper","CurrentNotesPayable","PensionandOtherPostRetirementBenefitPlansCurrent","CurrentProvisions","PayablesAndAccruedExpenses","CurrentAccruedExpenses","InterestPayable","Payables","OtherPayable","DuetoRelatedPartiesCurrent","DividendsPayable","TotalTaxPayable","IncomeTaxPayable","AccountsPayable","TotalAssets","TotalNonCurrentAssets","OtherNonCurrentAssets","DefinedPensionBenefit","NonCurrentPrepaidAssets","NonCurrentDeferredAssets","NonCurrentDeferredTaxesAssets","DuefromRelatedPartiesNonCurrent","NonCurrentNoteReceivables","NonCurrentAccountsReceivable","FinancialAssets","InvestmentsAndAdvances","OtherInvestments","InvestmentinFinancialAssets","HeldToMaturitySecurities","AvailableForSaleSecurities","FinancialAssetsDesignatedasFairValueThroughProfitorLossTotal","TradingSecurities","LongTermEquityInvestment","InvestmentsinJointVenturesatCost","InvestmentsInOtherVenturesUnderEquityMethod","InvestmentsinAssociatesatCost","InvestmentsinSubsidiariesatCost","InvestmentProperties","GoodwillAndOtherIntangibleAssets","OtherIntangibleAssets","Goodwill","NetPPE","AccumulatedDepreciation","GrossPPE","Leases","ConstructionInProgress","OtherProperties","MachineryFurnitureEquipment","BuildingsAndImprovements","LandAndImprovements","Properties","CurrentAssets","OtherCurrentAssets","HedgingAssetsCurrent","AssetsHeldForSaleCurrent","CurrentDeferredAssets","CurrentDeferredTaxesAssets","RestrictedCash","PrepaidAssets","Inventory","InventoriesAdjustmentsAllowances","OtherInventories","FinishedGoods","WorkInProcess","RawMaterials","Receivables","ReceivablesAdjustmentsAllowances","OtherReceivables","DuefromRelatedPartiesCurrent","TaxesReceivable","AccruedInterestReceivable","NotesReceivable","LoansReceivable","AccountsReceivable","AllowanceForDoubtfulAccountsReceivable","GrossAccountsReceivable","CashCashEquivalentsAndShortTermInvestments","OtherShortTermInvestments","CashAndCashEquivalents","CashEquivalents","CashFinancial"]
fundamentals_keys['cash-flow'] = ["ForeignSales","DomesticSales","AdjustedGeographySegmentData","FreeCashFlow","RepurchaseOfCapitalStock","RepaymentOfDebt","IssuanceOfDebt","IssuanceOfCapitalStock","CapitalExpenditure","InterestPaidSupplementalData","IncomeTaxPaidSupplementalData","EndCashPosition","OtherCashAdjustmentOutsideChangeinCash","BeginningCashPosition","EffectOfExchangeRateChanges","ChangesInCash","OtherCashAdjustmentInsideChangeinCash","CashFlowFromDiscontinuedOperation","FinancingCashFlow","CashFromDiscontinuedFinancingActivities","CashFlowFromContinuingFinancingActivities","NetOtherFinancingCharges","InterestPaidCFF","ProceedsFromStockOptionExercised","CashDividendsPaid","PreferredStockDividendPaid","CommonStockDividendPaid","NetPreferredStockIssuance","PreferredStockPayments","PreferredStockIssuance","NetCommonStockIssuance","CommonStockPayments","CommonStockIssuance","NetIssuancePaymentsOfDebt","NetShortTermDebtIssuance","ShortTermDebtPayments","ShortTermDebtIssuance","NetLongTermDebtIssuance","LongTermDebtPayments","LongTermDebtIssuance","InvestingCashFlow","CashFromDiscontinuedInvestingActivities","CashFlowFromContinuingInvestingActivities","NetOtherInvestingChanges","InterestReceivedCFI","DividendsReceivedCFI","NetInvestmentPurchaseAndSale","SaleOfInvestment","PurchaseOfInvestment","NetInvestmentPropertiesPurchaseAndSale","SaleOfInvestmentProperties","PurchaseOfInvestmentProperties","NetBusinessPurchaseAndSale","SaleOfBusiness","PurchaseOfBusiness","NetIntangiblesPurchaseAndSale","SaleOfIntangibles","PurchaseOfIntangibles","NetPPEPurchaseAndSale","SaleOfPPE","PurchaseOfPPE","CapitalExpenditureReported","OperatingCashFlow","CashFromDiscontinuedOperatingActivities","CashFlowFromContinuingOperatingActivities","TaxesRefundPaid","InterestReceivedCFO","InterestPaidCFO","DividendReceivedCFO","DividendPaidCFO","ChangeInWorkingCapital","ChangeInOtherWorkingCapital","ChangeInOtherCurrentLiabilities","ChangeInOtherCurrentAssets","ChangeInPayablesAndAccruedExpense","ChangeInAccruedExpense","ChangeInInterestPayable","ChangeInPayable","ChangeInDividendPayable","ChangeInAccountPayable","ChangeInTaxPayable","ChangeInIncomeTaxPayable","ChangeInPrepaidAssets","ChangeInInventory","ChangeInReceivables","ChangesInAccountReceivables","OtherNonCashItems","ExcessTaxBenefitFromStockBasedCompensation","StockBasedCompensation","UnrealizedGainLossOnInvestmentSecurities","ProvisionandWriteOffofAssets","AssetImpairmentCharge","AmortizationOfSecurities","DeferredTax","DeferredIncomeTax","DepreciationAmortizationDepletion","Depletion","DepreciationAndAmortization","AmortizationCashFlow","AmortizationOfIntangibles","Depreciation","OperatingGainsLosses","PensionAndEmployeeBenefitExpense","EarningsLossesFromEquityInvestments","GainLossOnInvestmentSecurities","NetForeignCurrencyExchangeGainLoss","GainLossOnSaleOfPPE","GainLossOnSaleOfBusiness","NetIncomeFromContinuingOperations","CashFlowsfromusedinOperatingActivitiesDirect","TaxesRefundPaidDirect","InterestReceivedDirect","InterestPaidDirect","DividendsReceivedDirect","DividendsPaidDirect","ClassesofCashPayments","OtherCashPaymentsfromOperatingActivities","PaymentsonBehalfofEmployees","PaymentstoSuppliersforGoodsandServices","ClassesofCashReceiptsfromOperatingActivities","OtherCashReceiptsfromOperatingActivities","ReceiptsfromGovernmentGrants","ReceiptsfromCustomers"]
price_colnames = ['Open', 'High', 'Low', 'Close', 'Adj Close']

View File

@@ -65,7 +65,7 @@ class TickerData:
def _get_proxy(self, proxy):
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
if isinstance(proxy, (dict, frozendict)) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
return proxy

View File

@@ -47,7 +47,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
Intraday data cannot extend last 60 days
start: str
Download start date string (YYYY-MM-DD) or _datetime, inclusive.
Default is 1900-01-01
Default is 99 years ago
E.g. for start="2020-01-01", the first data point will be on "2020-01-01"
end: str
Download end date string (YYYY-MM-DD) or _datetime, exclusive.
@@ -207,7 +207,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
if len(tickers) == 1:
ticker = tickers[0]
return shared._DFS[shared._ISINS.get(ticker, ticker)]
return shared._DFS[ticker]
try:
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,

View File

@@ -51,19 +51,19 @@ class Financials:
def get_income_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
res = self._income_time_series
if freq not in res:
res[freq] = self._fetch_time_series("income", freq, proxy=None)
res[freq] = self._fetch_time_series("income", freq, proxy)
return res[freq]
def get_balance_sheet_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
res = self._balance_sheet_time_series
if freq not in res:
res[freq] = self._fetch_time_series("balance-sheet", freq, proxy=None)
res[freq] = self._fetch_time_series("balance-sheet", freq, proxy)
return res[freq]
def get_cash_flow_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
res = self._cash_flow_time_series
if freq not in res:
res[freq] = self._fetch_time_series("cash-flow", freq, proxy=None)
res[freq] = self._fetch_time_series("cash-flow", freq, proxy)
return res[freq]
@utils.log_indent_decorator

View File

@@ -34,7 +34,7 @@ class Holders:
def _scrape(self, proxy):
ticker_url = "{}/{}".format(self._SCRAPE_URL_, self._data.ticker)
try:
resp = self._data.cache_get(ticker_url + '/holders', proxy)
resp = self._data.cache_get(ticker_url + '/holders', proxy=proxy)
holders = pd.read_html(resp.text)
except Exception:
holders = []

View File

@@ -20,8 +20,7 @@ info_retired_keys_symbol = {"symbol"}
info_retired_keys = info_retired_keys_price | info_retired_keys_exchange | info_retired_keys_marketCap | info_retired_keys_symbol
_BASIC_URL_ = "https://query2.finance.yahoo.com/v10/finance/quoteSummary"
_BASIC_URL_ = "https://query2.finance.yahoo.com/v6/finance/quoteSummary"
from collections.abc import MutableMapping
class InfoDictWrapper(MutableMapping):
@@ -78,8 +77,9 @@ class InfoDictWrapper(MutableMapping):
class FastInfo:
# Contain small subset of info[] items that can be fetched faster elsewhere.
# Imitates a dict.
def __init__(self, tickerBaseObject):
def __init__(self, tickerBaseObject, proxy=None):
self._tkr = tickerBaseObject
self.proxy = proxy
self._prices_1y = None
self._prices_1wk_1h_prepost = None
@@ -175,9 +175,9 @@ class FastInfo:
if self._prices_1y is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1y = self._tkr.history(period="380d", auto_adjust=False, keepna=True)
self._prices_1y = self._tkr.history(period="380d", auto_adjust=False, keepna=True, proxy=self.proxy)
logging.disable(logging.NOTSET)
self._md = self._tkr.get_history_metadata()
self._md = self._tkr.get_history_metadata(proxy=self.proxy)
try:
ctp = self._md["currentTradingPeriod"]
self._today_open = pd.to_datetime(ctp["regular"]["start"], unit='s', utc=True).tz_convert(self.timezone)
@@ -204,7 +204,7 @@ class FastInfo:
if self._prices_1wk_1h_prepost is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1wk_1h_prepost = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=True)
self._prices_1wk_1h_prepost = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=True, proxy=self.proxy)
logging.disable(logging.NOTSET)
return self._prices_1wk_1h_prepost
@@ -212,7 +212,7 @@ class FastInfo:
if self._prices_1wk_1h_reg is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1wk_1h_reg = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=False)
self._prices_1wk_1h_reg = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=False, proxy=self.proxy)
logging.disable(logging.NOTSET)
return self._prices_1wk_1h_reg
@@ -221,7 +221,7 @@ class FastInfo:
return self._md
self._get_1y_prices()
self._md = self._tkr.get_history_metadata()
self._md = self._tkr.get_history_metadata(proxy=self.proxy)
return self._md
def _exchange_open_now(self):
@@ -254,7 +254,7 @@ class FastInfo:
if self._tkr._history_metadata is None:
self._get_1y_prices()
md = self._tkr.get_history_metadata()
md = self._tkr.get_history_metadata(proxy=self.proxy)
self._currency = md["currency"]
return self._currency
@@ -265,7 +265,7 @@ class FastInfo:
if self._tkr._history_metadata is None:
self._get_1y_prices()
md = self._tkr.get_history_metadata()
md = self._tkr.get_history_metadata(proxy=self.proxy)
self._quote_type = md["instrumentType"]
return self._quote_type
@@ -290,7 +290,7 @@ class FastInfo:
if self._shares is not None:
return self._shares
shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date()-pd.Timedelta(days=548))
shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date()-pd.Timedelta(days=548), proxy=self.proxy)
# if shares is None:
# # Requesting 18 months failed, so fallback to shares which should include last year
# shares = self._tkr.get_shares()
@@ -587,10 +587,12 @@ class Quote:
if self._already_fetched:
return
self._already_fetched = True
modules = ['summaryProfile', 'financialData', 'quoteType',
'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
modules = ['financialData', 'quoteType', 'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
params_dict = {}
params_dict["modules"] = modules
params_dict["ssl"] = "true"
result = self._data.get_raw_json(
_BASIC_URL_ + f"/{self._data.ticker}", params={"modules": ",".join(modules), "ssl": "true"}, proxy=proxy
_BASIC_URL_ + f"/{self._data.ticker}", params=params_dict, proxy=proxy
)
result["quoteSummary"]["result"][0]["symbol"] = self._data.ticker
query1_info = next(

View File

@@ -137,6 +137,10 @@ class Ticker(TickerBase):
def info(self) -> dict:
return self.get_info()
@property
def fast_info(self):
return self.get_fast_info()
@property
def calendar(self) -> _pd.DataFrame:
return self.get_calendar()

View File

@@ -21,6 +21,8 @@
from __future__ import print_function
from yfinance import const
import datetime as _datetime
import dateutil as _dateutil
from typing import Dict, Union, List, Optional
@@ -689,21 +691,28 @@ def safe_merge_dfs(df_main, df_sub, interval):
f_outOfRange = indices == -1
if f_outOfRange.any() and not intraday:
# If dividend is occurring in next interval after last price row,
# add a new row of NaNs
last_dt = df_main.index[-1]
next_interval_start_dt = last_dt + td
empty_row_data = {c:[_np.nan] for c in const.price_colnames}|{'Volume':[0]}
if interval == '1d':
# Allow for weekends & holidays
next_interval_end_dt = last_dt+7*_pd.Timedelta(days=7)
else:
next_interval_end_dt = next_interval_start_dt + td
for i in _np.where(f_outOfRange)[0]:
dt = df_sub.index[i]
if dt >= next_interval_start_dt and dt < next_interval_end_dt:
new_dt = dt if interval == '1d' else next_interval_start_dt
# For 1d, add all out-of-range event dates
for i in _np.where(f_outOfRange)[0]:
dt = df_sub.index[i]
get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
df_main.loc[new_dt] = _np.nan
empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
df_main = _pd.concat([df_main, empty_row], sort=True)
else:
# Else, only add out-of-range event dates if occurring in interval
# immediately after last price row
last_dt = df_main.index[-1]
next_interval_start_dt = last_dt + td
next_interval_end_dt = next_interval_start_dt + td
for i in _np.where(f_outOfRange)[0]:
dt = df_sub.index[i]
if dt >= next_interval_start_dt and dt < next_interval_end_dt:
new_dt = next_interval_start_dt
get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
df_main = _pd.concat([df_main, empty_row], sort=True)
df_main = df_main.sort_index()
# Re-calculate indices
indices = _np.searchsorted(_np.append(df_main.index, df_main.index[-1]+td), df_sub.index, side='right')
@@ -718,7 +727,7 @@ def safe_merge_dfs(df_main, df_sub, interval):
f_outOfRange = indices == -1
if f_outOfRange.any():
if intraday or interval in ['1d', '1wk']:
raise Exception(f"The following '{data_col}' events are out-of-range, did not expect with interval {interval}: {df_sub.index}")
raise Exception(f"The following '{data_col}' events are out-of-range, did not expect with interval {interval}: {df_sub.index[f_outOfRange]}")
get_yf_logger().debug(f'Discarding these {data_col} events:' + '\n' + str(df_sub[f_outOfRange]))
df_sub = df_sub[~f_outOfRange].copy()
indices = indices[~f_outOfRange]
@@ -980,10 +989,14 @@ class _TzCache:
def store(self, tkr, tz):
if tz is None:
self.tz_db.delete(tkr)
elif self.tz_db.get(tkr) is not None:
raise Exception("Tkr {} tz already in cache".format(tkr))
else:
self.tz_db.set(tkr, tz)
tz_db = self.tz_db.get(tkr)
if tz_db is not None:
if tz != tz_db:
get_yf_logger().debug(f'{tkr}: Overwriting cached TZ "{tz_db}" with different TZ "{tz}"')
self.tz_db.set(tkr, tz)
else:
self.tz_db.set(tkr, tz)
@property
def _db_dir(self):

View File

@@ -1 +1 @@
version = "0.2.22"
version = "0.2.27"