Compare commits

...

112 Commits

Author SHA1 Message Date
ValueRaider
30abd89474 README: add new 'sustainability' attribute 2024-07-19 16:00:52 +01:00
ValueRaider
26697c25de Version 0.2.41 2024-07-19 15:57:45 +01:00
ValueRaider
3e9a77a7f7 Merge pull request #1971 from ranaroussi/dev
dev -> main
2024-07-19 15:50:08 +01:00
ValueRaider
7ca6b10a63 Fix typo in merged branch feature/price-repair-zeroes-improve 2024-07-19 10:39:28 +01:00
ValueRaider
d024dbb012 Merge pull request #1989 from ranaroussi/fix/price-repair-syntax-error-fixes
Fix: price repair syntax errors
2024-07-18 21:33:00 +01:00
ValueRaider
862ef7c78d Merge pull request #1990 from ranaroussi/feature/price-repair-zeroes-improve
Price repair zeroes: improve for 1d+ data
2024-07-18 21:32:24 +01:00
ValueRaider
d948db7344 Price repair zeroes: improve for 1d+ data
If 1d/1wk close changes without volume, tag as bad for repair.
2024-07-18 21:26:01 +01:00
ValueRaider
c894b9e595 Price repair: fix 2x syntax errors 2024-07-18 21:07:55 +01:00
ValueRaider
0681a777b8 Tests small fixes + fix another Pandas warning 2024-07-15 21:33:03 +01:00
ValueRaider
96279b932b Merge pull request #1981 from ranaroussi/fix/pandas-warnings
Prices: fix some Pandas deprecation warnings
2024-07-15 20:55:15 +01:00
ValueRaider
b0e9e917fe Merge pull request #1985 from rhwvu/main
Rebased: Adding various financials to const.py
2024-07-15 20:51:13 +01:00
R. W
d219e841b3 const.py addition 2024-07-15 15:02:43 -04:00
ValueRaider
a73e974d19 Merge pull request #1977 from ranaroussi/fix/earnings
Deprecate Ticker.earnings
2024-07-13 18:46:21 +01:00
ValueRaider
2e48495a9b Fix deprecation warnings not printing. Remove deprecated info code. 2024-07-13 18:45:35 +01:00
ValueRaider
742cc85a0c Prices: fix some Pandas deprecation warnings 2024-07-13 18:30:38 +01:00
ValueRaider
9c89308780 Merge pull request #1967 from SnowCheetos/preiod_max_fix
fixed history fetching when period='max'
2024-07-06 19:55:02 +01:00
ValueRaider
4160fec989 Clarify max period when interval=60m/1h. Tidy. 2024-07-06 19:51:14 +01:00
ValueRaider
57dac67b0e Deprecate 'Ticker.earnings' 2024-07-05 17:40:15 +01:00
SnowCheetos
b1d56ac235 directly subtracted seconds, kept 99 years impl as on main branch 2024-07-03 11:17:03 -05:00
ValueRaider
6be7cc311b Merge pull request #1970 from ranaroussi/fix/history-metadata
history metadata: Fix '1wk is invalid' & repeated calls
2024-06-28 14:01:24 +01:00
ValueRaider
a9f6c3346b history metadata: Fix '1wk is invalid' & repeated calls 2024-06-28 14:00:12 +01:00
zhihaos2
a42a5b75d6 fixed history fetching when period='max' 2024-06-25 13:37:13 -05:00
ValueRaider
2d5978e4d8 Merge pull request #1965 from vittoboa/add-loss-adjustment-expense
Add pull of "Loss Adjustment Expense" as part of Income Statement
2024-06-21 21:58:37 +01:00
vittoboa
17b4518e6a Add pull of Loss Adjustment Expense data in income statement 2024-06-20 23:53:45 +02:00
ValueRaider
2b1dd86c2b Merge pull request #1959 from MohamedAlaa201/mohamed/sustainibillity_implementation
Sustainability Fetching Implementation
2024-06-13 21:13:06 +01:00
Darksinian
ead2b894e4 Implement Sustainability Fetching 2024-06-13 13:36:29 -04:00
ValueRaider
b27b88caff Merge pull request #1955 from lp177/patch-1 2024-06-04 13:51:06 +01:00
lp177
5d6981346e Fix 'T' is deprecated
site-packages/yfinance/scrapers/history.py:239: FutureWarning: 'T' is deprecated and will be removed in a future version, please use 'min' instead.
2024-06-01 07:57:41 +02:00
ValueRaider
19f9760599 Merge pull request #1946 from ranaroussi/main
sync main -> dev
2024-05-20 19:16:25 +01:00
ValueRaider
930b305327 Version 0.2.40 2024-05-19 17:25:01 +01:00
ValueRaider
7af213dea2 Fix unmatched quotes (0.2.39 regression) 2024-05-19 17:21:59 +01:00
ValueRaider
049337327e Version 0.2.39 2024-05-19 15:14:45 +01:00
ValueRaider
e65ca40d95 Merge pull request #1927 from ranaroussi/dev
sync dev -> main
2024-05-19 15:10:34 +01:00
ValueRaider
fe00fd5152 Ruff fixes 2024-05-19 15:09:57 +01:00
ValueRaider
cb691df586 Merge pull request #1941 from ranaroussi/main
sync main -> dev
2024-05-19 15:07:18 +01:00
ValueRaider
4bc546cb71 Update ci.yml to Node20 2024-05-19 15:05:35 +01:00
ValueRaider
f3c9f9962d Fix tests ; Fine-tune split repair ; Fix UTC warning 2024-05-19 15:01:52 +01:00
ValueRaider
da1c466550 Merge pull request #1931 from ranaroussi/feature/improve-price-repair-bad-splits
Price repair: improve 'sudden change' repair for splits & currency
2024-05-19 11:40:18 +01:00
ValueRaider
97f35b721c Price repair: improve 'sudden change' repair for splits & currency
Original logic for repairing missing split adjustment only checked latest split.
Improved logic checks ALL splits in data, because any can be missing.

Then related changes to 'sudden change detection':
- use prices median not mean, reduce sensitivity to noise.
- handle Kuwait Dinar, which sub-divides into 1000x not 100x.
2024-05-19 11:39:03 +01:00
ValueRaider
7c41434f44 Merge pull request #1930 from ranaroussi/fix/session-switching
Fix switching session from/to requests_cache
2024-05-11 21:40:41 +01:00
ValueRaider
070f13577e Merge pull request #1928 from marcofognog/dev
Add more specific error throwing based on PR 1918
2024-05-11 21:37:35 +01:00
Marcao
7628bec2a6 Adjust and fix according to feedback 2024-05-11 19:20:19 +02:00
ValueRaider
ac4efa3e3d Fix switching session from/to requests_cache
Session switch logic was not recalculating 'self._session_is_caching'.
Also removed message 'help stress-test cookie & crumb & requests_cache', clearly works now.
2024-05-11 09:33:17 +01:00
Elijah Lopez
5a683b916d Add raise missing ticker tests, replace deprecated datetime methods
- renamed test files conform with standards
- replaced utcfromtimestamp
2024-05-10 17:23:14 +02:00
Elijah Lopez
30fdc96157 Fix: PricesMissingError not being raised 2024-05-10 17:15:08 +02:00
Elijah Lopez
ee87a95b8d Rename errors from YFinance to YF 2024-05-10 17:15:08 +02:00
Elijah Lopez
685ef71d9f Add error classes for symbol delisting errors, closes #270 2024-05-10 17:15:08 +02:00
ValueRaider
098e77659c Merge pull request #1922 from ranaroussi/fix/datetime-utc-warning 2024-05-09 10:05:43 +01:00
ValueRaider
dc5c718556 Fix: datetime.datetime.utcnow() is deprecated ...
Python 3.12 deprecates datetime.datetime.utcnow().
Instead of switching to datetime.datetime.now(datetime.UTC), which won't work in Python 3.11,
just switch to Pandas.utcnow().
2024-05-02 22:45:26 +01:00
ValueRaider
84ba6d7d88 Merge pull request #1920 from ranaroussi/feature/price-repair-fx
Don't price-repair FX volume=0, is normal
2024-04-28 12:51:59 +01:00
ValueRaider
e238ac1f95 Merge pull request #1919 from ranaroussi/feature/readme-table-of-contents
Add table-of-contents to README
2024-04-28 12:51:15 +01:00
ValueRaider
efe15e1907 Add table-of-contents to README 2024-04-27 12:50:15 +01:00
ValueRaider
2dcbe34910 Don't price-repair FX volume=0, is normal 2024-04-26 21:32:39 +01:00
ValueRaider
bb47cd4182 Merge pull request #1917 from ranaroussi/main
sync main -> dev
2024-04-24 21:06:43 +01:00
ValueRaider
94e3833e90 Merge pull request #1913 from vittoboa/add_functools_wraps_to_wrapper
Fix help(yf.download) not showing the information about the function
2024-04-22 21:12:22 +01:00
vittoboa
f8e8eecf44 Add functools.wraps to log_indent_decorator's wrapper function 2024-04-22 21:08:10 +02:00
ValueRaider
a1bcb4c351 Version 0.2.38 2024-04-16 22:18:37 +01:00
ValueRaider
18089f451b Merge pull request #1908 from vittoboa/holders_404_error
Fix 404 Error for Holders
2024-04-16 22:14:33 +01:00
vittoboa
3d180fcf2c Move ticker symbol from parameter to URL 2024-04-16 22:37:07 +02:00
ValueRaider
82a3145fdf Merge pull request #1897 from ranaroussi/feature/deprecate-pdr
Deprecate 'pandas_datareader', remove a deprecated argument
2024-04-06 12:42:52 +01:00
ValueRaider
48e9075a2d Deprecate 'pandas_datareader', remove a deprecated argument.
Drop official support for 'pandas_datareader', tag pdr_override() as deprecated.
Also removed deprecated argument 'download(show_errors)'.
2024-04-06 12:42:04 +01:00
ValueRaider
88e8ddb7f5 Merge pull request #1896 from ranaroussi/feature/replace-dead-appdirs
Replace dead 'appdirs' package with 'platformdirs'
2024-04-06 12:22:37 +01:00
ValueRaider
812931ff98 Replace dead 'appdirs' package with 'platformdirs' 2024-04-06 12:19:46 +01:00
ValueRaider
1967e974c1 Merge pull request #1874 from ranaroussi/fix/price-repair-subtle-bug
Fix potential for price repair to discard price=0 rows
2024-03-04 19:46:59 +00:00
Value Raider
942a26fd37 Fix potential for price repair to discard price=0 rows 2024-03-01 22:03:03 +00:00
Value Raider
c2d568367c Version 0.2.37 2024-02-25 13:25:28 +00:00
ValueRaider
d3728d3071 Merge pull request #1869 from ranaroussi/dev
Dev
2024-02-24 23:09:34 +00:00
ValueRaider
915bb1a080 Merge pull request #1866 from ranaroussi/fix/price-repair-confusing-order
Price repair bug-fix
2024-02-24 22:58:20 +00:00
Value Raider
d55c317158 Fix bug: prices order flipping during repair, introducing potential data corruption 2024-02-19 22:17:20 +00:00
ValueRaider
ac1d09049e Merge pull request #1865 from cottrell/fix
Fix some errors.
2024-02-19 22:11:48 +00:00
David Cottrell
afb4e0d5dc Fix some errors. 2024-02-19 21:43:12 +00:00
ValueRaider
1d31e7ca01 Update issue form - more emphasis on following instructions 2024-02-11 13:47:36 +00:00
ValueRaider
683064f9ad Merge pull request #1849 from ranaroussi/refactor/price-history 2024-02-07 23:11:51 +00:00
Value Raider
cdf897f9e6 Move price history+repair logic into new file 2024-02-04 13:09:37 +00:00
ValueRaider
eab6c8dfa7 Update bug_report.yaml because people can't read 2024-02-01 21:28:38 +00:00
ValueRaider
97f93d35ed Merge pull request #1844 from power-edge/dev
adding upgrade for pandas deprecation warning, adding pyarrow>=0.17.0…
2024-01-31 21:51:05 +00:00
Nikolaus Schuetz
5aef8addab removing dev requirements (they are included by extras) 2024-01-29 17:43:12 -05:00
ValueRaider
6b8a4a5608 Merge pull request #1841 from Rogach/pr/dont-disable-global-logging
do not disable app-wide logging in quote.py (fixes #1829)
2024-01-28 16:29:43 +00:00
Platon Pronko
212a7987c3 do not disable app-wide logging in quote.py (fixes #1829) 2024-01-28 20:43:50 +05:00
Nikolaus Schuetz
58a0a57457 adding upgrade for pandas deprecation warning, adding pyarrow>=0.17.0 at minimum requirement as defined in dev requirements for pandas==1.3.0 version 2024-01-26 20:12:58 -05:00
ValueRaider
75297c0eba Merge pull request #1838 from mreiche/bugfix/remove-empty-series
Remove _empty_series leftovers
2024-01-23 19:07:16 +00:00
Mike Reiche
1dc2719368 Remove _empty_series leftovers 2024-01-23 15:32:56 +01:00
Value Raider
ab979e9141 Version 0.2.36 2024-01-21 18:10:41 +00:00
ValueRaider
b837c1ec2a Merge pull request #1834 from ranaroussi/dev
sync dev -> main
2024-01-21 18:08:04 +00:00
ValueRaider
2630c66cd1 Merge pull request #1833 from ange-daumal/json-fix
Fix JSON error handling
2024-01-19 21:56:42 +00:00
ValueRaider
7af789fe9a Merge pull request #1830 from ange-daumal/patch-1
Fix JSON error handling
2024-01-19 21:51:52 +00:00
ValueRaider
73e36688b7 Merge pull request #1827 from ranaroussi/fix/peewee-with-old-sqlite
Handle peewee with old sqlite
2024-01-19 21:51:31 +00:00
ValueRaider
f1264716fc Merge pull request #1824 from ranaroussi/fix/price-keepna-with-repair
Fix history() keepna=False with repair=True
2024-01-19 21:51:10 +00:00
Ange Daumal
06fd35121a Fix JSON access to prevent KeyError 2024-01-19 22:51:02 +01:00
Mike Reiche
91f468e4d3 Fix JSON access to prevent KeyError 2024-01-19 22:42:42 +01:00
ValueRaider
d00c1a976c Merge pull request #1831 from ranaroussi/main
sync main -> dev
2024-01-15 19:28:56 +00:00
ValueRaider
176c3d628b Update ci.yml to Node16 2024-01-15 19:27:37 +00:00
ValueRaider
8f53af1593 Merge pull request #1823 from molpcs/patch-2
Update README.md for better copy-ability
2024-01-14 12:36:17 +00:00
ValueRaider
19188d52d4 Merge pull request #1795 from amanlai/main
explicitly name the column levels
2024-01-14 10:45:04 +00:00
Value Raider
ffaf200562 Handle peewee with old sqlite 2024-01-13 23:00:59 +00:00
Value Raider
6686258e66 Fix history() keepna=False with repair=True 2024-01-13 13:19:44 +00:00
molpcs
47bc46c804 Update README.md
Wrap yfinance[optional] code snippet with quotes to avoid conflict with zsh globbing. Remains compatible with bash.
2024-01-12 11:57:58 -08:00
ValueRaider
f563e51509 Merge pull request #1822 from akshayparopkari/patch-1 2024-01-11 09:30:22 +00:00
Akshay Paropkari
c5404bcd9d Update fundamentals.py
Error in supplying timescale values resulted in misleading ValueError - 

```
ValueError: Illegal argument: timescale must be one of: ['income', 'balance-sheet', 'cash-flow']
```
2024-01-11 05:17:39 +00:00
ValueRaider
006e0a155b Merge pull request #1724 from mreiche/bugfix/data-types-2
Bugfix/data types 2
2024-01-09 20:13:41 +00:00
Mike Reiche
dbc55e5596 Remove unused List import 2024-01-09 21:08:46 +01:00
Mike Reiche
4ce63fe8ca Merge remote-tracking branch 'yfinance/dev' into bugfix/data-types-2 2024-01-09 08:51:33 +01:00
Mike Reiche
223f5337a8 Remove empty static series 2024-01-09 08:50:31 +01:00
Mike Reiche
4c34487149 Revert disabling earnings test 2024-01-09 08:50:00 +01:00
Mike Reiche
ac8a917288 Revert adding explicit requirements 2024-01-09 08:43:54 +01:00
Mike Reiche
15321bd097 Merge remote-tracking branch 'yfinance/main' into bugfix/data-types-2 2024-01-09 08:42:43 +01:00
ValueRaider
10961905b6 Merge pull request #1817 from ranaroussi/main
sync main -> dev
2024-01-07 18:39:10 +00:00
Manlai Amar
a3095d2a40 explicitly name the column levels 2023-12-21 00:02:53 -08:00
Mike Reiche
ba3c1b5ac6 Merge remote-tracking branch 'yfinance/dev' into bugfix/data-types-2
# Conflicts:
#	yfinance/base.py
2023-10-21 12:44:39 +02:00
Mike Reiche
ba977a16a2 Added tests 2023-10-12 08:53:16 +02:00
Mike Reiche
9a3d60105c Minor typing fixes 2023-10-12 08:53:16 +02:00
Mike Reiche
0521428f69 Fixed typing bug when series are empty 2023-10-12 08:53:15 +02:00
28 changed files with 2104 additions and 1945 deletions

View File

@@ -6,23 +6,13 @@ body:
- type: markdown
attributes:
value: |
# IMPORTANT - Read and follow these instructions carefully. Help us help you.
### Does issue already exist?
Use the search tool. Don't annoy everyone by duplicating existing Issues.
# !!! IMPORTANT !!! FOLLOW THESE INSTRUCTIONS CAREFULLY !!!
### Are you up-to-date?
Upgrade to the latest version and confirm the issue/bug is still there.
Upgrade to the latest version: `$ pip install yfinance --upgrade --no-cache-dir`
`$ pip install yfinance --upgrade --no-cache-dir`
Confirm by running:
`import yfinance as yf ; print(yf.__version__)`
and comparing against [PIP](https://pypi.org/project/yfinance/#history).
Confirm latest version by running: `import yfinance as yf ; print(yf.__version__)` and comparing against [PyPI](https://pypi.org/project/yfinance/#history).
### Does Yahoo actually have the data?
@@ -34,6 +24,10 @@ body:
Yahoo Finance free service has rate-limiting https://github.com/ranaroussi/yfinance/discussions/1513. Once limit hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.
### Does issue already exist?
Use the search tool. Don't duplicate existing issues.
- type: markdown
attributes:
value: |
@@ -61,7 +55,7 @@ body:
id: debug-log
attributes:
label: "Debug log"
description: "Run code with debug logging enabled and post the full output. Instructions: https://github.com/ranaroussi/yfinance/tree/main#logging"
description: "Run code with debug logging enabled and post the full output. IMPORTANT INSTRUCTIONS: https://github.com/ranaroussi/yfinance/tree/main#logging"
validations:
required: true

View File

@@ -8,11 +8,11 @@ jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: 3.x
- run: pip install -r requirements.txt
- run: pip install mkdocstrings==0.14.0
- run: pip install mkdocs-material
- run: mkdocs gh-deploy --force
- run: mkdocs gh-deploy --force

1
.gitignore vendored
View File

@@ -10,6 +10,7 @@ build/
*.html
*.css
*.png
test.ipynb
# Environments
.env

View File

@@ -1,6 +1,61 @@
Change Log
===========
0.2.41
------
Improvements:
- add keys to financials #1965 #1985
- fetch Sustainability #1959
- improve price-repair-zeroes #1990
Fixes (prices):
- fetching when period='max' #1967
- metadata: Fix '1wk is invalid' & repeated calls #1970
- Pandas warnings #1955 #1981
- price repair syntax errors #1989
Maintenance:
- deprecate Ticker.earnings #1977
0.2.40
------
Fix typo in 0.2.39 c7af213
0.2.39
------
Fixes:
- Fix switching session from/to requests_cache #1930
Price repair:
- Fix potential for price repair to discard price=0 rows #1874
- Don't price-repair FX volume=0, is normal #1920
- Improve 'sudden change' repair for splits & currency #1931
Information:
- Fix help(yf.download) not showing the information about the function #1913 @vittoboa
- Add more specific error throwing based on PR 1918 #1928 @elibroftw @marcofognog
Maintenance:
- Replace dead 'appdirs' package with 'platformdirs' #1896
- Deprecate 'pandas_datareader', remove a deprecated argument #1897
- Fix: datetime.datetime.utcnow() is deprecated ... #1922
0.2.38
------
Fix holders & insiders #1908
0.2.37
------
Small fixes:
- Fix Pandas warnings #1838 #1844
- Fix price repair bug, typos, refactor #1866 #1865 #1849
- Stop disabling logging #1841
0.2.36
------
Small fixes:
- Update README.md for better copy-ability #1823
- Name download() column levels #1795
- Fix history(keepna=False) when repair=True #1824
- Replace empty list with empty pd.Series #1724
- Handle peewee with old sqlite #1827
- Fix JSON error handling #1830 #1833
0.2.35
------
Internal fixes for 0.2.34

View File

@@ -42,6 +42,14 @@ Yahoo! finance API is intended for personal use only.**
---
- [Installation](#installation)
- [Quick start](#quick-start)
- [Advanced](#logging)
- [Wiki](https://github.com/ranaroussi/yfinance/wiki)
- [Contribute](#developers-want-to-contribute)
---
## Installation
Install `yfinance` using `pip`:
@@ -55,7 +63,7 @@ $ pip install yfinance --upgrade --no-cache-dir
To install with optional dependencies, replace `optional` with: `nospam` for [caching-requests](#smarter-scraping), `repair` for [price repair](https://github.com/ranaroussi/yfinance/wiki/Price-repair), or `nospam,repair` for both:
``` {.sourceCode .bash}
$ pip install yfinance[optional]
$ pip install "yfinance[optional]"
```
[Required dependencies](./requirements.txt) , [all dependencies](./setup.py#L62).
@@ -111,12 +119,14 @@ msft.insider_transactions
msft.insider_purchases
msft.insider_roster_holders
msft.sustainability
# show recommendations
msft.recommendations
msft.recommendations_summary
msft.upgrades_downgrades
# Show future and historic earnings dates, returns at most next 4 quarters and last 8 quarters by default.
# Show future and historic earnings dates, returns at most next 4 quarters and last 8 quarters by default.
# Note: If more are needed use msft.get_earnings_dates(limit=XX) with increased limit argument.
msft.earnings_dates
@@ -183,7 +193,7 @@ data = yf.download("SPY AAPL", period="1mo")
### Smarter scraping
Install the `nospam` packages for smarter scraping using `pip` (see [Installation](#installation)). These packages help cache calls such that Yahoo is not spammed with requests.
Install the `nospam` packages for smarter scraping using `pip` (see [Installation](#installation)). These packages help cache calls such that Yahoo is not spammed with requests.
To use a custom `requests` session, pass a `session=` argument to
the Ticker constructor. This allows for caching calls to the API as well as a custom way to modify requests via the `User-agent` header.
@@ -228,31 +238,16 @@ yfinance?](https://stackoverflow.com/questions/63107801)
- How to download single or multiple tickers into a single
dataframe with single level column names and a ticker column
### `pandas_datareader` override
If your code uses `pandas_datareader` and you want to download data
faster, you can "hijack" `pandas_datareader.data.get_data_yahoo()`
method to use **yfinance** while making sure the returned data is in the
same format as **pandas\_datareader**'s `get_data_yahoo()`.
```python
from pandas_datareader import data as pdr
import yfinance as yf
yf.pdr_override() # <== that's all it takes :-)
# download dataframe
data = pdr.get_data_yahoo("SPY", start="2017-01-01", end="2017-04-30")
```
### Persistent cache store
To reduce requests to Yahoo, yfinance stores some data locally: timezones to localize dates, and cookie. Cache location is:
- Windows = C:/Users/\<USER\>/AppData/Local/py-yfinance
- Linux = /home/\<USER\>/.cache/py-yfinance
- MacOS = /Users/\<USER\>/Library/Caches/py-yfinance
You can direct cache to use a different location with `set_tz_cache_location()`:
```python
import yfinance as yf
yf.set_tz_cache_location("custom/cache/location")
@@ -279,7 +274,7 @@ intended for research and educational purposes. You should refer to Yahoo!'s ter
([here](https://policies.yahoo.com/us/en/yahoo/terms/product-atos/apiforydn/index.htm),
[here](https://legal.yahoo.com/us/en/yahoo/terms/otos/index.html), and
[here](https://policies.yahoo.com/us/en/yahoo/terms/index.htm)) for
detailes on your rights to use the actual data downloaded.
details on your rights to use the actual data downloaded.
---

View File

@@ -1,5 +1,5 @@
{% set name = "yfinance" %}
{% set version = "0.2.35" %}
{% set version = "0.2.41" %}
package:
name: "{{ name|lower }}"
@@ -21,7 +21,7 @@ requirements:
- requests >=2.31
- multitasking >=0.0.7
- lxml >=4.9.1
- appdirs >=1.4.4
- platformdirs >=2.0.0
- pytz >=2022.5
- frozendict >=2.3.4
- beautifulsoup4 >=4.11.1
@@ -37,7 +37,7 @@ requirements:
- requests >=2.31
- multitasking >=0.0.7
- lxml >=4.9.1
- appdirs >=1.4.4
- platformdirs >=2.0.0
- pytz >=2022.5
- frozendict >=2.3.4
- beautifulsoup4 >=4.11.1

View File

@@ -3,9 +3,9 @@ numpy>=1.16.5
requests>=2.31
multitasking>=0.0.7
lxml>=4.9.1
appdirs>=1.4.4
platformdirs>=2.0.0
pytz>=2022.5
frozendict>=2.3.4
beautifulsoup4>=4.11.1
html5lib>=1.1
peewee>=3.16.2
peewee>=3.16.2

View File

@@ -61,7 +61,7 @@ setup(
packages=find_packages(exclude=['contrib', 'docs', 'tests', 'examples']),
install_requires=['pandas>=1.3.0', 'numpy>=1.16.5',
'requests>=2.31', 'multitasking>=0.0.7',
'lxml>=4.9.1', 'appdirs>=1.4.4', 'pytz>=2022.5',
'lxml>=4.9.1', 'platformdirs>=2.0.0', 'pytz>=2022.5',
'frozendict>=2.3.4', 'peewee>=3.16.2',
'beautifulsoup4>=4.11.1', 'html5lib>=1.1'],
extras_require={

View File

@@ -1 +0,0 @@
#!/usr/bin/env python

View File

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
import appdirs as _ad
import platformdirs as _ad
import datetime as _dt
import sys
import os

View File

@@ -1,27 +1,27 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2021-12-13 00:00:00+00:00,393.999975585938,406.6,391.4,402.899916992188,291.232287597656,62714764.4736842,0,0
2021-12-20 00:00:00+00:00,393.999975585938,412.199990234375,392.502983398438,409.899997558594,296.292243652344,46596651.3157895,0,0
2021-12-27 00:00:00+00:00,409.899997558594,416.550971679688,408.387001953125,410.4,296.653642578125,10818482.8947368,0,0
2022-01-03 00:00:00+00:00,410.4,432.199995117188,410.4,432.099985351563,312.339265136719,44427327.6315789,0,0
2022-01-10 00:00:00+00:00,431.3,439.199982910156,429.099970703125,436.099912109375,315.230618896484,29091400,0,0
2022-01-17 00:00:00+00:00,437.999912109375,445.199965820313,426.999997558594,431.999975585938,312.267017822266,43787351.3157895,0,0
2022-01-24 00:00:00+00:00,430.099975585938,440.999973144531,420.999968261719,433.499982910156,313.351237792969,58487296.0526316,0,0
2022-01-31 00:00:00+00:00,436.199968261719,443.049987792969,432.099985351563,435.199916992188,314.580045166016,43335806.5789474,0,0
2022-02-07 00:00:00+00:00,437.899995117188,448.799992675781,436.051994628906,444.39998046875,321.230207519531,39644061.8421053,0,0
2022-02-14 00:00:00+00:00,437.699975585938,441.999978027344,426.699968261719,432.199995117188,312.411558837891,49972693.4210526,0,0
2022-02-21 00:00:00+00:00,435.499992675781,438.476999511719,408.29998046875,423.399970703125,306.050571289063,65719596.0526316,0,0
2022-02-28 00:00:00+00:00,415.099995117188,427.999909667969,386.199932861328,386.799945068359,279.594578857422,94057936.8421053,4.1875,0
2022-03-07 00:00:00+00:00,374.999952392578,417.299978027344,361.101981201172,409.599968261719,298.389248046875,71269101.3157895,0,0
2022-03-14 00:00:00+00:00,413.099985351563,426.699968261719,408.899992675781,422.399965820313,307.713929443359,55431927.6315789,0,0
2022-03-21 00:00:00+00:00,422.699995117188,442.7,422.399965820313,437.799985351563,318.932696533203,39896352.6315789,0,0
2022-03-28 00:00:00+01:00,442.49998046875,460.999978027344,440.097983398438,444.6,323.886403808594,56413515.7894737,0,0
2022-04-04 00:00:00+01:00,439.699985351563,445.399985351563,421.999973144531,425.799973144531,310.190817871094,49415836.8421053,19.342106,0
2022-04-11 00:00:00+01:00,425.39998046875,435.599909667969,420.799995117188,434.299968261719,327.211427001953,29875081.5789474,0,0
2022-04-18 00:00:00+01:00,434.299968261719,447.799987792969,433.599992675781,437.799985351563,329.848419189453,49288272.3684211,0,0
2022-04-25 00:00:00+01:00,430.699987792969,438.799990234375,423.999982910156,433.299916992188,326.457967529297,44656776.3157895,0,0
2022-05-02 00:00:00+01:00,433.299916992188,450.999975585938,414.499982910156,414.899975585938,312.595018310547,29538167.1052632,0,0
2022-05-09 00:00:00+01:00,413.199995117188,417.449992675781,368.282923583984,408.199970703125,307.547099609375,73989611.8421053,0,0
2022-05-16 00:00:00+01:00,384,423.600006103516,384,412.100006103516,310.485473632813,81938261,101.69,0.76
2021-12-13 00:00:00+00:00,518.421020507813,535,515,530.131469726563,383.200378417969,47663221,0,0
2021-12-20 00:00:00+00:00,518.421020507813,542.368408203125,516.451293945313,539.342102050781,389.858215332031,35413455,0,0
2021-12-27 00:00:00+00:00,539.342102050781,548.093383789063,537.351318359375,540,390.333740234375,8222047,0,0
2022-01-03 00:00:00+00:00,540,568.684204101563,540,568.552612304688,410.972717285156,33764769,0,0
2022-01-10 00:00:00+00:00,567.5,577.894714355469,564.605224609375,573.815673828125,414.777130126953,22109464,0,0
2022-01-17 00:00:00+00:00,576.315673828125,585.789428710938,561.842102050781,568.421020507813,410.877655029297,33278387,0,0
2022-01-24 00:00:00+00:00,565.921020507813,580.263122558594,553.947326660156,570.394714355469,412.304260253906,44450345,0,0
2022-01-31 00:00:00+00:00,573.947326660156,582.960510253906,568.552612304688,572.631469726563,413.921112060547,32935213,0,0
2022-02-07 00:00:00+00:00,576.184204101563,590.526306152344,573.752624511719,584.73681640625,422.671325683594,30129487,0,0
2022-02-14 00:00:00+00:00,575.921020507813,581.578918457031,561.447326660156,568.684204101563,411.067840576172,37979247,0,0
2022-02-21 00:00:00+00:00,573.026306152344,576.943420410156,537.23681640625,557.105224609375,402.698120117188,49946893,0,0
2022-02-28 00:00:00+00:00,546.184204101563,563.157775878906,508.157806396484,508.947296142578,367.887603759766,71484032,4.1875,0
2022-03-07 00:00:00+00:00,493.420989990234,549.078918457031,475.134185791016,538.947326660156,392.617431640625,54164517,0,0
2022-03-14 00:00:00+00:00,543.552612304688,561.447326660156,538.026306152344,555.789428710938,404.886749267578,42128265,0,0
2022-03-21 00:00:00+00:00,556.184204101563,582.5,555.789428710938,576.052612304688,419.648284912109,30321228,0,0
2022-03-28 00:00:00+01:00,582.23681640625,606.578918457031,579.076293945313,585,426.166320800781,42874272,0,0
2022-04-04 00:00:00+01:00,578.552612304688,586.052612304688,555.263122558594,560.263122558594,408.145812988281,37556036,19.342106,0
2022-04-11 00:00:00+01:00,559.73681640625,573.157775878906,553.684204101563,571.447326660156,430.541351318359,22705062,0,0
2022-04-18 00:00:00+01:00,571.447326660156,589.210510253906,570.526306152344,576.052612304688,434.011077880859,37459087,0,0
2022-04-25 00:00:00+01:00,566.710510253906,577.368408203125,557.894714355469,570.131469726563,429.549957275391,33939150,0,0
2022-05-02 00:00:00+01:00,570.131469726563,593.421020507813,545.394714355469,545.921020507813,411.309234619141,22449007,0,0
2022-05-09 00:00:00+01:00,543.684204101563,549.276306152344,484.582794189453,537.105224609375,404.667236328125,56232105,0,0
2022-05-16 00:00:00+01:00,505.263157894737,557.368429083573,505.263157894737,542.236850136205,408.533517937911,62273078.36,101.69,0.76
2022-05-23 00:00:00+01:00,416.100006103516,442.399993896484,341.915008544922,440.899993896484,409.764678955078,45432941,0,0
2022-05-30 00:00:00+01:00,442.700012207031,444.200012207031,426.600006103516,428.700012207031,398.426239013672,37906659,0,0
2022-06-06 00:00:00+01:00,425.299987792969,434.010009765625,405.200012207031,405.399993896484,376.771606445313,40648810,0,0
1 Date Open High Low Close Adj Close Volume Dividends Stock Splits
2 2021-12-13 00:00:00+00:00 393.999975585938 518.421020507813 406.6 535 391.4 515 402.899916992188 530.131469726563 291.232287597656 383.200378417969 62714764.4736842 47663221 0 0
3 2021-12-20 00:00:00+00:00 393.999975585938 518.421020507813 412.199990234375 542.368408203125 392.502983398438 516.451293945313 409.899997558594 539.342102050781 296.292243652344 389.858215332031 46596651.3157895 35413455 0 0
4 2021-12-27 00:00:00+00:00 409.899997558594 539.342102050781 416.550971679688 548.093383789063 408.387001953125 537.351318359375 410.4 540 296.653642578125 390.333740234375 10818482.8947368 8222047 0 0
5 2022-01-03 00:00:00+00:00 410.4 540 432.199995117188 568.684204101563 410.4 540 432.099985351563 568.552612304688 312.339265136719 410.972717285156 44427327.6315789 33764769 0 0
6 2022-01-10 00:00:00+00:00 431.3 567.5 439.199982910156 577.894714355469 429.099970703125 564.605224609375 436.099912109375 573.815673828125 315.230618896484 414.777130126953 29091400 22109464 0 0
7 2022-01-17 00:00:00+00:00 437.999912109375 576.315673828125 445.199965820313 585.789428710938 426.999997558594 561.842102050781 431.999975585938 568.421020507813 312.267017822266 410.877655029297 43787351.3157895 33278387 0 0
8 2022-01-24 00:00:00+00:00 430.099975585938 565.921020507813 440.999973144531 580.263122558594 420.999968261719 553.947326660156 433.499982910156 570.394714355469 313.351237792969 412.304260253906 58487296.0526316 44450345 0 0
9 2022-01-31 00:00:00+00:00 436.199968261719 573.947326660156 443.049987792969 582.960510253906 432.099985351563 568.552612304688 435.199916992188 572.631469726563 314.580045166016 413.921112060547 43335806.5789474 32935213 0 0
10 2022-02-07 00:00:00+00:00 437.899995117188 576.184204101563 448.799992675781 590.526306152344 436.051994628906 573.752624511719 444.39998046875 584.73681640625 321.230207519531 422.671325683594 39644061.8421053 30129487 0 0
11 2022-02-14 00:00:00+00:00 437.699975585938 575.921020507813 441.999978027344 581.578918457031 426.699968261719 561.447326660156 432.199995117188 568.684204101563 312.411558837891 411.067840576172 49972693.4210526 37979247 0 0
12 2022-02-21 00:00:00+00:00 435.499992675781 573.026306152344 438.476999511719 576.943420410156 408.29998046875 537.23681640625 423.399970703125 557.105224609375 306.050571289063 402.698120117188 65719596.0526316 49946893 0 0
13 2022-02-28 00:00:00+00:00 415.099995117188 546.184204101563 427.999909667969 563.157775878906 386.199932861328 508.157806396484 386.799945068359 508.947296142578 279.594578857422 367.887603759766 94057936.8421053 71484032 4.1875 0
14 2022-03-07 00:00:00+00:00 374.999952392578 493.420989990234 417.299978027344 549.078918457031 361.101981201172 475.134185791016 409.599968261719 538.947326660156 298.389248046875 392.617431640625 71269101.3157895 54164517 0 0
15 2022-03-14 00:00:00+00:00 413.099985351563 543.552612304688 426.699968261719 561.447326660156 408.899992675781 538.026306152344 422.399965820313 555.789428710938 307.713929443359 404.886749267578 55431927.6315789 42128265 0 0
16 2022-03-21 00:00:00+00:00 422.699995117188 556.184204101563 442.7 582.5 422.399965820313 555.789428710938 437.799985351563 576.052612304688 318.932696533203 419.648284912109 39896352.6315789 30321228 0 0
17 2022-03-28 00:00:00+01:00 442.49998046875 582.23681640625 460.999978027344 606.578918457031 440.097983398438 579.076293945313 444.6 585 323.886403808594 426.166320800781 56413515.7894737 42874272 0 0
18 2022-04-04 00:00:00+01:00 439.699985351563 578.552612304688 445.399985351563 586.052612304688 421.999973144531 555.263122558594 425.799973144531 560.263122558594 310.190817871094 408.145812988281 49415836.8421053 37556036 19.342106 0
19 2022-04-11 00:00:00+01:00 425.39998046875 559.73681640625 435.599909667969 573.157775878906 420.799995117188 553.684204101563 434.299968261719 571.447326660156 327.211427001953 430.541351318359 29875081.5789474 22705062 0 0
20 2022-04-18 00:00:00+01:00 434.299968261719 571.447326660156 447.799987792969 589.210510253906 433.599992675781 570.526306152344 437.799985351563 576.052612304688 329.848419189453 434.011077880859 49288272.3684211 37459087 0 0
21 2022-04-25 00:00:00+01:00 430.699987792969 566.710510253906 438.799990234375 577.368408203125 423.999982910156 557.894714355469 433.299916992188 570.131469726563 326.457967529297 429.549957275391 44656776.3157895 33939150 0 0
22 2022-05-02 00:00:00+01:00 433.299916992188 570.131469726563 450.999975585938 593.421020507813 414.499982910156 545.394714355469 414.899975585938 545.921020507813 312.595018310547 411.309234619141 29538167.1052632 22449007 0 0
23 2022-05-09 00:00:00+01:00 413.199995117188 543.684204101563 417.449992675781 549.276306152344 368.282923583984 484.582794189453 408.199970703125 537.105224609375 307.547099609375 404.667236328125 73989611.8421053 56232105 0 0
24 2022-05-16 00:00:00+01:00 384 505.263157894737 423.600006103516 557.368429083573 384 505.263157894737 412.100006103516 542.236850136205 310.485473632813 408.533517937911 81938261 62273078.36 101.69 0.76
25 2022-05-23 00:00:00+01:00 416.100006103516 442.399993896484 341.915008544922 440.899993896484 409.764678955078 45432941 0 0
26 2022-05-30 00:00:00+01:00 442.700012207031 444.200012207031 426.600006103516 428.700012207031 398.426239013672 37906659 0 0
27 2022-06-06 00:00:00+01:00 425.299987792969 434.010009765625 405.200012207031 405.399993896484 376.771606445313 40648810 0 0

View File

@@ -43,14 +43,14 @@ class TestPriceHistory(unittest.TestCase):
df_tkrs = df.columns.levels[1]
self.assertEqual(sorted(tkrs), sorted(df_tkrs))
def test_download_with_invalid_ticker(self):
#Checks if using an invalid symbol gives the same output as not using an invalid symbol in combination with a valid symbol (AAPL)
#Checks to make sure that invalid symbol handling for the date column is the same as the base case (no invalid symbols)
invalid_tkrs = ["AAPL", "ATVI"] #AAPL exists and ATVI does not exist
valid_tkrs = ["AAPL", "INTC"] #AAPL and INTC both exist
data_invalid_sym = yf.download(invalid_tkrs, start='2023-11-16', end='2023-11-17')
data_valid_sym = yf.download(valid_tkrs, start='2023-11-16', end='2023-11-17')
@@ -62,7 +62,7 @@ class TestPriceHistory(unittest.TestCase):
dat = yf.Ticker(tkr, session=self.session)
tz = dat._get_ticker_tz(proxy=None, timeout=None)
dt_utc = _tz.timezone("UTC").localize(_dt.datetime.utcnow())
dt_utc = _pd.Timestamp.utcnow()
dt = dt_utc.astimezone(_tz.timezone(tz))
start_d = dt.date() - _dt.timedelta(days=7)
df = dat.history(start=start_d, interval="1h")
@@ -82,7 +82,7 @@ class TestPriceHistory(unittest.TestCase):
dat = yf.Ticker(tkr, session=self.session)
tz = dat._get_ticker_tz(proxy=None, timeout=None)
dt_utc = _tz.timezone("UTC").localize(_dt.datetime.utcnow())
dt_utc = _pd.Timestamp.utcnow()
dt = dt_utc.astimezone(_tz.timezone(tz))
if dt.time() < _dt.time(17, 0):
continue
@@ -277,7 +277,7 @@ class TestPriceHistory(unittest.TestCase):
# Reproduce issue #1634 - 1d dividend out-of-range, should be prepended to prices
div_dt = _pd.Timestamp(2022, 7, 21).tz_localize("America/New_York")
df_dividends = _pd.DataFrame(data={"Dividends":[1.0]}, index=[div_dt])
df_prices = _pd.DataFrame(data={c:[1.0] for c in yf.const.price_colnames}|{'Volume':0}, index=[div_dt+_dt.timedelta(days=1)])
df_prices = _pd.DataFrame(data={c:[1.0] for c in yf.const._PRICE_COLNAMES_}|{'Volume':0}, index=[div_dt+_dt.timedelta(days=1)])
df_merged = yf.utils.safe_merge_dfs(df_prices, df_dividends, '1d')
self.assertEqual(df_merged.shape[0], 2)
self.assertTrue(df_merged[df_prices.columns].iloc[1:].equals(df_prices))
@@ -359,13 +359,6 @@ class TestPriceHistory(unittest.TestCase):
dfd_divs = dfd[dfd['Dividends'] != 0]
self.assertEqual(dfm_divs.shape[0], dfd_divs.shape[0])
dfm = yf.Ticker("F").history(period="50mo", interval="1mo")
dfd = yf.Ticker("F").history(period="50mo", interval="1d")
dfd = dfd[dfd.index > dfm.index[0]]
dfm_divs = dfm[dfm['Dividends'] != 0]
dfd_divs = dfd[dfd['Dividends'] != 0]
self.assertEqual(dfm_divs.shape[0], dfd_divs.shape[0])
def test_tz_dst_ambiguous(self):
# Reproduce issue #1100
try:
@@ -470,6 +463,18 @@ class TestPriceRepair(unittest.TestCase):
if cls.session is not None:
cls.session.close()
def test_types(self):
tkr = 'INTC'
dat = yf.Ticker(tkr, session=self.session)
data = dat.history(period="3mo", interval="1d", prepost=True, repair=True)
self.assertIsInstance(data, _pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
reconstructed = dat._lazy_load_price_history()._reconstruct_intervals_batch(data, "1wk", True)
self.assertIsInstance(reconstructed, _pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
def test_reconstruct_2m(self):
# 2m repair requires 1m data.
# Yahoo restricts 1m fetches to 7 days max within last 30 days.
@@ -494,6 +499,7 @@ class TestPriceRepair(unittest.TestCase):
tkr = "PNL.L"
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
df = _pd.DataFrame(data={"Open": [470.5, 473.5, 474.5, 470],
@@ -517,7 +523,7 @@ class TestPriceRepair(unittest.TestCase):
# Run test
df_repaired = dat._fix_unit_random_mixups(df_bad, "1wk", tz_exchange, prepost=False)
df_repaired = hist._fix_unit_random_mixups(df_bad, "1wk", tz_exchange, prepost=False)
# First test - no errors left
for c in data_cols:
@@ -548,6 +554,7 @@ class TestPriceRepair(unittest.TestCase):
tkr = "PNL.L"
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
df = _pd.DataFrame(data={"Open": [400, 398, 392.5, 417],
@@ -574,7 +581,7 @@ class TestPriceRepair(unittest.TestCase):
df.index = df.index.tz_localize(tz_exchange)
df_bad.index = df_bad.index.tz_localize(tz_exchange)
df_repaired = dat._fix_unit_random_mixups(df_bad, "1wk", tz_exchange, prepost=False)
df_repaired = hist._fix_unit_random_mixups(df_bad, "1wk", tz_exchange, prepost=False)
# First test - no errors left
for c in data_cols:
@@ -606,6 +613,7 @@ class TestPriceRepair(unittest.TestCase):
tkr = "PNL.L"
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
df = _pd.DataFrame(data={"Open": [478, 476, 476, 472],
@@ -627,7 +635,7 @@ class TestPriceRepair(unittest.TestCase):
df.index = df.index.tz_localize(tz_exchange)
df_bad.index = df_bad.index.tz_localize(tz_exchange)
df_repaired = dat._fix_unit_random_mixups(df_bad, "1d", tz_exchange, prepost=False)
df_repaired = hist._fix_unit_random_mixups(df_bad, "1d", tz_exchange, prepost=False)
# First test - no errors left
for c in data_cols:
@@ -656,6 +664,7 @@ class TestPriceRepair(unittest.TestCase):
for interval in ['1d', '1wk']:
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
_dp = os.path.dirname(__file__)
@@ -672,7 +681,7 @@ class TestPriceRepair(unittest.TestCase):
df.index = _pd.to_datetime(df.index, utc=True).tz_convert(tz_exchange)
df = df.sort_index()
df_repaired = dat._fix_unit_switch(df_bad, interval, tz_exchange)
df_repaired = hist._fix_unit_switch(df_bad, interval, tz_exchange)
df_repaired = df_repaired.sort_index()
# First test - no errors left
@@ -704,6 +713,7 @@ class TestPriceRepair(unittest.TestCase):
def test_repair_zeroes_daily(self):
tkr = "BBIL.L"
dat = yf.Ticker(tkr, session=self.session)
hist = dat._lazy_load_price_history()
tz_exchange = dat.fast_info["timezone"]
df_bad = _pd.DataFrame(data={"Open": [0, 102.04, 102.04],
@@ -719,7 +729,7 @@ class TestPriceRepair(unittest.TestCase):
df_bad.index.name = "Date"
df_bad.index = df_bad.index.tz_localize(tz_exchange)
repaired_df = dat._fix_zeroes(df_bad, "1d", tz_exchange, prepost=False)
repaired_df = hist._fix_zeroes(df_bad, "1d", tz_exchange, prepost=False)
correct_df = df_bad.copy()
correct_df.loc["2022-11-01", "Open"] = 102.080002
@@ -753,6 +763,7 @@ class TestPriceRepair(unittest.TestCase):
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
df.index = df.index.tz_localize(tz_exchange)
hist = dat._lazy_load_price_history()
rtol = 5e-3
for i in [0, 1, 2]:
@@ -761,7 +772,7 @@ class TestPriceRepair(unittest.TestCase):
df_slice_bad = df_slice.copy()
df_slice_bad.loc[df_slice_bad.index[j], "Adj Close"] = 0.0
df_slice_bad_repaired = dat._fix_zeroes(df_slice_bad, "1d", tz_exchange, prepost=False)
df_slice_bad_repaired = hist._fix_zeroes(df_slice_bad, "1d", tz_exchange, prepost=False)
for c in ["Close", "Adj Close"]:
self.assertTrue(_np.isclose(df_slice_bad_repaired[c], df_slice[c], rtol=rtol).all())
self.assertTrue("Repaired?" in df_slice_bad_repaired.columns)
@@ -771,8 +782,9 @@ class TestPriceRepair(unittest.TestCase):
tkr = "INTC"
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
correct_df = dat.history(period="1wk", interval="1h", auto_adjust=False, repair=True)
correct_df = hist.history(period="5d", interval="1h", auto_adjust=False, repair=True)
df_bad = correct_df.copy()
bad_idx = correct_df.index[10]
@@ -783,7 +795,7 @@ class TestPriceRepair(unittest.TestCase):
df_bad.loc[bad_idx, "Adj Close"] = _np.nan
df_bad.loc[bad_idx, "Volume"] = 0
repaired_df = dat._fix_zeroes(df_bad, "1h", tz_exchange, prepost=False)
repaired_df = hist._fix_zeroes(df_bad, "1h", tz_exchange, prepost=False)
for c in ["Open", "Low", "High", "Close"]:
try:
@@ -801,7 +813,7 @@ class TestPriceRepair(unittest.TestCase):
self.assertTrue("Repaired?" in repaired_df.columns)
self.assertFalse(repaired_df["Repaired?"].isna().any())
def test_repair_bad_stock_split(self):
def test_repair_bad_stock_splits(self):
# Stocks that split in 2022 but no problems in Yahoo data,
# so repair should change nothing
good_tkrs = ['AMZN', 'DXCM', 'FTNT', 'GOOG', 'GME', 'PANW', 'SHOP', 'TSLA']
@@ -812,11 +824,12 @@ class TestPriceRepair(unittest.TestCase):
for interval in intervals:
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
_dp = os.path.dirname(__file__)
df_good = dat.history(start='2020-01-01', end=_dt.date.today(), interval=interval, auto_adjust=False)
repaired_df = dat._fix_bad_stock_split(df_good, interval, tz_exchange)
repaired_df = hist._fix_bad_stock_splits(df_good, interval, tz_exchange)
# Expect no change from repair
df_good = df_good.sort_index()
@@ -836,6 +849,7 @@ class TestPriceRepair(unittest.TestCase):
for tkr in bad_tkrs:
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
_dp = os.path.dirname(__file__)
interval = '1d'
@@ -846,11 +860,11 @@ class TestPriceRepair(unittest.TestCase):
df_bad = _pd.read_csv(fp, index_col="Date")
df_bad.index = _pd.to_datetime(df_bad.index, utc=True)
repaired_df = dat._fix_bad_stock_split(df_bad, "1d", tz_exchange)
repaired_df = hist._fix_bad_stock_splits(df_bad, "1d", tz_exchange)
fp = os.path.join(_dp, "data", tkr.replace('.','-')+'-'+interval+"-bad-stock-split-fixed.csv")
correct_df = _pd.read_csv(fp, index_col="Date")
correct_df.index = _pd.to_datetime(correct_df.index)
correct_df.index = _pd.to_datetime(correct_df.index, utc=True)
repaired_df = repaired_df.sort_index()
correct_df = correct_df.sort_index()
@@ -876,11 +890,12 @@ class TestPriceRepair(unittest.TestCase):
for interval in intervals:
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
_dp = os.path.dirname(__file__)
df_good = dat.history(start='2020-11-30', end='2021-04-01', interval=interval, auto_adjust=False)
df_good = hist.history(start='2020-11-30', end='2021-04-01', interval=interval, auto_adjust=False)
repaired_df = dat._fix_bad_stock_split(df_good, interval, tz_exchange)
repaired_df = hist._fix_bad_stock_splits(df_good, interval, tz_exchange)
# Expect no change from repair
df_good = df_good.sort_index()
@@ -900,12 +915,13 @@ class TestPriceRepair(unittest.TestCase):
dat = yf.Ticker(tkr, session=self.session)
tz_exchange = dat.fast_info["timezone"]
hist = dat._lazy_load_price_history()
_dp = os.path.dirname(__file__)
df_bad = _pd.read_csv(os.path.join(_dp, "data", tkr.replace('.','-')+"-1d-missing-div-adjust.csv"), index_col="Date")
df_bad.index = _pd.to_datetime(df_bad.index)
repaired_df = dat._fix_missing_div_adjust(df_bad, "1d", tz_exchange)
repaired_df = hist._fix_missing_div_adjust(df_bad, "1d", tz_exchange)
correct_df = _pd.read_csv(os.path.join(_dp, "data", tkr.replace('.','-')+"-1d-missing-div-adjust-fixed.csv"), index_col="Date")
correct_df.index = _pd.to_datetime(correct_df.index)

View File

@@ -12,7 +12,7 @@ import pandas as pd
from .context import yfinance as yf
from .context import session_gbl
from yfinance.exceptions import YFNotImplementedError
from yfinance.exceptions import YFChartError, YFInvalidPeriodError, YFNotImplementedError, YFTickerMissingError, YFTzMissingError
import unittest
@@ -35,8 +35,6 @@ ticker_attributes = (
("recommendations", Union[pd.DataFrame, dict]),
("recommendations_summary", Union[pd.DataFrame, dict]),
("upgrades_downgrades", Union[pd.DataFrame, dict]),
("earnings", pd.DataFrame),
("quarterly_earnings", pd.DataFrame),
("quarterly_cashflow", pd.DataFrame),
("cashflow", pd.DataFrame),
("quarterly_balance_sheet", pd.DataFrame),
@@ -100,20 +98,57 @@ class TestTicker(unittest.TestCase):
tkr = "DJI" # typo of "^DJI"
dat = yf.Ticker(tkr, session=self.session)
dat.history(period="1wk")
dat.history(period="5d")
dat.history(start="2022-01-01")
dat.history(start="2022-01-01", end="2022-03-01")
yf.download([tkr], period="1wk", threads=False, ignore_tz=False)
yf.download([tkr], period="1wk", threads=True, ignore_tz=False)
yf.download([tkr], period="1wk", threads=False, ignore_tz=True)
yf.download([tkr], period="1wk", threads=True, ignore_tz=True)
yf.download([tkr], period="5d", threads=False, ignore_tz=False)
yf.download([tkr], period="5d", threads=True, ignore_tz=False)
yf.download([tkr], period="5d", threads=False, ignore_tz=True)
yf.download([tkr], period="5d", threads=True, ignore_tz=True)
for k in dat.fast_info:
dat.fast_info[k]
for attribute_name, attribute_type in ticker_attributes:
assert_attribute_type(self, dat, attribute_name, attribute_type)
assert_attribute_type(self, dat, attribute_name, attribute_type)
assert isinstance(dat.dividends, pd.Series)
assert dat.dividends.empty
assert isinstance(dat.splits, pd.Series)
assert dat.splits.empty
assert isinstance(dat.capital_gains, pd.Series)
assert dat.capital_gains.empty
with self.assertRaises(YFNotImplementedError):
assert isinstance(dat.shares, pd.DataFrame)
assert dat.shares.empty
assert isinstance(dat.actions, pd.DataFrame)
assert dat.actions.empty
def test_invalid_period(self):
tkr = 'VALE'
dat = yf.Ticker(tkr, session=self.session)
with self.assertRaises(YFInvalidPeriodError):
dat.history(period="2wks", interval="1d", raise_errors=True)
with self.assertRaises(YFInvalidPeriodError):
dat.history(period="2mo", interval="1d", raise_errors=True)
def test_prices_missing(self):
# this test will need to be updated every time someone wants to run a test
# hard to find a ticker that matches this error other than options
# META call option, 2024 April 26th @ strike of 180000
tkr = 'META240426C00180000'
dat = yf.Ticker(tkr, session=self.session)
with self.assertRaises(YFChartError):
dat.history(period="5d", interval="1m", raise_errors=True)
def test_ticker_missing(self):
tkr = 'ATVI'
dat = yf.Ticker(tkr, session=self.session)
# A missing ticker can trigger either a niche error or the generalized error
with self.assertRaises((YFTickerMissingError, YFTzMissingError, YFChartError)):
dat.history(period="3mo", interval="1d", raise_errors=True)
def test_goodTicker(self):
# that yfinance works when full api is called on same instance of ticker
@@ -122,32 +157,32 @@ class TestTicker(unittest.TestCase):
for tkr in tkrs:
dat = yf.Ticker(tkr, session=self.session)
dat.history(period="1wk")
dat.history(period="5d")
dat.history(start="2022-01-01")
dat.history(start="2022-01-01", end="2022-03-01")
yf.download([tkr], period="1wk", threads=False, ignore_tz=False)
yf.download([tkr], period="1wk", threads=True, ignore_tz=False)
yf.download([tkr], period="1wk", threads=False, ignore_tz=True)
yf.download([tkr], period="1wk", threads=True, ignore_tz=True)
yf.download([tkr], period="5d", threads=False, ignore_tz=False)
yf.download([tkr], period="5d", threads=True, ignore_tz=False)
yf.download([tkr], period="5d", threads=False, ignore_tz=True)
yf.download([tkr], period="5d", threads=True, ignore_tz=True)
for k in dat.fast_info:
dat.fast_info[k]
for attribute_name, attribute_type in ticker_attributes:
assert_attribute_type(self, dat, attribute_name, attribute_type)
assert_attribute_type(self, dat, attribute_name, attribute_type)
def test_goodTicker_withProxy(self):
tkr = "IBM"
dat = yf.Ticker(tkr, session=self.session, proxy=self.proxy)
dat._fetch_ticker_tz(proxy=None, timeout=5)
dat._get_ticker_tz(proxy=None, timeout=5)
dat.history(period="1wk")
dat.history(period="5d")
for attribute_name, attribute_type in ticker_attributes:
assert_attribute_type(self, dat, attribute_name, attribute_type)
class TestTickerHistory(unittest.TestCase):
session = None
@@ -231,15 +266,6 @@ class TestTickerHistory(unittest.TestCase):
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
def test_reconstruct_intervals_batch(self):
data = self.ticker.history(period="3mo", interval="1d", prepost=True, repair=True)
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
reconstructed = self.ticker._reconstruct_intervals_batch(data, "1wk", True)
self.assertIsInstance(reconstructed, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
class TestTickerEarnings(unittest.TestCase):
session = None
@@ -278,22 +304,6 @@ class TestTickerEarnings(unittest.TestCase):
# Below will fail because not ported to Yahoo API
# def test_earnings(self):
# data = self.ticker.earnings
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
# data_cached = self.ticker.earnings
# self.assertIs(data, data_cached, "data not cached")
# def test_quarterly_earnings(self):
# data = self.ticker.quarterly_earnings
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
# data_cached = self.ticker.quarterly_earnings
# self.assertIs(data, data_cached, "data not cached")
# def test_earnings_forecasts(self):
# data = self.ticker.earnings_forecasts
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
@@ -363,7 +373,7 @@ class TestTickerHolders(unittest.TestCase):
data_cached = self.ticker.insider_transactions
self.assertIs(data, data_cached, "data not cached")
def test_insider_purchases(self):
data = self.ticker.insider_purchases
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
@@ -395,9 +405,9 @@ class TestTickerMiscFinancials(unittest.TestCase):
def setUp(self):
self.ticker = yf.Ticker("GOOGL", session=self.session)
# For ticker 'BSE.AX' (and others), Yahoo not returning
# full quarterly financials (usually cash-flow) with all entries,
# For ticker 'BSE.AX' (and others), Yahoo not returning
# full quarterly financials (usually cash-flow) with all entries,
# instead returns a smaller version in different data store.
self.ticker_old_fmt = yf.Ticker("BSE.AX", session=self.session)
@@ -676,15 +686,14 @@ class TestTickerMiscFinancials(unittest.TestCase):
data_cached = self.ticker.calendar
self.assertIs(data, data_cached, "data not cached")
# Below will fail because not ported to Yahoo API
# def test_sustainability(self):
# data = self.ticker.sustainability
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
# self.assertFalse(data.empty, "data is empty")
def test_sustainability(self):
data = self.ticker.sustainability
self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
self.assertFalse(data.empty, "data is empty")
# data_cached = self.ticker.sustainability
# self.assertIs(data, data_cached, "data not cached")
data_cached = self.ticker.sustainability
self.assertIs(data, data_cached, "data not cached")
# def test_shares(self):
# data = self.ticker.shares
@@ -706,7 +715,7 @@ class TestTickerAnalysts(unittest.TestCase):
def setUp(self):
self.ticker = yf.Ticker("GOOGL", session=self.session)
def tearDown(self):
self.ticker = None
@@ -806,7 +815,6 @@ class TestTickerInfo(unittest.TestCase):
# This one should have a trailing PEG ratio
data2 = self.tickers[2].info
self.assertIsInstance(data2['trailingPegRatio'], float)
pass
# def test_fast_info_matches_info(self):
# fast_info_keys = set()
@@ -844,7 +852,7 @@ class TestTickerInfo(unittest.TestCase):
# key_rename_map[yf.utils.snake_case_2_camelCase(k)] = key_rename_map[k]
# # Note: share count items in info[] are bad. Sometimes the float > outstanding!
# # So often fast_info["shares"] does not match.
# # So often fast_info["shares"] does not match.
# # Why isn't fast_info["shares"] wrong? Because using it to calculate market cap always correct.
# bad_keys = {"shares"}

View File

@@ -8,6 +8,8 @@ Specific test class:
python -m unittest tests.utils.TestTicker
"""
from unittest import TestSuite
# import pandas as pd
# import numpy as np
@@ -34,16 +36,16 @@ class TestCache(unittest.TestCase):
tkr = 'AMZN'
tz1 = "America/New_York"
tz2 = "London/Europe"
cache = yf.utils.get_tz_cache()
cache = yf.cache.get_tz_cache()
cache.store(tkr, tz1)
cache.store(tkr, tz2)
def test_setTzCacheLocation(self):
self.assertEqual(yf.utils._DBManager.get_location(), self.tempCacheDir.name)
self.assertEqual(yf.cache._TzDBManager.get_location(), self.tempCacheDir.name)
tkr = 'AMZN'
tz1 = "America/New_York"
cache = yf.utils.get_tz_cache()
cache = yf.cache.get_tz_cache()
cache.store(tkr, tz1)
self.assertTrue(os.path.exists(os.path.join(self.tempCacheDir.name, "tkr-tz.db")))
@@ -60,10 +62,10 @@ class TestCacheNoPermission(unittest.TestCase):
tz1 = "America/New_York"
# During attempt to store, will discover cannot write
yf.utils.get_tz_cache().store(tkr, tz1)
yf.cache.get_tz_cache().store(tkr, tz1)
# Handling the store failure replaces cache with a dummy
cache = yf.utils.get_tz_cache()
cache = yf.cache.get_tz_cache()
self.assertTrue(cache.dummy)
cache.store(tkr, tz1)
@@ -71,19 +73,19 @@ class TestCacheNoPermission(unittest.TestCase):
# Test that if cache path in read-only filesystem, no exception.
tkr = 'AMZN'
# During attempt to lookup, will discover cannot write
yf.utils.get_tz_cache().lookup(tkr)
yf.cache.get_tz_cache().lookup(tkr)
# Handling the lookup failure replaces cache with a dummy
cache = yf.utils.get_tz_cache()
cache = yf.cache.get_tz_cache()
self.assertTrue(cache.dummy)
cache.lookup(tkr)
def suite():
suite = unittest.TestSuite()
suite.addTest(TestCache('Test cache'))
suite.addTest(TestCacheNoPermission('Test cache no permission'))
return suite
ts: TestSuite = unittest.TestSuite()
ts.addTest(TestCache('Test cache'))
ts.addTest(TestCacheNoPermission('Test cache no permission'))
return ts
if __name__ == '__main__':

View File

@@ -29,19 +29,7 @@ from .cache import set_tz_cache_location
__version__ = version.version
__author__ = "Ran Aroussi"
import warnings
warnings.filterwarnings('default', category=DeprecationWarning, module='^yfinance')
def pdr_override():
"""
make pandas datareader optional
otherwise can be called via fix_yahoo_finance.download(...)
"""
try:
import pandas_datareader
pandas_datareader.data.get_data_yahoo = download
pandas_datareader.data.get_data_yahoo_actions = download
pandas_datareader.data.DataReader = download
except Exception:
pass
__all__ = ['download', 'Ticker', 'Tickers', 'pdr_override', 'enable_debug_mode', 'set_tz_cache_location']
__all__ = ['download', 'Ticker', 'Tickers', 'enable_debug_mode', 'set_tz_cache_location']

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
import peewee as _peewee
from threading import Lock
import os as _os
import appdirs as _ad
import platformdirs as _ad
import atexit as _atexit
import datetime as _datetime
import pickle as _pkl
@@ -145,7 +145,14 @@ class _TzCache:
db.connect()
tz_db_proxy.initialize(db)
db.create_tables([_KV])
try:
db.create_tables([_KV])
except _peewee.OperationalError as e:
if 'WITHOUT' in str(e):
_KV._meta.without_rowid = False
db.create_tables([_KV])
else:
raise
self.initialised = 1 # success
def lookup(self, key):
@@ -344,7 +351,14 @@ class _CookieCache:
db.connect()
Cookie_db_proxy.initialize(db)
db.create_tables([_CookieSchema])
try:
db.create_tables([_CookieSchema])
except _peewee.OperationalError as e:
if 'WITHOUT' in str(e):
_CookieSchema._meta.without_rowid = False
db.create_tables([_CookieSchema])
else:
raise
self.initialised = 1 # success
def lookup(self, strategy):

View File

@@ -30,7 +30,9 @@ fundamentals_keys = {
"AmortizationOfIntangiblesIncomeStatement", "DepreciationIncomeStatement", "ResearchAndDevelopment",
"SellingGeneralAndAdministration", "SellingAndMarketingExpense", "GeneralAndAdministrativeExpense",
"OtherGandA", "InsuranceAndClaims", "RentAndLandingFees", "SalariesAndWages", "GrossProfit",
"CostOfRevenue", "TotalRevenue", "ExciseTaxes", "OperatingRevenue"],
"CostOfRevenue", "TotalRevenue", "ExciseTaxes", "OperatingRevenue", "LossAdjustmentExpense",
"NetPolicyholderBenefitsAndClaims", "PolicyholderBenefitsGross", "PolicyholderBenefitsCeded",
"OccupancyAndEquipment", "ProfessionalExpenseAndContractServicesExpense", "OtherNonInterestExpense"],
'balance-sheet': ["TreasurySharesNumber", "PreferredSharesNumber", "OrdinarySharesNumber", "ShareIssued", "NetDebt",
"TotalDebt", "TangibleBookValue", "InvestedCapital", "WorkingCapital", "NetTangibleAssets",
"CapitalLeaseObligations", "CommonStockEquity", "PreferredStockEquity", "TotalCapitalization",
@@ -74,7 +76,8 @@ fundamentals_keys = {
"DuefromRelatedPartiesCurrent", "TaxesReceivable", "AccruedInterestReceivable", "NotesReceivable",
"LoansReceivable", "AccountsReceivable", "AllowanceForDoubtfulAccountsReceivable",
"GrossAccountsReceivable", "CashCashEquivalentsAndShortTermInvestments",
"OtherShortTermInvestments", "CashAndCashEquivalents", "CashEquivalents", "CashFinancial"],
"OtherShortTermInvestments", "CashAndCashEquivalents", "CashEquivalents", "CashFinancial",
"CashCashEquivalentsAndFederalFundsSold"],
'cash-flow': ["ForeignSales", "DomesticSales", "AdjustedGeographySegmentData", "FreeCashFlow",
"RepurchaseOfCapitalStock", "RepaymentOfDebt", "IssuanceOfDebt", "IssuanceOfCapitalStock",
"CapitalExpenditure", "InterestPaidSupplementalData", "IncomeTaxPaidSupplementalData",
@@ -115,7 +118,7 @@ fundamentals_keys = {
"PaymentstoSuppliersforGoodsandServices", "ClassesofCashReceiptsfromOperatingActivities",
"OtherCashReceiptsfromOperatingActivities", "ReceiptsfromGovernmentGrants", "ReceiptsfromCustomers"]}
price_colnames = ['Open', 'High', 'Low', 'Close', 'Adj Close']
_PRICE_COLNAMES_ = ['Open', 'High', 'Low', 'Close', 'Adj Close']
quote_summary_valid_modules = (
"summaryProfile", # contains general information about the company

View File

@@ -60,7 +60,23 @@ class YfData(metaclass=SingletonMeta):
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
def __init__(self, session=None):
self._session = session or requests.Session()
self._crumb = None
self._cookie = None
# Default to using 'basic' strategy
self._cookie_strategy = 'basic'
# If it fails, then fallback method is 'csrf'
# self._cookie_strategy = 'csrf'
self._cookie_lock = threading.Lock()
self._set_session(session or requests.Session())
def _set_session(self, session):
if session is None:
return
with self._cookie_lock:
self._session = session
try:
self._session.cache
@@ -74,23 +90,6 @@ class YfData(metaclass=SingletonMeta):
self._session_is_caching = True
from requests_cache import DO_NOT_CACHE
self._expire_after = DO_NOT_CACHE
self._crumb = None
self._cookie = None
if self._session_is_caching and self._cookie is None:
utils.print_once("WARNING: cookie & crumb does not work well with requests_cache. Am experimenting with 'expire_after=DO_NOT_CACHE', but you need to help stress-test.")
# Default to using 'basic' strategy
self._cookie_strategy = 'basic'
# If it fails, then fallback method is 'csrf'
# self._cookie_strategy = 'csrf'
self._cookie_lock = threading.Lock()
def _set_session(self, session):
if session is None:
return
with self._cookie_lock:
self._session = session
def _set_cookie_strategy(self, strategy, have_lock=False):
if strategy == self._cookie_strategy:

View File

@@ -1,12 +1,50 @@
class YFinanceException(Exception):
class YFException(Exception):
def __init__(self, description=""):
super().__init__(description)
class YFDataException(YFException):
pass
class YFinanceDataException(YFinanceException):
pass
class YFChartError(YFException):
def __init__(self, ticker, description):
self.ticker = ticker
super().__init__(f"{self.ticker}: {description}")
class YFNotImplementedError(NotImplementedError):
def __init__(self, method_name):
super().__init__(f"Have not implemented fetching '{method_name}' from Yahoo API")
class YFTickerMissingError(YFException):
def __init__(self, ticker, rationale):
super().__init__(f"${ticker}: possibly delisted; {rationale}")
self.rationale = rationale
self.ticker = ticker
class YFTzMissingError(YFTickerMissingError):
def __init__(self, ticker):
super().__init__(ticker, "No timezone found")
class YFPricesMissingError(YFTickerMissingError):
def __init__(self, ticker, debug_info):
self.debug_info = debug_info
super().__init__(ticker, f"No price data found {debug_info}")
class YFEarningsDateMissing(YFTickerMissingError):
# note that this does not get raised. Added in case of raising it in the future
def __init__(self, ticker):
super().__init__(ticker, "No earnings dates found")
class YFInvalidPeriodError(YFException):
def __init__(self, ticker, invalid_period, valid_ranges):
self.ticker = ticker
self.invalid_period = invalid_period
self.valid_ranges = valid_ranges
super().__init__(f"{self.ticker}: Period '{invalid_period}' is invalid, must be one of {valid_ranges}")

View File

@@ -36,7 +36,7 @@ from . import shared
@utils.log_indent_decorator
def download(tickers, start=None, end=None, actions=False, threads=True, ignore_tz=None,
group_by='column', auto_adjust=False, back_adjust=False, repair=False, keepna=False,
progress=True, period="max", show_errors=None, interval="1d", prepost=False,
progress=True, period="max", interval="1d", prepost=False,
proxy=None, rounding=False, timeout=10, session=None):
"""Download yahoo tickers
:Parameters:
@@ -80,9 +80,6 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
Optional. Proxy server URL scheme. Default is None
rounding: bool
Optional. Round values to 2 decimal places?
show_errors: bool
Optional. Doesn't print errors if False
DEPRECATED, will be removed in future version
timeout: None or float
If not None stops waiting for a response after given number of
seconds. (Can also be a fraction of a second e.g. 0.01)
@@ -91,14 +88,6 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
"""
logger = utils.get_yf_logger()
if show_errors is not None:
if show_errors:
utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead: logging.getLogger('yfinance').setLevel(logging.ERROR)")
logger.setLevel(logging.ERROR)
else:
utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead to suppress error messages: logging.getLogger('yfinance').setLevel(logging.CRITICAL)")
logger.setLevel(logging.CRITICAL)
if logger.isEnabledFor(logging.DEBUG):
if threads:
# With DEBUG, each thread generates a lot of log messages.
@@ -217,11 +206,11 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
try:
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
keys=shared._DFS.keys())
keys=shared._DFS.keys(), names=['Ticker', 'Price'])
except Exception:
_realign_dfs()
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
keys=shared._DFS.keys())
keys=shared._DFS.keys(), names=['Ticker', 'Price'])
data.index = _pd.to_datetime(data.index)
# switch names back to isins if applicable
data.rename(columns=shared._ISINS, inplace=True)

View File

@@ -1,12 +1,12 @@
import datetime
import json
import warnings
import pandas as pd
from yfinance import utils, const
from yfinance.data import YfData
from yfinance.exceptions import YFinanceException, YFNotImplementedError
from yfinance.exceptions import YFException, YFNotImplementedError
class Fundamentals:
@@ -30,9 +30,8 @@ class Fundamentals:
@property
def earnings(self) -> dict:
if self._earnings is None:
raise YFNotImplementedError('earnings')
return self._earnings
warnings.warn("'Ticker.earnings' is deprecated as not available via API. Look for \"Net Income\" in Ticker.income_stmt.", DeprecationWarning)
return None
@property
def shares(self) -> pd.DataFrame:
@@ -70,7 +69,7 @@ class Financials:
@utils.log_indent_decorator
def _fetch_time_series(self, name, timescale, proxy=None):
# Fetching time series preferred over scraping 'QuoteSummaryStore',
# because it matches what Yahoo shows. But for some tickers returns nothing,
# because it matches what Yahoo shows. But for some tickers returns nothing,
# despite 'QuoteSummaryStore' containing valid data.
allowed_names = ["income", "balance-sheet", "cash-flow"]
@@ -79,14 +78,14 @@ class Financials:
if name not in allowed_names:
raise ValueError(f"Illegal argument: name must be one of: {allowed_names}")
if timescale not in allowed_timescales:
raise ValueError(f"Illegal argument: timescale must be one of: {allowed_names}")
raise ValueError(f"Illegal argument: timescale must be one of: {allowed_timescales}")
try:
statement = self._create_financials_table(name, timescale, proxy)
if statement is not None:
return statement
except YFinanceException as e:
except YFException as e:
utils.get_yf_logger().error(f"{self._symbol}: Failed to create {name} financials table for reason: {e}")
return pd.DataFrame()

1703
yfinance/scrapers/history.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -6,7 +6,7 @@ import requests
from yfinance import utils
from yfinance.data import YfData
from yfinance.const import _BASE_URL_
from yfinance.exceptions import YFinanceDataException
from yfinance.exceptions import YFDataException
_QUOTE_SUMMARY_URL_ = f"{_BASE_URL_}/v10/finance/quoteSummary/"
@@ -73,8 +73,8 @@ class Holders:
def _fetch(self, proxy):
modules = ','.join(
["institutionOwnership", "fundOwnership", "majorDirectHolders", "majorHoldersBreakdown", "insiderTransactions", "insiderHolders", "netSharePurchaseActivity"])
params_dict = {"modules": modules, "corsDomain": "finance.yahoo.com", "symbol": self._symbol, "formatted": "false"}
result = self._data.get_raw_json(_QUOTE_SUMMARY_URL_, user_agent_headers=self._data.user_agent_headers, params=params_dict, proxy=proxy)
params_dict = {"modules": modules, "corsDomain": "finance.yahoo.com", "formatted": "false"}
result = self._data.get_raw_json(f"{_QUOTE_SUMMARY_URL_}/{self._symbol}", user_agent_headers=self._data.user_agent_headers, params=params_dict, proxy=proxy)
return result
def _fetch_and_parse(self):
@@ -104,7 +104,7 @@ class Holders:
self._parse_insider_holders(data["insiderHolders"])
self._parse_net_share_purchase_activity(data["netSharePurchaseActivity"])
except (KeyError, IndexError):
raise YFinanceDataException("Failed to parse holders json data.")
raise YFDataException("Failed to parse holders json data.")
@staticmethod
def _parse_raw_values(data):
@@ -189,7 +189,7 @@ class Holders:
if not df.empty:
df["positionDirectDate"] = pd.to_datetime(df["positionDirectDate"], unit="s")
df["latestTransDate"] = pd.to_datetime(df["latestTransDate"], unit="s")
df.rename(columns={
"name": "Name",
"relation": "Position",
@@ -242,5 +242,3 @@ class Holders:
}
).convert_dtypes()
self._insider_purchases = df

View File

@@ -1,8 +1,5 @@
import datetime
import json
import logging
import warnings
from collections.abc import MutableMapping
import numpy as _np
import pandas as pd
@@ -11,7 +8,7 @@ import requests
from yfinance import utils
from yfinance.data import YfData
from yfinance.const import quote_summary_valid_modules, _BASE_URL_
from yfinance.exceptions import YFNotImplementedError, YFinanceDataException, YFinanceException
from yfinance.exceptions import YFDataException, YFException
info_retired_keys_price = {"currentPrice", "dayHigh", "dayLow", "open", "previousClose", "volume", "volume24Hr"}
info_retired_keys_price.update({"regularMarket"+s for s in ["DayHigh", "DayLow", "Open", "PreviousClose", "Price", "Volume"]})
@@ -26,57 +23,6 @@ info_retired_keys = info_retired_keys_price | info_retired_keys_exchange | info_
_QUOTE_SUMMARY_URL_ = f"{_BASE_URL_}/v10/finance/quoteSummary"
class InfoDictWrapper(MutableMapping):
""" Simple wrapper around info dict, intercepting 'gets' to
print how-to-migrate messages for specific keys. Requires
override dict API"""
def __init__(self, info):
self.info = info
def keys(self):
return self.info.keys()
def __str__(self):
return self.info.__str__()
def __repr__(self):
return self.info.__repr__()
def __contains__(self, k):
return k in self.info.keys()
def __getitem__(self, k):
if k in info_retired_keys_price:
warnings.warn(f"Price data removed from info (key='{k}'). Use Ticker.fast_info or history() instead", DeprecationWarning)
return None
elif k in info_retired_keys_exchange:
warnings.warn(f"Exchange data removed from info (key='{k}'). Use Ticker.fast_info or Ticker.get_history_metadata() instead", DeprecationWarning)
return None
elif k in info_retired_keys_marketCap:
warnings.warn(f"Market cap removed from info (key='{k}'). Use Ticker.fast_info instead", DeprecationWarning)
return None
elif k in info_retired_keys_symbol:
warnings.warn(f"Symbol removed from info (key='{k}'). You know this already", DeprecationWarning)
return None
return self.info[self._keytransform(k)]
def __setitem__(self, k, value):
self.info[self._keytransform(k)] = value
def __delitem__(self, k):
del self.info[self._keytransform(k)]
def __iter__(self):
return iter(self.info)
def __len__(self):
return len(self.info)
def _keytransform(self, k):
return k
class FastInfo:
# Contain small subset of info[] items that can be fetched faster elsewhere.
# Imitates a dict.
@@ -182,10 +128,7 @@ class FastInfo:
def _get_1y_prices(self, fullDaysOnly=False):
if self._prices_1y is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1y = self._tkr.history(period="380d", auto_adjust=False, keepna=True, proxy=self.proxy)
logging.disable(logging.NOTSET)
self._prices_1y = self._tkr.history(period="1y", auto_adjust=False, keepna=True, proxy=self.proxy)
self._md = self._tkr.get_history_metadata(proxy=self.proxy)
try:
ctp = self._md["currentTradingPeriod"]
@@ -211,18 +154,12 @@ class FastInfo:
def _get_1wk_1h_prepost_prices(self):
if self._prices_1wk_1h_prepost is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1wk_1h_prepost = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=True, proxy=self.proxy)
logging.disable(logging.NOTSET)
self._prices_1wk_1h_prepost = self._tkr.history(period="5d", interval="1h", auto_adjust=False, prepost=True, proxy=self.proxy)
return self._prices_1wk_1h_prepost
def _get_1wk_1h_reg_prices(self):
if self._prices_1wk_1h_reg is None:
# Temporarily disable error printing
logging.disable(logging.CRITICAL)
self._prices_1wk_1h_reg = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=False, proxy=self.proxy)
logging.disable(logging.NOTSET)
self._prices_1wk_1h_reg = self._tkr.history(period="5d", interval="1h", auto_adjust=False, prepost=False, proxy=self.proxy)
return self._prices_1wk_1h_reg
def _get_exchange_metadata(self):
@@ -261,8 +198,6 @@ class FastInfo:
if self._currency is not None:
return self._currency
if self._tkr._history_metadata is None:
self._get_1y_prices()
md = self._tkr.get_history_metadata(proxy=self.proxy)
self._currency = md["currency"]
return self._currency
@@ -272,8 +207,6 @@ class FastInfo:
if self._quote_type is not None:
return self._quote_type
if self._tkr._history_metadata is None:
self._get_1y_prices()
md = self._tkr.get_history_metadata(proxy=self.proxy)
self._quote_type = md["instrumentType"]
return self._quote_type
@@ -579,7 +512,15 @@ class Quote:
@property
def sustainability(self) -> pd.DataFrame:
if self._sustainability is None:
raise YFNotImplementedError('sustainability')
result = self._fetch(self.proxy, modules=['esgScores'])
if result is None:
self._sustainability = pd.DataFrame()
else:
try:
data = result["quoteSummary"]["result"][0]
except (KeyError, IndexError):
raise YFDataException(f"Failed to parse json response from Yahoo Finance: {result}")
self._sustainability = pd.DataFrame(data)
return self._sustainability
@property
@@ -592,7 +533,7 @@ class Quote:
try:
data = result["quoteSummary"]["result"][0]["recommendationTrend"]["trend"]
except (KeyError, IndexError):
raise YFinanceDataException(f"Failed to parse json response from Yahoo Finance: {result}")
raise YFDataException(f"Failed to parse json response from Yahoo Finance: {result}")
self._recommendations = pd.DataFrame(data)
return self._recommendations
@@ -606,14 +547,14 @@ class Quote:
try:
data = result["quoteSummary"]["result"][0]["upgradeDowngradeHistory"]["history"]
if len(data) == 0:
raise YFinanceDataException(f"No upgrade/downgrade history found for {self._symbol}")
raise YFDataException(f"No upgrade/downgrade history found for {self._symbol}")
df = pd.DataFrame(data)
df.rename(columns={"epochGradeDate": "GradeDate", 'firm': 'Firm', 'toGrade': 'ToGrade', 'fromGrade': 'FromGrade', 'action': 'Action'}, inplace=True)
df.set_index('GradeDate', inplace=True)
df.index = pd.to_datetime(df.index, unit='s')
self._upgrades_downgrades = df
except (KeyError, IndexError):
raise YFinanceDataException(f"Failed to parse json response from Yahoo Finance: {result}")
raise YFDataException(f"Failed to parse json response from Yahoo Finance: {result}")
return self._upgrades_downgrades
@property
@@ -628,11 +569,11 @@ class Quote:
def _fetch(self, proxy, modules: list):
if not isinstance(modules, list):
raise YFinanceException("Should provide a list of modules, see available modules using `valid_modules`")
raise YFException("Should provide a list of modules, see available modules using `valid_modules`")
modules = ','.join([m for m in modules if m in quote_summary_valid_modules])
if len(modules) == 0:
raise YFinanceException("No valid modules provided, see available modules using `valid_modules`")
raise YFException("No valid modules provided, see available modules using `valid_modules`")
params_dict = {"modules": modules, "corsDomain": "finance.yahoo.com", "formatted": "false", "symbol": self._symbol}
try:
result = self._data.get_raw_json(_QUOTE_SUMMARY_URL_ + f"/{self._symbol}", user_agent_headers=self._data.user_agent_headers, params=params_dict, proxy=proxy)
@@ -733,10 +674,11 @@ class Quote:
json_str = self._data.cache_get(url=url, proxy=proxy).text
json_data = json.loads(json_str)
if json_data["timeseries"]["error"] is not None:
raise YFinanceException("Failed to parse json response from Yahoo Finance: " + json_data["error"])
json_result = json_data.get("timeseries") or json_data.get("finance")
if json_result["error"] is not None:
raise YFException("Failed to parse json response from Yahoo Finance: " + str(json_result["error"]))
for k in keys:
keydict = json_data["timeseries"]["result"][0]
keydict = json_result["result"][0]
if k in keydict:
self._info[k] = keydict[k][-1]["reportedValue"]["raw"]
else:
@@ -767,4 +709,4 @@ class Quote:
self._calendar['Revenue Low'] = earnings.get('revenueLow', None)
self._calendar['Revenue Average'] = earnings.get('revenueAverage', None)
except (KeyError, IndexError):
raise YFinanceDataException(f"Failed to parse json response from Yahoo Finance: {result}")
raise YFDataException(f"Failed to parse json response from Yahoo Finance: {result}")

View File

@@ -21,12 +21,12 @@
from __future__ import print_function
import datetime as _datetime
from collections import namedtuple as _namedtuple
import pandas as _pd
from .base import TickerBase
from .const import _BASE_URL_
class Ticker(TickerBase):
@@ -40,15 +40,14 @@ class Ticker(TickerBase):
def _download_options(self, date=None):
if date is None:
url = f"{self._base_url}/v7/finance/options/{self.ticker}"
url = f"{_BASE_URL_}/v7/finance/options/{self.ticker}"
else:
url = f"{self._base_url}/v7/finance/options/{self.ticker}?date={date}"
url = f"{_BASE_URL_}/v7/finance/options/{self.ticker}?date={date}"
r = self._data.get(url=url, proxy=self.proxy).json()
if len(r.get('optionChain', {}).get('result', [])) > 0:
for exp in r['optionChain']['result'][0]['expirationDates']:
self._expirations[_datetime.datetime.utcfromtimestamp(
exp).strftime('%Y-%m-%d')] = exp
self._expirations[_pd.Timestamp(exp, unit='s').strftime('%Y-%m-%d')] = exp
self._underlying = r['optionChain']['result'][0].get('quote', {})
@@ -134,7 +133,7 @@ class Ticker(TickerBase):
return self.get_dividends()
@property
def capital_gains(self):
def capital_gains(self) -> _pd.Series:
return self.get_capital_gains()
@property
@@ -146,7 +145,7 @@ class Ticker(TickerBase):
return self.get_actions()
@property
def shares(self) -> _pd.DataFrame :
def shares(self) -> _pd.DataFrame:
return self.get_shares()
@property
@@ -259,7 +258,7 @@ class Ticker(TickerBase):
return tuple(self._expirations.keys())
@property
def news(self):
def news(self) -> list:
return self.get_news()
@property

View File

@@ -26,7 +26,7 @@ import logging
import re as _re
import sys as _sys
import threading
from functools import lru_cache
from functools import lru_cache, wraps
from inspect import getmembers
from types import FunctionType
from typing import List, Optional
@@ -95,6 +95,7 @@ def get_indented_logger(name=None):
def log_indent_decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
logger = get_indented_logger('yfinance')
logger.debug(f'Entering {func.__name__}()')
@@ -689,7 +690,7 @@ def safe_merge_dfs(df_main, df_sub, interval):
df_main['Dividends'] = 0.0
return df_main
else:
empty_row_data = {**{c:[_np.nan] for c in const.price_colnames}, 'Volume':[0]}
empty_row_data = {**{c:[_np.nan] for c in const._PRICE_COLNAMES_}, 'Volume':[0]}
if interval == '1d':
# For 1d, add all out-of-range event dates
for i in _np.where(f_outOfRange)[0]:
@@ -772,7 +773,7 @@ def fix_Yahoo_dst_issue(df, interval):
f_pre_midnight = (df.index.minute == 0) & (df.index.hour.isin([22, 23]))
dst_error_hours = _np.array([0] * df.shape[0])
dst_error_hours[f_pre_midnight] = 24 - df.index[f_pre_midnight].hour
df.index += _pd.TimedeltaIndex(dst_error_hours, 'h')
df.index += _pd.to_timedelta(dst_error_hours, 'h')
return df

View File

@@ -1 +1 @@
version = "0.2.35"
version = "0.2.41"