Compare commits
293 Commits

SHA1:
930b305327 7af213dea2 049337327e e65ca40d95 fe00fd5152 cb691df586 4bc546cb71 f3c9f9962d
da1c466550 97f35b721c 7c41434f44 070f13577e 7628bec2a6 ac4efa3e3d 5a683b916d 30fdc96157
ee87a95b8d 685ef71d9f 098e77659c dc5c718556 84ba6d7d88 e238ac1f95 efe15e1907 2dcbe34910
bb47cd4182 94e3833e90 f8e8eecf44 a1bcb4c351 18089f451b 3d180fcf2c 82a3145fdf 48e9075a2d
88e8ddb7f5 812931ff98 1967e974c1 942a26fd37 c2d568367c d3728d3071 915bb1a080 d55c317158
ac1d09049e afb4e0d5dc 1d31e7ca01 683064f9ad cdf897f9e6 eab6c8dfa7 97f93d35ed 5aef8addab
6b8a4a5608 212a7987c3 58a0a57457 75297c0eba 1dc2719368 ab979e9141 b837c1ec2a 2630c66cd1
7af789fe9a 73e36688b7 f1264716fc 06fd35121a 91f468e4d3 d00c1a976c 176c3d628b 8f53af1593
19188d52d4 ffaf200562 6686258e66 47bc46c804 f563e51509 c5404bcd9d 006e0a155b dbc55e5596
4ce63fe8ca 223f5337a8 4c34487149 ac8a917288 15321bd097 10961905b6 acbd2a8d78 61c4696c65
a7c41afa52 49d8dfd544 477dc6e6c4 7e6ad0834c c94cbb64d4 c053e2cb30 112b297c41 5195c3a798
c1ad2589da d1a34a4da0 d44eff4065 db670aefd7 a3095d2a40 f753e6090d 9021fe52b4 281cc64a4a
8975689bd1 24f53e935d a6790606ef 122269cf53 a914647fa4 dc957eeb0e f8d65d0def f32097e157
469037be80 9648e69b7e f718db6c2f c8280e4001 53c29480b6 4a5616d5c4 5e0006e4b3 2b1a26ef0c
8fdf53233f 4175885747 580502941a 1863b211cd 0bcd2dc725 c60e590bd7 fce4707340 f7825c1c3a
27ef2bcd1a fb2006b814 9b9158050a f30e4ebd4c f08fe83290 ca2040f5fd 1cfeddff59 1ab476b14f
ae2ae7bce4 1d3ef4f733 a3ac9fc72d 03a1f03583 af9a356fd5 9b6e35bdcd 4d4e56cdc8 91efcd8f7d
63a3531edc 1b0d8357d6 4466e57b95 6d3d6b659c b696add360 06751a0b9c ba3c1b5ac6 7432d2939c
ba977a16a2 9a3d60105c 0521428f69 308e58b914 f6beadf448 d607c43967 4c1669ad9d 7da64b679e
38f8ccd40a 13acc3dc97 cc1ac7bbcc 75449fd0ac 22e0c414c4 37d60e6efb dac9a48742 bd52326091
9581b8bd45 62b2c25da8 7618dda5d0 95ef486e13 9e59f6b61c 716cd65fd3 5b1605b5a1 412cfbcd6d
6abee6df44 fad21dfeac fc27f9c367 bb79b573ed 127b53ee7f 88525abcbd 99ef055cc4 0f36f7980b
8282af9ce4 5208c8cf05 d3dfb4c6a8 279726afe4 937386f3ef 32e569f652 de59f0b2c6 7d6d8562e8
6cae6d45b1 ec3de0710d 0713d93867 67e81a8f9a b6372c0945 c9dd582dd8 677f3d5702 4f9b05a546
e1f94ed337 93a7ee6161 5b0cb60cf5 1a97c22874 b0de31da63 cc87608824 6c1e26093c e8fdd12cb1
93b6e024da d5282967ce 9908c1ff48 a4d7d6c577 f9080c22a5 32e1d479b1 5729ce3cb6 d0b2070036
688120cab7 4a1e1c4447 f99677ed1e 6a613eb114 0503240973 ae6c05fa74 aa9a0286a1 ddf0cf19cd
a2bde88c36 1bd819ac4d 1b9fc5f12f 274f309052 edac283a60 781fad501f 39527d24d4 45f1c88460
7d638e1040 97b13dfa8c 693565a85b 957051e0e8 bd81ebb4e9 46f53f9957 056b84d8fe 835dbd9629
07a4594455 736c03ac5b adfa2e9beb b286797e8c b306bef350 61c89660df 31af2ab1d5 21c380fa61
e0000cd787 11d43eb1a1 509a109f29 b0639409a3 ed10feee9a aba81eedc2 d424d027ac 9268fcfa76
711e1138d3 0789b690a4 6055566de8 398a19a855 e771cfabb6 5b676f803b eb5c50d5c7 1cb0b215c4
50dcb2ce5a 1ce9ce2784 cd4816e289 27e9ce7542 02c1c60f3b 27ea9472c1 801f58790a 080834e3ce
4e7b2094d0 c72e04bf55 abbe4c3a2f 9e21b85043 b44917b7f9 6f78dd6e6b 593dc8fcee b94baa4cc5
1a054135fb 4e2253a406 9af7ec0a4e e57647c1d7 762abd8bba d1ea402792 65f65b1776 9388c29207
9f91f4b180 f8aab533ba 5cdc78f479 ba634fad0e 8a5ca71f52 141ce7e471 4eae728a06 2d6b6b26ed
ec3dfaf305 e89d390824 563a1a3448 2e6d3d0e60 553bc5965a
44 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -1,44 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

# IMPORTANT

# Read and follow these instructions carefully. Help us help you.

### Are you up-to-date?

Upgrade to the latest version and confirm the issue/bug is still there.

`$ pip install yfinance --upgrade --no-cache-dir`

Confirm by running:

`import yfinance as yf ; print(yf.__version__)`

and comparing against [PIP](https://pypi.org/project/yfinance/#history).

### Does Yahoo actually have the data?

Are you spelling symbol *exactly* same as Yahoo?

Then visit `finance.yahoo.com` and confirm they have the data you want. Maybe your symbol was delisted, or your expectations of `yfinance` are wrong.

### Are you spamming Yahoo?

Yahoo Finance free service has rate-limiting depending on request type - roughly 60/minute for prices, 10/minute for info. Once limit hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.

### Still think it's a bug?

**Delete these instructions** and replace with your bug report, providing the following as best you can:

- Simple code that reproduces your problem, that we can copy-paste-run.
- Run code with [debug logging enabled](https://github.com/ranaroussi/yfinance/tree/dev#logging) and post the full output.
- If you think `yfinance` returning bad data, give us proof.
- `yfinance` version and Python version.
- Operating system type.
89 .github/ISSUE_TEMPLATE/bug_report.yaml (vendored, new file)
@@ -0,0 +1,89 @@
name: Bug report
description: Report a bug in our project
labels: ["bug"]

body:
  - type: markdown
    attributes:
      value: |
        # !!! IMPORTANT !!! FOLLOW THESE INSTRUCTIONS CAREFULLY !!!

        ### Are you up-to-date?

        Upgrade to the latest version: `$ pip install yfinance --upgrade --no-cache-dir`

        Confirm latest version by running: `import yfinance as yf ; print(yf.__version__)` and comparing against [PyPI](https://pypi.org/project/yfinance/#history).

        ### Does Yahoo actually have the data?

        Are you spelling symbol *exactly* same as Yahoo?

        Then visit `finance.yahoo.com` and confirm they have the data you want. Maybe your symbol was delisted, or your expectations of `yfinance` are wrong.

        ### Are you spamming Yahoo?

        Yahoo Finance free service has rate-limiting https://github.com/ranaroussi/yfinance/discussions/1513. Once limit hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.

        ### Does issue already exist?

        Use the search tool. Don't duplicate existing issues.

  - type: markdown
    attributes:
      value: |
        ---
        ## Still think it's a bug?

        Provide the following as best you can:

  - type: textarea
    id: summary
    attributes:
      label: "Describe bug"
    validations:
      required: true

  - type: textarea
    id: code
    attributes:
      label: "Simple code that reproduces your problem"
      description: "Provide a snippet of code that we can copy-paste-run. Wrap code in Python Markdown code blocks for proper formatting (```` ```python ... ``` ````)."
    validations:
      required: true

  - type: textarea
    id: debug-log
    attributes:
      label: "Debug log"
      description: "Run code with debug logging enabled and post the full output. IMPORTANT INSTRUCTIONS: https://github.com/ranaroussi/yfinance/tree/main#logging"
    validations:
      required: true

  - type: textarea
    id: bad-data-proof
    attributes:
      label: "Bad data proof"
      description: "If you think `yfinance` returning bad data, provide your proof here."
    validations:
      required: false

  - type: input
    id: version-yfinance
    attributes:
      label: "`yfinance` version"
    validations:
      required: true

  - type: input
    id: version-python
    attributes:
      label: "Python version"
    validations:
      required: false

  - type: input
    id: os
    attributes:
      label: "Operating system"
    validations:
      required: false
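Both the old Markdown template and the new YAML form open with the same triage step: confirm the installed version against PyPI before filing. A runnable sketch of that check, using only the calls the templates themselves quote:

```python
import yfinance as yf

# Compare this against the latest release listed at
# https://pypi.org/project/yfinance/#history before reporting a bug.
print(yf.__version__)
```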
6 .github/workflows/ci.yml (vendored)
@@ -8,11 +8,11 @@ jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - uses: actions/setup-python@v2
    - uses: actions/checkout@v4
    - uses: actions/setup-python@v4
      with:
        python-version: 3.x
    - run: pip install -r requirements.txt
    - run: pip install mkdocstrings==0.14.0
    - run: pip install mkdocs-material
    - run: mkdocs gh-deploy --force
    - run: mkdocs gh-deploy --force
13 .github/workflows/ruff.yml (vendored, new file)
@@ -0,0 +1,13 @@
name: Ruff
on:
  pull_request:
    branches:
      - master
      - main
      - dev
jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: chartboost/ruff-action@v1
1 .gitignore (vendored)
@@ -4,6 +4,7 @@ dist
yfinance.egg-info
*.pyc
.coverage
.idea/
.vscode/
build/
*.html
138 CHANGELOG.rst
@@ -1,6 +1,144 @@
Change Log
===========

0.2.40
------
Fix typo in 0.2.39 c7af213

0.2.39
------
Fixes:
- Fix switching session from/to requests_cache #1930
Price repair:
- Fix potential for price repair to discard price=0 rows #1874
- Don't price-repair FX volume=0, is normal #1920
- Improve 'sudden change' repair for splits & currency #1931
Information:
- Fix help(yf.download) not showing the information about the function #1913 @vittoboa
- Add more specific error throwing based on PR 1918 #1928 @elibroftw @marcofognog
Maintenance:
- Replace dead 'appdirs' package with 'platformdirs' #1896
- Deprecate 'pandas_datareader', remove a deprecated argument #1897
- Fix: datetime.datetime.utcnow() is deprecated ... #1922

0.2.38
------
Fix holders & insiders #1908

0.2.37
------
Small fixes:
- Fix Pandas warnings #1838 #1844
- Fix price repair bug, typos, refactor #1866 #1865 #1849
- Stop disabling logging #1841

0.2.36
------
Small fixes:
- Update README.md for better copy-ability #1823
- Name download() column levels #1795
- Fix history(keepna=False) when repair=True #1824
- Replace empty list with empty pd.Series #1724
- Handle peewee with old sqlite #1827
- Fix JSON error handling #1830 #1833

0.2.35
------
Internal fixes for 0.2.34

0.2.34
------
Features:
- Add Recommendations Trend Summary #1754
- Add Recommendation upgrades & downgrades #1773
- Add Insider Roster & Transactions #1772
- Moved download() progress bar to STDERR #1776
- PIP optional dependencies #1771
- Set sensible min versions for optional 'nospam' reqs #1807
Fixes
- Fix download() DatetimeIndex on invalid symbols #1779
- Fix invalid date entering cache DB #1796
- Fix Ticker.calendar fetch #1790
- Fixed adding complementary to info #1774
- Ticker.earnings_dates: fix warning "Value 'NaN' has dtype incompatible with float64" #1810
- Minor fixes for price repair and related tests #1768
- Fix price repair div adjust #1798
- Fix 'raise_errors' argument ignored in Ticker.history() #1806
Maintenance
- Fix regression: _get_ticker_tz() args were being swapped. Improve its unit test #1793
- Refactor Ticker proxy #1711
- Add Ruff linter checks #1756
- Resolve Pandas FutureWarnings #1766

0.2.33
------
Cookie fixes:
- fix backup strategy #1759
- fix Ticker(ISIN) #1760

0.2.32
------
Add cookie & crumb to requests #1657

0.2.31
------
- Fix TZ cache exception blocking import #1705 #1709
- Fix merging pre-market events with intraday prices #1703

0.2.30
------
- Fix OperationalError #1698

0.2.29
------
- Fix pandas warning when retrieving quotes. #1672
- Replace sqlite3 with peewee for 100% thread-safety #1675
- Fix merging events with intraday prices #1684
- Fix error when calling enable_debug_mode twice #1687
- Price repair fixes #1688

0.2.28
------
- Fix TypeError: 'FastInfo' object is not callable #1636
- Improve & fix price repair #1633 #1660
- option_chain() also return underlying data #1606

0.2.27
------
Bug fixes:
- fix merging 1d-prices with out-of-range divs/splits #1635
- fix multithread error 'tz already in cache' #1648

0.2.26
------
Proxy improvements
- bug fixes #1371
- security fix #1625

0.2.25
------
Fix single ISIN as ticker #1611
Fix 'Only 100 years allowed' error #1576

0.2.24
------
Fix info[] missing values #1603

0.2.23
------
Fix 'Unauthorized' error #1595

0.2.22
------
Fix unhandled 'sqlite3.DatabaseError' #1574

0.2.21
------
Fix financials tables #1568
Price repair update: fix Yahoo messing up dividend and split adjustments #1543
Fix logging behaviour #1562
Fix merge future div/split into prices #1567

0.2.20
------
Switch to `logging` module #1493 #1522 #1541
15 CODE_OF_CONDUCT.md (new file)
@@ -0,0 +1,15 @@
# Code of Conduct

## Submitting a new issue

* Search through existing Issues and Discussions, in case your issue already exists and a solution is being developed.
* Ensure you read & follow the template form.
* Consider you may be the best person to investigate and fix.

## Contributing to an existing Issue

* Read the entire thread.
* Ensure your comment is contributing something new/useful. Remember you can simply react to other comments.
* Be concise:
  - use the formatting options
  - if replying to a big comment, instead of quoting it, link to it
152 README.md
@@ -42,19 +42,33 @@ Yahoo! finance API is intended for personal use only.**

---

## News
- [Installation](#installation)
- [Quick start](#quick-start)
- [Advanced](#logging)
- [Wiki](https://github.com/ranaroussi/yfinance/wiki)
- [Contribute](#developers-want-to-contribute)

### 2023-01-27
Since December 2022 Yahoo has been encrypting the web data that `yfinance` scrapes for non-price data. Price data still works. Fortunately the decryption keys are available, although Yahoo moved/changed them several times hence `yfinance` breaking several times. `yfinance` is now better prepared for any future changes by Yahoo.
---

Why is Yahoo doing this? We don't know. Is it to stop scrapers? Maybe, so we've implemented changes to reduce load on Yahoo. In December we rolled out version 0.2 with optimised scraping. Then in 0.2.6 introduced `Ticker.fast_info`, providing much faster access to some `Ticker.info` elements wherever possible e.g. price stats and forcing users to switch (sorry but we think necessary).
## Installation

### 2023-02-07
Yahoo is now regularly changing their decryption key, breaking `yfinance` decryption. Is technically possible to extract this from their webpage but not implemented because difficult, see [discussion in the issue thread](https://github.com/ranaroussi/yfinance/issues/1407).
Install `yfinance` using `pip`:

### 2023-04-09
``` {.sourceCode .bash}
$ pip install yfinance --upgrade --no-cache-dir
```

Fixed `Ticker.info`
[With Conda](https://anaconda.org/ranaroussi/yfinance).

To install with optional dependencies, replace `optional` with: `nospam` for [caching-requests](#smarter-scraping), `repair` for [price repair](https://github.com/ranaroussi/yfinance/wiki/Price-repair), or `nospam,repair` for both:

``` {.sourceCode .bash}
$ pip install "yfinance[optional]"
```

[Required dependencies](./requirements.txt) , [all dependencies](./setup.py#L62).

---

## Quick Start
@@ -83,9 +97,6 @@ msft.splits
msft.capital_gains  # only for mutual funds & etfs

# show share count
# - yearly summary:
msft.shares
# - accurate time-series count:
msft.get_shares_full(start="2022-01-01", end=None)

# show financials:
@@ -104,27 +115,16 @@ msft.quarterly_cashflow
msft.major_holders
msft.institutional_holders
msft.mutualfund_holders
msft.insider_transactions
msft.insider_purchases
msft.insider_roster_holders

# show earnings
msft.earnings
msft.quarterly_earnings

# show sustainability
msft.sustainability

# show analysts recommendations
# show recommendations
msft.recommendations
msft.recommendations_summary
# show analysts other work
msft.analyst_price_target
msft.revenue_forecasts
msft.earnings_forecasts
msft.earnings_trend
msft.upgrades_downgrades

# show next event (earnings, etc)
msft.calendar

# Show future and historic earnings dates, returns at most next 4 quarters and last 8 quarters by default.
# Show future and historic earnings dates, returns at most next 4 quarters and last 8 quarters by default.
# Note: If more are needed use msft.get_earnings_dates(limit=XX) with increased limit argument.
msft.earnings_dates
@@ -180,37 +180,21 @@ To download price history into one table:

```python
import yfinance as yf
data = yf.download("SPY AAPL", start="2017-01-01", end="2017-04-30")
data = yf.download("SPY AAPL", period="1mo")
```

`yf.download()` and `Ticker.history()` have many options for configuring fetching and processing, e.g.:

```python
yf.download(tickers = "SPY AAPL",  # list of tickers
            period = "1y",         # time period
            interval = "1d",       # trading interval
            prepost = False,       # download pre/post market hours data?
            repair = True)         # repair obvious price errors e.g. 100x?
```

Review the [Wiki](https://github.com/ranaroussi/yfinance/wiki) for more options and detail.
#### `yf.download()` and `Ticker.history()` have many options for configuring fetching and processing. [Review the Wiki](https://github.com/ranaroussi/yfinance/wiki) for more options and detail.

### Logging

`yfinance` now uses the `logging` module. To control the detail of printed messages you simply change the level:
```
import logging
logger = logging.getLogger('yfinance')
logger.setLevel(logging.ERROR)     # default: only print errors
logger.setLevel(logging.CRITICAL)  # disable printing
logger.setLevel(logging.DEBUG)     # verbose: print errors & debug info
```
`yfinance` now uses the `logging` module to handle messages, default behaviour is only print errors. If debugging, use `yf.enable_debug_mode()` to switch logging to debug with custom formatting.

### Smarter scraping

To use a custom `requests` session (for example to cache calls to the
API or customize the `User-agent` header), pass a `session=` argument to
the Ticker constructor.
Install the `nospam` packages for smarter scraping using `pip` (see [Installation](#installation)). These packages help cache calls such that Yahoo is not spammed with requests.

To use a custom `requests` session, pass a `session=` argument to
the Ticker constructor. This allows for caching calls to the API as well as a custom way to modify requests via the `User-agent` header.

```python
import requests_cache
@@ -221,7 +205,7 @@ ticker = yf.Ticker('msft', session=session)
ticker.actions
```

Combine a `requests_cache` with rate-limiting to avoid triggering Yahoo's rate-limiter/blocker that can corrupt data.
Combine `requests_cache` with rate-limiting to avoid triggering Yahoo's rate-limiter/blocker that can corrupt data.
```python
from requests import Session
from requests_cache import CacheMixin, SQLiteCache
@@ -231,7 +215,7 @@ class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
    pass

session = CachedLimiterSession(
    limiter=Limiter(RequestRate(2, Duration.SECOND*5),   # max 2 requests per 5 seconds
    limiter=Limiter(RequestRate(2, Duration.SECOND*5)),  # max 2 requests per 5 seconds
    bucket_class=MemoryQueueBucket,
    backend=SQLiteCache("yfinance.cache"),
)
@@ -252,29 +236,16 @@ yfinance?](https://stackoverflow.com/questions/63107801)
- How to download single or multiple tickers into a single
  dataframe with single level column names and a ticker column

### `pandas_datareader` override
### Persistent cache store

If your code uses `pandas_datareader` and you want to download data
faster, you can "hijack" `pandas_datareader.data.get_data_yahoo()`
method to use **yfinance** while making sure the returned data is in the
same format as **pandas\_datareader**'s `get_data_yahoo()`.
To reduce Yahoo, yfinance store some data locally: timezones to localize dates, and cookie. Cache location is:

```python
from pandas_datareader import data as pdr
- Windows = C:/Users/\<USER\>/AppData/Local/py-yfinance
- Linux = /home/\<USER\>/.cache/py-yfinance
- MacOS = /Users/\<USER\>/Library/Caches/py-yfinance

import yfinance as yf
yf.pdr_override()  # <== that's all it takes :-)

# download dataframe
data = pdr.get_data_yahoo("SPY", start="2017-01-01", end="2017-04-30")
```

### Timezone cache store

When fetching price data, all dates are localized to stock exchange timezone.
But timezone retrieval is relatively slow, so yfinance attemps to cache them
in your users cache folder.
You can direct cache to use a different location with `set_tz_cache_location()`:

```python
import yfinance as yf
yf.set_tz_cache_location("custom/cache/location")
```
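The Smarter-scraping hunks above fix a misplaced parenthesis: the `RequestRate` must be wrapped fully inside `Limiter(...)` before being passed as `limiter=`. Assembled from the README's own fragments, a self-contained version of the corrected snippet would read roughly as below (the final `Ticker` lines are illustrative usage, not part of the diff):

```python
import yfinance as yf
from requests import Session
from requests_cache import CacheMixin, SQLiteCache
from requests_ratelimiter import LimiterMixin, MemoryQueueBucket
from pyrate_limiter import Duration, RequestRate, Limiter

# A session that both caches responses and rate-limits outgoing requests.
class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
    pass

session = CachedLimiterSession(
    limiter=Limiter(RequestRate(2, Duration.SECOND * 5)),  # max 2 requests per 5 seconds
    bucket_class=MemoryQueueBucket,
    backend=SQLiteCache("yfinance.cache"),
)

# Illustrative usage: all of this Ticker's requests go through the session.
ticker = yf.Ticker("msft", session=session)
print(ticker.actions)
```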
@@ -283,41 +254,6 @@ yf.set_tz_cache_location("custom/cache/location")

---

## Installation

Install `yfinance` using `pip`:

``` {.sourceCode .bash}
$ pip install yfinance --upgrade --no-cache-dir
```

Test new features by installing betas, provide feedback in [corresponding Discussion](https://github.com/ranaroussi/yfinance/discussions):
``` {.sourceCode .bash}
$ pip install yfinance --upgrade --no-cache-dir --pre
```

To install `yfinance` using `conda`, see
[this](https://anaconda.org/ranaroussi/yfinance).

### Requirements

- [Python](https://www.python.org) \>= 2.7, 3.4+
- [Pandas](https://github.com/pydata/pandas) \>= 1.3.0
- [Numpy](http://www.numpy.org) \>= 1.16.5
- [requests](http://docs.python-requests.org/en/master) \>= 2.26
- [lxml](https://pypi.org/project/lxml) \>= 4.9.1
- [appdirs](https://pypi.org/project/appdirs) \>= 1.4.4
- [pytz](https://pypi.org/project/pytz) \>=2022.5
- [frozendict](https://pypi.org/project/frozendict) \>= 2.3.4
- [beautifulsoup4](https://pypi.org/project/beautifulsoup4) \>= 4.11.1
- [html5lib](https://pypi.org/project/html5lib) \>= 1.1
- [cryptography](https://pypi.org/project/cryptography) \>= 3.3.2

#### Optional (if you want to use `pandas_datareader`)

- [pandas\_datareader](https://github.com/pydata/pandas-datareader) \>= 0.4.0

## Developers: want to contribute?

`yfinance` relies on community to investigate bugs and contribute code. Developer guide: https://github.com/ranaroussi/yfinance/discussions/1084
@@ -336,7 +272,7 @@ intended for research and educational purposes. You should refer to Yahoo!'s ter
([here](https://policies.yahoo.com/us/en/yahoo/terms/product-atos/apiforydn/index.htm),
[here](https://legal.yahoo.com/us/en/yahoo/terms/otos/index.html), and
[here](https://policies.yahoo.com/us/en/yahoo/terms/index.htm)) for
detailes on your rights to use the actual data downloaded.
details on your rights to use the actual data downloaded.

---
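One behavioural note on the Logging rewrite in the README changes above: the old text showed manual `logger.setLevel(...)` calls, while the new text points at a single helper. A minimal sketch of the new workflow, assuming only what the README states about `yf.enable_debug_mode()` (the `MSFT` fetch is an arbitrary example):

```python
import yfinance as yf

yf.enable_debug_mode()  # switch the 'yfinance' logger to DEBUG with custom formatting
yf.Ticker("MSFT").history(period="1d")  # this fetch now prints debug output
```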
14 meta.yaml
@@ -1,5 +1,5 @@
{% set name = "yfinance" %}
{% set version = "0.2.20" %}
{% set version = "0.2.40" %}

package:
  name: "{{ name|lower }}"
@@ -18,32 +18,32 @@ requirements:
  host:
    - pandas >=1.3.0
    - numpy >=1.16.5
    - requests >=2.26
    - requests >=2.31
    - multitasking >=0.0.7
    - lxml >=4.9.1
    - appdirs >=1.4.4
    - platformdirs >=2.0.0
    - pytz >=2022.5
    - frozendict >=2.3.4
    - beautifulsoup4 >=4.11.1
    - html5lib >=1.1
    - peewee >=3.16.2
    # - pycryptodome >=3.6.6
    - cryptography >=3.3.2
    - pip
    - python

  run:
    - pandas >=1.3.0
    - numpy >=1.16.5
    - requests >=2.26
    - requests >=2.31
    - multitasking >=0.0.7
    - lxml >=4.9.1
    - appdirs >=1.4.4
    - platformdirs >=2.0.0
    - pytz >=2022.5
    - frozendict >=2.3.4
    - beautifulsoup4 >=4.11.1
    - html5lib >=1.1
    - peewee >=3.16.2
    # - pycryptodome >=3.6.6
    - cryptography >=3.3.2
    - python

test:
requirements.txt
@@ -1,11 +1,11 @@
pandas>=1.3.0
numpy>=1.16.5
requests>=2.26
requests>=2.31
multitasking>=0.0.7
lxml>=4.9.1
appdirs>=1.4.4
platformdirs>=2.0.0
pytz>=2022.5
frozendict>=2.3.4
beautifulsoup4>=4.11.1
html5lib>=1.1
cryptography>=3.3.2
peewee>=3.16.2
15 setup.py
@@ -39,7 +39,7 @@ setup(
        'License :: OSI Approved :: Apache Software License',
        # 'Development Status :: 3 - Alpha',
        'Development Status :: 4 - Beta',
        #'Development Status :: 5 - Production/Stable',
        # 'Development Status :: 5 - Production/Stable',


        'Operating System :: OS Independent',
@@ -60,12 +60,15 @@ setup(
    keywords='pandas, yahoo finance, pandas datareader',
    packages=find_packages(exclude=['contrib', 'docs', 'tests', 'examples']),
    install_requires=['pandas>=1.3.0', 'numpy>=1.16.5',
                      'requests>=2.26', 'multitasking>=0.0.7',
                      'lxml>=4.9.1', 'appdirs>=1.4.4', 'pytz>=2022.5',
                      'frozendict>=2.3.4',
                      # 'pycryptodome>=3.6.6',
                      'cryptography>=3.3.2',
                      'requests>=2.31', 'multitasking>=0.0.7',
                      'lxml>=4.9.1', 'platformdirs>=2.0.0', 'pytz>=2022.5',
                      'frozendict>=2.3.4', 'peewee>=3.16.2',
                      'beautifulsoup4>=4.11.1', 'html5lib>=1.1'],
    extras_require={
        'nospam': ['requests_cache>=1.0', 'requests_ratelimiter>=0.3.1'],
        'repair': ['scipy>=1.6.3'],
    },
    # Note: Pandas.read_html() needs html5lib & beautifulsoup4
    entry_points={
        'console_scripts': [
            'sample=sample:main',
@@ -1,73 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# yfinance - market data downloader
# https://github.com/ranaroussi/yfinance

"""
Sanity check for most common library uses all working
- Stock: Microsoft
- ETF: Russell 2000 Growth
- Mutual fund: Vanguard 500 Index fund
- Index: S&P500
- Currency BTC-USD
"""

import yfinance as yf
import unittest
import logging

logging.basicConfig(level=logging.DEBUG)

symbols = ['MSFT', 'IWO', 'VFINX', '^GSPC', 'BTC-USD']
tickers = [yf.Ticker(symbol) for symbol in symbols]


class TestTicker(unittest.TestCase):
    def test_info_history(self):
        for ticker in tickers:
            # always should have info and history for valid symbols
            assert(ticker.info is not None and ticker.info != {})
            history = ticker.history(period="max")
            assert(history.empty is False and history is not None)

    def test_attributes(self):
        for ticker in tickers:
            ticker.isin
            ticker.major_holders
            ticker.institutional_holders
            ticker.mutualfund_holders
            ticker.dividends
            ticker.splits
            ticker.actions
            ticker.shares
            ticker.info
            ticker.calendar
            ticker.recommendations
            ticker.earnings
            ticker.quarterly_earnings
            ticker.income_stmt
            ticker.quarterly_income_stmt
            ticker.balance_sheet
            ticker.quarterly_balance_sheet
            ticker.cashflow
            ticker.quarterly_cashflow
            ticker.recommendations_summary
            ticker.analyst_price_target
            ticker.revenue_forecasts
            ticker.sustainability
            ticker.options
            ticker.news
            ticker.earnings_trend
            ticker.earnings_dates
            ticker.earnings_forecasts

    def test_holders(self):
        for ticker in tickers:
            assert(ticker.info is not None and ticker.info != {})
            assert(ticker.major_holders is not None)
            assert(ticker.institutional_holders is not None)


if __name__ == '__main__':
    unittest.main()
@@ -1 +0,0 @@
#!/usr/bin/env python
@@ -1,36 +1,43 @@
# -*- coding: utf-8 -*-

import platformdirs as _ad
import datetime as _dt
import sys
import os
import yfinance
from requests import Session
from requests_cache import CacheMixin, SQLiteCache
from requests_ratelimiter import LimiterMixin, MemoryQueueBucket
from pyrate_limiter import Duration, RequestRate, Limiter

_parent_dp = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
_src_dp = _parent_dp
sys.path.insert(0, _src_dp)

import yfinance


# Optional: see the exact requests that are made during tests:
# import logging
# logging.basicConfig(level=logging.DEBUG)

# Use adjacent cache folder for testing, delete if already exists and older than today
testing_cache_dirpath = os.path.join(_ad.user_cache_dir(), "py-yfinance-testing")
yfinance.set_tz_cache_location(testing_cache_dirpath)
if os.path.isdir(testing_cache_dirpath):
    mtime = _dt.datetime.fromtimestamp(os.path.getmtime(testing_cache_dirpath))
    if mtime.date() < _dt.date.today():
        import shutil
        shutil.rmtree(testing_cache_dirpath)


# Setup a session to rate-limit and cache persistently:
import datetime as _dt
import os
import appdirs as _ad
from requests import Session
from requests_cache import CacheMixin, SQLiteCache
from requests_ratelimiter import LimiterMixin, MemoryQueueBucket
class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
    pass
from pyrate_limiter import Duration, RequestRate, Limiter
history_rate = RequestRate(1, Duration.SECOND*2)
limiter = Limiter(history_rate)
cache_fp = os.path.join(testing_cache_dirpath, "unittests-cache")
session_gbl = CachedLimiterSession(
    limiter=limiter,
    bucket_class=MemoryQueueBucket,
    backend=SQLiteCache(os.path.join(_ad.user_cache_dir(), "py-yfinance", "unittests-cache"),
                        expire_after=_dt.timedelta(hours=1)),
    backend=SQLiteCache(cache_fp, expire_after=_dt.timedelta(hours=1)),
)
# Use this instead if only want rate-limiting:
# from requests_ratelimiter import LimiterSession
23 tests/data/4063-T-1d-bad-stock-split-fixed.csv (new file)
@@ -0,0 +1,23 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-04-14 00:00:00+09:00,4126,4130,4055,4129,4129,7459400,0,0
2023-04-13 00:00:00+09:00,4064,4099,4026,4081,4081,5160200,0,0
2023-04-12 00:00:00+09:00,3968,4084,3966,4064,4064,6372000,0,0
2023-04-11 00:00:00+09:00,3990,4019,3954,3960,3960,6476500,0,0
2023-04-10 00:00:00+09:00,3996,4009,3949,3964,3964,3485200,0,0
2023-04-07 00:00:00+09:00,3897,3975,3892,3953,3953,4554700,0,0
2023-04-06 00:00:00+09:00,4002,4004,3920,3942,3942,8615200,0,0
2023-04-05 00:00:00+09:00,4150,4150,4080,4088,4088,6063700,0,0
2023-04-04 00:00:00+09:00,4245,4245,4144,4155,4155,6780600,0,0
2023-04-03 00:00:00+09:00,4250,4259,4162,4182,4182,7076800,0,0
2023-03-31 00:00:00+09:00,4229,4299,4209,4275,4275,9608400,0,0
2023-03-30 00:00:00+09:00,4257,4268,4119,4161,4161,5535200,55,5
2023-03-29 00:00:00+09:00,4146,4211,4146,4206,4151,6514500,0,0
2023-03-28 00:00:00+09:00,4200,4207,4124,4142,4087.837109375,4505500,0,0
2023-03-27 00:00:00+09:00,4196,4204,4151,4192,4137.183203125,5959500,0,0
2023-03-24 00:00:00+09:00,4130,4187,4123,4177,4122.379296875,8961500,0,0
2023-03-23 00:00:00+09:00,4056,4106,4039,4086,4032.569140625,5480000,0,0
2023-03-22 00:00:00+09:00,4066,4128,4057,4122,4068.0984375,8741500,0,0
2023-03-20 00:00:00+09:00,4000,4027,3980,3980,3927.95546875,7006500,0,0
2023-03-17 00:00:00+09:00,4018,4055,4016,4031,3978.28828125,6961500,0,0
2023-03-16 00:00:00+09:00,3976,4045,3972,4035,3982.236328125,5019000,0,0
2023-03-15 00:00:00+09:00,4034,4050,4003,4041,3988.1578125,6122000,0,0
23 tests/data/4063-T-1d-bad-stock-split.csv (new file)
@@ -0,0 +1,23 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-04-14 00:00:00+09:00,4126,4130,4055,4129,4129,7459400,0,0
2023-04-13 00:00:00+09:00,4064,4099,4026,4081,4081,5160200,0,0
2023-04-12 00:00:00+09:00,3968,4084,3966,4064,4064,6372000,0,0
2023-04-11 00:00:00+09:00,3990,4019,3954,3960,3960,6476500,0,0
2023-04-10 00:00:00+09:00,3996,4009,3949,3964,3964,3485200,0,0
2023-04-07 00:00:00+09:00,3897,3975,3892,3953,3953,4554700,0,0
2023-04-06 00:00:00+09:00,4002,4004,3920,3942,3942,8615200,0,0
2023-04-05 00:00:00+09:00,4150,4150,4080,4088,4088,6063700,0,0
2023-04-04 00:00:00+09:00,4245,4245,4144,4155,4155,6780600,0,0
2023-04-03 00:00:00+09:00,4250,4259,4162,4182,4182,7076800,0,0
2023-03-31 00:00:00+09:00,4229,4299,4209,4275,4275,9608400,0,0
2023-03-30 00:00:00+09:00,4257,4268,4119,4161,4161,5535200,55,5
2023-03-29 00:00:00+09:00,4146,4211,4146,4206,4151,6514500,0,0
2023-03-28 00:00:00+09:00,21000,21035,20620,20710,20439.185546875,901100,0,0
2023-03-27 00:00:00+09:00,20980,21020,20755,20960,20685.916015625,1191900,0,0
2023-03-24 00:00:00+09:00,20650,20935,20615,20885,20611.896484375,1792300,0,0
2023-03-23 00:00:00+09:00,20280,20530,20195,20430,20162.845703125,1096000,0,0
2023-03-22 00:00:00+09:00,20330,20640,20285,20610,20340.4921875,1748300,0,0
2023-03-20 00:00:00+09:00,20000,20135,19900,19900,19639.77734375,1401300,0,0
2023-03-17 00:00:00+09:00,20090,20275,20080,20155,19891.44140625,1392300,0,0
2023-03-16 00:00:00+09:00,19880,20225,19860,20175,19911.181640625,1003800,0,0
2023-03-15 00:00:00+09:00,20170,20250,20015,20205,19940.7890625,1224400,0,0
6 tests/data/8TRA-DE-1d-missing-div-adjust-fixed.csv (new file)
@@ -0,0 +1,6 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-30 00:00:00+02:00,19.5900001525879,19.7999992370605,19.2700004577637,19.3500003814697,18.6291382416581,196309,0,0
2023-05-31 00:00:00+02:00,19.1200008392334,19.1399993896484,18.7000007629395,18.7900009155273,18.0900009155273,156652,0,0
2023-06-02 00:00:00+02:00,18.5499992370605,19,18.5100002288818,18.8999996185303,18.8999996185303,83439,0.7,0
2023-06-05 00:00:00+02:00,18.9300003051758,19.0900001525879,18.8400001525879,19,19,153167,0,0
2023-06-06 00:00:00+02:00,18.9099998474121,18.9500007629395,18.5100002288818,18.6599998474121,18.6599998474121,104352,0,0
6 tests/data/8TRA-DE-1d-missing-div-adjust.csv (new file)
@@ -0,0 +1,6 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-30 00:00:00+02:00,19.59000015258789,19.799999237060547,19.270000457763672,19.350000381469727,19.350000381469727,196309,0.0,0.0
2023-05-31 00:00:00+02:00,19.1200008392334,19.139999389648438,18.700000762939453,18.790000915527344,18.790000915527344,156652,0.0,0.0
2023-06-02 00:00:00+02:00,18.549999237060547,19.0,18.510000228881836,18.899999618530273,18.899999618530273,83439,0.7,0.0
2023-06-05 00:00:00+02:00,18.93000030517578,19.09000015258789,18.84000015258789,19.0,19.0,153167,0.0,0.0
2023-06-06 00:00:00+02:00,18.90999984741211,18.950000762939453,18.510000228881836,18.65999984741211,18.65999984741211,104352,0.0,0.0
24 tests/data/AET-L-1d-100x-error-fixed.csv (new file)
@@ -0,0 +1,24 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-06-06 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-06-01 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-31 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-30 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-27 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-26 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-25 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-24 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-23 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-20 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-19 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-18 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,532454,0,0
2022-05-17 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-16 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-13 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-12 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-11 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-10 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-09 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
2022-05-06 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-05 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-04 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
2022-05-03 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
24 tests/data/AET-L-1d-100x-error.csv (new file)
@@ -0,0 +1,24 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-06-06 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-06-01 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-31 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-30 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-27 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-26 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-24 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-23 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-20 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-19 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-18 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,532454,0.0,0.0
2022-05-17 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-16 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-13 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-12 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-11 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-10 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-09 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-06 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-05 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-04 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-03 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
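The paired `AET-L-1d-100x-error.csv` / `AET-L-1d-100x-error-fixed.csv` fixtures above show the same series with a block of rows inflated by a factor of 100 (14.55 instead of 0.1455). As an illustration of the kind of glitch these fixtures exercise (a hypothetical detector sketch, not yfinance's actual repair code), a few lines of pandas can flag and undo such rows:

```python
import pandas as pd

# Hypothetical 100x-glitch repair sketch over the fixture added in this diff.
df = pd.read_csv("tests/data/AET-L-1d-100x-error.csv", index_col="Date")

price_cols = ["Open", "High", "Low", "Close", "Adj Close"]
suspect = df["Close"] / df["Close"].median() > 50  # ~100x above the typical level
df.loc[suspect, price_cols] /= 100                 # undo the 100x inflation

print(df.loc[suspect, "Close"])  # the repaired rows, now back near 0.1455
```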
37 tests/data/AET-L-1wk-100x-error-fixed.csv (new file)
@@ -0,0 +1,37 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-05-30 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-05-23 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-05-16 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,532454,0,0
2022-05-09 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-05-02 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-04-25 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-04-18 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-04-11 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-04-04 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-03-28 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-03-21 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-03-14 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-03-07 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-02-28 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-02-21 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-02-14 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-02-07 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-01-31 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-01-24 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-01-17 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-01-10 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-01-03 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-12-27 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-12-20 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-12-13 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-12-06 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-11-29 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-11-22 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-11-15 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-11-08 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-11-01 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-10-25 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-10-18 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-10-11 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2021-10-04 00:00:00+01:00,14.8000,15.3400,14.4000,14.5500,14.5500,2171373,0,0
2021-09-27 00:00:00+01:00,15.6000,16.0000,14.9000,15.0500,15.0500,3860549,0,0
25 tests/data/AET-L-1wk-100x-error-fixed.csv.old (new file)
@@ -0,0 +1,25 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-08-15 00:00:00+01:00,27.6000,28.2000,26.2000,27.6000,27.6000,3535668,0,0
2022-08-12 00:00:00+01:00,27.3000,29.8000,26.4030,27.0000,27.0000,7223353,0,0
2022-08-11 00:00:00+01:00,26.0000,29.8000,24.2000,27.1000,27.1000,12887933,0,0
2022-08-10 00:00:00+01:00,25.0000,29.2000,22.5000,25.0000,25.0000,26572680,0,0
2022-08-09 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-08-08 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-08-05 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-08-04 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-08-03 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-08-02 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-08-01 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-29 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-28 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-27 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-26 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-25 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-22 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-21 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-20 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-19 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-18 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-15 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-14 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
2022-07-13 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
37 tests/data/AET-L-1wk-100x-error.csv (new file)
@@ -0,0 +1,37 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-05-30 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-23 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-05-16 00:00:00+01:00,14.550000190734863,14.550000190734863,0.14550000429153442,0.14550000429153442,0.14550000429153442,532454,0.0,0.0
2022-05-09 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-05-02 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-04-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-04-18 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-04-11 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-04-04 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-03-28 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-03-21 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-03-14 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-03-07 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-02-28 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-02-21 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-02-14 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-02-07 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-01-31 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-01-24 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-01-17 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-01-10 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-01-03 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-12-27 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-12-20 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-12-13 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-12-06 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-11-29 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-11-22 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-11-15 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-11-08 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-11-01 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-10-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-10-18 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-10-11 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2021-10-04 00:00:00+01:00,14.800000190734863,15.34000015258789,0.14399999380111694,0.14550000429153442,0.14550000429153442,2171373,0.0,0.0
2021-09-27 00:00:00+01:00,15.600000381469727,16.0,14.899999618530273,15.050000190734863,15.050000190734863,3860549,0.0,0.0
25
tests/data/AET-L-1wk-100x-error.csv.old
Normal file
@@ -0,0 +1,25 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-08-15 00:00:00+01:00,27.600000381469727,28.200000762939453,26.200000762939453,27.600000381469727,27.600000381469727,3535668,0.0,0.0
2022-08-12 00:00:00+01:00,27.299999237060547,29.799999237060547,26.402999877929688,27.0,27.0,7223353,0.0,0.0
2022-08-11 00:00:00+01:00,26.0,29.799999237060547,24.200000762939453,27.100000381469727,27.100000381469727,12887933,0.0,0.0
2022-08-10 00:00:00+01:00,25.0,29.200000762939453,22.5,25.0,25.0,26572680,0.0,0.0
2022-08-09 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-08-08 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-08-05 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-08-04 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-08-03 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-08-02 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-08-01 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
2022-07-29 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-28 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-27 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-26 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-22 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-21 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-20 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-19 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-18 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-15 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-14 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
2022-07-13 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
30
tests/data/ALPHA-PA-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,30 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-04-20 00:00:00+02:00,3,3,2,3,3,2076,0,0
2023-04-21 00:00:00+02:00,3,3,2,3,3,2136,0,0
2023-04-24 00:00:00+02:00,3,3,1,1,1,77147,0,0
2023-04-25 00:00:00+02:00,1,2,1,2,2,9625,0,0
2023-04-26 00:00:00+02:00,2,2,1,2,2,5028,0,0
2023-04-27 00:00:00+02:00,2,2,1,1,1,3235,0,0
2023-04-28 00:00:00+02:00,2,2,1,2,2,10944,0,0
2023-05-02 00:00:00+02:00,2,2,2,2,2,12220,0,0
2023-05-03 00:00:00+02:00,2,2,2,2,2,4683,0,0
2023-05-04 00:00:00+02:00,2,2,1,2,2,3368,0,0
2023-05-05 00:00:00+02:00,2,2,1,2,2,26069,0,0
2023-05-08 00:00:00+02:00,1,2,1,1,1,70540,0,0
2023-05-09 00:00:00+02:00,1,2,1,1,1,14228,0,0
2023-05-10 00:00:00+02:00,1.08000004291534,1.39999997615814,0.879999995231628,1,1,81012,0,0.0001
2023-05-11 00:00:00+02:00,1.03999996185303,1.03999996185303,0.850000023841858,1,1,40254,0,0
2023-05-12 00:00:00+02:00,0.949999988079071,1.10000002384186,0.949999988079071,1.01999998092651,1.01999998092651,35026,0,0
2023-05-15 00:00:00+02:00,0.949999988079071,1.01999998092651,0.860000014305115,0.939999997615814,0.939999997615814,41486,0,0
2023-05-16 00:00:00+02:00,0.899999976158142,0.944000005722046,0.800000011920929,0.800000011920929,0.800000011920929,43583,0,0
2023-05-17 00:00:00+02:00,0.850000023841858,0.850000023841858,0.779999971389771,0.810000002384186,0.810000002384186,29984,0,0
2023-05-18 00:00:00+02:00,0.779999971389771,0.78600001335144,0.740000009536743,0.740000009536743,0.740000009536743,24679,0,0
2023-05-19 00:00:00+02:00,0.78600001335144,0.78600001335144,0.649999976158142,0.65200001001358,0.65200001001358,26732,0,0
2023-05-22 00:00:00+02:00,0.8299999833107,1.05999994277954,0.709999978542328,0.709999978542328,0.709999978542328,169538,0,0
2023-05-23 00:00:00+02:00,0.899999976158142,1.60800004005432,0.860000014305115,1.22000002861023,1.22000002861023,858471,0,0
2023-05-24 00:00:00+02:00,1.19400000572205,1.25999999046326,0.779999971389771,0.779999971389771,0.779999971389771,627823,0,0
2023-05-25 00:00:00+02:00,0.980000019073486,1.22000002861023,0.702000021934509,0.732999980449677,0.732999980449677,1068939,0,0
2023-05-26 00:00:00+02:00,0.660000026226044,0.72000002861023,0.602999985218048,0.611999988555908,0.611999988555908,631580,0,0
2023-05-29 00:00:00+02:00,0.620000004768372,0.75,0.578999996185303,0.600000023841858,0.600000023841858,586150,0,0
2023-05-30 00:00:00+02:00,0.610000014305115,0.634999990463257,0.497000008821487,0.497000008821487,0.497000008821487,552308,0,0
2023-05-31 00:00:00+02:00,0.458999991416931,0.469999998807907,0.374000012874603,0.379999995231628,0.379999995231628,899067,0,0
30
tests/data/ALPHA-PA-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,30 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-04-20 00:00:00+02:00,3.0,3.0,2.0,3.0,3.0,2076,0.0,0.0
2023-04-21 00:00:00+02:00,3.0,3.0,2.0,3.0,3.0,2136,0.0,0.0
2023-04-24 00:00:00+02:00,3.0,3.0,1.0,1.0,1.0,77147,0.0,0.0
2023-04-25 00:00:00+02:00,1.0,2.0,1.0,2.0,2.0,9625,0.0,0.0
2023-04-26 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,5028,0.0,0.0
2023-04-27 00:00:00+02:00,2.0,2.0,1.0,1.0,1.0,3235,0.0,0.0
2023-04-28 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,10944,0.0,0.0
2023-05-02 00:00:00+02:00,2.0,2.0,2.0,2.0,2.0,12220,0.0,0.0
2023-05-03 00:00:00+02:00,2.0,2.0,2.0,2.0,2.0,4683,0.0,0.0
2023-05-04 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,3368,0.0,0.0
2023-05-05 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,26069,0.0,0.0
2023-05-08 00:00:00+02:00,9.999999747378752e-05,0.00019999999494757503,9.999999747378752e-05,9.999999747378752e-05,9.999999747378752e-05,705399568,0.0,0.0
2023-05-09 00:00:00+02:00,1.0,2.0,1.0,1.0,1.0,14228,0.0,0.0
2023-05-10 00:00:00+02:00,1.0800000429153442,1.399999976158142,0.8799999952316284,1.0,1.0,81012,0.0,0.0001
2023-05-11 00:00:00+02:00,1.0399999618530273,1.0399999618530273,0.8500000238418579,1.0,1.0,40254,0.0,0.0
2023-05-12 00:00:00+02:00,0.949999988079071,1.100000023841858,0.949999988079071,1.0199999809265137,1.0199999809265137,35026,0.0,0.0
2023-05-15 00:00:00+02:00,0.949999988079071,1.0199999809265137,0.8600000143051147,0.9399999976158142,0.9399999976158142,41486,0.0,0.0
2023-05-16 00:00:00+02:00,0.8999999761581421,0.9440000057220459,0.800000011920929,0.800000011920929,0.800000011920929,43583,0.0,0.0
2023-05-17 00:00:00+02:00,0.8500000238418579,0.8500000238418579,0.7799999713897705,0.8100000023841858,0.8100000023841858,29984,0.0,0.0
2023-05-18 00:00:00+02:00,0.7799999713897705,0.7860000133514404,0.7400000095367432,0.7400000095367432,0.7400000095367432,24679,0.0,0.0
2023-05-19 00:00:00+02:00,0.7860000133514404,0.7860000133514404,0.6499999761581421,0.6520000100135803,0.6520000100135803,26732,0.0,0.0
2023-05-22 00:00:00+02:00,0.8299999833106995,1.059999942779541,0.7099999785423279,0.7099999785423279,0.7099999785423279,169538,0.0,0.0
2023-05-23 00:00:00+02:00,0.8999999761581421,1.6080000400543213,0.8600000143051147,1.2200000286102295,1.2200000286102295,858471,0.0,0.0
2023-05-24 00:00:00+02:00,1.194000005722046,1.2599999904632568,0.7799999713897705,0.7799999713897705,0.7799999713897705,627823,0.0,0.0
2023-05-25 00:00:00+02:00,0.9800000190734863,1.2200000286102295,0.7020000219345093,0.7329999804496765,0.7329999804496765,1068939,0.0,0.0
2023-05-26 00:00:00+02:00,0.6600000262260437,0.7200000286102295,0.6029999852180481,0.6119999885559082,0.6119999885559082,631580,0.0,0.0
2023-05-29 00:00:00+02:00,0.6200000047683716,0.75,0.5789999961853027,0.6000000238418579,0.6000000238418579,586150,0.0,0.0
2023-05-30 00:00:00+02:00,0.6100000143051147,0.6349999904632568,0.4970000088214874,0.4970000088214874,0.4970000088214874,552308,0.0,0.0
2023-05-31 00:00:00+02:00,0.45899999141693115,0.4699999988079071,0.37400001287460327,0.3799999952316284,0.3799999952316284,899067,0.0,0.0
85
tests/data/AV-L-1wk-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,85 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2021-12-13 00:00:00+00:00,518.421020507813,535,515,530.131469726563,383.200378417969,47663221,0,0
2021-12-20 00:00:00+00:00,518.421020507813,542.368408203125,516.451293945313,539.342102050781,389.858215332031,35413455,0,0
2021-12-27 00:00:00+00:00,539.342102050781,548.093383789063,537.351318359375,540,390.333740234375,8222047,0,0
2022-01-03 00:00:00+00:00,540,568.684204101563,540,568.552612304688,410.972717285156,33764769,0,0
2022-01-10 00:00:00+00:00,567.5,577.894714355469,564.605224609375,573.815673828125,414.777130126953,22109464,0,0
2022-01-17 00:00:00+00:00,576.315673828125,585.789428710938,561.842102050781,568.421020507813,410.877655029297,33278387,0,0
2022-01-24 00:00:00+00:00,565.921020507813,580.263122558594,553.947326660156,570.394714355469,412.304260253906,44450345,0,0
2022-01-31 00:00:00+00:00,573.947326660156,582.960510253906,568.552612304688,572.631469726563,413.921112060547,32935213,0,0
2022-02-07 00:00:00+00:00,576.184204101563,590.526306152344,573.752624511719,584.73681640625,422.671325683594,30129487,0,0
2022-02-14 00:00:00+00:00,575.921020507813,581.578918457031,561.447326660156,568.684204101563,411.067840576172,37979247,0,0
2022-02-21 00:00:00+00:00,573.026306152344,576.943420410156,537.23681640625,557.105224609375,402.698120117188,49946893,0,0
2022-02-28 00:00:00+00:00,546.184204101563,563.157775878906,508.157806396484,508.947296142578,367.887603759766,71484032,4.1875,0
2022-03-07 00:00:00+00:00,493.420989990234,549.078918457031,475.134185791016,538.947326660156,392.617431640625,54164517,0,0
2022-03-14 00:00:00+00:00,543.552612304688,561.447326660156,538.026306152344,555.789428710938,404.886749267578,42128265,0,0
2022-03-21 00:00:00+00:00,556.184204101563,582.5,555.789428710938,576.052612304688,419.648284912109,30321228,0,0
2022-03-28 00:00:00+01:00,582.23681640625,606.578918457031,579.076293945313,585,426.166320800781,42874272,0,0
2022-04-04 00:00:00+01:00,578.552612304688,586.052612304688,555.263122558594,560.263122558594,408.145812988281,37556036,19.342106,0
2022-04-11 00:00:00+01:00,559.73681640625,573.157775878906,553.684204101563,571.447326660156,430.541351318359,22705062,0,0
2022-04-18 00:00:00+01:00,571.447326660156,589.210510253906,570.526306152344,576.052612304688,434.011077880859,37459087,0,0
2022-04-25 00:00:00+01:00,566.710510253906,577.368408203125,557.894714355469,570.131469726563,429.549957275391,33939150,0,0
2022-05-02 00:00:00+01:00,570.131469726563,593.421020507813,545.394714355469,545.921020507813,411.309234619141,22449007,0,0
2022-05-09 00:00:00+01:00,543.684204101563,549.276306152344,484.582794189453,537.105224609375,404.667236328125,56232105,0,0
2022-05-16 00:00:00+01:00,505.263157894737,557.368429083573,505.263157894737,542.236850136205,408.533517937911,62273078.36,101.69,0.76
2022-05-23 00:00:00+01:00,416.100006103516,442.399993896484,341.915008544922,440.899993896484,409.764678955078,45432941,0,0
2022-05-30 00:00:00+01:00,442.700012207031,444.200012207031,426.600006103516,428.700012207031,398.426239013672,37906659,0,0
2022-06-06 00:00:00+01:00,425.299987792969,434.010009765625,405.200012207031,405.399993896484,376.771606445313,40648810,0,0
2022-06-13 00:00:00+01:00,402.5,420,399.799987792969,411.200012207031,382.162048339844,74196958,0,0
2022-06-20 00:00:00+01:00,412.5,421.899993896484,398.399993896484,411.5,382.440826416016,28679717,0,0
2022-06-27 00:00:00+01:00,413.100006103516,422.399993896484,397.399993896484,401.600006103516,373.239959716797,35468994,0,0
2022-07-04 00:00:00+01:00,405.399993896484,406.600006103516,382.299987792969,401.299987792969,372.961120605469,35304748,0,0
2022-07-11 00:00:00+01:00,394.799987792969,405.850006103516,383.399993896484,396.600006103516,368.593048095703,42308459,0,0
2022-07-18 00:00:00+01:00,392.5,399.700012207031,384.799987792969,391.700012207031,364.039093017578,36656839,0,0
2022-07-25 00:00:00+01:00,392.200012207031,400.799987792969,388.700012207031,396,368.035430908203,33124660,0,0
2022-08-01 00:00:00+01:00,396.399993896484,405.5,390.415008544922,402,373.611724853516,21753121,0,0
2022-08-08 00:00:00+01:00,406.600006103516,473.700012207031,403.299987792969,467.899993896484,434.858032226563,59155709,0,0
2022-08-15 00:00:00+01:00,468.100006103516,470.5,434,437,406.140106201172,36989620,10.3,0
2022-08-22 00:00:00+01:00,436.100006103516,436.869995117188,419.299987792969,420.5,399.780303955078,36492572,0,0
2022-08-29 00:00:00+01:00,420.5,426.600006103516,408.600006103516,426.600006103516,405.579742431641,29573657,0,0
2022-09-05 00:00:00+01:00,418.5,444.4169921875,416.100006103516,443.100006103516,421.266723632813,34375126,0,0
2022-09-12 00:00:00+01:00,444.649993896484,448.899993896484,435.200012207031,440.100006103516,418.414520263672,39085960,0,0
2022-09-19 00:00:00+01:00,440.100006103516,447.200012207031,419.299987792969,422.899993896484,402.062042236328,27982081,0,0
2022-09-26 00:00:00+01:00,421.200012207031,421.200012207031,373.31201171875,388.200012207031,369.071868896484,70408935,0,0
2022-10-03 00:00:00+01:00,382.899993896484,409.875,380.555999755859,400.700012207031,380.955932617188,37581751,0,0
2022-10-10 00:00:00+01:00,395.799987792969,404.470001220703,366.700012207031,394.299987792969,374.871276855469,52952323,0,0
2022-10-17 00:00:00+01:00,394.299987792969,414.799987792969,393,406.5,386.470123291016,26441475,0,0
2022-10-24 00:00:00+01:00,407.100006103516,418.227996826172,407.100006103516,413.299987792969,392.93505859375,26239756,0,0
2022-10-31 00:00:00+00:00,413.899993896484,430.200012207031,412,429.299987792969,408.146667480469,23168047,0,0
2022-11-07 00:00:00+00:00,427.299987792969,445.899993896484,420.652008056641,438.399993896484,416.798278808594,36709117,0,0
2022-11-14 00:00:00+00:00,438.299987792969,458.489990234375,435,455.100006103516,432.675415039063,29106506,0,0
2022-11-21 00:00:00+00:00,454.399993896484,461,450,456.600006103516,434.101501464844,21667730,0,0
2022-11-28 00:00:00+00:00,453.799987792969,456.899993896484,435.100006103516,444.799987792969,422.882934570313,33326204,0,0
2022-12-05 00:00:00+00:00,442.899993896484,450.25,441.299987792969,448,425.925262451172,29147089,0,0
2022-12-12 00:00:00+00:00,445.100006103516,451.299987792969,431.200012207031,436.100006103516,414.611633300781,46593233,0,0
2022-12-19 00:00:00+00:00,436,452.600006103516,433.600006103516,444,422.122344970703,20982140,0,0
2022-12-26 00:00:00+00:00,444,452.058013916016,442.399993896484,442.799987792969,420.981475830078,8249664,0,0
2023-01-02 00:00:00+00:00,445.899993896484,458.149993896484,443.299987792969,456,433.531066894531,28687622,0,0
2023-01-09 00:00:00+00:00,456,461.066009521484,435.799987792969,444.200012207031,422.3125,39237336,0,0
2023-01-16 00:00:00+00:00,444.299987792969,447.200012207031,434.399993896484,439,417.368713378906,35267336,0,0
2023-01-23 00:00:00+00:00,440,459.299987792969,439.5,457.399993896484,434.862091064453,37495012,0,0
2023-01-30 00:00:00+00:00,454.399993896484,459.399993896484,447.799987792969,450.299987792969,428.111907958984,48879358,0,0
2023-02-06 00:00:00+00:00,448,449.200012207031,436.299987792969,440,418.319458007813,38799772,0,0
2023-02-13 00:00:00+00:00,441.200012207031,450.299987792969,440,447.600006103516,425.544982910156,30251441,0,0
2023-02-20 00:00:00+00:00,448.5,450.799987792969,434.299987792969,440,418.319458007813,26764528,0,0
2023-02-27 00:00:00+00:00,442.899993896484,450.5,441.608001708984,447.200012207031,425.164703369141,29895454,0,0
2023-03-06 00:00:00+00:00,447.399993896484,467.299987792969,443.100006103516,449.700012207031,427.54150390625,82322819,0,0
2023-03-13 00:00:00+00:00,450,451.417999267578,400.68701171875,402.200012207031,382.382019042969,85158023,0,0
2023-03-20 00:00:00+00:00,396.200012207031,425.399993896484,383.496002197266,408.299987792969,388.181427001953,60152666,0,0
2023-03-27 00:00:00+01:00,416,422.049987792969,399.549987792969,404.200012207031,384.283477783203,81534829,20.7,0
2023-04-03 00:00:00+01:00,405,434.100006103516,404.399993896484,417.100006103516,417.100006103516,43217151,0,0
2023-04-10 00:00:00+01:00,419.100006103516,426.700012207031,419.100006103516,421.700012207031,421.700012207031,32435695,0,0
2023-04-17 00:00:00+01:00,423.700012207031,427.635009765625,415.399993896484,420.299987792969,420.299987792969,37715986,0,0
2023-04-24 00:00:00+01:00,418.100006103516,423,415.299987792969,423,423,34331974,0,0
2023-05-01 00:00:00+01:00,423.399993896484,426.100006103516,406.399993896484,414.600006103516,414.600006103516,40446519,0,0
2023-05-08 00:00:00+01:00,414.600006103516,419.100006103516,408,412.700012207031,412.700012207031,36950836,0,0
2023-05-15 00:00:00+01:00,414,418.399993896484,407.399993896484,413.5,413.5,53109487,0,0
2023-05-22 00:00:00+01:00,413.600006103516,424,394.700012207031,401.299987792969,401.299987792969,64363368,0,0
2023-05-29 00:00:00+01:00,401.299987792969,409.477996826172,392.700012207031,409.100006103516,409.100006103516,47587959,0,0
2023-06-05 00:00:00+01:00,406.299987792969,410.700012207031,400.100006103516,400.899993896484,400.899993896484,22494985,0,0
2023-06-12 00:00:00+01:00,404.100006103516,406,394.5,396,396,41531163,0,0
2023-06-19 00:00:00+01:00,394,399.899993896484,380.720001220703,386.200012207031,386.200012207031,40439880,0,0
2023-06-26 00:00:00+01:00,387.200012207031,397,382.899993896484,395.200012207031,395.200012207031,27701915,0,0
2023-07-03 00:00:00+01:00,396.5,399.799987792969,380.100006103516,381.799987792969,381.799987792969,26005305,0,0
2023-07-10 00:00:00+01:00,380,392.299987792969,379.403991699219,386,386,29789300,0,0
2023-07-17 00:00:00+01:00,385,389.5,384.251007080078,387.100006103516,387.100006103516,0,0,0
85
tests/data/AV-L-1wk-bad-stock-split.csv
Normal file
@@ -0,0 +1,85 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2021-12-13 00:00:00+00:00,518.4210205078125,535.0,515.0,530.1314697265625,383.20037841796875,47663221,0.0,0.0
2021-12-20 00:00:00+00:00,518.4210205078125,542.368408203125,516.4512939453125,539.3421020507812,389.85821533203125,35413455,0.0,0.0
2021-12-27 00:00:00+00:00,539.3421020507812,548.0933837890625,537.351318359375,540.0,390.333740234375,8222047,0.0,0.0
2022-01-03 00:00:00+00:00,540.0,568.6842041015625,540.0,568.5526123046875,410.97271728515625,33764769,0.0,0.0
2022-01-10 00:00:00+00:00,567.5,577.8947143554688,564.605224609375,573.815673828125,414.7771301269531,22109464,0.0,0.0
2022-01-17 00:00:00+00:00,576.315673828125,585.7894287109375,561.8421020507812,568.4210205078125,410.8776550292969,33278387,0.0,0.0
2022-01-24 00:00:00+00:00,565.9210205078125,580.2631225585938,553.9473266601562,570.3947143554688,412.30426025390625,44450345,0.0,0.0
2022-01-31 00:00:00+00:00,573.9473266601562,582.9605102539062,568.5526123046875,572.6314697265625,413.9211120605469,32935213,0.0,0.0
2022-02-07 00:00:00+00:00,576.1842041015625,590.5263061523438,573.7526245117188,584.73681640625,422.67132568359375,30129487,0.0,0.0
2022-02-14 00:00:00+00:00,575.9210205078125,581.5789184570312,561.4473266601562,568.6842041015625,411.0678405761719,37979247,0.0,0.0
2022-02-21 00:00:00+00:00,573.0263061523438,576.9434204101562,537.23681640625,557.105224609375,402.6981201171875,49946893,0.0,0.0
2022-02-28 00:00:00+00:00,546.1842041015625,563.1577758789062,508.1578063964844,508.9472961425781,367.8876037597656,71484032,4.1875,0.0
2022-03-07 00:00:00+00:00,493.4209899902344,549.0789184570312,475.1341857910156,538.9473266601562,392.617431640625,54164517,0.0,0.0
2022-03-14 00:00:00+00:00,543.5526123046875,561.4473266601562,538.0263061523438,555.7894287109375,404.8867492675781,42128265,0.0,0.0
2022-03-21 00:00:00+00:00,556.1842041015625,582.5,555.7894287109375,576.0526123046875,419.6482849121094,30321228,0.0,0.0
2022-03-28 00:00:00+01:00,582.23681640625,606.5789184570312,579.0762939453125,585.0,426.16632080078125,42874272,0.0,0.0
2022-04-04 00:00:00+01:00,578.5526123046875,586.0526123046875,555.2631225585938,560.2631225585938,408.14581298828125,37556036,19.342106,0.0
2022-04-11 00:00:00+01:00,559.73681640625,573.1577758789062,553.6842041015625,571.4473266601562,430.5413513183594,22705062,0.0,0.0
2022-04-18 00:00:00+01:00,571.4473266601562,589.2105102539062,570.5263061523438,576.0526123046875,434.0110778808594,37459087,0.0,0.0
2022-04-25 00:00:00+01:00,566.7105102539062,577.368408203125,557.8947143554688,570.1314697265625,429.5499572753906,33939150,0.0,0.0
2022-05-02 00:00:00+01:00,570.1314697265625,593.4210205078125,545.3947143554688,545.9210205078125,411.3092346191406,22449007,0.0,0.0
2022-05-09 00:00:00+01:00,543.6842041015625,549.2763061523438,484.5827941894531,537.105224609375,404.667236328125,56232105,0.0,0.0
2022-05-16 00:00:00+01:00,384.0,423.6000061035156,384.0,412.1000061035156,310.4854736328125,81938261,101.69,0.76
2022-05-23 00:00:00+01:00,416.1000061035156,442.3999938964844,341.9150085449219,440.8999938964844,409.7646789550781,45432941,0.0,0.0
2022-05-30 00:00:00+01:00,442.70001220703125,444.20001220703125,426.6000061035156,428.70001220703125,398.4262390136719,37906659,0.0,0.0
2022-06-06 00:00:00+01:00,425.29998779296875,434.010009765625,405.20001220703125,405.3999938964844,376.7716064453125,40648810,0.0,0.0
2022-06-13 00:00:00+01:00,402.5,420.0,399.79998779296875,411.20001220703125,382.16204833984375,74196958,0.0,0.0
2022-06-20 00:00:00+01:00,412.5,421.8999938964844,398.3999938964844,411.5,382.4408264160156,28679717,0.0,0.0
2022-06-27 00:00:00+01:00,413.1000061035156,422.3999938964844,397.3999938964844,401.6000061035156,373.2399597167969,35468994,0.0,0.0
2022-07-04 00:00:00+01:00,405.3999938964844,406.6000061035156,382.29998779296875,401.29998779296875,372.96112060546875,35304748,0.0,0.0
2022-07-11 00:00:00+01:00,394.79998779296875,405.8500061035156,383.3999938964844,396.6000061035156,368.5930480957031,42308459,0.0,0.0
2022-07-18 00:00:00+01:00,392.5,399.70001220703125,384.79998779296875,391.70001220703125,364.0390930175781,36656839,0.0,0.0
2022-07-25 00:00:00+01:00,392.20001220703125,400.79998779296875,388.70001220703125,396.0,368.0354309082031,33124660,0.0,0.0
2022-08-01 00:00:00+01:00,396.3999938964844,405.5,390.4150085449219,402.0,373.6117248535156,21753121,0.0,0.0
2022-08-08 00:00:00+01:00,406.6000061035156,473.70001220703125,403.29998779296875,467.8999938964844,434.8580322265625,59155709,0.0,0.0
2022-08-15 00:00:00+01:00,468.1000061035156,470.5,434.0,437.0,406.1401062011719,36989620,10.3,0.0
2022-08-22 00:00:00+01:00,436.1000061035156,436.8699951171875,419.29998779296875,420.5,399.7803039550781,36492572,0.0,0.0
2022-08-29 00:00:00+01:00,420.5,426.6000061035156,408.6000061035156,426.6000061035156,405.5797424316406,29573657,0.0,0.0
2022-09-05 00:00:00+01:00,418.5,444.4169921875,416.1000061035156,443.1000061035156,421.2667236328125,34375126,0.0,0.0
2022-09-12 00:00:00+01:00,444.6499938964844,448.8999938964844,435.20001220703125,440.1000061035156,418.4145202636719,39085960,0.0,0.0
2022-09-19 00:00:00+01:00,440.1000061035156,447.20001220703125,419.29998779296875,422.8999938964844,402.0620422363281,27982081,0.0,0.0
2022-09-26 00:00:00+01:00,421.20001220703125,421.20001220703125,373.31201171875,388.20001220703125,369.0718688964844,70408935,0.0,0.0
2022-10-03 00:00:00+01:00,382.8999938964844,409.875,380.5559997558594,400.70001220703125,380.9559326171875,37581751,0.0,0.0
2022-10-10 00:00:00+01:00,395.79998779296875,404.4700012207031,366.70001220703125,394.29998779296875,374.87127685546875,52952323,0.0,0.0
2022-10-17 00:00:00+01:00,394.29998779296875,414.79998779296875,393.0,406.5,386.4701232910156,26441475,0.0,0.0
2022-10-24 00:00:00+01:00,407.1000061035156,418.2279968261719,407.1000061035156,413.29998779296875,392.93505859375,26239756,0.0,0.0
2022-10-31 00:00:00+00:00,413.8999938964844,430.20001220703125,412.0,429.29998779296875,408.14666748046875,23168047,0.0,0.0
2022-11-07 00:00:00+00:00,427.29998779296875,445.8999938964844,420.6520080566406,438.3999938964844,416.79827880859375,36709117,0.0,0.0
2022-11-14 00:00:00+00:00,438.29998779296875,458.489990234375,435.0,455.1000061035156,432.6754150390625,29106506,0.0,0.0
2022-11-21 00:00:00+00:00,454.3999938964844,461.0,450.0,456.6000061035156,434.10150146484375,21667730,0.0,0.0
2022-11-28 00:00:00+00:00,453.79998779296875,456.8999938964844,435.1000061035156,444.79998779296875,422.8829345703125,33326204,0.0,0.0
2022-12-05 00:00:00+00:00,442.8999938964844,450.25,441.29998779296875,448.0,425.9252624511719,29147089,0.0,0.0
2022-12-12 00:00:00+00:00,445.1000061035156,451.29998779296875,431.20001220703125,436.1000061035156,414.61163330078125,46593233,0.0,0.0
2022-12-19 00:00:00+00:00,436.0,452.6000061035156,433.6000061035156,444.0,422.1223449707031,20982140,0.0,0.0
2022-12-26 00:00:00+00:00,444.0,452.0580139160156,442.3999938964844,442.79998779296875,420.9814758300781,8249664,0.0,0.0
2023-01-02 00:00:00+00:00,445.8999938964844,458.1499938964844,443.29998779296875,456.0,433.53106689453125,28687622,0.0,0.0
2023-01-09 00:00:00+00:00,456.0,461.0660095214844,435.79998779296875,444.20001220703125,422.3125,39237336,0.0,0.0
2023-01-16 00:00:00+00:00,444.29998779296875,447.20001220703125,434.3999938964844,439.0,417.36871337890625,35267336,0.0,0.0
2023-01-23 00:00:00+00:00,440.0,459.29998779296875,439.5,457.3999938964844,434.8620910644531,37495012,0.0,0.0
2023-01-30 00:00:00+00:00,454.3999938964844,459.3999938964844,447.79998779296875,450.29998779296875,428.1119079589844,48879358,0.0,0.0
2023-02-06 00:00:00+00:00,448.0,449.20001220703125,436.29998779296875,440.0,418.3194580078125,38799772,0.0,0.0
2023-02-13 00:00:00+00:00,441.20001220703125,450.29998779296875,440.0,447.6000061035156,425.54498291015625,30251441,0.0,0.0
2023-02-20 00:00:00+00:00,448.5,450.79998779296875,434.29998779296875,440.0,418.3194580078125,26764528,0.0,0.0
2023-02-27 00:00:00+00:00,442.8999938964844,450.5,441.6080017089844,447.20001220703125,425.1647033691406,29895454,0.0,0.0
2023-03-06 00:00:00+00:00,447.3999938964844,467.29998779296875,443.1000061035156,449.70001220703125,427.54150390625,82322819,0.0,0.0
2023-03-13 00:00:00+00:00,450.0,451.4179992675781,400.68701171875,402.20001220703125,382.38201904296875,85158023,0.0,0.0
2023-03-20 00:00:00+00:00,396.20001220703125,425.3999938964844,383.4960021972656,408.29998779296875,388.1814270019531,60152666,0.0,0.0
2023-03-27 00:00:00+01:00,416.0,422.04998779296875,399.54998779296875,404.20001220703125,384.2834777832031,81534829,20.7,0.0
2023-04-03 00:00:00+01:00,405.0,434.1000061035156,404.3999938964844,417.1000061035156,417.1000061035156,43217151,0.0,0.0
2023-04-10 00:00:00+01:00,419.1000061035156,426.70001220703125,419.1000061035156,421.70001220703125,421.70001220703125,32435695,0.0,0.0
2023-04-17 00:00:00+01:00,423.70001220703125,427.635009765625,415.3999938964844,420.29998779296875,420.29998779296875,37715986,0.0,0.0
2023-04-24 00:00:00+01:00,418.1000061035156,423.0,415.29998779296875,423.0,423.0,34331974,0.0,0.0
2023-05-01 00:00:00+01:00,423.3999938964844,426.1000061035156,406.3999938964844,414.6000061035156,414.6000061035156,40446519,0.0,0.0
2023-05-08 00:00:00+01:00,414.6000061035156,419.1000061035156,408.0,412.70001220703125,412.70001220703125,36950836,0.0,0.0
2023-05-15 00:00:00+01:00,414.0,418.3999938964844,407.3999938964844,413.5,413.5,53109487,0.0,0.0
2023-05-22 00:00:00+01:00,413.6000061035156,424.0,394.70001220703125,401.29998779296875,401.29998779296875,64363368,0.0,0.0
2023-05-29 00:00:00+01:00,401.29998779296875,409.4779968261719,392.70001220703125,409.1000061035156,409.1000061035156,47587959,0.0,0.0
2023-06-05 00:00:00+01:00,406.29998779296875,410.70001220703125,400.1000061035156,400.8999938964844,400.8999938964844,22494985,0.0,0.0
2023-06-12 00:00:00+01:00,404.1000061035156,406.0,394.5,396.0,396.0,41531163,0.0,0.0
2023-06-19 00:00:00+01:00,394.0,399.8999938964844,380.7200012207031,386.20001220703125,386.20001220703125,40439880,0.0,0.0
2023-06-26 00:00:00+01:00,387.20001220703125,397.0,382.8999938964844,395.20001220703125,395.20001220703125,27701915,0.0,0.0
2023-07-03 00:00:00+01:00,396.5,399.79998779296875,380.1000061035156,381.79998779296875,381.79998779296875,26005305,0.0,0.0
2023-07-10 00:00:00+01:00,380.0,392.29998779296875,379.40399169921875,386.0,386.0,29789300,0.0,0.0
2023-07-17 00:00:00+01:00,385.0,389.5,384.2510070800781,387.1000061035156,387.1000061035156,0,0.0,0.0
11
tests/data/CNE-L-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,11 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-18 00:00:00+01:00,193.220001220703,200.839996337891,193.220001220703,196.839996337891,196.839996337891,653125,0,0
2023-05-17 00:00:00+01:00,199.740005493164,207.738006591797,190.121994018555,197.860000610352,197.860000610352,822268,0,0
2023-05-16 00:00:00+01:00,215.600006103516,215.600006103516,201.149993896484,205.100006103516,205.100006103516,451009,243.93939,0.471428571428571
2023-05-15 00:00:00+01:00,456.9090,464.9696,446.7272,461.1515,217.2121,830506.0000,0,0
2023-05-12 00:00:00+01:00,455.2121,458.6060,444.6060,448.4242,211.2173,717655.0000,0,0
2023-05-11 00:00:00+01:00,466.6666,466.6666,450.1212,456.0606,214.8142,1682077.0000,0,0
2023-05-10 00:00:00+01:00,462.8484,473.0303,450.9696,456.9090,215.2138,2639957.0000,0,0
2023-05-09 00:00:00+01:00,475.1515,482.9746,462.8485,463.2727,218.2112,898585.2857,0,0
2023-05-05 00:00:00+01:00,468.7878,477.6969,468.3636,476.0000,224.2061,454704.0000,0,0
2023-05-04 00:00:00+01:00,460.3030,472.6060,460.0527,469.6363,221.2086,415321.0000,0,0
11
tests/data/CNE-L-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,11 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-18 00:00:00+01:00,193.220001220703,200.839996337891,193.220001220703,196.839996337891,196.839996337891,653125,0,0
2023-05-17 00:00:00+01:00,199.740005493164,207.738006591797,190.121994018555,197.860000610352,197.860000610352,822268,0,0
2023-05-16 00:00:00+01:00,215.600006103516,215.600006103516,201.149993896484,205.100006103516,205.100006103516,451009,243.93939,0.471428571428571
2023-05-15 00:00:00+01:00,456.908996582031,464.969604492188,446.727203369141,461.151489257813,217.21208190918,830506,0,0
2023-05-12 00:00:00+01:00,455.212097167969,458.605987548828,444.605987548828,448.424194335938,211.217269897461,717655,0,0
2023-05-11 00:00:00+01:00,466.666595458984,466.666595458984,450.121185302734,456.060607910156,214.814178466797,1682077,0,0
2023-05-10 00:00:00+01:00,462.848388671875,473.030303955078,450.969604492188,456.908996582031,215.213790893555,2639957,0,0
2023-05-09 00:00:00+01:00,224,227.688003540039,218.199996948242,218.399993896484,102.87100982666,1906090,0,0
2023-05-05 00:00:00+01:00,468.787811279297,477.696899414063,468.363586425781,476,224.2060546875,454704,0,0
2023-05-04 00:00:00+01:00,460.303009033203,472.605987548828,460.052703857422,469.636291503906,221.208602905273,415321,0,0
24
tests/data/DEX-AX-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,24 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-31 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-30 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0.4406
2023-05-29 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-26 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-25 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-24 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-23 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-22 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-19 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-18 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-17 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-16 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-15 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-12 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-11 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-10 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-09 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-08 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-05 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-04 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-03 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-02 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-01 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
24
tests/data/DEX-AX-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,24 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-31 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-30 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0.4406
2023-05-29 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-26 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-25 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-24 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-23 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-22 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
2023-05-19 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-18 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-17 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-16 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-15 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-12 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-11 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-10 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-09 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-08 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-05 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-04 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-03 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-02 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
2023-05-01 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
42
tests/data/LA-V-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,42 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2020-09-30 00:00:00-04:00,4.40000009536743,4.44999980926514,4.01999998092651,4.44999980926514,4.44999980926514,22600,0,0
2020-09-29 00:00:00-04:00,4.3899998664856,4.40000009536743,4.13000011444092,4.30000019073486,4.30000019073486,10800,0,0
2020-09-28 00:00:00-04:00,4.09000015258789,4.25,4.09000015258789,4.25,4.25,8000,0,0
2020-09-25 00:00:00-04:00,3.95000004768372,4.09999990463257,3.95000004768372,4.05000019073486,4.05000019073486,13500,0,0
2020-09-24 00:00:00-04:00,3.84999990463257,4,3.84999990463257,4,4,8800,0,0
2020-09-23 00:00:00-04:00,3.99000000953674,4,3.99000000953674,4,4,5900,0,0
2020-09-22 00:00:00-04:00,3.90000009536743,4.09999990463257,3.84999990463257,4.09999990463257,4.09999990463257,3100,0,0
2020-09-21 00:00:00-04:00,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,1200,0,0
2020-09-18 00:00:00-04:00,3.92000007629395,4.09999990463257,3.92000007629395,4.09999990463257,4.09999990463257,27200,0,0
2020-09-17 00:00:00-04:00,3.90000009536743,3.99000000953674,3.8199999332428,3.99000000953674,3.99000000953674,3300,0,0
2020-09-16 00:00:00-04:00,3.79999995231628,4,3.79999995231628,4,4,3300,0,0
2020-09-15 00:00:00-04:00,3.95000004768372,4,3.95000004768372,4,4,2400,0,0
2020-09-14 00:00:00-04:00,3.96000003814697,4,3.96000003814697,4,4,800,0,0
2020-09-11 00:00:00-04:00,3.95000004768372,3.97000002861023,3.72000002861023,3.97000002861023,3.97000002861023,5700,0,0
2020-09-10 00:00:00-04:00,4,4.09999990463257,4,4.09999990463257,4.09999990463257,7100,0,0
2020-09-09 00:00:00-04:00,3.5699999332428,4,3.5699999332428,4,4,18100,0,0
2020-09-08 00:00:00-04:00,3.40000009536743,3.59999990463257,3.40000009536743,3.59999990463257,3.59999990463257,19500,0,0
2020-09-04 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,400,0,0
2020-09-03 00:00:00-04:00,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,0,0,0
2020-09-02 00:00:00-04:00,3.5,3.58999991416931,3.5,3.58999991416931,3.58999991416931,2000,0,0
2020-09-01 00:00:00-04:00,3.5,3.59999990463257,3.5,3.59999990463257,3.59999990463257,1200,0,0
2020-08-31 00:00:00-04:00,3.15000009536743,3.70000004768372,3.15000009536743,3.70000004768372,3.70000004768372,26500,0,0
2020-08-28 00:00:00-04:00,3.76999998092651,3.76999998092651,3.70000004768372,3.70000004768372,3.70000004768372,1600,0,0
2020-08-27 00:00:00-04:00,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,0,0,0
2020-08-26 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0.1
2020-08-25 00:00:00-04:00,3.40000009536743,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,2900,0,0
2020-08-24 00:00:00-04:00,3.29999995231628,3.5,3.29999995231628,3.5,3.5,10000,0,0
2020-08-21 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,150,0,0
2020-08-20 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-19 00:00:00-04:00,3.40000009536743,3.5,3.40000009536743,3.5,3.5,9050,0,0
2020-08-18 00:00:00-04:00,3.5,3.79999995231628,3.5,3.5,3.5,2250,0,0
2020-08-17 00:00:00-04:00,2.79999995231628,3.70000004768372,2.79999995231628,3.70000004768372,3.70000004768372,5050,0,0
2020-08-14 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-13 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-12 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-11 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-10 00:00:00-04:00,3.5,3.70000004768372,3.5,3.5,3.5,3300,0,0
2020-08-07 00:00:00-04:00,3.5,3.79999995231628,3.5,3.79999995231628,3.79999995231628,2500,0,0
2020-08-06 00:00:00-04:00,3.5,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,3000,0,0
2020-08-05 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
2020-08-04 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
42
tests/data/LA-V-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,42 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2020-09-30 00:00:00-04:00,4.40000009536743,4.44999980926514,4.01999998092651,4.44999980926514,4.44999980926514,22600,0,0
2020-09-29 00:00:00-04:00,4.3899998664856,4.40000009536743,4.13000011444092,4.30000019073486,4.30000019073486,10800,0,0
2020-09-28 00:00:00-04:00,4.09000015258789,4.25,4.09000015258789,4.25,4.25,8000,0,0
2020-09-25 00:00:00-04:00,3.95000004768372,4.09999990463257,3.95000004768372,4.05000019073486,4.05000019073486,13500,0,0
2020-09-24 00:00:00-04:00,3.84999990463257,4,3.84999990463257,4,4,8800,0,0
2020-09-23 00:00:00-04:00,3.99000000953674,4,3.99000000953674,4,4,5900,0,0
2020-09-22 00:00:00-04:00,3.90000009536743,4.09999990463257,3.84999990463257,4.09999990463257,4.09999990463257,3100,0,0
2020-09-21 00:00:00-04:00,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,1200,0,0
2020-09-18 00:00:00-04:00,3.92000007629395,4.09999990463257,3.92000007629395,4.09999990463257,4.09999990463257,27200,0,0
2020-09-17 00:00:00-04:00,3.90000009536743,3.99000000953674,3.8199999332428,3.99000000953674,3.99000000953674,3300,0,0
2020-09-16 00:00:00-04:00,3.79999995231628,4,3.79999995231628,4,4,3300,0,0
2020-09-15 00:00:00-04:00,3.95000004768372,4,3.95000004768372,4,4,2400,0,0
2020-09-14 00:00:00-04:00,3.96000003814697,4,3.96000003814697,4,4,800,0,0
2020-09-11 00:00:00-04:00,3.95000004768372,3.97000002861023,3.72000002861023,3.97000002861023,3.97000002861023,5700,0,0
2020-09-10 00:00:00-04:00,4,4.09999990463257,4,4.09999990463257,4.09999990463257,7100,0,0
2020-09-09 00:00:00-04:00,3.5699999332428,4,3.5699999332428,4,4,18100,0,0
2020-09-08 00:00:00-04:00,3.40000009536743,3.59999990463257,3.40000009536743,3.59999990463257,3.59999990463257,19500,0,0
2020-09-04 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,400,0,0
2020-09-03 00:00:00-04:00,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,0,0,0
2020-09-02 00:00:00-04:00,3.5,3.58999991416931,3.5,3.58999991416931,3.58999991416931,2000,0,0
2020-09-01 00:00:00-04:00,3.5,3.59999990463257,3.5,3.59999990463257,3.59999990463257,1200,0,0
2020-08-31 00:00:00-04:00,3.15000009536743,3.70000004768372,3.15000009536743,3.70000004768372,3.70000004768372,26500,0,0
2020-08-28 00:00:00-04:00,3.76999998092651,3.76999998092651,3.70000004768372,3.70000004768372,3.70000004768372,1600,0,0
2020-08-27 00:00:00-04:00,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,0,0,0
2020-08-26 00:00:00-04:00,0.370000004768372,0.370000004768372,0.370000004768372,0.370000004768372,0.370000004768372,0,0,0.1
2020-08-25 00:00:00-04:00,3.40000009536743,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,2900,0,0
2020-08-24 00:00:00-04:00,3.29999995231628,3.5,3.29999995231628,3.5,3.5,10000,0,0
2020-08-21 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,150,0,0
2020-08-20 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-19 00:00:00-04:00,3.40000009536743,3.5,3.40000009536743,3.5,3.5,9050,0,0
2020-08-18 00:00:00-04:00,3.5,3.79999995231628,3.5,3.5,3.5,2250,0,0
2020-08-17 00:00:00-04:00,2.79999995231628,3.70000004768372,2.79999995231628,3.70000004768372,3.70000004768372,5050,0,0
2020-08-14 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-13 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-12 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-11 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
2020-08-10 00:00:00-04:00,3.5,3.70000004768372,3.5,3.5,3.5,3300,0,0
2020-08-07 00:00:00-04:00,3.5,3.79999995231628,3.5,3.79999995231628,3.79999995231628,2500,0,0
2020-08-06 00:00:00-04:00,3.5,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,3000,0,0
2020-08-05 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
2020-08-04 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
17
tests/data/MOB-ST-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,17 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-08 00:00:00+02:00,24.8999996185303,24.9500007629395,24.1000003814697,24.75,24.75,7187,0,0
2023-05-09 00:00:00+02:00,25,25.5,23.1499996185303,24.1499996185303,24.1499996185303,22753,0,0
2023-05-10 00:00:00+02:00,24.1499996185303,24.1499996185303,22,22.9500007629395,22.9500007629395,62727,0,0
2023-05-11 00:00:00+02:00,22.9500007629395,25,22.9500007629395,23.3500003814697,23.3500003814697,19550,0,0
2023-05-12 00:00:00+02:00,23.3500003814697,24,22.1000003814697,23.8500003814697,23.8500003814697,17143,0,0
2023-05-15 00:00:00+02:00,23,25.7999992370605,22.5,23,23,43709,0,0
2023-05-16 00:00:00+02:00,22.75,24.0499992370605,22.5,22.75,22.75,16068,0,0
2023-05-17 00:00:00+02:00,23,23.8500003814697,22.1000003814697,23.6499996185303,23.6499996185303,19926,0,0
2023-05-19 00:00:00+02:00,23.6499996185303,23.8500003814697,22.1000003814697,22.2999992370605,22.2999992370605,41050,0,0
2023-05-22 00:00:00+02:00,22.0000004768372,24.1499996185303,21.5499997138977,22.7500009536743,22.7500009536743,34022,0,0
2023-05-23 00:00:00+02:00,22.75,22.8999996185303,21.75,22.5,22.5,13992,0,0
2023-05-24 00:00:00+02:00,21,24,21,22.0100002288818,22.0100002288818,18306,0,0.1
2023-05-25 00:00:00+02:00,21.5699996948242,22.8899993896484,20,21.1599998474121,21.1599998474121,35398,0,0
2023-05-26 00:00:00+02:00,21.1599998474121,22.4950008392334,20.5,21.0949993133545,21.0949993133545,8039,0,0
2023-05-29 00:00:00+02:00,22.1000003814697,22.1000003814697,20.25,20.75,20.75,17786,0,0
2023-05-30 00:00:00+02:00,20.75,21.6499996185303,20.1499996185303,20.4500007629395,20.4500007629395,10709,0,0
17
tests/data/MOB-ST-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,17 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-05-08 00:00:00+02:00,24.899999618530273,24.950000762939453,24.100000381469727,24.75,24.75,7187,0.0,0.0
2023-05-09 00:00:00+02:00,25.0,25.5,23.149999618530273,24.149999618530273,24.149999618530273,22753,0.0,0.0
2023-05-10 00:00:00+02:00,24.149999618530273,24.149999618530273,22.0,22.950000762939453,22.950000762939453,62727,0.0,0.0
2023-05-11 00:00:00+02:00,22.950000762939453,25.0,22.950000762939453,23.350000381469727,23.350000381469727,19550,0.0,0.0
2023-05-12 00:00:00+02:00,23.350000381469727,24.0,22.100000381469727,23.850000381469727,23.850000381469727,17143,0.0,0.0
2023-05-15 00:00:00+02:00,23.0,25.799999237060547,22.5,23.0,23.0,43709,0.0,0.0
2023-05-16 00:00:00+02:00,22.75,24.049999237060547,22.5,22.75,22.75,16068,0.0,0.0
2023-05-17 00:00:00+02:00,23.0,23.850000381469727,22.100000381469727,23.649999618530273,23.649999618530273,19926,0.0,0.0
2023-05-19 00:00:00+02:00,23.649999618530273,23.850000381469727,22.100000381469727,22.299999237060547,22.299999237060547,41050,0.0,0.0
2023-05-22 00:00:00+02:00,2.200000047683716,2.4149999618530273,2.1549999713897705,2.2750000953674316,2.2750000953674316,340215,0.0,0.0
2023-05-23 00:00:00+02:00,22.75,22.899999618530273,21.75,22.5,22.5,13992,0.0,0.0
2023-05-24 00:00:00+02:00,21.0,24.0,21.0,22.010000228881836,22.010000228881836,18306,0.0,0.1
2023-05-25 00:00:00+02:00,21.56999969482422,22.889999389648438,20.0,21.15999984741211,21.15999984741211,35398,0.0,0.0
2023-05-26 00:00:00+02:00,21.15999984741211,22.4950008392334,20.5,21.094999313354492,21.094999313354492,8039,0.0,0.0
2023-05-29 00:00:00+02:00,22.100000381469727,22.100000381469727,20.25,20.75,20.75,17786,0.0,0.0
2023-05-30 00:00:00+02:00,20.75,21.649999618530273,20.149999618530273,20.450000762939453,20.450000762939453,10709,0.0,0.0
23
tests/data/SPM-MI-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,23 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-06-01 00:00:00+02:00,5.72999992370606,5.78199996948242,5.3939998626709,5.3939998626709,5.3939998626709,3095860,0,0
2022-06-02 00:00:00+02:00,5.38600006103516,5.38600006103516,5.26800003051758,5.2939998626709,5.2939998626709,1662880,0,0
2022-06-03 00:00:00+02:00,5.34599990844727,5.34599990844727,5.15800018310547,5.16800003051758,5.16800003051758,1698900,0,0
2022-06-06 00:00:00+02:00,5.16800003051758,5.25200004577637,5.13800010681152,5.18800010681152,5.18800010681152,1074910,0,0
2022-06-07 00:00:00+02:00,5.21800003051758,5.22200012207031,5.07400016784668,5.1560001373291,5.1560001373291,1850680,0,0
2022-06-08 00:00:00+02:00,5.1560001373291,5.17599983215332,5.07200012207031,5.10200004577637,5.10200004577637,1140360,0,0
2022-06-09 00:00:00+02:00,5.09799995422363,5.09799995422363,4.87599983215332,4.8939998626709,4.8939998626709,2025480,0,0
2022-06-10 00:00:00+02:00,4.87999992370606,4.87999992370606,4.50400009155274,4.50400009155274,4.50400009155274,2982730,0,0
2022-06-13 00:00:00+02:00,4.3,4.37599983215332,3.83600006103516,3.83600006103516,3.83600006103516,4568210,0,0.1
2022-06-14 00:00:00+02:00,3.87750015258789,4.15999984741211,3.85200004577637,3.9439998626709,3.9439998626709,5354500,0,0
2022-06-15 00:00:00+02:00,4.03400001525879,4.16450004577637,3.73050003051758,3.73050003051758,3.73050003051758,6662610,0,0
2022-06-16 00:00:00+02:00,3.73050003051758,3.98499984741211,3.72400016784668,3.82550010681152,3.82550010681152,13379960,0,0
2022-06-17 00:00:00+02:00,3.8,4.29949989318848,3.75,4.29949989318848,4.29949989318848,12844160,0,0
2022-06-20 00:00:00+02:00,2.19422197341919,2.2295401096344,2.13992595672607,2.2295401096344,2.2295401096344,12364104,0,0
2022-06-21 00:00:00+02:00,2.24719905853272,2.28515291213989,2.19712090492249,2.21557092666626,2.21557092666626,8434013,0,0
2022-06-22 00:00:00+02:00,1.98679196834564,2.00365996360779,1.73798203468323,1.73798203468323,1.73798203468323,26496542,0,0
2022-06-23 00:00:00+02:00,1.62411904335022,1.68526804447174,1.37320005893707,1.59776198863983,1.59776198863983,48720201,0,0
2022-06-24 00:00:00+02:00,1.47599303722382,1.54610300064087,1.1739410161972,1.24932205677032,1.24932205677032,56877192,0,0
2022-06-27 00:00:00+02:00,1.49899995326996,1.79849994182587,1.49899995326996,1.79849994182587,1.79849994182587,460673,0,0
2022-06-28 00:00:00+02:00,2.15799999237061,3.05100011825562,2.12599992752075,3.05100011825562,3.05100011825562,3058635,0,0
2022-06-29 00:00:00+02:00,2.90000009536743,3.73799991607666,2.85899996757507,3.26399993896484,3.26399993896484,6516761,0,0
2022-06-30 00:00:00+02:00,3.24900007247925,3.28099989891052,2.5,2.5550000667572,2.5550000667572,4805984,0,0
23 tests/data/SPM-MI-1d-bad-stock-split.csv (Normal file)
@@ -0,0 +1,23 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2022-06-01 00:00:00+02:00,57.29999923706055,57.81999969482422,53.939998626708984,53.939998626708984,53.939998626708984,309586,0.0,0.0
2022-06-02 00:00:00+02:00,53.86000061035156,53.86000061035156,52.68000030517578,52.939998626708984,52.939998626708984,166288,0.0,0.0
2022-06-03 00:00:00+02:00,53.459999084472656,53.459999084472656,51.58000183105469,51.68000030517578,51.68000030517578,169890,0.0,0.0
2022-06-06 00:00:00+02:00,51.68000030517578,52.52000045776367,51.380001068115234,51.880001068115234,51.880001068115234,107491,0.0,0.0
2022-06-07 00:00:00+02:00,52.18000030517578,52.220001220703125,50.7400016784668,51.560001373291016,51.560001373291016,185068,0.0,0.0
2022-06-08 00:00:00+02:00,51.560001373291016,51.7599983215332,50.720001220703125,51.02000045776367,51.02000045776367,114036,0.0,0.0
2022-06-09 00:00:00+02:00,50.97999954223633,50.97999954223633,48.7599983215332,48.939998626708984,48.939998626708984,202548,0.0,0.0
2022-06-10 00:00:00+02:00,48.79999923706055,48.79999923706055,45.040000915527344,45.040000915527344,45.040000915527344,298273,0.0,0.0
2022-06-13 00:00:00+02:00,43.0,43.7599983215332,38.36000061035156,38.36000061035156,38.36000061035156,456821,0.0,0.1
2022-06-14 00:00:00+02:00,38.775001525878906,41.599998474121094,38.52000045776367,39.439998626708984,39.439998626708984,535450,0.0,0.0
2022-06-15 00:00:00+02:00,40.34000015258789,41.64500045776367,37.30500030517578,37.30500030517578,37.30500030517578,666261,0.0,0.0
2022-06-16 00:00:00+02:00,37.30500030517578,39.849998474121094,37.2400016784668,38.255001068115234,38.255001068115234,1337996,0.0,0.0
2022-06-17 00:00:00+02:00,38.0,42.994998931884766,37.5,42.994998931884766,42.994998931884766,1284416,0.0,0.0
2022-06-20 00:00:00+02:00,2.1942219734191895,2.2295401096343994,2.139925956726074,2.2295401096343994,2.2295401096343994,12364104,0.0,0.0
2022-06-21 00:00:00+02:00,2.247199058532715,2.2851529121398926,2.1971209049224854,2.2155709266662598,2.2155709266662598,8434013,0.0,0.0
2022-06-22 00:00:00+02:00,1.986791968345642,2.003659963607788,1.7379820346832275,1.7379820346832275,1.7379820346832275,26496542,0.0,0.0
2022-06-23 00:00:00+02:00,1.6241190433502197,1.6852680444717407,1.3732000589370728,1.5977619886398315,1.5977619886398315,48720201,0.0,0.0
2022-06-24 00:00:00+02:00,1.475993037223816,1.5461030006408691,1.1739410161972046,1.2493220567703247,1.2493220567703247,56877192,0.0,0.0
2022-06-27 00:00:00+02:00,1.4989999532699585,1.7984999418258667,1.4989999532699585,1.7984999418258667,1.7984999418258667,460673,0.0,0.0
2022-06-28 00:00:00+02:00,2.1579999923706055,3.0510001182556152,2.125999927520752,3.0510001182556152,3.0510001182556152,3058635,0.0,0.0
2022-06-29 00:00:00+02:00,2.9000000953674316,3.73799991607666,2.8589999675750732,3.2639999389648438,3.2639999389648438,6516761,0.0,0.0
2022-06-30 00:00:00+02:00,3.249000072479248,3.2809998989105225,2.5,2.555000066757202,2.555000066757202,4805984,0.0,0.0
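# Illustration: the pair of SPM.MI files above encodes a "bad stock split".
# Yahoo recorded a 0.1 split on 2022-06-13 but only applied it from 2022-06-20
# onward, so earlier rows are ~10x too high (Close 42.99 on 06-17 vs 2.23 on
# 06-20). A minimal sketch of spotting the unadjusted block; the helper name is
# hypothetical, and this is not yfinance's _fix_bad_stock_splits algorithm:
import pandas as pd

def flag_unadjusted_rows(close: pd.Series, split_factor: float) -> pd.Series:
    """Mask of rows still missing the split adjustment.

    Assumes `close` is sorted oldest->newest with one missed adjustment."""
    ratio = close / close.shift(-1)
    # A day-over-day jump of roughly 1/split_factor (~10x here) marks the last bad row
    step = ratio > 0.5 / split_factor
    if not step.any():
        return pd.Series(False, index=close.index)
    last_bad = step[step].index[0]
    return pd.Series(close.index <= last_bad, index=close.index)

# Usage idea: df.loc[flag_unadjusted_rows(df["Close"], 0.1), price_cols] *= 0.1
# reproduces the "-fixed" file from the bad one (price_cols = OHLC + Adj Close).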
30 tests/data/SSW-JO-1d-100x-error-fixed.csv (Normal file)
@@ -0,0 +1,30 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-06-09 00:00:00+02:00,34.7000,34.7100,33.2400,33.6200,33.6200,7148409,0,0
2023-06-08 00:00:00+02:00,34.9000,34.9900,34.0400,34.3600,34.3600,10406999,0,0
2023-06-07 00:00:00+02:00,34.5500,35.6400,34.3200,35.0900,35.0900,10118918,0,0
2023-06-06 00:00:00+02:00,34.5000,34.8200,34.0500,34.4600,34.4600,9109709,0,0
2023-06-05 00:00:00+02:00,35.0000,35.3000,34.2000,34.7000,34.7000,8791993,0,0
2023-06-02 00:00:00+02:00,35.6900,36.1800,34.6000,34.9700,34.9700,8844549,0,0
2023-06-01 00:00:00+02:00,35.2300,35.3800,34.2400,35.3500,35.3500,6721030,0,0
2023-05-31 00:00:00+02:00,34.8,35.48,34.26,35.01,35.01,32605833,0,0
2023-05-30 00:00:00+02:00,34.39,35.37,33.85,34.23,34.23,8970804,0,0
2023-05-29 00:00:00+02:00,34.66,35.06,34.02,34.32,34.32,3912803,0,0
2023-05-26 00:00:00+02:00,34.75,35.99,34.33,34.53,34.53,6744718,0,0
2023-05-25 00:00:00+02:00,35.4,36.09,34.63,35.07,35.07,16900221,0,0
2023-05-24 00:00:00+02:00,36.2,36.5,35.26,35.4,35.4,9049505,0,0
2023-05-23 00:00:00+02:00,36.9,36.67,35.56,36.1,36.1,10797373,0,0
2023-05-22 00:00:00+02:00,37.05,37.36,36.09,36.61,36.61,7132641,0,0
2023-05-19 00:00:00+02:00,36.2,37.15,36.25,36.9,36.9,12648518,0,0
2023-05-18 00:00:00+02:00,36.57,36.99,35.84,36.46,36.46,10674542,0,0
2023-05-17 00:00:00+02:00,36.87,37.31,36.56,36.71,36.71,9892791,0,0
2023-05-16 00:00:00+02:00,37.15,37.73,36.96,37.03,37.03,4706789,0,0
2023-05-15 00:00:00+02:00,37.74,38.05,36.96,37.27,37.27,7890969,0,0
2023-05-12 00:00:00+02:00,37.5,38.44,36.71,37.74,37.74,8724303,0,0
2023-05-11 00:00:00+02:00,38.8,38.88,37.01,37.32,37.32,14371855,0,0
2023-05-10 00:00:00+02:00,38.93,38.8,36.42,38.1,38.1,30393389,0,0
2023-05-09 00:00:00+02:00,44.41,44.41,39.39,39.66,39.66,19833428,0,0
2023-05-08 00:00:00+02:00,44.63,45.78,44.56,44.71,44.71,11092519,0,0
2023-05-05 00:00:00+02:00,42.99,44.9,42.87,44.58,44.58,28539048,0,0
2023-05-04 00:00:00+02:00,41.49,43.3,41.23,42.83,42.83,15506868,0,0
2023-05-03 00:00:00+02:00,39.75,40.98,39.68,40.95,40.95,14657028,0,0
2023-05-02 00:00:00+02:00,40.37,40.32,39.17,39.65,39.65,11818133,0,0
30 tests/data/SSW-JO-1d-100x-error.csv (Normal file)
@@ -0,0 +1,30 @@
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
2023-06-09 00:00:00+02:00,34.700001,34.709999,33.240002,33.619999,33.619999,7148409,0,0
2023-06-08 00:00:00+02:00,34.900002,34.990002,34.040001,34.360001,34.360001,10406999,0,0
2023-06-07 00:00:00+02:00,34.549999,35.639999,34.320000,35.090000,35.090000,10118918,0,0
2023-06-06 00:00:00+02:00,34.500000,34.820000,34.049999,34.459999,34.459999,9109709,0,0
2023-06-05 00:00:00+02:00,35.000000,35.299999,34.200001,34.700001,34.700001,8791993,0,0
2023-06-02 00:00:00+02:00,35.689999,36.180000,34.599998,34.970001,34.970001,8844549,0,0
2023-06-01 00:00:00+02:00,35.230000,35.380001,34.240002,35.349998,35.349998,6721030,0,0
2023-05-31 00:00:00+02:00,3480,3548,3426,3501,3501,32605833,0,0
2023-05-30 00:00:00+02:00,3439,3537,3385,3423,3423,8970804,0,0
2023-05-29 00:00:00+02:00,3466,3506,3402,3432,3432,3912803,0,0
2023-05-26 00:00:00+02:00,3475,3599,3433,3453,3453,6744718,0,0
2023-05-25 00:00:00+02:00,3540,3609,3463,3507,3507,16900221,0,0
2023-05-24 00:00:00+02:00,3620,3650,3526,3540,3540,9049505,0,0
2023-05-23 00:00:00+02:00,3690,3667,3556,3610,3610,10797373,0,0
2023-05-22 00:00:00+02:00,3705,3736,3609,3661,3661,7132641,0,0
2023-05-19 00:00:00+02:00,3620,3715,3625,3690,3690,12648518,0,0
2023-05-18 00:00:00+02:00,3657,3699,3584,3646,3646,10674542,0,0
2023-05-17 00:00:00+02:00,3687,3731,3656,3671,3671,9892791,0,0
2023-05-16 00:00:00+02:00,3715,3773,3696,3703,3703,4706789,0,0
2023-05-15 00:00:00+02:00,3774,3805,3696,3727,3727,7890969,0,0
2023-05-12 00:00:00+02:00,3750,3844,3671,3774,3774,8724303,0,0
2023-05-11 00:00:00+02:00,3880,3888,3701,3732,3732,14371855,0,0
2023-05-10 00:00:00+02:00,3893,3880,3642,3810,3810,30393389,0,0
2023-05-09 00:00:00+02:00,4441,4441,3939,3966,3966,19833428,0,0
2023-05-08 00:00:00+02:00,4463,4578,4456,4471,4471,11092519,0,0
2023-05-05 00:00:00+02:00,4299,4490,4287,4458,4458,28539048,0,0
2023-05-04 00:00:00+02:00,4149,4330,4123,4283,4283,15506868,0,0
2023-05-03 00:00:00+02:00,3975,4098,3968,4095,4095,14657028,0,0
2023-05-02 00:00:00+02:00,4037,4032,3917,3965,3965,11818133,0,0
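# Illustration: SSW-JO-1d-100x-error.csv above shows a block unit switch, with
# older rows in cents (3501) and newer rows in currency units (35.01). A
# minimal sketch of normalising such a block, assuming a single ~100x switch
# point; hypothetical helper, not yfinance's _fix_unit_switch:
import pandas as pd

def normalise_unit_switch(df: pd.DataFrame,
                          cols=("Open", "High", "Low", "Close", "Adj Close")) -> pd.DataFrame:
    out = df.sort_index().copy()
    ratio = out["Close"] / out["Close"].shift(-1)
    bad = ratio.round(-1) == 100  # a ~100x step down marks the last cents row
    if bad.any():
        last_bad = bad[bad].index[0]
        out.loc[:last_bad, list(cols)] /= 100.0
    return out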
@@ -3,6 +3,7 @@ from .context import session_gbl

import unittest

+import os
import datetime as _dt
import pytz as _tz
import numpy as _np
@@ -43,13 +44,25 @@ class TestPriceHistory(unittest.TestCase):
        df_tkrs = df.columns.levels[1]
        self.assertEqual(sorted(tkrs), sorted(df_tkrs))

+    def test_download_with_invalid_ticker(self):
+        #Checks if using an invalid symbol gives the same output as not using an invalid symbol in combination with a valid symbol (AAPL)
+        #Checks to make sure that invalid symbol handling for the date column is the same as the base case (no invalid symbols)
+
+        invalid_tkrs = ["AAPL", "ATVI"] #AAPL exists and ATVI does not exist
+        valid_tkrs = ["AAPL", "INTC"] #AAPL and INTC both exist
+
+        data_invalid_sym = yf.download(invalid_tkrs, start='2023-11-16', end='2023-11-17')
+        data_valid_sym = yf.download(valid_tkrs, start='2023-11-16', end='2023-11-17')
+
+        self.assertEqual(data_invalid_sym['Close']['AAPL']['2023-11-16'],data_valid_sym['Close']['AAPL']['2023-11-16'])
+
    def test_duplicatingHourly(self):
        tkrs = ["IMP.JO", "BHG.JO", "SSW.JO", "BP.L", "INTC"]
        for tkr in tkrs:
            dat = yf.Ticker(tkr, session=self.session)
            tz = dat._get_ticker_tz(proxy=None, timeout=None)

-            dt_utc = _tz.timezone("UTC").localize(_dt.datetime.utcnow())
+            dt_utc = _pd.Timestamp.utcnow()
            dt = dt_utc.astimezone(_tz.timezone(tz))
            start_d = dt.date() - _dt.timedelta(days=7)
            df = dat.history(start=start_d, interval="1h")
@@ -58,7 +71,7 @@
            dt1 = df.index[-1]
            try:
                self.assertNotEqual(dt0.hour, dt1.hour)
-            except:
+            except AssertionError:
                print("Ticker = ", tkr)
                raise

@@ -69,7 +82,7 @@
            dat = yf.Ticker(tkr, session=self.session)
            tz = dat._get_ticker_tz(proxy=None, timeout=None)

-            dt_utc = _tz.timezone("UTC").localize(_dt.datetime.utcnow())
+            dt_utc = _pd.Timestamp.utcnow()
            dt = dt_utc.astimezone(_tz.timezone(tz))
            if dt.time() < _dt.time(17, 0):
                continue
@@ -81,7 +94,7 @@
            dt1 = df.index[-1]
            try:
                self.assertNotEqual(dt0, dt1)
-            except:
+            except AssertionError:
                print("Ticker = ", tkr)
                raise

@@ -105,7 +118,7 @@
            dt1 = df.index[-1]
            try:
                self.assertNotEqual(dt0.week, dt1.week)
-            except:
+            except AssertionError:
                print("Ticker={}: Last two rows within same week:".format(tkr))
                print(df.iloc[df.shape[0] - 2:])
                raise
@@ -113,7 +126,69 @@
        if not test_run:
            self.skipTest("Skipping test_duplicatingWeekly() because not possible to fail Monday/weekend")

+    def test_pricesEventsMerge(self):
+        # Test case: dividend occurs after last row in price data
+        tkr = 'INTC'
+        start_d = _dt.date(2022, 1, 1)
+        end_d = _dt.date(2023, 1, 1)
+        df = yf.Ticker(tkr, session=self.session).history(interval='1d', start=start_d, end=end_d)
+        div = 1.0
+        future_div_dt = df.index[-1] + _dt.timedelta(days=1)
+        if future_div_dt.weekday() in [5, 6]:
+            future_div_dt += _dt.timedelta(days=1) * (7 - future_div_dt.weekday())
+        divs = _pd.DataFrame(data={"Dividends":[div]}, index=[future_div_dt])
+        df2 = yf.utils.safe_merge_dfs(df.drop(['Dividends', 'Stock Splits'], axis=1), divs, '1d')
+        self.assertIn(future_div_dt, df2.index)
+        self.assertIn("Dividends", df2.columns)
+        self.assertEqual(df2['Dividends'].iloc[-1], div)
+
+    def test_pricesEventsMerge_bug(self):
+        # Reproduce exception when merging intraday prices with future dividend
+        interval = '30m'
+        df_index = []
+        d = 13
+        for h in range(0, 16):
+            for m in [0, 30]:
+                df_index.append(_dt.datetime(2023, 9, d, h, m))
+        df_index.append(_dt.datetime(2023, 9, d, 16))
+        df = _pd.DataFrame(index=df_index)
+        df.index = _pd.to_datetime(df.index)
+        df['Close'] = 1.0
+
+        div = 1.0
+        future_div_dt = _dt.datetime(2023, 9, 14, 10)
+        divs = _pd.DataFrame(data={"Dividends":[div]}, index=[future_div_dt])
+
+        yf.utils.safe_merge_dfs(df, divs, interval)
+        # No exception = test pass
+
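# Illustration: both new tests above drive the internal helper
# yf.utils.safe_merge_dfs(prices_df, events_df, interval) directly; its job is
# to outer-join an events frame onto prices, extending the index when the
# event falls outside it. A minimal standalone call, mirroring
# test_pricesEventsMerge_bug (signature taken from the tests; this is an
# internal API and may change):
import datetime as _dt
import pandas as _pd
import yfinance as yf

prices = _pd.DataFrame({"Close": [1.0, 1.0]},
                       index=_pd.to_datetime([_dt.datetime(2023, 9, 13, 15, 30),
                                              _dt.datetime(2023, 9, 13, 16, 0)]))
divs = _pd.DataFrame({"Dividends": [0.25]},
                     index=_pd.to_datetime([_dt.datetime(2023, 9, 14, 10)]))
merged = yf.utils.safe_merge_dfs(prices, divs, "30m")
assert "Dividends" in merged.columns  # the event column survives the merge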
+    def test_intraDayWithEvents(self):
+        tkrs = ["BHP.AX", "IMP.JO", "BP.L", "PNL.L", "INTC"]
+        test_run = False
+        for tkr in tkrs:
+            start_d = _dt.date.today() - _dt.timedelta(days=59)
+            end_d = None
+            df_daily = yf.Ticker(tkr, session=self.session).history(start=start_d, end=end_d, interval="1d", actions=True)
+            df_daily_divs = df_daily["Dividends"][df_daily["Dividends"] != 0]
+            if df_daily_divs.shape[0] == 0:
+                continue
+
+            last_div_date = df_daily_divs.index[-1]
+            start_d = last_div_date.date()
+            end_d = last_div_date.date() + _dt.timedelta(days=1)
+            df_intraday = yf.Ticker(tkr, session=self.session).history(start=start_d, end=end_d, interval="15m", actions=True)
+            self.assertTrue((df_intraday["Dividends"] != 0.0).any())
+
+            df_intraday_divs = df_intraday["Dividends"][df_intraday["Dividends"] != 0]
+            df_intraday_divs.index = df_intraday_divs.index.floor('D')
+            self.assertTrue(df_daily_divs.equals(df_intraday_divs))
+
+            test_run = True
+
+        if not test_run:
+            self.skipTest("Skipping test_intraDayWithEvents() because no tickers had a dividend in last 60 days")
+
+    def test_intraDayWithEvents_tase(self):
        # TASE dividend release pre-market, doesn't merge nicely with intra-day data so check still present

        tase_tkrs = ["ICL.TA", "ESLT.TA", "ONE.TA", "MGDL.TA"]
@@ -124,21 +199,48 @@
            df_daily = yf.Ticker(tkr, session=self.session).history(start=start_d, end=end_d, interval="1d", actions=True)
            df_daily_divs = df_daily["Dividends"][df_daily["Dividends"] != 0]
            if df_daily_divs.shape[0] == 0:
                # self.skipTest("Skipping test_intraDayWithEvents() because 'ICL.TA' has no dividend in last 60 days")
                continue

            last_div_date = df_daily_divs.index[-1]
            start_d = last_div_date.date()
            end_d = last_div_date.date() + _dt.timedelta(days=1)
-            df = yf.Ticker(tkr, session=self.session).history(start=start_d, end=end_d, interval="15m", actions=True)
-            self.assertTrue((df["Dividends"] != 0.0).any())
+            df_intraday = yf.Ticker(tkr, session=self.session).history(start=start_d, end=end_d, interval="15m", actions=True)
+            self.assertTrue((df_intraday["Dividends"] != 0.0).any())
+
+            df_intraday_divs = df_intraday["Dividends"][df_intraday["Dividends"] != 0]
+            df_intraday_divs.index = df_intraday_divs.index.floor('D')
+            self.assertTrue(df_daily_divs.equals(df_intraday_divs))

            test_run = True
            break

        if not test_run:
-            self.skipTest("Skipping test_intraDayWithEvents() because no tickers had a dividend in last 60 days")
+            self.skipTest("Skipping test_intraDayWithEvents_tase() because no tickers had a dividend in last 60 days")

    def test_dailyWithEvents(self):
        start_d = _dt.date(2022, 1, 1)
        end_d = _dt.date(2023, 1, 1)

        tkr_div_dates = {'BHP.AX': [_dt.date(2022, 9, 1), _dt.date(2022, 2, 24)],  # Yahoo claims 23-Feb but wrong because DST
                         'IMP.JO': [_dt.date(2022, 9, 21), _dt.date(2022, 3, 16)],
                         'BP.L': [_dt.date(2022, 11, 10), _dt.date(2022, 8, 11), _dt.date(2022, 5, 12),
                                  _dt.date(2022, 2, 17)],
                         'INTC': [_dt.date(2022, 11, 4), _dt.date(2022, 8, 4), _dt.date(2022, 5, 5),
                                  _dt.date(2022, 2, 4)]}

        for tkr, dates in tkr_div_dates.items():
            df = yf.Ticker(tkr, session=self.session).history(interval='1d', start=start_d, end=end_d)
            df_divs = df[df['Dividends'] != 0].sort_index(ascending=False)
            try:
                self.assertTrue((df_divs.index.date == dates).all())
            except AssertionError:
                print(f'- ticker = {tkr}')
                print('- response:')
                print(df_divs.index.date)
                print('- answer:')
                print(dates)
                raise

    def test_dailyWithEvents_bugs(self):
        # Reproduce issue #521
        tkr1 = "QQQ"
        tkr2 = "GDX"
@@ -150,7 +252,7 @@
        self.assertTrue(((df2["Dividends"] > 0) | (df2["Stock Splits"] > 0)).any())
        try:
            self.assertTrue(df1.index.equals(df2.index))
-        except:
+        except AssertionError:
            missing_from_df1 = df2.index.difference(df1.index)
            missing_from_df2 = df1.index.difference(df2.index)
            print("{} missing these dates: {}".format(tkr1, missing_from_df1))
@@ -165,13 +267,22 @@
        self.assertTrue(((df1["Dividends"] > 0) | (df1["Stock Splits"] > 0)).any())
        try:
            self.assertTrue(df1.index.equals(df2.index))
-        except:
+        except AssertionError:
            missing_from_df1 = df2.index.difference(df1.index)
            missing_from_df2 = df1.index.difference(df2.index)
            print("{}-with-events missing these dates: {}".format(tkr, missing_from_df1))
            print("{}-without-events missing these dates: {}".format(tkr, missing_from_df2))
            raise

+        # Reproduce issue #1634 - 1d dividend out-of-range, should be prepended to prices
+        div_dt = _pd.Timestamp(2022, 7, 21).tz_localize("America/New_York")
+        df_dividends = _pd.DataFrame(data={"Dividends":[1.0]}, index=[div_dt])
+        df_prices = _pd.DataFrame(data={c:[1.0] for c in yf.const._PRICE_COLNAMES_}|{'Volume':0}, index=[div_dt+_dt.timedelta(days=1)])
+        df_merged = yf.utils.safe_merge_dfs(df_prices, df_dividends, '1d')
+        self.assertEqual(df_merged.shape[0], 2)
+        self.assertTrue(df_merged[df_prices.columns].iloc[1:].equals(df_prices))
+        self.assertEqual(df_merged.index[0], div_dt)

    def test_weeklyWithEvents(self):
        # Reproduce issue #521
        tkr1 = "QQQ"
@@ -184,7 +295,7 @@
        self.assertTrue(((df2["Dividends"] > 0) | (df2["Stock Splits"] > 0)).any())
        try:
            self.assertTrue(df1.index.equals(df2.index))
-        except:
+        except AssertionError:
            missing_from_df1 = df2.index.difference(df1.index)
            missing_from_df2 = df1.index.difference(df2.index)
            print("{} missing these dates: {}".format(tkr1, missing_from_df1))
@@ -199,7 +310,7 @@
        self.assertTrue(((df1["Dividends"] > 0) | (df1["Stock Splits"] > 0)).any())
        try:
            self.assertTrue(df1.index.equals(df2.index))
-        except:
+        except AssertionError:
            missing_from_df1 = df2.index.difference(df1.index)
            missing_from_df2 = df1.index.difference(df2.index)
            print("{}-with-events missing these dates: {}".format(tkr, missing_from_df1))
@@ -217,7 +328,7 @@
        self.assertTrue(((df2["Dividends"] > 0) | (df2["Stock Splits"] > 0)).any())
        try:
            self.assertTrue(df1.index.equals(df2.index))
-        except:
+        except AssertionError:
            missing_from_df1 = df2.index.difference(df1.index)
            missing_from_df2 = df1.index.difference(df2.index)
            print("{} missing these dates: {}".format(tkr1, missing_from_df1))
@@ -232,7 +343,7 @@
        self.assertTrue(((df1["Dividends"] > 0) | (df1["Stock Splits"] > 0)).any())
        try:
            self.assertTrue(df1.index.equals(df2.index))
-        except:
+        except AssertionError:
            missing_from_df1 = df2.index.difference(df1.index)
            missing_from_df2 = df1.index.difference(df2.index)
            print("{}-with-events missing these dates: {}".format(tkr, missing_from_df1))
@@ -241,8 +352,12 @@

    def test_monthlyWithEvents2(self):
        # Simply check no exception from internal merge
        tkr = "ABBV"
-        yf.Ticker("ABBV").history(period="max", interval="1mo")
+        dfm = yf.Ticker("ABBV").history(period="max", interval="1mo")
+        dfd = yf.Ticker("ABBV").history(period="max", interval="1d")
+        dfd = dfd[dfd.index > dfm.index[0]]
+        dfm_divs = dfm[dfm['Dividends'] != 0]
+        dfd_divs = dfd[dfd['Dividends'] != 0]
+        self.assertEqual(dfm_divs.shape[0], dfd_divs.shape[0])

    def test_tz_dst_ambiguous(self):
        # Reproduce issue #1100
@@ -252,9 +367,9 @@
            raise Exception("Ambiguous DST issue not resolved")

    def test_dst_fix(self):
        # Daily intervals should start at time 00:00. But for some combinations of date and timezone,
        # Yahoo has time off by few hours (e.g. Brazil 23:00 around Jan-2022). Suspect DST problem.
        # The clue is (a) minutes=0 and (b) hour near 0.
        # Obviously Yahoo meant 00:00, so ensure this doesn't affect date conversion.

        # The correction is successful if no days are weekend, and weekly data begins Monday
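# Illustration: the correction described in the comments above. If a daily bar
# is stamped with minutes == 0 and an hour near midnight, Yahoo meant 00:00.
# A minimal sketch of the snapping idea (hypothetical helper, not the exact
# yfinance implementation):
import pandas as _pd

def snap_to_midnight(index: _pd.DatetimeIndex) -> _pd.DatetimeIndex:
    fixed = []
    for ts in index:
        if ts.minute == 0 and (ts.hour <= 2 or ts.hour >= 22):
            # e.g. 23:00 -> 00:00 of the next day, 01:00 -> 00:00 same day
            ts = (ts + _pd.Timedelta(hours=2)).normalize()
        fixed.append(ts)
    return _pd.DatetimeIndex(fixed)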
@@ -272,29 +387,25 @@
            df = dat.history(start=start, end=end, interval=interval)
            try:
                self.assertTrue((df.index.weekday == 0).all())
-            except:
+            except AssertionError:
                print("Weekly data not aligned to Monday")
                raise

    def test_prune_post_intraday_us(self):
-        # Half-day before USA Thanksgiving. Yahoo normally
+        # Half-day at USA Thanksgiving. Yahoo normally
        # returns an interval starting when regular trading closes,
        # even if prepost=False.

        # Setup
        tkr = "AMZN"
        interval = "1h"
        interval_td = _dt.timedelta(hours=1)
        time_open = _dt.time(9, 30)
        time_close = _dt.time(16)
-        special_day = _dt.date(2022, 11, 25)
+        special_day = _dt.date(2023, 11, 24)
        time_early_close = _dt.time(13)
        dat = yf.Ticker(tkr, session=self.session)

        # Run
        start_d = special_day - _dt.timedelta(days=7)
        end_d = special_day + _dt.timedelta(days=7)
-        df = dat.history(start=start_d, end=end_d, interval=interval, prepost=False, keepna=True)
+        df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
        tg_last_dt = df.loc[str(special_day)].index[-1]
        self.assertTrue(tg_last_dt.time() < time_early_close)

@@ -303,88 +414,22 @@
        end_d = _dt.date(special_day.year+1, 1, 1)
        df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
        last_dts = _pd.Series(df.index).groupby(df.index.date).last()
        f_early_close = (last_dts+interval_td).dt.time < time_close
        early_close_dates = last_dts.index[f_early_close].values
        self.assertEqual(len(early_close_dates), 1)
        self.assertEqual(early_close_dates[0], special_day)

        first_dts = _pd.Series(df.index).groupby(df.index.date).first()
        f_late_open = first_dts.dt.time > time_open
        late_open_dates = first_dts.index[f_late_open]
        self.assertEqual(len(late_open_dates), 0)

    def test_prune_post_intraday_omx(self):
        # Half-day before Sweden Christmas. Yahoo normally
        # returns an interval starting when regular trading closes,
        # even if prepost=False.
        # If prepost=False, test that yfinance is removing prepost intervals.

        # Setup
        tkr = "AEC.ST"
        interval = "1h"
        interval_td = _dt.timedelta(hours=1)
        time_open = _dt.time(9)
        time_close = _dt.time(17,30)
        special_day = _dt.date(2022, 12, 23)
        time_early_close = _dt.time(13, 2)
        dat = yf.Ticker(tkr, session=self.session)

        # Half trading day Jan 5, Apr 14, May 25, Jun 23, Nov 4, Dec 23, Dec 30
        half_days = [_dt.date(special_day.year, x[0], x[1]) for x in [(1,5), (4,14), (5,25), (6,23), (11,4), (12,23), (12,30)]]

        # Yahoo has incorrectly classified afternoon of 2022-04-13 as post-market.
        # Nothing yfinance can do because Yahoo doesn't return data with prepost=False.
        # But need to handle in this test.
        expected_incorrect_half_days = [_dt.date(2022,4,13)]
        half_days = sorted(half_days+expected_incorrect_half_days)

        # Run
        start_d = special_day - _dt.timedelta(days=7)
        end_d = special_day + _dt.timedelta(days=7)
        df = dat.history(start=start_d, end=end_d, interval=interval, prepost=False, keepna=True)
        tg_last_dt = df.loc[str(special_day)].index[-1]
        self.assertTrue(tg_last_dt.time() < time_early_close)

        # Test no other afternoons (or mornings) were pruned
        start_d = _dt.date(special_day.year, 1, 1)
        end_d = _dt.date(special_day.year+1, 1, 1)
        df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
        last_dts = _pd.Series(df.index).groupby(df.index.date).last()
        f_early_close = (last_dts+interval_td).dt.time < time_close
        early_close_dates = last_dts.index[f_early_close].values
        unexpected_early_close_dates = [d for d in early_close_dates if not d in half_days]
        self.assertEqual(len(unexpected_early_close_dates), 0)
        self.assertEqual(len(early_close_dates), len(half_days))
        self.assertTrue(_np.equal(early_close_dates, half_days).all())

        first_dts = _pd.Series(df.index).groupby(df.index.date).first()
        f_late_open = first_dts.dt.time > time_open
        late_open_dates = first_dts.index[f_late_open]
        self.assertEqual(len(late_open_dates), 0)
        dfd = dat.history(start=start_d, end=end_d, interval='1d', prepost=False, keepna=True)
        self.assertTrue(_np.equal(dfd.index.date, _pd.to_datetime(last_dts.index).date).all())

    def test_prune_post_intraday_asx(self):
        # Setup
        tkr = "BHP.AX"
        interval = "1h"
        interval_td = _dt.timedelta(hours=1)
        time_open = _dt.time(10)
        time_close = _dt.time(16,12)
-        # No early closes in 2022
+        # No early closes in 2023
        dat = yf.Ticker(tkr, session=self.session)

-        # Test no afternoons (or mornings) were pruned
-        start_d = _dt.date(2022, 1, 1)
-        end_d = _dt.date(2022+1, 1, 1)
+        # Test no other afternoons (or mornings) were pruned
+        start_d = _dt.date(2023, 1, 1)
+        end_d = _dt.date(2023+1, 1, 1)
        df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
        last_dts = _pd.Series(df.index).groupby(df.index.date).last()
        f_early_close = (last_dts+interval_td).dt.time < time_close
        early_close_dates = last_dts.index[f_early_close].values
        self.assertEqual(len(early_close_dates), 0)

        first_dts = _pd.Series(df.index).groupby(df.index.date).first()
        f_late_open = first_dts.dt.time > time_open
        late_open_dates = first_dts.index[f_late_open]
        self.assertEqual(len(late_open_dates), 0)
        dfd = dat.history(start=start_d, end=end_d, interval='1d', prepost=False, keepna=True)
        self.assertTrue(_np.equal(dfd.index.date, _pd.to_datetime(last_dts.index).date).all())

    def test_weekly_2rows_fix(self):
        tkr = "AMZN"
@@ -403,7 +448,8 @@
        end = "2019-12-31"
        interval = "3mo"

-        df = dat.history(start=start, end=end, interval=interval)
+        dat.history(start=start, end=end, interval=interval)


class TestPriceRepair(unittest.TestCase):
    session = None
@@ -417,6 +463,18 @@
        if cls.session is not None:
            cls.session.close()

+    def test_types(self):
+        tkr = 'INTC'
+        dat = yf.Ticker(tkr, session=self.session)
+
+        data = dat.history(period="3mo", interval="1d", prepost=True, repair=True)
+        self.assertIsInstance(data, _pd.DataFrame, "data has wrong type")
+        self.assertFalse(data.empty, "data is empty")
+
+        reconstructed = dat._lazy_load_price_history()._reconstruct_intervals_batch(data, "1wk", True)
+        self.assertIsInstance(reconstructed, _pd.DataFrame, "data has wrong type")
+        self.assertFalse(data.empty, "data is empty")
+
    def test_reconstruct_2m(self):
        # 2m repair requires 1m data.
        # Yahoo restricts 1m fetches to 7 days max within last 30 days.
@@ -425,7 +483,6 @@
        tkrs = ["BHP.AX", "IMP.JO", "BP.L", "PNL.L", "INTC"]

        dt_now = _pd.Timestamp.utcnow()
-        td_7d = _dt.timedelta(days=7)
        td_60d = _dt.timedelta(days=60)

        # Round time for 'requests_cache' reuse
@@ -435,25 +492,26 @@
            dat = yf.Ticker(tkr, session=self.session)
            end_dt = dt_now
            start_dt = end_dt - td_60d
-            df = dat.history(start=start_dt, end=end_dt, interval="2m", repair=True)
+            dat.history(start=start_dt, end=end_dt, interval="2m", repair=True)

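# Illustration: the 7-day / 30-day limits that test_reconstruct_2m works
# around. To gather all the 1m data Yahoo will serve, requests must go out in
# <=7-day windows inside the trailing 30 days. A hedged sketch using only the
# public Ticker.history() API (hypothetical helper):
import datetime as _dt
import pandas as _pd
import yfinance as yf

def fetch_1m_month(tkr: str) -> _pd.DataFrame:
    dat = yf.Ticker(tkr)
    end = _pd.Timestamp.utcnow().ceil("D")
    chunks = []
    for _ in range(4):  # 4 x 7 days stays inside the 30-day window
        start = end - _dt.timedelta(days=7)
        chunks.append(dat.history(start=start, end=end, interval="1m"))
        end = start
    return _pd.concat(chunks).sort_index()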
-    def test_repair_100x_weekly(self):
+    def test_repair_100x_random_weekly(self):
        # Setup:
        tkr = "PNL.L"
        dat = yf.Ticker(tkr, session=self.session)
        tz_exchange = dat.fast_info["timezone"]
+        hist = dat._lazy_load_price_history()

        data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
        df = _pd.DataFrame(data={"Open": [470.5, 473.5, 474.5, 470],
                                 "High": [476, 476.5, 477, 480],
                                 "Low": [470.5, 470, 465.5, 468.26],
                                 "Close": [475, 473.5, 472, 473.5],
-                                "Adj Close": [475, 473.5, 472, 473.5],
+                                "Adj Close": [470.1, 468.6, 467.1, 468.6],
                                 "Volume": [2295613, 2245604, 3000287, 2635611]},
                           index=_pd.to_datetime([_dt.date(2022, 10, 24),
                                                  _dt.date(2022, 10, 17),
                                                  _dt.date(2022, 10, 10),
                                                  _dt.date(2022, 10, 3)]))
        df = df.sort_index()
        df.index.name = "Date"
        df_bad = df.copy()
@@ -465,18 +523,17 @@

        # Run test

-        df_repaired = dat._fix_unit_mixups(df_bad, "1wk", tz_exchange, prepost=False)
+        df_repaired = hist._fix_unit_random_mixups(df_bad, "1wk", tz_exchange, prepost=False)

        # First test - no errors left
        for c in data_cols:
            try:
                self.assertTrue(_np.isclose(df_repaired[c], df[c], rtol=1e-2).all())
-            except:
+            except AssertionError:
                print(df[c])
                print(df_repaired[c])
                raise


        # Second test - all differences should be either ~1x or ~100x
        ratio = df_bad[data_cols].values / df[data_cols].values
        ratio = ratio.round(2)
@@ -491,24 +548,25 @@
        self.assertTrue("Repaired?" in df_repaired.columns)
        self.assertFalse(df_repaired["Repaired?"].isna().any())

-    def test_repair_100x_weekly_preSplit(self):
+    def test_repair_100x_random_weekly_preSplit(self):
        # PNL.L has a stock-split in 2022. Sometimes requesting data before 2022 is not split-adjusted.

        tkr = "PNL.L"
        dat = yf.Ticker(tkr, session=self.session)
        tz_exchange = dat.fast_info["timezone"]
+        hist = dat._lazy_load_price_history()

        data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
        df = _pd.DataFrame(data={"Open": [400, 398, 392.5, 417],
                                 "High": [421, 425, 419, 420.5],
                                 "Low": [400, 380.5, 376.5, 396],
                                 "Close": [410, 409.5, 402, 399],
-                                "Adj Close": [398.02, 397.53, 390.25, 387.34],
+                                "Adj Close": [393.91, 393.43, 386.22, 383.34],
                                 "Volume": [3232600, 3773900, 10835000, 4257900]},
                           index=_pd.to_datetime([_dt.date(2020, 3, 30),
                                                  _dt.date(2020, 3, 23),
                                                  _dt.date(2020, 3, 16),
                                                  _dt.date(2020, 3, 9)]))
        df = df.sort_index()
        # Simulate data missing split-adjustment:
        df[data_cols] *= 100.0
@@ -523,13 +581,13 @@
        df.index = df.index.tz_localize(tz_exchange)
        df_bad.index = df_bad.index.tz_localize(tz_exchange)

-        df_repaired = dat._fix_unit_mixups(df_bad, "1wk", tz_exchange, prepost=False)
+        df_repaired = hist._fix_unit_random_mixups(df_bad, "1wk", tz_exchange, prepost=False)

        # First test - no errors left
        for c in data_cols:
            try:
                self.assertTrue(_np.isclose(df_repaired[c], df[c], rtol=1e-2).all())
-            except:
+            except AssertionError:
                print("Mismatch in column", c)
                print("- df_repaired:")
                print(df_repaired[c])
@@ -551,10 +609,11 @@
        self.assertTrue("Repaired?" in df_repaired.columns)
        self.assertFalse(df_repaired["Repaired?"].isna().any())

-    def test_repair_100x_daily(self):
+    def test_repair_100x_random_daily(self):
        tkr = "PNL.L"
        dat = yf.Ticker(tkr, session=self.session)
        tz_exchange = dat.fast_info["timezone"]
+        hist = dat._lazy_load_price_history()

        data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
        df = _pd.DataFrame(data={"Open": [478, 476, 476, 472],
@@ -563,10 +622,10 @@
                                 "Close": [475.5, 475.5, 474.5, 475],
                                 "Adj Close": [475.5, 475.5, 474.5, 475],
                                 "Volume": [436414, 485947, 358067, 287620]},
                           index=_pd.to_datetime([_dt.date(2022, 11, 1),
                                                  _dt.date(2022, 10, 31),
                                                  _dt.date(2022, 10, 28),
                                                  _dt.date(2022, 10, 27)]))
        df = df.sort_index()
        df.index.name = "Date"
        df_bad = df.copy()
@@ -576,7 +635,7 @@
        df.index = df.index.tz_localize(tz_exchange)
        df_bad.index = df_bad.index.tz_localize(tz_exchange)

-        df_repaired = dat._fix_unit_mixups(df_bad, "1d", tz_exchange, prepost=False)
+        df_repaired = hist._fix_unit_random_mixups(df_bad, "1d", tz_exchange, prepost=False)

        # First test - no errors left
        for c in data_cols:
@@ -596,9 +655,65 @@
        self.assertTrue("Repaired?" in df_repaired.columns)
        self.assertFalse(df_repaired["Repaired?"].isna().any())

+    def test_repair_100x_block_daily(self):
+        # Some 100x errors are not sporadic.
+        # Sometimes Yahoo suddenly shifts from cents->$ from some recent date.
+
+        tkrs = ['AET.L', 'SSW.JO']
+        for tkr in tkrs:
+            for interval in ['1d', '1wk']:
+                dat = yf.Ticker(tkr, session=self.session)
+                tz_exchange = dat.fast_info["timezone"]
+                hist = dat._lazy_load_price_history()
+
+                data_cols = ["Low", "High", "Open", "Close", "Adj Close"]
+                _dp = os.path.dirname(__file__)
+                fp = os.path.join(_dp, "data", tkr.replace('.','-') + '-' + interval + "-100x-error.csv")
+                if not os.path.isfile(fp):
+                    continue
+                df_bad = _pd.read_csv(fp, index_col="Date")
+                df_bad.index = _pd.to_datetime(df_bad.index, utc=True).tz_convert(tz_exchange)
+                df_bad = df_bad.sort_index()
+
+                df = df_bad.copy()
+                fp = os.path.join(_dp, "data", tkr.replace('.','-') + '-' + interval + "-100x-error-fixed.csv")
+                df = _pd.read_csv(fp, index_col="Date")
+                df.index = _pd.to_datetime(df.index, utc=True).tz_convert(tz_exchange)
+                df = df.sort_index()
+
+                df_repaired = hist._fix_unit_switch(df_bad, interval, tz_exchange)
+                df_repaired = df_repaired.sort_index()
+
+                # First test - no errors left
+                for c in data_cols:
+                    try:
+                        self.assertTrue(_np.isclose(df_repaired[c], df[c], rtol=1e-2).all())
+                    except:
+                        print("- repaired:")
+                        print(df_repaired[c])
+                        print("- correct:")
+                        print(df[c])
+                        print(f"TEST FAIL on column '{c}' (tkr={tkr} interval={interval})")
+                        raise
+
+                # Second test - all differences should be either ~1x or ~100x
+                ratio = df_bad[data_cols].values / df[data_cols].values
+                ratio = ratio.round(2)
+                # - round near-100 ratio to 100:
+                f = ratio > 90
+                ratio[f] = (ratio[f] / 10).round().astype(int) * 10 # round ratio to nearest 10
+                # - now test
+                f_100 = (ratio == 100) | (ratio == 0.01)
+                f_1 = ratio == 1
+                self.assertTrue((f_100 | f_1).all())
+
+                self.assertTrue("Repaired?" in df_repaired.columns)
+                self.assertFalse(df_repaired["Repaired?"].isna().any())
+
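# Illustration: end users do not call hist._fix_unit_switch() and friends
# directly; every repair exercised above is triggered through the public
# history(repair=True) flag, and repaired rows are flagged in the "Repaired?"
# column the tests assert on. For example (assuming this range contains
# repairable SSW.JO data, as in the CSVs above):
import yfinance as yf

df = yf.Ticker("SSW.JO").history(start="2023-05-01", end="2023-06-10",
                                 interval="1d", repair=True)
print(df[df["Repaired?"]].head())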
    def test_repair_zeroes_daily(self):
        tkr = "BBIL.L"
        dat = yf.Ticker(tkr, session=self.session)
        hist = dat._lazy_load_price_history()
        tz_exchange = dat.fast_info["timezone"]

        df_bad = _pd.DataFrame(data={"Open": [0, 102.04, 102.04],
@@ -607,14 +722,14 @@
                                     "Close": [103.03, 102.05, 102.08],
                                     "Adj Close": [102.03, 102.05, 102.08],
                                     "Volume": [560, 137, 117]},
                               index=_pd.to_datetime([_dt.datetime(2022, 11, 1),
                                                      _dt.datetime(2022, 10, 31),
                                                      _dt.datetime(2022, 10, 30)]))
        df_bad = df_bad.sort_index()
        df_bad.index.name = "Date"
        df_bad.index = df_bad.index.tz_localize(tz_exchange)

-        repaired_df = dat._fix_zeroes(df_bad, "1d", tz_exchange, prepost=False)
+        repaired_df = hist._fix_zeroes(df_bad, "1d", tz_exchange, prepost=False)

        correct_df = df_bad.copy()
        correct_df.loc["2022-11-01", "Open"] = 102.080002
@@ -626,12 +741,50 @@
        self.assertTrue("Repaired?" in repaired_df.columns)
        self.assertFalse(repaired_df["Repaired?"].isna().any())

+    def test_repair_zeroes_daily_adjClose(self):
+        # Test that 'Adj Close' is reconstructed correctly,
+        # particularly when a dividend occurred within 1 day.
+
+        tkr = "INTC"
+        df = _pd.DataFrame(data={"Open": [28.95, 28.65, 29.55, 29.62, 29.25],
+                                 "High": [29.12, 29.27, 29.65, 31.17, 30.30],
+                                 "Low": [28.21, 28.43, 28.61, 29.53, 28.80],
+                                 "Close": [28.24, 29.05, 28.69, 30.32, 30.19],
+                                 "Adj Close": [28.12, 28.93, 28.57, 29.83, 29.70],
+                                 "Volume": [36e6, 51e6, 49e6, 58e6, 62e6],
+                                 "Dividends": [0, 0, 0.365, 0, 0]},
+                           index=_pd.to_datetime([_dt.datetime(2023, 2, 8),
+                                                  _dt.datetime(2023, 2, 7),
+                                                  _dt.datetime(2023, 2, 6),
+                                                  _dt.datetime(2023, 2, 3),
+                                                  _dt.datetime(2023, 2, 2)]))
+        df = df.sort_index()
+        df.index.name = "Date"
+        dat = yf.Ticker(tkr, session=self.session)
+        tz_exchange = dat.fast_info["timezone"]
+        df.index = df.index.tz_localize(tz_exchange)
+        hist = dat._lazy_load_price_history()
+
+        rtol = 5e-3
+        for i in [0, 1, 2]:
+            df_slice = df.iloc[i:i+3]
+            for j in range(3):
+                df_slice_bad = df_slice.copy()
+                df_slice_bad.loc[df_slice_bad.index[j], "Adj Close"] = 0.0
+
+                df_slice_bad_repaired = hist._fix_zeroes(df_slice_bad, "1d", tz_exchange, prepost=False)
+                for c in ["Close", "Adj Close"]:
+                    self.assertTrue(_np.isclose(df_slice_bad_repaired[c], df_slice[c], rtol=rtol).all())
+                self.assertTrue("Repaired?" in df_slice_bad_repaired.columns)
+                self.assertFalse(df_slice_bad_repaired["Repaired?"].isna().any())
+
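# Illustration: the relationship test_repair_zeroes_daily_adjClose relies on.
# Around an ex-dividend date, Close and Adj Close are linked by a per-day
# factor of roughly 1 - dividend/previous_close, compounded backwards. A
# minimal sketch of rebuilding Adj Close relative to the final row
# (hypothetical helper, not yfinance's internal code):
import pandas as _pd

def rebuild_adj_close(close: _pd.Series, dividends: _pd.Series) -> _pd.Series:
    f = (1.0 - dividends / close.shift(1)).fillna(1.0)  # adjustment on each ex-div day
    # factor for day t = product of adjustments occurring after t
    cum = f.shift(-1, fill_value=1.0)[::-1].cumprod()[::-1]
    return close * cum

# With the INTC rows above: f on 2023-02-06 = 1 - 0.365/30.32 ~= 0.988, so the
# rebuilt Adj Close on 2023-02-03 ~= 30.32 * 0.988 ~= 29.96. The test data's
# 29.83 differs slightly because Yahoo also folds in dividends paid after the
# window shown.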
    def test_repair_zeroes_hourly(self):
        tkr = "INTC"
        dat = yf.Ticker(tkr, session=self.session)
        tz_exchange = dat.fast_info["timezone"]
        hist = dat._lazy_load_price_history()

-        correct_df = dat.history(period="1wk", interval="1h", auto_adjust=False, repair=True)
+        correct_df = hist.history(period="5d", interval="1h", auto_adjust=False, repair=True)

        df_bad = correct_df.copy()
        bad_idx = correct_df.index[10]
@@ -642,12 +795,12 @@
        df_bad.loc[bad_idx, "Adj Close"] = _np.nan
        df_bad.loc[bad_idx, "Volume"] = 0

-        repaired_df = dat._fix_zeroes(df_bad, "1h", tz_exchange, prepost=False)
+        repaired_df = hist._fix_zeroes(df_bad, "1h", tz_exchange, prepost=False)

        for c in ["Open", "Low", "High", "Close"]:
            try:
                self.assertTrue(_np.isclose(repaired_df[c], correct_df[c], rtol=1e-7).all())
-            except:
+            except AssertionError:
                print("COLUMN", c)
                print("- repaired_df")
                print(repaired_df)
@@ -660,5 +813,134 @@
        self.assertTrue("Repaired?" in repaired_df.columns)
        self.assertFalse(repaired_df["Repaired?"].isna().any())

+    def test_repair_bad_stock_splits(self):
+        # Stocks that split in 2022 but no problems in Yahoo data,
+        # so repair should change nothing
+        good_tkrs = ['AMZN', 'DXCM', 'FTNT', 'GOOG', 'GME', 'PANW', 'SHOP', 'TSLA']
+        good_tkrs += ['AEI', 'GHI', 'IRON', 'LXU', 'NUZE', 'RSLS', 'TISI']
+        good_tkrs += ['BOL.ST', 'TUI1.DE']
+        intervals = ['1d', '1wk', '1mo', '3mo']
+        for tkr in good_tkrs:
+            for interval in intervals:
+                dat = yf.Ticker(tkr, session=self.session)
+                tz_exchange = dat.fast_info["timezone"]
+                hist = dat._lazy_load_price_history()
+
+                _dp = os.path.dirname(__file__)
+                df_good = dat.history(start='2020-01-01', end=_dt.date.today(), interval=interval, auto_adjust=False)
+
+                repaired_df = hist._fix_bad_stock_splits(df_good, interval, tz_exchange)
+
+                # Expect no change from repair
+                df_good = df_good.sort_index()
+                repaired_df = repaired_df.sort_index()
+                for c in ["Open", "Low", "High", "Close", "Adj Close", "Volume"]:
+                    try:
+                        self.assertTrue((repaired_df[c].to_numpy() == df_good[c].to_numpy()).all())
+                    except:
+                        print(f"tkr={tkr} interval={interval} COLUMN={c}")
+                        df_dbg = df_good[[c]].join(repaired_df[[c]], lsuffix='.good', rsuffix='.repaired')
+                        f_diff = repaired_df[c].to_numpy() != df_good[c].to_numpy()
+                        print(df_dbg[f_diff | _np.roll(f_diff, 1) | _np.roll(f_diff, -1)])
+                        raise
+
+        bad_tkrs = ['4063.T', 'ALPHA.PA', 'AV.L', 'CNE.L', 'MOB.ST', 'SPM.MI']
+        bad_tkrs.append('LA.V')  # special case - stock split error is 3 years ago! why not fixed?
+        for tkr in bad_tkrs:
+            dat = yf.Ticker(tkr, session=self.session)
+            tz_exchange = dat.fast_info["timezone"]
+            hist = dat._lazy_load_price_history()
+
+            _dp = os.path.dirname(__file__)
+            interval = '1d'
+            fp = os.path.join(_dp, "data", tkr.replace('.','-')+'-'+interval+"-bad-stock-split.csv")
+            if not os.path.isfile(fp):
+                interval = '1wk'
+                fp = os.path.join(_dp, "data", tkr.replace('.','-')+'-'+interval+"-bad-stock-split.csv")
+            df_bad = _pd.read_csv(fp, index_col="Date")
+            df_bad.index = _pd.to_datetime(df_bad.index, utc=True)
+
+            repaired_df = hist._fix_bad_stock_splits(df_bad, "1d", tz_exchange)
+
+            fp = os.path.join(_dp, "data", tkr.replace('.','-')+'-'+interval+"-bad-stock-split-fixed.csv")
+            correct_df = _pd.read_csv(fp, index_col="Date")
+            correct_df.index = _pd.to_datetime(correct_df.index)
+
+            repaired_df = repaired_df.sort_index()
+            correct_df = correct_df.sort_index()
+            for c in ["Open", "Low", "High", "Close", "Adj Close", "Volume"]:
+                try:
+                    self.assertTrue(_np.isclose(repaired_df[c], correct_df[c], rtol=5e-6).all())
+                except AssertionError:
+                    print(f"tkr={tkr} COLUMN={c}")
+                    # print("- repaired_df")
+                    # print(repaired_df)
+                    # print("- correct_df[c]:")
+                    # print(correct_df[c])
+                    # print("- diff:")
+                    # print(repaired_df[c] - correct_df[c])
+                    raise
+
+        # Had very high price volatility in Jan-2021 around split date that could
+        # be mistaken for missing stock split adjustment. And old logic did think
+        # column 'High' required fixing - wrong!
+        sketchy_tkrs = ['FIZZ']
+        intervals = ['1wk']
+        for tkr in sketchy_tkrs:
+            for interval in intervals:
+                dat = yf.Ticker(tkr, session=self.session)
+                tz_exchange = dat.fast_info["timezone"]
+                hist = dat._lazy_load_price_history()
+
+                _dp = os.path.dirname(__file__)
+                df_good = hist.history(start='2020-11-30', end='2021-04-01', interval=interval, auto_adjust=False)
+
+                repaired_df = hist._fix_bad_stock_splits(df_good, interval, tz_exchange)
+
+                # Expect no change from repair
+                df_good = df_good.sort_index()
+                repaired_df = repaired_df.sort_index()
+                for c in ["Open", "Low", "High", "Close", "Adj Close", "Volume"]:
+                    try:
+                        self.assertTrue((repaired_df[c].to_numpy() == df_good[c].to_numpy()).all())
+                    except AssertionError:
+                        print(f"tkr={tkr} interval={interval} COLUMN={c}")
+                        df_dbg = df_good[[c]].join(repaired_df[[c]], lsuffix='.good', rsuffix='.repaired')
+                        f_diff = repaired_df[c].to_numpy() != df_good[c].to_numpy()
+                        print(df_dbg[f_diff | _np.roll(f_diff, 1) | _np.roll(f_diff, -1)])
+                        raise
+
+    def test_repair_missing_div_adjust(self):
+        tkr = '8TRA.DE'
+
+        dat = yf.Ticker(tkr, session=self.session)
+        tz_exchange = dat.fast_info["timezone"]
+        hist = dat._lazy_load_price_history()
+
+        _dp = os.path.dirname(__file__)
+        df_bad = _pd.read_csv(os.path.join(_dp, "data", tkr.replace('.','-')+"-1d-missing-div-adjust.csv"), index_col="Date")
+        df_bad.index = _pd.to_datetime(df_bad.index)
+
+        repaired_df = hist._fix_missing_div_adjust(df_bad, "1d", tz_exchange)
+
+        correct_df = _pd.read_csv(os.path.join(_dp, "data", tkr.replace('.','-')+"-1d-missing-div-adjust-fixed.csv"), index_col="Date")
+        correct_df.index = _pd.to_datetime(correct_df.index)
+
+        repaired_df = repaired_df.sort_index()
+        correct_df = correct_df.sort_index()
+        for c in ["Open", "Low", "High", "Close", "Adj Close", "Volume"]:
+            try:
+                self.assertTrue(_np.isclose(repaired_df[c], correct_df[c], rtol=5e-6).all())
+            except:
+                print(f"tkr={tkr} COLUMN={c}")
+                print("- repaired_df")
+                print(repaired_df)
+                print("- correct_df[c]:")
+                print(correct_df[c])
+                print("- diff:")
+                print(repaired_df[c] - correct_df[c])
+                raise


if __name__ == '__main__':
    unittest.main()
952 tests/test_ticker.py (Normal file)
@@ -0,0 +1,952 @@
"""
|
||||
Tests for Ticker
|
||||
|
||||
To run all tests in suite from commandline:
|
||||
python -m unittest tests.ticker
|
||||
|
||||
Specific test class:
|
||||
python -m unittest tests.ticker.TestTicker
|
||||
|
||||
"""
|
||||
import pandas as pd
|
||||
|
||||
from .context import yfinance as yf
|
||||
from .context import session_gbl
|
||||
from yfinance.exceptions import YFChartError, YFInvalidPeriodError, YFNotImplementedError, YFTickerMissingError, YFTzMissingError
|
||||
|
||||
|
||||
import unittest
|
||||
import requests_cache
|
||||
from typing import Union, Any, get_args, _GenericAlias
|
||||
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
|
||||
|
||||
ticker_attributes = (
|
||||
("major_holders", pd.DataFrame),
|
||||
("institutional_holders", pd.DataFrame),
|
||||
("mutualfund_holders", pd.DataFrame),
|
||||
("insider_transactions", pd.DataFrame),
|
||||
("insider_purchases", pd.DataFrame),
|
||||
("insider_roster_holders", pd.DataFrame),
|
||||
("splits", pd.Series),
|
||||
("actions", pd.DataFrame),
|
||||
("shares", pd.DataFrame),
|
||||
("info", dict),
|
||||
("calendar", dict),
|
||||
("recommendations", Union[pd.DataFrame, dict]),
|
||||
("recommendations_summary", Union[pd.DataFrame, dict]),
|
||||
("upgrades_downgrades", Union[pd.DataFrame, dict]),
|
||||
("earnings", pd.DataFrame),
|
||||
("quarterly_earnings", pd.DataFrame),
|
||||
("quarterly_cashflow", pd.DataFrame),
|
||||
("cashflow", pd.DataFrame),
|
||||
("quarterly_balance_sheet", pd.DataFrame),
|
||||
("balance_sheet", pd.DataFrame),
|
||||
("quarterly_income_stmt", pd.DataFrame),
|
||||
("income_stmt", pd.DataFrame),
|
||||
("analyst_price_target", pd.DataFrame),
|
||||
("revenue_forecasts", pd.DataFrame),
|
||||
("sustainability", pd.DataFrame),
|
||||
("options", tuple),
|
||||
("news", Any),
|
||||
("earnings_trend", pd.DataFrame),
|
||||
("earnings_dates", pd.DataFrame),
|
||||
("earnings_forecasts", pd.DataFrame),
|
||||
)
|
||||
|
||||
def assert_attribute_type(testClass: unittest.TestCase, instance, attribute_name, expected_type):
    try:
        attribute = getattr(instance, attribute_name)
        if attribute is not None and expected_type is not Any:
            err_msg = f'{attribute_name} type is {type(attribute)} not {expected_type}'
            if isinstance(expected_type, _GenericAlias) and expected_type.__origin__ is Union:
                allowed_types = get_args(expected_type)
                testClass.assertTrue(isinstance(attribute, allowed_types), err_msg)
            else:
                testClass.assertEqual(type(attribute), expected_type, err_msg)
    except Exception:
        testClass.assertRaises(
            YFNotImplementedError, lambda: getattr(instance, attribute_name)
        )

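# Illustration: how the Union branch in assert_attribute_type above works.
# typing.get_args unpacks Union[...] into a tuple that isinstance accepts;
# newer Pythons also offer typing.get_origin as a public alternative to
# checking _GenericAlias.__origin__. A small self-contained example:
from typing import Union, get_args, get_origin

import pandas as pd

expected = Union[pd.DataFrame, dict]
assert get_origin(expected) is Union
allowed = get_args(expected)  # (pd.DataFrame, dict)
assert isinstance({}, allowed)
assert not isinstance([], allowed)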
class TestTicker(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

        cls.proxy = None

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def test_getTz(self):
        tkrs = ["IMP.JO", "BHG.JO", "SSW.JO", "BP.L", "INTC"]
        for tkr in tkrs:
            # First step: remove ticker from tz-cache
            yf.cache.get_tz_cache().store(tkr, None)

            # Test:
            dat = yf.Ticker(tkr, session=self.session)
            tz = dat._get_ticker_tz(proxy=None, timeout=5)

            self.assertIsNotNone(tz)

    def test_badTicker(self):
        # Check yfinance doesn't die when ticker delisted

        tkr = "DJI"  # typo of "^DJI"
        dat = yf.Ticker(tkr, session=self.session)

        dat.history(period="5d")
        dat.history(start="2022-01-01")
        dat.history(start="2022-01-01", end="2022-03-01")
        yf.download([tkr], period="5d", threads=False, ignore_tz=False)
        yf.download([tkr], period="5d", threads=True, ignore_tz=False)
        yf.download([tkr], period="5d", threads=False, ignore_tz=True)
        yf.download([tkr], period="5d", threads=True, ignore_tz=True)

        for k in dat.fast_info:
            dat.fast_info[k]

        for attribute_name, attribute_type in ticker_attributes:
            assert_attribute_type(self, dat, attribute_name, attribute_type)

        with self.assertRaises(YFNotImplementedError):
            assert isinstance(dat.earnings, pd.Series)
            assert dat.earnings.empty
        assert isinstance(dat.dividends, pd.Series)
        assert dat.dividends.empty
        assert isinstance(dat.splits, pd.Series)
        assert dat.splits.empty
        assert isinstance(dat.capital_gains, pd.Series)
        assert dat.capital_gains.empty
        with self.assertRaises(YFNotImplementedError):
            assert isinstance(dat.shares, pd.DataFrame)
            assert dat.shares.empty
        assert isinstance(dat.actions, pd.DataFrame)
        assert dat.actions.empty

    def test_invalid_period(self):
        tkr = 'VALE'
        dat = yf.Ticker(tkr, session=self.session)
        with self.assertRaises(YFInvalidPeriodError):
            dat.history(period="2wks", interval="1d", raise_errors=True)
        with self.assertRaises(YFInvalidPeriodError):
            dat.history(period="2mo", interval="1d", raise_errors=True)


    def test_prices_missing(self):
        # this test will need to be updated every time someone wants to run a test
        # hard to find a ticker that matches this error other than options
        # META call option, 2024 April 26th @ strike of 180000
        tkr = 'META240426C00180000'
        dat = yf.Ticker(tkr, session=self.session)
        with self.assertRaises(YFChartError):
            dat.history(period="5d", interval="1m", raise_errors=True)

    def test_ticker_missing(self):
        tkr = 'ATVI'
        dat = yf.Ticker(tkr, session=self.session)
        # A missing ticker can trigger either a niche error or the generalized error
        with self.assertRaises((YFTickerMissingError, YFTzMissingError, YFChartError)):
            dat.history(period="3mo", interval="1d", raise_errors=True)

    def test_goodTicker(self):
        # that yfinance works when full api is called on same instance of ticker

        tkrs = ["IBM"]
        tkrs.append("QCSTIX")  # weird ticker, no price history but has previous close
        for tkr in tkrs:
            dat = yf.Ticker(tkr, session=self.session)

            dat.history(period="5d")
            dat.history(start="2022-01-01")
            dat.history(start="2022-01-01", end="2022-03-01")
            yf.download([tkr], period="5d", threads=False, ignore_tz=False)
            yf.download([tkr], period="5d", threads=True, ignore_tz=False)
            yf.download([tkr], period="5d", threads=False, ignore_tz=True)
            yf.download([tkr], period="5d", threads=True, ignore_tz=True)

            for k in dat.fast_info:
                dat.fast_info[k]

            for attribute_name, attribute_type in ticker_attributes:
                assert_attribute_type(self, dat, attribute_name, attribute_type)

    def test_goodTicker_withProxy(self):
        tkr = "IBM"
        dat = yf.Ticker(tkr, session=self.session, proxy=self.proxy)

        dat._fetch_ticker_tz(proxy=None, timeout=5)
        dat._get_ticker_tz(proxy=None, timeout=5)
        dat.history(period="5d")

        for attribute_name, attribute_type in ticker_attributes:
            assert_attribute_type(self, dat, attribute_name, attribute_type)


class TestTickerHistory(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        # use a ticker that has dividends
        self.symbol = "IBM"
        self.ticker = yf.Ticker(self.symbol, session=self.session)

        self.symbols = ["AMZN", "MSFT", "NVDA"]

    def tearDown(self):
        self.ticker = None

    def test_history(self):
        md = self.ticker.history_metadata
        self.assertIn("IBM", md.values(), "metadata missing")
        data = self.ticker.history("1y")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    def test_download(self):
        for t in [False, True]:
            for i in [False, True]:
                data = yf.download(self.symbols, threads=t, ignore_tz=i)
                self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
                self.assertFalse(data.empty, "data is empty")

    def test_no_expensive_calls_introduced(self):
        """
        Ensure that fetching price history has not introduced more calls to Yahoo
        than absolutely necessary: scraping endpoints other than
        "query2.finance.yahoo.com/v8/finance/chart" quickly trigger Yahoo's
        spam block during bulk downloads of history data.
        """
        symbol = "GOOGL"
        period = "1y"
        with requests_cache.CachedSession(backend="memory") as session:
            ticker = yf.Ticker(symbol, session=session)
            ticker.history(period=period)
            actual_urls_called = [r.url for r in session.cache.filter()]

        # Remove the 'crumb' and 'cookie' arguments, which vary between sessions
        for i in range(len(actual_urls_called)):
            u = actual_urls_called[i]
            parsed_url = urlparse(u)
            query_params = parse_qs(parsed_url.query)
            query_params.pop('crumb', None)
            query_params.pop('cookie', None)
            u = urlunparse(parsed_url._replace(query=urlencode(query_params, doseq=True)))
            actual_urls_called[i] = u
        actual_urls_called = tuple(actual_urls_called)

        expected_urls = (
            f"https://query2.finance.yahoo.com/v8/finance/chart/{symbol}?events=div%2Csplits%2CcapitalGains&includePrePost=False&interval=1d&range={period}",
        )
        self.assertEqual(
            expected_urls,
            actual_urls_called,
            "Different URL(s) than expected used to fetch history."
        )

    def test_dividends(self):
        data = self.ticker.dividends
        self.assertIsInstance(data, pd.Series, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    def test_splits(self):
        data = self.ticker.splits
        self.assertIsInstance(data, pd.Series, "data has wrong type")
        # self.assertFalse(data.empty, "data is empty")

    def test_actions(self):
        data = self.ticker.actions
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")


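# Editor's sketch (not part of the diff): the crumb/cookie stripping inside
# test_no_expensive_calls_introduced above could be factored into a helper.
# This hypothetical function only illustrates the urlparse/parse_qs round-trip
# that the test already uses:
def strip_volatile_params(url, volatile=('crumb', 'cookie')):
    parsed = urlparse(url)
    params = parse_qs(parsed.query)
    for key in volatile:
        params.pop(key, None)  # drop per-session params so URLs compare stably
    return urlunparse(parsed._replace(query=urlencode(params, doseq=True)))
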
class TestTickerEarnings(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

    def tearDown(self):
        self.ticker = None

    def test_earnings_dates(self):
        data = self.ticker.earnings_dates
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    def test_earnings_dates_with_limit(self):
        # use ticker with lots of historic earnings
        ticker = yf.Ticker("IBM")
        limit = 110
        data = ticker.get_earnings_dates(limit=limit)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        self.assertEqual(len(data), limit, "Wrong number of rows")

        data_cached = ticker.get_earnings_dates(limit=limit)
        self.assertIs(data, data_cached, "data not cached")

    # Below will fail because not ported to Yahoo API

    # def test_earnings(self):
    #     data = self.ticker.earnings
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.earnings
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_quarterly_earnings(self):
    #     data = self.ticker.quarterly_earnings
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.quarterly_earnings
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_earnings_forecasts(self):
    #     data = self.ticker.earnings_forecasts
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.earnings_forecasts
    #     self.assertIs(data, data_cached, "data not cached")

    #     data_cached = self.ticker.earnings_dates
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_earnings_trend(self):
    #     data = self.ticker.earnings_trend
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.earnings_trend
    #     self.assertIs(data, data_cached, "data not cached")


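# Editor's note (illustrative only, not yfinance's actual implementation): the
# assertIs(data, data_cached) checks in these classes depend on the Ticker
# memoizing each result after its first fetch, roughly like this hypothetical pattern:
class _MemoizedFetch:
    def __init__(self, fetch):
        self._fetch = fetch
        self._result = None

    def get(self):
        if self._result is None:
            self._result = self._fetch()  # fetched once from Yahoo...
        return self._result               # ...then the same object is returned
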
class TestTickerHolders(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

    def tearDown(self):
        self.ticker = None

    def test_major_holders(self):
        data = self.ticker.major_holders
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.major_holders
        self.assertIs(data, data_cached, "data not cached")

    def test_institutional_holders(self):
        data = self.ticker.institutional_holders
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.institutional_holders
        self.assertIs(data, data_cached, "data not cached")

    def test_mutualfund_holders(self):
        data = self.ticker.mutualfund_holders
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.mutualfund_holders
        self.assertIs(data, data_cached, "data not cached")

    def test_insider_transactions(self):
        data = self.ticker.insider_transactions
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.insider_transactions
        self.assertIs(data, data_cached, "data not cached")

    def test_insider_purchases(self):
        data = self.ticker.insider_purchases
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.insider_purchases
        self.assertIs(data, data_cached, "data not cached")

    def test_insider_roster_holders(self):
        data = self.ticker.insider_roster_holders
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.insider_roster_holders
        self.assertIs(data, data_cached, "data not cached")


class TestTickerMiscFinancials(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

        # For ticker 'BSE.AX' (and others), Yahoo does not return full quarterly
        # financials (usually cash-flow) with all entries; instead it returns a
        # smaller version in a different data store.
        self.ticker_old_fmt = yf.Ticker("BSE.AX", session=self.session)

    def tearDown(self):
        self.ticker = None

    def test_isin(self):
        data = self.ticker.isin
        self.assertIsInstance(data, str, "data has wrong type")
        self.assertEqual("ARDEUT116159", data, "ISIN value is wrong")

        data_cached = self.ticker.isin
        self.assertIs(data, data_cached, "data not cached")

    def test_options(self):
        data = self.ticker.options
        self.assertIsInstance(data, tuple, "data has wrong type")
        self.assertTrue(len(data) > 1, "data is empty")

    def test_shares_full(self):
        data = self.ticker.get_shares_full()
        self.assertIsInstance(data, pd.Series, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    def test_income_statement(self):
        expected_keys = ["Total Revenue", "Basic EPS"]
        expected_periods_days = 365

        # Test contents of table
        data = self.ticker.get_income_stmt(pretty=True)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

        # Test property defaults
        data2 = self.ticker.income_stmt
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_income_stmt(pretty=False)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_income_stmt(as_dict=True)
        self.assertIsInstance(data, dict, "data has wrong type")

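    # Editor's note: the period checks in these statement tests rely on consecutive
    # columns being ~365 days apart for annual data and ~91 (365//4) for quarterly,
    # with a 20-day tolerance for fiscal-calendar drift. Worked example:
    #     cols = pd.to_datetime(["2023-12-31", "2022-12-31"])
    #     period = abs((cols[0] - cols[1]).days)   # == 365
    #     assert abs(period - 365) < 20            # annual spacing accepted
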
    def test_quarterly_income_statement(self):
        expected_keys = ["Total Revenue", "Basic EPS"]
        expected_periods_days = 365//4

        # Test contents of table
        data = self.ticker.get_income_stmt(pretty=True, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

        # Test property defaults
        data2 = self.ticker.quarterly_income_stmt
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_income_stmt(pretty=False, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_income_stmt(as_dict=True, freq="quarterly")
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_balance_sheet(self):
        expected_keys = ["Total Assets", "Net PPE"]
        expected_periods_days = 365

        # Test contents of table
        data = self.ticker.get_balance_sheet(pretty=True)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

        # Test property defaults
        data2 = self.ticker.balance_sheet
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_balance_sheet(pretty=False)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_balance_sheet(as_dict=True)
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_quarterly_balance_sheet(self):
        expected_keys = ["Total Assets", "Net PPE"]
        expected_periods_days = 365//4

        # Test contents of table
        data = self.ticker.get_balance_sheet(pretty=True, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

        # Test property defaults
        data2 = self.ticker.quarterly_balance_sheet
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_balance_sheet(pretty=False, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_balance_sheet(as_dict=True, freq="quarterly")
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_cash_flow(self):
        expected_keys = ["Operating Cash Flow", "Net PPE Purchase And Sale"]
        expected_periods_days = 365

        # Test contents of table
        data = self.ticker.get_cashflow(pretty=True)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

        # Test property defaults
        data2 = self.ticker.cashflow
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_cashflow(pretty=False)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_cashflow(as_dict=True)
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_quarterly_cash_flow(self):
        expected_keys = ["Operating Cash Flow", "Net PPE Purchase And Sale"]
        expected_periods_days = 365//4

        # Test contents of table
        data = self.ticker.get_cashflow(pretty=True, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

        # Test property defaults
        data2 = self.ticker.quarterly_cashflow
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_cashflow(pretty=False, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_cashflow(as_dict=True, freq="quarterly")
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_income_alt_names(self):
        i1 = self.ticker.income_stmt
        i2 = self.ticker.incomestmt
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.financials
        self.assertTrue(i1.equals(i3))

        i1 = self.ticker.get_income_stmt()
        i2 = self.ticker.get_incomestmt()
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.get_financials()
        self.assertTrue(i1.equals(i3))

        i1 = self.ticker.quarterly_income_stmt
        i2 = self.ticker.quarterly_incomestmt
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.quarterly_financials
        self.assertTrue(i1.equals(i3))

        i1 = self.ticker.get_income_stmt(freq="quarterly")
        i2 = self.ticker.get_incomestmt(freq="quarterly")
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.get_financials(freq="quarterly")
        self.assertTrue(i1.equals(i3))

    def test_balance_sheet_alt_names(self):
        i1 = self.ticker.balance_sheet
        i2 = self.ticker.balancesheet
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_balance_sheet()
        i2 = self.ticker.get_balancesheet()
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.quarterly_balance_sheet
        i2 = self.ticker.quarterly_balancesheet
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_balance_sheet(freq="quarterly")
        i2 = self.ticker.get_balancesheet(freq="quarterly")
        self.assertTrue(i1.equals(i2))

    def test_cash_flow_alt_names(self):
        i1 = self.ticker.cash_flow
        i2 = self.ticker.cashflow
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_cash_flow()
        i2 = self.ticker.get_cashflow()
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.quarterly_cash_flow
        i2 = self.ticker.quarterly_cashflow
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_cash_flow(freq="quarterly")
        i2 = self.ticker.get_cashflow(freq="quarterly")
        self.assertTrue(i1.equals(i2))

    def test_bad_freq_value_raises_exception(self):
        self.assertRaises(ValueError, lambda: self.ticker.get_cashflow(freq="badarg"))

    def test_calendar(self):
        data = self.ticker.calendar
        self.assertIsInstance(data, dict, "data has wrong type")
        self.assertTrue(len(data) > 0, "data is empty")
        for k in ["Earnings Date", "Earnings Average", "Earnings Low", "Earnings High",
                  "Revenue Average", "Revenue Low", "Revenue High"]:
            self.assertIn(k, data.keys(), f"data missing expected key '{k}'")
        # dividend date is not available for tested ticker GOOGL
        if self.ticker.ticker != "GOOGL":
            self.assertIn("Dividend Date", data.keys(), "data missing expected key")
        # ex-dividend date is not always available
        data_cached = self.ticker.calendar
        self.assertIs(data, data_cached, "data not cached")

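    # Editor's usage sketch (hypothetical, not part of the diff): reading the
    # analyst earnings window out of the calendar dict whose keys are asserted above:
    #     cal = yf.Ticker("GOOGL").calendar
    #     earnings_window = cal.get("Earnings Date")   # typically a list of dates
    #     eps_consensus = cal.get("Earnings Average")  # analyst EPS estimate, may be None
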
    # Below will fail because not ported to Yahoo API

    # def test_sustainability(self):
    #     data = self.ticker.sustainability
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.sustainability
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_shares(self):
    #     data = self.ticker.shares
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")


class TestTickerAnalysts(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

    def tearDown(self):
        self.ticker = None

    def test_recommendations(self):
        data = self.ticker.recommendations
        data_summary = self.ticker.recommendations_summary
        self.assertTrue(data.equals(data_summary))
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.recommendations
        self.assertIs(data, data_cached, "data not cached")

    def test_recommendations_summary(self):  # currently alias for recommendations
        data = self.ticker.recommendations_summary
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.recommendations_summary
        self.assertIs(data, data_cached, "data not cached")

    def test_upgrades_downgrades(self):
        data = self.ticker.upgrades_downgrades
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        self.assertTrue(len(data.columns) == 4, "data has wrong number of columns")
        self.assertEqual(data.columns.values.tolist(), ['Firm', 'ToGrade', 'FromGrade', 'Action'], "data has wrong column names")
        self.assertIsInstance(data.index, pd.DatetimeIndex, "data has wrong index type")

        data_cached = self.ticker.upgrades_downgrades
        self.assertIs(data, data_cached, "data not cached")

    # Below will fail because not ported to Yahoo API

    # def test_analyst_price_target(self):
    #     data = self.ticker.analyst_price_target
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.analyst_price_target
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_revenue_forecasts(self):
    #     data = self.ticker.revenue_forecasts
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.revenue_forecasts
    #     self.assertIs(data, data_cached, "data not cached")


class TestTickerInfo(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.symbols = []
        self.symbols += ["ESLT.TA", "BP.L", "GOOGL"]
        self.symbols.append("QCSTIX")  # good for testing, doesn't trade
        self.symbols += ["BTC-USD", "IWO", "VFINX", "^GSPC"]
        self.symbols += ["SOKE.IS", "ADS.DE"]  # detected bugs
        self.tickers = [yf.Ticker(s, session=self.session) for s in self.symbols]

    def tearDown(self):
        self.tickers = None

    def test_fast_info(self):
        f = yf.Ticker("AAPL", session=self.session).fast_info
        for k in f:
            self.assertIsNotNone(f[k])

    def test_info(self):
        data = self.tickers[0].info
        self.assertIsInstance(data, dict, "data has wrong type")
        expected_keys = ['industry', 'currentPrice', 'exchange', 'floatShares', 'companyOfficers', 'bid']
        for k in expected_keys:
            self.assertIn(k, data.keys(), f"Did not find expected key '{k}' in info dict")
        self.assertEqual(self.symbols[0], data["symbol"], "Wrong symbol value in info dict")

    def test_complementary_info(self):
        # This test is to check that we can successfully retrieve the trailing PEG ratio

        # We don't expect this one to have a trailing PEG ratio
        data1 = self.tickers[0].info
        self.assertIsNone(data1['trailingPegRatio'])

        # This one should have a trailing PEG ratio
        data2 = self.tickers[2].info
        self.assertIsInstance(data2['trailingPegRatio'], float)

    # def test_fast_info_matches_info(self):
    #     fast_info_keys = set()
    #     for ticker in self.tickers:
    #         fast_info_keys.update(set(ticker.fast_info.keys()))
    #     fast_info_keys = sorted(list(fast_info_keys))

    #     key_rename_map = {}
    #     key_rename_map["currency"] = "currency"
    #     key_rename_map["quote_type"] = "quoteType"
    #     key_rename_map["timezone"] = "exchangeTimezoneName"

    #     key_rename_map["last_price"] = ["currentPrice", "regularMarketPrice"]
    #     key_rename_map["open"] = ["open", "regularMarketOpen"]
    #     key_rename_map["day_high"] = ["dayHigh", "regularMarketDayHigh"]
    #     key_rename_map["day_low"] = ["dayLow", "regularMarketDayLow"]
    #     key_rename_map["previous_close"] = ["previousClose"]
    #     key_rename_map["regular_market_previous_close"] = ["regularMarketPreviousClose"]

    #     key_rename_map["fifty_day_average"] = "fiftyDayAverage"
    #     key_rename_map["two_hundred_day_average"] = "twoHundredDayAverage"
    #     key_rename_map["year_change"] = ["52WeekChange", "fiftyTwoWeekChange"]
    #     key_rename_map["year_high"] = "fiftyTwoWeekHigh"
    #     key_rename_map["year_low"] = "fiftyTwoWeekLow"

    #     key_rename_map["last_volume"] = ["volume", "regularMarketVolume"]
    #     key_rename_map["ten_day_average_volume"] = ["averageVolume10days", "averageDailyVolume10Day"]
    #     key_rename_map["three_month_average_volume"] = "averageVolume"

    #     key_rename_map["market_cap"] = "marketCap"
    #     key_rename_map["shares"] = "sharesOutstanding"

    #     for k in list(key_rename_map.keys()):
    #         if '_' in k:
    #             key_rename_map[yf.utils.snake_case_2_camelCase(k)] = key_rename_map[k]

    #     # Note: share count items in info[] are bad. Sometimes the float > outstanding!
    #     # So often fast_info["shares"] does not match.
    #     # Why isn't fast_info["shares"] wrong? Because using it to calculate market cap is always correct.
    #     bad_keys = {"shares"}

    #     # Loose tolerance for averages; no idea why these don't match info[]. Is info wrong?
    #     custom_tolerances = {}
    #     custom_tolerances["year_change"] = 1.0
    #     # custom_tolerances["ten_day_average_volume"] = 1e-3
    #     custom_tolerances["ten_day_average_volume"] = 1e-1
    #     # custom_tolerances["three_month_average_volume"] = 1e-2
    #     custom_tolerances["three_month_average_volume"] = 5e-1
    #     custom_tolerances["fifty_day_average"] = 1e-2
    #     custom_tolerances["two_hundred_day_average"] = 1e-2
    #     for k in list(custom_tolerances.keys()):
    #         if '_' in k:
    #             custom_tolerances[yf.utils.snake_case_2_camelCase(k)] = custom_tolerances[k]

    #     for k in fast_info_keys:
    #         if k in key_rename_map:
    #             k2 = key_rename_map[k]
    #         else:
    #             k2 = k

    #         if not isinstance(k2, list):
    #             k2 = [k2]

    #         for m in k2:
    #             for ticker in self.tickers:
    #                 if not m in ticker.info:
    #                     # print(f"symbol={ticker.ticker}: fast_info key '{k}' mapped to info key '{m}' but not present in info")
    #                     continue

    #                 if k in bad_keys:
    #                     continue

    #                 if k in custom_tolerances:
    #                     rtol = custom_tolerances[k]
    #                 else:
    #                     rtol = 5e-3
    #                     # rtol = 1e-4

    #                 correct = ticker.info[m]
    #                 test = ticker.fast_info[k]
    #                 # print(f"Testing: symbol={ticker.ticker} m={m} k={k}: test={test} vs correct={correct}")
    #                 if k in ["market_cap","marketCap"] and ticker.fast_info["currency"] in ["GBp", "ILA"]:
    #                     # Adjust for currency to match Yahoo:
    #                     test *= 0.01
    #                 try:
    #                     if correct is None:
    #                         self.assertTrue(test is None or (not np.isnan(test)), f"{k}: {test} must be None or real value because correct={correct}")
    #                     elif isinstance(test, float) or isinstance(correct, int):
    #                         self.assertTrue(np.isclose(test, correct, rtol=rtol), f"{ticker.ticker} {k}: {test} != {correct}")
    #                     else:
    #                         self.assertEqual(test, correct, f"{k}: {test} != {correct}")
    #                 except:
    #                     if k in ["regularMarketPreviousClose"] and ticker.ticker in ["ADS.DE"]:
    #                         # Yahoo is wrong, is returning post-market close not regular
    #                         continue
    #                     else:
    #                         raise


def suite():
    suite = unittest.TestSuite()
    suite.addTest(TestTicker('Test ticker'))
    suite.addTest(TestTickerEarnings('Test earnings'))
    suite.addTest(TestTickerHolders('Test holders'))
    suite.addTest(TestTickerHistory('Test Ticker history'))
    suite.addTest(TestTickerMiscFinancials('Test misc financials'))
    suite.addTest(TestTickerInfo('Test info & fast_info'))
    return suite


if __name__ == '__main__':
    unittest.main()
tests/test_utils.py  (new file, 92 lines)
@@ -0,0 +1,92 @@
"""
|
||||
Tests for utils
|
||||
|
||||
To run all tests in suite from commandline:
|
||||
python -m unittest tests.utils
|
||||
|
||||
Specific test class:
|
||||
python -m unittest tests.utils.TestTicker
|
||||
|
||||
"""
|
||||
from unittest import TestSuite
|
||||
|
||||
# import pandas as pd
|
||||
# import numpy as np
|
||||
|
||||
from .context import yfinance as yf
|
||||
|
||||
import unittest
|
||||
# import requests_cache
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
|
||||
class TestCache(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
cls.tempCacheDir = tempfile.TemporaryDirectory()
|
||||
yf.set_tz_cache_location(cls.tempCacheDir.name)
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
cls.tempCacheDir.cleanup()
|
||||
|
||||
def test_storeTzNoRaise(self):
|
||||
# storing TZ to cache should never raise exception
|
||||
tkr = 'AMZN'
|
||||
tz1 = "America/New_York"
|
||||
tz2 = "London/Europe"
|
||||
cache = yf.cache.get_tz_cache()
|
||||
cache.store(tkr, tz1)
|
||||
cache.store(tkr, tz2)
|
||||
|
||||
def test_setTzCacheLocation(self):
|
||||
self.assertEqual(yf.cache._TzDBManager.get_location(), self.tempCacheDir.name)
|
||||
|
||||
tkr = 'AMZN'
|
||||
tz1 = "America/New_York"
|
||||
cache = yf.cache.get_tz_cache()
|
||||
cache.store(tkr, tz1)
|
||||
|
||||
self.assertTrue(os.path.exists(os.path.join(self.tempCacheDir.name, "tkr-tz.db")))
|
||||
|
||||
|
||||
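# Editor's usage sketch (not part of the diff): pointing yfinance's timezone
# cache at a custom writable directory before first use, as TestCache exercises.
# Kept as comments so nothing runs at import time:
#
#     import yfinance as yf
#     yf.set_tz_cache_location("/tmp/yf-tz-cache")  # any writable directory
#     yf.cache.get_tz_cache().store("AMZN", "America/New_York")
#     assert yf.cache.get_tz_cache().lookup("AMZN") is not None
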
class TestCacheNoPermission(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        yf.set_tz_cache_location("/root/yf-cache")

    def test_tzCacheRootStore(self):
        # Test that a cache path on a read-only filesystem raises no exception.
        tkr = 'AMZN'
        tz1 = "America/New_York"

        # During the attempt to store, will discover it cannot write
        yf.cache.get_tz_cache().store(tkr, tz1)

        # Handling the store failure replaces the cache with a dummy
        cache = yf.cache.get_tz_cache()
        self.assertTrue(cache.dummy)
        cache.store(tkr, tz1)

    def test_tzCacheRootLookup(self):
        # Test that a cache path on a read-only filesystem raises no exception.
        tkr = 'AMZN'
        # During the attempt to lookup, will discover it cannot write
        yf.cache.get_tz_cache().lookup(tkr)

        # Handling the lookup failure replaces the cache with a dummy
        cache = yf.cache.get_tz_cache()
        self.assertTrue(cache.dummy)
        cache.lookup(tkr)


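# Editor's note (hypothetical sketch, not yfinance's actual code): the
# cache.dummy assertions above describe a null-object fallback. When the cache
# directory is unwritable, reads and writes are silently swallowed, roughly like:
class _DummyTzCacheSketch:
    dummy = True

    def store(self, tkr, tz):
        pass          # silently drop writes

    def lookup(self, tkr):
        return None   # behave as a permanent cache miss
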
def suite():
    ts: TestSuite = unittest.TestSuite()
    ts.addTest(TestCache('Test cache'))
    ts.addTest(TestCacheNoPermission('Test cache no permission'))
    return ts


if __name__ == '__main__':
    unittest.main()
tests/ticker.py  (deleted, 841 lines)
@@ -1,841 +0,0 @@
"""
|
||||
Tests for Ticker
|
||||
|
||||
To run all tests in suite from commandline:
|
||||
python -m unittest tests.ticker
|
||||
|
||||
Specific test class:
|
||||
python -m unittest tests.ticker.TestTicker
|
||||
|
||||
"""
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
|
||||
from .context import yfinance as yf
|
||||
from .context import session_gbl
|
||||
|
||||
import unittest
|
||||
import requests_cache
|
||||
|
||||
|
||||
class TestTicker(unittest.TestCase):
|
||||
session = None
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
cls.session = session_gbl
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
if cls.session is not None:
|
||||
cls.session.close()
|
||||
|
||||
def test_getTz(self):
|
||||
tkrs = ["IMP.JO", "BHG.JO", "SSW.JO", "BP.L", "INTC"]
|
||||
for tkr in tkrs:
|
||||
# First step: remove ticker from tz-cache
|
||||
yf.utils.get_tz_cache().store(tkr, None)
|
||||
|
||||
# Test:
|
||||
dat = yf.Ticker(tkr, session=self.session)
|
||||
tz = dat._get_ticker_tz(proxy=None, timeout=None)
|
||||
|
||||
self.assertIsNotNone(tz)
|
||||
|
||||
def test_badTicker(self):
|
||||
# Check yfinance doesn't die when ticker delisted
|
||||
|
||||
tkr = "DJI" # typo of "^DJI"
|
||||
dat = yf.Ticker(tkr, session=self.session)
|
||||
|
||||
dat.history(period="1wk")
|
||||
dat.history(start="2022-01-01")
|
||||
dat.history(start="2022-01-01", end="2022-03-01")
|
||||
yf.download([tkr], period="1wk", threads=False, ignore_tz=False)
|
||||
yf.download([tkr], period="1wk", threads=True, ignore_tz=False)
|
||||
yf.download([tkr], period="1wk", threads=False, ignore_tz=True)
|
||||
yf.download([tkr], period="1wk", threads=True, ignore_tz=True)
|
||||
|
||||
for k in dat.fast_info:
|
||||
dat.fast_info[k]
|
||||
|
||||
dat.isin
|
||||
dat.major_holders
|
||||
dat.institutional_holders
|
||||
dat.mutualfund_holders
|
||||
dat.dividends
|
||||
dat.splits
|
||||
dat.actions
|
||||
dat.get_shares_full()
|
||||
dat.options
|
||||
dat.news
|
||||
dat.earnings_dates
|
||||
|
||||
# These require decryption which is broken:
|
||||
# dat.shares
|
||||
# dat.info
|
||||
# dat.calendar
|
||||
# dat.recommendations
|
||||
# dat.earnings
|
||||
# dat.quarterly_earnings
|
||||
# dat.income_stmt
|
||||
# dat.quarterly_income_stmt
|
||||
# dat.balance_sheet
|
||||
# dat.quarterly_balance_sheet
|
||||
# dat.cashflow
|
||||
# dat.quarterly_cashflow
|
||||
# dat.recommendations_summary
|
||||
# dat.analyst_price_target
|
||||
# dat.revenue_forecasts
|
||||
# dat.sustainability
|
||||
# dat.earnings_trend
|
||||
# dat.earnings_forecasts
|
||||
|
||||
def test_goodTicker(self):
|
||||
# that yfinance works when full api is called on same instance of ticker
|
||||
|
||||
tkrs = ["IBM"]
|
||||
tkrs.append("QCSTIX") # weird ticker, no price history but has previous close
|
||||
for tkr in tkrs:
|
||||
dat = yf.Ticker(tkr, session=self.session)
|
||||
|
||||
dat.history(period="1wk")
|
||||
dat.history(start="2022-01-01")
|
||||
dat.history(start="2022-01-01", end="2022-03-01")
|
||||
yf.download([tkr], period="1wk", threads=False, ignore_tz=False)
|
||||
yf.download([tkr], period="1wk", threads=True, ignore_tz=False)
|
||||
yf.download([tkr], period="1wk", threads=False, ignore_tz=True)
|
||||
yf.download([tkr], period="1wk", threads=True, ignore_tz=True)
|
||||
|
||||
for k in dat.fast_info:
|
||||
dat.fast_info[k]
|
||||
|
||||
dat.isin
|
||||
dat.major_holders
|
||||
dat.institutional_holders
|
||||
dat.mutualfund_holders
|
||||
dat.dividends
|
||||
dat.splits
|
||||
dat.actions
|
||||
dat.get_shares_full()
|
||||
dat.options
|
||||
dat.news
|
||||
dat.earnings_dates
|
||||
|
||||
# These require decryption which is broken:
|
||||
# dat.shares
|
||||
# dat.info
|
||||
# dat.calendar
|
||||
# dat.recommendations
|
||||
# dat.earnings
|
||||
# dat.quarterly_earnings
|
||||
# dat.income_stmt
|
||||
# dat.quarterly_income_stmt
|
||||
# dat.balance_sheet
|
||||
# dat.quarterly_balance_sheet
|
||||
# dat.cashflow
|
||||
# dat.quarterly_cashflow
|
||||
# dat.recommendations_summary
|
||||
# dat.analyst_price_target
|
||||
# dat.revenue_forecasts
|
||||
# dat.sustainability
|
||||
# dat.earnings_trend
|
||||
# dat.earnings_forecasts
|
||||
|
||||
|
||||
class TestTickerHistory(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        # use a ticker that has dividends
        self.symbol = "IBM"
        self.ticker = yf.Ticker(self.symbol, session=self.session)

        self.symbols = ["AMZN", "MSFT", "NVDA"]

    def tearDown(self):
        self.ticker = None

    def test_history(self):
        md = self.ticker.history_metadata
        self.assertIn("IBM", md.values(), "metadata missing")
        data = self.ticker.history("1y")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    def test_download(self):
        for t in [False, True]:
            for i in [False, True]:
                data = yf.download(self.symbols, threads=t, ignore_tz=i)
                self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
                self.assertFalse(data.empty, "data is empty")

    def test_no_expensive_calls_introduced(self):
        """
        Make sure calling history to get price data has not introduced more calls to yahoo than absolutely necessary.
        As doing other type of scraping calls than "query2.finance.yahoo.com/v8/finance/chart" to yahoo website
        will quickly trigger spam-block when doing bulk download of history data.
        """
        session = requests_cache.CachedSession(backend='memory')
        ticker = yf.Ticker("GOOGL", session=session)
        ticker.history("1y")
        actual_urls_called = tuple([r.url for r in session.cache.filter()])
        session.close()
        expected_urls = (
            'https://query2.finance.yahoo.com/v8/finance/chart/GOOGL?events=div,splits,capitalGains&includePrePost=False&interval=1d&range=1y',
        )
        self.assertEqual(expected_urls, actual_urls_called, "Different than expected url used to fetch history.")

    def test_dividends(self):
        data = self.ticker.dividends
        self.assertIsInstance(data, pd.Series, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    def test_splits(self):
        data = self.ticker.splits
        self.assertIsInstance(data, pd.Series, "data has wrong type")
        # self.assertFalse(data.empty, "data is empty")

    def test_actions(self):
        data = self.ticker.actions
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")


# Below will fail because decryption broken
# class TestTickerEarnings(unittest.TestCase):
#     session = None

#     @classmethod
#     def setUpClass(cls):
#         cls.session = session_gbl

#     @classmethod
#     def tearDownClass(cls):
#         if cls.session is not None:
#             cls.session.close()

#     def setUp(self):
#         self.ticker = yf.Ticker("GOOGL", session=self.session)

#     def tearDown(self):
#         self.ticker = None

#     def test_earnings(self):
#         data = self.ticker.earnings
#         self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
#         self.assertFalse(data.empty, "data is empty")

#         data_cached = self.ticker.earnings
#         self.assertIs(data, data_cached, "data not cached")

#     def test_quarterly_earnings(self):
#         data = self.ticker.quarterly_earnings
#         self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
#         self.assertFalse(data.empty, "data is empty")

#         data_cached = self.ticker.quarterly_earnings
#         self.assertIs(data, data_cached, "data not cached")

#     def test_earnings_forecasts(self):
#         data = self.ticker.earnings_forecasts
#         self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
#         self.assertFalse(data.empty, "data is empty")

#         data_cached = self.ticker.earnings_forecasts
#         self.assertIs(data, data_cached, "data not cached")

#     def test_earnings_dates(self):
#         data = self.ticker.earnings_dates
#         self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
#         self.assertFalse(data.empty, "data is empty")

#         data_cached = self.ticker.earnings_dates
#         self.assertIs(data, data_cached, "data not cached")

#     def test_earnings_trend(self):
#         data = self.ticker.earnings_trend
#         self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
#         self.assertFalse(data.empty, "data is empty")

#         data_cached = self.ticker.earnings_trend
#         self.assertIs(data, data_cached, "data not cached")

#     def test_earnings_dates_with_limit(self):
#         # use ticker with lots of historic earnings
#         ticker = yf.Ticker("IBM")
#         limit = 110
#         data = ticker.get_earnings_dates(limit=limit)
#         self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
#         self.assertFalse(data.empty, "data is empty")
#         self.assertEqual(len(data), limit, "Wrong number or rows")

#         data_cached = ticker.get_earnings_dates(limit=limit)
#         self.assertIs(data, data_cached, "data not cached")


class TestTickerHolders(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

    def tearDown(self):
        self.ticker = None

    def test_major_holders(self):
        data = self.ticker.major_holders
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.major_holders
        self.assertIs(data, data_cached, "data not cached")

    def test_institutional_holders(self):
        data = self.ticker.institutional_holders
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.institutional_holders
        self.assertIs(data, data_cached, "data not cached")

    def test_mutualfund_holders(self):
        data = self.ticker.mutualfund_holders
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

        data_cached = self.ticker.mutualfund_holders
        self.assertIs(data, data_cached, "data not cached")


class TestTickerMiscFinancials(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

        # For ticker 'BSE.AX' (and others), Yahoo not returning
        # full quarterly financials (usually cash-flow) with all entries,
        # instead returns a smaller version in different data store.
        self.ticker_old_fmt = yf.Ticker("BSE.AX", session=self.session)

    def tearDown(self):
        self.ticker = None

    def test_isin(self):
        data = self.ticker.isin
        self.assertIsInstance(data, str, "data has wrong type")
        self.assertEqual("ARDEUT116159", data, "data is empty")

        data_cached = self.ticker.isin
        self.assertIs(data, data_cached, "data not cached")

    def test_options(self):
        data = self.ticker.options
        self.assertIsInstance(data, tuple, "data has wrong type")
        self.assertTrue(len(data) > 1, "data is empty")

    def test_shares_full(self):
        data = self.ticker.get_shares_full()
        self.assertIsInstance(data, pd.Series, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    # Below will fail because decryption broken

    # def test_income_statement(self):
    #     expected_keys = ["Total Revenue", "Basic EPS"]
    #     expected_periods_days = 365

    #     # Test contents of table
    #     data = self.ticker.get_income_stmt(pretty=True)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")
    #     period = abs((data.columns[0]-data.columns[1]).days)
    #     self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

    #     # Test property defaults
    #     data2 = self.ticker.income_stmt
    #     self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

    #     # Test pretty=False
    #     expected_keys = [k.replace(' ', '') for k in expected_keys]
    #     data = self.ticker.get_income_stmt(pretty=False)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")

    #     # Test to_dict
    #     data = self.ticker.get_income_stmt(as_dict=True)
    #     self.assertIsInstance(data, dict, "data has wrong type")

    # def test_quarterly_income_statement(self):
    #     expected_keys = ["Total Revenue", "Basic EPS"]
    #     expected_periods_days = 365//4

    #     # Test contents of table
    #     data = self.ticker.get_income_stmt(pretty=True, freq="quarterly")
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")
    #     period = abs((data.columns[0]-data.columns[1]).days)
    #     self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

    #     # Test property defaults
    #     data2 = self.ticker.quarterly_income_stmt
    #     self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

    #     # Test pretty=False
    #     expected_keys = [k.replace(' ', '') for k in expected_keys]
    #     data = self.ticker.get_income_stmt(pretty=False, freq="quarterly")
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")

    #     # Test to_dict
    #     data = self.ticker.get_income_stmt(as_dict=True)
    #     self.assertIsInstance(data, dict, "data has wrong type")

    # def test_quarterly_income_statement_old_fmt(self):
    #     expected_row = "TotalRevenue"
    #     data = self.ticker_old_fmt.get_income_stmt(freq="quarterly", legacy=True)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     self.assertIn(expected_row, data.index, "Did not find expected row in index")

    #     data_cached = self.ticker_old_fmt.get_income_stmt(freq="quarterly", legacy=True)
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_balance_sheet(self):
    #     expected_keys = ["Total Assets", "Net PPE"]
    #     expected_periods_days = 365

    #     # Test contents of table
    #     data = self.ticker.get_balance_sheet(pretty=True)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")
    #     period = abs((data.columns[0]-data.columns[1]).days)
    #     self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

    #     # Test property defaults
    #     data2 = self.ticker.balance_sheet
    #     self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

    #     # Test pretty=False
    #     expected_keys = [k.replace(' ', '') for k in expected_keys]
    #     data = self.ticker.get_balance_sheet(pretty=False)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")

    #     # Test to_dict
    #     data = self.ticker.get_balance_sheet(as_dict=True)
    #     self.assertIsInstance(data, dict, "data has wrong type")

    # def test_quarterly_balance_sheet(self):
    #     expected_keys = ["Total Assets", "Net PPE"]
    #     expected_periods_days = 365//4

    #     # Test contents of table
    #     data = self.ticker.get_balance_sheet(pretty=True, freq="quarterly")
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")
    #     period = abs((data.columns[0]-data.columns[1]).days)
    #     self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

    #     # Test property defaults
    #     data2 = self.ticker.quarterly_balance_sheet
    #     self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

    #     # Test pretty=False
    #     expected_keys = [k.replace(' ', '') for k in expected_keys]
    #     data = self.ticker.get_balance_sheet(pretty=False, freq="quarterly")
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")

    #     # Test to_dict
    #     data = self.ticker.get_balance_sheet(as_dict=True, freq="quarterly")
    #     self.assertIsInstance(data, dict, "data has wrong type")

    # def test_quarterly_balance_sheet_old_fmt(self):
    #     expected_row = "TotalAssets"
    #     data = self.ticker_old_fmt.get_balance_sheet(freq="quarterly", legacy=True)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     self.assertIn(expected_row, data.index, "Did not find expected row in index")

    #     data_cached = self.ticker_old_fmt.get_balance_sheet(freq="quarterly", legacy=True)
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_cash_flow(self):
    #     expected_keys = ["Operating Cash Flow", "Net PPE Purchase And Sale"]
    #     expected_periods_days = 365

    #     # Test contents of table
    #     data = self.ticker.get_cashflow(pretty=True)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")
    #     period = abs((data.columns[0]-data.columns[1]).days)
    #     self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

    #     # Test property defaults
    #     data2 = self.ticker.cashflow
    #     self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

    #     # Test pretty=False
    #     expected_keys = [k.replace(' ', '') for k in expected_keys]
    #     data = self.ticker.get_cashflow(pretty=False)
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")

    #     # Test to_dict
    #     data = self.ticker.get_cashflow(as_dict=True)
    #     self.assertIsInstance(data, dict, "data has wrong type")

    # def test_quarterly_cash_flow(self):
    #     expected_keys = ["Operating Cash Flow", "Net PPE Purchase And Sale"]
    #     expected_periods_days = 365//4

    #     # Test contents of table
    #     data = self.ticker.get_cashflow(pretty=True, freq="quarterly")
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")
    #     period = abs((data.columns[0]-data.columns[1]).days)
    #     self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

    #     # Test property defaults
    #     data2 = self.ticker.quarterly_cashflow
    #     self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

    #     # Test pretty=False
    #     expected_keys = [k.replace(' ', '') for k in expected_keys]
    #     data = self.ticker.get_cashflow(pretty=False, freq="quarterly")
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     for k in expected_keys:
    #         self.assertIn(k, data.index, "Did not find expected row in index")

    #     # Test to_dict
    #     data = self.ticker.get_cashflow(as_dict=True)
    #     self.assertIsInstance(data, dict, "data has wrong type")

    # def test_quarterly_cashflow_old_fmt(self):
    #     expected_row = "NetIncome"
    #     data = self.ticker_old_fmt.get_cashflow(legacy=True, freq="quarterly")
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")
    #     self.assertIn(expected_row, data.index, "Did not find expected row in index")

    #     data_cached = self.ticker_old_fmt.get_cashflow(legacy=True, freq="quarterly")
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_income_alt_names(self):
    #     i1 = self.ticker.income_stmt
    #     i2 = self.ticker.incomestmt
    #     self.assertTrue(i1.equals(i2))
    #     i3 = self.ticker.financials
    #     self.assertTrue(i1.equals(i3))

    #     i1 = self.ticker.get_income_stmt()
    #     i2 = self.ticker.get_incomestmt()
    #     self.assertTrue(i1.equals(i2))
    #     i3 = self.ticker.get_financials()
    #     self.assertTrue(i1.equals(i3))

    #     i1 = self.ticker.quarterly_income_stmt
    #     i2 = self.ticker.quarterly_incomestmt
    #     self.assertTrue(i1.equals(i2))
    #     i3 = self.ticker.quarterly_financials
    #     self.assertTrue(i1.equals(i3))

    #     i1 = self.ticker.get_income_stmt(freq="quarterly")
    #     i2 = self.ticker.get_incomestmt(freq="quarterly")
    #     self.assertTrue(i1.equals(i2))
    #     i3 = self.ticker.get_financials(freq="quarterly")
    #     self.assertTrue(i1.equals(i3))

    # def test_balance_sheet_alt_names(self):
    #     i1 = self.ticker.balance_sheet
    #     i2 = self.ticker.balancesheet
    #     self.assertTrue(i1.equals(i2))

    #     i1 = self.ticker.get_balance_sheet()
    #     i2 = self.ticker.get_balancesheet()
    #     self.assertTrue(i1.equals(i2))

    #     i1 = self.ticker.quarterly_balance_sheet
    #     i2 = self.ticker.quarterly_balancesheet
    #     self.assertTrue(i1.equals(i2))

    #     i1 = self.ticker.get_balance_sheet(freq="quarterly")
    #     i2 = self.ticker.get_balancesheet(freq="quarterly")
    #     self.assertTrue(i1.equals(i2))

    # def test_cash_flow_alt_names(self):
    #     i1 = self.ticker.cash_flow
    #     i2 = self.ticker.cashflow
    #     self.assertTrue(i1.equals(i2))

    #     i1 = self.ticker.get_cash_flow()
    #     i2 = self.ticker.get_cashflow()
    #     self.assertTrue(i1.equals(i2))

    #     i1 = self.ticker.quarterly_cash_flow
    #     i2 = self.ticker.quarterly_cashflow
    #     self.assertTrue(i1.equals(i2))

    #     i1 = self.ticker.get_cash_flow(freq="quarterly")
    #     i2 = self.ticker.get_cashflow(freq="quarterly")
    #     self.assertTrue(i1.equals(i2))

    # def test_sustainability(self):
    #     data = self.ticker.sustainability
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.sustainability
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_recommendations(self):
    #     data = self.ticker.recommendations
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.recommendations
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_recommendations_summary(self):
    #     data = self.ticker.recommendations_summary
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.recommendations_summary
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_analyst_price_target(self):
    #     data = self.ticker.analyst_price_target
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.analyst_price_target
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_revenue_forecasts(self):
    #     data = self.ticker.revenue_forecasts
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.revenue_forecasts
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_calendar(self):
    #     data = self.ticker.calendar
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    #     data_cached = self.ticker.calendar
    #     self.assertIs(data, data_cached, "data not cached")

    # def test_shares(self):
    #     data = self.ticker.shares
    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
    #     self.assertFalse(data.empty, "data is empty")

    # def test_bad_freq_value_raises_exception(self):
    #     self.assertRaises(ValueError, lambda: self.ticker.get_cashflow(freq="badarg"))


class TestTickerInfo(unittest.TestCase):
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.symbols = []
        self.symbols += ["ESLT.TA", "BP.L", "GOOGL"]
        self.symbols.append("QCSTIX")  # good for testing, doesn't trade
        self.symbols += ["BTC-USD", "IWO", "VFINX", "^GSPC"]
        self.symbols += ["SOKE.IS", "ADS.DE"]  # detected bugs
        self.tickers = [yf.Ticker(s, session=self.session) for s in self.symbols]

    def tearDown(self):
        self.ticker = None

    def test_fast_info(self):
        f = yf.Ticker("AAPL", session=self.session).fast_info
        for k in f:
            self.assertIsNotNone(f[k])

    # Below will fail because decryption is broken

    # def test_info(self):
    #     data = self.tickers[0].info
    #     self.assertIsInstance(data, dict, "data has wrong type")
    #     self.assertIn("symbol", data.keys(), "Did not find expected key in info dict")
    #     self.assertEqual(self.symbols[0], data["symbol"], "Wrong symbol value in info dict")

    # def test_fast_info_matches_info(self):
    #     yf.scrapers.quote.PRUNE_INFO = False

    #     fast_info_keys = set()
    #     for ticker in self.tickers:
    #         fast_info_keys.update(set(ticker.fast_info.keys()))
    #     fast_info_keys = sorted(list(fast_info_keys))

    #     key_rename_map = {}
    #     key_rename_map["currency"] = "currency"
    #     key_rename_map["quote_type"] = "quoteType"
    #     key_rename_map["timezone"] = "exchangeTimezoneName"

    #     key_rename_map["last_price"] = ["currentPrice", "regularMarketPrice"]
    #     key_rename_map["open"] = ["open", "regularMarketOpen"]
    #     key_rename_map["day_high"] = ["dayHigh", "regularMarketDayHigh"]
    #     key_rename_map["day_low"] = ["dayLow", "regularMarketDayLow"]
    #     key_rename_map["previous_close"] = ["previousClose"]
    #     key_rename_map["regular_market_previous_close"] = ["regularMarketPreviousClose"]

    #     key_rename_map["fifty_day_average"] = "fiftyDayAverage"
    #     key_rename_map["two_hundred_day_average"] = "twoHundredDayAverage"
    #     key_rename_map["year_change"] = ["52WeekChange", "fiftyTwoWeekChange"]
    #     key_rename_map["year_high"] = "fiftyTwoWeekHigh"
    #     key_rename_map["year_low"] = "fiftyTwoWeekLow"

    #     key_rename_map["last_volume"] = ["volume", "regularMarketVolume"]
    #     key_rename_map["ten_day_average_volume"] = ["averageVolume10days", "averageDailyVolume10Day"]
    #     key_rename_map["three_month_average_volume"] = "averageVolume"

    #     key_rename_map["market_cap"] = "marketCap"
    #     key_rename_map["shares"] = "sharesOutstanding"

    #     for k in list(key_rename_map.keys()):
    #         if '_' in k:
    #             key_rename_map[yf.utils.snake_case_2_camelCase(k)] = key_rename_map[k]

    #     # Note: share count items in info[] are bad. Sometimes the float > outstanding!
    #     # So often fast_info["shares"] does not match.
    #     # Why isn't fast_info["shares"] wrong? Because using it to calculate market cap is always correct.
    #     bad_keys = {"shares"}

    #     # Loose tolerance for averages, no idea why they don't match info[]. Is info wrong?
    #     custom_tolerances = {}
    #     custom_tolerances["year_change"] = 1.0
    #     # custom_tolerances["ten_day_average_volume"] = 1e-3
    #     custom_tolerances["ten_day_average_volume"] = 1e-1
    #     # custom_tolerances["three_month_average_volume"] = 1e-2
    #     custom_tolerances["three_month_average_volume"] = 5e-1
    #     custom_tolerances["fifty_day_average"] = 1e-2
    #     custom_tolerances["two_hundred_day_average"] = 1e-2
    #     for k in list(custom_tolerances.keys()):
    #         if '_' in k:
    #             custom_tolerances[yf.utils.snake_case_2_camelCase(k)] = custom_tolerances[k]

    #     for k in fast_info_keys:
    #         if k in key_rename_map:
    #             k2 = key_rename_map[k]
    #         else:
    #             k2 = k

    #         if not isinstance(k2, list):
    #             k2 = [k2]

    #         for m in k2:
    #             for ticker in self.tickers:
    #                 if m not in ticker.info:
    #                     # print(f"symbol={ticker.ticker}: fast_info key '{k}' mapped to info key '{m}' but not present in info")
    #                     continue

    #                 if k in bad_keys:
    #                     continue

    #                 if k in custom_tolerances:
    #                     rtol = custom_tolerances[k]
    #                 else:
    #                     rtol = 5e-3
    #                     # rtol = 1e-4

    #                 correct = ticker.info[m]
    #                 test = ticker.fast_info[k]
    #                 # print(f"Testing: symbol={ticker.ticker} m={m} k={k}: test={test} vs correct={correct}")
    #                 if k in ["market_cap", "marketCap"] and ticker.fast_info["currency"] in ["GBp", "ILA"]:
    #                     # Adjust for currency to match Yahoo:
    #                     test *= 0.01
    #                 try:
    #                     if correct is None:
    #                         self.assertTrue(test is None or (not np.isnan(test)), f"{k}: {test} must be None or real value because correct={correct}")
    #                     elif isinstance(test, float) or isinstance(correct, int):
    #                         self.assertTrue(np.isclose(test, correct, rtol=rtol), f"{ticker.ticker} {k}: {test} != {correct}")
    #                     else:
    #                         self.assertEqual(test, correct, f"{k}: {test} != {correct}")
    #                 except:
    #                     if k in ["regularMarketPreviousClose"] and ticker.ticker in ["ADS.DE"]:
    #                         # Yahoo is wrong, it returns the post-market close, not the regular close
    #                         continue
    #                     else:
    #                         raise


def suite():
    suite = unittest.TestSuite()
    suite.addTest(TestTicker('Test ticker'))
    suite.addTest(TestTickerEarnings('Test earnings'))
    suite.addTest(TestTickerHolders('Test holders'))
    suite.addTest(TestTickerHistory('Test Ticker history'))
    suite.addTest(TestTickerMiscFinancials('Test misc financials'))
    suite.addTest(TestTickerInfo('Test info & fast_info'))
    return suite


if __name__ == '__main__':
    unittest.main()
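One caveat worth noting: unittest.TestSuite.addTest() expects a real test-method name, so the display labels above ('Test ticker', etc.) would make suite() raise ValueError unless methods with those exact names exist. A loader-based sketch (hypothetical suite_v2, assuming the TestCase classes above are importable) sidesteps that:

def suite_v2():
    # Hypothetical alternative to suite(): let TestLoader discover the
    # test methods instead of naming them by hand.
    loader = unittest.TestLoader()
    s = unittest.TestSuite()
    for tc in (TestTickerInfo,):  # extend with the other TestCase classes
        s.addTests(loader.loadTestsFromTestCase(tc))
    return s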
@@ -23,7 +23,8 @@ from . import version
from .ticker import Ticker
from .tickers import Tickers
from .multi import download
from .utils import set_tz_cache_location
from .utils import enable_debug_mode
from .cache import set_tz_cache_location

__version__ = version.version
__author__ = "Ran Aroussi"

@@ -34,6 +35,8 @@ def pdr_override():
    make pandas datareader optional
    otherwise can be called via fix_yahoo_finance.download(...)
    """
    from .utils import print_once
    print_once("yfinance: pandas_datareader support is deprecated & semi-broken so will be removed in a future version. Just use yfinance.")
    try:
        import pandas_datareader
        pandas_datareader.data.get_data_yahoo = download

@@ -43,4 +46,4 @@ def pdr_override():
        pass


__all__ = ['download', 'Ticker', 'Tickers', 'pdr_override', 'set_tz_cache_location']
__all__ = ['download', 'Ticker', 'Tickers', 'pdr_override', 'enable_debug_mode', 'set_tz_cache_location']
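For context, a minimal sketch of how the deprecated shim above is used (assumes pandas_datareader is installed; pdr_override() simply repoints its get_data_yahoo at yfinance.download):

import yfinance as yf
from pandas_datareader import data as pdr

yf.pdr_override()  # patch pandas_datareader to fetch via yfinance

# pdr.get_data_yahoo now delegates to yf.download under the hood
df = pdr.get_data_yahoo("SPY", start="2023-01-01", end="2023-06-30")
print(df.head())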
1261 yfinance/base.py (file diff suppressed because it is too large)
431 yfinance/cache.py (new file)
@@ -0,0 +1,431 @@
import peewee as _peewee
from threading import Lock
import os as _os
import platformdirs as _ad
import atexit as _atexit
import datetime as _datetime
import pickle as _pkl

from .utils import get_yf_logger

_cache_init_lock = Lock()

# --------------
# TimeZone cache
# --------------

class _TzCacheException(Exception):
    pass


class _TzCacheDummy:
    """Dummy cache to use if tz cache is disabled"""

    def lookup(self, tkr):
        return None

    def store(self, tkr, tz):
        pass

    @property
    def tz_db(self):
        return None


class _TzCacheManager:
    _tz_cache = None

    @classmethod
    def get_tz_cache(cls):
        if cls._tz_cache is None:
            with _cache_init_lock:
                cls._initialise()
        return cls._tz_cache

    @classmethod
    def _initialise(cls, cache_dir=None):
        cls._tz_cache = _TzCache()


class _TzDBManager:
    _db = None
    _cache_dir = _os.path.join(_ad.user_cache_dir(), "py-yfinance")

    @classmethod
    def get_database(cls):
        if cls._db is None:
            cls._initialise()
        return cls._db

    @classmethod
    def close_db(cls):
        if cls._db is not None:
            try:
                cls._db.close()
            except Exception:
                # Must discard exceptions because Python is trying to quit.
                pass

    @classmethod
    def _initialise(cls, cache_dir=None):
        if cache_dir is not None:
            cls._cache_dir = cache_dir

        if not _os.path.isdir(cls._cache_dir):
            try:
                _os.makedirs(cls._cache_dir)
            except OSError as err:
                raise _TzCacheException(f"Error creating TzCache folder: '{cls._cache_dir}' reason: {err}")
        elif not (_os.access(cls._cache_dir, _os.R_OK) and _os.access(cls._cache_dir, _os.W_OK)):
            raise _TzCacheException(f"Cannot read and write in TzCache folder: '{cls._cache_dir}'")

        cls._db = _peewee.SqliteDatabase(
            _os.path.join(cls._cache_dir, 'tkr-tz.db'),
            pragmas={'journal_mode': 'wal', 'cache_size': -64}
        )

        old_cache_file_path = _os.path.join(cls._cache_dir, "tkr-tz.csv")
        if _os.path.isfile(old_cache_file_path):
            _os.remove(old_cache_file_path)

    @classmethod
    def set_location(cls, new_cache_dir):
        if cls._db is not None:
            cls._db.close()
            cls._db = None
        cls._cache_dir = new_cache_dir

    @classmethod
    def get_location(cls):
        return cls._cache_dir

# close DB when Python exits
_atexit.register(_TzDBManager.close_db)


tz_db_proxy = _peewee.Proxy()
class _KV(_peewee.Model):
    key = _peewee.CharField(primary_key=True)
    value = _peewee.CharField(null=True)

    class Meta:
        database = tz_db_proxy
        without_rowid = True


class _TzCache:
    def __init__(self):
        self.initialised = -1
        self.db = None
        self.dummy = False

    def get_db(self):
        if self.db is not None:
            return self.db

        try:
            self.db = _TzDBManager.get_database()
        except _TzCacheException as err:
            get_yf_logger().info(f"Failed to create TzCache, reason: {err}. "
                                 "TzCache will not be used. "
                                 "Tip: You can direct cache to use a different location with 'set_tz_cache_location(mylocation)'")
            self.dummy = True
            return None
        return self.db

    def initialise(self):
        if self.initialised != -1:
            return

        db = self.get_db()
        if db is None:
            self.initialised = 0  # failure
            return

        db.connect()
        tz_db_proxy.initialize(db)
        try:
            db.create_tables([_KV])
        except _peewee.OperationalError as e:
            if 'WITHOUT' in str(e):
                _KV._meta.without_rowid = False
                db.create_tables([_KV])
            else:
                raise
        self.initialised = 1  # success

    def lookup(self, key):
        if self.dummy:
            return None

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return None

        try:
            return _KV.get(_KV.key == key).value
        except _KV.DoesNotExist:
            return None

    def store(self, key, value):
        if self.dummy:
            return

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return

        db = self.get_db()
        if db is None:
            return
        try:
            if value is None:
                q = _KV.delete().where(_KV.key == key)
                q.execute()
                return
            with db.atomic():
                _KV.insert(key=key, value=value).execute()
        except _peewee.IntegrityError:
            # Integrity error means the key already exists. Try updating the key.
            old_value = self.lookup(key)
            if old_value != value:
                get_yf_logger().debug(f"Value for key {key} changed from {old_value} to {value}.")
                with db.atomic():
                    q = _KV.update(value=value).where(_KV.key == key)
                    q.execute()


def get_tz_cache():
    return _TzCacheManager.get_tz_cache()
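For orientation, a small sketch of how this timezone cache is exercised by the rest of the library (the fetch side is elided; assumes the module is importable as yfinance.cache):

# Sketch: look up a ticker's timezone, populate on a miss.
from yfinance import cache

tz_cache = cache.get_tz_cache()
tz = tz_cache.lookup("MSFT")          # None until something is stored
if tz is None:
    tz = "America/New_York"           # normally parsed from a Yahoo response
    tz_cache.store("MSFT", tz)
assert tz_cache.lookup("MSFT") == tz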


# --------------
# Cookie cache
# --------------

class _CookieCacheException(Exception):
    pass


class _CookieCacheDummy:
    """Dummy cache to use if Cookie cache is disabled"""

    def lookup(self, tkr):
        return None

    def store(self, tkr, Cookie):
        pass

    @property
    def Cookie_db(self):
        return None


class _CookieCacheManager:
    _Cookie_cache = None

    @classmethod
    def get_cookie_cache(cls):
        if cls._Cookie_cache is None:
            with _cache_init_lock:
                cls._initialise()
        return cls._Cookie_cache

    @classmethod
    def _initialise(cls, cache_dir=None):
        cls._Cookie_cache = _CookieCache()


class _CookieDBManager:
    _db = None
    _cache_dir = _os.path.join(_ad.user_cache_dir(), "py-yfinance")

    @classmethod
    def get_database(cls):
        if cls._db is None:
            cls._initialise()
        return cls._db

    @classmethod
    def close_db(cls):
        if cls._db is not None:
            try:
                cls._db.close()
            except Exception:
                # Must discard exceptions because Python is trying to quit.
                pass

    @classmethod
    def _initialise(cls, cache_dir=None):
        if cache_dir is not None:
            cls._cache_dir = cache_dir

        if not _os.path.isdir(cls._cache_dir):
            try:
                _os.makedirs(cls._cache_dir)
            except OSError as err:
                raise _CookieCacheException(f"Error creating CookieCache folder: '{cls._cache_dir}' reason: {err}")
        elif not (_os.access(cls._cache_dir, _os.R_OK) and _os.access(cls._cache_dir, _os.W_OK)):
            raise _CookieCacheException(f"Cannot read and write in CookieCache folder: '{cls._cache_dir}'")

        cls._db = _peewee.SqliteDatabase(
            _os.path.join(cls._cache_dir, 'cookies.db'),
            pragmas={'journal_mode': 'wal', 'cache_size': -64}
        )

    @classmethod
    def set_location(cls, new_cache_dir):
        if cls._db is not None:
            cls._db.close()
            cls._db = None
        cls._cache_dir = new_cache_dir

    @classmethod
    def get_location(cls):
        return cls._cache_dir

# close DB when Python exits
_atexit.register(_CookieDBManager.close_db)


Cookie_db_proxy = _peewee.Proxy()
class ISODateTimeField(_peewee.DateTimeField):
    # Ensure Python datetime is read & written correctly for sqlite,
    # because a user discovered peewee allowed an invalid datetime
    # to get written.
    def db_value(self, value):
        if value and isinstance(value, _datetime.datetime):
            return value.isoformat()
        return super().db_value(value)
    def python_value(self, value):
        if value and isinstance(value, str) and 'T' in value:
            return _datetime.datetime.fromisoformat(value)
        return super().python_value(value)
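A quick round-trip check of the invariant ISODateTimeField enforces (a minimal sketch; peewee fields can be exercised standalone like this):

import datetime

f = ISODateTimeField()
dt = datetime.datetime(2023, 7, 1, 12, 30)
s = f.db_value(dt)                 # stored as '2023-07-01T12:30:00'
assert f.python_value(s) == dt     # parsed back to the same datetime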
class _CookieSchema(_peewee.Model):
    strategy = _peewee.CharField(primary_key=True)
    fetch_date = ISODateTimeField(default=_datetime.datetime.now)

    # Which cookie type depends on strategy
    cookie_bytes = _peewee.BlobField()

    class Meta:
        database = Cookie_db_proxy
        without_rowid = True


class _CookieCache:
    def __init__(self):
        self.initialised = -1
        self.db = None
        self.dummy = False

    def get_db(self):
        if self.db is not None:
            return self.db

        try:
            self.db = _CookieDBManager.get_database()
        except _CookieCacheException as err:
            get_yf_logger().info(f"Failed to create CookieCache, reason: {err}. "
                                 "CookieCache will not be used. "
                                 "Tip: You can direct cache to use a different location with 'set_cache_location(mylocation)'")
            self.dummy = True
            return None
        return self.db

    def initialise(self):
        if self.initialised != -1:
            return

        db = self.get_db()
        if db is None:
            self.initialised = 0  # failure
            return

        db.connect()
        Cookie_db_proxy.initialize(db)
        try:
            db.create_tables([_CookieSchema])
        except _peewee.OperationalError as e:
            if 'WITHOUT' in str(e):
                _CookieSchema._meta.without_rowid = False
                db.create_tables([_CookieSchema])
            else:
                raise
        self.initialised = 1  # success

    def lookup(self, strategy):
        if self.dummy:
            return None

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return None

        try:
            data = _CookieSchema.get(_CookieSchema.strategy == strategy)
            cookie = _pkl.loads(data.cookie_bytes)
            return {'cookie': cookie, 'age': _datetime.datetime.now()-data.fetch_date}
        except _CookieSchema.DoesNotExist:
            return None

    def store(self, strategy, cookie):
        if self.dummy:
            return

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return

        db = self.get_db()
        if db is None:
            return
        try:
            q = _CookieSchema.delete().where(_CookieSchema.strategy == strategy)
            q.execute()
            if cookie is None:
                return
            with db.atomic():
                cookie_pkl = _pkl.dumps(cookie, _pkl.HIGHEST_PROTOCOL)
                _CookieSchema.insert(strategy=strategy, cookie_bytes=cookie_pkl).execute()
        except _peewee.IntegrityError:
            raise
            # # Integrity error means the strategy already exists. Try updating the strategy.
            # old_value = self.lookup(strategy)
            # if old_value != cookie:
            #     get_yf_logger().debug(f"cookie for strategy {strategy} changed from {old_value} to {cookie}.")
            #     with db.atomic():
            #         q = _CookieSchema.update(cookie=cookie).where(_CookieSchema.strategy == strategy)
            #         q.execute()


def get_cookie_cache():
    return _CookieCacheManager.get_cookie_cache()


def set_cache_location(cache_dir: str):
    """
    Sets the path to create the "py-yfinance" cache folder in.
    Useful if the default folder returned by "platformdirs.user_cache_dir()" is not writable.
    Must be called before cache is used (that is, before fetching tickers).
    :param cache_dir: Path to use for caches
    :return: None
    """
    _TzDBManager.set_location(cache_dir)
    _CookieDBManager.set_location(cache_dir)

def set_tz_cache_location(cache_dir: str):
    set_cache_location(cache_dir)
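Typical use of the relocation hooks above, e.g. when the platformdirs default is not writable (a sketch; the path is illustrative):

import yfinance as yf

yf.set_tz_cache_location("/var/tmp/yf-cache")   # must run before the first fetch
print(yf.Ticker("AAPL").fast_info["timezone"])  # first use creates the caches there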
154 yfinance/const.py (new file)
@@ -0,0 +1,154 @@
_BASE_URL_ = 'https://query2.finance.yahoo.com'
_ROOT_URL_ = 'https://finance.yahoo.com'

fundamentals_keys = {
    'financials': ["TaxEffectOfUnusualItems", "TaxRateForCalcs", "NormalizedEBITDA", "NormalizedDilutedEPS",
                   "NormalizedBasicEPS", "TotalUnusualItems", "TotalUnusualItemsExcludingGoodwill",
                   "NetIncomeFromContinuingOperationNetMinorityInterest", "ReconciledDepreciation",
                   "ReconciledCostOfRevenue", "EBITDA", "EBIT", "NetInterestIncome", "InterestExpense",
                   "InterestIncome", "ContinuingAndDiscontinuedDilutedEPS", "ContinuingAndDiscontinuedBasicEPS",
                   "NormalizedIncome", "NetIncomeFromContinuingAndDiscontinuedOperation", "TotalExpenses",
                   "RentExpenseSupplemental", "ReportedNormalizedDilutedEPS", "ReportedNormalizedBasicEPS",
                   "TotalOperatingIncomeAsReported", "DividendPerShare", "DilutedAverageShares", "BasicAverageShares",
                   "DilutedEPS", "DilutedEPSOtherGainsLosses", "TaxLossCarryforwardDilutedEPS",
                   "DilutedAccountingChange", "DilutedExtraordinary", "DilutedDiscontinuousOperations",
                   "DilutedContinuousOperations", "BasicEPS", "BasicEPSOtherGainsLosses", "TaxLossCarryforwardBasicEPS",
                   "BasicAccountingChange", "BasicExtraordinary", "BasicDiscontinuousOperations",
                   "BasicContinuousOperations", "DilutedNIAvailtoComStockholders", "AverageDilutionEarnings",
                   "NetIncomeCommonStockholders", "OtherunderPreferredStockDividend", "PreferredStockDividends",
                   "NetIncome", "MinorityInterests", "NetIncomeIncludingNoncontrollingInterests",
                   "NetIncomeFromTaxLossCarryforward", "NetIncomeExtraordinary", "NetIncomeDiscontinuousOperations",
                   "NetIncomeContinuousOperations", "EarningsFromEquityInterestNetOfTax", "TaxProvision",
                   "PretaxIncome", "OtherIncomeExpense", "OtherNonOperatingIncomeExpenses", "SpecialIncomeCharges",
                   "GainOnSaleOfPPE", "GainOnSaleOfBusiness", "OtherSpecialCharges", "WriteOff",
                   "ImpairmentOfCapitalAssets", "RestructuringAndMergernAcquisition", "SecuritiesAmortization",
                   "EarningsFromEquityInterest", "GainOnSaleOfSecurity", "NetNonOperatingInterestIncomeExpense",
                   "TotalOtherFinanceCost", "InterestExpenseNonOperating", "InterestIncomeNonOperating",
                   "OperatingIncome", "OperatingExpense", "OtherOperatingExpenses", "OtherTaxes",
                   "ProvisionForDoubtfulAccounts", "DepreciationAmortizationDepletionIncomeStatement",
                   "DepletionIncomeStatement", "DepreciationAndAmortizationInIncomeStatement", "Amortization",
                   "AmortizationOfIntangiblesIncomeStatement", "DepreciationIncomeStatement", "ResearchAndDevelopment",
                   "SellingGeneralAndAdministration", "SellingAndMarketingExpense", "GeneralAndAdministrativeExpense",
                   "OtherGandA", "InsuranceAndClaims", "RentAndLandingFees", "SalariesAndWages", "GrossProfit",
                   "CostOfRevenue", "TotalRevenue", "ExciseTaxes", "OperatingRevenue"],
    'balance-sheet': ["TreasurySharesNumber", "PreferredSharesNumber", "OrdinarySharesNumber", "ShareIssued", "NetDebt",
                      "TotalDebt", "TangibleBookValue", "InvestedCapital", "WorkingCapital", "NetTangibleAssets",
                      "CapitalLeaseObligations", "CommonStockEquity", "PreferredStockEquity", "TotalCapitalization",
                      "TotalEquityGrossMinorityInterest", "MinorityInterest", "StockholdersEquity",
                      "OtherEquityInterest", "GainsLossesNotAffectingRetainedEarnings", "OtherEquityAdjustments",
                      "FixedAssetsRevaluationReserve", "ForeignCurrencyTranslationAdjustments",
                      "MinimumPensionLiabilities", "UnrealizedGainLoss", "TreasuryStock", "RetainedEarnings",
                      "AdditionalPaidInCapital", "CapitalStock", "OtherCapitalStock", "CommonStock", "PreferredStock",
                      "TotalPartnershipCapital", "GeneralPartnershipCapital", "LimitedPartnershipCapital",
                      "TotalLiabilitiesNetMinorityInterest", "TotalNonCurrentLiabilitiesNetMinorityInterest",
                      "OtherNonCurrentLiabilities", "LiabilitiesHeldforSaleNonCurrent", "RestrictedCommonStock",
                      "PreferredSecuritiesOutsideStockEquity", "DerivativeProductLiabilities", "EmployeeBenefits",
                      "NonCurrentPensionAndOtherPostretirementBenefitPlans", "NonCurrentAccruedExpenses",
                      "DuetoRelatedPartiesNonCurrent", "TradeandOtherPayablesNonCurrent",
                      "NonCurrentDeferredLiabilities", "NonCurrentDeferredRevenue",
                      "NonCurrentDeferredTaxesLiabilities", "LongTermDebtAndCapitalLeaseObligation",
                      "LongTermCapitalLeaseObligation", "LongTermDebt", "LongTermProvisions", "CurrentLiabilities",
                      "OtherCurrentLiabilities", "CurrentDeferredLiabilities", "CurrentDeferredRevenue",
                      "CurrentDeferredTaxesLiabilities", "CurrentDebtAndCapitalLeaseObligation",
                      "CurrentCapitalLeaseObligation", "CurrentDebt", "OtherCurrentBorrowings", "LineOfCredit",
                      "CommercialPaper", "CurrentNotesPayable", "PensionandOtherPostRetirementBenefitPlansCurrent",
                      "CurrentProvisions", "PayablesAndAccruedExpenses", "CurrentAccruedExpenses", "InterestPayable",
                      "Payables", "OtherPayable", "DuetoRelatedPartiesCurrent", "DividendsPayable", "TotalTaxPayable",
                      "IncomeTaxPayable", "AccountsPayable", "TotalAssets", "TotalNonCurrentAssets",
                      "OtherNonCurrentAssets", "DefinedPensionBenefit", "NonCurrentPrepaidAssets",
                      "NonCurrentDeferredAssets", "NonCurrentDeferredTaxesAssets", "DuefromRelatedPartiesNonCurrent",
                      "NonCurrentNoteReceivables", "NonCurrentAccountsReceivable", "FinancialAssets",
                      "InvestmentsAndAdvances", "OtherInvestments", "InvestmentinFinancialAssets",
                      "HeldToMaturitySecurities", "AvailableForSaleSecurities",
                      "FinancialAssetsDesignatedasFairValueThroughProfitorLossTotal", "TradingSecurities",
                      "LongTermEquityInvestment", "InvestmentsinJointVenturesatCost",
                      "InvestmentsInOtherVenturesUnderEquityMethod", "InvestmentsinAssociatesatCost",
                      "InvestmentsinSubsidiariesatCost", "InvestmentProperties", "GoodwillAndOtherIntangibleAssets",
                      "OtherIntangibleAssets", "Goodwill", "NetPPE", "AccumulatedDepreciation", "GrossPPE", "Leases",
                      "ConstructionInProgress", "OtherProperties", "MachineryFurnitureEquipment",
                      "BuildingsAndImprovements", "LandAndImprovements", "Properties", "CurrentAssets",
                      "OtherCurrentAssets", "HedgingAssetsCurrent", "AssetsHeldForSaleCurrent", "CurrentDeferredAssets",
                      "CurrentDeferredTaxesAssets", "RestrictedCash", "PrepaidAssets", "Inventory",
                      "InventoriesAdjustmentsAllowances", "OtherInventories", "FinishedGoods", "WorkInProcess",
                      "RawMaterials", "Receivables", "ReceivablesAdjustmentsAllowances", "OtherReceivables",
                      "DuefromRelatedPartiesCurrent", "TaxesReceivable", "AccruedInterestReceivable", "NotesReceivable",
                      "LoansReceivable", "AccountsReceivable", "AllowanceForDoubtfulAccountsReceivable",
                      "GrossAccountsReceivable", "CashCashEquivalentsAndShortTermInvestments",
                      "OtherShortTermInvestments", "CashAndCashEquivalents", "CashEquivalents", "CashFinancial"],
    'cash-flow': ["ForeignSales", "DomesticSales", "AdjustedGeographySegmentData", "FreeCashFlow",
                  "RepurchaseOfCapitalStock", "RepaymentOfDebt", "IssuanceOfDebt", "IssuanceOfCapitalStock",
                  "CapitalExpenditure", "InterestPaidSupplementalData", "IncomeTaxPaidSupplementalData",
                  "EndCashPosition", "OtherCashAdjustmentOutsideChangeinCash", "BeginningCashPosition",
                  "EffectOfExchangeRateChanges", "ChangesInCash", "OtherCashAdjustmentInsideChangeinCash",
                  "CashFlowFromDiscontinuedOperation", "FinancingCashFlow", "CashFromDiscontinuedFinancingActivities",
                  "CashFlowFromContinuingFinancingActivities", "NetOtherFinancingCharges", "InterestPaidCFF",
                  "ProceedsFromStockOptionExercised", "CashDividendsPaid", "PreferredStockDividendPaid",
                  "CommonStockDividendPaid", "NetPreferredStockIssuance", "PreferredStockPayments",
                  "PreferredStockIssuance", "NetCommonStockIssuance", "CommonStockPayments", "CommonStockIssuance",
                  "NetIssuancePaymentsOfDebt", "NetShortTermDebtIssuance", "ShortTermDebtPayments",
                  "ShortTermDebtIssuance", "NetLongTermDebtIssuance", "LongTermDebtPayments", "LongTermDebtIssuance",
                  "InvestingCashFlow", "CashFromDiscontinuedInvestingActivities",
                  "CashFlowFromContinuingInvestingActivities", "NetOtherInvestingChanges", "InterestReceivedCFI",
                  "DividendsReceivedCFI", "NetInvestmentPurchaseAndSale", "SaleOfInvestment", "PurchaseOfInvestment",
                  "NetInvestmentPropertiesPurchaseAndSale", "SaleOfInvestmentProperties",
                  "PurchaseOfInvestmentProperties", "NetBusinessPurchaseAndSale", "SaleOfBusiness",
                  "PurchaseOfBusiness", "NetIntangiblesPurchaseAndSale", "SaleOfIntangibles", "PurchaseOfIntangibles",
                  "NetPPEPurchaseAndSale", "SaleOfPPE", "PurchaseOfPPE", "CapitalExpenditureReported",
                  "OperatingCashFlow", "CashFromDiscontinuedOperatingActivities",
                  "CashFlowFromContinuingOperatingActivities", "TaxesRefundPaid", "InterestReceivedCFO",
                  "InterestPaidCFO", "DividendReceivedCFO", "DividendPaidCFO", "ChangeInWorkingCapital",
                  "ChangeInOtherWorkingCapital", "ChangeInOtherCurrentLiabilities", "ChangeInOtherCurrentAssets",
                  "ChangeInPayablesAndAccruedExpense", "ChangeInAccruedExpense", "ChangeInInterestPayable",
                  "ChangeInPayable", "ChangeInDividendPayable", "ChangeInAccountPayable", "ChangeInTaxPayable",
                  "ChangeInIncomeTaxPayable", "ChangeInPrepaidAssets", "ChangeInInventory", "ChangeInReceivables",
                  "ChangesInAccountReceivables", "OtherNonCashItems", "ExcessTaxBenefitFromStockBasedCompensation",
                  "StockBasedCompensation", "UnrealizedGainLossOnInvestmentSecurities", "ProvisionandWriteOffofAssets",
                  "AssetImpairmentCharge", "AmortizationOfSecurities", "DeferredTax", "DeferredIncomeTax",
                  "DepreciationAmortizationDepletion", "Depletion", "DepreciationAndAmortization",
                  "AmortizationCashFlow", "AmortizationOfIntangibles", "Depreciation", "OperatingGainsLosses",
                  "PensionAndEmployeeBenefitExpense", "EarningsLossesFromEquityInvestments",
                  "GainLossOnInvestmentSecurities", "NetForeignCurrencyExchangeGainLoss", "GainLossOnSaleOfPPE",
                  "GainLossOnSaleOfBusiness", "NetIncomeFromContinuingOperations",
                  "CashFlowsfromusedinOperatingActivitiesDirect", "TaxesRefundPaidDirect", "InterestReceivedDirect",
                  "InterestPaidDirect", "DividendsReceivedDirect", "DividendsPaidDirect", "ClassesofCashPayments",
                  "OtherCashPaymentsfromOperatingActivities", "PaymentsonBehalfofEmployees",
                  "PaymentstoSuppliersforGoodsandServices", "ClassesofCashReceiptsfromOperatingActivities",
                  "OtherCashReceiptsfromOperatingActivities", "ReceiptsfromGovernmentGrants", "ReceiptsfromCustomers"]}
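These key lists feed yfinance's fundamentals-timeseries requests: each key is prefixed with a reporting frequency and joined into a type= query parameter. The sketch below is illustrative only (the real construction lives in the fundamentals scraper, and the request still needs the cookie & crumb handling shown later in data.py):

# Illustrative sketch, assuming yfinance.const is importable.
from yfinance import const

freq = "annual"  # or "quarterly", "trailing"
types = ",".join(freq + k for k in const.fundamentals_keys["cash-flow"])
url = f"{const._BASE_URL_}/ws/fundamentals-timeseries/v1/finance/timeseries/AAPL?type={types}"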
_PRICE_COLNAMES_ = ['Open', 'High', 'Low', 'Close', 'Adj Close']

quote_summary_valid_modules = (
    "summaryProfile",  # contains general information about the company
    "summaryDetail",  # prices + volume + market cap + etc
    "assetProfile",  # summaryProfile + company officers
    "fundProfile",
    "price",  # current prices
    "quoteType",  # quoteType
    "esgScores",  # Environmental, social, and governance (ESG) scores, sustainability and ethical performance of companies
    "incomeStatementHistory",
    "incomeStatementHistoryQuarterly",
    "balanceSheetHistory",
    "balanceSheetHistoryQuarterly",
    "cashFlowStatementHistory",
    "cashFlowStatementHistoryQuarterly",
    "defaultKeyStatistics",  # KPIs (PE, enterprise value, EPS, EBITA, and more)
    "financialData",  # Financial KPIs (revenue, gross margins, operating cash flow, free cash flow, and more)
    "calendarEvents",  # future earnings date
    "secFilings",  # SEC filings, such as 10K and 10Q reports
    "upgradeDowngradeHistory",  # upgrades and downgrades that analysts have given a company's stock
    "institutionOwnership",  # institutional ownership, holders and shares outstanding
    "fundOwnership",  # mutual fund ownership, holders and shares outstanding
    "majorDirectHolders",
    "majorHoldersBreakdown",
    "insiderTransactions",  # insider transactions, such as the number of shares bought and sold by company executives
    "insiderHolders",  # insider holders, such as the number of shares held by company executives
    "netSharePurchaseActivity",  # net share purchase activity, such as the number of shares bought and sold by company executives
    "earnings",  # earnings history
    "earningsHistory",
    "earningsTrend",  # earnings trend
    "industryTrend",
    "indexTrend",
    "sectorTrend",
    "recommendationTrend",
    "futuresChain",
)
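quote_summary_valid_modules is the whitelist a modules= query string is checked against. An illustrative request shape (Yahoo's v10 quoteSummary endpoint; in practice it is fetched through YfData.get() from the next file so the cookie & crumb are attached automatically):

# Illustrative only: compose a quoteSummary URL from valid modules.
modules = ",".join(["summaryDetail", "financialData", "quoteType"])
url = f"{_BASE_URL_}/v10/finance/quoteSummary/MSFT?modules={modules}"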
616 yfinance/data.py
@@ -1,37 +1,17 @@
import functools
from functools import lru_cache

import logging
import hashlib
from base64 import b64decode
usePycryptodome = False  # slightly faster
# usePycryptodome = True
if usePycryptodome:
    from Crypto.Cipher import AES
    from Crypto.Util.Padding import unpad
else:
    from cryptography.hazmat.primitives import padding
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

import requests as requests
import re
from bs4 import BeautifulSoup
import random
import time
import datetime

from frozendict import frozendict

try:
    import ujson as json
except ImportError:
    import json as json

from . import utils
from . import utils, cache
import threading

cache_maxsize = 64

logger = utils.get_yf_logger()


def lru_cache_freezeargs(func):
    """
@@ -54,146 +34,350 @@ def lru_cache_freezeargs(func):
    return wrapped


def _extract_extra_keys_from_stores(data):
    new_keys = [k for k in data.keys() if k not in ["context", "plugins"]]
    new_keys_values = set([data[k] for k in new_keys])

    # Maybe multiple keys have same value - keep one of each
    new_keys_uniq = []
    new_keys_uniq_values = set()
    for k in new_keys:
        v = data[k]
        if v not in new_keys_uniq_values:
            new_keys_uniq.append(k)
            new_keys_uniq_values.add(v)

    return [data[k] for k in new_keys_uniq]


def decrypt_cryptojs_aes_stores(data, keys=None):
    encrypted_stores = data['context']['dispatcher']['stores']

    password = None
    if keys is not None:
        if not isinstance(keys, list):
            raise TypeError("'keys' must be list")
        candidate_passwords = keys
    else:
        candidate_passwords = []

    if "_cs" in data and "_cr" in data:
        _cs = data["_cs"]
        _cr = data["_cr"]
        _cr = b"".join(int.to_bytes(i, length=4, byteorder="big", signed=True) for i in json.loads(_cr)["words"])
        password = hashlib.pbkdf2_hmac("sha1", _cs.encode("utf8"), _cr, 1, dklen=32).hex()

    encrypted_stores = b64decode(encrypted_stores)
    assert encrypted_stores[0:8] == b"Salted__"
    salt = encrypted_stores[8:16]
    encrypted_stores = encrypted_stores[16:]

    def _EVPKDF(password, salt, keySize=32, ivSize=16, iterations=1, hashAlgorithm="md5") -> tuple:
        """OpenSSL EVP Key Derivation Function
        Args:
            password (Union[str, bytes, bytearray]): Password to generate key from.
            salt (Union[bytes, bytearray]): Salt to use.
            keySize (int, optional): Output key length in bytes. Defaults to 32.
            ivSize (int, optional): Output Initialization Vector (IV) length in bytes. Defaults to 16.
            iterations (int, optional): Number of iterations to perform. Defaults to 1.
            hashAlgorithm (str, optional): Hash algorithm to use for the KDF. Defaults to 'md5'.
        Returns:
            key, iv: Derived key and Initialization Vector (IV) bytes.

        Taken from: https://gist.github.com/rafiibrahim8/0cd0f8c46896cafef6486cb1a50a16d3
        OpenSSL original code: https://github.com/openssl/openssl/blob/master/crypto/evp/evp_key.c#L78
        """

        assert iterations > 0, "Iterations can not be less than 1."

        if isinstance(password, str):
            password = password.encode("utf-8")

        final_length = keySize + ivSize
        key_iv = b""
        block = None

        while len(key_iv) < final_length:
            hasher = hashlib.new(hashAlgorithm)
            if block:
                hasher.update(block)
            hasher.update(password)
            hasher.update(salt)
            block = hasher.digest()
            for _ in range(1, iterations):
                block = hashlib.new(hashAlgorithm, block).digest()
            key_iv += block

        key, iv = key_iv[:keySize], key_iv[keySize:final_length]
        return key, iv

    def _decrypt(encrypted_stores, password, key, iv):
        if usePycryptodome:
            cipher = AES.new(key, AES.MODE_CBC, iv=iv)
            plaintext = cipher.decrypt(encrypted_stores)
            plaintext = unpad(plaintext, 16, style="pkcs7")
        else:
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv))
            decryptor = cipher.decryptor()
            plaintext = decryptor.update(encrypted_stores) + decryptor.finalize()
            unpadder = padding.PKCS7(128).unpadder()
            plaintext = unpadder.update(plaintext) + unpadder.finalize()
            plaintext = plaintext.decode("utf-8")
        return plaintext

    if password is not None:
        try:
            key, iv = _EVPKDF(password, salt, keySize=32, ivSize=16, iterations=1, hashAlgorithm="md5")
        except:
            raise Exception("yfinance failed to decrypt Yahoo data response")
        plaintext = _decrypt(encrypted_stores, password, key, iv)
    else:
        success = False
        for i in range(len(candidate_passwords)):
            # print(f"Trying candidate pw {i+1}/{len(candidate_passwords)}")
            password = candidate_passwords[i]
            try:
                key, iv = _EVPKDF(password, salt, keySize=32, ivSize=16, iterations=1, hashAlgorithm="md5")

                plaintext = _decrypt(encrypted_stores, password, key, iv)

                success = True
                break
            except:
                pass
        if not success:
            raise Exception("yfinance failed to decrypt Yahoo data response")

    decoded_stores = json.loads(plaintext)
    return decoded_stores

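A small self-check of the _EVPKDF contract its docstring states (with MD5 and one iteration it mirrors OpenSSL's EVP_BytesToKey, i.e. the key/IV CryptoJS derives from a passphrase plus salt). _EVPKDF is defined as a nested helper, so this sketch is written as if it were module-level:

# Sketch only: derive an AES-256 key and 16-byte IV, CryptoJS-style.
key, iv = _EVPKDF("passphrase", b"\x01\x02\x03\x04\x05\x06\x07\x08",
                  keySize=32, ivSize=16, iterations=1, hashAlgorithm="md5")
assert len(key) == 32 and len(iv) == 16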
_SCRAPE_URL_ = 'https://finance.yahoo.com/quote'


class TickerData:
class SingletonMeta(type):
    """
    Have one place to retrieve data from Yahoo API in order to ease caching and speed up operations
    Metaclass that creates a Singleton instance.
    """
    _instances = {}
    _lock = threading.Lock()

    def __call__(cls, *args, **kwargs):
        with cls._lock:
            if cls not in cls._instances:
                instance = super().__call__(*args, **kwargs)
                cls._instances[cls] = instance
            else:
                cls._instances[cls]._set_session(*args, **kwargs)
            return cls._instances[cls]

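A quick illustration of the SingletonMeta behaviour documented above, assuming the new YfData.__init__(session=None) signature: every construction returns the same shared instance, and a later call just re-binds the session on it.

import requests

d1 = YfData(session=requests.Session())
d2 = YfData(session=None)   # returns the existing instance unchanged
assert d1 is d2             # one session, one cookie, shared by all threads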
class YfData(metaclass=SingletonMeta):
    """
    Have one place to retrieve data from Yahoo API in order to ease caching and speed up operations.
    Singleton means one session one cookie shared by all threads.
    """
    user_agent_headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}

    def __init__(self, ticker: str, session=None):
        self.ticker = ticker
        self._session = session or requests
    def __init__(self, session=None):
        self._crumb = None
        self._cookie = None

        # Default to using 'basic' strategy
        self._cookie_strategy = 'basic'
        # If it fails, then fallback method is 'csrf'
        # self._cookie_strategy = 'csrf'

        self._cookie_lock = threading.Lock()

        self._set_session(session or requests.Session())

    def _set_session(self, session):
        if session is None:
            return
        with self._cookie_lock:
            self._session = session

        try:
            self._session.cache
        except AttributeError:
            # Not caching
            self._session_is_caching = False
        else:
            # Is caching. This is annoying.
            # Can't simply use a non-caching session to fetch cookie & crumb,
            # because then the caching-session won't have cookie.
            self._session_is_caching = True
            from requests_cache import DO_NOT_CACHE
            self._expire_after = DO_NOT_CACHE

    def _set_cookie_strategy(self, strategy, have_lock=False):
        if strategy == self._cookie_strategy:
            return
        if not have_lock:
            self._cookie_lock.acquire()

        try:
            if self._cookie_strategy == 'csrf':
                utils.get_yf_logger().debug(f'toggling cookie strategy {self._cookie_strategy} -> basic')
                self._session.cookies.clear()
                self._cookie_strategy = 'basic'
            else:
                utils.get_yf_logger().debug(f'toggling cookie strategy {self._cookie_strategy} -> csrf')
                self._cookie_strategy = 'csrf'
            self._cookie = None
            self._crumb = None
        except Exception:
            self._cookie_lock.release()
            raise

        if not have_lock:
            self._cookie_lock.release()

    def _save_session_cookies(self):
        try:
            cache.get_cookie_cache().store('csrf', self._session.cookies)
        except Exception:
            return False
        return True

    def _load_session_cookies(self):
        cookie_dict = cache.get_cookie_cache().lookup('csrf')
        if cookie_dict is None:
            return False
        # Periodically refresh, 24 hours seems fair.
        if cookie_dict['age'] > datetime.timedelta(days=1):
            return False
        self._session.cookies.update(cookie_dict['cookie'])
        utils.get_yf_logger().debug('loaded persistent cookie')
        return True  # needed so callers can test the load succeeded

    def _save_cookie_basic(self, cookie):
        try:
            cache.get_cookie_cache().store('basic', cookie)
        except Exception:
            return False
        return True

    def _load_cookie_basic(self):
        cookie_dict = cache.get_cookie_cache().lookup('basic')
        if cookie_dict is None:
            return None
        # Periodically refresh, 24 hours seems fair.
        if cookie_dict['age'] > datetime.timedelta(days=1):
            return None
        utils.get_yf_logger().debug('loaded persistent cookie')
        return cookie_dict['cookie']

    def _get_cookie_basic(self, proxy=None, timeout=30):
        if self._cookie is not None:
            utils.get_yf_logger().debug('reusing cookie')
            return self._cookie

        self._cookie = self._load_cookie_basic()
        if self._cookie is not None:
            return self._cookie

        # To avoid infinite recursion, do NOT use self.get()
        # - 'allow_redirects' copied from @psychoz971 solution - does it help USA?
        response = self._session.get(
            url='https://fc.yahoo.com',
            headers=self.user_agent_headers,
            proxies=proxy,
            timeout=timeout,
            allow_redirects=True)

        if not response.cookies:
            utils.get_yf_logger().debug("response.cookies = None")
            return None
        self._cookie = list(response.cookies)[0]
        if self._cookie == '':
            utils.get_yf_logger().debug("list(response.cookies)[0] = ''")
            return None
        self._save_cookie_basic(self._cookie)
        utils.get_yf_logger().debug(f"fetched basic cookie = {self._cookie}")
        return self._cookie

    def _get_crumb_basic(self, proxy=None, timeout=30):
        if self._crumb is not None:
            utils.get_yf_logger().debug('reusing crumb')
            return self._crumb

        cookie = self._get_cookie_basic()
        if cookie is None:
            return None

        # - 'allow_redirects' copied from @psychoz971 solution - does it help USA?
        get_args = {
            'url': "https://query1.finance.yahoo.com/v1/test/getcrumb",
            'headers': self.user_agent_headers,
            'cookies': {cookie.name: cookie.value},
            'proxies': proxy,
            'timeout': timeout,
            'allow_redirects': True
        }
        if self._session_is_caching:
            get_args['expire_after'] = self._expire_after
            crumb_response = self._session.get(**get_args)
        else:
            crumb_response = self._session.get(**get_args)
        self._crumb = crumb_response.text
        if self._crumb is None or '<html>' in self._crumb:
            utils.get_yf_logger().debug("Didn't receive crumb")
            return None

        utils.get_yf_logger().debug(f"crumb = '{self._crumb}'")
        return self._crumb

    @utils.log_indent_decorator
    def _get_cookie_and_crumb_basic(self, proxy, timeout):
        cookie = self._get_cookie_basic(proxy, timeout)
        crumb = self._get_crumb_basic(proxy, timeout)
        return cookie, crumb

    def _get_cookie_csrf(self, proxy, timeout):
        if self._cookie is not None:
            utils.get_yf_logger().debug('reusing cookie')
            return True

        elif self._load_session_cookies():
            utils.get_yf_logger().debug('reusing persistent cookie')
            self._cookie = True
            return True

        base_args = {
            'headers': self.user_agent_headers,
            'proxies': proxy,
            'timeout': timeout}

        get_args = {**base_args, 'url': 'https://guce.yahoo.com/consent'}
        if self._session_is_caching:
            get_args['expire_after'] = self._expire_after
            response = self._session.get(**get_args)
        else:
            response = self._session.get(**get_args)

        soup = BeautifulSoup(response.content, 'html.parser')
        csrfTokenInput = soup.find('input', attrs={'name': 'csrfToken'})
        if csrfTokenInput is None:
            utils.get_yf_logger().debug('Failed to find "csrfToken" in response')
            return False
        csrfToken = csrfTokenInput['value']
        utils.get_yf_logger().debug(f'csrfToken = {csrfToken}')
        sessionIdInput = soup.find('input', attrs={'name': 'sessionId'})
        sessionId = sessionIdInput['value']
        utils.get_yf_logger().debug(f"sessionId='{sessionId}'")

        originalDoneUrl = 'https://finance.yahoo.com/'
        namespace = 'yahoo'
        data = {
            'agree': ['agree', 'agree'],
            'consentUUID': 'default',
            'sessionId': sessionId,
            'csrfToken': csrfToken,
            'originalDoneUrl': originalDoneUrl,
            'namespace': namespace,
        }
        post_args = {**base_args,
                     'url': f'https://consent.yahoo.com/v2/collectConsent?sessionId={sessionId}',
                     'data': data}
        get_args = {**base_args,
                    'url': f'https://guce.yahoo.com/copyConsent?sessionId={sessionId}',
                    'data': data}
        if self._session_is_caching:
            post_args['expire_after'] = self._expire_after
            get_args['expire_after'] = self._expire_after
            self._session.post(**post_args)
            self._session.get(**get_args)
        else:
            self._session.post(**post_args)
            self._session.get(**get_args)
        self._cookie = True
        self._save_session_cookies()
        return True

    @utils.log_indent_decorator
    def _get_crumb_csrf(self, proxy=None, timeout=30):
        # Credit goes to @bot-unit #1729

        if self._crumb is not None:
            utils.get_yf_logger().debug('reusing crumb')
            return self._crumb

        if not self._get_cookie_csrf(proxy, timeout):
            # This cookie stored in session
            return None

        get_args = {
            'url': 'https://query2.finance.yahoo.com/v1/test/getcrumb',
            'headers': self.user_agent_headers,
            'proxies': proxy,
            'timeout': timeout}
        if self._session_is_caching:
            get_args['expire_after'] = self._expire_after
            r = self._session.get(**get_args)
        else:
            r = self._session.get(**get_args)
        self._crumb = r.text

        if self._crumb is None or '<html>' in self._crumb or self._crumb == '':
            utils.get_yf_logger().debug("Didn't receive crumb")
            return None

        utils.get_yf_logger().debug(f"crumb = '{self._crumb}'")
        return self._crumb

    @utils.log_indent_decorator
    def _get_cookie_and_crumb(self, proxy=None, timeout=30):
        cookie, crumb, strategy = None, None, None

        utils.get_yf_logger().debug(f"cookie_mode = '{self._cookie_strategy}'")

        with self._cookie_lock:
            if self._cookie_strategy == 'csrf':
                crumb = self._get_crumb_csrf()
                if crumb is None:
                    # Fail
                    self._set_cookie_strategy('basic', have_lock=True)
                    cookie, crumb = self._get_cookie_and_crumb_basic(proxy, timeout)
            else:
                # Fallback strategy
                cookie, crumb = self._get_cookie_and_crumb_basic(proxy, timeout)
                if cookie is None or crumb is None:
                    # Fail
                    self._set_cookie_strategy('csrf', have_lock=True)
                    crumb = self._get_crumb_csrf()
            strategy = self._cookie_strategy
        return cookie, crumb, strategy

@utils.log_indent_decorator
|
||||
def get(self, url, user_agent_headers=None, params=None, proxy=None, timeout=30):
|
||||
# Important: treat input arguments as immutable.
|
||||
|
||||
if len(url) > 200:
|
||||
utils.get_yf_logger().debug(f'url={url[:200]}...')
|
||||
else:
|
||||
utils.get_yf_logger().debug(f'url={url}')
|
||||
utils.get_yf_logger().debug(f'params={params}')
|
||||
proxy = self._get_proxy(proxy)
|
||||
|
||||
if params is None:
|
||||
params = {}
|
||||
if 'crumb' in params:
|
||||
raise Exception("Don't manually add 'crumb' to params dict, let data.py handle it")
|
||||
|
||||
cookie, crumb, strategy = self._get_cookie_and_crumb()
|
||||
if crumb is not None:
|
||||
crumbs = {'crumb': crumb}
|
||||
else:
|
||||
crumbs = {}
|
||||
if strategy == 'basic' and cookie is not None:
|
||||
# Basic cookie strategy adds cookie to GET parameters
|
||||
cookies = {cookie.name: cookie.value}
|
||||
else:
|
||||
cookies = None
|
||||
|
||||
request_args = {
|
||||
'url': url,
|
||||
'params': {**params, **crumbs},
|
||||
'cookies': cookies,
|
||||
'proxies': proxy,
|
||||
'timeout': timeout,
|
||||
'headers': user_agent_headers or self.user_agent_headers
|
||||
}
|
||||
response = self._session.get(**request_args)
|
||||
utils.get_yf_logger().debug(f'response code={response.status_code}')
|
||||
if response.status_code >= 400:
|
||||
# Retry with other cookie strategy
|
||||
if strategy == 'basic':
|
||||
self._set_cookie_strategy('csrf')
|
||||
else:
|
||||
self._set_cookie_strategy('basic')
|
||||
cookie, crumb, strategy = self._get_cookie_and_crumb(proxy, timeout)
|
||||
request_args['params']['crumb'] = crumb
|
||||
if strategy == 'basic':
|
||||
request_args['cookies'] = {cookie.name: cookie.value}
|
||||
response = self._session.get(**request_args)
|
||||
utils.get_yf_logger().debug(f'response code={response.status_code}')
|
||||
|
||||
return response
|
||||
|
||||
@lru_cache_freezeargs
|
||||
@@ -204,133 +388,13 @@ class TickerData:

    def _get_proxy(self, proxy):
        # setup proxy in requests format
        if proxy is not None:
            if isinstance(proxy, dict) and "https" in proxy:
            if isinstance(proxy, (dict, frozendict)) and "https" in proxy:
                proxy = proxy["https"]
            proxy = {"https": proxy}
        return proxy

    def get_raw_json(self, url, user_agent_headers=None, params=None, proxy=None, timeout=30):
        utils.get_yf_logger().debug(f'get_raw_json(): {url}')
        response = self.get(url, user_agent_headers=user_agent_headers, params=params, proxy=proxy, timeout=timeout)
        response.raise_for_status()
        return response.json()

    def _get_decryption_keys_from_yahoo_js(self, soup):
        result = None

        key_count = 4
        re_script = soup.find("script", string=re.compile("root.App.main")).text
        re_data = json.loads(re.search("root.App.main\s+=\s+(\{.*\})", re_script).group(1))
        re_data.pop("context", None)
        key_list = list(re_data.keys())
        if re_data.get("plugins"):  # 1) attempt to get last 4 keys after plugins
            ind = key_list.index("plugins")
            if len(key_list) > ind+1:
                sub_keys = key_list[ind+1:]
                if len(sub_keys) == key_count:
                    re_obj = {}
                    missing_val = False
                    for k in sub_keys:
                        if not re_data.get(k):
                            missing_val = True
                            break
                        re_obj.update({k: re_data.get(k)})
                    if not missing_val:
                        result = re_obj

        if not result is None:
            return [''.join(result.values())]

        re_keys = []  # 2) attempt scan main.js file approach to get keys
        prefix = "https://s.yimg.com/uc/finance/dd-site/js/main."
        tags = [tag['src'] for tag in soup.find_all('script') if prefix in tag.get('src', '')]
        for t in tags:
            response_js = self.cache_get(t)
            #
            if response_js.status_code != 200:
                time.sleep(random.randrange(10, 20))
                response_js.close()
            else:
                r_data = response_js.content.decode("utf8")
                re_list = [
                    x.group() for x in re.finditer(r"context.dispatcher.stores=JSON.parse((?:.*?\r?\n?)*)toString", r_data)
                ]
                for rl in re_list:
                    re_sublist = [x.group() for x in re.finditer(r"t\[\"((?:.*?\r?\n?)*)\"\]", rl)]
                    if len(re_sublist) == key_count:
                        re_keys = [sl.replace('t["', '').replace('"]', '') for sl in re_sublist]
                        break
                response_js.close()
            if len(re_keys) == key_count:
                break
        if len(re_keys) > 0:
            re_obj = {}
            missing_val = False
            for k in re_keys:
                if not re_data.get(k):
                    missing_val = True
                    break
                re_obj.update({k: re_data.get(k)})
            if not missing_val:
                return [''.join(re_obj.values())]

        return []

    @lru_cache_freezeargs
    @lru_cache(maxsize=cache_maxsize)
    def get_json_data_stores(self, sub_page: str = None, proxy=None) -> dict:
        '''
        get_json_data_stores returns a python dictionary of the data stores in yahoo finance web page.
        '''
        if sub_page:
            ticker_url = "{}/{}/{}".format(_SCRAPE_URL_, self.ticker, sub_page)
        else:
            ticker_url = "{}/{}".format(_SCRAPE_URL_, self.ticker)

        response = self.get(url=ticker_url, proxy=proxy)
        html = response.text

        # The actual json-data for stores is in a javascript assignment in the webpage
        try:
            json_str = html.split('root.App.main =')[1].split(
                '(this)')[0].split(';\n}')[0].strip()
        except IndexError:
            # Fetch failed, probably because Yahoo spam triggered
            return {}

        data = json.loads(json_str)

        # Gather decryption keys:
        soup = BeautifulSoup(response.content, "html.parser")
        keys = self._get_decryption_keys_from_yahoo_js(soup)
        if len(keys) == 0:
            msg = "No decryption keys could be extracted from JS file."
            if "requests_cache" in str(type(response)):
                msg += " Try flushing your 'requests_cache', probably parsing old JS."
            logger.warning("%s Falling back to backup decrypt methods.", msg)
        if len(keys) == 0:
            keys = []
            try:
                extra_keys = _extract_extra_keys_from_stores(data)
                keys = [''.join(extra_keys[-4:])]
            except:
                pass
            #
            keys_url = "https://github.com/ranaroussi/yfinance/raw/main/yfinance/scrapers/yahoo-keys.txt"
            response_gh = self.cache_get(keys_url)
            keys += response_gh.text.splitlines()

        # Decrypt!
        stores = decrypt_cryptojs_aes_stores(data, keys)
        if stores is None:
            # Maybe Yahoo returned old format, not encrypted
            if "context" in data and "dispatcher" in data["context"]:
                stores = data['context']['dispatcher']['stores']
        if stores is None:
            raise Exception(f"{self.ticker}: Failed to extract data stores from web request")

        # return data
        new_data = json.dumps(stores).replace('{}', 'null')
        new_data = re.sub(
            r'{[\'|\"]raw[\'|\"]:(.*?),(.*?)}', r'\1', new_data)

        return json.loads(new_data)
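Editor's note: get_raw_json() above is a thin wrapper — authenticated GET, raise_for_status(), then .json(). A hedged usage sketch from inside a scraper that holds the data object as self._data; the symbol and module are illustrative, not taken from this diff:

    quote_url = "https://query2.finance.yahoo.com/v10/finance/quoteSummary/MSFT"  # illustrative symbol
    data = self._data.get_raw_json(quote_url, params={"modules": "price"})
    price_store = data["quoteSummary"]["result"][0]["price"]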
@@ -1,6 +1,50 @@
class YFinanceException(Exception):
class YFException(Exception):
    def __init__(self, description=""):
        super().__init__(description)


class YFDataException(YFException):
    pass


class YFinanceDataException(YFinanceException):
    pass
class YFChartError(YFException):
    def __init__(self, ticker, description):
        self.ticker = ticker
        super().__init__(f"{self.ticker}: {description}")


class YFNotImplementedError(NotImplementedError):
    def __init__(self, method_name):
        super().__init__(f"Have not implemented fetching '{method_name}' from Yahoo API")


class YFTickerMissingError(YFException):
    def __init__(self, ticker, rationale):
        super().__init__(f"${ticker}: possibly delisted; {rationale}")
        self.rationale = rationale
        self.ticker = ticker


class YFTzMissingError(YFTickerMissingError):
    def __init__(self, ticker):
        super().__init__(ticker, "No timezone found")


class YFPricesMissingError(YFTickerMissingError):
    def __init__(self, ticker, debug_info):
        self.debug_info = debug_info
        super().__init__(ticker, f"No price data found {debug_info}")


class YFEarningsDateMissing(YFTickerMissingError):
    # note that this does not get raised. Added in case of raising it in the future
    def __init__(self, ticker):
        super().__init__(ticker, "No earnings dates found")


class YFInvalidPeriodError(YFException):
    def __init__(self, ticker, invalid_period, valid_ranges):
        self.ticker = ticker
        self.invalid_period = invalid_period
        self.valid_ranges = valid_ranges
        super().__init__(f"{self.ticker}: Period '{invalid_period}' is invalid, must be one of {valid_ranges}")
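Editor's note: the new hierarchy above roots everything at YFException, so callers can catch narrowly or broadly. A minimal sketch, assuming these classes are importable from yfinance.exceptions and that the fetch path raises them (fetch_prices is a hypothetical helper):

    from yfinance.exceptions import YFException, YFTzMissingError, YFPricesMissingError

    try:
        prices = fetch_prices("XYZ")  # hypothetical fetch helper
    except YFTzMissingError:
        print("no timezone found - symbol possibly delisted")
    except YFPricesMissingError as e:
        print(f"no price data: {e.debug_info}")
    except YFException as e:
        print(f"other yfinance error: {e}")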
@@ -22,17 +22,21 @@
from __future__ import print_function

import logging
import traceback
import time as _time
import traceback

import multitasking as _multitasking
import pandas as _pd

from . import Ticker, utils
from .data import YfData
from . import shared


@utils.log_indent_decorator
def download(tickers, start=None, end=None, actions=False, threads=True, ignore_tz=None,
             group_by='column', auto_adjust=False, back_adjust=False, repair=False, keepna=False,
             progress=True, period="max", show_errors=None, interval="1d", prepost=False,
             progress=True, period="max", interval="1d", prepost=False,
             proxy=None, rounding=False, timeout=10, session=None):
    """Download yahoo tickers
    :Parameters:
@@ -46,7 +50,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
            Intraday data cannot extend last 60 days
        start: str
            Download start date string (YYYY-MM-DD) or _datetime, inclusive.
            Default is 1900-01-01
            Default is 99 years ago
            E.g. for start="2020-01-01", the first data point will be on "2020-01-01"
        end: str
            Download end date string (YYYY-MM-DD) or _datetime, exclusive.
@@ -76,23 +80,24 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
            Optional. Proxy server URL scheme. Default is None
        rounding: bool
            Optional. Round values to 2 decimal places?
        show_errors: bool
            Optional. Doesn't print errors if False
            DEPRECATED, will be removed in future version
        timeout: None or float
            If not None stops waiting for a response after given number of
            seconds. (Can also be a fraction of a second e.g. 0.01)
        session: None or Session
            Optional. Pass your own session object to be used for all requests
    """
    logger = utils.get_yf_logger()

    if show_errors is not None:
        if show_errors:
            utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead: logging.getLogger('yfinance').setLevel(logging.ERROR)")
            logging.getLogger('yfinance').setLevel(logging.ERROR)
        else:
            utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead to suppress error messages: logging.getLogger('yfinance').setLevel(logging.CRITICAL)")
            logging.getLogger('yfinance').setLevel(logging.CRITICAL)
    if logger.isEnabledFor(logging.DEBUG):
        if threads:
            # With DEBUG, each thread generates a lot of log messages.
            # And with multi-threading, these messages will be interleaved, bad!
            # So disable multi-threading to make log readable.
            logger.debug('Disabling multithreading because DEBUG logging enabled')
            threads = False
        if progress:
            # Disable progress bar, interferes with display of log messages
            progress = False

    if ignore_tz is None:
        # Set default value depending on interval
@@ -128,6 +133,9 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
    shared._ERRORS = {}
    shared._TRACEBACKS = {}

    # Ensure data initialised with session.
    YfData(session=session)

    # download using threads
    if threads:
        if threads is True:
@@ -139,7 +147,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
                                   actions=actions, auto_adjust=auto_adjust,
                                   back_adjust=back_adjust, repair=repair, keepna=keepna,
                                   progress=(progress and i > 0), proxy=proxy,
                                   rounding=rounding, timeout=timeout, session=session)
                                   rounding=rounding, timeout=timeout)
        while len(shared._DFS) < len(tickers):
            _time.sleep(0.01)
    # download synchronously
@@ -150,10 +158,10 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
                                 actions=actions, auto_adjust=auto_adjust,
                                 back_adjust=back_adjust, repair=repair, keepna=keepna,
                                 proxy=proxy,
                                 rounding=rounding, timeout=timeout, session=session)
                                 rounding=rounding, timeout=timeout)
            if progress:
                shared._PROGRESS_BAR.animate()


    if progress:
        shared._PROGRESS_BAR.completed()

@@ -167,7 +175,8 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
        errors = {}
        for ticker in shared._ERRORS:
            err = shared._ERRORS[ticker]
            if not err in errors:
            err = err.replace(f'{ticker}', '%ticker%')
            if err not in errors:
                errors[err] = [ticker]
            else:
                errors[err].append(ticker)
@@ -178,7 +187,8 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
        tbs = {}
        for ticker in shared._TRACEBACKS:
            tb = shared._TRACEBACKS[ticker]
            if not tb in tbs:
            tb = tb.replace(f'{ticker}', '%ticker%')
            if tb not in tbs:
                tbs[tb] = [ticker]
            else:
                tbs[tb].append(ticker)
@@ -192,16 +202,16 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_

    if len(tickers) == 1:
        ticker = tickers[0]
        return shared._DFS[shared._ISINS.get(ticker, ticker)]
        return shared._DFS[ticker]

    try:
        data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
                          keys=shared._DFS.keys())
                          keys=shared._DFS.keys(), names=['Ticker', 'Price'])
    except Exception:
        _realign_dfs()
        data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
                          keys=shared._DFS.keys())

                          keys=shared._DFS.keys(), names=['Ticker', 'Price'])
    data.index = _pd.to_datetime(data.index)
    # switch names back to isins if applicable
    data.rename(columns=shared._ISINS, inplace=True)

@@ -240,10 +250,10 @@ def _download_one_threaded(ticker, start=None, end=None,
                           auto_adjust=False, back_adjust=False, repair=False,
                           actions=False, progress=True, period="max",
                           interval="1d", prepost=False, proxy=None,
                           keepna=False, rounding=False, timeout=10, session=None):
    data = _download_one(ticker, start, end, auto_adjust, back_adjust, repair,
                           keepna=False, rounding=False, timeout=10):
    _download_one(ticker, start, end, auto_adjust, back_adjust, repair,
                  actions, period, interval, prepost, proxy, rounding,
                  keepna, timeout, session)
                  keepna, timeout)
    if progress:
        shared._PROGRESS_BAR.animate()

@@ -252,10 +262,10 @@ def _download_one(ticker, start=None, end=None,
                  auto_adjust=False, back_adjust=False, repair=False,
                  actions=False, period="max", interval="1d",
                  prepost=False, proxy=None, rounding=False,
                  keepna=False, timeout=10, session=None):
                  keepna=False, timeout=10):
    data = None
    try:
        data = Ticker(ticker, session=session).history(
        data = Ticker(ticker).history(
            period=period, interval=interval,
            start=start, end=end, prepost=prepost,
            actions=actions, auto_adjust=auto_adjust,
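Editor's note: per the docstring and deprecation branch above, verbosity now goes through the 'yfinance' logger instead of show_errors. A hedged usage sketch; the tickers are illustrative, and note the concat in the hunk above names the column levels ['Ticker', 'Price'] at concat time (the final level order still depends on group_by):

    import logging
    import yfinance as yf

    # replaces the deprecated show_errors=False (per the branch above, CRITICAL suppresses errors)
    logging.getLogger('yfinance').setLevel(logging.CRITICAL)

    data = yf.download(["MSFT", "AAPL"], period="1y")
    print(data.columns.names)  # the MultiIndex levels introduced by the concat above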
@@ -1,13 +1,14 @@
import pandas as pd

from yfinance import utils
from yfinance.data import TickerData
from yfinance.data import YfData
from yfinance.exceptions import YFNotImplementedError


class Analysis:

    def __init__(self, data: TickerData, proxy=None):
    def __init__(self, data: YfData, symbol: str, proxy=None):
        self._data = data
        self._symbol = symbol
        self.proxy = proxy

        self._earnings_trend = None
@@ -20,99 +21,29 @@ class Analysis:
    @property
    def earnings_trend(self) -> pd.DataFrame:
        if self._earnings_trend is None:
            self._scrape(self.proxy)
            raise YFNotImplementedError('earnings_trend')
        return self._earnings_trend

    @property
    def analyst_trend_details(self) -> pd.DataFrame:
        if self._analyst_trend_details is None:
            self._scrape(self.proxy)
            raise YFNotImplementedError('analyst_trend_details')
        return self._analyst_trend_details

    @property
    def analyst_price_target(self) -> pd.DataFrame:
        if self._analyst_price_target is None:
            self._scrape(self.proxy)
            raise YFNotImplementedError('analyst_price_target')
        return self._analyst_price_target

    @property
    def rev_est(self) -> pd.DataFrame:
        if self._rev_est is None:
            self._scrape(self.proxy)
            raise YFNotImplementedError('rev_est')
        return self._rev_est

    @property
    def eps_est(self) -> pd.DataFrame:
        if self._eps_est is None:
            self._scrape(self.proxy)
            raise YFNotImplementedError('eps_est')
        return self._eps_est

    def _scrape(self, proxy):
        if self._already_scraped:
            return
        self._already_scraped = True

        # Analysis Data/Analyst Forecasts
        analysis_data = self._data.get_json_data_stores("analysis", proxy=proxy)
        try:
            analysis_data = analysis_data['QuoteSummaryStore']
        except KeyError as e:
            err_msg = "No analysis data found, symbol may be delisted"
            logger.error('%s: %s', self._data.ticker, err_msg)
            return

        if isinstance(analysis_data.get('earningsTrend'), dict):
            try:
                analysis = pd.DataFrame(analysis_data['earningsTrend']['trend'])
                analysis['endDate'] = pd.to_datetime(analysis['endDate'])
                analysis.set_index('period', inplace=True)
                analysis.index = analysis.index.str.upper()
                analysis.index.name = 'Period'
                analysis.columns = utils.camel2title(analysis.columns)

                dict_cols = []

                for idx, row in analysis.iterrows():
                    for colname, colval in row.items():
                        if isinstance(colval, dict):
                            dict_cols.append(colname)
                            for k, v in colval.items():
                                new_colname = colname + ' ' + \
                                    utils.camel2title([k])[0]
                                analysis.loc[idx, new_colname] = v

                self._earnings_trend = analysis[[
                    c for c in analysis.columns if c not in dict_cols]]
            except Exception:
                pass

        try:
            self._analyst_trend_details = pd.DataFrame(analysis_data['recommendationTrend']['trend'])
        except Exception as e:
            self._analyst_trend_details = None
        try:
            self._analyst_price_target = pd.DataFrame(analysis_data['financialData'], index=[0])[
                ['targetLowPrice', 'currentPrice', 'targetMeanPrice', 'targetHighPrice', 'numberOfAnalystOpinions']].T
        except Exception as e:
            self._analyst_price_target = None
        earnings_estimate = []
        revenue_estimate = []
        if self._analyst_trend_details is not None:
            for key in analysis_data['earningsTrend']['trend']:
                try:
                    earnings_dict = key['earningsEstimate']
                    earnings_dict['period'] = key['period']
                    earnings_dict['endDate'] = key['endDate']
                    earnings_estimate.append(earnings_dict)

                    revenue_dict = key['revenueEstimate']
                    revenue_dict['period'] = key['period']
                    revenue_dict['endDate'] = key['endDate']
                    revenue_estimate.append(revenue_dict)
                except Exception as e:
                    pass
            self._rev_est = pd.DataFrame(revenue_estimate)
            self._eps_est = pd.DataFrame(earnings_estimate)
        else:
            self._rev_est = pd.DataFrame()
            self._eps_est = pd.DataFrame()

@@ -1,20 +1,18 @@
import datetime
import logging
import json

import pandas as pd
import numpy as np

from yfinance import utils
from yfinance.data import TickerData
from yfinance.exceptions import YFinanceDataException, YFinanceException
from yfinance import utils, const
from yfinance.data import YfData
from yfinance.exceptions import YFException, YFNotImplementedError

logger = utils.get_yf_logger()

class Fundamentals:

    def __init__(self, data: TickerData, proxy=None):
    def __init__(self, data: YfData, symbol: str, proxy=None):
        self._data = data
        self._symbol = symbol
        self.proxy = proxy

        self._earnings = None
@@ -24,7 +22,7 @@ class Fundamentals:
        self._financials_data = None
        self._fin_data_quote = None
        self._basics_already_scraped = False
        self._financials = Financials(data)
        self._financials = Financials(data, symbol)

    @property
    def financials(self) -> "Financials":
@@ -33,120 +31,63 @@ class Fundamentals:
    @property
    def earnings(self) -> dict:
        if self._earnings is None:
            self._scrape_earnings(self.proxy)
            raise YFNotImplementedError('earnings')
        return self._earnings

    @property
    def shares(self) -> pd.DataFrame:
        if self._shares is None:
            self._scrape_shares(self.proxy)
            raise YFNotImplementedError('shares')
        return self._shares

    def _scrape_basics(self, proxy):
        if self._basics_already_scraped:
            return
        self._basics_already_scraped = True

        self._financials_data = self._data.get_json_data_stores('financials', proxy)
        try:
            self._fin_data_quote = self._financials_data['QuoteSummaryStore']
        except KeyError:
            err_msg = "No financials data found, symbol may be delisted"
            logger.error('%s: %s', self._data.ticker, err_msg)
            return None

    def _scrape_earnings(self, proxy):
        self._scrape_basics(proxy)
        # earnings
        self._earnings = {"yearly": pd.DataFrame(), "quarterly": pd.DataFrame()}
        if self._fin_data_quote is None:
            return
        if isinstance(self._fin_data_quote.get('earnings'), dict):
            try:
                earnings = self._fin_data_quote['earnings']['financialsChart']
                earnings['financialCurrency'] = self._fin_data_quote['earnings'].get('financialCurrency', 'USD')
                self._earnings['financialCurrency'] = earnings['financialCurrency']
                df = pd.DataFrame(earnings['yearly']).set_index('date')
                df.columns = utils.camel2title(df.columns)
                df.index.name = 'Year'
                self._earnings['yearly'] = df

                df = pd.DataFrame(earnings['quarterly']).set_index('date')
                df.columns = utils.camel2title(df.columns)
                df.index.name = 'Quarter'
                self._earnings['quarterly'] = df
            except Exception:
                pass

    def _scrape_shares(self, proxy):
        self._scrape_basics(proxy)
        # shares outstanding
        try:
            # keep only years with non None data
            available_shares = [shares_data for shares_data in
                                self._financials_data['QuoteTimeSeriesStore']['timeSeries']['annualBasicAverageShares']
                                if
                                shares_data]
            shares = pd.DataFrame(available_shares)
            shares['Year'] = shares['asOfDate'].agg(lambda x: int(x[:4]))
            shares.set_index('Year', inplace=True)
            shares.drop(columns=['dataId', 'asOfDate',
                                 'periodType', 'currencyCode'], inplace=True)
            shares.rename(
                columns={'reportedValue': "BasicShares"}, inplace=True)
            self._shares = shares
        except Exception:
            pass


class Financials:
    def __init__(self, data: TickerData):
    def __init__(self, data: YfData, symbol: str):
        self._data = data
        self._symbol = symbol
        self._income_time_series = {}
        self._balance_sheet_time_series = {}
        self._cash_flow_time_series = {}
        self._income_scraped = {}
        self._balance_sheet_scraped = {}
        self._cash_flow_scraped = {}

    def get_income_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
        res = self._income_time_series
        if freq not in res:
            res[freq] = self._fetch_time_series("income", freq, proxy=None)
            res[freq] = self._fetch_time_series("income", freq, proxy)
        return res[freq]

    def get_balance_sheet_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
        res = self._balance_sheet_time_series
        if freq not in res:
            res[freq] = self._fetch_time_series("balance-sheet", freq, proxy=None)
            res[freq] = self._fetch_time_series("balance-sheet", freq, proxy)
        return res[freq]

    def get_cash_flow_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
        res = self._cash_flow_time_series
        if freq not in res:
            res[freq] = self._fetch_time_series("cash-flow", freq, proxy=None)
            res[freq] = self._fetch_time_series("cash-flow", freq, proxy)
        return res[freq]

    @utils.log_indent_decorator
    def _fetch_time_series(self, name, timescale, proxy=None):
        # Fetching time series preferred over scraping 'QuoteSummaryStore',
        # because it matches what Yahoo shows. But for some tickers returns nothing,
        # despite 'QuoteSummaryStore' containing valid data.

        allowed_names = ["income", "balance-sheet", "cash-flow"]
        allowed_timescales = ["yearly", "quarterly"]

        if name not in allowed_names:
            raise ValueError("Illegal argument: name must be one of: {}".format(allowed_names))
            raise ValueError(f"Illegal argument: name must be one of: {allowed_names}")
        if timescale not in allowed_timescales:
            raise ValueError("Illegal argument: timescale must be one of: {}".format(allowed_names))
            raise ValueError(f"Illegal argument: timescale must be one of: {allowed_timescales}")

        try:
            statement = self._create_financials_table(name, timescale, proxy)

            if statement is not None:
                return statement
        except YFinanceException as e:
            logger.error("%s: Failed to create %s financials table for reason: %r", self._data.ticker, name, e)
        except YFException as e:
            utils.get_yf_logger().error(f"{self._symbol}: Failed to create {name} financials table for reason: {e}")
        return pd.DataFrame()

    def _create_financials_table(self, name, timescale, proxy):
@@ -154,51 +95,24 @@ class Financials:
            # Yahoo stores the 'income' table internally under 'financials' key
            name = "financials"

        keys = self._get_datastore_keys(name, proxy)
        keys = const.fundamentals_keys[name]

        try:
            return self.get_financials_time_series(timescale, keys, proxy)
        except Exception as e:
        except Exception:
            pass

    def _get_datastore_keys(self, sub_page, proxy) -> list:
        data_stores = self._data.get_json_data_stores(sub_page, proxy)

        # Step 1: get the keys:
        def _finditem1(key, obj):
            values = []
            if isinstance(obj, dict):
                if key in obj.keys():
                    values.append(obj[key])
                for k, v in obj.items():
                    values += _finditem1(key, v)
            elif isinstance(obj, list):
                for v in obj:
                    values += _finditem1(key, v)
            return values

        try:
            keys = _finditem1("key", data_stores['FinancialTemplateStore'])
        except KeyError as e:
            raise YFinanceDataException("Parsing FinancialTemplateStore failed, reason: {}".format(repr(e)))

        if not keys:
            raise YFinanceDataException("No keys in FinancialTemplateStore")
        return keys

    def get_financials_time_series(self, timescale, keys: list, proxy=None) -> pd.DataFrame:
        timescale_translation = {"yearly": "annual", "quarterly": "quarterly"}
        timescale = timescale_translation[timescale]

        # Step 2: construct url:
        ts_url_base = \
            "https://query2.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{0}?symbol={0}" \
            .format(self._data.ticker)

        ts_url_base = f"https://query2.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{self._symbol}?symbol={self._symbol}"
        url = ts_url_base + "&type=" + ",".join([timescale + k for k in keys])
        # Yahoo returns maximum 4 years or 5 quarters, regardless of start_dt:
        start_dt = datetime.datetime(2016, 12, 31)
        end = pd.Timestamp.utcnow().ceil("D")
        url += "&period1={}&period2={}".format(int(start_dt.timestamp()), int(end.timestamp()))
        url += f"&period1={int(start_dt.timestamp())}&period2={int(end.timestamp())}"

        # Step 3: fetch and reshape data
        json_str = self._data.cache_get(url=url, proxy=proxy).text
@@ -233,89 +147,3 @@ class Financials:
        df = df[sorted(df.columns, reverse=True)]

        return df

    def get_income_scrape(self, freq="yearly", proxy=None) -> pd.DataFrame:
        res = self._income_scraped
        if freq not in res:
            res[freq] = self._scrape("income", freq, proxy=None)
        return res[freq]

    def get_balance_sheet_scrape(self, freq="yearly", proxy=None) -> pd.DataFrame:
        res = self._balance_sheet_scraped
        if freq not in res:
            res[freq] = self._scrape("balance-sheet", freq, proxy=None)
        return res[freq]

    def get_cash_flow_scrape(self, freq="yearly", proxy=None) -> pd.DataFrame:
        res = self._cash_flow_scraped
        if freq not in res:
            res[freq] = self._scrape("cash-flow", freq, proxy=None)
        return res[freq]

    def _scrape(self, name, timescale, proxy=None):
        # Backup in case _fetch_time_series() fails to return data

        allowed_names = ["income", "balance-sheet", "cash-flow"]
        allowed_timescales = ["yearly", "quarterly"]

        if name not in allowed_names:
            raise ValueError("Illegal argument: name must be one of: {}".format(allowed_names))
        if timescale not in allowed_timescales:
            raise ValueError("Illegal argument: timescale must be one of: {}".format(allowed_names))

        try:
            statement = self._create_financials_table_old(name, timescale, proxy)

            if statement is not None:
                return statement
        except YFinanceException as e:
            logger.error("%s: Failed to create financials table for %s reason: %r", self._data.ticker, name, e)
        return pd.DataFrame()

    def _create_financials_table_old(self, name, timescale, proxy):
        data_stores = self._data.get_json_data_stores("financials", proxy)

        # Fetch raw data
        if not "QuoteSummaryStore" in data_stores:
            raise YFinanceDataException(f"Yahoo not returning legacy financials data")
        data = data_stores["QuoteSummaryStore"]

        if name == "cash-flow":
            key1 = "cashflowStatement"
            key2 = "cashflowStatements"
        elif name == "balance-sheet":
            key1 = "balanceSheet"
            key2 = "balanceSheetStatements"
        else:
            key1 = "incomeStatement"
            key2 = "incomeStatementHistory"
        key1 += "History"
        if timescale == "quarterly":
            key1 += "Quarterly"
        if key1 not in data or data[key1] is None or key2 not in data[key1]:
            raise YFinanceDataException(f"Yahoo not returning legacy {name} financials data")
        data = data[key1][key2]

        # Tabulate
        df = pd.DataFrame(data)
        if len(df) == 0:
            raise YFinanceDataException(f"Yahoo not returning legacy {name} financials data")
        df = df.drop(columns=['maxAge'])
        for col in df.columns:
            df[col] = df[col].replace('-', np.nan)
        df.set_index('endDate', inplace=True)
        try:
            df.index = pd.to_datetime(df.index, unit='s')
        except ValueError:
            df.index = pd.to_datetime(df.index)
        df = df.T
        df.columns.name = ''
        df.index.name = 'Breakdown'
        # rename incorrect yahoo key
        df.rename(index={'treasuryStock': 'gainsLossesNotAffectingRetainedEarnings'}, inplace=True)

        # Upper-case first letter, leave rest unchanged:
        s0 = df.index[0]
        df.index = [s[0].upper()+s[1:] for s in df.index]

        return df
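Editor's note: a worked example of the fundamentals-timeseries URL that get_financials_time_series() builds above. The symbol and keys are illustrative; in the real code the keys come from const.fundamentals_keys.

    import datetime
    import pandas as pd

    symbol = "MSFT"                       # illustrative
    keys = ["TotalRevenue", "NetIncome"]  # illustrative subset of the real key list
    timescale = "annual"                  # 'yearly' translates to 'annual'

    url = f"https://query2.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{symbol}?symbol={symbol}"
    url += "&type=" + ",".join(timescale + k for k in keys)
    start = int(datetime.datetime(2016, 12, 31).timestamp())
    end = int(pd.Timestamp.utcnow().ceil("D").timestamp())
    url += f"&period1={start}&period2={end}"
    # -> .../timeseries/MSFT?symbol=MSFT&type=annualTotalRevenue,annualNetIncome&period1=...&period2=...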
yfinance/scrapers/history.py (new file, 1676 lines): diff suppressed because it is too large.
@@ -1,66 +1,244 @@
|
||||
import pandas as pd
|
||||
# from io import StringIO
|
||||
|
||||
import pandas as pd
|
||||
import requests
|
||||
|
||||
from yfinance import utils
|
||||
from yfinance.data import YfData
|
||||
from yfinance.const import _BASE_URL_
|
||||
from yfinance.exceptions import YFDataException
|
||||
|
||||
_QUOTE_SUMMARY_URL_ = f"{_BASE_URL_}/v10/finance/quoteSummary/"
|
||||
|
||||
from yfinance.data import TickerData
|
||||
|
||||
class Holders:
|
||||
_SCRAPE_URL_ = 'https://finance.yahoo.com/quote'
|
||||
|
||||
def __init__(self, data: TickerData, proxy=None):
|
||||
def __init__(self, data: YfData, symbol: str, proxy=None):
|
||||
self._data = data
|
||||
self._symbol = symbol
|
||||
self.proxy = proxy
|
||||
|
||||
self._major = None
|
||||
self._major_direct_holders = None
|
||||
self._institutional = None
|
||||
self._mutualfund = None
|
||||
|
||||
self._insider_transactions = None
|
||||
self._insider_purchases = None
|
||||
self._insider_roster = None
|
||||
|
||||
@property
|
||||
def major(self) -> pd.DataFrame:
|
||||
if self._major is None:
|
||||
self._scrape(self.proxy)
|
||||
# self._scrape(self.proxy)
|
||||
self._fetch_and_parse()
|
||||
return self._major
|
||||
|
||||
@property
|
||||
def institutional(self) -> pd.DataFrame:
|
||||
if self._institutional is None:
|
||||
self._scrape(self.proxy)
|
||||
# self._scrape(self.proxy)
|
||||
self._fetch_and_parse()
|
||||
return self._institutional
|
||||
|
||||
@property
|
||||
def mutualfund(self) -> pd.DataFrame:
|
||||
if self._mutualfund is None:
|
||||
self._scrape(self.proxy)
|
||||
# self._scrape(self.proxy)
|
||||
self._fetch_and_parse()
|
||||
return self._mutualfund
|
||||
|
||||
def _scrape(self, proxy):
|
||||
ticker_url = "{}/{}".format(self._SCRAPE_URL_, self._data.ticker)
|
||||
@property
|
||||
def insider_transactions(self) -> pd.DataFrame:
|
||||
if self._insider_transactions is None:
|
||||
# self._scrape_insider_transactions(self.proxy)
|
||||
self._fetch_and_parse()
|
||||
return self._insider_transactions
|
||||
|
||||
@property
|
||||
def insider_purchases(self) -> pd.DataFrame:
|
||||
if self._insider_purchases is None:
|
||||
# self._scrape_insider_transactions(self.proxy)
|
||||
self._fetch_and_parse()
|
||||
return self._insider_purchases
|
||||
|
||||
@property
|
||||
def insider_roster(self) -> pd.DataFrame:
|
||||
if self._insider_roster is None:
|
||||
# self._scrape_insider_ros(self.proxy)
|
||||
self._fetch_and_parse()
|
||||
return self._insider_roster
|
||||
|
||||
def _fetch(self, proxy):
|
||||
modules = ','.join(
|
||||
["institutionOwnership", "fundOwnership", "majorDirectHolders", "majorHoldersBreakdown", "insiderTransactions", "insiderHolders", "netSharePurchaseActivity"])
|
||||
params_dict = {"modules": modules, "corsDomain": "finance.yahoo.com", "formatted": "false"}
|
||||
result = self._data.get_raw_json(f"{_QUOTE_SUMMARY_URL_}/{self._symbol}", user_agent_headers=self._data.user_agent_headers, params=params_dict, proxy=proxy)
|
||||
return result
|
||||
|
||||
def _fetch_and_parse(self):
|
||||
try:
|
||||
resp = self._data.cache_get(ticker_url + '/holders', proxy)
|
||||
holders = pd.read_html(resp.text)
|
||||
except Exception:
|
||||
holders = []
|
||||
result = self._fetch(self.proxy)
|
||||
except requests.exceptions.HTTPError as e:
|
||||
utils.get_yf_logger().error(str(e))
|
||||
|
||||
if len(holders) >= 3:
|
||||
self._major = holders[0]
|
||||
self._institutional = holders[1]
|
||||
self._mutualfund = holders[2]
|
||||
elif len(holders) >= 2:
|
||||
self._major = holders[0]
|
||||
self._institutional = holders[1]
|
||||
elif len(holders) >= 1:
|
||||
self._major = holders[0]
|
||||
self._major = pd.DataFrame()
|
||||
self._major_direct_holders = pd.DataFrame()
|
||||
self._institutional = pd.DataFrame()
|
||||
self._mutualfund = pd.DataFrame()
|
||||
self._insider_transactions = pd.DataFrame()
|
||||
self._insider_purchases = pd.DataFrame()
|
||||
self._insider_roster = pd.DataFrame()
|
||||
|
||||
if self._institutional is not None:
|
||||
if 'Date Reported' in self._institutional:
|
||||
self._institutional['Date Reported'] = pd.to_datetime(
|
||||
self._institutional['Date Reported'])
|
||||
if '% Out' in self._institutional:
|
||||
self._institutional['% Out'] = self._institutional[
|
||||
'% Out'].str.replace('%', '').astype(float) / 100
|
||||
return
|
||||
|
||||
if self._mutualfund is not None:
|
||||
if 'Date Reported' in self._mutualfund:
|
||||
self._mutualfund['Date Reported'] = pd.to_datetime(
|
||||
self._mutualfund['Date Reported'])
|
||||
if '% Out' in self._mutualfund:
|
||||
self._mutualfund['% Out'] = self._mutualfund[
|
||||
'% Out'].str.replace('%', '').astype(float) / 100
|
||||
try:
|
||||
data = result["quoteSummary"]["result"][0]
|
||||
# parse "institutionOwnership", "fundOwnership", "majorDirectHolders", "majorHoldersBreakdown", "insiderTransactions", "insiderHolders", "netSharePurchaseActivity"
|
||||
self._parse_institution_ownership(data["institutionOwnership"])
|
||||
self._parse_fund_ownership(data["fundOwnership"])
|
||||
# self._parse_major_direct_holders(data["majorDirectHolders"]) # need more data to investigate
|
||||
self._parse_major_holders_breakdown(data["majorHoldersBreakdown"])
|
||||
self._parse_insider_transactions(data["insiderTransactions"])
|
||||
self._parse_insider_holders(data["insiderHolders"])
|
||||
self._parse_net_share_purchase_activity(data["netSharePurchaseActivity"])
|
||||
except (KeyError, IndexError):
|
||||
raise YFDataException("Failed to parse holders json data.")
|
||||
|
||||
@staticmethod
|
||||
def _parse_raw_values(data):
|
||||
if isinstance(data, dict) and "raw" in data:
|
||||
return data["raw"]
|
||||
return data
|
||||
|
||||
def _parse_institution_ownership(self, data):
|
||||
holders = data["ownershipList"]
|
||||
for owner in holders:
|
||||
for k, v in owner.items():
|
||||
owner[k] = self._parse_raw_values(v)
|
||||
del owner["maxAge"]
|
||||
df = pd.DataFrame(holders)
|
||||
if not df.empty:
|
||||
df["reportDate"] = pd.to_datetime(df["reportDate"], unit="s")
|
||||
df.rename(columns={"reportDate": "Date Reported", "organization": "Holder", "position": "Shares", "value": "Value"}, inplace=True) # "pctHeld": "% Out"
|
||||
self._institutional = df
|
||||
|
||||
def _parse_fund_ownership(self, data):
|
||||
holders = data["ownershipList"]
|
||||
for owner in holders:
|
||||
for k, v in owner.items():
|
||||
owner[k] = self._parse_raw_values(v)
|
||||
del owner["maxAge"]
|
||||
df = pd.DataFrame(holders)
|
||||
if not df.empty:
|
||||
df["reportDate"] = pd.to_datetime(df["reportDate"], unit="s")
|
||||
df.rename(columns={"reportDate": "Date Reported", "organization": "Holder", "position": "Shares", "value": "Value"}, inplace=True)
|
||||
self._mutualfund = df
|
||||
|
||||
def _parse_major_direct_holders(self, data):
|
||||
holders = data["holders"]
|
||||
for owner in holders:
|
||||
for k, v in owner.items():
|
||||
owner[k] = self._parse_raw_values(v)
|
||||
del owner["maxAge"]
|
||||
df = pd.DataFrame(holders)
|
||||
if not df.empty:
|
||||
df["reportDate"] = pd.to_datetime(df["reportDate"], unit="s")
|
||||
df.rename(columns={"reportDate": "Date Reported", "organization": "Holder", "positionDirect": "Shares", "valueDirect": "Value"}, inplace=True)
|
||||
self._major_direct_holders = df
|
||||
|
||||
def _parse_major_holders_breakdown(self, data):
|
||||
if "maxAge" in data:
|
||||
del data["maxAge"]
|
||||
df = pd.DataFrame.from_dict(data, orient="index")
|
||||
if not df.empty:
|
||||
df.columns.name = "Breakdown"
|
||||
df.rename(columns={df.columns[0]: 'Value'}, inplace=True)
|
||||
self._major = df
|
||||
|
||||
def _parse_insider_transactions(self, data):
|
||||
holders = data["transactions"]
|
||||
for owner in holders:
|
||||
for k, v in owner.items():
|
||||
owner[k] = self._parse_raw_values(v)
|
||||
del owner["maxAge"]
|
||||
df = pd.DataFrame(holders)
|
||||
if not df.empty:
|
||||
df["startDate"] = pd.to_datetime(df["startDate"], unit="s")
|
||||
df.rename(columns={
|
||||
"startDate": "Start Date",
|
||||
"filerName": "Insider",
|
||||
"filerRelation": "Position",
|
||||
"filerUrl": "URL",
|
||||
"moneyText": "Transaction",
|
||||
"transactionText": "Text",
|
||||
"shares": "Shares",
|
||||
"value": "Value",
|
||||
"ownership": "Ownership" # ownership flag, direct or institutional
|
||||
}, inplace=True)
|
||||
self._insider_transactions = df
|
||||
|
||||
def _parse_insider_holders(self, data):
|
||||
holders = data["holders"]
|
||||
for owner in holders:
|
||||
for k, v in owner.items():
|
||||
owner[k] = self._parse_raw_values(v)
|
||||
del owner["maxAge"]
|
||||
df = pd.DataFrame(holders)
|
||||
if not df.empty:
|
||||
df["positionDirectDate"] = pd.to_datetime(df["positionDirectDate"], unit="s")
|
||||
df["latestTransDate"] = pd.to_datetime(df["latestTransDate"], unit="s")
|
||||
|
||||
df.rename(columns={
|
||||
"name": "Name",
|
||||
"relation": "Position",
|
||||
"url": "URL",
|
||||
"transactionDescription": "Most Recent Transaction",
|
||||
"latestTransDate": "Latest Transaction Date",
|
||||
"positionDirectDate": "Position Direct Date",
|
||||
"positionDirect": "Shares Owned Directly",
|
||||
"positionIndirectDate": "Position Indirect Date",
|
||||
"positionIndirect": "Shares Owned Indirectly"
|
||||
}, inplace=True)
|
||||
|
||||
df["Name"] = df["Name"].astype(str)
|
||||
df["Position"] = df["Position"].astype(str)
|
||||
df["URL"] = df["URL"].astype(str)
|
||||
df["Most Recent Transaction"] = df["Most Recent Transaction"].astype(str)
|
||||
|
||||
self._insider_roster = df
|
||||
|
||||
def _parse_net_share_purchase_activity(self, data):
|
||||
df = pd.DataFrame(
|
||||
{
|
||||
"Insider Purchases Last " + data.get("period", ""): [
|
||||
"Purchases",
|
||||
"Sales",
|
||||
"Net Shares Purchased (Sold)",
|
||||
"Total Insider Shares Held",
|
||||
"% Net Shares Purchased (Sold)",
|
||||
"% Buy Shares",
|
||||
"% Sell Shares"
|
||||
],
|
||||
"Shares": [
|
||||
data.get('buyInfoShares'),
|
||||
data.get('sellInfoShares'),
|
||||
data.get('netInfoShares'),
|
||||
data.get('totalInsiderShares'),
|
||||
data.get('netPercentInsiderShares'),
|
||||
data.get('buyPercentInsiderShares'),
|
||||
data.get('sellPercentInsiderShares')
|
||||
],
|
||||
"Trans": [
|
||||
data.get('buyInfoCount'),
|
||||
data.get('sellInfoCount'),
|
||||
data.get('netInfoCount'),
|
||||
pd.NA,
|
||||
pd.NA,
|
||||
pd.NA,
|
||||
pd.NA
|
||||
]
|
||||
}
|
||||
).convert_dtypes()
|
||||
self._insider_purchases = df
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
import datetime
|
||||
import logging
|
||||
import json
|
||||
import warnings
|
||||
from collections.abc import MutableMapping
|
||||
|
||||
import pandas as pd
|
||||
import numpy as _np
|
||||
import pandas as pd
|
||||
import requests
|
||||
|
||||
from yfinance import utils
|
||||
from yfinance.data import TickerData
|
||||
|
||||
logger = utils.get_yf_logger()
|
||||
from yfinance.data import YfData
|
||||
from yfinance.const import quote_summary_valid_modules, _BASE_URL_
|
||||
from yfinance.exceptions import YFNotImplementedError, YFDataException, YFException
|
||||
|
||||
info_retired_keys_price = {"currentPrice", "dayHigh", "dayLow", "open", "previousClose", "volume", "volume24Hr"}
|
||||
info_retired_keys_price.update({"regularMarket"+s for s in ["DayHigh", "DayLow", "Open", "PreviousClose", "Price", "Volume"]})
|
||||
@@ -21,14 +22,11 @@ info_retired_keys_symbol = {"symbol"}
|
||||
info_retired_keys = info_retired_keys_price | info_retired_keys_exchange | info_retired_keys_marketCap | info_retired_keys_symbol
|
||||
|
||||
|
||||
PRUNE_INFO = True
|
||||
# PRUNE_INFO = False
|
||||
_BASIC_URL_ = "https://query2.finance.yahoo.com/v10/finance/quoteSummary"
|
||||
_QUOTE_SUMMARY_URL_ = f"{_BASE_URL_}/v10/finance/quoteSummary"
|
||||
|
||||
|
||||
from collections.abc import MutableMapping
|
||||
class InfoDictWrapper(MutableMapping):
|
||||
""" Simple wrapper around info dict, intercepting 'gets' to
|
||||
""" Simple wrapper around info dict, intercepting 'gets' to
|
||||
print how-to-migrate messages for specific keys. Requires
|
||||
override dict API"""
|
||||
|
||||
@@ -70,7 +68,7 @@ class InfoDictWrapper(MutableMapping):
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.info)
|
||||
|
||||
|
||||
def __len__(self):
|
||||
return len(self.info)
|
||||
|
||||
@@ -81,10 +79,9 @@ class InfoDictWrapper(MutableMapping):
|
||||
class FastInfo:
|
||||
# Contain small subset of info[] items that can be fetched faster elsewhere.
|
||||
# Imitates a dict.
|
||||
def __init__(self, tickerBaseObject):
|
||||
utils.print_once("yfinance: Note: 'Ticker.info' dict is now fixed & improved, 'fast_info' is no longer faster")
|
||||
|
||||
def __init__(self, tickerBaseObject, proxy=None):
|
||||
self._tkr = tickerBaseObject
|
||||
self.proxy = proxy
|
||||
|
||||
self._prices_1y = None
|
||||
self._prices_1wk_1h_prepost = None
|
||||
@@ -129,67 +126,69 @@ class FastInfo:
|
||||
_properties += ["fifty_day_average", "two_hundred_day_average", "ten_day_average_volume", "three_month_average_volume"]
|
||||
_properties += ["year_high", "year_low", "year_change"]
|
||||
|
||||
# Because released before fixing key case, need to officially support
|
||||
# Because released before fixing key case, need to officially support
|
||||
# camel-case but also secretly support snake-case
|
||||
base_keys = [k for k in _properties if not '_' in k]
|
||||
base_keys = [k for k in _properties if '_' not in k]
|
||||
|
||||
sc_keys = [k for k in _properties if '_' in k]
|
||||
|
||||
self._sc_to_cc_key = {k:utils.snake_case_2_camelCase(k) for k in sc_keys}
|
||||
self._cc_to_sc_key = {v:k for k,v in self._sc_to_cc_key.items()}
|
||||
|
||||
self._sc_to_cc_key = {k: utils.snake_case_2_camelCase(k) for k in sc_keys}
|
||||
self._cc_to_sc_key = {v: k for k, v in self._sc_to_cc_key.items()}
|
||||
|
||||
self._public_keys = sorted(base_keys + list(self._sc_to_cc_key.values()))
|
||||
self._keys = sorted(self._public_keys + sc_keys)
|
||||
|
||||
# dict imitation:
|
||||
def keys(self):
|
||||
return self._public_keys
|
||||
|
||||
def items(self):
|
||||
return [(k,self[k]) for k in self._public_keys]
|
||||
return [(k, self[k]) for k in self._public_keys]
|
||||
|
||||
def values(self):
|
||||
return [self[k] for k in self._public_keys]
|
||||
|
||||
def get(self, key, default=None):
|
||||
if key in self.keys():
|
||||
if key in self._cc_to_sc_key:
|
||||
key = self._cc_to_sc_key[key]
|
||||
return self[key]
|
||||
return default
|
||||
|
||||
def __getitem__(self, k):
|
||||
if not isinstance(k, str):
|
||||
raise KeyError(f"key must be a string")
|
||||
if not k in self._keys:
|
||||
raise KeyError("key must be a string")
|
||||
if k not in self._keys:
|
||||
raise KeyError(f"'{k}' not valid key. Examine 'FastInfo.keys()'")
|
||||
if k in self._cc_to_sc_key:
|
||||
k = self._cc_to_sc_key[k]
|
||||
return getattr(self, k)
|
||||
|
||||
def __contains__(self, k):
|
||||
return k in self.keys()
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.keys())
|
||||
|
||||
def __str__(self):
|
||||
return "lazy-loading dict with keys = " + str(self.keys())
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def toJSON(self, indent=4):
|
||||
d = {k:self[k] for k in self.keys()}
|
||||
return _json.dumps({k:self[k] for k in self.keys()}, indent=indent)
|
||||
return json.dumps({k: self[k] for k in self.keys()}, indent=indent)
|
||||
|
||||
def _get_1y_prices(self, fullDaysOnly=False):
|
||||
if self._prices_1y is None:
|
||||
# Temporarily disable error printing
|
||||
l = logger.level
|
||||
logger.setLevel(logging.CRITICAL)
|
||||
self._prices_1y = self._tkr.history(period="380d", auto_adjust=False, keepna=True)
|
||||
logger.setLevel(l)
|
||||
self._md = self._tkr.get_history_metadata()
|
||||
self._prices_1y = self._tkr.history(period="1y", auto_adjust=False, keepna=True, proxy=self.proxy)
|
||||
self._md = self._tkr.get_history_metadata(proxy=self.proxy)
|
||||
try:
|
||||
ctp = self._md["currentTradingPeriod"]
|
||||
self._today_open = pd.to_datetime(ctp["regular"]["start"], unit='s', utc=True).tz_convert(self.timezone)
|
||||
self._today_close = pd.to_datetime(ctp["regular"]["end"], unit='s', utc=True).tz_convert(self.timezone)
|
||||
self._today_midnight = self._today_close.ceil("D")
|
||||
except:
|
||||
except Exception:
|
||||
self._today_open = None
|
||||
self._today_close = None
|
||||
self._today_midnight = None
|
||||
@@ -208,20 +207,12 @@ class FastInfo:
|
||||
|
||||
def _get_1wk_1h_prepost_prices(self):
|
||||
if self._prices_1wk_1h_prepost is None:
|
||||
# Temporarily disable error printing
|
||||
l = logger.level
|
||||
logger.setLevel(logging.CRITICAL)
|
||||
self._prices_1wk_1h_prepost = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=True)
|
||||
logger.setLevel(l)
|
||||
self._prices_1wk_1h_prepost = self._tkr.history(period="5d", interval="1h", auto_adjust=False, prepost=True, proxy=self.proxy)
|
||||
return self._prices_1wk_1h_prepost
|
||||
|
||||
def _get_1wk_1h_reg_prices(self):
|
||||
if self._prices_1wk_1h_reg is None:
|
||||
# Temporarily disable error printing
|
||||
l = logger.level
|
||||
logger.setLevel(logging.CRITICAL)
|
||||
self._prices_1wk_1h_reg = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=False)
|
||||
logger.setLevel(l)
|
||||
self._prices_1wk_1h_reg = self._tkr.history(period="5d", interval="1h", auto_adjust=False, prepost=False, proxy=self.proxy)
|
||||
return self._prices_1wk_1h_reg
|
||||
|
||||
def _get_exchange_metadata(self):
|
||||
@@ -229,7 +220,7 @@ class FastInfo:
|
||||
return self._md
|
||||
|
||||
self._get_1y_prices()
|
||||
self._md = self._tkr.get_history_metadata()
|
||||
self._md = self._tkr.get_history_metadata(proxy=self.proxy)
|
||||
return self._md
|
||||
|
||||
def _exchange_open_now(self):
|
||||
@@ -260,9 +251,7 @@ class FastInfo:
|
||||
if self._currency is not None:
|
||||
return self._currency
|
||||
|
||||
if self._tkr._history_metadata is None:
|
||||
self._get_1y_prices()
|
||||
md = self._tkr.get_history_metadata()
|
||||
md = self._tkr.get_history_metadata(proxy=self.proxy)
|
||||
self._currency = md["currency"]
|
||||
return self._currency
|
||||
|
||||
@@ -271,9 +260,7 @@ class FastInfo:
|
||||
if self._quote_type is not None:
|
||||
return self._quote_type
|
||||
|
||||
if self._tkr._history_metadata is None:
|
||||
self._get_1y_prices()
|
||||
md = self._tkr.get_history_metadata()
|
||||
md = self._tkr.get_history_metadata(proxy=self.proxy)
|
||||
self._quote_type = md["instrumentType"]
|
||||
return self._quote_type
|
||||
|
||||
@@ -298,10 +285,10 @@ class FastInfo:
|
||||
if self._shares is not None:
|
||||
return self._shares
|
||||
|
||||
shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date()-pd.Timedelta(days=548))
|
||||
if shares is None:
|
||||
# Requesting 18 months failed, so fallback to shares which should include last year
|
||||
shares = self._tkr.get_shares()
|
||||
shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date()-pd.Timedelta(days=548), proxy=self.proxy)
|
||||
# if shares is None:
|
||||
# # Requesting 18 months failed, so fallback to shares which should include last year
|
||||
# shares = self._tkr.get_shares()
|
||||
if shares is not None:
|
||||
if isinstance(shares, pd.DataFrame):
|
||||
shares = shares[shares.columns[0]]
|
||||
@@ -336,7 +323,7 @@ class FastInfo:
|
||||
else:
|
||||
prices = prices[["Close"]].groupby(prices.index.date).last()
|
||||
if prices.shape[0] < 2:
|
||||
# Very few symbols have previousClose despite no
|
||||
# Very few symbols have previousClose despite no
|
||||
# no trading data e.g. 'QCSTIX'.
|
||||
fail = True
|
||||
else:
|
||||
@@ -355,12 +342,12 @@ class FastInfo:
|
||||
return self._reg_prev_close
|
||||
prices = self._get_1y_prices()
|
||||
if prices.shape[0] == 1:
|
||||
# Tiny % of tickers don't return daily history before last trading day,
|
||||
# Tiny % of tickers don't return daily history before last trading day,
|
||||
# so backup option is hourly history:
|
||||
prices = self._get_1wk_1h_reg_prices()
|
||||
prices = prices[["Close"]].groupby(prices.index.date).last()
|
||||
if prices.shape[0] < 2:
|
||||
# Very few symbols have regularMarketPreviousClose despite no
|
||||
# Very few symbols have regularMarketPreviousClose despite no
|
||||
# no trading data. E.g. 'QCSTIX'.
|
||||
# So fallback to original info[] if available.
|
||||
self._tkr.info # trigger fetch
|
||||
@@ -551,14 +538,16 @@ class FastInfo:
|
||||
|
||||
class Quote:
|
||||
|
||||
def __init__(self, data: TickerData, proxy=None):
|
||||
def __init__(self, data: YfData, symbol: str, proxy=None):
|
||||
self._data = data
|
||||
self._symbol = symbol
|
||||
self.proxy = proxy
|
||||
|
||||
self._info = None
|
||||
self._retired_info = None
|
||||
self._sustainability = None
|
||||
self._recommendations = None
|
||||
self._upgrades_downgrades = None
|
||||
self._calendar = None
|
||||
|
||||
self._already_scraped = False
|
||||
@@ -568,9 +557,7 @@ class Quote:
|
||||
@property
|
||||
def info(self) -> dict:
|
||||
if self._info is None:
|
||||
# self._scrape(self.proxy) # decrypt broken
|
||||
self._fetch(self.proxy)
|
||||
|
||||
self._fetch_info(self.proxy)
|
||||
self._fetch_complementary(self.proxy)
|
||||
|
||||
return self._info
|
||||
@@ -578,154 +565,81 @@ class Quote:
|
||||
@property
|
||||
def sustainability(self) -> pd.DataFrame:
|
||||
if self._sustainability is None:
|
||||
self._scrape(self.proxy)
|
||||
raise YFNotImplementedError('sustainability')
|
||||
return self._sustainability
|
||||
|
||||
@property
|
||||
def recommendations(self) -> pd.DataFrame:
|
||||
if self._recommendations is None:
|
||||
self._scrape(self.proxy)
|
||||
result = self._fetch(self.proxy, modules=['recommendationTrend'])
|
||||
if result is None:
|
||||
self._recommendations = pd.DataFrame()
|
||||
else:
|
||||
try:
|
||||
data = result["quoteSummary"]["result"][0]["recommendationTrend"]["trend"]
|
||||
except (KeyError, IndexError):
|
||||
raise YFDataException(f"Failed to parse json response from Yahoo Finance: {result}")
|
||||
self._recommendations = pd.DataFrame(data)
|
||||
return self._recommendations
|
||||
|
||||
@property
|
||||
def calendar(self) -> pd.DataFrame:
|
||||
def upgrades_downgrades(self) -> pd.DataFrame:
|
||||
if self._upgrades_downgrades is None:
|
||||
result = self._fetch(self.proxy, modules=['upgradeDowngradeHistory'])
|
||||
if result is None:
|
||||
self._upgrades_downgrades = pd.DataFrame()
|
||||
else:
|
||||
try:
|
||||
data = result["quoteSummary"]["result"][0]["upgradeDowngradeHistory"]["history"]
|
||||
if len(data) == 0:
|
||||
raise YFDataException(f"No upgrade/downgrade history found for {self._symbol}")
|
||||
df = pd.DataFrame(data)
|
||||
df.rename(columns={"epochGradeDate": "GradeDate", 'firm': 'Firm', 'toGrade': 'ToGrade', 'fromGrade': 'FromGrade', 'action': 'Action'}, inplace=True)
|
||||
df.set_index('GradeDate', inplace=True)
|
||||
df.index = pd.to_datetime(df.index, unit='s')
|
||||
self._upgrades_downgrades = df
|
||||
except (KeyError, IndexError):
|
||||
raise YFDataException(f"Failed to parse json response from Yahoo Finance: {result}")
|
||||
return self._upgrades_downgrades
|
||||
|
||||
@property
|
||||
def calendar(self) -> dict:
|
||||
if self._calendar is None:
|
||||
self._scrape(self.proxy)
|
||||
self._fetch_calendar()
|
||||
return self._calendar
|
||||
|
||||
def _scrape(self, proxy):
|
||||
if self._already_scraped:
|
||||
return
|
||||
self._already_scraped = True
|
||||
@staticmethod
|
||||
def valid_modules():
|
||||
return quote_summary_valid_modules
|
||||
|
||||
# get info and sustainability
|
||||
json_data = self._data.get_json_data_stores(proxy=proxy)
|
||||
def _fetch(self, proxy, modules: list):
|
||||
if not isinstance(modules, list):
|
||||
raise YFException("Should provide a list of modules, see available modules using `valid_modules`")
|
||||
|
||||
modules = ','.join([m for m in modules if m in quote_summary_valid_modules])
|
||||
        if len(modules) == 0:
            raise YFException("No valid modules provided, see available modules using `valid_modules`")
        params_dict = {"modules": modules, "corsDomain": "finance.yahoo.com", "formatted": "false", "symbol": self._symbol}
        try:
            quote_summary_store = json_data['QuoteSummaryStore']
        except KeyError:
            err_msg = "No summary info found, symbol may be delisted"
            logger.error('%s: %s', self._data.ticker, err_msg)
            result = self._data.get_raw_json(_QUOTE_SUMMARY_URL_ + f"/{self._symbol}", user_agent_headers=self._data.user_agent_headers, params=params_dict, proxy=proxy)
        except requests.exceptions.HTTPError as e:
            utils.get_yf_logger().error(str(e))
            return None
        return result

        # sustainability
        d = {}
        try:
            if isinstance(quote_summary_store.get('esgScores'), dict):
                for item in quote_summary_store['esgScores']:
                    if not isinstance(quote_summary_store['esgScores'][item], (dict, list)):
                        d[item] = quote_summary_store['esgScores'][item]

                s = pd.DataFrame(index=[0], data=d)[-1:].T
                s.columns = ['Value']
                s.index.name = '%.f-%.f' % (
                    s[s.index == 'ratingYear']['Value'].values[0],
                    s[s.index == 'ratingMonth']['Value'].values[0])

                self._sustainability = s[~s.index.isin(
                    ['maxAge', 'ratingYear', 'ratingMonth'])]
        except Exception:
            pass

        self._info = {}
        try:
            items = ['summaryProfile', 'financialData', 'quoteType',
                     'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
            for item in items:
                if isinstance(quote_summary_store.get(item), dict):
                    self._info.update(quote_summary_store[item])
        except Exception:
            pass

        # For ETFs, provide this valuable data: the top holdings of the ETF
        try:
            if 'topHoldings' in quote_summary_store:
                self._info.update(quote_summary_store['topHoldings'])
        except Exception:
            pass

        try:
            if not isinstance(quote_summary_store.get('summaryDetail'), dict):
                # For some reason summaryDetail did not give any results. The price dict
                # usually has most of the same info
                self._info.update(quote_summary_store.get('price', {}))
        except Exception:
            pass

        try:
            # self._info['regularMarketPrice'] = self._info['regularMarketOpen']
            self._info['regularMarketPrice'] = quote_summary_store.get('price', {}).get(
                'regularMarketPrice', self._info.get('regularMarketOpen', None))
        except Exception:
            pass

        try:
            self._info['preMarketPrice'] = quote_summary_store.get('price', {}).get(
                'preMarketPrice', self._info.get('preMarketPrice', None))
        except Exception:
            pass

        self._info['logo_url'] = ""
        try:
            if not 'website' in self._info:
                self._info['logo_url'] = 'https://logo.clearbit.com/%s.com' % \
                    self._info['shortName'].split(' ')[0].split(',')[0]
            else:
                domain = self._info['website'].split(
                    '://')[1].split('/')[0].replace('www.', '')
                self._info['logo_url'] = 'https://logo.clearbit.com/%s' % domain
        except Exception:
            pass

        # Delete redundant info[] keys, because values can be accessed faster
        # elsewhere - e.g. price keys. Hope it reduces Yahoo spam effect.
        # But record the dropped keys, because in rare cases they are needed.
        self._retired_info = {}
        for k in info_retired_keys:
            if k in self._info:
                self._retired_info[k] = self._info[k]
                if PRUNE_INFO:
                    del self._info[k]
        if PRUNE_INFO:
            # InfoDictWrapper will explain how to access above data elsewhere
            self._info = InfoDictWrapper(self._info)

        # events
        try:
            cal = pd.DataFrame(quote_summary_store['calendarEvents']['earnings'])
            cal['earningsDate'] = pd.to_datetime(
                cal['earningsDate'], unit='s')
            self._calendar = cal.T
            self._calendar.index = utils.camel2title(self._calendar.index)
            self._calendar.columns = ['Value']
        except Exception as e:
            pass

        # analyst recommendations
        try:
            rec = pd.DataFrame(
                quote_summary_store['upgradeDowngradeHistory']['history'])
            rec['earningsDate'] = pd.to_datetime(
                rec['epochGradeDate'], unit='s')
            rec.set_index('earningsDate', inplace=True)
            rec.index.name = 'Date'
            rec.columns = utils.camel2title(rec.columns)
            self._recommendations = rec[[
                'Firm', 'To Grade', 'From Grade', 'Action']].sort_index()
        except Exception:
            pass

    def _fetch(self, proxy):
    def _fetch_info(self, proxy):
        if self._already_fetched:
            return
        self._already_fetched = True
        modules = ['summaryProfile', 'financialData', 'quoteType',
                   'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
        result = self._data.get_raw_json(
            _BASIC_URL_ + f"/{self._data.ticker}", params={"modules": ",".join(modules), "ssl": "true"}, proxy=proxy
        )
        result["quoteSummary"]["result"][0]["symbol"] = self._data.ticker
        modules = ['financialData', 'quoteType', 'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
        result = self._fetch(proxy, modules=modules)
        if result is None:
            self._info = {}
            return

        result["quoteSummary"]["result"][0]["symbol"] = self._symbol
        query1_info = next(
            (info for info in result.get("quoteSummary", {}).get("result", []) if info["symbol"] == self._data.ticker),
            (info for info in result.get("quoteSummary", {}).get("result", []) if info["symbol"] == self._symbol),
            None,
        )
        # Most keys that appear in multiple dicts have same value. Except 'maxAge' because
@@ -734,20 +648,21 @@ class Quote:
            if "maxAge" in query1_info[k] and query1_info[k]["maxAge"] == 1:
                query1_info[k]["maxAge"] = 86400
        query1_info = {
            k1: v1
            for k, v in query1_info.items()
            if isinstance(v, dict)
            for k1, v1 in v.items()
            k1: v1
            for k, v in query1_info.items()
            if isinstance(v, dict)
            for k1, v1 in v.items()
            if v1
        }
        # recursively format but only because of 'companyOfficers'

        def _format(k, v):
            if isinstance(v, dict) and "raw" in v and "fmt" in v:
                v2 = v["fmt"] if k in {"regularMarketTime", "postMarketTime"} else v["raw"]
            elif isinstance(v, list):
                v2 = [_format(None, x) for x in v]
            elif isinstance(v, dict):
                v2 = {k:_format(k, x) for k, x in v.items()}
                v2 = {k: _format(k, x) for k, x in v.items()}
            elif isinstance(v, str):
                v2 = v.replace("\xa0", " ")
            else:
@@ -763,7 +678,7 @@ class Quote:
        self._already_fetched_complementary = True

        # self._scrape(proxy) # decrypt broken
        self._fetch(proxy)
        self._fetch_info(proxy)
        if self._info is None:
            return

@@ -792,8 +707,7 @@ class Quote:
        # pass
        #
        # For just one/few variables it is faster to query directly:
        url = "https://query1.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{}?symbol={}".format(
            self._data.ticker, self._data.ticker)
        url = f"https://query1.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{self._symbol}?symbol={self._symbol}"
        for k in keys:
            url += "&type=" + k
        # Request 6 months of data
@@ -805,14 +719,39 @@ class Quote:

        json_str = self._data.cache_get(url=url, proxy=proxy).text
        json_data = json.loads(json_str)
        try:
            key_stats = json_data["timeseries"]["result"][0]
            if k not in key_stats:
                # Yahoo website prints N/A, indicates Yahoo lacks necessary data to calculate
                v = None
        json_result = json_data.get("timeseries") or json_data.get("finance")
        if json_result["error"] is not None:
            raise YFException("Failed to parse json response from Yahoo Finance: " + str(json_result["error"]))
        for k in keys:
            keydict = json_result["result"][0]
            if k in keydict:
                self._info[k] = keydict[k][-1]["reportedValue"]["raw"]
            else:
                # Select most recent (last) raw value in list:
                v = key_stats[k][-1]["reportedValue"]["raw"]
        except Exception:
            v = None
        self._info[k] = v
                self.info[k] = None

    def _fetch_calendar(self):
        # secFilings return too old data, so not requesting it for now
        result = self._fetch(self.proxy, modules=['calendarEvents'])
        if result is None:
            self._calendar = {}
            return

        try:
            self._calendar = dict()
            _events = result["quoteSummary"]["result"][0]["calendarEvents"]
            if 'dividendDate' in _events:
                self._calendar['Dividend Date'] = datetime.datetime.fromtimestamp(_events['dividendDate']).date()
            if 'exDividendDate' in _events:
                self._calendar['Ex-Dividend Date'] = datetime.datetime.fromtimestamp(_events['exDividendDate']).date()
            # splits = _events.get('splitDate')  # need to check later; will add code for this if data is found
            earnings = _events.get('earnings')
            if earnings is not None:
                self._calendar['Earnings Date'] = [datetime.datetime.fromtimestamp(d).date() for d in earnings.get('earningsDate', [])]
                self._calendar['Earnings High'] = earnings.get('earningsHigh', None)
                self._calendar['Earnings Low'] = earnings.get('earningsLow', None)
                self._calendar['Earnings Average'] = earnings.get('earningsAverage', None)
                self._calendar['Revenue High'] = earnings.get('revenueHigh', None)
                self._calendar['Revenue Low'] = earnings.get('revenueLow', None)
                self._calendar['Revenue Average'] = earnings.get('revenueAverage', None)
        except (KeyError, IndexError):
            raise YFDataException(f"Failed to parse json response from Yahoo Finance: {result}")

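After this rework, calendar data is a plain dict keyed by human-readable labels rather than a DataFrame. A sketch of the shape a caller of `Ticker.calendar` might see after this commit (all values below are invented for illustration, not real Yahoo data):

```python
import datetime

# Illustrative result shape only - keys come from _fetch_calendar above,
# values are made up for the example.
calendar = {
    'Dividend Date': datetime.date(2023, 8, 10),
    'Ex-Dividend Date': datetime.date(2023, 7, 12),
    'Earnings Date': [datetime.date(2023, 7, 27)],
    'Earnings Average': 1.27,
}
print(calendar['Earnings Date'][0].isoformat())  # 2023-07-27
```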
@@ -21,37 +21,40 @@

from __future__ import print_function

import datetime as _datetime
import pandas as _pd

from collections import namedtuple as _namedtuple

import pandas as _pd

from .base import TickerBase
from .const import _BASE_URL_


class Ticker(TickerBase):
    def __init__(self, ticker, session=None):
        super(Ticker, self).__init__(ticker, session=session)
    def __init__(self, ticker, session=None, proxy=None):
        super(Ticker, self).__init__(ticker, session=session, proxy=proxy)
        self._expirations = {}
        self._underlying = {}

    def __repr__(self):
        return 'yfinance.Ticker object <%s>' % self.ticker
        return f'yfinance.Ticker object <{self.ticker}>'

    def _download_options(self, date=None, proxy=None):
    def _download_options(self, date=None):
        if date is None:
            url = "{}/v7/finance/options/{}".format(
                self._base_url, self.ticker)
            url = f"{_BASE_URL_}/v7/finance/options/{self.ticker}"
        else:
            url = "{}/v7/finance/options/{}?date={}".format(
                self._base_url, self.ticker, date)
            url = f"{_BASE_URL_}/v7/finance/options/{self.ticker}?date={date}"

        r = self._data.get(url=url, proxy=proxy).json()
        r = self._data.get(url=url, proxy=self.proxy).json()
        if len(r.get('optionChain', {}).get('result', [])) > 0:
            for exp in r['optionChain']['result'][0]['expirationDates']:
                self._expirations[_datetime.datetime.utcfromtimestamp(
                    exp).strftime('%Y-%m-%d')] = exp
                self._expirations[_pd.Timestamp(exp, unit='s').strftime('%Y-%m-%d')] = exp

            self._underlying = r['optionChain']['result'][0].get('quote', {})

            opt = r['optionChain']['result'][0].get('options', [])
            return opt[0] if len(opt) > 0 else []

            return dict(**opt[0], underlying=self._underlying) if len(opt) > 0 else {}
        return {}

    def _options2df(self, opt, tz=None):
        data = _pd.DataFrame(opt).reindex(columns=[
@@ -76,23 +79,23 @@ class Ticker(TickerBase):
        data['lastTradeDate'] = data['lastTradeDate'].dt.tz_convert(tz)
        return data

    def option_chain(self, date=None, proxy=None, tz=None):
    def option_chain(self, date=None, tz=None):
        if date is None:
            options = self._download_options(proxy=proxy)
            options = self._download_options()
        else:
            if not self._expirations:
                self._download_options()
            if date not in self._expirations:
                raise ValueError(
                    "Expiration `%s` cannot be found. "
                    "Available expiration are: [%s]" % (
                        date, ', '.join(self._expirations)))
                    f"Expiration `{date}` cannot be found. "
                    f"Available expirations are: [{', '.join(self._expirations)}]")
            date = self._expirations[date]
            options = self._download_options(date, proxy=proxy)
            options = self._download_options(date)

        return _namedtuple('Options', ['calls', 'puts'])(**{
        return _namedtuple('Options', ['calls', 'puts', 'underlying'])(**{
            "calls": self._options2df(options['calls'], tz=tz),
            "puts": self._options2df(options['puts'], tz=tz)
            "puts": self._options2df(options['puts'], tz=tz),
            "underlying": options['underlying']
        })

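With `underlying` added to the `Options` namedtuple, callers can read the underlying quote next to the chain. A hedged usage sketch of the new shape (symbol is illustrative):

```python
import yfinance as yf

t = yf.Ticker("MSFT")          # any symbol; illustrative
chain = t.option_chain()       # nearest expiry when no date given
calls, puts = chain.calls, chain.puts
# 'underlying' is the raw quote dict Yahoo returns for the underlying asset
print(chain.underlying.get("regularMarketPrice"))
```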
    # ------------------------
@@ -113,12 +116,24 @@ class Ticker(TickerBase):
    def mutualfund_holders(self) -> _pd.DataFrame:
        return self.get_mutualfund_holders()

    @property
    def insider_purchases(self) -> _pd.DataFrame:
        return self.get_insider_purchases()

    @property
    def insider_transactions(self) -> _pd.DataFrame:
        return self.get_insider_transactions()

    @property
    def insider_roster_holders(self) -> _pd.DataFrame:
        return self.get_insider_roster_holders()

    @property
    def dividends(self) -> _pd.Series:
        return self.get_dividends()

    @property
    def capital_gains(self):
    def capital_gains(self) -> _pd.Series:
        return self.get_capital_gains()

    @property
@@ -130,7 +145,7 @@ class Ticker(TickerBase):
        return self.get_actions()

    @property
    def shares(self) -> _pd.DataFrame :
    def shares(self) -> _pd.DataFrame:
        return self.get_shares()

    @property
@@ -138,13 +153,28 @@ class Ticker(TickerBase):
        return self.get_info()

    @property
    def calendar(self) -> _pd.DataFrame:
    def fast_info(self):
        return self.get_fast_info()

    @property
    def calendar(self) -> dict:
        """
        Returns a dictionary of events, earnings, and dividends for the ticker
        """
        return self.get_calendar()

    @property
    def recommendations(self):
        return self.get_recommendations()

    @property
    def recommendations_summary(self):
        return self.get_recommendations_summary()

    @property
    def upgrades_downgrades(self):
        return self.get_upgrades_downgrades()

    @property
    def earnings(self) -> _pd.DataFrame:
        return self.get_earnings()
@@ -209,10 +239,6 @@ class Ticker(TickerBase):
    def quarterly_cashflow(self) -> _pd.DataFrame:
        return self.quarterly_cash_flow

    @property
    def recommendations_summary(self):
        return self.get_recommendations_summary()

    @property
    def analyst_price_target(self) -> _pd.DataFrame:
        return self.get_analyst_price_target()
@@ -232,9 +258,13 @@ class Ticker(TickerBase):
        return tuple(self._expirations.keys())

    @property
    def news(self):
    def news(self) -> list:
        return self.get_news()

    @property
    def trend_details(self) -> _pd.DataFrame:
        return self.get_trend_details()

    @property
    def earnings_trend(self) -> _pd.DataFrame:
        return self.get_earnings_trend()

@@ -22,19 +22,21 @@
from __future__ import print_function

from . import Ticker, multi


# from collections import namedtuple as _namedtuple


class Tickers:

    def __repr__(self):
        return 'yfinance.Tickers object <%s>' % ",".join(self.symbols)
        return f"yfinance.Tickers object <{','.join(self.symbols)}>"

    def __init__(self, tickers, session=None):
        tickers = tickers if isinstance(
            tickers, list) else tickers.replace(',', ' ').split()
        self.symbols = [ticker.upper() for ticker in tickers]
        self.tickers = {ticker:Ticker(ticker, session=session) for ticker in self.symbols}
        self.tickers = {ticker: Ticker(ticker, session=session) for ticker in self.symbols}

        # self.tickers = _namedtuple(
        # "Tickers", ticker_objects.keys(), rename=True

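Usage sketch of the `Tickers` container shown above (symbols are illustrative):

```python
import yfinance as yf

# Tickers accepts a list or a comma/space separated string
tkrs = yf.Tickers("msft aapl goog")
print(tkrs)                  # yfinance.Tickers object <MSFT,AAPL,GOOG>
msft = tkrs.tickers["MSFT"]  # symbols are upper-cased dict keys
```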
@@ -22,79 +22,169 @@
from __future__ import print_function

import datetime as _datetime
import dateutil as _dateutil
from typing import Dict, Union, List, Optional
import logging
import re as _re
import sys as _sys
import threading
from functools import lru_cache, wraps
from inspect import getmembers
from types import FunctionType
from typing import List, Optional

import numpy as _np
import pandas as _pd
import pytz as _tz
import requests as _requests
import re as _re
import pandas as _pd
import numpy as _np
import sys as _sys
import os as _os
import appdirs as _ad
import sqlite3 as _sqlite3
import atexit as _atexit
from functools import lru_cache
import logging

from threading import Lock

from dateutil.relativedelta import relativedelta
from pytz import UnknownTimeZoneError

try:
    import ujson as _json
except ImportError:
    import json as _json
from yfinance import const
from .const import _BASE_URL_

user_agent_headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}


# From https://stackoverflow.com/a/59128615
from types import FunctionType
from inspect import getmembers
def attributes(obj):
    disallowed_names = {
        name for name, value in getmembers(type(obj))
        name for name, value in getmembers(type(obj))
        if isinstance(value, FunctionType)}
    return {
        name: getattr(obj, name) for name in dir(obj)
        name: getattr(obj, name) for name in dir(obj)
        if name[0] != '_' and name not in disallowed_names and hasattr(obj, name)}


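A quick sketch of what `attributes` returns, assuming it runs in the same module (the class below is invented for illustration):

```python
class Point:
    def __init__(self):
        self.x = 1
        self.y = 2

    def norm(self):
        return (self.x ** 2 + self.y ** 2) ** 0.5

# attributes() keeps data members, drops methods and underscore names
print(attributes(Point()))  # {'x': 1, 'y': 2}
```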
@lru_cache(maxsize=20)
def print_once(msg):
    # 'warnings' module suppression of repeat messages does not work.
    # 'warnings' module suppression of repeat messages does not work.
    # This function replicates correct behaviour
    print(msg)


# Logging
# Note: most of this logic is adding indentation with function depth,
# so that DEBUG log is readable.
class IndentLoggerAdapter(logging.LoggerAdapter):
    def process(self, msg, kwargs):
        if get_yf_logger().isEnabledFor(logging.DEBUG):
            i = ' ' * self.extra['indent']
            if not isinstance(msg, str):
                msg = str(msg)
            msg = '\n'.join([i + m for m in msg.split('\n')])
        return msg, kwargs


_indentation_level = threading.local()


class IndentationContext:
    def __init__(self, increment=1):
        self.increment = increment

    def __enter__(self):
        _indentation_level.indent = getattr(_indentation_level, 'indent', 0) + self.increment

    def __exit__(self, exc_type, exc_val, exc_tb):
        _indentation_level.indent -= self.increment


def get_indented_logger(name=None):
    # Never cache the returned value! Will break indentation.
    return IndentLoggerAdapter(logging.getLogger(name), {'indent': getattr(_indentation_level, 'indent', 0)})


def log_indent_decorator(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        logger = get_indented_logger('yfinance')
        logger.debug(f'Entering {func.__name__}()')

        with IndentationContext():
            result = func(*args, **kwargs)

        logger.debug(f'Exiting {func.__name__}()')
        return result

    return wrapper


class MultiLineFormatter(logging.Formatter):
    # The 'fmt' formatting further down is only applied to first line
    # of log message, specifically the padding after %level%.
    # For multi-line messages, need to manually copy over padding.
    def __init__(self, fmt):
        super().__init__(fmt)
        # Extract amount of padding
        match = _re.search(r'%\(levelname\)-(\d+)s', fmt)
        self.level_length = int(match.group(1)) if match else 0

    def format(self, record):
        original = super().format(record)
        lines = original.split('\n')
        levelname = lines[0].split(' ')[0]
        if len(lines) <= 1:
            return original
        else:
            # Apply padding to all lines below first
            formatted = [lines[0]]
            if self.level_length == 0:
                padding = ' ' * len(levelname)
            else:
                padding = ' ' * self.level_length
            padding += ' '  # +1 for space between level and message
            formatted.extend(padding + line for line in lines[1:])
            return '\n'.join(formatted)


yf_logger = None
yf_log_indented = False


def get_yf_logger():
    global yf_logger
    if yf_logger is None:
        yf_logger = logging.getLogger("yfinance")
        if yf_logger.handlers is None or len(yf_logger.handlers) == 0:
            # Add stream handler if user not already added one
            h = logging.StreamHandler()
            formatter = logging.Formatter(fmt='%(levelname)s %(message)s')
            h.setFormatter(formatter)
            yf_logger.addHandler(h)
        yf_logger = logging.getLogger('yfinance')
    global yf_log_indented
    if yf_log_indented:
        yf_logger = get_indented_logger('yfinance')
    return yf_logger


def setup_debug_formatting():
    global yf_logger
    yf_logger = get_yf_logger()

    if not yf_logger.isEnabledFor(logging.DEBUG):
        yf_logger.warning("logging mode not set to 'DEBUG', so not setting up debug formatting")
        return

    global yf_log_indented
    if not yf_log_indented:
        if yf_logger.handlers is None or len(yf_logger.handlers) == 0:
            h = logging.StreamHandler()
            # Ensure different level strings don't interfere with indentation
            formatter = MultiLineFormatter(fmt='%(levelname)-8s %(message)s')
            h.setFormatter(formatter)
            yf_logger.addHandler(h)

    yf_log_indented = True


def enable_debug_mode():
    get_yf_logger().setLevel(logging.DEBUG)
    setup_debug_formatting()

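These helpers give users a one-call debug switch. A usage sketch, assuming `enable_debug_mode` is re-exported at package level as in released yfinance:

```python
import logging
import yfinance as yf

# Turn on verbose, indentation-formatted logs; roughly equivalent to
# setting the 'yfinance' logger to DEBUG then setup_debug_formatting()
yf.enable_debug_mode()

# The 'yfinance' logger remains a normal stdlib logger, so users can
# also quieten it directly:
logging.getLogger("yfinance").setLevel(logging.WARNING)
```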
def is_isin(string):
    return bool(_re.match("^([A-Z]{2})([A-Z0-9]{9})([0-9]{1})$", string))
    return bool(_re.match("^([A-Z]{2})([A-Z0-9]{9})([0-9])$", string))


def get_all_by_isin(isin, proxy=None, session=None):
    if not (is_isin(isin)):
        raise ValueError("Invalid ISIN number")

    from .base import _BASE_URL_
    session = session or _requests
    url = "{}/v1/finance/search?q={}".format(_BASE_URL_, isin)
    url = f"{_BASE_URL_}/v1/finance/search?q={isin}"
    data = session.get(url=url, proxies=proxy, headers=user_agent_headers)
    try:
        data = data.json()
@@ -146,7 +236,7 @@ def empty_earnings_dates_df():


def build_template(data):
    '''
    """
    build_template returns the details required to rebuild any of the yahoo finance financial statements in the same order as the yahoo finance webpage. The function is built to be used on the "FinancialTemplateStore" json which appears in any one of the three yahoo finance webpages: "/financials", "/cash-flow" and "/balance-sheet".

    Returns:
@@ -155,95 +245,80 @@ def build_template(data):
    - template_order: The order that quarterlies should be in (note that quarterlies have no prefix - hence why this is required).
    - level_detail: The level of each individual line item. E.g. for the "/financials" webpage, "Total Revenue" is a level 0 item and is the summation of "Operating Revenue" and "Excise Taxes" which are level 1 items.

    '''
    """
    template_ttm_order = []  # Save the TTM (Trailing Twelve Months) ordering to an object.
    template_annual_order = []  # Save the annual ordering to an object.
    template_order = []  # Save the ordering to an object (this can be utilized for quarterlies)
    level_detail = []  # Record the level of each line item of the income statement ("Operating Revenue" and "Excise Taxes" sum to return "Total Revenue" we need to keep track of this)
    for key in data['template']:
        # Loop through the json to retrieve the exact financial order whilst appending to the objects
        template_ttm_order.append('trailing{}'.format(key['key']))
        template_annual_order.append('annual{}'.format(key['key']))
        template_order.append('{}'.format(key['key']))
        level_detail.append(0)
        if 'children' in key:
            for child1 in key['children']:  # Level 1
                template_ttm_order.append('trailing{}'.format(child1['key']))
                template_annual_order.append('annual{}'.format(child1['key']))
                template_order.append('{}'.format(child1['key']))
                level_detail.append(1)
                if 'children' in child1:
                    for child2 in child1['children']:  # Level 2
                        template_ttm_order.append('trailing{}'.format(child2['key']))
                        template_annual_order.append('annual{}'.format(child2['key']))
                        template_order.append('{}'.format(child2['key']))
                        level_detail.append(2)
                        if 'children' in child2:
                            for child3 in child2['children']:  # Level 3
                                template_ttm_order.append('trailing{}'.format(child3['key']))
                                template_annual_order.append('annual{}'.format(child3['key']))
                                template_order.append('{}'.format(child3['key']))
                                level_detail.append(3)
                                if 'children' in child3:
                                    for child4 in child3['children']:  # Level 4
                                        template_ttm_order.append('trailing{}'.format(child4['key']))
                                        template_annual_order.append('annual{}'.format(child4['key']))
                                        template_order.append('{}'.format(child4['key']))
                                        level_detail.append(4)
                                        if 'children' in child4:
                                            for child5 in child4['children']:  # Level 5
                                                template_ttm_order.append('trailing{}'.format(child5['key']))
                                                template_annual_order.append('annual{}'.format(child5['key']))
                                                template_order.append('{}'.format(child5['key']))
                                                level_detail.append(5)

    def traverse(node, level):
        """
        A recursive function that visits a node and its children.

        Args:
            node: The current node in the data structure.
            level: The depth of the current node in the data structure.
        """
        if level > 5:  # Stop when level is above 5
            return
        template_ttm_order.append(f"trailing{node['key']}")
        template_annual_order.append(f"annual{node['key']}")
        template_order.append(f"{node['key']}")
        level_detail.append(level)
        if 'children' in node:  # Check if the node has children
            for child in node['children']:  # If yes, traverse each child
                traverse(child, level + 1)  # Increment the level by 1 for each child

    for key in data['template']:  # Loop through the data
        traverse(key, 0)  # Call the traverse function with initial level being 0

    return template_ttm_order, template_annual_order, template_order, level_detail

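The recursive `traverse` replaces the five nested loops with depth-limited recursion. A minimal sketch on a toy template, assuming the new `build_template` above is in scope (data is invented):

```python
toy = {'template': [
    {'key': 'TotalRevenue',
     'children': [{'key': 'OperatingRevenue'}, {'key': 'ExciseTaxes'}]},
]}

ttm, annual, order, levels = build_template(toy)
print(order)   # ['TotalRevenue', 'OperatingRevenue', 'ExciseTaxes']
print(levels)  # [0, 1, 1]
print(ttm[0])  # 'trailingTotalRevenue'
```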
def retreive_financial_details(data):
    '''
    retreive_financial_details returns all of the available financial details under the "QuoteTimeSeriesStore" for any of the following three yahoo finance webpages: "/financials", "/cash-flow" and "/balance-sheet".
def retrieve_financial_details(data):
    """
    retrieve_financial_details returns all of the available financial details under the
    "QuoteTimeSeriesStore" for any of the following three yahoo finance webpages:
    "/financials", "/cash-flow" and "/balance-sheet".

    Returns:
    - TTM_dicts: A dictionary full of all of the available Trailing Twelve Month figures, this can easily be converted to a pandas dataframe.
    - Annual_dicts: A dictionary full of all of the available Annual figures, this can easily be converted to a pandas dataframe.
    '''
    """
    TTM_dicts = []  # Save a dictionary object to store the TTM financials.
    Annual_dicts = []  # Save a dictionary object to store the Annual financials.
    for key in data['timeSeries']:  # Loop through the time series data to grab the key financial figures.

    for key, timeseries in data.get('timeSeries', {}).items():  # Loop through the time series data to grab the key financial figures.
        try:
            if len(data['timeSeries'][key]) > 0:
                time_series_dict = {}
                time_series_dict['index'] = key
                for each in data['timeSeries'][key]:  # Loop through the years
                    if each == None:
            if timeseries:
                time_series_dict = {'index': key}
                for each in timeseries:  # Loop through the years
                    if not each:
                        continue
                    else:
                        time_series_dict[each['asOfDate']] = each['reportedValue']
                        # time_series_dict["{}".format(each['asOfDate'])] = data['timeSeries'][key][each]['reportedValue']
                        time_series_dict[each.get('asOfDate')] = each.get('reportedValue')
                if 'trailing' in key:
                    TTM_dicts.append(time_series_dict)
                elif 'annual' in key:
                    Annual_dicts.append(time_series_dict)
        except Exception as e:
            pass
        except KeyError as e:
            print(f"An error occurred while processing the key: {e}")
    return TTM_dicts, Annual_dicts

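A usage sketch of the rewritten `retrieve_financial_details`, with an invented `timeSeries` payload shaped like Yahoo's "QuoteTimeSeriesStore":

```python
import pandas as _pd

# Illustrative input only - keys and values are made up
store = {'timeSeries': {
    'annualTotalRevenue': [
        {'asOfDate': '2022-12-31', 'reportedValue': 100.0},
        {'asOfDate': '2023-12-31', 'reportedValue': 120.0},
    ],
    'trailingTotalRevenue': [
        {'asOfDate': '2023-12-31', 'reportedValue': 125.0},
    ],
}}

ttm, annual = retrieve_financial_details(store)
annual_df = _pd.DataFrame.from_dict(annual).set_index('index')
print(annual_df.columns.tolist())  # ['2022-12-31', '2023-12-31']
```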
def format_annual_financial_statement(level_detail, annual_dicts, annual_order, ttm_dicts=None, ttm_order=None):
    '''
    """
    format_annual_financial_statement formats any annual financial statement

    Returns:
    - _statement: A fully formatted annual financial statement in pandas dataframe.
    '''
    """
    Annual = _pd.DataFrame.from_dict(annual_dicts).set_index("index")
    Annual = Annual.reindex(annual_order)
    Annual.index = Annual.index.str.replace(r'annual', '')

    # Note: balance sheet is the only financial statement with no ttm detail
    if (ttm_dicts not in [[], None]) and (ttm_order not in [[], None]):
        TTM = _pd.DataFrame.from_dict(ttm_dicts).set_index("index")
        TTM = TTM.reindex(ttm_order)
    if ttm_dicts and ttm_order:
        TTM = _pd.DataFrame.from_dict(ttm_dicts).set_index("index").reindex(ttm_order)
        # Add 'TTM' prefix to all column names, so if combined we can tell
        # the difference between actuals and TTM (similar to yahoo finance).
        TTM.columns = ['TTM ' + str(col) for col in TTM.columns]
@@ -261,12 +336,12 @@ def format_annual_financial_statement(level_detail, annual_dicts, annual_order,


def format_quarterly_financial_statement(_statement, level_detail, order):
    '''
    """
    format_quarterly_financial_statements formats any quarterly financial statement

    Returns:
    - _statement: A fully formatted quarterly financial statement in pandas dataframe.
    '''
    """
    _statement = _statement.reindex(order)
    _statement.index = camel2title(_statement.T)
    _statement['level_detail'] = level_detail
@@ -317,7 +392,7 @@ def camel2title(strings: List[str], sep: str = ' ', acronyms: Optional[List[str]

    # Apply str.title() to non-acronym words
    strings = [s.split(sep) for s in strings]
    strings = [[j.title() if not j in acronyms else j for j in s] for s in strings]
    strings = [[j.title() if j not in acronyms else j for j in s] for s in strings]
    strings = [sep.join(s) for s in strings]

    return strings
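For context, the acronym handling fixed above sits inside `camel2title`, which splits camelCase keys into title-cased words. A sketch of the intended behaviour, assuming the default separator (output is what the surrounding code implies, not verified against every yfinance version):

```python
# Assuming yfinance's utils module is importable in this environment
from yfinance import utils

print(utils.camel2title(['exDividendDate', 'payoutRatio']))
# Expected: ['Ex Dividend Date', 'Payout Ratio']
```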
@@ -347,14 +422,14 @@ def _parse_user_dt(dt, exchange_tz):

def _interval_to_timedelta(interval):
    if interval == "1mo":
        return _dateutil.relativedelta.relativedelta(months=1)
        return relativedelta(months=1)
    elif interval == "3mo":
        return _dateutil.relativedelta.relativedelta(months=3)
        return relativedelta(months=3)
    elif interval == "1y":
        return _dateutil.relativedelta.relativedelta(years=1)
        return relativedelta(years=1)
    elif interval == "1wk":
        return _pd.Timedelta(days=7, unit='d')
    else:
        return _pd.Timedelta(days=7)
    else:
        return _pd.Timedelta(interval)

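Month-like intervals need calendar-aware deltas, while fixed intervals map onto `pandas.Timedelta`; the final branch relies on pandas parsing strings like `'1h'`. Illustrative checks, assuming module scope:

```python
# Calendar-aware delta for month-like intervals
print(_interval_to_timedelta("1mo"))  # relativedelta(months=+1)
# Fixed-length intervals become pandas Timedelta
print(_interval_to_timedelta("1wk"))  # 7 days 00:00:00
print(_interval_to_timedelta("1h"))   # 0 days 01:00:00
```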
@@ -454,8 +529,7 @@ def parse_actions(data):
        splits.set_index("date", inplace=True)
        splits.index = _pd.to_datetime(splits.index, unit="s")
        splits.sort_index(inplace=True)
        splits["Stock Splits"] = splits["numerator"] / \
            splits["denominator"]
        splits["Stock Splits"] = splits["numerator"] / splits["denominator"]
        splits = splits[["Stock Splits"]]

    if dividends is None:
@@ -507,8 +581,8 @@ def fix_Yahoo_returning_prepost_unrequested(quotes, interval, tradingPeriods):


def fix_Yahoo_returning_live_separate(quotes, interval, tz_exchange):
    # Yahoo bug fix. If market is open today then Yahoo normally returns
    # todays data as a separate row from rest-of week/month interval in above row.
    # Yahoo bug fix. If market is open today then Yahoo normally returns
    # todays data as a separate row from rest-of week/month interval in above row.
    # Seems to depend on what exchange e.g. crypto OK.
    # Fix = merge them together
    n = quotes.shape[0]
@@ -534,7 +608,7 @@ def fix_Yahoo_returning_live_separate(quotes, interval, tz_exchange):
        elif interval == "3mo":
            last_rows_same_interval = dt1.year == dt2.year and dt1.quarter == dt2.quarter
        else:
            last_rows_same_interval = (dt1-dt2) < _pd.Timedelta(interval)
            last_rows_same_interval = (dt1 - dt2) < _pd.Timedelta(interval)

        if last_rows_same_interval:
            # Last two rows are within same interval
@@ -545,42 +619,117 @@ def fix_Yahoo_returning_live_separate(quotes, interval, tz_exchange):
        # Yahoo is not returning live data (phew!)
        return quotes
    if _np.isnan(quotes.loc[idx2, "Open"]):
        quotes.loc[idx2, "Open"] = quotes["Open"][n - 1]
        quotes.loc[idx2, "Open"] = quotes["Open"].iloc[n - 1]
    # Note: nanmax() & nanmin() ignores NaNs, but still need to check not all are NaN to avoid warnings
    if not _np.isnan(quotes["High"][n - 1]):
        quotes.loc[idx2, "High"] = _np.nanmax([quotes["High"][n - 1], quotes["High"][n - 2]])
    if not _np.isnan(quotes["High"].iloc[n - 1]):
        quotes.loc[idx2, "High"] = _np.nanmax([quotes["High"].iloc[n - 1], quotes["High"].iloc[n - 2]])
        if "Adj High" in quotes.columns:
            quotes.loc[idx2, "Adj High"] = _np.nanmax([quotes["Adj High"][n - 1], quotes["Adj High"][n - 2]])
            quotes.loc[idx2, "Adj High"] = _np.nanmax([quotes["Adj High"].iloc[n - 1], quotes["Adj High"].iloc[n - 2]])

    if not _np.isnan(quotes["Low"][n - 1]):
        quotes.loc[idx2, "Low"] = _np.nanmin([quotes["Low"][n - 1], quotes["Low"][n - 2]])
    if not _np.isnan(quotes["Low"].iloc[n - 1]):
        quotes.loc[idx2, "Low"] = _np.nanmin([quotes["Low"].iloc[n - 1], quotes["Low"].iloc[n - 2]])
        if "Adj Low" in quotes.columns:
            quotes.loc[idx2, "Adj Low"] = _np.nanmin([quotes["Adj Low"][n - 1], quotes["Adj Low"][n - 2]])
            quotes.loc[idx2, "Adj Low"] = _np.nanmin([quotes["Adj Low"].iloc[n - 1], quotes["Adj Low"].iloc[n - 2]])

    quotes.loc[idx2, "Close"] = quotes["Close"][n - 1]
    quotes.loc[idx2, "Close"] = quotes["Close"].iloc[n - 1]
    if "Adj Close" in quotes.columns:
        quotes.loc[idx2, "Adj Close"] = quotes["Adj Close"][n - 1]
    quotes.loc[idx2, "Volume"] += quotes["Volume"][n - 1]
        quotes.loc[idx2, "Adj Close"] = quotes["Adj Close"].iloc[n - 1]
    quotes.loc[idx2, "Volume"] += quotes["Volume"].iloc[n - 1]
    quotes = quotes.drop(quotes.index[n - 1])

    return quotes

def safe_merge_dfs(df_main, df_sub, interval):
    # Carefully merge 'df_sub' onto 'df_main'
    # If naive merge fails, try again with reindexing df_sub:
    # 1) if interval is weekly or monthly, then try with index set to start of week/month
    # 2) if still failing then manually search through df_main.index to reindex df_sub

    if df_sub.shape[0] == 0:
    if df_sub.empty:
        raise Exception("No data to merge")
    if df_main.empty:
        return df_main

    df_sub_backup = df_sub.copy()
    data_cols = [c for c in df_sub.columns if c not in df_main]
    if len(data_cols) > 1:
        raise Exception("Expected 1 data col")
    data_col = data_cols[0]

    df_main = df_main.sort_index()
    intraday = interval.endswith('m') or interval.endswith('s')

    td = _interval_to_timedelta(interval)
    if intraday:
        # On some exchanges the event can occur before market open.
        # Problem when combining with intraday data.
        # Solution = use dates, not datetimes, to map/merge.
        df_main['_date'] = df_main.index.date
        df_sub['_date'] = df_sub.index.date
        indices = _np.searchsorted(_np.append(df_main['_date'], [df_main['_date'].iloc[-1]+td]), df_sub['_date'], side='left')
        df_main = df_main.drop('_date', axis=1)
        df_sub = df_sub.drop('_date', axis=1)
    else:
        indices = _np.searchsorted(_np.append(df_main.index, df_main.index[-1] + td), df_sub.index, side='right')
    indices -= 1  # Convert from [[i-1], [i]) to [[i], [i+1])
    # Numpy.searchsorted does not handle out-of-range well, so handle manually:
    if intraday:
        for i in range(len(df_sub.index)):
            dt = df_sub.index[i].date()
            if dt < df_main.index[0].date() or dt >= df_main.index[-1].date() + _datetime.timedelta(days=1):
                # Out-of-range
                indices[i] = -1
    else:
        for i in range(len(df_sub.index)):
            dt = df_sub.index[i]
            if dt < df_main.index[0] or dt >= df_main.index[-1] + td:
                # Out-of-range
                indices[i] = -1

    f_outOfRange = indices == -1
    if f_outOfRange.any():
        if intraday:
            # Discard out-of-range dividends in intraday data, assume user not interested
            df_sub = df_sub[~f_outOfRange]
            if df_sub.empty:
                df_main['Dividends'] = 0.0
                return df_main
        else:
            empty_row_data = {**{c:[_np.nan] for c in const._PRICE_COLNAMES_}, 'Volume':[0]}
            if interval == '1d':
                # For 1d, add all out-of-range event dates
                for i in _np.where(f_outOfRange)[0]:
                    dt = df_sub.index[i]
                    get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
                    empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
                    df_main = _pd.concat([df_main, empty_row], sort=True)
            else:
                # Else, only add out-of-range event dates if occurring in interval
                # immediately after last price row
                last_dt = df_main.index[-1]
                next_interval_start_dt = last_dt + td
                next_interval_end_dt = next_interval_start_dt + td
                for i in _np.where(f_outOfRange)[0]:
                    dt = df_sub.index[i]
                    if next_interval_start_dt <= dt < next_interval_end_dt:
                        get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
                        empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
                        df_main = _pd.concat([df_main, empty_row], sort=True)
            df_main = df_main.sort_index()

            # Re-calculate indices
            indices = _np.searchsorted(_np.append(df_main.index, df_main.index[-1] + td), df_sub.index, side='right')
            indices -= 1  # Convert from [[i-1], [i]) to [[i], [i+1])
            # Numpy.searchsorted does not handle out-of-range well, so handle manually:
            for i in range(len(df_sub.index)):
                dt = df_sub.index[i]
                if dt < df_main.index[0] or dt >= df_main.index[-1] + td:
                    # Out-of-range
                    indices[i] = -1

            f_outOfRange = indices == -1
            if f_outOfRange.any():
                if intraday or interval in ['1d', '1wk']:
                    raise Exception(f"The following '{data_col}' events are out-of-range, did not expect with interval {interval}: {df_sub.index[f_outOfRange]}")
                get_yf_logger().debug(f'Discarding these {data_col} events:' + '\n' + str(df_sub[f_outOfRange]))
                df_sub = df_sub[~f_outOfRange].copy()
                indices = indices[~f_outOfRange]

    def _reindex_events(df, new_index, data_col_name):
        if len(new_index) == len(set(new_index)):
            # No duplicates, easy
@@ -598,124 +747,33 @@ def safe_merge_dfs(df_main, df_sub, interval):
            df = df.groupby("_NewIndex").prod()
            df.index.name = None
        else:
            raise Exception("New index contains duplicates but unsure how to aggregate for '{}'".format(data_col_name))
            raise Exception(f"New index contains duplicates but unsure how to aggregate for '{data_col_name}'")
        if "_NewIndex" in df.columns:
            df = df.drop("_NewIndex", axis=1)
        return df

    df = df_main.join(df_sub)

    f_na = df[data_col].isna()
    data_lost = sum(~f_na) < df_sub.shape[0]
    if not data_lost:
        return df
    # Lost data during join()
    # Backdate all df_sub.index dates to start of week/month
    if interval == "1wk":
        new_index = _pd.PeriodIndex(df_sub.index, freq='W').to_timestamp()
    elif interval == "1mo":
        new_index = _pd.PeriodIndex(df_sub.index, freq='M').to_timestamp()
    elif interval == "3mo":
        new_index = _pd.PeriodIndex(df_sub.index, freq='Q').to_timestamp()
    else:
        new_index = None

    if new_index is not None:
        new_index = new_index.tz_localize(df.index.tz, ambiguous=True, nonexistent='shift_forward')
        df_sub = _reindex_events(df_sub, new_index, data_col)
        df = df_main.join(df_sub)

    f_na = df[data_col].isna()
    data_lost = sum(~f_na) < df_sub.shape[0]
    if not data_lost:
        return df
    # Lost data during join(). Manually check each df_sub.index date against df_main.index to
    # find matching interval
    df_sub = df_sub_backup.copy()
    new_index = [-1] * df_sub.shape[0]
    for i in range(df_sub.shape[0]):
        dt_sub_i = df_sub.index[i]
        if dt_sub_i in df_main.index:
            new_index[i] = dt_sub_i
            continue
        # Found a bad index date, need to search for near-match in df_main (same week/month)
        fixed = False
        for j in range(df_main.shape[0] - 1):
            dt_main_j0 = df_main.index[j]
            dt_main_j1 = df_main.index[j + 1]
            if (dt_main_j0 <= dt_sub_i) and (dt_sub_i < dt_main_j1):
                fixed = True
                if interval.endswith('h') or interval.endswith('m'):
                    # Must also be same day
                    fixed = (dt_main_j0.date() == dt_sub_i.date()) and (dt_sub_i.date() == dt_main_j1.date())
                if fixed:
                    dt_sub_i = dt_main_j0
                break
        if not fixed:
            last_main_dt = df_main.index[df_main.shape[0] - 1]
            diff = dt_sub_i - last_main_dt
            if interval == "1mo" and last_main_dt.month == dt_sub_i.month:
                dt_sub_i = last_main_dt
                fixed = True
            elif interval == "3mo" and last_main_dt.year == dt_sub_i.year and last_main_dt.quarter == dt_sub_i.quarter:
                dt_sub_i = last_main_dt
                fixed = True
            elif interval == "1wk":
                if last_main_dt.week == dt_sub_i.week:
                    dt_sub_i = last_main_dt
                    fixed = True
                elif (dt_sub_i >= last_main_dt) and (dt_sub_i - last_main_dt < _datetime.timedelta(weeks=1)):
                    # With some specific start dates (e.g. around early Jan), Yahoo
                    # messes up start-of-week, is Saturday not Monday. So check
                    # if same week another way
                    dt_sub_i = last_main_dt
                    fixed = True
            elif interval == "1d" and last_main_dt.day == dt_sub_i.day:
                dt_sub_i = last_main_dt
                fixed = True
            elif interval == "1h" and last_main_dt.hour == dt_sub_i.hour:
                dt_sub_i = last_main_dt
                fixed = True
            elif interval.endswith('m') or interval.endswith('h'):
                td = _pd.to_timedelta(interval)
                if (dt_sub_i >= last_main_dt) and (dt_sub_i - last_main_dt < td):
                    dt_sub_i = last_main_dt
                    fixed = True
        new_index[i] = dt_sub_i
    new_index = df_main.index[indices]
    df_sub = _reindex_events(df_sub, new_index, data_col)
    df = df_main.join(df_sub)

    df = df_main.join(df_sub)
    f_na = df[data_col].isna()
    data_lost = sum(~f_na) < df_sub.shape[0]
    if data_lost:
        ## Not always possible to match events with trading, e.g. when released pre-market.
        ## So have to append to bottom with nan prices.
        ## But should only be impossible with intra-day price data.
        if interval.endswith('m') or interval.endswith('h') or interval == "1d":
            # Update: is possible with daily data when dividend very recent
            f_missing = ~df_sub.index.isin(df.index)
            df_sub_missing = df_sub[f_missing].copy()
            keys = {"Adj Open", "Open", "Adj High", "High", "Adj Low", "Low", "Adj Close",
                    "Close"}.intersection(df.columns)
            df_sub_missing[list(keys)] = _np.nan
            col_ordering = df.columns
            df = _pd.concat([df, df_sub_missing], sort=True)[col_ordering]
        else:
            raise Exception("Lost data during merge despite all attempts to align data (see above)")
            raise Exception('Data was lost in merge, investigate')

    return df

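The core of the rewritten merge is the `searchsorted` binning above: each event timestamp is assigned to the price row whose interval contains it. A minimal standalone sketch of that binning (data below is invented for illustration):

```python
import numpy as np
import pandas as pd

prices_idx = pd.to_datetime(["2023-01-02", "2023-01-09", "2023-01-16"])  # weekly price rows
events = pd.to_datetime(["2023-01-10", "2023-01-20"])                    # e.g. dividend dates
td = pd.Timedelta(days=7)

# Append one extra edge so the last interval has a right boundary,
# then bin each event into [interval_start, next_interval_start)
edges = np.append(prices_idx, prices_idx[-1] + td)
idx = np.searchsorted(edges, events, side="right") - 1
print(idx)  # [1 2] -> events map to the 2nd and 3rd price rows
```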
def fix_Yahoo_dst_issue(df, interval):
    if interval in ["1d", "1w", "1wk"]:
        # These intervals should start at time 00:00. But for some combinations of date and timezone,
        # These intervals should start at time 00:00. But for some combinations of date and timezone,
        # Yahoo has time off by few hours (e.g. Brazil 23:00 around Jan-2022). Suspect DST problem.
        # The clue is (a) minutes=0 and (b) hour near 0.
        # The clue is (a) minutes=0 and (b) hour near 0.
        # Obviously Yahoo meant 00:00, so ensure this doesn't affect date conversion:
        f_pre_midnight = (df.index.minute == 0) & (df.index.hour.isin([22, 23]))
        dst_error_hours = _np.array([0] * df.shape[0])
        dst_error_hours[f_pre_midnight] = 24 - df.index[f_pre_midnight].hour
        df.index += _pd.TimedeltaIndex(dst_error_hours, 'h')
        df.index += _pd.to_timedelta(dst_error_hours, 'h')
    return df


@@ -752,7 +810,7 @@ def format_history_metadata(md, tradingPeriodsOnly=True):

    if "tradingPeriods" in md:
        tps = md["tradingPeriods"]
        if tps == {"pre":[], "post":[]}:
        if tps == {"pre": [], "post": []}:
            # Ignore
            pass
        elif isinstance(tps, (list, dict)):
@@ -768,8 +826,8 @@ def format_history_metadata(md, tradingPeriodsOnly=True):
            post_df = _pd.DataFrame.from_records(_np.hstack(tps["post"]))
            regular_df = _pd.DataFrame.from_records(_np.hstack(tps["regular"]))

            pre_df = pre_df.rename(columns={"start":"pre_start", "end":"pre_end"}).drop(["timezone", "gmtoffset"], axis=1)
            post_df = post_df.rename(columns={"start":"post_start", "end":"post_end"}).drop(["timezone", "gmtoffset"], axis=1)
            pre_df = pre_df.rename(columns={"start": "pre_start", "end": "pre_end"}).drop(["timezone", "gmtoffset"], axis=1)
            post_df = post_df.rename(columns={"start": "post_start", "end": "post_end"}).drop(["timezone", "gmtoffset"], axis=1)
            regular_df = regular_df.drop(["timezone", "gmtoffset"], axis=1)

            cols = ["pre_start", "pre_end", "start", "end", "post_start", "post_end"]
@@ -786,6 +844,7 @@ def format_history_metadata(md, tradingPeriodsOnly=True):

    return md


class ProgressBar:
    def __init__(self, iterations, text='completed'):
        self.text = text
@@ -800,9 +859,9 @@ class ProgressBar:
        if self.elapsed > self.iterations:
            self.elapsed = self.iterations
        self.update_iteration(1)
        print('\r' + str(self), end='')
        _sys.stdout.flush()
        print()
        print('\r' + str(self), end='', file=_sys.stderr)
        _sys.stderr.flush()
        print("", file=_sys.stderr)

    def animate(self, iteration=None):
        if iteration is None:
@@ -811,211 +870,24 @@ class ProgressBar:
        else:
            self.elapsed += iteration

        print('\r' + str(self), end='')
        _sys.stdout.flush()
        print('\r' + str(self), end='', file=_sys.stderr)
        _sys.stderr.flush()
        self.update_iteration()

    def update_iteration(self, val=None):
        val = val if val is not None else self.elapsed / float(self.iterations)
        self.__update_amount(val * 100.0)
        self.prog_bar += ' %s of %s %s' % (
            self.elapsed, self.iterations, self.text)
        self.prog_bar += f" {self.elapsed} of {self.iterations} {self.text}"

    def __update_amount(self, new_amount):
        percent_done = int(round((new_amount / 100.0) * 100.0))
        all_full = self.width - 2
        num_hashes = int(round((percent_done / 100.0) * all_full))
        self.prog_bar = '[' + self.fill_char * \
            num_hashes + ' ' * (all_full - num_hashes) + ']'
        self.prog_bar = '[' + self.fill_char * num_hashes + ' ' * (all_full - num_hashes) + ']'
        pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))
        pct_string = '%d%%' % percent_done
        self.prog_bar = self.prog_bar[0:pct_place] + \
            (pct_string + self.prog_bar[pct_place + len(pct_string):])
        pct_string = f'{percent_done}%'
        self.prog_bar = self.prog_bar[0:pct_place] + (pct_string + self.prog_bar[pct_place + len(pct_string):])

    def __str__(self):
        return str(self.prog_bar)


# ---------------------------------
# TimeZone cache related code
# ---------------------------------

class _KVStore:
    """Simple Sqlite backed key/value store, key and value are strings. Should be thread safe."""

    def __init__(self, filename):
        self._cache_mutex = Lock()
        with self._cache_mutex:
            self.conn = _sqlite3.connect(filename, timeout=10, check_same_thread=False)
            self.conn.execute('pragma journal_mode=wal')
            try:
                self.conn.execute('create table if not exists "kv" (key TEXT primary key, value TEXT) without rowid')
            except Exception as e:
                if 'near "without": syntax error' in str(e):
                    # "without rowid" requires sqlite 3.8.2. Older versions will raise exception
                    self.conn.execute('create table if not exists "kv" (key TEXT primary key, value TEXT)')
                else:
                    raise
            self.conn.commit()
        _atexit.register(self.close)

    def close(self):
        if self.conn is not None:
            with self._cache_mutex:
                self.conn.close()
                self.conn = None

    def get(self, key: str) -> Union[str, None]:
        """Get value for key if it exists else returns None"""
        try:
            item = self.conn.execute('select value from "kv" where key=?', (key,))
        except _sqlite3.IntegrityError as e:
            self.delete(key)
            return None
        if item:
            return next(item, (None,))[0]

    def set(self, key: str, value: str) -> None:
        if value is None:
            self.delete(key)
        else:
            with self._cache_mutex:
                self.conn.execute('replace into "kv" (key, value) values (?,?)', (key, value))
                self.conn.commit()

    def bulk_set(self, kvdata: Dict[str, str]):
        records = tuple(i for i in kvdata.items())
        with self._cache_mutex:
            self.conn.executemany('replace into "kv" (key, value) values (?,?)', records)
            self.conn.commit()

    def delete(self, key: str):
        with self._cache_mutex:
            self.conn.execute('delete from "kv" where key=?', (key,))
            self.conn.commit()


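A hedged smoke test of the `_KVStore` defined above, assuming it runs in the same module (the temp path is illustrative):

```python
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "kv.db")
store = _KVStore(path)
store.set("MSFT", "America/New_York")
print(store.get("MSFT"))     # America/New_York
print(store.get("missing"))  # None - absent keys fall through to None
store.delete("MSFT")
store.close()
```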
class _TzCacheException(Exception):
    pass


class _TzCache:
    """Simple sqlite file cache of ticker->timezone"""

    def __init__(self):
        self._setup_cache_folder()
        # Must init db here, where it is thread-safe
        self._tz_db = _KVStore(_os.path.join(self._db_dir, "tkr-tz.db"))
        self._migrate_cache_tkr_tz()

    def _setup_cache_folder(self):
        if not _os.path.isdir(self._db_dir):
            try:
                _os.makedirs(self._db_dir)
            except OSError as err:
                raise _TzCacheException("Error creating TzCache folder: '{}' reason: {}"
                                        .format(self._db_dir, err))

        elif not (_os.access(self._db_dir, _os.R_OK) and _os.access(self._db_dir, _os.W_OK)):
            raise _TzCacheException("Cannot read and write in TzCache folder: '{}'"
                                    .format(self._db_dir, ))

    def lookup(self, tkr):
        return self.tz_db.get(tkr)

    def store(self, tkr, tz):
        if tz is None:
            self.tz_db.delete(tkr)
        elif self.tz_db.get(tkr) is not None:
            raise Exception("Tkr {} tz already in cache".format(tkr))
        else:
            self.tz_db.set(tkr, tz)

    @property
    def _db_dir(self):
        global _cache_dir
        return _os.path.join(_cache_dir, "py-yfinance")

    @property
    def tz_db(self):
        return self._tz_db

    def _migrate_cache_tkr_tz(self):
        """Migrate contents from old ticker CSV-cache to SQLite db"""
        old_cache_file_path = _os.path.join(self._db_dir, "tkr-tz.csv")

        if not _os.path.isfile(old_cache_file_path):
            return None
        try:
            df = _pd.read_csv(old_cache_file_path, index_col="Ticker", on_bad_lines="skip")
        except _pd.errors.EmptyDataError:
            _os.remove(old_cache_file_path)
        except TypeError:
            _os.remove(old_cache_file_path)
        else:
            # Discard corrupt data:
            df = df[~df["Tz"].isna().to_numpy()]
            df = df[~(df["Tz"]=='').to_numpy()]
            df = df[~df.index.isna()]
            if not df.empty:
                try:
                    self.tz_db.bulk_set(df.to_dict()['Tz'])
                except Exception as e:
                    # Ignore
                    pass

            _os.remove(old_cache_file_path)


class _TzCacheDummy:
    """Dummy cache to use if tz cache is disabled"""

    def lookup(self, tkr):
        return None

    def store(self, tkr, tz):
        pass

    @property
    def tz_db(self):
        return None


def get_tz_cache():
    """
    Get the timezone cache, initializes it and creates cache folder if needed on first call.
    If folder cannot be created for some reason it will fall back to initialize a
    dummy cache with same interface as real cache.
    """
    # as this can be called from multiple threads, protect it.
    with _cache_init_lock:
        global _tz_cache
        if _tz_cache is None:
            try:
                _tz_cache = _TzCache()
            except _TzCacheException as err:
                logger.error("Failed to create TzCache, reason: %s. "
                             "TzCache will not be used. "
                             "Tip: You can direct cache to use a different location with 'set_tz_cache_location(mylocation)'",
                             err)
                _tz_cache = _TzCacheDummy()

        return _tz_cache


_cache_dir = _ad.user_cache_dir()
_cache_init_lock = Lock()
_tz_cache = None


def set_tz_cache_location(cache_dir: str):
    """
    Sets the path to create the "py-yfinance" cache folder in.
    Useful if the default folder returned by "appdir.user_cache_dir()" is not writable.
    Must be called before cache is used (that is, before fetching tickers).
    :param cache_dir: Path to use for caches
    :return: None
    """
    global _cache_dir, _tz_cache
    assert _tz_cache is None, "Time Zone cache already initialized, setting path must be done before cache is created"
    _cache_dir = cache_dir

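Usage sketch, assuming `set_tz_cache_location` is re-exported at package level as in released yfinance (the path is illustrative):

```python
import yfinance as yf

# Must run before any ticker data is fetched, i.e. before the timezone
# cache is first initialized
yf.set_tz_cache_location("/tmp/yf-cache")
```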
@@ -1 +1 @@
version = "0.2.20"
version = "0.2.40"