Compare commits
660 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1b0d8357d6 | ||
|
|
4466e57b95 | ||
|
|
308e58b914 | ||
|
|
f6beadf448 | ||
|
|
7da64b679e | ||
|
|
38f8ccd40a | ||
|
|
13acc3dc97 | ||
|
|
cc1ac7bbcc | ||
|
|
75449fd0ac | ||
|
|
22e0c414c4 | ||
|
|
37d60e6efb | ||
|
|
dac9a48742 | ||
|
|
bd52326091 | ||
|
|
9581b8bd45 | ||
|
|
62b2c25da8 | ||
|
|
7618dda5d0 | ||
|
|
95ef486e13 | ||
|
|
9e59f6b61c | ||
|
|
716cd65fd3 | ||
|
|
5b1605b5a1 | ||
|
|
412cfbcd6d | ||
|
|
6abee6df44 | ||
|
|
fad21dfeac | ||
|
|
fc27f9c367 | ||
|
|
bb79b573ed | ||
|
|
127b53ee7f | ||
|
|
88525abcbd | ||
|
|
99ef055cc4 | ||
|
|
0f36f7980b | ||
|
|
8282af9ce4 | ||
|
|
5208c8cf05 | ||
|
|
d3dfb4c6a8 | ||
|
|
279726afe4 | ||
|
|
937386f3ef | ||
|
|
32e569f652 | ||
|
|
de59f0b2c6 | ||
|
|
7d6d8562e8 | ||
|
|
6cae6d45b1 | ||
|
|
ec3de0710d | ||
|
|
0713d93867 | ||
|
|
67e81a8f9a | ||
|
|
b6372c0945 | ||
|
|
c9dd582dd8 | ||
|
|
677f3d5702 | ||
|
|
4f9b05a546 | ||
|
|
e1f94ed337 | ||
|
|
93a7ee6161 | ||
|
|
5b0cb60cf5 | ||
|
|
1a97c22874 | ||
|
|
b0de31da63 | ||
|
|
cc87608824 | ||
|
|
6c1e26093c | ||
|
|
e8fdd12cb1 | ||
|
|
93b6e024da | ||
|
|
d5282967ce | ||
|
|
9908c1ff48 | ||
|
|
a4d7d6c577 | ||
|
|
f9080c22a5 | ||
|
|
32e1d479b1 | ||
|
|
5729ce3cb6 | ||
|
|
d0b2070036 | ||
|
|
688120cab7 | ||
|
|
4a1e1c4447 | ||
|
|
f99677ed1e | ||
|
|
6a613eb114 | ||
|
|
0503240973 | ||
|
|
ae6c05fa74 | ||
|
|
aa9a0286a1 | ||
|
|
ddf0cf19cd | ||
|
|
a2bde88c36 | ||
|
|
1bd819ac4d | ||
|
|
1b9fc5f12f | ||
|
|
274f309052 | ||
|
|
edac283a60 | ||
|
|
781fad501f | ||
|
|
39527d24d4 | ||
|
|
45f1c88460 | ||
|
|
7d638e1040 | ||
|
|
97b13dfa8c | ||
|
|
693565a85b | ||
|
|
957051e0e8 | ||
|
|
bd81ebb4e9 | ||
|
|
46f53f9957 | ||
|
|
056b84d8fe | ||
|
|
835dbd9629 | ||
|
|
07a4594455 | ||
|
|
736c03ac5b | ||
|
|
adfa2e9beb | ||
|
|
b286797e8c | ||
|
|
b306bef350 | ||
|
|
61c89660df | ||
|
|
31af2ab1d5 | ||
|
|
21c380fa61 | ||
|
|
e0000cd787 | ||
|
|
11d43eb1a1 | ||
|
|
509a109f29 | ||
|
|
b0639409a3 | ||
|
|
ed10feee9a | ||
|
|
aba81eedc2 | ||
|
|
d424d027ac | ||
|
|
9268fcfa76 | ||
|
|
711e1138d3 | ||
|
|
0789b690a4 | ||
|
|
6055566de8 | ||
|
|
398a19a855 | ||
|
|
e771cfabb6 | ||
|
|
5b676f803b | ||
|
|
eb5c50d5c7 | ||
|
|
1cb0b215c4 | ||
|
|
50dcb2ce5a | ||
|
|
1ce9ce2784 | ||
|
|
cd4816e289 | ||
|
|
27e9ce7542 | ||
|
|
02c1c60f3b | ||
|
|
27ea9472c1 | ||
|
|
801f58790a | ||
|
|
080834e3ce | ||
|
|
4e7b2094d0 | ||
|
|
c72e04bf55 | ||
|
|
abbe4c3a2f | ||
|
|
9e21b85043 | ||
|
|
b44917b7f9 | ||
|
|
6f78dd6e6b | ||
|
|
593dc8fcee | ||
|
|
b94baa4cc5 | ||
|
|
1a054135fb | ||
|
|
4e2253a406 | ||
|
|
9af7ec0a4e | ||
|
|
8624216e21 | ||
|
|
954e71d19c | ||
|
|
5124059422 | ||
|
|
d18cd6f42f | ||
|
|
c20211a06c | ||
|
|
cdfe7d0d2d | ||
|
|
e57647c1d7 | ||
|
|
762abd8bba | ||
|
|
d1ea402792 | ||
|
|
65f65b1776 | ||
|
|
9388c29207 | ||
|
|
9f91f4b180 | ||
|
|
cac616a24c | ||
|
|
72a9e45e56 | ||
|
|
4802199ae7 | ||
|
|
d9bfd29113 | ||
|
|
4711aab7b3 | ||
|
|
30d20c1206 | ||
|
|
5c565c8934 | ||
|
|
2fff97290b | ||
|
|
62ca5ab6be | ||
|
|
83b177b7fb | ||
|
|
e8b99cb4e6 | ||
|
|
503d234020 | ||
|
|
144efd3b08 | ||
|
|
80fc91ffa9 | ||
|
|
9821197fd1 | ||
|
|
45b5cac33b | ||
|
|
d755b8c7ff | ||
|
|
ab1042b4c9 | ||
|
|
8172fc02d2 | ||
|
|
836082280b | ||
|
|
6a98c2eda6 | ||
|
|
46f55c8983 | ||
|
|
b025fef22c | ||
|
|
b96319dd64 | ||
|
|
74b88dc62c | ||
|
|
e3778465d8 | ||
|
|
f82177ea2e | ||
|
|
d30a2a0915 | ||
|
|
142b1f3eb4 | ||
|
|
afad7fcf0b | ||
|
|
0baedbe4f5 | ||
|
|
2c3c3dc8a9 | ||
|
|
8585dda77a | ||
|
|
3eb60fbd4a | ||
|
|
d3e2e71a6e | ||
|
|
4937c933a2 | ||
|
|
045cd45893 | ||
|
|
6d52cb6e3a | ||
|
|
a24c0e1391 | ||
|
|
1e941fc86a | ||
|
|
0b52e8f118 | ||
|
|
d45bed3d53 | ||
|
|
4152f7c897 | ||
|
|
e7a3848f69 | ||
|
|
fc4350e463 | ||
|
|
13556afd90 | ||
|
|
3d29ced428 | ||
|
|
6a63ce9e15 | ||
|
|
2fe5a0a361 | ||
|
|
63699a6aad | ||
|
|
a649b40dc9 | ||
|
|
a01edee4fa | ||
|
|
5367f62bd7 | ||
|
|
27cb90c596 | ||
|
|
6c2682654a | ||
|
|
e89e190d11 | ||
|
|
a236270389 | ||
|
|
ef1205388c | ||
|
|
bb477989d4 | ||
|
|
478dc0a350 | ||
|
|
b5dca4941a | ||
|
|
6b71ba977c | ||
|
|
195a7aa304 | ||
|
|
a58d7456fe | ||
|
|
1edeaf07dc | ||
|
|
7f04a9dcb6 | ||
|
|
7b95f554bd | ||
|
|
6c70b866c7 | ||
|
|
bd696fb4db | ||
|
|
d13aafa633 | ||
|
|
00823f6fa6 | ||
|
|
21fdba9021 | ||
|
|
972547ca8c | ||
|
|
23b400f0fb | ||
|
|
ca8c1c8cb4 | ||
|
|
6b8b0d5c86 | ||
|
|
952a04338f | ||
|
|
a1a385196b | ||
|
|
62a442bd15 | ||
|
|
a0046439d1 | ||
|
|
63a8476575 | ||
|
|
e96f4f3cc0 | ||
|
|
cd5d0dfc3b | ||
|
|
ece41cdb06 | ||
|
|
c362d54b1a | ||
|
|
543e4fe582 | ||
|
|
f8aab533ba | ||
|
|
53fca7016e | ||
|
|
4b6529c3a5 | ||
|
|
8957147926 | ||
|
|
4c7392ed17 | ||
|
|
0efda4f5af | ||
|
|
508de4aefb | ||
|
|
3d39992280 | ||
|
|
b462836540 | ||
|
|
2795660c28 | ||
|
|
3dc87753ea | ||
|
|
645cc19037 | ||
|
|
86d6acccf7 | ||
|
|
0f5db35b6e | ||
|
|
7c6742a60a | ||
|
|
4fa32a98ed | ||
|
|
36ace8017d | ||
|
|
5cdc78f479 | ||
|
|
ba634fad0e | ||
|
|
35f4071c0b | ||
|
|
8a5ca71f52 | ||
|
|
ead0bce96e | ||
|
|
86b00091a9 | ||
|
|
141ce7e471 | ||
|
|
4eae728a06 | ||
|
|
2d6b6b26ed | ||
|
|
ec3dfaf305 | ||
|
|
e89d390824 | ||
|
|
563a1a3448 | ||
|
|
2e6d3d0e60 | ||
|
|
2a2928b4a0 | ||
|
|
d47133e5bf | ||
|
|
8f0c58dafa | ||
|
|
27a721c7dd | ||
|
|
3e964d5319 | ||
|
|
84a31ae0b4 | ||
|
|
891b533ec2 | ||
|
|
b9fb3e4979 | ||
|
|
09342982a4 | ||
|
|
da8c49011e | ||
|
|
b805f0a010 | ||
|
|
5b0feb3d20 | ||
|
|
ecbfc2957d | ||
|
|
e96248dec7 | ||
|
|
7d0045f03c | ||
|
|
c3d7449844 | ||
|
|
a4f11b0243 | ||
|
|
1702fd0797 | ||
|
|
464b3333d7 | ||
|
|
685f2ec351 | ||
|
|
aad46baf28 | ||
|
|
a97db0aac6 | ||
|
|
553bc5965a | ||
|
|
af5f96f97e | ||
|
|
a4bdaea888 | ||
|
|
ac5a9d2793 | ||
|
|
b17ad32a47 | ||
|
|
af39855e28 | ||
|
|
ac6e047f0d | ||
|
|
1e24337f29 | ||
|
|
2cc82ae12f | ||
|
|
d11f385049 | ||
|
|
7377611e1f | ||
|
|
f3b5fb85c9 | ||
|
|
a4faef83ac | ||
|
|
e1184f745b | ||
|
|
fe630008e9 | ||
|
|
b43072cf0a | ||
|
|
ad3f4cabc9 | ||
|
|
f70567872c | ||
|
|
a8ade72113 | ||
|
|
1dcc8c9c8b | ||
|
|
dd5462b307 | ||
|
|
e39c03e8e3 | ||
|
|
9297504b84 | ||
|
|
3971115ab9 | ||
|
|
b5badbbc61 | ||
|
|
ba8621f5be | ||
|
|
8e5c94a4eb | ||
|
|
66a1c1a174 | ||
|
|
ab6214df79 | ||
|
|
dc5d42c8e2 | ||
|
|
ab75495cd3 | ||
|
|
39c1ecc7a2 | ||
|
|
af7720668c | ||
|
|
9051fba601 | ||
|
|
03ea6acec0 | ||
|
|
ddc93033d7 | ||
|
|
eb6d830e2a | ||
|
|
2b0ae5a6c1 | ||
|
|
1636839b67 | ||
|
|
65b97d024b | ||
|
|
fb77d35863 | ||
|
|
197d2968e3 | ||
|
|
7460dbea17 | ||
|
|
b49fd797fc | ||
|
|
6bd8fb2290 | ||
|
|
cd1e16ad9e | ||
|
|
3fd9ea2204 | ||
|
|
d5a1266cbe | ||
|
|
89bbe8ad4c | ||
|
|
e44c6f8b0e | ||
|
|
0ba810fda5 | ||
|
|
677bbfed8b | ||
|
|
97671b78dd | ||
|
|
2865c0df9f | ||
|
|
0c037ddd12 | ||
|
|
3ee4674098 | ||
|
|
5d9a91da4a | ||
|
|
47c579ff22 | ||
|
|
caf5cba801 | ||
|
|
486c7894ce | ||
|
|
db8a00edae | ||
|
|
805523b924 | ||
|
|
32ab2e648d | ||
|
|
4d91ae740a | ||
|
|
05ec4b4312 | ||
|
|
cd2c1ada14 | ||
|
|
4ca9642403 | ||
|
|
b438f29a71 | ||
|
|
4db178b8d6 | ||
|
|
38637a9821 | ||
|
|
de8c0bdcdd | ||
|
|
fd35975cf9 | ||
|
|
1495834a09 | ||
|
|
2a7588dead | ||
|
|
051de748b9 | ||
|
|
97adb30d41 | ||
|
|
eacfbc45c0 | ||
|
|
8deddd7ee9 | ||
|
|
beb494b67e | ||
|
|
e2948a8b48 | ||
|
|
ff3d3f2f78 | ||
|
|
85783da515 | ||
|
|
9dbfad4294 | ||
|
|
5e54b92efd | ||
|
|
cffdbd47b5 | ||
|
|
f398f46509 | ||
|
|
097c76aa46 | ||
|
|
a9da16e048 | ||
|
|
8e5f0984af | ||
|
|
38b738e766 | ||
|
|
55772d30a4 | ||
|
|
382285cfd9 | ||
|
|
d2e5ce284e | ||
|
|
88d21d742d | ||
|
|
7a0356d47b | ||
|
|
a13bf0cd6c | ||
|
|
7cacf233ce | ||
|
|
b48212e420 | ||
|
|
f10f9970b2 | ||
|
|
96ff214107 | ||
|
|
e7bf3607e8 | ||
|
|
2883362a0e | ||
|
|
df7af507f0 | ||
|
|
46dbed3e7e | ||
|
|
46d5579caa | ||
|
|
11a3a9d457 | ||
|
|
6dca1eea96 | ||
|
|
85ef53c6bb | ||
|
|
4c41ba0a50 | ||
|
|
6f60a78262 | ||
|
|
8f083818c3 | ||
|
|
791c845d23 | ||
|
|
aeea23229f | ||
|
|
e91ffe4844 | ||
|
|
df9d456cf6 | ||
|
|
4c89e8aefa | ||
|
|
7ddce7f80b | ||
|
|
b3dbbc46e2 | ||
|
|
762d446661 | ||
|
|
1aa3c3d9a8 | ||
|
|
0f6ad3290d | ||
|
|
e26a4c5a1c | ||
|
|
d963e3fe1c | ||
|
|
0cd54486d0 | ||
|
|
f93c3d76ce | ||
|
|
8bf7576b33 | ||
|
|
2eae33bd33 | ||
|
|
5e333f53ee | ||
|
|
9c249a100f | ||
|
|
0ee3d6d72d | ||
|
|
3c218b81a3 | ||
|
|
80dc0e8488 | ||
|
|
4064ec53c3 | ||
|
|
37ac9bd1d5 | ||
|
|
e234b8c5ab | ||
|
|
efc56c43c2 | ||
|
|
50de008820 | ||
|
|
d7baa0713e | ||
|
|
3b19ef12bc | ||
|
|
dfb15e6778 | ||
|
|
379b87d925 | ||
|
|
b856041b53 | ||
|
|
b3b36c5cc9 | ||
|
|
ab1476c0d1 | ||
|
|
566a38b432 | ||
|
|
96e4532a9d | ||
|
|
bd3569367e | ||
|
|
20680b0e38 | ||
|
|
44e8d2b46b | ||
|
|
80c659be71 | ||
|
|
06640102f8 | ||
|
|
a0c47c9944 | ||
|
|
744e70ffff | ||
|
|
e6211896f7 | ||
|
|
ca27d070f0 | ||
|
|
82b99b5c9e | ||
|
|
c5c1567321 | ||
|
|
1adc908788 | ||
|
|
2970d9460f | ||
|
|
f0b5db234a | ||
|
|
c6f760e61c | ||
|
|
6067d2a590 | ||
|
|
c56e3496db | ||
|
|
55fd565ef0 | ||
|
|
231d985c82 | ||
|
|
0f433d7e5d | ||
|
|
e188c7e41f | ||
|
|
fa7d743826 | ||
|
|
f4b3348c8e | ||
|
|
724118a671 | ||
|
|
ea95d718ee | ||
|
|
9ba3d5a1ea | ||
|
|
b67372e4eb | ||
|
|
77107c6ea0 | ||
|
|
1ed4b4b65d | ||
|
|
2a0e14962e | ||
|
|
43aae83a1b | ||
|
|
fff8e9145d | ||
|
|
4f1e7a49c4 | ||
|
|
357da735ea | ||
|
|
b7b3b4975d | ||
|
|
7d5fdb6f83 | ||
|
|
1c2ed86313 | ||
|
|
23e8423b8b | ||
|
|
c7cf4378f6 | ||
|
|
5bfbec5df0 | ||
|
|
a775669ac5 | ||
|
|
f96dfc25c2 | ||
|
|
f6c311815d | ||
|
|
112fd5cf64 | ||
|
|
2be718700f | ||
|
|
080d33f597 | ||
|
|
c248b422da | ||
|
|
b050692ee4 | ||
|
|
2fed55a0d1 | ||
|
|
438f512f47 | ||
|
|
157b45269d | ||
|
|
9b169e60fb | ||
|
|
1b439c4af6 | ||
|
|
2cc3cbb1e8 | ||
|
|
743f3acb87 | ||
|
|
9f9f7b00d8 | ||
|
|
07e19f187a | ||
|
|
45169d9ff4 | ||
|
|
05520ee108 | ||
|
|
e1dec42950 | ||
|
|
369cbc41e5 | ||
|
|
4d4c5c1819 | ||
|
|
028334de92 | ||
|
|
2b1d5c848c | ||
|
|
fe5a9d70e5 | ||
|
|
e89fe6357b | ||
|
|
bcd6e5b11d | ||
|
|
e83cc74800 | ||
|
|
9e529f3c8f | ||
|
|
e29df56253 | ||
|
|
f6a0979916 | ||
|
|
e3d2c5d6d7 | ||
|
|
a836f24144 | ||
|
|
fb5c67b3bd | ||
|
|
3f33aa0377 | ||
|
|
ecdc36ab8e | ||
|
|
fbc5de153a | ||
|
|
e4a228b830 | ||
|
|
3cee66dea7 | ||
|
|
bec5b38189 | ||
|
|
f5973b2c89 | ||
|
|
edb911b913 | ||
|
|
6117b0a042 | ||
|
|
5cb5484a9a | ||
|
|
4e33ddf615 | ||
|
|
6d87f3d689 | ||
|
|
b30b97fa36 | ||
|
|
6253e1d8a0 | ||
|
|
2dce6a705c | ||
|
|
df11fcdb37 | ||
|
|
567e2cf0d3 | ||
|
|
3d6e88857b | ||
|
|
59af19d84c | ||
|
|
e07191b627 | ||
|
|
2623ba967d | ||
|
|
fe1c705e24 | ||
|
|
9315f7b61d | ||
|
|
c49cf626bb | ||
|
|
fa6f3fc537 | ||
|
|
f76c788881 | ||
|
|
561f56c9f9 | ||
|
|
cf795ea0c7 | ||
|
|
643536b53b | ||
|
|
ae8a5ff996 | ||
|
|
d01d378c8d | ||
|
|
34dfe944d9 | ||
|
|
9619839bf5 | ||
|
|
90e00a71ca | ||
|
|
f525ee2f5e | ||
|
|
ef12c8b600 | ||
|
|
9e0152aae4 | ||
|
|
6c21c1994e | ||
|
|
d24a25f579 | ||
|
|
422a50672d | ||
|
|
6e09410c7d | ||
|
|
3c51687351 | ||
|
|
783df54978 | ||
|
|
c76bf0128f | ||
|
|
33f57ac002 | ||
|
|
42e6d0894e | ||
|
|
de1c3c091b | ||
|
|
c6c0fa3347 | ||
|
|
c0e1536179 | ||
|
|
303e0ea655 | ||
|
|
40424b71a6 | ||
|
|
b018f917a9 | ||
|
|
28e50946ca | ||
|
|
841b485b1d | ||
|
|
75c823a72c | ||
|
|
e842a9d657 | ||
|
|
0f14728591 | ||
|
|
69dfe325ae | ||
|
|
f1ad8f0061 | ||
|
|
f20aa9a875 | ||
|
|
5707c1aa65 | ||
|
|
053e0b9abb | ||
|
|
730afda4a7 | ||
|
|
1e7f4a9a91 | ||
|
|
37c36549e4 | ||
|
|
bda339b170 | ||
|
|
f5995161ed | ||
|
|
b27cc0cf40 | ||
|
|
1d7f8139d6 | ||
|
|
01ef1bb813 | ||
|
|
1db6be75b8 | ||
|
|
7902ec8667 | ||
|
|
ff42a3ac87 | ||
|
|
6e96a1a8e6 | ||
|
|
68b8671cea | ||
|
|
3b8114c135 | ||
|
|
d65391b798 | ||
|
|
6c4da51519 | ||
|
|
4734e92090 | ||
|
|
5fdf2463e9 | ||
|
|
c679551faa | ||
|
|
fdf52ac360 | ||
|
|
94ad0bd955 | ||
|
|
51c0ea0050 | ||
|
|
3401d4dbe7 | ||
|
|
a724585552 | ||
|
|
1c85433cc0 | ||
|
|
34e1b2f157 | ||
|
|
c80bfc0417 | ||
|
|
51f2c7301d | ||
|
|
632a16670a | ||
|
|
fea0dca6f4 | ||
|
|
c7e95152a0 | ||
|
|
a52e972d04 | ||
|
|
a197d9f78e | ||
|
|
dbb9bbfbf3 | ||
|
|
5c0b2bbaa3 | ||
|
|
7d45a6709a | ||
|
|
a7b053addd | ||
|
|
e8ca256c10 | ||
|
|
f651dd1e93 | ||
|
|
f40cf0aae1 | ||
|
|
200f57c458 | ||
|
|
e5d45eaa85 | ||
|
|
42b77a9b54 | ||
|
|
42e5751705 | ||
|
|
2ff2c57dcf | ||
|
|
bca005a2c0 | ||
|
|
ca891bb187 | ||
|
|
0939ff3c78 | ||
|
|
6f5c5635be | ||
|
|
809622e426 | ||
|
|
eec1f3dbad | ||
|
|
1de789ad72 | ||
|
|
cd68ff68c6 | ||
|
|
9673970f45 | ||
|
|
6ea69a70ac | ||
|
|
c723a5ab44 | ||
|
|
50741d1409 | ||
|
|
69d0dcd62b | ||
|
|
5c9348f255 | ||
|
|
a472546e7b | ||
|
|
c914f1f183 | ||
|
|
92c82342fe | ||
|
|
7ae08b04f3 | ||
|
|
4b50f1e81c | ||
|
|
1ed58be749 | ||
|
|
375b4f9376 | ||
|
|
b6b4426ca9 | ||
|
|
149ebe46db | ||
|
|
d80b27cfde | ||
|
|
36e277317b | ||
|
|
0e1ea4d2c6 | ||
|
|
2d96c383ef | ||
|
|
ec6279736b | ||
|
|
5d942d9668 | ||
|
|
5782cb59fd | ||
|
|
4c4861a8f1 | ||
|
|
4d221ca70e | ||
|
|
1a8d045baf | ||
|
|
67a55c35ce | ||
|
|
e547fe4e41 | ||
|
|
9d5366d707 | ||
|
|
4b07d1dceb | ||
|
|
9440c1e1c1 | ||
|
|
773d003a67 | ||
|
|
a2905a0f8d | ||
|
|
1810455e15 | ||
|
|
76a9b09e8e | ||
|
|
d757b8f25f | ||
|
|
3f23c067f9 | ||
|
|
7a395c37e9 | ||
|
|
e7c55bbdec | ||
|
|
87dc9fb345 | ||
|
|
81a0a4e665 | ||
|
|
4774485477 | ||
|
|
c3da55f2e4 | ||
|
|
a1769e4fe1 | ||
|
|
cb31036153 | ||
|
|
da2672f338 | ||
|
|
91f4891475 | ||
|
|
883b7f0775 |
17
.github/ISSUE_TEMPLATE/bug_report.md
vendored
17
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,17 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
*** READ BEFORE POSTING ***
|
||||
|
||||
Before posting an issue - please upgrade to the latest version and confirm the issue/bug is still there.
|
||||
|
||||
Upgrade using:
|
||||
`$ pip install yfinance --upgrade --no-cache-dir`
|
||||
|
||||
Bug still there? Delete this content and submit your bug report here...
|
||||
95
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
Normal file
95
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
name: Bug report
|
||||
description: Report a bug in our project
|
||||
labels: ["bug"]
|
||||
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
# IMPORTANT - Read and follow these instructions carefully. Help us help you.
|
||||
|
||||
### Does issue already exist?
|
||||
|
||||
Use the search tool. Don't annoy everyone by duplicating existing Issues.
|
||||
|
||||
### Are you up-to-date?
|
||||
|
||||
Upgrade to the latest version and confirm the issue/bug is still there.
|
||||
|
||||
`$ pip install yfinance --upgrade --no-cache-dir`
|
||||
|
||||
Confirm by running:
|
||||
|
||||
`import yfinance as yf ; print(yf.__version__)`
|
||||
|
||||
and comparing against [PIP](https://pypi.org/project/yfinance/#history).
|
||||
|
||||
### Does Yahoo actually have the data?
|
||||
|
||||
Are you spelling symbol *exactly* same as Yahoo?
|
||||
|
||||
Then visit `finance.yahoo.com` and confirm they have the data you want. Maybe your symbol was delisted, or your expectations of `yfinance` are wrong.
|
||||
|
||||
### Are you spamming Yahoo?
|
||||
|
||||
Yahoo Finance free service has rate-limiting https://github.com/ranaroussi/yfinance/discussions/1513. Once limit hit, Yahoo can delay, block, or return bad data -> not a `yfinance` bug.
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
---
|
||||
## Still think it's a bug?
|
||||
|
||||
Provide the following as best you can:
|
||||
|
||||
- type: textarea
|
||||
id: summary
|
||||
attributes:
|
||||
label: "Describe bug"
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: code
|
||||
attributes:
|
||||
label: "Simple code that reproduces your problem"
|
||||
description: "Provide a snippet of code that we can copy-paste-run. Wrap code in Python Markdown code blocks for proper formatting (```` ```python ... ``` ````)."
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: debug-log
|
||||
attributes:
|
||||
label: "Debug log"
|
||||
description: "Run code with debug logging enabled and post the full output. Instructions: https://github.com/ranaroussi/yfinance/tree/main#logging"
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: bad-data-proof
|
||||
attributes:
|
||||
label: "Bad data proof"
|
||||
description: "If you think `yfinance` returning bad data, provide your proof here."
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: input
|
||||
id: version-yfinance
|
||||
attributes:
|
||||
label: "`yfinance` version"
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: version-python
|
||||
attributes:
|
||||
label: "Python version"
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: "Operating system"
|
||||
validations:
|
||||
required: false
|
||||
4
.github/workflows/python-publish.yml
vendored
4
.github/workflows/python-publish.yml
vendored
@@ -13,9 +13,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- name: Install dependencies
|
||||
|
||||
8
.gitignore
vendored
8
.gitignore
vendored
@@ -4,8 +4,16 @@ dist
|
||||
yfinance.egg-info
|
||||
*.pyc
|
||||
.coverage
|
||||
.idea/
|
||||
.vscode/
|
||||
build/
|
||||
*.html
|
||||
*.css
|
||||
*.png
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
|
||||
236
CHANGELOG.rst
236
CHANGELOG.rst
@@ -1,6 +1,242 @@
|
||||
Change Log
|
||||
===========
|
||||
|
||||
0.2.32b1
|
||||
------
|
||||
Beta release - add cookie & crumb to request.
|
||||
Why beta? Thread-safety needs stress-testing.
|
||||
|
||||
0.2.31
|
||||
------
|
||||
- Fix TZ cache exception blocking import #1705 #1709
|
||||
- Fix merging pre-market events with intraday prices #1703
|
||||
|
||||
0.2.30
|
||||
------
|
||||
- Fix OperationalError #1698
|
||||
|
||||
0.2.29
|
||||
------
|
||||
- Fix pandas warning when retrieving quotes. #1672
|
||||
- Replace sqlite3 with peewee for 100% thread-safety #1675
|
||||
- Fix merging events with intraday prices #1684
|
||||
- Fix error when calling enable_debug_mode twice #1687
|
||||
- Price repair fixes #1688
|
||||
|
||||
0.2.28
|
||||
------
|
||||
- Fix TypeError: 'FastInfo' object is not callable #1636
|
||||
- Improve & fix price repair #1633 #1660
|
||||
- option_chain() also return underlying data #1606
|
||||
|
||||
0.2.27
|
||||
------
|
||||
Bug fixes:
|
||||
- fix merging 1d-prices with out-of-range divs/splits #1635
|
||||
- fix multithread error 'tz already in cache' #1648
|
||||
|
||||
0.2.26
|
||||
------
|
||||
Proxy improvements
|
||||
- bug fixes #1371
|
||||
- security fix #1625
|
||||
|
||||
0.2.25
|
||||
------
|
||||
Fix single ISIN as ticker #1611
|
||||
Fix 'Only 100 years allowed' error #1576
|
||||
|
||||
0.2.24
|
||||
------
|
||||
Fix info[] missing values #1603
|
||||
|
||||
0.2.23
|
||||
------
|
||||
Fix 'Unauthorized' error #1595
|
||||
|
||||
0.2.22
|
||||
------
|
||||
Fix unhandled 'sqlite3.DatabaseError' #1574
|
||||
|
||||
0.2.21
|
||||
------
|
||||
Fix financials tables #1568
|
||||
Price repair update: fix Yahoo messing up dividend and split adjustments #1543
|
||||
Fix logging behaviour #1562
|
||||
Fix merge future div/split into prices #1567
|
||||
|
||||
0.2.20
|
||||
------
|
||||
Switch to `logging` module #1493 #1522 #1541
|
||||
Price history:
|
||||
- optimise #1514
|
||||
- fixes #1523
|
||||
- fix TZ-cache corruption #1528
|
||||
|
||||
0.2.18
|
||||
------
|
||||
Fix 'fast_info' error '_np not found' #1496
|
||||
Fix bug in timezone cache #1498
|
||||
|
||||
0.2.17
|
||||
------
|
||||
Fix prices error with Pandas 2.0 #1488
|
||||
|
||||
0.2.16
|
||||
------
|
||||
Fix 'fast_info deprecated' msg appearing at Ticker() init
|
||||
|
||||
0.2.15
|
||||
------
|
||||
Restore missing Ticker.info keys #1480
|
||||
|
||||
0.2.14
|
||||
------
|
||||
Fix Ticker.info dict by fetching from API #1461
|
||||
|
||||
0.2.13
|
||||
------
|
||||
Price bug fixes:
|
||||
- fetch big-interval with Capital Gains #1455
|
||||
- merging dividends & splits with prices #1452
|
||||
|
||||
0.2.12
|
||||
------
|
||||
Disable annoying 'backup decrypt' msg
|
||||
|
||||
0.2.11
|
||||
------
|
||||
Fix history_metadata accesses for unusual symbols #1411
|
||||
|
||||
0.2.10
|
||||
------
|
||||
General
|
||||
- allow using sqlite3 < 3.8.2 #1380
|
||||
- add another backup decrypt option #1379
|
||||
Prices
|
||||
- restore original download() timezone handling #1385
|
||||
- fix & improve price repair #1289 2a2928b 86d6acc
|
||||
- drop intraday intervals if in post-market but prepost=False #1311
|
||||
Info
|
||||
- fast_info improvements:
|
||||
- add camelCase keys, add dict functions values() & items() #1368
|
||||
- fix fast_info["previousClose"] #1383
|
||||
- catch TypeError Exception #1397
|
||||
|
||||
0.2.9
|
||||
-----
|
||||
- Fix fast_info bugs #1362
|
||||
|
||||
0.2.7
|
||||
-----
|
||||
- Fix Yahoo decryption, smarter this time #1353
|
||||
- Rename basic_info -> fast_info #1354
|
||||
|
||||
0.2.6
|
||||
-----
|
||||
- Fix Ticker.basic_info lazy-loading #1342
|
||||
|
||||
0.2.5
|
||||
-----
|
||||
- Fix Yahoo data decryption again #1336
|
||||
- New: Ticker.basic_info - faster Ticker.info #1317
|
||||
|
||||
0.2.4
|
||||
-----
|
||||
- Fix Yahoo data decryption #1297
|
||||
- New feature: 'Ticker.get_shares_full()' #1301
|
||||
- Improve caching of financials data #1284
|
||||
- Restore download() original alignment behaviour #1283
|
||||
- Fix the database lock error in multithread download #1276
|
||||
|
||||
0.2.3
|
||||
-----
|
||||
- Make financials API '_' use consistent
|
||||
|
||||
0.2.2
|
||||
-----
|
||||
- Restore 'financials' attribute (map to 'income_stmt')
|
||||
|
||||
0.2.1
|
||||
-----
|
||||
Release!
|
||||
|
||||
0.2.0rc5
|
||||
--------
|
||||
- Improve financials error handling #1243
|
||||
- Fix '100x price' repair #1244
|
||||
|
||||
0.2.0rc4
|
||||
--------
|
||||
- Access to old financials tables via `get_income_stmt(legacy=True)`
|
||||
- Optimise scraping financials & fundamentals, 2x faster
|
||||
- Add 'capital gains' alongside dividends & splits for ETFs, and metadata available via `history_metadata`, plus a bunch of price fixes
|
||||
For full list of changes see #1238
|
||||
|
||||
0.2.0rc2
|
||||
--------
|
||||
Financials
|
||||
- fix financials tables to match website #1128 #1157
|
||||
- lru_cache to optimise web requests #1147
|
||||
Prices
|
||||
- improve price repair #1148
|
||||
- fix merging dividends/splits with day/week/monthly prices #1161
|
||||
- fix the Yahoo DST fixes #1143
|
||||
- improve bad/delisted ticker handling #1140
|
||||
Misc
|
||||
- fix 'trailingPegRatio' #1138
|
||||
- improve error handling #1118
|
||||
|
||||
0.2.0rc1
|
||||
--------
|
||||
Jumping to 0.2 for this big update. 0.1.* will continue to receive bug-fixes
|
||||
- timezone cache performance massively improved. Thanks @fredrik-corneliusson #1113 #1112 #1109 #1105 #1099
|
||||
- price repair feature #1110
|
||||
- fix merging of dividends/splits with prices #1069 #1086 #1102
|
||||
- fix Yahoo returning latest price interval across 2 rows #1070
|
||||
- optional: raise errors as exceptions: raise_errors=True #1104
|
||||
- add proper unit tests #1069
|
||||
|
||||
0.1.81
|
||||
------
|
||||
- Fix unhandled tz-cache exception #1107
|
||||
|
||||
0.1.80
|
||||
------
|
||||
- Fix `download(ignore_tz=True)` for single ticker #1097
|
||||
- Fix rare case of error "Cannot infer DST time" #1100
|
||||
|
||||
0.1.79
|
||||
------
|
||||
- Fix when Yahoo returns price=NaNs on dividend day
|
||||
|
||||
0.1.78
|
||||
------
|
||||
- Fix download() when different timezones #1085
|
||||
|
||||
0.1.77
|
||||
------
|
||||
- Fix user experience bug #1078
|
||||
|
||||
0.1.75
|
||||
------
|
||||
- Fixed datetime-related issues: #1048
|
||||
- Add 'keepna' argument #1032
|
||||
- Speedup Ticker() creation #1042
|
||||
- Improve a bugfix #1033
|
||||
|
||||
0.1.74
|
||||
------
|
||||
- Fixed bug introduced in 0.1.73 (sorry :/)
|
||||
|
||||
0.1.73
|
||||
------
|
||||
- Merged several PR that fixed misc issues
|
||||
|
||||
0.1.72
|
||||
------
|
||||
- Misc bugfixes
|
||||
|
||||
0.1.71
|
||||
------
|
||||
- Added Tickers(…).news()
|
||||
|
||||
199
README.md
199
README.md
@@ -48,60 +48,48 @@ Yahoo! finance API is intended for personal use only.**
|
||||
|
||||
The `Ticker` module, which allows you to access ticker data in a more Pythonic way:
|
||||
|
||||
Note: yahoo finance datetimes are received as UTC.
|
||||
|
||||
```python
|
||||
import yfinance as yf
|
||||
|
||||
msft = yf.Ticker("MSFT")
|
||||
|
||||
# get stock info
|
||||
# get all stock info
|
||||
msft.info
|
||||
|
||||
# get historical market data
|
||||
hist = msft.history(period="max")
|
||||
hist = msft.history(period="1mo")
|
||||
|
||||
# show actions (dividends, splits)
|
||||
# show meta information about the history (requires history() to be called first)
|
||||
msft.history_metadata
|
||||
|
||||
# show actions (dividends, splits, capital gains)
|
||||
msft.actions
|
||||
|
||||
# show dividends
|
||||
msft.dividends
|
||||
|
||||
# show splits
|
||||
msft.splits
|
||||
msft.capital_gains # only for mutual funds & etfs
|
||||
|
||||
# show financials
|
||||
msft.financials
|
||||
msft.quarterly_financials
|
||||
# show share count
|
||||
msft.get_shares_full(start="2022-01-01", end=None)
|
||||
|
||||
# show major holders
|
||||
msft.major_holders
|
||||
|
||||
# show institutional holders
|
||||
msft.institutional_holders
|
||||
|
||||
# show balance sheet
|
||||
# show financials:
|
||||
# - income statement
|
||||
msft.income_stmt
|
||||
msft.quarterly_income_stmt
|
||||
# - balance sheet
|
||||
msft.balance_sheet
|
||||
msft.quarterly_balance_sheet
|
||||
|
||||
# show cashflow
|
||||
# - cash flow statement
|
||||
msft.cashflow
|
||||
msft.quarterly_cashflow
|
||||
# see `Ticker.get_income_stmt()` for more options
|
||||
|
||||
# show earnings
|
||||
msft.earnings
|
||||
msft.quarterly_earnings
|
||||
# show holders
|
||||
msft.major_holders
|
||||
msft.institutional_holders
|
||||
msft.mutualfund_holders
|
||||
|
||||
# show sustainability
|
||||
msft.sustainability
|
||||
|
||||
# show analysts recommendations
|
||||
msft.recommendations
|
||||
|
||||
# show next event (earnings, etc)
|
||||
msft.calendar
|
||||
|
||||
# show all earnings dates
|
||||
# Show future and historic earnings dates, returns at most next 4 quarters and last 8 quarters by default.
|
||||
# Note: If more are needed use msft.get_earnings_dates(limit=XX) with increased limit argument.
|
||||
msft.earnings_dates
|
||||
|
||||
# show ISIN code - *experimental*
|
||||
@@ -130,12 +118,43 @@ msft.history(..., proxy="PROXY_SERVER")
|
||||
msft.get_actions(proxy="PROXY_SERVER")
|
||||
msft.get_dividends(proxy="PROXY_SERVER")
|
||||
msft.get_splits(proxy="PROXY_SERVER")
|
||||
msft.get_capital_gains(proxy="PROXY_SERVER")
|
||||
msft.get_balance_sheet(proxy="PROXY_SERVER")
|
||||
msft.get_cashflow(proxy="PROXY_SERVER")
|
||||
msft.option_chain(..., proxy="PROXY_SERVER")
|
||||
...
|
||||
```
|
||||
|
||||
### Multiple tickers
|
||||
|
||||
To initialize multiple `Ticker` objects, use
|
||||
|
||||
```python
|
||||
import yfinance as yf
|
||||
|
||||
tickers = yf.Tickers('msft aapl goog')
|
||||
|
||||
# access each ticker using (example)
|
||||
tickers.tickers['MSFT'].info
|
||||
tickers.tickers['AAPL'].history(period="1mo")
|
||||
tickers.tickers['GOOG'].actions
|
||||
```
|
||||
|
||||
To download price history into one table:
|
||||
|
||||
```python
|
||||
import yfinance as yf
|
||||
data = yf.download("SPY AAPL", period="1mo")
|
||||
```
|
||||
|
||||
#### `yf.download()` and `Ticker.history()` have many options for configuring fetching and processing. [Review the Wiki](https://github.com/ranaroussi/yfinance/wiki) for more options and detail.
|
||||
|
||||
### Logging
|
||||
|
||||
`yfinance` now uses the `logging` module to handle messages, default behaviour is only print errors. If debugging, use `yf.enable_debug_mode()` to switch logging to debug with custom formatting.
|
||||
|
||||
### Smarter scraping
|
||||
|
||||
To use a custom `requests` session (for example to cache calls to the
|
||||
API or customize the `User-agent` header), pass a `session=` argument to
|
||||
the Ticker constructor.
|
||||
@@ -144,69 +163,25 @@ the Ticker constructor.
|
||||
import requests_cache
|
||||
session = requests_cache.CachedSession('yfinance.cache')
|
||||
session.headers['User-agent'] = 'my-program/1.0'
|
||||
ticker = yf.Ticker('msft aapl goog', session=session)
|
||||
ticker = yf.Ticker('msft', session=session)
|
||||
# The scraped response will be stored in the cache
|
||||
ticker.actions
|
||||
```
|
||||
|
||||
To initialize multiple `Ticker` objects, use
|
||||
|
||||
Combine a `requests_cache` with rate-limiting to avoid triggering Yahoo's rate-limiter/blocker that can corrupt data.
|
||||
```python
|
||||
import yfinance as yf
|
||||
from requests import Session
|
||||
from requests_cache import CacheMixin, SQLiteCache
|
||||
from requests_ratelimiter import LimiterMixin, MemoryQueueBucket
|
||||
from pyrate_limiter import Duration, RequestRate, Limiter
|
||||
class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
|
||||
pass
|
||||
|
||||
tickers = yf.Tickers('msft aapl goog')
|
||||
# ^ returns a named tuple of Ticker objects
|
||||
|
||||
# access each ticker using (example)
|
||||
tickers.tickers.MSFT.info
|
||||
tickers.tickers.AAPL.history(period="1mo")
|
||||
tickers.tickers.GOOG.actions
|
||||
```
|
||||
|
||||
### Fetching data for multiple tickers
|
||||
|
||||
```python
|
||||
import yfinance as yf
|
||||
data = yf.download("SPY AAPL", start="2017-01-01", end="2017-04-30")
|
||||
```
|
||||
|
||||
I've also added some options to make life easier :)
|
||||
|
||||
```python
|
||||
data = yf.download( # or pdr.get_data_yahoo(...
|
||||
# tickers list or string as well
|
||||
tickers = "SPY AAPL MSFT",
|
||||
|
||||
# use "period" instead of start/end
|
||||
# valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
|
||||
# (optional, default is '1mo')
|
||||
period = "ytd",
|
||||
|
||||
# fetch data by interval (including intraday if period < 60 days)
|
||||
# valid intervals: 1m,2m,5m,15m,30m,60m,90m,1h,1d,5d,1wk,1mo,3mo
|
||||
# (optional, default is '1d')
|
||||
interval = "1m",
|
||||
|
||||
# group by ticker (to access via data['SPY'])
|
||||
# (optional, default is 'column')
|
||||
group_by = 'ticker',
|
||||
|
||||
# adjust all OHLC automatically
|
||||
# (optional, default is False)
|
||||
auto_adjust = True,
|
||||
|
||||
# download pre/post regular market hours data
|
||||
# (optional, default is False)
|
||||
prepost = True,
|
||||
|
||||
# use threads for mass downloading? (True/False/Integer)
|
||||
# (optional, default is True)
|
||||
threads = True,
|
||||
|
||||
# proxy URL scheme to use when downloading
|
||||
# (optional, default is None)
|
||||
proxy = None
|
||||
)
|
||||
session = CachedLimiterSession(
|
||||
limiter=Limiter(RequestRate(2, Duration.SECOND*5)), # max 2 requests per 5 seconds
|
||||
bucket_class=MemoryQueueBucket,
|
||||
backend=SQLiteCache("yfinance.cache"),
|
||||
)
|
||||
```
|
||||
|
||||
### Managing Multi-Level Columns
|
||||
@@ -224,9 +199,7 @@ yfinance?](https://stackoverflow.com/questions/63107801)
|
||||
- How to download single or multiple tickers into a single
|
||||
dataframe with single level column names and a ticker column
|
||||
|
||||
---
|
||||
|
||||
## `pandas_datareader` override
|
||||
### `pandas_datareader` override
|
||||
|
||||
If your code uses `pandas_datareader` and you want to download data
|
||||
faster, you can "hijack" `pandas_datareader.data.get_data_yahoo()`
|
||||
@@ -243,6 +216,18 @@ yf.pdr_override() # <== that's all it takes :-)
|
||||
data = pdr.get_data_yahoo("SPY", start="2017-01-01", end="2017-04-30")
|
||||
```
|
||||
|
||||
### Timezone cache store
|
||||
|
||||
When fetching price data, all dates are localized to stock exchange timezone.
|
||||
But timezone retrieval is relatively slow, so yfinance attemps to cache them
|
||||
in your users cache folder.
|
||||
You can direct cache to use a different location with `set_tz_cache_location()`:
|
||||
```python
|
||||
import yfinance as yf
|
||||
yf.set_tz_cache_location("custom/cache/location")
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Installation
|
||||
@@ -253,23 +238,37 @@ Install `yfinance` using `pip`:
|
||||
$ pip install yfinance --upgrade --no-cache-dir
|
||||
```
|
||||
|
||||
Test new features by installing betas, provide feedback in [corresponding Discussion](https://github.com/ranaroussi/yfinance/discussions):
|
||||
``` {.sourceCode .bash}
|
||||
$ pip install yfinance --upgrade --no-cache-dir --pre
|
||||
```
|
||||
|
||||
To install `yfinance` using `conda`, see
|
||||
[this](https://anaconda.org/ranaroussi/yfinance).
|
||||
|
||||
### Requirements
|
||||
|
||||
- [Python](https://www.python.org) \>= 2.7, 3.4+
|
||||
- [Pandas](https://github.com/pydata/pandas) (tested to work with
|
||||
\>=0.23.1)
|
||||
- [Numpy](http://www.numpy.org) \>= 1.11.1
|
||||
- [requests](http://docs.python-requests.org/en/master/) \>= 2.14.2
|
||||
- [lxml](https://pypi.org/project/lxml/) \>= 4.5.1
|
||||
- [Pandas](https://github.com/pydata/pandas) \>= 1.3.0
|
||||
- [Numpy](http://www.numpy.org) \>= 1.16.5
|
||||
- [requests](http://docs.python-requests.org/en/master) \>= 2.31
|
||||
- [lxml](https://pypi.org/project/lxml) \>= 4.9.1
|
||||
- [appdirs](https://pypi.org/project/appdirs) \>= 1.4.4
|
||||
- [pytz](https://pypi.org/project/pytz) \>=2022.5
|
||||
- [frozendict](https://pypi.org/project/frozendict) \>= 2.3.4
|
||||
- [beautifulsoup4](https://pypi.org/project/beautifulsoup4) \>= 4.11.1
|
||||
- [html5lib](https://pypi.org/project/html5lib) \>= 1.1
|
||||
- [peewee](https://pypi.org/project/peewee) \>= 3.16.2
|
||||
|
||||
### Optional (if you want to use `pandas_datareader`)
|
||||
#### Optional (if you want to use `pandas_datareader`)
|
||||
|
||||
- [pandas\_datareader](https://github.com/pydata/pandas-datareader)
|
||||
\>= 0.4.0
|
||||
|
||||
## Developers: want to contribute?
|
||||
|
||||
`yfinance` relies on community to investigate bugs and contribute code. Developer guide: https://github.com/ranaroussi/yfinance/discussions/1084
|
||||
|
||||
---
|
||||
|
||||
### Legal Stuff
|
||||
|
||||
28
meta.yaml
28
meta.yaml
@@ -1,5 +1,5 @@
|
||||
{% set name = "yfinance" %}
|
||||
{% set version = "0.1.58" %}
|
||||
{% set version = "0.2.32b1" %}
|
||||
|
||||
package:
|
||||
name: "{{ name|lower }}"
|
||||
@@ -16,20 +16,34 @@ build:
|
||||
|
||||
requirements:
|
||||
host:
|
||||
- pandas >=0.24.0
|
||||
- pandas >=1.3.0
|
||||
- numpy >=1.16.5
|
||||
- requests >=2.21
|
||||
- requests >=2.31
|
||||
- multitasking >=0.0.7
|
||||
- lxml >=4.5.1
|
||||
- lxml >=4.9.1
|
||||
- appdirs >=1.4.4
|
||||
- pytz >=2022.5
|
||||
- frozendict >=2.3.4
|
||||
- beautifulsoup4 >=4.11.1
|
||||
- html5lib >=1.1
|
||||
- peewee >=3.16.2
|
||||
# - pycryptodome >=3.6.6
|
||||
- pip
|
||||
- python
|
||||
|
||||
run:
|
||||
- pandas >=0.24.0
|
||||
- pandas >=1.3.0
|
||||
- numpy >=1.16.5
|
||||
- requests >=2.21
|
||||
- requests >=2.31
|
||||
- multitasking >=0.0.7
|
||||
- lxml >=4.5.1
|
||||
- lxml >=4.9.1
|
||||
- appdirs >=1.4.4
|
||||
- pytz >=2022.5
|
||||
- frozendict >=2.3.4
|
||||
- beautifulsoup4 >=4.11.1
|
||||
- html5lib >=1.1
|
||||
- peewee >=3.16.2
|
||||
# - pycryptodome >=3.6.6
|
||||
- python
|
||||
|
||||
test:
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
pandas>=0.24.0
|
||||
pandas>=1.3.0
|
||||
numpy>=1.16.5
|
||||
requests>=2.26
|
||||
requests>=2.31
|
||||
multitasking>=0.0.7
|
||||
lxml>=4.5.1
|
||||
lxml>=4.9.1
|
||||
appdirs>=1.4.4
|
||||
pytz>=2022.5
|
||||
frozendict>=2.3.4
|
||||
beautifulsoup4>=4.11.1
|
||||
html5lib>=1.1
|
||||
peewee>=3.16.2
|
||||
19
setup.py
19
setup.py
@@ -38,8 +38,8 @@ setup(
|
||||
classifiers=[
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
# 'Development Status :: 3 - Alpha',
|
||||
# 'Development Status :: 4 - Beta',
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Development Status :: 4 - Beta',
|
||||
# 'Development Status :: 5 - Production/Stable',
|
||||
|
||||
|
||||
'Operating System :: OS Independent',
|
||||
@@ -50,20 +50,21 @@ setup(
|
||||
'Topic :: Software Development :: Libraries',
|
||||
'Topic :: Software Development :: Libraries :: Python Modules',
|
||||
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
# 'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
],
|
||||
platforms=['any'],
|
||||
keywords='pandas, yahoo finance, pandas datareader',
|
||||
packages=find_packages(exclude=['contrib', 'docs', 'tests', 'examples']),
|
||||
install_requires=['pandas>=0.24.0', 'numpy>=1.15',
|
||||
'requests>=2.26', 'multitasking>=0.0.7',
|
||||
'lxml>=4.5.1'],
|
||||
install_requires=['pandas>=1.3.0', 'numpy>=1.16.5',
|
||||
'requests>=2.31', 'multitasking>=0.0.7',
|
||||
'lxml>=4.9.1', 'appdirs>=1.4.4', 'pytz>=2022.5',
|
||||
'frozendict>=2.3.4', 'peewee>=3.16.2',
|
||||
'beautifulsoup4>=4.11.1', 'html5lib>=1.1'],
|
||||
# Note: Pandas.read_html() needs html5lib & beautifulsoup4
|
||||
entry_points={
|
||||
'console_scripts': [
|
||||
'sample=sample:main',
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
#
|
||||
# yfinance - market data downloader
|
||||
# https://github.com/ranaroussi/yfinance
|
||||
|
||||
"""
|
||||
Sanity check for most common library uses all working
|
||||
- Stock: Microsoft
|
||||
- ETF: Russell 2000 Growth
|
||||
- Mutual fund: Vanguard 500 Index fund
|
||||
- Index: S&P500
|
||||
- Currency BTC-USD
|
||||
"""
|
||||
|
||||
import yfinance as yf
|
||||
import unittest
|
||||
|
||||
symbols = ['MSFT', 'IWO', 'VFINX', '^GSPC', 'BTC-USD']
|
||||
tickers = [yf.Ticker(symbol) for symbol in symbols]
|
||||
|
||||
|
||||
class TestTicker(unittest.TestCase):
|
||||
def test_info_history(self):
|
||||
for ticker in tickers:
|
||||
# always should have info and history for valid symbols
|
||||
assert(ticker.info is not None and ticker.info != {})
|
||||
history = ticker.history(period="max")
|
||||
assert(history.empty is False and history is not None)
|
||||
|
||||
def test_attributes(self):
|
||||
for ticker in tickers:
|
||||
ticker.isin
|
||||
ticker.major_holders
|
||||
ticker.institutional_holders
|
||||
ticker.mutualfund_holders
|
||||
ticker.dividends
|
||||
ticker.splits
|
||||
ticker.actions
|
||||
ticker.info
|
||||
ticker.calendar
|
||||
ticker.recommendations
|
||||
ticker.earnings
|
||||
ticker.quarterly_earnings
|
||||
ticker.financials
|
||||
ticker.quarterly_financials
|
||||
ticker.balance_sheet
|
||||
ticker.quarterly_balance_sheet
|
||||
ticker.cashflow
|
||||
ticker.quarterly_cashflow
|
||||
ticker.sustainability
|
||||
ticker.options
|
||||
ticker.news
|
||||
ticker.shares
|
||||
ticker.earnings_history
|
||||
ticker.earnings_dates
|
||||
|
||||
def test_holders(self):
|
||||
for ticker in tickers:
|
||||
assert(ticker.info is not None and ticker.info != {})
|
||||
assert(ticker.major_holders is not None)
|
||||
assert(ticker.institutional_holders is not None)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
#!/usr/bin/env python
|
||||
47
tests/context.py
Normal file
47
tests/context.py
Normal file
@@ -0,0 +1,47 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import appdirs as _ad
|
||||
import datetime as _dt
|
||||
import sys
|
||||
import os
|
||||
_parent_dp = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
|
||||
_src_dp = _parent_dp
|
||||
sys.path.insert(0, _src_dp)
|
||||
|
||||
import yfinance
|
||||
|
||||
|
||||
# Optional: see the exact requests that are made during tests:
|
||||
# import logging
|
||||
# logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
|
||||
# Use adjacent cache folder for testing, delete if already exists and older than today
|
||||
testing_cache_dirpath = os.path.join(_ad.user_cache_dir(), "py-yfinance-testing")
|
||||
yfinance.set_tz_cache_location(testing_cache_dirpath)
|
||||
if os.path.isdir(testing_cache_dirpath):
|
||||
mtime = _dt.datetime.fromtimestamp(os.path.getmtime(testing_cache_dirpath))
|
||||
if mtime.date() < _dt.date.today():
|
||||
import shutil
|
||||
shutil.rmtree(testing_cache_dirpath)
|
||||
|
||||
|
||||
# Setup a session to rate-limit and cache persistently:
|
||||
from requests import Session
|
||||
from requests_cache import CacheMixin, SQLiteCache
|
||||
from requests_ratelimiter import LimiterMixin, MemoryQueueBucket
|
||||
class CachedLimiterSession(CacheMixin, LimiterMixin, Session):
|
||||
pass
|
||||
from pyrate_limiter import Duration, RequestRate, Limiter
|
||||
history_rate = RequestRate(1, Duration.SECOND*2)
|
||||
limiter = Limiter(history_rate)
|
||||
cache_fp = os.path.join(testing_cache_dirpath, "unittests-cache")
|
||||
session_gbl = CachedLimiterSession(
|
||||
limiter=limiter,
|
||||
bucket_class=MemoryQueueBucket,
|
||||
backend=SQLiteCache(cache_fp, expire_after=_dt.timedelta(hours=1)),
|
||||
)
|
||||
# Use this instead if only want rate-limiting:
|
||||
# from requests_ratelimiter import LimiterSession
|
||||
# session_gbl = LimiterSession(limiter=limiter)
|
||||
|
||||
23
tests/data/4063-T-1d-bad-stock-split-fixed.csv
Normal file
23
tests/data/4063-T-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,23 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-04-14 00:00:00+09:00,4126,4130,4055,4129,4129,7459400,0,0
|
||||
2023-04-13 00:00:00+09:00,4064,4099,4026,4081,4081,5160200,0,0
|
||||
2023-04-12 00:00:00+09:00,3968,4084,3966,4064,4064,6372000,0,0
|
||||
2023-04-11 00:00:00+09:00,3990,4019,3954,3960,3960,6476500,0,0
|
||||
2023-04-10 00:00:00+09:00,3996,4009,3949,3964,3964,3485200,0,0
|
||||
2023-04-07 00:00:00+09:00,3897,3975,3892,3953,3953,4554700,0,0
|
||||
2023-04-06 00:00:00+09:00,4002,4004,3920,3942,3942,8615200,0,0
|
||||
2023-04-05 00:00:00+09:00,4150,4150,4080,4088,4088,6063700,0,0
|
||||
2023-04-04 00:00:00+09:00,4245,4245,4144,4155,4155,6780600,0,0
|
||||
2023-04-03 00:00:00+09:00,4250,4259,4162,4182,4182,7076800,0,0
|
||||
2023-03-31 00:00:00+09:00,4229,4299,4209,4275,4275,9608400,0,0
|
||||
2023-03-30 00:00:00+09:00,4257,4268,4119,4161,4161,5535200,55,5
|
||||
2023-03-29 00:00:00+09:00,4146,4211,4146,4206,4151,6514500,0,0
|
||||
2023-03-28 00:00:00+09:00,4200,4207,4124,4142,4087.837109375,4505500,0,0
|
||||
2023-03-27 00:00:00+09:00,4196,4204,4151,4192,4137.183203125,5959500,0,0
|
||||
2023-03-24 00:00:00+09:00,4130,4187,4123,4177,4122.379296875,8961500,0,0
|
||||
2023-03-23 00:00:00+09:00,4056,4106,4039,4086,4032.569140625,5480000,0,0
|
||||
2023-03-22 00:00:00+09:00,4066,4128,4057,4122,4068.0984375,8741500,0,0
|
||||
2023-03-20 00:00:00+09:00,4000,4027,3980,3980,3927.95546875,7006500,0,0
|
||||
2023-03-17 00:00:00+09:00,4018,4055,4016,4031,3978.28828125,6961500,0,0
|
||||
2023-03-16 00:00:00+09:00,3976,4045,3972,4035,3982.236328125,5019000,0,0
|
||||
2023-03-15 00:00:00+09:00,4034,4050,4003,4041,3988.1578125,6122000,0,0
|
||||
|
23
tests/data/4063-T-1d-bad-stock-split.csv
Normal file
23
tests/data/4063-T-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,23 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-04-14 00:00:00+09:00,4126,4130,4055,4129,4129,7459400,0,0
|
||||
2023-04-13 00:00:00+09:00,4064,4099,4026,4081,4081,5160200,0,0
|
||||
2023-04-12 00:00:00+09:00,3968,4084,3966,4064,4064,6372000,0,0
|
||||
2023-04-11 00:00:00+09:00,3990,4019,3954,3960,3960,6476500,0,0
|
||||
2023-04-10 00:00:00+09:00,3996,4009,3949,3964,3964,3485200,0,0
|
||||
2023-04-07 00:00:00+09:00,3897,3975,3892,3953,3953,4554700,0,0
|
||||
2023-04-06 00:00:00+09:00,4002,4004,3920,3942,3942,8615200,0,0
|
||||
2023-04-05 00:00:00+09:00,4150,4150,4080,4088,4088,6063700,0,0
|
||||
2023-04-04 00:00:00+09:00,4245,4245,4144,4155,4155,6780600,0,0
|
||||
2023-04-03 00:00:00+09:00,4250,4259,4162,4182,4182,7076800,0,0
|
||||
2023-03-31 00:00:00+09:00,4229,4299,4209,4275,4275,9608400,0,0
|
||||
2023-03-30 00:00:00+09:00,4257,4268,4119,4161,4161,5535200,55,5
|
||||
2023-03-29 00:00:00+09:00,4146,4211,4146,4206,4151,6514500,0,0
|
||||
2023-03-28 00:00:00+09:00,21000,21035,20620,20710,20439.185546875,901100,0,0
|
||||
2023-03-27 00:00:00+09:00,20980,21020,20755,20960,20685.916015625,1191900,0,0
|
||||
2023-03-24 00:00:00+09:00,20650,20935,20615,20885,20611.896484375,1792300,0,0
|
||||
2023-03-23 00:00:00+09:00,20280,20530,20195,20430,20162.845703125,1096000,0,0
|
||||
2023-03-22 00:00:00+09:00,20330,20640,20285,20610,20340.4921875,1748300,0,0
|
||||
2023-03-20 00:00:00+09:00,20000,20135,19900,19900,19639.77734375,1401300,0,0
|
||||
2023-03-17 00:00:00+09:00,20090,20275,20080,20155,19891.44140625,1392300,0,0
|
||||
2023-03-16 00:00:00+09:00,19880,20225,19860,20175,19911.181640625,1003800,0,0
|
||||
2023-03-15 00:00:00+09:00,20170,20250,20015,20205,19940.7890625,1224400,0,0
|
||||
|
6
tests/data/8TRA-DE-1d-missing-div-adjust-fixed.csv
Normal file
6
tests/data/8TRA-DE-1d-missing-div-adjust-fixed.csv
Normal file
@@ -0,0 +1,6 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-30 00:00:00+02:00,19.5900001525879,19.7999992370605,19.2700004577637,19.3500003814697,18.6291382416581,196309,0,0
|
||||
2023-05-31 00:00:00+02:00,19.1200008392334,19.1399993896484,18.7000007629395,18.7900009155273,18.0900009155273,156652,0,0
|
||||
2023-06-02 00:00:00+02:00,18.5499992370605,19,18.5100002288818,18.8999996185303,18.8999996185303,83439,0.7,0
|
||||
2023-06-05 00:00:00+02:00,18.9300003051758,19.0900001525879,18.8400001525879,19,19,153167,0,0
|
||||
2023-06-06 00:00:00+02:00,18.9099998474121,18.9500007629395,18.5100002288818,18.6599998474121,18.6599998474121,104352,0,0
|
||||
|
6
tests/data/8TRA-DE-1d-missing-div-adjust.csv
Normal file
6
tests/data/8TRA-DE-1d-missing-div-adjust.csv
Normal file
@@ -0,0 +1,6 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-30 00:00:00+02:00,19.59000015258789,19.799999237060547,19.270000457763672,19.350000381469727,19.350000381469727,196309,0.0,0.0
|
||||
2023-05-31 00:00:00+02:00,19.1200008392334,19.139999389648438,18.700000762939453,18.790000915527344,18.790000915527344,156652,0.0,0.0
|
||||
2023-06-02 00:00:00+02:00,18.549999237060547,19.0,18.510000228881836,18.899999618530273,18.899999618530273,83439,0.7,0.0
|
||||
2023-06-05 00:00:00+02:00,18.93000030517578,19.09000015258789,18.84000015258789,19.0,19.0,153167,0.0,0.0
|
||||
2023-06-06 00:00:00+02:00,18.90999984741211,18.950000762939453,18.510000228881836,18.65999984741211,18.65999984741211,104352,0.0,0.0
|
||||
|
24
tests/data/AET-L-1d-100x-error-fixed.csv
Normal file
24
tests/data/AET-L-1d-100x-error-fixed.csv
Normal file
@@ -0,0 +1,24 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-06-06 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-06-01 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-31 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-30 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-27 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-26 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-25 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-24 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-23 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-20 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-19 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-18 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,532454,0,0
|
||||
2022-05-17 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-16 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-13 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-12 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-11 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-10 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-09 00:00:00+01:00,0.1455,0.1455,0.1455,0.1455,0.1455,0,0,0
|
||||
2022-05-06 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-05 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-04 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
2022-05-03 00:00:00+01:00,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0.145500004291534,0,0,0
|
||||
|
24
tests/data/AET-L-1d-100x-error.csv
Normal file
24
tests/data/AET-L-1d-100x-error.csv
Normal file
@@ -0,0 +1,24 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-06-06 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-06-01 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-31 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-30 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-27 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-26 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-24 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-23 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-20 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-19 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-18 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,532454,0.0,0.0
|
||||
2022-05-17 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-16 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-13 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-12 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-11 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-10 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-09 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-06 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-05 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-04 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-03 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
|
37
tests/data/AET-L-1wk-100x-error-fixed.csv
Normal file
37
tests/data/AET-L-1wk-100x-error-fixed.csv
Normal file
@@ -0,0 +1,37 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-05-30 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-05-23 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-05-16 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,532454,0,0
|
||||
2022-05-09 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-05-02 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-04-25 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-04-18 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-04-11 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-04-04 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-03-28 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-03-21 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-03-14 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-03-07 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-02-28 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-02-21 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-02-14 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-02-07 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-01-31 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-01-24 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-01-17 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-01-10 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-01-03 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-12-27 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-12-20 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-12-13 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-12-06 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-11-29 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-11-22 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-11-15 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-11-08 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-11-01 00:00:00+00:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-10-25 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-10-18 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-10-11 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2021-10-04 00:00:00+01:00,14.8000,15.3400,14.4000,14.5500,14.5500,2171373,0,0
|
||||
2021-09-27 00:00:00+01:00,15.6000,16.0000,14.9000,15.0500,15.0500,3860549,0,0
|
||||
|
25
tests/data/AET-L-1wk-100x-error-fixed.csv.old
Normal file
25
tests/data/AET-L-1wk-100x-error-fixed.csv.old
Normal file
@@ -0,0 +1,25 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-08-15 00:00:00+01:00,27.6000,28.2000,26.2000,27.6000,27.6000,3535668,0,0
|
||||
2022-08-12 00:00:00+01:00,27.3000,29.8000,26.4030,27.0000,27.0000,7223353,0,0
|
||||
2022-08-11 00:00:00+01:00,26.0000,29.8000,24.2000,27.1000,27.1000,12887933,0,0
|
||||
2022-08-10 00:00:00+01:00,25.0000,29.2000,22.5000,25.0000,25.0000,26572680,0,0
|
||||
2022-08-09 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-08-08 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-08-05 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-08-04 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-08-03 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-08-02 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-08-01 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-29 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-28 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-27 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-26 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-25 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-22 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-21 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-20 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-19 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-18 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-15 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-14 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
2022-07-13 00:00:00+01:00,14.5500,14.5500,14.5500,14.5500,14.5500,0,0,0
|
||||
37
tests/data/AET-L-1wk-100x-error.csv
Normal file
37
tests/data/AET-L-1wk-100x-error.csv
Normal file
@@ -0,0 +1,37 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-05-30 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-23 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-05-16 00:00:00+01:00,14.550000190734863,14.550000190734863,0.14550000429153442,0.14550000429153442,0.14550000429153442,532454,0.0,0.0
|
||||
2022-05-09 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-05-02 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-04-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-04-18 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-04-11 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-04-04 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-03-28 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-03-21 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-03-14 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-03-07 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-02-28 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-02-21 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-02-14 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-02-07 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-01-31 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-01-24 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-01-17 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-01-10 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-01-03 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-12-27 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-12-20 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-12-13 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-12-06 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-11-29 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-11-22 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-11-15 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-11-08 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-11-01 00:00:00+00:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-10-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-10-18 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-10-11 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2021-10-04 00:00:00+01:00,14.800000190734863,15.34000015258789,0.14399999380111694,0.14550000429153442,0.14550000429153442,2171373,0.0,0.0
|
||||
2021-09-27 00:00:00+01:00,15.600000381469727,16.0,14.899999618530273,15.050000190734863,15.050000190734863,3860549,0.0,0.0
|
||||
|
25
tests/data/AET-L-1wk-100x-error.csv.old
Normal file
25
tests/data/AET-L-1wk-100x-error.csv.old
Normal file
@@ -0,0 +1,25 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-08-15 00:00:00+01:00,27.600000381469727,28.200000762939453,26.200000762939453,27.600000381469727,27.600000381469727,3535668,0.0,0.0
|
||||
2022-08-12 00:00:00+01:00,27.299999237060547,29.799999237060547,26.402999877929688,27.0,27.0,7223353,0.0,0.0
|
||||
2022-08-11 00:00:00+01:00,26.0,29.799999237060547,24.200000762939453,27.100000381469727,27.100000381469727,12887933,0.0,0.0
|
||||
2022-08-10 00:00:00+01:00,25.0,29.200000762939453,22.5,25.0,25.0,26572680,0.0,0.0
|
||||
2022-08-09 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-08-08 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-08-05 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-08-04 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-08-03 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-08-02 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-08-01 00:00:00+01:00,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,14.550000190734863,0,0.0,0.0
|
||||
2022-07-29 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-28 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-27 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-26 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-25 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-22 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-21 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-20 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-19 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-18 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-15 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-14 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
2022-07-13 00:00:00+01:00,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0.14550000429153442,0,0.0,0.0
|
||||
30
tests/data/ALPHA-PA-1d-bad-stock-split-fixed.csv
Normal file
30
tests/data/ALPHA-PA-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,30 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-04-20 00:00:00+02:00,3,3,2,3,3,2076,0,0
|
||||
2023-04-21 00:00:00+02:00,3,3,2,3,3,2136,0,0
|
||||
2023-04-24 00:00:00+02:00,3,3,1,1,1,77147,0,0
|
||||
2023-04-25 00:00:00+02:00,1,2,1,2,2,9625,0,0
|
||||
2023-04-26 00:00:00+02:00,2,2,1,2,2,5028,0,0
|
||||
2023-04-27 00:00:00+02:00,2,2,1,1,1,3235,0,0
|
||||
2023-04-28 00:00:00+02:00,2,2,1,2,2,10944,0,0
|
||||
2023-05-02 00:00:00+02:00,2,2,2,2,2,12220,0,0
|
||||
2023-05-03 00:00:00+02:00,2,2,2,2,2,4683,0,0
|
||||
2023-05-04 00:00:00+02:00,2,2,1,2,2,3368,0,0
|
||||
2023-05-05 00:00:00+02:00,2,2,1,2,2,26069,0,0
|
||||
2023-05-08 00:00:00+02:00,1,2,1,1,1,70540,0,0
|
||||
2023-05-09 00:00:00+02:00,1,2,1,1,1,14228,0,0
|
||||
2023-05-10 00:00:00+02:00,1.08000004291534,1.39999997615814,0.879999995231628,1,1,81012,0,0.0001
|
||||
2023-05-11 00:00:00+02:00,1.03999996185303,1.03999996185303,0.850000023841858,1,1,40254,0,0
|
||||
2023-05-12 00:00:00+02:00,0.949999988079071,1.10000002384186,0.949999988079071,1.01999998092651,1.01999998092651,35026,0,0
|
||||
2023-05-15 00:00:00+02:00,0.949999988079071,1.01999998092651,0.860000014305115,0.939999997615814,0.939999997615814,41486,0,0
|
||||
2023-05-16 00:00:00+02:00,0.899999976158142,0.944000005722046,0.800000011920929,0.800000011920929,0.800000011920929,43583,0,0
|
||||
2023-05-17 00:00:00+02:00,0.850000023841858,0.850000023841858,0.779999971389771,0.810000002384186,0.810000002384186,29984,0,0
|
||||
2023-05-18 00:00:00+02:00,0.779999971389771,0.78600001335144,0.740000009536743,0.740000009536743,0.740000009536743,24679,0,0
|
||||
2023-05-19 00:00:00+02:00,0.78600001335144,0.78600001335144,0.649999976158142,0.65200001001358,0.65200001001358,26732,0,0
|
||||
2023-05-22 00:00:00+02:00,0.8299999833107,1.05999994277954,0.709999978542328,0.709999978542328,0.709999978542328,169538,0,0
|
||||
2023-05-23 00:00:00+02:00,0.899999976158142,1.60800004005432,0.860000014305115,1.22000002861023,1.22000002861023,858471,0,0
|
||||
2023-05-24 00:00:00+02:00,1.19400000572205,1.25999999046326,0.779999971389771,0.779999971389771,0.779999971389771,627823,0,0
|
||||
2023-05-25 00:00:00+02:00,0.980000019073486,1.22000002861023,0.702000021934509,0.732999980449677,0.732999980449677,1068939,0,0
|
||||
2023-05-26 00:00:00+02:00,0.660000026226044,0.72000002861023,0.602999985218048,0.611999988555908,0.611999988555908,631580,0,0
|
||||
2023-05-29 00:00:00+02:00,0.620000004768372,0.75,0.578999996185303,0.600000023841858,0.600000023841858,586150,0,0
|
||||
2023-05-30 00:00:00+02:00,0.610000014305115,0.634999990463257,0.497000008821487,0.497000008821487,0.497000008821487,552308,0,0
|
||||
2023-05-31 00:00:00+02:00,0.458999991416931,0.469999998807907,0.374000012874603,0.379999995231628,0.379999995231628,899067,0,0
|
||||
|
30
tests/data/ALPHA-PA-1d-bad-stock-split.csv
Normal file
30
tests/data/ALPHA-PA-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,30 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-04-20 00:00:00+02:00,3.0,3.0,2.0,3.0,3.0,2076,0.0,0.0
|
||||
2023-04-21 00:00:00+02:00,3.0,3.0,2.0,3.0,3.0,2136,0.0,0.0
|
||||
2023-04-24 00:00:00+02:00,3.0,3.0,1.0,1.0,1.0,77147,0.0,0.0
|
||||
2023-04-25 00:00:00+02:00,1.0,2.0,1.0,2.0,2.0,9625,0.0,0.0
|
||||
2023-04-26 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,5028,0.0,0.0
|
||||
2023-04-27 00:00:00+02:00,2.0,2.0,1.0,1.0,1.0,3235,0.0,0.0
|
||||
2023-04-28 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,10944,0.0,0.0
|
||||
2023-05-02 00:00:00+02:00,2.0,2.0,2.0,2.0,2.0,12220,0.0,0.0
|
||||
2023-05-03 00:00:00+02:00,2.0,2.0,2.0,2.0,2.0,4683,0.0,0.0
|
||||
2023-05-04 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,3368,0.0,0.0
|
||||
2023-05-05 00:00:00+02:00,2.0,2.0,1.0,2.0,2.0,26069,0.0,0.0
|
||||
2023-05-08 00:00:00+02:00,9.999999747378752e-05,0.00019999999494757503,9.999999747378752e-05,9.999999747378752e-05,9.999999747378752e-05,705399568,0.0,0.0
|
||||
2023-05-09 00:00:00+02:00,1.0,2.0,1.0,1.0,1.0,14228,0.0,0.0
|
||||
2023-05-10 00:00:00+02:00,1.0800000429153442,1.399999976158142,0.8799999952316284,1.0,1.0,81012,0.0,0.0001
|
||||
2023-05-11 00:00:00+02:00,1.0399999618530273,1.0399999618530273,0.8500000238418579,1.0,1.0,40254,0.0,0.0
|
||||
2023-05-12 00:00:00+02:00,0.949999988079071,1.100000023841858,0.949999988079071,1.0199999809265137,1.0199999809265137,35026,0.0,0.0
|
||||
2023-05-15 00:00:00+02:00,0.949999988079071,1.0199999809265137,0.8600000143051147,0.9399999976158142,0.9399999976158142,41486,0.0,0.0
|
||||
2023-05-16 00:00:00+02:00,0.8999999761581421,0.9440000057220459,0.800000011920929,0.800000011920929,0.800000011920929,43583,0.0,0.0
|
||||
2023-05-17 00:00:00+02:00,0.8500000238418579,0.8500000238418579,0.7799999713897705,0.8100000023841858,0.8100000023841858,29984,0.0,0.0
|
||||
2023-05-18 00:00:00+02:00,0.7799999713897705,0.7860000133514404,0.7400000095367432,0.7400000095367432,0.7400000095367432,24679,0.0,0.0
|
||||
2023-05-19 00:00:00+02:00,0.7860000133514404,0.7860000133514404,0.6499999761581421,0.6520000100135803,0.6520000100135803,26732,0.0,0.0
|
||||
2023-05-22 00:00:00+02:00,0.8299999833106995,1.059999942779541,0.7099999785423279,0.7099999785423279,0.7099999785423279,169538,0.0,0.0
|
||||
2023-05-23 00:00:00+02:00,0.8999999761581421,1.6080000400543213,0.8600000143051147,1.2200000286102295,1.2200000286102295,858471,0.0,0.0
|
||||
2023-05-24 00:00:00+02:00,1.194000005722046,1.2599999904632568,0.7799999713897705,0.7799999713897705,0.7799999713897705,627823,0.0,0.0
|
||||
2023-05-25 00:00:00+02:00,0.9800000190734863,1.2200000286102295,0.7020000219345093,0.7329999804496765,0.7329999804496765,1068939,0.0,0.0
|
||||
2023-05-26 00:00:00+02:00,0.6600000262260437,0.7200000286102295,0.6029999852180481,0.6119999885559082,0.6119999885559082,631580,0.0,0.0
|
||||
2023-05-29 00:00:00+02:00,0.6200000047683716,0.75,0.5789999961853027,0.6000000238418579,0.6000000238418579,586150,0.0,0.0
|
||||
2023-05-30 00:00:00+02:00,0.6100000143051147,0.6349999904632568,0.4970000088214874,0.4970000088214874,0.4970000088214874,552308,0.0,0.0
|
||||
2023-05-31 00:00:00+02:00,0.45899999141693115,0.4699999988079071,0.37400001287460327,0.3799999952316284,0.3799999952316284,899067,0.0,0.0
|
||||
|
85
tests/data/AV-L-1wk-bad-stock-split-fixed.csv
Normal file
85
tests/data/AV-L-1wk-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,85 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2021-12-13 00:00:00+00:00,393.999975585938,406.6,391.4,402.899916992188,291.232287597656,62714764.4736842,0,0
|
||||
2021-12-20 00:00:00+00:00,393.999975585938,412.199990234375,392.502983398438,409.899997558594,296.292243652344,46596651.3157895,0,0
|
||||
2021-12-27 00:00:00+00:00,409.899997558594,416.550971679688,408.387001953125,410.4,296.653642578125,10818482.8947368,0,0
|
||||
2022-01-03 00:00:00+00:00,410.4,432.199995117188,410.4,432.099985351563,312.339265136719,44427327.6315789,0,0
|
||||
2022-01-10 00:00:00+00:00,431.3,439.199982910156,429.099970703125,436.099912109375,315.230618896484,29091400,0,0
|
||||
2022-01-17 00:00:00+00:00,437.999912109375,445.199965820313,426.999997558594,431.999975585938,312.267017822266,43787351.3157895,0,0
|
||||
2022-01-24 00:00:00+00:00,430.099975585938,440.999973144531,420.999968261719,433.499982910156,313.351237792969,58487296.0526316,0,0
|
||||
2022-01-31 00:00:00+00:00,436.199968261719,443.049987792969,432.099985351563,435.199916992188,314.580045166016,43335806.5789474,0,0
|
||||
2022-02-07 00:00:00+00:00,437.899995117188,448.799992675781,436.051994628906,444.39998046875,321.230207519531,39644061.8421053,0,0
|
||||
2022-02-14 00:00:00+00:00,437.699975585938,441.999978027344,426.699968261719,432.199995117188,312.411558837891,49972693.4210526,0,0
|
||||
2022-02-21 00:00:00+00:00,435.499992675781,438.476999511719,408.29998046875,423.399970703125,306.050571289063,65719596.0526316,0,0
|
||||
2022-02-28 00:00:00+00:00,415.099995117188,427.999909667969,386.199932861328,386.799945068359,279.594578857422,94057936.8421053,4.1875,0
|
||||
2022-03-07 00:00:00+00:00,374.999952392578,417.299978027344,361.101981201172,409.599968261719,298.389248046875,71269101.3157895,0,0
|
||||
2022-03-14 00:00:00+00:00,413.099985351563,426.699968261719,408.899992675781,422.399965820313,307.713929443359,55431927.6315789,0,0
|
||||
2022-03-21 00:00:00+00:00,422.699995117188,442.7,422.399965820313,437.799985351563,318.932696533203,39896352.6315789,0,0
|
||||
2022-03-28 00:00:00+01:00,442.49998046875,460.999978027344,440.097983398438,444.6,323.886403808594,56413515.7894737,0,0
|
||||
2022-04-04 00:00:00+01:00,439.699985351563,445.399985351563,421.999973144531,425.799973144531,310.190817871094,49415836.8421053,19.342106,0
|
||||
2022-04-11 00:00:00+01:00,425.39998046875,435.599909667969,420.799995117188,434.299968261719,327.211427001953,29875081.5789474,0,0
|
||||
2022-04-18 00:00:00+01:00,434.299968261719,447.799987792969,433.599992675781,437.799985351563,329.848419189453,49288272.3684211,0,0
|
||||
2022-04-25 00:00:00+01:00,430.699987792969,438.799990234375,423.999982910156,433.299916992188,326.457967529297,44656776.3157895,0,0
|
||||
2022-05-02 00:00:00+01:00,433.299916992188,450.999975585938,414.499982910156,414.899975585938,312.595018310547,29538167.1052632,0,0
|
||||
2022-05-09 00:00:00+01:00,413.199995117188,417.449992675781,368.282923583984,408.199970703125,307.547099609375,73989611.8421053,0,0
|
||||
2022-05-16 00:00:00+01:00,384,423.600006103516,384,412.100006103516,310.485473632813,81938261,101.69,0.76
|
||||
2022-05-23 00:00:00+01:00,416.100006103516,442.399993896484,341.915008544922,440.899993896484,409.764678955078,45432941,0,0
|
||||
2022-05-30 00:00:00+01:00,442.700012207031,444.200012207031,426.600006103516,428.700012207031,398.426239013672,37906659,0,0
|
||||
2022-06-06 00:00:00+01:00,425.299987792969,434.010009765625,405.200012207031,405.399993896484,376.771606445313,40648810,0,0
|
||||
2022-06-13 00:00:00+01:00,402.5,420,399.799987792969,411.200012207031,382.162048339844,74196958,0,0
|
||||
2022-06-20 00:00:00+01:00,412.5,421.899993896484,398.399993896484,411.5,382.440826416016,28679717,0,0
|
||||
2022-06-27 00:00:00+01:00,413.100006103516,422.399993896484,397.399993896484,401.600006103516,373.239959716797,35468994,0,0
|
||||
2022-07-04 00:00:00+01:00,405.399993896484,406.600006103516,382.299987792969,401.299987792969,372.961120605469,35304748,0,0
|
||||
2022-07-11 00:00:00+01:00,394.799987792969,405.850006103516,383.399993896484,396.600006103516,368.593048095703,42308459,0,0
|
||||
2022-07-18 00:00:00+01:00,392.5,399.700012207031,384.799987792969,391.700012207031,364.039093017578,36656839,0,0
|
||||
2022-07-25 00:00:00+01:00,392.200012207031,400.799987792969,388.700012207031,396,368.035430908203,33124660,0,0
|
||||
2022-08-01 00:00:00+01:00,396.399993896484,405.5,390.415008544922,402,373.611724853516,21753121,0,0
|
||||
2022-08-08 00:00:00+01:00,406.600006103516,473.700012207031,403.299987792969,467.899993896484,434.858032226563,59155709,0,0
|
||||
2022-08-15 00:00:00+01:00,468.100006103516,470.5,434,437,406.140106201172,36989620,10.3,0
|
||||
2022-08-22 00:00:00+01:00,436.100006103516,436.869995117188,419.299987792969,420.5,399.780303955078,36492572,0,0
|
||||
2022-08-29 00:00:00+01:00,420.5,426.600006103516,408.600006103516,426.600006103516,405.579742431641,29573657,0,0
|
||||
2022-09-05 00:00:00+01:00,418.5,444.4169921875,416.100006103516,443.100006103516,421.266723632813,34375126,0,0
|
||||
2022-09-12 00:00:00+01:00,444.649993896484,448.899993896484,435.200012207031,440.100006103516,418.414520263672,39085960,0,0
|
||||
2022-09-19 00:00:00+01:00,440.100006103516,447.200012207031,419.299987792969,422.899993896484,402.062042236328,27982081,0,0
|
||||
2022-09-26 00:00:00+01:00,421.200012207031,421.200012207031,373.31201171875,388.200012207031,369.071868896484,70408935,0,0
|
||||
2022-10-03 00:00:00+01:00,382.899993896484,409.875,380.555999755859,400.700012207031,380.955932617188,37581751,0,0
|
||||
2022-10-10 00:00:00+01:00,395.799987792969,404.470001220703,366.700012207031,394.299987792969,374.871276855469,52952323,0,0
|
||||
2022-10-17 00:00:00+01:00,394.299987792969,414.799987792969,393,406.5,386.470123291016,26441475,0,0
|
||||
2022-10-24 00:00:00+01:00,407.100006103516,418.227996826172,407.100006103516,413.299987792969,392.93505859375,26239756,0,0
|
||||
2022-10-31 00:00:00+00:00,413.899993896484,430.200012207031,412,429.299987792969,408.146667480469,23168047,0,0
|
||||
2022-11-07 00:00:00+00:00,427.299987792969,445.899993896484,420.652008056641,438.399993896484,416.798278808594,36709117,0,0
|
||||
2022-11-14 00:00:00+00:00,438.299987792969,458.489990234375,435,455.100006103516,432.675415039063,29106506,0,0
|
||||
2022-11-21 00:00:00+00:00,454.399993896484,461,450,456.600006103516,434.101501464844,21667730,0,0
|
||||
2022-11-28 00:00:00+00:00,453.799987792969,456.899993896484,435.100006103516,444.799987792969,422.882934570313,33326204,0,0
|
||||
2022-12-05 00:00:00+00:00,442.899993896484,450.25,441.299987792969,448,425.925262451172,29147089,0,0
|
||||
2022-12-12 00:00:00+00:00,445.100006103516,451.299987792969,431.200012207031,436.100006103516,414.611633300781,46593233,0,0
|
||||
2022-12-19 00:00:00+00:00,436,452.600006103516,433.600006103516,444,422.122344970703,20982140,0,0
|
||||
2022-12-26 00:00:00+00:00,444,452.058013916016,442.399993896484,442.799987792969,420.981475830078,8249664,0,0
|
||||
2023-01-02 00:00:00+00:00,445.899993896484,458.149993896484,443.299987792969,456,433.531066894531,28687622,0,0
|
||||
2023-01-09 00:00:00+00:00,456,461.066009521484,435.799987792969,444.200012207031,422.3125,39237336,0,0
|
||||
2023-01-16 00:00:00+00:00,444.299987792969,447.200012207031,434.399993896484,439,417.368713378906,35267336,0,0
|
||||
2023-01-23 00:00:00+00:00,440,459.299987792969,439.5,457.399993896484,434.862091064453,37495012,0,0
|
||||
2023-01-30 00:00:00+00:00,454.399993896484,459.399993896484,447.799987792969,450.299987792969,428.111907958984,48879358,0,0
|
||||
2023-02-06 00:00:00+00:00,448,449.200012207031,436.299987792969,440,418.319458007813,38799772,0,0
|
||||
2023-02-13 00:00:00+00:00,441.200012207031,450.299987792969,440,447.600006103516,425.544982910156,30251441,0,0
|
||||
2023-02-20 00:00:00+00:00,448.5,450.799987792969,434.299987792969,440,418.319458007813,26764528,0,0
|
||||
2023-02-27 00:00:00+00:00,442.899993896484,450.5,441.608001708984,447.200012207031,425.164703369141,29895454,0,0
|
||||
2023-03-06 00:00:00+00:00,447.399993896484,467.299987792969,443.100006103516,449.700012207031,427.54150390625,82322819,0,0
|
||||
2023-03-13 00:00:00+00:00,450,451.417999267578,400.68701171875,402.200012207031,382.382019042969,85158023,0,0
|
||||
2023-03-20 00:00:00+00:00,396.200012207031,425.399993896484,383.496002197266,408.299987792969,388.181427001953,60152666,0,0
|
||||
2023-03-27 00:00:00+01:00,416,422.049987792969,399.549987792969,404.200012207031,384.283477783203,81534829,20.7,0
|
||||
2023-04-03 00:00:00+01:00,405,434.100006103516,404.399993896484,417.100006103516,417.100006103516,43217151,0,0
|
||||
2023-04-10 00:00:00+01:00,419.100006103516,426.700012207031,419.100006103516,421.700012207031,421.700012207031,32435695,0,0
|
||||
2023-04-17 00:00:00+01:00,423.700012207031,427.635009765625,415.399993896484,420.299987792969,420.299987792969,37715986,0,0
|
||||
2023-04-24 00:00:00+01:00,418.100006103516,423,415.299987792969,423,423,34331974,0,0
|
||||
2023-05-01 00:00:00+01:00,423.399993896484,426.100006103516,406.399993896484,414.600006103516,414.600006103516,40446519,0,0
|
||||
2023-05-08 00:00:00+01:00,414.600006103516,419.100006103516,408,412.700012207031,412.700012207031,36950836,0,0
|
||||
2023-05-15 00:00:00+01:00,414,418.399993896484,407.399993896484,413.5,413.5,53109487,0,0
|
||||
2023-05-22 00:00:00+01:00,413.600006103516,424,394.700012207031,401.299987792969,401.299987792969,64363368,0,0
|
||||
2023-05-29 00:00:00+01:00,401.299987792969,409.477996826172,392.700012207031,409.100006103516,409.100006103516,47587959,0,0
|
||||
2023-06-05 00:00:00+01:00,406.299987792969,410.700012207031,400.100006103516,400.899993896484,400.899993896484,22494985,0,0
|
||||
2023-06-12 00:00:00+01:00,404.100006103516,406,394.5,396,396,41531163,0,0
|
||||
2023-06-19 00:00:00+01:00,394,399.899993896484,380.720001220703,386.200012207031,386.200012207031,40439880,0,0
|
||||
2023-06-26 00:00:00+01:00,387.200012207031,397,382.899993896484,395.200012207031,395.200012207031,27701915,0,0
|
||||
2023-07-03 00:00:00+01:00,396.5,399.799987792969,380.100006103516,381.799987792969,381.799987792969,26005305,0,0
|
||||
2023-07-10 00:00:00+01:00,380,392.299987792969,379.403991699219,386,386,29789300,0,0
|
||||
2023-07-17 00:00:00+01:00,385,389.5,384.251007080078,387.100006103516,387.100006103516,0,0,0
|
||||
|
85
tests/data/AV-L-1wk-bad-stock-split.csv
Normal file
85
tests/data/AV-L-1wk-bad-stock-split.csv
Normal file
@@ -0,0 +1,85 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2021-12-13 00:00:00+00:00,518.4210205078125,535.0,515.0,530.1314697265625,383.20037841796875,47663221,0.0,0.0
|
||||
2021-12-20 00:00:00+00:00,518.4210205078125,542.368408203125,516.4512939453125,539.3421020507812,389.85821533203125,35413455,0.0,0.0
|
||||
2021-12-27 00:00:00+00:00,539.3421020507812,548.0933837890625,537.351318359375,540.0,390.333740234375,8222047,0.0,0.0
|
||||
2022-01-03 00:00:00+00:00,540.0,568.6842041015625,540.0,568.5526123046875,410.97271728515625,33764769,0.0,0.0
|
||||
2022-01-10 00:00:00+00:00,567.5,577.8947143554688,564.605224609375,573.815673828125,414.7771301269531,22109464,0.0,0.0
|
||||
2022-01-17 00:00:00+00:00,576.315673828125,585.7894287109375,561.8421020507812,568.4210205078125,410.8776550292969,33278387,0.0,0.0
|
||||
2022-01-24 00:00:00+00:00,565.9210205078125,580.2631225585938,553.9473266601562,570.3947143554688,412.30426025390625,44450345,0.0,0.0
|
||||
2022-01-31 00:00:00+00:00,573.9473266601562,582.9605102539062,568.5526123046875,572.6314697265625,413.9211120605469,32935213,0.0,0.0
|
||||
2022-02-07 00:00:00+00:00,576.1842041015625,590.5263061523438,573.7526245117188,584.73681640625,422.67132568359375,30129487,0.0,0.0
|
||||
2022-02-14 00:00:00+00:00,575.9210205078125,581.5789184570312,561.4473266601562,568.6842041015625,411.0678405761719,37979247,0.0,0.0
|
||||
2022-02-21 00:00:00+00:00,573.0263061523438,576.9434204101562,537.23681640625,557.105224609375,402.6981201171875,49946893,0.0,0.0
|
||||
2022-02-28 00:00:00+00:00,546.1842041015625,563.1577758789062,508.1578063964844,508.9472961425781,367.8876037597656,71484032,4.1875,0.0
|
||||
2022-03-07 00:00:00+00:00,493.4209899902344,549.0789184570312,475.1341857910156,538.9473266601562,392.617431640625,54164517,0.0,0.0
|
||||
2022-03-14 00:00:00+00:00,543.5526123046875,561.4473266601562,538.0263061523438,555.7894287109375,404.8867492675781,42128265,0.0,0.0
|
||||
2022-03-21 00:00:00+00:00,556.1842041015625,582.5,555.7894287109375,576.0526123046875,419.6482849121094,30321228,0.0,0.0
|
||||
2022-03-28 00:00:00+01:00,582.23681640625,606.5789184570312,579.0762939453125,585.0,426.16632080078125,42874272,0.0,0.0
|
||||
2022-04-04 00:00:00+01:00,578.5526123046875,586.0526123046875,555.2631225585938,560.2631225585938,408.14581298828125,37556036,19.342106,0.0
|
||||
2022-04-11 00:00:00+01:00,559.73681640625,573.1577758789062,553.6842041015625,571.4473266601562,430.5413513183594,22705062,0.0,0.0
|
||||
2022-04-18 00:00:00+01:00,571.4473266601562,589.2105102539062,570.5263061523438,576.0526123046875,434.0110778808594,37459087,0.0,0.0
|
||||
2022-04-25 00:00:00+01:00,566.7105102539062,577.368408203125,557.8947143554688,570.1314697265625,429.5499572753906,33939150,0.0,0.0
|
||||
2022-05-02 00:00:00+01:00,570.1314697265625,593.4210205078125,545.3947143554688,545.9210205078125,411.3092346191406,22449007,0.0,0.0
|
||||
2022-05-09 00:00:00+01:00,543.6842041015625,549.2763061523438,484.5827941894531,537.105224609375,404.667236328125,56232105,0.0,0.0
|
||||
2022-05-16 00:00:00+01:00,384.0,423.6000061035156,384.0,412.1000061035156,310.4854736328125,81938261,101.69,0.76
|
||||
2022-05-23 00:00:00+01:00,416.1000061035156,442.3999938964844,341.9150085449219,440.8999938964844,409.7646789550781,45432941,0.0,0.0
|
||||
2022-05-30 00:00:00+01:00,442.70001220703125,444.20001220703125,426.6000061035156,428.70001220703125,398.4262390136719,37906659,0.0,0.0
|
||||
2022-06-06 00:00:00+01:00,425.29998779296875,434.010009765625,405.20001220703125,405.3999938964844,376.7716064453125,40648810,0.0,0.0
|
||||
2022-06-13 00:00:00+01:00,402.5,420.0,399.79998779296875,411.20001220703125,382.16204833984375,74196958,0.0,0.0
|
||||
2022-06-20 00:00:00+01:00,412.5,421.8999938964844,398.3999938964844,411.5,382.4408264160156,28679717,0.0,0.0
|
||||
2022-06-27 00:00:00+01:00,413.1000061035156,422.3999938964844,397.3999938964844,401.6000061035156,373.2399597167969,35468994,0.0,0.0
|
||||
2022-07-04 00:00:00+01:00,405.3999938964844,406.6000061035156,382.29998779296875,401.29998779296875,372.96112060546875,35304748,0.0,0.0
|
||||
2022-07-11 00:00:00+01:00,394.79998779296875,405.8500061035156,383.3999938964844,396.6000061035156,368.5930480957031,42308459,0.0,0.0
|
||||
2022-07-18 00:00:00+01:00,392.5,399.70001220703125,384.79998779296875,391.70001220703125,364.0390930175781,36656839,0.0,0.0
|
||||
2022-07-25 00:00:00+01:00,392.20001220703125,400.79998779296875,388.70001220703125,396.0,368.0354309082031,33124660,0.0,0.0
|
||||
2022-08-01 00:00:00+01:00,396.3999938964844,405.5,390.4150085449219,402.0,373.6117248535156,21753121,0.0,0.0
|
||||
2022-08-08 00:00:00+01:00,406.6000061035156,473.70001220703125,403.29998779296875,467.8999938964844,434.8580322265625,59155709,0.0,0.0
|
||||
2022-08-15 00:00:00+01:00,468.1000061035156,470.5,434.0,437.0,406.1401062011719,36989620,10.3,0.0
|
||||
2022-08-22 00:00:00+01:00,436.1000061035156,436.8699951171875,419.29998779296875,420.5,399.7803039550781,36492572,0.0,0.0
|
||||
2022-08-29 00:00:00+01:00,420.5,426.6000061035156,408.6000061035156,426.6000061035156,405.5797424316406,29573657,0.0,0.0
|
||||
2022-09-05 00:00:00+01:00,418.5,444.4169921875,416.1000061035156,443.1000061035156,421.2667236328125,34375126,0.0,0.0
|
||||
2022-09-12 00:00:00+01:00,444.6499938964844,448.8999938964844,435.20001220703125,440.1000061035156,418.4145202636719,39085960,0.0,0.0
|
||||
2022-09-19 00:00:00+01:00,440.1000061035156,447.20001220703125,419.29998779296875,422.8999938964844,402.0620422363281,27982081,0.0,0.0
|
||||
2022-09-26 00:00:00+01:00,421.20001220703125,421.20001220703125,373.31201171875,388.20001220703125,369.0718688964844,70408935,0.0,0.0
|
||||
2022-10-03 00:00:00+01:00,382.8999938964844,409.875,380.5559997558594,400.70001220703125,380.9559326171875,37581751,0.0,0.0
|
||||
2022-10-10 00:00:00+01:00,395.79998779296875,404.4700012207031,366.70001220703125,394.29998779296875,374.87127685546875,52952323,0.0,0.0
|
||||
2022-10-17 00:00:00+01:00,394.29998779296875,414.79998779296875,393.0,406.5,386.4701232910156,26441475,0.0,0.0
|
||||
2022-10-24 00:00:00+01:00,407.1000061035156,418.2279968261719,407.1000061035156,413.29998779296875,392.93505859375,26239756,0.0,0.0
|
||||
2022-10-31 00:00:00+00:00,413.8999938964844,430.20001220703125,412.0,429.29998779296875,408.14666748046875,23168047,0.0,0.0
|
||||
2022-11-07 00:00:00+00:00,427.29998779296875,445.8999938964844,420.6520080566406,438.3999938964844,416.79827880859375,36709117,0.0,0.0
|
||||
2022-11-14 00:00:00+00:00,438.29998779296875,458.489990234375,435.0,455.1000061035156,432.6754150390625,29106506,0.0,0.0
|
||||
2022-11-21 00:00:00+00:00,454.3999938964844,461.0,450.0,456.6000061035156,434.10150146484375,21667730,0.0,0.0
|
||||
2022-11-28 00:00:00+00:00,453.79998779296875,456.8999938964844,435.1000061035156,444.79998779296875,422.8829345703125,33326204,0.0,0.0
|
||||
2022-12-05 00:00:00+00:00,442.8999938964844,450.25,441.29998779296875,448.0,425.9252624511719,29147089,0.0,0.0
|
||||
2022-12-12 00:00:00+00:00,445.1000061035156,451.29998779296875,431.20001220703125,436.1000061035156,414.61163330078125,46593233,0.0,0.0
|
||||
2022-12-19 00:00:00+00:00,436.0,452.6000061035156,433.6000061035156,444.0,422.1223449707031,20982140,0.0,0.0
|
||||
2022-12-26 00:00:00+00:00,444.0,452.0580139160156,442.3999938964844,442.79998779296875,420.9814758300781,8249664,0.0,0.0
|
||||
2023-01-02 00:00:00+00:00,445.8999938964844,458.1499938964844,443.29998779296875,456.0,433.53106689453125,28687622,0.0,0.0
|
||||
2023-01-09 00:00:00+00:00,456.0,461.0660095214844,435.79998779296875,444.20001220703125,422.3125,39237336,0.0,0.0
|
||||
2023-01-16 00:00:00+00:00,444.29998779296875,447.20001220703125,434.3999938964844,439.0,417.36871337890625,35267336,0.0,0.0
|
||||
2023-01-23 00:00:00+00:00,440.0,459.29998779296875,439.5,457.3999938964844,434.8620910644531,37495012,0.0,0.0
|
||||
2023-01-30 00:00:00+00:00,454.3999938964844,459.3999938964844,447.79998779296875,450.29998779296875,428.1119079589844,48879358,0.0,0.0
|
||||
2023-02-06 00:00:00+00:00,448.0,449.20001220703125,436.29998779296875,440.0,418.3194580078125,38799772,0.0,0.0
|
||||
2023-02-13 00:00:00+00:00,441.20001220703125,450.29998779296875,440.0,447.6000061035156,425.54498291015625,30251441,0.0,0.0
|
||||
2023-02-20 00:00:00+00:00,448.5,450.79998779296875,434.29998779296875,440.0,418.3194580078125,26764528,0.0,0.0
|
||||
2023-02-27 00:00:00+00:00,442.8999938964844,450.5,441.6080017089844,447.20001220703125,425.1647033691406,29895454,0.0,0.0
|
||||
2023-03-06 00:00:00+00:00,447.3999938964844,467.29998779296875,443.1000061035156,449.70001220703125,427.54150390625,82322819,0.0,0.0
|
||||
2023-03-13 00:00:00+00:00,450.0,451.4179992675781,400.68701171875,402.20001220703125,382.38201904296875,85158023,0.0,0.0
|
||||
2023-03-20 00:00:00+00:00,396.20001220703125,425.3999938964844,383.4960021972656,408.29998779296875,388.1814270019531,60152666,0.0,0.0
|
||||
2023-03-27 00:00:00+01:00,416.0,422.04998779296875,399.54998779296875,404.20001220703125,384.2834777832031,81534829,20.7,0.0
|
||||
2023-04-03 00:00:00+01:00,405.0,434.1000061035156,404.3999938964844,417.1000061035156,417.1000061035156,43217151,0.0,0.0
|
||||
2023-04-10 00:00:00+01:00,419.1000061035156,426.70001220703125,419.1000061035156,421.70001220703125,421.70001220703125,32435695,0.0,0.0
|
||||
2023-04-17 00:00:00+01:00,423.70001220703125,427.635009765625,415.3999938964844,420.29998779296875,420.29998779296875,37715986,0.0,0.0
|
||||
2023-04-24 00:00:00+01:00,418.1000061035156,423.0,415.29998779296875,423.0,423.0,34331974,0.0,0.0
|
||||
2023-05-01 00:00:00+01:00,423.3999938964844,426.1000061035156,406.3999938964844,414.6000061035156,414.6000061035156,40446519,0.0,0.0
|
||||
2023-05-08 00:00:00+01:00,414.6000061035156,419.1000061035156,408.0,412.70001220703125,412.70001220703125,36950836,0.0,0.0
|
||||
2023-05-15 00:00:00+01:00,414.0,418.3999938964844,407.3999938964844,413.5,413.5,53109487,0.0,0.0
|
||||
2023-05-22 00:00:00+01:00,413.6000061035156,424.0,394.70001220703125,401.29998779296875,401.29998779296875,64363368,0.0,0.0
|
||||
2023-05-29 00:00:00+01:00,401.29998779296875,409.4779968261719,392.70001220703125,409.1000061035156,409.1000061035156,47587959,0.0,0.0
|
||||
2023-06-05 00:00:00+01:00,406.29998779296875,410.70001220703125,400.1000061035156,400.8999938964844,400.8999938964844,22494985,0.0,0.0
|
||||
2023-06-12 00:00:00+01:00,404.1000061035156,406.0,394.5,396.0,396.0,41531163,0.0,0.0
|
||||
2023-06-19 00:00:00+01:00,394.0,399.8999938964844,380.7200012207031,386.20001220703125,386.20001220703125,40439880,0.0,0.0
|
||||
2023-06-26 00:00:00+01:00,387.20001220703125,397.0,382.8999938964844,395.20001220703125,395.20001220703125,27701915,0.0,0.0
|
||||
2023-07-03 00:00:00+01:00,396.5,399.79998779296875,380.1000061035156,381.79998779296875,381.79998779296875,26005305,0.0,0.0
|
||||
2023-07-10 00:00:00+01:00,380.0,392.29998779296875,379.40399169921875,386.0,386.0,29789300,0.0,0.0
|
||||
2023-07-17 00:00:00+01:00,385.0,389.5,384.2510070800781,387.1000061035156,387.1000061035156,0,0.0,0.0
|
||||
|
11
tests/data/CNE-L-1d-bad-stock-split-fixed.csv
Normal file
11
tests/data/CNE-L-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,11 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-18 00:00:00+01:00,193.220001220703,200.839996337891,193.220001220703,196.839996337891,196.839996337891,653125,0,0
|
||||
2023-05-17 00:00:00+01:00,199.740005493164,207.738006591797,190.121994018555,197.860000610352,197.860000610352,822268,0,0
|
||||
2023-05-16 00:00:00+01:00,215.600006103516,215.600006103516,201.149993896484,205.100006103516,205.100006103516,451009,243.93939,0.471428571428571
|
||||
2023-05-15 00:00:00+01:00,215.399955531529,219.19995640346,210.599967302595,217.399987792969,102.39998147147,1761679.3939394,0,0
|
||||
2023-05-12 00:00:00+01:00,214.599988664899,216.199965558733,209.599965558733,211.399977329799,99.573855808803,1522298.48484849,0,0
|
||||
2023-05-11 00:00:00+01:00,219.999966430664,219.999966430664,212.199987357003,215.000000871931,101.269541277204,3568042.12121213,0,0
|
||||
2023-05-10 00:00:00+01:00,218.199954659598,223.000000435965,212.59995640346,215.399955531529,101.457929992676,5599908.78787879,0,0
|
||||
2023-05-09 00:00:00+01:00,224,227.688003540039,218.199996948242,218.399993896484,102.87100982666,1906090,0,0
|
||||
2023-05-05 00:00:00+01:00,220.999968174526,225.19996686663,220.799976457868,224.4,105.697140066964,964523.636363637,0,0
|
||||
2023-05-04 00:00:00+01:00,216.999989972796,222.799965558733,216.881988961356,221.399965994698,104.284055655343,880983.93939394,0,0
|
||||
|
11
tests/data/CNE-L-1d-bad-stock-split.csv
Normal file
11
tests/data/CNE-L-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,11 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-18 00:00:00+01:00,193.220001220703,200.839996337891,193.220001220703,196.839996337891,196.839996337891,653125,0,0
|
||||
2023-05-17 00:00:00+01:00,199.740005493164,207.738006591797,190.121994018555,197.860000610352,197.860000610352,822268,0,0
|
||||
2023-05-16 00:00:00+01:00,215.600006103516,215.600006103516,201.149993896484,205.100006103516,205.100006103516,451009,243.93939,0.471428571428571
|
||||
2023-05-15 00:00:00+01:00,456.908996582031,464.969604492188,446.727203369141,461.151489257813,217.21208190918,830506,0,0
|
||||
2023-05-12 00:00:00+01:00,455.212097167969,458.605987548828,444.605987548828,448.424194335938,211.217269897461,717655,0,0
|
||||
2023-05-11 00:00:00+01:00,466.666595458984,466.666595458984,450.121185302734,456.060607910156,214.814178466797,1682077,0,0
|
||||
2023-05-10 00:00:00+01:00,462.848388671875,473.030303955078,450.969604492188,456.908996582031,215.213790893555,2639957,0,0
|
||||
2023-05-09 00:00:00+01:00,224,227.688003540039,218.199996948242,218.399993896484,102.87100982666,1906090,0,0
|
||||
2023-05-05 00:00:00+01:00,468.787811279297,477.696899414063,468.363586425781,476,224.2060546875,454704,0,0
|
||||
2023-05-04 00:00:00+01:00,460.303009033203,472.605987548828,460.052703857422,469.636291503906,221.208602905273,415321,0,0
|
||||
|
24
tests/data/DEX-AX-1d-bad-stock-split-fixed.csv
Normal file
24
tests/data/DEX-AX-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,24 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-31 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-30 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0.4406
|
||||
2023-05-29 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-26 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-25 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-24 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-23 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-22 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-19 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-18 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-17 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-16 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-15 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-12 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-11 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-10 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-09 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-08 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-05 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-04 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-03 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-02 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-01 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
|
24
tests/data/DEX-AX-1d-bad-stock-split.csv
Normal file
24
tests/data/DEX-AX-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,24 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-31 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-30 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0.4406
|
||||
2023-05-29 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-26 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-25 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-24 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-23 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-22 00:00:00+10:00,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0.120290003716946,0,0,0
|
||||
2023-05-19 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-18 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-17 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-16 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-15 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-12 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-11 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-10 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-09 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-08 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-05 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-04 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-03 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-02 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
2023-05-01 00:00:00+10:00,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0.0529999993741512,0,0,0
|
||||
|
42
tests/data/LA-V-1d-bad-stock-split-fixed.csv
Normal file
42
tests/data/LA-V-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,42 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2020-09-30 00:00:00-04:00,4.40000009536743,4.44999980926514,4.01999998092651,4.44999980926514,4.44999980926514,22600,0,0
|
||||
2020-09-29 00:00:00-04:00,4.3899998664856,4.40000009536743,4.13000011444092,4.30000019073486,4.30000019073486,10800,0,0
|
||||
2020-09-28 00:00:00-04:00,4.09000015258789,4.25,4.09000015258789,4.25,4.25,8000,0,0
|
||||
2020-09-25 00:00:00-04:00,3.95000004768372,4.09999990463257,3.95000004768372,4.05000019073486,4.05000019073486,13500,0,0
|
||||
2020-09-24 00:00:00-04:00,3.84999990463257,4,3.84999990463257,4,4,8800,0,0
|
||||
2020-09-23 00:00:00-04:00,3.99000000953674,4,3.99000000953674,4,4,5900,0,0
|
||||
2020-09-22 00:00:00-04:00,3.90000009536743,4.09999990463257,3.84999990463257,4.09999990463257,4.09999990463257,3100,0,0
|
||||
2020-09-21 00:00:00-04:00,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,1200,0,0
|
||||
2020-09-18 00:00:00-04:00,3.92000007629395,4.09999990463257,3.92000007629395,4.09999990463257,4.09999990463257,27200,0,0
|
||||
2020-09-17 00:00:00-04:00,3.90000009536743,3.99000000953674,3.8199999332428,3.99000000953674,3.99000000953674,3300,0,0
|
||||
2020-09-16 00:00:00-04:00,3.79999995231628,4,3.79999995231628,4,4,3300,0,0
|
||||
2020-09-15 00:00:00-04:00,3.95000004768372,4,3.95000004768372,4,4,2400,0,0
|
||||
2020-09-14 00:00:00-04:00,3.96000003814697,4,3.96000003814697,4,4,800,0,0
|
||||
2020-09-11 00:00:00-04:00,3.95000004768372,3.97000002861023,3.72000002861023,3.97000002861023,3.97000002861023,5700,0,0
|
||||
2020-09-10 00:00:00-04:00,4,4.09999990463257,4,4.09999990463257,4.09999990463257,7100,0,0
|
||||
2020-09-09 00:00:00-04:00,3.5699999332428,4,3.5699999332428,4,4,18100,0,0
|
||||
2020-09-08 00:00:00-04:00,3.40000009536743,3.59999990463257,3.40000009536743,3.59999990463257,3.59999990463257,19500,0,0
|
||||
2020-09-04 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,400,0,0
|
||||
2020-09-03 00:00:00-04:00,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,0,0,0
|
||||
2020-09-02 00:00:00-04:00,3.5,3.58999991416931,3.5,3.58999991416931,3.58999991416931,2000,0,0
|
||||
2020-09-01 00:00:00-04:00,3.5,3.59999990463257,3.5,3.59999990463257,3.59999990463257,1200,0,0
|
||||
2020-08-31 00:00:00-04:00,3.15000009536743,3.70000004768372,3.15000009536743,3.70000004768372,3.70000004768372,26500,0,0
|
||||
2020-08-28 00:00:00-04:00,3.76999998092651,3.76999998092651,3.70000004768372,3.70000004768372,3.70000004768372,1600,0,0
|
||||
2020-08-27 00:00:00-04:00,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,0,0,0
|
||||
2020-08-26 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0.1
|
||||
2020-08-25 00:00:00-04:00,3.40000009536743,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,2900,0,0
|
||||
2020-08-24 00:00:00-04:00,3.29999995231628,3.5,3.29999995231628,3.5,3.5,10000,0,0
|
||||
2020-08-21 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,150,0,0
|
||||
2020-08-20 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-19 00:00:00-04:00,3.40000009536743,3.5,3.40000009536743,3.5,3.5,9050,0,0
|
||||
2020-08-18 00:00:00-04:00,3.5,3.79999995231628,3.5,3.5,3.5,2250,0,0
|
||||
2020-08-17 00:00:00-04:00,2.79999995231628,3.70000004768372,2.79999995231628,3.70000004768372,3.70000004768372,5050,0,0
|
||||
2020-08-14 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-13 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-12 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-11 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-10 00:00:00-04:00,3.5,3.70000004768372,3.5,3.5,3.5,3300,0,0
|
||||
2020-08-07 00:00:00-04:00,3.5,3.79999995231628,3.5,3.79999995231628,3.79999995231628,2500,0,0
|
||||
2020-08-06 00:00:00-04:00,3.5,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,3000,0,0
|
||||
2020-08-05 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
|
||||
2020-08-04 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
|
||||
|
42
tests/data/LA-V-1d-bad-stock-split.csv
Normal file
42
tests/data/LA-V-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,42 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2020-09-30 00:00:00-04:00,4.40000009536743,4.44999980926514,4.01999998092651,4.44999980926514,4.44999980926514,22600,0,0
|
||||
2020-09-29 00:00:00-04:00,4.3899998664856,4.40000009536743,4.13000011444092,4.30000019073486,4.30000019073486,10800,0,0
|
||||
2020-09-28 00:00:00-04:00,4.09000015258789,4.25,4.09000015258789,4.25,4.25,8000,0,0
|
||||
2020-09-25 00:00:00-04:00,3.95000004768372,4.09999990463257,3.95000004768372,4.05000019073486,4.05000019073486,13500,0,0
|
||||
2020-09-24 00:00:00-04:00,3.84999990463257,4,3.84999990463257,4,4,8800,0,0
|
||||
2020-09-23 00:00:00-04:00,3.99000000953674,4,3.99000000953674,4,4,5900,0,0
|
||||
2020-09-22 00:00:00-04:00,3.90000009536743,4.09999990463257,3.84999990463257,4.09999990463257,4.09999990463257,3100,0,0
|
||||
2020-09-21 00:00:00-04:00,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,4.09999990463257,1200,0,0
|
||||
2020-09-18 00:00:00-04:00,3.92000007629395,4.09999990463257,3.92000007629395,4.09999990463257,4.09999990463257,27200,0,0
|
||||
2020-09-17 00:00:00-04:00,3.90000009536743,3.99000000953674,3.8199999332428,3.99000000953674,3.99000000953674,3300,0,0
|
||||
2020-09-16 00:00:00-04:00,3.79999995231628,4,3.79999995231628,4,4,3300,0,0
|
||||
2020-09-15 00:00:00-04:00,3.95000004768372,4,3.95000004768372,4,4,2400,0,0
|
||||
2020-09-14 00:00:00-04:00,3.96000003814697,4,3.96000003814697,4,4,800,0,0
|
||||
2020-09-11 00:00:00-04:00,3.95000004768372,3.97000002861023,3.72000002861023,3.97000002861023,3.97000002861023,5700,0,0
|
||||
2020-09-10 00:00:00-04:00,4,4.09999990463257,4,4.09999990463257,4.09999990463257,7100,0,0
|
||||
2020-09-09 00:00:00-04:00,3.5699999332428,4,3.5699999332428,4,4,18100,0,0
|
||||
2020-09-08 00:00:00-04:00,3.40000009536743,3.59999990463257,3.40000009536743,3.59999990463257,3.59999990463257,19500,0,0
|
||||
2020-09-04 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,400,0,0
|
||||
2020-09-03 00:00:00-04:00,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,3.58999991416931,0,0,0
|
||||
2020-09-02 00:00:00-04:00,3.5,3.58999991416931,3.5,3.58999991416931,3.58999991416931,2000,0,0
|
||||
2020-09-01 00:00:00-04:00,3.5,3.59999990463257,3.5,3.59999990463257,3.59999990463257,1200,0,0
|
||||
2020-08-31 00:00:00-04:00,3.15000009536743,3.70000004768372,3.15000009536743,3.70000004768372,3.70000004768372,26500,0,0
|
||||
2020-08-28 00:00:00-04:00,3.76999998092651,3.76999998092651,3.70000004768372,3.70000004768372,3.70000004768372,1600,0,0
|
||||
2020-08-27 00:00:00-04:00,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,3.65000009536743,0,0,0
|
||||
2020-08-26 00:00:00-04:00,0.370000004768372,0.370000004768372,0.370000004768372,0.370000004768372,0.370000004768372,0,0,0.1
|
||||
2020-08-25 00:00:00-04:00,3.40000009536743,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,2900,0,0
|
||||
2020-08-24 00:00:00-04:00,3.29999995231628,3.5,3.29999995231628,3.5,3.5,10000,0,0
|
||||
2020-08-21 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,150,0,0
|
||||
2020-08-20 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-19 00:00:00-04:00,3.40000009536743,3.5,3.40000009536743,3.5,3.5,9050,0,0
|
||||
2020-08-18 00:00:00-04:00,3.5,3.79999995231628,3.5,3.5,3.5,2250,0,0
|
||||
2020-08-17 00:00:00-04:00,2.79999995231628,3.70000004768372,2.79999995231628,3.70000004768372,3.70000004768372,5050,0,0
|
||||
2020-08-14 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-13 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-12 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-11 00:00:00-04:00,3.5,3.5,3.5,3.5,3.5,0,0,0
|
||||
2020-08-10 00:00:00-04:00,3.5,3.70000004768372,3.5,3.5,3.5,3300,0,0
|
||||
2020-08-07 00:00:00-04:00,3.5,3.79999995231628,3.5,3.79999995231628,3.79999995231628,2500,0,0
|
||||
2020-08-06 00:00:00-04:00,3.5,3.70000004768372,3.40000009536743,3.70000004768372,3.70000004768372,3000,0,0
|
||||
2020-08-05 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
|
||||
2020-08-04 00:00:00-04:00,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,3.70000004768372,0,0,0
|
||||
|
17
tests/data/MOB-ST-1d-bad-stock-split-fixed.csv
Normal file
17
tests/data/MOB-ST-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,17 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-08 00:00:00+02:00,24.8999996185303,24.9500007629395,24.1000003814697,24.75,24.75,7187,0,0
|
||||
2023-05-09 00:00:00+02:00,25,25.5,23.1499996185303,24.1499996185303,24.1499996185303,22753,0,0
|
||||
2023-05-10 00:00:00+02:00,24.1499996185303,24.1499996185303,22,22.9500007629395,22.9500007629395,62727,0,0
|
||||
2023-05-11 00:00:00+02:00,22.9500007629395,25,22.9500007629395,23.3500003814697,23.3500003814697,19550,0,0
|
||||
2023-05-12 00:00:00+02:00,23.3500003814697,24,22.1000003814697,23.8500003814697,23.8500003814697,17143,0,0
|
||||
2023-05-15 00:00:00+02:00,23,25.7999992370605,22.5,23,23,43709,0,0
|
||||
2023-05-16 00:00:00+02:00,22.75,24.0499992370605,22.5,22.75,22.75,16068,0,0
|
||||
2023-05-17 00:00:00+02:00,23,23.8500003814697,22.1000003814697,23.6499996185303,23.6499996185303,19926,0,0
|
||||
2023-05-19 00:00:00+02:00,23.6499996185303,23.8500003814697,22.1000003814697,22.2999992370605,22.2999992370605,41050,0,0
|
||||
2023-05-22 00:00:00+02:00,22.0000004768372,24.1499996185303,21.5499997138977,22.7500009536743,22.7500009536743,34022,0,0
|
||||
2023-05-23 00:00:00+02:00,22.75,22.8999996185303,21.75,22.5,22.5,13992,0,0
|
||||
2023-05-24 00:00:00+02:00,21,24,21,22.0100002288818,22.0100002288818,18306,0,0.1
|
||||
2023-05-25 00:00:00+02:00,21.5699996948242,22.8899993896484,20,21.1599998474121,21.1599998474121,35398,0,0
|
||||
2023-05-26 00:00:00+02:00,21.1599998474121,22.4950008392334,20.5,21.0949993133545,21.0949993133545,8039,0,0
|
||||
2023-05-29 00:00:00+02:00,22.1000003814697,22.1000003814697,20.25,20.75,20.75,17786,0,0
|
||||
2023-05-30 00:00:00+02:00,20.75,21.6499996185303,20.1499996185303,20.4500007629395,20.4500007629395,10709,0,0
|
||||
|
17
tests/data/MOB-ST-1d-bad-stock-split.csv
Normal file
17
tests/data/MOB-ST-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,17 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-05-08 00:00:00+02:00,24.899999618530273,24.950000762939453,24.100000381469727,24.75,24.75,7187,0.0,0.0
|
||||
2023-05-09 00:00:00+02:00,25.0,25.5,23.149999618530273,24.149999618530273,24.149999618530273,22753,0.0,0.0
|
||||
2023-05-10 00:00:00+02:00,24.149999618530273,24.149999618530273,22.0,22.950000762939453,22.950000762939453,62727,0.0,0.0
|
||||
2023-05-11 00:00:00+02:00,22.950000762939453,25.0,22.950000762939453,23.350000381469727,23.350000381469727,19550,0.0,0.0
|
||||
2023-05-12 00:00:00+02:00,23.350000381469727,24.0,22.100000381469727,23.850000381469727,23.850000381469727,17143,0.0,0.0
|
||||
2023-05-15 00:00:00+02:00,23.0,25.799999237060547,22.5,23.0,23.0,43709,0.0,0.0
|
||||
2023-05-16 00:00:00+02:00,22.75,24.049999237060547,22.5,22.75,22.75,16068,0.0,0.0
|
||||
2023-05-17 00:00:00+02:00,23.0,23.850000381469727,22.100000381469727,23.649999618530273,23.649999618530273,19926,0.0,0.0
|
||||
2023-05-19 00:00:00+02:00,23.649999618530273,23.850000381469727,22.100000381469727,22.299999237060547,22.299999237060547,41050,0.0,0.0
|
||||
2023-05-22 00:00:00+02:00,2.200000047683716,2.4149999618530273,2.1549999713897705,2.2750000953674316,2.2750000953674316,340215,0.0,0.0
|
||||
2023-05-23 00:00:00+02:00,22.75,22.899999618530273,21.75,22.5,22.5,13992,0.0,0.0
|
||||
2023-05-24 00:00:00+02:00,21.0,24.0,21.0,22.010000228881836,22.010000228881836,18306,0.0,0.1
|
||||
2023-05-25 00:00:00+02:00,21.56999969482422,22.889999389648438,20.0,21.15999984741211,21.15999984741211,35398,0.0,0.0
|
||||
2023-05-26 00:00:00+02:00,21.15999984741211,22.4950008392334,20.5,21.094999313354492,21.094999313354492,8039,0.0,0.0
|
||||
2023-05-29 00:00:00+02:00,22.100000381469727,22.100000381469727,20.25,20.75,20.75,17786,0.0,0.0
|
||||
2023-05-30 00:00:00+02:00,20.75,21.649999618530273,20.149999618530273,20.450000762939453,20.450000762939453,10709,0.0,0.0
|
||||
|
23
tests/data/SPM-MI-1d-bad-stock-split-fixed.csv
Normal file
23
tests/data/SPM-MI-1d-bad-stock-split-fixed.csv
Normal file
@@ -0,0 +1,23 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-06-01 00:00:00+02:00,5.72999992370606,5.78199996948242,5.3939998626709,5.3939998626709,5.3939998626709,3095860,0,0
|
||||
2022-06-02 00:00:00+02:00,5.38600006103516,5.38600006103516,5.26800003051758,5.2939998626709,5.2939998626709,1662880,0,0
|
||||
2022-06-03 00:00:00+02:00,5.34599990844727,5.34599990844727,5.15800018310547,5.16800003051758,5.16800003051758,1698900,0,0
|
||||
2022-06-06 00:00:00+02:00,5.16800003051758,5.25200004577637,5.13800010681152,5.18800010681152,5.18800010681152,1074910,0,0
|
||||
2022-06-07 00:00:00+02:00,5.21800003051758,5.22200012207031,5.07400016784668,5.1560001373291,5.1560001373291,1850680,0,0
|
||||
2022-06-08 00:00:00+02:00,5.1560001373291,5.17599983215332,5.07200012207031,5.10200004577637,5.10200004577637,1140360,0,0
|
||||
2022-06-09 00:00:00+02:00,5.09799995422363,5.09799995422363,4.87599983215332,4.8939998626709,4.8939998626709,2025480,0,0
|
||||
2022-06-10 00:00:00+02:00,4.87999992370606,4.87999992370606,4.50400009155274,4.50400009155274,4.50400009155274,2982730,0,0
|
||||
2022-06-13 00:00:00+02:00,4.3,4.37599983215332,3.83600006103516,3.83600006103516,3.83600006103516,4568210,0,0.1
|
||||
2022-06-14 00:00:00+02:00,3.87750015258789,4.15999984741211,3.85200004577637,3.9439998626709,3.9439998626709,5354500,0,0
|
||||
2022-06-15 00:00:00+02:00,4.03400001525879,4.16450004577637,3.73050003051758,3.73050003051758,3.73050003051758,6662610,0,0
|
||||
2022-06-16 00:00:00+02:00,3.73050003051758,3.98499984741211,3.72400016784668,3.82550010681152,3.82550010681152,13379960,0,0
|
||||
2022-06-17 00:00:00+02:00,3.8,4.29949989318848,3.75,4.29949989318848,4.29949989318848,12844160,0,0
|
||||
2022-06-20 00:00:00+02:00,2.19422197341919,2.2295401096344,2.13992595672607,2.2295401096344,2.2295401096344,12364104,0,0
|
||||
2022-06-21 00:00:00+02:00,2.24719905853272,2.28515291213989,2.19712090492249,2.21557092666626,2.21557092666626,8434013,0,0
|
||||
2022-06-22 00:00:00+02:00,1.98679196834564,2.00365996360779,1.73798203468323,1.73798203468323,1.73798203468323,26496542,0,0
|
||||
2022-06-23 00:00:00+02:00,1.62411904335022,1.68526804447174,1.37320005893707,1.59776198863983,1.59776198863983,48720201,0,0
|
||||
2022-06-24 00:00:00+02:00,1.47599303722382,1.54610300064087,1.1739410161972,1.24932205677032,1.24932205677032,56877192,0,0
|
||||
2022-06-27 00:00:00+02:00,1.49899995326996,1.79849994182587,1.49899995326996,1.79849994182587,1.79849994182587,460673,0,0
|
||||
2022-06-28 00:00:00+02:00,2.15799999237061,3.05100011825562,2.12599992752075,3.05100011825562,3.05100011825562,3058635,0,0
|
||||
2022-06-29 00:00:00+02:00,2.90000009536743,3.73799991607666,2.85899996757507,3.26399993896484,3.26399993896484,6516761,0,0
|
||||
2022-06-30 00:00:00+02:00,3.24900007247925,3.28099989891052,2.5,2.5550000667572,2.5550000667572,4805984,0,0
|
||||
|
23
tests/data/SPM-MI-1d-bad-stock-split.csv
Normal file
23
tests/data/SPM-MI-1d-bad-stock-split.csv
Normal file
@@ -0,0 +1,23 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2022-06-01 00:00:00+02:00,57.29999923706055,57.81999969482422,53.939998626708984,53.939998626708984,53.939998626708984,309586,0.0,0.0
|
||||
2022-06-02 00:00:00+02:00,53.86000061035156,53.86000061035156,52.68000030517578,52.939998626708984,52.939998626708984,166288,0.0,0.0
|
||||
2022-06-03 00:00:00+02:00,53.459999084472656,53.459999084472656,51.58000183105469,51.68000030517578,51.68000030517578,169890,0.0,0.0
|
||||
2022-06-06 00:00:00+02:00,51.68000030517578,52.52000045776367,51.380001068115234,51.880001068115234,51.880001068115234,107491,0.0,0.0
|
||||
2022-06-07 00:00:00+02:00,52.18000030517578,52.220001220703125,50.7400016784668,51.560001373291016,51.560001373291016,185068,0.0,0.0
|
||||
2022-06-08 00:00:00+02:00,51.560001373291016,51.7599983215332,50.720001220703125,51.02000045776367,51.02000045776367,114036,0.0,0.0
|
||||
2022-06-09 00:00:00+02:00,50.97999954223633,50.97999954223633,48.7599983215332,48.939998626708984,48.939998626708984,202548,0.0,0.0
|
||||
2022-06-10 00:00:00+02:00,48.79999923706055,48.79999923706055,45.040000915527344,45.040000915527344,45.040000915527344,298273,0.0,0.0
|
||||
2022-06-13 00:00:00+02:00,43.0,43.7599983215332,38.36000061035156,38.36000061035156,38.36000061035156,456821,0.0,0.1
|
||||
2022-06-14 00:00:00+02:00,38.775001525878906,41.599998474121094,38.52000045776367,39.439998626708984,39.439998626708984,535450,0.0,0.0
|
||||
2022-06-15 00:00:00+02:00,40.34000015258789,41.64500045776367,37.30500030517578,37.30500030517578,37.30500030517578,666261,0.0,0.0
|
||||
2022-06-16 00:00:00+02:00,37.30500030517578,39.849998474121094,37.2400016784668,38.255001068115234,38.255001068115234,1337996,0.0,0.0
|
||||
2022-06-17 00:00:00+02:00,38.0,42.994998931884766,37.5,42.994998931884766,42.994998931884766,1284416,0.0,0.0
|
||||
2022-06-20 00:00:00+02:00,2.1942219734191895,2.2295401096343994,2.139925956726074,2.2295401096343994,2.2295401096343994,12364104,0.0,0.0
|
||||
2022-06-21 00:00:00+02:00,2.247199058532715,2.2851529121398926,2.1971209049224854,2.2155709266662598,2.2155709266662598,8434013,0.0,0.0
|
||||
2022-06-22 00:00:00+02:00,1.986791968345642,2.003659963607788,1.7379820346832275,1.7379820346832275,1.7379820346832275,26496542,0.0,0.0
|
||||
2022-06-23 00:00:00+02:00,1.6241190433502197,1.6852680444717407,1.3732000589370728,1.5977619886398315,1.5977619886398315,48720201,0.0,0.0
|
||||
2022-06-24 00:00:00+02:00,1.475993037223816,1.5461030006408691,1.1739410161972046,1.2493220567703247,1.2493220567703247,56877192,0.0,0.0
|
||||
2022-06-27 00:00:00+02:00,1.4989999532699585,1.7984999418258667,1.4989999532699585,1.7984999418258667,1.7984999418258667,460673,0.0,0.0
|
||||
2022-06-28 00:00:00+02:00,2.1579999923706055,3.0510001182556152,2.125999927520752,3.0510001182556152,3.0510001182556152,3058635,0.0,0.0
|
||||
2022-06-29 00:00:00+02:00,2.9000000953674316,3.73799991607666,2.8589999675750732,3.2639999389648438,3.2639999389648438,6516761,0.0,0.0
|
||||
2022-06-30 00:00:00+02:00,3.249000072479248,3.2809998989105225,2.5,2.555000066757202,2.555000066757202,4805984,0.0,0.0
|
||||
|
30
tests/data/SSW-JO-1d-100x-error-fixed.csv
Normal file
30
tests/data/SSW-JO-1d-100x-error-fixed.csv
Normal file
@@ -0,0 +1,30 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-06-09 00:00:00+02:00,34.7000,34.7100,33.2400,33.6200,33.6200,7148409,0,0
|
||||
2023-06-08 00:00:00+02:00,34.9000,34.9900,34.0400,34.3600,34.3600,10406999,0,0
|
||||
2023-06-07 00:00:00+02:00,34.5500,35.6400,34.3200,35.0900,35.0900,10118918,0,0
|
||||
2023-06-06 00:00:00+02:00,34.5000,34.8200,34.0500,34.4600,34.4600,9109709,0,0
|
||||
2023-06-05 00:00:00+02:00,35.0000,35.3000,34.2000,34.7000,34.7000,8791993,0,0
|
||||
2023-06-02 00:00:00+02:00,35.6900,36.1800,34.6000,34.9700,34.9700,8844549,0,0
|
||||
2023-06-01 00:00:00+02:00,35.2300,35.3800,34.2400,35.3500,35.3500,6721030,0,0
|
||||
2023-05-31 00:00:00+02:00,34.8,35.48,34.26,35.01,35.01,32605833,0,0
|
||||
2023-05-30 00:00:00+02:00,34.39,35.37,33.85,34.23,34.23,8970804,0,0
|
||||
2023-05-29 00:00:00+02:00,34.66,35.06,34.02,34.32,34.32,3912803,0,0
|
||||
2023-05-26 00:00:00+02:00,34.75,35.99,34.33,34.53,34.53,6744718,0,0
|
||||
2023-05-25 00:00:00+02:00,35.4,36.09,34.63,35.07,35.07,16900221,0,0
|
||||
2023-05-24 00:00:00+02:00,36.2,36.5,35.26,35.4,35.4,9049505,0,0
|
||||
2023-05-23 00:00:00+02:00,36.9,36.67,35.56,36.1,36.1,10797373,0,0
|
||||
2023-05-22 00:00:00+02:00,37.05,37.36,36.09,36.61,36.61,7132641,0,0
|
||||
2023-05-19 00:00:00+02:00,36.2,37.15,36.25,36.9,36.9,12648518,0,0
|
||||
2023-05-18 00:00:00+02:00,36.57,36.99,35.84,36.46,36.46,10674542,0,0
|
||||
2023-05-17 00:00:00+02:00,36.87,37.31,36.56,36.71,36.71,9892791,0,0
|
||||
2023-05-16 00:00:00+02:00,37.15,37.73,36.96,37.03,37.03,4706789,0,0
|
||||
2023-05-15 00:00:00+02:00,37.74,38.05,36.96,37.27,37.27,7890969,0,0
|
||||
2023-05-12 00:00:00+02:00,37.5,38.44,36.71,37.74,37.74,8724303,0,0
|
||||
2023-05-11 00:00:00+02:00,38.8,38.88,37.01,37.32,37.32,14371855,0,0
|
||||
2023-05-10 00:00:00+02:00,38.93,38.8,36.42,38.1,38.1,30393389,0,0
|
||||
2023-05-09 00:00:00+02:00,44.41,44.41,39.39,39.66,39.66,19833428,0,0
|
||||
2023-05-08 00:00:00+02:00,44.63,45.78,44.56,44.71,44.71,11092519,0,0
|
||||
2023-05-05 00:00:00+02:00,42.99,44.9,42.87,44.58,44.58,28539048,0,0
|
||||
2023-05-04 00:00:00+02:00,41.49,43.3,41.23,42.83,42.83,15506868,0,0
|
||||
2023-05-03 00:00:00+02:00,39.75,40.98,39.68,40.95,40.95,14657028,0,0
|
||||
2023-05-02 00:00:00+02:00,40.37,40.32,39.17,39.65,39.65,11818133,0,0
|
||||
|
30
tests/data/SSW-JO-1d-100x-error.csv
Normal file
30
tests/data/SSW-JO-1d-100x-error.csv
Normal file
@@ -0,0 +1,30 @@
|
||||
Date,Open,High,Low,Close,Adj Close,Volume,Dividends,Stock Splits
|
||||
2023-06-09 00:00:00+02:00,34.700001,34.709999,33.240002,33.619999,33.619999,7148409,0,0
|
||||
2023-06-08 00:00:00+02:00,34.900002,34.990002,34.040001,34.360001,34.360001,10406999,0,0
|
||||
2023-06-07 00:00:00+02:00,34.549999,35.639999,34.320000,35.090000,35.090000,10118918,0,0
|
||||
2023-06-06 00:00:00+02:00,34.500000,34.820000,34.049999,34.459999,34.459999,9109709,0,0
|
||||
2023-06-05 00:00:00+02:00,35.000000,35.299999,34.200001,34.700001,34.700001,8791993,0,0
|
||||
2023-06-02 00:00:00+02:00,35.689999,36.180000,34.599998,34.970001,34.970001,8844549,0,0
|
||||
2023-06-01 00:00:00+02:00,35.230000,35.380001,34.240002,35.349998,35.349998,6721030,0,0
|
||||
2023-05-31 00:00:00+02:00,3480,3548,3426,3501,3501,32605833,0,0
|
||||
2023-05-30 00:00:00+02:00,3439,3537,3385,3423,3423,8970804,0,0
|
||||
2023-05-29 00:00:00+02:00,3466,3506,3402,3432,3432,3912803,0,0
|
||||
2023-05-26 00:00:00+02:00,3475,3599,3433,3453,3453,6744718,0,0
|
||||
2023-05-25 00:00:00+02:00,3540,3609,3463,3507,3507,16900221,0,0
|
||||
2023-05-24 00:00:00+02:00,3620,3650,3526,3540,3540,9049505,0,0
|
||||
2023-05-23 00:00:00+02:00,3690,3667,3556,3610,3610,10797373,0,0
|
||||
2023-05-22 00:00:00+02:00,3705,3736,3609,3661,3661,7132641,0,0
|
||||
2023-05-19 00:00:00+02:00,3620,3715,3625,3690,3690,12648518,0,0
|
||||
2023-05-18 00:00:00+02:00,3657,3699,3584,3646,3646,10674542,0,0
|
||||
2023-05-17 00:00:00+02:00,3687,3731,3656,3671,3671,9892791,0,0
|
||||
2023-05-16 00:00:00+02:00,3715,3773,3696,3703,3703,4706789,0,0
|
||||
2023-05-15 00:00:00+02:00,3774,3805,3696,3727,3727,7890969,0,0
|
||||
2023-05-12 00:00:00+02:00,3750,3844,3671,3774,3774,8724303,0,0
|
||||
2023-05-11 00:00:00+02:00,3880,3888,3701,3732,3732,14371855,0,0
|
||||
2023-05-10 00:00:00+02:00,3893,3880,3642,3810,3810,30393389,0,0
|
||||
2023-05-09 00:00:00+02:00,4441,4441,3939,3966,3966,19833428,0,0
|
||||
2023-05-08 00:00:00+02:00,4463,4578,4456,4471,4471,11092519,0,0
|
||||
2023-05-05 00:00:00+02:00,4299,4490,4287,4458,4458,28539048,0,0
|
||||
2023-05-04 00:00:00+02:00,4149,4330,4123,4283,4283,15506868,0,0
|
||||
2023-05-03 00:00:00+02:00,3975,4098,3968,4095,4095,14657028,0,0
|
||||
2023-05-02 00:00:00+02:00,4037,4032,3917,3965,3965,11818133,0,0
|
||||
|
1042
tests/prices.py
Normal file
1042
tests/prices.py
Normal file
File diff suppressed because it is too large
Load Diff
937
tests/ticker.py
Normal file
937
tests/ticker.py
Normal file
@@ -0,0 +1,937 @@
|
||||
"""
|
||||
Tests for Ticker
|
||||
|
||||
To run all tests in suite from commandline:
|
||||
python -m unittest tests.ticker
|
||||
|
||||
Specific test class:
|
||||
python -m unittest tests.ticker.TestTicker
|
||||
|
||||
"""
|
||||
import pandas as pd
|
||||
|
||||
from .context import yfinance as yf
|
||||
from .context import session_gbl
|
||||
from yfinance.exceptions import YFNotImplementedError
|
||||
|
||||
|
||||
import unittest
|
||||
import requests_cache
|
||||
from typing import Union, Any
|
||||
import re
|
||||
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
|
||||
|
||||
# (attribute_name, expected_type) pairs consumed by assert_attribute_type().
# `Union[...]` entries accept either member type; `Any` skips the type check
# entirely (only non-None-ness of access is exercised).
ticker_attributes = (
    ("major_holders", pd.DataFrame),
    ("institutional_holders", pd.DataFrame),
    ("mutualfund_holders", pd.DataFrame),
    ("splits", pd.Series),
    ("actions", pd.DataFrame),
    ("shares", pd.DataFrame),
    ("info", dict),
    ("calendar", pd.DataFrame),
    ("recommendations", Union[pd.DataFrame, dict]),
    ("earnings", pd.DataFrame),
    ("quarterly_earnings", pd.DataFrame),
    ("recommendations_summary", Union[pd.DataFrame, dict]),
    ("quarterly_cashflow", pd.DataFrame),
    ("cashflow", pd.DataFrame),
    ("quarterly_balance_sheet", pd.DataFrame),
    ("balance_sheet", pd.DataFrame),
    ("quarterly_income_stmt", pd.DataFrame),
    ("income_stmt", pd.DataFrame),
    ("analyst_price_target", pd.DataFrame),
    ("revenue_forecasts", pd.DataFrame),
    ("sustainability", pd.DataFrame),
    ("options", tuple),
    ("news", Any),
    ("earnings_trend", pd.DataFrame),
    ("earnings_dates", pd.DataFrame),
    ("earnings_forecasts", pd.DataFrame),
)
|
||||
|
||||
def assert_attribute_type(testClass: unittest.TestCase, instance, attribute_name, expected_type):
    """Assert that ``instance.<attribute_name>`` is of ``expected_type``.

    None values and ``Any`` expectations skip the type check.  If access (or
    the check) raises, the attribute is re-fetched under ``assertRaises`` so
    the failure is only accepted when it is a YFNotImplementedError.
    """
    try:
        attr_value = getattr(instance, attribute_name)
        check_needed = attr_value is not None and expected_type is not Any
        if check_needed:
            testClass.assertEqual(type(attr_value), expected_type)
    except Exception:
        testClass.assertRaises(
            YFNotImplementedError, lambda: getattr(instance, attribute_name)
        )
|
||||
|
||||
class TestTicker(unittest.TestCase):
    """Smoke tests: Ticker survives bad symbols and serves the full API."""
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

        cls.proxy = None

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def test_getTz(self):
        """Timezone fetch works for a spread of exchanges."""
        tkrs = ["IMP.JO", "BHG.JO", "SSW.JO", "BP.L", "INTC"]
        for tkr in tkrs:
            # First step: remove ticker from tz-cache
            yf.cache.get_tz_cache().store(tkr, None)

            # Test:
            dat = yf.Ticker(tkr, session=self.session)
            tz = dat._get_ticker_tz(proxy=None, timeout=None)

            self.assertIsNotNone(tz)

    def test_badTicker(self):
        # Check yfinance doesn't die when ticker delisted

        tkr = "DJI"  # typo of "^DJI"
        dat = yf.Ticker(tkr, session=self.session)

        dat.history(period="1wk")
        dat.history(start="2022-01-01")
        dat.history(start="2022-01-01", end="2022-03-01")
        yf.download([tkr], period="1wk", threads=False, ignore_tz=False)
        yf.download([tkr], period="1wk", threads=True, ignore_tz=False)
        yf.download([tkr], period="1wk", threads=False, ignore_tz=True)
        yf.download([tkr], period="1wk", threads=True, ignore_tz=True)

        for k in dat.fast_info:
            dat.fast_info[k]

        for attribute_name, attribute_type in ticker_attributes:
            assert_attribute_type(self, dat, attribute_name, attribute_type)

    def test_goodTicker(self):
        # that yfinance works when full api is called on same instance of ticker

        tkrs = ["IBM"]
        tkrs.append("QCSTIX")  # weird ticker, no price history but has previous close
        for tkr in tkrs:
            dat = yf.Ticker(tkr, session=self.session)

            dat.history(period="1wk")
            dat.history(start="2022-01-01")
            dat.history(start="2022-01-01", end="2022-03-01")
            yf.download([tkr], period="1wk", threads=False, ignore_tz=False)
            yf.download([tkr], period="1wk", threads=True, ignore_tz=False)
            yf.download([tkr], period="1wk", threads=False, ignore_tz=True)
            yf.download([tkr], period="1wk", threads=True, ignore_tz=True)

            for k in dat.fast_info:
                dat.fast_info[k]

            for attribute_name, attribute_type in ticker_attributes:
                assert_attribute_type(self, dat, attribute_name, attribute_type)

    #TODO:: Refactor with `assert_attribute` once proxy is accepted as a parameter of `Ticker`
    def test_goodTicker_withProxy(self):
        # that yfinance works when full api is called on same instance of ticker

        tkr = "IBM"
        dat = yf.Ticker(tkr, session=self.session)

        dat._fetch_ticker_tz(proxy=self.proxy, timeout=5)
        dat._get_ticker_tz(proxy=self.proxy, timeout=5)
        dat.history(period="1wk", proxy=self.proxy)

        v = dat.get_major_holders(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_institutional_holders(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_mutualfund_holders(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_info(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertTrue(len(v) > 0)

        v = dat.get_income_stmt(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_incomestmt(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_financials(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_balance_sheet(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_balancesheet(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_cash_flow(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_cashflow(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_shares_full(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        v = dat.get_isin(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertTrue(v != "")

        v = dat.get_news(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertTrue(len(v) > 0)

        v = dat.get_earnings_dates(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertFalse(v.empty)

        # FIX: assign the result — previously the return value was discarded,
        # so the two assertions below re-checked the stale earnings_dates `v`.
        v = dat.get_history_metadata(proxy=self.proxy)
        self.assertIsNotNone(v)
        self.assertTrue(len(v) > 0)
|
||||
|
||||
# Below will fail because not ported to Yahoo API
|
||||
|
||||
# v = dat.stats(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertTrue(len(v) > 0)
|
||||
|
||||
# v = dat.get_recommendations(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_calendar(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_sustainability(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_recommendations_summary(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_analyst_price_target(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_rev_forecast(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_earnings_forecast(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_trend_details(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_earnings_trend(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_earnings(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
# v = dat.get_shares(proxy=self.proxy)
|
||||
# self.assertIsNotNone(v)
|
||||
# self.assertFalse(v.empty)
|
||||
|
||||
|
||||
class TestTickerHistory(unittest.TestCase):
    """Price-history behaviour: history(), download(), dividends/splits/actions."""
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        # use a ticker that has dividends
        self.symbol = "IBM"
        self.ticker = yf.Ticker(self.symbol, session=self.session)

        self.symbols = ["AMZN", "MSFT", "NVDA"]

    def tearDown(self):
        self.ticker = None

    def test_history(self):
        """history() returns a non-empty DataFrame and metadata names the symbol."""
        md = self.ticker.history_metadata
        self.assertIn("IBM", md.values(), "metadata missing")
        prices = self.ticker.history("1y")
        self.assertIsInstance(prices, pd.DataFrame, "data has wrong type")
        self.assertFalse(prices.empty, "data is empty")

    def test_download(self):
        """Multi-symbol download works across all threads/ignore_tz combinations."""
        for use_threads in (False, True):
            for strip_tz in (False, True):
                frame = yf.download(self.symbols, threads=use_threads, ignore_tz=strip_tz)
                self.assertIsInstance(frame, pd.DataFrame, "data has wrong type")
                self.assertFalse(frame.empty, "data is empty")

    def test_no_expensive_calls_introduced(self):
        """
        Make sure calling history to get price data has not introduced more calls to yahoo than absolutely necessary.
        As doing other type of scraping calls than "query2.finance.yahoo.com/v8/finance/chart" to yahoo website
        will quickly trigger spam-block when doing bulk download of history data.
        """
        symbol = "GOOGL"
        period = "1y"
        with requests_cache.CachedSession(backend="memory") as session:
            ticker = yf.Ticker(symbol, session=session)
            ticker.history(period=period)
            actual_urls_called = [r.url for r in session.cache.filter()]

        # Remove 'crumb' argument
        def _strip_auth_params(url):
            parsed_url = urlparse(url)
            query_params = parse_qs(parsed_url.query)
            query_params.pop('crumb', None)
            query_params.pop('cookie', None)
            return urlunparse(parsed_url._replace(query=urlencode(query_params, doseq=True)))

        actual_urls_called = tuple(_strip_auth_params(u) for u in actual_urls_called)

        expected_urls = (
            f"https://query2.finance.yahoo.com/v8/finance/chart/{symbol}?events=div%2Csplits%2CcapitalGains&includePrePost=False&interval=1d&range={period}",
        )
        self.assertEqual(
            expected_urls,
            actual_urls_called,
            "Different than expected url used to fetch history."
        )

    def test_dividends(self):
        dividends = self.ticker.dividends
        self.assertIsInstance(dividends, pd.Series, "data has wrong type")
        self.assertFalse(dividends.empty, "data is empty")

    def test_splits(self):
        splits = self.ticker.splits
        self.assertIsInstance(splits, pd.Series, "data has wrong type")
        # self.assertFalse(data.empty, "data is empty")

    def test_actions(self):
        actions = self.ticker.actions
        self.assertIsInstance(actions, pd.DataFrame, "data has wrong type")
        self.assertFalse(actions.empty, "data is empty")
|
||||
|
||||
|
||||
# Below will fail because not ported to Yahoo API
|
||||
# class TestTickerEarnings(unittest.TestCase):
|
||||
# session = None
|
||||
|
||||
# @classmethod
|
||||
# def setUpClass(cls):
|
||||
# cls.session = session_gbl
|
||||
|
||||
# @classmethod
|
||||
# def tearDownClass(cls):
|
||||
# if cls.session is not None:
|
||||
# cls.session.close()
|
||||
|
||||
# def setUp(self):
|
||||
# self.ticker = yf.Ticker("GOOGL", session=self.session)
|
||||
|
||||
# def tearDown(self):
|
||||
# self.ticker = None
|
||||
|
||||
# def test_earnings(self):
|
||||
# data = self.ticker.earnings
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.earnings
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_quarterly_earnings(self):
|
||||
# data = self.ticker.quarterly_earnings
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.quarterly_earnings
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_earnings_forecasts(self):
|
||||
# data = self.ticker.earnings_forecasts
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.earnings_forecasts
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_earnings_dates(self):
|
||||
# data = self.ticker.earnings_dates
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.earnings_dates
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_earnings_trend(self):
|
||||
# data = self.ticker.earnings_trend
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.earnings_trend
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_earnings_dates_with_limit(self):
|
||||
# # use ticker with lots of historic earnings
|
||||
# ticker = yf.Ticker("IBM")
|
||||
# limit = 110
|
||||
# data = ticker.get_earnings_dates(limit=limit)
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
# self.assertEqual(len(data), limit, "Wrong number or rows")
|
||||
|
||||
# data_cached = ticker.get_earnings_dates(limit=limit)
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
|
||||
class TestTickerHolders(unittest.TestCase):
    """Holder tables (major / institutional / mutual-fund) fetch and cache."""
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

    def tearDown(self):
        self.ticker = None

    def _check_holders_table(self, attribute_name):
        """Fetch the table twice: it must be a non-empty DataFrame and cached."""
        table = getattr(self.ticker, attribute_name)
        self.assertIsInstance(table, pd.DataFrame, "data has wrong type")
        self.assertFalse(table.empty, "data is empty")

        table_again = getattr(self.ticker, attribute_name)
        self.assertIs(table, table_again, "data not cached")

    def test_major_holders(self):
        self._check_holders_table("major_holders")

    def test_institutional_holders(self):
        self._check_holders_table("institutional_holders")

    def test_mutualfund_holders(self):
        self._check_holders_table("mutualfund_holders")
|
||||
|
||||
|
||||
class TestTickerMiscFinancials(unittest.TestCase):
    """Financial statements (income/balance/cashflow), ISIN, options, shares."""
    session = None

    @classmethod
    def setUpClass(cls):
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        self.ticker = yf.Ticker("GOOGL", session=self.session)

        # For ticker 'BSE.AX' (and others), Yahoo not returning
        # full quarterly financials (usually cash-flow) with all entries,
        # instead returns a smaller version in different data store.
        self.ticker_old_fmt = yf.Ticker("BSE.AX", session=self.session)

    def tearDown(self):
        self.ticker = None

    def test_isin(self):
        data = self.ticker.isin
        self.assertIsInstance(data, str, "data has wrong type")
        self.assertEqual("ARDEUT116159", data, "data is empty")

        data_cached = self.ticker.isin
        self.assertIs(data, data_cached, "data not cached")

    def test_options(self):
        data = self.ticker.options
        self.assertIsInstance(data, tuple, "data has wrong type")
        self.assertTrue(len(data) > 1, "data is empty")

    def test_shares_full(self):
        data = self.ticker.get_shares_full()
        self.assertIsInstance(data, pd.Series, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")

    def test_income_statement(self):
        expected_keys = ["Total Revenue", "Basic EPS"]
        expected_periods_days = 365

        # Test contents of table
        data = self.ticker.get_income_stmt(pretty=True)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

        # Test property defaults
        data2 = self.ticker.income_stmt
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_income_stmt(pretty=False)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_income_stmt(as_dict=True)
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_quarterly_income_statement(self):
        expected_keys = ["Total Revenue", "Basic EPS"]
        expected_periods_days = 365//4

        # Test contents of table
        data = self.ticker.get_income_stmt(pretty=True, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

        # Test property defaults
        data2 = self.ticker.quarterly_income_stmt
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_income_stmt(pretty=False, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        # FIX: pass freq="quarterly" so this exercises the quarterly table
        # (was silently re-testing the annual statement).
        data = self.ticker.get_income_stmt(as_dict=True, freq="quarterly")
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_balance_sheet(self):
        expected_keys = ["Total Assets", "Net PPE"]
        expected_periods_days = 365

        # Test contents of table
        data = self.ticker.get_balance_sheet(pretty=True)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

        # Test property defaults
        data2 = self.ticker.balance_sheet
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_balance_sheet(pretty=False)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_balance_sheet(as_dict=True)
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_quarterly_balance_sheet(self):
        expected_keys = ["Total Assets", "Net PPE"]
        expected_periods_days = 365//4

        # Test contents of table
        data = self.ticker.get_balance_sheet(pretty=True, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

        # Test property defaults
        data2 = self.ticker.quarterly_balance_sheet
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_balance_sheet(pretty=False, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_balance_sheet(as_dict=True, freq="quarterly")
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_cash_flow(self):
        expected_keys = ["Operating Cash Flow", "Net PPE Purchase And Sale"]
        expected_periods_days = 365

        # Test contents of table
        data = self.ticker.get_cashflow(pretty=True)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning annual financials")

        # Test property defaults
        data2 = self.ticker.cashflow
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_cashflow(pretty=False)
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        data = self.ticker.get_cashflow(as_dict=True)
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_quarterly_cash_flow(self):
        expected_keys = ["Operating Cash Flow", "Net PPE Purchase And Sale"]
        expected_periods_days = 365//4

        # Test contents of table
        data = self.ticker.get_cashflow(pretty=True, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")
        period = abs((data.columns[0]-data.columns[1]).days)
        self.assertLess(abs(period-expected_periods_days), 20, "Not returning quarterly financials")

        # Test property defaults
        data2 = self.ticker.quarterly_cashflow
        self.assertTrue(data.equals(data2), "property not defaulting to 'pretty=True'")

        # Test pretty=False
        expected_keys = [k.replace(' ', '') for k in expected_keys]
        data = self.ticker.get_cashflow(pretty=False, freq="quarterly")
        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
        self.assertFalse(data.empty, "data is empty")
        for k in expected_keys:
            self.assertIn(k, data.index, "Did not find expected row in index")

        # Test to_dict
        # FIX: pass freq="quarterly" for consistency with the other quarterly
        # tests (was silently re-testing the annual cash flow).
        data = self.ticker.get_cashflow(as_dict=True, freq="quarterly")
        self.assertIsInstance(data, dict, "data has wrong type")

    def test_income_alt_names(self):
        i1 = self.ticker.income_stmt
        i2 = self.ticker.incomestmt
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.financials
        self.assertTrue(i1.equals(i3))

        i1 = self.ticker.get_income_stmt()
        i2 = self.ticker.get_incomestmt()
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.get_financials()
        self.assertTrue(i1.equals(i3))

        i1 = self.ticker.quarterly_income_stmt
        i2 = self.ticker.quarterly_incomestmt
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.quarterly_financials
        self.assertTrue(i1.equals(i3))

        i1 = self.ticker.get_income_stmt(freq="quarterly")
        i2 = self.ticker.get_incomestmt(freq="quarterly")
        self.assertTrue(i1.equals(i2))
        i3 = self.ticker.get_financials(freq="quarterly")
        self.assertTrue(i1.equals(i3))

    def test_balance_sheet_alt_names(self):
        i1 = self.ticker.balance_sheet
        i2 = self.ticker.balancesheet
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_balance_sheet()
        i2 = self.ticker.get_balancesheet()
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.quarterly_balance_sheet
        i2 = self.ticker.quarterly_balancesheet
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_balance_sheet(freq="quarterly")
        i2 = self.ticker.get_balancesheet(freq="quarterly")
        self.assertTrue(i1.equals(i2))

    def test_cash_flow_alt_names(self):
        i1 = self.ticker.cash_flow
        i2 = self.ticker.cashflow
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_cash_flow()
        i2 = self.ticker.get_cashflow()
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.quarterly_cash_flow
        i2 = self.ticker.quarterly_cashflow
        self.assertTrue(i1.equals(i2))

        i1 = self.ticker.get_cash_flow(freq="quarterly")
        i2 = self.ticker.get_cashflow(freq="quarterly")
        self.assertTrue(i1.equals(i2))

    def test_bad_freq_value_raises_exception(self):
        self.assertRaises(ValueError, lambda: self.ticker.get_cashflow(freq="badarg"))
|
||||
|
||||
# Below will fail because not ported to Yahoo API
|
||||
|
||||
# def test_sustainability(self):
|
||||
# data = self.ticker.sustainability
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.sustainability
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_recommendations(self):
|
||||
# data = self.ticker.recommendations
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.recommendations
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_recommendations_summary(self):
|
||||
# data = self.ticker.recommendations_summary
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.recommendations_summary
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_analyst_price_target(self):
|
||||
# data = self.ticker.analyst_price_target
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.analyst_price_target
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_revenue_forecasts(self):
|
||||
# data = self.ticker.revenue_forecasts
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.revenue_forecasts
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_calendar(self):
|
||||
# data = self.ticker.calendar
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
# data_cached = self.ticker.calendar
|
||||
# self.assertIs(data, data_cached, "data not cached")
|
||||
|
||||
# def test_shares(self):
|
||||
# data = self.ticker.shares
|
||||
# self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
|
||||
# self.assertFalse(data.empty, "data is empty")
|
||||
|
||||
|
||||
class TestTickerInfo(unittest.TestCase):
    """Tests for Ticker.info and Ticker.fast_info across a spread of symbol types."""
    session = None

    @classmethod
    def setUpClass(cls):
        # Share one HTTP session across all tests to reduce network overhead.
        cls.session = session_gbl

    @classmethod
    def tearDownClass(cls):
        if cls.session is not None:
            cls.session.close()

    def setUp(self):
        # Mix of exchanges and asset types to exercise different Yahoo responses.
        self.symbols = []
        self.symbols += ["ESLT.TA", "BP.L", "GOOGL"]
        self.symbols.append("QCSTIX")  # good for testing, doesn't trade
        self.symbols += ["BTC-USD", "IWO", "VFINX", "^GSPC"]
        self.symbols += ["SOKE.IS", "ADS.DE"]  # detected bugs
        self.tickers = [yf.Ticker(s, session=self.session) for s in self.symbols]

    def tearDown(self):
        self.ticker = None

    def test_fast_info(self):
        # Every fast_info key must resolve to a real value for a liquid ticker.
        f = yf.Ticker("AAPL", session=self.session).fast_info
        for k in f:
            self.assertIsNotNone(f[k])

    def test_info(self):
        data = self.tickers[0].info
        self.assertIsInstance(data, dict, "data has wrong type")
        expected_keys = ['industry', 'currentPrice', 'exchange', 'floatShares', 'companyOfficers', 'bid']
        for k in expected_keys:
            print(k)
            # Bug fix: previously asserted the literal key "symbol" on every
            # iteration instead of the key under test, so missing expected
            # keys were never detected.
            self.assertIn(k, data.keys(), f"Did not find expected key '{k}' in info dict")
        self.assertIn("symbol", data.keys(), "Did not find expected key 'symbol' in info dict")
        self.assertEqual(self.symbols[0], data["symbol"], "Wrong symbol value in info dict")
|
||||
|
||||
# def test_fast_info_matches_info(self):
|
||||
# fast_info_keys = set()
|
||||
# for ticker in self.tickers:
|
||||
# fast_info_keys.update(set(ticker.fast_info.keys()))
|
||||
# fast_info_keys = sorted(list(fast_info_keys))
|
||||
|
||||
# key_rename_map = {}
|
||||
# key_rename_map["currency"] = "currency"
|
||||
# key_rename_map["quote_type"] = "quoteType"
|
||||
# key_rename_map["timezone"] = "exchangeTimezoneName"
|
||||
|
||||
# key_rename_map["last_price"] = ["currentPrice", "regularMarketPrice"]
|
||||
# key_rename_map["open"] = ["open", "regularMarketOpen"]
|
||||
# key_rename_map["day_high"] = ["dayHigh", "regularMarketDayHigh"]
|
||||
# key_rename_map["day_low"] = ["dayLow", "regularMarketDayLow"]
|
||||
# key_rename_map["previous_close"] = ["previousClose"]
|
||||
# key_rename_map["regular_market_previous_close"] = ["regularMarketPreviousClose"]
|
||||
|
||||
# key_rename_map["fifty_day_average"] = "fiftyDayAverage"
|
||||
# key_rename_map["two_hundred_day_average"] = "twoHundredDayAverage"
|
||||
# key_rename_map["year_change"] = ["52WeekChange", "fiftyTwoWeekChange"]
|
||||
# key_rename_map["year_high"] = "fiftyTwoWeekHigh"
|
||||
# key_rename_map["year_low"] = "fiftyTwoWeekLow"
|
||||
|
||||
# key_rename_map["last_volume"] = ["volume", "regularMarketVolume"]
|
||||
# key_rename_map["ten_day_average_volume"] = ["averageVolume10days", "averageDailyVolume10Day"]
|
||||
# key_rename_map["three_month_average_volume"] = "averageVolume"
|
||||
|
||||
# key_rename_map["market_cap"] = "marketCap"
|
||||
# key_rename_map["shares"] = "sharesOutstanding"
|
||||
|
||||
# for k in list(key_rename_map.keys()):
|
||||
# if '_' in k:
|
||||
# key_rename_map[yf.utils.snake_case_2_camelCase(k)] = key_rename_map[k]
|
||||
|
||||
# # Note: share count items in info[] are bad. Sometimes the float > outstanding!
|
||||
# # So often fast_info["shares"] does not match.
|
||||
# # Why isn't fast_info["shares"] wrong? Because using it to calculate market cap always correct.
|
||||
# bad_keys = {"shares"}
|
||||
|
||||
# # Loose tolerance for averages, no idea why don't match info[]. Is info wrong?
|
||||
# custom_tolerances = {}
|
||||
# custom_tolerances["year_change"] = 1.0
|
||||
# # custom_tolerances["ten_day_average_volume"] = 1e-3
|
||||
# custom_tolerances["ten_day_average_volume"] = 1e-1
|
||||
# # custom_tolerances["three_month_average_volume"] = 1e-2
|
||||
# custom_tolerances["three_month_average_volume"] = 5e-1
|
||||
# custom_tolerances["fifty_day_average"] = 1e-2
|
||||
# custom_tolerances["two_hundred_day_average"] = 1e-2
|
||||
# for k in list(custom_tolerances.keys()):
|
||||
# if '_' in k:
|
||||
# custom_tolerances[yf.utils.snake_case_2_camelCase(k)] = custom_tolerances[k]
|
||||
|
||||
# for k in fast_info_keys:
|
||||
# if k in key_rename_map:
|
||||
# k2 = key_rename_map[k]
|
||||
# else:
|
||||
# k2 = k
|
||||
|
||||
# if not isinstance(k2, list):
|
||||
# k2 = [k2]
|
||||
|
||||
# for m in k2:
|
||||
# for ticker in self.tickers:
|
||||
# if not m in ticker.info:
|
||||
# # print(f"symbol={ticker.ticker}: fast_info key '{k}' mapped to info key '{m}' but not present in info")
|
||||
# continue
|
||||
|
||||
# if k in bad_keys:
|
||||
# continue
|
||||
|
||||
# if k in custom_tolerances:
|
||||
# rtol = custom_tolerances[k]
|
||||
# else:
|
||||
# rtol = 5e-3
|
||||
# # rtol = 1e-4
|
||||
|
||||
# correct = ticker.info[m]
|
||||
# test = ticker.fast_info[k]
|
||||
# # print(f"Testing: symbol={ticker.ticker} m={m} k={k}: test={test} vs correct={correct}")
|
||||
# if k in ["market_cap","marketCap"] and ticker.fast_info["currency"] in ["GBp", "ILA"]:
|
||||
# # Adjust for currency to match Yahoo:
|
||||
# test *= 0.01
|
||||
# try:
|
||||
# if correct is None:
|
||||
# self.assertTrue(test is None or (not np.isnan(test)), f"{k}: {test} must be None or real value because correct={correct}")
|
||||
# elif isinstance(test, float) or isinstance(correct, int):
|
||||
# self.assertTrue(np.isclose(test, correct, rtol=rtol), f"{ticker.ticker} {k}: {test} != {correct}")
|
||||
# else:
|
||||
# self.assertEqual(test, correct, f"{k}: {test} != {correct}")
|
||||
# except:
|
||||
# if k in ["regularMarketPreviousClose"] and ticker.ticker in ["ADS.DE"]:
|
||||
# # Yahoo is wrong, is returning post-market close not regular
|
||||
# continue
|
||||
# else:
|
||||
# raise
|
||||
|
||||
|
||||
|
||||
def suite():
    """Assemble the full ticker test suite in a fixed, explicit order."""
    ts = unittest.TestSuite()
    cases = (
        (TestTicker, 'Test ticker'),
        (TestTickerEarnings, 'Test earnings'),
        (TestTickerHolders, 'Test holders'),
        (TestTickerHistory, 'Test Ticker history'),
        (TestTickerMiscFinancials, 'Test misc financials'),
        (TestTickerInfo, 'Test info & fast_info'),
    )
    for case_cls, label in cases:
        ts.addTest(case_cls(label))
    return ts
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
91
tests/utils.py
Normal file
91
tests/utils.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""
|
||||
Tests for utils
|
||||
|
||||
To run all tests in suite from commandline:
|
||||
python -m unittest tests.utils
|
||||
|
||||
Specific test class:
|
||||
python -m unittest tests.utils.TestTicker
|
||||
|
||||
"""
|
||||
# import pandas as pd
|
||||
# import numpy as np
|
||||
|
||||
from .context import yfinance as yf
|
||||
from .context import session_gbl
|
||||
|
||||
import unittest
|
||||
# import requests_cache
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
|
||||
class TestCache(unittest.TestCase):
    """Verify the timezone cache works when pointed at a fresh writable folder."""

    @classmethod
    def setUpClass(cls):
        cls.tempCacheDir = tempfile.TemporaryDirectory()
        yf.set_tz_cache_location(cls.tempCacheDir.name)

    @classmethod
    def tearDownClass(cls):
        cls.tempCacheDir.cleanup()

    def test_storeTzNoRaise(self):
        # Storing a timezone, then overwriting it with a different value,
        # must never raise an exception.
        ticker_symbol = 'AMZN'
        cache = yf.utils.get_tz_cache()
        cache.store(ticker_symbol, "America/New_York")
        cache.store(ticker_symbol, "London/Europe")

    def test_setTzCacheLocation(self):
        # The cache manager must report the custom folder we configured.
        self.assertEqual(yf.utils._DBManager.get_location(), self.tempCacheDir.name)

        cache = yf.utils.get_tz_cache()
        cache.store('AMZN', "America/New_York")

        # The sqlite DB file should have been created inside the custom folder.
        self.assertTrue(os.path.exists(os.path.join(self.tempCacheDir.name, "tkr-tz.db")))
|
||||
|
||||
|
||||
class TestCacheNoPermission(unittest.TestCase):
    """Verify the cache degrades to a no-op dummy when its folder is unwritable."""

    @classmethod
    def setUpClass(cls):
        # /root is not writable by the unprivileged test user.
        yf.set_tz_cache_location("/root/yf-cache")

    def test_tzCacheRootStore(self):
        # Test that if cache path in read-only filesystem, no exception.
        tkr, tz = 'AMZN', "America/New_York"

        # During attempt to store, will discover cannot write
        yf.utils.get_tz_cache().store(tkr, tz)

        # Handling the store failure replaces cache with a dummy
        cache = yf.utils.get_tz_cache()
        self.assertTrue(cache.dummy)
        cache.store(tkr, tz)

    def test_tzCacheRootLookup(self):
        # Test that if cache path in read-only filesystem, no exception.
        tkr = 'AMZN'

        # During attempt to lookup, will discover cannot write
        yf.utils.get_tz_cache().lookup(tkr)

        # Handling the lookup failure replaces cache with a dummy
        cache = yf.utils.get_tz_cache()
        self.assertTrue(cache.dummy)
        cache.lookup(tkr)
|
||||
|
||||
|
||||
def suite():
    """Assemble the cache test suite."""
    ts = unittest.TestSuite()
    for case_cls, label in ((TestCache, 'Test cache'),
                            (TestCacheNoPermission, 'Test cache no permission')):
        ts.addTest(case_cls(label))
    return ts
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -23,6 +23,8 @@ from . import version
|
||||
from .ticker import Ticker
|
||||
from .tickers import Tickers
|
||||
from .multi import download
|
||||
from .utils import enable_debug_mode
|
||||
from .cache import set_tz_cache_location
|
||||
|
||||
__version__ = version.version
|
||||
__author__ = "Ran Aroussi"
|
||||
@@ -42,4 +44,4 @@ def pdr_override():
|
||||
pass
|
||||
|
||||
|
||||
__all__ = ['download', 'Ticker', 'Tickers', 'pdr_override']
|
||||
__all__ = ['download', 'Ticker', 'Tickers', 'pdr_override', 'enable_debug_mode', 'set_tz_cache_location']
|
||||
|
||||
2337
yfinance/base.py
2337
yfinance/base.py
File diff suppressed because it is too large
Load Diff
400
yfinance/cache.py
Normal file
400
yfinance/cache.py
Normal file
@@ -0,0 +1,400 @@
|
||||
import peewee as _peewee
|
||||
from threading import Lock
|
||||
import os as _os
|
||||
import appdirs as _ad
|
||||
import atexit as _atexit
|
||||
import datetime as _datetime
|
||||
import pickle as _pkl
|
||||
|
||||
from .utils import get_yf_logger
|
||||
|
||||
_cache_init_lock = Lock()
|
||||
|
||||
# --------------
|
||||
# TimeZone cache
|
||||
# --------------
|
||||
|
||||
class _TzCacheException(Exception):
    """Raised when the timezone-cache folder cannot be created or accessed."""
    pass
|
||||
|
||||
|
||||
class _TzCacheDummy:
|
||||
"""Dummy cache to use if tz cache is disabled"""
|
||||
|
||||
def lookup(self, tkr):
|
||||
return None
|
||||
|
||||
def store(self, tkr, tz):
|
||||
pass
|
||||
|
||||
@property
|
||||
def tz_db(self):
|
||||
return None
|
||||
|
||||
|
||||
class _TzCacheManager:
    """Lazily constructs and hands out the process-wide _TzCache singleton."""
    _tz_cache = None

    @classmethod
    def get_tz_cache(cls):
        """Return the shared _TzCache, creating it on first use (thread-safe).

        Bug fix: the original never re-checked ``_tz_cache`` after acquiring
        the lock, so two threads racing past the unlocked check would each
        call ``_initialise()`` and the second would replace the first's cache
        (broken double-checked locking).
        """
        if cls._tz_cache is None:
            with _cache_init_lock:
                # Re-check under the lock: another thread may have initialised
                # between our unlocked check and acquiring the lock.
                if cls._tz_cache is None:
                    cls._initialise()
        return cls._tz_cache

    @classmethod
    def _initialise(cls, cache_dir=None):
        # cache_dir is currently unused; _TzCache resolves its own location
        # via _TzDBManager.
        cls._tz_cache = _TzCache()
|
||||
|
||||
|
||||
class _TzDBManager:
    # Owns the sqlite database file backing the timezone cache.
    _db = None
    # Default cache folder; can be overridden via set_location() before first use.
    _cache_dir = _os.path.join(_ad.user_cache_dir(), "py-yfinance")

    @classmethod
    def get_database(cls):
        """Return the peewee SqliteDatabase, creating it on first call."""
        if cls._db is None:
            cls._initialise()
        return cls._db

    @classmethod
    def close_db(cls):
        """Close the database if open; errors are swallowed (runs at interpreter exit)."""
        if cls._db is not None:
            try:
                cls._db.close()
            except Exception:
                # Must discard exceptions because Python trying to quit.
                pass

    @classmethod
    def _initialise(cls, cache_dir=None):
        """Create the cache folder (verifying read/write access) and open 'tkr-tz.db'.

        Raises _TzCacheException if the folder cannot be created or accessed.
        """
        if cache_dir is not None:
            cls._cache_dir = cache_dir

        if not _os.path.isdir(cls._cache_dir):
            try:
                _os.makedirs(cls._cache_dir)
            except OSError as err:
                raise _TzCacheException(f"Error creating TzCache folder: '{cls._cache_dir}' reason: {err}")
        elif not (_os.access(cls._cache_dir, _os.R_OK) and _os.access(cls._cache_dir, _os.W_OK)):
            raise _TzCacheException(f"Cannot read and write in TzCache folder: '{cls._cache_dir}'")

        cls._db = _peewee.SqliteDatabase(
            _os.path.join(cls._cache_dir, 'tkr-tz.db'),
            pragmas={'journal_mode': 'wal', 'cache_size': -64}
        )

        # Migration: remove the obsolete CSV-based cache file if still present.
        old_cache_file_path = _os.path.join(cls._cache_dir, "tkr-tz.csv")
        if _os.path.isfile(old_cache_file_path):
            _os.remove(old_cache_file_path)

    @classmethod
    def set_location(cls, new_cache_dir):
        """Point the cache at a new folder; closes any open DB so the next access reopens there."""
        if cls._db is not None:
            cls._db.close()
            cls._db = None
        cls._cache_dir = new_cache_dir

    @classmethod
    def get_location(cls):
        """Return the current cache folder path."""
        return cls._cache_dir
|
||||
|
||||
# close DB when Python exists
|
||||
_atexit.register(_TzDBManager.close_db)
|
||||
|
||||
|
||||
# Proxy lets the model be declared before the real database exists;
# _TzCache.initialise() binds it to the actual sqlite DB.
tz_db_proxy = _peewee.Proxy()
class _KV(_peewee.Model):
    # Simple key/value row: ticker symbol -> timezone name.
    key = _peewee.CharField(primary_key=True)
    value = _peewee.CharField(null=True)

    class Meta:
        database = tz_db_proxy
        # WITHOUT ROWID table: the primary key is the clustering index.
        without_rowid = True
|
||||
|
||||
|
||||
class _TzCache:
    """Sqlite-backed ticker->timezone cache with lazy, failure-tolerant initialisation."""

    def __init__(self):
        # initialised: -1 = not yet attempted, 0 = attempted and failed, 1 = ready
        self.initialised = -1
        self.db = None
        # dummy: set True once DB setup fails; all operations then become no-ops
        self.dummy = False

    def get_db(self):
        """Return the shared sqlite DB, or None (and flag self as dummy) if unavailable."""
        if self.db is not None:
            return self.db

        try:
            self.db = _TzDBManager.get_database()
        except _TzCacheException as err:
            get_yf_logger().info(f"Failed to create TzCache, reason: {err}. "
                                 "TzCache will not be used. "
                                 "Tip: You can direct cache to use a different location with 'set_tz_cache_location(mylocation)'")
            self.dummy = True
            return None
        return self.db

    def initialise(self):
        """One-shot setup: connect the DB, bind the model proxy, create tables."""
        if self.initialised != -1:
            return

        db = self.get_db()
        if db is None:
            self.initialised = 0  # failure
            return

        db.connect()
        tz_db_proxy.initialize(db)
        db.create_tables([_KV])
        self.initialised = 1  # success

    def lookup(self, key):
        """Return the cached value for key, or None on a miss or disabled cache."""
        if self.dummy:
            return None

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return None

        try:
            return _KV.get(_KV.key == key).value
        except _KV.DoesNotExist:
            return None

    def store(self, key, value):
        """Insert or update key -> value; value=None deletes the row.

        Never raises when the cache is disabled or failed to initialise.
        """
        if self.dummy:
            return

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return

        db = self.get_db()
        if db is None:
            return
        try:
            if value is None:
                q = _KV.delete().where(_KV.key == key)
                q.execute()
                return
            with db.atomic():
                _KV.insert(key=key, value=value).execute()
        except _peewee.IntegrityError:
            # Integrity error means the key already exists. Try updating the key.
            old_value = self.lookup(key)
            if old_value != value:
                get_yf_logger().debug(f"Value for key {key} changed from {old_value} to {value}.")
                with db.atomic():
                    q = _KV.update(value=value).where(_KV.key == key)
                    q.execute()
|
||||
|
||||
|
||||
def get_tz_cache():
    """Return the process-wide timezone cache (created lazily on first call)."""
    return _TzCacheManager.get_tz_cache()
|
||||
|
||||
|
||||
def set_tz_cache_location(cache_dir: str):
    """
    Sets the path to create the "py-yfinance" cache folder in.
    Useful if the default folder returned by "appdir.user_cache_dir()" is not writable.
    Must be called before cache is used (that is, before fetching tickers).
    :param cache_dir: Path to use for caches
    :return: None
    """
    # NOTE(review): this only relocates the timezone DB; the cookie DB keeps
    # its default location unless _CookieDBManager.set_location is also called.
    _TzDBManager.set_location(cache_dir)
|
||||
|
||||
|
||||
|
||||
# --------------
|
||||
# Cookie cache
|
||||
# --------------
|
||||
|
||||
class _CookieCacheException(Exception):
    """Raised when the cookie-cache folder cannot be created or accessed."""
    pass
|
||||
|
||||
|
||||
class _CookieCacheDummy:
|
||||
"""Dummy cache to use if Cookie cache is disabled"""
|
||||
|
||||
def lookup(self, tkr):
|
||||
return None
|
||||
|
||||
def store(self, tkr, Cookie):
|
||||
pass
|
||||
|
||||
@property
|
||||
def Cookie_db(self):
|
||||
return None
|
||||
|
||||
|
||||
class _CookieCacheManager:
    """Lazily constructs and hands out the process-wide _CookieCache singleton."""
    _Cookie_cache = None

    @classmethod
    def get_cookie_cache(cls):
        """Return the shared _CookieCache, creating it on first use (thread-safe).

        Bug fix: the original never re-checked ``_Cookie_cache`` after
        acquiring the lock, so two threads racing past the unlocked check
        would each call ``_initialise()`` and the second would replace the
        first's cache (broken double-checked locking).
        """
        if cls._Cookie_cache is None:
            with _cache_init_lock:
                # Re-check under the lock: another thread may have initialised
                # between our unlocked check and acquiring the lock.
                if cls._Cookie_cache is None:
                    cls._initialise()
        return cls._Cookie_cache

    @classmethod
    def _initialise(cls, cache_dir=None):
        # cache_dir is currently unused; _CookieCache resolves its own
        # location via _CookieDBManager.
        cls._Cookie_cache = _CookieCache()
|
||||
|
||||
|
||||
class _CookieDBManager:
    # Owns the sqlite database file backing the cookie cache.
    _db = None
    # Default cache folder; can be overridden via set_location() before first use.
    _cache_dir = _os.path.join(_ad.user_cache_dir(), "py-yfinance")

    @classmethod
    def get_database(cls):
        """Return the peewee SqliteDatabase, creating it on first call."""
        if cls._db is None:
            cls._initialise()
        return cls._db

    @classmethod
    def close_db(cls):
        """Close the database if open; errors are swallowed (runs at interpreter exit)."""
        if cls._db is not None:
            try:
                cls._db.close()
            except Exception:
                # Must discard exceptions because Python trying to quit.
                pass

    @classmethod
    def _initialise(cls, cache_dir=None):
        """Create the cache folder (verifying read/write access) and open 'cookies.db'.

        Raises _CookieCacheException if the folder cannot be created or accessed.
        """
        if cache_dir is not None:
            cls._cache_dir = cache_dir

        if not _os.path.isdir(cls._cache_dir):
            try:
                _os.makedirs(cls._cache_dir)
            except OSError as err:
                raise _CookieCacheException(f"Error creating CookieCache folder: '{cls._cache_dir}' reason: {err}")
        elif not (_os.access(cls._cache_dir, _os.R_OK) and _os.access(cls._cache_dir, _os.W_OK)):
            raise _CookieCacheException(f"Cannot read and write in CookieCache folder: '{cls._cache_dir}'")

        cls._db = _peewee.SqliteDatabase(
            _os.path.join(cls._cache_dir, 'cookies.db'),
            pragmas={'journal_mode': 'wal', 'cache_size': -64}
        )

    @classmethod
    def set_location(cls, new_cache_dir):
        """Point the cache at a new folder; closes any open DB so the next access reopens there."""
        if cls._db is not None:
            cls._db.close()
            cls._db = None
        cls._cache_dir = new_cache_dir

    @classmethod
    def get_location(cls):
        """Return the current cache folder path."""
        return cls._cache_dir
|
||||
|
||||
# close DB when Python exists
|
||||
_atexit.register(_CookieDBManager.close_db)
|
||||
|
||||
|
||||
# Proxy lets the model be declared before the real database exists;
# _CookieCache.initialise() binds it to the actual sqlite DB.
Cookie_db_proxy = _peewee.Proxy()
class _CookieSchema(_peewee.Model):
    # One row per cookie-fetch strategy name.
    strategy = _peewee.CharField(primary_key=True)
    # When the cookie was fetched; lookup() uses this to report the cookie's age.
    fetch_date = _peewee.DateTimeField(default=_datetime.datetime.now)

    # Which cookie type depends on strategy
    cookie_bytes = _peewee.BlobField()

    class Meta:
        database = Cookie_db_proxy
        # WITHOUT ROWID table: the primary key is the clustering index.
        without_rowid = True
|
||||
|
||||
|
||||
class _CookieCache:
    """Sqlite-backed cookie cache (one pickled cookie per fetch strategy), failure-tolerant."""

    def __init__(self):
        # initialised: -1 = not yet attempted, 0 = attempted and failed, 1 = ready
        self.initialised = -1
        self.db = None
        # dummy: set True once DB setup fails; all operations then become no-ops
        self.dummy = False

    def get_db(self):
        """Return the shared sqlite DB, or None (and flag self as dummy) if unavailable."""
        if self.db is not None:
            return self.db

        try:
            self.db = _CookieDBManager.get_database()
        except _CookieCacheException as err:
            get_yf_logger().info(f"Failed to create CookieCache, reason: {err}. "
                                 "CookieCache will not be used. "
                                 "Tip: You can direct cache to use a different location with 'set_tz_cache_location(mylocation)'")
            self.dummy = True
            return None
        return self.db

    def initialise(self):
        """One-shot setup: connect the DB, bind the model proxy, create tables."""
        if self.initialised != -1:
            return

        db = self.get_db()
        if db is None:
            self.initialised = 0  # failure
            return

        db.connect()
        Cookie_db_proxy.initialize(db)
        db.create_tables([_CookieSchema])
        self.initialised = 1  # success

    def lookup(self, strategy):
        """Return {'cookie': obj, 'age': timedelta} for strategy, or None on miss.

        NOTE(review): unpickles bytes read back from the local cache DB -
        trusted local data only, never remote input.
        """
        if self.dummy:
            return None

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return None

        try:
            data = _CookieSchema.get(_CookieSchema.strategy == strategy)
            cookie = _pkl.loads(data.cookie_bytes)
            return {'cookie':cookie, 'age':_datetime.datetime.now()-data.fetch_date}
        except _CookieSchema.DoesNotExist:
            return None

    def store(self, strategy, cookie):
        """Replace the stored cookie for strategy (delete-then-insert); cookie=None just deletes."""
        if self.dummy:
            return

        if self.initialised == -1:
            self.initialise()

        if self.initialised == 0:  # failure
            return

        db = self.get_db()
        if db is None:
            return
        try:
            # Delete-then-insert, so an IntegrityError should be impossible below.
            q = _CookieSchema.delete().where(_CookieSchema.strategy == strategy)
            q.execute()
            if cookie is None:
                return
            with db.atomic():
                cookie_pkl = _pkl.dumps(cookie, _pkl.HIGHEST_PROTOCOL)
                _CookieSchema.insert(strategy=strategy, cookie_bytes=cookie_pkl).execute()
        except _peewee.IntegrityError:
            raise
            # # Integrity error means the strategy already exists. Try updating the strategy.
            # old_value = self.lookup(strategy)
            # if old_value != cookie:
            #     get_yf_logger().debug(f"cookie for strategy {strategy} changed from {old_value} to {cookie}.")
            #     with db.atomic():
            #         q = _CookieSchema.update(cookie=cookie).where(_CookieSchema.strategy == strategy)
            #         q.execute()
|
||||
|
||||
|
||||
def get_cookie_cache():
    """Return the process-wide cookie cache (created lazily on first call)."""
    return _CookieCacheManager.get_cookie_cache()
|
||||
|
||||
118
yfinance/const.py
Normal file
118
yfinance/const.py
Normal file
@@ -0,0 +1,118 @@
|
||||
_BASE_URL_ = 'https://query2.finance.yahoo.com'
|
||||
_ROOT_URL_ = 'https://finance.yahoo.com'
|
||||
|
||||
fundamentals_keys = {
|
||||
'financials': ["TaxEffectOfUnusualItems", "TaxRateForCalcs", "NormalizedEBITDA", "NormalizedDilutedEPS",
|
||||
"NormalizedBasicEPS", "TotalUnusualItems", "TotalUnusualItemsExcludingGoodwill",
|
||||
"NetIncomeFromContinuingOperationNetMinorityInterest", "ReconciledDepreciation",
|
||||
"ReconciledCostOfRevenue", "EBITDA", "EBIT", "NetInterestIncome", "InterestExpense",
|
||||
"InterestIncome", "ContinuingAndDiscontinuedDilutedEPS", "ContinuingAndDiscontinuedBasicEPS",
|
||||
"NormalizedIncome", "NetIncomeFromContinuingAndDiscontinuedOperation", "TotalExpenses",
|
||||
"RentExpenseSupplemental", "ReportedNormalizedDilutedEPS", "ReportedNormalizedBasicEPS",
|
||||
"TotalOperatingIncomeAsReported", "DividendPerShare", "DilutedAverageShares", "BasicAverageShares",
|
||||
"DilutedEPS", "DilutedEPSOtherGainsLosses", "TaxLossCarryforwardDilutedEPS",
|
||||
"DilutedAccountingChange", "DilutedExtraordinary", "DilutedDiscontinuousOperations",
|
||||
"DilutedContinuousOperations", "BasicEPS", "BasicEPSOtherGainsLosses", "TaxLossCarryforwardBasicEPS",
|
||||
"BasicAccountingChange", "BasicExtraordinary", "BasicDiscontinuousOperations",
|
||||
"BasicContinuousOperations", "DilutedNIAvailtoComStockholders", "AverageDilutionEarnings",
|
||||
"NetIncomeCommonStockholders", "OtherunderPreferredStockDividend", "PreferredStockDividends",
|
||||
"NetIncome", "MinorityInterests", "NetIncomeIncludingNoncontrollingInterests",
|
||||
"NetIncomeFromTaxLossCarryforward", "NetIncomeExtraordinary", "NetIncomeDiscontinuousOperations",
|
||||
"NetIncomeContinuousOperations", "EarningsFromEquityInterestNetOfTax", "TaxProvision",
|
||||
"PretaxIncome", "OtherIncomeExpense", "OtherNonOperatingIncomeExpenses", "SpecialIncomeCharges",
|
||||
"GainOnSaleOfPPE", "GainOnSaleOfBusiness", "OtherSpecialCharges", "WriteOff",
|
||||
"ImpairmentOfCapitalAssets", "RestructuringAndMergernAcquisition", "SecuritiesAmortization",
|
||||
"EarningsFromEquityInterest", "GainOnSaleOfSecurity", "NetNonOperatingInterestIncomeExpense",
|
||||
"TotalOtherFinanceCost", "InterestExpenseNonOperating", "InterestIncomeNonOperating",
|
||||
"OperatingIncome", "OperatingExpense", "OtherOperatingExpenses", "OtherTaxes",
|
||||
"ProvisionForDoubtfulAccounts", "DepreciationAmortizationDepletionIncomeStatement",
|
||||
"DepletionIncomeStatement", "DepreciationAndAmortizationInIncomeStatement", "Amortization",
|
||||
"AmortizationOfIntangiblesIncomeStatement", "DepreciationIncomeStatement", "ResearchAndDevelopment",
|
||||
"SellingGeneralAndAdministration", "SellingAndMarketingExpense", "GeneralAndAdministrativeExpense",
|
||||
"OtherGandA", "InsuranceAndClaims", "RentAndLandingFees", "SalariesAndWages", "GrossProfit",
|
||||
"CostOfRevenue", "TotalRevenue", "ExciseTaxes", "OperatingRevenue"],
|
||||
'balance-sheet': ["TreasurySharesNumber", "PreferredSharesNumber", "OrdinarySharesNumber", "ShareIssued", "NetDebt",
|
||||
"TotalDebt", "TangibleBookValue", "InvestedCapital", "WorkingCapital", "NetTangibleAssets",
|
||||
"CapitalLeaseObligations", "CommonStockEquity", "PreferredStockEquity", "TotalCapitalization",
|
||||
"TotalEquityGrossMinorityInterest", "MinorityInterest", "StockholdersEquity",
|
||||
"OtherEquityInterest", "GainsLossesNotAffectingRetainedEarnings", "OtherEquityAdjustments",
|
||||
"FixedAssetsRevaluationReserve", "ForeignCurrencyTranslationAdjustments",
|
||||
"MinimumPensionLiabilities", "UnrealizedGainLoss", "TreasuryStock", "RetainedEarnings",
|
||||
"AdditionalPaidInCapital", "CapitalStock", "OtherCapitalStock", "CommonStock", "PreferredStock",
|
||||
"TotalPartnershipCapital", "GeneralPartnershipCapital", "LimitedPartnershipCapital",
|
||||
"TotalLiabilitiesNetMinorityInterest", "TotalNonCurrentLiabilitiesNetMinorityInterest",
|
||||
"OtherNonCurrentLiabilities", "LiabilitiesHeldforSaleNonCurrent", "RestrictedCommonStock",
|
||||
"PreferredSecuritiesOutsideStockEquity", "DerivativeProductLiabilities", "EmployeeBenefits",
|
||||
"NonCurrentPensionAndOtherPostretirementBenefitPlans", "NonCurrentAccruedExpenses",
|
||||
"DuetoRelatedPartiesNonCurrent", "TradeandOtherPayablesNonCurrent",
|
||||
"NonCurrentDeferredLiabilities", "NonCurrentDeferredRevenue",
|
||||
"NonCurrentDeferredTaxesLiabilities", "LongTermDebtAndCapitalLeaseObligation",
|
||||
"LongTermCapitalLeaseObligation", "LongTermDebt", "LongTermProvisions", "CurrentLiabilities",
|
||||
"OtherCurrentLiabilities", "CurrentDeferredLiabilities", "CurrentDeferredRevenue",
|
||||
"CurrentDeferredTaxesLiabilities", "CurrentDebtAndCapitalLeaseObligation",
|
||||
"CurrentCapitalLeaseObligation", "CurrentDebt", "OtherCurrentBorrowings", "LineOfCredit",
|
||||
"CommercialPaper", "CurrentNotesPayable", "PensionandOtherPostRetirementBenefitPlansCurrent",
|
||||
"CurrentProvisions", "PayablesAndAccruedExpenses", "CurrentAccruedExpenses", "InterestPayable",
|
||||
"Payables", "OtherPayable", "DuetoRelatedPartiesCurrent", "DividendsPayable", "TotalTaxPayable",
|
||||
"IncomeTaxPayable", "AccountsPayable", "TotalAssets", "TotalNonCurrentAssets",
|
||||
"OtherNonCurrentAssets", "DefinedPensionBenefit", "NonCurrentPrepaidAssets",
|
||||
"NonCurrentDeferredAssets", "NonCurrentDeferredTaxesAssets", "DuefromRelatedPartiesNonCurrent",
|
||||
"NonCurrentNoteReceivables", "NonCurrentAccountsReceivable", "FinancialAssets",
|
||||
"InvestmentsAndAdvances", "OtherInvestments", "InvestmentinFinancialAssets",
|
||||
"HeldToMaturitySecurities", "AvailableForSaleSecurities",
|
||||
"FinancialAssetsDesignatedasFairValueThroughProfitorLossTotal", "TradingSecurities",
|
||||
"LongTermEquityInvestment", "InvestmentsinJointVenturesatCost",
|
||||
"InvestmentsInOtherVenturesUnderEquityMethod", "InvestmentsinAssociatesatCost",
|
||||
"InvestmentsinSubsidiariesatCost", "InvestmentProperties", "GoodwillAndOtherIntangibleAssets",
|
||||
"OtherIntangibleAssets", "Goodwill", "NetPPE", "AccumulatedDepreciation", "GrossPPE", "Leases",
|
||||
"ConstructionInProgress", "OtherProperties", "MachineryFurnitureEquipment",
|
||||
"BuildingsAndImprovements", "LandAndImprovements", "Properties", "CurrentAssets",
|
||||
"OtherCurrentAssets", "HedgingAssetsCurrent", "AssetsHeldForSaleCurrent", "CurrentDeferredAssets",
|
||||
"CurrentDeferredTaxesAssets", "RestrictedCash", "PrepaidAssets", "Inventory",
|
||||
"InventoriesAdjustmentsAllowances", "OtherInventories", "FinishedGoods", "WorkInProcess",
|
||||
"RawMaterials", "Receivables", "ReceivablesAdjustmentsAllowances", "OtherReceivables",
|
||||
"DuefromRelatedPartiesCurrent", "TaxesReceivable", "AccruedInterestReceivable", "NotesReceivable",
|
||||
"LoansReceivable", "AccountsReceivable", "AllowanceForDoubtfulAccountsReceivable",
|
||||
"GrossAccountsReceivable", "CashCashEquivalentsAndShortTermInvestments",
|
||||
"OtherShortTermInvestments", "CashAndCashEquivalents", "CashEquivalents", "CashFinancial"],
|
||||
'cash-flow': ["ForeignSales", "DomesticSales", "AdjustedGeographySegmentData", "FreeCashFlow",
|
||||
"RepurchaseOfCapitalStock", "RepaymentOfDebt", "IssuanceOfDebt", "IssuanceOfCapitalStock",
|
||||
"CapitalExpenditure", "InterestPaidSupplementalData", "IncomeTaxPaidSupplementalData",
|
||||
"EndCashPosition", "OtherCashAdjustmentOutsideChangeinCash", "BeginningCashPosition",
|
||||
"EffectOfExchangeRateChanges", "ChangesInCash", "OtherCashAdjustmentInsideChangeinCash",
|
||||
"CashFlowFromDiscontinuedOperation", "FinancingCashFlow", "CashFromDiscontinuedFinancingActivities",
|
||||
"CashFlowFromContinuingFinancingActivities", "NetOtherFinancingCharges", "InterestPaidCFF",
|
||||
"ProceedsFromStockOptionExercised", "CashDividendsPaid", "PreferredStockDividendPaid",
|
||||
"CommonStockDividendPaid", "NetPreferredStockIssuance", "PreferredStockPayments",
|
||||
"PreferredStockIssuance", "NetCommonStockIssuance", "CommonStockPayments", "CommonStockIssuance",
|
||||
"NetIssuancePaymentsOfDebt", "NetShortTermDebtIssuance", "ShortTermDebtPayments",
|
||||
"ShortTermDebtIssuance", "NetLongTermDebtIssuance", "LongTermDebtPayments", "LongTermDebtIssuance",
|
||||
"InvestingCashFlow", "CashFromDiscontinuedInvestingActivities",
|
||||
"CashFlowFromContinuingInvestingActivities", "NetOtherInvestingChanges", "InterestReceivedCFI",
|
||||
"DividendsReceivedCFI", "NetInvestmentPurchaseAndSale", "SaleOfInvestment", "PurchaseOfInvestment",
|
||||
"NetInvestmentPropertiesPurchaseAndSale", "SaleOfInvestmentProperties",
|
||||
"PurchaseOfInvestmentProperties", "NetBusinessPurchaseAndSale", "SaleOfBusiness",
|
||||
"PurchaseOfBusiness", "NetIntangiblesPurchaseAndSale", "SaleOfIntangibles", "PurchaseOfIntangibles",
|
||||
"NetPPEPurchaseAndSale", "SaleOfPPE", "PurchaseOfPPE", "CapitalExpenditureReported",
|
||||
"OperatingCashFlow", "CashFromDiscontinuedOperatingActivities",
|
||||
"CashFlowFromContinuingOperatingActivities", "TaxesRefundPaid", "InterestReceivedCFO",
|
||||
"InterestPaidCFO", "DividendReceivedCFO", "DividendPaidCFO", "ChangeInWorkingCapital",
|
||||
"ChangeInOtherWorkingCapital", "ChangeInOtherCurrentLiabilities", "ChangeInOtherCurrentAssets",
|
||||
"ChangeInPayablesAndAccruedExpense", "ChangeInAccruedExpense", "ChangeInInterestPayable",
|
||||
"ChangeInPayable", "ChangeInDividendPayable", "ChangeInAccountPayable", "ChangeInTaxPayable",
|
||||
"ChangeInIncomeTaxPayable", "ChangeInPrepaidAssets", "ChangeInInventory", "ChangeInReceivables",
|
||||
"ChangesInAccountReceivables", "OtherNonCashItems", "ExcessTaxBenefitFromStockBasedCompensation",
|
||||
"StockBasedCompensation", "UnrealizedGainLossOnInvestmentSecurities", "ProvisionandWriteOffofAssets",
|
||||
"AssetImpairmentCharge", "AmortizationOfSecurities", "DeferredTax", "DeferredIncomeTax",
|
||||
"DepreciationAmortizationDepletion", "Depletion", "DepreciationAndAmortization",
|
||||
"AmortizationCashFlow", "AmortizationOfIntangibles", "Depreciation", "OperatingGainsLosses",
|
||||
"PensionAndEmployeeBenefitExpense", "EarningsLossesFromEquityInvestments",
|
||||
"GainLossOnInvestmentSecurities", "NetForeignCurrencyExchangeGainLoss", "GainLossOnSaleOfPPE",
|
||||
"GainLossOnSaleOfBusiness", "NetIncomeFromContinuingOperations",
|
||||
"CashFlowsfromusedinOperatingActivitiesDirect", "TaxesRefundPaidDirect", "InterestReceivedDirect",
|
||||
"InterestPaidDirect", "DividendsReceivedDirect", "DividendsPaidDirect", "ClassesofCashPayments",
|
||||
"OtherCashPaymentsfromOperatingActivities", "PaymentsonBehalfofEmployees",
|
||||
"PaymentstoSuppliersforGoodsandServices", "ClassesofCashReceiptsfromOperatingActivities",
|
||||
"OtherCashReceiptsfromOperatingActivities", "ReceiptsfromGovernmentGrants", "ReceiptsfromCustomers"]}
|
||||
|
||||
price_colnames = ['Open', 'High', 'Low', 'Close', 'Adj Close']
|
||||
399
yfinance/data.py
Normal file
399
yfinance/data.py
Normal file
@@ -0,0 +1,399 @@
|
||||
import functools
|
||||
from functools import lru_cache
|
||||
|
||||
import requests as requests
|
||||
from bs4 import BeautifulSoup
|
||||
import datetime
|
||||
|
||||
from frozendict import frozendict
|
||||
|
||||
from . import utils, cache
|
||||
|
||||
cache_maxsize = 64
|
||||
|
||||
|
||||
def lru_cache_freezeargs(func):
    """
    Decorator that converts mutable dict and list arguments into immutable types.

    Needed so lru_cache can memoize calls whose arguments include dicts or lists,
    which are unhashable in their mutable form.
    """

    def _freeze(value):
        # Hashable stand-ins: dict -> frozendict, list -> tuple; all else passes through.
        if isinstance(value, dict):
            return frozendict(value)
        if isinstance(value, list):
            return tuple(value)
        return value

    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        frozen_args = tuple(_freeze(a) for a in args)
        frozen_kwargs = {key: _freeze(val) for key, val in kwargs.items()}
        return func(*frozen_args, **frozen_kwargs)

    # Expose the lru_cache introspection/management methods on the wrapper
    # so callers can still reach them after this decorator is applied.
    wrapped.cache_info = func.cache_info
    wrapped.cache_clear = func.cache_clear
    return wrapped
|
||||
|
||||
|
||||
import threading
|
||||
class SingletonMeta(type):
    """
    Metaclass that creates a Singleton instance.

    First call constructs the instance; later calls return it, forwarding
    their arguments to the instance's _set_session() so the session can
    still be refreshed.
    """
    _instances = {}
    _lock = threading.Lock()

    def __call__(cls, *args, **kwargs):
        # Serialise instantiation so concurrent first calls create one instance.
        with cls._lock:
            existing = cls._instances.get(cls)
            if existing is None:
                existing = super().__call__(*args, **kwargs)
                cls._instances[cls] = existing
            else:
                # Already constructed: only update its session.
                existing._set_session(*args, **kwargs)
            return existing
|
||||
|
||||
|
||||
class YfData(metaclass=SingletonMeta):
    """
    Have one place to retrieve data from Yahoo API in order to ease caching and speed up operations.

    Singleton (enforced by SingletonMeta): every caller shares one requests
    session plus the Yahoo cookie & crumb that authenticate API requests.
    Two acquisition strategies exist: 'basic' (fc.yahoo.com cookie passed as a
    GET cookie) and 'csrf' (consent-flow cookies stored in the session); when
    one fails the other is tried.

    Credit for code for cookie & crumb goes to StackOverflow:
    https://stackoverflow.com/questions/76065035/yahoo-finance-v7-api-now-requiring-cookies-python
    """
    user_agent_headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}

    def __init__(self, session=None):
        self._session = session or requests.Session()

        try:
            self._session.cache
        except AttributeError:
            # Plain requests.Session - not caching.
            self._session_is_caching = False
        else:
            # Is caching. This is annoying.
            # Can't simply use a non-caching session to fetch cookie & crumb,
            # because then the caching-session won't have cookie.
            self._session_is_caching = True
            from requests_cache import DO_NOT_CACHE
            self._expire_after = DO_NOT_CACHE

        self._crumb = None
        self._cookie = None
        if self._session_is_caching and self._cookie is None:
            utils.print_once("WARNING: cookie & crumb does not work well with requests_cache. Am experimenting with 'expire_after=DO_NOT_CACHE', but you need to help stress-test.")

        # Default to using 'basic' strategy; if it fails, fallback method is 'csrf'.
        self._cookie_strategy = 'basic'

        self._cookie_lock = threading.Lock()

    def _set_session(self, session):
        # Called by SingletonMeta when YfData(...) is invoked again on the
        # existing instance - swap in the new session if one was supplied.
        if session is None:
            return
        with self._cookie_lock:
            self._session = session

    def _set_cookie_strategy(self, strategy, have_lock=False):
        """Toggle between 'basic' and 'csrf' strategies, discarding cached cookie & crumb.

        FIX: removed a stray 'self._cookie_strategy = strategy' assignment that ran
        before the toggle and inverted it (requesting 'csrf' ended on 'basic' and
        vice versa).
        """
        if strategy == self._cookie_strategy:
            return
        if not have_lock:
            self._cookie_lock.acquire()
        try:
            if self._cookie_strategy == 'csrf':
                utils.get_yf_logger().debug(f'toggling cookie strategy {self._cookie_strategy} -> basic')
                # csrf cookies live in the session, so clear them out.
                self._session.cookies.clear()
                self._cookie_strategy = 'basic'
            else:
                utils.get_yf_logger().debug(f'toggling cookie strategy {self._cookie_strategy} -> csrf')
                self._cookie_strategy = 'csrf'
            self._cookie = None
            self._crumb = None
        except Exception:
            self._cookie_lock.release()
            raise

        if not have_lock:
            self._cookie_lock.release()

    def _save_session_cookies(self):
        """Persist the session's csrf cookies. Best-effort: returns False on any failure."""
        try:
            cache.get_cookie_cache().store('csrf', self._session.cookies)
        except Exception:
            return False
        return True

    def _load_session_cookies(self):
        """Load persisted csrf cookies into the session. Returns True on success.

        FIX: was missing the final 'return True', so a successful load fell through
        returning None and callers treated it as failure.
        """
        cookie_dict = cache.get_cookie_cache().lookup('csrf')
        if cookie_dict is None:
            return False
        # Periodically refresh, 24 hours seems fair.
        if cookie_dict['age'] > datetime.timedelta(days=1):
            return False
        self._session.cookies.update(cookie_dict['cookie'])
        utils.get_yf_logger().debug('loaded persistent cookie')
        return True

    def _save_cookie_basic(self, cookie):
        """Persist the basic cookie. Best-effort: returns False on any failure."""
        try:
            cache.get_cookie_cache().store('basic', cookie)
        except Exception:
            return False
        return True

    def _load_cookie_basic(self):
        """Return the persisted basic cookie, or None if absent or older than 24h."""
        cookie_dict = cache.get_cookie_cache().lookup('basic')
        if cookie_dict is None:
            return None
        # Periodically refresh, 24 hours seems fair.
        if cookie_dict['age'] > datetime.timedelta(days=1):
            return None
        utils.get_yf_logger().debug('loaded persistent cookie')
        return cookie_dict['cookie']

    def _get_cookie_basic(self, proxy=None, timeout=30):
        """Return a Yahoo cookie via fc.yahoo.com, reusing in-memory/persisted copies first."""
        if self._cookie is not None:
            utils.get_yf_logger().debug('reusing cookie')
            return self._cookie

        self._cookie = self._load_cookie_basic()
        if self._cookie is not None:
            return self._cookie

        # To avoid infinite recursion, do NOT use self.get()
        # - 'allow_redirects' copied from @psychoz971 solution - does it help USA?
        response = self._session.get(
            url='https://fc.yahoo.com',
            headers=self.user_agent_headers,
            proxies=proxy,
            timeout=timeout,
            allow_redirects=True)

        if not response.cookies:
            utils.get_yf_logger().debug("response.cookies = None")
            return None
        self._cookie = list(response.cookies)[0]
        if self._cookie == '':
            utils.get_yf_logger().debug("list(response.cookies)[0] = ''")
            return None
        self._save_cookie_basic(self._cookie)
        utils.get_yf_logger().debug(f"fetched basic cookie = {self._cookie}")
        return self._cookie

    def _get_crumb_basic(self, proxy=None, timeout=30):
        """Return the API crumb matching the basic cookie, or None on failure."""
        if self._crumb is not None:
            utils.get_yf_logger().debug('reusing crumb')
            return self._crumb

        cookie = self._get_cookie_basic()
        if cookie is None:
            return None

        # - 'allow_redirects' copied from @psychoz971 solution - does it help USA?
        get_args = {
            'url': "https://query1.finance.yahoo.com/v1/test/getcrumb",
            'headers': self.user_agent_headers,
            'cookies': {cookie.name: cookie.value},
            'proxies': proxy,
            'timeout': timeout,
            'allow_redirects': True
        }
        if self._session_is_caching:
            # Never cache the crumb request itself (branches were duplicated; merged).
            get_args['expire_after'] = self._expire_after
        crumb_response = self._session.get(**get_args)
        self._crumb = crumb_response.text
        if self._crumb is None or '<html>' in self._crumb:
            # Yahoo returned an error page instead of a crumb.
            return None

        utils.get_yf_logger().debug(f"crumb = '{self._crumb}'")
        return self._crumb

    @utils.log_indent_decorator
    def _get_cookie_and_crumb_basic(self, proxy, timeout):
        """Fetch the (cookie, crumb) pair using the 'basic' strategy."""
        cookie = self._get_cookie_basic(proxy, timeout)
        crumb = self._get_crumb_basic(proxy, timeout)
        return cookie, crumb

    def _get_cookie_csrf(self, proxy, timeout):
        """Acquire consent-flow cookies into the session. Returns True on success."""
        if utils.reuse_cookie and self._cookie is not None:
            utils.get_yf_logger().debug('reusing cookie')
            return True

        elif self._load_session_cookies():
            utils.get_yf_logger().debug('reusing persistent cookie')
            # For csrf strategy the cookie lives in the session; this flag just
            # records that we have one.
            self._cookie = True
            return True

        base_args = {
            'headers': self.user_agent_headers,
            'proxies': proxy,
            'timeout': timeout}

        get_args = base_args | {'url': 'https://guce.yahoo.com/consent'}
        if self._session_is_caching:
            get_args['expire_after'] = self._expire_after
        response = self._session.get(**get_args)

        soup = BeautifulSoup(response.content, 'html.parser')
        csrfTokenInput = soup.find('input', attrs={'name': 'csrfToken'})
        if csrfTokenInput is None:
            return False
        csrfToken = csrfTokenInput['value']
        utils.get_yf_logger().debug(f'csrfToken = {csrfToken}')
        sessionIdInput = soup.find('input', attrs={'name': 'sessionId'})
        sessionId = sessionIdInput['value']
        # FIX: log message previously had an unterminated quote around sessionId.
        utils.get_yf_logger().debug(f"sessionId='{sessionId}'")

        originalDoneUrl = 'https://finance.yahoo.com/'
        namespace = 'yahoo'
        data = {
            'agree': ['agree', 'agree'],
            'consentUUID': 'default',
            'sessionId': sessionId,
            'csrfToken': csrfToken,
            'originalDoneUrl': originalDoneUrl,
            'namespace': namespace,
        }
        post_args = base_args | {
            'url': f'https://consent.yahoo.com/v2/collectConsent?sessionId={sessionId}',
            'data': data}
        get_args = base_args | {
            'url': f'https://guce.yahoo.com/copyConsent?sessionId={sessionId}',
            'data': data}
        if self._session_is_caching:
            post_args['expire_after'] = self._expire_after
            get_args['expire_after'] = self._expire_after
        self._session.post(**post_args)
        self._session.get(**get_args)

        self._cookie = True
        self._save_session_cookies()
        return True

    @utils.log_indent_decorator
    def _get_crumb_csrf(self, proxy=None, timeout=30):
        """Return the API crumb using csrf session cookies, or None on failure."""
        # Credit goes to @bot-unit #1729

        if self._crumb is not None:
            utils.get_yf_logger().debug('reusing crumb')
            return self._crumb

        if not self._get_cookie_csrf(proxy, timeout):
            # This cookie stored in session
            return None

        get_args = {
            'url': 'https://query2.finance.yahoo.com/v1/test/getcrumb',
            'headers': self.user_agent_headers,
            'proxies': proxy,
            'timeout': timeout}
        if self._session_is_caching:
            get_args['expire_after'] = self._expire_after
        r = self._session.get(**get_args)
        self._crumb = r.text

        if self._crumb is None or '<html>' in self._crumb or self._crumb == '':
            return None

        utils.get_yf_logger().debug(f"crumb = '{self._crumb}'")
        return self._crumb

    @utils.log_indent_decorator
    def _get_cookie_and_crumb(self, proxy=None, timeout=30):
        """Return (cookie, crumb, strategy), switching strategy when the current one fails."""
        cookie, crumb, strategy = None, None, None

        utils.get_yf_logger().debug(f"cookie_mode = '{self._cookie_strategy}'")

        with self._cookie_lock:
            if self._cookie_strategy == 'csrf':
                crumb = self._get_crumb_csrf()
                if crumb is None:
                    # Fail -> fall back to 'basic'
                    self._set_cookie_strategy('basic', have_lock=True)
                    cookie, crumb = self._get_cookie_and_crumb_basic(proxy, timeout)
            else:
                cookie, crumb = self._get_cookie_and_crumb_basic(proxy, timeout)
                if cookie is None or crumb is None:
                    # Fail -> fall back to 'csrf'
                    self._set_cookie_strategy('csrf', have_lock=True)
                    crumb = self._get_crumb_csrf()
            strategy = self._cookie_strategy
        return cookie, crumb, strategy

    @utils.log_indent_decorator
    def get(self, url, user_agent_headers=None, params=None, proxy=None, timeout=30):
        """GET `url` from Yahoo with cookie & crumb attached; retries once with the
        other cookie strategy on HTTP >= 400.

        Raises Exception if caller supplies 'crumb' in params - it is managed here.
        """
        # Important: treat input arguments as immutable.
        if len(url) > 200:
            utils.get_yf_logger().debug(f'url={url[:200]}...')
        else:
            utils.get_yf_logger().debug(f'url={url}')
        utils.get_yf_logger().debug(f'params={params}')
        proxy = self._get_proxy(proxy)

        if params is None:
            params = {}
        if 'crumb' in params:
            raise Exception("Don't manually add 'crumb' to params dict, let data.py handle it")

        # FIX: forward proxy & timeout to the cookie/crumb fetch (previously dropped here,
        # although the retry below did forward them).
        cookie, crumb, strategy = self._get_cookie_and_crumb(proxy, timeout)
        crumbs = {'crumb': crumb} if crumb is not None else {}
        if strategy == 'basic' and cookie is not None:
            # Basic cookie strategy adds cookie to GET parameters
            cookies = {cookie.name: cookie.value}
        else:
            # csrf cookies already live in the session.
            cookies = None

        request_args = {
            'url': url,
            'params': params | crumbs,
            'cookies': cookies,
            'proxies': proxy,
            'timeout': timeout,
            'headers': user_agent_headers or self.user_agent_headers
        }
        response = self._session.get(**request_args)
        if response.status_code >= 400:
            # Retry with other cookie strategy
            if strategy == 'basic':
                self._set_cookie_strategy('csrf')
            else:
                self._set_cookie_strategy('basic')
            cookie, crumb, strategy = self._get_cookie_and_crumb(proxy, timeout)
            request_args['params']['crumb'] = crumb
            if strategy == 'basic':
                request_args['cookies'] = {cookie.name: cookie.value}
            response = self._session.get(**request_args)

        return response

    @lru_cache_freezeargs
    @lru_cache(maxsize=cache_maxsize)
    def cache_get(self, url, user_agent_headers=None, params=None, proxy=None, timeout=30):
        """Memoized self.get(): identical arguments return the cached Response.

        NOTE(review): lru_cache on a method keys on self; safe here only because
        YfData is a singleton.
        """
        return self.get(url, user_agent_headers, params, proxy, timeout)

    def _get_proxy(self, proxy):
        """Normalize `proxy` into requests' proxies-dict format: {'https': url}."""
        if proxy is not None:
            if isinstance(proxy, (dict, frozendict)) and "https" in proxy:
                proxy = proxy["https"]
            proxy = {"https": proxy}
        return proxy

    def get_raw_json(self, url, user_agent_headers=None, params=None, proxy=None, timeout=30):
        """GET `url` and return the parsed JSON body; raises on HTTP error status."""
        utils.get_yf_logger().debug(f'get_raw_json(): {url}')
        response = self.get(url, user_agent_headers=user_agent_headers, params=params, proxy=proxy, timeout=timeout)
        response.raise_for_status()
        return response.json()
|
||||
12
yfinance/exceptions.py
Normal file
12
yfinance/exceptions.py
Normal file
@@ -0,0 +1,12 @@
|
||||
class YFinanceException(Exception):
    """Base class for all yfinance-specific exceptions."""
    pass
|
||||
|
||||
|
||||
class YFinanceDataException(YFinanceException):
    """Raised when data returned by the Yahoo API is missing or malformed."""
    pass
|
||||
|
||||
|
||||
class YFNotImplementedError(NotImplementedError):
    """Raised for data fields whose Yahoo-API fetch has not been implemented yet."""

    def __init__(self, method_name):
        message = f"Have not implemented fetching '{method_name}' from Yahoo API"
        super().__init__(message)
|
||||
|
||||
@@ -21,18 +21,23 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import logging
|
||||
import time as _time
|
||||
import traceback
|
||||
|
||||
import multitasking as _multitasking
|
||||
import pandas as _pd
|
||||
|
||||
from . import Ticker, utils
|
||||
from .data import YfData
|
||||
from . import shared
|
||||
|
||||
|
||||
def download(tickers, start=None, end=None, actions=False, threads=True,
|
||||
group_by='column', auto_adjust=False, back_adjust=False,
|
||||
progress=True, period="max", show_errors=True, interval="1d", prepost=False,
|
||||
proxy=None, rounding=False, timeout=None, **kwargs):
|
||||
@utils.log_indent_decorator
|
||||
def download(tickers, start=None, end=None, actions=False, threads=True, ignore_tz=None,
|
||||
group_by='column', auto_adjust=False, back_adjust=False, repair=False, keepna=False,
|
||||
progress=True, period="max", show_errors=None, interval="1d", prepost=False,
|
||||
proxy=None, rounding=False, timeout=10, session=None):
|
||||
"""Download yahoo tickers
|
||||
:Parameters:
|
||||
tickers : str, list
|
||||
@@ -44,11 +49,13 @@ def download(tickers, start=None, end=None, actions=False, threads=True,
|
||||
Valid intervals: 1m,2m,5m,15m,30m,60m,90m,1h,1d,5d,1wk,1mo,3mo
|
||||
Intraday data cannot extend last 60 days
|
||||
start: str
|
||||
Download start date string (YYYY-MM-DD) or _datetime.
|
||||
Default is 1900-01-01
|
||||
Download start date string (YYYY-MM-DD) or _datetime, inclusive.
|
||||
Default is 99 years ago
|
||||
E.g. for start="2020-01-01", the first data point will be on "2020-01-01"
|
||||
end: str
|
||||
Download end date string (YYYY-MM-DD) or _datetime.
|
||||
Download end date string (YYYY-MM-DD) or _datetime, exclusive.
|
||||
Default is now
|
||||
E.g. for end="2023-01-01", the last data point will be on "2022-12-31"
|
||||
group_by : str
|
||||
Group by 'ticker' or 'column' (default)
|
||||
prepost : bool
|
||||
@@ -56,20 +63,60 @@ def download(tickers, start=None, end=None, actions=False, threads=True,
|
||||
Default is False
|
||||
auto_adjust: bool
|
||||
Adjust all OHLC automatically? Default is False
|
||||
repair: bool
|
||||
Detect currency unit 100x mixups and attempt repair
|
||||
Default is False
|
||||
keepna: bool
|
||||
Keep NaN rows returned by Yahoo?
|
||||
Default is False
|
||||
actions: bool
|
||||
Download dividend + stock splits data. Default is False
|
||||
threads: bool / int
|
||||
How many threads to use for mass downloading. Default is True
|
||||
ignore_tz: bool
|
||||
When combining from different timezones, ignore that part of datetime.
|
||||
Default depends on interval. Intraday = False. Day+ = True.
|
||||
proxy: str
|
||||
Optional. Proxy server URL scheme. Default is None
|
||||
rounding: bool
|
||||
Optional. Round values to 2 decimal places?
|
||||
show_errors: bool
|
||||
Optional. Doesn't print errors if True
|
||||
Optional. Doesn't print errors if False
|
||||
DEPRECATED, will be removed in future version
|
||||
timeout: None or float
|
||||
If not None stops waiting for a response after given number of
|
||||
seconds. (Can also be a fraction of a second e.g. 0.01)
|
||||
session: None or Session
|
||||
Optional. Pass your own session object to be used for all requests
|
||||
"""
|
||||
logger = utils.get_yf_logger()
|
||||
|
||||
if show_errors is not None:
|
||||
if show_errors:
|
||||
utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead: logging.getLogger('yfinance').setLevel(logging.ERROR)")
|
||||
logger.setLevel(logging.ERROR)
|
||||
else:
|
||||
utils.print_once(f"yfinance: download(show_errors={show_errors}) argument is deprecated and will be removed in future version. Do this instead to suppress error messages: logging.getLogger('yfinance').setLevel(logging.CRITICAL)")
|
||||
logger.setLevel(logging.CRITICAL)
|
||||
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
if threads:
|
||||
# With DEBUG, each thread generates a lot of log messages.
|
||||
# And with multi-threading, these messages will be interleaved, bad!
|
||||
# So disable multi-threading to make log readable.
|
||||
logger.debug('Disabling multithreading because DEBUG logging enabled')
|
||||
threads = False
|
||||
if progress:
|
||||
# Disable progress bar, interferes with display of log messages
|
||||
progress = False
|
||||
|
||||
if ignore_tz is None:
|
||||
# Set default value depending on interval
|
||||
if interval[1:] in ['m', 'h']:
|
||||
# Intraday
|
||||
ignore_tz = False
|
||||
else:
|
||||
ignore_tz = True
|
||||
|
||||
# create ticker list
|
||||
tickers = tickers if isinstance(
|
||||
@@ -81,7 +128,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True,
|
||||
for ticker in tickers:
|
||||
if utils.is_isin(ticker):
|
||||
isin = ticker
|
||||
ticker = utils.get_ticker_by_isin(ticker, proxy)
|
||||
ticker = utils.get_ticker_by_isin(ticker, proxy, session=session)
|
||||
shared._ISINS[ticker] = isin
|
||||
_tickers_.append(ticker)
|
||||
|
||||
@@ -95,6 +142,10 @@ def download(tickers, start=None, end=None, actions=False, threads=True,
|
||||
# reset shared._DFS
|
||||
shared._DFS = {}
|
||||
shared._ERRORS = {}
|
||||
shared._TRACEBACKS = {}
|
||||
|
||||
# Ensure data initialised with session.
|
||||
YfData(session=session)
|
||||
|
||||
# download using threads
|
||||
if threads:
|
||||
@@ -105,44 +156,71 @@ def download(tickers, start=None, end=None, actions=False, threads=True,
|
||||
_download_one_threaded(ticker, period=period, interval=interval,
|
||||
start=start, end=end, prepost=prepost,
|
||||
actions=actions, auto_adjust=auto_adjust,
|
||||
back_adjust=back_adjust,
|
||||
back_adjust=back_adjust, repair=repair, keepna=keepna,
|
||||
progress=(progress and i > 0), proxy=proxy,
|
||||
rounding=rounding, timeout=timeout)
|
||||
while len(shared._DFS) < len(tickers):
|
||||
_time.sleep(0.01)
|
||||
|
||||
# download synchronously
|
||||
else:
|
||||
for i, ticker in enumerate(tickers):
|
||||
data = _download_one(ticker, period=period, interval=interval,
|
||||
start=start, end=end, prepost=prepost,
|
||||
actions=actions, auto_adjust=auto_adjust,
|
||||
back_adjust=back_adjust, proxy=proxy,
|
||||
back_adjust=back_adjust, repair=repair, keepna=keepna,
|
||||
proxy=proxy,
|
||||
rounding=rounding, timeout=timeout)
|
||||
shared._DFS[ticker.upper()] = data
|
||||
if progress:
|
||||
shared._PROGRESS_BAR.animate()
|
||||
|
||||
|
||||
if progress:
|
||||
shared._PROGRESS_BAR.completed()
|
||||
|
||||
if shared._ERRORS and show_errors:
|
||||
print('\n%.f Failed download%s:' % (
|
||||
if shared._ERRORS:
|
||||
# Send errors to logging module
|
||||
logger = utils.get_yf_logger()
|
||||
logger.error('\n%.f Failed download%s:' % (
|
||||
len(shared._ERRORS), 's' if len(shared._ERRORS) > 1 else ''))
|
||||
# print(shared._ERRORS)
|
||||
print("\n".join(['- %s: %s' %
|
||||
v for v in list(shared._ERRORS.items())]))
|
||||
|
||||
# Log each distinct error once, with list of symbols affected
|
||||
errors = {}
|
||||
for ticker in shared._ERRORS:
|
||||
err = shared._ERRORS[ticker]
|
||||
err = err.replace(f'{ticker}', '%ticker%')
|
||||
if err not in errors:
|
||||
errors[err] = [ticker]
|
||||
else:
|
||||
errors[err].append(ticker)
|
||||
for err in errors.keys():
|
||||
logger.error(f'{errors[err]}: ' + err)
|
||||
|
||||
# Log each distinct traceback once, with list of symbols affected
|
||||
tbs = {}
|
||||
for ticker in shared._TRACEBACKS:
|
||||
tb = shared._TRACEBACKS[ticker]
|
||||
tb = tb.replace(f'{ticker}', '%ticker%')
|
||||
if tb not in tbs:
|
||||
tbs[tb] = [ticker]
|
||||
else:
|
||||
tbs[tb].append(ticker)
|
||||
for tb in tbs.keys():
|
||||
logger.debug(f'{tbs[tb]}: ' + tb)
|
||||
|
||||
if ignore_tz:
|
||||
for tkr in shared._DFS.keys():
|
||||
if (shared._DFS[tkr] is not None) and (shared._DFS[tkr].shape[0] > 0):
|
||||
shared._DFS[tkr].index = shared._DFS[tkr].index.tz_localize(None)
|
||||
|
||||
if len(tickers) == 1:
|
||||
ticker = tickers[0]
|
||||
return shared._DFS[shared._ISINS.get(ticker, ticker)]
|
||||
return shared._DFS[ticker]
|
||||
|
||||
try:
|
||||
data = _pd.concat(shared._DFS.values(), axis=1,
|
||||
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
|
||||
keys=shared._DFS.keys())
|
||||
except Exception:
|
||||
_realign_dfs()
|
||||
data = _pd.concat(shared._DFS.values(), axis=1,
|
||||
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
|
||||
keys=shared._DFS.keys())
|
||||
|
||||
# switch names back to isins if applicable
|
||||
@@ -180,28 +258,38 @@ def _realign_dfs():
|
||||
|
||||
@_multitasking.task
|
||||
def _download_one_threaded(ticker, start=None, end=None,
|
||||
auto_adjust=False, back_adjust=False,
|
||||
auto_adjust=False, back_adjust=False, repair=False,
|
||||
actions=False, progress=True, period="max",
|
||||
interval="1d", prepost=False, proxy=None,
|
||||
rounding=False, timeout=None):
|
||||
|
||||
data = _download_one(ticker, start, end, auto_adjust, back_adjust,
|
||||
keepna=False, rounding=False, timeout=10):
|
||||
data = _download_one(ticker, start, end, auto_adjust, back_adjust, repair,
|
||||
actions, period, interval, prepost, proxy, rounding,
|
||||
timeout)
|
||||
shared._DFS[ticker.upper()] = data
|
||||
keepna, timeout)
|
||||
if progress:
|
||||
shared._PROGRESS_BAR.animate()
|
||||
|
||||
|
||||
def _download_one(ticker, start=None, end=None,
|
||||
auto_adjust=False, back_adjust=False,
|
||||
auto_adjust=False, back_adjust=False, repair=False,
|
||||
actions=False, period="max", interval="1d",
|
||||
prepost=False, proxy=None, rounding=False,
|
||||
timeout=None):
|
||||
keepna=False, timeout=10):
|
||||
data = None
|
||||
try:
|
||||
data = Ticker(ticker).history(
|
||||
period=period, interval=interval,
|
||||
start=start, end=end, prepost=prepost,
|
||||
actions=actions, auto_adjust=auto_adjust,
|
||||
back_adjust=back_adjust, repair=repair, proxy=proxy,
|
||||
rounding=rounding, keepna=keepna, timeout=timeout,
|
||||
raise_errors=True
|
||||
)
|
||||
except Exception as e:
|
||||
# glob try/except needed as current thead implementation breaks if exception is raised.
|
||||
shared._DFS[ticker.upper()] = utils.empty_df()
|
||||
shared._ERRORS[ticker.upper()] = repr(e)
|
||||
shared._TRACEBACKS[ticker.upper()] = traceback.format_exc()
|
||||
else:
|
||||
shared._DFS[ticker.upper()] = data
|
||||
|
||||
return Ticker(ticker).history(period=period, interval=interval,
|
||||
start=start, end=end, prepost=prepost,
|
||||
actions=actions, auto_adjust=auto_adjust,
|
||||
back_adjust=back_adjust, proxy=proxy,
|
||||
rounding=rounding, many=True,
|
||||
timeout=timeout)
|
||||
return data
|
||||
|
||||
1
yfinance/run.stdout
Normal file
1
yfinance/run.stdout
Normal file
@@ -0,0 +1 @@
|
||||
python: can't open file '/home/gonzo/ReposExternal/yfinance.feature.cookie-and-crumb/yfinance/test-single.py': [Errno 2] No such file or directory
|
||||
0
yfinance/scrapers/__init__.py
Normal file
0
yfinance/scrapers/__init__.py
Normal file
50
yfinance/scrapers/analysis.py
Normal file
50
yfinance/scrapers/analysis.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import pandas as pd
|
||||
|
||||
from yfinance import utils
|
||||
from yfinance.data import YfData
|
||||
from yfinance.exceptions import YFNotImplementedError
|
||||
|
||||
|
||||
class Analysis:
    """
    Analyst data (earnings trend, price targets, revenue/EPS estimates) for one symbol.

    Every field is currently a placeholder: each property raises
    YFNotImplementedError until a Yahoo-API fetcher is written for it.
    """

    def __init__(self, data: YfData, symbol: str, proxy=None):
        self._data = data
        self._symbol = symbol
        self.proxy = proxy

        # None marks "not fetched"; the properties below raise until populated.
        self._earnings_trend = None
        self._analyst_trend_details = None
        self._analyst_price_target = None
        self._rev_est = None
        self._eps_est = None
        self._already_scraped = False

    def _unimplemented(self, value, name):
        # Shared guard for the placeholder properties.
        if value is None:
            raise YFNotImplementedError(name)
        return value

    @property
    def earnings_trend(self) -> pd.DataFrame:
        return self._unimplemented(self._earnings_trend, 'earnings_trend')

    @property
    def analyst_trend_details(self) -> pd.DataFrame:
        return self._unimplemented(self._analyst_trend_details, 'analyst_trend_details')

    @property
    def analyst_price_target(self) -> pd.DataFrame:
        return self._unimplemented(self._analyst_price_target, 'analyst_price_target')

    @property
    def rev_est(self) -> pd.DataFrame:
        return self._unimplemented(self._rev_est, 'rev_est')

    @property
    def eps_est(self) -> pd.DataFrame:
        return self._unimplemented(self._eps_est, 'eps_est')
|
||||
149
yfinance/scrapers/fundamentals.py
Normal file
149
yfinance/scrapers/fundamentals.py
Normal file
@@ -0,0 +1,149 @@
|
||||
import datetime
|
||||
import json
|
||||
|
||||
import pandas as pd
|
||||
|
||||
from yfinance import utils, const
|
||||
from yfinance.data import YfData
|
||||
from yfinance.exceptions import YFinanceException, YFNotImplementedError
|
||||
|
||||
|
||||
class Fundamentals:
    """
    Fundamentals (earnings, financial statements, share counts) for one symbol.

    Financial statements are served through the nested Financials accessor;
    earnings and shares are placeholders that raise YFNotImplementedError
    until fetchers are implemented.
    """

    def __init__(self, data: YfData, symbol: str, proxy=None):
        self._data = data
        self._symbol = symbol
        self.proxy = proxy

        # None marks "not fetched"; properties below raise until populated.
        self._earnings = None
        self._shares = None

        self._financials_data = None
        self._fin_data_quote = None
        self._basics_already_scraped = False
        # FIX: removed dead "self._financials = None" store - it was immediately
        # overwritten by the assignment below.
        self._financials = Financials(data, symbol)

    @property
    def financials(self) -> "Financials":
        """Accessor for income / balance-sheet / cash-flow time series."""
        return self._financials

    @property
    def earnings(self) -> dict:
        if self._earnings is None:
            raise YFNotImplementedError('earnings')
        return self._earnings

    @property
    def shares(self) -> pd.DataFrame:
        if self._shares is None:
            raise YFNotImplementedError('shares')
        return self._shares
|
||||
|
||||
|
||||
class Financials:
|
||||
    def __init__(self, data: YfData, symbol: str):
        # data: shared YfData singleton used for all HTTP requests.
        self._data = data
        self._symbol = symbol
        # Per-frequency caches keyed by 'yearly'/'quarterly', filled lazily
        # by the get_*_time_series() methods.
        self._income_time_series = {}
        self._balance_sheet_time_series = {}
        self._cash_flow_time_series = {}
|
||||
|
||||
def get_income_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
|
||||
res = self._income_time_series
|
||||
if freq not in res:
|
||||
res[freq] = self._fetch_time_series("income", freq, proxy)
|
||||
return res[freq]
|
||||
|
||||
def get_balance_sheet_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
|
||||
res = self._balance_sheet_time_series
|
||||
if freq not in res:
|
||||
res[freq] = self._fetch_time_series("balance-sheet", freq, proxy)
|
||||
return res[freq]
|
||||
|
||||
def get_cash_flow_time_series(self, freq="yearly", proxy=None) -> pd.DataFrame:
|
||||
res = self._cash_flow_time_series
|
||||
if freq not in res:
|
||||
res[freq] = self._fetch_time_series("cash-flow", freq, proxy)
|
||||
return res[freq]
|
||||
|
||||
@utils.log_indent_decorator
|
||||
def _fetch_time_series(self, name, timescale, proxy=None):
|
||||
# Fetching time series preferred over scraping 'QuoteSummaryStore',
|
||||
# because it matches what Yahoo shows. But for some tickers returns nothing,
|
||||
# despite 'QuoteSummaryStore' containing valid data.
|
||||
|
||||
allowed_names = ["income", "balance-sheet", "cash-flow"]
|
||||
allowed_timescales = ["yearly", "quarterly"]
|
||||
|
||||
if name not in allowed_names:
|
||||
raise ValueError(f"Illegal argument: name must be one of: {allowed_names}")
|
||||
if timescale not in allowed_timescales:
|
||||
raise ValueError(f"Illegal argument: timescale must be one of: {allowed_names}")
|
||||
|
||||
try:
|
||||
statement = self._create_financials_table(name, timescale, proxy)
|
||||
|
||||
if statement is not None:
|
||||
return statement
|
||||
except YFinanceException as e:
|
||||
utils.get_yf_logger().error(f"{self._symbol}: Failed to create {name} financials table for reason: {e}")
|
||||
return pd.DataFrame()
|
||||
|
||||
def _create_financials_table(self, name, timescale, proxy):
|
||||
if name == "income":
|
||||
# Yahoo stores the 'income' table internally under 'financials' key
|
||||
name = "financials"
|
||||
|
||||
keys = const.fundamentals_keys[name]
|
||||
|
||||
try:
|
||||
return self.get_financials_time_series(timescale, keys, proxy)
|
||||
except Exception as e:
|
||||
pass
|
||||
|
||||
def get_financials_time_series(self, timescale, keys: list, proxy=None) -> pd.DataFrame:
    """Fetch raw fundamentals timeseries for 'keys' and reshape into a table.

    Args:
        timescale: "yearly" or "quarterly" (translated to Yahoo's
            "annual"/"quarterly" field-name prefixes).
        keys: fundamentals field names, without the timescale prefix.
        proxy: optional proxy passed through to the HTTP layer.

    Returns:
        DataFrame indexed by field name, one column per report date,
        newest column first (matching the order on the Yahoo website).
    """
    timescale_translation = {"yearly": "annual", "quarterly": "quarterly"}
    timescale = timescale_translation[timescale]

    # Construct url:
    ts_url_base = f"https://query2.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{self._symbol}?symbol={self._symbol}"
    url = ts_url_base + "&type=" + ",".join([timescale + k for k in keys])
    # Yahoo returns maximum 4 years or 5 quarters, regardless of start_dt:
    start_dt = datetime.datetime(2016, 12, 31)
    end = pd.Timestamp.utcnow().ceil("D")
    url += f"&period1={int(start_dt.timestamp())}&period2={int(end.timestamp())}"

    # Fetch and reshape data
    json_str = self._data.cache_get(url=url, proxy=proxy).text
    json_data = json.loads(json_str)
    data_raw = json_data["timeseries"]["result"]
    for d in data_raw:
        del d["meta"]

    # Now reshape data into a table. First, get columns and index:
    timestamps = set()
    data_unpacked = {}
    for x in data_raw:
        for k in x.keys():
            if k == "timestamp":
                timestamps.update(x[k])
            else:
                data_unpacked[k] = x[k]
    timestamps = sorted(list(timestamps))
    dates = pd.to_datetime(timestamps, unit="s")
    df = pd.DataFrame(columns=dates, index=list(data_unpacked.keys()))
    for k, v in data_unpacked.items():
        # (Dead "if df is None" branch removed: df is unconditionally
        # constructed just above, so it can never be None here.)
        df.loc[k] = {pd.Timestamp(x["asOfDate"]): x["reportedValue"]["raw"] for x in v}

    # Strip the timescale prefix so row labels match 'keys':
    df.index = df.index.str.replace("^" + timescale, "", regex=True)

    # Reorder table to match order on Yahoo website
    df = df.reindex([k for k in keys if k in df.index])
    df = df[sorted(df.columns, reverse=True)]

    return df
|
||||
68
yfinance/scrapers/holders.py
Normal file
68
yfinance/scrapers/holders.py
Normal file
@@ -0,0 +1,68 @@
|
||||
import pandas as pd
|
||||
|
||||
from yfinance.data import YfData
|
||||
|
||||
|
||||
class Holders:
    """Scrapes the shareholder tables (major / institutional / mutual-fund)
    from the Yahoo Finance 'holders' page, lazily, caching the result."""

    _SCRAPE_URL_ = 'https://finance.yahoo.com/quote'

    def __init__(self, data: YfData, symbol: str, proxy=None):
        self._data = data
        self._symbol = symbol
        self.proxy = proxy

        # Lazily-populated caches; filled by _scrape() on first property access.
        self._major = None
        self._institutional = None
        self._mutualfund = None

    @property
    def major(self) -> pd.DataFrame:
        if self._major is None:
            self._scrape(self.proxy)
        return self._major

    @property
    def institutional(self) -> pd.DataFrame:
        if self._institutional is None:
            self._scrape(self.proxy)
        return self._institutional

    @property
    def mutualfund(self) -> pd.DataFrame:
        if self._mutualfund is None:
            self._scrape(self.proxy)
        return self._mutualfund

    def _scrape(self, proxy):
        """Fetch the holders page once and split its HTML tables.

        Assumes Yahoo renders up to three tables in the fixed order
        major, institutional, mutual-fund — TODO confirm page layout.
        """
        ticker_url = f"{self._SCRAPE_URL_}/{self._symbol}"
        try:
            resp = self._data.cache_get(ticker_url + '/holders', proxy=proxy)
            holders = pd.read_html(resp.text)
        except Exception:
            # Best-effort: any fetch/parse failure leaves all tables as None.
            holders = []

        # Assign whichever tables were present, preserving positional order.
        if len(holders) >= 3:
            self._major = holders[0]
            self._institutional = holders[1]
            self._mutualfund = holders[2]
        elif len(holders) >= 2:
            self._major = holders[0]
            self._institutional = holders[1]
        elif len(holders) >= 1:
            self._major = holders[0]

        # Normalise institutional table: parse dates, '% Out' string -> fraction.
        if self._institutional is not None:
            if 'Date Reported' in self._institutional:
                self._institutional['Date Reported'] = pd.to_datetime(
                    self._institutional['Date Reported'])
            if '% Out' in self._institutional:
                self._institutional['% Out'] = self._institutional[
                    '% Out'].str.replace('%', '').astype(float) / 100

        # Same normalisation for the mutual-fund table.
        if self._mutualfund is not None:
            if 'Date Reported' in self._mutualfund:
                self._mutualfund['Date Reported'] = pd.to_datetime(
                    self._mutualfund['Date Reported'])
            if '% Out' in self._mutualfund:
                self._mutualfund['% Out'] = self._mutualfund[
                    '% Out'].str.replace('%', '').astype(float) / 100
|
||||
697
yfinance/scrapers/quote.py
Normal file
697
yfinance/scrapers/quote.py
Normal file
@@ -0,0 +1,697 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import warnings
|
||||
from collections.abc import MutableMapping
|
||||
|
||||
import numpy as _np
|
||||
import pandas as pd
|
||||
|
||||
from yfinance import utils
|
||||
from yfinance.data import YfData
|
||||
from yfinance.exceptions import YFNotImplementedError
|
||||
|
||||
# Keys removed from info[] because their values are now served faster
# elsewhere (Ticker.fast_info / history metadata). InfoDictWrapper consults
# these sets to emit per-group migration warnings on access.
info_retired_keys_price = {"currentPrice", "dayHigh", "dayLow", "open", "previousClose", "volume", "volume24Hr"}
info_retired_keys_price.update({"regularMarket"+s for s in ["DayHigh", "DayLow", "Open", "PreviousClose", "Price", "Volume"]})
info_retired_keys_price.update({"fiftyTwoWeekLow", "fiftyTwoWeekHigh", "fiftyTwoWeekChange", "52WeekChange", "fiftyDayAverage", "twoHundredDayAverage"})
info_retired_keys_price.update({"averageDailyVolume10Day", "averageVolume10days", "averageVolume"})
info_retired_keys_exchange = {"currency", "exchange", "exchangeTimezoneName", "exchangeTimezoneShortName", "quoteType"}
info_retired_keys_marketCap = {"marketCap"}
info_retired_keys_symbol = {"symbol"}
# Union of all retired keys, for callers that only need membership tests.
info_retired_keys = info_retired_keys_price | info_retired_keys_exchange | info_retired_keys_marketCap | info_retired_keys_symbol


# Base endpoint for Yahoo's quoteSummary API (module suffixes appended per request).
_BASIC_URL_ = "https://query2.finance.yahoo.com/v10/finance/quoteSummary"
|
||||
|
||||
|
||||
class InfoDictWrapper(MutableMapping):
    """ Simple wrapper around info dict, intercepting 'gets' to
    print how-to-migrate messages for specific keys. Requires
    override dict API"""

    def __init__(self, info):
        self.info = info

    def keys(self):
        return self.info.keys()

    def __str__(self):
        return str(self.info)

    def __repr__(self):
        return repr(self.info)

    def __contains__(self, k):
        return k in self.info.keys()

    def __getitem__(self, k):
        # Retired keys: warn with migration advice and yield None.
        if k in info_retired_keys_price:
            warnings.warn(f"Price data removed from info (key='{k}'). Use Ticker.fast_info or history() instead", DeprecationWarning)
            return None
        if k in info_retired_keys_exchange:
            warnings.warn(f"Exchange data removed from info (key='{k}'). Use Ticker.fast_info or Ticker.get_history_metadata() instead", DeprecationWarning)
            return None
        if k in info_retired_keys_marketCap:
            warnings.warn(f"Market cap removed from info (key='{k}'). Use Ticker.fast_info instead", DeprecationWarning)
            return None
        if k in info_retired_keys_symbol:
            warnings.warn(f"Symbol removed from info (key='{k}'). You know this already", DeprecationWarning)
            return None
        # Everything else delegates straight to the wrapped dict.
        return self.info[self._keytransform(k)]

    def __setitem__(self, k, value):
        self.info[self._keytransform(k)] = value

    def __delitem__(self, k):
        del self.info[self._keytransform(k)]

    def __iter__(self):
        return iter(self.info)

    def __len__(self):
        return len(self.info)

    def _keytransform(self, k):
        # Identity hook, kept for MutableMapping-style key normalisation.
        return k
|
||||
|
||||
|
||||
class FastInfo:
    # Contain small subset of info[] items that can be fetched faster elsewhere.
    # Imitates a dict.
    # Every value is derived lazily from cached price history / exchange
    # metadata on first access, then memoised in a per-property attribute.
    def __init__(self, tickerBaseObject, proxy=None):
        self._tkr = tickerBaseObject
        self.proxy = proxy

        # Cached price-history frames and exchange metadata:
        self._prices_1y = None
        self._prices_1wk_1h_prepost = None
        self._prices_1wk_1h_reg = None
        self._md = None

        # Per-property memo slots, filled lazily:
        self._currency = None
        self._quote_type = None
        self._exchange = None
        self._timezone = None

        self._shares = None
        self._mcap = None

        self._open = None
        self._day_high = None
        self._day_low = None
        self._last_price = None
        self._last_volume = None

        self._prev_close = None

        self._reg_prev_close = None

        self._50d_day_average = None
        self._200d_day_average = None
        self._year_high = None
        self._year_low = None
        self._year_change = None

        self._10d_avg_vol = None
        self._3mo_avg_vol = None

        # attrs = utils.attributes(self)
        # self.keys = attrs.keys()
        # utils.attributes is calling each method, bad! Have to hardcode
        _properties = ["currency", "quote_type", "exchange", "timezone"]
        _properties += ["shares", "market_cap"]
        _properties += ["last_price", "previous_close", "open", "day_high", "day_low"]
        _properties += ["regular_market_previous_close"]
        _properties += ["last_volume"]
        _properties += ["fifty_day_average", "two_hundred_day_average", "ten_day_average_volume", "three_month_average_volume"]
        _properties += ["year_high", "year_low", "year_change"]

        # Because released before fixing key case, need to officially support
        # camel-case but also secretly support snake-case
        base_keys = [k for k in _properties if '_' not in k]

        sc_keys = [k for k in _properties if '_' in k]

        # Bidirectional snake_case <-> camelCase key maps:
        self._sc_to_cc_key = {k: utils.snake_case_2_camelCase(k) for k in sc_keys}
        self._cc_to_sc_key = {v: k for k, v in self._sc_to_cc_key.items()}

        # Public keys are camelCase; _keys additionally accepts snake_case.
        self._public_keys = sorted(base_keys + list(self._sc_to_cc_key.values()))
        self._keys = sorted(self._public_keys + sc_keys)

    # dict imitation:
    def keys(self):
        return self._public_keys

    def items(self):
        return [(k, self[k]) for k in self._public_keys]

    def values(self):
        return [self[k] for k in self._public_keys]

    def get(self, key, default=None):
        if key in self.keys():
            if key in self._cc_to_sc_key:
                key = self._cc_to_sc_key[key]
            return self[key]
        return default

    def __getitem__(self, k):
        if not isinstance(k, str):
            raise KeyError(f"key must be a string")
        if k not in self._keys:
            raise KeyError(f"'{k}' not valid key. Examine 'FastInfo.keys()'")
        if k in self._cc_to_sc_key:
            k = self._cc_to_sc_key[k]
        # Keys map 1:1 to properties, so item access is attribute access.
        return getattr(self, k)

    def __contains__(self, k):
        return k in self.keys()

    def __iter__(self):
        return iter(self.keys())

    def __str__(self):
        return "lazy-loading dict with keys = " + str(self.keys())

    def __repr__(self):
        return self.__str__()

    def toJSON(self, indent=4):
        # NOTE(review): 'd' is unused — duplicate of the dict built below.
        d = {k: self[k] for k in self.keys()}
        return json.dumps({k: self[k] for k in self.keys()}, indent=indent)

    def _get_1y_prices(self, fullDaysOnly=False):
        """Fetch (once) and cache ~1y of daily prices plus exchange metadata;
        return the last-365-days slice, optionally excluding today."""
        if self._prices_1y is None:
            # Temporarily disable error printing
            logging.disable(logging.CRITICAL)
            self._prices_1y = self._tkr.history(period="380d", auto_adjust=False, keepna=True, proxy=self.proxy)
            logging.disable(logging.NOTSET)
            self._md = self._tkr.get_history_metadata(proxy=self.proxy)
            try:
                ctp = self._md["currentTradingPeriod"]
                self._today_open = pd.to_datetime(ctp["regular"]["start"], unit='s', utc=True).tz_convert(self.timezone)
                self._today_close = pd.to_datetime(ctp["regular"]["end"], unit='s', utc=True).tz_convert(self.timezone)
                self._today_midnight = self._today_close.ceil("D")
            except Exception:
                # NOTE(review): attrs are reset then the exception re-raised,
                # so the None fallbacks are never observed — confirm intended.
                self._today_open = None
                self._today_close = None
                self._today_midnight = None
                raise

        if self._prices_1y.empty:
            return self._prices_1y

        dnow = pd.Timestamp.utcnow().tz_convert(self.timezone).date()
        d1 = dnow
        d0 = (d1 + datetime.timedelta(days=1)) - utils._interval_to_timedelta("1y")
        if fullDaysOnly and self._exchange_open_now():
            # Exclude today
            d1 -= utils._interval_to_timedelta("1d")
        return self._prices_1y.loc[str(d0):str(d1)]

    def _get_1wk_1h_prepost_prices(self):
        # 1 week of hourly bars including pre/post-market, cached.
        if self._prices_1wk_1h_prepost is None:
            # Temporarily disable error printing
            logging.disable(logging.CRITICAL)
            self._prices_1wk_1h_prepost = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=True, proxy=self.proxy)
            logging.disable(logging.NOTSET)
        return self._prices_1wk_1h_prepost

    def _get_1wk_1h_reg_prices(self):
        # 1 week of hourly bars, regular trading hours only, cached.
        if self._prices_1wk_1h_reg is None:
            # Temporarily disable error printing
            logging.disable(logging.CRITICAL)
            self._prices_1wk_1h_reg = self._tkr.history(period="1wk", interval="1h", auto_adjust=False, prepost=False, proxy=self.proxy)
            logging.disable(logging.NOTSET)
        return self._prices_1wk_1h_reg

    def _get_exchange_metadata(self):
        if self._md is not None:
            return self._md

        # _get_1y_prices() populates self._md as a side effect.
        self._get_1y_prices()
        self._md = self._tkr.get_history_metadata(proxy=self.proxy)
        return self._md

    def _exchange_open_now(self):
        """Heuristic: treat the exchange as open until ~20 minutes past the
        day after the last fetched bar."""
        t = pd.Timestamp.utcnow()
        self._get_exchange_metadata()

        # if self._today_open is None and self._today_close is None:
        #     r = False
        # else:
        #     r = self._today_open <= t and t < self._today_close

        # if self._today_midnight is None:
        #     r = False
        # elif self._today_midnight.date() > t.tz_convert(self.timezone).date():
        #     r = False
        # else:
        #     r = t < self._today_midnight

        last_day_cutoff = self._get_1y_prices().index[-1] + datetime.timedelta(days=1)
        last_day_cutoff += datetime.timedelta(minutes=20)
        r = t < last_day_cutoff

        # print("_exchange_open_now() returning", r)
        return r

    @property
    def currency(self):
        if self._currency is not None:
            return self._currency

        if self._tkr._history_metadata is None:
            self._get_1y_prices()
        md = self._tkr.get_history_metadata(proxy=self.proxy)
        self._currency = md["currency"]
        return self._currency

    @property
    def quote_type(self):
        if self._quote_type is not None:
            return self._quote_type

        if self._tkr._history_metadata is None:
            self._get_1y_prices()
        md = self._tkr.get_history_metadata(proxy=self.proxy)
        self._quote_type = md["instrumentType"]
        return self._quote_type

    @property
    def exchange(self):
        if self._exchange is not None:
            return self._exchange

        self._exchange = self._get_exchange_metadata()["exchangeName"]
        return self._exchange

    @property
    def timezone(self):
        if self._timezone is not None:
            return self._timezone

        self._timezone = self._get_exchange_metadata()["exchangeTimezoneName"]
        return self._timezone

    @property
    def shares(self):
        if self._shares is not None:
            return self._shares

        # 548 days = ~18 months of share-count history.
        shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date()-pd.Timedelta(days=548), proxy=self.proxy)
        # if shares is None:
        #     # Requesting 18 months failed, so fallback to shares which should include last year
        #     shares = self._tkr.get_shares()
        if shares is not None:
            if isinstance(shares, pd.DataFrame):
                shares = shares[shares.columns[0]]
            # Most recent reported share count:
            self._shares = int(shares.iloc[-1])
        return self._shares

    @property
    def last_price(self):
        if self._last_price is not None:
            return self._last_price
        prices = self._get_1y_prices()
        if prices.empty:
            # No history -> fall back to metadata's regularMarketPrice.
            md = self._get_exchange_metadata()
            if "regularMarketPrice" in md:
                self._last_price = md["regularMarketPrice"]
        else:
            self._last_price = float(prices["Close"].iloc[-1])
            if _np.isnan(self._last_price):
                md = self._get_exchange_metadata()
                if "regularMarketPrice" in md:
                    self._last_price = md["regularMarketPrice"]
        return self._last_price

    @property
    def previous_close(self):
        if self._prev_close is not None:
            return self._prev_close
        prices = self._get_1wk_1h_prepost_prices()
        fail = False
        if prices.empty:
            fail = True
        else:
            # Collapse hourly bars to one close per calendar day:
            prices = prices[["Close"]].groupby(prices.index.date).last()
            if prices.shape[0] < 2:
                # Very few symbols have previousClose despite no
                # no trading data e.g. 'QCSTIX'.
                fail = True
            else:
                self._prev_close = float(prices["Close"].iloc[-2])
        if fail:
            # Fallback to original info[] if available.
            self._tkr.info  # trigger fetch
            k = "previousClose"
            if self._tkr._quote._retired_info is not None and k in self._tkr._quote._retired_info:
                self._prev_close = self._tkr._quote._retired_info[k]
        return self._prev_close

    @property
    def regular_market_previous_close(self):
        if self._reg_prev_close is not None:
            return self._reg_prev_close
        prices = self._get_1y_prices()
        if prices.shape[0] == 1:
            # Tiny % of tickers don't return daily history before last trading day,
            # so backup option is hourly history:
            prices = self._get_1wk_1h_reg_prices()
            prices = prices[["Close"]].groupby(prices.index.date).last()
        if prices.shape[0] < 2:
            # Very few symbols have regularMarketPreviousClose despite no
            # no trading data. E.g. 'QCSTIX'.
            # So fallback to original info[] if available.
            self._tkr.info  # trigger fetch
            k = "regularMarketPreviousClose"
            if self._tkr._quote._retired_info is not None and k in self._tkr._quote._retired_info:
                self._reg_prev_close = self._tkr._quote._retired_info[k]
        else:
            self._reg_prev_close = float(prices["Close"].iloc[-2])
        return self._reg_prev_close

    @property
    def open(self):
        if self._open is not None:
            return self._open
        prices = self._get_1y_prices()
        if prices.empty:
            self._open = None
        else:
            self._open = float(prices["Open"].iloc[-1])
            if _np.isnan(self._open):
                self._open = None
        return self._open

    @property
    def day_high(self):
        if self._day_high is not None:
            return self._day_high
        prices = self._get_1y_prices()
        if prices.empty:
            self._day_high = None
        else:
            self._day_high = float(prices["High"].iloc[-1])
            if _np.isnan(self._day_high):
                self._day_high = None
        return self._day_high

    @property
    def day_low(self):
        if self._day_low is not None:
            return self._day_low
        prices = self._get_1y_prices()
        if prices.empty:
            self._day_low = None
        else:
            self._day_low = float(prices["Low"].iloc[-1])
            if _np.isnan(self._day_low):
                self._day_low = None
        return self._day_low

    @property
    def last_volume(self):
        if self._last_volume is not None:
            return self._last_volume
        prices = self._get_1y_prices()
        self._last_volume = None if prices.empty else int(prices["Volume"].iloc[-1])
        return self._last_volume

    @property
    def fifty_day_average(self):
        if self._50d_day_average is not None:
            return self._50d_day_average

        prices = self._get_1y_prices(fullDaysOnly=True)
        if prices.empty:
            self._50d_day_average = None
        else:
            # Mean close over the last (up to) 50 full trading days.
            n = prices.shape[0]
            a = n-50
            b = n
            if a < 0:
                a = 0
            self._50d_day_average = float(prices["Close"].iloc[a:b].mean())

        return self._50d_day_average

    @property
    def two_hundred_day_average(self):
        if self._200d_day_average is not None:
            return self._200d_day_average

        prices = self._get_1y_prices(fullDaysOnly=True)
        if prices.empty:
            self._200d_day_average = None
        else:
            # Mean close over the last (up to) 200 full trading days.
            n = prices.shape[0]
            a = n-200
            b = n
            if a < 0:
                a = 0

            self._200d_day_average = float(prices["Close"].iloc[a:b].mean())

        return self._200d_day_average

    @property
    def ten_day_average_volume(self):
        if self._10d_avg_vol is not None:
            return self._10d_avg_vol

        prices = self._get_1y_prices(fullDaysOnly=True)
        if prices.empty:
            self._10d_avg_vol = None
        else:
            # Mean volume over the last (up to) 10 full trading days.
            n = prices.shape[0]
            a = n-10
            b = n
            if a < 0:
                a = 0
            self._10d_avg_vol = int(prices["Volume"].iloc[a:b].mean())

        return self._10d_avg_vol

    @property
    def three_month_average_volume(self):
        if self._3mo_avg_vol is not None:
            return self._3mo_avg_vol

        prices = self._get_1y_prices(fullDaysOnly=True)
        if prices.empty:
            self._3mo_avg_vol = None
        else:
            # Mean volume over the trailing 3 months of full trading days.
            dt1 = prices.index[-1]
            dt0 = dt1 - utils._interval_to_timedelta("3mo") + utils._interval_to_timedelta("1d")
            self._3mo_avg_vol = int(prices.loc[dt0:dt1, "Volume"].mean())

        return self._3mo_avg_vol

    @property
    def year_high(self):
        if self._year_high is not None:
            return self._year_high

        prices = self._get_1y_prices(fullDaysOnly=True)
        if prices.empty:
            # Fall back to including today's partial day if nothing else.
            prices = self._get_1y_prices(fullDaysOnly=False)
        self._year_high = float(prices["High"].max())
        return self._year_high

    @property
    def year_low(self):
        if self._year_low is not None:
            return self._year_low

        prices = self._get_1y_prices(fullDaysOnly=True)
        if prices.empty:
            # Fall back to including today's partial day if nothing else.
            prices = self._get_1y_prices(fullDaysOnly=False)
        self._year_low = float(prices["Low"].min())
        return self._year_low

    @property
    def year_change(self):
        if self._year_change is not None:
            return self._year_change

        prices = self._get_1y_prices(fullDaysOnly=True)
        if prices.shape[0] >= 2:
            # Fractional change from first to last close in the window.
            self._year_change = (prices["Close"].iloc[-1] - prices["Close"].iloc[0]) / prices["Close"].iloc[0]
            self._year_change = float(self._year_change)
        return self._year_change

    @property
    def market_cap(self):
        if self._mcap is not None:
            return self._mcap

        try:
            shares = self.shares
        except Exception as e:
            # Known benign failure modes: treat as "share count unavailable".
            if "Cannot retrieve share count" in str(e):
                shares = None
            elif "failed to decrypt Yahoo" in str(e):
                shares = None
            else:
                raise

        if shares is None:
            # Very few symbols have marketCap despite no share count.
            # E.g. 'BTC-USD'
            # So fallback to original info[] if available.
            self._tkr.info
            k = "marketCap"
            if self._tkr._quote._retired_info is not None and k in self._tkr._quote._retired_info:
                self._mcap = self._tkr._quote._retired_info[k]
        else:
            self._mcap = float(shares * self.last_price)
        return self._mcap
|
||||
|
||||
|
||||
class Quote:
    """Fetches and caches the quoteSummary 'info' dict for one symbol."""

    def __init__(self, data: YfData, symbol: str, proxy=None):
        self._data = data
        self._symbol = symbol
        self.proxy = proxy

        # Lazily-populated caches:
        self._info = None
        self._retired_info = None
        self._sustainability = None
        self._recommendations = None
        self._calendar = None

        # One-shot guards so each fetch happens at most once:
        self._already_scraped = False
        self._already_fetched = False
        self._already_fetched_complementary = False

    @property
    def info(self) -> dict:
        if self._info is None:
            self._fetch(self.proxy)
            self._fetch_complementary(self.proxy)

        return self._info

    @property
    def sustainability(self) -> pd.DataFrame:
        # Not reimplemented since the decrypt-based scrape broke.
        if self._sustainability is None:
            raise YFNotImplementedError('sustainability')
        return self._sustainability

    @property
    def recommendations(self) -> pd.DataFrame:
        # Not reimplemented since the decrypt-based scrape broke.
        if self._recommendations is None:
            raise YFNotImplementedError('recommendations')
        return self._recommendations

    @property
    def calendar(self) -> pd.DataFrame:
        # Not reimplemented since the decrypt-based scrape broke.
        if self._calendar is None:
            raise YFNotImplementedError('calendar')
        return self._calendar

    def _fetch(self, proxy):
        """Fetch quoteSummary modules and flatten them into self._info."""
        if self._already_fetched:
            return
        self._already_fetched = True
        modules = ['financialData', 'quoteType', 'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
        modules = ','.join(modules)
        params_dict = {"modules": modules, "ssl": "true"}
        result = self._data.get_raw_json(
            _BASIC_URL_ + f"/{self._symbol}", params=params_dict, proxy=proxy
        )
        result["quoteSummary"]["result"][0]["symbol"] = self._symbol
        query1_info = next(
            (info for info in result.get("quoteSummary", {}).get("result", []) if info["symbol"] == self._symbol),
            None,
        )
        # Most keys that appear in multiple dicts have same value. Except 'maxAge' because
        # Yahoo not consistent with days vs seconds. Fix it here:
        for k in query1_info:
            if "maxAge" in query1_info[k] and query1_info[k]["maxAge"] == 1:
                query1_info[k]["maxAge"] = 86400
        # Flatten: merge every module sub-dict into one flat key->value dict,
        # dropping falsy values.
        query1_info = {
            k1: v1
            for k, v in query1_info.items()
            if isinstance(v, dict)
            for k1, v1 in v.items()
            if v1
        }
        # recursively format but only because of 'companyOfficers'

        def _format(k, v):
            # Unwrap Yahoo's {"raw": ..., "fmt": ...} value objects; times
            # keep the formatted string, everything else the raw value.
            if isinstance(v, dict) and "raw" in v and "fmt" in v:
                v2 = v["fmt"] if k in {"regularMarketTime", "postMarketTime"} else v["raw"]
            elif isinstance(v, list):
                v2 = [_format(None, x) for x in v]
            elif isinstance(v, dict):
                v2 = {k: _format(k, x) for k, x in v.items()}
            elif isinstance(v, str):
                v2 = v.replace("\xa0", " ")
            else:
                v2 = v
            return v2
        for k, v in query1_info.items():
            query1_info[k] = _format(k, v)
        self._info = query1_info

    def _fetch_complementary(self, proxy):
        """Fetch extra keys not in quoteSummary (currently trailingPegRatio)
        via the fundamentals-timeseries endpoint, merging into self._info."""
        if self._already_fetched_complementary:
            return
        self._already_fetched_complementary = True

        # self._scrape(proxy)  # decrypt broken
        self._fetch(proxy)
        if self._info is None:
            return

        # Complementary key-statistics. For now just want 'trailing PEG ratio'
        keys = {"trailingPegRatio"}
        if keys:
            # Simplified the original scrape code for key-statistics. Very expensive for fetching
            # just one value, best if scraping most/all:
            #
            # p = _re.compile(r'root\.App\.main = (.*);')
            # url = 'https://finance.yahoo.com/quote/{}/key-statistics?p={}'.format(self._ticker.ticker, self._ticker.ticker)
            # try:
            #     r = session.get(url, headers=utils.user_agent_headers)
            #     data = _json.loads(p.findall(r.text)[0])
            #     key_stats = data['context']['dispatcher']['stores']['QuoteTimeSeriesStore']["timeSeries"]
            #     for k in keys:
            #         if k not in key_stats or len(key_stats[k])==0:
            #             # Yahoo website prints N/A, indicates Yahoo lacks necessary data to calculate
            #             v = None
            #         else:
            #             # Select most recent (last) raw value in list:
            #             v = key_stats[k][-1]["reportedValue"]["raw"]
            #         self._info[k] = v
            # except Exception:
            #     raise
            #     pass
            #
            # For just one/few variable is faster to query directly:
            url = f"https://query1.finance.yahoo.com/ws/fundamentals-timeseries/v1/finance/timeseries/{self._symbol}?symbol={self._symbol}"
            for k in keys:
                url += "&type=" + k
            # Request 6 months of data
            start = pd.Timestamp.utcnow().floor("D") - datetime.timedelta(days=365 // 2)
            start = int(start.timestamp())
            end = pd.Timestamp.utcnow().ceil("D")
            end = int(end.timestamp())
            url += f"&period1={start}&period2={end}"

            json_str = self._data.cache_get(url=url, proxy=proxy).text
            json_data = json.loads(json_str)
            try:
                # NOTE(review): 'k' below is the leaked loop variable from the
                # url-building loop above — only correct while 'keys' has
                # exactly one element; confirm before adding more keys.
                key_stats = json_data["timeseries"]["result"][0]
                if k not in key_stats:
                    # Yahoo website prints N/A, indicates Yahoo lacks necessary data to calculate
                    v = None
                else:
                    # Select most recent (last) raw value in list:
                    v = key_stats[k][-1]["reportedValue"]["raw"]
            except Exception:
                v = None
            self._info[k] = v
|
||||
8
yfinance/scrapers/yahoo-keys.txt
Normal file
8
yfinance/scrapers/yahoo-keys.txt
Normal file
@@ -0,0 +1,8 @@
|
||||
daf93e37cbf219cd4c1f3f74ec4551265ec5565b99e8c9322dccd6872941cf13c818cbb88cba6f530e643b4e2329b17ec7161f4502ce6a02bb0dbbe5fc0d0474
|
||||
ad4d90b3c9f2e1d156ef98eadfa0ff93e4042f6960e54aa2a13f06f528e6b50ba4265a26a1fd5b9cd3db0d268a9c34e1d080592424309429a58bce4adc893c87
|
||||
e9a8ab8e5620b712ebc2fb4f33d5c8b9c80c0d07e8c371911c785cf674789f1747d76a909510158a7b7419e86857f2d7abbd777813ff64840e4cbc514d12bcae
|
||||
6ae2523aeafa283dad746556540145bf603f44edbf37ad404d3766a8420bb5eb1d3738f52a227b88283cca9cae44060d5f0bba84b6a495082589f5fe7acbdc9e
|
||||
3365117c2a368ffa5df7313a4a84988f73926a86358e8eea9497c5ff799ce27d104b68e5f2fbffa6f8f92c1fef41765a7066fa6bcf050810a9c4c7872fd3ebf0
|
||||
15d8f57919857d5a5358d2082c7ef0f1129cfacd2a6480333dcfb954b7bb67d820abefebfdb0eaa6ef18a1c57f617b67d7e7b0ec040403b889630ae5db5a4dbb
|
||||
db9630d707a7d0953ac795cd8db1ca9ca6c9d8239197cdfda24b4e0ec9c37eaec4db82dab68b8f606ab7b5b4af3e65dab50606f8cf508269ec927e6ee605fb78
|
||||
3c895fb5ddcc37d20d3073ed74ee3efad59bcb147c8e80fd279f83701b74b092d503dcd399604c6d8be8f3013429d3c2c76ed5b31b80c9df92d5eab6d3339fce
|
||||
@@ -22,4 +22,5 @@
|
||||
_DFS = {}
|
||||
_PROGRESS_BAR = None
|
||||
_ERRORS = {}
|
||||
_TRACEBACKS = {}
|
||||
_ISINS = {}
|
||||
|
||||
@@ -21,50 +21,41 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
# import time as _time
|
||||
import datetime as _datetime
|
||||
import requests as _requests
|
||||
import pandas as _pd
|
||||
# import numpy as _np
|
||||
|
||||
# import json as _json
|
||||
# import re as _re
|
||||
from collections import namedtuple as _namedtuple
|
||||
|
||||
from . import utils
|
||||
import pandas as _pd
|
||||
|
||||
from .base import TickerBase
|
||||
|
||||
|
||||
class Ticker(TickerBase):
|
||||
def __init__(self, ticker, session=None):
|
||||
super(Ticker, self).__init__(ticker, session=session)
|
||||
self._expirations = {}
|
||||
self._underlying = {}
|
||||
|
||||
def __repr__(self):
|
||||
return 'yfinance.Ticker object <%s>' % self.ticker
|
||||
return f'yfinance.Ticker object <{self.ticker}>'
|
||||
|
||||
def _download_options(self, date=None, proxy=None):
|
||||
if date is None:
|
||||
url = "{}/v7/finance/options/{}".format(
|
||||
self._base_url, self.ticker)
|
||||
url = f"{self._base_url}/v7/finance/options/{self.ticker}"
|
||||
else:
|
||||
url = "{}/v7/finance/options/{}?date={}".format(
|
||||
self._base_url, self.ticker, date)
|
||||
url = f"{self._base_url}/v7/finance/options/{self.ticker}?date={date}"
|
||||
|
||||
# setup proxy in requests format
|
||||
if proxy is not None:
|
||||
if isinstance(proxy, dict) and "https" in proxy:
|
||||
proxy = proxy["https"]
|
||||
proxy = {"https": proxy}
|
||||
|
||||
r = _requests.get(
|
||||
url=url,
|
||||
proxies=proxy,
|
||||
headers=utils.user_agent_headers
|
||||
).json()
|
||||
r = self._data.get(url=url, proxy=proxy).json()
|
||||
if len(r.get('optionChain', {}).get('result', [])) > 0:
|
||||
for exp in r['optionChain']['result'][0]['expirationDates']:
|
||||
self._expirations[_datetime.datetime.utcfromtimestamp(
|
||||
exp).strftime('%Y-%m-%d')] = exp
|
||||
|
||||
self._underlying = r['optionChain']['result'][0].get('quote', {})
|
||||
|
||||
opt = r['optionChain']['result'][0].get('options', [])
|
||||
return opt[0] if len(opt) > 0 else []
|
||||
|
||||
return dict(**opt[0],underlying=self._underlying) if len(opt) > 0 else {}
|
||||
return {}
|
||||
|
||||
def _options2df(self, opt, tz=None):
|
||||
data = _pd.DataFrame(opt).reindex(columns=[
|
||||
@@ -97,15 +88,15 @@ class Ticker(TickerBase):
|
||||
self._download_options()
|
||||
if date not in self._expirations:
|
||||
raise ValueError(
|
||||
"Expiration `%s` cannot be found. "
|
||||
"Available expiration are: [%s]" % (
|
||||
date, ', '.join(self._expirations)))
|
||||
f"Expiration `{date}` cannot be found. "
|
||||
f"Available expirations are: [{', '.join(self._expirations)}]")
|
||||
date = self._expirations[date]
|
||||
options = self._download_options(date, proxy=proxy)
|
||||
|
||||
return _namedtuple('Options', ['calls', 'puts'])(**{
|
||||
return _namedtuple('Options', ['calls', 'puts', 'underlying'])(**{
|
||||
"calls": self._options2df(options['calls'], tz=tz),
|
||||
"puts": self._options2df(options['puts'], tz=tz)
|
||||
"puts": self._options2df(options['puts'], tz=tz),
|
||||
"underlying": options['underlying']
|
||||
})
|
||||
|
||||
# ------------------------
|
||||
@@ -115,39 +106,47 @@ class Ticker(TickerBase):
|
||||
return self.get_isin()
|
||||
|
||||
@property
|
||||
def major_holders(self):
|
||||
def major_holders(self) -> _pd.DataFrame:
|
||||
return self.get_major_holders()
|
||||
|
||||
@property
|
||||
def institutional_holders(self):
|
||||
def institutional_holders(self) -> _pd.DataFrame:
|
||||
return self.get_institutional_holders()
|
||||
|
||||
@property
|
||||
def mutualfund_holders(self):
|
||||
def mutualfund_holders(self) -> _pd.DataFrame:
|
||||
return self.get_mutualfund_holders()
|
||||
|
||||
@property
|
||||
def dividends(self):
|
||||
def dividends(self) -> _pd.Series:
|
||||
return self.get_dividends()
|
||||
|
||||
@property
|
||||
def splits(self):
|
||||
def capital_gains(self):
|
||||
return self.get_capital_gains()
|
||||
|
||||
@property
|
||||
def splits(self) -> _pd.Series:
|
||||
return self.get_splits()
|
||||
|
||||
@property
|
||||
def actions(self):
|
||||
def actions(self) -> _pd.DataFrame:
|
||||
return self.get_actions()
|
||||
|
||||
@property
|
||||
def shares(self):
|
||||
def shares(self) -> _pd.DataFrame :
|
||||
return self.get_shares()
|
||||
|
||||
@property
|
||||
def info(self):
|
||||
def info(self) -> dict:
|
||||
return self.get_info()
|
||||
|
||||
@property
|
||||
def calendar(self):
|
||||
def fast_info(self):
|
||||
return self.get_fast_info()
|
||||
|
||||
@property
|
||||
def calendar(self) -> _pd.DataFrame:
|
||||
return self.get_calendar()
|
||||
|
||||
@property
|
||||
@@ -155,51 +154,87 @@ class Ticker(TickerBase):
|
||||
return self.get_recommendations()
|
||||
|
||||
@property
|
||||
def earnings(self):
|
||||
def earnings(self) -> _pd.DataFrame:
|
||||
return self.get_earnings()
|
||||
|
||||
@property
|
||||
def quarterly_earnings(self):
|
||||
def quarterly_earnings(self) -> _pd.DataFrame:
|
||||
return self.get_earnings(freq='quarterly')
|
||||
|
||||
@property
|
||||
def financials(self):
|
||||
return self.get_financials()
|
||||
def income_stmt(self) -> _pd.DataFrame:
|
||||
return self.get_income_stmt(pretty=True)
|
||||
|
||||
@property
|
||||
def quarterly_financials(self):
|
||||
return self.get_financials(freq='quarterly')
|
||||
def quarterly_income_stmt(self) -> _pd.DataFrame:
|
||||
return self.get_income_stmt(pretty=True, freq='quarterly')
|
||||
|
||||
@property
|
||||
def balance_sheet(self):
|
||||
return self.get_balancesheet()
|
||||
def incomestmt(self) -> _pd.DataFrame:
|
||||
return self.income_stmt
|
||||
|
||||
@property
|
||||
def quarterly_balance_sheet(self):
|
||||
return self.get_balancesheet(freq='quarterly')
|
||||
def quarterly_incomestmt(self) -> _pd.DataFrame:
|
||||
return self.quarterly_income_stmt
|
||||
|
||||
@property
|
||||
def balancesheet(self):
|
||||
return self.get_balancesheet()
|
||||
def financials(self) -> _pd.DataFrame:
|
||||
return self.income_stmt
|
||||
|
||||
@property
|
||||
def quarterly_balancesheet(self):
|
||||
return self.get_balancesheet(freq='quarterly')
|
||||
def quarterly_financials(self) -> _pd.DataFrame:
|
||||
return self.quarterly_income_stmt
|
||||
|
||||
@property
|
||||
def cashflow(self):
|
||||
return self.get_cashflow()
|
||||
def balance_sheet(self) -> _pd.DataFrame:
|
||||
return self.get_balance_sheet(pretty=True)
|
||||
|
||||
@property
|
||||
def quarterly_cashflow(self):
|
||||
return self.get_cashflow(freq='quarterly')
|
||||
def quarterly_balance_sheet(self) -> _pd.DataFrame:
|
||||
return self.get_balance_sheet(pretty=True, freq='quarterly')
|
||||
|
||||
@property
|
||||
def sustainability(self):
|
||||
def balancesheet(self) -> _pd.DataFrame:
|
||||
return self.balance_sheet
|
||||
|
||||
@property
|
||||
def quarterly_balancesheet(self) -> _pd.DataFrame:
|
||||
return self.quarterly_balance_sheet
|
||||
|
||||
@property
|
||||
def cash_flow(self) -> _pd.DataFrame:
|
||||
return self.get_cash_flow(pretty=True, freq="yearly")
|
||||
|
||||
@property
|
||||
def quarterly_cash_flow(self) -> _pd.DataFrame:
|
||||
return self.get_cash_flow(pretty=True, freq='quarterly')
|
||||
|
||||
@property
|
||||
def cashflow(self) -> _pd.DataFrame:
|
||||
return self.cash_flow
|
||||
|
||||
@property
|
||||
def quarterly_cashflow(self) -> _pd.DataFrame:
|
||||
return self.quarterly_cash_flow
|
||||
|
||||
@property
|
||||
def recommendations_summary(self):
|
||||
return self.get_recommendations_summary()
|
||||
|
||||
@property
|
||||
def analyst_price_target(self) -> _pd.DataFrame:
|
||||
return self.get_analyst_price_target()
|
||||
|
||||
@property
|
||||
def revenue_forecasts(self) -> _pd.DataFrame:
|
||||
return self.get_rev_forecast()
|
||||
|
||||
@property
|
||||
def sustainability(self) -> _pd.DataFrame:
|
||||
return self.get_sustainability()
|
||||
|
||||
@property
|
||||
def options(self):
|
||||
def options(self) -> tuple:
|
||||
if not self._expirations:
|
||||
self._download_options()
|
||||
return tuple(self._expirations.keys())
|
||||
@@ -209,13 +244,21 @@ class Ticker(TickerBase):
|
||||
return self.get_news()
|
||||
|
||||
@property
|
||||
def analysis(self):
|
||||
return self.get_analysis()
|
||||
def trend_details(self) -> _pd.DataFrame:
|
||||
return self.get_trend_details()
|
||||
|
||||
@property
|
||||
def earnings_history(self):
|
||||
return self.get_earnings_history()
|
||||
def earnings_trend(self) -> _pd.DataFrame:
|
||||
return self.get_earnings_trend()
|
||||
|
||||
@property
|
||||
def earnings_dates(self):
|
||||
def earnings_dates(self) -> _pd.DataFrame:
|
||||
return self.get_earnings_dates()
|
||||
|
||||
@property
|
||||
def earnings_forecasts(self) -> _pd.DataFrame:
|
||||
return self.get_earnings_forecast()
|
||||
|
||||
@property
|
||||
def history_metadata(self) -> dict:
|
||||
return self.get_history_metadata()
|
||||
|
||||
@@ -22,51 +22,53 @@
|
||||
from __future__ import print_function
|
||||
|
||||
from . import Ticker, multi
|
||||
|
||||
|
||||
# from collections import namedtuple as _namedtuple
|
||||
|
||||
|
||||
class Tickers():
|
||||
class Tickers:
|
||||
|
||||
def __repr__(self):
|
||||
return 'yfinance.Tickers object <%s>' % ",".join(self.symbols)
|
||||
return f"yfinance.Tickers object <{','.join(self.symbols)}>"
|
||||
|
||||
def __init__(self, tickers, session=None):
|
||||
tickers = tickers if isinstance(
|
||||
tickers, list) else tickers.replace(',', ' ').split()
|
||||
self.symbols = [ticker.upper() for ticker in tickers]
|
||||
ticker_objects = {}
|
||||
self.tickers = {ticker: Ticker(ticker, session=session) for ticker in self.symbols}
|
||||
|
||||
for ticker in self.symbols:
|
||||
ticker_objects[ticker] = Ticker(ticker, session=session)
|
||||
|
||||
self.tickers = ticker_objects
|
||||
# self.tickers = _namedtuple(
|
||||
# "Tickers", ticker_objects.keys(), rename=True
|
||||
# )(*ticker_objects.values())
|
||||
|
||||
def history(self, period="1mo", interval="1d",
|
||||
start=None, end=None, prepost=False,
|
||||
actions=True, auto_adjust=True, proxy=None,
|
||||
actions=True, auto_adjust=True, repair=False,
|
||||
proxy=None,
|
||||
threads=True, group_by='column', progress=True,
|
||||
timeout=None, **kwargs):
|
||||
timeout=10, **kwargs):
|
||||
|
||||
return self.download(
|
||||
period, interval,
|
||||
start, end, prepost,
|
||||
actions, auto_adjust, proxy,
|
||||
actions, auto_adjust, repair,
|
||||
proxy,
|
||||
threads, group_by, progress,
|
||||
timeout, **kwargs)
|
||||
|
||||
def download(self, period="1mo", interval="1d",
|
||||
start=None, end=None, prepost=False,
|
||||
actions=True, auto_adjust=True, proxy=None,
|
||||
actions=True, auto_adjust=True, repair=False,
|
||||
proxy=None,
|
||||
threads=True, group_by='column', progress=True,
|
||||
timeout=None, **kwargs):
|
||||
timeout=10, **kwargs):
|
||||
|
||||
data = multi.download(self.symbols,
|
||||
start=start, end=end,
|
||||
actions=actions,
|
||||
auto_adjust=auto_adjust,
|
||||
repair=repair,
|
||||
period=period,
|
||||
interval=interval,
|
||||
prepost=prepost,
|
||||
@@ -87,10 +89,4 @@ class Tickers():
|
||||
return data
|
||||
|
||||
def news(self):
|
||||
collection = {}
|
||||
for ticker in self.symbols:
|
||||
collection[ticker] = []
|
||||
items = Ticker(ticker).news
|
||||
for item in items:
|
||||
collection[ticker].append(item)
|
||||
return collection
|
||||
return {ticker: [item for item in Ticker(ticker).news] for ticker in self.symbols}
|
||||
|
||||
@@ -21,33 +21,173 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import requests as _requests
|
||||
import datetime as _datetime
|
||||
import logging
|
||||
import re as _re
|
||||
import pandas as _pd
|
||||
import numpy as _np
|
||||
import sys as _sys
|
||||
import threading
|
||||
from functools import lru_cache
|
||||
from inspect import getmembers
|
||||
from types import FunctionType
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import numpy as _np
|
||||
import pandas as _pd
|
||||
import pytz as _tz
|
||||
import requests as _requests
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from pytz import UnknownTimeZoneError
|
||||
|
||||
from yfinance import const
|
||||
from .const import _BASE_URL_
|
||||
|
||||
try:
|
||||
import ujson as _json
|
||||
except ImportError:
|
||||
import json as _json
|
||||
|
||||
|
||||
user_agent_headers = {
|
||||
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
|
||||
|
||||
# From https://stackoverflow.com/a/59128615
|
||||
def attributes(obj):
|
||||
disallowed_names = {
|
||||
name for name, value in getmembers(type(obj))
|
||||
if isinstance(value, FunctionType)}
|
||||
return {
|
||||
name: getattr(obj, name) for name in dir(obj)
|
||||
if name[0] != '_' and name not in disallowed_names and hasattr(obj, name)}
|
||||
|
||||
|
||||
@lru_cache(maxsize=20)
|
||||
def print_once(msg):
|
||||
# 'warnings' module suppression of repeat messages does not work.
|
||||
# This function replicates correct behaviour
|
||||
print(msg)
|
||||
|
||||
|
||||
# Logging
|
||||
# Note: most of this logic is adding indentation with function depth,
|
||||
# so that DEBUG log is readable.
|
||||
class IndentLoggerAdapter(logging.LoggerAdapter):
|
||||
def process(self, msg, kwargs):
|
||||
if get_yf_logger().isEnabledFor(logging.DEBUG):
|
||||
i = ' ' * self.extra['indent']
|
||||
if not isinstance(msg, str):
|
||||
msg = str(msg)
|
||||
msg = '\n'.join([i + m for m in msg.split('\n')])
|
||||
return msg, kwargs
|
||||
|
||||
|
||||
_indentation_level = threading.local()
|
||||
|
||||
|
||||
class IndentationContext:
|
||||
def __init__(self, increment=1):
|
||||
self.increment = increment
|
||||
|
||||
def __enter__(self):
|
||||
_indentation_level.indent = getattr(_indentation_level, 'indent', 0) + self.increment
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
_indentation_level.indent -= self.increment
|
||||
|
||||
|
||||
def get_indented_logger(name=None):
|
||||
# Never cache the returned value! Will break indentation.
|
||||
return IndentLoggerAdapter(logging.getLogger(name), {'indent': getattr(_indentation_level, 'indent', 0)})
|
||||
|
||||
|
||||
def log_indent_decorator(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
logger = get_indented_logger('yfinance')
|
||||
logger.debug(f'Entering {func.__name__}()')
|
||||
|
||||
with IndentationContext():
|
||||
result = func(*args, **kwargs)
|
||||
|
||||
logger.debug(f'Exiting {func.__name__}()')
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class MultiLineFormatter(logging.Formatter):
|
||||
# The 'fmt' formatting further down is only applied to first line
|
||||
# of log message, specifically the padding after %level%.
|
||||
# For multi-line messages, need to manually copy over padding.
|
||||
def __init__(self, fmt):
|
||||
super().__init__(fmt)
|
||||
# Extract amount of padding
|
||||
match = _re.search(r'%\(levelname\)-(\d+)s', fmt)
|
||||
self.level_length = int(match.group(1)) if match else 0
|
||||
|
||||
def format(self, record):
|
||||
original = super().format(record)
|
||||
lines = original.split('\n')
|
||||
levelname = lines[0].split(' ')[0]
|
||||
if len(lines) <= 1:
|
||||
return original
|
||||
else:
|
||||
# Apply padding to all lines below first
|
||||
formatted = [lines[0]]
|
||||
if self.level_length == 0:
|
||||
padding = ' ' * len(levelname)
|
||||
else:
|
||||
padding = ' ' * self.level_length
|
||||
padding += ' ' # +1 for space between level and message
|
||||
formatted.extend(padding + line for line in lines[1:])
|
||||
return '\n'.join(formatted)
|
||||
|
||||
|
||||
yf_logger = None
|
||||
yf_log_indented = False
|
||||
|
||||
|
||||
def get_yf_logger():
|
||||
global yf_logger
|
||||
if yf_logger is None:
|
||||
yf_logger = logging.getLogger('yfinance')
|
||||
global yf_log_indented
|
||||
if yf_log_indented:
|
||||
yf_logger = get_indented_logger('yfinance')
|
||||
return yf_logger
|
||||
|
||||
|
||||
def setup_debug_formatting():
|
||||
global yf_logger
|
||||
yf_logger = get_yf_logger()
|
||||
|
||||
if not yf_logger.isEnabledFor(logging.DEBUG):
|
||||
yf_logger.warning("logging mode not set to 'DEBUG', so not setting up debug formatting")
|
||||
return
|
||||
|
||||
global yf_log_indented
|
||||
if not yf_log_indented:
|
||||
if yf_logger.handlers is None or len(yf_logger.handlers) == 0:
|
||||
h = logging.StreamHandler()
|
||||
# Ensure different level strings don't interfere with indentation
|
||||
formatter = MultiLineFormatter(fmt='%(levelname)-8s %(message)s')
|
||||
h.setFormatter(formatter)
|
||||
yf_logger.addHandler(h)
|
||||
|
||||
yf_log_indented = True
|
||||
|
||||
|
||||
def enable_debug_mode():
|
||||
get_yf_logger().setLevel(logging.DEBUG)
|
||||
setup_debug_formatting()
|
||||
|
||||
|
||||
def is_isin(string):
|
||||
return bool(_re.match("^([A-Z]{2})([A-Z0-9]{9})([0-9]{1})$", string))
|
||||
return bool(_re.match("^([A-Z]{2})([A-Z0-9]{9})([0-9])$", string))
|
||||
|
||||
|
||||
def get_all_by_isin(isin, proxy=None, session=None):
|
||||
if not(is_isin(isin)):
|
||||
if not (is_isin(isin)):
|
||||
raise ValueError("Invalid ISIN number")
|
||||
|
||||
from .base import _BASE_URL_
|
||||
session = session or _requests
|
||||
url = "{}/v1/finance/search?q={}".format(_BASE_URL_, isin)
|
||||
url = f"{_BASE_URL_}/v1/finance/search?q={isin}"
|
||||
data = session.get(url=url, proxies=proxy, headers=user_agent_headers)
|
||||
try:
|
||||
data = data.json()
|
||||
@@ -81,7 +221,9 @@ def get_news_by_isin(isin, proxy=None, session=None):
|
||||
return data.get('news', {})
|
||||
|
||||
|
||||
def empty_df(index=[]):
|
||||
def empty_df(index=None):
|
||||
if index is None:
|
||||
index = []
|
||||
empty = _pd.DataFrame(index=index, data={
|
||||
'Open': _np.nan, 'High': _np.nan, 'Low': _np.nan,
|
||||
'Close': _np.nan, 'Adj Close': _np.nan, 'Volume': _np.nan})
|
||||
@@ -96,51 +238,211 @@ def empty_earnings_dates_df():
|
||||
return empty
|
||||
|
||||
|
||||
def get_html(url, proxy=None, session=None):
|
||||
session = session or _requests
|
||||
html = session.get(url=url, proxies=proxy, headers=user_agent_headers).text
|
||||
return html
|
||||
def build_template(data):
|
||||
"""
|
||||
build_template returns the details required to rebuild any of the yahoo finance financial statements in the same order as the yahoo finance webpage. The function is built to be used on the "FinancialTemplateStore" json which appears in any one of the three yahoo finance webpages: "/financials", "/cash-flow" and "/balance-sheet".
|
||||
|
||||
Returns:
|
||||
- template_annual_order: The order that annual figures should be listed in.
|
||||
- template_ttm_order: The order that TTM (Trailing Twelve Month) figures should be listed in.
|
||||
- template_order: The order that quarterlies should be in (note that quarterlies have no pre-fix - hence why this is required).
|
||||
- level_detail: The level of each individual line item. E.g. for the "/financials" webpage, "Total Revenue" is a level 0 item and is the summation of "Operating Revenue" and "Excise Taxes" which are level 1 items.
|
||||
|
||||
"""
|
||||
template_ttm_order = [] # Save the TTM (Trailing Twelve Months) ordering to an object.
|
||||
template_annual_order = [] # Save the annual ordering to an object.
|
||||
template_order = [] # Save the ordering to an object (this can be utilized for quarterlies)
|
||||
level_detail = [] # Record the level of each line item of the income statement ("Operating Revenue" and "Excise Taxes" sum to return "Total Revenue" we need to keep track of this)
|
||||
|
||||
def traverse(node, level):
|
||||
"""
|
||||
A recursive function that visits a node and its children.
|
||||
|
||||
Args:
|
||||
node: The current node in the data structure.
|
||||
level: The depth of the current node in the data structure.
|
||||
"""
|
||||
if level > 5: # Stop when level is above 5
|
||||
return
|
||||
template_ttm_order.append(f"trailing{node['key']}")
|
||||
template_annual_order.append(f"annual{node['key']}")
|
||||
template_order.append(f"{node['key']}")
|
||||
level_detail.append(level)
|
||||
if 'children' in node: # Check if the node has children
|
||||
for child in node['children']: # If yes, traverse each child
|
||||
traverse(child, level + 1) # Increment the level by 1 for each child
|
||||
|
||||
for key in data['template']: # Loop through the data
|
||||
traverse(key, 0) # Call the traverse function with initial level being 0
|
||||
|
||||
return template_ttm_order, template_annual_order, template_order, level_detail
|
||||
|
||||
|
||||
def get_json(url, proxy=None, session=None):
|
||||
session = session or _requests
|
||||
html = session.get(url=url, proxies=proxy, headers=user_agent_headers).text
|
||||
def retrieve_financial_details(data):
|
||||
"""
|
||||
retrieve_financial_details returns all of the available financial details under the
|
||||
"QuoteTimeSeriesStore" for any of the following three yahoo finance webpages:
|
||||
"/financials", "/cash-flow" and "/balance-sheet".
|
||||
|
||||
if "QuoteSummaryStore" not in html:
|
||||
html = session.get(url=url, proxies=proxy).text
|
||||
if "QuoteSummaryStore" not in html:
|
||||
return {}
|
||||
Returns:
|
||||
- TTM_dicts: A dictionary full of all of the available Trailing Twelve Month figures, this can easily be converted to a pandas dataframe.
|
||||
- Annual_dicts: A dictionary full of all of the available Annual figures, this can easily be converted to a pandas dataframe.
|
||||
"""
|
||||
TTM_dicts = [] # Save a dictionary object to store the TTM financials.
|
||||
Annual_dicts = [] # Save a dictionary object to store the Annual financials.
|
||||
|
||||
json_str = html.split('root.App.main =')[1].split(
|
||||
'(this)')[0].split(';\n}')[0].strip()
|
||||
data = _json.loads(json_str)[
|
||||
'context']['dispatcher']['stores']['QuoteSummaryStore']
|
||||
# add data about Shares Outstanding for companies' tickers if they are available
|
||||
try:
|
||||
data['annualBasicAverageShares'] = _json.loads(
|
||||
json_str)['context']['dispatcher']['stores'][
|
||||
'QuoteTimeSeriesStore']['timeSeries']['annualBasicAverageShares']
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# return data
|
||||
new_data = _json.dumps(data).replace('{}', 'null')
|
||||
new_data = _re.sub(
|
||||
r'\{[\'|\"]raw[\'|\"]:(.*?),(.*?)\}', r'\1', new_data)
|
||||
|
||||
return _json.loads(new_data)
|
||||
for key, timeseries in data.get('timeSeries', {}).items(): # Loop through the time series data to grab the key financial figures.
|
||||
try:
|
||||
if timeseries:
|
||||
time_series_dict = {'index': key}
|
||||
for each in timeseries: # Loop through the years
|
||||
if not each:
|
||||
continue
|
||||
time_series_dict[each.get('asOfDate')] = each.get('reportedValue')
|
||||
if 'trailing' in key:
|
||||
TTM_dicts.append(time_series_dict)
|
||||
elif 'annual' in key:
|
||||
Annual_dicts.append(time_series_dict)
|
||||
except KeyError as e:
|
||||
print(f"An error occurred while processing the key: {e}")
|
||||
return TTM_dicts, Annual_dicts
|
||||
|
||||
|
||||
def camel2title(o):
|
||||
return [_re.sub("([a-z])([A-Z])", r"\g<1> \g<2>", i).title() for i in o]
|
||||
def format_annual_financial_statement(level_detail, annual_dicts, annual_order, ttm_dicts=None, ttm_order=None):
|
||||
"""
|
||||
format_annual_financial_statement formats any annual financial statement
|
||||
|
||||
Returns:
|
||||
- _statement: A fully formatted annual financial statement in pandas dataframe.
|
||||
"""
|
||||
Annual = _pd.DataFrame.from_dict(annual_dicts).set_index("index")
|
||||
Annual = Annual.reindex(annual_order)
|
||||
Annual.index = Annual.index.str.replace(r'annual', '')
|
||||
|
||||
# Note: balance sheet is the only financial statement with no ttm detail
|
||||
if ttm_dicts and ttm_order:
|
||||
TTM = _pd.DataFrame.from_dict(ttm_dicts).set_index("index").reindex(ttm_order)
|
||||
# Add 'TTM' prefix to all column names, so if combined we can tell
|
||||
# the difference between actuals and TTM (similar to yahoo finance).
|
||||
TTM.columns = ['TTM ' + str(col) for col in TTM.columns]
|
||||
TTM.index = TTM.index.str.replace(r'trailing', '')
|
||||
_statement = Annual.merge(TTM, left_index=True, right_index=True)
|
||||
else:
|
||||
_statement = Annual
|
||||
|
||||
_statement.index = camel2title(_statement.T.index)
|
||||
_statement['level_detail'] = level_detail
|
||||
_statement = _statement.set_index([_statement.index, 'level_detail'])
|
||||
_statement = _statement[sorted(_statement.columns, reverse=True)]
|
||||
_statement = _statement.dropna(how='all')
|
||||
return _statement
|
||||
|
||||
|
||||
def format_quarterly_financial_statement(_statement, level_detail, order):
|
||||
"""
|
||||
format_quarterly_financial_statements formats any quarterly financial statement
|
||||
|
||||
Returns:
|
||||
- _statement: A fully formatted quarterly financial statement in pandas dataframe.
|
||||
"""
|
||||
_statement = _statement.reindex(order)
|
||||
_statement.index = camel2title(_statement.T)
|
||||
_statement['level_detail'] = level_detail
|
||||
_statement = _statement.set_index([_statement.index, 'level_detail'])
|
||||
_statement = _statement[sorted(_statement.columns, reverse=True)]
|
||||
_statement = _statement.dropna(how='all')
|
||||
_statement.columns = _pd.to_datetime(_statement.columns).date
|
||||
return _statement
|
||||
|
||||
|
||||
def camel2title(strings: List[str], sep: str = ' ', acronyms: Optional[List[str]] = None) -> List[str]:
|
||||
if isinstance(strings, str) or not hasattr(strings, '__iter__'):
|
||||
raise TypeError("camel2title() 'strings' argument must be iterable of strings")
|
||||
if len(strings) == 0:
|
||||
return strings
|
||||
if not isinstance(strings[0], str):
|
||||
raise TypeError("camel2title() 'strings' argument must be iterable of strings")
|
||||
if not isinstance(sep, str) or len(sep) != 1:
|
||||
raise ValueError(f"camel2title() 'sep' argument = '{sep}' must be single character")
|
||||
if _re.match("[a-zA-Z0-9]", sep):
|
||||
raise ValueError(f"camel2title() 'sep' argument = '{sep}' cannot be alpha-numeric")
|
||||
if _re.escape(sep) != sep and sep not in {' ', '-'}:
|
||||
# Permit some exceptions, I don't understand why they get escaped
|
||||
raise ValueError(f"camel2title() 'sep' argument = '{sep}' cannot be special character")
|
||||
|
||||
if acronyms is None:
|
||||
pat = "([a-z])([A-Z])"
|
||||
rep = rf"\g<1>{sep}\g<2>"
|
||||
return [_re.sub(pat, rep, s).title() for s in strings]
|
||||
|
||||
# Handling acronyms requires more care. Assumes Yahoo returns acronym strings upper-case
|
||||
if isinstance(acronyms, str) or not hasattr(acronyms, '__iter__') or not isinstance(acronyms[0], str):
|
||||
raise TypeError("camel2title() 'acronyms' argument must be iterable of strings")
|
||||
for a in acronyms:
|
||||
if not _re.match("^[A-Z]+$", a):
|
||||
raise ValueError(f"camel2title() 'acronyms' argument must only contain upper-case, but '{a}' detected")
|
||||
|
||||
# Insert 'sep' between lower-then-upper-case
|
||||
pat = "([a-z])([A-Z])"
|
||||
rep = rf"\g<1>{sep}\g<2>"
|
||||
strings = [_re.sub(pat, rep, s) for s in strings]
|
||||
|
||||
# Insert 'sep' after acronyms
|
||||
for a in acronyms:
|
||||
pat = f"({a})([A-Z][a-z])"
|
||||
rep = rf"\g<1>{sep}\g<2>"
|
||||
strings = [_re.sub(pat, rep, s) for s in strings]
|
||||
|
||||
# Apply str.title() to non-acronym words
|
||||
strings = [s.split(sep) for s in strings]
|
||||
strings = [[j.title() if j not in acronyms else j for j in s] for s in strings]
|
||||
strings = [sep.join(s) for s in strings]
|
||||
|
||||
return strings
|
||||
|
||||
|
||||
def snake_case_2_camelCase(s):
|
||||
sc = s.split('_')[0] + ''.join(x.title() for x in s.split('_')[1:])
|
||||
return sc
|
||||
|
||||
|
||||
def _parse_user_dt(dt, exchange_tz):
|
||||
if isinstance(dt, int):
|
||||
# Should already be epoch, test with conversion:
|
||||
_datetime.datetime.fromtimestamp(dt)
|
||||
else:
|
||||
# Convert str/date -> datetime, set tzinfo=exchange, get timestamp:
|
||||
if isinstance(dt, str):
|
||||
dt = _datetime.datetime.strptime(str(dt), '%Y-%m-%d')
|
||||
if isinstance(dt, _datetime.date) and not isinstance(dt, _datetime.datetime):
|
||||
dt = _datetime.datetime.combine(dt, _datetime.time(0))
|
||||
if isinstance(dt, _datetime.datetime) and dt.tzinfo is None:
|
||||
# Assume user is referring to exchange's timezone
|
||||
dt = _tz.timezone(exchange_tz).localize(dt)
|
||||
dt = int(dt.timestamp())
|
||||
return dt
|
||||
|
||||
|
||||
def _interval_to_timedelta(interval):
|
||||
if interval == "1mo":
|
||||
return relativedelta(months=1)
|
||||
elif interval == "3mo":
|
||||
return relativedelta(months=3)
|
||||
elif interval == "1y":
|
||||
return relativedelta(years=1)
|
||||
elif interval == "1wk":
|
||||
return _pd.Timedelta(days=7)
|
||||
else:
|
||||
return _pd.Timedelta(interval)
|
||||
|
||||
|
||||
def auto_adjust(data):
|
||||
col_order = data.columns
|
||||
df = data.copy()
|
||||
ratio = df["Close"] / df["Adj Close"]
|
||||
df["Adj Open"] = df["Open"] / ratio
|
||||
df["Adj High"] = df["High"] / ratio
|
||||
df["Adj Low"] = df["Low"] / ratio
|
||||
ratio = (df["Adj Close"] / df["Close"]).to_numpy()
|
||||
df["Adj Open"] = df["Open"] * ratio
|
||||
df["Adj High"] = df["High"] * ratio
|
||||
df["Adj Low"] = df["Low"] * ratio
|
||||
|
||||
df.drop(
|
||||
["Open", "High", "Low", "Close"],
|
||||
@@ -151,13 +453,13 @@ def auto_adjust(data):
|
||||
"Adj Low": "Low", "Adj Close": "Close"
|
||||
}, inplace=True)
|
||||
|
||||
df = df[["Open", "High", "Low", "Close", "Volume"]]
|
||||
return df[["Open", "High", "Low", "Close", "Volume"]]
|
||||
return df[[c for c in col_order if c in df.columns]]
|
||||
|
||||
|
||||
def back_adjust(data):
|
||||
""" back-adjusted data to mimic true historical prices """
|
||||
|
||||
col_order = data.columns
|
||||
df = data.copy()
|
||||
ratio = df["Adj Close"] / df["Close"]
|
||||
df["Adj Open"] = df["Open"] * ratio
|
||||
@@ -173,10 +475,10 @@ def back_adjust(data):
|
||||
"Adj Low": "Low"
|
||||
}, inplace=True)
|
||||
|
||||
return df[["Open", "High", "Low", "Close", "Volume"]]
|
||||
return df[[c for c in col_order if c in df.columns]]
|
||||
|
||||
|
||||
def parse_quotes(data, tz=None):
|
||||
def parse_quotes(data):
|
||||
timestamps = data["timestamp"]
|
||||
ohlc = data["indicators"]["quote"][0]
|
||||
volumes = ohlc["volume"]
|
||||
@@ -199,17 +501,13 @@ def parse_quotes(data, tz=None):
|
||||
quotes.index = _pd.to_datetime(timestamps, unit="s")
|
||||
quotes.sort_index(inplace=True)
|
||||
|
||||
if tz is not None:
|
||||
quotes.index = quotes.index.tz_localize(tz)
|
||||
|
||||
return quotes
|
||||
|
||||
|
||||
def parse_actions(data, tz=None):
|
||||
dividends = _pd.DataFrame(
|
||||
columns=["Dividends"], index=_pd.DatetimeIndex([]))
|
||||
splits = _pd.DataFrame(
|
||||
columns=["Stock Splits"], index=_pd.DatetimeIndex([]))
|
||||
def parse_actions(data):
|
||||
dividends = None
|
||||
capital_gains = None
|
||||
splits = None
|
||||
|
||||
if "events" in data:
|
||||
if "dividends" in data["events"]:
|
||||
@@ -218,24 +516,338 @@ def parse_actions(data, tz=None):
|
||||
dividends.set_index("date", inplace=True)
|
||||
dividends.index = _pd.to_datetime(dividends.index, unit="s")
|
||||
dividends.sort_index(inplace=True)
|
||||
if tz is not None:
|
||||
dividends.index = dividends.index.tz_localize(tz)
|
||||
|
||||
dividends.columns = ["Dividends"]
|
||||
|
||||
if "capitalGains" in data["events"]:
|
||||
capital_gains = _pd.DataFrame(
|
||||
data=list(data["events"]["capitalGains"].values()))
|
||||
capital_gains.set_index("date", inplace=True)
|
||||
capital_gains.index = _pd.to_datetime(capital_gains.index, unit="s")
|
||||
capital_gains.sort_index(inplace=True)
|
||||
capital_gains.columns = ["Capital Gains"]
|
||||
|
||||
if "splits" in data["events"]:
|
||||
splits = _pd.DataFrame(
|
||||
data=list(data["events"]["splits"].values()))
|
||||
splits.set_index("date", inplace=True)
|
||||
splits.index = _pd.to_datetime(splits.index, unit="s")
|
||||
splits.sort_index(inplace=True)
|
||||
if tz is not None:
|
||||
splits.index = splits.index.tz_localize(tz)
|
||||
splits["Stock Splits"] = splits["numerator"] / \
|
||||
splits["denominator"]
|
||||
splits = splits["Stock Splits"]
|
||||
splits["Stock Splits"] = splits["numerator"] / splits["denominator"]
|
||||
splits = splits[["Stock Splits"]]
|
||||
|
||||
return dividends, splits
|
||||
if dividends is None:
|
||||
dividends = _pd.DataFrame(
|
||||
columns=["Dividends"], index=_pd.DatetimeIndex([]))
|
||||
if capital_gains is None:
|
||||
capital_gains = _pd.DataFrame(
|
||||
columns=["Capital Gains"], index=_pd.DatetimeIndex([]))
|
||||
if splits is None:
|
||||
splits = _pd.DataFrame(
|
||||
columns=["Stock Splits"], index=_pd.DatetimeIndex([]))
|
||||
|
||||
return dividends, splits, capital_gains
|
||||
|
||||
|
||||
def set_df_tz(df, interval, tz):
|
||||
if df.index.tz is None:
|
||||
df.index = df.index.tz_localize("UTC")
|
||||
df.index = df.index.tz_convert(tz)
|
||||
return df
|
||||
|
||||
|
||||
def fix_Yahoo_returning_prepost_unrequested(quotes, interval, tradingPeriods):
|
||||
# Sometimes Yahoo returns post-market data despite not requesting it.
|
||||
# Normally happens on half-day early closes.
|
||||
#
|
||||
# And sometimes returns pre-market data despite not requesting it.
|
||||
# E.g. some London tickers.
|
||||
tps_df = tradingPeriods.copy()
|
||||
tps_df["_date"] = tps_df.index.date
|
||||
quotes["_date"] = quotes.index.date
|
||||
idx = quotes.index.copy()
|
||||
quotes = quotes.merge(tps_df, how="left")
|
||||
quotes.index = idx
|
||||
# "end" = end of regular trading hours (including any auction)
|
||||
f_drop = quotes.index >= quotes["end"]
|
||||
f_drop = f_drop | (quotes.index < quotes["start"])
|
||||
if f_drop.any():
|
||||
# When printing report, ignore rows that were already NaNs:
|
||||
# f_na = quotes[["Open","Close"]].isna().all(axis=1)
|
||||
# n_nna = quotes.shape[0] - _np.sum(f_na)
|
||||
# n_drop_nna = _np.sum(f_drop & ~f_na)
|
||||
# quotes_dropped = quotes[f_drop]
|
||||
# if debug and n_drop_nna > 0:
|
||||
# print(f"Dropping {n_drop_nna}/{n_nna} intervals for falling outside regular trading hours")
|
||||
quotes = quotes[~f_drop]
|
||||
quotes = quotes.drop(["_date", "start", "end"], axis=1)
|
||||
return quotes
|
||||
|
||||
|
||||
def fix_Yahoo_returning_live_separate(quotes, interval, tz_exchange):
|
||||
# Yahoo bug fix. If market is open today then Yahoo normally returns
|
||||
# todays data as a separate row from rest-of week/month interval in above row.
|
||||
# Seems to depend on what exchange e.g. crypto OK.
|
||||
# Fix = merge them together
|
||||
n = quotes.shape[0]
|
||||
if n > 1:
|
||||
dt1 = quotes.index[n - 1]
|
||||
dt2 = quotes.index[n - 2]
|
||||
if quotes.index.tz is None:
|
||||
dt1 = dt1.tz_localize("UTC")
|
||||
dt2 = dt2.tz_localize("UTC")
|
||||
dt1 = dt1.tz_convert(tz_exchange)
|
||||
dt2 = dt2.tz_convert(tz_exchange)
|
||||
if interval == "1d":
|
||||
# Similar bug in daily data except most data is simply duplicated
|
||||
# - exception is volume, *slightly* greater on final row (and matches website)
|
||||
if dt1.date() == dt2.date():
|
||||
# Last two rows are on same day. Drop second-to-last row
|
||||
quotes = quotes.drop(quotes.index[n - 2])
|
||||
else:
|
||||
if interval == "1wk":
|
||||
last_rows_same_interval = dt1.year == dt2.year and dt1.week == dt2.week
|
||||
elif interval == "1mo":
|
||||
last_rows_same_interval = dt1.month == dt2.month
|
||||
elif interval == "3mo":
|
||||
last_rows_same_interval = dt1.year == dt2.year and dt1.quarter == dt2.quarter
|
||||
else:
|
||||
last_rows_same_interval = (dt1 - dt2) < _pd.Timedelta(interval)
|
||||
|
||||
if last_rows_same_interval:
|
||||
# Last two rows are within same interval
|
||||
idx1 = quotes.index[n - 1]
|
||||
idx2 = quotes.index[n - 2]
|
||||
if idx1 == idx2:
|
||||
# Yahoo returning last interval duplicated, which means
|
||||
# Yahoo is not returning live data (phew!)
|
||||
return quotes
|
||||
if _np.isnan(quotes.loc[idx2, "Open"]):
|
||||
quotes.loc[idx2, "Open"] = quotes["Open"].iloc[n - 1]
|
||||
# Note: nanmax() & nanmin() ignores NaNs, but still need to check not all are NaN to avoid warnings
|
||||
if not _np.isnan(quotes["High"].iloc[n - 1]):
|
||||
quotes.loc[idx2, "High"] = _np.nanmax([quotes["High"].iloc[n - 1], quotes["High"].iloc[n - 2]])
|
||||
if "Adj High" in quotes.columns:
|
||||
quotes.loc[idx2, "Adj High"] = _np.nanmax([quotes["Adj High"].iloc[n - 1], quotes["Adj High"].iloc[n - 2]])
|
||||
|
||||
if not _np.isnan(quotes["Low"].iloc[n - 1]):
|
||||
quotes.loc[idx2, "Low"] = _np.nanmin([quotes["Low"].iloc[n - 1], quotes["Low"].iloc[n - 2]])
|
||||
if "Adj Low" in quotes.columns:
|
||||
quotes.loc[idx2, "Adj Low"] = _np.nanmin([quotes["Adj Low"].iloc[n - 1], quotes["Adj Low"].iloc[n - 2]])
|
||||
|
||||
quotes.loc[idx2, "Close"] = quotes["Close"].iloc[n - 1]
|
||||
if "Adj Close" in quotes.columns:
|
||||
quotes.loc[idx2, "Adj Close"] = quotes["Adj Close"].iloc[n - 1]
|
||||
quotes.loc[idx2, "Volume"] += quotes["Volume"].iloc[n - 1]
|
||||
quotes = quotes.drop(quotes.index[n - 1])
|
||||
|
||||
return quotes
|
||||
|
||||
|
||||
def safe_merge_dfs(df_main, df_sub, interval):
    """Merge a single-column event table (Dividends / Stock Splits /
    Capital Gains) into the price table `df_main`.

    Each event row in `df_sub` is mapped onto the price interval that
    contains it, then joined on. Out-of-range events are either discarded
    (intraday) or, for daily-and-above intervals, get a new NaN price row
    appended so the event is kept.

    Parameters
    ----------
    df_main : DataFrame
        Price table with a DatetimeIndex.
    df_sub : DataFrame
        Event table; must contribute exactly one column not already in
        df_main (the "data col").
    interval : str
        Yahoo interval string, e.g. "1d", "1wk", "15m".

    Raises
    ------
    Exception
        If df_sub is empty, if more than one data col is present, if events
        would be lost by the merge, or if out-of-range events remain where
        none are expected.
    """
    if df_sub.empty:
        raise Exception("No data to merge")
    if df_main.empty:
        return df_main

    # NOTE(review): df_sub_backup is never used below - presumably kept for
    # debugging/recovery; confirm before removing.
    df_sub_backup = df_sub.copy()
    data_cols = [c for c in df_sub.columns if c not in df_main]
    # NOTE(review): only >1 is rejected; zero data cols would raise
    # IndexError on the next line rather than this Exception.
    if len(data_cols) > 1:
        raise Exception("Expected 1 data col")
    data_col = data_cols[0]

    df_main = df_main.sort_index()
    intraday = interval.endswith('m') or interval.endswith('s')

    td = _interval_to_timedelta(interval)
    # Map each event to the positional index of the price row whose
    # interval contains it.
    if intraday:
        # On some exchanges the event can occur before market open.
        # Problem when combining with intraday data.
        # Solution = use dates, not datetimes, to map/merge.
        df_main['_date'] = df_main.index.date
        df_sub['_date'] = df_sub.index.date
        indices = _np.searchsorted(_np.append(df_main['_date'], [df_main['_date'].iloc[-1]+td]), df_sub['_date'], side='left')
        df_main = df_main.drop('_date', axis=1)
        df_sub = df_sub.drop('_date', axis=1)
    else:
        # Append a sentinel one interval past the end so the final price
        # row also has an upper bound.
        indices = _np.searchsorted(_np.append(df_main.index, df_main.index[-1] + td), df_sub.index, side='right')
        indices -= 1  # Convert from [[i-1], [i]) to [[i], [i+1])
    # Numpy.searchsorted does not handle out-of-range well, so handle manually:
    if intraday:
        for i in range(len(df_sub.index)):
            dt = df_sub.index[i].date()
            if dt < df_main.index[0].date() or dt >= df_main.index[-1].date() + _datetime.timedelta(days=1):
                # Out-of-range
                indices[i] = -1
    else:
        for i in range(len(df_sub.index)):
            dt = df_sub.index[i]
            if dt < df_main.index[0] or dt >= df_main.index[-1] + td:
                # Out-of-range
                indices[i] = -1

    f_outOfRange = indices == -1
    if f_outOfRange.any():
        if intraday:
            # Discard out-of-range dividends in intraday data, assume user not interested
            df_sub = df_sub[~f_outOfRange]
            if df_sub.empty:
                df_main['Dividends'] = 0.0
                return df_main
        else:
            empty_row_data = {c:[_np.nan] for c in const.price_colnames}|{'Volume':[0]}
            if interval == '1d':
                # For 1d, add all out-of-range event dates
                for i in _np.where(f_outOfRange)[0]:
                    dt = df_sub.index[i]
                    get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
                    empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
                    df_main = _pd.concat([df_main, empty_row], sort=True)
            else:
                # Else, only add out-of-range event dates if occurring in interval
                # immediately after last price row
                last_dt = df_main.index[-1]
                next_interval_start_dt = last_dt + td
                next_interval_end_dt = next_interval_start_dt + td
                for i in _np.where(f_outOfRange)[0]:
                    dt = df_sub.index[i]
                    if next_interval_start_dt <= dt < next_interval_end_dt:
                        # NOTE(review): new_dt is assigned but never used -
                        # looks like the new row was meant to be indexed by
                        # new_dt rather than dt; confirm intent.
                        new_dt = next_interval_start_dt
                        get_yf_logger().debug(f"Adding out-of-range {data_col} @ {dt.date()} in new prices row of NaNs")
                        empty_row = _pd.DataFrame(data=empty_row_data, index=[dt])
                        df_main = _pd.concat([df_main, empty_row], sort=True)
            df_main = df_main.sort_index()

        # Re-calculate indices
        indices = _np.searchsorted(_np.append(df_main.index, df_main.index[-1] + td), df_sub.index, side='right')
        indices -= 1  # Convert from [[i-1], [i]) to [[i], [i+1])
        # Numpy.searchsorted does not handle out-of-range well, so handle manually:
        for i in range(len(df_sub.index)):
            dt = df_sub.index[i]
            if dt < df_main.index[0] or dt >= df_main.index[-1] + td:
                # Out-of-range
                indices[i] = -1

        f_outOfRange = indices == -1
        if f_outOfRange.any():
            if intraday or interval in ['1d', '1wk']:
                raise Exception(f"The following '{data_col}' events are out-of-range, did not expect with interval {interval}: {df_sub.index[f_outOfRange]}")
            get_yf_logger().debug(f'Discarding these {data_col} events:' + '\n' + str(df_sub[f_outOfRange]))
            df_sub = df_sub[~f_outOfRange].copy()
            indices = indices[~f_outOfRange]

    def _reindex_events(df, new_index, data_col_name):
        # Move events onto their mapped price-row timestamps. When several
        # events land in the same interval, aggregate them.
        if len(new_index) == len(set(new_index)):
            # No duplicates, easy
            df.index = new_index
            return df

        df["_NewIndex"] = new_index
        # Duplicates present within periods but can aggregate
        if data_col_name in ["Dividends", "Capital Gains"]:
            # Add
            df = df.groupby("_NewIndex").sum()
            df.index.name = None
        elif data_col_name == "Stock Splits":
            # Product
            df = df.groupby("_NewIndex").prod()
            df.index.name = None
        else:
            raise Exception(f"New index contains duplicates but unsure how to aggregate for '{data_col_name}'")
        if "_NewIndex" in df.columns:
            df = df.drop("_NewIndex", axis=1)
        return df

    new_index = df_main.index[indices]
    df_sub = _reindex_events(df_sub, new_index, data_col)

    df = df_main.join(df_sub)
    f_na = df[data_col].isna()
    # Every surviving event should have landed on a price row
    data_lost = sum(~f_na) < df_sub.shape[0]
    if data_lost:
        raise Exception('Data was lost in merge, investigate')

    return df
|
||||
|
||||
|
||||
def fix_Yahoo_dst_issue(df, interval):
|
||||
if interval in ["1d", "1w", "1wk"]:
|
||||
# These intervals should start at time 00:00. But for some combinations of date and timezone,
|
||||
# Yahoo has time off by few hours (e.g. Brazil 23:00 around Jan-2022). Suspect DST problem.
|
||||
# The clue is (a) minutes=0 and (b) hour near 0.
|
||||
# Obviously Yahoo meant 00:00, so ensure this doesn't affect date conversion:
|
||||
f_pre_midnight = (df.index.minute == 0) & (df.index.hour.isin([22, 23]))
|
||||
dst_error_hours = _np.array([0] * df.shape[0])
|
||||
dst_error_hours[f_pre_midnight] = 24 - df.index[f_pre_midnight].hour
|
||||
df.index += _pd.TimedeltaIndex(dst_error_hours, 'h')
|
||||
return df
|
||||
|
||||
|
||||
def is_valid_timezone(tz: str) -> bool:
    """Return True if pytz recognises `tz` as a timezone name, else False."""
    try:
        _tz.timezone(tz)
        return True
    except UnknownTimeZoneError:
        return False
|
||||
|
||||
|
||||
def format_history_metadata(md, tradingPeriodsOnly=True):
    """Convert raw Yahoo history-metadata timestamps to tz-aware pandas objects.

    Epoch-second fields are converted into the exchange timezone taken from
    md["exchangeTimezoneName"], and "tradingPeriods" is reshaped into a
    DataFrame indexed by session date. With tradingPeriodsOnly=True (the
    default), only "tradingPeriods" is formatted.

    Returns `md` (dict fields are modified in place). Non-dict or empty
    input is returned untouched.
    """
    if not isinstance(md, dict):
        return md
    if len(md) == 0:
        return md

    tz = md["exchangeTimezoneName"]

    if not tradingPeriodsOnly:
        for k in ["firstTradeDate", "regularMarketTime"]:
            if k in md and md[k] is not None:
                # int means still raw epoch seconds -> convert once only
                if isinstance(md[k], int):
                    md[k] = _pd.to_datetime(md[k], unit='s', utc=True).tz_convert(tz)

        if "currentTradingPeriod" in md:
            for m in ["regular", "pre", "post"]:
                # int "start" means this period hasn't been converted yet
                if m in md["currentTradingPeriod"] and isinstance(md["currentTradingPeriod"][m]["start"], int):
                    for t in ["start", "end"]:
                        md["currentTradingPeriod"][m][t] = \
                            _pd.to_datetime(md["currentTradingPeriod"][m][t], unit='s', utc=True).tz_convert(tz)
                    # tz info is now carried by the timestamps themselves
                    del md["currentTradingPeriod"][m]["gmtoffset"]
                    del md["currentTradingPeriod"][m]["timezone"]

    if "tradingPeriods" in md:
        tps = md["tradingPeriods"]
        if tps == {"pre": [], "post": []}:
            # Ignore
            pass
        elif isinstance(tps, (list, dict)):
            if isinstance(tps, list):
                # Only regular times
                df = _pd.DataFrame.from_records(_np.hstack(tps))
                df = df.drop(["timezone", "gmtoffset"], axis=1)
                df["start"] = _pd.to_datetime(df["start"], unit='s', utc=True).dt.tz_convert(tz)
                df["end"] = _pd.to_datetime(df["end"], unit='s', utc=True).dt.tz_convert(tz)
            elif isinstance(tps, dict):
                # Includes pre- and post-market
                pre_df = _pd.DataFrame.from_records(_np.hstack(tps["pre"]))
                post_df = _pd.DataFrame.from_records(_np.hstack(tps["post"]))
                regular_df = _pd.DataFrame.from_records(_np.hstack(tps["regular"]))

                # Prefix pre/post columns so the three session tables can be
                # joined side-by-side without column clashes
                pre_df = pre_df.rename(columns={"start": "pre_start", "end": "pre_end"}).drop(["timezone", "gmtoffset"], axis=1)
                post_df = post_df.rename(columns={"start": "post_start", "end": "post_end"}).drop(["timezone", "gmtoffset"], axis=1)
                regular_df = regular_df.drop(["timezone", "gmtoffset"], axis=1)

                cols = ["pre_start", "pre_end", "start", "end", "post_start", "post_end"]
                df = regular_df.join(pre_df).join(post_df)
                for c in cols:
                    df[c] = _pd.to_datetime(df[c], unit='s', utc=True).dt.tz_convert(tz)
                df = df[cols]

            # Index each session by its calendar date, tz-aware in exchange tz
            df.index = _pd.to_datetime(df["start"].dt.date)
            df.index = df.index.tz_localize(tz)
            df.index.name = "Date"

            md["tradingPeriods"] = df

    return md
|
||||
|
||||
|
||||
class ProgressBar:
|
||||
@@ -270,19 +882,17 @@ class ProgressBar:
|
||||
def update_iteration(self, val=None):
|
||||
val = val if val is not None else self.elapsed / float(self.iterations)
|
||||
self.__update_amount(val * 100.0)
|
||||
self.prog_bar += ' %s of %s %s' % (
|
||||
self.elapsed, self.iterations, self.text)
|
||||
self.prog_bar += f" {self.elapsed} of {self.iterations} {self.text}"
|
||||
|
||||
def __update_amount(self, new_amount):
|
||||
percent_done = int(round((new_amount / 100.0) * 100.0))
|
||||
all_full = self.width - 2
|
||||
num_hashes = int(round((percent_done / 100.0) * all_full))
|
||||
self.prog_bar = '[' + self.fill_char * \
|
||||
num_hashes + ' ' * (all_full - num_hashes) + ']'
|
||||
self.prog_bar = '[' + self.fill_char * num_hashes + ' ' * (all_full - num_hashes) + ']'
|
||||
pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))
|
||||
pct_string = '%d%%' % percent_done
|
||||
self.prog_bar = self.prog_bar[0:pct_place] + \
|
||||
(pct_string + self.prog_bar[pct_place + len(pct_string):])
|
||||
pct_string = f'{percent_done}%%'
|
||||
self.prog_bar = self.prog_bar[0:pct_place] + (pct_string + self.prog_bar[pct_place + len(pct_string):])
|
||||
|
||||
def __str__(self):
|
||||
return str(self.prog_bar)
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
version = "0.1.71"
|
||||
version = "0.2.32b1"
|
||||
|
||||
Reference in New Issue
Block a user