Compare commits

...

3 Commits

Author SHA1 Message Date
Value Raider
5367f62bd7 Bump version to 0.2.14 2023-03-25 11:39:21 +00:00
ValueRaider
27cb90c596 Merge pull request #1461 from qianyun210603/main
Add fallback for decryption error in info interface
2023-03-25 11:33:27 +00:00
BookSword
6c2682654a Fetch 'info' dict via API 2023-03-24 18:04:07 +00:00
5 changed files with 46 additions and 9 deletions

View File

@@ -1,6 +1,10 @@
Change Log
===========
0.2.14
------
Fix Ticker.info dict by fetching from API #1461
0.2.13
------
Price bug fixes:

View File

@@ -1,5 +1,5 @@
{% set name = "yfinance" %}
{% set version = "0.2.13" %}
{% set version = "0.2.14" %}
package:
name: "{{ name|lower }}"

View File

@@ -15,6 +15,8 @@ else:
import requests as requests
import re
from bs4 import BeautifulSoup
import random
import time
from frozendict import frozendict
@@ -202,6 +204,11 @@ class TickerData:
proxy = {"https": proxy}
return proxy
def get_raw_json(self, url, user_agent_headers=None, params=None, proxy=None, timeout=30):
    """GET *url* and return the decoded JSON payload.

    Delegates the request to ``self.get`` (which applies the session's
    user-agent headers, params, proxy and timeout handling) and raises
    ``requests.HTTPError`` via ``raise_for_status`` on a non-2xx response.
    """
    resp = self.get(
        url,
        user_agent_headers=user_agent_headers,
        params=params,
        proxy=proxy,
        timeout=timeout,
    )
    resp.raise_for_status()
    return resp.json()
def _get_decryption_keys_from_yahoo_js(self, soup):
result = None

View File

@@ -19,6 +19,7 @@ info_retired_keys = info_retired_keys_price | info_retired_keys_exchange | info_
PRUNE_INFO = True
# PRUNE_INFO = False
_BASIC_URL_ = "https://query1.finance.yahoo.com/v7/finance/quote"
from collections.abc import MutableMapping
@@ -87,13 +88,16 @@ class Quote:
self._calendar = None
self._already_scraped = False
self._already_scraped_complementary = False
self._already_fetched = False
self._already_fetched_complementary = False
@property
def info(self) -> dict:
    """Return this ticker's info dict, populating it lazily on first access.

    Uses the query1 quote-API fetch path; the old page-scrape path is
    disabled because Yahoo's payload decryption is broken.
    May return None when the fetch finds no data for the symbol.
    """
    if self._info is None:
        # self._scrape(self.proxy)  # decrypt broken
        self._fetch(self.proxy)
        self._fetch_complementary(self.proxy)
    return self._info
@@ -236,12 +240,34 @@ class Quote:
except Exception:
pass
def _scrape_complementary(self, proxy):
if self._already_scraped_complementary:
def _fetch(self, proxy):
    """Populate ``self._info`` from Yahoo's query1 quote API.

    Replacement for the broken decrypt-and-scrape path. Runs at most once
    per instance (guarded by ``self._already_fetched``). Leaves
    ``self._info`` as None when Yahoo returns no entry for this symbol.

    proxy: optional proxy spec forwarded to the HTTP layer.
    """
    if self._already_fetched:
        return
    self._already_fetched = True
    result = self._data.get_raw_json(
        _BASIC_URL_, params={"formatted": "true", "lang": "en-US", "symbols": self._data.ticker}, proxy=proxy
    )
    query1_info = next(
        (info for info in result.get("quoteResponse", {}).get("result", []) if info["symbol"] == self._data.ticker),
        None,
    )
    # Fix: the quoteResponse result list may not contain this symbol at all
    # (delisted/unknown ticker); the original code then crashed with
    # AttributeError on None.items() below.
    if query1_info is None:
        return
    for k, v in query1_info.items():
        if isinstance(v, dict) and "raw" in v and "fmt" in v:
            # Timestamps keep the human-readable "fmt" form; all other
            # formatted numbers are unwrapped to their raw value.
            query1_info[k] = v["fmt"] if k in {"regularMarketTime", "postMarketTime"} else v["raw"]
        elif isinstance(v, str):
            # Normalise non-breaking spaces Yahoo embeds in text fields.
            query1_info[k] = v.replace("\xa0", " ")
        elif isinstance(v, (int, bool)):
            query1_info[k] = v  # identity assignment, kept for parity with the original
    self._info = query1_info
def _fetch_complementary(self, proxy):
if self._already_fetched_complementary:
return
self._already_fetched_complementary = True
# self._scrape(proxy) # decrypt broken
self._fetch(proxy)
if self._info is None:
return

View File

@@ -1 +1 @@
version = "0.2.13"
version = "0.2.14"