Update utils and strategies

This commit is contained in:
Charmve
2023-09-16 17:28:21 +08:00
parent 7936634114
commit 35ae37867f
27 changed files with 2036 additions and 48 deletions

View File

@@ -25,3 +25,18 @@ jobs:
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
favorite-image: 'https://your.favorite/image1.png,https://your.favorite/image2.png'
post-lgtm-image:
runs-on: ubuntu-latest
if: (!contains(github.actor, '[bot]')) # Exclude bot comment
steps:
- uses: ddradar/choose-random-action@v2
id: act
with:
contents: |
https://example.com/your-lgtm-image-1.jpg
https://example.com/your-lgtm-image-2.jpg
https://example.com/your-lgtm-image-3.jpg
- uses: ddradar/lgtm-action@v2.0.2
with:
image-url: ${{ steps.act.outputs.selected }}

58
.vscode/launch.json vendored
View File

@@ -1,22 +1,38 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch fund",
"args": [
"${workspaceFolder}/pyfunds/fund-strategy/src/utils/fund-stragegy/index.ts"
],
"runtimeArgs": [
"-r",
"ts-node/register",
"-r", "tsconfig-paths/register"
],
"env": { "TS_NODE_PROJECT": "${workspaceFolder}/tsconfig.json" }
},
]
}
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Qbot",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"justMyCode": true
},
{
"name": "Python: Qbot",
"type": "python",
"request": "launch",
"cwd": "${workspaceRoot}",
},
{
"type": "node",
"request": "launch",
"name": "Launch fund",
"args": [
"${workspaceFolder}/pyfunds/fund-strategy/src/utils/fund-stragegy/index.ts"
],
"runtimeArgs": [
"-r",
"ts-node/register",
"-r", "tsconfig-paths/register"
],
"env": {
"TS_NODE_PROJECT": "${workspaceFolder}/tsconfig.json"
}
},
]
}

71
.vscode/settings.json vendored
View File

@@ -1 +1,70 @@
// Place your settings in this file to overwrite default and user settings.
{
"files.exclude": {
"out": true, // set this to true to hide the "out" folder with the compiled JS files
"dist": true,
"**/*.pyc": true,
".nyc_output": true,
"obj": true,
"bin": true,
"**/__pycache__": true,
"**/node_modules": true,
".vscode-test": false,
".vscode test": false,
"**/.mypy_cache/**": true
},
"search.exclude": {
"out": true, // set this to false to include "out" folder in search results
"dist": true,
"**/node_modules": true,
"coverage": true,
"languageServer*/**": true,
".vscode-test": true,
".vscode test": true
},
"[python]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[JSON]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[YAML]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"typescript.tsdk": "./node_modules/typescript/lib", // we want to use the TS server from our node_modules folder to control its version
"python.linting.enabled": false,
"python.pythonPath": "/usr/local/anaconda3/bin/python",
"python.formatting.provider": "black",
"python.sortImports.args": ["--profile", "black"],
"typescript.preferences.quoteStyle": "single",
"javascript.preferences.quoteStyle": "single",
"typescriptHero.imports.stringQuoteStyle": "'",
"prettier.printWidth": 120,
"prettier.singleQuote": true,
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
},
"python.languageServer": "Default",
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"cucumberautocomplete.skipDocStringsFormat": true,
"python.linting.flake8Args": [
// Match what black does.
"--max-line-length=88"
],
"typescript.preferences.importModuleSpecifier": "relative",
"debug.javascript.usePreview": false
}

View File

@@ -64,9 +64,9 @@
## Quick Start
Qbot是一个免费的投研平台提供从数据获取、交易策略开发、策略回测、模拟交易到最终实盘交易的全闭环流程。在实盘接入前有股票、基金评测和策略回测在模拟环境下做交易验证近乎实盘的时延、滑点仿真。故本平台提供GUI前端/客户端部分功能也支持网页后端做数据处理、交易调度实现事件驱动的交易流程。对于策略研究部分尤其强调机器学习、强化学习的AI策略结合多因子模型提高收益比。
Qbot是一个免费的量化投研平台提供从数据获取、交易策略开发、策略回测、模拟交易到最终实盘交易的全闭环流程。在实盘接入前有股票、基金评测和策略回测在模拟环境下做交易验证近乎实盘的时延、滑点仿真。故本平台提供GUI前端/客户端部分功能也支持网页后端做数据处理、交易调度实现事件驱动的交易流程。对于策略研究部分尤其强调机器学习、强化学习的AI策略结合多因子模型提高收益比。
但本项目可能需要一点python基础知识有一点点交易经验会更容易体会作者的初衷解决当下产品空缺和广大散户朋友的交易痛点现在直接免费开源出来
但本项目可能需要一点python基础知识有一点点交易经验会更容易体会作者的初衷解决当下产品空缺和广大散户朋友的交易痛点现在直接免费开源出来
```bash
cd ~ # $HOME as workspace
@@ -194,15 +194,13 @@ pip install -r requirements.txt
python main.py
```
主要包含四个窗口,如果启动界面有问题可以参考这里的启动方式
主要包含四个窗口,如果启动界面未显示或有问题可以参考下图中对应的启动方式。👉 点击[这里](gui/mainframe.py)查看源码,下文也有文字介绍
![image](https://github.com/UFund-Me/Qbot/assets/29084184/9f1dcc07-ca76-4600-a02c-76104fb28c51)
👉 点击[这里](gui/mainframe.py)查看源码
#### 后端/服务端
1. 选基、选股助手对应客户端第二个菜单AI选股/选基)
1. 选基、选股助手(对应前端/客户端第二个菜单AI选股/选基)
运行命令
@@ -212,11 +210,12 @@ go build
./investool webserver
```
2. 基金策略在线分析(对应客户端第四个菜单:基金投资策略分析)
2. 基金策略在线分析(对应于前端/客户端第四个菜单:基金投资策略分析)
需要 node 开发环境: `npm`、`node`,点击[查看](pyfunds/fund-strategies/README.md)详细操作文档
<details><summary>版本信息(作为参考)</summary>
```
▶ go version
go version go1.20.4 darwin/amd64
@@ -227,6 +226,7 @@ v19.7.0
▶ npm --version
9.5.0
```
</details>
使用docker运行项目在项目路径下运行以下命令构建项目的docker镜像

27
WORKSPACE Normal file
View File

@@ -0,0 +1,27 @@
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

# Pin rules_python 0.11.0, sha256-verified, so the toolchain setup is reproducible.
http_archive(
    name = "rules_python",
    sha256 = "c03246c11efd49266e8e41e12931090b613e12a59e6f55ba2efd29a7cb8b4258",
    strip_prefix = "rules_python-0.11.0",
    url = "https://github.com/bazelbuild/rules_python/archive/refs/tags/0.11.0.tar.gz",
)

load("@rules_python//python:pip.bzl", "pip_install")
load("@rules_python//python:repositories.bzl", "python_register_toolchains")

# Use a hermetic Python interpreter so that builds are reproducible
# irrespective of the Python version available on the host machine.
python_register_toolchains(
    name = "python3_10",
    python_version = "3.10",
)

load("@python3_10//:defs.bzl", "interpreter")

# Translate requirements.txt into a @third_party external repository.
pip_install(
    name = "third_party",
    python_interpreter_target = interpreter,
    requirements = "//third_party:requirements.txt",
)

View File

@@ -68,9 +68,9 @@
## Quick Start
Qbot是一个免费的投研平台提供从数据获取、交易策略开发、策略回测、模拟交易到最终实盘交易的全闭环流程。在实盘接入前有股票、基金评测和策略回测在模拟环境下做交易验证近乎实盘的时延、滑点仿真。故本平台提供GUI前端/客户端部分功能也支持网页后端做数据处理、交易调度实现事件驱动的交易流程。对于策略研究部分尤其强调机器学习、强化学习的AI策略结合多因子模型提高收益比。
Qbot是一个免费的量化投研平台提供从数据获取、交易策略开发、策略回测、模拟交易到最终实盘交易的全闭环流程。在实盘接入前有股票、基金评测和策略回测在模拟环境下做交易验证近乎实盘的时延、滑点仿真。故本平台提供GUI前端/客户端部分功能也支持网页后端做数据处理、交易调度实现事件驱动的交易流程。对于策略研究部分尤其强调机器学习、强化学习的AI策略结合多因子模型提高收益比。
但本项目可能需要一点python基础知识有一点点交易经验会更容易体会作者的初衷解决当下产品空缺和广大散户朋友的交易痛点现在直接免费开源出来
但本项目可能需要一点python基础知识有一点点交易经验会更容易体会作者的初衷解决当下产品空缺和广大散户朋友的交易痛点现在直接免费开源出来
```bash
cd ~ # $HOME as workspace
@@ -197,16 +197,13 @@ pip install -r requirements.txt
# if run on Mac, please use 'pythonw main.py'
python main.py
```
主要包含四个窗口,如果启动界面有问题可以参考这里的启动方式。
主要包含四个窗口,如果启动界面未显示或有问题可以参考下图中对应的启动方式。👉 点击[这里](https://github.com/UFund-Me/Qbot/blob/main/gui/mainframe.py#L122-L141)查看源码,下文也有文字介绍。
![image](https://github.com/UFund-Me/Qbot/assets/29084184/9f1dcc07-ca76-4600-a02c-76104fb28c51)
👉 点击[这里](https://github.com/UFund-Me/Qbot/blob/main/gui/mainframe.py#L122-L141)查看源码
#### 后端/服务端
1. 选基、选股助手对应客户端第二个菜单AI选股/选基)
1. 选基、选股助手(对应前端/客户端第二个菜单AI选股/选基)
运行命令
@@ -216,11 +213,12 @@ go build
./investool webserver
```
2. 基金策略在线分析(对应客户端第四个菜单:基金投资策略分析)
2. 基金策略在线分析(对应于前端/客户端第四个菜单:基金投资策略分析)
需要 node 开发环境: `npm`、`node`,点击[查看](https://github.com/UFund-Me/Qbot/blob/main/pyfunds/fund-strategies/README.md)详细操作文档。
<details><summary>版本信息(作为参考)</summary>
```
▶ go version
go version go1.20.4 darwin/amd64
@@ -231,6 +229,7 @@ v19.7.0
▶ npm --version
9.5.0
```
</details>
使用docker运行项目在项目路径下运行以下命令构建项目的docker镜像

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 12 KiB

View File

@@ -13,9 +13,9 @@ class PanelBacktest(wx.Panel):
def __init__(self, parent):
super(PanelBacktest, self).__init__(parent)
# # 回测按钮
# self.btn_bkt = wx.Button(self, label="回测")
# self.Bind(wx.EVT_BUTTON, OnBkt, self.btn_bkt)
# 回测按钮
self.btn_bkt = wx.Button(self, label="回测")
self.Bind(wx.EVT_BUTTON, OnBkt, self.btn_bkt)
# 进度条

View File

@@ -1,3 +1,17 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2022-12-04 12:03:50
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-07-02 20:11:13
FilePath: /Qbot/pytrader/data/data_utils.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
GitHub: https://github.com/Charmve
Description:
Copyright (c) 2023 by Charmve, All Rights Reserved.
Licensed under the MIT License.
'''
import os
import pandas as pd
@@ -18,21 +32,32 @@ def load_data(codes, start_time="20100101", end_time="20211231"):
def load_from_file(code):
path = os.path.dirname(__file__)
filename = "{}/{}.csv".format(os.path.dirname(path) + "/data/indexes", code)
# print("stack data:" + filename)
print("stack data:" + filename)
if os.path.exists(filename):
df = pd.read_csv(filename, index_col=[0])
# print(df.head())
print(df.head())
df.rename(
columns={"trade_date": "date", "ts_code": "code", "vol": "volume"},
inplace=True,
)
df["date"] = df["date"].apply(lambda x: str(x))
df = df[["code", "open", "high", "low", "close", "date", "volume"]]
# df = df[['open', 'high', 'low', 'close', 'date', 'volume']]
df.index = df["date"]
# df = df[["code", "open", "high", "low", "close", "date", "volume"]]
df = df[['open', 'high', 'low', 'close', 'date', 'volume']]
df.index = df["code"]
df.sort_index(ascending=True, inplace=True)
df["rate"] = df["close"].pct_change()
else:
print("load_from_file error")
return None
return df
class DataUtil:
    """Helpers around backtest result frames.

    Attributes:
        df_results: DataFrame of strategy/backtest results.
        benchmarks: list of benchmark index codes; defaults to CSI 300.
    """

    def __init__(self, df_results, benchmarks=None):
        self.df_results = df_results
        # Bug fix: the previous default ``benchmarks=['000300.SH']`` was a
        # shared mutable default argument; keep the same effective default.
        self.benchmarks = ['000300.SH'] if benchmarks is None else benchmarks

    def to_backtrader_dataframe(self, df):
        """Reshape *df* into the OHLCV(+openinterest) layout backtrader expects.

        Bug fix: the original signature ``def to_backtrader_dataframe(self.df)``
        was a SyntaxError; it is now a regular ``(self, df)`` method.

        Args:
            df: DataFrame indexed by date strings with at least
                open/high/low/close/volume columns.
        Returns:
            DataFrame with a DatetimeIndex and the columns
            ['open', 'high', 'low', 'close', 'volume', 'openinterest'].
        """
        df.index = pd.to_datetime(df.index)
        df['openinterest'] = 0  # backtrader requires this column even if unused
        df = df[['open', 'high', 'low', 'close', 'volume', 'openinterest']]
        return df

View File

@@ -7,7 +7,7 @@ from pandas import DataFrame
from ..context import Context
from ..event_engine import Event
from easytrader.webtrader import WebTrader
from ..easytrader.webtrader import WebTrader
class StrategyTemplate:

View File

@@ -9,9 +9,168 @@ import matplotlib.pyplot as plt
import pandas as pd
import talib as ta
from base import Strategy
from data.data_utils import load_data, load_from_file
# from data.data_utils import load_data, load_from_file
from IPython.display import display
from model.lgb import LGBModel
# from model.lgb import LGBModel
import os
import pandas as pd
def load_data(codes, start_time="20100101", end_time="20211231"):
    """Load each code's CSV frame, stack them, and slice to the date window.

    Args:
        codes: iterable of instrument codes resolvable by ``load_from_file``.
        start_time/end_time: inclusive date-string bounds (YYYYMMDD).
    Returns:
        One concatenated DataFrame sorted by its date-string index.
    """
    frames = [load_from_file(code) for code in codes]
    merged = pd.concat(frames, axis=0)
    merged.sort_index(inplace=True)
    # The index is the (string) trade date, so label slicing selects the window.
    return merged.loc[start_time:end_time]
def load_from_file(code):
    """Load one instrument's daily bars from ``<repo>/data/indexes/<code>.csv``.

    Returns:
        DataFrame indexed by date string with columns
        ['open', 'high', 'low', 'close', 'date', 'volume'] plus a daily
        'rate' column (close-to-close pct change), or None when the CSV
        does not exist.
    """
    path = os.path.dirname(__file__)
    filename = "{}/{}.csv".format(os.path.dirname(path) + "/data/indexes", code)
    # print("stack data:" + filename)
    if os.path.exists(filename):
        df = pd.read_csv(filename, index_col=[0])
        # print(df.head())
        # Normalize tushare-style column names to the generic schema.
        df.rename(
            columns={"trade_date": "date", "ts_code": "code", "vol": "volume"},
            inplace=True,
        )
        df["date"] = df["date"].apply(lambda x: str(x))
        ## code,date,close,open,high,low,volume,amount
        # df = df[["code", "open", "high", "low", "close", "date", "volume"]]
        df = df[['open', 'high', 'low', 'close', 'date', 'volume']]
        df.index = df["date"]
        df.sort_index(ascending=True, inplace=True)
        df["rate"] = df["close"].pct_change()  # daily return
    else:
        print("load_from_file error")
        return None
    return df
import numpy as np
import pandas as pd
import lightgbm as lgb
from sklearn.metrics import r2_score, accuracy_score
class LGBModel:
    """Thin LightGBM wrapper.

    Trains a regression model (mse objective) by default, or a 10-class
    classifier when constructed with ``regression=False``. Expects a project
    ``dataset`` object providing ``split()`` and ``get_data(date_range=...)``.
    """

    def __init__(self, regression = True):
        self.regression = regression
        # Bug fix: predict() tests ``self.model is None``, but the attribute
        # was never created, so calling predict() before fit() raised
        # AttributeError instead of the intended ValueError.
        self.model = None

    def fit(self, dataset):
        """Train on ``dataset.split()`` -> (X_train, X_valid, y_train, y_valid),
        with early stopping on the validation set, and print quality metrics."""
        X_train, X_valid, y_train, y_valid = dataset.split()
        dtrain = lgb.Dataset(X_train, label=y_train)
        dvalid = lgb.Dataset(X_valid, label=y_valid)
        #params = {"objective": 'mse', "verbosity": -1}
        # Hyper-parameters for the regression objective.
        params_regression = {
            'learning_rate': 0.1,
            'metrics':{'auc','mse'},
            'lambda_l1': 0.1,
            'lambda_l2': 0.2,
            'max_depth': 4,
            'objective': 'mse'  # objective function
        }
        # Hyper-parameters for the 10-class classification objective.
        params = {'num_leaves': 90,
                  'min_data_in_leaf': 30,
                  'objective': 'multiclass',
                  'num_class': 10,
                  'max_depth': -1,
                  'learning_rate': 0.03,
                  "min_sum_hessian_in_leaf": 6,
                  "boosting": "gbdt",
                  "feature_fraction": 0.9,
                  "bagging_freq": 1,
                  "bagging_fraction": 0.8,
                  "bagging_seed": 11,
                  "lambda_l1": 0.1,
                  "verbosity": -1,
                  "nthread": 15,
                  'metric': {'multi_logloss'},
                  "random_state": 2022,
                  #'device': 'gpu'
                  }
        if self.regression:
            params = params_regression
        self.model = lgb.train(
            params,
            dtrain,
            num_boost_round=1000,
            valid_sets=[dtrain, dvalid],
            valid_names=["train", "valid"],
            early_stopping_rounds=50,
            verbose_eval=True,
            # evals_result=evals_result,
            #**kwargs
        )
        # Report out-of-sample (and in-sample) quality.
        y_pred = self.model.predict(X_valid)
        if not self.regression:
            y_pred = np.argmax(y_pred, axis=1)
            print('accuracy:',accuracy_score(y_pred, y_valid))
            y_pred_train = np.argmax(self.model.predict(X_train), axis=1)
            print('accuracy_train:',accuracy_score(y_pred_train, y_train))
        else:
            print('R2系数', r2_score(y_valid, y_pred))
            print('训练集——R2系数', r2_score(y_train, self.model.predict(X_train)))

    def predict(self, dataset):
        """Predict over ``dataset.get_data(date_range=...)``.

        Returns:
            pd.Series — argmax class ids for classification, raw predictions
            for regression.
        Raises:
            ValueError: if called before :meth:`fit`.
        """
        if self.model is None:
            raise ValueError("model is not fitted yet!")
        x_test,_ = dataset.get_data(date_range=['20160101', '20211231'])
        pred = self.model.predict(x_test)
        print(pred)
        if not self.regression:
            return pd.Series(np.argmax(pred, axis=1), index=x_test.index)
        else:
            return pd.Series(pred, index=x_test.index)
# if __name__ == '__main__':
# from bak.data.dataset import Dataset
# from engine.data.datahandler import DataHandler
# fields = ['Return($close,5)', 'Return($close,20)', 'Ref($close,126)/$close -1','$close','$open','$high','$low','$volume','$amount']
# names = ['return_5', 'return_20', 'return_126','close','open','high','low','volume','amount']
# #fields += ['Ref($close,-5)/$close -1']
# #names += ['return_-5']
# #ds = Dataset(codes=, fields=fields, feature_names=names,
# # label_expr='QCut(Ref($close,-20)/$close -1,10)')
# #print(ds.df)
# codes = ['512690.SH', '512170.SH', '512660.SH','159928.SZ','512010.SH']
# codes = ['159915.SZ','510300.SH','512690.SH', '512170.SH', '512660.SH','159928.SZ','512010.SH']
# codes = ['159928.SZ','510050.SH','512010.SH','513100.SH','518880.SH','511220.SH','511010.SH','161716.SZ']
# codes = [
# '000300.SH',
# '000905.SH',
# '399006.SZ', #创业板
# '000852.SH', #中证1000
# '399324.SZ', #深证红利
# #'000922.SH', #中证红利
# '399997.SZ', #中证白酒
# '399396.SZ', #食品饮料
# '000013.SH',#上证企债
# '000016.SH' #上证50
# ]
# ds = Dataset(codes=codes, handler=DataHandler())
# print(ds.df)
# m = LGBModel()
# m.fit(ds)
# pred = m.predict(ds)
# print(pred)
# Step 1: load dataset and generate features

View File

@@ -0,0 +1,64 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2023-03-18 18:06:06
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-03-18 18:14:39
FilePath: /Qbot/qbot/strategies/adx_strategy.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
GitHub: https://github.com/Charmve
Description: 获取股票数据并进行量化回测——基于ADX和MACD趋势策略
https://blog.csdn.net/ndhtou222/article/details/121219649
Copyright (c) 2023 by Charmve, All Rights Reserved.
Licensed under the MIT License.
'''
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#正常显示画图时出现的中文和负号
from pylab import mpl
mpl.rcParams['font.sans-serif']=['SimHei']
mpl.rcParams['axes.unicode_minus']=False
#不显示警告信息
import warnings
warnings.filterwarnings('ignore')
from get_stack_data import get_from_tushare
import pyfolio as pf
import talib as ta
def adx_strategy(df,ma1=13,ma2=55,ma3=89,adx=25):
    """ADX + MACD trend-following strategy; returns the daily return series.

    Buys when the three EMAs are stacked bullishly (EMA1>EMA2>EMA3), ADX is
    still below *adx* but rising, and the MACD histogram is rising; the
    position is exited three bars after entry.
    """
    # Compute MACD and ADX indicators
    df['EMA1'] = ta.EMA(df.close,ma1)
    df['EMA2'] = ta.EMA(df.close,ma2)
    df['EMA3'] = ta.EMA(df.close,ma3)
    df['MACD'],df['MACDSignal'],df['MACDHist'] = ta.MACD(df.close,12,26,9)
    df['ADX'] = ta.ADX(df.high,df.low,df.close,14)
    # Buy signal: short EMA > mid EMA > long EMA; ADX rising but <= threshold;
    # MACD histogram rising.
    # NOTE(review): the original comment mentioned 21/42/63-day averages while
    # the defaults are 13/55/89 — confirm the intended windows.
    df['Buy_Sig'] =(df['EMA1']>df['EMA2'])&(df['EMA2']>df['EMA3'])&(df['ADX']<=adx)\
        &(df['ADX']>df['ADX'].shift(1))&(df['MACDHist']>df['MACDHist'].shift(1))
    df.loc[df.Buy_Sig,'Buy_Trade'] = 1
    df.loc[df.Buy_Trade.shift(1)==1,'Buy_Trade'] = " "
    # Avoid opening a trade within the final three bars
    df.Buy_Trade.iloc[-3:] = " "
    df.loc[df.Buy_Trade==1,'Buy_Price'] = df.close
    df.Buy_Price = df.Buy_Price.ffill()
    df['Buy_Daily_Return']= (df.close - df.Buy_Price)/df.Buy_Price
    # Exit three bars after entry and book the trade's return.
    df.loc[df.Buy_Trade.shift(3)==1,'Sell_Trade'] = -1
    df.loc[df.Sell_Trade==-1,'Buy_Total_Return'] = df.Buy_Daily_Return
    df.loc[(df.Sell_Trade==-1)&(df.Buy_Daily_Return==0),'Buy_Total_Return'] = \
        (df.Buy_Price - df.Buy_Price.shift(1))/df.Buy_Price.shift(1)
    df.loc[(df.Sell_Trade==-1)&(df.Buy_Trade.shift(1)==1),'Buy_Total_Return'] = \
        (df.close-df.Buy_Price.shift(2))/df.Buy_Price.shift(2)
    # Return the strategy's daily return series (NaN -> 0)
    return df.Buy_Total_Return.fillna(0)
# Demo/driver code (runs at import time): plot the raw price series and
# compare buy-and-hold vs. the ADX strategy with pyfolio tear sheets.
df=get_from_tushare('300002')
df.close.plot(figsize=(12,6))
plt.title('神州泰岳股价走势\n2010-2021',size=15)
# pct_change of close = buy-and-hold daily returns; pyfolio requires a
# timezone-aware index, hence tz_localize('UTC').
pf.create_simple_tear_sheet((df.close.pct_change()).fillna(0).tz_localize('UTC'))
pf.create_simple_tear_sheet(adx_strategy(df).tz_localize('UTC'))

View File

@@ -0,0 +1,84 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2023-03-18 18:09:25
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-03-18 18:09:26
FilePath: /Qbot/qbot/strategies/get_stack_data.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
GitHub: https://github.com/Charmve
Description:
Copyright (c) 2023 by Charmve, All Rights Reserved.
Licensed under the MIT License.
'''
#使用tushare旧版获取数据
import tushare as ts
def get_from_tushare(code,adj='hfq',start='2010-01-01',end='2021-11-05'):
    """Fetch daily bars via the legacy tushare API.

    Args:
        code: numeric code string, e.g. '300002'.
        adj: price adjustment ('hfq' back-adjusted by default).
        start/end: 'YYYY-MM-DD' date bounds.
    Returns:
        DataFrame indexed by trade date (DatetimeIndex).
    """
    # Bug fix: this module never imports pandas at top level, so `pd` below
    # was an undefined name; import it locally to keep the function usable.
    import pandas as pd
    df=ts.get_k_data(code,autype=adj,start=start,end=end)
    df.index=pd.to_datetime(df.date)
    # The source data already arrives sorted by date.
    return df
# Fetch data with tushare pro (requires a personal API token).
import tushare as ts
token='输入你自己的token'  # placeholder — replace with your personal tushare token
pro=ts.pro_api(token)
ts.set_token(token)
def get_from_tushare_pro(code,adj='hfq',start='2010-01-01',end='2021-11-05'):
    """Fetch daily bars via tushare pro.

    Args:
        code: bare numeric code string, e.g. '300002'; the exchange suffix
            ('.SH' for 6xxxxx codes, '.SZ' otherwise) is appended here.
        adj: adjustment type ('hfq' back-adjusted, 'qfq' forward-adjusted).
        start/end: 'YYYY-MM-DD'; converted to 'YYYYMMDD' as the API requires.
    Returns:
        DataFrame sorted ascending by trade date (DatetimeIndex).
    """
    # Bug fix: this module never imports pandas at top level; `pd` below was
    # an undefined name.
    import pandas as pd
    if code.startswith('6'):
        code=code+'.SH'
    else:
        code=code+'.SZ'
    start=''.join(start.split('-'))
    end=''.join(end.split('-'))
    df=ts.pro_bar(ts_code=code,adj=adj,start_date=start,end_date=end)
    # pro_bar returns rows newest-first: index by trade date, then re-sort
    # ascending so the series runs oldest -> newest.
    df.index=pd.to_datetime(df.trade_date)
    df=df.sort_index()
    return df
#使用akshare获取数据,其数据源来自新浪与tushare旧版本相似
import akshare as ak
def get_from_akshare(code, adj='hfq', start='2010-01-01', end='2021-11-05'):
    """Fetch daily bars from akshare (Sina source; similar to legacy tushare).

    Args:
        code: bare numeric code string; 'sh'/'sz' prefix is added here.
        adj: adjustment type passed through to akshare.
        start/end: 'YYYY-MM-DD'; dashes are stripped for the API.
    Returns:
        DataFrame of daily bars as returned by akshare.
    """
    prefix = 'sh' if code.startswith('6') else 'sz'
    symbol = prefix + code
    begin = start.replace('-', '')
    finish = end.replace('-', '')
    return ak.stock_zh_a_daily(symbol=symbol, start_date=begin, end_date=finish, adjust=adj)
#使用baostock获取数据
import baostock as bs
def get_from_baostock(code,adj='hfq',start='2010-01-01',end='2021-11-05'):
    """Fetch daily bars from baostock.

    Args:
        code: bare numeric code string; 'sh.'/'sz.' prefix is added here.
        adj: 'hfq' (back-adjusted), 'qfq' (forward-adjusted), else unadjusted.
        start/end: 'YYYY-MM-DD' date bounds (baostock accepts this format).
    Returns:
        DataFrame of string-typed daily bars indexed by trade date.
    """
    # Bug fix: this module never imports pandas at top level; `pd` below was
    # an undefined name.
    import pandas as pd
    if code.startswith('6'):
        code='sh.'+code
    else:
        code='sz.'+code
    # Map the adjustment name onto baostock's numeric adjustflag:
    # 1 = back-adjusted (hfq), 2 = forward-adjusted (qfq), 3 = unadjusted.
    if adj=='hfq':
        adj='1'
    elif adj=='qfq':
        adj='2'
    else:
        adj='3'
    # baostock requires an explicit login/logout around every query.
    bs.login()
    rs = bs.query_history_k_data_plus(code,
                                      fields="date,code,open,high,low,close,volume",
                                      start_date=start, end_date=end,
                                      frequency="d", adjustflag=adj)
    data_list = []
    while (rs.error_code == '0') & rs.next():
        data_list.append(rs.get_row_data())
    # Convert the accumulated rows into a DataFrame indexed by date.
    df = pd.DataFrame(data_list, columns=rs.fields)
    df.index=pd.to_datetime(df.date)
    bs.logout()
    return df

View File

@@ -0,0 +1,106 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2023-03-12 18:44:54
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-05-17 09:35:01
FilePath: /Qbot/qbot/strategies/klines_bt.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
GitHub: https://github.com/Charmve
Description:
Copyright (c) 2023 by Charmve, All Rights Reserved.
Licensed under the MIT License.
'''
from datetime import datetime
import backtrader
from loguru import logger
import matplotlib.pyplot as plt
import pandas as pd
import efinance
def get_k_data(stock_code, begin: datetime, end: datetime) -> pd.DataFrame:
    """Fetch daily K-line data for one stock via the efinance toolkit.

    :param stock_code: stock code, e.g. '600519' (Moutai)
    :param begin: start date
    :param end: end date
    :return: OHLCV/turnover DataFrame indexed by trading date
    """
    raw: pd.DataFrame = efinance.stock.get_quote_history(
        stock_code, beg=begin.strftime("%Y%m%d"), end=end.strftime("%Y%m%d"))
    # Keep only the first nine columns and give them canonical names.
    frame = raw.iloc[:, :9]
    frame.columns = ['name', 'code', 'date', 'open', 'close', 'high', 'low', 'volume', 'turnover']
    frame.index = pd.to_datetime(frame.date)
    # Index now carries the date; drop the redundant identity columns.
    frame.drop(['name', 'code', 'date'], axis=1, inplace=True)
    return frame
class KlinesStrategy(backtrader.Strategy):  # strategy: buy above the 5-day SMA, sell below it
    def __init__(self):
        # Cache the close series and a 5-period simple moving average.
        self.close_price = self.datas[0].close  # keep a data reference for later use
        self.sma = backtrader.indicators.SimpleMovingAverage(self.datas[0], period=5)  # 5-day SMA

    def notify_order(self, order):  # standard backtrader hook: report order status changes
        if order.status in [order.Submitted, order.Accepted]:  # order accepted; nothing to do yet
            return
        if order.status in [order.Completed]:
            if order.isbuy():
                logger.debug('已买入, 购入金额 %.2f' % order.executed.price)
            elif order.issell():
                logger.debug('已卖出, 卖出金额 %.2f' % order.executed.price)
        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            logger.debug('订单取消、保证金不足、金额不足拒绝交易')

    def next(self):  # called by the engine once per bar; self.datas[0] holds the current bar
        # Buy condition: today's close breaks above the 5-day SMA.
        if self.close_price[0] > self.sma[0]:
            logger.debug("buy 500 in {}, 预期购入金额 {}, 剩余可用资金 {}", self.datetime.date(), self.data.close[0],
                         self.broker.getcash())
            self.buy(size=500, price=self.data.close[0])
        # Sell condition: holding a position and the close falls below the 5-day SMA.
        if self.position:
            if self.close_price[0] < self.sma[0]:
                logger.debug("sell in {}, 预期卖出金额 {}, 剩余可用资金 {}", self.datetime.date(), self.data.close[0],
                             self.broker.getcash())
                self.sell(size=500, price=self.data.close[0])
if __name__ == '__main__':
    # Fetch the historical bars to backtest on.
    start_time = datetime(2015, 1, 1)
    end_time = datetime(2021, 1, 1)
    dataframe = get_k_data('600519', begin=start_time, end=end_time)
    # =============== feed the data into the system =================
    data = backtrader.feeds.PandasData(dataname=dataframe, fromdate=start_time, todate=end_time)
    # Initialise the cerebro backtest engine.
    cerebral_system = backtrader.Cerebro()  # Cerebro creates a broker instance behind the scenes (default cash 10000)
    # Register the data feed; strategies access it via self.datas.
    cerebral_system.adddata(data)
    # Register the trading strategy.
    cerebral_system.addstrategy(KlinesStrategy)
    # =============== engine settings ==================
    # Starting cash: 1,000,000.
    start_cash = 1000000
    cerebral_system.broker.setcash(start_cash)
    # Commission: 0.025% (2.5 per 10,000).
    cerebral_system.broker.setcommission(commission=0.00025)
    logger.debug('初始资金: {} 回测期间from {} to {}'.format(start_cash, start_time, end_time))
    # Run the backtest.
    cerebral_system.run()
    # Final portfolio value and profit/loss.
    portvalue = cerebral_system.broker.getvalue()
    pnl = portvalue - start_cash
    logger.debug('净收益: {}', pnl)
    logger.debug("总资金: {}", portvalue)
    cerebral_system.plot(style='candlestick')
    plt.show()

96
qbot/strategies/util.py Normal file
View File

@@ -0,0 +1,96 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2023-03-14 01:49:02
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-03-14 01:50:11
FilePath: /Qbot/qbot/strategies/util.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
GitHub: https://github.com/Charmve
Description:
Copyright (c) 2023 by Charmve, All Rights Reserved.
Licensed under the MIT License.
'''
import baostock as bs
import pandas as pd
import talib as ta
import matplotlib.pyplot as plt
def computeMACD(code, startdate, enddate):
    """Compute MACD (dif/dea/hist) for *code* from baostock daily closes,
    plot the three series, and print golden/death-cross dates.

    Returns:
        (dif, dea, hist) numpy arrays as produced by talib (first 33 are NaN).
    """
    login_result = bs.login(user_id='anonymous', password='123456')
    print(login_result)
    # Fetch daily K-line data.
    rs = bs.query_history_k_data(code,
                                 "date,code,close,tradeStatus",
                                 start_date=startdate,
                                 end_date=enddate,
                                 frequency="d", adjustflag="3")
    # Collect the result set row by row.
    result_list = []
    while (rs.error_code == '0') & rs.next():
        result_list.append(rs.get_row_data())
    df = pd.DataFrame(result_list, columns=rs.fields)
    # Drop suspended-trading rows.
    df2 = df[df['tradeStatus'] == '1']
    # dif/dea/hist are arrays aligned one-to-one with df2's dates.
    # Their first 33 entries are NaN, so fetch roughly 3x the date range you
    # actually need. hist here is dif-dea; many brokers report
    # MACD = hist*2 = (dif-dea)*2.
    dif, dea, hist = ta.MACD(df2['close'].astype(float).values, fastperiod=12, slowperiod=26, signalperiod=9)
    df3 = pd.DataFrame({'dif': dif[33:], 'dea': dea[33:], 'hist': hist[33:]}, index=df2['date'][33:], columns=['dif', 'dea', 'hist'])
    df3.plot(title='MACD')
    plt.show()
    # Scan for MACD golden crosses (dif crosses above dea) and death crosses.
    datenumber = int(df3.shape[0])
    for i in range(datenumber - 1):
        if ((df3.iloc[i, 0] <= df3.iloc[i, 1]) & (df3.iloc[i + 1, 0] >= df3.iloc[i + 1, 1])):
            print("MACD 金叉的日期:" + df3.index[i + 1])
        if ((df3.iloc[i, 0] >= df3.iloc[i, 1]) & (df3.iloc[i + 1, 0] <= df3.iloc[i + 1, 1])):
            print("MACD 死叉的日期:" + df3.index[i + 1])
    bs.logout()
    return (dif, dea, hist)
def calculateEMA(period, closeArray, emaArray=None):
    """Compute an exponential moving average series over *closeArray*.

    Leading NaNs in the input are skipped; the first EMA value is seeded with
    the arithmetic mean of the first ``period - 1`` valid closes (original
    algorithm preserved), and the output is padded with NaNs so it aligns
    index-for-index with the input.

    Args:
        period: EMA lookback length.
        closeArray: sequence of close prices (may start with NaNs).
        emaArray: optional list to append onto; a fresh list is used when
            None. (Bug fix: the previous mutable default ``emaArray=[]`` was
            shared across calls, so a second call with the default kept
            appending onto the first call's results.)
    Returns:
        numpy array of EMA values, same length as the input.
    """
    # Bug fix: this module never imports numpy at top level, so `np` below
    # was an undefined name; import locally to keep the function usable.
    import numpy as np
    if emaArray is None:
        emaArray = []
    length = len(closeArray)
    nanCounter = np.count_nonzero(np.isnan(closeArray))
    if not emaArray:
        # Pad so the EMA series aligns with the input, then seed the first EMA
        # with the mean of the first period-1 valid closes.
        emaArray.extend(np.tile([np.nan], (nanCounter + period - 1)))
        firstema = np.mean(closeArray[nanCounter:nanCounter + period - 1])
        emaArray.append(firstema)
    for i in range(nanCounter + period, length):
        # Standard EMA recurrence with smoothing factor 2 / (period + 1).
        ema = (2 * closeArray[i] + (period - 1) * emaArray[-1]) / (period + 1)
        emaArray.append(ema)
    return np.array(emaArray)
def calculateMACD(closeArray, shortPeriod=12, longPeriod=26, signalPeriod=9):
    """Hand-rolled MACD built on :func:`calculateEMA`.

    Returns the latest (fast, slow, histogram) values:
    fast = EMA12-EMA26 (DIF), slow = EMA9 of DIF (DEA),
    histogram = (DIF - DEA) * 2.
    """
    ema12 = calculateEMA(shortPeriod, closeArray, [])
    ema26 = calculateEMA(longPeriod, closeArray, [])
    diff = ema12 - ema26
    dea = calculateEMA(signalPeriod, diff, [])
    macd = (diff - dea)*2
    fast_values = diff  # fast line (DIF)
    slow_values = dea  # slow/signal line (DEA)
    diff_values = macd  # MACD histogram
    # return fast_values, slow_values, diff_values  # return the full fast/slow/macd series
    return fast_values[-1], slow_values[-1], diff_values[-1]  # return only the latest values
    # return round(fast_values[-1],5), round(slow_values[-1],5), round(diff_values[-1],5)
def getMACD():
    """Fetch K-line data from a remote endpoint and return the latest MACD tuple.

    NOTE(review): ``RequestUtil`` and ``UrlConstant`` are not defined or
    imported anywhere in this file — as written this raises NameError;
    confirm where those helpers live before calling.
    """
    data = RequestUtil.sendRequest_GET(UrlConstant.Get_K_Line)
    # Column 4 of each row is presumably the close price — TODO confirm schema.
    closeArray = [float(i[4]) for i in data]
    # The reverse suggests the remote data arrives newest-first — verify.
    closeArray.reverse()
    return calculateMACD(closeArray)
if __name__ == '__main__':
    # Demo: compute and plot MACD for sh.600000 over roughly one year.
    code = 'sh.600000'
    startdate = '2022-03-01'
    enddate = '2023-03-18'
    (dif, dea, hist) = computeMACD(code, startdate, enddate)
    print((dif, dea, hist))

View File

@@ -0,0 +1,246 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2023-03-12 20:11:54
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-03-12 20:12:24
FilePath: /Qbot/utils/common/AShareDailyData.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
GitHub: https://github.com/Charmve
Description:
Copyright (c) 2023 by Charmve, All Rights Reserved.
Licensed under the MIT License.
'''
import multiprocessing
import os
import sys
import traceback
from datetime import datetime, timedelta, time
from time import sleep
from typing import List
from tqdm import tqdm
from vnpy.trader.constant import Interval, Exchange
from vnpy.trader.database import database_manager, BarOverview
from vnpy.trader.object import HistoryRequest, BarData
from utils import log
sys.path.append(os.getcwd())
from TuShare import tushare_client, to_split_ts_codes, TS_DATE_FORMATE
class AShareDailyDataManager:
    """Download and maintain daily bars for the whole A-share market, pulling
    from tushare and persisting through vnpy's ``database_manager``."""

    def __init__(self):
        """Set up the tushare client and load what is already stored locally."""
        self.tushare_client = tushare_client
        self.symbols = None  # tushare symbol table (ts_code, list_date, ...)
        self.trade_cal = None  # per-exchange trading calendar
        self.bar_overviews: List[BarOverview] = None  # summary of locally stored bars
        self.init()

    def init(self):
        """Initialise the tushare client and cache symbols, calendar and overviews."""
        self.tushare_client.init()
        self.symbols = self.tushare_client.symbols
        self.trade_cal = self.tushare_client.trade_cal
        self.bar_overviews = database_manager.get_bar_overview()

    def download_all(self):
        """
        Download daily bars for every listed A-share symbol via tushare,
        from each symbol's listing date up to now.
        :return:
        """
        log.info("开始下载A股股票全市场日线数据")
        if self.symbols is not None:
            with tqdm(total=len(self.symbols)) as pbar:
                for tscode, list_date in zip(self.symbols['ts_code'], self.symbols['list_date']):
                    symbol, exchange = to_split_ts_codes(tscode)
                    pbar.set_description_str("下载A股日线数据股票代码:" + tscode)
                    # Full history: start at the listing date.
                    start_date = datetime.strptime(list_date, TS_DATE_FORMATE)
                    req = HistoryRequest(symbol=symbol,
                                         exchange=exchange,
                                         start=start_date,
                                         end=datetime.now(),
                                         interval=Interval.DAILY)
                    bardata = self.tushare_client.query_history(req=req)
                    if bardata:
                        try:
                            database_manager.save_bar_data(bardata)
                        except Exception as ex:
                            # Log and continue with the next symbol.
                            log.error(tscode + "数据存入数据库异常")
                            log.error(ex)
                            traceback.print_exc()
                    pbar.update(1)
                    log.info(pbar.desc)
        log.info("A股股票全市场日线数据下载完毕")

    def get_newest_bar_data(self, symbol: str, exchange: Exchange, interval: Interval) -> BarData or None:
        """Return the most recent locally stored bar for the symbol, or None
        when no matching overview exists."""
        for overview in self.bar_overviews:
            if exchange == overview.exchange and interval == overview.interval and symbol == overview.symbol:
                # Load just the single bar at the overview's end date.
                bars = database_manager.load_bar_data(symbol=symbol, exchange=exchange, interval=interval,
                                                      start=overview.end, end=overview.end)
                return bars[0] if bars is not None else None
        return None

    def update_newest(self):
        """
        Bring the local database up to date from tushare. Assumes everything
        before the locally newest bar is already complete.
        :return:
        """
        log.info("开始更新最新的A股股票全市场日线数据")
        if self.symbols is not None:
            with tqdm(total=len(self.symbols)) as pbar:
                for tscode, list_date in zip(self.symbols['ts_code'], self.symbols['list_date']):
                    symbol, exchange = to_split_ts_codes(tscode)
                    newest_local_bar = self.get_newest_bar_data(symbol=symbol,
                                                                exchange=exchange,
                                                                interval=Interval.DAILY)
                    if newest_local_bar is not None:
                        # Resume the day after the newest local bar.
                        pbar.set_description_str("正在处理股票代码:" + tscode + "本地最新数据:" +
                                                 newest_local_bar.datetime.strftime(TS_DATE_FORMATE))
                        start_date = newest_local_bar.datetime + timedelta(days=1)
                    else:
                        # No local data at all: start from the listing date.
                        pbar.set_description_str("正在处理股票代码:" + tscode + "无本地数据")
                        start_date = datetime.strptime(list_date, TS_DATE_FORMATE)
                    req = HistoryRequest(symbol=symbol,
                                         exchange=exchange,
                                         start=start_date,
                                         end=datetime.now(),
                                         interval=Interval.DAILY)
                    bardata = self.tushare_client.query_history(req=req)
                    if bardata:
                        try:
                            database_manager.save_bar_data(bardata)
                        except Exception as ex:
                            log.error(tscode + "数据存入数据库异常")
                            log.error(ex)
                            traceback.print_exc()
                    pbar.update(1)
                    log.info(pbar.desc)
        log.info("A股股票全市场日线数据更新完毕")

    def check_update_all(self):
        """
        This method is very slow; calling it is not recommended.
        Intended for when the local database exists but some rows may be
        missing: for every trading day since listing, any day without a local
        bar is re-queried from tushare; if tushare also has nothing, trading
        was suspended that day.
        :return:
        """
        log.info("开始检查更新所有的A股股票全市场日线数据")
        if self.symbols is not None:
            with tqdm(total=len(self.symbols)) as pbar:
                for tscode, list_date in zip(self.symbols['ts_code'], self.symbols['list_date']):
                    pbar.set_description_str("正在检查A股日线数据股票代码:" + tscode)
                    symbol, exchange = to_split_ts_codes(tscode)
                    local_bar = database_manager.load_bar_data(symbol=symbol,
                                                               exchange=exchange,
                                                               interval=Interval.DAILY,
                                                               start=datetime.strptime(list_date, TS_DATE_FORMATE),
                                                               end=datetime.now())
                    local_bar_dates = [bar.datetime.strftime(TS_DATE_FORMATE) for bar in local_bar]
                    # Trading calendar for this exchange, starting at the listing date.
                    index = (self.trade_cal[exchange.value][(self.trade_cal[exchange.value].cal_date == list_date)])
                    trade_cal = self.trade_cal[exchange.value].iloc[index.index[0]:]
                    for trade_date in trade_cal['cal_date']:
                        if trade_date not in local_bar_dates:
                            # Missing locally: query exactly that one day.
                            req = HistoryRequest(symbol=symbol,
                                                 exchange=exchange,
                                                 start=datetime.strptime(trade_date, TS_DATE_FORMATE),
                                                 end=datetime.strptime(trade_date, TS_DATE_FORMATE),
                                                 interval=Interval.DAILY)
                            bardata = self.tushare_client.query_history(req=req)
                            if bardata:
                                log.info(tscode + "本地数据库缺失:" + trade_date)
                                try:
                                    database_manager.save_bar_data(bardata)
                                except Exception as ex:
                                    log.error(tscode + "数据存入数据库异常")
                                    log.error(ex)
                                    traceback.print_exc()
                    pbar.update(1)
                    log.info(pbar.desc)
        log.info("A股股票全市场日线数据检查更新完毕")
# Module-level singleton; constructing it initialises the tushare client.
a_share_daily_data_manager = AShareDailyDataManager()
def auto_update(start_time: time = time(18, 0)):
    """
    Automatically refresh the local daily-bar database after each trading day.

    Args:
        start_time: time of day at which the update child process is launched.
    """
    log.info("启动A股股票全市场日线数据定时更新")
    run_parent(start_time=start_time)
def run_parent(start_time: time = time(18, 0)):
    """
    Parent-process loop: within the *start_time* minute each day, spawn a
    child process that downloads the newest daily bars; reap it once the
    minute has passed. Polls every 10 seconds and never returns.
    :return:
    """
    log.info("启动A股股票全市场日线数据定时更新父进程")
    # Refresh daily K-line data from tushare at the configured time each evening.
    UPDATE_TIME = start_time
    child_process = None
    while True:
        current_time = datetime.now().time()
        # Inside the trigger minute and no child running yet: start one.
        if current_time.hour == UPDATE_TIME.hour and current_time.minute == UPDATE_TIME.minute and child_process is None:
            log.info("启动日线数据更新子进程")
            child_process = multiprocessing.Process(target=run_child)
            child_process.start()
            log.info("日线数据更新子进程启动成功")
        # Once the trigger minute is over, join the child so a fresh one can
        # be launched on the next day.
        if (not (current_time.hour == UPDATE_TIME.hour and current_time.minute == UPDATE_TIME.minute)) \
                and child_process is not None:
            child_process.join()
            child_process = None
            log.info("数据更新子进程关闭成功")
        log.info("进入A股股票全市场日线数据定时更新父进程")
        sleep(10)  # poll interval
def run_child():
    """Child worker: pull the newest daily bars into the local database.

    Catches every Exception so a failed download never crashes the parent
    scheduler; the traceback is printed for diagnosis.
    """
    log.info("启动A股股票全市场日线数据定时更新子进程")
    try:
        a_share_daily_data_manager.update_newest()
    except Exception:
        log.info("子进程异常")
        traceback.print_exc()
if __name__ == '__main__':
    # Entry point: schedule the daily refresh at 21:47.  The commented calls
    # below are one-off maintenance helpers (full download / single update /
    # gap check against the trading calendar).
    log.info("自动更新A股股票全市场日线数据")
    # a_share_daily_data_manager.download_all()
    # a_share_daily_data_manager.update_newest()
    # a_share_daily_data_manager.check_update_all()
    auto_update(start_time=time(21, 47))

271
utils/common/BaseService.py Normal file
View File

@@ -0,0 +1,271 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2023-03-10 00:45:44
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-03-12 20:10:22
FilePath: /Qbot/utils/common/BaseService.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
GitHub: https://github.com/Charmve
Description:
Copyright (c) 2023 by Charmve, All Rights Reserved.
Licensed under the MIT License.
'''
# -*-coding=utf-8-*-
import datetime
import json
import os
import re
import time
import parsel
import requests
from configure.util import send_message_via_wechat
from loguru import logger
class BaseService(object):
    """Common base class for crawler/data services.

    Provides shared logging, HTTP GET/POST with retries, date helpers,
    a small DB-execute wrapper and proxy utilities.  Subclasses must
    implement the ``headers`` property.
    """

    def __init__(self, logfile="default.log"):
        """
        :param logfile: extra loguru sink file added for this service
        """
        self.logger = logger
        self.logger.add(logfile)
        self.init_const_data()
        self.params = None   # default query params used by self.get()
        self.cookies = None  # default cookies used by self.get()

    def init_const_data(self):
        """Initialise commonly used constant data (today's date string)."""
        self.today = datetime.datetime.now().strftime("%Y-%m-%d")

    def check_path(self, path):
        """Create *path* (and parents) if missing; log failures instead of raising."""
        if not os.path.exists(path):
            try:
                os.makedirs(path)
            except Exception as e:
                self.logger.error(e)

    def get_url_filename(self, url):
        """Return the last path component of *url*."""
        return url.split("/")[-1]

    def save_image(self, content, path):
        """Write binary *content* to *path*."""
        with open(path, "wb") as fp:
            fp.write(content)

    # Backward-compatible alias for the historical (misspelled) method name.
    save_iamge = save_image

    def get(self, url, _json=False, binary=False, retry=5):
        """GET *url* with up to *retry* attempts.

        :param _json: return parsed JSON when True
        :param binary: return raw bytes when True
        :return: JSON / bytes / utf-8 text on success, None when all
                 attempts fail
        NOTE(review): no request timeout is set — a hung connection can
        block forever; confirm before adding one.
        """
        start = 0
        while start < retry:
            try:
                r = requests.get(
                    url=url,
                    params=self.params,
                    headers=self.headers,
                    cookies=self.cookies,
                )
            except Exception as e:
                # Bug fix: the format placeholder was missing, so the actual
                # error was silently dropped from the log message.
                self.logger.error("base class error {}".format(e))
                start += 1
                continue
            else:
                if _json:
                    result = r.json()
                elif binary:
                    result = r.content
                else:
                    r.encoding = "utf8"
                    result = r.text
                return result
        return None

    def post(self, url, post_data, _json=False, binary=False, retry=5):
        """POST *post_data* to *url* with up to *retry* attempts.

        Same return conventions as :meth:`get`.
        """
        start = 0
        while start < retry:
            try:
                r = requests.post(url=url, headers=self.headers, data=post_data)
            except Exception as e:
                # Consistent with get(): log the failure instead of print().
                self.logger.error(e)
                start += 1
                continue
            else:
                if _json:
                    result = r.json()
                elif binary:
                    result = r.content
                else:
                    result = r.text
                return result
        return None

    @property
    def headers(self):
        """Request headers; subclasses must override."""
        raise NotImplementedError

    def parse(self, content):
        """Wrap an HTML/XML string in a parsel Selector for xpath/css parsing."""
        response = parsel.Selector(text=content)
        return response

    def process(self, data, history=False):
        """Persistence hook for subclasses; default implementation does nothing."""
        pass

    def time_str(self, x):
        """Format a date/datetime object as 'YYYY-MM-DD'."""
        return x.strftime("%Y-%m-%d")

    def trading_time(self):
        """Classify the current wall-clock time against A-share trading hours.

        :return: 0 during a trading session, -1 before open or during the
                 lunch break, 1 after the afternoon close.  Session bounds
                 are padded by a few minutes on each side.
        """
        TRADING = 0
        MORNING_STOP = -1
        AFTERNOON_STOP = 1
        NOON_STOP = -1
        current = datetime.datetime.now()
        year, month, day = current.year, current.month, current.day
        start = datetime.datetime(year, month, day, 9, 23, 0)
        noon_start = datetime.datetime(year, month, day, 12, 58, 0)
        morning_end = datetime.datetime(year, month, day, 11, 31, 0)
        end = datetime.datetime(year, month, day, 15, 2, 5)
        if start < current < morning_end:
            return TRADING
        elif noon_start < current < end:
            return TRADING
        elif current > end:
            return AFTERNOON_STOP
        elif current < start:
            return MORNING_STOP
        else:
            return NOON_STOP

    def notify(self, title):
        """Push *title* via enterprise WeChat."""
        send_message_via_wechat(title)

    def weekday(self, day=None):
        """Return the day of week for *day* (0=Sunday .. 6=Saturday).

        Accepts 'YYYY-MM-DD' or 'YYYYMMDD'.  Defaults to today, evaluated
        at call time (bug fix: the old default argument froze the date at
        import time).

        :raises ValueError: when *day* matches neither format
        """
        if day is None:
            day = datetime.datetime.now().strftime("%Y-%m-%d")
        # Bug fix: the pattern was written as an f-string, so {4}/{2} were
        # substituted into the literal and valid dates never matched,
        # raising ValueError for every dashed date.  Use raw strings.
        if re.search(r"\d{4}-\d{2}-\d{2}", day):
            fmt = "%Y-%m-%d"
        elif re.search(r"\d{8}", day):
            fmt = "%Y%m%d"
        else:
            raise ValueError("请输入正确的日期格式")
        current_date = datetime.datetime.strptime(day, fmt)
        year_2000th = datetime.datetime(year=2000, month=1, day=2)  # a Sunday
        day_diff = current_date - year_2000th
        return day_diff.days % 7

    def is_weekday(self, day=None):
        """True when *day* (default: today) is Monday through Friday."""
        if day is None:
            day = datetime.datetime.now().strftime("%Y-%m-%d")
        return self.weekday(day) not in (0, 6)

    def execute(self, cmd, data, conn, logger=None):
        """Run one parameterised SQL statement on *conn*.

        :param cmd: SQL with placeholders
        :param data: a tuple of parameters (a scalar is wrapped automatically)
        :param logger: optional logger for errors; falls back to self.logger
        :return: fetched rows on success, None on error (transaction rolled back)
        """
        cursor = conn.cursor()
        if not isinstance(data, tuple):
            data = (data,)
        try:
            cursor.execute(cmd, data)
        except Exception as e:
            conn.rollback()
            # Bug fix: previously crashed with AttributeError when the
            # optional logger argument was left as None.
            (logger or self.logger).error("执行数据库错误 {},{}".format(e, cmd))
            ret = None
        else:
            ret = cursor.fetchall()
            conn.commit()
        return ret

    def jsonp2json(self, str_):
        """Strip a JSONP callback wrapper and parse the inner JSON object."""
        return json.loads(str_[str_.find("{") : str_.rfind("}") + 1])

    def set_proxy_param(self, proxy):
        # host:port of the proxy-dispatch service used by get_proxy()
        self.proxy_ip = proxy

    def get_proxy(self, retry=10):
        """Fetch a fresh proxy mapping from the dispatch service.

        :return: {'http': ..., 'https': ...} on success, None after *retry*
                 failed attempts
        :raises AttributeError: when set_proxy_param() was never called
        """
        if not hasattr(self, "proxy_ip"):
            raise AttributeError("Please set proxy ip before use it")
        proxyurl = f"http://{self.proxy_ip}/dynamicIp/common/getDynamicIp.do"
        for count in range(1, retry + 1):
            try:
                r = requests.get(proxyurl, timeout=10)
            except Exception as e:
                print(e)
                print("代理获取失败,重试" + str(count))
                time.sleep(1)
            else:
                js = r.json()
                proxyServer = "://{0}:{1}".format(js.get("ip"), js.get("port"))
                return {
                    "http": "http" + proxyServer,
                    "https": "https" + proxyServer,
                }
        return None

    def convert_timestamp(self, t):
        """Convert a millisecond epoch timestamp to 'YYYY-MM-DD'."""
        return datetime.datetime.fromtimestamp(int(t / 1000)).strftime("%Y-%m-%d")
class HistorySet(object):
def __init__(self, expire=1800):
self.data = {}
self.expire = expire
def add(self, value):
now = datetime.datetime.now()
expire = now + datetime.timedelta(seconds=self.expire)
try:
hash(value)
except: # noqa E722
raise ValueError("value not hashble")
else:
self.data.update({value: expire})
def is_expire(self, value):
# 没有过期 返回 False
if value not in self.data or self.data[value] < datetime.datetime.now():
return True
else:
return False
if __name__ == "__main__":
base = BaseService()
base.is_weekday()
# base.set_proxy_param()
print(base.get_proxy())

235
utils/common/TuShare.py Normal file
View File

@@ -0,0 +1,235 @@
import requests
import tushare as ts
from tushare.pro import client
from pytz import timezone
from typing import List, Optional, Dict
import pandas as pd
from datetime import datetime, timedelta
import time
import traceback
from vnpy.trader.object import HistoryRequest, BarData
from vnpy.trader.constant import Exchange, Interval
from utils import log
# Timezone used to localise tushare bar datetimes.
CHINA_TZ = timezone("Asia/Shanghai")
# Tushare API token; must be filled in before TuShareClient.init() is called.
tushare_token: str = ""
# Tushare basic tier returns at most 5000 rows per call; also used as the
# maximum day-span requested per query.
MAX_QUERY_SIZE: int = 5000
# Date format used by the tushare HTTP API, e.g. '20230101'.
TS_DATE_FORMATE: str = '%Y%m%d'
# Tushare basic tier rate limit: at most 500 calls per minute.
MAX_QUERY_TIMES: int = 500
# Mapping between tushare exchange suffixes and vn.py Exchange enums.
EXCHANGE_TS2VT: Dict[str, Exchange] = {
    'SH': Exchange.SSE,
    'SZ': Exchange.SZSE
}
EXCHANGE_VT2TS: Dict[Exchange, str] = {v: k for k, v in EXCHANGE_TS2VT.items()}
def to_ts_symbol(symbol: str, exchange: Exchange):
    """Convert a vn.py (symbol, exchange) pair into a tushare query code.

    :raises TypeError: for any exchange other than SSE or SZSE
    """
    if exchange not in (Exchange.SSE, Exchange.SZSE):
        print("目前只研究深圳证券交易所和上海证券交易所A股股票")
        raise TypeError("目前只研究深圳证券交易所和上海证券交易所A股股票")
    # Both supported exchanges share the same code shape: '<symbol>.<suffix>'.
    return f'{symbol}.{EXCHANGE_VT2TS[exchange]}'
def to_split_ts_codes(tscode: str):
    """Split a tushare code like '600600.SH' into (symbol, vn.py Exchange)."""
    symbol, suffix = tscode.split('.')
    return symbol, EXCHANGE_TS2VT[suffix]
class TuShareClient:
    """
    Client that queries historical daily bar data from tushare.

    Tushare notes: daily bars are refreshed between 15:00 and 16:00 on trade
    days; the 'daily' endpoint returns unadjusted prices and provides no data
    during suspensions.  Basic-tier rate limits: at most 500 calls per minute
    and 5000 rows per call.
    """

    def __init__(self):
        """"""
        self.pro: client.DataApi = None
        self.inited: bool = False
        # All listed stock codes (SSE + SZSE); filled by stock_list().
        self.symbols: pd.DataFrame = None
        # Per-exchange open-day trading calendars; filled by trade_day_list().
        self.trade_cal: Dict[str, pd.DataFrame] = None

    def init(self, token: str = "") -> bool:
        """Connect to tushare and preload code lists and calendars.

        :param token: optional tushare token; falls back to the module-level
                      tushare_token when empty
        :return: True on success (idempotent once initialised)
        """
        if self.inited:
            return True
        if token:
            # Bug fix: the caller-supplied token used to be ignored and the
            # module-level tushare_token was always used instead.
            ts.set_token(token)
        else:
            ts.set_token(tushare_token)
        try:
            self.pro = ts.pro_api()
            self.stock_list()
            self.trade_day_list()
        except Exception:
            # Bug fix: `except (BaseException, "tushare连接失败")` placed a str
            # in the catch tuple, which raises TypeError at catch time.
            log.error("tushare连接失败")
            traceback.print_exc()
            return False
        self.inited = True
        return True

    def query_history(self, req: HistoryRequest) -> Optional[List[BarData]]:
        """
        Query daily history bars from tushare.

        :param req: history request; only DAILY interval on SSE/SZSE is
                    supported, any other request returns None
        :return: list of BarData (possibly empty), or None when unsupported
                 or the client is not initialised
        """
        if self.symbols is None:
            return None
        symbol = req.symbol
        exchange = req.exchange
        interval = req.interval
        start = req.start.strftime(TS_DATE_FORMATE)
        end = req.end.strftime(TS_DATE_FORMATE)
        if interval is not Interval.DAILY:
            return None
        if exchange not in [Exchange.SSE, Exchange.SZSE]:
            return None
        tscode = to_ts_symbol(symbol, exchange)
        # Page through the date range: one call covers at most MAX_QUERY_SIZE
        # days because of the 5000-row-per-call limit.
        cnt = 0
        df: pd.DataFrame = None
        while datetime.strptime(start, TS_DATE_FORMATE) <= datetime.strptime(end, TS_DATE_FORMATE):
            start_date = datetime.strptime(start, TS_DATE_FORMATE)
            simulate_end_date = min(datetime.strptime(end, TS_DATE_FORMATE),
                                    start_date + timedelta(days=MAX_QUERY_SIZE))
            simulate_end = simulate_end_date.strftime(TS_DATE_FORMATE)
            tushare_df = None
            while True:
                try:
                    tushare_df = self.pro.query('daily', ts_code=tscode, start_date=start, end_date=simulate_end)
                except (requests.exceptions.SSLError, requests.exceptions.ConnectionError) as e:
                    log.error(e)
                    # 10054: the remote host force-closed the connection,
                    # usually because of rate limiting -> back off and retry.
                    if '10054' in str(e):
                        sleep_time = 60.0
                        log.info("请求过于频繁sleep" + str(sleep_time) + "s")
                        time.sleep(sleep_time)
                        log.info("继续发送请求:" + tscode)
                        continue
                    else:
                        raise Exception(e)
                break
            if tushare_df is not None:
                if df is None:
                    df = tushare_df
                else:
                    df = pd.concat([df, tushare_df], ignore_index=True)
            # Throttle to stay well under the 500-calls-per-minute limit.
            sleep_time = 0.50
            log.info("sleep" + str(sleep_time) + "s")
            time.sleep(sleep_time)
            cnt += 1
            start = (simulate_end_date + timedelta(days=1)).strftime(TS_DATE_FORMATE)
        data: List[BarData] = []
        if df is not None:
            for ix, row in df.iterrows():
                date = datetime.strptime(row.trade_date, '%Y%m%d')
                date = CHINA_TZ.localize(date)
                # Log which OHLCV field (if any) is missing before filling.
                if pd.isnull(row['open']):
                    log.info(symbol + '.' + EXCHANGE_VT2TS[exchange] + row['trade_date'] + "open_price为None")
                elif pd.isnull(row['high']):
                    log.info(symbol + '.' + EXCHANGE_VT2TS[exchange] + row['trade_date'] + "high_price为None")
                elif pd.isnull(row['low']):
                    log.info(symbol + '.' + EXCHANGE_VT2TS[exchange] + row['trade_date'] + "low_price为None")
                elif pd.isnull(row['close']):
                    log.info(symbol + '.' + EXCHANGE_VT2TS[exchange] + row['trade_date'] + "close_price为None")
                elif pd.isnull(row['amount']):
                    log.info(symbol + '.' + EXCHANGE_VT2TS[exchange] + row['trade_date'] + "volume为None")
                # Bug fix: fillna() returns a new Series (it is not in-place);
                # the result used to be discarded, so NaNs leaked into BarData.
                row = row.fillna(0)
                bar = BarData(
                    symbol=symbol,
                    exchange=exchange,
                    interval=interval,
                    datetime=date,
                    open_price=row['open'],
                    high_price=row['high'],
                    low_price=row['low'],
                    close_price=row['close'],
                    volume=row['amount'],
                    gateway_name='tushare'
                )
                data.append(bar)
        return data

    def stock_list(self):
        """
        Call the tushare 'stock_basic' endpoint and cache all SSE and SZSE
        stock codes with basic info (code, name, listing/delisting dates,
        market, HK-connect flag).  No-op once loaded.
        """
        if self.symbols is None:
            symbols_sse = self.pro.query('stock_basic', exchange=Exchange.SSE.value, fields='ts_code,symbol,name,'
                                                                                            'fullname,enname,market,'
                                                                                            'list_status,list_date,'
                                                                                            'delist_date,is_hs')
            symbols_szse = self.pro.query('stock_basic', exchange=Exchange.SZSE.value, fields='ts_code,symbol,name,'
                                                                                              'fullname,enname,market,'
                                                                                              'list_status,list_date,'
                                                                                              'delist_date,is_hs')
            self.symbols = pd.concat([symbols_sse, symbols_szse], axis=0, ignore_index=True)

    def trade_day_list(self):
        """
        Cache the open-day trading calendar for each supported exchange.
        No-op once loaded.
        """
        if self.trade_cal is None:
            self.trade_cal = dict()
            self.trade_cal[Exchange.SSE.value] = self.pro.query('trade_cal', exchange=Exchange.SSE.value, is_open='1')
            self.trade_cal[Exchange.SZSE.value] = self.pro.query('trade_cal', exchange=Exchange.SZSE.value, is_open='1')
# Module-level singleton client shared by importers of this module.
tushare_client = TuShareClient()
if __name__ == "__main__":
    # Manual smoke test: fetch the full daily history of 600600 on the SSE
    # and print how many bars came back.
    print("测试TuShare数据接口")
    # tushare_client = TuShareClient()
    tushare_client.init()
    # print(tushare_client.symbols)
    # print(tushare_client.trade_cal)
    req = HistoryRequest(symbol='600600', exchange=Exchange.SSE,
                         start=datetime(year=1999, month=11, day=10), end=datetime.now(), interval=Interval.DAILY)
    ts_data = tushare_client.query_history(req)
    print(len(ts_data))

0
utils/common/__init__.py Normal file
View File

36
utils/common/utils.py Normal file
View File

@@ -0,0 +1,36 @@
import logging
class logger:
    """Thin wrapper over the stdlib logging module that writes to both the
    console and a file with one shared format.

    The class name is kept lowercase for backward compatibility with
    existing callers (`log = logger("log.txt")`).
    """

    def __init__(self, path, clevel=logging.INFO, Flevel=logging.INFO):
        """
        :param path: log file path (also used as the logger name)
        :param clevel: console (stream) handler level
        :param Flevel: file handler level
        """
        self.logger = logging.getLogger(path)
        self.logger.setLevel(logging.DEBUG)
        fmt = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S')
        # Console handler.
        sh = logging.StreamHandler()
        sh.setFormatter(fmt)
        sh.setLevel(clevel)
        # File handler (utf-8 so Chinese messages round-trip).
        fh = logging.FileHandler(path, encoding='utf-8')
        fh.setFormatter(fmt)
        fh.setLevel(Flevel)
        self.logger.addHandler(sh)
        self.logger.addHandler(fh)

    def debug(self, message):
        self.logger.debug(message)

    def info(self, message):
        self.logger.info(message)

    def war(self, message):
        # Bug fix: Logger.warn() is a deprecated alias; use warning().
        self.logger.warning(message)

    def error(self, message):
        self.logger.error(message)

    def cri(self, message):
        self.logger.critical(message)
log = logger("log.txt")

237
utils/configure/ util.py Normal file
View File

@@ -0,0 +1,237 @@
# -*- coding: utf-8 -*-
import datetime
import random
import smtplib
import time
import warnings
from email.mime.text import MIMEText
from email.header import Header
from email.utils import parseaddr, formataddr
import json
import pandas as pd
import re
import requests
from .settings import config, get_config_data,DBSelector
def notify(title='', desp=''):
    """Push a message through the (now paid) ServerChan WeChat bridge.

    :param title: message title
    :param desp: message body
    :return: True when the push succeeded, False otherwise
    NOTE(review): title/desp are interpolated into the URL without
    percent-encoding — confirm callers only pass URL-safe text.
    """
    warnings.warn("该接口需要收费了,请使用企业微信")
    url = f"https://sc.ftqq.com/{config['WECHAT_ID']}.send?text={title}&desp={desp}"
    try:
        res = requests.get(url, timeout=5)
    except Exception as e:
        print(e)
        return False
    try:
        js = res.json()
        result = js['data']['errno'] == 0
    except Exception as e:
        # Bug fix: this path used to fall off the function and implicitly
        # return None instead of a boolean.
        print(e)
        print(res.text)
        return False
    if result:
        print('发送成功')
        return True
    print('发送失败')
    return False
def read_web_headers_cookies(website, headers=False, cookies=False):
    """Load stored request headers and/or cookies for *website*.

    Reads web_headers.json via get_config_data.

    :param website: key of the site section in web_headers.json
    :param headers: when True, return the stored headers
    :param cookies: when True, return the stored cookies
    :return: (headers_or_None, cookies_or_None)
    """
    site_config = get_config_data('web_headers.json')
    return_headers = None
    return_cookies = None
    if headers:
        return_headers = site_config[website]['headers']
    if cookies:
        # Bug fix: the cookies were assigned to return_headers, so callers
        # never received cookies (and requested headers were overwritten).
        return_cookies = site_config[website]['cookies']
    return return_headers, return_cookies
def send_message_via_wechat(_message):  # defaults to sending to yourself
    """Send *_message* as a text message via the Enterprise WeChat API.

    Fetches an access token with the configured corp credentials, then posts
    the message payload.

    :return: True when the API reports 'ok'
    """
    wechat_cfg = config['enterprise_wechat']
    userid = wechat_cfg['userid']
    agentid = wechat_cfg['agentid']
    corpid = wechat_cfg['corpid']
    corpsecret = wechat_cfg['corpsecret']
    token_resp = requests.get(f"https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid={corpid}&corpsecret={corpsecret}")
    access_token = json.loads(token_resp.text)['access_token']
    payload = {
        "touser": userid,
        "msgtype": "text",
        "agentid": agentid,
        "text": {
            "content": _message
        },
        "safe": 0,
        "enable_id_trans": 0,
        "enable_duplicate_check": 0,
        "duplicate_check_interval": 1800
    }
    send_resp = requests.post(f"https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token={access_token}",
                              data=json.dumps(payload))
    return json.loads(send_resp.text)['errmsg'] == 'ok'
def rsa_encrypt():
    """Demo: generate a 1024-bit RSA keypair, persist both keys, encrypt an
    (empty) message to encrypt.bin and round-trip decrypt it."""
    import rsa
    pubkey, privkey = rsa.newkeys(1024)
    print('pubkey >>>> {}'.format(pubkey))
    print('privkey >>>> {}'.format(privkey))
    with open('pub.pem', 'w') as f:
        f.write(pubkey.save_pkcs1().decode())
    with open('datasender.pem', 'w') as f:
        f.write(privkey.save_pkcs1().decode())
    message = ''
    print("message encode {}".format(message.encode()))
    crypto = rsa.encrypt(message.encode(), pubkey)  # ciphertext bytes
    print('密文:\n{}'.format(crypto))
    with open('encrypt.bin', 'wb') as f:
        f.write(crypto)
    # Round-trip check: decrypt what we just encrypted.
    e_message = rsa.decrypt(crypto, privkey)  # plaintext bytes
    print("解密后\n{}".format(e_message.decode()))
def rsa_decrypt():
    """Demo: decrypt encrypt.bin with the private key written by rsa_encrypt()."""
    import rsa
    with open('encrypt.bin', 'rb') as f:
        content = f.read()
    # Bug fix: rsa_encrypt() stores the private key as 'datasender.pem';
    # 'priva.pem' is never written anywhere in this module, so decryption
    # always failed with FileNotFoundError.
    file = 'datasender.pem'
    with open(file, 'r') as f:
        privkey = rsa.PrivateKey.load_pkcs1(f.read().encode())
    e_message = rsa.decrypt(content, privkey)  # plaintext bytes
    print("解密后\n{}".format(e_message.decode()))
def market_status():
    """Return True while before today's A-share close (15:02:05, slightly
    padded), i.e. the market is still considered open; False after close."""
    now = datetime.datetime.now()
    end = datetime.datetime(now.year, now.month, now.day, 15, 2, 5)
    return now < end
def _format_addr(s):
name, addr = parseaddr(s)
return formataddr((Header(name, 'utf-8').encode(), addr))
def send_from_aliyun(title, content, TO_MAIL_=config['mail']['qq']['user'], types='plain'):
    """Send an email through the Aliyun enterprise SMTP relay.

    :param title: mail subject
    :param content: mail body
    :param TO_MAIL_: recipient address; NOTE the default is bound to the
                     global config at import time
    :param types: MIME subtype of the body ('plain' or 'html')
    """
    username = config['aliyun']['EMAIL_USER_ALI']  # Aliyun account
    password = config['aliyun']['LOGIN_EMAIL_ALYI_PASSWORD']  # Aliyun password
    stmp = smtplib.SMTP()
    msg = MIMEText(content, types, 'utf-8')
    subject = title
    msg['Subject'] = Header(subject, 'utf-8')
    msg['From'] = _format_addr('{} <{}>'.format('数据推送', username))
    msg['To'] = TO_MAIL_
    try:
        stmp.connect('smtp.qiye.aliyun.com', 25)
        stmp.login(username, password)
        stmp.sendmail(username, TO_MAIL_, msg.as_string())
    except Exception as e:
        # Single retry after a short random backoff; a second failure
        # propagates to the caller.
        # NOTE(review): the SMTP connection is never quit()/closed — confirm
        # whether that is acceptable for this relay.
        time.sleep(10 + random.randint(1, 5))
        stmp = smtplib.SMTP()
        stmp.connect('smtp.qiye.aliyun.com', 25)
        stmp.login(username, password)
        stmp.sendmail(username, TO_MAIL_, msg.as_string())
    else:
        print('发送完毕')
def send_sms(content):
    """Send *content* as an SMS through the Twilio API (overseas gateway)."""
    from twilio.rest import Client
    # Bug fix: `config` is a plain dict loaded from config.json, so attribute
    # access like config.twilio_account_sid raised AttributeError.  The
    # credentials live under the 'twillio' section (spelling matches the key
    # in config.json).
    twilio_cfg = config['twillio']
    client = Client(twilio_cfg['twilio_account_sid'], twilio_cfg['twilio_auth_token'])
    try:
        message = client.messages.create(
            body=content,
            # TODO confirm: FROM_MOBILE / TO_MOBILE keys are absent from the
            # shipped config.json and must be added before use.
            from_=config['FROM_MOBILE'],
            to=config['TO_MOBILE']
        )
    except Exception as e:
        print(e)
def jsonp2json(str_):
    """Strip the JSONP callback wrapper and parse the inner JSON object."""
    first, last = str_.find('{'), str_.rfind('}')
    return json.loads(str_[first:last + 1])
def js2json(str_):
    """Parse a relaxed-JSON / JavaScript object literal via demjson."""
    import demjson
    body = str_[str_.find('{'):str_.rfind('}') + 1]
    return demjson.decode(body)
def bond_filter(code):
    """True when *code* looks like a convertible bond (starts with 11 or 12)."""
    return bool(re.search('^(11|12)', code))
def get_holding_list(filename=None):
    """Read a broker position export (gbk-encoded CSV) and return the list of
    convertible-bond codes held (column '证券代码', filtered by bond_filter)."""
    df = pd.read_csv(filename, encoding='gbk')
    df['证券代码'] = df['证券代码'].astype(str)
    df['kzz'] = df['证券代码'].map(bond_filter)
    return df.loc[df['kzz'] == True, '证券代码'].tolist()
def mongo_convert_df(doc,condition=None,project=None):
    """Run doc.find(condition, project) and collect the cursor into a DataFrame."""
    import pandas as pd
    rows = [item for item in doc.find(condition, project)]
    return pd.DataFrame(rows)
def get_jsl_code(table):
    """Load the given jisilu table from the db_stock MySQL database ('kh' host)."""
    engine = DBSelector().get_engine('db_stock', 'kh')
    return pd.read_sql(table, engine)
def fmt_date(x, src='%Y%m%d', trgt='%Y-%m-%d'):
    """Reformat the date string *x* from format *src* to format *trgt*."""
    parsed = datetime.datetime.strptime(x, src)
    return parsed.strftime(trgt)
def calendar(start_date, end_date):
    """Return SSE trading days between two 'YYYY-MM-DD' dates, as 'YYYY-MM-DD' strings."""
    from .settings import get_tushare_pro
    src = '%Y-%m-%d'
    trgt = '%Y%m%d'
    pro = get_tushare_pro()
    df = pro.trade_cal(exchange='SSE',
                       start_date=fmt_date(start_date, src, trgt),
                       end_date=fmt_date(end_date, src, trgt),
                       is_open='1')
    # tushare returns 'YYYYMMDD'; convert back to dashed form.
    return [fmt_date(d) for d in df['trade_date'].tolist()]
if __name__ == '__main__':
    # Manual check: requires a reachable MySQL instance configured in config.json.
    print(get_jsl_code('tb_bond_jisilu'))

View File

View File

@@ -0,0 +1,99 @@
{
"mysql": {
"local": {
"host": "",
"port": 3306,
"user": "",
"password": ""
},
"qq": {
"host": "",
"port": 2222,
"user": "",
"password": ""
},
"ubuntu": {
"host": "1",
"port": 3306,
"user": "",
"password": ""
},
"ptrade": {
"host": "",
"port": 3306,
"user": "",
"password": ""
},
"tencent-1c": {
"host": "",
"port": 3306,
"user": "",
"password": ""
}
},
"redis": {
"qq": {
"host": "127.0.0.1",
"port": 6379,
"password": ""
}
},
"mail": {
"qq": {
"user": ""
}
},
"data_path": "",
"mongo": {
"qq": {
"host": "",
"port": 11111,
"user": "",
"password": ""
},
"local": {
"host": "127.0.0.1",
"port": 17017,
"user": null,
"password": null
}
},
"WECHAT_ID": "",
"jsl_cookies": {
"auto_reload": "",
"kbzw_r_uname": "",
"kbz_newcookie": "1",
"kbzw__Session": "",
"Hm_lvt_164fe01b1433a19b507595a43bf58262": "",
"Hm_lpvt_164fe01b1433a19b507595a43bf58262": ""
},
"twillio": {
"twilio_account_sid": "",
"twilio_auth_token": ""
},
"jsl_monitor": {
"EXPIRE_TIME": 1800,
"MONITOR_PERCENT": 8,
"ACCESS_INTERVAL": 20,
"JSL_USER": "",
"JSL_PASSWORD": ""
},
"aliyun": {
"EMAIL_USER_XT": "",
"EMAIL_USER_ALI": "",
"LOGIN_EMAIL_ALYI_PASSWORD": ""
},
"ts_token": "",
"holding_file": "v",
"xc_server": "",
"xc_token_pro": "",
"enterprise_wechat": {
"userid": "",
"agentid": "",
"corpid": "",
"corpsecret": ""
}
}

View File

@@ -0,0 +1,99 @@
{
"mysql": {
"local": {
"host": "",
"port": 3306,
"user": "",
"password": ""
},
"qq": {
"host": "",
"port": 2222,
"user": "",
"password": ""
},
"ubuntu": {
"host": "1",
"port": 3306,
"user": "",
"password": ""
},
"ptrade": {
"host": "",
"port": 3306,
"user": "",
"password": ""
},
"tencent-1c": {
"host": "",
"port": 3306,
"user": "",
"password": ""
}
},
"redis": {
"qq": {
"host": "127.0.0.1",
"port": 6379,
"password": ""
}
},
"mail": {
"qq": {
"user": ""
}
},
"data_path": "",
"mongo": {
"qq": {
"host": "",
"port": 11111,
"user": "",
"password": ""
},
"local": {
"host": "127.0.0.1",
"port": 17017,
"user": null,
"password": null
}
},
"WECHAT_ID": "",
"jsl_cookies": {
"auto_reload": "",
"kbzw_r_uname": "",
"kbz_newcookie": "1",
"kbzw__Session": "",
"Hm_lvt_164fe01b1433a19b507595a43bf58262": "",
"Hm_lpvt_164fe01b1433a19b507595a43bf58262": ""
},
"twillio": {
"twilio_account_sid": "",
"twilio_auth_token": ""
},
"jsl_monitor": {
"EXPIRE_TIME": 1800,
"MONITOR_PERCENT": 8,
"ACCESS_INTERVAL": 20,
"JSL_USER": "",
"JSL_PASSWORD": ""
},
"aliyun": {
"EMAIL_USER_XT": "",
"EMAIL_USER_ALI": "",
"LOGIN_EMAIL_ALYI_PASSWORD": ""
},
"ts_token": "",
"holding_file": "v",
"xc_server": "",
"xc_token_pro": "",
"enterprise_wechat": {
"userid": "",
"agentid": "",
"corpid": "",
"corpsecret": ""
}
}

104
utils/configure/settings.py Normal file
View File

@@ -0,0 +1,104 @@
'''
Author: Charmve yidazhang1@gmail.com
Date: 2023-03-02 00:12:18
LastEditors: Charmve yidazhang1@gmail.com
LastEditTime: 2023-03-09 23:29:18
FilePath: /Qbot/utils/configure/settings.py
Version: 1.0.1
Blogs: charmve.blog.csdn.net
Description:
Copyright (c) 2023 by Charmve, All Rights Reserved.
'''
# -*-coding=utf-8-*-
import os
import json
def get_config_data(config_file='config.json'):
    """Load a JSON config file that lives next to this module.

    :param config_file: file name relative to this module's directory
    :return: the parsed JSON document (usually a dict)
    """
    json_file = os.path.join(os.path.dirname(__file__), config_file)
    with open(json_file, 'r', encoding='utf8') as fp:
        return json.load(fp)
# Global configuration loaded once at import time from config.json.
config = get_config_data()
def config_dict(*args):
    """Walk the global config by successive keys.

    Example: config_dict('mysql', 'local', 'host')

    :return: the nested value, or None when any key on the path is missing
    """
    result = config
    for arg in args:
        try:
            result = result[arg]
        except (KeyError, IndexError, TypeError):
            # Bug fix: a bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit; only lookup failures should map to None.
            print('找不到对应的key')
            return None
    return result
class DBSelector(object):
    '''
    Build database connections (MySQL / MongoDB) from the global config.
    '''

    def __init__(self):
        # Snapshot of the global config loaded from config.json.
        self.json_data = config

    def config(self, db_type='mysql', local='qq'):
        """Return (user, password, host, port) for the given backend/location."""
        section = self.json_data[db_type][local]
        return (section['user'], section['password'], section['host'], section['port'])

    def get_engine(self, db, type_='qq'):
        """Build a SQLAlchemy engine for MySQL database *db*; None on failure."""
        from sqlalchemy import create_engine
        user, password, host, port = self.config(db_type='mysql', local=type_)
        try:
            return create_engine(
                'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(user, password, host, port, db))
        except Exception as e:
            print(e)
            return None

    def get_mysql_conn(self, db, type_='qq', use_dict=False):
        """Open a raw pymysql connection; dict cursors when *use_dict*.

        :return: connection object, or None on failure
        """
        import pymysql
        user, password, host, port = self.config(db_type='mysql', local=type_)
        kwargs = dict(host=host, port=port, user=user, password=password,
                      db=db, charset='utf8mb4', read_timeout=10)
        if use_dict:
            kwargs['cursorclass'] = pymysql.cursors.DictCursor
        try:
            return pymysql.connect(**kwargs)
        except Exception as e:
            print(e)
            return None

    def mongo(self, location_type='qq', async_type=False):
        """Return a MongoDB client (motor when *async_type*, else pymongo)."""
        user, password, host, port = self.config('mongo', location_type)
        connect_uri = f'mongodb://{user}:{password}@{host}:{port}'
        if async_type:
            from motor.motor_asyncio import AsyncIOMotorClient
            return AsyncIOMotorClient(connect_uri)
        import pymongo
        return pymongo.MongoClient(connect_uri)
def get_tushare_pro():
    """Build an xcsc_tushare pro API handle from configured credentials."""
    import xcsc_tushare as xc
    xc.set_token(config.get('xc_token_pro'))
    return xc.pro_api(env='prd', server=config.get('xc_server'))
if __name__ == '__main__':
    # No module-level smoke test; importing already loads config.json.
    pass

1
utils/thsauto Submodule

Submodule utils/thsauto added at b7cd76359a