Backtest error, requesting help
Created by bqbsomfl, last updated by small_q, viewed by 43 users
I'm a beginner and built a strategy based on volume-price correlation, but the backtest throws an error and I can't figure out why. Any pointers would be greatly appreciated.
from bigdatasource.api import DataSource
from bigdata.api.datareader import D
from biglearning.api import M
from biglearning.api import tools as T
from biglearning.module2.common.data import Outputs
#
import pandas as pd
import numpy as np
import math
import dai
import warnings
import datetime
#
from zipline.finance.commission import PerOrder
from zipline.api import get_open_orders
from zipline.api import symbol
#
from bigtrader.sdk import *
from bigtrader.utils.my_collections import NumPyDeque
from bigtrader.constant import OrderType
from bigtrader.constant import Direction
zixuan = ['000100.SZA','300496.SZA','300136.SZA','300001.SZA','002594.SZA','000876.SZA','601377.SHA','000831.SZA','601318.SHA','000902.SZA','600584.SHA','002050.SZA','000878.SZA','601899.SHA','688981.SHA','603290.SHA','600030.SHA','002920.SZA','002230.SZA','600941.SHA','603816.SHA','600141.SHA','002978.SZA','000333.SZA','002563.SZA','002001.SZA','600529.SHA','600085.SHA','601012.SHA','688297.SHA','603605.SHA','002568.SZA','002475.SZA','300146.SZA','601021.SHA','600221.SHA','600196.SHA','603392.SHA','002352.SZA','600176.SHA','600309.SHA','600089.SHA','688111.SHA','300124.SZA','000651.SZA','002027.SZA','000887.SZA','600887.SHA','002384.SZA','688005.SHA','600392.SHA','000938.SZA','600276.SHA','002241.SZA','300866.SZA','300693.SZA','603486.SHA','002460.SZA','300438.SZA','603008.SHA','003816.SZA','600765.SHA','002340.SZA','688433.SHA','300274.SZA','300750.SZA','603345.SHA','300498.SZA','600409.SHA','603056.SHA','300979.SZA','600438.SHA','600598.SHA','688036.SHA','002242.SZA','000560.SZA','603129.SHA','300435.SZA','002268.SZA','002294.SZA','600259.SHA','002511.SZA','002624.SZA','300760.SZA','002891.SZA','000061.SZA','002236.SZA']
sd = '2023-01-01'
ed = '2023-12-14'
sd = pd.to_datetime(sd) - pd.Timedelta(days=10)
df = DataSource('bar1m_CN_STOCK_A').read(instruments = zixuan, start_date = sd, end_date= ed)
df['date'] = pd.to_datetime(df['date']).dt.date
# Compute the intraday volume-price correlation
def cal_corr(df):
    # Pearson correlation, guarding against constant series
    def correlation(x, y):
        if x.unique().shape[0] == 1 or y.unique().shape[0] == 1:
            res = 0
        else:
            res = x.corr(y, method='pearson')
        return res
    # Full-day 1-minute bar data
    _c = df['close']
    _v = df['volume']
    corr_vp = correlation(_c, _v)
    return corr_vp
data = df.groupby(['instrument', 'date']).apply(cal_corr).reset_index().rename(columns={0:'factor'})
data['weekly'] = data.groupby('instrument')['factor'].apply(lambda x: x.rolling(window=5).sum())
data.dropna(inplace=True)
data = data.drop('factor', axis=1)
data = data.reset_index(drop=True)
data['date'] = pd.to_datetime(data['date'], errors='coerce')
data['instrument'] = data['instrument'].str.replace('A','')
instruments = {'market': 'CN_STOCK_A', 'instruments': zixuan, 'start_date': sd, 'end_date': ed}
instruments = DataSource.write_pickle(instruments)
df = DataSource.write_df(data)
# Trading engine: initialization function, runs once
def m4_initialize_bigquant_run(context):
    # Load the prediction data
    context.df = context.options['data'].read_df()

# Trading engine: called once before the open of each bar
def m4_before_trading_start_bigquant_run(context, data):
    # Pre-market processing, e.g. subscribing to quotes
    pass

# Trading engine: tick handler, runs once per tick
def m4_handle_tick_bigquant_run(context, tick):
    pass

# Trading engine: bar handler, runs once per bar
def m4_handle_data_bigquant_run(context, data):
    # Current date
    dt = data.current_dt.strftime('%Y-%m-%d')
    # Factor values for the current date, sorted ascending by the weekly factor
    df = context.df[context.df['date'] == dt].sort_values('weekly', ascending=True)
    instruments = list(df['instrument'])[:10]
    # Set of the top 10 ranked stocks
    instruments = set(instruments)
    # Current positions
    holding = context.get_account_positions()
    holding_list = list(holding.keys())
    holding_num = len(holding_list)
    holding_list = set(holding_list)
    # Stocks to buy = top-10 set minus currently held set
    buy_list = instruments - holding_list
    # Stocks to sell = currently held set minus top-10 set
    sell_list = holding_list - instruments
    # Sell stocks that are no longer in the pool
    for ins in sell_list:
        context.order_target_percent(ins, 0)
        holding_num -= 1
    # Buy the newly selected stocks
    for ins in buy_list:
        if holding_num <= 10:
            context.order_target_percent(ins, 1/10)
            holding_num += 1

# Trading engine: trade report handler, runs once per fill
def m4_handle_trade_bigquant_run(context, trade):
    pass

# Trading engine: order report handler, runs once per order update
def m4_handle_order_bigquant_run(context, order):
    pass

# Trading engine: post-market handler, runs once per day after the close
def m4_after_trading_bigquant_run(context, data):
    pass
m4 = M.hftrade.v2(
    instruments=instruments,
    options_data=df,
    start_date='',
    end_date='',
    initialize=m4_initialize_bigquant_run,
    before_trading_start=m4_before_trading_start_bigquant_run,
    handle_tick=m4_handle_tick_bigquant_run,
    handle_data=m4_handle_data_bigquant_run,
    handle_trade=m4_handle_trade_bigquant_run,
    handle_order=m4_handle_order_bigquant_run,
    after_trading=m4_after_trading_bigquant_run,
    capital_base=1000000,
    frequency='daily',
    price_type='真实价格',
    product_type='股票',
    before_start_days='0',
    volume_limit=1,
    order_price_field_buy='open',
    order_price_field_sell='close',
    benchmark='000300.SH',
    plot_charts=True,
    disable_cache=False,
    replay_bdb=False,
    show_debug_info=False,
    backtest_only=False
)
Error: [2023-12-18 15:09:56.923827] ERROR: moduleinvoker:417312903.py:63:<module> module name: hftrade, module version: v2, trackeback: TypeError: _cache_key_encoder: not supported type: <class 'pandas._libs.tslibs.timestamps.Timestamp'>
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[12], line 63
59 def m4_after_trading_bigquant_run(context, data):
60 pass
---> 63 m4 = M.hftrade.v2(
64 instruments=instruments,
65 options_data=df,
66 start_date='',
67 end_date='',
68 initialize=m4_initialize_bigquant_run,
69 before_trading_start=m4_before_trading_start_bigquant_run,
70 handle_tick=m4_handle_tick_bigquant_run,
71 handle_data=m4_handle_data_bigquant_run,
72 handle_trade=m4_handle_trade_bigquant_run,
73 handle_order=m4_handle_order_bigquant_run,
74 after_trading=m4_after_trading_bigquant_run,
75 capital_base=1000000,
76 frequency='daily',
77 price_type='真实价格',
78 product_type='股票',
79 before_start_days='0',
80 volume_limit=1,
81 order_price_field_buy='open',
82 order_price_field_sell='close',
83 benchmark='000300.SH',
84 plot_charts=True,
85 disable_cache=False,
86 replay_bdb=False,
87 show_debug_info=False,
88 backtest_only=False
89 )
File module2/common/modulemanagerv2.py:88, in biglearning.module2.common.modulemanagerv2.BigQuantModuleVersion.__call__()
File module2/common/moduleinvoker.py:370, in biglearning.module2.common.moduleinvoker.module_invoke()
File module2/common/moduleinvoker.py:292, in biglearning.module2.common.moduleinvoker._invoke_with_cache()
File module2/common/moduleinvoker.py:253, in biglearning.module2.common.moduleinvoker._invoke_with_cache()
File module2/common/moduleinvoker.py:210, in biglearning.module2.common.moduleinvoker._module_run()
File module2/modules/hftrade/v2/__init__.py:417, in biglearning.module2.modules.hftrade.v2.__init__.bigquant_run()
File module2/modules/hftrade/v2/__init__.py:257, in biglearning.module2.modules.hftrade.v2.__init__.bigquant_run.do_backtest_run()
File module2/common/modulemanagerv2.py:88, in biglearning.module2.common.modulemanagerv2.BigQuantModuleVersion.__call__()
File module2/common/moduleinvoker.py:356, in biglearning.module2.common.moduleinvoker.module_invoke()
File module2/common/modulecache.py:167, in biglearning.module2.common.modulecache.cache_key()
File /usr/local/python3/lib/python3.8/json/__init__.py:234, in dumps(obj, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, default, sort_keys, **kw)
232 if cls is None:
233 cls = JSONEncoder
--> 234 return cls(
235 skipkeys=skipkeys, ensure_ascii=ensure_ascii,
236 check_circular=check_circular, allow_nan=allow_nan, indent=indent,
237 separators=separators, default=default, sort_keys=sort_keys,
238 **kw).encode(obj)
File /usr/local/python3/lib/python3.8/json/encoder.py:199, in JSONEncoder.encode(self, o)
195 return encode_basestring(o)
196 # This doesn't pass the iterator directly to ''.join() because the
197 # exceptions aren't as detailed. The list call should be roughly
198 # equivalent to the PySequence_Fast that ''.join() would do.
--> 199 chunks = self.iterencode(o, _one_shot=True)
200 if not isinstance(chunks, (list, tuple)):
201 chunks = list(chunks)
File /usr/local/python3/lib/python3.8/json/encoder.py:257, in JSONEncoder.iterencode(self, o, _one_shot)
252 else:
253 _iterencode = _make_iterencode(
254 markers, self.default, _encoder, self.indent, floatstr,
255 self.key_separator, self.item_separator, self.sort_keys,
256 self.skipkeys, _one_shot)
--> 257 return _iterencode(o, 0)
File module2/common/modulecache.py:111, in biglearning.module2.common.modulecache._cache_key_encoder()
TypeError: _cache_key_encoder: not supported type: <class 'pandas._libs.tslibs.timestamps.Timestamp'>
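Judging from the last frame, the module's cache-key encoder fails on a pandas Timestamp. The only Timestamp that seems to reach M.hftrade.v2 is sd: after sd = pd.to_datetime(sd) - pd.Timedelta(days=10) it becomes a Timestamp, and it is stored in the instruments dict that the module serializes for its cache key. Below is a minimal sketch of one possible fix, under the assumption (not verified on the platform) that the encoder only handles JSON-serializable values such as plain strings: shift the start date, then convert it back to a 'YYYY-MM-DD' string before building the dict.

# Sketch of a possible fix (assumption: the cache-key encoder only accepts
# JSON-serializable values such as plain strings, not pandas Timestamps).
sd = '2023-01-01'
ed = '2023-12-14'
# Shift the window start back 10 days for the rolling factor, but keep it as a string
# so no Timestamp is passed into the module options.
sd = (pd.to_datetime(sd) - pd.Timedelta(days=10)).strftime('%Y-%m-%d')
df = DataSource('bar1m_CN_STOCK_A').read(instruments=zixuan, start_date=sd, end_date=ed)
# ... factor computation unchanged ...
instruments = {'market': 'CN_STOCK_A', 'instruments': zixuan, 'start_date': sd, 'end_date': ed}
instruments = DataSource.write_pickle(instruments)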