
Strategy Share, Part 1

Posted by 今天你爆仓了吗 on August 14 at 13:08

eXtreme Gradient Boosting (XGBoost) is a decision-tree-based ensemble machine learning method suitable for both classification and regression. Its strengths include fast training, strong predictive performance, the ability to handle large-scale data, and support for custom loss functions.
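
As a warm-up, here is a minimal, self-contained sketch of fitting an XGBoost classifier. Everything in it is synthetic and purely illustrative; the actual pipeline below trains on real stock features.

import numpy as np
import xgboost as xgb

rng = np.random.RandomState(0)
X = rng.randn(200, 5)                                  # 200 samples, 5 synthetic features
y = (X[:, 0] + 0.1 * rng.randn(200) > 0).astype(int)   # noisy binary label

model = xgb.XGBClassifier(n_estimators=100, max_depth=3)
model.fit(X, y)
print(model.predict_proba(X[:3])[:, 1])                # predicted probability of class 1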

In [1]:

import os
import pickle

import numpy as np
import pandas as pd
import xgboost as xgb
In [2]:

with open(r'./raw-data.csv') as f:
    raw_data = pd.read_csv(f, parse_dates=['date'], index_col=['date', 'symbol'])
In [3]:

with open(r'macro-data.csv') as f:
    macro_data = pd.read_csv(f, parse_dates=['date'], index_col=['date'])
In [4]:

FEATURE_NAMES = [
    'book_leverage', 'cash_earnings_to_price_ratio',
    'cumulative_range', 'daily_standard_deviation', 'debt_to_assets',
    'earnings_growth', 'earnings_to_price_ratio', 'historical_sigma',
    'market_leverage', 'one_month_share_turnover', 'sales_growth',
    'three_months_share_turnover', 'twelve_months_share_turnover',
    'CPI', 'PPI', 'M2', 'PMI'
]
In [5]:

def merge_macro_data(data, macro_data):
    # A month's macro data is released the following month, so in any
    # given month only the previous month's figures are available.
    macro_data_lagged = macro_data.shift().dropna()

    round_to_month_end = pd.offsets.MonthEnd().rollforward
    data['calendar_month_ends'] = data.index.get_level_values('date').map(round_to_month_end)

    res = (
        data
        .join(macro_data_lagged, on='calendar_month_ends', how='inner')
        .drop('calendar_month_ends', axis=1)
    )
    return res
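
As a sanity check on the alignment logic, here is a small sketch with made-up monthly CPI prints showing how shift() plus MonthEnd().rollforward hand each date last month's macro value:

# Hypothetical monthly CPI prints, indexed by calendar month end.
macro = pd.DataFrame({'CPI': [2.1, 2.3, 2.5]},
                     index=pd.to_datetime(['2018-01-31', '2018-02-28', '2018-03-31']))

# shift() pushes each print one month forward, so February carries January's CPI.
lagged = macro.shift().dropna()

# A date inside a month rolls forward to that month's calendar end...
rollfwd = pd.offsets.MonthEnd().rollforward
print(rollfwd(pd.Timestamp('2018-02-26')))   # 2018-02-28
# ...which then joins against January's value.
print(lagged.loc['2018-02-28'])              # CPI 2.1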

In [6]:

data = merge_macro_data(raw_data, macro_data)
In [7]:

def convert_industry_to_dummy(data):
    industry = data['industry']
    ind_dummy = pd.get_dummies(industry)
    res = (
        pd.concat([data, ind_dummy], axis=1)
        .drop('industry', axis=1)
    )
    return res


def remove_unqualified_stock(data):
    non_ST = ~data['is_ST']
    non_suspended = ~data['is_suspended']
    days_gt_180 = data['days_listed'] > 180
    return data[non_ST
                & non_suspended
                & days_gt_180]


def label_by_quantile(r, quantiles=(0.1, 0.9)):
    lower_bound, upper_bound = r.quantile(quantiles)
    label = np.repeat(np.nan, len(r))
    label[r > upper_bound] = 1
    label[r < lower_bound] = 0
    return label


def label_return(data, label_func=label_by_quantile):
    label = (data['forward_return']
             .groupby(level='date')
             .transform(label_func)
             .dropna())
    return label
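
To see what the labeling does, here is a toy cross-section of forward returns (values are illustrative): only the extreme deciles receive a label, and the middle of the distribution is dropped.

r = pd.Series([-0.30, -0.05, 0.00, 0.02, 0.04, 0.06, 0.08, 0.10, 0.12, 0.50])
labeled = pd.Series(label_by_quantile(r), index=r.index).dropna()
print(labeled)  # -0.30 -> 0 (loser), 0.50 -> 1 (winner); the rest are NaN and dropped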
In [8]:

# Skip the first 48 months so that every rebalance date has a full
# training window available.
BURNIN_PERIODS = 48
rebalance_dates = raw_data.index.get_level_values('date').unique()[BURNIN_PERIODS:]
In [9]:

def training_window(rebalance_date, window=48):
    month = pd.offsets.MonthBegin()
    end = rebalance_date - month  # first day of the rebalance month
    start = end - window * month

    # Both start and end fall on the first of a month. They are not
    # necessarily trading days, but under the month-end rebalancing
    # assumption this guarantees the window covers exactly `window`
    # months while excluding rebalance_date itself.
    return start, end


def get_training_dataset(rebalance_date, data, window=48):
    start, end = training_window(rebalance_date, window)
    data = (
        data
        .loc[start:end]
        .pipe(remove_unqualified_stock)
        .pipe(convert_industry_to_dummy)
    )
    label = label_return(data)

    features = data.drop(['is_ST', 'is_suspended', 'days_listed', 'forward_return'], axis=1)
    label, features = label.align(features, join='inner')
    return features, label


def get_prediction_dataset(rebalance_date, data):
    data = (
        data
        .loc[rebalance_date]
        .pipe(remove_unqualified_stock)
        .pipe(convert_industry_to_dummy)
    )
    features = data.drop(['is_ST', 'is_suspended', 'days_listed', 'forward_return'], axis=1)
    return features
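
A quick check of the window arithmetic (the date is illustrative):

start, end = training_window(pd.Timestamp('2018-09-28'))
# end is 2018-09-01, the start of the rebalance month; start is 48 month
# starts earlier, 2014-09-01, so rebalance_date itself lies outside the window.
print(start, end)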
In [10]:

def make_model(rebalance_date, data, window=48,
               n_estimators=100, max_depth=3, booster='gbtree', **kwargs):
    X, y = get_training_dataset(rebalance_date, data, window)

    kwargs.setdefault('n_jobs', 4)
    kwargs.setdefault('silent', True)
    model = xgb.XGBClassifier(n_estimators=n_estimators, max_depth=max_depth,
                              booster=booster, **kwargs)
    model.fit(X, y)
    return model


def make_factor(rebalance_date, model, data):
    features = get_prediction_dataset(rebalance_date, data)
    # The factor value is the predicted probability of the "winner" class.
    res = pd.Series(model.predict_proba(features)[:, 1],
                    index=pd.MultiIndex.from_product([[rebalance_date], features.index],
                                                     names=['date', 'symbol']))
    return res
In [11]:

from collections import OrderedDict


def get_portfolios_from_factor_data(factor_data, quantile):
    factor_data = factor_data[factor_data['factor_quantile'] == quantile]

    def build_portfolio(df):
        # Equal-weight portfolio: each stock gets weight 1/len(df); the
        # index is the symbol level, the second level of df.index.
        return pd.Series(1 / len(df), index=df.index.get_level_values('symbol'))

    portfolios = OrderedDict()
    for date, df in factor_data.groupby(level='date'):
        portfolios[date] = build_portfolio(df)
    return portfolios

def net_value(rebalance_dates, portfolio_weights, daily_returns):
    """Compute portfolio net value.

    rebalance_dates: list of all rebalance dates in the backtest period.
    portfolio_weights: dict mapping each rebalance date to its target
        portfolio, a Series indexed by symbol with weights as values.
    daily_returns: DataFrame of daily adjusted returns for all stocks,
        indexed by date, with symbols as columns.
    """
    net_value_list = []
    for start, end in zip(rebalance_dates[:-1], rebalance_dates[1:]):
        weights = portfolio_weights[start]

        # Each holding period is a half-open interval [start, end), so that
        # every trading day belongs to exactly one holding period.
        holding_period = (daily_returns.index >= start) & (daily_returns.index < end)
        assets_held = weights.index

        rtn = daily_returns.loc[holding_period, assets_held].fillna(0)

        net_value_list.append(period_net_value(rtn, weights))

    res = merge_period_net_value(net_value_list)
    return res


def period_net_value(daily_returns, weights):
    asset_net_value = (1 + daily_returns).cumprod()
    normalized_weights = weights / weights.sum()
    portf_net_value = asset_net_value.dot(normalized_weights)
    return portf_net_value


def merge_period_net_value(period_net_values):
    net_value_list = []
    init_capital = 1
    for nv in period_net_values:
        nv *= init_capital
        net_value_list.append(nv)

        # The next period starts from the capital left at the end of the
        # previous period.
        init_capital = nv.iat[-1]
    res = pd.concat(net_value_list)

    # Net value on the first day of the backtest is 1 by construction;
    # the first day's return is never actually used.
    res.iloc[0] = 1
    return res
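A tiny numerical check of the period-chaining logic in merge_period_net_value, using made-up two-day net value paths:

# Hypothetical per-period net value paths (each starts from its own 1.0 base).
p1 = pd.Series([1.00, 1.10], index=pd.to_datetime(['2018-01-02', '2018-01-31']))
p2 = pd.Series([1.02, 1.05], index=pd.to_datetime(['2018-02-01', '2018-02-28']))

# Period 2 is rescaled by period 1's final value: the path ends at 1.10 * 1.05 = 1.155.
print(merge_period_net_value([p1, p2]))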

In [12]:

factor_list = []
model_dict = {}
for date in rebalance_dates:
    print('Training model for {}'.format(date))
    model = make_model(date, data)
    model_dict[date] = model

    factor = make_factor(date, model, data)
    factor_list.append(factor)

factor = pd.concat(factor_list)
Training model for 2013-01-31 00:00:00
Training model for 2013-02-28 00:00:00
Training model for 2013-03-29 00:00:00
Training model for 2013-04-26 00:00:00
Training model for 2013-05-31 00:00:00
Training model for 2013-06-28 00:00:00
Training model for 2013-07-31 00:00:00
Training model for 2013-08-30 00:00:00
Training model for 2013-09-30 00:00:00
Training model for 2013-10-31 00:00:00
Training model for 2013-11-29 00:00:00
Training model for 2013-12-31 00:00:00
Training model for 2014-01-30 00:00:00
Training model for 2014-02-28 00:00:00
Training model for 2014-03-31 00:00:00
Training model for 2014-04-30 00:00:00
Training model for 2014-05-30 00:00:00
Training model for 2014-06-30 00:00:00
Training model for 2014-07-31 00:00:00
Training model for 2014-08-29 00:00:00
Training model for 2014-09-30 00:00:00
Training model for 2014-10-31 00:00:00
Training model for 2014-11-28 00:00:00
Training model for 2014-12-31 00:00:00
Training model for 2015-01-30 00:00:00
Training model for 2015-02-27 00:00:00
Training model for 2015-03-31 00:00:00
Training model for 2015-04-30 00:00:00
Training model for 2015-05-29 00:00:00
Training model for 2015-06-30 00:00:00
Training model for 2015-07-31 00:00:00
Training model for 2015-08-31 00:00:00
Training model for 2015-09-30 00:00:00
Training model for 2015-10-30 00:00:00
Training model for 2015-11-30 00:00:00
Training model for 2015-12-31 00:00:00
Training model for 2016-01-29 00:00:00
Training model for 2016-02-29 00:00:00
Training model for 2016-03-31 00:00:00
Training model for 2016-04-29 00:00:00
Training model for 2016-05-31 00:00:00
Training model for 2016-06-30 00:00:00
Training model for 2016-07-29 00:00:00
Training model for 2016-08-31 00:00:00
Training model for 2016-09-30 00:00:00
Training model for 2016-10-31 00:00:00
Training model for 2016-11-30 00:00:00
Training model for 2016-12-30 00:00:00
Training model for 2017-01-26 00:00:00
Training model for 2017-02-28 00:00:00
Training model for 2017-03-31 00:00:00
Training model for 2017-04-28 00:00:00
Training model for 2017-05-31 00:00:00
Training model for 2017-06-30 00:00:00
Training model for 2017-07-31 00:00:00
Training model for 2017-08-31 00:00:00
Training model for 2017-09-29 00:00:00
Training model for 2017-10-31 00:00:00
Training model for 2017-11-30 00:00:00
Training model for 2017-12-29 00:00:00
Training model for 2018-01-31 00:00:00
Training model for 2018-02-28 00:00:00
Training model for 2018-03-30 00:00:00
Training model for 2018-04-27 00:00:00
Training model for 2018-05-31 00:00:00
Training model for 2018-06-29 00:00:00
Training model for 2018-07-31 00:00:00
Training model for 2018-08-31 00:00:00
Training model for 2018-09-28 00:00:00
In [13]:

def make_factor_data(factor, quantiles=10):
    quantile_labels = list(range(1, quantiles + 1))
    factor_quantile = (factor
                       .groupby(level='date', group_keys=False)
                       .apply(pd.qcut,
                              q=quantiles,
                              labels=quantile_labels))

    res = pd.concat({'factor': factor, 'factor_quantile': factor_quantile},
                    axis=1)
    return res
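
A toy illustration of the per-date bucketing (synthetic factor values): pd.qcut assigns bucket 1 to the lowest decile and bucket 10 to the highest.

toy = pd.Series(
    np.arange(20, dtype=float),
    index=pd.MultiIndex.from_product(
        [[pd.Timestamp('2018-09-28')], list('abcdefghijklmnopqrst')],
        names=['date', 'symbol']))
fd = make_factor_data(toy, quantiles=10)
print(fd['factor_quantile'].iloc[[0, -1]])  # bucket 1 for the lowest value, 10 for the highest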

In [14]:

factor_data = make_factor_data(factor, quantiles=10)
In [15]:

with open('daily-returns.csv') as f:
    daily_returns = pd.read_csv(f, parse_dates=['date'], index_col=['date'])

with open('CS-500-return.csv') as f:
    CS_500 = pd.read_csv(f, parse_dates=['date'], index_col=['date']).squeeze()
In [16]:

excess_returns = daily_returns.apply(lambda x: x - CS_500)
In [17]:

QUANTILE_GROUPS = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

quantile_portf_net_values = OrderedDict()
for group in QUANTILE_GROUPS:
    portfolio_weights = get_portfolios_from_factor_data(factor_data, group)
    rebalance_dates = list(portfolio_weights.keys())
    nv = net_value(rebalance_dates, portfolio_weights, excess_returns)
    quantile_portf_net_values[group] = nv
In [22]:

pd.concat(quantile_portf_net_values, axis=1).plot(figsize=(18, 6))
Out[22]:
[Figure: excess-return net value curves of the ten factor-quantile portfolios]
In [19]:

# portfolio_weights still refers to the last group in the loop above,
# i.e. the top decile (group 10).
with open('portfolio-xgboost-rolling-window-all.pkl', 'wb') as f:
    pickle.dump(portfolio_weights, f)
In [20]:

win_ratio = (quantile_portf_net_values[10].loc[rebalance_dates].pct_change() > 0).mean()
print('Monthly win ratio: {}'.format(win_ratio))
Monthly win ratio: 0.6956521739130435
In [21]:

print('Latest portfolio on {}'.format(rebalance_dates[-1]))
portfolio_weights[rebalance_dates[-1]].to_dict()
Latest portfolio on 2018-09-28 00:00:00
Out[21]:
{'000001.XSHE': 0.0029940119760479044,
'000009.XSHE': 0.0029940119760479044,
'000012.XSHE': 0.0029940119760479044,
'000021.XSHE': 0.0029940119760479044,
'000026.XSHE': 0.0029940119760479044,
'000027.XSHE': 0.0029940119760479044,
'000030.XSHE': 0.0029940119760479044,
'000039.XSHE': 0.0029940119760479044,
'000058.XSHE': 0.0029940119760479044,
'000059.XSHE': 0.0029940119760479044,
'000060.XSHE': 0.0029940119760479044,
'000069.XSHE': 0.0029940119760479044,
'000100.XSHE': 0.0029940119760479044,
'000157.XSHE': 0.0029940119760479044,
'000338.XSHE': 0.0029940119760479044,
'000402.XSHE': 0.0029940119760479044,
'000404.XSHE': 0.0029940119760479044,
'000413.XSHE': 0.0029940119760479044,
'000417.XSHE': 0.0029940119760479044,
'000419.XSHE': 0.0029940119760479044,
'000423.XSHE': 0.0029940119760479044,
'000425.XSHE': 0.0029940119760479044,
'000501.XSHE': 0.0029940119760479044,
'000524.XSHE': 0.0029940119760479044,
'000525.XSHE': 0.0029940119760479044,
'000529.XSHE': 0.0029940119760479044,
'000536.XSHE': 0.0029940119760479044,
'000541.XSHE': 0.0029940119760479044,
'000551.XSHE': 0.0029940119760479044,
'000559.XSHE': 0.0029940119760479044,
'000581.XSHE': 0.0029940119760479044,
'000582.XSHE': 0.0029940119760479044,
'000589.XSHE': 0.0029940119760479044,
'000598.XSHE': 0.0029940119760479044,
'000617.XSHE': 0.0029940119760479044,
'000627.XSHE': 0.0029940119760479044,
'000630.XSHE': 0.0029940119760479044,
'000631.XSHE': 0.0029940119760479044,
'000639.XSHE': 0.0029940119760479044,
'000652.XSHE': 0.0029940119760479044,
'000656.XSHE': 0.0029940119760479044,
'000671.XSHE': 0.0029940119760479044,
'000683.XSHE': 0.0029940119760479044,
'000685.XSHE': 0.0029940119760479044,
'000715.XSHE': 0.0029940119760479044,
'000716.XSHE': 0.0029940119760479044,
'000718.XSHE': 0.0029940119760479044,
'000753.XSHE': 0.0029940119760479044,
'000758.XSHE': 0.0029940119760479044,
'000761.XSHE': 0.0029940119760479044,
'000778.XSHE': 0.0029940119760479044,
'000785.XSHE': 0.0029940119760479044,
'000800.XSHE': 0.0029940119760479044,
'000809.XSHE': 0.0029940119760479044,
'000828.XSHE': 0.0029940119760479044,
'000848.XSHE': 0.0029940119760479044,
'000858.XSHE': 0.0029940119760479044,
'000869.XSHE': 0.0029940119760479044,
'000876.XSHE': 0.0029940119760479044,
'000880.XSHE': 0.0029940119760479044,
'000883.XSHE': 0.0029940119760479044,
'000895.XSHE': 0.0029940119760479044,
'000915.XSHE': 0.0029940119760479044,
'000919.XSHE': 0.0029940119760479044,
'000937.XSHE': 0.0029940119760479044,
'000951.XSHE': 0.0029940119760479044,
'000960.XSHE': 0.0029940119760479044,
'000999.XSHE': 0.0029940119760479044,
'001896.XSHE': 0.0029940119760479044,
'002004.XSHE': 0.0029940119760479044,
'002014.XSHE': 0.0029940119760479044,
'002017.XSHE': 0.0029940119760479044,
'002029.XSHE': 0.0029940119760479044,
'002031.XSHE': 0.0029940119760479044,
'002037.XSHE': 0.0029940119760479044,
'002039.XSHE': 0.0029940119760479044,
'002048.XSHE': 0.0029940119760479044,
'002055.XSHE': 0.0029940119760479044,
'002060.XSHE': 0.0029940119760479044,
'002067.XSHE': 0.0029940119760479044,
'002068.XSHE': 0.0029940119760479044,
'002080.XSHE': 0.0029940119760479044,
'002081.XSHE': 0.0029940119760479044,
'002092.XSHE': 0.0029940119760479044,
'002097.XSHE': 0.0029940119760479044,
'002105.XSHE': 0.0029940119760479044,
'002123.XSHE': 0.0029940119760479044,
'002145.XSHE': 0.0029940119760479044,
'002178.XSHE': 0.0029940119760479044,
'002185.XSHE': 0.0029940119760479044,
'002186.XSHE': 0.0029940119760479044,
'002187.XSHE': 0.0029940119760479044,
'002216.XSHE': 0.0029940119760479044,
'002241.XSHE': 0.0029940119760479044,
'002251.XSHE': 0.0029940119760479044,
'002275.XSHE': 0.0029940119760479044,
'002283.XSHE': 0.0029940119760479044,
'002284.XSHE': 0.0029940119760479044,
'002303.XSHE': 0.0029940119760479044,
'002305.XSHE': 0.0029940119760479044,
'002325.XSHE': 0.0029940119760479044,
'002328.XSHE': 0.0029940119760479044,
'002334.XSHE': 0.0029940119760479044,
'002339.XSHE': 0.0029940119760479044,
'002344.XSHE': 0.0029940119760479044,
'002360.XSHE': 0.0029940119760479044,
'002386.XSHE': 0.0029940119760479044,
'002391.XSHE': 0.0029940119760479044,
'002393.XSHE': 0.0029940119760479044,
'002394.XSHE': 0.0029940119760479044,
'002420.XSHE': 0.0029940119760479044,
'002429.XSHE': 0.0029940119760479044,
'002451.XSHE': 0.0029940119760479044,
'002461.XSHE': 0.0029940119760479044,
'002479.XSHE': 0.0029940119760479044,
'002482.XSHE': 0.0029940119760479044,
'002495.XSHE': 0.0029940119760479044,
'002509.XSHE': 0.0029940119760479044,
'002534.XSHE': 0.0029940119760479044,
'002535.XSHE': 0.0029940119760479044,
'002561.XSHE': 0.0029940119760479044,
'002588.XSHE': 0.0029940119760479044,
'002593.XSHE': 0.0029940119760479044,
'002600.XSHE': 0.0029940119760479044,
'002601.XSHE': 0.0029940119760479044,
'002623.XSHE': 0.0029940119760479044,
'002626.XSHE': 0.0029940119760479044,
'002641.XSHE': 0.0029940119760479044,
'002676.XSHE': 0.0029940119760479044,
'002694.XSHE': 0.0029940119760479044,
'002708.XSHE': 0.0029940119760479044,
'002713.XSHE': 0.0029940119760479044,
'002732.XSHE': 0.0029940119760479044,
'002760.XSHE': 0.0029940119760479044,
'002763.XSHE': 0.0029940119760479044,
'002783.XSHE': 0.0029940119760479044,
'002818.XSHE': 0.0029940119760479044,
'002820.XSHE': 0.0029940119760479044,
'002823.XSHE': 0.0029940119760479044,
'002868.XSHE': 0.0029940119760479044,
'002884.XSHE': 0.0029940119760479044,
'300039.XSHE': 0.0029940119760479044,
'300057.XSHE': 0.0029940119760479044,
'300069.XSHE': 0.0029940119760479044,
'300113.XSHE': 0.0029940119760479044,
'300185.XSHE': 0.0029940119760479044,
'300204.XSHE': 0.0029940119760479044,
'300216.XSHE': 0.0029940119760479044,
'300218.XSHE': 0.0029940119760479044,
'300227.XSHE': 0.0029940119760479044,
'300371.XSHE': 0.0029940119760479044,
'300376.XSHE': 0.0029940119760479044,
'300385.XSHE': 0.0029940119760479044,
'300415.XSHE': 0.0029940119760479044,
'300446.XSHE': 0.0029940119760479044,
'300512.XSHE': 0.0029940119760479044,
'300528.XSHE': 0.0029940119760479044,
'300575.XSHE': 0.0029940119760479044,
'300599.XSHE': 0.0029940119760479044,
'300676.XSHE': 0.0029940119760479044,
'300695.XSHE': 0.0029940119760479044,
'600010.XSHG': 0.0029940119760479044,
'600012.XSHG': 0.0029940119760479044,
'600017.XSHG': 0.0029940119760479044,
'600018.XSHG': 0.0029940119760479044,
'600020.XSHG': 0.0029940119760479044,
'600026.XSHG': 0.0029940119760479044,
'600031.XSHG': 0.0029940119760479044,
'600033.XSHG': 0.0029940119760479044,
'600035.XSHG': 0.0029940119760479044,
'600054.XSHG': 0.0029940119760479044,
'600064.XSHG': 0.0029940119760479044,
'600066.XSHG': 0.0029940119760479044,
'600073.XSHG': 0.0029940119760479044,
'600077.XSHG': 0.0029940119760479044,
'600081.XSHG': 0.0029940119760479044,
'600120.XSHG': 0.0029940119760479044,
'600126.XSHG': 0.0029940119760479044,
'600143.XSHG': 0.0029940119760479044,
'600153.XSHG': 0.0029940119760479044,
'600160.XSHG': 0.0029940119760479044,
'600169.XSHG': 0.0029940119760479044,
'600170.XSHG': 0.0029940119760479044,
'600173.XSHG': 0.0029940119760479044,
'600180.XSHG': 0.0029940119760479044,
'600185.XSHG': 0.0029940119760479044,
'600190.XSHG': 0.0029940119760479044,
'600210.XSHG': 0.0029940119760479044,
'600219.XSHG': 0.0029940119760479044,
'600223.XSHG': 0.0029940119760479044,
'600229.XSHG': 0.0029940119760479044,
'600252.XSHG': 0.0029940119760479044,
'600261.XSHG': 0.0029940119760479044,
'600266.XSHG': 0.0029940119760479044,
'600269.XSHG': 0.0029940119760479044,
'600279.XSHG': 0.0029940119760479044,
'600284.XSHG': 0.0029940119760479044,
'600287.XSHG': 0.0029940119760479044,
'600290.XSHG': 0.0029940119760479044,
'600300.XSHG': 0.0029940119760479044,
'600308.XSHG': 0.0029940119760479044,
'600323.XSHG': 0.0029940119760479044,
'600325.XSHG': 0.0029940119760479044,
'600327.XSHG': 0.0029940119760479044,
'600361.XSHG': 0.0029940119760479044,
'600362.XSHG': 0.0029940119760479044,
'600367.XSHG': 0.0029940119760479044,
'600381.XSHG': 0.0029940119760479044,
'600396.XSHG': 0.0029940119760479044,
'600420.XSHG': 0.0029940119760479044,
'600422.XSHG': 0.0029940119760479044,
'600461.XSHG': 0.0029940119760479044,
'600466.XSHG': 0.0029940119760479044,
'600497.XSHG': 0.0029940119760479044,
'600500.XSHG': 0.0029940119760479044,
'600557.XSHG': 0.0029940119760479044,
'600561.XSHG': 0.0029940119760479044,
'600565.XSHG': 0.0029940119760479044,
'600567.XSHG': 0.0029940119760479044,
'600580.XSHG': 0.0029940119760479044,
'600582.XSHG': 0.0029940119760479044,
'600586.XSHG': 0.0029940119760479044,
'600587.XSHG': 0.0029940119760479044,
'600589.XSHG': 0.0029940119760479044,
'600594.XSHG': 0.0029940119760479044,
'600595.XSHG': 0.0029940119760479044,
'600597.XSHG': 0.0029940119760479044,
'600600.XSHG': 0.0029940119760479044,
'600606.XSHG': 0.0029940119760479044,
'600611.XSHG': 0.0029940119760479044,
'600616.XSHG': 0.0029940119760479044,
'600618.XSHG': 0.0029940119760479044,
'600624.XSHG': 0.0029940119760479044,
'600628.XSHG': 0.0029940119760479044,
'600649.XSHG': 0.0029940119760479044,
'600657.XSHG': 0.0029940119760479044,
'600665.XSHG': 0.0029940119760479044,
'600667.XSHG': 0.0029940119760479044,
'600676.XSHG': 0.0029940119760479044,
'600684.XSHG': 0.0029940119760479044,
'600691.XSHG': 0.0029940119760479044,
'600694.XSHG': 0.0029940119760479044,
'600697.XSHG': 0.0029940119760479044,
'600705.XSHG': 0.0029940119760479044,
'600708.XSHG': 0.0029940119760479044,
'600710.XSHG': 0.0029940119760479044,
'600716.XSHG': 0.0029940119760479044,
'600717.XSHG': 0.0029940119760479044,
'600723.XSHG': 0.0029940119760479044,
'600742.XSHG': 0.0029940119760479044,
'600743.XSHG': 0.0029940119760479044,
'600780.XSHG': 0.0029940119760479044,
'600790.XSHG': 0.0029940119760479044,
'600795.XSHG': 0.0029940119760479044,
'600827.XSHG': 0.0029940119760479044,
'600835.XSHG': 0.0029940119760479044,
'600839.XSHG': 0.0029940119760479044,
'600846.XSHG': 0.0029940119760479044,
'600863.XSHG': 0.0029940119760479044,
'600873.XSHG': 0.0029940119760479044,
'600874.XSHG': 0.0029940119760479044,
'600875.XSHG': 0.0029940119760479044,
'600881.XSHG': 0.0029940119760479044,
'600887.XSHG': 0.0029940119760479044,
'600894.XSHG': 0.0029940119760479044,
'600897.XSHG': 0.0029940119760479044,
'600963.XSHG': 0.0029940119760479044,
'600966.XSHG': 0.0029940119760479044,
'600969.XSHG': 0.0029940119760479044,
'600978.XSHG': 0.0029940119760479044,
'600981.XSHG': 0.0029940119760479044,
'600987.XSHG': 0.0029940119760479044,
'600995.XSHG': 0.0029940119760479044,
'601000.XSHG': 0.0029940119760479044,
'601007.XSHG': 0.0029940119760479044,
'601016.XSHG': 0.0029940119760479044,
'601107.XSHG': 0.0029940119760479044,
'601126.XSHG': 0.0029940119760479044,
'601158.XSHG': 0.0029940119760479044,
'601163.XSHG': 0.0029940119760479044,
'601168.XSHG': 0.0029940119760479044,
'601169.XSHG': 0.0029940119760479044,
'601177.XSHG': 0.0029940119760479044,
'601200.XSHG': 0.0029940119760479044,
'601208.XSHG': 0.0029940119760479044,
'601216.XSHG': 0.0029940119760479044,
'601326.XSHG': 0.0029940119760479044,
'601333.XSHG': 0.0029940119760479044,
'601360.XSHG': 0.0029940119760479044,
'601366.XSHG': 0.0029940119760479044,
'601368.XSHG': 0.0029940119760479044,
'601388.XSHG': 0.0029940119760479044,
'601515.XSHG': 0.0029940119760479044,
'601566.XSHG': 0.0029940119760479044,
'601636.XSHG': 0.0029940119760479044,
'601669.XSHG': 0.0029940119760479044,
'601678.XSHG': 0.0029940119760479044,
'601866.XSHG': 0.0029940119760479044,
'601886.XSHG': 0.0029940119760479044,
'601898.XSHG': 0.0029940119760479044,
'601899.XSHG': 0.0029940119760479044,
'601919.XSHG': 0.0029940119760479044,
'601966.XSHG': 0.0029940119760479044,
'601985.XSHG': 0.0029940119760479044,
'601988.XSHG': 0.0029940119760479044,
'601992.XSHG': 0.0029940119760479044,
'601998.XSHG': 0.0029940119760479044,
'603000.XSHG': 0.0029940119760479044,
'603020.XSHG': 0.0029940119760479044,
'603100.XSHG': 0.0029940119760479044,
'603123.XSHG': 0.0029940119760479044,
'603156.XSHG': 0.0029940119760479044,
'603166.XSHG': 0.0029940119760479044,
'603167.XSHG': 0.0029940119760479044,
'603178.XSHG': 0.0029940119760479044,
'603198.XSHG': 0.0029940119760479044,
'603208.XSHG': 0.0029940119760479044,
'603298.XSHG': 0.0029940119760479044,
'603518.XSHG': 0.0029940119760479044,
'603556.XSHG': 0.0029940119760479044,
'603567.XSHG': 0.0029940119760479044,
'603585.XSHG': 0.0029940119760479044,
'603588.XSHG': 0.0029940119760479044,
'603599.XSHG': 0.0029940119760479044,
'603611.XSHG': 0.0029940119760479044,
'603730.XSHG': 0.0029940119760479044,
'603766.XSHG': 0.0029940119760479044,
'603768.XSHG': 0.0029940119760479044,
'603797.XSHG': 0.0029940119760479044,
'603817.XSHG': 0.0029940119760479044,
'603898.XSHG': 0.0029940119760479044,
'603919.XSHG': 0.0029940119760479044,
'603987.XSHG': 0.0029940119760479044,
'603993.XSHG': 0.0029940119760479044}
In [30]:

top_50_portf = OrderedDict()
for date, df in factor.groupby(level='date'):
    # Equal-weight the 50 stocks with the highest factor value on each date.
    top_50_portf[date] = pd.Series(1 / 50, index=df.nlargest(50).index.get_level_values('symbol'))
In [35]:

net_value(rebalance_dates, top_50_portf, excess_returns).plot(figsize=(18, 6))
Out[35]:
[Figure: excess-return net value of the top-50 portfolio]
In [36]:

top_10_portf = OrderedDict()
for date, df in factor.groupby(level='date'):
    # Equal-weight the 10 stocks with the highest factor value on each date.
    top_10_portf[date] = pd.Series(1 / 10, index=df.nlargest(10).index.get_level_values('symbol'))
In [38]:

net_value(rebalance_dates, top_10_portf, excess_returns).plot(figsize=(18, 6))
Out[38]:
[Figure: excess-return net value of the top-10 portfolio]
