
Elite Task | Round 2 of the Brokerage Research Report Replication Challenge

Posted by 交易资深人士 on July 22 at 15:53

Appendix: Factor-Check Strategy Code

Used to check the backtest performance of a factor strategy

'''
Adapted from the official template of the JoinQuant high-frequency factor mining competition
Initial capital: 2,000,000
Recommended backtest ranges:
    Daily rebalancing   - 1 year:  2018-07-20 to 2019-07-20
    Weekly rebalancing  - 3 years: 2016-07-20 to 2019-07-20
    Monthly rebalancing - 5 years: 2014-07-20 to 2019-07-20

Stock pool: CSI 500
Daily holdings: a fixed 20 stocks from the pool, held with equal weights
Rebalancing time: 14:50 by default
Trading costs: no slippage; stamp tax 0.1%, commission 0.025%, minimum 5 CNY per trade
'''

# Import libraries
from jqdata import *
import numpy as np
import pandas as pd
import jqfactor

################################################# Everything below is defined according to the factor being studied ########################################################

# Define the factor
def calc_factor(context):
    '''
    User-defined factor. Must return a Series whose index is the stock code and whose value is the factor value.
    The strategy buys the 20 stocks with the smallest factor values; to buy the 20 with the largest values instead, multiply the result by -1.0 (see the return section at the end of this function).
    '''
    # Get the stock pool; g.stock_pool is the universe for factor mining and must not be filtered further by the user
    stocks = g.stock_pool
    # Current time
    now = context.current_dt
    # Fetch price data
    df = get_price(stocks,end_date=context.previous_date,count=21,fields=['close'])['close'] 
    far = df.iloc[-1,:]/df.iloc[0,:] - 1
    
    ###### Optional data-processing steps (winsorization, neutralization, standardization); enable as needed ######
    # Winsorize around the median
    # far = jqfactor.winsorize_med(far, scale=3, inclusive=True, inf2nan=True)
    # Market-cap neutralization
    far = jqfactor.neutralize(far, how=['market_cap'], date=g.d)
    # Z-score standardization
    # far = jqfactor.standardlize(far, inf2nan=True)
    # Drop NaN values
    # far = far.dropna()
    
    return far
    # To select the 20 stocks with the LARGEST factor values, comment out `return far` above and use:
    # return far * -1.0
    

# Runs before the market opens
def before_market_open(context):
    '''
    Pre-open function; optional to implement
    '''
    pass

## Runs after the market closes
def after_market_close(context):
    '''
    Post-close function; optional to implement
    '''
    pass



################################################# Apart from the run-frequency settings, nothing below needs to be modified ########################################################

# Initialization: set the benchmark, costs, etc.
def initialize(context):
    # Use the CSI 500 Equal Weight index as the benchmark
    g.benchmark = '000982.XSHG'
    set_benchmark(g.benchmark)
    # Enable dynamic adjustment mode (real prices)
    set_option('use_real_price', True)
    ### Stock trading settings ###
    # Per-trade costs for stocks
    set_order_cost(OrderCost(close_tax=0.001, open_commission=0.00025, close_commission=0.00025, min_commission=5),type='stock')
    # Slippage
    set_slippage(FixedSlippage(0.0))
    # Initialize factor-analysis settings
    factor_analysis_initialize(context)
    # Define the stock pool
    set_stockpool(context)
    # Scheduled functions (reference_security only determines the reference trading calendar, so passing '000300.XSHG' or '510300.XSHG' is equivalent)
    run_daily(set_stockpool, time='before_open', reference_security='000300.XSHG')
    run_daily(before_market_open, time='before_open', reference_security='000300.XSHG')
    
    # Set the rebalancing frequency
    #run_daily(trade, time='14:50', reference_security='000300.XSHG')
    run_weekly(trade,1, time='14:50', reference_security='000300.XSHG')
    #run_monthly(trade,1, time='14:50', reference_security='000300.XSHG')
    
    run_daily(after_market_close, time='after_close', reference_security='000300.XSHG')

# Define the stock pool
def set_stockpool(context):
    # Get the index constituents
    stocks = get_index_stocks(g.benchmark,context.previous_date)
    paused_series = get_price(stocks,end_date=context.current_dt,count=1,fields='paused')['paused'].iloc[0]
    # g.stock_pool is the universe for factor mining; the user must not filter it further
    g.stock_pool = paused_series[paused_series==False].index.tolist()

# Define the global variables used below
def factor_analysis_initialize(context):
    # g.weight_method is the weighting scheme; "avg" means equal weighting
    g.weight_method = "avg"
    weight_method_model = {"avg": "equal weight"}
    # Number of stocks to hold
    g.buy_num = 20
    # g.sell: weights of stocks to sell
    g.sell = pd.Series(dtype=float)
    # g.buy: weights of stocks to buy
    g.buy = pd.Series(dtype=float)
    # g.ind: industry classification
    g.ind = 'jq_l1'
    # g.d: the previous trading day
    g.d = context.previous_date

# Analyze the factor and compute the stocks to buy at each rebalance
def fac(context):
    # Get factor values
    far = calc_factor(context)
    # Stocks to buy: the 20 with the smallest factor values
    try:
        buy = far.sort_values(ascending=True).index.tolist()[:g.buy_num]
    except AttributeError:
        # fall back to the deprecated Series.order() on very old pandas versions
        buy = far.order(ascending=True).index.tolist()[:g.buy_num]
    # Portfolio weights
    if g.weight_method == "avg":
        buy_weight = pd.Series(1. / len(buy), index=buy)
    else:
        raise ValueError('invalid weight_method %s' % g.weight_method)

    return buy_weight

# Trade
def trade(context):
    # Compute the stocks and weights to buy
    try:
        factor_analysis_initialize(context)
        g.buy = fac(context)
    except ValueError as e:
        if "Bin edges must be unique" in str(e):
            log.error("Error while computing factor values!")
        else:
            raise
    
    # Sell positions that are no longer in the buy list
    for s in context.portfolio.positions.keys():
        if s not in g.buy.index:
            order_target_value(s, 0)
    
    # Allocate 98% of total portfolio value across the buy list
    long_cash = context.portfolio.total_value
    for s in g.buy.index:
        order_target_value(s, g.buy.loc[s] * 0.98 * long_cash)

# Buy stocks
def buy(context):
    # Compute the stocks and weights to buy
    try:
        factor_analysis_initialize(context)
        g.buy = fac(context)
    except ValueError as e:
        if "Bin edges must be unique" in str(e):
            log.error("Error while computing factor values!")
        else:
            raise
    long_cash = context.portfolio.total_value
    for s in g.buy.index:
        order_target_value(s, g.buy.loc[s] * 0.98 * long_cash)

# Sell all holdings
def sell(context):
    for s in context.portfolio.positions.keys():
        order_target_value(s, 0)
        
        

Please start with the content below

Factor Analysis Base Template

# Import the required libraries
from jqfactor import *
from jqdata import *
import pandas as pd
import warnings  
warnings.filterwarnings('ignore') 

# Get a list of trading days
def get_tradeday_list(start,end,frequency=None,count=None):
    if count != None:
        df = get_price('000001.XSHG',end_date=end,count=count)
    else:
        df = get_price('000001.XSHG',start_date=start,end_date=end)
    if frequency == None or frequency =='day':
        return df.index
    else:
        df['year-month'] = [str(i)[0:7] for i in df.index]
        if frequency == 'month':
            return df.drop_duplicates('year-month').index
        elif frequency == 'quarter':
            df['month'] = [str(i)[5:7] for i in df.index]
            df = df[(df['month']=='01') | (df['month']=='04') | (df['month']=='07') | (df['month']=='10') ]
            return df.drop_duplicates('year-month').index
        elif frequency =='halfyear':
            df['month'] = [str(i)[5:7] for i in df.index]
            df = df[(df['month']=='01') | (df['month']=='06')]
            return df.drop_duplicates('year-month').index 
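
For reference, a minimal usage sketch of the helper above (illustrative only; it assumes the JoinQuant research environment, where get_price is available):

# Illustrative usage of get_tradeday_list (not part of the original template)
daily_days = get_tradeday_list('2019-01-01','2019-07-01')                       # every trading day in the range
monthly_days = get_tradeday_list('2019-01-01','2019-07-01',frequency='month')   # first trading day of each month
print(len(daily_days), len(monthly_days))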

=== Initialization ===

# Set the start and end dates
start='2016-07-01'
end='2019-07-01'
# Set the holding periods (trading days)
periods=(5,10,20)
# Set the number of quantile groups
quantiles=5
# Get the date list
date_list = get_tradeday_list(start=start,end=end,count=None) # all trading days in the backtest window
date_list
DatetimeIndex(['2016-07-01', '2016-07-04', '2016-07-05', '2016-07-06',
               '2016-07-07', '2016-07-08', '2016-07-11', '2016-07-12',
               '2016-07-13', '2016-07-14',
               ...
               '2019-06-18', '2019-06-19', '2019-06-20', '2019-06-21',
               '2019-06-24', '2019-06-25', '2019-06-26', '2019-06-27',
               '2019-06-28', '2019-07-01'],
              dtype='datetime64[ns]', length=730, freq=None)

=== Compute the Raw Factor Data ===

  • Define the factor-calculation function
  • Loop over the dates and compute the factor values

Price momentum, as the name suggests, is the tendency of a stock's price to continue its recent trend over a certain horizon. Contrary to long-standing overseas research and experience, however, in the A-share market we find that the reversal effect is far stronger than the momentum effect, and short-term reversal factors have delivered excellent historical returns.

Commonly used momentum factors nevertheless suffer from poor monotonicity and unstable long-leg returns. Following the research report, we therefore try to modify the momentum factor from several angles, looking for ways to improve the stock-selection performance and stability of the standard momentum factors.

The multi-factor report series has previously published test results for momentum-type factors. The factors tested there are the basic momentum factors we encounter most often, listed below:

[Image: table of the common momentum factors tested in the report]

Below we use the 21-day momentum factor as an example to walk through the analysis.
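
Written out, the factor below is simply the stock's total price change over the lookback window (21 closing prices, i.e. 20 daily returns):

$$\mathrm{far}_i = \frac{\mathrm{close}_{i,\,t}}{\mathrm{close}_{i,\,t-20}} - 1$$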

# Define the momentum factor to compute
def factor_cal(pool,date):
    df = get_price(pool,end_date=date,count=21,fields=['close'])['close'] 
    far = df.iloc[-1,:]/df.iloc[0,:] - 1
    return far
factor_cal(['000001.XSHE','600000.XSHG'],'2019-07-12')
000001.XSHE    0.142395
600000.XSHG   -0.022901
dtype: float64
# Create an empty dataframe to store the factor values
factor_df = pd.DataFrame()
# Loop over the date range and compute the factor values
mark = 1
for d in date_list:
    pool = get_index_stocks('000905.XSHG',date=d)
    far = factor_cal(pool,d)
    if mark == 1:
        factor_df = far
        mark = 0
    else:
        # append in chronological order so the column labels assigned below line up with the dates
        factor_df = pd.concat([factor_df,far],axis=1)
# Set the columns to the corresponding dates
factor_df.columns = date_list
factor_df.head(3)
[Output truncated: factor_df.head(3) — one row per stock (000006.XSHE, 000008.XSHE, 000009.XSHE) and one column per trading day from 2016-07-01 to 2019-07-01; stocks that were not in the CSI 500 on a given day show NaN.]

=== Refine the Factor Data ===

  • Standardization
  • Style neutralization

According to the report, the momentum (reversal) effect arises from biased investor perception: information is not digested promptly or fully. Put differently, the momentum (reversal) effect stems from investors' cognitive biases, i.e. noise trading.

From this we can infer that the concentration of retail investors, in other words liquidity, is an important variable affecting how well a momentum factor works. It is therefore natural to apply cross-sectional neutralization and strip the liquidity component, which proxies retail concentration, out of the raw momentum factor.

Accordingly, we next apply this specific style neutralization to the raw factor data.
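For intuition, cross-sectional style neutralization amounts to regressing each day's factor values on the style exposure and keeping the residuals. Below is a minimal numpy sketch of that idea; the neutralize() call in the next cell does this internally with JoinQuant's own exposure data, and liquidity_exposure here is a hypothetical input Series, not an API object.

# Conceptual sketch of cross-sectional neutralization (not the library's internal code)
import numpy as np
import pandas as pd

def neutralize_by_style(factor, liquidity_exposure):
    # factor, liquidity_exposure: pd.Series indexed by stock code
    common = factor.dropna().index.intersection(liquidity_exposure.dropna().index)
    y = factor.loc[common].values
    X = np.column_stack([np.ones(len(common)), liquidity_exposure.loc[common].values])
    beta = np.linalg.lstsq(X, y, rcond=None)[0]   # OLS: factor ~ intercept + liquidity
    residual = y - X.dot(beta)                    # the part of the factor unexplained by liquidity
    return pd.Series(residual, index=common)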

# Data cleaning: winsorization, standardization, neutralization, etc.
for date in date_list:
    # Process each day's cross-section
    #factor_df = winsorize_med(factor_df, scale=3, inclusive=True, inf2nan=True, axis=0) # median winsorization
    se = standardlize(factor_df[date], inf2nan=True)   # z-score standardization of the column
    se = neutralize(se, how=['liquidity'], date=date)  # strip out the part of the factor explained by liquidity
    factor_df[date] = se
# Transpose into the layout required by the analyzer (dates as rows)
factor_df = factor_df.T
factor_df.head()
[Output truncated: factor_df.head() — after the transpose, rows are trading days (2016-07-01 through 2016-07-07 shown) and columns are CSI 500 constituent codes (000006.XSHE ... 603939.XSHG), holding the standardized, liquidity-neutralized factor values; stocks outside the index on a given day show NaN.]

Note!!!

The processed factor_df must be formatted as follows:

  • factor_df is a pandas DataFrame
  • its index is the date
  • its columns are the stock codes

A DataFrame meeting this format can be passed directly into the evaluation template below (a quick sanity-check sketch follows).
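
A minimal sanity-check sketch before handing the data to the template (illustrative; adapt to your own variable names):

# Sanity-check the expected layout (illustrative)
assert isinstance(factor_df, pd.DataFrame)
assert isinstance(factor_df.index, pd.DatetimeIndex)   # rows are dates
assert factor_df.columns.dtype == object               # columns are stock codes such as '000001.XSHE'
# If your frame is stocks-by-dates instead, transpose it first:
# factor_df = factor_df.T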

Factor Performance Check

With the factor data in the right format, the next step uses JoinQuant's factor analysis template to run a return analysis on the computed factor.

We will check the factor's performance from three angles:

1. IC information ratio

2. Quantile returns

3. Turnover

In the return analysis, quantile mean returns are reported with quantile 1 holding the smallest factor values and quantile 5 the largest.

Quantile returns: the average return each quantile earns over holding periods of 5, 10 and 20 days.
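
For intuition, the quantile grouping behind these statistics can be reproduced on a single day's cross-section with pandas (a conceptual sketch, not the analyzer's internal code):

# Conceptual sketch: split one day's factor values into 5 quantile groups
one_day = factor_df.iloc[0].dropna()                     # cross-section of factor values for the first date
groups = pd.qcut(one_day, q=5, labels=[1, 2, 3, 4, 5])   # 1 = smallest factor values, 5 = largest
print(groups.value_counts().sort_index())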

# Run single-factor analysis on the computed factor values
far = analyze_factor(factor=factor_df, start_date=date_list[0], end_date=date_list[-1], weight_method='avg', industry='jq_l1', quantiles=quantiles, periods=periods,max_loss=0.3)

IC analysis

# Print the information coefficient (IC) table
far.plot_information_table(group_adjust=False, method='rank')
IC analysis
             period_5  period_10  period_20
IC Mean         0.015      0.022      0.027
IC Std.         0.095      0.104      0.115
IR              0.153      0.213      0.235
t-stat(IC)      4.140      5.742      6.342
p-value(IC)     0.000      0.000      0.000
IC Skew         1.382      2.255      3.431
IC Kurtosis     6.468     12.274     21.583

IC analysis:

The table above shows the IC statistics for this factor. The two indicators we usually focus on are the IC mean and the IR.

The results show that, across the 5-, 10- and 20-day holding periods, the 20-day horizon has both the highest mean IC and the highest IR; however, the IC is still below 0.03, so the factor's effectiveness leaves room for improvement.
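
As a reminder of how the IR row relates to the rows above it, the information ratio is the mean IC scaled by its standard deviation; for the 20-day horizon in the table:

$$\mathrm{IR} = \frac{\overline{\mathrm{IC}}}{\sigma_{\mathrm{IC}}} = \frac{0.027}{0.115} \approx 0.235$$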

Quantile returns

# Plot the mean return of each quantile
far.plot_quantile_returns_bar(by_group=False, demeaned=0, group_adjust=False)
[Figure: mean period-wise return by factor quantile]

Quantile return analysis:

The five-group mean returns show a reasonably clear grouping effect: as the factor value increases from group 1 to group 5, the size of each group's decline shrinks accordingly.

Across holding periods, the monotonic pattern looks most stable for the 20-day holding period (green bars).

Turnover analysis

# Print the turnover tables
far.plot_turnover_table()
Turnover analysis
                          period_5  period_10  period_20
Quantile 1 Mean Turnover     0.408      0.572      0.790
Quantile 2 Mean Turnover     0.631      0.723      0.799
Quantile 3 Mean Turnover     0.657      0.734      0.773
Quantile 4 Mean Turnover     0.626      0.724      0.801
Quantile 5 Mean Turnover     0.377      0.543      0.781

                                  period_5  period_10  period_20
Mean Factor Rank Autocorrelation     0.695      0.434     -0.034

Turnover analysis:

A factor's turnover measures how stocks move in and out of each quantile over a given horizon. As an example of the calculation: if the first quantile holds 30 stocks and one of them changes after 5 days, the turnover is

1/30 * 100% = 3.33%

For the 10- and 20-day turnover, each day the constituents of quantiles 1 through 5 are compared with the constituents of the same quantile 10 or 20 days earlier.
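
A minimal sketch of that turnover arithmetic in Python (hypothetical holdings, purely to illustrate the calculation):

# Hypothetical example: 30 stocks in quantile 1, one of them replaced after 5 days
prev_holdings = set('stock_%02d' % i for i in range(30))
curr_holdings = (prev_holdings - {'stock_00'}) | {'stock_30'}
turnover = len(curr_holdings - prev_holdings) / float(len(curr_holdings))
print('%.2f%%' % (turnover * 100))   # 3.33%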

From these results, across the 5-, 10- and 20-day horizons the 5-day turnover is the lowest and the 20-day turnover the highest, with no obvious differences between quantiles 1 through 5.

Factor Performance Check Summary

Conclusion:

The IC analysis, quantile return check and turnover analysis above give us a preliminary picture of this example factor.

The factor shows some ability to predict returns, but not a significant one: the best mean IC is 0.027, below 0.03. There is a grouping effect, and turnover is highest when the holding period matches the factor's lookback window. The factor needs further refinement.

Full Factor Analysis Overview

For details, see:

https://www.joinquant.com/help/api/help?name=factor#%E5%9B%A0%E5%AD%90%E5%88%86%E6%9E%90%E7%BB%93%E6%9E%9C

In the return analysis, the factor's performance is examined through the mean return of each quantile, the cumulative return of each quantile, and the return of the long-short quantile portfolio. Quantile 1 holds the smallest factor values and quantile 5 the largest.

Quantile returns: the average return each quantile earns over holding periods of 5, 10 and 20 days.

Cumulative quantile returns: the cumulative value of each quantile's holding returns.

Long-short return: the return of a portfolio that is long quantile 5 (largest factor values) and short quantile 1 (smallest factor values).
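
In other words, for each holding period the long-short return is simply the difference between the top- and bottom-quantile portfolio returns:

$$r_{\text{long-short}} = r_{Q5} - r_{Q1}$$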

# Run the full factor-analysis tear sheet
far.create_full_tear_sheet(demeaned=False, group_adjust=False, by_group=False, turnover_periods=None, avgretplot=(5, 15), std_bar=False)
Quantile statistics
                       min       max      mean       std  count    count %
factor_quantile
1                -0.085382  0.002653 -0.006845  0.006105  21483  21.186599
2                -0.028046  0.005780 -0.002510  0.003623  21112  20.820718
3                -0.020089  0.008544 -0.000462  0.003327  19458  19.189538
4                -0.014201  0.013963  0.001925  0.003238  19527  19.257586
5                -0.006711  0.156010  0.008186  0.007241  19819  19.545558
-------------------------

Returns analysis
                                               period_1  period_5  period_20
Ann. alpha                                       -0.508    -0.155     -0.101
beta                                              0.035    -0.012     -0.046
Mean Period Wise Return Top Quantile (bps)      -46.423   -23.300    -16.870
Mean Period Wise Return Bottom Quantile (bps)    13.029    -7.557    -11.705
Mean Period Wise Spread (bps)                   -59.452   -15.601     -5.019
-------------------------

IC analysis
             period_1  period_5  period_20
IC Mean        -0.128    -0.070     -0.037
IC Std.         0.100     0.088      0.074
IR             -1.284    -0.795     -0.501
t-stat(IC)    -20.022   -12.386     -7.805
p-value(IC)     0.000     0.000      0.000
IC Skew         0.219     0.033      0.051
IC Kurtosis     0.959     1.502      0.076
-------------------------

Turnover analysis
                          period_1  period_5  period_20
Quantile 1 Mean Turnover     0.756     0.764      0.773
Quantile 2 Mean Turnover     0.780     0.784      0.788
Quantile 3 Mean Turnover     0.791     0.800      0.806
Quantile 4 Mean Turnover     0.794     0.799      0.804
Quantile 5 Mean Turnover     0.755     0.771      0.781

                                  period_1  period_5  period_20
Mean Factor Rank Autocorrelation     0.049     0.035      0.028
-------------------------


Functions for Retrieving the Analysis Data

This part is optional. Following the official documentation of the factor analysis module, you can retrieve its raw data and carry out further custom analysis.

# Compute each quantile's daily cumulative return for the given holding period
df = far.calc_cumulative_return_by_quantile(period=1)
# Plot the result
df.plot(figsize=(15,6))
[Figure: daily cumulative return of each factor quantile, period=1]
