Notebook
In [2]:
from quantopian.pipeline import Pipeline, CustomFactor
from quantopian.research import run_pipeline
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline.filters import Q3000US, StaticAssets
from quantopian.pipeline.factors import SimpleMovingAverage
from alphalens.utils import get_clean_factor_and_forward_returns
from alphalens.tears import create_full_tear_sheet
import talib
In [3]:
base_universe = Q3000US()
In [4]:
class Factor_N_Days_Ago(CustomFactor):
    """Return the oldest value in the factor's window, i.e. the input
    factor's value from window_length - 1 trading days before its most
    recent reading."""

    def compute(self, today, assets, out, input_factor):
        out[:] = input_factor[0]
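
A minimal sketch (plain NumPy, outside the pipeline) of what compute receives, assuming window_length=2 and two assets; the values are made up for illustration. Pipeline windows are ordered oldest to newest, so row 0 is the earlier observation:

import numpy as np

input_factor = np.array([[10.0, 20.0],    # older row (one trading day earlier)
                         [11.0, 21.0]])   # most recent row
out = np.empty(2)
out[:] = input_factor[0]                  # -> array([10., 20.])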
In [5]:
class sma(CustomFactor):
    """Window-safe passthrough for the 50- and 200-day simple moving
    averages, so they can be used as inputs to other factors."""

    window_safe = True

    sma50 = SimpleMovingAverage(inputs=[USEquityPricing.close], window_length=50)
    sma200 = SimpleMovingAverage(inputs=[USEquityPricing.close], window_length=200)

    inputs = [sma50, sma200]
    window_length = 1
    outputs = ["sma50", "sma200"]

    def compute(self, today, assets, out, sma50, sma200):
        out.sma50[:] = sma50
        out.sma200[:] = sma200
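
The passthrough above is needed because SimpleMovingAverage is not flagged window_safe, so it cannot feed a downstream factor that looks back more than one day; re-emitting both averages through a CustomFactor that sets window_safe = True lets Factor_N_Days_Ago lag them. A small sketch of two equivalent ways to consume the outputs (variable names here are illustrative only):

smas = sma()
sma50_safe = smas.sma50       # attribute access on the multi-output factor
sma200_safe = smas.sma200
# or unpack both outputs at once, as make_pipeline does below:
sma50_safe, sma200_safe = sma()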

        
        
In [9]:
def make_pipeline():

    # Current 50- and 200-day simple moving averages.
    sma50 = SimpleMovingAverage(inputs=[USEquityPricing.close], window_length=50)
    sma200 = SimpleMovingAverage(inputs=[USEquityPricing.close], window_length=200)

    # Window-safe copies of the two averages, and their values one trading day earlier.
    sma50_1, sma200_1 = sma()
    sma50_lagged = Factor_N_Days_Ago([sma50_1], window_length=2)
    sma200_lagged = Factor_N_Days_Ago([sma200_1], window_length=2)

    # Golden cross: the 50-day SMA is now above the 200-day SMA,
    # after being below it one trading day earlier.
    crossover_condition = (sma200 < sma50) & (sma200_lagged > sma50_lagged)

    return Pipeline(
        columns={
            'crossover_condition': crossover_condition,
        },
        screen=base_universe,
    )

result = run_pipeline(make_pipeline(), '2019-5-1', '2020-4-22')

Pipeline Execution Time: 9.61 Seconds
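
An optional sketch for eyeballing the raw signal before handing it to Alphalens: result is indexed by (date, asset), so the rows where the condition fired can be filtered directly.

crossovers = result[result['crossover_condition']]
crossovers.head()                            # first few golden-cross rows
crossovers.groupby(level=0).size().head()    # number of signals per day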
In [11]:
asset_list = result.index.levels[1].unique()
pricing = get_pricing(asset_list, start_date="2019-4-5", end_date="2020-5-5", fields="close_price")
merged_data = get_clean_factor_and_forward_returns(result, pricing, quantiles=None, bins=[-1, .5, 2])

create_full_tear_sheet(merged_data)
Dropped 0.7% entries from factor data: 0.7% in forward returns computation and 0.0% in binning phase (set max_loss=0 to see potentially suppressed Exceptions).
max_loss is 35.0%, not exceeded: OK!
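
Because the factor is boolean, its numeric values are only 0 and 1, so bins=[-1, .5, 2] simply routes False into quantile 1 and True into quantile 2. A standalone sketch of that binning with pandas:

import pandas as pd

pd.cut([0, 1], bins=[-1, .5, 2], labels=False)   # -> array([0, 1])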
Quantiles Statistics

factor_quantile      min      max      mean     std     count      count %
1                  False    False     False     0.0    571853    99.699604
2                   True     True      True     0.0      1723     0.300396
Returns Analysis

                                                     1D        5D       10D
Ann. alpha                                          NaN       NaN       NaN
beta                                                NaN       NaN       NaN
Mean Period Wise Return Top Quantile (bps)      -15.999    -5.619    -3.522
Mean Period Wise Return Bottom Quantile (bps)     0.024     0.014     0.013
Mean Period Wise Spread (bps)                   -16.025    -6.555    -4.380
Information Analysis

                        1D       5D      10D
IC Mean             -0.002   -0.002   -0.002
IC Std.              0.023    0.023    0.022
Risk-Adjusted IC    -0.100   -0.075   -0.114
t-stat(IC)             NaN      NaN      NaN
p-value(IC)            NaN      NaN      NaN
IC Skew                NaN      NaN      NaN
IC Kurtosis            NaN      NaN      NaN
Turnover Analysis

                                        1D       5D      10D
Quantile 1 Mean Turnover             0.007    0.010    0.014
Quantile 2 Mean Turnover             1.000    0.993    1.000

                                        1D       5D      10D
Mean Factor Rank Autocorrelation    -0.003   -0.003   -0.003
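
If you prefer numbers to plots, Alphalens also exposes the underlying tables; a sketch (assuming you only want the mean forward return per quantile from the same merged_data) using alphalens.performance:

from alphalens.performance import mean_return_by_quantile

mean_ret, std_err = mean_return_by_quantile(merged_data)
mean_ret   # mean forward return per quantile and holding period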