I am new to Python. I am trying to combine two strategies from the Quantopian community (the Global Market Rotation strategy and the Modern Portfolio Theory strategy); the idea comes from http://gestaltu.blogspot.ca/2012/05/adaptive-asset-allocation-true.html?m=1
But in this initial draft I get the following runtime error and cannot figure out what is wrong:
There was a runtime error.
TypeError: 'NoneType' object is not iterable
... USER ALGORITHM:54, in normalise
    ret, vol = get_metrics(data, stock, period)
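My best guess is that get_metrics is returning None. To illustrate, here is a tiny standalone snippet (nothing Quantopian-specific; get_metrics_stub is a made-up stand-in, purely for illustration) that reproduces the same TypeError when a function returns None and the result is unpacked into two variables:

def get_metrics_stub(enough_history):
    # stand-in for get_metrics: I suspect a batch transform returns None
    # until its window_length of history has accumulated
    if not enough_history:
        return None
    return 0.05, 0.12  # (performance, volatility)

ret, vol = get_metrics_stub(True)      # unpacks fine
# ret, vol = get_metrics_stub(False)   # TypeError: 'NoneType' object is not iterable

result = get_metrics_stub(False)
if result is None:
    print('not enough history yet')    # the case I think my algorithm is hitting
else:
    ret, vol = result

Is checking the result for None inside normalise() (and skipping that day until enough history exists) the right kind of fix, or am I misusing the batch transform?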
Could someone please point out what I am doing wrong? Thanks a lot! My full algorithm is below.
import pandas as pd
import numpy as np
import numpy.linalg as la
import math
def initialize(context):
    #parameters
    context.top_n_sector = 5
    context.nobs = 252
    context.recalibrate = 126  #re-estimate every so often (in days)
    context.leverage = 1

    #setup the identifiers and data storage
    context.tickers = ['xlf', 'xle', 'xlu', 'xlk', 'xlb', 'xlp', 'xly', 'xli', 'xlv']
    context.sids = [sid(19656), sid(19655),
                    sid(19660), sid(19658),
                    sid(19654), sid(19659),
                    sid(19662), sid(19657),
                    sid(19661)]
    context.topN_tickers = np.empty(context.top_n_sector, dtype=complex)
    context.topN_sids = np.empty(context.top_n_sector, dtype=complex)
    context.data = pd.DataFrame({k: pd.Series() for k in context.topN_tickers})
    context.onevec = np.asmatrix(np.ones((len(context.topN_tickers), 1)))
    context.daysToRecalibration = 0
    context.month = None
    context.period = 72  #3 months period
    context.ranked = {}
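# get_metrics is a batch transform: as far as I understand, 'dp' is a pandas panel
# holding the last window_length (73) trading days of the data seen by handle_data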
@batch_transform(window_length=73)
def get_metrics(dp, security, period):
    '''Calculate performance and volatility for the given period.'''
    print("The get_metrics is: ")
    # get all the close prices of the security over the last 73 days (3 months)
    prices = dp['close_price'][security.sid][-period-1:]
    begin, end = prices[-period], prices[-1]
    volatility = (pd.rolling_std(prices, 20) * math.sqrt(period/20)).mean()
    return (end - begin)/begin, volatility/begin
def normalise(data, sids, period):
    # need to return the normalisation bounds for return and volatility
    stocks_ret = {}
    stocks_vol = {}
    print("The normalise in sids: is: " + str(sids))
    for stock in sids:
        if data is not None and stock is not None and period is not None:
            print("The stock in sids: is: " + stock.symbol)
            print("The stock in sids: is: " + str(data))
            ret, vol = get_metrics(data, stock, period)
            stocks_ret[stock] = ret
            stocks_vol[stock] = vol
    # return max = highest performance, while volatility max is lowest volatility
    ret_max, ret_min, vol_max, vol_min = max(stocks_ret.values()), min(stocks_ret.values()), \
                                         min(stocks_vol.values()), max(stocks_vol.values())
    return ret_max, ret_min, vol_max, vol_min
def get_best(data, sids, period):
    ranked = {}
    ret_max, ret_min, vol_max, vol_min = normalise(data, sids, period)
    for stock in sids:
        print("The stock in stocks: is: " + stock.symbol)
        print("The stock in stocks: is: " + str(data))
        ret, vol = get_metrics(data, stock, period)
        #log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
        ret = (ret - ret_min)/(ret_max - ret_min)
        vol = (vol - vol_min)/(vol_max - vol_min)
        rank = ret * 0.7 + vol * 0.3
        ranked[stock] = rank
        log.debug('%s: return: %.2f, vol: %.2f, rank: %.2f' %
                  (stock.symbol, ret, vol, rank))
    return ranked
def handle_data(context, data):
    if context.portfolio.starting_cash == context.portfolio.cash:
        #buy into the benchmark while we build the starting data set
        order(sid(8554), math.floor(context.portfolio.starting_cash/data[sid(8554)].close_price))

    if len(context.data.index) < context.nobs:
        #still recording data
        newRow = pd.DataFrame({k: float(data[e].returns()) for k, e in zip(context.tickers, context.sids)}, index=[0])
        context.data = context.data.append(newRow, ignore_index=True)
    else:
        context.ranked = get_best(data, context.sids, context.period)  #get top n security
        #get the top n security from stocks dictionary to list: context.tickers, context.sids
        for i in range(context.top_n_sector):
            pos = list(context.tickers).index(context.ranked.keys()[i])
            context.topN_sids[i] = context.sids[pos]
            context.topN_tickers[i] = context.tickers[pos]

        context.data = pd.DataFrame({k: pd.Series() for k in context.topN_tickers})
        context.onevec = np.asmatrix(np.ones((len(context.topN_tickers), 1)))

        newRow = pd.DataFrame({k: float(data[e].returns()) for k, e in zip(context.topN_tickers, context.topN_sids)}, index=[0])
        context.data = context.data.append(newRow, ignore_index=True)
        context.data = context.data[1:len(context.data.index)]

        if context.portfolio.positions[sid(8554)].amount != 0:
            #data gathering time is done, get out of the benchmark
            order(sid(8554), -1.0*context.portfolio.positions[sid(8554)].amount)
            #wait a day for the trades to clear before placing the new trades
            return

        if context.daysToRecalibration == 0:
            context.daysToRecalibration = context.recalibrate
            #recalibrate
            log.info('recalibrating...')
            #calculate the minimum variance portfolio weights
            precision = np.asmatrix(la.inv(context.data.cov()))
            pimv = precision*context.onevec / (context.onevec.T*precision*context.onevec)
            pimv = {e: pimv[i, 0] for i, e in enumerate(context.topN_tickers)}
            #open all positions:
            startingCash = (context.portfolio.starting_cash + context.portfolio.pnl)*context.leverage
            for i, e in enumerate(context.topN_sids):
                currentPosition = context.portfolio.positions[e].amount
                newPosition = math.floor(startingCash*pimv[context.topN_tickers[i]]/data[e].price)
                order(e, newPosition - currentPosition)
        else:
            context.daysToRecalibration -= 1

    record(c=context.portfolio.positions_value)
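For reference, the recalibration branch above is trying to compute the standard closed-form minimum variance weights, w = (Cov^-1 * 1) / (1' * Cov^-1 * 1), from the covariance of the stored daily returns. Here is a small self-contained numpy sanity check of that formula (the return numbers are made up, purely for illustration):

import numpy as np

# toy daily returns for 3 assets (made-up numbers, only to check the formula)
returns = np.array([[ 0.01,  0.02, -0.01],
                    [ 0.00,  0.01,  0.02],
                    [-0.01,  0.03,  0.00],
                    [ 0.02, -0.01,  0.01]])

cov = np.cov(returns, rowvar=False)      # sample covariance matrix of the columns
ones = np.ones((cov.shape[0], 1))
precision = np.linalg.inv(cov)           # inverse covariance ("precision") matrix

# minimum variance weights: w = (precision . 1) / (1' . precision . 1)
w = precision.dot(ones) / ones.T.dot(precision).dot(ones)
print(w.ravel(), w.sum())                # the weights sum to 1 by construction

If the weights I get from context.data.cov() do not behave like this, at least I will know the problem is in how context.data is being filled rather than in the formula itself.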