Back to Community
The other two thirds of the year of live trading

Following up on https://www.quantopian.com/posts/a-year-of-live-trading , here are the other two algos I was running during that period. Note that there was evidently some backtest overfitting, despite my best efforts.

8 responses

Analysis of the vol trading algo.

Loading notebook preview...

Vol trading (shorting) algo.

Clone Algorithm
7
Loading...
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
Returns 1 Month 3 Month 6 Month 12 Month
Alpha 1 Month 3 Month 6 Month 12 Month
Beta 1 Month 3 Month 6 Month 12 Month
Sharpe 1 Month 3 Month 6 Month 12 Month
Sortino 1 Month 3 Month 6 Month 12 Month
Volatility 1 Month 3 Month 6 Month 12 Month
Max Drawdown 1 Month 3 Month 6 Month 12 Month
import numpy as np
import pandas as pd
from scipy import stats
from pytz import timezone
import datetime
import math
import time
import re
from pykalman import KalmanFilter
from pandas.stats.api import ols
from sklearn.decomposition import FastICA, PCA
from sklearn import cluster
from sklearn import covariance
from statsmodels.stats.stattools import jarque_bera
from statsmodels.tsa.stattools import adfuller 
import functools
from statsmodels.tsa.stattools import coint
import random
import itertools
from statsmodels.stats.moment_helpers import cov2corr, corr2cov, se_cov
from cvxopt import matrix
import cvxopt
cvxopt.solvers.options['show_progress'] = False
from zipline.utils import tradingcalendar
vixUrl = 'http://www.cboe.com/publish/scheduledtask/mktdata/datahouse/vixcurrent.csv'
vxstUrl = 'http://www.cboe.com/publish/scheduledtask/mktdata/datahouse/vxstcurrent.csv'
vxvUrl = 'http://www.cboe.com/publish/scheduledtask/mktdata/datahouse/vxvdailyprices.csv'
vxmtUrl = 'http://www.cboe.com/publish/scheduledtask/mktdata/datahouse/vxmtdailyprices.csv'
vvixUrl = 'http://www.cboe.com/publish/scheduledtask/mktdata/datahouse/vvixtimeseries.csv'

PctDailyVolatilityTarget = 0.015
DiversificationMultiplier = 1.0 # keep at 1.0 since all our assets are perfectly correlated!
VolHalfLife = 5 * 2 # two week vol half life
PositionInertiaLevel = 0.1
LeverageCap = 2.0
History = 128
# turn this to False if you are worried about ETN issuer
# risk
OnlyVXX = True

def initialize(context):
    """One-time setup for the vol-trading system.

    Picks the volatility ETP universe (VXX only by default, per the
    OnlyVXX flag), schedules the daily allocation and order-cancel jobs,
    registers CBOE index CSV fetchers (VIX, VXV, VXMT; VXST and VVIX are
    disabled below), and sets trading costs.

    NOTE(review): the fetch_csv sources are cboe.com CSVs whose layouts
    have changed historically -- verify skiprows/column names still match.
    """
    set_symbol_lookup_date("2015-10-18")
    
    context.fetch_failed = False
    context.vxx = sid(38054) # long vol ETN (iPath / Barclays)
    context.xiv = sid(40516) # short vol ETN (VelocityShares / Credit Suisse)
    context.vixy = sid(40669) # long vol ETF (ProShares)
    context.svxy = sid(41968) # short vol ETF (ProShares)
    context.spy = sid(8554)
    if (not OnlyVXX): 
        # spread exposure across both ETN and ETF wrappers (issuer-risk hedge)
        context.Weights = pd.Series({
            context.vxx: 0.25, 
            context.xiv: 0.25,
            context.vixy: 0.25,
            context.svxy: 0.25
            })
    else:
        context.Weights = pd.Series({
            context.vxx: 1.0
            })
    # trading universe = weighted ETPs plus SPY (SPY is needed for the realized-vol calc)
    context.Uni = np.append(context.Weights.index.get_values(), context.spy)
    # calculate our allocation every day mid-morning
    schedule_function(highbar_allocation, date_rule=date_rules.every_day(), time_rule=time_rules.market_open(minutes=15))
    schedule_function(highbar_cancel_all, date_rule=date_rules.every_day(), time_rule=time_rules.market_close(minutes=1))
    set_benchmark(context.spy)
    # equity/max_dd support the (currently disabled) drawdown tracking in handle_data
    context.equity = pd.Series() 
    context.max_dd = 0.0
    fetch_csv(vixUrl, 
              symbol='VIX', 
              skiprows=1,
              date_column='Date', 
              pre_func=addFieldsVIX,
              post_func=shift_data)
 #   fetch_csv(vxstUrl, 
 #             symbol='VXST', 
 #             skiprows=3,
 #             date_column='Date', 
 #             pre_func=addFieldsVXST,
 #             post_func=shift_data)
    fetch_csv(vxvUrl, 
              symbol='VXV', 
              skiprows=2,
              date_column='Date', 
              pre_func=addFieldsVXV,
              post_func=shift_data)
    fetch_csv(vxmtUrl, 
              symbol='VXMT', 
              skiprows=2,
              date_column='Date', 
              pre_func=addFieldsVXMT,
              post_func=shift_data)
#    fetch_csv(vvixUrl, 
#              symbol='VVIX', 
#              skiprows=1,
#              date_column='Date', 
#              pre_func=addFieldsVVIX,
#              post_func=shift_data)
    set_slippage(slippage.FixedSlippage(spread=0.02))
    set_commission(commission.PerShare(cost=0.0035, min_trade_cost=0.35))

def drawdown(log_rets):
    """Return the current (most recent) drawdown of the equity curve
    implied by cumulative log returns, as a positive fraction of peak."""
    curve = log_rets.cumsum() + 1
    running_peak = curve.cummax()
    dd_series = (running_peak - curve).fillna(0)
    return dd_series.iloc[-1]

def handle_data(context, data):
    """Per-minute hook; intentionally a no-op.

    The disabled code below tracked realized drawdown from the live
    equity curve (context.equity / context.max_dd set in initialize).
    """
    pass
#    context.equity[get_datetime()] = context.portfolio.portfolio_value
#    actual_returns = np.log(context.equity).diff().fillna(0)
#    dd = drawdown(actual_returns)
#    if (dd > context.max_dd):
#        log.info("Max DD: %02.02f%%" % (dd*100.0))
#        context.max_dd = dd
#    elif ((dd < 0.001) & (len(context.equity) > 10000)): 
#        # to avoid running out of memory (!)
#        context.equity = pd.Series()

def forecast_collar(x):
    """Clamp a scalar forecast into the system's [-20, 20] range."""
    if x > 20.0:
        return 20.0
    if x < -20.0:
        return -20.0
    return x
        
def slope_double_logit_forecast(slopes):
    """Map slope ratios onto [-20, 20] via two stacked logistic curves
    centered at 0.85 and 1.15, producing a flat dead-zone around 1.0.
    Absolute output averages roughly 10; no extra collar is needed."""
    upper_leg = 20.0 / (1.0 + np.exp(-20.0 * (slopes - 1.15)))
    lower_leg = 20.0 / (1.0 + np.exp(-20.0 * (slopes - 0.85)))
    return upper_leg + lower_leg - 20.0

def slope_logit_forecast(slopes):
    """Single logistic mapping of a slope ratio onto [-20, 20]; a ratio
    of exactly 1.0 maps to 0, larger ratios toward +20."""
    decay = np.exp(-10.0 * (slopes - 1.0))
    return 40.0 / (1.0 + decay) - 20.0

def vrp_norm(x):
    """Linearly map the volatility risk premium to a [-20, 20] forecast.

    3.5 is neutral (0); 7 maps to -10 (VIX rich, so short VXX); 0 maps
    to +10 (VIX cheap, so long VXX). Collared at +/-20.
    """
    scaled = (x - 3.5) * (-10.0 / 3.5)
    return forecast_collar(scaled)

def vrp_forecast(context, prices, span):
    """Volatility-risk-premium signals: implied (VIX) minus realized SPY vol.

    Returns a (raw, smoothed) pair of [-20, 20] forecasts from vrp_norm;
    `span` is the EW window for the realized-vol estimate.
    """
    # realized vol of SPY daily log returns; *16*100 presumably annualizes
    # (~sqrt(256)) and converts to percentage points to match VIX units -- confirm
    hvol = (pd.ewmstd(np.log(prices[context.spy]).diff(), span=span, bias=False) * 16.0 * 100.0).dropna()
    vrp = (context.vix_vals - hvol)
    # 5-day smoothed copy of the premium
    svrp = pd.ewma(vrp, span=5)
    return vrp_norm(vrp.iloc[-1]), vrp_norm(svrp.iloc[-1])
    
def highbar_forecast(context, data, prices):
    """Blend term-structure-slope and vol-risk-premium signals into one
    collared [-20, 20] forecast, applied long to the long-vol ETPs and
    negated for the short-vol ETPs.
    """
    # near end of the curve: VIX / VXV ratio through the logistic map
    short_slopes = slope_logit_forecast(context.vix_vals / context.vxv_vals)
    short_slope_forecast = short_slopes.iloc[-1]
    # 5-day smoothed variant of the same signal
    sshort_slope_forecast = pd.ewma(short_slopes,span=5).iloc[-1]
    record(short_slope_forecast=short_slope_forecast)
    
    mid_slopes = slope_logit_forecast( ((context.vxv_vals / context.vxmt_vals) - 1.0)*1.6 + 1.0 )
    # scale our mid-slopes to roughly the same range about 1 as the short slopes
    mid_slope_forecast = mid_slopes.iloc[-1]
    smid_slope_forecast = pd.ewma(mid_slopes,span=5).iloc[-1]
    record(mid_slope_forecast=mid_slope_forecast)
    
    # VRP at three realized-vol horizons, each with raw and smoothed variants
    vrp_5, svrp_5 = vrp_forecast(context, prices, 5)
    vrp_10, svrp_10 = vrp_forecast(context, prices, 10)
    vrp_20, svrp_20 = vrp_forecast(context, prices, 20)
    
    # kitchen sink it -- weights sum to 1.0, then boosted 1.4x before collaring
    short_forecast = (0.075 * vrp_5 + 
                      0.075 * svrp_5 + 
                      0.05 * vrp_10 + 
                      0.05 * svrp_10 + 
                      0.075 * vrp_20 + 
                      0.075 * svrp_20 + 
                      0.15 * short_slope_forecast +
                      0.15 * sshort_slope_forecast +  
                      0.15 * mid_slope_forecast +
                      0.15 * smid_slope_forecast) * 1.4
    forecast = forecast_collar(short_forecast)
    
    record(forecast=forecast)
    
    # same signal across all four ETPs; short-vol products get the negation
    return pd.Series({ context.vxx: forecast,
                       context.xiv: -forecast,
                       context.vixy: forecast,
                       context.svxy: -forecast })

def is_deleveraging(context, sid, delta):
    """True when the proposed trade of `delta` shares opposes the sign of
    the currently held position, i.e. it reduces exposure."""
    held = context.portfolio.positions[sid].amount
    return np.sign(delta) != np.sign(held)

def expected_leverage(context, positions, closes):
    """Projected gross leverage: sum of |shares| * price over account equity."""
    gross_notional = (positions.abs() * closes).sum()
    return gross_notional / context.account.net_liquidation

def relative_position_change(context, x, desired_position):
    """Return (delta_shares, do_trade) for asset `x`.

    Trades only when the change exceeds the PositionInertiaLevel fraction
    of the current holding, or when it fully closes the position.
    """
    current = 0
    if x in context.portfolio.positions:
        current = context.portfolio.positions[x].amount
    delta = int(desired_position - current)
    if current != 0:
        off_by = abs(float(delta) / float(current))
    elif delta != 0.0:
        off_by = 1.0  # opening from flat always counts as a full change
    else:
        off_by = 0.0
    closing_out = (desired_position == 0) & (delta != 0)
    do_trade = (off_by > PositionInertiaLevel) | closing_out
    return (delta, do_trade)

def highbar_allocation(context, data):
    """Daily mid-morning rebalance for the vol-trading system.

    Refreshes the fetched index data, computes the forecast, sizes
    whole-share targets to the daily cash vol target, caps projected
    leverage, then issues market orders subject to position inertia.
    Anything held but no longer targeted is liquidated.

    Bug fix: on a failed fetch this previously executed `pass` and fell
    through; context.vix_vals is None in that case, so the reindex below
    would raise. Now it skips the day's rebalance entirely.
    """
    update_indices(context, data)
    record(fetch_failed = context.fetch_failed * 20)
    if context.fetch_failed:
        return
    daily_cash_volatility_target = PctDailyVolatilityTarget*context.portfolio.portfolio_value
    # we do not want the current day in here, we want yesterday with today's date to match
    # the VIX data and what we'll be getting in paper trading with stupid fetcher
    closes = data.history(context.Uni, 'price', History, '1d')
    closes.index = context.vix_vals.index
    closes = closes.shift(1)
    positions = highbar_calc_target_position(closes[context.Weights.index], 
                                             highbar_forecast(context, data, closes), 
                                             daily_cash_volatility_target,
                                             context.Weights).dropna()
    prices = closes.iloc[-1]
    # proportionally shrink targets that would breach the leverage cap
    exp_lev = expected_leverage(context, positions, prices)
    if (exp_lev != 0.0):
        adj_factor = min(1.0, LeverageCap / exp_lev)
    else:
        adj_factor = 0.0
    positions = positions * adj_factor
    for x in positions.index:
        if (not data.can_trade(x)):
            continue
        desired_position = positions[x]
        delta, do_trade = relative_position_change(context, x, desired_position)
        if do_trade:
            price = closes[x].iloc[-1] 
            delev = is_deleveraging(context, x, delta)
            buysell = "BUY" if (delta>0) else "SELL"
            # deleveraging is always allowed; adding exposure only under the cap
            if (delev | (context.account.leverage < LeverageCap)):
                log.info("%s %d %s @ MARKET (currently at %03.02f)" % (buysell, delta, x.symbol, price))
                order(x, delta, style=MarketOrder())
        else:
            log.info("Ignore %d %s, too little" % (delta, x.symbol))
    # liquidate holdings that dropped out of the target set
    for x in context.portfolio.positions.keys():
        if ((x not in positions.index) & (data.can_trade(x))):
            log.info("SELL ALL %s @ MARKET" % (x.symbol))
            order_target_percent(x, 0.0)

def highbar_cancel_all(context, data):
    """End-of-day cleanup: record current leverage and cancel any orders
    still open one minute before the close.

    Returns the set of sids whose orders were cancelled.
    """
    record(leverage=context.account.leverage)
    sids_cancelled = set()
    logged_cancel = False
    open_orders = get_open_orders()
    for security, orders in open_orders.iteritems():  
        for oo in orders:
            # log the warning only once per day, regardless of order count
            if (not logged_cancel):
                log.warn("Cancelling orders at close")
                logged_cancel = True
            sids_cancelled.add(oo.sid)
            cancel_order(oo)
    return sids_cancelled 
                         
def highbar_floor_corr(corr):
    """Floor negative correlations at zero, in place, and return the matrix."""
    negative_mask = corr < 0
    corr[negative_mask] = 0
    return corr

def highbar_std(returns):
    """Most recent exponentially-weighted std-dev of `returns`
    (half-life = VolHalfLife trading days)."""
    downside_only = False  # experiment toggle: estimate vol from losses only
    if downside_only:
        returns = returns.copy()
        returns[returns > 0.0] = np.nan
    ew_sd = pd.ewmstd(returns, halflife=VolHalfLife, adjust=True, ignore_na=True).dropna()
    return ew_sd.iloc[-1]

def highbar_calc_vol_scalar(prices, daily_cash_vol_target):
    """Number of share-blocks whose daily dollar volatility matches the
    daily cash volatility target (Carver-style position sizing)."""
    shares_per_block = 1.0  # plain equities: one share per block, no FX leg
    daily_log_rets = np.log(prices).diff().dropna()
    block_value = shares_per_block * prices.iloc[-1]
    # dollar volatility of one block = block value * daily price vol
    instrument_value_volatility = block_value * highbar_std(daily_log_rets)
    return daily_cash_vol_target / instrument_value_volatility

def highbar_calc_instrument_diversification_multiplier(prices, instrument_weights):
    """Diversification multiplier 1/sqrt(w'Cw), with correlations floored
    at zero to stay conservative."""
    daily_log_rets = np.log(prices).diff().dropna()
    floored = highbar_floor_corr(daily_log_rets.corr())
    portfolio_var = np.dot(instrument_weights, np.dot(floored, instrument_weights.T))
    return 1.0 / np.sqrt(portfolio_var)

def highbar_calc_target_position(prices, forecast, daily_cash_vol_target, instrument_weights):
    """Convert forecasts (average magnitude ~10) into whole-share targets."""
    vol_scalar = highbar_calc_vol_scalar(prices, daily_cash_vol_target)
    # a forecast magnitude of 10 corresponds to one volatility-scaled block
    subsystem_blocks = (forecast * vol_scalar) / 10.0
    # correlation-based IDM is disabled; the fixed module constant is used instead
    idm = DiversificationMultiplier
    target_blocks = subsystem_blocks * instrument_weights * idm
    return target_blocks.round()


##########

def update_indices(context, data):
    """Refresh the fetched CBOE index histories onto context.

    unpack_from_data sets context.fetch_failed on any miss. VVIX and VXST
    fetchers are currently disabled in initialize.
    """
    context.fetch_failed = False
    for attr, sym in (('vix_vals', 'VIX'), ('vxv_vals', 'VXV'), ('vxmt_vals', 'VXMT')):
        setattr(context, attr, unpack_from_data(context, data, sym))

def fix_close(df, closeField):
    """Normalize a CBOE CSV frame: rename `closeField` to 'Close', strip
    asterisk footnote markers from the dates, parse them to Timestamps,
    and sort ascending by date."""
    df = df.rename(columns={closeField: 'Close'})
    # CBOE marks some dates with '*'; strip before parsing
    cleaned = df['Date'].apply(lambda dt: re.sub('\*', '', dt))
    df['Date'] = cleaned.apply(lambda dt: pd.Timestamp(datetime.datetime.strptime(dt, '%m/%d/%Y')))
    df = df.sort(columns='Date', ascending=True)
    return df

def subsequent_trading_date(date):
    """Return the next exchange trading day strictly after `date`,
    according to zipline's trading calendar."""
    calendar_days = tradingcalendar.trading_days
    canonical = tradingcalendar.canonicalize_datetime(pd.to_datetime(date))
    return calendar_days[calendar_days.searchsorted(canonical) + 1]

def add_last_bar(df):
    """Append an empty row for the next trading day and shift everything
    forward one bar, so live-fetched data lines up with what the
    backtester delivers."""
    next_day = subsequent_trading_date(df.index[-1])
    placeholder = pd.Series({}, index=df.columns, name=next_day)
    return df.append(placeholder).shift(1).dropna(how='all')

def shift_data(df):
    """fetch_csv post_func: shift the frame one bar forward and pack the
    trailing `History` closes and dates into per-row CSV strings, to be
    decoded later by unpack_from_data.

    Bug fix: `df.fillna(method='ffill')` was called without assigning the
    result, making the forward-fill a no-op and leaving NaN holes.
    """
    log.info("Pre-Shift")
    df = add_last_bar(df)
    df = df.fillna(method='ffill')  # was discarded before; now applied
    df['PrevCloses'] = my_rolling_apply_series(df['Close'], to_csv_str, History)
    dates = pd.Series(df.index)
    dates.index = df.index
    df['PrevDates'] = my_rolling_apply_series(dates, to_csv_str, History)
    return df

def unpack_from_data(context, data, sym):
    """Rebuild the packed history Series for fetched symbol `sym`.

    Returns a float Series indexed by date, or None after setting
    context.fetch_failed when the packed columns are unavailable.

    Fix: narrowed the bare `except:` (which also swallowed SystemExit /
    KeyboardInterrupt) to `except Exception:`, and made the failure-path
    return value explicit.
    """
    try:
        packed_values = data.current(sym, 'PrevCloses')
        packed_dates = data.current(sym, 'PrevDates')
        return from_csv_strs(packed_dates, packed_values, True).apply(float)
    except Exception:
        log.warn("Unable to unpack historical {s} data.".format(s=sym))
        context.fetch_failed = True
        return None

def addFieldsVIX(df):
    """fetch_csv pre_func for the VIX CSV: normalize its 'VIX Close' column."""
    log.info("VIX: Pre-Massage")
    massaged = fix_close(df, 'VIX Close')
    log.info("VIX: Post-Massage")
    return massaged

def addFieldsVXST(df):
    """fetch_csv pre_func for the VXST CSV (fetcher currently disabled)."""
    log.info("VXST: Pre-Massage")
    massaged = fix_close(df, 'Close')
    log.info("VXST: Post-Massage")
    return massaged

def addFieldsVXMT(df):
    """fetch_csv pre_func for the VXMT CSV: normalize its 'Close' column."""
    log.info("VXMT: Pre-Massage")
    massaged = fix_close(df, 'Close')
    log.info("VXMT: Post-Massage")
    return massaged

def addFieldsVXV(df):
    """fetch_csv pre_func for the VXV CSV: label the unnamed date column,
    then normalize its 'CLOSE' column."""
    log.info("VXV: Pre-Massage")
    df.rename(columns={'Unnamed: 0': 'Date'}, inplace=True)
    massaged = fix_close(df, 'CLOSE')
    log.info("VXV: Post-Massage")
    return massaged

def addFieldsVVIX(df):
    """fetch_csv pre_func for the VVIX CSV (fetcher currently disabled)."""
    log.info("VVIX: Pre-Massage")
    massaged = fix_close(df, 'VVIX')
    log.info("VVIX: Post-Massage")
    return massaged

# convert a series of values to a comma-separated string of said values
def to_csv_str(s):
    """Encode the values of `s` as a comma-separated string.

    Improvements over the reduce-based original: str.join is linear
    rather than quadratic in the number of elements, and an empty input
    yields '' instead of raising TypeError.
    """
    return ','.join(str(v) for v in pd.Series(s))

# rolling apply for a Series of ANY element type (pandas.rolling_apply only
# handles numerics); the output is indexed by the last element of each window
def my_rolling_apply_series(s_in, f, n):
    """Apply `f` to each length-`n` window of `s_in`; result is indexed by
    the label of each window's final element."""
    windows = [s_in[start:start + n] for start in range(len(s_in) - n + 1)]
    result = pd.Series([f(w) for w in windows])
    result.index = s_in.index[n - 1:]
    return result

# inverse of to_csv_str: two csv-encoded strings -> a Series (index, values)
def from_csv_strs(x, y, idx_is_date):
    """Rebuild a Series from csv-encoded index string `x` and value string
    `y`; when `idx_is_date`, the index is parsed into Timestamps."""
    index_parts = x.split(',')
    value_parts = y.split(',')
    series = pd.Series(value_parts, index=index_parts)
    if idx_is_date:
        series.index = series.index.map(lambda raw: pd.Timestamp(raw))
    return series
There was a runtime error.

Obligatory post text.

Clone Algorithm
6
Loading...
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
Returns 1 Month 3 Month 6 Month 12 Month
Alpha 1 Month 3 Month 6 Month 12 Month
Beta 1 Month 3 Month 6 Month 12 Month
Sharpe 1 Month 3 Month 6 Month 12 Month
Sortino 1 Month 3 Month 6 Month 12 Month
Volatility 1 Month 3 Month 6 Month 12 Month
Max Drawdown 1 Month 3 Month 6 Month 12 Month
import numpy as np
import pandas as pd
from scipy import stats
from pytz import timezone
import datetime
import math
import time
import functools
import random
import itertools
from statsmodels.stats.moment_helpers import cov2corr, corr2cov, se_cov
from quantopian.algorithm import attach_pipeline, pipeline_output
from quantopian.pipeline import Pipeline
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline.data import morningstar
from quantopian.pipeline.factors import SimpleMovingAverage, Latest
from quantopian.pipeline import CustomFactor
from cvxopt import matrix
import cvxopt
cvxopt.solvers.options['show_progress'] = False

PctDailyVolatilityTarget = 0.01
#DiversificationMultiplier = 1.5
VolHalfLife = 5 * 2 # two weeks
PositionInertiaLevel = 0.1
LeverageCap = 2.5

def initialize(context):
    """One-time setup for the long-only global ETF allocation system:
    trading costs, benchmark (SPY), the weekly rebalance / daily cancel
    schedule, and the static asset-class weight table (weights sum to 1.0).
    """
    # set_symbol_lookup_date("2015-10-18")
    set_commission(commission.PerShare(cost=0.0035, min_trade_cost=0.35))
    set_slippage(slippage.FixedSlippage(spread=0.01))
    context.benchmark = sid(8554)
    set_benchmark(context.benchmark)

    #schedule_function(roguewave_allocation, date_rule=date_rules.week_start(), time_rule=time_rules.market_open(minutes=15))
    schedule_function(roguewave_allocation, date_rule=date_rules.week_start(days_offset=0), time_rule=time_rules.market_open(hours=0, minutes=15))
    schedule_function(roguewave_cancel_all, date_rule=date_rules.every_day(), time_rule=time_rules.market_close(hours=0, minutes=1))
    
    context.weights = pd.Series({
        # BONDS
        sid(22887): 0.20 , # EDV extended-duration USA treasury 
#        sid(23921): 0.2 , # TLT long-duration USA treasury 
#        # US TOTAL EQUITY            
        sid(22739): 0.125 , # VTI US equity 
#        # GLOBAL EQUITY
        sid(27101): 0.125 , # VPL pacific rim equity
        sid(27102): 0.125 , # VWO emerging markets equity
        sid(27100): 0.125 , # VGK europe equity
        # ALTS
        sid(26981): 0.1 , # IAU gold trust
        sid(26669): 0.1 , # VNQ USA reit
        sid(35997): 0.1 , # UCI commodities
#        sid(28054): 0.1 , # DBC commodities
        })
    #context.randstate = np.random.RandomState()
    
def cap(forecast):
    """Clamp forecast values into [-20, 20] in place and return the input."""
    too_high = forecast > 20.0
    too_low = forecast < -20.0
    forecast[too_high] = 20.0
    forecast[too_low] = -20.0
    return forecast

def before_trading_start(context, data):
    """Daily pre-open hook; only prints a marker (debugging aid)."""
    print "before_trading_start"
        
def handle_data(context, data):
    """Per-minute hook; intentionally a no-op (all work is scheduled)."""
    # print "handle_data"
    pass

def binary_prev_return_forecast(context, data, closes, n):
    """All-or-nothing momentum signal: +20 where the 5-day-smoothed
    trailing log return over n * 4 weeks is positive, else 0."""
    smoothed = pd.ewma(closes, span=5)
    lookback_days = 5 * 4 * n
    trailing_ret = np.log(smoothed).diff(lookback_days)
    return (trailing_ret.iloc[-1] > 0.0) * 20.0
       
def forecast_pack(context, data, closes, f):
    """Equal-weight average of forecaster `f` across lookback multipliers
    2..6, down-weighted by 0.85 to hit the target average forecast
    magnitude, then capped to [-20, 20].

    Fix: the local previously named `xrange` shadowed the builtin.
    """
    lookbacks = range(2, 7)
    weights = pd.Series(1.0 / len(lookbacks), index=lookbacks)
    fes = pd.DataFrame({i: f(context, data, closes, i) for i in lookbacks})
    raw_forecast = 0.85 * (fes.fillna(0) * weights).sum(axis=1) # down-weight to target avg forecast
    return cap(raw_forecast)

def floor_corr(corr):
    """Replace negative correlations with zero, in place, and return the matrix."""
    below_zero = corr < 0
    corr[below_zero] = 0
    return corr

def roguewave_forecast(context, data, closes):
    """Compute per-asset momentum forecasts and return (forecast, weights).

    The forecast Series is sorted descending in place (legacy pandas
    Series.sort API); weights are the static table from initialize.

    Fix: removed a dead self-assignment (`forecast = forecast`).
    """
    forecast = forecast_pack(context, data, closes, binary_prev_return_forecast)
    forecast.sort(ascending=False)
    weights = context.weights
    record(forecast_max=forecast.max())
    record(forecast_mean=forecast.abs().mean())
    return forecast, weights

def is_deleveraging(context, sid, delta):
    """A trade is deleveraging when its sign opposes that of the held position."""
    position_sign = np.sign(context.portfolio.positions[sid].amount)
    return position_sign != np.sign(delta)

def expected_leverage(context, positions, closes):
    """Projected gross leverage: |shares| * price summed, over net liquidation."""
    notional = positions.abs().mul(closes).sum()
    return notional / context.account.net_liquidation

def relative_position_change(context, x, desired_position):
    """Return (delta_shares, do_trade) for asset `x`; trade only when the
    change exceeds the PositionInertiaLevel fraction of the holding, or
    when it fully closes the position."""
    held = context.portfolio.positions[x].amount if (x in context.portfolio.positions) else 0
    delta = int(desired_position - held)
    if held != 0:
        off_by = abs(float(delta) / float(held))
    else:
        # opening from flat is always a 100% change
        off_by = 1.0 if (delta != 0.0) else 0.0
    closing_out = (desired_position == 0) & (delta != 0)
    do_trade = (off_by > PositionInertiaLevel) | closing_out
    return (delta, do_trade)

def roguewave_allocation(context, data):
    """Weekly rebalance: compute momentum forecasts, size whole-share
    targets to the daily cash volatility target, cap projected leverage,
    then issue market orders subject to position inertia. Anything held
    but not targeted is liquidated.
    """
    daily_cash_volatility_target = PctDailyVolatilityTarget*context.portfolio.portfolio_value
    closes = data.history(context.weights.index, 'price', 256, '1d')
    forecast, weights = roguewave_forecast(context, data, closes[context.weights.index])
    if (len(forecast)):
        positions = roguewave_calc_target_position(closes[forecast.index], 
                                                   forecast,     
                                                   daily_cash_volatility_target,
                                                   weights).dropna()
    else:
        positions = pd.Series()
        
    prices = closes.iloc[-1]
    # proportionally shrink targets that would breach the leverage cap
    exp_lev = expected_leverage(context, positions, prices)
    if (exp_lev != 0.0):
        adj_factor = min(1.0, LeverageCap / exp_lev)
    else:
        adj_factor = 0.0
    positions = positions * adj_factor
    for x in positions.index:
        if (not data.can_trade(x)):
            continue
        desired_position = positions[x]
        delta, do_trade = relative_position_change(context, x, desired_position)
        if (do_trade):
            price = closes[x].iloc[-1] 
            delev = is_deleveraging(context, x, delta)
            buysell = "BUY" if (delta>0) else "SELL"
            # deleveraging is always allowed; adding exposure only under the cap
            if (delev | (context.account.leverage < LeverageCap)):
                log.info("%s %d %s @ MARKET (currently at %03.02f)" % (buysell, delta, x.symbol, price))
                order(x, delta, style=MarketOrder())
        else:
            log.info("Ignore %d %s, too little" % (delta, x.symbol))
    # liquidate holdings that dropped out of the target set
    for x in context.portfolio.positions.keys():
        if ((x not in positions.index) & data.can_trade(x)): 
            log.info("SELL ALL %s @ MARKET" % (x.symbol))
            order_target_percent(x, 0.0)
            
def roguewave_cancel_all(context, data):
    """End-of-day cleanup: record current leverage and cancel any orders
    still open one minute before the close.

    Returns the set of sids whose orders were cancelled.
    """
    record(leverage=context.account.leverage)
    sids_cancelled = set()
    logged_cancel = False
    open_orders = get_open_orders()
    for security, orders in open_orders.iteritems():  
        for oo in orders:
            # log the warning at most once per day
            if (not logged_cancel):
                log.warn("Cancelling orders at close")
                logged_cancel = True
            sids_cancelled.add(oo.sid)
            cancel_order(oo)
    return sids_cancelled 

def roguewave_std(returns):
    """Latest exponentially-weighted std-dev of daily returns
    (half-life = VolHalfLife trading days)."""
    downside_only = False  # experiment toggle: semi-deviation from losses only
    if downside_only:
        returns = returns.copy()
        returns[returns > 0.0] = np.nan
    ew_sd = pd.ewmstd(returns, halflife=VolHalfLife, adjust=True, ignore_na=True).dropna()
    return ew_sd.iloc[-1]

def roguewave_calc_vol_scalar(prices, daily_cash_vol_target):
    """How many share-blocks give a daily dollar volatility equal to the
    daily cash volatility target."""
    shares_per_block = 1.0  # plain equities; no FX or contract multiplier
    daily_log_rets = np.log(prices).diff().dropna()
    block_value = shares_per_block * prices.iloc[-1]
    # dollar volatility of one block = block value * daily price vol
    value_vol = block_value * roguewave_std(daily_log_rets)
    return daily_cash_vol_target / value_vol

def roguewave_calc_instrument_diversification_multiplier(prices, instrument_weights):
    """Diversification multiplier 1/sqrt(w'Cw), correlations floored at zero."""
    daily_log_rets = np.log(prices).diff().dropna()
    corr_floored = floor_corr(daily_log_rets.corr())
    portfolio_var = np.dot(instrument_weights, np.dot(corr_floored, instrument_weights.T))
    return 1.0 / np.sqrt(portfolio_var)

def roguewave_calc_target_position(prices, forecast, daily_cash_vol_target, instrument_weights):
    """Turn forecasts (average magnitude ~10) into whole-share targets,
    scaled by the vol scalar and the diversification multiplier."""
    vol_scalar = roguewave_calc_vol_scalar(prices, daily_cash_vol_target)
    # a forecast magnitude of 10 maps to one volatility-scaled block
    subsystem_blocks = (forecast * vol_scalar) / 10.0
    idm = roguewave_calc_instrument_diversification_multiplier(prices, instrument_weights)
    record(diversification_multiplier=idm)
    target_blocks = subsystem_blocks * instrument_weights * idm
    return target_blocks.round()
There was a runtime error.

Notebook attempt 2, of the global ETF long-only "momentum" asset allocation.

Hang on give me a minute, it keeps trying to attach the previous notebook. Bloody hell.

There I think I got it. Some sort of caching layer of cell previews preventing the correct attachment. Probably some nosql something or other haha damn.

Loading notebook preview...

Why don't you use the pipeline to get the VIX? Isn't it easier? :-/

I never found pipeline vix reliable nor timely.

What you said is interesting. I also used fetch_csv() in the past, but Quantopian told me using the pipeline is much more reliable. :-/