Back to Community
Didier Sornette's Strategy to Exploit Return Correlations

This strategy is outlined in Didier Sornette's book, "Why Stock Markets Crash". It tries to exploit return correlations at very short (minute) intervals. I was thinking it could potentially be improved upon. I am a little skeptical that it would ever be profitable once transaction costs and slippage are included. However, it is fun to see that return correlations exist.

Please visit my stack exchange post for a rundown of how I am calculating m_t or to see an overview of the strategy.

Clone Algorithm
28
Loading...
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
Returns 1 Month 3 Month 6 Month 12 Month
Alpha 1 Month 3 Month 6 Month 12 Month
Beta 1 Month 3 Month 6 Month 12 Month
Sharpe 1 Month 3 Month 6 Month 12 Month
Sortino 1 Month 3 Month 6 Month 12 Month
Volatility 1 Month 3 Month 6 Month 12 Month
Max Drawdown 1 Month 3 Month 6 Month 12 Month
import numpy as np

def initialize(context):
    """
    Run once at the start of the backtest: configure trading frictions,
    the benchmark, and the state used by handle_data.
    """
    # Zero out frictions so the raw signal can be observed in isolation.
    set_commission(commission.PerTrade(cost=0.00))
    set_slippage(slippage.FixedSlippage(spread=0.00))
    set_benchmark(sid(8554))

    # Lookback time interval (minutes) for the return signal.
    context.tau = 2
    # Rolling history of tau-minute percentage returns.
    context.r = []
    # Security traded and measured: SPY.
    context.s = sid(8554)

    # schedule_function(
    #     func=my_rebalance,
    #     date_rule=date_rules.every_day(),
    #     time_rule=time_rules.market_open(minutes=1))

def before_trading_start(context, data):
    """
    Daily pre-market hook; this strategy needs no per-day setup.
    """
    pass


def my_assign_weights(context, data):
    """
    Placeholder for assigning portfolio weights; intentionally a no-op.
    """
    pass


def my_record_vars(context, data):
    """
    Placeholder for end-of-day plotting of variables; intentionally a no-op.
    """
    pass


#def my_rebalance(context,data):
def handle_data(context, data):
    """
    Called every minute.

    Computes the tau-minute percentage return of SPY, keeps a rolling
    history of those returns, and once tau**2 observations exist builds a
    tau x tau matrix of lagged return windows to form the predictor m_t
    (see the author's Stack Exchange post for the derivation). Goes fully
    long when m_t > 0, fully short when m_t < 0, and flat otherwise.
    """
    prices = data.history(context.s, fields="price",
                          bar_count=(context.tau + 1), frequency="1m")
    # .iloc replaces the deprecated (and since-removed) pandas .ix
    # accessor; the indexing here is purely positional.
    pct_change = (prices.iloc[-1] - prices.iloc[0]) / prices.iloc[0]
    context.r.append(pct_change)

    # Keep only the history the signal can use so the list does not grow
    # without bound over a long backtest. The matrix below reads at most
    # the last 2*tau - 1 returns, which is <= tau**2 for tau >= 1.
    max_len = context.tau ** 2
    if len(context.r) > max_len:
        del context.r[:-max_len]

    if len(context.r) >= max_len:
        # Matrix of lagged return windows for time-lag "tau",
        # most recent return first.
        reverse_price_list = context.r[::-1]
        tau_matrix = [reverse_price_list[i:context.tau + i]
                      for i in range(context.tau)]

        # Only invert when the matrix is non-singular.
        # NOTE(review): comparing a float determinant to exactly 0 is
        # fragile; consider catching np.linalg.LinAlgError instead.
        detr = np.linalg.det(tau_matrix)

        if detr != 0:
            inv_matrix = np.linalg.inv(tau_matrix)

            # Sum of off-diagonal contributions, scaled by the leading
            # entry of the inverse, yields the predictor m_t.
            r_arr = [inv_matrix[i + 1][0] * tau_matrix[0][i + 1]
                     for i in range(context.tau - 1)]
            m_t = sum(r_arr) / inv_matrix[0][0]

            if m_t > 0:
                order_target_percent(context.s, 1.0)
            elif m_t < 0:
                order_target_percent(context.s, -1.0)
            else:
                order_target_percent(context.s, 0)
There was a runtime error.
1 response

What are the parameters you use for the following? Thanks.

tc=
A=
B=
CC=
W=
DW=
DT=
ALPHA=