Oil Calendar spreads - a story of slippage

Hello everyone, having spent this holiday season trying to figure out how to trade calendar spreads in CL contracts, I thought I should share the results with you. As you will see, there is nothing earth-shatteringly complex about this algo; it's just a standard linear mean-reversion trading strategy on CL contracts.

My initial thought was to trade the one-year spread (i.e. the front month against the 11th or 12th contract), which showed some promise in the research environment. However, upon researching the topic further, I realized that there is virtually zero liquidity in the 11th/12th contracts, so I was forced to implement it using nearer spreads.

As you can see, I have tried to impose volume restrictions on the algo, i.e. it does not trade if the average volume over the last few days (three in the attached code) is below a certain threshold. It would be nice to hear if any of you have other ideas for how to model liquidity (model it as a time series?) to make sure we are actually trading the spread and not just taking positions in the front month.
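To make the question a bit more concrete, here is the kind of thing I had in mind, just a sketch and not part of the attached algo (the helper name and the span/threshold values are made up): gate trading on an exponentially weighted average of the thinner leg's volume instead of a short simple mean, so one quiet day does not flip the filter but a genuinely drying-up leg still switches it off.

# Sketch only -- not from the backtest below. Gate on an EWMA of the
# thinner leg's volume rather than a short simple average.
import numpy as np
import pandas as pd

def liquidity_ok(volume, span=10, min_vol=500):
    """volume: DataFrame of daily contract volumes, one column per leg."""
    smoothed = volume.ewm(span=span).mean()
    # Require the *least* liquid leg to clear the threshold on the latest day.
    return smoothed.iloc[-1].min() > min_vol

# Example with made-up numbers: a liquid front leg and a thin back leg.
vol = pd.DataFrame({"leg1": np.random.randint(200000, 300000, 30),
                    "leg2": np.random.randint(100, 400, 30)})
print(liquidity_ok(vol))  # False -- the back leg's smoothed volume stays below 500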

Other feedback is also greatly appreciated, although since the algo is very straightforward I doubt there is much to discuss in terms of implementation.

Best

Magnus

[Backtest attached; performance metrics did not load. Source below.]
"""
This is a template algorithm on Quantopian for you to adapt and fill in.
"""

import quantopian.algorithm as algo
import quantopian.optimize as opt
from quantopian.algorithm import order_optimal_portfolio

from sklearn.linear_model import LinearRegression
import numpy as np
import pandas as pd


MAX_GROSS_EXPOSURE = 1.0
MAX_SHORT_POSITION_SIZE = 1.0  # defined but not enforced as a constraint below
MAX_LONG_POSITION_SIZE = 1.0   # defined but not enforced as a constraint below

# Scheduling parameters
MINUTES_AFTER_OPEN_TO_TRADE = 25
BASE_UNIVERSE_RECALCULATE_FREQUENCY = 'close'



def update_parameters(context, data):
    # Refit the hedge ratio and the lookback once a month from the last
    # `context.window` days of continuous-future prices.
    prices = data.history(context.future_list, 'price', context.window, '1d').dropna()

    rets = np.log(prices).diff().fillna(0)

    # Hedge ratio between the two legs, estimated on log returns.
    context.b = find_coint_coef(prices, context)

    # Residual of the hedged spread under the new ratio.
    resid = (rets.iloc[:, 0] * context.b - rets.iloc[:, 1]).dropna()

    # Lookback heuristic: ln(2) divided by the R/S slope, scaled by context.ml.
    context.lb = int((np.log(2) / m_hurst(resid)) * context.ml)

    record(lookback=context.lb)
    record(regrCoef=context.b)
    

def initialize(context):
    """
    Called once at the start of the algorithm.
    """
    cl_lst = []
    context.maxFuture = 20
    set_benchmark(sid(28320))

    # Front month and the contract three offsets out on the CL chain.
    cl_lst.append(continuous_future('CL', offset=0, roll='calendar', adjustment="mul"))
    cl_lst.append(continuous_future('CL', offset=3, roll='calendar', adjustment="mul"))
    context.window = 250
    context.future_list = cl_lst
    context.regr = LinearRegression()
    context.b = 0.86   # initial hedge ratio, refit monthly
    context.ml = 2     # lookback multiplier
    context.lb = 25    # initial lookback in days, refit monthly

    context.min_vol = 500  # minimum average daily volume required to trade

    # Rebalance every day, shortly after the open.
    algo.schedule_function(
        do_portfolio_construction,
        date_rule=algo.date_rules.every_day(),
        time_rule=algo.time_rules.market_open(minutes=MINUTES_AFTER_OPEN_TO_TRADE),
        half_days=False
    )

    # Refit hedge ratio and lookback at each month end.
    algo.schedule_function(
        update_parameters,
        date_rule=algo.date_rules.month_end(),
        time_rule=algo.time_rules.market_open(minutes=MINUTES_AFTER_OPEN_TO_TRADE),
        half_days=False
    )
        
        
        

    
    
def prepair_alpha(context, data):
    # Daily opens over the current lookback, plus one day for differencing.
    prices = data.history(context.future_list, 'open', context.lb + 1, '1d').dropna()

    # Liquidity filter: only trade if the 3-day average volume of the thinner
    # leg exceeds context.min_vol.
    vol = data.history(context.future_list, "volume", 3, "1d").dropna()
    avg_vol = vol.mean(axis=0)
    min_vol = np.min(avg_vol)
    record(minivolume=min_vol)
    cutOff = min_vol > context.min_vol

    # Z-score of the hedged spread residual over the lookback window.
    rets = np.log(prices).diff().dropna()
    resid = rets.iloc[:, 0] * context.b - rets.iloc[:, 1]
    stdev = resid.std()
    upper = resid.mean() - resid
    zScore = upper / stdev

    # Signal is the latest z-score, zeroed out when the liquidity filter fails.
    zScore = zScore.iloc[-1] * cutOff

    # Map the continuous futures to their current contracts and take opposite
    # positions in the two legs.
    assets = [data.current(x, "contract") for x in rets.columns]
    output = pd.Series(np.array([zScore, -zScore]), index=assets)

    return output
    
  
    
def find_coint_coef(pricing, context):
    # Hedge ratio: regress the second leg's log returns on the first leg's.
    rets = np.log(pricing).diff().dropna()

    x = rets.iloc[:, 0].values.reshape(-1, 1)
    y = rets.iloc[:, 1].values.reshape(-1, 1)

    context.regr.fit(x, y)

    return float(context.regr.coef_)
    
    
    
def m_hurst(X):
    # Rescaled-range (R/S) analysis of the residual series. For each window
    # length, S_T is the running standard deviation and R_T the range of the
    # cumulative demeaned series; the fitted slope of R/S against n is used
    # above as a proxy for the speed of mean reversion.
    X = np.array(X)
    N = X.size
    T = np.arange(1, N + 1)
    Y = np.cumsum(X)
    Ave_T = Y / T

    S_T = np.zeros(N)
    R_T = np.zeros(N)

    for i in range(N):
        S_T[i] = np.std(X[:i + 1])
        X_T = Y - T * Ave_T[i]
        R_T[i] = np.ptp(X_T[:i + 1])

    # Skip the first point, where the running std is zero.
    R_S = R_T[1:] / S_T[1:]
    n = T[1:]
    A = np.column_stack((n, np.ones(n.size)))
    m, c = np.linalg.lstsq(A, R_S)[0]
    return m


def do_portfolio_construction(context, data):
    context.alpha = prepair_alpha(context, data)

    order_optimal_portfolio(
        opt.TargetWeights(context.alpha),
        constraints=[opt.MaxGrossExposure(MAX_GROSS_EXPOSURE)])

    record(pos1=context.portfolio.positions[context.alpha.index[0]].amount)
    record(pos2=context.portfolio.positions[context.alpha.index[1]].amount)

def handle_data(context,data):
    """
    Called every minute.
    """
    pass
2 responses

Hi,

What is your m_hurst function calculating and why is that useful? I've never heard of that before. How and why are you using that?

I ran your algorithm over a longer time period. There are stretches where this approach is useful and other stretches where it does not work as well. I wonder if you could detect the current market regime (maybe a simple two-state model estimated with an HMM) and then turn this mean-reversion strategy on and off accordingly.
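To make that concrete, something along the lines of the sketch below. hmmlearn is not importable in the IDE as far as I know, so this would live in a research notebook, and the function name, state count and "calm state" rule are just illustrative:

# Rough sketch, not production code: fit a two-state Gaussian HMM on the
# spread residuals and only run the mean-reversion trade in the calmer state.
import numpy as np
from hmmlearn.hmm import GaussianHMM

def in_mean_reverting_regime(resid, n_states=2):
    """resid: 1-D array of daily spread residuals."""
    X = np.asarray(resid, dtype=float).reshape(-1, 1)
    model = GaussianHMM(n_components=n_states, covariance_type="full", n_iter=100)
    model.fit(X)
    states = model.predict(X)
    # Label the lower-variance state as the "mean-reverting" regime.
    calm = np.argmin([model.covars_[s].ravel()[0] for s in range(n_states)])
    return states[-1] == calm

# Example on synthetic residuals:
resid = np.random.normal(0, 0.01, 500)
print(in_mean_reverting_regime(resid))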

[Attached backtest of the same algorithm over a longer period; the source is identical to the code above and the performance metrics did not load.]
"""
This is a template algorithm on Quantopian for you to adapt and fill in.
"""

import quantopian.algorithm as algo
import quantopian.optimize as opt

from sklearn import cross_validation
from sklearn.linear_model import LinearRegression,Lasso
import statsmodels.api as sm
from quantopian.algorithm import order_optimal_portfolio
import numpy as np
import pandas as pd
import quantopian.optimize as opt
import scipy as sp
import talib


MAX_GROSS_EXPOSURE = 1.0
MAX_SHORT_POSITION_SIZE = 1.0  # 1.5%
MAX_LONG_POSITION_SIZE = 1.0  # 1.5%

# Scheduling Parameters
MINUTES_AFTER_OPEN_TO_TRADE = 25
BASE_UNIVERSE_RECALCULATE_FREQUENCY = 'close'



def update_parameters(context,data):
    
    prices = data.history(context.future_list, 'price', context.window, '1d').dropna()
     
    rets = np.log(prices).diff().fillna(0)
    
    context.b = find_coint_coef(prices,context)
    
    resid = rets.iloc[:,0]*context.b-rets.iloc[:,1]
    
    resid = resid.dropna()
    
    context.lb = int((np.log(2)/m_hurst(resid))*context.ml)
    
    record(lookback=context.lb)
    record(regrCoef = context.b)
    

def initialize(context):
    """
    Called once at the start of the algorithm.
    """   
    # Rebalance every day, 1 hour after market open.
    cl_lst = []
    context.maxFuture = 20
    set_benchmark(sid(28320))
    
    cl_lst.append(continuous_future('CL', offset=0, roll='calendar', adjustment="mul"))
    cl_lst.append(continuous_future('CL', offset=3, roll='calendar', adjustment="mul"))
    context.window = 250
    context.future_list = cl_lst
    regr = LinearRegression()
    context.regr = regr
    context.b = 0.86
    context.ml = 2
    context.lb = 25
    
    context.min_vol = 500
    
    # Create our dynamic stock selector.
    
    algo.schedule_function(
        do_portfolio_construction,
        date_rule=algo.date_rules.every_day(),
        time_rule=algo.time_rules.market_open(minutes=MINUTES_AFTER_OPEN_TO_TRADE),
        half_days=False
    )
    
    algo.schedule_function(
        update_parameters,
        date_rule=algo.date_rules.month_end(),
        time_rule=algo.time_rules.market_open(minutes=MINUTES_AFTER_OPEN_TO_TRADE),
        half_days=False
    )
        
        
        

    
    
def prepair_alpha(context,data):
    
    prices = data.history(context.future_list, 'open', context.lb+1, '1d').dropna()
    
    vol = data.history(context.future_list,"volume",3,"1d").dropna()
   
    avg_vol = vol.mean(axis=0)
    
    min_vol = np.min(avg_vol)
    
    record(minivolume = min_vol)
    
    cutOff = min_vol > context.min_vol
    
    rets = np.log(prices).diff().dropna()
    
    resid = rets.iloc[:,0]*context.b - rets.iloc[:,1]
    
    stdev = resid.std()
    
    upper = resid.mean()-resid
    
    zScore = upper/stdev
    
    zScore = zScore[-1]*cutOff
    
    assets = [data.current(x,"contract") for x in rets.columns]
    
    output = pd.Series(np.array([zScore,-zScore]),index=assets)
    
    return output
    
  
    
def find_coint_coef(pricing,context):
    
    rets = np.log(pricing).diff().dropna()
    
    xRet = rets.iloc[:,0]
    yRet = rets.iloc[:,1]
    
    x = xRet.reshape(len(rets),1)

    y = yRet.reshape(len(rets),1)
    
    context.regr.fit(x,y)
    
    b = float(context.regr.coef_)
    
    return b
    
    
    
def m_hurst(X):
    
    X = np.array(X)
    N = X.size
    T = np.arange(1, N + 1)
    Y = np.cumsum(X)
    Ave_T = Y / T

    S_T = np.zeros(N)
    R_T = np.zeros(N)
    
    np_std = np.std
    np_ptp = np.ptp
        
    for i in range(N):
        S_T[i] = np_std(X[:i + 1])
        X_T = Y - T * Ave_T[i]
        R_T[i] = np_ptp(X_T[:i + 1])
        
    R_S = R_T / S_T
    R_S = (R_S)[1:]
    n = (T)[1:]
    A = np.column_stack((n, np.ones(n.size)))
    [m, c] = np.linalg.lstsq(A, R_S)[0]
    H = m
    return H


def do_portfolio_construction(context, data):
     
    
    
    context.alpha = prepair_alpha(context,data)

    order_optimal_portfolio(
        opt.TargetWeights(context.alpha),
        constraints=[opt.MaxGrossExposure(1.0)])
    
    record(pos1 = context.portfolio.positions[context.alpha.index[0]].amount)
    record(pos2= context.portfolio.positions[context.alpha.index[1]].amount) 

def handle_data(context,data):
    """
    Called every minute.
    """
    pass
There was a runtime error.

Hi, the Hurst parameter relates to the half-life of the mean-reversion process; I think other threads here discuss this approach in more detail.
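For reference, the more common way to get a half-life is from an AR(1)/OU fit of the spread itself; the ln(2)-over-Hurst lookback in the algo is only a rough stand-in for something like the sketch below (the helper is made up for illustration, it is not in the attached code):

# Standard half-life estimate for a mean-reverting (OU-like) series:
# regress the change in the spread on its lagged level; half-life = -ln(2)/b.
import numpy as np

def half_life(spread):
    spread = np.asarray(spread, dtype=float)
    lagged = spread[:-1]
    delta = np.diff(spread)
    # OLS of delta on the lagged level plus an intercept.
    A = np.column_stack((lagged, np.ones(lagged.size)))
    b, c = np.linalg.lstsq(A, delta)[0]
    return -np.log(2) / b  # only meaningful when b < 0 (mean reversion)

# Example on a synthetic AR(1) series with phi = 0.9:
x = np.zeros(500)
for t in range(1, 500):
    x[t] = 0.9 * x[t - 1] + np.random.normal()
print(half_life(x))  # roughly ln(2) / 0.1, i.e. about 7 days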

Yeah, it seems like the approach only works some of the time, but how much of that is due to low liquidity in the 3rd contract?

To illustrate what I mean, consider the 1-12 spread without any volume filter below (we are essentially just trading the front-month future, since the back-month orders are not getting filled).

[Attached backtest: 1-12 spread with the volume filter disabled; performance metrics did not load. Source below.]
"""
This is a template algorithm on Quantopian for you to adapt and fill in.
"""

import quantopian.algorithm as algo
import quantopian.optimize as opt
from quantopian.algorithm import order_optimal_portfolio

from sklearn.linear_model import LinearRegression
import numpy as np
import pandas as pd


MAX_GROSS_EXPOSURE = 1.0
MAX_SHORT_POSITION_SIZE = 1.0  # defined but not enforced as a constraint below
MAX_LONG_POSITION_SIZE = 1.0   # defined but not enforced as a constraint below

# Scheduling parameters
MINUTES_AFTER_OPEN_TO_TRADE = 25
BASE_UNIVERSE_RECALCULATE_FREQUENCY = 'close'



def update_parameters(context, data):
    # Refit the hedge ratio and the lookback once a month from the last
    # `context.window` days of continuous-future prices.
    prices = data.history(context.future_list, 'price', context.window, '1d').dropna()

    rets = np.log(prices).diff().fillna(0)

    # Hedge ratio between the two legs, estimated on log returns.
    context.b = find_coint_coef(prices, context)

    # Residual of the hedged spread under the new ratio.
    resid = (rets.iloc[:, 0] * context.b - rets.iloc[:, 1]).dropna()

    # Lookback heuristic: ln(2) divided by the R/S slope, scaled by context.ml.
    context.lb = int((np.log(2) / m_hurst(resid)) * context.ml)

    record(lookback=context.lb)
    record(regrCoef=context.b)
    

def initialize(context):
    """
    Called once at the start of the algorithm.
    """
    cl_lst = []
    set_benchmark(sid(28320))

    # Front month and the contract eleven offsets out on the CL chain.
    cl_lst.append(continuous_future('CL', offset=0, roll='calendar', adjustment="mul"))
    cl_lst.append(continuous_future('CL', offset=11, roll='calendar', adjustment="mul"))
    context.window = 250
    context.future_list = cl_lst
    context.regr = LinearRegression()
    context.b = 0.86   # initial hedge ratio, refit monthly
    context.ml = 2     # lookback multiplier
    context.lb = 25    # initial lookback in days, refit monthly

    context.min_vol = 0  # volume filter effectively disabled for this run

    # Rebalance every day, shortly after the open.
    algo.schedule_function(
        do_portfolio_construction,
        date_rule=algo.date_rules.every_day(),
        time_rule=algo.time_rules.market_open(minutes=MINUTES_AFTER_OPEN_TO_TRADE),
        half_days=False
    )

    # Refit hedge ratio and lookback at each month end.
    algo.schedule_function(
        update_parameters,
        date_rule=algo.date_rules.month_end(),
        time_rule=algo.time_rules.market_open(minutes=MINUTES_AFTER_OPEN_TO_TRADE),
        half_days=False
    )
        
        
        

    
    
def prepair_alpha(context, data):
    # Daily opens over the current lookback, plus one day for differencing.
    prices = data.history(context.future_list, 'open', context.lb + 1, '1d').dropna()

    # Liquidity filter: only trade if the 3-day average volume of the thinner
    # leg exceeds context.min_vol.
    vol = data.history(context.future_list, "volume", 3, "1d").dropna()
    avg_vol = vol.mean(axis=0)
    min_vol = np.min(avg_vol)
    record(minivolume=min_vol)
    cutOff = min_vol > context.min_vol

    # Z-score of the hedged spread residual over the lookback window.
    rets = np.log(prices).diff().dropna()
    resid = rets.iloc[:, 0] * context.b - rets.iloc[:, 1]
    stdev = resid.std()
    upper = resid.mean() - resid
    zScore = upper / stdev

    # Signal is the latest z-score, zeroed out when the liquidity filter fails.
    zScore = zScore.iloc[-1] * cutOff

    # Map the continuous futures to their current contracts and take opposite
    # positions in the two legs.
    assets = [data.current(x, "contract") for x in rets.columns]
    output = pd.Series(np.array([zScore, -zScore]), index=assets)

    return output
    
  
    
def find_coint_coef(pricing, context):
    # Hedge ratio: regress the second leg's log returns on the first leg's.
    rets = np.log(pricing).diff().dropna()

    x = rets.iloc[:, 0].values.reshape(-1, 1)
    y = rets.iloc[:, 1].values.reshape(-1, 1)

    context.regr.fit(x, y)

    return float(context.regr.coef_)
    
    
    
def m_hurst(X):
    # Rescaled-range (R/S) analysis of the residual series. For each window
    # length, S_T is the running standard deviation and R_T the range of the
    # cumulative demeaned series; the fitted slope of R/S against n is used
    # above as a proxy for the speed of mean reversion.
    X = np.array(X)
    N = X.size
    T = np.arange(1, N + 1)
    Y = np.cumsum(X)
    Ave_T = Y / T

    S_T = np.zeros(N)
    R_T = np.zeros(N)

    for i in range(N):
        S_T[i] = np.std(X[:i + 1])
        X_T = Y - T * Ave_T[i]
        R_T[i] = np.ptp(X_T[:i + 1])

    # Skip the first point, where the running std is zero.
    R_S = R_T[1:] / S_T[1:]
    n = T[1:]
    A = np.column_stack((n, np.ones(n.size)))
    m, c = np.linalg.lstsq(A, R_S)[0]
    return m


def do_portfolio_construction(context, data):
    context.alpha = prepair_alpha(context, data)

    order_optimal_portfolio(
        opt.TargetWeights(context.alpha),
        constraints=[opt.MaxGrossExposure(MAX_GROSS_EXPOSURE)])

    record(pos1=context.portfolio.positions[context.alpha.index[0]].amount)
    record(pos2=context.portfolio.positions[context.alpha.index[1]].amount)

def handle_data(context,data):
    """
    Called every minute.
    """
    pass