Back to Community
deleted

deleted

Clone Algorithm
48
Loading...
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
Returns 1 Month 3 Month 6 Month 12 Month
Alpha 1 Month 3 Month 6 Month 12 Month
Beta 1 Month 3 Month 6 Month 12 Month
Sharpe 1 Month 3 Month 6 Month 12 Month
Sortino 1 Month 3 Month 6 Month 12 Month
Volatility 1 Month 3 Month 6 Month 12 Month
Max Drawdown 1 Month 3 Month 6 Month 12 Month
"""
This is a template algorithm on Quantopian for you to adapt and fill in.
"""
from quantopian.algorithm import attach_pipeline, pipeline_output
from quantopian.pipeline import Pipeline
from quantopian.pipeline.data.builtin import USEquityPricing
import numpy as np
import pandas as pd
import scipy as sp
from scipy.stats.mstats import winsorize
from scipy.stats import norm
import math
from quantopian.pipeline.data import morningstar, Fundamentals
from quantopian.pipeline.factors import CustomFactor, Returns
from datetime import datetime, timedelta
from quantopian.pipeline.filters import QTradableStocksUS
from quantopian.pipeline.filters.morningstar import Q500US, Q1500US
from quantopian.pipeline.classifiers.morningstar import Sector
from sklearn.linear_model import LinearRegression
import cvxopt as opt
from cvxopt import blas, solvers
solvers.options['show_progress'] = False


def initialize(context):
    """
    One-time setup: strategy parameters, scheduled jobs, and the pipeline.
    """
    # Portfolio-construction knobs.
    context.factor_percentile_short = 0.1  # short tail size; keep in (0, 0.5)
    context.factor_percentile_long = 0.1   # long tail size; keep in (0, 0.5)
    context.long_weight = 1.0
    context.short_weight = 1.0
    context.long_short = 'long_short'      # 'long', 'short', or long/short book
    context.payoff_rolling_window = 6      # months used to smooth payoff coefficients
    context.winsorize = 0.05

    # Universe and grouping configuration.
    context.base_universe = Q1500US()
    context.sector_field = Fundamentals.morningstar_industry_code.latest
    context.groupby_cols = ['sector']
    set_benchmark(symbol('SHY'))

    # Collect pipeline data shortly after the open on each month end ...
    schedule_function(collect_pipe_data,
                      date_rule=date_rules.month_end(),
                      time_rule=time_rules.market_open(minutes=15))
    # ... then rebalance fifteen minutes later.
    schedule_function(my_rebalance,
                      date_rule=date_rules.month_end(),
                      time_rule=time_rules.market_open(minutes=30))

    attach_pipeline(FCFtoICtrendPipe(context), 'fcf_to_ic_trend_pipeline')

    # Rolling store of monthly pipeline snapshots plus a pull counter.
    context.fcf_to_ic_trend_pipeline_data_list = []
    context.pipeline_pull_count = 0

    context.strategies = {'fcf_to_ic_trend': FactorStrategy('fcf_to_ic_trend')}


def collect_pipe_data(context, data):
    """Snapshot this month's pipeline output; once enough history has
    accumulated, rebuild the factor orders and record last period's return."""
    snapshot = pipeline_output('fcf_to_ic_trend_pipeline')
    snapshot.loc[:, 'date'] = get_datetime()
    snapshot.index.names = ['ticker']
    snapshot = (snapshot.reset_index()
                        .sort_values(['date', 'ticker'])
                        .set_index(['date', 'ticker']))

    # Keep only a bounded tail of monthly snapshots.
    context.fcf_to_ic_trend_pipeline_data_list.append(snapshot)
    keep = context.payoff_rolling_window + 10
    context.fcf_to_ic_trend_pipeline_data_list = \
        context.fcf_to_ic_trend_pipeline_data_list[-keep:]

    context.pipeline_pull_count += 1

    # Coefficients need window + 1 monthly observations before estimation.
    if context.pipeline_pull_count >= context.payoff_rolling_window + 1:
        stacked = pd.concat(context.fcf_to_ic_trend_pipeline_data_list)
        context.fcf_to_ic_trend_orders = factor_port(
            context, stacked,
            groupby_cols=context.groupby_cols,
            window_size=context.payoff_rolling_window)
        context.strategies['fcf_to_ic_trend'].RecordPortRet(
            data, context, context.fcf_to_ic_trend_orders)
              
            
        
def my_rebalance(context, data):
    """
    Place scheduled orders once at least one weight set has been recorded.
    """
    strategy = context.strategies['fcf_to_ic_trend']
    if strategy.weighted_orders:
        strategy.Trade(data, context, 1.0, context.fcf_to_ic_trend_orders)
    

class FCFtoICtrend(CustomFactor):
    """Z-score of the latest free-cash-flow / invested-capital ratio against
    four roughly quarterly observations over the trailing year."""
    inputs = [Fundamentals.fcf_per_share,
              Fundamentals.diluted_average_shares_earnings_reports,
              Fundamentals.invested_capital]
    window_length = 252

    def compute(self, today, assets, out, fcf_per_share, shares, ic):
        # Sample the ratio at ~quarterly lookbacks within the 252-day window.
        offsets = (-1, -66, -132, -198)
        ratios = [(fcf_per_share[i] * shares[i]) / ic[i] for i in offsets]
        mu = np.mean(ratios, axis=0)
        sigma = np.std(ratios, axis=0)
        # Standardize the most recent ratio against its own trailing history.
        out[:] = (ratios[0] - mu) / sigma


def FCFtoICtrendPipe(context):
    """Build the pipeline with the trend score, sector code and latest close,
    screened to base-universe names with all three values present."""
    score = FCFtoICtrend()
    close_px = USEquityPricing.close.latest

    pipe = Pipeline()
    pipe.add(score, 'score')
    pipe.add(context.sector_field, 'sector')
    pipe.add(close_px, 'close_px')

    screen = (context.base_universe
              & score.notnull()
              & context.sector_field.notnull()
              & close_px.notnull())
    pipe.set_screen(screen)

    return pipe


def factor_port(context, pipeline_data, groupby_cols, window_size):
    """Turn stacked monthly pipeline snapshots into target order weights.

    Pipeline per month: winsorize and Blom-transform the factor cross-section
    (within `groupby_cols` groups when given), regress monthly returns on the
    one-month-lagged transformed factors, smooth the coefficients with a
    `window_size` rolling mean, score the latest cross-section, and cut it
    into long/short tails normalized into a 'weight' column indexed by ticker.
    """
    # Flatten the (date, ticker) MultiIndex into ordinary columns.
    pipeline_data = pipeline_data.reset_index().rename(columns=\
                                                       {'level_0':'date','level_1':'ticker'})
    # One row per ticker per calendar month (last observation in the month).
    factor_port = pipeline_data.groupby(['ticker',pd.Grouper(key='date',freq='M')]).\
    last().drop('date',axis=1)

    # Month-over-month simple returns computed per ticker from close prices.
    rets = factor_port.groupby(level=0).apply(lambda x: x['close_px']/x['close_px'].shift() - 1)
    rets = pd.DataFrame(rets).reset_index(level=1).drop('ticker',axis=1).reset_index().\
    sort_values(['date','ticker']).set_index(['date','ticker']).\
    rename(columns={'close_px':'monthly_ret'})
    
    # Re-index the factor panel as (date, ticker) to align with the returns.
    factor_port = factor_port.reset_index().sort_values(['date','ticker']).\
    set_index(['date','ticker'])

    factor_port_reg = []
    
    if len(groupby_cols) > 0:
        # Winsorize and normal-score each month's cross-section within groups.
        for date in factor_port.index.get_level_values(0).unique():
            temp = factor_port.loc[date].groupby(groupby_cols).apply(lambda x: pd.DataFrame(winsorize(x,limits=(context.winsorize,context.winsorize),\
                                                 inclusive=(True,True),axis=0),index=x.index,columns=x.columns))
            temp = temp.groupby(groupby_cols).apply(blom_transform)
            temp['date'] = date
            factor_port_reg.append(temp)

        factor_port_reg = pd.concat(factor_port_reg).reset_index().drop(groupby_cols+['close_px'],axis=1).\
        set_index(['date','ticker'])
    else:
        # Same treatment on the full cross-section when no grouping is used.
        for date in factor_port.index.get_level_values(0).unique():
            temp = factor_port.loc[date].apply(lambda x: winsorize(x,limits=(context.winsorize,context.winsorize),inclusive=(True,True),axis=0))
            temp = blom_transform(temp)
            temp['date'] = date
            factor_port_reg.append(temp)

        factor_port_reg = pd.concat(factor_port_reg).reset_index().drop(['close_px'],axis=1).\
        set_index(['date','ticker'])        

    # Latest transformed cross-section: the one scored with forecast coefs.
    fcast_port_reg = factor_port_reg.loc[factor_port_reg.index.get_level_values(0)[-1]]
    
    # Lag the factors one month per ticker so each month's return is
    # regressed on the prior month's factor values.
    factor_port_reg = factor_port_reg.reset_index().sort_values(['ticker','date']).\
    set_index(['ticker','date']).groupby(level=0).shift().\
    reset_index().sort_values(['date','ticker']).\
    set_index(['date','ticker'])

    factor_port_reg.columns = [i+'_l1' for i in factor_port_reg.columns]
    factor_port_reg = pd.concat([factor_port_reg, rets],axis=1)    

    # One cross-sectional OLS per month; the first month has no lagged data.
    coefs = pd.DataFrame(index=factor_port_reg.index.get_level_values(0).unique()[1:],\
                 columns=factor_port_reg.drop('monthly_ret',axis=1).columns)

    lm = LinearRegression()
    for date in factor_port_reg.index.get_level_values(0).unique()[1:]:
        temp = factor_port_reg.loc[date]
        X = temp.dropna().drop('monthly_ret',axis=1)
        y = temp.dropna()['monthly_ret']
        lm.fit(X,y)
        coefs.loc[date] = lm.coef_

    # Smooth the payoff coefficients over the rolling window.
    coefs = coefs.rolling(window=window_size).mean().dropna()

    fcast_coefs = coefs.loc[factor_port_reg.index.get_level_values(0)[-1]]

    # Score = sum over factors of latest factor value * smoothed coefficient.
    order_weights = pd.DataFrame(np.asarray(fcast_port_reg)*np.asarray(fcast_coefs),\
                                 index=fcast_port_reg.index,\
                                 columns=fcast_port_reg.columns).sum(axis=1)
    
    # Re-map the scores onto normal quantiles before picking the tails.
    order_weights = blom_transform(order_weights)
    
    # Tickers whose score sits closest to the short/long percentile cutoffs;
    # used as label boundaries for the sorted-score slices below.
    lower = (order_weights - norm.ppf(context.factor_percentile_short)).abs().sort_values().index[0]
    upper = (order_weights - norm.ppf(1-context.factor_percentile_long)).abs().sort_values().index[0]
    
    if context.long_short == 'long':
        order_weights = order_weights.sort_values().loc[upper:]
        order_weights = pd.DataFrame(order_weights/order_weights.abs().sum(),columns=['weight'])
        order_weights *= context.long_weight
    elif context.long_short == 'short':
        order_weights = order_weights.sort_values().loc[:lower]
        order_weights = pd.DataFrame(order_weights/order_weights.abs().sum(),columns=['weight'])
        order_weights *= context.short_weight
    else:
        # Long/short book: each tail is normalized by its own gross exposure,
        # then scaled by the per-side weight.
        lower_weights = order_weights.sort_values().loc[:lower]
        lower_weights = lower_weights/lower_weights.abs().sum()
        lower_weights *= context.short_weight

        upper_weights = order_weights.sort_values().loc[upper:]
        upper_weights = upper_weights/upper_weights.abs().sum()
        upper_weights *= context.long_weight

        order_weights = pd.DataFrame(pd.concat([lower_weights, upper_weights],axis=0),columns=['weight'])
    
    return order_weights


def sdnorm_quantile(p):
    """Inverse CDF of the standard normal at probability p.

    Uses the identity Phi^{-1}(p) = sqrt(2) * erfinv(2p - 1).
    """
    scaled = 2*p - 1
    return sp.special.erfinv(scaled)*np.sqrt(2)


def blom_transform(data):
    """Map values to approximate standard-normal scores via Blom's rank
    formula (offset constant c = 3/8)."""
    c = 3/8
    n = len(data.dropna())  # rank denominator ignores missing values
    probs = (data.rank() - c)/(n - 2*c + 1)
    return sdnorm_quantile(probs)

# Lightweight record of a strategy's orders and realized statistics.
class Strategy:
    def __init__(self, name):
        """Create an empty strategy record identified by `name`."""
        self.name = name
        # Order books (raw vs. weighted) and realized return / variance
        # series, each a dict keyed by date.
        for bucket in ('raw_orders', 'weighted_orders',
                       'port_rets', 'port_vars'):
            setattr(self, bucket, {})
        
# Sub-Strategy class to implement specific rules
class FactorStrategy(Strategy):
    """Strategy that records realized returns/variances of previously stored
    weight frames (index: asset, column 'weight') and places orders from the
    latest one."""

    def RecordPortRet(self, data, context, order_weights):
        """Store today's weights, then log the realized log-return of the
        second-newest stored weight set into self.port_rets."""
        self.weighted_orders[get_datetime().date()] = order_weights
                
        # Prune stored orders older than roughly ten years.
        keys = set(self.weighted_orders.keys())
        unwanted = set([i for i in keys if i <= (get_datetime() + timedelta(days=-10*365.25)).date()])
        for unwanted_key in unwanted: del self.weighted_orders[unwanted_key]

        if len(self.weighted_orders.keys()) > 1:
            # Second-newest entry: the weights held since that date.
            last_date = sorted(self.weighted_orders.keys())[-2]
            last_orders = self.weighted_orders[last_date]
            # NOTE(review): bar_count=40 assumes <= 40 trading days between
            # rebalances (monthly schedule) — confirm if the schedule changes.
            price_data = data.history(last_orders.index.tolist(), fields = 'price', bar_count=40, frequency='1d')
            rets = price_data.loc[last_date:, :]
            rets = np.log(rets/rets.shift())
            # Weighted sum of daily log returns across assets and days.
            port_ret = rets.mul(last_orders['weight']).sum().sum()
            self.port_rets[last_day_of_month(last_date)] = port_ret         
            
    def RecordPortVar(self, data, context, order_weights):
        """Store today's raw weights, then log a realized-variance proxy
        (sum of squared daily portfolio log returns) into self.port_vars."""
        self.raw_orders[get_datetime().date()] = order_weights
                
        # Prune stored orders older than roughly ten years.
        keys = set(self.raw_orders.keys())
        unwanted = set([i for i in keys if i <= (get_datetime() + timedelta(days=-10*365.25)).date()])
        for unwanted_key in unwanted: del self.raw_orders[unwanted_key]

        if len(self.raw_orders.keys()) > 1:
            last_date = sorted(self.raw_orders.keys())[-2]
            last_orders = self.raw_orders[last_date]
            price_data = data.history(last_orders.index.tolist(), fields = 'price', bar_count=40, frequency='1d')
            var = price_data.loc[last_date:,:]
            var = np.log(var/var.shift())
            var = var.mul(last_orders['weight'])
            # Daily portfolio log return, squared, then summed over the period
            # (NaNs from the first shifted row are ignored by nansum).
            var = var.sum(axis=1)
            var = var**2
            var = var.values
            var = np.nansum(var)
            self.port_vars[last_day_of_month(last_date)] = var
        
    
    def Trade(self, data, context, allocation, order_weights):
        """Exit positions missing from `order_weights`, then size each target
        at allocation * weight."""
        if allocation > 0:
            # Flatten any position no longer in the target book.
            for stock in context.portfolio.positions:
                if stock not in order_weights.index:
                   order_target(stock, 0)
            # Size each target; existing positions are moved to the target
            # percent, new names get a fresh percent order.
            for stock, weight in zip(order_weights.index, order_weights['weight']):
                if stock in context.portfolio.positions:
                    order_target_percent(stock, allocation*weight)
                else:
                    order_percent(stock, allocation*weight)

                

def last_day_of_month(any_day):
    """Return the date of the final calendar day of any_day's month."""
    # Day 28 exists in every month, so +4 days always lands in the next month;
    # stepping back by that day-of-month gives the previous month's last day.
    rolled = any_day.replace(day=28) + timedelta(days=4)
    return rolled - timedelta(days=rolled.day)

        
standardize = lambda x: (x-x.mean())/x.std()   
#
There was a runtime error.
1 response
    context.long_weight = 1.0  
    context.short_weight = 1.0  

FYI: with both long and short weights at 1.0 this runs at roughly 2x gross leverage — ignore me if that is by design.

def before_trading_start(context, data):  
    record(lev = context.account.leverage)