from quantopian.pipeline.data.morningstar import Fundamentals as f
from quantopian.pipeline.data.factset import Fundamentals as FF
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline import CustomFactor
from quantopian.pipeline import Pipeline, CustomFilter
from quantopian.algorithm import attach_pipeline, pipeline_output
import numpy as np
from quantopian.pipeline.filters import QTradableStocksUS
from quantopian.pipeline.factors import SimpleMovingAverage as Sma
import quantopian.optimize as opt
# Target maximum gross leverage for the strategy.
# NOTE(review): not referenced anywhere in this chunk — possibly dead, or
# consumed by code outside the visible portion of the file; confirm before removing.
max_leverage = 1
def initialize(context):
    """Set up the benchmark, scheduled hooks, pipeline, and bookkeeping state."""
    set_benchmark(symbol('QQQ'))
    # Rebalance a few minutes after the open on the first trading day of each month.
    schedule_function(
        my_rebalance,
        date_rules.month_start(),
        time_rules.market_open(minutes=5),
    )
    # Record tracking variables at every market close.
    schedule_function(
        record_vars,
        date_rules.every_day(),
        time_rules.market_close(),
    )
    attach_pipeline(make_pipeline(), 'my_pipeline')
    # Starts at -1 so the very first rebalance call bumps it to 0, which
    # triggers both the quarterly and the yearly weighting paths.
    context.month = -1
    # Daily portfolio-value history, appended by record_vars.
    context.value = []
class N100(CustomFilter):
    """Approximate Nasdaq-100 membership filter.

    A security passes when it:
      * lists Nasdaq ('NAS') as its primary exchange,
      * is not in the financial sector (Morningstar sector code 103),
      * has traded on every day of the 63-day (~3 calendar month) window,
      * averages at least 200k shares of daily volume, and
      * ranks among the 100 largest survivors by market cap.
    """
    inputs = [
        f.morningstar_sector_code,
        USEquityPricing.volume,
        f.market_cap,
        f.primary_exchange_id
    ]
    window_length = 63
    window_safe = True

    def compute(self, today, assets, out, sec, v, mcap, exch):
        # Nasdaq has to be the primary exchange.
        screen = exch[-1] == 'NAS'
        # Finance sector is excluded (Morningstar sector code 103).
        screen &= sec[-1] != 103
        # Must have traded on every day of the window (~three calendar months).
        screen &= ~np.isnan(v).any(axis=0)
        # Need an average daily volume of at least 200k shares.
        screen &= np.nanmean(v, axis=0) >= 200000
        # Drop unknown market caps before ranking: np.sort places NaNs at the
        # *end* of the sorted array, so they would occupy top-100 slots and
        # corrupt (or NaN out) the ranking cutoff below.
        screen &= ~np.isnan(mcap[-1])
        # Keep only the 100 largest of the survivors by market cap.
        caps = mcap[-1][screen]
        if caps.size > 100:
            # Guarded: the original indexed [-100] unconditionally, which
            # raises IndexError whenever fewer than 100 names pass the screen.
            cutoff = np.sort(caps)[-100]
            screen &= mcap[-1] >= cutoff
        out[:] = screen
class N100QuarterlyWeighting(CustomFactor):
    """Quarterly Nasdaq-100 style weighting: cap-weighted with concentration caps."""
    inputs = [f.market_cap]
    # NOTE(review): only the latest row (mcap[-1]) is read; window_length could
    # likely be 1 — confirm before changing, as it alters the factor's warm-up.
    window_length = 375

    def compute(self, today, assets, out, mcap):
        latest_cap = mcap[-1]
        # Stage 1: weight each name proportionally to market cap.
        weights = latest_cap / np.nansum(latest_cap)
        # Any single weight above 24% is cut down to 20%.
        weights[weights > .24] = .2
        # Stage 2: consider every name weighted above 4.5%; if that group
        # jointly exceeds 48%, rescale the group to a 40% combined weight.
        heavy = weights > .045
        heavy_total = np.nansum(weights[heavy])
        if heavy_total > .48:
            weights[heavy] *= .4 / heavy_total
        out[:] = weights
class N100YearlyWeighting(CustomFactor):
    """Annual Nasdaq-100 style re-weighting with top-5 concentration limits."""
    inputs = [f.market_cap]
    window_length = 375

    def compute(self, today, assets, out, mcap):
        latest_cap = mcap[-1]
        # Stage 1: market-cap weights, with any weight above 15% clipped to 14%.
        weights = latest_cap / np.nansum(latest_cap)
        weights[weights > .15] = .14
        # Stage 2 (per the index methodology): if the aggregate weight of the
        # five largest names is below 40%, Stage 1 weights stand. Otherwise:
        #   - the five largest names are rescaled to a combined 38.5%, and
        #   - every name outside the top five is capped at the lesser of 4.4%
        #     or the (recomputed) fifth-largest weight.
        fifth_largest = np.sort(weights)[-5]
        top5 = weights >= fifth_largest
        if np.nansum(weights[top5]) >= .4:
            weights[top5] *= .385 / np.nansum(weights[top5])
            fifth_largest = np.sort(weights)[-5]
            ceiling = min(.044, fifth_largest)
            over_ceiling = (weights > ceiling) & ~top5
            weights[over_ceiling] = ceiling
        out[:] = weights
def make_pipeline():
    """Build the pipeline: N100 membership plus quarterly and yearly weight columns."""
    # Index constituents are only re-selected once a year, so downsample the filter.
    members = N100().downsample('year_start')
    return Pipeline(
        columns={
            # The weighting itself is adjusted quarterly...
            'qwts': N100QuarterlyWeighting(mask=members),
            # ...and once a year a different weighting process applies.
            'ywts': N100YearlyWeighting(mask=members),
        },
        screen=members,
    )
def my_rebalance(context, data):
    """Monthly hook: re-target portfolio weights quarterly (yearly scheme once per year)."""
    context.month += 1
    # Only act every third month.
    if context.month % 3 != 0:
        return
    output = pipeline_output('my_pipeline')
    # Default to the quarterly weighting; once a year use the yearly scheme.
    target = output.ywts if context.month % 12 == 0 else output.qwts
    order_optimal_portfolio(
        objective=opt.TargetWeights(target),
        constraints=[],
    )
def record_vars(context, data):
    """Daily close hook: track portfolio value and record plotting diagnostics."""
    context.value.append(context.portfolio.portfolio_value)
    portval = np.array(context.value)
    days = len(portval)
    correlation = 0
    # The correlation to QQQ is only computed once at least 100 days of
    # history exist; from then on it spans the entire life of the backtest.
    # (The original expressed this gate as `max(len, 100)` + `len >= n`.)
    if days >= 100:
        benchmark = data.history(symbol('QQQ'), 'close', days, '1d').values
        correlation = np.corrcoef(benchmark, portval)[0, 1]
    record(positions=len(context.portfolio.positions))
    record(lever=context.account.leverage)
    record(correlation_to_QQQ=correlation)