Global market rotation strategy buggy implementation

Hi,

This is my draft implementation of global market rotation strategy. Can you help me to find mistakes in it? It looks like I'm calculating volatility incorrectly and most probably there are other bugs as well.

For the ranking I calculate the 3 month performance of all ETF's and normalise between 0-1. The best will have 1. Then I calculate the medium 3 month 20 day volatility and also normalize from 0-1.
Then I used Ranking= 0.7*performance +0.3*volatility.
This will give me a ranking from 0-1 from which I will take the best.

Thank you,
Ed

237
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Strategy
import math
import pandas

def initialize(context):
context.stocks = {12915: sid(12915), 21769: sid(21769),
24705: sid(24705), 23134: sid(23134),
23118: sid(23118), 22887: sid(22887),
26807: sid(26807)}
context.month = None
context.period = 72 # 3 months period

def _order(stock, amount, price):
if amount != 0:
order(stock, amount)
log.info("%s %d shares of %s = %.2f" % \
stock.symbol, abs(price*amount)))

@batch_transform(window_length=73)
def get_metrics(dp, security, period):
'''Calculate performance and volatility for given period.'''
prices = dp['close_price'][security.sid][-period-1:]
begin, end = prices[-period], prices[-1]
volatility = (pandas.rolling_std(prices, 20) * math.sqrt(period)).mean()
return (end - begin)/begin, volatility/begin

def get_best(data, stocks, period):
best = None
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
#log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
rank = ret * 0.7 + vol * 0.3
if best is None or best[1] < rank:
best = stock, rank
return best[0]

def handle_data(context, data):
stocks = context.stocks
month = data[stocks.values()[0]].datetime.month
if not context.month:
context.month = month

ret = get_metrics(data, stocks.values()[0], context.period)
# check if next month began
if context.month == month:
return
context.month = month

if ret:
stock = get_best(data, stocks, context.period)
positions = context.portfolio.positions
if positions[stock.sid]['amount']:
return
sold = 0.0
for position in positions.values():
if position.amount:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price

amount = int((context.portfolio.cash+sold)/data[stock].price)
_order(stock, amount, data[stock].price)

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.
58 responses

Ed,

Could you explain why you think you're calculating volatility incorrectly?
Based off this paper by Nassim Taleb, where the ratio of mean to standard deviation is sqrt(2/pi) I've changed the volatility calculations in the attached backtest. I might be completely off so feel free to correct me whenever.

50
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Strategy
import math
import pandas

def initialize(context):
context.stocks = {12915: sid(12915), 21769: sid(21769),
24705: sid(24705), 23134: sid(23134),
23118: sid(23118), 22887: sid(22887),
26807: sid(26807)}
context.month = None
context.period = 72 # 3 months period

def _order(stock, amount, price):
if amount != 0:
order(stock, amount)
#log.info("%s %d shares of %s = %.2f" % \
# If less than 0, it'll print out selling
#         stock.symbol, abs(price*amount)))

@batch_transform(window_length=73)
def get_metrics(dp, security, period):
'''Calculate performance and volatility for given period.'''
# Get's all the close prices of the security in the last 73 days (3 months)
prices = dp['close_price'][security.sid][-period-1:]
begin, end = prices[-period], prices[-1]
# Do we need rolling standard deviation?
volatility = (pandas.rolling_std(prices,20) * math.sqrt(period)).mean()/(math.sqrt(2/math.pi))
return (end - begin)/begin, volatility/begin

def get_best(data, stocks, period):
best = None
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
#log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
rank = ret * 0.7 + vol * 0.3
if best is None or best[1] < rank:
best = stock, rank
return best[0]

def handle_data(context, data):
stocks = context.stocks
month = data[stocks.values()[0]].datetime.month
if not context.month:
context.month = month

ret = get_metrics(data, stocks.values()[0], context.period)
# check if next month began
if context.month == month:
return
context.month = month

if ret:
stock = get_best(data, stocks, context.period)
positions = context.portfolio.positions
if positions[stock.sid]['amount']:
return
sold = 0.0
for position in positions.values():
if position.amount:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price

amount = int((context.portfolio.cash+sold)/data[stock].price)
_order(stock, amount, data[stock].price)

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.
Disclaimer

The material on this website is provided for informational purposes only and does not constitute an offer to sell, a solicitation to buy, or a recommendation or endorsement for any security or strategy, nor does it constitute an offer to provide investment advisory services by Quantopian. In addition, the material offers no opinion with respect to the suitability of any security or specific investment. No information contained herein should be regarded as a suggestion to engage in or refrain from any investment-related course of action as none of Quantopian nor any of its affiliates is undertaking to provide investment advice, act as an adviser to any plan or entity subject to the Employee Retirement Income Security Act of 1974, as amended, individual retirement account or individual retirement annuity, or give advice in a fiduciary capacity with respect to the materials presented herein. If you are an individual retirement or other investor, contact your financial advisor or other fiduciary unrelated to Quantopian about whether any given investment idea, strategy, product or service described herein may be appropriate for your circumstances. All investments involve risk, including loss of principal. Quantopian makes no guarantees as to the accuracy or completeness of the views expressed in the website. The views are subject to change, and may have become unreliable for various reasons, including changes in market conditions or economic circumstances.

Hi Seong,

Thank you for helping me.

I'm not sure what exactly is incorrect in my implementation, but there is definitely something wrong, because results are not even close to what is described in this article http://seekingalpha.com/article/1622832-a-global-market-rotation-strategy-with-an-annual-performance-of-41-4-since-2003?source=intbrokers_regular. And as I still don't understand what does it mean 'calculate the medium 3 month 20 day volatility and also normalize from 0-1' I thought that I made a mistake in volatility calculation. But it can be mistake in another place as well.

What's your understanding of author's explanations of volatility calculation? I'm especially confused with normalization. How to normalize volatility?

Regards,
Ed

Ed,

The author mentions that

It is better not have too much short term (e.g. 20day) volatility. An
ETF should not get a higher ranking only because of a short term
spike, so, high volatility is lowering the rank. In fact, the ETF's in
a rotation strategy should have approximately the same volatility.
This is not always possible, so using volatility to lower the ranking
can help.

So for normalization, the highest return would be 1 while for volatility, the lowest would be 1; I've attempted to put it in implementation, but like you said, it doesn't come close to the returns described in the article. Perhaps, the author wasn't staying strict to the 3 month lookback period when he stated his returns?

-Seong

50
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Strategy
import math
import pandas

def initialize(context):
context.stocks = {12915: sid(12915), 21769: sid(21769),
24705: sid(24705), 23134: sid(23134),
23118: sid(23118), 22887: sid(22887),
23911: sid(23911)}
context.month = None
context.period = 72 # 3 months period

def _order(stock, amount, price):
if amount != 0:
order(stock, amount)
log.info("%s %d shares of %s = %.2f" % \
# If less than 0, it'll print out selling
stock.symbol, abs(price*amount)))

@batch_transform(window_length=73)
def get_metrics(dp, security, period):
'''Calculate performance and volatility for given period.'''
# Get's all the close prices of the security in the last 73 days (3 months)
prices = dp['close_price'][security.sid][-period-1:]
begin, end = prices[-period], prices[-1]
volatility = (pandas.rolling_std(prices,20)*math.sqrt(period/20)).mean()
return (end - begin)/begin, volatility/begin

def normalise(data, stocks, period):
# Need to return normalised return and volume
stocks_ret = {}
stocks_vol = {}
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
stocks_ret[stock] = ret
stocks_vol[stock] = vol
# Return max = highest performance, while volatility max is lowest volatility
ret_max, ret_min, vol_max, vol_min = max(stocks_ret.values()), min(stocks_ret.values()), \
min(stocks_vol.values()), max(stocks_vol.values())
return ret_max, ret_min, vol_max, vol_min

def get_best(data, stocks, period):
best = None
ret_max, ret_min, vol_max, vol_min = normalise(data, stocks, period)
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
#log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
ret = (ret-ret_min)/(ret_max-ret_min)
vol = (vol-vol_min)/(vol_max-vol_min)
rank = ret * 0.7 + vol * 0.3
log.debug("The rank is " + str(rank) + "for stock: " + str(stock))
if best is None or best[1] < rank:
best = stock, rank
log.debug("The BEST rank is: " + str(best[0]))
return best[0]

def handle_data(context, data):
stocks = context.stocks
month = data[stocks.values()[0]].datetime.month
if not context.month:
context.month = month

ret = get_metrics(data, stocks.values()[0], context.period)
# check if next month began
if context.month == month:
return
context.month = month

if ret:
stock = get_best(data, stocks, context.period)
positions = context.portfolio.positions
if positions[stock.sid]['amount']:
return
sold = 0.0
for position in positions.values():
if position.amount:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price

amount = int((context.portfolio.cash+sold)/data[stock].price)
_order(stock, amount, data[stock].price)

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.

Hi Seong,

I excluded SHY from the list as EDV is already there for the same purpose. Now results are much better.
Your changes improved algo a lot! Thank you!

217
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Strategy
import math
import pandas

def initialize(context):
context.stocks = {12915: sid(12915), 21769: sid(21769),
24705: sid(24705), 23134: sid(23134),
23118: sid(23118), 22887: sid(22887)}
#23911: sid(23911)}
context.month = None
context.period = 72 # 3 months period

def _order(stock, amount, price):
if amount != 0:
order(stock, amount)
log.info("%s %d shares of %s = %.2f" % \
# If less than 0, it'll print out selling
stock.symbol, abs(price*amount)))

@batch_transform(window_length=73)
def get_metrics(dp, security, period):
'''Calculate performance and volatility for given period.'''
# Get's all the close prices of the security in the last 73 days (3 months)
prices = dp['close_price'][security.sid][-period-1:]
begin, end = prices[-period], prices[-1]
volatility = (pandas.rolling_std(prices,20)*math.sqrt(period/20)).mean()
return (end - begin)/begin, volatility/begin

def normalise(data, stocks, period):
# Need to return normalised return and volume
stocks_ret = {}
stocks_vol = {}
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
stocks_ret[stock] = ret
stocks_vol[stock] = vol
# Return max = highest performance, while volatility max is lowest volatility
ret_max, ret_min, vol_max, vol_min = max(stocks_ret.values()), min(stocks_ret.values()), \
min(stocks_vol.values()), max(stocks_vol.values())
return ret_max, ret_min, vol_max, vol_min

def get_best(data, stocks, period):
best = None
ret_max, ret_min, vol_max, vol_min = normalise(data, stocks, period)
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
#log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
ret = (ret-ret_min)/(ret_max-ret_min)
vol = (vol-vol_min)/(vol_max-vol_min)
rank = ret * 0.7 + vol * 0.3
log.debug('%s: return: %.2f, vol: %.2f, rank: %.2f' % \
(stock.symbol, ret, vol, rank))
#log.debug("The rank is " + str(rank) + "for stock: " + stock.symbol)
if best is None or best[1] < rank:
best = stock, rank
log.debug("The BEST rank is: " + best[0].symbol)
return best[0]

def handle_data(context, data):
stocks = context.stocks
month = data[stocks.values()[0]].datetime.month
if not context.month:
context.month = month

ret = get_metrics(data, stocks.values()[0], context.period)
# check if next month began
if context.month == month:
return
context.month = month

if ret:
stock = get_best(data, stocks, context.period)
positions = context.portfolio.positions
if positions[stock.sid]['amount']:
return
sold = 0.0
for position in positions.values():
if position.amount:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price

amount = int((context.portfolio.cash+sold)/data[stock].price)
_order(stock, amount, data[stock].price)

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.

Ed,

No problem! I believe you had gold in there before, but the algo seems to do much better without gold and SHY!

-Seong

Seong,

Results are better, but still far from author's. It could be because of other mistakes, which we haven't spotted yet. I'll re-read the article. If you have more ideas what can be incorrect in my implementation please let me know.

Regards,
Ed

I've excluded EDV and returned SHY back just to be able to test for a longer period. SHY started in 2007. That's why previous backtests were shorter.

217
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Strategy
import math
import pandas

def initialize(context):
context.stocks = {12915: sid(12915), 21769: sid(21769),
24705: sid(24705), 23134: sid(23134),
23118: sid(23118), 23911: sid(23911)}

context.month = None
context.period = 72 # 3 months period

def _order(stock, amount, price):
if amount != 0:
order(stock, amount)
log.info("%s %d shares of %s = %.2f" % \
# If less than 0, it'll print out selling
stock.symbol, abs(price*amount)))

@batch_transform(window_length=73)
def get_metrics(dp, security, period):
'''Calculate performance and volatility for given period.'''
# Get's all the close prices of the security in the last 73 days (3 months)
prices = dp['close_price'][security.sid][-period-1:]
begin, end = prices[-period], prices[-1]
volatility = (pandas.rolling_std(prices,20)*math.sqrt(period/20)).mean()
return (end - begin)/begin, volatility/begin

def normalise(data, stocks, period):
# Need to return normalised return and volume
stocks_ret = {}
stocks_vol = {}
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
stocks_ret[stock] = ret
stocks_vol[stock] = vol
# Return max = highest performance, while volatility max is lowest volatility
ret_max, ret_min, vol_max, vol_min = max(stocks_ret.values()), min(stocks_ret.values()), \
min(stocks_vol.values()), max(stocks_vol.values())
return ret_max, ret_min, vol_max, vol_min

def get_best(data, stocks, period):
best = None
ret_max, ret_min, vol_max, vol_min = normalise(data, stocks, period)
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
#log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
ret = (ret-ret_min)/(ret_max-ret_min)
vol = (vol-vol_min)/(vol_max-vol_min)
rank = ret * 0.7 + vol * 0.3
log.debug('%s: return: %.2f, vol: %.2f, rank: %.2f' % \
(stock.symbol, ret, vol, rank))
if best is None or best[1] < rank:
best = stock, rank
log.debug("The BEST rank is: " + best[0].symbol)
return best[0]

def handle_data(context, data):
stocks = context.stocks
month = data[stocks.values()[0]].datetime.month
if not context.month:
context.month = month

ret = get_metrics(data, stocks.values()[0], context.period)
# check if next month began
if context.month == month:
return
context.month = month

if ret:
stock = get_best(data, stocks, context.period)
positions = context.portfolio.positions
if positions[stock.sid]['amount']:
return
sold = 0.0
for position in positions.values():
if position.amount:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price

amount = int((context.portfolio.cash+sold)/data[stock].price)
_order(stock, amount, data[stock].price)

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.

Ed,

The part that confuses me is that our Sharpe ratio is so much higher than the author's; perhaps it's the computed differently by Quantopian. I'll take a look into it.

-Seong

Ed and Seong,
Thank you for sharing this great strategy. I think there is something to be made here and it needs some kind of test to determine its robustness.That can be done by testing how good it performs for periods of 45, 60, 75, 90, 120 days etc. That is fairly easy. Also, another test is to see what happens if we buy the second best stock instead of the best one. That way we know we are not just somehow over optimizing the stock portfolio.

As a matter of curiosity so that I can learn how to program in this language, what modifications are needed so that we buy the second and the third highest stock instead of the highest? We just split the available capital 50-50 and buy these two.

Thanks again for this great strategy.

Hi Maji,

I've modified the strategy as you've asked. Results are not that impressive because with this approach strategy is missing SHY during bear market and as a result suffers from big drawdowns.

PS: I still think that my implementation has mistakes as results are much worse than author's. I'd appreciate if we can find those mistakes together.

Regards,
Ed

217
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Strategy
import math
import pandas

def initialize(context):
context.stocks = {12915: sid(12915), 21769: sid(21769),
24705: sid(24705), 23134: sid(23134),
23118: sid(23118), 23911: sid(23911)}

context.month = None
context.period = 72 # 3 months period

def _order(stock, amount, price):
if amount != 0:
order(stock, amount)
log.info("%s %d shares of %s = %.2f" % \
# If less than 0, it'll print out selling
stock.symbol, abs(price*amount)))

@batch_transform(window_length=73)
def get_metrics(dp, security, period):
'''Calculate performance and volatility for given period.'''
# Get's all the close prices of the security in the last 73 days (3 months)
prices = dp['close_price'][security.sid][-period-1:]
begin, end = prices[-period], prices[-1]
volatility = (pandas.rolling_std(prices,20)*math.sqrt(period/20)).mean()
return (end - begin)/begin, volatility/begin

def normalise(data, stocks, period):
# Need to return normalised return and volume
stocks_ret = {}
stocks_vol = {}
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
stocks_ret[stock] = ret
stocks_vol[stock] = vol
# Return max = highest performance, while volatility max is lowest volatility
ret_max, ret_min, vol_max, vol_min = max(stocks_ret.values()), min(stocks_ret.values()), \
min(stocks_vol.values()), max(stocks_vol.values())
return ret_max, ret_min, vol_max, vol_min

def sorted_by_rank(data, stocks, period):
result = []
ret_max, ret_min, vol_max, vol_min = normalise(data, stocks, period)
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
#log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
ret = (ret-ret_min)/(ret_max-ret_min)
vol = (vol-vol_min)/(vol_max-vol_min)
rank = ret * 0.7 + vol * 0.3
log.debug('%s: return: %.2f, vol: %.2f, rank: %.2f' % \
(stock.symbol, ret, vol, rank))
result.append((rank, stock))
return [stock for rank, stock in sorted(result, reverse=True)]

def handle_data(context, data):
stocks = context.stocks
month = data[stocks.values()[0]].datetime.month
if not context.month:
context.month = month

ret = get_metrics(data, stocks.values()[0], context.period)
# check if next month began
if context.month == month:
return
context.month = month

if ret:
stocks = sorted_by_rank(data, stocks, context.period)
log.debug("The BEST 3 are: " + ', '.join(item.symbol for item in stocks[:3]))
#stocks = stocks[0:1] # pick up first(the best)
#stocks = stocks[1:2] # pick up second
stocks = stocks[1:3] # pick up second and third ranked etfs
sold = 0.0
sids = [stock.sid for stock in stocks]

# filter out already bought stocks
positions = context.portfolio.positions
stocks = [stock for stock in stocks \
if not positions[stock.sid]['amount']]

# sell open positions except remaining winners
sold = 0
for position in positions.values():
if position.amount and position.sid not in sids:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price

# buy new winners if any
if not stocks:
log.debug("Winners are the same. Nothing to buy.")
else:
for stock in stocks:
amount = int((context.portfolio.cash+sold)/len(stocks)/data[stock].price)
_order(stock, amount, data[stock].price)

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.

Thank you Ed for your hard work.

I read the article on the website and I feel that the author has not disclosed all the rules or he has some subjective element. I can't put my finger on it, but I feel not all the rules are disclosed. Either way, the system that you came up with and Seong modified is VERY GOOD. That kind of return is nothing to thumb ones nose at. I am not proficient at python at all and am a learner.

Oh, did you take the moving average of the 20 day volatility? However, I don't see what period is used to average in that system on the website.

Keep chugging at it and I think you have got something very special in your hands.

Ed,
I did not understand the purpose of the following code. I thought the system requires you to sell all the stocks at the end of the month and buy the new winner. Am I misunderstanding something?

Thank you.
Maji

        # sell open positions except remaining winners
sold = 0
for position in positions.values():
if position.amount and position.sid not in sids:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price


Maji,

The code is saying that you should sell any stock that you currently have a position in, that isn't in the list of 'winning' stocks for the month

# sell open positions except remaining winners
sold = 0
for position in positions.values():
# If there is an amount in the position for a stock not in winning stocks then sell all stocks like that
if position.amount and position.sid not in sids:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price


The code to buy the winning stocks for the month is a bit below that in this:

# buy new winners if any
if not stocks:
log.debug("Winners are the same. Nothing to buy.")
else:
for stock in stocks:
amount = int((context.portfolio.cash+sold)/len(stocks)/data[stock].price)
_order(stock, amount, data[stock].price)


This says that if the winning stock for the previous month is still the winning stock/stocks for this month, then keep it the same. Else, buy the new winning stocks.

As for the volatility there is some discrepancy in how we calculated it. Currently, it's set at at 20-day standard deviation*sqrt(number of days in period/20) averaged over a 73 day period. I'm actually not too familiar with volatility calculations but I figured that you'd be sqrt(number of periods) not the absolute number of days. E.g. If you're calculating annualized volatility with a monthly standard deviation you'd multiply std_dev*sqrt(12).
Please feel free to correct me if I"m wrong.

-Seong

Seong,
Thank you for the explanation. Very much appreciated.

I think a 20 day Std. Dev. is a good measure for volatility. However, this maybe the gray area where this model is different from the original author's model. As this system is very sensitive on volatility, the weights assigned to momentum and volatility may have to be adjusted.

Anyway, this is a great system and with a few more adjustments to work out the kinks, it may be tradeable and profitable.

Thank you guys for your hard work.

Hi,

Bi-weekly rebalancing improved results. Frankly, I somehow dislike the idea of time-based rebalancing. There should be better approach. If you have ideas please share. I'll try to implement them.

Ed

217
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Strategy
import math
import pandas

def initialize(context):
context.stocks = {12915: sid(12915), 21769: sid(21769),
24705: sid(24705), 23134: sid(23134),
23118: sid(23118), 23911: sid(23911)}

context.week = 1
context.period = 72 # 3 months period

def _order(stock, amount, price):
if amount != 0:
order(stock, amount)
log.info("%s %d shares of %s = %.2f" % \
# If less than 0, it'll print out selling
stock.symbol, abs(price*amount)))

@batch_transform(window_length=73)
def get_metrics(dp, security, period):
'''Calculate performance and volatility for given period.'''
# Get's all the close prices of the security in the last 73 days (3 months)
prices = dp['close_price'][security.sid][-period-1:]
begin, end = prices[-period], prices[-1]
volatility = (pandas.rolling_std(prices,20)*math.sqrt(period/20)).mean()
return (end - begin)/begin, volatility/begin

def normalise(data, stocks, period):
# Need to return normalised return and volume
stocks_ret = {}
stocks_vol = {}
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
stocks_ret[stock] = ret
stocks_vol[stock] = vol
# Return max = highest performance, while volatility max is lowest volatility
ret_max, ret_min, vol_max, vol_min = max(stocks_ret.values()), min(stocks_ret.values()), \
min(stocks_vol.values()), max(stocks_vol.values())
return ret_max, ret_min, vol_max, vol_min

def sorted_by_rank(data, stocks, period):
result = []
ret_max, ret_min, vol_max, vol_min = normalise(data, stocks, period)
for stock in stocks.values():
ret, vol = get_metrics(data, stock, period)
#log.debug('%s: return: %.2f, vol: %.2f' % (stock.symbol, ret, vol))
ret = (ret-ret_min)/(ret_max-ret_min)
vol = (vol-vol_min)/(vol_max-vol_min)
rank = ret * 0.7 + vol * 0.3
log.debug('%s: return: %.2f, vol: %.2f, rank: %.2f' % \
(stock.symbol, ret, vol, rank))
result.append((rank, stock))
return [stock for rank, stock in sorted(result, reverse=True)]

def handle_data(context, data):
stocks = context.stocks

ret = get_metrics(data, stocks.values()[0], context.period)

# Rebalance on Wednesday
if data[stocks.values()[0]].datetime.weekday() != 2:
return

# Every second week
context.week += 1
if context.week != 2:
return
context.week = 0

if ret:
stocks = sorted_by_rank(data, stocks, context.period)
log.debug("The BEST 3 are: " + ', '.join(item.symbol for item in stocks[:3]))
if stocks[0].symbol == 'SHY':
stocks = stocks[0:1]
else:
stocks = stocks[0:1] # pick up first(the best)
#stocks = stocks[1:2] # pick up second
#stocks = stocks[1:3] # pick up second and third ranked etfs

sids = [stock.sid for stock in stocks]

# filter out already bought stocks
positions = context.portfolio.positions
stocks = [stock for stock in stocks \
if not positions[stock.sid]['amount']]

# sell open positions except of remaining winners
sold = 0.0
for position in positions.values():
if position.amount and position.sid not in sids:
pstock = context.stocks[position.sid]
price = data[pstock].price
_order(pstock, -position.amount, price)
sold += position.amount * price

# buy new winners if any
if not stocks:
log.debug("Winners are the same. Nothing to buy.")
else:
for stock in stocks:
amount = int((context.portfolio.cash+sold)/len(stocks)/data[stock].price)
_order(stock, amount, data[stock].price)

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.

Hello,

After seeing this original algorithm posted a few days ago I was intrigued. Let me first say thanks for writing it to begin with. After reading the original article and having a look at the code plus some of the follow ups you guys have done I wanted to have my hand at it.

What started as tweaks here and there has evolved into a near-complete re-write of the original codebase. A list of the big resulting changes follows:

• Being unable to backtest using EDV before 2008, as it was not listed until Dec 14, 2007. This is no longer a problem. The algorithm now looks at a stocks start date and only analyzes it can actually be traded.
• The algorithm used to put in a sell order and proceed to buy before the sale was complete. This no longer happens, the position must be sold before the next one can be purchased.
• As has been done already, I removed SHY. It offers no benefit and only serves to flat line any returns. Looking at the site the article references, SHY is never held at any point.
• Volatility calculations have been re-done. I did as much research as I could on this and settled on implementing a formula from iVolatility.com. I'm still unsure about the number I'm putting in for the square root in the final step (currently 252/period). I've experimented with other numbers but changing this has little impact on the result.
• The code is a little simpler and much more organized. More comments and more functions that do simpler tasks. Hopefully understanding what's going on should be easier for others.
• Attempted (unsuccessfully) to prevent the algorithm from buying more than there was of cash available. This happened before too, just worse.

Being a programmer by trade, I'm confident in the programmatic changes I made to the code. The thing is, I don't know if I actually made things better. The returns may higher, but so are the risk indicators. What I don't know is if I made the financial logic side of things worse.

I still think it's a great strategy, and would love to learn more / see if it can be improved further.

Regards,
- David

165
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
#
# Adapted Global Market Rotation Strategy
#
# This strategy rotates between six global market ETFs on a monthly
# basis.  Each month the performance and mean 20-day volitility over
# the last 13 weekds are used to rank which ETF should be invested
# in for the coming month.
import math
import pandas

def initialize(context):
context.stocks = {
12915: sid(12915), # MDY (SPDR S&P MIDCAP 400)
21769: sid(21769), # IEV (ISHARES EUROPE ETF)
24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS)
23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40)
23118: sid(23118), # EEP (ISHARES MSCI PACIFIC EX JAPAN)
22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY)
#23911: sid(23911)
}

# Keep track of the current month.
context.currentMonth = None

# The order ID of the sell order currently being filled
context.oid = None

# The current stock being held
context.currentStock = None

# The next stock that needs to get purchased (once the sell order
# on the current stock is filled
context.nextStock = None

# The 3-month lookback period.  Calculated based on there being
# an average of 21 trading days in a month
context.lookback = 63

'''
Gets the minimum and maximum values of an array of values
'''
def getMinMax(arr):
return min(arr.values()), max(arr.values())

'''
Calculates the n-day historical volatility given a set of
n+1 prices.

@param period The number of days for which to calculate volatility
@param prices An array of price information.  Must be of length
period + 1.
'''
def historicalVolatility(period, prices):
# HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1)

# Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1))
r = []
for i in xrange(1, period + 1):
r.append(math.log(prices[i] / prices[i-1]))

# Find the average of all returns
rMean = sum(r) / period;

# Determine the difference of each return from the mean, then square
d = []
for i in xrange(0, period):
d.append(math.pow((r[i] - rMean), 2))

# Take the square root of the sum over the period - 1.  Then mulitply
# that by the square root of the number of trading days in a year
vol = math.sqrt(sum(d) / (period - 1)) * math.sqrt(252/period)

return vol

'''
Gets the performance and average 20-day volatility of a security
over a given period

@param prices
@param period The time period for which to find
'''
def getStockMetrics(prices, period):
# Get the prices
#prices = data['close_price'][security][-period-1:]
start = prices[-period] # First item
end = prices[-1] # Last item

performance = (end - start) / start

# Calculate 20-day volatility for the given period
v = []
x = 0
for i in xrange(-period, 0):
v.append(historicalVolatility(20, prices[i-21:21+x]))
x += 1

volatility = sum(v) / period

return performance, volatility

'''
Picks the best stock from a group of stocks based on the given
data over a specified period using the stocks' performance and
volatility

@param data The datapanel with data of all the stocks
@param stocks A list of stocks to rank
@param period The time period over which the stocks will be
analyzed
'''
def getBestStock(data, stocks, period):
best = None

performances = {}
volatilities = {}

# Get performance and volatility for all the stocks
for s in stocks:
p, v = getStockMetrics(data['price'][s.sid], period)
performances[s.sid] = p
volatilities[s.sid] = v

# Determine min/max of each.  NOTE: volatility is switched
# since a low volatility should be weighted highly.
minP, maxP = getMinMax(performances)
maxV, minV = getMinMax(volatilities)

# Normalize the performance and volatility values to a range
# between [0..1] then rank them based on a 70/30 weighting.
for s in stocks:
p = (performances[s.sid] - minP) / (maxP - minP)
v = (volatilities[s.sid] - minV) / (maxV - minV)
rank = p * 0.7 + v * 0.3

#log.info('Rank info for %s: p=%s, v=%s, r=%s' % (s,p,v,rank))

# If the new rank is greater than the old best rank, pick it.
if best is None or rank > best[1]:
best = s, rank

return best[0]

'''
Sells all the currently held positions in the context's portfolio
'''
def sellHoldings(context):
positions = context.portfolio.positions

oid = None
for p in positions.values():
if (p.amount > 0):
#log.debug('ordering %s' % p)
oid = order(p.sid, -p.amount)

return oid

'''
Utilize the batch_transform decorator to accumulate multiple days
of data into one datapanel  Need the window length to be 20 longer
than lookback period to allow for a 20-day volatility calculation
'''
@batch_transform(window_length=83)
def accumulateData(data):
return data

'''
The main proccessing function.  This is called and passed data
'''
def handle_data(context, data):
# Accumulate data until there is enough days worth of data
# to process without having outOfBounds issues.
datapanel = accumulateData(data)

if datapanel is None:
# There is insufficient data accumulated to process
return

# If there is an order ID, check the status of the order.
# If there is an order and it is filled, the next stock
# can be purchased.
if context.oid is not None:
orderObj = get_order(context.oid)
if orderObj.filled == orderObj.amount:
# Good to buy next holding
amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1
log.info('Sell order complete, buying %s of %s (%s of %s)' % \
(amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash))
order(context.nextStock, amount)
context.currentStock = context.nextStock
context.oid = None
context.nextStock = None

date = get_datetime()
month = date.month

if not context.currentMonth:
# Set the month initially
context.currentMonth = month

if context.currentMonth == month:
# If the current month is unchanged, nothing further to do
return

context.currentMonth = month

# At this point, a new month has been reached.  The stocks
# need to be

# Ensure stocks are only traded if possible.
# (e.g) EDV doesn't start trading until late 2007, without
# this, any backtest run before that date would fail.
stocks = []
for s in context.stocks.values():
if date > s.security_start_date:
stocks.append(s)

# Determine which stock should be used for the next month
best = getBestStock(datapanel, stocks, context.lookback)

if best:
if (context.currentStock is not None and context.currentStock == best):
# If there is a stock currently held and it is the same as
# the new 'best' stock, nothing needs to be done
return
else:
# Otherwise, the current stock needs to be sold and the new
# stock bought
context.oid = sellHoldings(context)
context.nextStock = best

# Purchase will not occur until the next call of handle_data
# and only when the order has been filled.

# If there is no stock currently held, it needs to be bought.
# This only happend
if context.currentStock is None:
amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1
log.info('First purchase, buying %s of %s (%s of %s)' % \
(amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash))
order(context.nextStock, amount)
context.currentStock = context.nextStock
context.oid = None
context.nextStock = None
This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.

Hi David,

Thank you very much for your work! Your implementation is great! This is what I was hoping for when started this thread.

The main issue I can see with this strategy is that it performed great until the beginning of 2012. After that results are not so impressive. I'm going to to look at other rebalancing techniques. Also I've found out that decreasing rebalancing period can help to decrease drawdown.

Ed

Fantastic work guys. Sector rotation is all about identifying the sectors that smart money is getting in to and hopefully we can ride their coattails. The collaboration here is terrific and thanks to everyone.

Using my limited knowledge of understanding the code, I think here is a place where some variation can be tried. Please see the subroutine getStockMetrics below. Ha ha, it shows my age... still stuck in the time of Fortran IV and Pascal.

Anyway, instead of just the open and end prices for the "look back period", I think we should look at the weekly ROC for the last 12 weeks (or what ever weeks we are using), and make a moving average out of it. This way we can look at the sectors every week or every two weeks, but noise will be smoothed out. That way we can catch the trend earlier, hopefully. The current one is just looking at two prices and not looking what happened in between. The story in between may not be noise and it is possible that we are catching a sector that is just starting to dry up.

Another place to look at is what Ed discovered... look at every week or two weeks, but that increases noise. Hopefully by using the above, we can get rid of some of the noise.

Instead of just cycling into bonds, maybe we should look at all the ETFs (other than the bond ETF) and then if their ROC is downwards, then buy TLT or a Treasury Bond ETF or cash. Over here, even if the momentum is negative, we are still buying because it is ranked no. 1 irrespective of whether the momentum is up or down. I think there are times when smart money just sits in cash (or in Treasury Bonds) and we should try to do that.

I hope I am being clear with my post here. If not, please ask and I will try to explain better.

Thank you everyone for your help.
Maji

def getStockMetrics(prices, period):
# Get the prices
#prices = data['close_price'][security][-period-1:]
start = prices[-period] # First item
end = prices[-1] # Last item
performance = (end - start) / start
# Calculate 20-day volatility for the given period
v = []
x = 0
for i in xrange(-period, 0):
v.append(historicalVolatility(20, prices[i-21:21+x]))
x += 1
volatility = sum(v) / period
return performance, volatility



You guys may have seen Prof. Balch's blog on his ETF rotation strategy:

Hi David,

- EDV was not used before 2008. Does it mean that your algo didn't have chance to switch to bonds before 2008?
- Does buying after sell order completes mean that sell order is executed on the next day open and buy order - on the next day close?

Ed

Hey Ed,

Yes, every time handle_data is called the the date is used to filter the list of securities specified at the top to remove any that are not traded on that date. See the code on lines 216-220(ish). Look for the

if date > s.security_start_date:


After that block of code, the stocks variable contains a list of securities that is a subset of the context.stocks list, and it is that subset that is passed to the getBestStock function. So you could add any security you want to the list of items at the top and not worry about whether or not they are available for trading when running a backtest.

As for the second question, the sell order is initiated first. The buy order is not initiated until the sell order is filled. This does end up happening a day later. Looking at the transaction details and position values in a full backtest makes it clear what is going on. As an example I will break down what happens on the first sell/buy switch of the algorithm I originally posted.
- Oct 1, 2003 20:00:00 - Order to sell 2328 shares of EEM -- This order is after market close, it is not filled until the next day.
- Oct 2, 2003 Position value changes to reflect the execution of the order, all shares of EEM are sold.
- Oct 2, 2003 20:00:00 - Order to buy 1641 shares of EPP -- Again, after market close so it is not filled until the next day.
- Oct 3, 2003 Position value now reflect the purchase of EPP shares.

The downside of the order being filled the following day is that the number of shares bought has been calculated the previous day, but since the price has changed, what often happens is the cost of purchasing shares has increased and results in a negative cash position. I plan on fixing this in my next iteration (currently being worked on).

I wrote the code to force the buy order to only happen after the sale is filed since earlier versions of the code were having bad problems.Not only would buy orders be filled before a sell order (leaving a large negative cash position for several days in some cases), but also I found the algorithm would sometimes make multiple sell or buy orders without a matching buy or sell, which just didn't make sense.

Regards,
- David

Maybe we need to go to Minute Level Data to get this issue resolved. Just a suggestion.

Thank you guys
Maji

Thank you Mr. Bartosh, Mr. Quast and others! I had been aware of Mr. Grossman's S.A. blog post and website on GMRE and was searching for a way to test it too, when I discovered the work you've done to replicate his results. I wanted to share the results I'm getting with a tweak of your original efforts. With the changes I've made I'm able to get a 1707% return for the same initial cash amount (30000) and the date range 01-01-2003 to 2013-09-09. I've also got a version that will run in Zipline to act as a confirmation. If you would like a copy please contact me and I'll forward a copy of the Zipline version. If there is interest in building a coding group for this strategy I can put the Quantopian and Zipline version on GitHub.

Some notes on the changes:
- In the Open Source spirit of both the Quantopian and Zipline projects, as well as your sharing the GMRE backtest - I've explicitly folded this GMRE code under GPLv3.
- Adopted the Rogers & Satchell Volatility measure.
- Added a .5 factor for the EDV ETF as per Mr. Grossman's blog entry (EDV has 50% higher volatility than the other basket items).
- Modified the look back to most recent previous 3 months (63 days) rather than the last 3 months of a 4 month period (84) which more closely matches Mr. Grossman's notes, as I interpret them.
- Created a BUY and SELL function for the orders to better keep track of completed buys and sells.
- Added ability to signal the stock to buy on the last trading day of the month.

Some observations using Quantopian:
- As others have noted buy and sell orders do not always complete on a single trading day.
- Buy and sell orders don't appear to be executable for the same trading day. Ideally I'd like to sell at the start of a trading day and buy the next best stock once the sell is complete during the same trading day; better matching Mr. Grossman's strategy. But that doesn't appear to be achievable.
- I don't fathom how negative cash accounts occur. I've tried to avoid this my leaving a cash reserve using a buffer price for buy orders. (not used in the attached backtest)
- The data appears to correlate well with the Zipline load Yahoo data bars adjusted=true

Some observations using the GMRE strategy:
- As Mr. Grossman notes, the volatility weighting didn't appear to impact the pick of global ETF's and is primarily to smooth the noise in EDV.
- Performance/Volatility weighting more towards volatility negatively impacts the strategy. (eg. perf .4/ vol .6)
- No, the monthly buy list from the Quantopian GMRE strategy DOES NOT MATCH Mr. Grossman's historical list one-to-one. But is does correlate closely to it. I would attribute the differences to alterations of his strategy over time, his data source and his analyst tools.

69
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Enhanced (GMRE) - Roger & Satchell Volatility

# This program is free software: you can redistribute it and/or modify
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# GMRE Strategy: Frank Grossman, 2013-08-09
#    SeekingAlpha http://seekingalpha.com/article/1622832-a-global-market-rotation-strategy-with-an-annual-performance-of-41-4-since-2003
#    Logical-Invest http://www.logical-invest.com/strategy/GlobalMarketRotationEnhancedStrategy.htm
# Quantopian Author: Ed Bartosh, 2013-09-13
# Quantopian Author: David Quast, 2013-09-13
# Quantopian Author: James Crocker, 2013-11-14 [email protected]

# NOTE: record() can ONLY handle five elements in the graph. Any more than that will runtime error once 5 are exceeded.

# NOTE: With default vaules for the ta_lib functions
# 2013-11-05 PLUS_DI|MINUS_DI provide good indicators - KEEP PLUS_DI|MINUS_DI
# 2013-11-05 RSI trends closely toPLUS_DI with a bit more volatility;  - IGNORE RSI
# 2013-11-05 ROCR100 trends PLUS_DI in shape but is more volatile; lags PLUS_DI|MINUS_DI cross over; lags PPO - ignore ROCR100
# 2013-11-05 PPO trends PLUS_DI but tempers whipsaw of PLUS_DI|MINUS_DI crossovers - KEEP PPO to temper +-DI
# 2013-11-05 TSF trends PPO by 10x. Even factoring by 10 TSF trend doesn't appear to be a good indicator - IGNORE TSF
# 2013-11-05 STOCH/STOCHF to get SlowD and FastD for signal - KEEP STOCH/STOCHF for now. - May presage corrections of other trends.
# 2013-11-05 VAR in for future reference (original GMRE metric); may also need to reconsider ROCR100 unless PLUS_DI fits.
# 2013-11-14 Removing the DI signal as it has no impact on backtest. Week stock already weakest even with -1 for DI signal.
# 2013-12-02 Added 0.5 factor for EDV as per original Seeking Alpha post by Mr. Grossman. EDV volatility is about 50% higher than the other ETF's.
# 2013-12-06 Added verbose logging switches
# 2013-12-09 Tried weighting the volatility AND performance months (e.g. 1/5, 1/3, 1/2) but didn't impact the performance enough to warrant.

# Adapted Global Market Rotation Strategy
#
# This strategy rotates between six global market ETFs on a monthly
# basis.  Each month the performance and mean 20-day volatility over
# the last 13 weeks are used to rank which ETF should be invested
# in for the coming month.

import math
import pandas

# window_length SHOULD EQUAL context.metricPeriod
@batch_transform(window_length=63)
def accumulateData(data):
return data

def initialize(context):

# Trade on boundary of first trading day of MONTH or set DAYS
# DAYS|MONTH
#context.boundaryDays = 21

# Set the last date for the FORECAST BEST Buy
context.lastForecastYear = 2013
context.lastForecastMonth = 8
context.lastForecastDay = 30

# Set Performance vs. Volatility factors (7.0, 3.0 from Grossman GMRE
context.factorPerformance = 0.7
context.factorVolatility = 0.3

# Period Volatility and Performance period in DAYS
context.metricPeriod = 63 # 3 months LOOKBACK
context.periodVolatility = 21 # Volatility period. Chose a MULTIPLE of metricPeriod

# To prevent going 'negative' on cash account set stop, limit and price factor >= stop
#context.orderSellLimits = False
##context.priceSellStop = None
##context.priceSellLimit = None
#context.priceBuyFactor = 3.03 # Buffering since buys and sells DON'T occur on the same day.

# Re-enact pricing from original Quast code
context.orderSellLimits = False

# Factor commission cost
#set_commission(commission.PerShare(cost=0.03))

12915: sid(12915), # MDY (SPDR S&P MIDCAP 400)
21769: sid(21769), # IEV (ISHARES EUROPE ETF)
24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS)
23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40)
23118: sid(23118), # EPP (ISHARES MSCI PACIFIC EX JAPAN)
22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY)
40513: sid(40513), # ZIV (VelocityShares Inverse VIX Medium-Term)
#26432: sid(26432), # FEZ
#23911: sid(23911), # SHY
}

# Set/Unset logging features for verbosity levels
context.logWarn = False
context.logSell = False
context.logHold = True
context.logRank = False
context.logDebug = False

# SHOULDN'T NEED TO MODIFY REMAINING VARIABLES
# Keep track of the current month.
context.currentDayNum = None
context.currentMonth = None
context.currentStock = None
context.nextStock = None
context.oidSell = None

context.sellCount = 0

def getMinMax(arr):
return min(arr.values()), max(arr.values())

def rsVolatility(period, openPrices, closePrices, highPrices, lowPrices):
# Rogers and Satchell (1991)
r = []

for i in xrange(0, period):
a = math.log(highPrices[i] / closePrices[i])
b = math.log(highPrices[i] / openPrices[i])
c = math.log(lowPrices[i] / closePrices[i])
d = math.log(lowPrices[i] / openPrices[i])
r.append( a*b + c*d )

# Take the square root of the sum over the period - 1.  Then mulitply
# that by the square root of the number of trading days in a year
vol = math.sqrt(sum(r) / period) * math.sqrt(252/period)

return vol

def getStockMetrics(context, openPrices, closePrices, highPrices, lowPrices):
# Get the prices

# You can use the 20 day volatility averaged over 3 month.
# For the ranking I calculate the 3 month performance of all ETF's and normalise between 0-1.
# The best will have 1. Then I calculate the medium 3 month 20 day volatility and also normalize from 0-1.
# Then I used Ranking= 0.7*performance +0.3*volatility.
# This will give me a ranking from 0-1 from which I will take the best.

period = context.metricPeriod
periodV = context.periodVolatility
volDays = periodV - 1
range = period / volDays

# Calculate the period performance
start = closePrices[-period] # First item
end = closePrices[-1] # Last item

performance = (end - start) / start

# Calculate 20-day volatility for the given period
v = []
x = 0
for i in xrange(-range, 0):
x = i * periodV
y = x + volDays
if context.logDebug is True:
log.debug('period %s, pV %s, volDays %s, i %s, x %s, y %s, lenopenprices %s' % (period, periodV, volDays, i, x, y, len(openPrices)))
v.append(rsVolatility(volDays, openPrices[x:y], closePrices[x:y], highPrices[x:y], lowPrices[x:y]))

volatility = sum(v) / range

return performance, volatility

def getBestStock(context, data, stocks):

# For the ranking, I also use the volatility of the ETFs.
# While this is not so important for the 5 Global market ETFs, it is important to lower the EDV ranking
# a little bit, according to the higher volatility of the EDV ETF. EDV has a medium 20-day volatility,
# which is roughly 50% higher than the volatility of the 5 global market ETFs. This results in higher
# spikes during small market turbulence and the model would switch too early between shares (our 5 ETFs)
# and treasuries .

performances = {}
volatilities = {}

# Get performance and volatility for all the stocks
for s in stocks:
p, v = getStockMetrics(context, data['open_price'][s.sid], data['close_price'][s.sid], data['high'][s.sid], data['low'][s.sid])
performances[s.sid] = p
volatilities[s.sid] = v

# Determine min/max of each.  NOTE: volatility is switched
# since a low volatility should be weighted highly.
minP, maxP = getMinMax(performances)
maxV, minV = getMinMax(volatilities)

# Normalize the performance and volatility values to a range
# between [0..1] then rank them based on a 70/30 weighting.
stockRanks = {}
for s in stocks:
p = (performances[s.sid] - minP) / (maxP - minP)
v = (volatilities[s.sid] - minV) / (maxV - minV)

if context.logDebug is True:
log.debug('[%s] p %s, v %s' % (s, p, v))

pFactor = context.factorPerformance
vFactor = context.factorVolatility

if math.isnan(p) or math.isnan(v):
rank = None
else:
# Adjust volatility for EDV by 50%
if s.sid == 22887:
rank = (p * pFactor) + ((v * 0.5) * vFactor)
else:
rank = (p * pFactor) + (v * vFactor)

if rank is not None:
stockRanks[s] = rank

bestStock = None
if len(stockRanks) > 0:
if context.logDebug is True and len(stockRanks) < len(stocks):
log.debug('FEWER STOCK RANKINGS THAN IN STOCK BASKET!')
if context.logRank is True:
for s in sorted(stockRanks, key=stockRanks.get, reverse=True):
log.info('RANK [%s] %s' % (s, stockRanks[s]))

bestStock = max(stockRanks, key=stockRanks.get)
else:
if context.logDebug is True:
log.debug('NO STOCK RANKINGS FOUND IN BASKET; BEST STOCK IS: NONE')

return bestStock

def sellPositions(context):
oid = None
positions = context.portfolio.positions

try:
priceSellStop = context.priceSellStop
except:
priceSellStop = 0.0

try:
priceSellLimit = context.priceSellLimit
except:
priceSellLimit = 0.0

for p in positions.values():
if (p.amount > 0):

amount = p.amount
price = p.last_sale_price

if context.logSell is True:
orderValue = price * amount
log.info('SELL [%s] (%s) @ $%s (%s)' % (p.sid, -amount, price, orderValue)) stop = price - priceSellStop limit = stop - priceSellLimit if context.orderSellLimits is True: oid = order(p.sid, -amount, limit_price = limit, stop_price = stop) else: oid = order(p.sid, -amount) context.sellCount += 1 return oid def buyPositions(context, data): oid = None cash = context.portfolio.cash s = context.nextStock try: priceBuyFactor = context.priceBuyFactor except: priceBuyFactor = 0.0 try: priceBuyStop = context.priceBuyStop except: priceBuyStop = 0.0 try: priceBuyLimit = context.priceBuyLimit except: priceBuyLimit = 0.0 price = data[s.sid].open_price amount = math.floor(cash / (price + priceBuyFactor)) orderValue = price * amount stop = price + priceBuyStop limit = stop + priceBuyLimit if cash <= 0 or cash < orderValue: log.info('BUY ABORT! cash$%s < orderValue $%s' % (cash, orderValue)) else: if context.logBuy is True: log.info('BUY [%s] %s @$%s ($%s of$%s)' % (s, amount, price, orderValue, cash))

oid = order(s, amount, limit_price = limit, stop_price = stop)
else:
oid = order(s, amount)

return oid

'''
The main proccessing function.  This is called and passed data
'''
def handle_data(context, data):

date = get_datetime()
month = int(date.month)
day = int(date.day)
year = int(date.year)
#dayNum = int(date.strftime("%j"))

fYear = context.lastForecastYear
fMonth = context.lastForecastMonth
fDay = context.lastForecastDay

if context.logWarn is True and context.portfolio.cash < 0:
log.warn('NEGATIVE CASH %s' % context.portfolio.cash)

if context.oidSell is not None:
orderObj = get_order(context.oidSell)
if orderObj.filled == orderObj.amount:
# Good to buy next holding
if context.logSell is True:
log.info('SELL ORDER COMPLETED')
context.oidSell = None
context.currentStock = context.nextStock
context.nextStock = None
else:
if context.logSell is True:
log.info('SELL ORDER *NOT* COMPLETED')
return

if orderObj.filled == orderObj.amount:
else:
return

datapanel = accumulateData(data)

if datapanel is None:
# There is insufficient data accumulated to process
if context.logWarn is True:
log.warn('INSUFFICIENT DATA!')
return

if not context.currentMonth or context.currentMonth != month or (year == fYear and month == fMonth and day == fDay):
#context.currentDayNum = dayNum
context.currentMonth = month
else:
return

# At this point the stocks need to be ranked.

# Ensure stocks are only traded if possible.
# (e.g) EDV doesn't start trading until late 2007, without
# this, any backtest run before that date would fail.
stocks = []
if date > s.security_start_date:
stocks.append(s)

best = getBestStock(context, datapanel, stocks)

if best is not None:
if (context.currentStock == best):
# Hold current
if context.logHold is True:
log.info('HOLD [%s]' % context.currentStock)
return
elif (context.currentStock is None):
context.currentStock = best
context.nextStock = best
else:
# Sell ALL and Buy best
context.nextStock = best
#log.info('SELLING ALL POSITIONS - BUYING [%s]' % best)
context.oidSell = sellPositions(context)
else:
if context.logWarn is True:
log.warn('COULD NOT FIND A BEST STOCK! BEST STOCK IS *NONE*')

if (year == fYear and month == fMonth and day == fDay):
log.info('PNL $%s, CASH$%s, PORTFOLIO $%s' % (context.portfolio.pnl, context.portfolio.cash, context.portfolio.portfolio_value))  This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes. There was a runtime error. Hi James, Thank you for continuing and improving our work! I'm interested in helping coding of this strategy, so please create project on github and put your code there. if you can also explain how to run your code in zipline it would be great. Regards, Ed PS: Regarding execution of orders. To execute orders for the same trading day we can switch to minute time frame. It should not be hard to do. However, it would require some changes in the code. I've placed the Quantopian Cloud and Quantopian Zipline GMRE strategy on GitHub https://github.com/james-crocker/quant with very basic HOWTO for each. If anyone is interested in collaborating on this strategy contact me on GitHub. The more eyes on this the better! I'll start looking at the changes to enable minute trading so we can execute sell and buy orders on the same day. Although this is interesting, the lion's share of the return here comes from ZIV, and etn with special risks. And here. I don't just mean high beta or the increased risk of ETNs vs ETFs. The issuer can just terminate the fund if things are going badly. In that event, you could lock in losses several times larger than an S&P index etf would be experiencing at the same time. Not that it couldn't work - just keep your eyes open. It would also be great if someone could incorporate a minimum variance algorithm and generate a portfolio of more than one asset at a time. See http://02f27c6.netsolhost.com/papers/darwin-adaptive-asset-allocation.pdf for some qualitative ideas in that direction. @James, David, I still don't know how you guys did the backtest in Quantopian from 2003. As long as I have EDV in my code, even if I put # in front of it, Quantopian does allow me to start the backtest earlier than Dec. 2007, the day when EDV started to trade. I cloned your code and it still starts from Dec. 2007. -Huapu Hi Huapu, You can switch off automatic date adjustments in UI. Just click on the adjusted date and read instructions carefully. There should be 'Turn off' button somewhere there. PS: I'm working on this strategy. Hopefully will show results soon. Stay tuned :)! Regards, Ed # 2013-12-02 Added 0.5 factor for EDV as per original Seeking Alpha post by Mr. Grossman. # EDV volatility is about 50% higher than the other ETF's.  Shouldn't the factor be 0.66 if volatility is 50% higher? Making the change does improve performance a little. P. Hi Ed, Thanks. I just realized that it is "Build Algorithms" that has the auto adjusted date. Full backtest doesn't have this restriction. I am looking forward to your updates. This is a very interesting strategy and I am working on it too. Hopefully we can make it comparable or better than Grossman's results ;) Best, Huapu Hi, Looks like Quantopian changed the way they calculate risk metrics. Below is the result of cloned David's implementation. I didn't change anything there, just cloned and backtested it. Here is the list of values from David's and cloned backtests: Total Returns 15.49 15.49 Alpha 14.97 0.27 Beta 0.24 -0.05 Sharpe 18.89 1.08 Sortino 36.79 1.48 Information Ratio 0.05 0.87 Benchmark Returns 0.90 0.90 Volatility 0.80 0.24 Max Drawdown 0.27 0.15 Which set looks more realistic from your point of view? Regards, Ed 191 Loading... Backtest from to with initial capital Total Returns -- Alpha -- Beta -- Sharpe -- Sortino -- Max Drawdown -- Benchmark Returns -- Volatility --  Returns 1 Month 3 Month 6 Month 12 Month  Alpha 1 Month 3 Month 6 Month 12 Month  Beta 1 Month 3 Month 6 Month 12 Month  Sharpe 1 Month 3 Month 6 Month 12 Month  Sortino 1 Month 3 Month 6 Month 12 Month  Volatility 1 Month 3 Month 6 Month 12 Month  Max Drawdown 1 Month 3 Month 6 Month 12 Month # # Adapted Global Market Rotation Strategy # # This strategy rotates between six global market ETFs on a monthly # basis. Each month the performance and mean 20-day volitility over # the last 13 weekds are used to rank which ETF should be invested # in for the coming month. import math import pandas def initialize(context): context.stocks = { 12915: sid(12915), # MDY (SPDR S&P MIDCAP 400) 21769: sid(21769), # IEV (ISHARES EUROPE ETF) 24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS) 23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40) 23118: sid(23118), # EEP (ISHARES MSCI PACIFIC EX JAPAN) 22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY) #23911: sid(23911) } # Keep track of the current month. context.currentMonth = None # The order ID of the sell order currently being filled context.oid = None # The current stock being held context.currentStock = None # The next stock that needs to get purchased (once the sell order # on the current stock is filled context.nextStock = None # The 3-month lookback period. Calculated based on there being # an average of 21 trading days in a month context.lookback = 63 ''' Gets the minimum and maximum values of an array of values ''' def getMinMax(arr): return min(arr.values()), max(arr.values()) ''' Calculates the n-day historical volatility given a set of n+1 prices. @param period The number of days for which to calculate volatility @param prices An array of price information. Must be of length period + 1. ''' def historicalVolatility(period, prices): # HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1) # Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1)) r = [] for i in xrange(1, period + 1): r.append(math.log(prices[i] / prices[i-1])) # Find the average of all returns rMean = sum(r) / period; # Determine the difference of each return from the mean, then square d = [] for i in xrange(0, period): d.append(math.pow((r[i] - rMean), 2)) # Take the square root of the sum over the period - 1. Then mulitply # that by the square root of the number of trading days in a year vol = math.sqrt(sum(d) / (period - 1)) * math.sqrt(252/period) return vol ''' Gets the performance and average 20-day volatility of a security over a given period @param prices @param period The time period for which to find ''' def getStockMetrics(prices, period): # Get the prices #prices = data['close_price'][security][-period-1:] start = prices[-period] # First item end = prices[-1] # Last item performance = (end - start) / start # Calculate 20-day volatility for the given period v = [] x = 0 for i in xrange(-period, 0): v.append(historicalVolatility(20, prices[i-21:21+x])) x += 1 volatility = sum(v) / period return performance, volatility ''' Picks the best stock from a group of stocks based on the given data over a specified period using the stocks' performance and volatility @param data The datapanel with data of all the stocks @param stocks A list of stocks to rank @param period The time period over which the stocks will be analyzed ''' def getBestStock(data, stocks, period): best = None performances = {} volatilities = {} # Get performance and volatility for all the stocks for s in stocks: p, v = getStockMetrics(data['price'][s.sid], period) performances[s.sid] = p volatilities[s.sid] = v # Determine min/max of each. NOTE: volatility is switched # since a low volatility should be weighted highly. minP, maxP = getMinMax(performances) maxV, minV = getMinMax(volatilities) # Normalize the performance and volatility values to a range # between [0..1] then rank them based on a 70/30 weighting. for s in stocks: p = (performances[s.sid] - minP) / (maxP - minP) v = (volatilities[s.sid] - minV) / (maxV - minV) rank = p * 0.7 + v * 0.3 #log.info('Rank info for %s: p=%s, v=%s, r=%s' % (s,p,v,rank)) # If the new rank is greater than the old best rank, pick it. if best is None or rank > best[1]: best = s, rank return best[0] ''' Sells all the currently held positions in the context's portfolio ''' def sellHoldings(context): positions = context.portfolio.positions oid = None for p in positions.values(): if (p.amount > 0): #log.debug('ordering %s' % p) oid = order(p.sid, -p.amount) return oid ''' Utilize the batch_transform decorator to accumulate multiple days of data into one datapanel Need the window length to be 20 longer than lookback period to allow for a 20-day volatility calculation ''' @batch_transform(window_length=83) def accumulateData(data): return data ''' The main proccessing function. This is called and passed data ''' def handle_data(context, data): # Accumulate data until there is enough days worth of data # to process without having outOfBounds issues. datapanel = accumulateData(data) if datapanel is None: # There is insufficient data accumulated to process return # If there is an order ID, check the status of the order. # If there is an order and it is filled, the next stock # can be purchased. if context.oid is not None: orderObj = get_order(context.oid) if orderObj.filled == orderObj.amount: # Good to buy next holding amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1 log.info('Sell order complete, buying %s of %s (%s of %s)' % \ (amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash)) order(context.nextStock, amount) context.currentStock = context.nextStock context.oid = None context.nextStock = None date = get_datetime() month = date.month if not context.currentMonth: # Set the month initially context.currentMonth = month if context.currentMonth == month: # If the current month is unchanged, nothing further to do return context.currentMonth = month # At this point, a new month has been reached. The stocks # need to be # Ensure stocks are only traded if possible. # (e.g) EDV doesn't start trading until late 2007, without # this, any backtest run before that date would fail. stocks = [] for s in context.stocks.values(): if date > s.security_start_date: stocks.append(s) # Determine which stock should be used for the next month best = getBestStock(datapanel, stocks, context.lookback) if best: if (context.currentStock is not None and context.currentStock == best): # If there is a stock currently held and it is the same as # the new 'best' stock, nothing needs to be done return else: # Otherwise, the current stock needs to be sold and the new # stock bought context.oid = sellHoldings(context) context.nextStock = best # Purchase will not occur until the next call of handle_data # and only when the order has been filled. # If there is no stock currently held, it needs to be bought. # This only happend if context.currentStock is None: amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1 log.info('First purchase, buying %s of %s (%s of %s)' % \ (amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash)) order(context.nextStock, amount) context.currentStock = context.nextStock context.oid = None context.nextStock = None This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes. There was a runtime error. We did change the way we calculate risk metrics. There is supposed to be a flag on old backtests that warns of that change - I'm going to hunt down why that flag is missing! Here is the detailed explanation of the updated metrics. Disclaimer The material on this website is provided for informational purposes only and does not constitute an offer to sell, a solicitation to buy, or a recommendation or endorsement for any security or strategy, nor does it constitute an offer to provide investment advisory services by Quantopian. In addition, the material offers no opinion with respect to the suitability of any security or specific investment. No information contained herein should be regarded as a suggestion to engage in or refrain from any investment-related course of action as none of Quantopian nor any of its affiliates is undertaking to provide investment advice, act as an adviser to any plan or entity subject to the Employee Retirement Income Security Act of 1974, as amended, individual retirement account or individual retirement annuity, or give advice in a fiduciary capacity with respect to the materials presented herein. If you are an individual retirement or other investor, contact your financial advisor or other fiduciary unrelated to Quantopian about whether any given investment idea, strategy, product or service described herein may be appropriate for your circumstances. All investments involve risk, including loss of principal. Quantopian makes no guarantees as to the accuracy or completeness of the views expressed in the website. The views are subject to change, and may have become unreliable for various reasons, including changes in market conditions or economic circumstances. HI, I took David's code and made the following modifications to it: - changed approach to define rebalancing period. Now it can be set to any number of calendar days - used new order_value API - added check for for price data. This fixes backtest crash - added more information about orders to the log output - made code to satisfy Python code style and look more pythonic - put code to github. You can track my progress here: https://github.com/bartosh/gmr/commits/master Here is my near term todo list: - try to make order execution shorter. Now it takes 3 days, which is unrealistic - parameterize periods used to calculate performance and volatility - add a % stop loss and % stop profit - parameterize amount of securities to pick up I'm taking ideas from this thread, so thank you again for great collaboration! James, let's merge our code. As I started my work from Devid's code merging would require some thought as our codebases are quite different now. Please let me know if you're interested. Regards, Ed 191 Loading... Backtest from to with initial capital Total Returns -- Alpha -- Beta -- Sharpe -- Sortino -- Max Drawdown -- Benchmark Returns -- Volatility --  Returns 1 Month 3 Month 6 Month 12 Month  Alpha 1 Month 3 Month 6 Month 12 Month  Beta 1 Month 3 Month 6 Month 12 Month  Sharpe 1 Month 3 Month 6 Month 12 Month  Sortino 1 Month 3 Month 6 Month 12 Month  Volatility 1 Month 3 Month 6 Month 12 Month  Max Drawdown 1 Month 3 Month 6 Month 12 Month """ Adapted Global Market Rotation Strategy This strategy rotates between six global market ETFs on a monthly basis. Each month the performance and mean 20-day volitility over the last 13 weekds are used to rank which ETF should be invested in for the coming month. """ import math def initialize(context): """Initialize context object. It's passed to the handle_data function.""" context.stocks = { 12915: sid(12915), # MDY (SPDR S&P MIDCAP 400) 21769: sid(21769), # IEV (ISHARES EUROPE ETF) 24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS) 23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40) 23118: sid(23118), # EEP (ISHARES MSCI PACIFIC EX JAPAN) 22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY) } # Date of last rebalancing context.rebalancing_date = None # Rebalancing period in calendar days context.period = 31 # The order ID of the sell order currently being filled context.oid = None # The current stock being held context.currentStock = None # The next stock that needs to get purchased (once the sell order # on the current stock is filled context.nextStock = None # The 3-month lookback period. Calculated based on there being # an average of 21 trading days in a month context.lookback = 63 def getminmax(vdict): """ Get the minimum and maximum values of a list of dictionary values. :param vdict: Python dict-like object. :returns: minimum and maximum of vdict values """ vals = vdict.values() return min(vals), max(vals) def hist_volatility(period, prices): """ Calculate the n-day historical volatility given a set of n+1 prices. :param period: The number of days for which to calculate volatility :param prices: An array of price information. Must be of length period + 1. """ # HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1) # Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1)) returns = [] for i in xrange(1, period + 1): returns.append(math.log(prices[i] / prices[i-1])) # Find the average of all returns rmean = sum(returns) / period # Determine the difference of each return from the mean, then square diff = [] for i in xrange(0, period): diff.append(math.pow((returns[i] - rmean), 2)) # Take the square root of the sum over the period - 1. Then mulitply # that by the square root of the number of trading days in a year vol = math.sqrt(sum(diff) / (period - 1)) * math.sqrt(252/period) return vol def getmetrics(prices, period): """ Get the performance and average 20-day volatility of a security over a given period :param prices: :param period: The time period for which to find """ # Get the prices #prices = data['close_price'][security][-period-1:] start = prices[-period] # First item end = prices[-1] # Last item performance = (end - start) / start # Calculate 20-day volatility for the given period volats = [] j = 0 for i in xrange(-period, 0): volats.append(hist_volatility(20, prices[i-21:21+j])) j += 1 avg_volat = sum(volats) / period return performance, avg_volat def getbeststock(data, stocks, period): """ Pick the best stock from a group of stocks based on the given data over a specified period using the stocks' performance and volatility :param data: The datapanel with data of all the stocks :param stocks: A list of stocks to rank :param period: The time period over which the stocks will be analyzed """ best = None performances = {} volatilities = {} # Get performance and volatility for all the stocks for stock in stocks: perf, volat = getmetrics(data['price'][stock.sid], period) performances[stock.sid] = perf volatilities[stock.sid] = volat # Determine min/max of each. NOTE: volatility is switched # since a low volatility should be weighted highly. minp, maxp = getminmax(performances) maxv, minv = getminmax(volatilities) # Normalize the performance and volatility values to a range # between [0..1] then rank them based on a 70/30 weighting. for stock in stocks: perf = (performances[stock.sid] - minp) / (maxp - minp) volat = (volatilities[stock.sid] - minv) / (maxv - minv) rank = perf * 0.7 + volat * 0.3 #log.info('Rank info for %s: p=%s, v=%s, r=%s' % (s,p,v,rank)) # If the new rank is greater than the old best rank, pick it. if best is None or rank > best[1]: best = stock, rank return best[0] def sellholdings(context): """Sell all the currently held positions in the context's portfolio.""" positions = context.portfolio.positions oid = None for pos in positions.values(): if (pos.amount > 0): log.info('Selling %s shares of %s' % (pos.amount, pos.sid.symbol)) oid = order(pos.sid, -pos.amount) return oid @batch_transform(window_length=83) def accumulatedata(data): """ Utilize the batch_transform decorator to accumulate multiple days of data into one datapanel Need the window length to be 20 longer than lookback period to allow for a 20-day volatility calculation """ return data def days(begin, end): """Calculate amount of calendar days between two dates.""" roundb = begin.replace(hour = 0, minute = 0, second = 0, microsecond = 0) rounde = end.replace(hour = 0, minute = 0, second = 0, microsecond = 0) return (rounde - roundb).days def handle_data(context, data): """ The main proccessing function. Called whenever a market event occurs for any of algorithm's securities. :param context: context object :param data: Object contains all the market data for algorithm securities keyed by security id. It represents a snapshot of algorithm's universe as of when this method is called. :returns: None """ # Accumulate data until there is enough days worth of data # to process without having outOfBounds issues. datapanel = accumulatedata(data) if datapanel is None: # There is insufficient data accumulated to process return current_date = get_datetime() # If there is an order ID, check the status of the order. # If there is an order and it is filled, the next stock # can be purchased. if context.oid is not None: orderobj = get_order(context.oid) if orderobj.filled == orderobj.amount: log.info('Sold %s shares of %s' % (-orderobj.amount, orderobj.sid.symbol)) # Good to buy next holding cash = context.portfolio.cash oobj = get_order(order_value(context.nextStock, cash)) log.info('Sell order complete, buying %s shares of %s. Cash is %s' % \ (oobj.amount, context.nextStock.symbol, cash)) context.currentStock = context.nextStock context.oid = None context.nextStock = None context.rebalancing_date = current_date if context.rebalancing_date and days(context.rebalancing_date, current_date) < context.period: # It's not a time to rebalance yet, nothing further to do return # At this point, a new month has been reached. The stocks # need to be # Ensure stocks are only traded if possible. # (e.g) EDV doesn't start trading until late 2007, without # this, any backtest run before that date would fail. stocks = [] for stock in context.stocks.values(): if current_date > stock.security_start_date \ and stock.sid in datapanel['price']: stocks.append(stock) # Determine which stock should be used for the next month best = getbeststock(datapanel, stocks, context.lookback) if best: if (context.currentStock is not None and context.currentStock == best): # If there is a stock currently held and it is the same as # the new 'best' stock, nothing needs to be done return else: # Otherwise, the current stock needs to be sold and the new # stock bought context.oid = sellholdings(context) context.nextStock = best # Purchase will not occur until the next call of handle_data # and only when the order has been filled. # If there is no stock currently held, it needs to be bought. # This only happend if context.currentStock is None: cash = context.portfolio.cash oobj = get_order(order_value(context.nextStock, cash)) log.info('Buying %s shares of %s. Cash is %s' % \ (oobj.amount, context.nextStock.symbol, cash)) context.currentStock = context.nextStock context.oid = None context.nextStock = None context.rebalancing_date = current_date  This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes. There was a runtime error. Hi, Ed, Thanks for updating. So far my best backtest result on this strategy is as followings. 0.8 performance + 0.2 volatility seems to give better performance than 0.7 + 0.3. I am not so sure about ZIV. Although ZIV seems to boost the performance by a lot, the potential risk can be quite high. My understanding is that ZIV is inversely proportional to VXX, and VXX has dropped from >$7000 to ~$42. So if market volatility go up in future ZIV could drop like a plummet. I appreciate comments from you guys on using ZIV. -Huapu 232 Loading... Backtest from to with initial capital Total Returns -- Alpha -- Beta -- Sharpe -- Sortino -- Max Drawdown -- Benchmark Returns -- Volatility --  Returns 1 Month 3 Month 6 Month 12 Month  Alpha 1 Month 3 Month 6 Month 12 Month  Beta 1 Month 3 Month 6 Month 12 Month  Sharpe 1 Month 3 Month 6 Month 12 Month  Sortino 1 Month 3 Month 6 Month 12 Month  Volatility 1 Month 3 Month 6 Month 12 Month  Max Drawdown 1 Month 3 Month 6 Month 12 Month # Global Market Rotation Enhanced (GMRE) - Roger & Satchell Volatility # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ATTRIBUTION: # GMRE Strategy: Frank Grossman, 2013-08-09 # SeekingAlpha http://seekingalpha.com/article/1622832-a-global-market-rotation-strategy-with-an-annual-performance-of-41-4-since-2003 # Logical-Invest http://www.logical-invest.com/strategy/GlobalMarketRotationEnhancedStrategy.htm # Quantopian Author: Ed Bartosh, 2013-09-13 # Quantopian Author: David Quast, 2013-09-13 # Quantopian Author: James Crocker, 2013-11-14 [email protected] # NOTE: record() can ONLY handle five elements in the graph. Any more than that will runtime error once 5 are exceeded. # NOTE: With default vaules for the ta_lib functions # 2013-11-05 PLUS_DI|MINUS_DI provide good indicators - KEEP PLUS_DI|MINUS_DI # 2013-11-05 RSI trends closely toPLUS_DI with a bit more volatility; - IGNORE RSI # 2013-11-05 ROCR100 trends PLUS_DI in shape but is more volatile; lags PLUS_DI|MINUS_DI cross over; lags PPO - ignore ROCR100 # 2013-11-05 PPO trends PLUS_DI but tempers whipsaw of PLUS_DI|MINUS_DI crossovers - KEEP PPO to temper +-DI # 2013-11-05 TSF trends PPO by 10x. Even factoring by 10 TSF trend doesn't appear to be a good indicator - IGNORE TSF # 2013-11-05 STOCH/STOCHF to get SlowD and FastD for signal - KEEP STOCH/STOCHF for now. - May presage corrections of other trends. # 2013-11-05 VAR in for future reference (original GMRE metric); may also need to reconsider ROCR100 unless PLUS_DI fits. # 2013-11-14 Removing the DI signal as it has no impact on backtest. Week stock already weakest even with -1 for DI signal. # 2013-12-02 Added 0.5 factor for EDV as per original Seeking Alpha post by Mr. Grossman. EDV volatility is about 50% higher than the other ETF's. # 2013-12-06 Added verbose logging switches # 2013-12-09 Tried weighting the volatility AND performance months (e.g. 1/5, 1/3, 1/2) but didn't impact the performance enough to warrant. # Adapted Global Market Rotation Strategy # # This strategy rotates between six global market ETFs on a monthly # basis. Each month the performance and mean 20-day volatility over # the last 13 weeks are used to rank which ETF should be invested # in for the coming month. import math import pandas performance_lookback = 63 # window_length SHOULD EQUAL context.metricPeriod @batch_transform(window_length=performance_lookback) def accumulateData(data): return data def initialize(context): # Set commission model set_commission(commission.PerTrade(cost=7.00)) # Trade on boundary of first trading day of MONTH or set DAYS # DAYS|MONTH #context.boundaryTrade = 'DAYS' #context.boundaryDays = 14 context.boundaryTrade = 'MONTH' # Set the last date for the FORECAST BEST Buy context.lastForecastYear = 2015 context.lastForecastMonth = 12 context.lastForecastDay = 30 # Set Performance vs. Volatility factors (7.0, 3.0 from Grossman GMRE context.factorPerformance = 0.8 context.factorVolatility = 0.2 # Period Volatility and Performance period in DAYS context.metricPeriod = performance_lookback # 3 months LOOKBACK context.periodVolatility = 21 # Volatility period. Chose a MULTIPLE of metricPeriod #context.volatilityMethod = "simple" context.volatilityMethod = "rs" # To prevent going 'negative' on cash account set stop, limit and price factor >= stop #context.orderBuyLimits = False #context.or#derSellLimits = False ##context.priceBuyStop = None ##context.priceBuyLimit = None ##context.priceSellStop = None ##context.priceSellLimit = None #context.priceBuyFactor = 3.03 # Buffering since buys and sells DON'T occur on the same day. # Re-enact pricing from original Quast code context.orderBuyLimits = False context.orderSellLimits = False context.priceBuyFactor = 0.0 # Factor commission cost #set_commission(commission.PerShare(cost=0.03)) #set_commission(commission.PerTrade(cost=15.00)) context.basket = { 12915: sid(12915), # MDY (SPDR S&P MIDCAP 400) 21769: sid(21769), # IEV (ISHARES EUROPE ETF) 24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS) 23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40) 23118: sid(23118), # EPP (ISHARES MSCI PACIFIC EX JAPAN) 32279: sid(32279), # XOP (SPDRÂ® S&PÂ® Oil & Gas Exploration & Production ETF) 22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY) #23870: sid(23870), # IEF (VANGUARD 7-10 years bond) 40513: sid(40513), # ZIV (VelocityShares Inverse VIX Medium-Term) #26432: sid(26432), # FEZ #23911: sid(23911), # SHY #8554: sid(8554) # SPY } # Set/Unset logging features for verbosity levels context.logWarn = False context.logBuy = False context.logSell = False context.logHold = True context.logRank = False context.logDebug = True # SHOULDN'T NEED TO MODIFY REMAINING VARIABLES # Keep track of the current month. context.currentDayNum = None context.currentMonth = None context.currentStock = None context.nextStock = None context.oidBuy = None context.oidSell = None context.buyCount = 0 context.sellCount = 0 def getMinMax(arr): return min(arr.values()), max(arr.values()) def simpleVolatility(period, prices): # HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1) # Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1)) #print period, len(prices) r = [] for i in xrange(1, period + 1): r.append(math.log(prices[i] / prices[i-1])) # Find the average of all returns rMean = sum(r) / period; # Determine the difference of each return from the mean, then square d = [] for i in xrange(0, period): d.append(math.pow((r[i] - rMean), 2)) # Take the square root of the sum over the period - 1. Then mulitply # that by the square root of the number of trading days in a year vol = math.sqrt(sum(d) / (period - 1)) * math.sqrt(252/period) return vol def rsVolatility(period, openPrices, closePrices, highPrices, lowPrices): # Rogers and Satchell (1991) r = [] for i in xrange(0, period): a = math.log(highPrices[i] / closePrices[i]) b = math.log(highPrices[i] / openPrices[i]) c = math.log(lowPrices[i] / closePrices[i]) d = math.log(lowPrices[i] / openPrices[i]) r.append( a*b + c*d ) # Take the square root of the sum over the period - 1. Then mulitply # that by the square root of the number of trading days in a year vol = math.sqrt(sum(r) / period) * math.sqrt(252/period) return vol def getStockMetrics(context, openPrices, closePrices, highPrices, lowPrices): # Get the prices # Frank GrossmannComments (114) # You can use the 20 day volatility averaged over 3 month. # For the ranking I calculate the 3 month performance of all ETF's and normalise between 0-1. # The best will have 1. Then I calculate the medium 3 month 20 day volatility and also normalize from 0-1. # Then I used Ranking= 0.7*performance +0.3*volatility. # This will give me a ranking from 0-1 from which I will take the best. period = context.metricPeriod periodV = context.periodVolatility volDays = periodV volRange = period / volDays # Calculate the period performance start = closePrices[-period] # First item end = closePrices[-1] # Last item performance = (end - start) / start # Calculate 20-day volatility for the given period v = [] x = 0 for i in xrange(-volRange, 0): x = i * periodV y = x + volDays #if context.logDebug is True: # log.debug('period %s, pV %s, volDays %s, i %s, x %s, y %s, lenopenprices %s' % (period, periodV, volDays, i, x, y, len(openPrices))) if context.volatilityMethod == 'simple': if (y<0): v.append(simpleVolatility(volDays, closePrices[x:(y+1)])) else: v.append(simpleVolatility(volDays, closePrices[(x-1):])) else: if (y<0): v.append(rsVolatility(volDays, openPrices[x:y], closePrices[x:y], highPrices[x:y], lowPrices[x:y])) else: v.append(rsVolatility(volDays, openPrices[x:], closePrices[x:], highPrices[x:], lowPrices[x:])) volatility = sum(v) / volRange return performance, volatility def getBestStock(context, data, stocks): # Frank GrossmannComments (114) # For the ranking, I also use the volatility of the ETFs. # While this is not so important for the 5 Global market ETFs, it is important to lower the EDV ranking # a little bit, according to the higher volatility of the EDV ETF. EDV has a medium 20-day volatility, # which is roughly 50% higher than the volatility of the 5 global market ETFs. This results in higher # spikes during small market turbulence and the model would switch too early between shares (our 5 ETFs) # and treasuries . performances = {} volatilities = {} # Get performance and volatility for all the stocks for s in stocks: p, v = getStockMetrics(context, data['open_price'][s.sid], data['close_price'][s.sid], data['high'][s.sid], data['low'][s.sid]) performances[s.sid] = p volatilities[s.sid] = v # Determine min/max of each. NOTE: volatility is switched # since a low volatility should be weighted highly. minP, maxP = getMinMax(performances) maxV, minV = getMinMax(volatilities) # Normalize the performance and volatility values to a range # between [0..1] then rank them based on a 70/30 weighting. stockRanks = {} for s in stocks: p = (performances[s.sid] - minP) / (maxP - minP) v = (volatilities[s.sid] - minV) / (maxV - minV) if context.logDebug is True: log.debug('[%s] p %s, v %s' % (s, p, v)) log.debug('[%s] perf %s, vol %s' % (s, performances[s.sid], volatilities[s.sid])) pFactor = context.factorPerformance vFactor = context.factorVolatility if math.isnan(p) or math.isnan(v): rank = None else: # Adjust volatility for EDV by 50% #if s.sid == 22887: # rank = (p * pFactor) + ((v * 0.5) * vFactor) #else: rank = (p * pFactor) + (v * vFactor) if rank is not None: stockRanks[s] = rank bestStock = None if len(stockRanks) > 0: if context.logDebug is True and len(stockRanks) < len(stocks): log.debug('FEWER STOCK RANKINGS THAN IN STOCK BASKET!') if context.logRank is True: for s in sorted(stockRanks, key=stockRanks.get, reverse=True): log.info('RANK [%s] %s' % (s, stockRanks[s])) bestStock = max(stockRanks, key=stockRanks.get) else: if context.logDebug is True: log.debug('NO STOCK RANKINGS FOUND IN BASKET; BEST STOCK IS: NONE') return bestStock def sellPositions(context): oid = None positions = context.portfolio.positions try: priceSellStop = context.priceSellStop except: priceSellStop = 0.0 try: priceSellLimit = context.priceSellLimit except: priceSellLimit = 0.0 for p in positions.values(): if (p.amount > 0): amount = p.amount price = p.last_sale_price if context.logSell is True: orderValue = price * amount log.info('SELL [%s] (%s) @$%s (%s)' % (p.sid, -amount, price, orderValue))

stop = price - priceSellStop
limit = stop - priceSellLimit

if context.orderSellLimits is True:
oid = order(p.sid, -amount, limit_price = limit, stop_price = stop)
else:
oid = order(p.sid, -amount)

context.sellCount += 1

return oid

oid = None

cash = context.portfolio.cash
s = context.nextStock

try:
except:

try:
except:

try:
except:

price = data[s.sid].open_price
amount = math.floor(cash / (price + priceBuyFactor))

orderValue = price * amount

if cash <= 0 or cash < orderValue:
log.info('BUY ABORT! cash $%s < orderValue$%s' % (cash, orderValue))
else:
log.info('BUY [%s] %s @ $%s ($%s of $%s)' % (s, amount, price, orderValue, cash)) if context.orderBuyLimits is True: oid = order(s, amount, limit_price = limit, stop_price = stop) else: oid = order(s, amount) context.buyCount += 1 return oid ''' The main proccessing function. This is called and passed data ''' def handle_data(context, data): date = get_datetime() month = int(date.month) day = int(date.day) year = int(date.year) #dayNum = int(date.strftime("%j")) fYear = context.lastForecastYear fMonth = context.lastForecastMonth fDay = context.lastForecastDay if context.logWarn is True and context.portfolio.cash < 0: log.warn('NEGATIVE CASH %s' % context.portfolio.cash) if context.oidSell is not None: orderObj = get_order(context.oidSell) if orderObj.filled == orderObj.amount: # Good to buy next holding if context.logSell is True: log.info('SELL ORDER COMPLETED') context.oidSell = None context.oidBuy = buyPositions(context, data) context.currentStock = context.nextStock context.nextStock = None else: if context.logSell is True: log.info('SELL ORDER *NOT* COMPLETED') return if context.oidBuy is not None: orderObj = get_order(context.oidBuy) if orderObj.filled == orderObj.amount: if context.logBuy is True: log.info('BUY ORDER COMPLETED') context.oidBuy = None else: if context.logBuy is True: log.info('BUY ORDER *NOT* COMPLETED') return datapanel = accumulateData(data) if datapanel is None: # There is insufficient data accumulated to process if context.logWarn is True: log.warn('INSUFFICIENT DATA!') return if context.boundaryTrade == "MONTH": if not context.currentMonth or context.currentMonth != month or (year == fYear and month == fMonth and day == fDay): #context.currentDayNum = dayNum context.currentMonth = month else: return else: if context.boundaryTrade == "DAYS": if not context.currentMonth or not context.currentDayNum or context.currentMonth != month or \ (day - context.currentDayNum)>=context.boundaryDays or (year == fYear and month == fMonth and day == fDay): context.currentDayNum = day context.currentMonth = month else: return # At this point the stocks need to be ranked. # Ensure stocks are only traded if possible. # (e.g) EDV doesn't start trading until late 2007, without # this, any backtest run before that date would fail. stocks = [] for s in context.basket.values(): if date > s.security_start_date: stocks.append(s) best = getBestStock(context, datapanel, stocks) if best is not None: if (context.currentStock == best): # Hold current if context.logHold is True: log.info('HOLD [%s]' % context.currentStock) return elif (context.currentStock is None): # Buy best context.currentStock = best context.nextStock = best context.oidBuy = buyPositions(context, data) else: # Sell ALL and Buy best context.nextStock = best #log.info('SELLING ALL POSITIONS - BUYING [%s]' % best) log.info('BUYING [%s]' % best) context.oidSell = sellPositions(context) else: if context.logWarn is True: log.warn('COULD NOT FIND A BEST STOCK! BEST STOCK IS *NONE*') record(buy=context.buyCount, sell=context.sellCount, cash=context.portfolio.cash, pnl=context.portfolio.pnl) if (year == fYear and month == fMonth and day == fDay): log.info('PNL$%s, CASH $%s, PORTFOLIO$%s' % (context.portfolio.pnl, context.portfolio.cash, context.portfolio.portfolio_value))

This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes.
There was a runtime error.

sorry, should be this one

232
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
# Global Market Rotation Enhanced (GMRE) - Roger & Satchell Volatility

# This program is free software: you can redistribute it and/or modify
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# GMRE Strategy: Frank Grossman, 2013-08-09
#    SeekingAlpha http://seekingalpha.com/article/1622832-a-global-market-rotation-strategy-with-an-annual-performance-of-41-4-since-2003
#    Logical-Invest http://www.logical-invest.com/strategy/GlobalMarketRotationEnhancedStrategy.htm
# Quantopian Author: Ed Bartosh, 2013-09-13
# Quantopian Author: David Quast, 2013-09-13
# Quantopian Author: James Crocker, 2013-11-14 [email protected]

# NOTE: record() can ONLY handle five elements in the graph. Any more than that will runtime error once 5 are exceeded.

# NOTE: With default vaules for the ta_lib functions
# 2013-11-05 PLUS_DI|MINUS_DI provide good indicators - KEEP PLUS_DI|MINUS_DI
# 2013-11-05 RSI trends closely toPLUS_DI with a bit more volatility;  - IGNORE RSI
# 2013-11-05 ROCR100 trends PLUS_DI in shape but is more volatile; lags PLUS_DI|MINUS_DI cross over; lags PPO - ignore ROCR100
# 2013-11-05 PPO trends PLUS_DI but tempers whipsaw of PLUS_DI|MINUS_DI crossovers - KEEP PPO to temper +-DI
# 2013-11-05 TSF trends PPO by 10x. Even factoring by 10 TSF trend doesn't appear to be a good indicator - IGNORE TSF
# 2013-11-05 STOCH/STOCHF to get SlowD and FastD for signal - KEEP STOCH/STOCHF for now. - May presage corrections of other trends.
# 2013-11-05 VAR in for future reference (original GMRE metric); may also need to reconsider ROCR100 unless PLUS_DI fits.
# 2013-11-14 Removing the DI signal as it has no impact on backtest. Week stock already weakest even with -1 for DI signal.
# 2013-12-02 Added 0.5 factor for EDV as per original Seeking Alpha post by Mr. Grossman. EDV volatility is about 50% higher than the other ETF's.
# 2013-12-06 Added verbose logging switches
# 2013-12-09 Tried weighting the volatility AND performance months (e.g. 1/5, 1/3, 1/2) but didn't impact the performance enough to warrant.

# Adapted Global Market Rotation Strategy
#
# This strategy rotates between six global market ETFs on a monthly
# basis.  Each month the performance and mean 20-day volatility over
# the last 13 weeks are used to rank which ETF should be invested
# in for the coming month.

import math
import pandas

performance_lookback = 63
# window_length SHOULD EQUAL context.metricPeriod
@batch_transform(window_length=performance_lookback)
def accumulateData(data):
return data

def initialize(context):

# Set commission model

# Trade on boundary of first trading day of MONTH or set DAYS
# DAYS|MONTH
#context.boundaryDays = 14

# Set the last date for the FORECAST BEST Buy
context.lastForecastYear = 2015
context.lastForecastMonth = 12
context.lastForecastDay = 30

# Set Performance vs. Volatility factors (7.0, 3.0 from Grossman GMRE
context.factorPerformance = 0.8
context.factorVolatility = 0.2

# Period Volatility and Performance period in DAYS
context.metricPeriod = performance_lookback # 3 months LOOKBACK
context.periodVolatility = 21 # Volatility period. Chose a MULTIPLE of metricPeriod
#context.volatilityMethod = "simple"
context.volatilityMethod = "rs"

# To prevent going 'negative' on cash account set stop, limit and price factor >= stop
#context.or#derSellLimits = False
##context.priceSellStop = None
##context.priceSellLimit = None
#context.priceBuyFactor = 3.03 # Buffering since buys and sells DON'T occur on the same day.

# Re-enact pricing from original Quast code
context.orderSellLimits = False

# Factor commission cost
#set_commission(commission.PerShare(cost=0.03))

12915: sid(12915), # MDY (SPDR S&P MIDCAP 400)
21769: sid(21769), # IEV (ISHARES EUROPE ETF)
24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS)
23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40)
23118: sid(23118), # EPP (ISHARES MSCI PACIFIC EX JAPAN)
32279: sid(32279), # XOP (SPDRÂ® S&PÂ® Oil & Gas Exploration & Production ETF)
22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY)
#23870: sid(23870),  # IEF (VANGUARD 7-10 years bond)
40513: sid(40513), # ZIV (VelocityShares Inverse VIX Medium-Term)
#26432: sid(26432), # FEZ
#23911: sid(23911), # SHY
#8554: sid(8554)    # SPY
}

# Set/Unset logging features for verbosity levels
context.logWarn = False
context.logSell = False
context.logHold = True
context.logRank = False
context.logDebug = True

# SHOULDN'T NEED TO MODIFY REMAINING VARIABLES
# Keep track of the current month.
context.currentDayNum = None
context.currentMonth = None
context.currentStock = None
context.nextStock = None
context.oidSell = None

context.sellCount = 0

def getMinMax(arr):
return min(arr.values()), max(arr.values())

def simpleVolatility(period, prices):
# HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1)

# Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1))
#print period, len(prices)
r = []
for i in xrange(1, period + 1):
r.append(math.log(prices[i] / prices[i-1]))

# Find the average of all returns
rMean = sum(r) / period;

# Determine the difference of each return from the mean, then square
d = []
for i in xrange(0, period):
d.append(math.pow((r[i] - rMean), 2))

# Take the square root of the sum over the period - 1.  Then mulitply
# that by the square root of the number of trading days in a year
vol = math.sqrt(sum(d) / (period - 1)) * math.sqrt(252/period)

return vol

def rsVolatility(period, openPrices, closePrices, highPrices, lowPrices):
# Rogers and Satchell (1991)
r = []

for i in xrange(0, period):
a = math.log(highPrices[i] / closePrices[i])
b = math.log(highPrices[i] / openPrices[i])
c = math.log(lowPrices[i] / closePrices[i])
d = math.log(lowPrices[i] / openPrices[i])
r.append( a*b + c*d )

# Take the square root of the sum over the period - 1.  Then mulitply
# that by the square root of the number of trading days in a year
vol = math.sqrt(sum(r) / period) * math.sqrt(252/period)

return vol

def getStockMetrics(context, openPrices, closePrices, highPrices, lowPrices):
# Get the prices

# You can use the 20 day volatility averaged over 3 month.
# For the ranking I calculate the 3 month performance of all ETF's and normalise between 0-1.
# The best will have 1. Then I calculate the medium 3 month 20 day volatility and also normalize from 0-1.
# Then I used Ranking= 0.7*performance +0.3*volatility.
# This will give me a ranking from 0-1 from which I will take the best.

period = context.metricPeriod
periodV = context.periodVolatility
volDays = periodV
volRange = period / volDays

# Calculate the period performance
start = closePrices[-period] # First item
end = closePrices[-1] # Last item

performance = (end - start) / start

# Calculate 20-day volatility for the given period
v = []
x = 0
for i in xrange(-volRange, 0):
x = i * periodV
y = x + volDays
#if context.logDebug is True:
#    log.debug('period %s, pV %s, volDays %s, i %s, x %s, y %s, lenopenprices %s' % (period, periodV, volDays, i, x, y, len(openPrices)))
if context.volatilityMethod == 'simple':
if (y<0):
v.append(simpleVolatility(volDays, closePrices[x:(y+1)]))
else:
v.append(simpleVolatility(volDays, closePrices[(x-1):]))
else:
if (y<0):
v.append(rsVolatility(volDays, openPrices[x:y], closePrices[x:y], highPrices[x:y], lowPrices[x:y]))
else:
v.append(rsVolatility(volDays, openPrices[x:], closePrices[x:], highPrices[x:], lowPrices[x:]))

volatility = sum(v) / volRange

return performance, volatility

def getBestStock(context, data, stocks):

# For the ranking, I also use the volatility of the ETFs.
# While this is not so important for the 5 Global market ETFs, it is important to lower the EDV ranking
# a little bit, according to the higher volatility of the EDV ETF. EDV has a medium 20-day volatility,
# which is roughly 50% higher than the volatility of the 5 global market ETFs. This results in higher
# spikes during small market turbulence and the model would switch too early between shares (our 5 ETFs)
# and treasuries .

performances = {}
volatilities = {}

# Get performance and volatility for all the stocks
for s in stocks:
p, v = getStockMetrics(context, data['open_price'][s.sid], data['close_price'][s.sid], data['high'][s.sid], data['low'][s.sid])
performances[s.sid] = p
volatilities[s.sid] = v

# Determine min/max of each.  NOTE: volatility is switched
# since a low volatility should be weighted highly.
minP, maxP = getMinMax(performances)
maxV, minV = getMinMax(volatilities)

# Normalize the performance and volatility values to a range
# between [0..1] then rank them based on a 70/30 weighting.
stockRanks = {}
for s in stocks:
p = (performances[s.sid] - minP) / (maxP - minP)
v = (volatilities[s.sid] - minV) / (maxV - minV)

if context.logDebug is True:
log.debug('[%s] p %s, v %s' % (s, p, v))
log.debug('[%s] perf %s, vol %s' % (s, performances[s.sid], volatilities[s.sid]))

pFactor = context.factorPerformance
vFactor = context.factorVolatility

if math.isnan(p) or math.isnan(v):
rank = None
else:
# Adjust volatility for EDV by 50%
#if s.sid == 22887:
#    rank = (p * pFactor) + ((v * 0.5) * vFactor)
#else:
rank = (p * pFactor) + (v * vFactor)

if rank is not None:
stockRanks[s] = rank

bestStock = None
if len(stockRanks) > 0:
if context.logDebug is True and len(stockRanks) < len(stocks):
log.debug('FEWER STOCK RANKINGS THAN IN STOCK BASKET!')
if context.logRank is True:
for s in sorted(stockRanks, key=stockRanks.get, reverse=True):
log.info('RANK [%s] %s' % (s, stockRanks[s]))

bestStock = max(stockRanks, key=stockRanks.get)
else:
if context.logDebug is True:
log.debug('NO STOCK RANKINGS FOUND IN BASKET; BEST STOCK IS: NONE')

return bestStock

def sellPositions(context):
oid = None
positions = context.portfolio.positions

try:
priceSellStop = context.priceSellStop
except:
priceSellStop = 0.0

try:
priceSellLimit = context.priceSellLimit
except:
priceSellLimit = 0.0

for p in positions.values():
if (p.amount > 0):

amount = p.amount
price = p.last_sale_price

if context.logSell is True:
orderValue = price * amount
log.info('SELL [%s] (%s) @ $%s (%s)' % (p.sid, -amount, price, orderValue)) stop = price - priceSellStop limit = stop - priceSellLimit if context.orderSellLimits is True: oid = order(p.sid, -amount, limit_price = limit, stop_price = stop) else: oid = order(p.sid, -amount) context.sellCount += 1 return oid def buyPositions(context, data): oid = None cash = context.portfolio.cash s = context.nextStock try: priceBuyFactor = context.priceBuyFactor except: priceBuyFactor = 0.0 try: priceBuyStop = context.priceBuyStop except: priceBuyStop = 0.0 try: priceBuyLimit = context.priceBuyLimit except: priceBuyLimit = 0.0 price = data[s.sid].open_price amount = math.floor(cash / (price + priceBuyFactor)) orderValue = price * amount stop = price + priceBuyStop limit = stop + priceBuyLimit if cash <= 0 or cash < orderValue: log.info('BUY ABORT! cash$%s < orderValue $%s' % (cash, orderValue)) else: if context.logBuy is True: log.info('BUY [%s] %s @$%s ($%s of$%s)' % (s, amount, price, orderValue, cash))

oid = order(s, amount, limit_price = limit, stop_price = stop)
else:
oid = order(s, amount)

return oid

'''
The main proccessing function.  This is called and passed data
'''
def handle_data(context, data):

date = get_datetime()
month = int(date.month)
day = int(date.day)
year = int(date.year)
#dayNum = int(date.strftime("%j"))

fYear = context.lastForecastYear
fMonth = context.lastForecastMonth
fDay = context.lastForecastDay

if context.logWarn is True and context.portfolio.cash < 0:
log.warn('NEGATIVE CASH %s' % context.portfolio.cash)

if context.oidSell is not None:
orderObj = get_order(context.oidSell)
if orderObj.filled == orderObj.amount:
# Good to buy next holding
if context.logSell is True:
log.info('SELL ORDER COMPLETED')
context.oidSell = None
context.currentStock = context.nextStock
context.nextStock = None
else:
if context.logSell is True:
log.info('SELL ORDER *NOT* COMPLETED')
return

if orderObj.filled == orderObj.amount:
else:
return

datapanel = accumulateData(data)

if datapanel is None:
# There is insufficient data accumulated to process
if context.logWarn is True:
log.warn('INSUFFICIENT DATA!')
return

if not context.currentMonth or context.currentMonth != month or (year == fYear and month == fMonth and day == fDay):
#context.currentDayNum = dayNum
context.currentMonth = month
else:
return
else:
if not context.currentMonth or not context.currentDayNum or context.currentMonth != month or \
(day - context.currentDayNum)>=context.boundaryDays or (year == fYear and month == fMonth and day == fDay):
context.currentDayNum = day
context.currentMonth = month
else:
return
# At this point the stocks need to be ranked.

# Ensure stocks are only traded if possible.
# (e.g) EDV doesn't start trading until late 2007, without
# this, any backtest run before that date would fail.
stocks = []
if date > s.security_start_date:
stocks.append(s)

best = getBestStock(context, datapanel, stocks)

if best is not None:
if (context.currentStock == best):
# Hold current
if context.logHold is True:
log.info('HOLD [%s]' % context.currentStock)
return
elif (context.currentStock is None):
context.currentStock = best
context.nextStock = best
else:
# Sell ALL and Buy best
context.nextStock = best
#log.info('SELLING ALL POSITIONS - BUYING [%s]' % best)
context.oidSell = sellPositions(context)
else:
if context.logWarn is True:
log.warn('COULD NOT FIND A BEST STOCK! BEST STOCK IS *NONE*')

if (year == fYear and month == fMonth and day == fDay):
log.info('PNL $%s, CASH$%s, PORTFOLIO $%s' % (context.portfolio.pnl, context.portfolio.cash, context.portfolio.portfolio_value))  This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes. There was a runtime error. Well, of course, the drive is to get some edge. I agree that 2000% returns bear all the hallmarks of over-optimization. Still, you can "de-optimize" this a lot and still get good returns over benchmarks. On the plus-side, this is working with a well-documented anomaly, relative strength. OTOH, that may be its downside too. If you comment out ziv, and run the system for the last two years, results are unimpressive. That may be nothing, but it may indicate that professionals have now priced this relative strength anomaly out of the market. As I've commented previously, I think the real future of this sort of idea is to rank many geographically diverse etfs by relative strength (diverse geography is pretty much the last place that relative strength works), and then select a portfolio of four or five of them, and use a minimum variance algorithm to weight the holdings. I think such a system could achieve a 14% to 15% CAGR with low risk, which in my world is crazy good return. Hi Antony, First of all I'm sorry for not mentioning why I want to continue working on this. Personally for me there are several reasons to continue working on this strategy and all of them are far from the wish to become 2000% richer in a couple of years :) First, I want to see if it's possible to tweak this strategy to show stable results for last years. As I already mentioned in this thread and Bob also pointed out, past 2 years results are far from being perfect. I'm planning to look how this strategy works with performance-based rebalancing, how introducing stop loss and take profit techniques would change the metrics and so on. I'd like to try ideas from this topic and see how they work. Second and most important reason is fantastic collaboration here. People already helped to turn my first buggy implementation into more mature code and I hope we can come up with more realistic strategy if we continue to collaborate. And third reason. I want to see what Quantopian is capable of and help people to improve it as much as I can. I hope I answered your question. Regards, Ed Does anyone have any idea why this cosmetic change caused quite different results? This is the change: https://github.com/bartosh/gmr/commit/bb119558007f8a89d1acfb7142c46f374b463e38 And this is the backtest without this change: https://www.quantopian.com/posts/global-market-rotation-strategy-buggy-implementation#52bf4b54a329011c16000061 191 Loading... Backtest from to with initial capital Total Returns -- Alpha -- Beta -- Sharpe -- Sortino -- Max Drawdown -- Benchmark Returns -- Volatility --  Returns 1 Month 3 Month 6 Month 12 Month  Alpha 1 Month 3 Month 6 Month 12 Month  Beta 1 Month 3 Month 6 Month 12 Month  Sharpe 1 Month 3 Month 6 Month 12 Month  Sortino 1 Month 3 Month 6 Month 12 Month  Volatility 1 Month 3 Month 6 Month 12 Month  Max Drawdown 1 Month 3 Month 6 Month 12 Month """ Adapted Global Market Rotation Strategy This strategy rotates between six global market ETFs on a monthly basis. Each month the performance and mean 20-day volitility over the last 13 weekds are used to rank which ETF should be invested in for the coming month. """ import math def initialize(context): """Initialize context object. It's passed to the handle_data function.""" context.stocks = [ sid(12915), # MDY (SPDR S&P MIDCAP 400) sid(21769), # IEV (ISHARES EUROPE ETF) sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS) sid(23134), # ILF (ISHARES LATIN AMERICA 40) sid(23118), # EEP (ISHARES MSCI PACIFIC EX JAPAN) sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY) ] # Date of last rebalancing context.rebalancing_date = None # Rebalancing period in calendar days context.period = 31 # The order ID of the sell order currently being filled context.oid = None # The current stock being held context.currentStock = None # The next stock that needs to get purchased (once the sell order # on the current stock is filled context.nextStock = None # The 3-month lookback period. Calculated based on there being # an average of 21 trading days in a month context.lookback = 63 def getminmax(vdict): """ Get the minimum and maximum values of a list of dictionary values. :param vdict: Python dict-like object. :returns: minimum and maximum of vdict values """ vals = vdict.values() return min(vals), max(vals) def hist_volatility(period, prices): """ Calculate the n-day historical volatility given a set of n+1 prices. :param period: The number of days for which to calculate volatility :param prices: An array of price information. Must be of length period + 1. """ # HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1) # Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1)) returns = [] for i in xrange(1, period + 1): returns.append(math.log(prices[i] / prices[i-1])) # Find the average of all returns rmean = sum(returns) / period # Determine the difference of each return from the mean, then square diff = [] for i in xrange(0, period): diff.append(math.pow((returns[i] - rmean), 2)) # Take the square root of the sum over the period - 1. Then mulitply # that by the square root of the number of trading days in a year vol = math.sqrt(sum(diff) / (period - 1)) * math.sqrt(252/period) return vol def getmetrics(prices, period): """ Get the performance and average 20-day volatility of a security over a given period :param prices: :param period: The time period for which to find """ # Get the prices #prices = data['close_price'][security][-period-1:] start = prices[-period] # First item end = prices[-1] # Last item performance = (end - start) / start # Calculate 20-day volatility for the given period volats = [] j = 0 for i in xrange(-period, 0): volats.append(hist_volatility(20, prices[i-21:21+j])) j += 1 avg_volat = sum(volats) / period return performance, avg_volat def getbeststock(data, stocks, period): """ Pick the best stock from a group of stocks based on the given data over a specified period using the stocks' performance and volatility :param data: The datapanel with data of all the stocks :param stocks: A list of stocks to rank :param period: The time period over which the stocks will be analyzed """ best = None performances = {} volatilities = {} # Get performance and volatility for all the stocks for stock in stocks: perf, volat = getmetrics(data['price'][stock.sid], period) performances[stock.sid] = perf volatilities[stock.sid] = volat # Determine min/max of each. NOTE: volatility is switched # since a low volatility should be weighted highly. minp, maxp = getminmax(performances) maxv, minv = getminmax(volatilities) # Normalize the performance and volatility values to a range # between [0..1] then rank them based on a 70/30 weighting. for stock in stocks: perf = (performances[stock.sid] - minp) / (maxp - minp) volat = (volatilities[stock.sid] - minv) / (maxv - minv) rank = perf * 0.7 + volat * 0.3 #log.info('Rank info for %s: p=%s, v=%s, r=%s' % (s,p,v,rank)) # If the new rank is greater than the old best rank, pick it. if best is None or rank > best[1]: best = stock, rank return best[0] def sellholdings(context): """Sell all the currently held positions in the context's portfolio.""" positions = context.portfolio.positions oid = None for pos in positions.values(): if (pos.amount > 0): log.info('Selling %s shares of %s' % (pos.amount, pos.sid.symbol)) oid = order(pos.sid, -pos.amount) return oid @batch_transform(window_length=83) def accumulatedata(data): """ Utilize the batch_transform decorator to accumulate multiple days of data into one datapanel Need the window length to be 20 longer than lookback period to allow for a 20-day volatility calculation """ return data def days(begin, end): """Calculate amount of calendar days between two dates.""" roundb = begin.replace(hour = 0, minute = 0, second = 0, microsecond = 0) rounde = end.replace(hour = 0, minute = 0, second = 0, microsecond = 0) return (rounde - roundb).days def handle_data(context, data): """ The main proccessing function. Called whenever a market event occurs for any of algorithm's securities. :param context: context object :param data: Object contains all the market data for algorithm securities keyed by security id. It represents a snapshot of algorithm's universe as of when this method is called. :returns: None """ # Accumulate data until there is enough days worth of data # to process without having outOfBounds issues. datapanel = accumulatedata(data) if datapanel is None: # There is insufficient data accumulated to process return current_date = get_datetime() # If there is an order ID, check the status of the order. # If there is an order and it is filled, the next stock # can be purchased. if context.oid is not None: orderobj = get_order(context.oid) if orderobj.filled == orderobj.amount: log.info('Sold %s shares of %s' % (-orderobj.amount, orderobj.sid.symbol)) # Good to buy next holding cash = context.portfolio.cash oobj = get_order(order_value(context.nextStock, cash)) log.info('Sell order complete, buying %s shares of %s. Cash is %s' % \ (oobj.amount, context.nextStock.symbol, cash)) context.currentStock = context.nextStock context.oid = None context.nextStock = None context.rebalancing_date = current_date if context.rebalancing_date and days(context.rebalancing_date, current_date) < context.period: # It's not a time to rebalance yet, nothing further to do return # At this point, a new month has been reached. The stocks # need to be # Ensure stocks are only traded if possible. # (e.g) EDV doesn't start trading until late 2007, without # this, any backtest run before that date would fail. stocks = [] for stock in context.stocks: if current_date > stock.security_start_date \ and stock.sid in datapanel['price']: stocks.append(stock) # Determine which stock should be used for the next month best = getbeststock(datapanel, stocks, context.lookback) if best: if (context.currentStock is not None and context.currentStock == best): # If there is a stock currently held and it is the same as # the new 'best' stock, nothing needs to be done return else: # Otherwise, the current stock needs to be sold and the new # stock bought context.oid = sellholdings(context) context.nextStock = best # Purchase will not occur until the next call of handle_data # and only when the order has been filled. # If there is no stock currently held, it needs to be bought. # This only happend if context.currentStock is None: cash = context.portfolio.cash oobj = get_order(order_value(context.nextStock, cash)) log.info('Buying %s shares of %s. Cash is %s' % \ (oobj.amount, context.nextStock.symbol, cash)) context.currentStock = context.nextStock context.oid = None context.nextStock = None context.rebalancing_date = current_date  This backtest was created using an older version of the backtester. Please re-run this backtest to see results using the latest backtester. Learn more about the recent changes. There was a runtime error. I spend quite a bit of time playing with etfreplay.com, which is a web site which has a pre-canned algol for rotating through etfs using relative strength. I did this because I knew someone who consistently used it for very good returns, about 30% yearly. He was kind enough to provide me his system configurations. These system began to fail badly in 2013. I came to several conclusions based on creating dozens of such systems. First, that the systems are very dependent on the exact mix of etfs included, which lead me to believe there there are serious theoretical problems, probably based on the exact timing issues over time of the etfs involved, meaning that there despite the simplicity of the systems, the selection of etfs themselves create a large over fitting problem. Also they are very sensitive to the exact parameters, which further supports this overfitting notion. Second I suspect that the very existence of such a website available so widely, means that the effect could be being canceled out. Lastly, on a sidenote, I have read of many systems using ZIV which perform very well recently. I trade ZIV myself very successfully, but I think that if one wants to trade volatility, one should trade it as a separate system which is tuned for this purpose. When I have a volatility trade on, even something slower moving like ZIV, I watch it every day. If I can't, such as vacation or business trips, I close my position. I would never hold any volatility position for a month blindly trade without looking. All that being said, I continue to look into this method, but I won't put money to work this way, until I feel I understand the issues. Thanks for sharing your experience Richard. I think that amplifies Antony's criticism, that starting with an idea and etf selection put together by an author to support his paid service already suggests drastic overfitting. Some people who build models run a t stat to statistically validate their model. Many of these people feel you should subtract a degree of freedom each time you tweak a parameter to get a better back test result. And I agree that to be useful, these models should not be hugely sensitive to exactly which etfs are used and the exact values of parameters. I don't have anything against trading ZIV. I just think it has unique risks that make it too risky to put a large portion of your portfolio in it. According to the prospectus, the sponsor can close the fund if conditions are difficult. On a huge spike in volatility (e.g., big overnight terrorist attack), you could lose 90%, the sponsor could dissolve the fund, and you would be sitting in cash with a 90% loss and no prospects of recovery. Richard is your system for trading ziv based on the term structure of VIX futures contracts or something else? Simply checking the term structure to make sure it is in contango should keep people out of normal larger down drafts. I enjoyed how one author described trading xiv and ziv. He said it was like picking up$100 bills in front of an unpredictable steam roller.

Bob. I used to trade ZIV using a 78 day stochastic. It's slow moving usually and only trades once every few months, but it makes good money over time. I'm hoping to automate this once I figure out how to get the stochastics to work with the new history feature on quantopian. If I do, I'll post it.

Now I use vix contango to trade xiv/vxx together with technicals. I have to check it daily, but it's worth the trouble. I think that the risk of a shutdown overnight is small, but it is large enough to keep my xiv/vxx position less than 10% of my portfolio just in case. Because of it's relationship to vix contango, I view it more as picking up $100 bills in front of a very predictable steam roller as long as the driver doesn't suffer a sudden stroke. I'd love to automate this, but I think it may take awhile. I need hourly updates on$vix at least, and to get the stochastics working of course.

Thanks. That's interesting. If you haven't, you might want to run a Sharpe ratio comparing xiv returns in your system to ziv returns in a similar system. What I have found, is that although xiv has greater raw returns, ziv has much better risk adjusted returns, at least in the systems I've looked at.

Good idea. Once I can put my xiv system into quantopian, I'll do that. Thanks.

Anthony
I don't consider myself an expert. I'm still learning. But my guess is that your approach will not lead to over fitting, because all of these similar indicators will blend into another indicator. When you create this system, if the results are fairly smooth when you vary the master key, and you see similar results with different securities, that is a good indication to me that you haven't overfit. To me, the key to to understand why you are getting the results you are getting. In this case It looks like you are trying to smooth out the bumpyness of the momentum indicators, and hopefully get better and more consistent results. However, you could also lose buy triggers this way also, or delay them.

I use 4 different momentum indicators when I trade xiv, but instead of combining them, I look at each individually. I have two long term and two short term. If the two long term agree, I go with that, if not, I look at the short term. If they don't agree either, I wait till later. Is this better than combining them. I don't know, because up till I started using Quantopian, I didn't really have a facility to combine things in unique ways. On one hand, I would like Quantopian to allow me to do things I do as a chart trader, but on the other hand, I realize that I may have become biased by the tools I had before.

To go back to your comment about the "fantastical results" (love that interplay of fantastic and magical) of the relative strength systems which I have played with a whole lot, my issue with them is not that they don't give great returns, they do. Even though you can too easily tweak the system to give fantastical results, it's still easy to get more realistic results over time. The problem with them is that they are unpredictable. There is no way of knowing if a bad year will lead to more bad years, or disaster, or improve again. Compare this with trend following systems, where I keep most of my money. I know that if one of my systems misses a sell signal, if the market keeps on going down, it will at some point sell, because that is the nature of the trend following system. It can experience anomalies which cause it to underperform, but overtime, it will work. This is not the case with the relative strength systems.

Here is a little thought experiment about relative strength system. Suppose that instead of buying each month on the same day, I would vary the day of the month so that each month would be different. Then I would test different sets of buy and sell dates, to see which performed better. It's clear that I could really overfit and rig the results by going through each month and selecting the best rebalance date. I think we would all agree that this would be a unreliable system. My argument is that this is what happens anyway. Even though we are rebalancing on the same day each month, the effect is the same, because the internal cycles of the etfs are not monthly, but constantly vary. By switching to a very similar etf, one could introduce a wide variance in the internal cycles, because the new etf contains stock x, and not stock y.

The only way I can think of to get rid of this phenomenon, is to get rid of the notion of monthly rebalancing, and us technical triggers to rebalance instead, which can be done more easily using computers.

cheers,

These types of strategies have merit, but are entirely dependent on the mix of funds.
In the 1990's Fidelity and Rydex sector funds were used with great success until 2000-2002. Search Formula Research and Pankin strategies for more info.
I agree with Richard Diehl, these strategies fall apart when funds are added to the strategies.
A better approach is combine macro factors like interest rates, dollar, CRB and so on with each ETF. Funds must be top ranked and have macro factors positive as well.

Hi everyone,

I don't understand how to make changes to prevent negative cash .. it says:

# To prevent going 'negative' on cash account set stop, limit and price factor >= stop

#context.orderBuyLimits = False
#context.or#derSellLimits = False
##context.priceSellStop = None
##context.priceSellLimit = None
#context.priceBuyFactor = 3.03 # Buffering since buys and sells DON'T occur on the same day.


Can anyone give me an example of what to do exactly .. thanks a lot!

Hi David @Quast,
I backtested your code unchanged. The resulting Sharpe ratio was lower by a factor of 10, compared to results posted by yourself and Mr. @Bartosh. Is this a result of changes to the Quantopian platform, or is there something else afoot?
Thanks,
Monte

Total Returns 288.3%
Benchmark Returns 23.8%
Alpha 0.47
Beta -0.01
Sharpe 1.85
Sortino 2.64
Information Ratio 1.80
Volatility 0.26
Max Drawdown 26.7%

164
Backtest from to with initial capital
Total Returns
--
Alpha
--
Beta
--
Sharpe
--
Sortino
--
Max Drawdown
--
Benchmark Returns
--
Volatility
--
 Returns 1 Month 3 Month 6 Month 12 Month
 Alpha 1 Month 3 Month 6 Month 12 Month
 Beta 1 Month 3 Month 6 Month 12 Month
 Sharpe 1 Month 3 Month 6 Month 12 Month
 Sortino 1 Month 3 Month 6 Month 12 Month
 Volatility 1 Month 3 Month 6 Month 12 Month
 Max Drawdown 1 Month 3 Month 6 Month 12 Month
#
# Adapted Global Market Rotation Strategy
#
# This strategy rotates between six global market ETFs on a monthly
# basis.  Each month the performance and mean 20-day volitility over
# the last 13 weekds are used to rank which ETF should be invested
# in for the coming month.
import math
import pandas

def initialize(context):
context.stocks = {
12915: sid(12915), # MDY (SPDR S&P MIDCAP 400)
21769: sid(21769), # IEV (ISHARES EUROPE ETF)
24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS)
23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40)
23118: sid(23118), # EEP (ISHARES MSCI PACIFIC EX JAPAN)
22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY)
#23911: sid(23911)
}

# Keep track of the current month.
context.currentMonth = None

# The order ID of the sell order currently being filled
context.oid = None

# The current stock being held
context.currentStock = None

# The next stock that needs to get purchased (once the sell order
# on the current stock is filled
context.nextStock = None

# The 3-month lookback period.  Calculated based on there being
# an average of 21 trading days in a month
context.lookback = 63

'''
Gets the minimum and maximum values of an array of values
'''
def getMinMax(arr):
return min(arr.values()), max(arr.values())

'''
Calculates the n-day historical volatility given a set of
n+1 prices.

@param period The number of days for which to calculate volatility
@param prices An array of price information.  Must be of length
period + 1.
'''
def historicalVolatility(period, prices):
# HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1)

# Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1))
r = []
for i in xrange(1, period + 1):
r.append(math.log(prices[i] / prices[i-1]))

# Find the average of all returns
rMean = sum(r) / period;

# Determine the difference of each return from the mean, then square
d = []
for i in xrange(0, period):
d.append(math.pow((r[i] - rMean), 2))

# Take the square root of the sum over the period - 1.  Then mulitply
# that by the square root of the number of trading days in a year
vol = math.sqrt(sum(d) / (period - 1)) * math.sqrt(252/period)

return vol

'''
Gets the performance and average 20-day volatility of a security
over a given period

@param prices
@param period The time period for which to find
'''
def getStockMetrics(prices, period):
# Get the prices
#prices = data['close_price'][security][-period-1:]
start = prices[-period] # First item
end = prices[-1] # Last item

performance = (end - start) / start

# Calculate 20-day volatility for the given period
v = []
x = 0
for i in xrange(-period, 0):
v.append(historicalVolatility(20, prices[i-21:21+x]))
x += 1

volatility = sum(v) / period

return performance, volatility

'''
Picks the best stock from a group of stocks based on the given
data over a specified period using the stocks' performance and
volatility

@param data The datapanel with data of all the stocks
@param stocks A list of stocks to rank
@param period The time period over which the stocks will be
analyzed
'''
def getBestStock(data, stocks, period):
best = None

performances = {}
volatilities = {}

# Get performance and volatility for all the stocks
for s in stocks:
p, v = getStockMetrics(data['price'][s.sid], period)
performances[s.sid] = p
volatilities[s.sid] = v

# Determine min/max of each.  NOTE: volatility is switched
# since a low volatility should be weighted highly.
minP, maxP = getMinMax(performances)
maxV, minV = getMinMax(volatilities)

# Normalize the performance and volatility values to a range
# between [0..1] then rank them based on a 70/30 weighting.
for s in stocks:
p = (performances[s.sid] - minP) / (maxP - minP)
v = (volatilities[s.sid] - minV) / (maxV - minV)
rank = p * 0.7 + v * 0.3

#log.info('Rank info for %s: p=%s, v=%s, r=%s' % (s,p,v,rank))

# If the new rank is greater than the old best rank, pick it.
if best is None or rank > best[1]:
best = s, rank

return best[0]

'''
Sells all the currently held positions in the context's portfolio
'''
def sellHoldings(context):
positions = context.portfolio.positions

oid = None
for p in positions.values():
if (p.amount > 0):
#log.debug('ordering %s' % p)
oid = order(p.sid, -p.amount)

return oid

'''
Utilize the batch_transform decorator to accumulate multiple days
of data into one datapanel  Need the window length to be 20 longer
than lookback period to allow for a 20-day volatility calculation
'''
@batch_transform(window_length=83)
def accumulateData(data):
return data

'''
The main proccessing function.  This is called and passed data
'''
def handle_data(context, data):
# Accumulate data until there is enough days worth of data
# to process without having outOfBounds issues.
datapanel = accumulateData(data)

if datapanel is None:
# There is insufficient data accumulated to process
return

# If there is an order ID, check the status of the order.
# If there is an order and it is filled, the next stock
# can be purchased.
if context.oid is not None:
orderObj = get_order(context.oid)
if orderObj.filled == orderObj.amount:
# Good to buy next holding
amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1
log.info('Sell order complete, buying %s of %s (%s of %s)' % \
(amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash))
order(context.nextStock, amount)
context.currentStock = context.nextStock
context.oid = None
context.nextStock = None

date = get_datetime()
month = date.month

if not context.currentMonth:
# Set the month initially
context.currentMonth = month

if context.currentMonth == month:
# If the current month is unchanged, nothing further to do
return

context.currentMonth = month

# At this point, a new month has been reached.  The stocks
# need to be

# Ensure stocks are only traded if possible.
# (e.g) EDV doesn't start trading until late 2007, without
# this, any backtest run before that date would fail.
stocks = []
for s in context.stocks.values():
if date > s.security_start_date:
stocks.append(s)

# Determine which stock should be used for the next month
best = getBestStock(datapanel, stocks, context.lookback)

if best:
if (context.currentStock is not None and context.currentStock == best):
# If there is a stock currently held and it is the same as
# the new 'best' stock, nothing needs to be done
return
else:
# Otherwise, the current stock needs to be sold and the new
# stock bought
context.oid = sellHoldings(context)
context.nextStock = best

# Purchase will not occur until the next call of handle_data
# and only when the order has been filled.

# If there is no stock currently held, it needs to be bought.
# This only happend
if context.currentStock is None:
amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1
log.info('First purchase, buying %s of %s (%s of %s)' % \
(amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash))
order(context.nextStock, amount)
context.currentStock = context.nextStock
context.oid = None
context.nextStock = None
There was a runtime error.

Hi Monte,

Thanks for posting your findings. On the original backtests above, you'll notice a little warning sign about platform changes. We put those warnings up whenever a platform change affects an older backtest. You can click on them to get the details on the change. In this case, we found and fixed several bugs in our cumulative stats, including the Sharpe ratio. You can read the details here: https://www.quantopian.com/posts/corrections-to-cumulative-risk-metrics

I believe the discrepancy is entirely attributable to the change to annualizing the Sharpe ratio, as described in that link.

thanks,
fawce

Disclaimer

The material on this website is provided for informational purposes only and does not constitute an offer to sell, a solicitation to buy, or a recommendation or endorsement for any security or strategy, nor does it constitute an offer to provide investment advisory services by Quantopian. In addition, the material offers no opinion with respect to the suitability of any security or specific investment. No information contained herein should be regarded as a suggestion to engage in or refrain from any investment-related course of action as none of Quantopian nor any of its affiliates is undertaking to provide investment advice, act as an adviser to any plan or entity subject to the Employee Retirement Income Security Act of 1974, as amended, individual retirement account or individual retirement annuity, or give advice in a fiduciary capacity with respect to the materials presented herein. If you are an individual retirement or other investor, contact your financial advisor or other fiduciary unrelated to Quantopian about whether any given investment idea, strategy, product or service described herein may be appropriate for your circumstances. All investments involve risk, including loss of principal. Quantopian makes no guarantees as to the accuracy or completeness of the views expressed in the website. The views are subject to change, and may have become unreliable for various reasons, including changes in market conditions or economic circumstances.

Hi Guys,

Why can I not implement this into live trading?

Just curious..

Thanks!
Brandon

Hi
Thks u so much for sharing.
Understand that the code currently buy only one stock and readjust once every month. Can anyone help to share how to rank and buy the top 5 or 10 stocks? To allow diversification and more balance allocation. We can also choose from a bigger universe of stocks.
Thk u

Bump for chen chen Q?

A double bump for Chen and myself.

Hi, I am new to Quantopian and learning python. I tried to run some of the source code posted in this thread, but unfortunately I get an error indicating that the @batch_transform function is deprecated and to use history() instead. I am having some difficulty doing this. Could someone make the code modification and share with us newbies how to do this?

Thank you so much!



#
# Adapted Global Market Rotation Strategy
#
# This strategy rotates between six global market ETFs on a monthly
# basis.  Each month the performance and mean 20-day volitility over
# the last 13 weekds are used to rank which ETF should be invested
# in for the coming month.
import math
import pandas

def initialize(context):
context.stocks = {
12915: sid(12915), # MDY (SPDR S&P MIDCAP 400)
21769: sid(21769), # IEV (ISHARES EUROPE ETF)
24705: sid(24705), # EEM (ISHARES MSCI EMERGING MARKETS)
23134: sid(23134), # ILF (ISHARES LATIN AMERICA 40)
23118: sid(23118), # EEP (ISHARES MSCI PACIFIC EX JAPAN)
22887: sid(22887), # EDV (VANGUARD EXTENDED DURATION TREASURY)
#23911: sid(23911)
}

# Keep track of the current month.
context.currentMonth = None
# The order ID of the sell order currently being filled
context.oid = None
# The current stock being held
context.currentStock = None
# The next stock that needs to get purchased (once the sell order
# on the current stock is filled
context.nextStock = None
# The 3-month lookback period.  Calculated based on there being
# an average of 21 trading days in a month
context.lookback = 63

'''
Gets the minimum and maximum values of an array of values
'''
def getMinMax(arr):
return min(arr.values()), max(arr.values())

'''
Calculates the n-day historical volatility given a set of
n+1 prices.

@param period The number of days for which to calculate volatility
@param prices An array of price information.  Must be of length
period + 1.
'''
def historicalVolatility(period, prices):
# HVdaily = sqrt( sum[1..n](x_t - Xbar)^2 / n - 1)
# Start by calculating Xbar = 1/n sum[1..n] (ln(P_t / P_t-1))
r = []
for i in xrange(1, period + 1):
r.append(math.log(prices[i] / prices[i-1]))

# Find the average of all returns
rMean = sum(r) / period;
# Determine the difference of each return from the mean, then square
d = []
for i in xrange(0, period):
d.append(math.pow((r[i] - rMean), 2))
# Take the square root of the sum over the period - 1.  Then mulitply
# that by the square root of the number of trading days in a year
vol = math.sqrt(sum(d) / (period - 1)) * math.sqrt(252/period)
return vol

'''
Gets the performance and average 20-day volatility of a security
over a given period

@param prices
@param period The time period for which to find
'''
def getStockMetrics(prices, period):
# Get the prices
#prices = data['close_price'][security][-period-1:]
start = prices[-period] # First item
end = prices[-1] # Last item
performance = (end - start) / start
# Calculate 20-day volatility for the given period
v = []
x = 0
for i in xrange(-period, 0):
v.append(historicalVolatility(20, prices[i-21:21+x]))
x += 1
volatility = sum(v) / period
return performance, volatility

'''
Picks the best stock from a group of stocks based on the given
data over a specified period using the stocks' performance and
volatility

@param data The datapanel with data of all the stocks
@param stocks A list of stocks to rank
@param period The time period over which the stocks will be
analyzed
'''
def getBestStock(data, stocks, period):
best = None
performances = {}
volatilities = {}
# Get performance and volatility for all the stocks
for s in stocks:
p, v = getStockMetrics(data['price'][s.sid], period)
performances[s.sid] = p
volatilities[s.sid] = v
# Determine min/max of each.  NOTE: volatility is switched
# since a low volatility should be weighted highly.
minP, maxP = getMinMax(performances)
maxV, minV = getMinMax(volatilities)
# Normalize the performance and volatility values to a range
# between [0..1] then rank them based on a 70/30 weighting.
for s in stocks:
p = (performances[s.sid] - minP) / (maxP - minP)
v = (volatilities[s.sid] - minV) / (maxV - minV)
rank = p * 0.7 + v * 0.3
#log.info('Rank info for %s: p=%s, v=%s, r=%s' % (s,p,v,rank))
# If the new rank is greater than the old best rank, pick it.
if best is None or rank > best[1]:
best = s, rank
return best[0]

'''
Sells all the currently held positions in the context's portfolio
'''
def sellHoldings(context):
positions = context.portfolio.positions
oid = None
for p in positions.values():
if (p.amount > 0):
#log.debug('ordering %s' % p)
oid = order(p.sid, -p.amount)
return oid

'''
Utilize the batch_transform decorator to accumulate multiple days
of data into one datapanel  Need the window length to be 20 longer
than lookback period to allow for a 20-day volatility calculation
'''
#@history(window_length=83)
def accumulateData(data):
return data

'''
The main proccessing function.  This is called and passed data
'''
def handle_data(context, data):
# Accumulate data until there is enough days worth of data
# to process without having outOfBounds issues.
data.history('context.stocks', 'close',83,'1d')
datapanel = accumulateData(data)
if datapanel is None:
# There is insufficient data accumulated to process
return

# If there is an order ID, check the status of the order.
# If there is an order and it is filled, the next stock
# can be purchased.
if context.oid is not None:
orderObj = get_order(context.oid)
if orderObj.filled == orderObj.amount:
# Good to buy next holding
amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1
log.info('Sell order complete, buying %s of %s (%s of %s)' % \
(amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash))
order(context.nextStock, amount)
context.currentStock = context.nextStock
context.oid = None
context.nextStock = None

date = get_datetime()
month = date.month
if not context.currentMonth:
# Set the month initially
context.currentMonth = month
if context.currentMonth == month:
# If the current month is unchanged, nothing further to do
return
context.currentMonth = month
# At this point, a new month has been reached.  The stocks
# need to be
# Ensure stocks are only traded if possible.
# (e.g) EDV doesn't start trading until late 2007, without
# this, any backtest run before that date would fail.
stocks = []
for s in context.stocks.values():
if date > s.security_start_date:
stocks.append(s)

# Determine which stock should be used for the next month
best = getBestStock(datapanel, stocks, context.lookback)

if best:
if (context.currentStock is not None and context.currentStock == best):
# If there is a stock currently held and it is the same as
# the new 'best' stock, nothing needs to be done
return
else:
# Otherwise, the current stock needs to be sold and the new
# stock bought
context.oid = sellHoldings(context)
context.nextStock = best
# Purchase will not occur until the next call of handle_data
# and only when the order has been filled.

# If there is no stock currently held, it needs to be bought.
# This only happend
if context.currentStock is None:
amount = math.floor((context.portfolio.cash) / data[context.nextStock.sid].price) - 1
log.info('First purchase, buying %s of %s (%s of %s)' % \
(amount, context.nextStock, amount*data[context.nextStock.sid].price, context.portfolio.cash))
order(context.nextStock, amount)
context.currentStock = context.nextStock
context.oid = None
context.nextStock = None


I changed the code using data.history function in row 174 but I still get an error msg