Overall Statistics

| Statistic | Value |
| --- | --- |
| Total Trades | 55 |
| Average Win | 0.25% |
| Average Loss | -0.51% |
| Compounding Annual Return | 5.441% |
| Drawdown | 5.900% |
| Expectancy | 0.050 |
| Net Profit | 30.378% |
| Sharpe Ratio | 1.032 |
| Probabilistic Sharpe Ratio | 50.932% |
| Loss Rate | 29% |
| Win Rate | 71% |
| Profit-Loss Ratio | 0.48 |
| Alpha | 0.047 |
| Beta | -0.009 |
| Annual Standard Deviation | 0.044 |
| Annual Variance | 0.002 |
| Information Ratio | -0.481 |
| Tracking Error | 0.178 |
| Treynor Ratio | -4.964 |
| Total Fees | $64.42 |
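As a rough consistency check (not part of the original report), the trade statistics above line up with the usual definitions of these figures. The sketch below assumes ProfitLossRatio = AverageWin / |AverageLoss| and Expectancy = WinRate × ProfitLossRatio − LossRate; the inputs are the rounded values from the table, so small discrepancies remain.

```python
# Rough consistency check on the reported trade statistics, assuming the
# standard definitions (these formulas are an assumption, not taken from
# the report itself). Inputs are the rounded figures from the table above.
avg_win, avg_loss = 0.25, -0.51          # average win/loss per trade, in %
win_rate, loss_rate = 0.71, 0.29
reported_pl_ratio = 0.48

pl_ratio = avg_win / abs(avg_loss)                       # ~0.49 (reported: 0.48)
expectancy = win_rate * reported_pl_ratio - loss_rate    # ~0.051 (reported: 0.050)
print(round(pl_ratio, 2), round(expectancy, 3))
```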
```python
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2020 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from Model import Model


class OptimizedUncoupledAutosequencers(QCAlgorithm):

    def Initialize(self):
        self.SetStartDate(2015, 10, 1)  # Set Start Date
        self.SetEndDate(2020, 10, 1)
        self.SetCash(100000)            # Set Strategy Cash

        tickers = ['VTI', 'AGG', 'DBC', 'VIXY']
        for ticker in tickers:
            self.AddEquity(ticker, Resolution.Daily)

        # rolling window of the last n_periods daily Slices fed to the model
        n_periods = 51
        self.data = RollingWindow[Slice](n_periods)

        # retrain and rebalance at the start of every month
        self.Train(self.DateRules.MonthStart('VTI'), self.TimeRules.Midnight, self.Rebalance)

        self.model = None
        self.SetWarmup(n_periods)
        self.prev_day = -1

    def OnData(self, data):
        # prevents duplicate bars that sometimes occur because of SetWarmUp
        if self.prev_day != self.Time.day:
            self.data.Add(data)
        self.prev_day = self.Time.day

    def Rebalance(self):
        if not self.data.IsReady:
            return

        try:
            # the RollingWindow stores the most recent Slice first,
            # so reverse it into chronological order
            data = self.PandasConverter.GetDataFrame(self.data).iloc[::-1]
        except Exception:
            return

        # turn the closing prices for each equity into columns
        data = data['close'].unstack(level=0)

        # sometimes a row of data is missing; skip this rebalance if so
        if len(data) < self.data.Count:
            return

        tickers = [symbol.split(' ')[0] for symbol in data.columns]

        if self.model is None:
            self.model = Model()

        allocations = self.model.get_allocations(data)
        self.Log(f'Portfolio Allocations: {allocations}')

        for ticker, allocation in zip(tickers, allocations):
            self.SetHoldings(ticker, allocation)
```
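To make the reshaping in `Rebalance` easier to follow, here is a small standalone pandas sketch with synthetic numbers and made-up symbol strings. It assumes the `(symbol, time)` MultiIndex layout with a `close` column that `PandasConverter.GetDataFrame` produces for a window of Slices; the suffixes after each ticker are hypothetical placeholders for LEAN's security identifiers.

```python
import numpy as np
import pandas as pd

# Stand-in for PandasConverter.GetDataFrame(self.data): a (symbol, time)
# MultiIndex with a 'close' column. The "TICKER <id>" suffixes are made up.
symbols = ['VTI ABC123', 'AGG DEF456']
times = pd.date_range('2020-09-01', periods=3, freq='D')
index = pd.MultiIndex.from_product([symbols, times], names=['symbol', 'time'])
frame = pd.DataFrame({'close': np.random.uniform(90, 110, size=len(index))}, index=index)

# Same reshaping as Rebalance: pivot symbols into columns (one column of
# closing prices per asset) and recover the plain ticker from each symbol.
closes = frame['close'].unstack(level=0)
tickers = [symbol.split(' ')[0] for symbol in closes.columns]
print(tickers)   # ticker portion of each symbol string, e.g. 'VTI'
print(closes)    # rows indexed by time, one close column per asset
```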
```python
import numpy as np

# setting the seed allows for reproducible results
np.random.seed(123)

import tensorflow as tf
from tensorflow.keras.layers import LSTM, Flatten, Dense
from tensorflow.keras.models import Sequential
import tensorflow.keras.backend as K


class Model:
    def __init__(self):
        self.data = None
        self.model = None

    def __build_model(self, input_shape, outputs):
        '''
        Builds and returns the Deep Neural Network that will compute the allocation ratios
        that optimize the Sharpe Ratio of the portfolio

        inputs: input_shape - tuple of the input shape, outputs - the number of assets
        returns: a Deep Neural Network model
        '''
        model = Sequential([
            LSTM(64, input_shape=input_shape),
            Flatten(),
            Dense(outputs, activation='softmax')
        ])

        def sharpe_loss(_, y_pred):
            # make all time series start at 1
            data = tf.divide(self.data, self.data[0])

            # value of the portfolio after the allocations are applied
            portfolio_values = tf.reduce_sum(tf.multiply(data, y_pred), axis=1)

            # % change formula
            portfolio_returns = (portfolio_values[1:] - portfolio_values[:-1]) / portfolio_values[:-1]

            sharpe = K.mean(portfolio_returns) / K.std(portfolio_returns)

            # since we want to maximize the Sharpe ratio, while gradient descent minimizes the loss,
            # we negate it (the minimum of a negated function is its maximum)
            return -sharpe

        model.compile(loss=sharpe_loss, optimizer='adam')
        return model

    def get_allocations(self, data):
        '''
        Computes and returns the allocation ratios that optimize the Sharpe ratio over the given data

        input: data - DataFrame of historical closing prices of various assets
        return: the allocation ratios for each of the given assets
        '''
        # stack prices and returns side by side as the model's input features
        data_w_ret = np.concatenate([data.values[1:], data.pct_change().values[1:]], axis=1)

        data = data.iloc[1:]
        self.data = tf.cast(tf.constant(data), float)

        if self.model is None:
            self.model = self.__build_model(data_w_ret.shape, len(data.columns))

        # the whole window is one training sample; the dummy labels are ignored by sharpe_loss
        fit_predict_data = data_w_ret[np.newaxis, :]
        self.model.fit(fit_predict_data, np.zeros((1, len(data.columns))), epochs=20, shuffle=False)
        return self.model.predict(fit_predict_data)[0]
```
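For experimenting outside of LEAN, `Model.get_allocations` can be driven with a synthetic price DataFrame. The minimal sketch below (random-walk prices, hypothetical setup) assumes `Model.py` is on the import path and that numpy, pandas, and TensorFlow are installed.

```python
import numpy as np
import pandas as pd

from Model import Model

# Synthetic random-walk closing prices for four assets, shaped like the
# DataFrame that Rebalance passes in (rows = days, columns = tickers).
np.random.seed(0)
n_days, tickers = 51, ['VTI', 'AGG', 'DBC', 'VIXY']
returns = np.random.normal(0.0003, 0.01, size=(n_days, len(tickers)))
prices = pd.DataFrame(100 * np.exp(np.cumsum(returns, axis=0)), columns=tickers)

model = Model()
allocations = model.get_allocations(prices)   # softmax output, sums to ~1.0
print(dict(zip(tickers, np.round(allocations, 3))))
```

Because the final layer is a softmax, the returned weights are non-negative and sum to one, which is why the algorithm can pass them straight to `SetHoldings` without further normalization.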