Overall Statistics

Total Trades: 0
Average Win: 0%
Average Loss: 0%
Compounding Annual Return: 0%
Drawdown: 0%
Expectancy: 0
Net Profit: 0%
Sharpe Ratio: 0
Loss Rate: 0%
Win Rate: 0%
Profit-Loss Ratio: 0
Alpha: 0
Beta: 0
Annual Standard Deviation: 0
Annual Variance: 0
Information Ratio: 0
Tracking Error: 0
Treynor Ratio: 0
Total Fees: $0.00
# Standard QuantConnect (LEAN) Python imports; these provide QCAlgorithm,
# Resolution, BrokerageName, Action, OS and GC used below.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *

import numpy as np
from sklearn.neural_network import MLPClassifier
import time


class BasicTemplateAlgorithm(QCAlgorithm):

    def Initialize(self):
        self.SetCash(25000)
        self.SetStartDate(2017, 1, 1)
        self.SetEndDate(2017, 11, 3)
        self.SetBrokerageModel(BrokerageName.InteractiveBrokersBrokerage)
        self.stock = self.AddEquity("SPY", Resolution.Minute).Symbol

        # ensemble bookkeeping: which of the 30 classifiers have been trained
        self.no_of_classifiers = 30
        self.classifiers_trained = np.zeros((self.no_of_classifiers), dtype=bool)
        self.all_classifiers_trained = False
        self.random_numbers_for_training = range(self.no_of_classifiers)
        self.classifiers = {}

        # random training set
        self.X = np.random.rand(300000, 25)
        self.y = np.random.randint(2, size=300000)

        # retrain the whole ensemble at the start of every month
        self.Schedule.On(self.DateRules.MonthStart(self.stock),
                         self.TimeRules.AfterMarketOpen(self.stock),
                         Action(self.month_starts))

        self.Log("RAM used: " + str(OS.TotalPhysicalMemoryUsed) +
                 " out of available: " + str(OS.TotalPhysicalMemory))

    def OnData(self, slice):
        if self.all_classifiers_trained == False:
            self.train_classifiers()
        else:
            if not slice.ContainsKey("SPY"):
                return
            elif slice["SPY"] is None:
                return
            else:
                self.price = float(slice["SPY"].Close)

    def month_starts(self):
        # reset the flags so the ensemble is retrained from scratch each month
        self.all_classifiers_trained = False
        self.classifiers_trained = np.zeros((self.no_of_classifiers), dtype=bool)

    def train_classifiers(self):
        # 300,000 minute bars of SPY history, requested on every call while training
        self.spy_history = self.History(["SPY"], 300000, Resolution.Minute).loc["SPY"]
        if not self.all_classifiers_trained:
            self.start_time = time.time()
            n = np.where(self.classifiers_trained == False)[0][0]  # currently trained classifier
            self.classifier = MLPClassifier(hidden_layer_sizes=(30, 30), activation='relu',
                                            solver='adam', alpha=0.0001, batch_size='auto',
                                            learning_rate='constant', learning_rate_init=0.001,
                                            power_t=0.5, max_iter=200, shuffle=True,
                                            random_state=self.random_numbers_for_training[n],
                                            tol=0.0001, verbose=False, warm_start=False,
                                            momentum=0.9, nesterovs_momentum=True,
                                            early_stopping=False, validation_fraction=0.1,
                                            beta_1=0.9, beta_2=0.999, epsilon=1e-08)
            self.classifier.fit(self.X, self.y)
            self.classifiers[n] = self.classifier
            self.classifiers_trained[n] = True
            self.end_time = time.time()
            self.Log("Classifier " + str(n) + " fully trained - " +
                     str(self.end_time - self.start_time) + " - RAM: " + str(OS.TotalPhysicalMemoryUsed))
            self.Debug("Classifier " + str(n) + " fully trained - " +
                       str(self.end_time - self.start_time) + " - RAM: " + str(OS.TotalPhysicalMemoryUsed))
            # GC.Collect()
            if all(self.classifiers_trained == True):
                self.all_classifiers_trained = True
                self.Log("All classifiers trained. RAM: " + str(OS.TotalPhysicalMemoryUsed) +
                         " out of available: " + str(OS.TotalPhysicalMemory))
                self.Debug("All classifiers trained. RAM: " + str(OS.TotalPhysicalMemoryUsed) +
                           " out of available: " + str(OS.TotalPhysicalMemory))
                GC.Collect()
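For reference, a minimal sketch (not part of the algorithm above) of how the trained ensemble stored in self.classifiers could be turned into a single signal by majority vote; the helper name predict_ensemble and the `features` argument (assumed to be a 25-element vector matching the shape of self.X) are illustrative only:

    def predict_ensemble(self, features):
        # Hypothetical helper: combine the trained MLPClassifiers by majority vote.
        # `features` is assumed to be a 1-D array of length 25, like one row of self.X.
        votes = [clf.predict(np.asarray(features).reshape(1, -1))[0]
                 for clf in self.classifiers.values()]
        return 1 if sum(votes) > len(votes) / 2.0 else 0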