So I was checking out QuantConnect's example demo code for XGBoost in Python from

(https://www.quantconnect.com/docs/v2/writing-algorithms/machine-learning/popular-libraries/xgboost#09-Clone-Example-Algorithm). 

However, when I cloned the demo and tried running the backtest with the code unchanged, I kept receiving the error

"During the algorithm initialization, the following exception has occurred: Can't get attribute 'NeuralNetwork' on  at _getattribute".

I tried asking Mia AI but couldn't find a working solution. I'm pretty new to machine learning libraries so I'm stuck on this one.

  1. # region imports
  2. from AlgorithmImports import *
  3. import xgboost as xgb
  4. import joblib
  5. # endregion
  6. class XGBoostExampleAlgorithm(QCAlgorithm):
  7. def Initialize(self):
  8. self.SetStartDate(2022, 7, 4)
  9. self.SetEndDate(2022, 7, 8)
  10. self.SetCash(100000)
  11. self.symbol = self.AddEquity("SPY", Resolution.Daily).Symbol
  12. training_length = 252*2
  13. self.training_data = RollingWindow[float](training_length)
  14. history = self.History[TradeBar](self.symbol, training_length, Resolution.Daily)
  15. for trade_bar in history:
  16. self.training_data.Add(trade_bar.Close)
  17. if self.ObjectStore.ContainsKey("model"):
  18. file_name = self.ObjectStore.GetFilePath("model")
  19. self.model = joblib.load(file_name)
  20. else:
  21. self.Train(self.my_training_method)
  22. self.Train(self.DateRules.Every(DayOfWeek.Sunday), self.TimeRules.At(8,0), self.my_training_method)
  23. def get_features_and_labels(self, n_steps=5):
  24. close_prices = np.array(list(self.training_data)[::-1])
  25. df = (np.roll(close_prices, -1) - close_prices) * 0.5 + close_prices * 0.5
  26. df = df[:-1]
  27. features = []
  28. labels = []
  29. for i in range(len(df)-n_steps):
  30. features.append(df[i:i+n_steps])
  31. labels.append(df[i+n_steps])
  32. features = np.array(features)
  33. labels = np.array(labels)
  34. features = (features - features.mean()) / features.std()
  35. labels = (labels - labels.mean()) / labels.std()
  36. d_matrix = xgb.DMatrix(features, label=labels)
  37. return d_matrix
  38. def my_training_method(self):
  39. d_matrix = self.get_features_and_labels()
  40. params = {
  41. 'booster': 'gbtree',
  42. 'colsample_bynode': 0.8,
  43. 'learning_rate': 0.1,
  44. 'lambda': 0.1,
  45. 'max_depth': 5,
  46. 'num_parallel_tree': 100,
  47. 'objective': 'reg:squarederror',
  48. 'subsample': 0.8,
  49. }
  50. self.model = xgb.train(params, d_matrix, num_boost_round=2)
  51. def OnData(self, slice: Slice) -> None:
  52. if self.symbol in slice.Bars:
  53. self.training_data.Add(slice.Bars[self.symbol].Close)
  54. new_d_matrix = self.get_features_and_labels()
  55. prediction = self.model.predict(new_d_matrix)
  56. prediction = prediction.flatten()
  57. if float(prediction[-1]) > float(prediction[-2]):
  58. self.SetHoldings(self.symbol, 1)
  59. else:
  60. self.SetHoldings(self.symbol, -1)
  61. def OnEndOfAlgorithm(self):
  62. model_key = "model"
  63. file_name = self.ObjectStore.GetFilePath(model_key)
  64. joblib.dump(self.model, file_name)
  65. self.ObjectStore.Save(model_key)
+ Expand

Author

David L

August 2023