Overall Statistics

Total Orders: 195
Average Win: 1.16%
Average Loss: -7.09%
Compounding Annual Return: 79.954%
Drawdown: 20.800%
Expectancy: 0.091
Start Equity: 100000
End Equity: 179282.78
Net Profit: 79.283%
Sharpe Ratio: 2.014
Sortino Ratio: 2.615
Probabilistic Sharpe Ratio: 82.640%
Loss Rate: 6%
Win Rate: 94%
Profit-Loss Ratio: 0.16
Alpha: 0.244
Beta: 1.925
Annual Standard Deviation: 0.246
Annual Variance: 0.06
Information Ratio: 2.23
Tracking Error: 0.163
Treynor Ratio: 0.257
Total Fees: $2568.35
Estimated Strategy Capacity: $490000.00
Lowest Capacity Asset: UPRO UDQRQQYTO12D
Portfolio Turnover: 45.30%

from AlgorithmImports import *
from datetime import time


#// All indicators could/should be given functionality for both weighted and simple entry
#// Currently only ROC PERCENT, MFI, and ADX have simple entries, while ROC PERCENT and MFI have no weighted entries

# Enables or disables the requirement that a short entry is only allowed while the long score MIN_SCORE_PERCENTAGE_ENTRY_1_LONG
# is above NEGATIVE_MIN_SCORE_PERCENTAGE_THRESHOLD
USE_NEGATIVE_MIN_SCORE_PERCENTAGE_THRESHOLD = True
NEGATIVE_MIN_SCORE_PERCENTAGE_THRESHOLD = 0

# Short positions can be entered until this time
LATEST_CROSS_OVER_TIME = time(hour=15, minute=0)

# If this is True then no short positions will be entered if a long position is still open
# If False all Long positions will be liquidated if a short signal comes
VALIDATE_NO_LONG_POSITIONS = True

# Allows or disallows exits in extended market hours
ENABLE_EXTENDED_HOUR_EXIT = False
from AlgorithmImports import *
from datetime import time
from positions.calculation_weights import CalculationWeights

#######################################################  GENERAL SETTINGS  ###########################################################################################################



BACKTEST_START_YEAR = 2023                        # Set start Year of the Backtest
BACKTEST_START_MONTH = 1                             # Set start Month of the Backtest
BACKTEST_START_DAY = 1                                  # Set start Day of the Backtest

BACKTEST_END_YEAR = 2023                             # Set end Year of the Backtest
BACKTEST_END_MONTH = 12                                 # Set end Month of the Backtest       
BACKTEST_END_DAY = 31                                   # Set end Day of the Backtest

BACKTEST_ACCOUNT_CASH = 100000                        # Set Backtest Strategy Cash

# Currently only for 1 stock at the same time.
STOCKS = ["UPRO"]

# True or False
ENABLE_LONG_TRADES = True

# True or False
ENABLE_SHORT_TRADES = True


# Currently only "MARKET"
EXIT_MODE = "MARKET"


#// All indicators could/should be given functionality for both weighted and simple entry
#// Currently only ROC PERCENT, MFI, and ADX have simple entries, while ROC PERCENT and MFI have no weighted entries

#######################################################  TIME SETTINGS  ###########################################################################################################

# Positions can be entered after this time
TRADING_START_TIME = time(hour=9, minute=30)

# All positions will be closed after this time and no further entries will be made. (Please ensure this is at least 1 minute before market close.)
TRADING_END_TIME = time(hour=14, minute=45)


# Enable or Disable Conditional Trading End Time
# If this is enabled, the bot checks whether the current price is below the configured daily SMA,
# and if so it liquidates any open position at TRADING_END_TIME.
# This should help avoid overnight exposure for long positions during market downturns
# and thus reduce drawdown and increase return.
ENABLE_CONDITIONAL_TRADING_END_TIME = False
CONDITIONAL_TRADING_END_TIME_SMA_PERIOD = 40
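
# Illustrative sketch (an assumption, not part of the strategy code): how the conditional
# end-of-day liquidation described above could be evaluated. `now`, `current_price` and
# `daily_sma_value` are hypothetical placeholders, not existing helpers in this code base.
def should_liquidate_at_end_time(now, current_price, daily_sma_value):
    """Return True when the conditional end-of-day liquidation condition is met."""
    if not ENABLE_CONDITIONAL_TRADING_END_TIME:
        return False
    # Liquidate only at/after the configured end time and only while price is below the daily SMA
    return now.time() >= TRADING_END_TIME and current_price < daily_sma_value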


WEIGHTING_THRESHOLD_INCREASE_TIME = time(hour=14, minute=0)

# Percentage multiplier applied to weighting at the weighting threshold increase time,
# 50 = 50% increase in required weighting
WEIGHTING_THRESHOLD_INCREASE = 50
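
# Illustrative sketch (an assumption, not existing code): after WEIGHTING_THRESHOLD_INCREASE_TIME
# the required weighting threshold is raised by WEIGHTING_THRESHOLD_INCREASE percent,
# e.g. a threshold of 10 becomes 15 with an increase of 50.
def adjusted_weighting_threshold(base_threshold, now):
    """Return the entry threshold, raised after the configured increase time."""
    if now.time() >= WEIGHTING_THRESHOLD_INCREASE_TIME:
        return base_threshold * (1 + WEIGHTING_THRESHOLD_INCREASE / 100)
    return base_threshold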

# region imports
from AlgorithmImports import *
# endregion
# Weights and thresholds from config_long_allocation_weights.py


USE_ALLOCATION_WEIGHTS = True
ENABLE_BEAR_MARKET_DETECTION = True
ENABLE_NH_NL = True
ENABLE_ADR = True


NH_NL_WEIGHT = 0.2
NH_NL_THRESHOLD_LOWER = -100  # Bearish signal (dominance of lows)
NH_NL_THRESHOLD_UPPER = 100   # Bullish signal (dominance of highs)

ADL_WEIGHT = 0.2
ADL_THRESHOLD_LOWER = 0.8     # Bearish signal (more decliners)
ADL_THRESHOLD_UPPER = 1.2     # Bullish signal (more advancers)


BEAR_MARKET_DETECTION_WEIGHT = 0.6
BEAR_MARKET_DETECTION_THRESHOLD_LOWER = 0
BEAR_MARKET_DETECTION_THRESHOLD_UPPER = 1

MIN_ALLOCATION = 0.5
MAX_ALLOCATION = 1
USE_LIMIT_LONG_CASH = False
MAX_LONG_CASH_PERCENTAGE = 100


# Used to adjust the damping of the logarithmic total weighting calculation.
# Higher values make the weighting less sensitive to changes in the individual indicator weightings and usually allow higher weightings to be reached more easily.
WEIGHT_SENSITIVITY_DAMPING = 1

# Percentage of Calculation Weight Total Score CWTS required for Entry 1
MIN_SCORE_PERCENTAGE_ENTRY_1 = 15.5

LIQUIDATE_MONTHS_PRIOR_MOC = False
USE_NO_ENTRY_MONTHS = True
# Enter any month of the year in UPPER CASE here, e.g. SEPTEMBER.
# No long trades will be entered during any month in this list.
NO_ENTRY_MONTHS = ["SEPTEMBER"]

LIQUIDATE_DAY_PRIOR_DAYS = False
USE_NO_ENTRY_DAYS = False
NO_ENTRY_DAYS = ["FRIDAY"]

# Enables or disables bear market protection
ENABLE_BEAR_MARKET_DETECTION = False
# Percentage allocation that is used when a bear market has been detected
# This also updates already open positions
BEAR_MARKET_PERCENTAGE = 50
# Print when bear market is detected
PRINT_BEAR_MARKET = False

# Enables the New High / New Low filter; when enabled, entries are prohibited while the indicator is below 0
ENABLE_NH_NL = False
# Enables Exit of open positions upon this condition being True
ENABLE_NH_NL_EXIT = False
# Period of the SMA applied to this indicator (the SMA is currently unused)
NH_NL_MA_PERIOD = 20


LIQUIDATE_BLACKOUT_PERIOD_PRIOR_MOC = False
USE_BLACKOUT_PERIODS = True
# Enter any period here in yyyy-mm-dd format; no long trades may be entered during these periods
BLACKOUT_PERIODS = [

#2019
    {"start_date": "2019-02-19", "end_date": "2019-02-22", "reason": "PRESIDENTS DAY"},
    #{"start_date": "2019-05-01", "end_date": "2019-05-31", "reason": "MAY 2019"},
    {"start_date": "2019-07-04", "end_date": "2019-07-05", "reason": "AFTER JULY 4"},
    {"start_date": "2019-08-31", "end_date": "2019-08-31", "reason": "BEFORE SEPTEMBER"},
    {"start_date": "2019-10-01", "end_date": "2019-10-02", "reason": "AFTER SEPTEMBER"},

#2020
    {"start_date": "2020-02-19", "end_date": "2020-02-22", "reason": "PRESIDENTS DAY"},
    {"start_date": "2020-07-04", "end_date": "2020-07-05", "reason": "AFTER JULY 4"},
    {"start_date": "2020-08-31", "end_date": "2020-08-31", "reason": "BEFORE SEPTEMBER"},
    {"start_date": "2020-10-01", "end_date": "2020-10-02", "reason": "AFTER SEPTEMBER"},

#2021
    {"start_date": "2021-02-19", "end_date": "2021-02-22", "reason": "PRESIDENTS DAY"},
    {"start_date": "2021-07-04", "end_date": "2021-07-05", "reason": "AFTER JULY 4"},
    {"start_date": "2021-08-31", "end_date": "2021-08-31", "reason": "BEFORE SEPTEMBER"},
    {"start_date": "2021-10-01", "end_date": "2021-10-02", "reason": "AFTER SEPTEMBER"},

#2022
    {"start_date": "2022-02-19", "end_date": "2022-02-22", "reason": "PRESIDENTS DAY"},
    {"start_date": "2022-07-04", "end_date": "2022-07-05", "reason": "AFTER JULY 4"},
    {"start_date": "2022-08-31", "end_date": "2022-08-31", "reason": "BEFORE SEPTEMBER"},
    {"start_date": "2022-10-01", "end_date": "2022-10-02", "reason": "AFTER SEPTEMBER"},

#2023
    {"start_date": "2023-02-19", "end_date": "2023-02-22", "reason": "PRESIDENTS DAY"},
    {"start_date": "2023-07-04", "end_date": "2023-07-05", "reason": "AFTER JULY 4"},
    {"start_date": "2023-08-31", "end_date": "2023-08-31", "reason": "BEFORE SEPTEMBER"},
    {"start_date": "2023-10-01", "end_date": "2023-10-02", "reason": "AFTER SEPTEMBER"},
]
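
# Illustrative sketch (an assumption, not existing code): checking whether a given date
# falls inside one of the blackout periods configured above.
from datetime import datetime

def is_in_blackout_period(current_date):
    """Return True when current_date (a datetime.date) lies inside any configured blackout period."""
    for period in BLACKOUT_PERIODS:
        start = datetime.strptime(period["start_date"], "%Y-%m-%d").date()
        end = datetime.strptime(period["end_date"], "%Y-%m-%d").date()
        if start <= current_date <= end:
            return True
    return False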

#region imports
from AlgorithmImports import *
#endregion
# This adds extra weighting to the total max weighting,
# which is another way to alter the sensitivity of the weighting calculation.
# E.g. if the total max weighting was 20 before, it might take only 5 points of current weighting to reach a buy threshold;
# if this variable is then set to, say, 3, it might take 6 points of current weighting to reach that threshold.
# (The numbers above are not accurate and are only for the sake of explanation.)
TOTAL_MAX_WEIGHTING_INCREASE = 0
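
# Illustrative sketch (an assumption, not existing code): the increase widens the denominator
# of the score percentage, so the same raw weighting yields a lower percentage of the total.
def score_percentage(current_weighting, total_max_weighting):
    """Return the current weighting as a percentage of the (increased) total max weighting."""
    effective_max = total_max_weighting + TOTAL_MAX_WEIGHTING_INCREASE
    return 100 * current_weighting / effective_max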

# If disabled, the LONG entry will automatically use the weighting logic for entries
# If enabled, the LONG entry will use the simple entry config below
USE_SIMPLE_ENTRY = False

# Here simply enter the indicators to be used for the simple LONG entry
# Options are "ADX", "MFI", "ROC_PERCENT"
SIMPLE_ENTRY_CONFIG = ["ADX"]

# Please refer to the CalculationWeights class for documentation (you can Ctrl+click on CalculationWeights):

LOG_SCALING_THRESHOLD_PERCENTAGE_1 = 25

LOG_SCALING_THRESHOLD_PERCENTAGE_2 = 50

LOG_SCALING_THRESHOLD_PERCENTAGE_3 = 75

# Percentage of Calculation Weight Total Score CWTS required for Entry 2
MIN_SCORE_PERCENTAGE_ENTRY_2 = 25

# Percentage of Calculation Weight Total Score CWTS required for Entry 3
MIN_SCORE_PERCENTAGE_ENTRY_3 = 65

# Enable or disable the use of MIN_SCORE_PERCENTAGE_ENTRY_2 and MIN_SCORE_PERCENTAGE_ENTRY_3 and their corresponding entries
USE_LAYERED_ENTRIES = False

# "SHARES" or "PERCENTAGE"
QUANTITY_MODE = "PERCENTAGE"

# Percentage of account cash to be invested when the first min score is reached,
# 0.5 = 50%
QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_1 = 0.15

# Percentage of account cash to be invested when the second min score is reached,
# 0.7 = 70%
QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_2 = 0.95

# Percentage of account cash to be invested when the final min score is reached,
# 1 = 100%
QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_3 = 1

# Number of shares to be invested when the first min score is reached
QUANTITY_TOTAL_SHARES_ENTRY_1 = 1

# Number of shares to be invested when the second min score is reached
QUANTITY_TOTAL_SHARES_ENTRY_2_LONG = 3

# Number of shares to be invested when the third min score is reached
QUANTITY_TOTAL_SHARES_ENTRY_3 = 4
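
# Illustrative sketch (an assumption, not existing code): selecting the entry quantity from
# QUANTITY_MODE and the minimum-score level that was reached (1, 2 or 3).
def entry_quantity(entry_level, available_cash, share_price):
    """Return an order quantity in shares for the given entry level."""
    if QUANTITY_MODE == "PERCENTAGE":
        cash_fractions = {
            1: QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_1,
            2: QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_2,
            3: QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_3,
        }
        return int(available_cash * cash_fractions[entry_level] / share_price)
    # "SHARES" mode: fixed share counts per entry level
    shares = {
        1: QUANTITY_TOTAL_SHARES_ENTRY_1,
        2: QUANTITY_TOTAL_SHARES_ENTRY_2_LONG,
        3: QUANTITY_TOTAL_SHARES_ENTRY_3,
    }
    return shares[entry_level]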
#region imports
from AlgorithmImports import *
#endregion
# Stop Loss Percentage, 3 = 3%
STOP_PERCENTAGE = 11.75

# Untested, ETA Tomorrow
TAKE_PROFIT_X_TIMES_RISK = 2
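
# Illustrative sketch (an assumption, not existing code): stop and take-profit prices derived
# from STOP_PERCENTAGE and TAKE_PROFIT_X_TIMES_RISK for a long entry.
def long_stop_and_take_profit(entry_price):
    """Return (stop_price, take_profit_price) for a long position."""
    risk_per_share = entry_price * STOP_PERCENTAGE / 100
    stop_price = entry_price - risk_per_share
    take_profit_price = entry_price + TAKE_PROFIT_X_TIMES_RISK * risk_per_share
    return stop_price, take_profit_price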

USE_LONG_VOLATILITY_STOPS = False

#floor .5
LOW_VOLATILITY_STOP_PERCENTAGE = 2.5

HIGH_VOLATILITY_STOP_PERCENTAGE = 3.5

# Enable or disable trailing stop by inputting True or False
USE_TRAILING_STOP = False

# Required profit percentage at which the trailing stop will be activated
# 1 = 1%
TRAILING_STOP_ACTIVATION_THRESHOLD = 1

# Trailing percentage, 1 = 1%
TRAILING_STOP_PERCENTAGE = 1

# Enable or disable the ATR based trailing stop,
# This currently overwrites the regular trailing stop if both are active
USE_ATR_TRAILING_STOP = True

# offset for LONG UPRO in positive direction
# 1 
ATR_OFFSET_POSITIVE = 1.027

#  offset for LONG UPRO in negative direction
ATR_OFFSET_NEGATIVE = 9.965

# For a trade trending in the positive direction, these levels tighten the ATR exit to lock in profits.
# The keys (left) are gain percentages;
# the values (right) are the divisors applied to the trailing percent once that gain is reached.
POSITIVE_TIGHTEN_LEVELS = {
    .25: 2,
    1.0: 4.8,
    1.5: 7
}
NEGATIVE_TIGHTEN_LEVELS = {
    0.25: 2.0,
    1.0: 4.8,
    1.5: 7
}
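
# Illustrative sketch (an assumption, not existing code): applying the tighten levels above.
# Once the open gain reaches a key (in percent), the trailing percent is divided by the
# corresponding value; the highest level reached is the one that applies.
def tightened_trailing_percent(base_trailing_percent, gain_percent, levels=POSITIVE_TIGHTEN_LEVELS):
    """Return the trailing percent after applying the highest reached tighten level."""
    divisor = 1.0
    for threshold in sorted(levels):
        if gain_percent >= threshold:
            divisor = levels[threshold]
    return base_trailing_percent / divisor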

# Scaling model: ATR as a percentage of price; the two values below set the range over which the trailing percent is scaled
SCALE_MIN_ATR = 0.085
SCALE_MAX_ATR = 0.0895

# These define the minimum and maximum trailing percentages;
# the scaling above can set the trailing percentage to any value
# between these two numbers
MIN_TRAILING_PERCENT = 0.44
MAX_TRAILING_PERCENT = 1.55
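
# Illustrative sketch (an assumption, not existing code): linearly mapping the current
# ATR-as-percent-of-price from [SCALE_MIN_ATR, SCALE_MAX_ATR] onto a trailing percent
# in [MIN_TRAILING_PERCENT, MAX_TRAILING_PERCENT], clamped at both ends.
def scaled_trailing_percent(atr_percent_of_price):
    """Return the trailing percent for the given ATR as a percentage of price."""
    fraction = (atr_percent_of_price - SCALE_MIN_ATR) / (SCALE_MAX_ATR - SCALE_MIN_ATR)
    fraction = max(0.0, min(1.0, fraction))  # clamp to the configured range
    return MIN_TRAILING_PERCENT + fraction * (MAX_TRAILING_PERCENT - MIN_TRAILING_PERCENT)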



ENABLE_POSITIVE_TRAILING_STOP = True
ENABLE_NEGATIVE_TRAILING_STOP = True
#region imports
from AlgorithmImports import *
#endregion
#// I removed all _LONG from the variables here,
#// which allowed me to remove roughly 50 lines of code duplication from setup_indicators.py.
#// Removing the code duplication also makes the code more extensible and easier to maintain.
#// Additionally, this change allowed me to remove roughly 200 lines of code duplication from
#// setup_timeframes.py (with the same additional benefits there),
#// as well as other smaller benefits in various places in the code base.




####################################################### CHANDELIER EXIT STRATEGY ###########################################################################################################


USE_CHANDELIER_EXITS = False
CHANDELIER_ATR_TIMEFRAME = (60, "Minute")
USE_CLOSE_PRICE_FOR_EXTREME_MOMENTUM = True
CHANDELIER_ATR_PERIOD = 14
CHANDELIER_ATR_MULTIPLIER = 3
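
# Illustrative sketch (an assumption, not existing code): a standard chandelier exit level
# for a long position, using the highest high (or highest close, when
# USE_CLOSE_PRICE_FOR_EXTREME_MOMENTUM is True) over the ATR period, minus ATR * multiplier.
def chandelier_exit_long(recent_extremes, atr_value):
    """Return the chandelier exit level from the recent highs/closes and the current ATR."""
    extreme = max(recent_extremes[-CHANDELIER_ATR_PERIOD:])
    return extreme - CHANDELIER_ATR_MULTIPLIER * atr_value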






####################################################### ROC PERCENT STRATEGY ###########################################################################################################

ROC_PERCENT_WEIGHTING = 0

ROC_PERCENT_PERIOD = 14

ROC_PERCENT_BOL_PERIOD = 20

ROC_PERCENT_BOL_BAND_MULTIPLIER = 4

ROC_PERCENT_LOWER_THRESHOLD = 70

# Timeframe to be used, please see top of this file for further details.
ROC_PERCENT_TIMEFRAME = (15, "Minute")



####################################################### ROC RATIO STRATEGY ###########################################################################################################

ROC_RATIO_WEIGHTING = 0

ROC_RATIO_PERIOD = 14

ROC_RATIO_BOL_PERIOD = 20

ROC_RATIO_BOL_BAND_MULTIPLIER = 4

ROC_RATIO_LOWER_THRESHOLD = 70

# Timeframe to be used, please see top of this file for further details.
ROC_RATIO_TIMEFRAME = (15, "Minute")








####################################################### MFI STRATEGY ###########################################################################################################

MFI_WEIGHTING = 0

MFI_PERIOD = 14

MFI_OVERBOUGHT = 70

MFI_OVERSOLD = 70

# Timeframe to be used, please see top of this file for further details.
MFI_TIMEFRAME = (15, "Minute")






#######################################################  MACD STRATEGY  ###########################################################################################################

MACD_WEIGHTING = 3

# Percentages determining how much of the total MACD_WEIGHTING is allocated to each condition, 33 = 33%
MACD_RELATIVE_WEIGHTING_GOLDEN_CROSS = 50
MACD_RELATIVE_WEIGHTING_DEATH_CROSS = 25
MACD_RELATIVE_WEIGHTING_ZERO_LINE = 0
MACD_RELATIVE_WEIGHTING_POSITION = 25
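
# Illustrative sketch (an assumption, not existing code): the absolute weight contributed by
# one MACD condition is its relative percentage of the total MACD_WEIGHTING,
# e.g. the golden cross above contributes MACD_WEIGHTING * 50 / 100.
def macd_condition_weight(relative_weighting_percent):
    """Return the absolute weight contributed by a single MACD condition."""
    return MACD_WEIGHTING * relative_weighting_percent / 100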

# Timeframe to be used, please see top of this file for further details.
MACD_TIMEFRAME = (20, "Minute")

MACD_FAST_PERIOD = 15

MACD_SLOW_PERIOD = 25

MACD_SIGNAL_PERIOD = 9



#######################################################  RSI STRATEGY  ###########################################################################################################

RSI_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
RSI_TIMEFRAME = (15, "Minute")

RSI_PERIOD = 14


# Below values will be used to determine bullish and bearish. 
# Right now it is linearly scaled, with the RSI_BASELINE to RSI_OVERBOUGHT being bearish territory and
# RSI_BASELINE to RSI_OVERSOLD bullish.

RSI_OVERBOUGHT = 73.5

RSI_BASELINE = 45

RSI_OVERSOLD = 28.5
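
# Illustrative sketch (an assumption, not existing code): one way to express the linear
# scaling described above as a signed score, +1 at/below RSI_OVERSOLD, 0 at RSI_BASELINE
# and -1 at/above RSI_OVERBOUGHT. Whether this exact formula is used elsewhere in the
# code base is an assumption.
def rsi_score(rsi_value):
    """Return a signed bullish/bearish score in [-1, 1] for the given RSI value."""
    if rsi_value <= RSI_BASELINE:
        fraction = (RSI_BASELINE - rsi_value) / (RSI_BASELINE - RSI_OVERSOLD)
        return min(1.0, fraction)
    fraction = (rsi_value - RSI_BASELINE) / (RSI_OVERBOUGHT - RSI_BASELINE)
    return -min(1.0, fraction)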





#######################################################  BOLLINGER BANDS STRATEGY  ###########################################################################################################

# Untested
BOLLINGER_BANDS_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
BOLLINGER_BANDS_TIMEFRAME = (15, "Minute")

BOLLINGER_BANDS_PERIOD = 14

BOLLINGER_BANDS_OFFSET = 2



#######################################################  VWAP STRATEGY  ###########################################################################################################

VWAP_WEIGHTING = 10

# Offset multiplier applied to the standard deviation around VWAP to calculate the bands
VWAP_UPPER_BAND_1_OFFSET = 2
VWAP_LOWER_BAND_1_OFFSET = 2

VWAP_UPPER_BAND_2_OFFSET = 3
VWAP_LOWER_BAND_2_OFFSET = 3

# Percentage offset applied to VWAP itself, 10 = 10%
VWAP_OFFSET = 0.00
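
# Illustrative sketch (an assumption, not existing code): VWAP bands built from the offsets
# above, i.e. VWAP shifted by VWAP_OFFSET percent, plus/minus a multiple of the standard
# deviation of price around VWAP.
def vwap_bands(vwap_value, stdev_value):
    """Return (upper_1, lower_1, upper_2, lower_2) band levels."""
    center = vwap_value * (1 + VWAP_OFFSET / 100)
    return (
        center + VWAP_UPPER_BAND_1_OFFSET * stdev_value,
        center - VWAP_LOWER_BAND_1_OFFSET * stdev_value,
        center + VWAP_UPPER_BAND_2_OFFSET * stdev_value,
        center - VWAP_LOWER_BAND_2_OFFSET * stdev_value,
    )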


####################################################### CROSS OVER STRATEGY ###########################################################################################################

# Still fixing bugs here at the moment, so should be left at 0
CROSS_OVER_WEIGHTING = 3

ENABLE_SMA_CROSS = False
ENABLE_EMA_CROSS = True
ENABLE_SMA_GOLDEN_CROSS = False
ENABLE_EMA_GOLDEN_CROSS = True

GOLDEN_CROSS_WEIGHT_INCREASE_FACTOR = 0

# Duration of the weight increase after a golden cross occurs, in minutes
GOLDEN_CROSS_WEIGHT_INCREASE_DURATION = 1

ENABLE_SMA_DEATH_CROSS = False
ENABLE_EMA_DEATH_CROSS = True

DEATH_CROSS_WEIGHT_INCREASE_FACTOR = 1.5

# Duration of the weight increase after a death cross occurs, in minutes
DEATH_CROSS_WEIGHT_INCREASE_DURATION = 120

SMA_PERIOD_FAST = 1
SMA_PERIOD_SLOW = 300

EMA_PERIOD_FAST = 85
EMA_PERIOD_SLOW = 200

# Timeframes to be used, please see top of this file for further details.
# Currently only maximum of 5 each possible
SMA_TIMEFRAMES = [(1, "Day")]
EMA_TIMEFRAMES = [(1, "Day")]

####################################################### STOCHASTIC OSCILLATOR STRATEGY ###########################################################################################################

STOCHASTIC_OSCILLATOR_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
STOCHASTIC_OSCILLATOR_TIMEFRAME = (15, "Minute")

STOCHASTIC_OSCILLATOR_LENGTH = 14

STOCHASTIC_OSCILLATOR_K_SMOOTHING = 1

STOCHASTIC_OSCILLATOR_D_SMOOTHING = 3

# Below values will be used to determine bullish and bearish.
# Right now it is linearly scaled, with the STOCHASTIC_OSCILLATOR_BASELINE to STOCHASTIC_OSCILLATOR_OVERBOUGHT being bearish territory and
# STOCHASTIC_OSCILLATOR_BASELINE to STOCHASTIC_OSCILLATOR_OVERSOLD bullish.
STOCHASTIC_OSCILLATOR_OVERBOUGHT = 74

STOCHASTIC_OSCILLATOR_BASELINE = 50.5

STOCHASTIC_OSCILLATOR_OVERSOLD = 32.75

####################################################### OBV ACC DIST STRATEGY ###########################################################################################################

# Might be good to give low weighting
OBV_ACC_DIST_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
OBV_ACC_DIST_TIMEFRAME = (1, "Day")

# Moving average applied to the Accumulation/Distribution of OBV
OBV_ACC_DIST_SMA_LENGTH = 10

####################################################### ADX STRATEGY ###########################################################################################################

# Might be good to give high weighting
ADX_WEIGHTING = 12

# Minimum value the ADX needs to reach for its weighting to be > 0
ADX_MIN_TREND_STRENGTH = 18.9

# Maximum ADX value at which the full ADX Weighting is reached.
# +DI and -DI are used to determine the direction of trend, so they switch the weighting between positive and negative.
ADX_MAX_TREND_STRENGTH = 30
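
# Illustrative sketch (an assumption, not existing code): scaling the ADX contribution
# linearly between ADX_MIN_TREND_STRENGTH (weight 0) and ADX_MAX_TREND_STRENGTH
# (full ADX_WEIGHTING), with the sign taken from the +DI / -DI comparison.
def adx_weight(adx_value, plus_di, minus_di):
    """Return the signed ADX weighting contribution."""
    if adx_value <= ADX_MIN_TREND_STRENGTH:
        return 0.0
    span = ADX_MAX_TREND_STRENGTH - ADX_MIN_TREND_STRENGTH
    strength = min(1.0, (adx_value - ADX_MIN_TREND_STRENGTH) / span)
    direction = 1.0 if plus_di >= minus_di else -1.0
    return direction * strength * ADX_WEIGHTING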

# Timeframe to be used, please see top of this file for further details.
ADX_TIMEFRAME= (15, "Minute")

ADX_LENGTH = 14




####################################################### DIRECTIONAL VOLATILITY AND VOLUME STRATEGY ###########################################################################################################

# Might be good to give low weighting

DVV_WEIGHTING = 1.9

# Timeframe to be used, please see top of this file for further details.
DVV_TIMEFRAME = (15, "Minute")

DVV_VOLATILITY_PERIOD = 6

# Options here are "SMA", "EMA", "RMA", "WMA"
DVV_VOLATILITY_SMOOTHING = "SMA"

DVV_VOLUME_PERIOD = 14

# Options here are "SMA", "EMA", "RMA", "WMA"
DVV_VOLUME_SMOOTHING = "SMA"

DVV_ZONE_PERIOD = 14

# Options here are "SMA", "EMA", "RMA", "WMA"
DVV_ZONE_SMOOTHING = "SMA"


####################################################### CHAIKIN VOLATILITY STRATEGY ###########################################################################################################

# Might be good to give low weighting

CHAIKIN_VOLATILITY_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
CHAIKIN_VOLATILITY_TIMEFRAME = (15, "Minute")

CHAIKIN_VOLATILITY_LENGTH = 10

CHAIKIN_VOLATILITY_ROC_LENGTH = 12
#region imports
from AlgorithmImports import *
#endregion
# Number of minutes after a losing long trade is closed during which no further long trade can be entered
LOSS_TIME_SLOT = 1440

# Number of minutes after a winning long trade is closed during which no further long trade can be entered
WIN_TIME_SLOT = 30

# Relating to:
# The reason it occasionally was not triggering is that the control variable was also used by other
# trade-pause variables, which, if not triggered in that instance, would overwrite the pause and allow entry.
# I have added ENABLE_OLD_PAUSE_BEHAVIOR = True.
# The new behavior seems to cause problems especially in 2022, which are somewhat mitigated by turning on bear market detection.


ENABLE_OLD_PAUSE_BEHAVIOR = True

RESET_LOSS_COUNTER_EACH_DAY = True

BACK_OFF_TIME_SLOTS = {
    1: 1,  # Multiplier for 1st loss
    2: 1,  # Multiplier for 2nd loss
    3: 1,  # Multiplier for 3rd loss
    4: 1,  # Multiplier for 4th loss
    5: 1,  # Multiplier for 5th loss
    6: 1,  # Multiplier for 6th loss
    7: 1,  # Multiplier for 7th loss
    8: 1,  # Multiplier for 8th loss
    9: 1,  # Multiplier for 9th loss
}
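
# Illustrative sketch (an assumption, not existing code): the pause after a losing long trade,
# scaled by the back-off multiplier for the current loss count.
def loss_pause_minutes(loss_count):
    """Return how many minutes to pause long entries after the given consecutive loss number."""
    multiplier = BACK_OFF_TIME_SLOTS.get(loss_count, 1)
    return LOSS_TIME_SLOT * multiplier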

####################################################### MA RESTRICTION ###########################################################################################################


# Moving average used in the restriction;
# simply find the indicator in the documentation and enter its class name along with the desired period and,
# if applicable, other required settings, e.g.
# RegressionChannel(200, 2): 200 is the period and 2 is the k factor for the bands
# SimpleMovingAverage(200): 200 is the period and there are no other settings
# DOCS: https://www.quantconnect.com/docs/v2/writing-algorithms/indicators/supported-indicators
# There may be some confusion about which indicators can be used here
# and how to configure them; any confusion will have to be resolved case by case via e-mail
MA_RESTRICTION_TYPE = ExponentialMovingAverage(1725)
MA_RESTRICTION_TYPE_2 = HullMovingAverage(37)

# If this is True and an indicator has a LowerBand,
# then the LowerBand is used for the comparison instead.
# E.g. BollingerBands with this set to False would use the MiddleBand,
# but with this set to True it would use the LowerBand.
# Right now one has to manually make sure that the indicator has a LowerBand.
# This might be tricky for now because, e.g.,
# LinearRegressionChannel names its lower band LowerChannel,
# while BollingerBands name it LowerBand.
# There are many variations of this; right now LowerChannel is hardcoded.
USER_LOWER_BAND_IF_AVAILABLE = False

# This enables or disables the Pause MA Restriction
# Please note that this overwrites any liquidation setting for Pause MA
PAUSE_MA_RESTRICTION_ACTIVE = False

# If Pause MA is enabled then this Liquidates any open LONG position as well.
LIQUIDATE_ON_PAUSE_MA_TRIGGER = False

# Number of Minutes to Pause for after trigger hit.
PAUSE_MA_PAUSE_MINUTES = 30

# Timeframe for the Pause MA to use, e.g. (1, "Day") is 1-day candles
# CHANGES TO THIS VALUE AFFECT BOTH GAIN AND DRAWDOWN
PAUSE_MA_TIMEFRAME = (1, "Day") 

# If this is activated then, regardless of other settings,
# if the price is below both MA_RESTRICTION_TYPE indicators,
# the entry quantity is multiplied by the factor below.
# Currently only for percentage entries and only long entries.
PAUSE_MA_ENTRY_SCALE_ACTIVE = True
PAUSE_MA_ENTRY_SCALE_FACTOR = 0.4325
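
# Illustrative sketch (an assumption, not existing code): scaling the long entry size when
# price is below both restriction moving averages. `price_below_both_mas` is a hypothetical
# placeholder for the check against MA_RESTRICTION_TYPE and MA_RESTRICTION_TYPE_2.
def scaled_entry_fraction(base_cash_fraction, price_below_both_mas):
    """Return the cash fraction to use for the entry, scaled if the pause-MA condition holds."""
    if PAUSE_MA_ENTRY_SCALE_ACTIVE and price_below_both_mas:
        return base_cash_fraction * PAUSE_MA_ENTRY_SCALE_FACTOR
    return base_cash_fraction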

####################################################### VIX PAUSE ###########################################################################################################



# Flag to enable/disable pausing trading based on VIX movements
VIX_PAUSE_MINUTES_ACTIVE = False

# Time period in minutes to monitor for increases in the VIX
VIX_INCREASE_PERIOD_IN_MINUTES = 390

# Maximum percentage increase in VIX to trigger a pause
VIX_MAX_PERCENTAGE_INCREASE = 15

# Number of minutes to pause trading following a VIX-based trigger
VIX_PAUSE_MINUTES = 30

# Determines whether positions are liquidated when a VIX trigger occurs
LIQUIDATE_ON_VIX_TRIGGER = False

# This enables the print for this section
ENABLE_PRINT_VIX_INCREASE = False
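
# Illustrative sketch (an assumption, not existing code): the VIX pause trigger described above.
# `vix_history` is a hypothetical list of minute-by-minute VIX values, newest last.
def vix_pause_triggered(vix_history):
    """Return True when VIX rose by more than VIX_MAX_PERCENTAGE_INCREASE over the monitored window."""
    if not VIX_PAUSE_MINUTES_ACTIVE or len(vix_history) <= VIX_INCREASE_PERIOD_IN_MINUTES:
        return False
    past = vix_history[-VIX_INCREASE_PERIOD_IN_MINUTES - 1]
    increase_percent = (vix_history[-1] - past) / past * 100
    return increase_percent > VIX_MAX_PERCENTAGE_INCREASE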


####################################################### VIX CHANNEL PAUSE ###########################################################################################################


# Indicates whether to use a regression channel based on VIX data for decisions
VIX_REGRESSION_CHANNEL_ACTIVE = True
# Period for the VIX regression channel
VIX_REGRESSION_CHANNEL_PERIOD = 1440
# Number of standard deviations for the VIX regression channel
VIX_REGRESSION_CHANNEL_STDEV = 6

# Pause duration in minutes if the VIX regression channel is triggered
VIX_REGRESSION_CHANNEL_PAUSE_MINUTES = 2500
# Controls whether to liquidate positions when the VIX regression channel triggers
LIQUIDATE_ON_VIX_RC_TRIGGER = False

# This enables the print for this section
ENABLE_PRINT_VIX_CHANNEL = False
#region imports
from AlgorithmImports import *
#endregion
# This adds extra weighting to the total max weighting,
# which is another way to alter the sensitivity of the weighting calculation.
# E.g. if the total max weighting was 20 before, it might take only 5 points of current weighting to reach a buy threshold;
# if this variable is then set to, say, 3, it might take 6 points of current weighting to reach that threshold.
# (The numbers above are not accurate and are only for the sake of explanation.)
TOTAL_MAX_WEIGHTING_INCREASE = 3


# If disabled the SHORT entry will automatically use the weighting logic for entries
# If enabled the SHORT entry will use the simple entry config below
USE_SIMPLE_ENTRY = False

# If enabled any Short position will be limited to this cash percentage
# And positions will be rebalanced daily to keep meeting this target
USE_LIMIT_SHORT_CASH = True

# Cash Percentage to use
MAX_SHORT_CASH_PERCENTAGE = 80
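
# Illustrative sketch (an assumption, not existing code): capping the short exposure at
# MAX_SHORT_CASH_PERCENTAGE of portfolio value and computing the share amount needed to
# rebalance back to that target.
def short_rebalance_shares(portfolio_value, current_short_value, share_price):
    """Return how many shares to buy back to bring the short exposure down to the cap (0 if within it)."""
    if not USE_LIMIT_SHORT_CASH:
        return 0
    target_value = portfolio_value * MAX_SHORT_CASH_PERCENTAGE / 100
    excess_value = current_short_value - target_value
    if excess_value <= 0:
        return 0
    return int(excess_value / share_price)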

# Here simply enter the indicators to be used for the simple SHORT entry
# Options are "ADX", "MFI", "ROC_PERCENT"
SIMPLE_ENTRY_CONFIG = ["ADX"]

# Enter here the minimum number of them that need to be True to trigger a SHORT entry
SIMPLE_ENTRY_MINIMUM_TO_TRIGGER = 3


# Please refer to the CalculationWeights class for documentation (you can Ctrl+click on CalculationWeights):
LOG_SCALING_THRESHOLD_PERCENTAGE_1 = 25


# Used to adjust the damping of the logarithmic total weighting calculation.
# Higher values make the weighting less sensitive to changes in the individual indicator weightings and usually allow higher weightings to be reached more easily.
WEIGHT_SENSITIVITY_DAMPING = 1

# Percentage of Calculation Weight Total Score CWTS required for Entry 1
MIN_SCORE_PERCENTAGE_ENTRY_1 = 15

# "SHARES" or "PERCENTAGE"
QUANTITY_MODE = "PERCENTAGE"

# Percentage of account cash to be invested when the first min score is reached,
# 0.5 = 50%
QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_1 = 0.15

# Number of shares to be invested when the first min score is reached
QUANTITY_TOTAL_SHARES_ENTRY_1 = 1


#region imports
from AlgorithmImports import *
#endregion
# Stop Loss Percentage, 3 = 3%
STOP_PERCENTAGE = 15

# Untested, ETA Tomorrow
TAKE_PROFIT_X_TIMES_RISK = 2


USE_SHORT_VOLATILITY_STOPS = False

#floor .5
LOW_VOLATILITY_STOP_PERCENTAGE = 2.5

HIGH_VOLATILITY_STOP_PERCENTAGE = 3.5

# Enable or disable trailing stop by inputting True or False
USE_TRAILING_STOP = True

# Required profit percentage at which the trailing stop will be activated
# 1 = 1%
TRAILING_STOP_ACTIVATION_THRESHOLD = 1

# Trailing percentage, 1 = 1%
TRAILING_STOP_PERCENTAGE = 1

# Enable or disable the ATR based trailing stop,
# This currently overwrites the regular trailing stop if both are active
USE_ATR_TRAILING_STOP = True

# offset for LONG UPRO in positive direction
# 1 
ATR_OFFSET_POSITIVE = 1.024

#  offset for LONG UPRO in negative direction
ATR_OFFSET_NEGATIVE = 9.965

# For a trade trending in the positive direction, these levels tighten the ATR exit to lock in profits.
# The keys (left) are gain percentages;
# the values (right) are the divisors applied to the trailing percent once that gain is reached.
POSITIVE_TIGHTEN_LEVELS = {
    0.25: 2,
    0.75: 3,
    1.0: 4
}
NEGATIVE_TIGHTEN_LEVELS = {
    6.0: 2,
    6.5: 3,
    7.0: 4
}

# Scaling model: 0.06% and 0.15% (ATR as a percentage of price) set the range over which the trailing percent is scaled
SCALE_MIN_ATR = 0.06
SCALE_MAX_ATR = 0.15

# These define the minimum and maximum trailing percentages;
# the scaling above can set the trailing percentage to any value
# between these two numbers
MIN_TRAILING_PERCENT = 0.51
MAX_TRAILING_PERCENT = 1.51


ENABLE_POSITIVE_TRAILING_STOP = True
ENABLE_NEGATIVE_TRAILING_STOP = True

# Enables exit of short position at 3:59 PM
EXIT_ON_MARKET_CLOSE = False
#region imports
from AlgorithmImports import *
#endregion
#// I removed all _SHORT from the variables here,
#// which allowed me to remove roughly 50 lines of code duplication from setup_indicators.py.
#// Removing the code duplication also makes the code more extensible and easier to maintain.
#// Additionally, this change allowed me to remove roughly 200 lines of code duplication from
#// setup_timeframes.py (with the same additional benefits there),
#// as well as other smaller benefits in various places in the code base.





####################################################### CHANDELIER EXIT STRATEGY ###########################################################################################################


USE_CHANDELIER_EXITS = False
CHANDELIER_ATR_TIMEFRAME = (60, "Minute")
USE_CLOSE_PRICE_FOR_EXTREME_MOMENTUM = True
CHANDELIER_ATR_PERIOD = 14
CHANDELIER_ATR_MULTIPLIER = 3






####################################################### ROC PERCENT STRATEGY ###########################################################################################################

ROC_PERCENT_WEIGHTING = 0

ROC_PERCENT_PERIOD = 14

ROC_PERCENT_BOL_PERIOD = 20

ROC_PERCENT_BOL_BAND_MULTIPLIER = 4

ROC_PERCENT_LOWER_THRESHOLD = 70

# Timeframe to be used, please see top of this file for further details.
ROC_PERCENT_TIMEFRAME = (15, "Minute")




####################################################### ROC RATIO STRATEGY ###########################################################################################################

ROC_RATIO_WEIGHTING = 0

ROC_RATIO_PERIOD = 14

ROC_RATIO_BOL_PERIOD = 20

ROC_RATIO_BOL_BAND_MULTIPLIER = 4

ROC_RATIO_LOWER_THRESHOLD = 70

# Timeframe to be used, please see top of this file for further details.
ROC_RATIO_TIMEFRAME = (15, "Minute")





####################################################### MFI STRATEGY ###########################################################################################################

MFI_WEIGHTING = 0

MFI_PERIOD = 14

MFI_OVERBOUGHT = 70

MFI_OVERSOLD = 70

# Timeframe to be used, please see top of this file for further details.
MFI_TIMEFRAME = (15, "Minute")


####################################################### ADX STRATEGY ###########################################################################################################

# Might be good to give high weighting
ADX_WEIGHTING = 12

# Minimum value the ADX needs to reach for its weighting to be > 0
ADX_MIN_TREND_STRENGTH = 18.9

# Maximum ADX value at which the full ADX Weighting is reached.
# +DI and -DI are used to determine the direction of trend, so they switch the weighting between positive and negative.
ADX_MAX_TREND_STRENGTH = 30

# Timeframe to be used, please see top of this file for further details.
ADX_TIMEFRAME = (15, "Minute")

ADX_LENGTH = 14







#######################################################  MACD STRATEGY  ###########################################################################################################

MACD_WEIGHTING = 3

# Percentages determining how much of the total MACD_WEIGHTING is allocated to each condition, 33 = 33%
MACD_RELATIVE_WEIGHTING_GOLDEN_CROSS = 33
MACD_RELATIVE_WEIGHTING_DEATH_CROSS = 33
MACD_RELATIVE_WEIGHTING_ZERO_LINE = 0
MACD_RELATIVE_WEIGHTING_POSITION = 33

# Timeframe to be used, please see top of this file for further details.
MACD_TIMEFRAME = (15, "Minute")

MACD_FAST_PERIOD = 14

MACD_SLOW_PERIOD = 26

MACD_SIGNAL_PERIOD = 9



#######################################################  RSI STRATEGY  ###########################################################################################################

RSI_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
RSI_TIMEFRAME = (15, "Minute")

RSI_PERIOD = 14


# Below values will be used to determine bullish and bearish. 
# Right now it is linearly scaled, with the RSI_BASELINE to RSI_OVERBOUGHT being bearish territory and
# RSI_BASELINE to RSI_OVERSOLD bullish.

RSI_OVERBOUGHT = 73.51

RSI_BASELINE = 45

RSI_OVERSOLD = 30





#######################################################  BOLLINGER BANDS STRATEGY  ###########################################################################################################

# Untested
BOLLINGER_BANDS_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
BOLLINGER_BANDS_TIMEFRAME = (15, "Minute")

BOLLINGER_BANDS_PERIOD = 14

BOLLINGER_BANDS_OFFSET = 2



#######################################################  VWAP STRATEGY  ###########################################################################################################

VWAP_WEIGHTING = 10

# Offset multiplier applied to the standard deviation around VWAP to calculate the bands
VWAP_UPPER_BAND_1_OFFSET = 2
VWAP_LOWER_BAND_1_OFFSET = 2

VWAP_UPPER_BAND_2_OFFSET = 3
VWAP_LOWER_BAND_2_OFFSET = 3

# Percentage offset applied to VWAP itself, 10 = 10%
VWAP_OFFSET = -0.002


####################################################### CROSS OVER STRATEGY ###########################################################################################################

# Still fixing bugs here at the moment, so should be left at 0
CROSS_OVER_WEIGHTING = 0

ENABLE_SMA_CROSS = True
ENABLE_EMA_CROSS = False

ENABLE_SMA_GOLDEN_CROSS = True
ENABLE_EMA_GOLDEN_CROSS = True

GOLDEN_CROSS_WEIGHT_INCREASE_FACTOR = 1.5

# Duration of the weight increase after a golden cross occurs, in minutes
GOLDEN_CROSS_WEIGHT_INCREASE_DURATION = 120

ENABLE_SMA_DEATH_CROSS = True
ENABLE_EMA_DEATH_CROSS = True

DEATH_CROSS_WEIGHT_INCREASE_FACTOR = 1.5

# Duration of the weight increase after a death cross occurs, in minutes
DEATH_CROSS_WEIGHT_INCREASE_DURATION = 120

SMA_PERIOD_FAST = 1
SMA_PERIOD_SLOW = 300

EMA_PERIOD_FAST = 50
EMA_PERIOD_SLOW = 200

# Timeframes to be used, please see top of this file for further details.
# Currently only maximum of 5 each possible
SMA_TIMEFRAMES = [(1, "Day")]
EMA_TIMEFRAMES = [(1, "Day")]

####################################################### STOCHASTIC OSCILLATOR STRATEGY ###########################################################################################################

STOCHASTIC_OSCILLATOR_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
STOCHASTIC_OSCILLATOR_TIMEFRAME = (15, "Minute")

STOCHASTIC_OSCILLATOR_LENGTH = 14

STOCHASTIC_OSCILLATOR_K_SMOOTHING = 1

STOCHASTIC_OSCILLATOR_D_SMOOTHING = 3

# Below values will be used to determine bullish and bearish.
# Right now it is linearly scaled, with the STOCHASTIC_OSCILLATOR_BASELINE to STOCHASTIC_OSCILLATOR_OVERBOUGHT being bearish territory and
# STOCHASTIC_OSCILLATOR_BASELINE to STOCHASTIC_OSCILLATOR_OVERSOLD bullish.
STOCHASTIC_OSCILLATOR_OVERBOUGHT = 75.5

STOCHASTIC_OSCILLATOR_BASELINE = 50

STOCHASTIC_OSCILLATOR_OVERSOLD = 30

####################################################### OBV ACC DIST STRATEGY ###########################################################################################################

# Might be good to give low weighting
OBV_ACC_DIST_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
OBV_ACC_DIST_TIMEFRAME = (1, "Day")

# Moving average applied to the Accumulation/Distribution of OBV
OBV_ACC_DIST_SMA_LENGTH = 10




####################################################### DIRECTIONAL VOLATILITY AND VOLUME STRATEGY ###########################################################################################################

# Might be good to give low weighting

DVV_WEIGHTING = 1.9

# Timeframe to be used, please see top of this file for further details.
DVV_TIMEFRAME = (15, "Minute")

DVV_VOLATILITY_PERIOD = 6

# Options here are "SMA", "EMA", "RMA", "WMA"
DVV_VOLATILITY_SMOOTHING = "SMA"

DVV_VOLUME_PERIOD = 14

# Options here are "SMA", "EMA", "RMA", "WMA"
DVV_VOLUME_SMOOTHING = "SMA"

DVV_ZONE_PERIOD = 14

# Options here are "SMA", "EMA", "RMA", "WMA"
DVV_ZONE_SMOOTHING = "SMA"


####################################################### CHAIKIN VOLATILITY STRATEGY ###########################################################################################################

# Might be good to give low weighting

CHAIKIN_VOLATILITY_WEIGHTING = 3

# Timeframe to be used, please see top of this file for further details.
CHAIKIN_VOLATILITY_TIMEFRAME = (15, "Minute")

CHAIKIN_VOLATILITY_LENGTH = 10

CHAIKIN_VOLATILITY_ROC_LENGTH = 12
# region imports
from AlgorithmImports import *
# endregion
csv_string = """time,close_malp,close_mahp,close_diff
2020-01-02 14:30:00,0.01,57.0,56.99
2020-01-03 14:30:00,0.01,21.0,20.99
2020-01-06 14:30:00,0.01,23.0,22.99
2020-01-07 14:30:00,1.0,35.0,34.0
2020-01-08 14:30:00,0.01,59.0,58.99
2020-01-09 14:30:00,0.01,72.0,71.99
2020-01-10 14:30:00,1.0,55.0,54.0
2020-01-13 14:30:00,1.0,62.0,61.0
2020-01-14 14:30:00,0.01,60.0,59.99
2020-01-15 14:30:00,0.01,71.0,70.99
2020-01-16 14:30:00,0.01,96.0,95.99
2020-01-17 14:30:00,0.01,123.0,122.99
2020-01-21 14:30:00,2.0,88.0,86.0
2020-01-22 14:30:00,2.0,106.0,104.0
2020-01-23 14:30:00,4.0,50.0,46.0
2020-01-24 14:30:00,5.0,81.0,76.0
2020-01-27 14:30:00,11.0,19.0,8.0
2020-01-28 14:30:00,4.0,26.0,22.0
2020-01-29 14:30:00,8.0,44.0,36.0
2020-01-30 14:30:00,13.0,48.0,35.0
2020-01-31 14:30:00,13.0,33.0,20.0
2020-02-03 14:30:00,11.0,23.0,12.0
2020-02-04 14:30:00,3.0,63.0,60.0
2020-02-05 14:30:00,0.01,67.0,66.99
2020-02-06 14:30:00,0.01,57.0,56.99
2020-02-07 14:30:00,1.0,34.0,33.0
2020-02-10 14:30:00,4.0,46.0,42.0
2020-02-11 14:30:00,3.0,76.0,73.0
2020-02-12 14:30:00,2.0,68.0,66.0
2020-02-13 14:30:00,2.0,66.0,64.0
2020-02-14 14:30:00,4.0,74.0,70.0
2020-02-18 14:30:00,7.0,71.0,64.0
2020-02-19 14:30:00,5.0,77.0,72.0
2020-02-20 14:30:00,5.0,47.0,42.0
2020-02-21 14:30:00,8.0,26.0,18.0
2020-02-24 14:30:00,23.0,6.0,-17.0
2020-02-25 14:30:00,42.0,4.0,-38.0
2020-02-26 14:30:00,51.0,3.0,-48.0
2020-02-27 14:30:00,100.0,4.0,-96.0
2020-02-28 14:30:00,126.0,0.01,-125.99
2020-03-02 14:30:00,19.0,0.01,-18.99
2020-03-03 14:30:00,32.0,2.0,-30.0
2020-03-04 14:30:00,30.0,6.0,-24.0
2020-03-05 14:30:00,76.0,7.0,-69.0
2020-03-06 14:30:00,138.0,4.0,-134.0
2020-03-09 13:30:00,223.0,1.0,-222.0
2020-03-10 13:30:00,97.0,3.0,-94.0
2020-03-11 13:30:00,128.0,0.01,-127.99
2020-03-12 13:30:00,331.0,0.01,-330.99
2020-03-13 13:30:00,121.0,1.0,-120.0
2020-03-16 13:30:00,335.0,0.01,-334.99
2020-03-17 13:30:00,207.0,7.0,-200.0
2020-03-18 13:30:00,289.0,5.0,-284.0
2020-03-19 13:30:00,89.0,3.0,-86.0
2020-03-20 13:30:00,94.0,0.01,-93.99
2020-03-23 13:30:00,226.0,0.01,-225.99
2020-03-24 13:30:00,4.0,0.01,-3.99
2020-03-25 13:30:00,4.0,0.01,-3.99
2020-03-26 13:30:00,0.01,0.01,0.0
2020-03-27 13:30:00,1.0,1.0,0.0
2020-03-30 13:30:00,2.0,1.0,-1.0
2020-03-31 13:30:00,0.01,1.0,0.99
2020-04-01 13:30:00,10.0,0.01,-9.99
2020-04-02 13:30:00,22.0,0.01,-21.99
2020-04-03 13:30:00,13.0,0.01,-12.99
2020-04-06 13:30:00,0.01,4.0,3.99
2020-04-07 13:30:00,0.01,4.0,3.99
2020-04-08 13:30:00,0.01,2.0,1.99
2020-04-09 13:30:00,0.01,5.0,4.99
2020-04-13 13:30:00,0.01,4.0,3.99
2020-04-14 13:30:00,0.01,8.0,7.99
2020-04-15 13:30:00,1.0,8.0,7.0
2020-04-16 13:30:00,0.01,14.0,13.99
2020-04-17 13:30:00,0.01,11.0,10.99
2020-04-20 13:30:00,0.01,10.0,9.99
2020-04-21 13:30:00,0.01,1.0,0.99
2020-04-22 13:30:00,2.0,4.0,2.0
2020-04-23 13:30:00,0.01,7.0,6.99
2020-04-24 13:30:00,1.0,1.0,0.0
2020-04-27 13:30:00,0.01,6.0,5.99
2020-04-28 13:30:00,1.0,10.0,9.0
2020-04-29 13:30:00,0.01,4.0,3.99
2020-04-30 13:30:00,1.0,3.0,2.0
2020-05-01 13:30:00,2.0,0.01,-1.99
2020-05-04 13:30:00,3.0,0.01,-2.99
2020-05-05 13:30:00,2.0,10.0,8.0
2020-05-06 13:30:00,3.0,7.0,4.0
2020-05-07 13:30:00,0.01,8.0,7.99
2020-05-08 13:30:00,0.01,7.0,6.99
2020-05-11 13:30:00,1.0,16.0,15.0
2020-05-12 13:30:00,2.0,10.0,8.0
2020-05-13 13:30:00,9.0,3.0,-6.0
2020-05-14 13:30:00,17.0,4.0,-13.0
2020-05-15 13:30:00,1.0,9.0,8.0
2020-05-18 13:30:00,0.01,22.0,21.99
2020-05-19 13:30:00,0.01,11.0,10.99
2020-05-20 13:30:00,0.01,12.0,11.99
2020-05-21 13:30:00,0.01,4.0,3.99
2020-05-22 13:30:00,0.01,5.0,4.99
2020-05-26 13:30:00,0.01,12.0,11.99
2020-05-27 13:30:00,0.01,8.0,7.99
2020-05-28 13:30:00,0.01,18.0,17.99
2020-05-29 13:30:00,0.01,15.0,14.99
2020-06-01 13:30:00,0.01,20.0,19.99
2020-06-02 13:30:00,0.01,16.0,15.99
2020-06-03 13:30:00,0.01,29.0,28.99
2020-06-04 13:30:00,0.01,15.0,14.99
2020-06-05 13:30:00,0.01,24.0,23.99
2020-06-08 13:30:00,0.01,17.0,16.99
2020-06-09 13:30:00,0.01,9.0,8.99
2020-06-10 13:30:00,0.01,17.0,16.99
2020-06-11 13:30:00,0.01,3.0,2.99
2020-06-12 13:30:00,0.01,1.0,0.99
2020-06-15 13:30:00,0.01,0.01,0.0
2020-06-16 13:30:00,0.01,7.0,6.99
2020-06-17 13:30:00,0.01,10.0,9.99
2020-06-18 13:30:00,0.01,6.0,5.99
2020-06-19 13:30:00,0.01,20.0,19.99
2020-06-22 13:30:00,0.01,12.0,11.99
2020-06-23 13:30:00,0.01,24.0,23.99
2020-06-24 13:30:00,0.01,1.0,0.99
2020-06-25 13:30:00,0.01,5.0,4.99
2020-06-26 13:30:00,0.01,5.0,4.99
2020-06-29 13:30:00,0.01,1.0,0.99
2020-06-30 13:30:00,1.0,12.0,11.0
2020-07-01 13:30:00,0.01,18.0,17.99
2020-07-02 13:30:00,0.01,37.0,36.99
2020-07-06 13:30:00,0.01,40.0,39.99
2020-07-07 13:30:00,0.01,32.0,31.99
2020-07-08 13:30:00,0.01,19.0,18.99
2020-07-09 13:30:00,1.0,33.0,32.0
2020-07-10 13:30:00,0.01,18.0,17.99
2020-07-13 13:30:00,0.01,38.0,37.99
2020-07-14 13:30:00,0.01,7.0,6.99
2020-07-15 13:30:00,0.01,31.0,30.99
2020-07-16 13:30:00,0.01,21.0,20.99
2020-07-17 13:30:00,0.01,38.0,37.99
2020-07-20 13:30:00,0.01,41.0,40.99
2020-07-21 13:30:00,0.01,46.0,45.99
2020-07-22 13:30:00,1.0,41.0,40.0
2020-07-23 13:30:00,0.01,51.0,50.99
2020-07-24 13:30:00,0.01,8.0,7.99
2020-07-27 13:30:00,0.01,18.0,17.99
2020-07-28 13:30:00,0.01,23.0,22.99
2020-07-29 13:30:00,0.01,37.0,36.99
2020-07-30 13:30:00,0.01,29.0,28.99
2020-07-31 13:30:00,0.01,31.0,30.99
2020-08-03 13:30:00,0.01,40.0,39.99
2020-08-04 13:30:00,0.01,29.0,28.99
2020-08-05 13:30:00,0.01,43.0,42.99
2020-08-06 13:30:00,0.01,26.0,25.99
2020-08-07 13:30:00,0.01,33.0,32.99
2020-08-10 13:30:00,0.01,36.0,35.99
2020-08-11 13:30:00,0.01,40.0,39.99
2020-08-12 13:30:00,0.01,28.0,27.99
2020-08-13 13:30:00,0.01,15.0,14.99
2020-08-14 13:30:00,0.01,16.0,15.99
2020-08-17 13:30:00,0.01,29.0,28.99
2020-08-18 13:30:00,0.01,31.0,30.99
2020-08-19 13:30:00,0.01,25.0,24.99
2020-08-20 13:30:00,0.01,15.0,14.99
2020-08-21 13:30:00,0.01,26.0,25.99
2020-08-24 13:30:00,0.01,47.0,46.99
2020-08-25 13:30:00,0.01,30.0,29.99
2020-08-26 13:30:00,0.01,36.0,35.99
2020-08-27 13:30:00,0.01,41.0,40.99
2020-08-28 13:30:00,0.01,27.0,26.99
2020-08-31 13:30:00,0.01,33.0,32.99
2020-09-01 13:30:00,1.0,39.0,38.0
2020-09-02 13:30:00,0.01,85.0,84.99
2020-09-03 13:30:00,0.01,17.0,16.99
2020-09-04 13:30:00,1.0,0.01,-0.99
2020-09-08 13:30:00,2.0,0.01,-1.99
2020-09-09 13:30:00,2.0,3.0,1.0
2020-09-10 13:30:00,2.0,6.0,4.0
2020-09-11 13:30:00,1.0,5.0,4.0
2020-09-14 13:30:00,0.01,11.0,10.99
2020-09-15 13:30:00,0.01,21.0,20.99
2020-09-16 13:30:00,0.01,23.0,22.99
2020-09-17 13:30:00,0.01,9.0,8.99
2020-09-18 13:30:00,0.01,11.0,10.99
2020-09-21 13:30:00,1.0,1.0,0.0
2020-09-22 13:30:00,0.01,2.0,1.99
2020-09-23 13:30:00,1.0,4.0,3.0
2020-09-24 13:30:00,2.0,0.01,-1.99
2020-09-25 13:30:00,0.01,0.01,0.0
2020-09-28 13:30:00,0.01,5.0,4.99
2020-09-29 13:30:00,0.01,7.0,6.99
2020-09-30 13:30:00,0.01,13.0,12.99
2020-10-01 13:30:00,0.01,12.0,11.99
2020-10-02 13:30:00,1.0,7.0,6.0
2020-10-05 13:30:00,0.01,23.0,22.99
2020-10-06 13:30:00,0.01,29.0,28.99
2020-10-07 13:30:00,0.01,37.0,36.99
2020-10-08 13:30:00,0.01,57.0,56.99
2020-10-09 13:30:00,0.01,70.0,69.99
2020-10-12 13:30:00,0.01,67.0,66.99
2020-10-13 13:30:00,0.01,34.0,33.99
2020-10-14 13:30:00,0.01,20.0,19.99
2020-10-15 13:30:00,0.01,17.0,16.99
2020-10-16 13:30:00,0.01,47.0,46.99
2020-10-19 13:30:00,1.0,22.0,21.0
2020-10-20 13:30:00,1.0,23.0,22.0
2020-10-21 13:30:00,0.01,19.0,18.99
2020-10-22 13:30:00,1.0,22.0,21.0
2020-10-23 13:30:00,0.01,27.0,26.99
2020-10-26 13:30:00,2.0,5.0,3.0
2020-10-27 13:30:00,2.0,12.0,10.0
2020-10-28 13:30:00,9.0,1.0,-8.0
2020-10-29 13:30:00,10.0,4.0,-6.0
2020-10-30 13:30:00,2.0,2.0,0.0
2020-11-02 14:30:00,0.01,5.0,4.99
2020-11-03 14:30:00,0.01,24.0,23.99
2020-11-04 14:30:00,0.01,48.0,47.99
2020-11-05 14:30:00,0.01,72.0,71.99
2020-11-06 14:30:00,0.01,51.0,50.99
2020-11-09 14:30:00,1.0,137.0,136.0
2020-11-10 14:30:00,0.01,10.0,9.99
2020-11-11 14:30:00,0.01,7.0,6.99
2020-11-12 14:30:00,0.01,3.0,2.99
2020-11-13 14:30:00,0.01,7.0,6.99
2020-11-16 14:30:00,0.01,35.0,34.99
2020-11-17 14:30:00,0.01,22.0,21.99
2020-11-18 14:30:00,0.01,28.0,27.99
2020-11-19 14:30:00,0.01,6.0,5.99
2020-11-20 14:30:00,0.01,16.0,15.99
2020-11-23 14:30:00,0.01,28.0,27.99
2020-11-24 14:30:00,0.01,52.0,51.99
2020-11-25 14:30:00,0.01,15.0,14.99
2020-11-27 14:30:00,0.01,21.0,20.99
2020-11-30 14:30:00,0.01,11.0,10.99
2020-12-01 14:30:00,0.01,44.0,43.99
2020-12-02 14:30:00,0.01,16.0,15.99
2020-12-03 14:30:00,0.01,31.0,30.99
2020-12-04 14:30:00,0.01,48.0,47.99
2020-12-07 14:30:00,0.01,26.0,25.99
2020-12-08 14:30:00,0.01,46.0,45.99
2020-12-09 14:30:00,0.01,40.0,39.99
2020-12-10 14:30:00,0.01,11.0,10.99
2020-12-11 14:30:00,0.01,11.0,10.99
2020-12-14 14:30:00,0.01,25.0,24.99
2020-12-15 14:30:00,1.0,17.0,16.0
2020-12-16 14:30:00,1.0,31.0,30.0
2020-12-17 14:30:00,1.0,36.0,35.0
2020-12-18 14:30:00,0.01,40.0,39.99
2020-12-21 14:30:00,0.01,16.0,15.99
2020-12-22 14:30:00,1.0,24.0,23.0
2020-12-23 14:30:00,0.01,32.0,31.99
2020-12-24 14:30:00,0.01,7.0,6.99
2020-12-28 14:30:00,1.0,36.0,35.0
2020-12-29 14:30:00,0.01,23.0,22.99
2020-12-30 14:30:00,1.0,21.0,20.0
2020-12-31 14:30:00,1.0,28.0,27.0
2021-01-04 14:30:00,0.01,54.0,53.99
2021-01-05 14:30:00,0.01,19.0,18.99
2021-01-06 14:30:00,0.01,86.0,85.99
2021-01-07 14:30:00,0.01,98.0,97.99
2021-01-08 14:30:00,0.01,79.0,78.99
2021-01-11 14:30:00,0.01,46.0,45.99
2021-01-12 14:30:00,0.01,80.0,79.99
2021-01-13 14:30:00,0.01,39.0,38.99
2021-01-14 14:30:00,0.01,49.0,48.99
2021-01-15 14:30:00,0.01,8.0,7.99
2021-01-19 14:30:00,0.01,35.0,34.99
2021-01-20 14:30:00,0.01,51.0,50.99
2021-01-21 14:30:00,0.01,25.0,24.99
2021-01-22 14:30:00,0.01,14.0,13.99
2021-01-25 14:30:00,0.01,27.0,26.99
2021-01-26 14:30:00,0.01,20.0,19.99
2021-01-27 14:30:00,0.01,26.0,25.99
2021-01-28 14:30:00,0.01,10.0,9.99
2021-01-29 14:30:00,0.01,7.0,6.99
2021-02-01 14:30:00,0.01,6.0,5.99
2021-02-02 14:30:00,0.01,15.0,14.99
2021-02-03 14:30:00,0.01,8.0,7.99
2021-02-04 14:30:00,0.01,23.0,22.99
2021-02-05 14:30:00,0.01,34.0,33.99
2021-02-08 14:30:00,0.01,42.0,41.99
2021-02-09 14:30:00,0.01,38.0,37.99
2021-02-10 14:30:00,0.01,47.0,46.99
2021-02-11 14:30:00,0.01,44.0,43.99
2021-02-12 14:30:00,0.01,52.0,51.99
2021-02-16 14:30:00,0.01,77.0,76.99
2021-02-17 14:30:00,0.01,26.0,25.99
2021-02-18 14:30:00,0.01,15.0,14.99
2021-02-19 14:30:00,0.01,51.0,50.99
2021-02-22 14:30:00,0.01,68.0,67.99
2021-02-23 14:30:00,0.01,51.0,50.99
2021-02-24 14:30:00,0.01,102.0,101.99
2021-02-25 14:30:00,0.01,74.0,73.99
2021-02-26 14:30:00,1.0,5.0,4.0
2021-03-01 14:30:00,0.01,46.0,45.99
2021-03-02 14:30:00,0.01,30.0,29.99
2021-03-03 14:30:00,0.01,61.0,60.99
2021-03-04 14:30:00,0.01,29.0,28.99
2021-03-05 14:30:00,0.01,56.0,55.99
2021-03-08 14:30:00,0.01,120.0,119.99
2021-03-09 14:30:00,0.01,36.0,35.99
2021-03-10 14:30:00,0.01,48.0,47.99
2021-03-11 14:30:00,0.01,73.0,72.99
2021-03-12 14:30:00,0.01,82.0,81.99
2021-03-15 13:30:00,0.01,88.0,87.99
2021-03-16 13:30:00,0.01,31.0,30.99
2021-03-17 13:30:00,0.01,42.0,41.99
2021-03-18 13:30:00,0.01,84.0,83.99
2021-03-19 13:30:00,0.01,13.0,12.99
2021-03-22 13:30:00,0.01,14.0,13.99
2021-03-23 13:30:00,0.01,11.0,10.99
2021-03-24 13:30:00,0.01,15.0,14.99
2021-03-25 13:30:00,0.01,15.0,14.99
2021-03-26 13:30:00,0.01,64.0,63.99
2021-03-29 13:30:00,0.01,73.0,72.99
2021-03-30 13:30:00,0.01,30.0,29.99
2021-03-31 13:30:00,0.01,29.0,28.99
2021-04-01 13:30:00,0.01,30.0,29.99
2021-04-05 13:30:00,0.01,79.0,78.99
2021-04-06 13:30:00,0.01,65.0,64.99
2021-04-07 13:30:00,0.01,31.0,30.99
2021-04-08 13:30:00,0.01,35.0,34.99
2021-04-09 13:30:00,0.01,50.0,49.99
2021-04-12 13:30:00,0.01,72.0,71.99
2021-04-13 13:30:00,0.01,40.0,39.99
2021-04-14 13:30:00,0.01,65.0,64.99
2021-04-15 13:30:00,0.01,84.0,83.99
2021-04-16 13:30:00,0.01,127.0,126.99
2021-04-19 13:30:00,0.01,66.0,65.99
2021-04-20 13:30:00,0.01,63.0,62.99
2021-04-21 13:30:00,0.01,87.0,86.99
2021-04-22 13:30:00,0.01,84.0,83.99
2021-04-23 13:30:00,0.01,79.0,78.99
2021-04-26 13:30:00,0.01,100.0,99.99
2021-04-27 13:30:00,0.01,50.0,49.99
2021-04-28 13:30:00,0.01,76.0,75.99
2021-04-29 13:30:00,0.01,121.0,120.99
2021-04-30 13:30:00,0.01,52.0,51.99
2021-05-03 13:30:00,0.01,105.0,104.99
2021-05-04 13:30:00,0.01,97.0,96.99
2021-05-05 13:30:00,0.01,105.0,104.99
2021-05-06 13:30:00,1.0,117.0,116.0
2021-05-07 13:30:00,1.0,157.0,156.0
2021-05-10 13:30:00,0.01,218.0,217.99
2021-05-11 13:30:00,1.0,8.0,7.0
2021-05-12 13:30:00,0.01,12.0,11.99
2021-05-13 13:30:00,0.01,13.0,12.99
2021-05-14 13:30:00,0.01,31.0,30.99
2021-05-17 13:30:00,0.01,37.0,36.99
2021-05-18 13:30:00,0.01,43.0,42.99
2021-05-19 13:30:00,0.01,3.0,2.99
2021-05-20 13:30:00,0.01,16.0,15.99
2021-05-21 13:30:00,0.01,24.0,23.99
2021-05-24 13:30:00,0.01,29.0,28.99
2021-05-25 13:30:00,1.0,29.0,28.0
2021-05-26 13:30:00,0.01,18.0,17.99
2021-05-27 13:30:00,1.0,30.0,29.0
2021-05-28 13:30:00,1.0,30.0,29.0
2021-06-01 13:30:00,0.01,75.0,74.99
2021-06-02 13:30:00,0.01,62.0,61.99
2021-06-03 13:30:00,0.01,37.0,36.99
2021-06-04 13:30:00,0.01,59.0,58.99
2021-06-07 13:30:00,0.01,60.0,59.99
2021-06-08 13:30:00,0.01,54.0,53.99
2021-06-09 13:30:00,1.0,40.0,39.0
2021-06-10 13:30:00,0.01,60.0,59.99
2021-06-11 13:30:00,1.0,32.0,31.0
2021-06-14 13:30:00,2.0,35.0,33.0
2021-06-15 13:30:00,1.0,36.0,35.0
2021-06-16 13:30:00,1.0,26.0,25.0
2021-06-17 13:30:00,2.0,22.0,20.0
2021-06-18 13:30:00,1.0,14.0,13.0
2021-06-21 13:30:00,0.01,19.0,18.99
2021-06-22 13:30:00,1.0,29.0,28.0
2021-06-23 13:30:00,0.01,32.0,31.99
2021-06-24 13:30:00,1.0,38.0,37.0
2021-06-25 13:30:00,0.01,31.0,30.99
2021-06-28 13:30:00,0.01,37.0,36.99
2021-06-29 13:30:00,0.01,38.0,37.99
2021-06-30 13:30:00,0.01,22.0,21.99
2021-07-01 13:30:00,0.01,36.0,35.99
2021-07-02 13:30:00,0.01,54.0,53.99
2021-07-06 13:30:00,0.01,50.0,49.99
2021-07-07 13:30:00,0.01,69.0,68.99
2021-07-08 13:30:00,0.01,22.0,21.99
2021-07-09 13:30:00,0.01,44.0,43.99
2021-07-12 13:30:00,0.01,63.0,62.99
2021-07-13 13:30:00,1.0,40.0,39.0
2021-07-14 13:30:00,1.0,39.0,38.0
2021-07-15 13:30:00,0.01,28.0,27.99
2021-07-16 13:30:00,0.01,49.0,48.99
2021-07-19 13:30:00,0.01,10.0,9.99
2021-07-20 13:30:00,0.01,41.0,40.99
2021-07-21 13:30:00,0.01,37.0,36.99
2021-07-22 13:30:00,0.01,39.0,38.99
2021-07-23 13:30:00,0.01,79.0,78.99
2021-07-26 13:30:00,0.01,47.0,46.99
2021-07-27 13:30:00,0.01,44.0,43.99
2021-07-28 13:30:00,2.0,40.0,38.0
2021-07-29 13:30:00,1.0,74.0,73.0
2021-07-30 13:30:00,2.0,66.0,64.0
2021-08-02 13:30:00,2.0,77.0,75.0
2021-08-03 13:30:00,3.0,66.0,63.0
2021-08-04 13:30:00,3.0,65.0,62.0
2021-08-05 13:30:00,5.0,53.0,48.0
2021-08-06 13:30:00,2.0,41.0,39.0
2021-08-09 13:30:00,2.0,27.0,25.0
2021-08-10 13:30:00,2.0,46.0,44.0
2021-08-11 13:30:00,1.0,49.0,48.0
2021-08-12 13:30:00,2.0,44.0,42.0
2021-08-13 13:30:00,0.01,58.0,57.99
2021-08-16 13:30:00,1.0,69.0,68.0
2021-08-17 13:30:00,3.0,40.0,37.0
2021-08-18 13:30:00,2.0,31.0,29.0
2021-08-19 13:30:00,3.0,29.0,26.0
2021-08-20 13:30:00,1.0,51.0,50.0
2021-08-23 13:30:00,1.0,57.0,56.0
2021-08-24 13:30:00,1.0,29.0,28.0
2021-08-25 13:30:00,1.0,55.0,54.0
2021-08-26 13:30:00,2.0,33.0,31.0
2021-08-27 13:30:00,1.0,57.0,56.0
2021-08-30 13:30:00,0.01,73.0,72.99
2021-08-31 13:30:00,0.01,44.0,43.99
2021-09-01 13:30:00,0.01,55.0,54.99
2021-09-02 13:30:00,1.0,79.0,78.0
2021-09-03 13:30:00,1.0,54.0,53.0
2021-09-07 13:30:00,1.0,20.0,19.0
2021-09-08 13:30:00,1.0,34.0,33.0
2021-09-09 13:30:00,2.0,32.0,30.0
2021-09-10 13:30:00,3.0,14.0,11.0
2021-09-13 13:30:00,1.0,11.0,10.0
2021-09-14 13:30:00,1.0,3.0,2.0
2021-09-15 13:30:00,3.0,7.0,4.0
2021-09-16 13:30:00,2.0,9.0,7.0
2021-09-17 13:30:00,3.0,7.0,4.0
2021-09-20 13:30:00,2.0,0.01,-1.99
2021-09-21 13:30:00,5.0,1.0,-4.0
2021-09-22 13:30:00,8.0,8.0,0.0
2021-09-23 13:30:00,4.0,26.0,22.0
2021-09-24 13:30:00,6.0,19.0,13.0
2021-09-27 13:30:00,4.0,26.0,22.0
2021-09-28 13:30:00,5.0,17.0,12.0
2021-09-29 13:30:00,1.0,7.0,6.0
2021-09-30 13:30:00,4.0,4.0,0.0
2021-10-01 13:30:00,8.0,9.0,1.0
2021-10-04 13:30:00,6.0,18.0,12.0
2021-10-05 13:30:00,6.0,15.0,9.0
2021-10-06 13:30:00,10.0,3.0,-7.0
2021-10-07 13:30:00,4.0,29.0,25.0
2021-10-08 13:30:00,3.0,23.0,20.0
2021-10-11 13:30:00,8.0,38.0,30.0
2021-10-12 13:30:00,9.0,11.0,2.0
2021-10-13 13:30:00,10.0,8.0,-2.0
2021-10-14 13:30:00,0.01,31.0,30.99
2021-10-15 13:30:00,0.01,56.0,55.99
2021-10-18 13:30:00,0.01,39.0,38.99
2021-10-19 13:30:00,1.0,42.0,41.0
2021-10-20 13:30:00,0.01,62.0,61.99
2021-10-21 13:30:00,0.01,61.0,60.99
2021-10-22 13:30:00,0.01,88.0,87.99
2021-10-25 13:30:00,2.0,79.0,77.0
2021-10-26 13:30:00,0.01,68.0,67.99
2021-10-27 13:30:00,5.0,38.0,33.0
2021-10-28 13:30:00,4.0,34.0,30.0
2021-10-29 13:30:00,4.0,50.0,46.0
2021-11-01 13:30:00,2.0,49.0,47.0
2021-11-02 13:30:00,5.0,55.0,50.0
2021-11-03 13:30:00,3.0,54.0,51.0
2021-11-04 13:30:00,4.0,76.0,72.0
2021-11-05 13:30:00,2.0,83.0,81.0
2021-11-08 14:30:00,1.0,49.0,48.0
2021-11-09 14:30:00,2.0,31.0,29.0
2021-11-10 14:30:00,4.0,25.0,21.0
2021-11-11 14:30:00,6.0,14.0,8.0
2021-11-12 14:30:00,1.0,33.0,32.0
2021-11-15 14:30:00,2.0,50.0,48.0
2021-11-16 14:30:00,3.0,76.0,73.0
2021-11-17 14:30:00,6.0,44.0,38.0
2021-11-18 14:30:00,9.0,40.0,31.0
2021-11-19 14:30:00,10.0,45.0,35.0
2021-11-22 14:30:00,12.0,50.0,38.0
2021-11-23 14:30:00,6.0,16.0,10.0
2021-11-24 14:30:00,4.0,25.0,21.0
2021-11-26 14:30:00,25.0,7.0,-18.0
2021-11-29 14:30:00,20.0,15.0,-5.0
2021-11-30 14:30:00,44.0,7.0,-37.0
2021-12-01 14:30:00,44.0,13.0,-31.0
2021-12-02 14:30:00,13.0,4.0,-9.0
2021-12-03 14:30:00,7.0,11.0,4.0
2021-12-06 14:30:00,1.0,21.0,20.0
2021-12-07 14:30:00,0.01,36.0,35.99
2021-12-08 14:30:00,0.01,30.0,29.99
2021-12-09 14:30:00,1.0,23.0,22.0
2021-12-10 14:30:00,1.0,37.0,36.0
2021-12-13 14:30:00,4.0,53.0,49.0
2021-12-14 14:30:00,2.0,16.0,14.0
2021-12-15 14:30:00,11.0,39.0,28.0
2021-12-16 14:30:00,3.0,67.0,64.0
2021-12-17 14:30:00,7.0,22.0,15.0
2021-12-20 14:30:00,11.0,2.0,-9.0
2021-12-21 14:30:00,0.01,10.0,9.99
2021-12-22 14:30:00,0.01,14.0,13.99
2021-12-23 14:30:00,0.01,35.0,34.99
2021-12-27 14:30:00,0.01,57.0,56.99
2021-12-28 14:30:00,0.01,79.0,78.99
2021-12-29 14:30:00,0.01,78.0,77.99
2021-12-30 14:30:00,0.01,66.0,65.99
2021-12-31 14:30:00,0.01,43.0,42.99
2022-01-03 14:30:00,0.01,21.0,20.99
2022-01-04 14:30:00,1.0,65.0,64.0
2022-01-05 14:30:00,1.0,57.0,56.0
2022-01-06 14:30:00,2.0,30.0,28.0
2022-01-07 14:30:00,1.0,48.0,47.0
2022-01-10 14:30:00,6.0,39.0,33.0
2022-01-11 14:30:00,1.0,25.0,24.0
2022-01-12 14:30:00,1.0,36.0,35.0
2022-01-13 14:30:00,0.01,42.0,41.99
2022-01-14 14:30:00,3.0,39.0,36.0
2022-01-18 14:30:00,9.0,35.0,26.0
2022-01-19 14:30:00,6.0,13.0,7.0
2022-01-20 14:30:00,8.0,12.0,4.0
2022-01-21 14:30:00,24.0,5.0,-19.0
2022-01-24 14:30:00,29.0,1.0,-28.0
2022-01-25 14:30:00,3.0,7.0,4.0
2022-01-26 14:30:00,9.0,13.0,4.0
2022-01-27 14:30:00,13.0,18.0,5.0
2022-01-28 14:30:00,24.0,5.0,-19.0
2022-01-31 14:30:00,0.01,8.0,7.99
2022-02-01 14:30:00,1.0,18.0,17.0
2022-02-02 14:30:00,2.0,25.0,23.0
2022-02-03 14:30:00,6.0,27.0,21.0
2022-02-04 14:30:00,10.0,27.0,17.0
2022-02-07 14:30:00,6.0,19.0,13.0
2022-02-08 14:30:00,4.0,27.0,23.0
2022-02-09 14:30:00,0.01,38.0,37.99
2022-02-10 14:30:00,4.0,32.0,28.0
2022-02-11 14:30:00,13.0,16.0,3.0
2022-02-14 14:30:00,17.0,2.0,-15.0
2022-02-15 14:30:00,3.0,6.0,3.0
2022-02-16 14:30:00,7.0,16.0,9.0
2022-02-17 14:30:00,20.0,6.0,-14.0
2022-02-18 14:30:00,26.0,8.0,-18.0
2022-02-22 14:30:00,32.0,6.0,-26.0
2022-02-23 14:30:00,38.0,2.0,-36.0
2022-02-24 14:30:00,65.0,3.0,-62.0
2022-02-25 14:30:00,0.01,16.0,15.99
2022-02-28 14:30:00,5.0,21.0,16.0
2022-03-01 14:30:00,15.0,27.0,12.0
2022-03-02 14:30:00,1.0,28.0,27.0
2022-03-03 14:30:00,4.0,24.0,20.0
2022-03-04 14:30:00,27.0,38.0,11.0
2022-03-07 14:30:00,67.0,50.0,-17.0
2022-03-08 14:30:00,74.0,19.0,-55.0
2022-03-09 14:30:00,2.0,2.0,0.0
2022-03-10 14:30:00,12.0,5.0,-7.0
2022-03-11 14:30:00,16.0,13.0,-3.0
2022-03-14 13:30:00,32.0,10.0,-22.0
2022-03-15 13:30:00,7.0,13.0,6.0
2022-03-16 13:30:00,1.0,14.0,13.0
2022-03-17 13:30:00,0.01,18.0,17.99
2022-03-18 13:30:00,1.0,17.0,16.0
2022-03-21 13:30:00,0.01,38.0,37.99
2022-03-22 13:30:00,0.01,23.0,22.99
2022-03-23 13:30:00,4.0,22.0,18.0
2022-03-24 13:30:00,4.0,29.0,25.0
2022-03-25 13:30:00,5.0,57.0,52.0
2022-03-28 13:30:00,4.0,34.0,30.0
2022-03-29 13:30:00,0.01,51.0,50.99
2022-03-30 13:30:00,0.01,43.0,42.99
2022-03-31 13:30:00,8.0,52.0,44.0
2022-04-01 13:30:00,6.0,27.0,21.0
2022-04-04 13:30:00,3.0,11.0,8.0
2022-04-05 13:30:00,8.0,41.0,33.0
2022-04-06 13:30:00,22.0,41.0,19.0
2022-04-07 13:30:00,24.0,31.0,7.0
2022-04-08 13:30:00,2.0,56.0,54.0
2022-04-11 13:30:00,11.0,35.0,24.0
2022-04-12 13:30:00,15.0,24.0,9.0
2022-04-13 13:30:00,11.0,20.0,9.0
2022-04-14 13:30:00,14.0,32.0,18.0
2022-04-18 13:30:00,23.0,26.0,3.0
2022-04-19 13:30:00,2.0,26.0,24.0
2022-04-20 13:30:00,3.0,68.0,65.0
2022-04-21 13:30:00,15.0,76.0,61.0
2022-04-22 13:30:00,26.0,2.0,-24.0
2022-04-25 13:30:00,46.0,2.0,-44.0
2022-04-26 13:30:00,40.0,0.01,-39.99
2022-04-27 13:30:00,51.0,3.0,-48.0
2022-04-28 13:30:00,38.0,5.0,-33.0
2022-04-29 13:30:00,43.0,1.0,-42.0
2022-05-02 13:30:00,70.0,1.0,-69.0
2022-05-03 13:30:00,7.0,4.0,-3.0
2022-05-04 13:30:00,20.0,10.0,-10.0
2022-05-05 13:30:00,27.0,10.0,-17.0
2022-05-06 13:30:00,64.0,8.0,-56.0
2022-05-09 13:30:00,101.0,3.0,-98.0
2022-05-10 13:30:00,106.0,2.0,-104.0
2022-05-11 13:30:00,88.0,1.0,-87.0
2022-05-12 13:30:00,140.0,0.01,-139.99
2022-05-13 13:30:00,0.01,0.01,0.0
2022-05-16 13:30:00,7.0,7.0,0.0
2022-05-17 13:30:00,4.0,14.0,10.0
2022-05-18 13:30:00,35.0,5.0,-30.0
2022-05-19 13:30:00,66.0,1.0,-65.0
2022-05-20 13:30:00,69.0,2.0,-67.0
2022-05-23 13:30:00,7.0,4.0,-3.0
2022-05-24 13:30:00,44.0,7.0,-37.0
2022-05-25 13:30:00,9.0,14.0,5.0
2022-05-26 13:30:00,1.0,19.0,18.0
2022-05-27 13:30:00,1.0,18.0,17.0
2022-05-31 13:30:00,0.01,20.0,19.99
2022-06-01 13:30:00,3.0,3.0,0.0
2022-06-02 13:30:00,2.0,3.0,1.0
2022-06-03 13:30:00,1.0,7.0,6.0
2022-06-06 13:30:00,2.0,10.0,8.0
2022-06-07 13:30:00,4.0,13.0,9.0
2022-06-08 13:30:00,2.0,14.0,12.0
2022-06-09 13:30:00,14.0,1.0,-13.0
2022-06-10 13:30:00,51.0,0.01,-50.99
2022-06-13 13:30:00,148.0,0.01,-147.99
2022-06-14 13:30:00,142.0,1.0,-141.0
2022-06-15 13:30:00,48.0,0.01,-47.99
2022-06-16 13:30:00,210.0,0.01,-209.99
2022-06-17 13:30:00,105.0,0.01,-104.99
2022-06-21 13:30:00,15.0,0.01,-14.99
2022-06-22 13:30:00,45.0,0.01,-44.99
2022-06-23 13:30:00,41.0,0.01,-40.99
2022-06-24 13:30:00,0.01,3.0,2.99
2022-06-27 13:30:00,0.01,3.0,2.99
2022-06-28 13:30:00,1.0,2.0,1.0
2022-06-29 13:30:00,16.0,1.0,-15.0
2022-06-30 13:30:00,40.0,1.0,-39.0
2022-07-01 13:30:00,31.0,2.0,-29.0
2022-07-05 13:30:00,52.0,1.0,-51.0
2022-07-06 13:30:00,5.0,4.0,-1.0
2022-07-07 13:30:00,1.0,3.0,2.0
2022-07-08 13:30:00,0.01,4.0,3.99
2022-07-11 13:30:00,4.0,3.0,-1.0
2022-07-12 13:30:00,7.0,3.0,-4.0
2022-07-13 13:30:00,32.0,2.0,-30.0
2022-07-14 13:30:00,62.0,0.01,-61.99
2022-07-15 13:30:00,4.0,2.0,-2.0
2022-07-18 13:30:00,4.0,3.0,-1.0
2022-07-19 13:30:00,1.0,2.0,1.0
2022-07-20 13:30:00,1.0,0.01,-0.99
2022-07-21 13:30:00,1.0,0.01,-0.99
2022-07-22 13:30:00,4.0,1.0,-3.0
2022-07-25 13:30:00,2.0,3.0,1.0
2022-07-26 13:30:00,1.0,1.0,0.0
2022-07-27 13:30:00,2.0,2.0,0.0
2022-07-28 13:30:00,5.0,6.0,1.0
2022-07-29 13:30:00,6.0,9.0,3.0
2022-08-01 13:30:00,2.0,9.0,7.0
2022-08-02 13:30:00,1.0,5.0,4.0
2022-08-03 13:30:00,4.0,4.0,0.0
2022-08-04 13:30:00,3.0,7.0,4.0
2022-08-05 13:30:00,4.0,3.0,-1.0
2022-08-08 13:30:00,0.01,10.0,9.99
2022-08-09 13:30:00,3.0,8.0,5.0
2022-08-10 13:30:00,0.01,16.0,15.99
2022-08-11 13:30:00,0.01,13.0,12.99
2022-08-12 13:30:00,0.01,13.0,12.99
2022-08-15 13:30:00,0.01,23.0,22.99
2022-08-16 13:30:00,0.01,19.0,18.99
2022-08-17 13:30:00,0.01,12.0,11.99
2022-08-18 13:30:00,0.01,14.0,13.99
2022-08-19 13:30:00,1.0,7.0,6.0
2022-08-22 13:30:00,8.0,2.0,-6.0
2022-08-23 13:30:00,5.0,2.0,-3.0
2022-08-24 13:30:00,7.0,5.0,-2.0
2022-08-25 13:30:00,0.01,8.0,7.99
2022-08-26 13:30:00,16.0,5.0,-11.0
2022-08-29 13:30:00,22.0,2.0,-20.0
2022-08-30 13:30:00,20.0,0.01,-19.99
2022-08-31 13:30:00,14.0,1.0,-13.0
2022-09-01 13:30:00,36.0,1.0,-35.0
2022-09-02 13:30:00,14.0,2.0,-12.0
2022-09-06 13:30:00,29.0,0.01,-28.99
2022-09-07 13:30:00,15.0,5.0,-10.0
2022-09-08 13:30:00,7.0,6.0,-1.0
2022-09-09 13:30:00,0.01,6.0,5.99
2022-09-12 13:30:00,0.01,10.0,9.99
2022-09-13 13:30:00,16.0,1.0,-15.0
2022-09-14 13:30:00,30.0,2.0,-28.0
2022-09-15 13:30:00,21.0,0.01,-20.99
2022-09-16 13:30:00,56.0,0.01,-55.99
2022-09-19 13:30:00,27.0,0.01,-26.99
2022-09-20 13:30:00,67.0,1.0,-66.0
2022-09-21 13:30:00,71.0,2.0,-69.0
2022-09-22 13:30:00,120.0,1.0,-119.0
2022-09-23 13:30:00,149.0,1.0,-148.0
2022-09-26 13:30:00,117.0,0.01,-116.99
2022-09-27 13:30:00,141.0,0.01,-140.99
2022-09-28 13:30:00,34.0,1.0,-33.0
2022-09-29 13:30:00,105.0,0.01,-104.99
2022-09-30 13:30:00,87.0,0.01,-86.99
2022-10-03 13:30:00,24.0,1.0,-23.0
2022-10-04 13:30:00,1.0,1.0,0.0
2022-10-05 13:30:00,11.0,1.0,-10.0
2022-10-06 13:30:00,31.0,2.0,-29.0
2022-10-07 13:30:00,71.0,2.0,-69.0
2022-10-10 13:30:00,72.0,1.0,-71.0
2022-10-11 13:30:00,100.0,1.0,-99.0
2022-10-12 13:30:00,78.0,0.01,-77.99
2022-10-13 13:30:00,170.0,3.0,-167.0
2022-10-14 13:30:00,7.0,5.0,-2.0
2022-10-17 13:30:00,2.0,0.01,-1.99
2022-10-18 13:30:00,2.0,3.0,1.0
2022-10-19 13:30:00,9.0,2.0,-7.0
2022-10-20 13:30:00,28.0,3.0,-25.0
2022-10-21 13:30:00,32.0,9.0,-23.0
2022-10-24 13:30:00,6.0,20.0,14.0
2022-10-25 13:30:00,1.0,14.0,13.0
2022-10-26 13:30:00,3.0,25.0,22.0
2022-10-27 13:30:00,12.0,20.0,8.0
2022-10-28 13:30:00,8.0,31.0,23.0
2022-10-31 13:30:00,8.0,22.0,14.0
2022-11-01 13:30:00,8.0,20.0,12.0
2022-11-02 13:30:00,18.0,21.0,3.0
2022-11-03 13:30:00,43.0,5.0,-38.0
2022-11-04 13:30:00,24.0,18.0,-6.0
2022-11-07 14:30:00,13.0,17.0,4.0
2022-11-08 14:30:00,8.0,21.0,13.0
2022-11-09 14:30:00,15.0,9.0,-6.0
2022-11-10 14:30:00,0.01,18.0,17.99
2022-11-11 14:30:00,0.01,22.0,21.99
2022-11-14 14:30:00,2.0,14.0,12.0
2022-11-15 14:30:00,0.01,5.0,4.99
2022-11-16 14:30:00,2.0,3.0,1.0
2022-11-17 14:30:00,2.0,1.0,-1.0
2022-11-18 14:30:00,3.0,8.0,5.0
2022-11-21 14:30:00,2.0,9.0,7.0
2022-11-22 14:30:00,3.0,23.0,20.0
2022-11-23 14:30:00,0.01,20.0,19.99
2022-11-25 14:30:00,0.01,22.0,21.99
2022-11-28 14:30:00,2.0,10.0,8.0
2022-11-29 14:30:00,2.0,3.0,1.0
2022-11-30 14:30:00,1.0,24.0,23.0
2022-12-01 14:30:00,0.01,31.0,30.99
2022-12-02 14:30:00,0.01,20.0,19.99
2022-12-05 14:30:00,4.0,7.0,3.0
2022-12-06 14:30:00,9.0,2.0,-7.0
2022-12-07 14:30:00,7.0,7.0,0.0
2022-12-08 14:30:00,4.0,14.0,10.0
2022-12-09 14:30:00,1.0,5.0,4.0
2022-12-12 14:30:00,1.0,2.0,1.0
2022-12-13 14:30:00,1.0,19.0,18.0
2022-12-14 14:30:00,2.0,8.0,6.0
2022-12-15 14:30:00,6.0,2.0,-4.0
2022-12-16 14:30:00,18.0,1.0,-17.0
2022-12-19 14:30:00,19.0,4.0,-15.0
2022-12-20 14:30:00,14.0,1.0,-13.0
2022-12-21 14:30:00,3.0,5.0,2.0
2022-12-22 14:30:00,22.0,1.0,-21.0
2022-12-23 14:30:00,1.0,2.0,1.0
2022-12-27 14:30:00,3.0,8.0,5.0
2022-12-28 14:30:00,7.0,8.0,1.0
2022-12-29 14:30:00,0.01,1.0,0.99
2022-12-30 14:30:00,0.01,0.01,0.0
2023-01-03 14:30:00,6.0,1.0,-5.0
2023-01-04 14:30:00,0.01,4.0,3.99
2023-01-05 14:30:00,7.0,8.0,1.0
2023-01-06 14:30:00,5.0,18.0,13.0
2023-01-09 14:30:00,2.0,12.0,10.0
2023-01-10 14:30:00,0.01,4.0,3.99
2023-01-11 14:30:00,1.0,10.0,9.0
2023-01-12 14:30:00,1.0,14.0,13.0
2023-01-13 14:30:00,2.0,13.0,11.0
2023-01-17 14:30:00,1.0,15.0,14.0
2023-01-18 14:30:00,2.0,12.0,10.0
2023-01-19 14:30:00,2.0,1.0,-1.0
2023-01-20 14:30:00,4.0,0.01,-3.99
2023-01-23 14:30:00,0.01,9.0,8.99
2023-01-24 14:30:00,8.0,22.0,14.0
2023-01-25 14:30:00,1.0,9.0,8.0
2023-01-26 14:30:00,0.01,22.0,21.99
2023-01-27 14:30:00,0.01,16.0,15.99
2023-01-30 14:30:00,0.01,4.0,3.99
2023-01-31 14:30:00,0.01,10.0,9.99
2023-02-01 14:30:00,0.01,24.0,23.99
2023-02-02 14:30:00,1.0,35.0,34.0
2023-02-03 14:30:00,1.0,18.0,17.0
2023-02-06 14:30:00,1.0,5.0,4.0
2023-02-07 14:30:00,3.0,6.0,3.0
2023-02-08 14:30:00,2.0,11.0,9.0
2023-02-09 14:30:00,1.0,16.0,15.0
2023-02-10 14:30:00,0.01,3.0,2.99
2023-02-13 14:30:00,0.01,4.0,3.99
2023-02-14 14:30:00,0.01,9.0,8.99
2023-02-15 14:30:00,0.01,18.0,17.99
2023-02-16 14:30:00,1.0,8.0,7.0
2023-02-17 14:30:00,1.0,8.0,7.0
2023-02-21 14:30:00,1.0,2.0,1.0
2023-02-22 14:30:00,1.0,4.0,3.0
2023-02-23 14:30:00,3.0,6.0,3.0
2023-02-24 14:30:00,11.0,2.0,-9.0
2023-02-27 14:30:00,7.0,4.0,-3.0
2023-02-28 14:30:00,10.0,9.0,-1.0
2023-03-01 14:30:00,13.0,8.0,-5.0
2023-03-02 14:30:00,13.0,9.0,-4.0
2023-03-03 14:30:00,2.0,23.0,21.0
2023-03-06 14:30:00,1.0,20.0,19.0
2023-03-07 14:30:00,9.0,9.0,0.0
2023-03-08 14:30:00,11.0,2.0,-9.0
2023-03-09 14:30:00,22.0,5.0,-17.0
2023-03-10 14:30:00,40.0,0.01,-39.99
2023-03-13 13:30:00,48.0,0.01,-47.99
2023-03-14 13:30:00,15.0,2.0,-13.0
2023-03-15 13:30:00,38.0,3.0,-35.0
2023-03-16 13:30:00,21.0,3.0,-18.0
2023-03-17 13:30:00,19.0,4.0,-15.0
2023-03-20 13:30:00,8.0,2.0,-6.0
2023-03-21 13:30:00,2.0,6.0,4.0
2023-03-22 13:30:00,14.0,6.0,-8.0
2023-03-23 13:30:00,31.0,2.0,-29.0
2023-03-24 13:30:00,34.0,4.0,-30.0
2023-03-27 13:30:00,0.01,6.0,5.99
2023-03-28 13:30:00,0.01,6.0,5.99
2023-03-29 13:30:00,0.01,8.0,7.99
2023-03-30 13:30:00,0.01,6.0,5.99
2023-03-31 13:30:00,0.01,18.0,17.99
2023-04-03 13:30:00,0.01,20.0,19.99
2023-04-04 13:30:00,0.01,16.0,15.99
2023-04-05 13:30:00,2.0,10.0,8.0
2023-04-06 13:30:00,0.01,7.0,6.99
2023-04-10 13:30:00,0.01,1.0,0.99
2023-04-11 13:30:00,0.01,8.0,7.99
2023-04-12 13:30:00,2.0,13.0,11.0
2023-04-13 13:30:00,1.0,12.0,11.0
2023-04-14 13:30:00,2.0,10.0,8.0
2023-04-17 13:30:00,1.0,15.0,14.0
2023-04-18 13:30:00,0.01,26.0,25.99
2023-04-19 13:30:00,1.0,15.0,14.0
2023-04-20 13:30:00,3.0,22.0,19.0
2023-04-21 13:30:00,4.0,20.0,16.0
2023-04-24 13:30:00,2.0,20.0,18.0
2023-04-25 13:30:00,7.0,23.0,16.0
2023-04-26 13:30:00,11.0,5.0,-6.0
2023-04-27 13:30:00,4.0,20.0,16.0
2023-04-28 13:30:00,2.0,25.0,23.0
2023-05-01 13:30:00,1.0,34.0,33.0
2023-05-02 13:30:00,14.0,17.0,3.0
2023-05-03 13:30:00,12.0,23.0,11.0
2023-05-04 13:30:00,26.0,5.0,-21.0
2023-05-05 13:30:00,3.0,12.0,9.0
2023-05-08 13:30:00,7.0,12.0,5.0
2023-05-09 13:30:00,13.0,14.0,1.0
2023-05-10 13:30:00,11.0,18.0,7.0
2023-05-11 13:30:00,14.0,6.0,-8.0
2023-05-12 13:30:00,15.0,17.0,2.0
2023-05-15 13:30:00,8.0,11.0,3.0
2023-05-16 13:30:00,13.0,13.0,0.0
2023-05-17 13:30:00,15.0,20.0,5.0
2023-05-18 13:30:00,7.0,28.0,21.0
2023-05-19 13:30:00,3.0,28.0,25.0
2023-05-22 13:30:00,9.0,17.0,8.0
2023-05-23 13:30:00,1.0,3.0,2.0
2023-05-24 13:30:00,14.0,0.01,-13.99
2023-05-25 13:30:00,33.0,11.0,-22.0
2023-05-26 13:30:00,14.0,17.0,3.0
2023-05-30 13:30:00,17.0,20.0,3.0
2023-05-31 13:30:00,21.0,4.0,-17.0
2023-06-01 13:30:00,19.0,6.0,-13.0
2023-06-02 13:30:00,2.0,15.0,13.0
2023-06-05 13:30:00,5.0,15.0,10.0
2023-06-06 13:30:00,5.0,18.0,13.0
2023-06-07 13:30:00,0.01,22.0,21.99
2023-06-08 13:30:00,2.0,10.0,8.0
2023-06-09 13:30:00,6.0,16.0,10.0
2023-06-12 13:30:00,3.0,24.0,21.0
2023-06-13 13:30:00,0.01,43.0,42.99
2023-06-14 13:30:00,2.0,40.0,38.0
2023-06-15 13:30:00,0.01,48.0,47.99
2023-06-16 13:30:00,0.01,55.0,54.99
2023-06-20 13:30:00,0.01,14.0,13.99
2023-06-21 13:30:00,0.01,15.0,14.99
2023-06-22 13:30:00,5.0,15.0,10.0
2023-06-23 13:30:00,4.0,18.0,14.0
2023-06-26 13:30:00,2.0,16.0,14.0
2023-06-27 13:30:00,1.0,46.0,45.0
2023-06-28 13:30:00,6.0,41.0,35.0
2023-06-29 13:30:00,3.0,42.0,39.0
2023-06-30 13:30:00,0.01,76.0,75.99
2023-07-03 13:30:00,0.01,20.0,19.99
2023-07-05 13:30:00,0.01,17.0,16.99
2023-07-06 13:30:00,2.0,4.0,2.0
2023-07-07 13:30:00,5.0,11.0,6.0
2023-07-10 13:30:00,4.0,28.0,24.0
2023-07-11 13:30:00,1.0,50.0,49.0
2023-07-12 13:30:00,2.0,65.0,63.0
2023-07-13 13:30:00,1.0,51.0,50.0
2023-07-14 13:30:00,4.0,39.0,35.0
2023-07-17 13:30:00,4.0,59.0,55.0
2023-07-18 13:30:00,3.0,52.0,49.0
2023-07-19 13:30:00,0.01,44.0,43.99
2023-07-20 13:30:00,2.0,33.0,31.0
2023-07-21 13:30:00,0.01,34.0,33.99
2023-07-24 13:30:00,1.0,26.0,25.0
2023-07-25 13:30:00,0.01,40.0,39.99
2023-07-26 13:30:00,0.01,31.0,30.99
2023-07-27 13:30:00,1.0,49.0,48.0
2023-07-28 13:30:00,2.0,26.0,24.0
2023-07-31 13:30:00,1.0,28.0,27.0
2023-08-01 13:30:00,3.0,21.0,18.0
2023-08-02 13:30:00,5.0,12.0,7.0
2023-08-03 13:30:00,6.0,16.0,10.0
2023-08-04 13:30:00,11.0,18.0,7.0
2023-08-07 13:30:00,9.0,19.0,10.0
2023-08-08 13:30:00,17.0,13.0,-4.0
2023-08-09 13:30:00,6.0,15.0,9.0
2023-08-10 13:30:00,4.0,18.0,14.0
2023-08-11 13:30:00,3.0,4.0,1.0
2023-08-14 13:30:00,11.0,8.0,-3.0
2023-08-15 13:30:00,18.0,3.0,-15.0
2023-08-16 13:30:00,17.0,4.0,-13.0
2023-08-17 13:30:00,16.0,2.0,-14.0
2023-08-18 13:30:00,17.0,0.01,-16.99
2023-08-21 13:30:00,18.0,3.0,-15.0
2023-08-22 13:30:00,13.0,4.0,-9.0
2023-08-23 13:30:00,9.0,10.0,1.0
2023-08-24 13:30:00,13.0,10.0,-3.0
2023-08-25 13:30:00,9.0,6.0,-3.0
2023-08-28 13:30:00,2.0,10.0,8.0
2023-08-29 13:30:00,2.0,20.0,18.0
2023-08-30 13:30:00,1.0,24.0,23.0
2023-08-31 13:30:00,4.0,22.0,18.0
2023-09-01 13:30:00,19.0,26.0,7.0
2023-09-05 13:30:00,25.0,11.0,-14.0
2023-09-06 13:30:00,24.0,5.0,-19.0
2023-09-07 13:30:00,25.0,12.0,-13.0
2023-09-08 13:30:00,17.0,13.0,-4.0
2023-09-11 13:30:00,12.0,10.0,-2.0
2023-09-12 13:30:00,14.0,11.0,-3.0
2023-09-13 13:30:00,11.0,10.0,-1.0
2023-09-14 13:30:00,7.0,12.0,5.0
2023-09-15 13:30:00,9.0,8.0,-1.0
2023-09-18 13:30:00,11.0,6.0,-5.0
2023-09-19 13:30:00,9.0,7.0,-2.0
2023-09-20 13:30:00,6.0,14.0,8.0
2023-09-21 13:30:00,29.0,2.0,-27.0
2023-09-22 13:30:00,36.0,1.0,-35.0
2023-09-25 13:30:00,42.0,2.0,-40.0
2023-09-26 13:30:00,43.0,1.0,-42.0
2023-09-27 13:30:00,49.0,1.0,-48.0
2023-09-28 13:30:00,33.0,4.0,-29.0
2023-09-29 13:30:00,10.0,1.0,-9.0
2023-10-02 13:30:00,51.0,1.0,-50.0
2023-10-03 13:30:00,63.0,1.0,-62.0
2023-10-04 13:30:00,41.0,1.0,-40.0
2023-10-05 13:30:00,40.0,3.0,-37.0
2023-10-06 13:30:00,52.0,6.0,-46.0
2023-10-09 13:30:00,20.0,8.0,-12.0
2023-10-10 13:30:00,3.0,11.0,8.0
2023-10-11 13:30:00,9.0,12.0,3.0
2023-10-12 13:30:00,38.0,19.0,-19.0
2023-10-13 13:30:00,21.0,12.0,-9.0
2023-10-16 13:30:00,5.0,11.0,6.0
2023-10-17 13:30:00,4.0,17.0,13.0
2023-10-18 13:30:00,24.0,12.0,-12.0
2023-10-19 13:30:00,35.0,2.0,-33.0
2023-10-20 13:30:00,36.0,0.01,-35.99
2023-10-23 13:30:00,56.0,1.0,-55.0
2023-10-24 13:30:00,33.0,0.01,-32.99
2023-10-25 13:30:00,62.0,0.01,-61.99
2023-10-26 13:30:00,33.0,0.01,-32.99
2023-10-27 13:30:00,64.0,0.01,-63.99
2023-10-30 13:30:00,44.0,0.01,-43.99
2023-10-31 13:30:00,16.0,1.0,-15.0
2023-11-01 13:30:00,29.0,7.0,-22.0
2023-11-02 13:30:00,10.0,11.0,1.0
2023-11-03 13:30:00,1.0,20.0,19.0
2023-11-06 14:30:00,0.01,9.0,8.99
2023-11-07 14:30:00,3.0,13.0,10.0
2023-11-08 14:30:00,7.0,17.0,10.0
2023-11-09 14:30:00,12.0,19.0,7.0
2023-11-10 14:30:00,15.0,26.0,11.0
2023-11-13 14:30:00,7.0,26.0,19.0
2023-11-14 14:30:00,0.01,46.0,45.99
2023-11-15 14:30:00,0.01,42.0,41.99
2023-11-16 14:30:00,2.0,17.0,15.0
2023-11-17 14:30:00,1.0,17.0,16.0
2023-11-20 14:30:00,1.0,29.0,28.0
2023-11-21 14:30:00,0.01,30.0,29.99
2023-11-22 14:30:00,1.0,45.0,44.0
2023-11-24 14:30:00,0.01,23.0,22.99
2023-11-27 14:30:00,0.01,34.0,33.99
2023-11-28 14:30:00,1.0,20.0,19.0
2023-11-29 14:30:00,1.0,30.0,29.0
2023-11-30 14:30:00,2.0,37.0,35.0
2023-12-01 14:30:00,1.0,58.0,57.0
2023-12-04 14:30:00,0.01,38.0,37.99
2023-12-05 14:30:00,0.01,15.0,14.99
2023-12-06 14:30:00,0.01,29.0,28.99
2023-12-07 14:30:00,0.01,17.0,16.99
2023-12-08 14:30:00,0.01,30.0,29.99
2023-12-11 14:30:00,0.01,52.0,51.99
2023-12-12 14:30:00,2.0,71.0,69.0
2023-12-13 14:30:00,1.0,89.0,88.0
2023-12-14 14:30:00,0.01,93.0,92.99
2023-12-15 14:30:00,2.0,49.0,47.0
2023-12-18 14:30:00,2.0,33.0,31.0
2023-12-19 14:30:00,1.0,47.0,46.0
2023-12-20 14:30:00,1.0,40.0,39.0
2023-12-21 14:30:00,1.0,17.0,16.0
2023-12-22 14:30:00,0.01,39.0,38.99
2023-12-26 14:30:00,0.01,50.0,49.99
2023-12-27 14:30:00,0.01,50.0,49.99
2023-12-28 14:30:00,0.01,47.0,46.99
2023-12-29 14:30:00,0.01,33.0,32.99
2024-01-02 14:30:00,0.01,17.0,16.99
2024-01-03 14:30:00,0.01,16.0,15.99
2024-01-04 14:30:00,0.01,16.0,15.99
2024-01-05 14:30:00,0.01,9.0,8.99
2024-01-08 14:30:00,0.01,12.0,11.99
2024-01-09 14:30:00,0.01,11.0,10.99
2024-01-10 14:30:00,1.0,29.0,28.0
2024-01-11 14:30:00,1.0,38.0,37.0
2024-01-12 14:30:00,0.01,39.0,38.99
2024-01-16 14:30:00,2.0,23.0,21.0
2024-01-17 14:30:00,5.0,23.0,18.0
2024-01-18 14:30:00,8.0,28.0,20.0
2024-01-19 14:30:00,3.0,60.0,57.0
2024-01-22 14:30:00,3.0,84.0,81.0
2024-01-23 14:30:00,1.0,39.0,38.0
2024-01-24 14:30:00,2.0,66.0,64.0
2024-01-25 14:30:00,2.0,50.0,48.0
2024-01-26 14:30:00,1.0,35.0,34.0
2024-01-29 14:30:00,0.01,43.0,42.99
2024-01-30 14:30:00,0.01,79.0,78.99
2024-01-31 14:30:00,3.0,62.0,59.0
2024-02-01 14:30:00,5.0,38.0,33.0
2024-02-02 14:30:00,4.0,69.0,65.0
2024-02-05 14:30:00,11.0,30.0,19.0
2024-02-06 14:30:00,8.0,27.0,19.0
2024-02-07 14:30:00,4.0,76.0,72.0
2024-02-08 14:30:00,6.0,53.0,47.0
2024-02-09 14:30:00,4.0,47.0,43.0
2024-02-12 14:30:00,1.0,52.0,51.0
2024-02-13 14:30:00,7.0,17.0,10.0
2024-02-14 14:30:00,4.0,34.0,30.0
2024-02-15 14:30:00,2.0,70.0,68.0
2024-02-16 14:30:00,3.0,62.0,59.0
2024-02-20 14:30:00,3.0,31.0,28.0
2024-02-21 14:30:00,0.01,26.0,25.99
2024-02-22 14:30:00,1.0,85.0,84.0
2024-02-23 14:30:00,2.0,92.0,90.0
2024-02-26 14:30:00,1.0,70.0,69.0
2024-02-27 14:30:00,0.01,47.0,46.99
2024-02-28 14:30:00,1.0,69.0,68.0
2024-02-29 14:30:00,1.0,64.0,63.0
2024-03-01 14:30:00,2.0,88.0,86.0
2024-03-04 14:30:00,8.0,106.0,98.0
2024-03-05 14:30:00,8.0,50.0,42.0
2024-03-06 14:30:00,5.0,52.0,47.0
2024-03-07 14:30:00,1.0,81.0,80.0
2024-03-08 14:30:00,0.01,65.0,64.99
2024-03-11 13:30:00,0.01,21.0,20.99
2024-03-12 13:30:00,0.01,44.0,43.99
2024-03-13 13:30:00,0.01,58.0,57.99
2024-03-14 13:30:00,0.01,40.0,39.99
2024-03-15 13:30:00,0.01,27.0,26.99
2024-03-18 13:30:00,1.0,38.0,37.0
2024-03-19 13:30:00,1.0,43.0,42.0
2024-03-20 13:30:00,1.0,84.0,83.0
2024-03-21 13:30:00,1.0,115.0,114.0
2024-03-22 13:30:00,2.0,54.0,52.0
2024-03-25 13:30:00,2.0,30.0,28.0
2024-03-26 13:30:00,1.0,33.0,32.0
2024-03-27 13:30:00,0.01,64.0,63.99
2024-03-28 13:30:00,0.01,94.0,93.99
2024-04-01 13:30:00,1.0,39.0,38.0
2024-04-02 13:30:00,4.0,23.0,19.0
2024-04-03 13:30:00,4.0,31.0,27.0
2024-04-04 13:30:00,5.0,53.0,48.0
2024-04-05 13:30:00,5.0,17.0,12.0
2024-04-08 13:30:00,3.0,21.0,18.0
2024-04-09 13:30:00,1.0,14.0,13.0
2024-04-10 13:30:00,8.0,3.0,-5.0
2024-04-11 13:30:00,5.0,11.0,6.0
2024-04-12 13:30:00,8.0,12.0,4.0
2024-04-15 13:30:00,7.0,4.0,-3.0
2024-04-16 13:30:00,8.0,0.01,-7.99
2024-04-17 13:30:00,9.0,1.0,-8.0
2024-04-18 13:30:00,9.0,2.0,-7.0
2024-04-19 13:30:00,5.0,4.0,-1.0
2024-04-22 13:30:00,4.0,9.0,5.0
2024-04-23 13:30:00,2.0,12.0,10.0
2024-04-24 13:30:00,4.0,10.0,6.0
2024-04-25 13:30:00,5.0,14.0,9.0
2024-04-26 13:30:00,9.0,20.0,11.0
2024-04-29 13:30:00,1.0,16.0,15.0
2024-04-30 13:30:00,5.0,18.0,13.0
2024-05-01 13:30:00,11.0,11.0,0.0
2024-05-02 13:30:00,8.0,15.0,7.0
2024-05-03 13:30:00,1.0,21.0,20.0
2024-05-06 13:30:00,2.0,28.0,26.0
2024-05-07 13:30:00,2.0,47.0,45.0
2024-05-08 13:30:00,2.0,32.0,30.0
2024-05-09 13:30:00,2.0,34.0,32.0
2024-05-10 13:30:00,1.0,56.0,55.0
2024-05-13 13:30:00,0.01,38.0,37.99
2024-05-14 13:30:00,0.01,31.0,30.99
2024-05-15 13:30:00,0.01,70.0,69.99
2024-05-16 13:30:00,0.01,66.0,65.99
2024-05-17 13:30:00,1.0,44.0,43.0
2024-05-20 13:30:00,4.0,60.0,56.0
2024-05-21 13:30:00,6.0,53.0,47.0
2024-05-22 13:30:00,6.0,44.0,38.0
2024-05-23 13:30:00,7.0,32.0,25.0
2024-05-24 13:30:00,5.0,27.0,22.0
2024-05-28 13:30:00,11.0,22.0,11.0
2024-05-29 13:30:00,16.0,7.0,-9.0
2024-05-30 13:30:00,9.0,14.0,5.0
2024-05-31 13:30:00,7.0,18.0,11.0
2024-06-03 13:30:00,3.0,25.0,22.0
2024-06-04 13:30:00,6.0,19.0,13.0
2024-06-05 13:30:00,9.0,25.0,16.0
2024-06-06 13:30:00,5.0,27.0,22.0
2024-06-07 13:30:00,4.0,17.0,13.0
2024-06-10 13:30:00,5.0,20.0,15.0
2024-06-11 13:30:00,4.0,19.0,15.0
2024-06-12 13:30:00,1.0,39.0,38.0
2024-06-13 13:30:00,8.0,17.0,9.0
2024-06-14 13:30:00,15.0,11.0,-4.0
2024-06-17 13:30:00,6.0,38.0,32.0
2024-06-18 13:30:00,6.0,50.0,44.0
2024-06-20 13:30:00,5.0,30.0,25.0
2024-06-21 13:30:00,1.0,22.0,21.0
2024-06-24 13:30:00,0.01,35.0,34.99
2024-06-25 13:30:00,3.0,21.0,18.0
2024-06-26 13:30:00,5.0,10.0,5.0
2024-06-27 13:30:00,1.0,11.0,10.0
2024-06-28 13:30:00,1.0,18.0,17.0
2024-07-01 13:30:00,4.0,12.0,8.0
2024-07-02 13:30:00,4.0,15.0,11.0
2024-07-03 13:30:00,4.0,20.0,16.0
2024-07-05 13:30:00,5.0,18.0,13.0
2024-07-08 13:30:00,7.0,21.0,14.0
2024-07-09 13:30:00,15.0,30.0,15.0
2024-07-10 13:30:00,11.0,34.0,23.0"""
from AlgorithmImports import *



class RegularHoursConsolidator(TradeBarConsolidator):
    def Update(self, data):
        # Only forward bars stamped within regular trading hours (08:30 through 16:00 in the data's time zone)
        if (data.Time.hour == 8 and data.Time.minute >= 30) or (8 < data.Time.hour < 16) or (data.Time.hour == 16 and data.Time.minute == 0):
            super().Update(data)
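
# Illustrative usage (a sketch with assumed names, not part of the strategy's wiring):
# inside an algorithm's Initialize, a 30-minute regular-hours consolidator could be attached like
#
#   consolidator = RegularHoursConsolidator(timedelta(minutes=30))
#   self.SubscriptionManager.AddConsolidator(symbol, consolidator)
#   consolidator.DataConsolidated += self.on_half_hour_bar   # handler receiving (sender, bar)
#
# Bars stamped outside the 08:30-16:00 window are silently dropped by Update above.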
from AlgorithmImports import *
from datetime import timedelta
from data.custom_consolidator import RegularHoursConsolidator

class DataDistributor():



    def __init__(self, algo, symbol, warmup_info, indicator_map, receiver_map):
        self.algo = algo
        self.symbol = symbol
        self.warmup_info = warmup_info
        self.indicator_map = indicator_map
        self.receiver_map = receiver_map

        # Map each consolidator step (1-20) to a receiver that forwards bars to the indicators registered for it
        self.function_map = {step: self._make_receiver(step) for step in range(1, 21)}
        self.consolidators = {}


        # Create Consolidators
        # For each timeframe, create a consolidator, register it with the subscription manager, and wire its event to a receiver
        step = 0
        for timeframe, warm_up_period in self.warmup_info.items():
            step += 1
            delta = self.tuple_to_timedelta(timeframe)
            consolidator = RegularHoursConsolidator(delta)
            self.algo.SubscriptionManager.AddConsolidator(self.symbol, consolidator)
            consolidator.DataConsolidated += self.function_map[step]
            indicators = self.get_keys_by_value(self.indicator_map, timeframe)
            functions = self.grab_dictionary_values(indicators, self.receiver_map)
            self.consolidators[step] = functions
            # Request roughly twice the number of base-resolution bars needed to warm up this timeframe
            warm_up_period = (warm_up_period * timeframe[0]) * 2
            if timeframe[1] == "Minute":
                history = self.algo.History[TradeBar](self.symbol, warm_up_period, Resolution.Minute)
            elif timeframe[1] == "Hour":
                history = self.algo.History[TradeBar](self.symbol, warm_up_period, Resolution.Hour)
            elif timeframe[1] == "Day":
                history = self.algo.History[TradeBar](self.symbol, warm_up_period, Resolution.Daily)
            
            for bar in history:
                consolidator.Update(bar)

    # Return the indicator names that are mapped to the given timeframe
    def get_keys_by_value(self, dictionary, value):
        return [key for key, val in dictionary.items() if val == value]
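    # Example with hypothetical values: if dictionary = {"RSI": (30, "Minute"), "MFI": (30, "Minute"), "MACD": (1, "Hour")},
    # then get_keys_by_value(dictionary, (30, "Minute")) returns ["RSI", "MFI"].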

    # Collect the receive-bar functions of the given indicators so the consolidated TradeBars can be forwarded to them
    def grab_dictionary_values(self, keys, dictionary):
        values = []
        for key in keys:
            if key in dictionary:
                values.append(dictionary[key])
        return values



    # Creating timedelta objects supplied to the consolidators as timeframes
    def tuple_to_timedelta(self, time_tuple):
        if time_tuple[1] == "Minute":
            return timedelta(minutes=time_tuple[0])
        elif time_tuple[1] == "Hour":
            return timedelta(hours=time_tuple[0])
        elif time_tuple[1] == "Day":
            return timedelta(days=time_tuple[0])
        elif time_tuple[1] == "Week":
            return timedelta(weeks=time_tuple[0])
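    # Examples: tuple_to_timedelta((30, "Minute")) -> timedelta(minutes=30),
    #           tuple_to_timedelta((4, "Hour"))    -> timedelta(hours=4)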


    # Factory for the consolidator receivers: each receiver forwards the consolidated TradeBar
    # to every indicator receive function registered for its consolidator step
    def _make_receiver(self, step):
        def receive_bar(sender, bar):
            for function in self.consolidators[step]:
                function(sender, bar)
        return receive_bar

from AlgorithmImports import *
from configs import config_main as cfg



#// This function now also works with dependency injection for both long and short
#//  This means configurations for entry, exit, and indicators are passed directly into the function,
#//  enhancing flexibility and reducing dependencies on global configurations.
def get_timeframes(symbol_instances, symbol, direction, entry_config, exit_config, indicator_config):
    # Initialize variables to store receiver functions for various indicators.
    macd_r = None
    rsi_r = None
    bb_r = None
    cross_sma_r = None
    cross_ema_r = None
    stoch_r = None
    adx_r = None
    dvv_r = None
    chk_r = None
    obv_r = None
    chd_r = None 
    mfi_r = None
    roc_percent_r = None
    roc_ratio_r = None


    # Loop through the indicators associated with the specified symbol.
    for name, indicator in symbol_instances[symbol].indicators.items():
        # Map the indicator's receive_bar function to the appropriate variable based on its name.
        if name == "MACD":
            macd_r = indicator.receive_bar
        elif name == "RSI":
            rsi_r = indicator.receive_bar
        elif name == "BB":
            bb_r = indicator.receive_bar
        elif name == "SMA":
            cross_sma_r = indicator.sma_receivers
            cross_ema_r = indicator.ema_receivers
        elif name == "STOCH":
            stoch_r = indicator.receive_bar
        elif name == "OBV":
            obv_r = indicator.receive_bar
        elif name == "ADX":
            adx_r = indicator.receive_bar
        elif name == "DVV":
            dvv_r = indicator.receive_bar
        elif name == "CHK":
            chk_r = indicator.receive_bar
        elif name == "CHD":
            chd_r = indicator.receive_bar
        elif name == "MFI":
            mfi_r = indicator.receive_bar
        elif name == "ROC_PERCENT":
            roc_percent_r = indicator.receive_bar
        elif name == "ROC_RATIO":
            roc_ratio_r = indicator.receive_bar
    # Create empty dictionaries to store warm-up information, indicator mappings, and receiver mappings.
    warmup_info = {}
    indicator_map = {}
    receiver_map = {}

    # Build three maps per indicator so the relevant information can be looked up easily later,
    # and calculate the warm-up period each indicator needs.
    #// Indicator configuration now uses a single, generalized configuration object.
    #// This simplification means that indicator weights, timeframes, and periods are no longer hard-coded
    #// or dependent on specific global variables, increasing the code's adaptability and maintainability.
    # Check if MACD indicator weighting is not zero
    if indicator_config.MACD_WEIGHTING != 0:
        # If not zero, store the receiver object for MACD in receiver_map
        receiver_map["MACD"] = macd_r
        # Associate the MACD indicator with its timeframe in indicator_map
        indicator_map["MACD"] = indicator_config.MACD_TIMEFRAME
        # Create a list of MACD timeframes
        macd_timeframes = [indicator_config.MACD_FAST_PERIOD, indicator_config.MACD_SLOW_PERIOD, indicator_config.MACD_SIGNAL_PERIOD]
        # Check if the MACD timeframe is not already in warmup_info, add it with the maximum of the timeframes
        if indicator_config.MACD_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.MACD_TIMEFRAME] = max(macd_timeframes)
        # If it's already in warmup_info, update it with the maximum if necessary
        elif max(macd_timeframes) > warmup_info[indicator_config.MACD_TIMEFRAME]:
            warmup_info[indicator_config.MACD_TIMEFRAME] = max(macd_timeframes)
    # Repeat the above process for various other indicators below
    if indicator_config.RSI_WEIGHTING != 0:
        receiver_map["RSI"] = rsi_r
        indicator_map["RSI"] = indicator_config.RSI_TIMEFRAME
        rsi_timeframes = [indicator_config.RSI_PERIOD]
        if indicator_config.RSI_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.RSI_TIMEFRAME] = max(rsi_timeframes)
        elif max(rsi_timeframes) > warmup_info[indicator_config.RSI_TIMEFRAME]:
            warmup_info[indicator_config.RSI_TIMEFRAME] = max(rsi_timeframes)
    if indicator_config.BOLLINGER_BANDS_WEIGHTING != 0:
        receiver_map["BB"] = bb_r
        indicator_map["BB"] = indicator_config.BOLLINGER_BANDS_TIMEFRAME
        bb_timeframes = [indicator_config.BOLLINGER_BANDS_PERIOD]
        if indicator_config.BOLLINGER_BANDS_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.BOLLINGER_BANDS_TIMEFRAME] = max(bb_timeframes)
        elif max(bb_timeframes) > warmup_info[indicator_config.BOLLINGER_BANDS_TIMEFRAME]:
            warmup_info[indicator_config.BOLLINGER_BANDS_TIMEFRAME] = max(bb_timeframes)
    if indicator_config.CROSS_OVER_WEIGHTING != 0:
        cross_timeframes = [indicator_config.SMA_PERIOD_FAST, indicator_config.SMA_PERIOD_SLOW]
        for tf, receiver in zip(indicator_config.SMA_TIMEFRAMES, cross_sma_r):
            receiver_map["SMA"+str(tf)] = receiver
            indicator_map["SMA"+str(tf)] = tf
            if tf not in warmup_info.keys():
                warmup_info[tf] = max(cross_timeframes)
            elif max(cross_timeframes) > warmup_info[tf]:
                warmup_info[tf] = max(cross_timeframes)
        cross_timeframes = [indicator_config.EMA_PERIOD_FAST, indicator_config.EMA_PERIOD_SLOW]
        for tf, receiver in zip(indicator_config.EMA_TIMEFRAMES, cross_ema_r):
            receiver_map["EMA"+str(tf)] = receiver
            indicator_map["EMA"+str(tf)] = tf
            if tf not in warmup_info.keys():
                warmup_info[tf] = max(cross_timeframes)
            elif max(cross_timeframes) > warmup_info[tf]:
                warmup_info[tf] = max(cross_timeframes)
    if indicator_config.STOCHASTIC_OSCILLATOR_WEIGHTING != 0:
        receiver_map["STOCH"] = stoch_r
        indicator_map["STOCH"] = indicator_config.STOCHASTIC_OSCILLATOR_TIMEFRAME
        stoch_timeframes = [indicator_config.STOCHASTIC_OSCILLATOR_LENGTH]
        if indicator_config.STOCHASTIC_OSCILLATOR_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.STOCHASTIC_OSCILLATOR_TIMEFRAME] = max(stoch_timeframes)
        elif max(stoch_timeframes) > warmup_info[indicator_config.STOCHASTIC_OSCILLATOR_TIMEFRAME]:
            warmup_info[indicator_config.STOCHASTIC_OSCILLATOR_TIMEFRAME] = max(stoch_timeframes)
    if indicator_config.OBV_ACC_DIST_WEIGHTING != 0:
        receiver_map["OBV"] = obv_r
        indicator_map["OBV"] = indicator_config.OBV_ACC_DIST_TIMEFRAME
        obv_timeframes = [indicator_config.OBV_ACC_DIST_SMA_LENGTH]
        if indicator_config.OBV_ACC_DIST_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.OBV_ACC_DIST_TIMEFRAME] = max(obv_timeframes)
        elif max(obv_timeframes) > warmup_info[indicator_config.OBV_ACC_DIST_TIMEFRAME]:
            warmup_info[indicator_config.OBV_ACC_DIST_TIMEFRAME] = max(obv_timeframes)
    if indicator_config.ADX_WEIGHTING != 0:
        receiver_map["ADX"] = adx_r
        indicator_map["ADX"] = indicator_config.ADX_TIMEFRAME
        adx_timeframes = [indicator_config.ADX_LENGTH]
        if indicator_config.ADX_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.ADX_TIMEFRAME] = max(adx_timeframes)
        elif max(adx_timeframes) > warmup_info[indicator_config.ADX_TIMEFRAME]:
            warmup_info[indicator_config.ADX_TIMEFRAME] = max(adx_timeframes)
    if indicator_config.DVV_WEIGHTING != 0:
        receiver_map["DVV"] = dvv_r
        indicator_map["DVV"] = indicator_config.DVV_TIMEFRAME
        dvv_timeframes = [indicator_config.DVV_VOLATILITY_PERIOD, indicator_config.DVV_VOLUME_PERIOD, indicator_config.DVV_ZONE_PERIOD]
        if indicator_config.DVV_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.DVV_TIMEFRAME] = max(dvv_timeframes)
        elif max(dvv_timeframes) > warmup_info[indicator_config.DVV_TIMEFRAME]:
            warmup_info[indicator_config.DVV_TIMEFRAME] = max(dvv_timeframes)
    if indicator_config.CHAIKIN_VOLATILITY_WEIGHTING != 0:
        receiver_map["CHAIKIN"] = chk_r
        indicator_map["CHAIKIN"] = indicator_config.CHAIKIN_VOLATILITY_TIMEFRAME
        chk_timeframes = [indicator_config.CHAIKIN_VOLATILITY_LENGTH, indicator_config.CHAIKIN_VOLATILITY_ROC_LENGTH]
        if indicator_config.CHAIKIN_VOLATILITY_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.CHAIKIN_VOLATILITY_TIMEFRAME] = max(chk_timeframes)
        elif max(chk_timeframes) > warmup_info[indicator_config.CHAIKIN_VOLATILITY_TIMEFRAME]:
            warmup_info[indicator_config.CHAIKIN_VOLATILITY_TIMEFRAME] = max(chk_timeframes)
    if indicator_config.USE_CHANDELIER_EXITS:
        receiver_map["CHD"] = chd_r
        indicator_map["CHD"] = indicator_config.CHANDELIER_ATR_TIMEFRAME
        chd_timeframes = [indicator_config.CHANDELIER_ATR_PERIOD]
        if indicator_config.CHANDELIER_ATR_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.CHANDELIER_ATR_TIMEFRAME] = max(chd_timeframes)
        elif max(chd_timeframes) > warmup_info[indicator_config.CHANDELIER_ATR_TIMEFRAME]:
            warmup_info[indicator_config.CHANDELIER_ATR_TIMEFRAME] = max(chd_timeframes)
    if indicator_config.ROC_PERCENT_WEIGHTING != 0:
        receiver_map["ROC_PERCENT"] = roc_percent_r
        indicator_map["ROC_PERCENT"] = indicator_config.ROC_PERCENT_TIMEFRAME
        roc_percent_timeframes = [indicator_config.ROC_PERCENT_PERIOD]
        if indicator_config.ROC_PERCENT_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.ROC_PERCENT_TIMEFRAME] = max(roc_percent_timeframes)
        elif max(roc_percent_timeframes) > warmup_info[indicator_config.ROC_PERCENT_TIMEFRAME]:
            warmup_info[indicator_config.ROC_PERCENT_TIMEFRAME] = max(roc_percent_timeframes)
    if indicator_config.ROC_RATIO_WEIGHTING != 0:
        receiver_map["ROC_RATIO"] = roc_ratio_r
        indicator_map["ROC_RATIO"] = indicator_config.ROC_RATIO_TIMEFRAME
        roc_ratio_timeframes = [indicator_config.ROC_RATIO_PERIOD]
        if indicator_config.ROC_RATIO_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.ROC_RATIO_TIMEFRAME] = max(roc_ratio_timeframes)
        elif max(roc_ratio_timeframes) > warmup_info[indicator_config.ROC_RATIO_TIMEFRAME]:
            warmup_info[indicator_config.ROC_RATIO_TIMEFRAME] = max(roc_ratio_timeframes)
    if indicator_config.MFI_WEIGHTING != 0:
        receiver_map["MFI"] = mfi_r
        indicator_map["MFI"] = indicator_config.MFI_TIMEFRAME
        mfi_timeframes = [indicator_config.MFI_PERIOD]
        if indicator_config.MFI_TIMEFRAME not in warmup_info.keys():
            warmup_info[indicator_config.MFI_TIMEFRAME] = max(mfi_timeframes)
        elif max(mfi_timeframes) > warmup_info[indicator_config.MFI_TIMEFRAME]:
            warmup_info[indicator_config.MFI_TIMEFRAME] = max(mfi_timeframes)

    if entry_config.USE_SIMPLE_ENTRY:
        if "ROC_PERCENT" in entry_config.SIMPLE_ENTRY_CONFIG:
            receiver_map["ROC_PERCENT"] = roc_percent_r
            indicator_map["ROC_PERCENT"] = indicator_config.ROC_PERCENT_TIMEFRAME
            roc_percent_timeframes = [indicator_config.ROC_PERCENT_PERIOD]
            if indicator_config.ROC_PERCENT_TIMEFRAME not in warmup_info.keys():
                warmup_info[indicator_config.ROC_PERCENT_TIMEFRAME] = max(roc_percent_timeframes)
            elif max(roc_percent_timeframes) > warmup_info[indicator_config.ROC_PERCENT_TIMEFRAME]:
                warmup_info[indicator_config.ROC_PERCENT_TIMEFRAME] = max(roc_percent_timeframes)
        if "MFI" in entry_config.SIMPLE_ENTRY_CONFIG:
            receiver_map["MFI"] = mfi_r
            indicator_map["MFI"] = indicator_config.MFI_TIMEFRAME
            mfi_timeframes = [indicator_config.MFI_PERIOD]
            if indicator_config.MFI_TIMEFRAME not in warmup_info.keys():
                warmup_info[indicator_config.MFI_TIMEFRAME] = max(mfi_timeframes)
            elif max(mfi_timeframes) > warmup_info[indicator_config.MFI_TIMEFRAME]:
                warmup_info[indicator_config.MFI_TIMEFRAME] = max(mfi_timeframes)
        if "ADX" in entry_config.SIMPLE_ENTRY_CONFIG:
            receiver_map["ADX"] = adx_r
            indicator_map["ADX"] = indicator_config.ADX_TIMEFRAME
            adx_timeframes = [indicator_config.ADX_LENGTH]
            if indicator_config.ADX_TIMEFRAME not in warmup_info.keys():
                warmup_info[indicator_config.ADX_TIMEFRAME] = max(adx_timeframes)
            elif max(adx_timeframes) > warmup_info[indicator_config.ADX_TIMEFRAME]:
                warmup_info[indicator_config.ADX_TIMEFRAME] = max(adx_timeframes)


    return warmup_info, indicator_map, receiver_map
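
# Illustrative shape of the returned maps (hypothetical values, assuming a config where only a 14-period RSI
# runs on a 30-minute timeframe and only a standard MACD on a 1-hour timeframe):
#   warmup_info   = {(30, "Minute"): 14, (1, "Hour"): 26}
#   indicator_map = {"RSI": (30, "Minute"), "MACD": (1, "Hour")}
#   receiver_map  = {"RSI": <RSI receive_bar>, "MACD": <MACD receive_bar>}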
#region imports
from AlgorithmImports import *
from data.set_up_timeframes import get_timeframes
from configs.configs_long import config_long_entry as lcfg
from configs.configs_short import config_short_entry as scfg
from configs.configs_long import config_long_indicators as licfg
from configs.configs_short import config_short_indicators as sicfg
from configs.configs_long import config_long_exit as lxcfg
from configs.configs_short import config_short_exit as sxcfg


#endregion

#// This whole file/function was in main.py before
#// Moving this here de-clutters the main file and increases separation of concerns
def setup_data(symbol, symbol_instances_long, symbol_instances_short):
    warmup_info_long, indicator_map_long, receiver_map_long = get_timeframes(symbol_instances_long, symbol, "LONG", lcfg, lxcfg, licfg)
    warmup_info_short, indicator_map_short, receiver_map_short = get_timeframes(symbol_instances_short, symbol, "SHORT", scfg, sxcfg, sicfg)
    # Prefixing keys for long instances
    indicator_map_long_prefixed = {f'LONG_{key}': value for key, value in indicator_map_long.items()}
    receiver_map_long_prefixed = {f'LONG_{key}': value for key, value in receiver_map_long.items()}

    # Prefixing keys for short instances
    indicator_map_short_prefixed = {f'SHORT_{key}': value for key, value in indicator_map_short.items()}
    receiver_map_short_prefixed = {f'SHORT_{key}': value for key, value in receiver_map_short.items()}
    # Combine prefixed indicator maps
    combined_indicator_map = {**indicator_map_long_prefixed, **indicator_map_short_prefixed}
    # Combine prefixed receiver maps
    combined_receiver_map = {**receiver_map_long_prefixed, **receiver_map_short_prefixed}

    # Initialize an empty dictionary for the combined warmup_info
    combined_warmup_info = {}

    # Update combined_warmup_info with all keys from warmup_info_long
    combined_warmup_info.update(warmup_info_long)

    # Iterate over items in warmup_info_short and update combined_warmup_info
    for key, value in warmup_info_short.items():
        # If the key is also in warmup_info_long and its value is greater, update the value in combined_warmup_info
        if key in combined_warmup_info and combined_warmup_info[key] < value:
            combined_warmup_info[key] = value
        # If the key is not in combined_warmup_info, simply add it
        elif key not in combined_warmup_info:
            combined_warmup_info[key] = value
    return combined_warmup_info, combined_indicator_map, combined_receiver_map
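
# Illustrative result (hypothetical, assuming both directions use only a 14-period RSI on a 30-minute timeframe):
#   combined_indicator_map = {"LONG_RSI": (30, "Minute"), "SHORT_RSI": (30, "Minute")}
#   combined_receiver_map  = {"LONG_RSI": <long RSI receive_bar>, "SHORT_RSI": <short RSI receive_bar>}
#   combined_warmup_info   = {(30, "Minute"): 14}   # per timeframe, the larger of the long and short warm-ups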
#region imports
from AlgorithmImports import *
from indicators.adx_indicator import AverageDirectionalIndexIndicator
from indicators.bollinger_bands_indicator import BollingerBandsIndicator
from indicators.chaikin_vol_indicator import ChaikinVolatilityIndicator
from indicators.dvv_indicator import DirectionalVolatilityVolumeIndicator
from indicators.macd_indicator import MovingAverageConvergenceDivergenceIndicator
from indicators.obv_acc_dist_indicator import OnBalanceVolumeAccDist
from indicators.rsi_indicator import RelativeStrengthIndexIndicator
from indicators.mfi_indicator import MoneyFlowIndexIndicator
from indicators.roc_percent_indicator import RateOfChangePercentIndicator
from indicators.roc_ratio_indicator import RateOfChangeRatioIndicator
from indicators.stochastic_indicator import StochasticOscillatorIndicator
from indicators.vwap_indicator import VolumeWeightedAveragePriceIndicator
from indicators.moving_averages import MovingAverageCrossIndicator
from positions.chandelier_exit  import ChandelierExit
#endregion


#// This was previously in main.py and ran twice, once for long and once for short.
#// The same code now sets up both the long and short indicators through the injected dependencies.
#// The config files used to retrieve indicator information were previously hardcoded;
#// this change removes the duplication and improves maintainability.
def setup_indicators(self, symbol, symbol_instances, indicator_config, exit_config, entry_config):
    if indicator_config.MACD_WEIGHTING != 0:
        symbol_instances[symbol].indicators["MACD"] = MovingAverageConvergenceDivergenceIndicator(self, symbol, indicator_config.MACD_FAST_PERIOD, indicator_config.MACD_SLOW_PERIOD, indicator_config.MACD_SIGNAL_PERIOD, indicator_config.MACD_RELATIVE_WEIGHTING_GOLDEN_CROSS, indicator_config.MACD_RELATIVE_WEIGHTING_DEATH_CROSS, indicator_config.MACD_RELATIVE_WEIGHTING_ZERO_LINE, indicator_config.MACD_RELATIVE_WEIGHTING_POSITION, indicator_config.MACD_WEIGHTING)

    if indicator_config.RSI_WEIGHTING != 0:
        symbol_instances[symbol].indicators["RSI"] = RelativeStrengthIndexIndicator(self, symbol, indicator_config.RSI_PERIOD, indicator_config.RSI_OVERBOUGHT, indicator_config.RSI_BASELINE, indicator_config.RSI_OVERSOLD, indicator_config.RSI_WEIGHTING)

    if indicator_config.BOLLINGER_BANDS_WEIGHTING != 0:
        symbol_instances[symbol].indicators["BB"] = BollingerBandsIndicator(self, symbol, indicator_config.BOLLINGER_BANDS_PERIOD, indicator_config.BOLLINGER_BANDS_OFFSET, indicator_config.BOLLINGER_BANDS_WEIGHTING)

    if indicator_config.VWAP_WEIGHTING != 0:
        symbol_instances[symbol].indicators["VWAP"] = VolumeWeightedAveragePriceIndicator(self, symbol, indicator_config.VWAP_UPPER_BAND_1_OFFSET, indicator_config.VWAP_LOWER_BAND_1_OFFSET, indicator_config.VWAP_UPPER_BAND_2_OFFSET, indicator_config.VWAP_LOWER_BAND_2_OFFSET, indicator_config.VWAP_OFFSET, indicator_config.VWAP_WEIGHTING)

    if indicator_config.CROSS_OVER_WEIGHTING != 0:
        symbol_instances[symbol].indicators["SMA"] = MovingAverageCrossIndicator(self, symbol, indicator_config.SMA_TIMEFRAMES, indicator_config.EMA_TIMEFRAMES, indicator_config.SMA_PERIOD_FAST, indicator_config.SMA_PERIOD_SLOW, indicator_config.EMA_PERIOD_FAST, indicator_config.EMA_PERIOD_SLOW, indicator_config.ENABLE_SMA_CROSS, indicator_config.ENABLE_EMA_CROSS, indicator_config.ENABLE_SMA_GOLDEN_CROSS, indicator_config.ENABLE_EMA_GOLDEN_CROSS, indicator_config.ENABLE_SMA_DEATH_CROSS, indicator_config.ENABLE_EMA_DEATH_CROSS, indicator_config.GOLDEN_CROSS_WEIGHT_INCREASE_FACTOR, indicator_config.DEATH_CROSS_WEIGHT_INCREASE_FACTOR, indicator_config.GOLDEN_CROSS_WEIGHT_INCREASE_DURATION, indicator_config.DEATH_CROSS_WEIGHT_INCREASE_DURATION, indicator_config.CROSS_OVER_WEIGHTING)

    if indicator_config.STOCHASTIC_OSCILLATOR_WEIGHTING != 0:
        symbol_instances[symbol].indicators["STOCH"] = StochasticOscillatorIndicator(self, symbol, indicator_config.STOCHASTIC_OSCILLATOR_LENGTH, indicator_config.STOCHASTIC_OSCILLATOR_K_SMOOTHING, indicator_config.STOCHASTIC_OSCILLATOR_D_SMOOTHING, indicator_config.STOCHASTIC_OSCILLATOR_OVERBOUGHT, indicator_config.STOCHASTIC_OSCILLATOR_BASELINE, indicator_config.STOCHASTIC_OSCILLATOR_OVERSOLD, indicator_config.STOCHASTIC_OSCILLATOR_WEIGHTING)

    if indicator_config.OBV_ACC_DIST_WEIGHTING != 0:
        symbol_instances[symbol].indicators["OBV"] = OnBalanceVolumeAccDist(self, symbol, indicator_config.OBV_ACC_DIST_SMA_LENGTH, indicator_config.OBV_ACC_DIST_WEIGHTING)

    if indicator_config.ADX_WEIGHTING != 0:
        symbol_instances[symbol].indicators["ADX"] = AverageDirectionalIndexIndicator(self, symbol, indicator_config.ADX_LENGTH, indicator_config.ADX_MIN_TREND_STRENGTH, indicator_config.ADX_MAX_TREND_STRENGTH, indicator_config.ADX_WEIGHTING)

    if indicator_config.DVV_WEIGHTING != 0:
        symbol_instances[symbol].indicators["DVV"] = DirectionalVolatilityVolumeIndicator(self, symbol, indicator_config.DVV_VOLATILITY_PERIOD, indicator_config.DVV_VOLUME_PERIOD, indicator_config.DVV_ZONE_PERIOD, indicator_config.DVV_VOLATILITY_SMOOTHING, indicator_config.DVV_VOLUME_SMOOTHING, indicator_config.DVV_ZONE_SMOOTHING, indicator_config.DVV_WEIGHTING)

    if indicator_config.MFI_WEIGHTING != 0:
        symbol_instances[symbol].indicators["MFI"] = MoneyFlowIndexIndicator(self, symbol, indicator_config.MFI_PERIOD, indicator_config.MFI_OVERBOUGHT, indicator_config.MFI_OVERSOLD, indicator_config.MFI_WEIGHTING)

    if indicator_config.CHAIKIN_VOLATILITY_WEIGHTING != 0:
        symbol_instances[symbol].indicators["CHK"] = ChaikinVolatilityIndicator(self, symbol, indicator_config.CHAIKIN_VOLATILITY_LENGTH, indicator_config.CHAIKIN_VOLATILITY_ROC_LENGTH, indicator_config.CHAIKIN_VOLATILITY_WEIGHTING)
    
    if indicator_config.ROC_PERCENT_WEIGHTING != 0:
        symbol_instances[symbol].indicators["ROC_PERCENT"] = RateOfChangePercentIndicator(self, symbol, indicator_config.ROC_PERCENT_PERIOD, indicator_config.ROC_PERCENT_BOL_PERIOD, indicator_config.ROC_PERCENT_BOL_BAND_MULTIPLIER, indicator_config.ROC_PERCENT_LOWER_THRESHOLD, indicator_config.ROC_PERCENT_WEIGHTING)
    
    if indicator_config.ROC_RATIO_WEIGHTING != 0:
        symbol_instances[symbol].indicators["ROC_RATIO"] = RateOfChangeRatioIndicator(self, symbol, indicator_config.ROC_RATIO_PERIOD, indicator_config.ROC_RATIO_BOL_PERIOD, indicator_config.ROC_RATIO_BOL_BAND_MULTIPLIER, indicator_config.ROC_RATIO_LOWER_THRESHOLD, indicator_config.ROC_RATIO_WEIGHTING)
 

    if indicator_config.USE_CHANDELIER_EXITS:
        symbol_instances[symbol].indicators["CHD"] = ChandelierExit(self, symbol, indicator_config.CHANDELIER_ATR_PERIOD, indicator_config.CHANDELIER_ATR_MULTIPLIER, indicator_config.USE_CLOSE_PRICE_FOR_EXTREME_MOMENTUM)
    if entry_config.USE_SIMPLE_ENTRY:
        if "ADX" in entry_config.SIMPLE_ENTRY_CONFIG:
            symbol_instances[symbol].indicators["ADX"] = AverageDirectionalIndexIndicator(self, symbol, indicator_config.ADX_LENGTH, indicator_config.ADX_MIN_TREND_STRENGTH, indicator_config.ADX_MAX_TREND_STRENGTH, indicator_config.ADX_WEIGHTING)
        if "MFI" in entry_config.SIMPLE_ENTRY_CONFIG:
            symbol_instances[symbol].indicators["MFI"] = MoneyFlowIndexIndicator(self, symbol, indicator_config.MFI_PERIOD, indicator_config.MFI_OVERBOUGHT, indicator_config.MFI_OVERSOLD, indicator_config.MFI_WEIGHTING)
        if "ROC_PERCENT" in entry_config.SIMPLE_ENTRY_CONFIG:
            symbol_instances[symbol].indicators["ROC_PERCENT"] = RateOfChangePercentIndicator(self, symbol, indicator_config.ROC_PERCENT_PERIOD, indicator_config.ROC_PERCENT_BOL_PERIOD, indicator_config.ROC_PERCENT_BOL_BAND_MULTIPLIER, indicator_config.ROC_PERCENT_LOWER_THRESHOLD, indicator_config.ROC_PERCENT_WEIGHTING)

    
    return symbol_instances[symbol]
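
# Illustrative call pattern (a sketch mirroring setup_data above; how main.py actually wires this,
# and the symbol_instances dictionaries and config aliases, are assumptions):
#   symbol_instances_long[symbol]  = setup_indicators(self, symbol, symbol_instances_long,  licfg, lxcfg, lcfg)
#   symbol_instances_short[symbol] = setup_indicators(self, symbol, symbol_instances_short, sicfg, sxcfg, scfg)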

from AlgorithmImports import *
from configs import config_main as cfg
from configs.configs_long import config_long_entry as lcfg
from configs.configs_long import config_long_pause as lpcfg
from collections import deque
from data.custom_consolidator import RegularHoursConsolidator
from data.csv_str import csv_string
from datetime import datetime
from io import StringIO

class SymbolData():
    
    def __init__(self, algo, symbol):
        self.algo = algo
        self.symbol = symbol
        self.is_bear_market = False
        self.print_bear_market = lcfg.PRINT_BEAR_MARKET
        # Add SMA and ROC indicators
        self.sma150 = SimpleMovingAverage(150)
        self.roc = RateOfChange(9)
        self.roc_sma = SimpleMovingAverage(9)

        self.allocation_indicator_Values = {
            'BEAR_MARKET_DETECTION': 1,  # Default to not in bear market
            'NH_NL': 1  # Default to NH (New High)
        }
        self.vix = self.algo.AddIndex("VIX", Resolution.Minute).Symbol
        # Indicators
        self.sma_200 = SimpleMovingAverage(200)
        self.rsi = RelativeStrengthIndex(14)
        self.highest_close_100 = deque(maxlen=100)
        self.roc_per = RateOfChangePercent(14)

        self.enable_nh_nl = lcfg.ENABLE_NH_NL
        self.enable_nh_nl_exit = lcfg.ENABLE_NH_NL_EXIT
        self.nh_nl_ma_period = lcfg.NH_NL_MA_PERIOD

        self.use_bear_market_detection = lcfg.ENABLE_BEAR_MARKET_DETECTION

        self.pause_ma = lpcfg.MA_RESTRICTION_TYPE
        self.pause_ma_2 = lpcfg.MA_RESTRICTION_TYPE_2
        self.use_lowerband = lpcfg.USER_LOWER_BAND_IF_AVAILABLE
        self.hma_200  = lpcfg.MA_RESTRICTION_TYPE
        self.hma_50 = lpcfg.MA_RESTRICTION_TYPE_2
        pause_ma_timeframe = lpcfg.PAUSE_MA_TIMEFRAME

        resolution_map = {
            "Minute": Resolution.Minute,
            "Hour": Resolution.Hour,
            "Day": Resolution.Daily
        }
        resolution = resolution_map[pause_ma_timeframe[1]]

        if pause_ma_timeframe[1] == "Minute":
            self.pause_consolidator = RegularHoursConsolidator(timedelta(minutes=pause_ma_timeframe[0]))
        elif pause_ma_timeframe[1] == "Hour":
            self.pause_consolidator = RegularHoursConsolidator(timedelta(hours=pause_ma_timeframe[0]))
        elif pause_ma_timeframe[1] == "Day":
            self.pause_consolidator = RegularHoursConsolidator(timedelta(days=pause_ma_timeframe[0]))

        self.algo.SubscriptionManager.AddConsolidator(self.symbol, self.pause_consolidator)
        self.pause_consolidator.DataConsolidated += self.pause_consolidator_receive_bar

        history_period = 1000
        history = self.algo.History[TradeBar](self.symbol, history_period, resolution)
        for bar in history:
            self.pause_consolidator.Update(bar)
        self.is_below_pause_ma = False

        self.indicators = {}
        self.daily_reg = RegressionChannel(200, 2)
        self.sma = HullMovingAverage(cfg.CONDITIONAL_TRADING_END_TIME_SMA_PERIOD)
        self.bar_consolidator = RegularHoursConsolidator(timedelta(days=1))
        self.algo.SubscriptionManager.AddConsolidator(self.symbol, self.bar_consolidator)
        self.bar_consolidator.DataConsolidated += self.receive_bar

        self.warmup = True
        history = self.algo.History[TradeBar](self.symbol, history_period, Resolution.Daily)
        for bar in history:
            self.bar_consolidator.Update(bar)
        self.warmup = False
        self.atr = NormalizedAverageTrueRange(22)

        self.bar_consolidator_min = RegularHoursConsolidator(timedelta(minutes=1))
        self.algo.SubscriptionManager.AddConsolidator(self.symbol, self.bar_consolidator_min)
        self.bar_consolidator_min.DataConsolidated += self.receive_bar_min

        history_period = 50
        history = self.algo.History[TradeBar](self.symbol, history_period, Resolution.Minute)
        for bar in history:
            self.bar_consolidator_min.Update(bar)

        self.vix_consolidator = RegularHoursConsolidator(timedelta(minutes=1))
        algo.SubscriptionManager.AddConsolidator(self.vix, self.vix_consolidator)
        self.vix_consolidator.DataConsolidated += self.vix_minutely_update

        history_period = 3000
        self.vix_prices = deque(maxlen=history_period)

        self.vix_rc = False

        self.regression_channel = RegressionChannel(lpcfg.VIX_REGRESSION_CHANNEL_PERIOD, lpcfg.VIX_REGRESSION_CHANNEL_STDEV)  
        self.bar_consolidator_min_vix = RegularHoursConsolidator(timedelta(minutes=1))
        self.algo.SubscriptionManager.AddConsolidator(self.vix, self.bar_consolidator_min_vix)
        self.bar_consolidator_min_vix.DataConsolidated += self.update_regression_channel
        
        history = self.algo.History[TradeBar](self.vix, history_period*2, Resolution.Minute)
        for bar in history:
            self.vix_consolidator.Update(bar)
            self.bar_consolidator_min_vix.Update(bar)

        self.csv_string = csv_string 
        self.data = self.LoadCSVData(self.csv_string)


    def determine_nh_nl(self):
        if self.enable_nh_nl:
            current_date = self.algo.Time.date()
            current_value = self.data[self.data['time'].dt.date == current_date]
            
            if not current_value.empty:
                close_diff = current_value['close_diff'].values[0]
                if close_diff < 0:
                    self.allocation_indicator_Values['NH_NL'] = 0  # Set to 0 for NL (New Low)
                else:
                    self.allocation_indicator_Values['NH_NL'] = 1  # Set to 1 for NH (New High)
    
    def DetectBearMarket(self):
        cumulative_decline = (self.algo.Securities[self.symbol].Close - max(self.highest_close_100)) / max(self.highest_close_100)
        
        self.is_bear_market = (cumulative_decline < -0.125 and 
                               self.algo.Securities[self.symbol].Close < self.sma_200.Current.Value and 
                               self.rsi.Current.Value < 64 and self.roc_per.Current.Value < 5)
        
        self.allocation_indicator_Values['BEAR_MARKET_DETECTION'] = 0 if self.is_bear_market else 1
        
        if not self.warmup and self.print_bear_market:
            self.algo.Debug(f"{self.algo.Time}: Bear Market Signal: {self.is_bear_market}")

    def LoadCSVData(self, csv_string):
        # Load the CSV string into a pandas DataFrame
        df = pd.read_csv(StringIO(csv_string))
        # Convert the 'time' column to datetime
        df['time'] = pd.to_datetime(df['time'])
        # Calculate the moving average of 'close_diff'
        df['moving_avg'] = df['close_diff'].rolling(window=self.nh_nl_ma_period).mean()  # Moving average over the configured NH/NL period
        return df
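    # Illustrative note: the CSV string is expected to provide a parseable 'time' column and a
    # numeric 'close_diff' column, e.g. a header of "time,close_diff" with one row per day.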

    
      

    def update_regression_channel(self, sender, bar):
        self.regression_channel.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        # vix_rc is only set while the VIX close is above the upper regression channel
        self.vix_rc = bar.Close > self.regression_channel.upper_channel.Current.Value

    def pause_consolidator_receive_bar(self, sender, bar):
        self.pause_ma.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.pause_ma_2.Update(IndicatorDataPoint(bar.EndTime, bar.Low))
        self.hma_200.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.hma_50.Update(IndicatorDataPoint(bar.EndTime, bar.Low))

        compare_value_1 = self.pause_ma.LowerChannel.Current.Value if self.use_lowerband else self.pause_ma.Current.Value

        close_price = self.algo.Securities[self.symbol].Close
        self.is_below_pause_ma = close_price < compare_value_1

    def vix_minutely_update(self, sender, bar):
        self.vix_prices.append(bar)
    
    def get_percentage_change(self, minutes_back):
        if minutes_back <= 0 or minutes_back >= len(self.vix_prices):
            return None
        old_price = self.vix_prices[-minutes_back]
        current_price = self.vix_prices[-1]
        if old_price.Open == 0:
            return None
        # Change from the open of the bar 'minutes_back' minutes ago to the latest close
        percentage_change = (current_price.Close - old_price.Open) / old_price.Open * 100
        return percentage_change
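    # Worked example (illustrative): if the VIX bar 30 minutes ago opened at 15.0 and the latest
    # bar closed at 15.6, get_percentage_change(30) returns (15.6 - 15.0) / 15.0 * 100 = 4.0.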

    def receive_bar(self, sender, bar):
        self.sma.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.daily_reg.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.sma150.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.roc.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.roc_sma.Update(IndicatorDataPoint(bar.EndTime, self.roc.Current.Value))

        self.sma_200.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.rsi.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.roc_per.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.highest_close_100.appendleft(bar.Close)
        self.DetectBearMarket()


    def receive_bar_min(self, sender, bar):
        self.atr.Update(bar)

    def is_in_danger_zone(self):
        return (self.algo.Securities[self.symbol].Price < self.sma150.Current.Value) and (self.roc.Current.Value < self.roc_sma.Current.Value)
from AlgorithmImports import *


class AdvanceDeclineRatioIndicator:
    def __init__(self, algorithm):
        self.algo = algorithm
        self.symbol_data = {}  # Dictionary to store the previous close for each symbol
        self.advancers = 0
        self.decliners = 0

    def update(self, universe_symbols):
        # Reset advancers and decliners counts for each update
        self.advancers = 0
        self.decliners = 0

        for symbol in universe_symbols:
            # Initialize data structure for the symbol if not present
            if symbol not in self.symbol_data:
                self.symbol_data[symbol] = {
                    "previous_close": None
                }

            symbol_info = self.symbol_data[symbol]

            # Ensure the symbol has valid price data
            security = self.algo.Securities[symbol]
            if not security.Price or not security.HasData:
                continue

            current_close = security.Close

            # Perform logic only if we have a previous close value
            if symbol_info["previous_close"] is not None:
                if current_close > symbol_info["previous_close"]:
                    self.advancers += 1
                elif current_close < symbol_info["previous_close"]:
                    self.decliners += 1

            # Update the previous close for the next iteration
            symbol_info["previous_close"] = current_close

    @property
    def value(self):
        """
        Returns the Advance-Decline Ratio (ADR) and the advancers/decliners counts.
        """
        if self.decliners == 0:  # Avoid division by zero
            adr = float('inf') if self.advancers > 0 else 0
        else:
            adr = self.advancers / self.decliners

        return {
            "advancers": self.advancers,
            "decliners": self.decliners,
            "value": adr
        }
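    # Worked example (illustrative): with 300 advancers and 150 decliners, value returns
    # {"advancers": 300, "decliners": 150, "value": 2.0}; with no decliners and at least one
    # advancer the ratio is reported as float('inf').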
from AlgorithmImports import *




class AverageDirectionalIndexIndicator():
    # This function is the constructor. It initializes the object when it's created.
    def __init__(self, algo, symbol, period, min_trend_strength, max_trend_strength, max_weighting):
        # Store some initial values that are passed to the constructor.
        self.algo = algo
        self.symbol = symbol
        self.period = period
        self.min_trend_strength = min_trend_strength
        self.max_trend_strength = max_trend_strength
        self.current_weighting = 0
        self.max_weighting = max_weighting
        self.simple_condition = False
        # Create an AverageDirectionalIndex object with the specified period.
        self.adx = AverageDirectionalIndex(self.period)

    # This function calculates the weighting based on the current ADX, +DI, and -DI values.
    def calculate_weighting(self):
        adx = self.adx.Current.Value
        plus_di = self.adx.PositiveDirectionalIndex.Current.Value
        minus_di = self.adx.NegativeDirectionalIndex.Current.Value

        # Calculate a scaling factor based on the current ADX, min_trend_strength, and max_trend_strength.
        scaling_factor = (adx - self.min_trend_strength) / (self.max_trend_strength - self.min_trend_strength)
        scaling_factor = max(0, min(scaling_factor, 1))  # Ensure the scaling factor is between 0 and 1.

        # Determine the direction bias based on the relative positions of +DI and -DI.
        if plus_di > minus_di:
            direction_bias = 1  # Indicates an upward bias.
        elif minus_di > plus_di:
            direction_bias = -1  # Indicates a downward bias.
        else:
            direction_bias = 0  # Indicates no bias.

        # Scale the current weighting based on the scaling factor and direction bias.
        scaled_weighting = scaling_factor * self.max_weighting * direction_bias
        
        # Update the current weighting value.
        self.current_weighting = scaled_weighting
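        # Worked example (illustrative): with min_trend_strength=20, max_trend_strength=50 and
        # max_weighting=10, an ADX of 35 gives a scaling factor of (35 - 20) / (50 - 20) = 0.5,
        # so the weighting becomes +5 when +DI > -DI and -5 when -DI > +DI.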

    def calculate_simple_condition(self):
        adx = self.adx.Current.Value
        plus_di = self.adx.PositiveDirectionalIndex.Current.Value
        minus_di = self.adx.NegativeDirectionalIndex.Current.Value
        if minus_di > plus_di and adx >= 20:
            self.simple_condition = True
        else:
            self.simple_condition = False

    # This function receives a price bar (e.g., OHLC data) and updates the ADX values.
    def receive_bar(self, sender, bar):
        self.adx.Update(bar)
        if self.adx.IsReady:
            # Calculate the weighting based on the updated ADX values.
            self.calculate_weighting()
            self.calculate_simple_condition()
from AlgorithmImports import *
from collections import deque
from datetime import timedelta

class BearMarketIndicator:
    def __init__(self, algo, symbol, print_debug=False):
        self.algo = algo
        self.symbol = symbol
        self.print_debug = print_debug
        self.is_bear_market = False
        self.allocation_value = 1  # 1 if not a bear market, 0 if a bear market
        
        # Initialize indicators
        self.sma_200 = SimpleMovingAverage(200)
        self.rsi = RelativeStrengthIndex(14)
        self.roc_per = RateOfChangePercent(14)
        self.highest_close_100 = deque(maxlen=100)
        
        # Consolidator for daily bars
        self.bar_consolidator = TradeBarConsolidator(timedelta(days=1))
        self.bar_consolidator.DataConsolidated += self.receive_bar
        self.algo.SubscriptionManager.AddConsolidator(self.symbol, self.bar_consolidator)
        
        history = self.algo.History[TradeBar](self.symbol, 1000, Resolution.Daily)
        for bar in history:
            self.bar_consolidator.Update(bar)
    
    def receive_bar(self, sender, bar):
        # Update indicators
        self.sma_200.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.rsi.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.roc_per.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        self.highest_close_100.appendleft(bar.Close)
        
        # Detect bear market conditions
        self.detect_bear_market()
    
    def detect_bear_market(self):
        # Calculate cumulative decline
        current_price = self.algo.Securities[self.symbol].Close
        highest_close = max(self.highest_close_100) if self.highest_close_100 else current_price
        cumulative_decline = (current_price - highest_close) / highest_close
        
        # Bear market conditions
        self.is_bear_market = (
            cumulative_decline < -0.125 and 
            current_price < self.sma_200.Current.Value and 
            self.rsi.Current.Value < 64 and 
            self.roc_per.Current.Value < 5
        )
        
        # Update allocation value
        self.allocation_value = 0 if self.is_bear_market else 1
        
        # Debugging output
        if self.print_debug:
            self.algo.Debug(f"{self.algo.Time}: Bear Market Signal: {self.is_bear_market}")
    
    @property
    def value(self):
        return {'value':self.allocation_value}
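    # Usage sketch (illustrative; names are assumptions): inside Initialize(),
    #     self.bear = BearMarketIndicator(self, self.symbol, print_debug=True)
    # and later read self.bear.value['value'] (1 = no bear market, 0 = bear market).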
from AlgorithmImports import *




class BollingerBandsIndicator():




    def __init__(self, algo, symbol, period, offset, max_weighting):
        self.algo = algo
        self.symbol = symbol
        self.period = period
        self.offset = offset
        self.current_weighting = 0
        self.max_weighting = max_weighting



        self.bb = BollingerBands(self.period, self.offset)

    

    def calculate_weighting(self):
        # Weighting logic for the Bollinger Bands indicator has not been implemented yet.
        pass

    def receive_bar(self, sender, bar):
        self.bb.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        # if self.bb.IsReady:
        #     self.calculate_weighting()
from AlgorithmImports import *






class ChaikinVolatilityIndicator():
    # Constructor method to initialize the indicator with required parameters.
    def __init__(self, algo, symbol, period, roc_period, max_weighting):
        # Store the algorithm instance, symbol, period, ROC period, and maximum weighting as instance variables.
        self.algo = algo
        self.symbol = symbol
        self.period = period
        self.roc_period = roc_period
        self.current_weighting = 0
        self.max_weighting = max_weighting

        # Initialize some indicator objects: Exponential Moving Average (ema), and Rate of Change Percent (roc).
        self.ema = ExponentialMovingAverage(self.period)
        self.roc = RateOfChangePercent(self.roc_period)
        self.chaiking_vol = None

    # This method calculates a weighting value based on some calculations.
    def calculate_weighting(self):
        # Define old and new value ranges.
        oldMin = -100
        oldMax = 200
        newMin = -self.max_weighting
        newMax = self.max_weighting

        # Calculate a raw value using a formula.
        rawValue = ((self.roc.Current.Value - oldMin) / (oldMax - oldMin)) * (newMax - newMin) + newMin

        # Ensure the raw value is within the specified range (newMin to newMax).
        newValue = max(min(rawValue, newMax), newMin)

        # Update the current weighting with the new value.
        self.current_weighting = newValue
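        # Worked example (illustrative): with max_weighting=10, a ROC of 50% maps to
        # ((50 - (-100)) / (200 - (-100))) * 20 - 10 = 0, a ROC of 200% maps to +10,
        # and a ROC of -100% maps to -10.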

        # Optionally, there's a commented out section for debugging purposes.
        # If the absolute value of the current weighting exceeds the maximum weighting, it prints a debug message.

    # This method receives a "bar" of financial data and updates the indicators.
    def receive_bar(self, sender, bar):
        # Update the Exponential Moving Average (ema) with the difference between high and low prices of the bar.
        self.ema.Update(IndicatorDataPoint(bar.EndTime, bar.High - bar.Low))

        # Check if the Exponential Moving Average (ema) is ready (enough data points collected).
        if self.ema.IsReady:
            # Update the Rate of Change Percent (roc) with the current value of the Exponential Moving Average (ema).
            self.roc.Update(IndicatorDataPoint(bar.EndTime, self.ema.Current.Value))

            # Check if the Rate of Change Percent (roc) is ready.
            if self.roc.IsReady:
                # Calculate the weighting based on the Rate of Change Percent (roc).
                self.calculate_weighting()
from AlgorithmImports import *
from collections import deque



class DirectionalVolatilityVolumeIndicator():

    # Constructor method to initialize the object with various parameters
    def __init__(self, algo, symbol, volatility_period, volume_period, zone_period, volatility_type, volume_type, zone_type, max_weighting):
        # Store the input parameters as instance variables
        self.algo = algo  # Algorithm object
        self.symbol = symbol  # Symbol to analyze
        self.volatility_period = volatility_period  # Period for volatility calculation
        self.volume_period = volume_period  # Period for volume calculation
        self.zone_period = zone_period  # Period for zone calculation
        self.volatility_type = volatility_type  # Type of moving average for volatility
        self.volume_type = volume_type  # Type of moving average for volume
        self.zone_type = zone_type  # Type of moving average for zone
        self.max_weighting = max_weighting  # Maximum allowed weighting value

        # Initialize variables to store moving averages for volatility, volume, and zone
        self.volatility_ma = None
        self.volume_ma = None
        self.zone_ma = None
        self.znup = None
        self.zndn = None

        # Check the volatility type and create the appropriate moving average
        if self.volatility_type == "SMA":
            self.volatility_ma = SimpleMovingAverage(self.volatility_period)
        elif self.volatility_type == "EMA":
            self.volatility_ma = ExponentialMovingAverage(self.volatility_period)
        elif self.volatility_type == "RMA":
            self.volatility_ma = RelativeMovingAverage(self.volatility_period)
        elif self.volatility_type == "WMA":
            # For "WMA" type, use a deque and do not initialize volatility_ma
            self.volatility_queue = deque(maxlen=self.volatility_period)
        else:
            raise Exception(f"Invalid Moving Average Type selected for DVV Volatility Smoothing, {self.volatility_type} is invalid.")
        
        # Check the volume type and create the appropriate moving average
        if self.volume_type == "SMA":
            self.volume_ma = SimpleMovingAverage(self.volume_period)
        elif self.volume_type == "EMA":
            self.volume_ma = ExponentialMovingAverage(self.volume_period)
        elif self.volume_type == "RMA":
            self.volume_ma = RelativeMovingAverage(self.volume_period)
        elif self.volume_type == "WMA":
            # For "WMA" type, do not initialize volume_ma
            pass
        else:
            raise Exception(f"Invalid Moving Average Type selected for DVV Volume Smoothing, {self.volume_type} is invalid.")

        # Check the zone type and create the appropriate moving averages
        if self.zone_type == "SMA":
            self.zone_ma = SimpleMovingAverage(self.zone_period)
            self.znup = SimpleMovingAverage(self.zone_period)
            self.zndn = SimpleMovingAverage(self.zone_period)
        elif self.zone_type == "EMA":
            self.zone_ma = ExponentialMovingAverage(self.zone_period)
            self.znup = ExponentialMovingAverage(self.zone_period)
            self.zndn = ExponentialMovingAverage(self.zone_period)
        elif self.zone_type == "RMA":
            self.zone_ma = RelativeMovingAverage(self.zone_period)
            self.znup = RelativeMovingAverage(self.zone_period)
            self.zndn = RelativeMovingAverage(self.zone_period)
        elif self.zone_type == "WMA":
            # For "WMA" type, use deques and do not initialize zone_ma, znup, and zndn
            self.zone_queue = deque(maxlen=self.zone_period)
            self.zone_up_queue = deque(maxlen=self.zone_period)
            self.zone_dn_queue = deque(maxlen=self.zone_period)
        else:
            raise Exception(f"Invalid Moving Average Type selected for DVV Zone Smoothing, {self.zone_type} is invalid.")



        # Initialize variables to store various values and states.
        self.zero_line = None  # Stores a value related to a zero line (not specified in the provided code).
        self.val = None  # Stores a value (not specified in the provided code).
        self.vol = None  # Stores a value related to volume (not specified in the provided code).
        self.vol_sum = None  # Stores a summed value related to volume (not specified in the provided code).
        self.upbar = False  # Indicates whether the current price bar is considered an "up" bar (True/False).
        self.dnbar = False  # Indicates whether the current price bar is considered a "down" bar (True/False).

        # Initialize deque (double-ended queue) data structures to store recent values.
        self.val_queue = deque(maxlen=2)  # Stores values with a maximum length of 2.
        self.znup_queue = deque(maxlen=2)  # Stores values with a maximum length of 2.
        self.zndown_queue = deque(maxlen=2)  # Stores values with a maximum length of 2.
        self.volume_queue = deque(maxlen=self.volume_period)  # Stores volume values with a maximum length specified by 'volume_period'.

        # Initialize variables to store colors (not specified in the provided code).
        self.val_color = ""  # Stores a color related to 'val'.
        self.vol_color = ""  # Stores a color related to 'vol'.

        # Initialize variables related to the current weighting for the strategy.
        self.current_weighting = 0  # Stores the current weighting value.
        self.max_weighting = max_weighting  # Stores the maximum allowed weighting value.



        

        # Method to calculate a weighted moving average (WMA) for a list of items.
    def calculate_wma(self, list_item):
        y = 15  # Fixed lookback of 15 items used by this weighted moving average.
        norm = 0.0  # Initialize a variable to store the normalization factor.
        sum = 0.0  # Initialize a variable to store the weighted sum of values.

        # Iterate through a range of values from 0 to 'y'.
        for i in range(y):
            weight = (y - i) * y  # Calculate the weight for the current item.
            norm += weight  # Accumulate the weights for normalization.
            sum += list_item[i] * weight  # Multiply the value by its weight and accumulate the sum.

        return sum / norm  # Calculate and return the weighted moving average.
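        # Illustrative note: only the 15 most recent entries are read (y = 15), with weights
        # 15*15, 14*15, ..., 1*15; the constant factor 15 cancels in the division, so this equals
        # a plain 15-period weighted moving average of the newest items in the deque.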

    # Method to calculate the current weighting for the strategy.
    def calculate_weighting(self):
        # Define a nested function to map a value to a specified range.
        def map_to_range(value, min_value, max_value, min_range, max_range):
            scaled_value = min((value - max_range) / (max_range - min_range), 1)  # Scale the value.
            mapped_value = scaled_value * (max_value - min_value) + min_value  # Map the scaled value to the target range.
            return mapped_value
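        # Worked example (illustrative): with min_value=0, max_value=max_weighting/2, min_range=0
        # and max_range=500, an input of 1000 maps to max_weighting/2, an input of 750 maps to
        # max_weighting/4, an input of 500 maps to 0, and inputs below 500 map to negative values.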

        # Calculate the 'val_value' based on 'val_color'.
        if self.val_color == "GREEN":
            input_value = self.val_queue[0]  # Get the value from the queue.
            min_value = 0
            max_value = self.max_weighting / 2
            min_range = 0
            max_range = 500

            val_value = map_to_range(input_value, min_value, max_value, min_range, max_range)  # Map the value.

        else:
            input_value = abs(self.val_queue[0])  # Get the absolute value from the queue.
            min_value = 0
            max_value = self.max_weighting / 2
            min_range = 0
            max_range = 500

            val_value = -map_to_range(input_value, min_value, max_value, min_range, max_range)  # Map the negative value.

        # Calculate the 'vol_value' based on 'vol_color'.
        if self.vol_color == "BLUE":
            input_value = self.znup_queue[0]  # Get the value from the queue.
            min_value = 0
            max_value = self.max_weighting / 2
            min_range = 0
            max_range = 100

            vol_value = map_to_range(input_value, min_value, max_value, min_range, max_range)  # Map the value.

        else:
            input_value = abs(self.zndown_queue[0])  # Get the absolute value from the queue.
            min_value = 0
            max_value = self.max_weighting / 2
            min_range = 0
            max_range = 100

            vol_value = map_to_range(input_value, min_value, max_value, min_range, max_range)  # Map the value.

        # Calculate the current weighting as the sum of 'vol_value' and 'val_value'.
        self.current_weighting = vol_value + val_value

        # if abs(self.current_weighting) > self.max_weighting:
        #     self.algo.Debug(self.current_weighting)
        

        # Method to receive and process a new price bar.
    def receive_bar(self, sender, bar):
        zero_line = None  # Initialize a variable to store a value related to a zero line.
        val = None  # Initialize a variable to store a value.
        znup = None  # Initialize a variable to store a value related to 'znup'.
        zndown = None  # Initialize a variable to store a value related to 'zndown'.
        volsum = None  # Initialize a variable to store a value related to volume summation.

        # Check if the zone type is not "WMA".
        if self.zone_type != "WMA":
            self.zone_ma.Update(IndicatorDataPoint(bar.EndTime, ((bar.High - bar.Low) / 2)))  # Update the zone MA with a calculated value.
            self.znup.Update(IndicatorDataPoint(bar.EndTime, bar.High))  # Update 'znup' with the high price.
            self.zndn.Update(IndicatorDataPoint(bar.EndTime, bar.Low))  # Update 'zndown' with the low price.

            # Check if the zone MA is ready (has enough data points).
            if self.zone_ma.IsReady:
                zero_line = self.zone_ma.Current.Value  # Store the current value of the zone MA.
                znup = self.znup.Current.Value - zero_line  # Calculate 'znup' relative to the zero line.
                zndown = self.zndn.Current.Value - zero_line  # Calculate 'zndown' relative to the zero line.

        # If the zone type is "WMA", perform the following calculations.
        else:
            self.zone_queue.appendleft(bar.Close)  # Add the close price to the zone queue.
            self.zone_up_queue.appendleft(bar.High)  # Add the high price to the zone up queue.
            self.zone_dn_queue.appendleft(bar.Low)  # Add the low price to the zone down queue.

            # Check if the length of the queues reaches the specified period.
            if len(self.zone_queue) == self.zone_period:
                self.zone_ma = self.calculate_wma(self.zone_queue)  # Calculate the zone MA.
                zero_line = self.zone_ma  # Set the zero line to the calculated zone MA.

            if len(self.zone_up_queue) == self.zone_period:
                self.znup = self.calculate_wma(self.zone_up_queue)  # Calculate 'znup' relative to the zero line.

            if len(self.zone_dn_queue) == self.zone_period:
                self.zndn = self.calculate_wma(self.zone_dn_queue)  # Calculate 'zndown' relative to the zero line.

        # If a zero line value is available, proceed with calculations related to volatility.
        if zero_line is not None:
            # Check if the volatility type is not "WMA".
            if self.volatility_type != "WMA":
                self.volatility_ma.Update(IndicatorDataPoint(bar.EndTime, bar.Close))  # Update the volatility MA with the close price.

                # Check if the volatility MA is ready.
                if self.volatility_ma.IsReady:
                    val = self.volatility_ma.Current.Value - zero_line  # Calculate 'val' relative to the zero line.
            else:
                self.volatility_queue.appendleft(bar.Close)  # Add the close price to the volatility queue.

                # Check if the length of the queue reaches the specified period.
                if len(self.volatility_queue) == self.volatility_period:
                    self.volatility_ma = self.calculate_wma(self.volatility_queue)  # Calculate the volatility MA.
                    val = self.volatility_ma - zero_line  # Calculate 'val' relative to the zero line.

            # Determine the sign of 'vol' based on whether the current bar is up or down.
            if bar.Close > bar.Open:
                self.vol = bar.Volume
            else:
                self.vol = -bar.Volume

            # Check if the volume type is not "WMA".
            if self.volume_type != "WMA":
                self.volume_ma.Update(IndicatorDataPoint(bar.EndTime, self.vol))  # Update the volume MA with the volume data.
            else:
                self.volume_queue.appendleft(self.vol)  # Add the signed volume to the volume queue.

                # Check if the length of the queue reaches the specified period.
                if len(self.volume_queue) == self.volume_period:
                    self.volume_ma = self.calculate_wma(self.volume_queue)  # Calculate the volume MA.

            # Add 'val', 'znup', and 'zndown' to their respective queues.
            if val is not None:
                self.val_queue.appendleft(val)
                self.znup_queue.appendleft(znup)
                self.zndown_queue.appendleft(zndown)

                # Check if the length of the queues for 'val', 'znup', and 'zndown' is 2.
                if len(self.val_queue) == 2 and len(self.znup_queue) == 2 and len(self.zndown_queue) == 2:
                    # Determine whether the current bar is considered "up" or "down" based on previous values.
                    if self.val_queue[0] > self.znup_queue[0] and self.val_queue[1] < self.znup_queue[1]:
                        self.upbar = True
                    else:
                        self.upbar = False

                    if self.val_queue[0] > self.zndown_queue[0] and self.val_queue[1] < self.zndown_queue[1]:
                        self.dnbar = True
                    else:
                        self.dnbar = False

                    # Determine the color for 'val' and 'vol' based on conditions.
                    if self.val_queue[0] > self.znup_queue[0]:
                        self.val_color = "GREEN"
                    else:
                        self.val_color = "RED"

                    if len(self.volume_queue) == 0:
                        if self.volume_ma.Current.Value > 0:
                            self.vol_color = "BLUE"
                        else:
                            self.vol_color = "ORANGE"
                    else:
                        if self.volume_ma > 0:
                            self.vol_color = "BLUE"
                        else:
                            self.vol_color = "ORANGE"

                    # Calculate the current weighting for the strategy.
                    self.calculate_weighting()

            
from AlgorithmImports import *
from collections import deque





class MovingAverageConvergenceDivergenceIndicator():

    # Constructor to initialize the indicator with parameters.
    def __init__(self, algo, symbol, fast_period, slow_period, signal_period, rw_g_cross, rw_d_cross, rw_zero, rw_position, max_weighting):
        self.algo = algo  # Reference to the trading algorithm using this indicator.
        self.symbol = symbol  # The financial symbol this indicator is applied to.
        self.fast_period = fast_period  # Number of periods for the fast moving average.
        self.slow_period = slow_period  # Number of periods for the slow moving average.
        self.signal_period = signal_period  # Number of periods for the signal line.
        
        # Parameters used for weighting different aspects of the MACD signal.
        self.rw_g_cross = rw_g_cross / 100  # Weight for a bullish (positive) MACD crossover.
        self.rw_d_cross = rw_d_cross / 100  # Weight for a bearish (negative) MACD crossover.
        self.rw_zero = rw_zero / 100  # Weight for when MACD is above zero.
        self.rw_position = rw_position / 100  # Weight for MACD histogram position.
        
        self.max_weighting = max_weighting  # Maximum allowed weighting for any condition.
        self.current_weighting = 0  # Current calculated weighting.
        
        # Initialize the MACD indicator with the specified parameters.
        self.macd = MovingAverageConvergenceDivergence(self.fast_period, self.slow_period, self.signal_period)
        
        # A deque (double-ended queue) to store the last 2 MACD crossover values.
        self.macd_cross_queue = deque(maxlen=2)

    # Method to calculate the current weighting based on MACD conditions.
    def calculate_weighting(self):
        self.current_weighting = 0
        
        # Check for a bullish crossover and adjust the weighting accordingly.
        if self.macd_cross_queue[0] and not self.macd_cross_queue[1]:
            self.current_weighting += self.max_weighting * self.rw_g_cross
        # Check for a bearish crossover and adjust the weighting accordingly.
        elif not self.macd_cross_queue[0] and self.macd_cross_queue[1]:
            self.current_weighting -= self.max_weighting * self.rw_d_cross

        # Check if MACD is above zero and adjust the weighting.
        if self.macd.Current.Value > 0:
            self.current_weighting += self.max_weighting * self.rw_zero
        else:
            self.current_weighting -= self.max_weighting * self.rw_zero
        
        # Check whether MACD is currently above its signal line and adjust the weighting.
        if self.macd_cross_queue[0]:
            self.current_weighting += self.max_weighting * self.rw_position
        else:
            self.current_weighting -= self.max_weighting * self.rw_position
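        # Worked example (illustrative): with max_weighting=10, rw_g_cross=50, rw_zero=20 and
        # rw_position=30 (passed as percentages), a fresh bullish crossover while MACD is above
        # zero yields +10*0.5 + 10*0.2 + 10*0.3 = +10.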

        # Uncomment the following lines to debug the current weighting value.
        # if abs(self.current_weighting) > self.max_weighting:
        #     self.algo.Debug(self.current_weighting)
    
    # Method to receive new price bars and update the MACD indicator.
    def receive_bar(self, sender, bar):
        # Update the MACD indicator with the closing price of the new bar.
        self.macd.Update(IndicatorDataPoint(bar.EndTime, bar.Close))

        # Check if the MACD indicator is ready (has enough data).
        if self.macd.IsReady:
            # Compare the current MACD value with the current signal line value.
            # Determine if there was a bullish or bearish crossover and update the deque.
            if self.macd.Current.Value > self.macd.Signal.Current.Value:
                self.macd_cross_queue.appendleft(True)
            else:
                self.macd_cross_queue.appendleft(False)
            
            # If there are two values in the deque, calculate the current weighting.
            if len(self.macd_cross_queue) == 2:
                self.calculate_weighting()
from AlgorithmImports import *




class MoneyFlowIndexIndicator():

    # Constructor method to initialize the object with some parameters
    def __init__(self, algo, symbol, period, overbought, oversold, max_weighting):
        # Store the input parameters as instance variables
        self.algo = algo  # Algorithm object
        self.symbol = symbol  # Symbol to analyze
        self.period = period  # Period for MFI calculation
        self.overbought = overbought  # Overbought MFI threshold
        self.oversold = oversold  # Oversold MFI threshold
        # Assumed baseline for the weighting formula: the midpoint between the oversold and overbought thresholds
        self.baseline = (self.overbought + self.oversold) / 2

        self.simple_condition = True
        # Create an instance of the RelativeStrengthIndex class with the specified period
        self.mfi = MoneyFlowIndex(self.period)
        self.current_weighting = 0  # Current weighting value
        self.max_weighting = max_weighting  # Maximum allowed weighting value

    # Method to calculate the weighting based on MFI values
    def calculate_weighting(self):
        # Get the current MFI value
        mfi = self.mfi.Current.Value

        # Calculate the weighting based on MFI values and thresholds
        weighting = ((self.baseline - mfi) / (self.baseline - self.oversold) * self.max_weighting)

        # Check if the calculated weighting is negative
        if weighting < 0:
            # Recalculate weighting with a different formula if it's negative
            weighting = -((mfi - self.baseline) / (self.overbought - self.baseline) * self.max_weighting)
            
            # Check if the recalculated weighting is now positive, and if so, set it to 0
            if weighting > 0:
                weighting = 0
        
        # Ensure that the calculated weighting value is within the specified bounds
        newValue = max(min(weighting, self.max_weighting), -self.max_weighting)

        # Update the current weighting with the calculated value
        self.current_weighting = newValue
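        # Worked example (illustrative, using the assumed midpoint baseline): with oversold=20,
        # overbought=80 (baseline 50) and max_weighting=10, an MFI of 20 yields +10, an MFI of 80
        # yields -10, and an MFI of 50 yields 0.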

    def calculate_simple_condition(self):
        if self.mfi.Current.Value >= self.overbought:
            self.simple_condition = True
        else:
            self.simple_condition = False

    # Method to receive and process price bars
    def receive_bar(self, sender, bar):
        # Update the MFI indicator with the closing price of the received bar
        self.mfi.Update(IndicatorDataPoint(bar.EndTime, bar.Close))

        # Check if the MFI indicator is ready (has enough data points)
        if self.mfi.IsReady:
            # Calculate the weighting based on the MFI values
            self.calculate_simple_condition()
            self.calculate_weighting()
#region imports
from AlgorithmImports import *
from collections import deque
from datetime import timedelta
#endregion



class MovingAverageCrossIndicator():
    
    # The constructor (__init__) for this class takes several parameters:
    def __init__(self, algo, symbol, sma_timeframes, ema_timeframes, fast_sma_period, slow_sma_period, fast_ema_period, slow_ema_period, enable_sma_cross, enable_ema_cross, enable_sma_g_cross, enable_ema_g_cross, enable_s_d_cross, enable_e_d_cross, g_factor, d_factor, g_duration, d_duration, max_weighting):
        
        # Initialize and store various attributes based on the provided parameters.
        self.algo = algo  # A reference to the algorithm
        self.symbol = symbol  # The trading symbol (e.g., stock ticker)
        self.fast_sma_period = fast_sma_period  # Period for fast Simple Moving Average (SMA)
        self.slow_sma_period = slow_sma_period  # Period for slow Simple Moving Average (SMA)
        self.fast_ema_period = fast_ema_period  # Period for fast Exponential Moving Average (EMA)
        self.slow_ema_period = slow_ema_period  # Period for slow Exponential Moving Average (EMA)
        self.enable_sma_cross = enable_sma_cross  # Enable flag for SMA cross indicator
        self.enable_ema_cross = enable_ema_cross  # Enable flag for EMA cross indicator
        self.enable_sma_g_cross = enable_sma_g_cross  # Enable flag for SMA golden cross indicator
        self.enable_ema_g_cross = enable_ema_g_cross  # Enable flag for EMA golden cross indicator
        self.enable_s_d_cross = enable_s_d_cross  # Enable flag for SMA death cross indicator
        self.enable_e_d_cross = enable_e_d_cross  # Enable flag for EMA death cross indicator
        self.g_factor = g_factor
        self.d_factor = d_factor
        self.g_duration = g_duration
        self.d_duration = d_duration
        self.max_weighting = max_weighting  # Maximum weighting value
        
        # Initialize queues to store moving average values
        self.sma_queue = deque(maxlen=2)
        self.ema_queue = deque(maxlen=2)
        
        # Initialize variables to track the times when golden cross (g_cross) and death cross (d_cross) events occur
        self.sma_g_cross_time = None
        self.ema_g_cross_time = None
        self.sma_d_cross_time = None
        self.ema_d_cross_time = None
        
        # Initialize current weighting and lists to store receivers for SMA and EMA signals
        self.current_weighting = 0
        self.sma_receivers = []
        self.ema_receivers = []
        
        # Initialize dictionaries to store SMA and EMA values
        self.sma_dict = {}
        self.ema_dict = {}

        step = 0  # Initialize a counter to keep track of the current step

        # Loop through the elements in the 'sma_timeframes' list
        for timeframe in sma_timeframes:
            step += 1  # Increment the step counter with each iteration

            # Check which step we are in and take specific actions accordingly
            if step == 1:
                # If it's the first step, add a receiver function to 'sma_receivers'
                self.sma_receivers.append(self.receive_sma_bar_1)

                # Check if the 'receive_sma_bar_1' function is not already in 'sma_dict' keys
                if self.receive_sma_bar_1 not in self.sma_dict.keys():
                    # If not, add it to 'sma_dict' with associated moving average calculations
                    self.sma_dict[self.receive_sma_bar_1] = [("FAST", SimpleMovingAverage(self.fast_sma_period)), ("SLOW", SimpleMovingAverage(self.slow_sma_period))]
                else:
                    # If it's already in 'sma_dict', append more moving average calculations to it
                    self.sma_dict[self.receive_sma_bar_1].append(("FAST", SimpleMovingAverage(self.fast_sma_period)))
                    self.sma_dict[self.receive_sma_bar_1].append(("SLOW", SimpleMovingAverage(self.slow_sma_period)))
            
            # Repeat the same logic for steps 2, 3, 4, and 5, with different receiver functions ('receive_sma_bar_2', 'receive_sma_bar_3', etc.)
            elif step == 2:
                self.sma_receivers.append(self.receive_sma_bar_2)

                # Check if the 'receive_sma_bar_2' function is not already in 'sma_dict' keys
                if self.receive_sma_bar_2 not in self.sma_dict.keys():
                    # If not, add it to 'sma_dict' with associated moving average calculations
                    self.sma_dict[self.receive_sma_bar_2] = [("FAST", SimpleMovingAverage(self.fast_sma_period)), ("SLOW", SimpleMovingAverage(self.slow_sma_period))]
                else:
                    # If it's already in 'sma_dict', append more moving average calculations to it
                    self.sma_dict[self.receive_sma_bar_2].append(("FAST", SimpleMovingAverage(self.fast_sma_period)))
                    self.sma_dict[self.receive_sma_bar_2].append(("SLOW", SimpleMovingAverage(self.slow_sma_period)))

            # Continue the pattern for steps 3, 4, and 5, with 'receive_sma_bar_3', 'receive_sma_bar_4', and 'receive_sma_bar_5'
            elif step == 3:
                self.sma_receivers.append(self.receive_sma_bar_3)

                # Check if the 'receive_sma_bar_3' function is not already in 'sma_dict' keys
                if self.receive_sma_bar_3 not in self.sma_dict.keys():
                    # If not, add it to 'sma_dict' with associated moving average calculations
                    self.sma_dict[self.receive_sma_bar_3] = [("FAST", SimpleMovingAverage(self.fast_sma_period)), ("SLOW", SimpleMovingAverage(self.slow_sma_period))]
                else:
                    # If it's already in 'sma_dict', append more moving average calculations to it
                    self.sma_dict[self.receive_sma_bar_3].append(("FAST", SimpleMovingAverage(self.fast_sma_period)))
                    self.sma_dict[self.receive_sma_bar_3].append(("SLOW", SimpleMovingAverage(self.slow_sma_period)))

            elif step == 4:
                self.sma_receivers.append(self.receive_sma_bar_4)

                # Check if the 'receive_sma_bar_4' function is not already in 'sma_dict' keys
                if self.receive_sma_bar_4 not in self.sma_dict.keys():
                    # If not, add it to 'sma_dict' with associated moving average calculations
                    self.sma_dict[self.receive_sma_bar_4] = [("FAST", SimpleMovingAverage(self.fast_sma_period)), ("SLOW", SimpleMovingAverage(self.slow_sma_period))]
                else:
                    # If it's already in 'sma_dict', append more moving average calculations to it
                    self.sma_dict[self.receive_sma_bar_4].append(("FAST", SimpleMovingAverage(self.fast_sma_period)))
                    self.sma_dict[self.receive_sma_bar_4].append(("SLOW", SimpleMovingAverage(self.slow_sma_period)))

            elif step == 5:
                self.sma_receivers.append(self.receive_sma_bar_5)

                # Check if the 'receive_sma_bar_5' function is not already in 'sma_dict' keys
                if self.receive_sma_bar_5 not in self.sma_dict.keys():
                    # If not, add it to 'sma_dict' with associated moving average calculations
                    self.sma_dict[self.receive_sma_bar_5] = [("FAST", SimpleMovingAverage(self.fast_sma_period)), ("SLOW", SimpleMovingAverage(self.slow_sma_period))]
                else:
                    # If it's already in 'sma_dict', append more moving average calculations to it
                    self.sma_dict[self.receive_sma_bar_5].append(("FAST", SimpleMovingAverage(self.fast_sma_period)))
                    self.sma_dict[self.receive_sma_bar_5].append(("SLOW", SimpleMovingAverage(self.slow_sma_period)))

        step = 0  # Initialize a counter to keep track of the current step

        # Loop through the elements in the 'ema_timeframes' list
        for timeframe in ema_timeframes:
            step += 1  # Increment the step counter with each iteration

            # Check which step we are in and take specific actions accordingly
            if step == 1:
                # If it's the first step, add a receiver function to 'ema_receivers'
                self.ema_receivers.append(self.receive_ema_bar_1)

                # Check if the 'receive_ema_bar_1' function is not already in 'ema_dict' keys
                if self.receive_ema_bar_1 not in self.ema_dict.keys():
                    # If not, add it to 'ema_dict' with associated exponential moving average calculations
                    self.ema_dict[self.receive_ema_bar_1] = [("FAST", ExponentialMovingAverage(self.fast_ema_period)), ("SLOW", ExponentialMovingAverage(self.slow_ema_period))]
                else:
                    # If it's already in 'ema_dict', append more exponential moving average calculations to it
                    self.ema_dict[self.receive_ema_bar_1].append(("FAST", ExponentialMovingAverage(self.fast_ema_period)))
                    self.ema_dict[self.receive_ema_bar_1].append(("SLOW", ExponentialMovingAverage(self.slow_ema_period)))
            
            # Repeat the same logic for steps 2, 3, 4, and 5, with different receiver functions ('receive_ema_bar_2', 'receive_ema_bar_3', etc.)
            elif step == 2:
                self.ema_receivers.append(self.receive_ema_bar_2)

                # Check if the 'receive_ema_bar_2' function is not already in 'ema_dict' keys
                if self.receive_ema_bar_2 not in self.ema_dict.keys():
                    # If not, add it to 'ema_dict' with associated exponential moving average calculations
                    self.ema_dict[self.receive_ema_bar_2] = [("FAST", ExponentialMovingAverage(self.fast_ema_period)), ("SLOW", ExponentialMovingAverage(self.slow_ema_period))]
                else:
                    # If it's already in 'ema_dict', append more exponential moving average calculations to it
                    self.ema_dict[self.receive_ema_bar_2].append(("FAST", ExponentialMovingAverage(self.fast_ema_period)))
                    self.ema_dict[self.receive_ema_bar_2].append(("SLOW", ExponentialMovingAverage(self.slow_ema_period)))

            # Continue the pattern for steps 3, 4, and 5, with 'receive_ema_bar_3', 'receive_ema_bar_4', and 'receive_ema_bar_5'
            elif step == 3:
                self.ema_receivers.append(self.receive_ema_bar_3)

                # Check if the 'receive_ema_bar_3' function is not already in 'ema_dict' keys
                if self.receive_ema_bar_3 not in self.ema_dict.keys():
                    # If not, add it to 'ema_dict' with associated exponential moving average calculations
                    self.ema_dict[self.receive_ema_bar_3] = [("FAST", ExponentialMovingAverage(self.fast_ema_period)), ("SLOW", ExponentialMovingAverage(self.slow_ema_period))]
                else:
                    # If it's already in 'ema_dict', append more exponential moving average calculations to it
                    self.ema_dict[self.receive_ema_bar_3].append(("FAST", ExponentialMovingAverage(self.fast_ema_period)))
                    self.ema_dict[self.receive_ema_bar_3].append(("SLOW", ExponentialMovingAverage(self.slow_ema_period)))

            elif step == 4:
                self.ema_receivers.append(self.receive_ema_bar_4)

                # Check if the 'receive_ema_bar_4' function is not already in 'ema_dict' keys
                if self.receive_ema_bar_4 not in self.ema_dict.keys():
                    # If not, add it to 'ema_dict' with associated exponential moving average calculations
                    self.ema_dict[self.receive_ema_bar_4] = [("FAST", ExponentialMovingAverage(self.fast_ema_period)), ("SLOW", ExponentialMovingAverage(self.slow_ema_period))]
                else:
                    # If it's already in 'ema_dict', append more exponential moving average calculations to it
                    self.ema_dict[self.receive_ema_bar_4].append(("FAST", ExponentialMovingAverage(self.fast_ema_period)))
                    self.ema_dict[self.receive_ema_bar_4].append(("SLOW", ExponentialMovingAverage(self.slow_ema_period)))

            elif step == 5:
                self.ema_receivers.append(self.receive_ema_bar_5)

                # Check if the 'receive_ema_bar_5' function is not already in 'ema_dict' keys
                if self.receive_ema_bar_5 not in self.ema_dict.keys():
                    # If not, add it to 'ema_dict' with associated exponential moving average calculations
                    self.ema_dict[self.receive_ema_bar_5] = [("FAST", ExponentialMovingAverage(self.fast_ema_period)), ("SLOW", ExponentialMovingAverage(self.slow_ema_period))]
                else:
                    # If it's already in 'ema_dict', append more exponential moving average calculations to it
                    self.ema_dict[self.receive_ema_bar_5].append(("FAST", ExponentialMovingAverage(self.fast_ema_period)))
                    self.ema_dict[self.receive_ema_bar_5].append(("SLOW", ExponentialMovingAverage(self.slow_ema_period)))



    def calculate_weighting(self):
        # Initialize flags and counters
        appended_sma = False
        appended_ema = False
        sma_up_counter = 0
        sma_down_counter = 0
        ema_up_counter = 0
        ema_down_counter = 0

        # Calculate weighting based on Simple Moving Averages (SMA) if enabled
        if self.enable_sma_cross:
            # Loop through items in 'sma_dict'
            for key, val in self.sma_dict.items():
                for tup in val:
                    # Extract current values of 'FAST' and 'SLOW' SMAs
                    if tup[0] == "FAST":
                        fast = tup[1].Current.Value
                    elif tup[0] == "SLOW":
                        slow = tup[1].Current.Value
                        # Check if 'FAST' SMA is above 'SLOW' SMA and update counters
                        if fast > slow:
                            sma_up_counter += 1
                        else:
                            sma_down_counter -= 1
                        # Append the SMA cross state to the queue if not done already
                        if not appended_sma:
                            if fast > slow:
                                self.sma_queue.append(True)
                            else:
                                self.sma_queue.append(False)
                        appended_sma = True

        # Calculate weighting based on Exponential Moving Averages (EMA) if enabled
        if self.enable_ema_cross:
            # Loop through items in 'ema_dict'
            for key, val in self.ema_dict.items():
                for tup in val:
                    # Extract current values of 'FAST' and 'SLOW' EMAs
                    if tup[0] == "FAST":
                        fast = tup[1].Current.Value
                    elif tup[0] == "SLOW":
                        slow = tup[1].Current.Value
                        # Check if 'FAST' EMA is above 'SLOW' EMA and update counters
                        if fast > slow:
                            ema_up_counter += 1
                        else:
                            ema_down_counter -= 1
                        # Append the EMA cross state to the queue if not done already
                        if not appended_ema:
                            if fast > slow:
                                self.ema_queue.append(True)
                            else:
                                self.ema_queue.append(False)
                        appended_ema = True

        # Check for Simple Moving Average Golden Cross (SMA-G)
        if len(self.sma_queue) >= 2:
            if self.sma_queue[0] and not self.sma_queue[1]:
                self.sma_g_cross_time = self.algo.Time

        # Check for Exponential Moving Average Golden Cross (EMA-G)
        if len(self.ema_queue) >= 2:
            if self.ema_queue[0] and not self.ema_queue[1]:
                self.ema_g_cross_time = self.algo.Time

        # Check for Simple Moving Average Death Cross (SMA-D)
        if len(self.sma_queue) >= 2:
            if not self.sma_queue[0] and self.sma_queue[1]:
                self.sma_d_cross_time = self.algo.Time

        # Check for Exponential Moving Average Death Cross (EMA-D)
        if len(self.ema_queue) >= 2:
            if not self.ema_queue[0] and self.ema_queue[1]:
                self.ema_d_cross_time = self.algo.Time





        # Initialize the multiplier to zero
        multiplier = 0

        # Check if Simple Moving Average Golden Cross (SMA-G) is enabled and if there's a recorded cross time
        if self.enable_sma_g_cross and self.sma_g_cross_time is not None:
            # Check if the current time is within the specified duration after the cross
            if self.algo.Time <= (self.sma_g_cross_time + timedelta(minutes=self.g_duration)):
                # Increase the multiplier based on the configured factor
                multiplier += self.g_factor
            else:
                # Reset the cross time if the duration has passed
                self.sma_g_cross_time = None

        # Repeat the same logic for Exponential Moving Average Golden Cross (EMA-G)
        if self.enable_ema_g_cross and self.ema_g_cross_time is not None:
            if self.algo.Time <= (self.ema_g_cross_time + timedelta(minutes=self.g_duration)):
                multiplier += self.g_factor
            else:
                self.ema_g_cross_time = None

        # Check if Simple Moving Average Death Cross (SMA-D) is enabled and if there's a recorded cross time
        if self.enable_s_d_cross and self.sma_d_cross_time is not None:
            # Check if the current time is within the specified duration after the cross
            if self.algo.Time <= (self.sma_d_cross_time + timedelta(minutes=self.d_duration)):
                # Decrease the multiplier based on the configured factor
                multiplier -= self.d_factor
            else:
                # Reset the cross time if the duration has passed
                self.sma_d_cross_time = None

        # Repeat the same logic for Exponential Moving Average Death Cross (EMA-D)
        if self.enable_e_d_cross and self.ema_d_cross_time is not None:
            if self.algo.Time <= (self.ema_d_cross_time + timedelta(minutes=self.d_duration)):
                multiplier -= self.d_factor
            else:
                self.ema_d_cross_time = None

        # Calculate the initial weighting based on SMA and EMA counters
        weighting = (sma_up_counter) + (ema_up_counter) + (sma_down_counter) + (ema_down_counter)

        # Apply the multiplier to the weighting if both have the same sign
        if weighting < 0 and multiplier < 0:
            weighting *= abs(multiplier)
        elif weighting > 0 and multiplier > 0:
            weighting *= abs(multiplier)

        # Limit the weighting value to the maximum allowed
        if weighting > self.max_weighting:
            weighting = self.max_weighting
        if weighting < -self.max_weighting:
            weighting = - self.max_weighting

        # Update the current weighting value
        self.current_weighting = weighting
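        # Worked example (illustrative): with three configured SMA timeframes all showing the fast
        # SMA above the slow SMA (and EMA crosses disabled), sma_up_counter is 3; if a golden cross
        # occurred within g_duration minutes and g_factor is 2, the weighting becomes 3 * 2 = 6,
        # capped at max_weighting.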

        # if abs(self.current_weighting) > self.max_weighting:
        #   self.algo.Debug(self.current_weighting)




    # Define functions to receive Simple Moving Average (SMA) and Exponential Moving Average (EMA) bars

    # Function to receive bars for the first configured SMA timeframe
    def receive_sma_bar_1(self, sender, bar):
        # Loop through the SMA indicators associated with this function
        for tup in self.sma_dict[self.receive_sma_bar_1]:
            # Update the SMA indicator with the received bar's data
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive bars for the second configured SMA timeframe
    def receive_sma_bar_2(self, sender, bar):
        for tup in self.sma_dict[self.receive_sma_bar_2]:
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive bars for the third configured SMA timeframe
    def receive_sma_bar_3(self, sender, bar):
        for tup in self.sma_dict[self.receive_sma_bar_3]:
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive bars for the fourth configured SMA timeframe
    def receive_sma_bar_4(self, sender, bar):
        # Loop through the SMA indicators associated with this function
        for tup in self.sma_dict[self.receive_sma_bar_4]:
            # Update the SMA indicator with the received bar's data
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive bars for the fifth configured SMA timeframe
    def receive_sma_bar_5(self, sender, bar):
        # Loop through the SMA indicators associated with this function
        for tup in self.sma_dict[self.receive_sma_bar_5]:
            # Update the SMA indicator with the received bar's data
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive 1-minute EMA bars
    def receive_ema_bar_1(self, sender, bar):
        # Loop through the EMA indicators associated with this function
        for tup in self.ema_dict[self.receive_ema_bar_1]:
            # Update the EMA indicator with the received bar's data
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive 15-minute EMA bars
    def receive_ema_bar_2(self, sender, bar):
        for tup in self.ema_dict[self.receive_ema_bar_2]:
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive 1-hour EMA bars
    def receive_ema_bar_3(self, sender, bar):
        for tup in self.ema_dict[self.receive_ema_bar_3]:
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive bars for the fourth configured EMA timeframe
    def receive_ema_bar_4(self, sender, bar):
        # Loop through the EMA indicators associated with this function
        for tup in self.ema_dict[self.receive_ema_bar_4]:
            # Update the EMA indicator with the received bar's data
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

    # Function to receive bars for the fifth configured EMA timeframe
    def receive_ema_bar_5(self, sender, bar):
        # Loop through the EMA indicators associated with this function
        for tup in self.ema_dict[self.receive_ema_bar_5]:
            # Update the EMA indicator with the received bar's data
            tup[1].Update(IndicatorDataPoint(bar.EndTime, bar.Close))

        



from AlgorithmImports import *


class NewHighsLowsIndicator:
    def __init__(self, algorithm):
        self.algo = algorithm
        self.symbol_data = {}  # Dictionary to store data for each symbol

    def update(self, universe_symbols):
        for symbol in universe_symbols:
            # Initialize data structure for the symbol if not present
            if symbol not in self.symbol_data:
                self.symbol_data[symbol] = {
                    "first_update": True,
                    "52_week_high": None,
                    "52_week_low": None,
                    "is_new_high": 0,
                    "is_new_low": 0
                }

            symbol_info = self.symbol_data[symbol]

            # Ensure the symbol has valid price data
            if not self.algo.Securities[symbol].Price:
                continue

            current_high = self.algo.Securities[symbol].High
            current_low = self.algo.Securities[symbol].Low

            # Perform history fetch only for the first update
            if symbol_info["first_update"]:
                history = self.algo.History([symbol], timedelta(weeks=52), Resolution.Daily)
                if not history.empty:
                    symbol_info["52_week_high"] = history.loc[symbol]['high'].max()
                    symbol_info["52_week_low"] = history.loc[symbol]['low'].min()
                symbol_info["first_update"] = False
                # self.algo.debug(f"Fetched history for {symbol}: High {symbol_info['52_week_high']}, Low {symbol_info['52_week_low']}")

            # If history was fetched, update new high/low counts
            if symbol_info["52_week_high"] is not None and symbol_info["52_week_low"] is not None:
                # Reset new high/low flags
                symbol_info["is_new_high"] = 0
                symbol_info["is_new_low"] = 0

                if current_high >= symbol_info["52_week_high"]:
                    # self.algo.debug(f"New 52-week high for {symbol}: {current_high}")
                    symbol_info["52_week_high"] = current_high  # Update the high
                    symbol_info["is_new_high"] = 1

                if current_low <= symbol_info["52_week_low"]:
                    # self.algo.debug(f"New 52-week low for {symbol}: {current_low}")
                    symbol_info["52_week_low"] = current_low  # Update the low
                    symbol_info["is_new_low"] = 1

    @property
    def value(self):
        """
        Returns the summed counts of new highs and lows across all symbols.
        """
        total_new_highs = sum(data["is_new_high"] for data in self.symbol_data.values())
        total_new_lows = sum(data["is_new_low"] for data in self.symbol_data.values())
        return {
            "new_highs": total_new_highs,
            "new_lows": total_new_lows,
            "value": total_new_highs - total_new_lows
        }
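
# Minimal usage sketch (hypothetical values, outside of the live algorithm flow):
#   nh_nl = NewHighsLowsIndicator(self)        # created inside a QCAlgorithm
#   nh_nl.update(universe_symbols)             # e.g. the currently selected ETF constituents
#   breadth = nh_nl.value                      # e.g. {"new_highs": 12, "new_lows": 5, "value": 7}
# A positive "value" means more constituents printed fresh 52-week highs than 52-week lows today.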
from AlgorithmImports import *




class OnBalanceVolumeAccDist():
    # Constructor method, initializes the strategy with some parameters.
    def __init__(self, algo, symbol, sma_period, max_weighting):
        # Store the trading algorithm object, symbol, SMA (Simple Moving Average) period,
        # current weighting, and maximum weighting for this strategy.
        self.algo = algo
        self.symbol = symbol
        self.sma_period = sma_period
        self.current_weighting = 0
        self.max_weighting = max_weighting

        # Create instances of two technical indicators, On Balance Volume (OBV) and Accumulation Distribution (AD).
        # Also, create an instance of Simple Moving Average (SMA) with the specified period.
        self.obv = OnBalanceVolume()
        self.ad = AccumulationDistribution()
        self.sma = SimpleMovingAverage(self.sma_period)
        self.div = None  # Initialize a variable called 'div' to None.

    # Method to calculate the weighting for the strategy based on certain conditions.
    def calculate_weighting(self):
        # Check if 'div' (a variable calculated elsewhere) is greater than the current SMA value.
        if self.div > self.sma.Current.Value:
            self.current_weighting = self.max_weighting  # Set the current weighting to the maximum weighting.
        # Check if 'div' is less than the current SMA value.
        elif self.div < self.sma.Current.Value:
            self.current_weighting = -self.max_weighting  # Set the current weighting to the negative of the maximum weighting.
        else:
            self.current_weighting = 0  # If neither condition is met, set the current weighting to zero.

    # Method to receive new price bars and perform calculations.
    def receive_bar(self, sender, bar):
        # Update the On Balance Volume (OBV) and Accumulation Distribution (AD) indicators with the new price bar data.
        self.obv.Update(bar)
        self.ad.Update(bar)

        # Check if both OBV and AD indicators are ready (i.e., they have enough data points to be calculated).
        if self.obv.IsReady and self.ad.IsReady:
            # Calculate the 'div' value based on the OBV and AD indicators.
            # 'div' is the percentage difference between the two indicators.
            # Skip this update when the denominator is zero to avoid a ZeroDivisionError.
            denominator = self.obv.Current.Value + self.ad.Current.Value
            if denominator == 0:
                return
            self.div = (self.obv.Current.Value - self.ad.Current.Value) / denominator * 100

            # Update the SMA indicator with the new 'div' value.
            self.sma.Update(IndicatorDataPoint(bar.EndTime, self.div))

            # Check if the SMA indicator is ready.
            if self.sma.IsReady:
                # Call the 'calculate_weighting' method to determine the current weighting for the strategy.
                self.calculate_weighting()
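
# Worked example of the 'div' calculation (assumed indicator readings):
#   obv = 1_200_000, ad = 800_000
#   div = (1_200_000 - 800_000) / (1_200_000 + 800_000) * 100 = 20.0
# The SMA of 'div' then decides the sign of the weighting: div above its SMA gives
# +max_weighting, below gives -max_weighting, and equal gives 0.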
from AlgorithmImports import *




class RateOfChangePercentIndicator():

    # Constructor method to initialize the object with some parameters
    def __init__(self, algo, symbol, period, bol_period, bol_mult, lower_threshold, max_weighting):
        # Store the input parameters as instance variables
        self.algo = algo  # Algorithm object
        self.symbol = symbol  # Symbol to analyze
        self.period = period  # Period for RateOfChangePercent calculation
        self.lower_threshold = lower_threshold  
        self.bol_period = bol_period
        self.bol_mult = bol_mult
        # Create an instance of the RateOfChangePercent class with the specified period
        self.roc_percent = RateOfChangePercent(self.period)
        self.simple_condition = False
        self.current_weighting = 0  # Current weighting value
        self.max_weighting = max_weighting  # Maximum allowed weighting value
        self.bol = BollingerBands(self.bol_period, self.bol_mult)

    def calculate_simple_condition(self):
        if self.roc_percent.Current.Value <= self.lower_threshold:
            self.simple_condition = True
        else:
            self.simple_condition = False

    
  
    def calculate_weighting(self):
        # Calculate weighting based on ROC% position relative to Bollinger Bands
        if self.roc_percent.Current.Value > self.bol.Current.Value:
            band_range = self.bol.UpperBand.Current.Value - self.bol.Current.Value
            if band_range <= 0:
                self.current_weighting = self.max_weighting
                return
            position_within_bands = (self.roc_percent.Current.Value  - self.bol.Current.Value) / band_range
            self.current_weighting = position_within_bands * self.max_weighting
        else:
            band_range = self.bol.Current.Value - self.bol.LowerBand.Current.Value
            if band_range <= 0:
                self.current_weighting = -self.max_weighting
                return
            position_within_bands = (self.bol.Current.Value - self.roc_percent.Current.Value ) / band_range
            self.current_weighting = -position_within_bands * self.max_weighting

        # Ensure the weighting does not exceed specified bounds
        self.current_weighting =  max(-self.max_weighting, min(self.max_weighting, self.current_weighting))




    # Method to receive and process price bars
    def receive_bar(self, sender, bar):
        self.roc_percent.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        if self.roc_percent.IsReady:
            self.bol.Update(IndicatorDataPoint(bar.EndTime, self.roc_percent.Current.Value))
            if self.bol.IsReady:
                self.calculate_simple_condition()
                self.calculate_weighting()
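
# Worked example of the Bollinger-based scaling (assumed values):
#   roc% = 1.2, middle band = 0.4, upper band = 2.0, max_weighting = 10
#   band_range = 2.0 - 0.4 = 1.6
#   position_within_bands = (1.2 - 0.4) / 1.6 = 0.5
#   current_weighting = 0.5 * 10 = 5
# Readings below the middle band produce the mirrored negative weighting.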
from AlgorithmImports import *




class RateOfChangeRatioIndicator():

    # Constructor method to initialize the object with some parameters
    def __init__(self, algo, symbol, period, bol_period, bol_mult, lower_threshold, max_weighting):
        # Store the input parameters as instance variables
        self.algo = algo  # Algorithm object
        self.symbol = symbol  # Symbol to analyze
        self.period = period  # Period for RateOfChangePercent calculation
        self.lower_threshold = lower_threshold  
        self.bol_period = bol_period
        self.bol_mult = bol_mult
        # Create an instance of the RateOfChangePercent class with the specified period
        self.roc_ratio = RateOfChangeRatio(self.period)
        self.simple_condition = False
        self.current_weighting = 0  # Current weighting value
        self.max_weighting = max_weighting  # Maximum allowed weighting value
        self.bol = BollingerBands(self.bol_period, self.bol_mult)

    def calculate_simple_condition(self):
        if self.roc_ratio.Current.Value <= self.lower_threshold:
            self.simple_condition = True
        else:
            self.simple_condition = False

    
    def calculate_weighting(self):
        # Calculate weighting based on ROC Ratio position relative to Bollinger Bands
        if self.roc_ratio.Current.Value > self.bol.Current.Value:
            band_range = self.bol.UpperBand.Current.Value - self.bol.Current.Value
            if band_range <= 0:
                self.current_weighting = self.max_weighting
                return
            position_within_bands = (self.roc_ratio.Current.Value  - self.bol.Current.Value) / band_range
            self.current_weighting = position_within_bands * self.max_weighting
        else:
            band_range = self.bol.Current.Value - self.bol.LowerBand.Current.Value
            if band_range <= 0:
                self.current_weighting = -self.max_weighting
                return
            position_within_bands = (self.bol.Current.Value - self.roc_ratio.Current.Value ) / band_range
            self.current_weighting = -position_within_bands * self.max_weighting

        # Ensure the weighting does not exceed specified bounds
        self.current_weighting = max(-self.max_weighting, min(self.max_weighting, self.current_weighting))

    # Method to receive and process price bars
    def receive_bar(self, sender, bar):
        self.roc_ratio.Update(IndicatorDataPoint(bar.EndTime, bar.Close))
        if self.roc_ratio.IsReady:
            self.bol.Update(IndicatorDataPoint(bar.EndTime, self.roc_ratio.Current.Value))
            if self.bol.IsReady:
                self.calculate_simple_condition()
                self.calculate_weighting()
from AlgorithmImports import *




class RelativeStrengthIndexIndicator():

    # Constructor method to initialize the object with some parameters
    def __init__(self, algo, symbol, period, overbought, baseline, oversold, max_weighting):
        # Store the input parameters as instance variables
        self.algo = algo  # Algorithm object
        self.symbol = symbol  # Symbol to analyze
        self.period = period  # Period for RSI calculation
        self.overbought = overbought  # Overbought RSI threshold
        self.baseline = baseline  # Baseline RSI value
        self.oversold = oversold  # Oversold RSI threshold
        self.current_weighting = 0  # Current weighting value
        self.max_weighting = max_weighting  # Maximum allowed weighting value

        # Create an instance of the RelativeStrengthIndex class with the specified period
        self.rsi = RelativeStrengthIndex(self.period)

    # Method to calculate the weighting based on RSI values
    def calculate_weighting(self):
        # Get the current RSI value
        rsi = self.rsi.Current.Value

        # Calculate the weighting based on RSI values and thresholds
        weighting = ((self.baseline - rsi) / (self.baseline - self.oversold) * self.max_weighting)

        # Check if the calculated weighting is negative
        if weighting < 0:
            # Recalculate weighting with a different formula if it's negative
            weighting = -((rsi - self.baseline) / (self.overbought - self.baseline) * self.max_weighting)
            
            # Check if the recalculated weighting is now positive, and if so, set it to 0
            if weighting > 0:
                weighting = 0
        
        # Ensure that the calculated weighting value is within the specified bounds
        newValue = max(min(weighting, self.max_weighting), -self.max_weighting)

        # Update the current weighting with the calculated value
        self.current_weighting = newValue

        # Uncomment the following lines if you want to debug the current weighting
        # if abs(self.current_weighting) > self.max_weighting:
        #     self.algo.Debug(self.current_weighting)

    # Method to receive and process price bars
    def receive_bar(self, sender, bar):
        # Update the RSI indicator with the closing price of the received bar
        self.rsi.Update(IndicatorDataPoint(bar.EndTime, bar.Close))

        # Check if the RSI indicator is ready (has enough data points)
        if self.rsi.IsReady:
            # Calculate the weighting based on the RSI values
            self.calculate_weighting()
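
# Worked example of the RSI weighting (assumed thresholds): baseline = 50, oversold = 30,
# overbought = 70, max_weighting = 10:
#   rsi = 35 -> (50 - 35) / (50 - 30) * 10 = 7.5          (bullish, oversold side)
#   rsi = 65 -> the first formula is negative, so it becomes
#               -((65 - 50) / (70 - 50) * 10) = -7.5      (bearish, overbought side)
# The result is finally clamped to [-max_weighting, +max_weighting].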
from AlgorithmImports import *





class StochasticOscillatorIndicator():
    
    # Constructor method to initialize the object.
    def __init__(self, algo, symbol, period, k_period, d_period, overbought, baseline, oversold, max_weighting):
        # Store various input parameters as instance variables.
        self.algo = algo  # A reference to an algorithm or trading strategy.
        self.symbol = symbol  # The financial instrument or asset symbol being analyzed.
        self.period = period  # The time period used for calculations.
        self.overbought = overbought  # A threshold indicating overbought conditions.
        self.baseline = baseline  # A baseline or reference point for calculations.
        self.oversold = oversold  # A threshold indicating oversold conditions.
        self.k_period = k_period  # A parameter for a stochastic indicator.
        self.d_period = d_period  # A parameter for a stochastic indicator.
        self.current_weighting = 0  # Initialize a variable to store the current weighting.
        self.max_weighting = max_weighting  # The maximum allowed weighting.
        
        # Create an instance of the Stochastic class with specified parameters.
        self.stoch = Stochastic(self.period, self.k_period, self.d_period)

    # Method to calculate and update the weighting.
    def calculate_weighting(self):
        # Get the current stochastic value from the Stochastic instance.
        stoch = self.stoch.Current.Value
        
        # Calculate a weighting based on the current stochastic value.
        weighting = ((self.baseline - stoch) / (self.baseline - self.oversold) * self.max_weighting)
        
        # Check if the calculated weighting is negative.
        if weighting < 0:
            # If negative, calculate an alternative weighting.
            weighting = -((stoch - self.baseline) / (self.overbought - self.baseline) * self.max_weighting)
            
            # Ensure the alternative weighting is not positive.
            if weighting > 0:
                weighting = 0
        
        # Ensure the final weighting is within the specified limits.
        newValue = max(min(weighting, self.max_weighting), -self.max_weighting)
        
        # Update the current weighting with the new value.
        self.current_weighting = newValue

    # Method to receive and process a new price bar.
    def receive_bar(self, sender, bar):
        # Update the Stochastic indicator with the new price bar.
        self.stoch.Update(bar)
        
        # Check if the Stochastic indicator has enough data points to make calculations.
        if self.stoch.IsReady:
            # Calculate the weighting based on the current Stochastic value.
            self.calculate_weighting()
#region imports
from AlgorithmImports import *
import math
#endregion
from data.custom_consolidator import RegularHoursConsolidator




class VolumeWeightedAveragePriceIndicator():

    # Constructor method to initialize the object.
    def __init__(self, algo, symbol, upper_offset_1, upper_offset_2, lower_offset_1, lower_offset_2, offset, max_weighting):
        # Store references to the algorithm and the symbol associated with this indicator.
        self.algo = algo  # Reference to the trading algorithm.
        self.symbol = symbol  # The financial instrument or asset symbol.
        self.current_weighting = 0  # Initialize the current weighting.
        self.max_weighting = max_weighting  # The maximum allowed weighting.
        
        # Calculate an offset factor (if provided, otherwise set to 1).
        if offset != 0:
            self.offset = 1 + (offset / 100)
        else:
            self.offset = 1

        # Store various indicator-related variables.
        self.upper_offset_1 = upper_offset_1
        self.upper_offset_2 = upper_offset_2
        self.lower_offset_1 = lower_offset_1
        self.lower_offset_2 = lower_offset_2

        # Initialize lists to store data for calculations.
        self.vwap_sum_queue = []
        self.volume_sum_queue = []
        self.v2_sum_queue = []
        self.vwap_queue = []
        self.vwap_value = None
        self.upper_band_1 = None
        self.upper_band_2 = None
        self.lower_band_1 = None
        self.lower_band_2 = None

        self.bar = 0  # Initialize a variable to store the current price.

        # Create a TradeBarConsolidator to process 1-minute bars.
        self.bar_consolidator = RegularHoursConsolidator(timedelta(minutes=1))
        self.algo.SubscriptionManager.AddConsolidator(self.symbol, self.bar_consolidator)
        self.bar_consolidator.DataConsolidated += self.receive_bar

        # Schedule a daily reset of the VWAP and related data.
        self.algo.Schedule.On(self.algo.DateRules.EveryDay(), self.algo.TimeRules.At(7, 0), self.reset_vwap)

    # Method to calculate the weighting based on various conditions.
    def calculate_weighting(self):
        self.current_weighting = 0
        weighting_add = self.max_weighting / 3

        # Check if the current price is above certain bands and add to the weighting.
        if self.bar > self.vwap_value:
            self.current_weighting += weighting_add
        if self.bar > self.upper_band_1:
            self.current_weighting += weighting_add
        if self.bar > self.upper_band_2:
            self.current_weighting += weighting_add

        # Check if the current price is below certain bands and subtract from the weighting.
        if self.bar < self.vwap_value:
            self.current_weighting -= weighting_add
        if self.bar < self.lower_band_1:
            self.current_weighting -= weighting_add
        if self.bar < self.lower_band_2:
            self.current_weighting -= weighting_add

    # Method to receive and process a new price bar.
    def receive_bar(self, sender, bar):
        self.bar = bar.Close
        volume = bar.Volume

        # Calculate the running intraday sums used for VWAP.
        # Each new bar is added to the most recent running total ([-1]) so the sums stay cumulative since the daily reset.
        if len(self.vwap_sum_queue) >= 1:
            vwap_sum = (((bar.High + bar.Low) / 2) * bar.Volume) + self.vwap_sum_queue[-1]
        else:
            vwap_sum = (((bar.High + bar.Low) / 2) * bar.Volume)
        self.vwap_sum_queue.append(vwap_sum)

        if len(self.volume_sum_queue) >= 1:
            volume_sum = volume + self.volume_sum_queue[-1]
        else:
            volume_sum = volume
        self.volume_sum_queue.append(volume_sum)

        if len(self.v2_sum_queue) >= 1:
            v2_sum = (((bar.High + bar.Low) / 2) * ((bar.High + bar.Low) / 2) * bar.Volume) + self.v2_sum_queue[-1]
        else:
            v2_sum = (((bar.High + bar.Low) / 2) * ((bar.High + bar.Low) / 2) * bar.Volume)
        self.v2_sum_queue.append(v2_sum)

        # Calculate the VWAP value and related statistics.
        if volume_sum != 0:
            self.vwap_value = vwap_sum / volume_sum
        else:
            self.vwap_value = 0
        
        if volume_sum != 0:
            to_square = v2_sum / volume_sum - self.vwap_value * self.vwap_value
        else:
            to_square = 0

        # Guard against small negative values from floating-point error before taking the square root.
        self.dev = math.sqrt(to_square) if to_square > 0 else 0.0

        # Calculate upper and lower bands based on VWAP and deviations.
        self.upper_band_1 = (self.vwap_value * self.offset) + (self.dev * self.upper_offset_1)
        self.upper_band_2 = (self.vwap_value * self.offset) + (self.dev * self.upper_offset_2)
        self.lower_band_1 = (self.vwap_value * self.offset) - (self.dev * self.lower_offset_1)
        self.lower_band_2 = (self.vwap_value * self.offset) - (self.dev * self.lower_offset_2)

        # Calculate the weighting based on current price and bands.
        self.calculate_weighting()

    # Method to reset VWAP and related data on a daily schedule.
    def reset_vwap(self):
        self.vwap_sum_queue.clear()
        self.volume_sum_queue.clear()
        self.v2_sum_queue.clear()
        self.vwap_queue.clear()
        self.vwap_value = None
        self.upper_band_1 = None
        self.upper_band_2 = None
        self.lower_band_1 = None
        self.lower_band_2 = None
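
# Sketch of the band construction used above (assumed numbers; the sums are reset daily by reset_vwap):
#   typical price tp = (high + low) / 2
#   vwap = sum(tp * volume) / sum(volume)
#   dev  = sqrt(sum(tp^2 * volume) / sum(volume) - vwap^2)
#   upper_band_n = vwap * offset + dev * upper_offset_n
#   lower_band_n = vwap * offset - dev * lower_offset_n
# Example: vwap = 100, dev = 0.8, offset = 1, upper_offset_1 = 2 -> upper_band_1 = 101.6.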
# region imports
from AlgorithmImports import *
from data.symboldata import SymbolData
from data.set_up_timeframes import get_timeframes
from data.setup_data import setup_data
from indicators.adx_indicator import AverageDirectionalIndexIndicator
from indicators.bollinger_bands_indicator import BollingerBandsIndicator
from indicators.chaikin_vol_indicator import ChaikinVolatilityIndicator
from data.data_distributor import DataDistributor
from indicators.dvv_indicator import DirectionalVolatilityVolumeIndicator
from indicators.macd_indicator import MovingAverageConvergenceDivergenceIndicator
from indicators.obv_acc_dist_indicator import OnBalanceVolumeAccDist
from indicators.rsi_indicator import RelativeStrengthIndexIndicator
from indicators.mfi_indicator import MoneyFlowIndexIndicator
from indicators.roc_percent_indicator import RateOfChangePercentIndicator
from indicators.stochastic_indicator import StochasticOscillatorIndicator
from indicators.vwap_indicator import VolumeWeightedAveragePriceIndicator
from indicators.moving_averages import MovingAverageCrossIndicator
from positions.manage_positions import PositionManager
from positions.chandelier_exit  import ChandelierExit
from positions.calculation_weights import CalculationWeights
from positions.allocation_weights import AllocationWeights
from configs import config_main as cfg
from configs import config_crossover as xcfg


from configs.configs_long import config_long_entry as lcfg
from configs.configs_long import config_long_exit as lxcfg
from configs.configs_long import config_long_indicators as licfg
from configs.configs_long import config_long_allocation_weights as lawcfg


from configs.configs_short import config_short_entry as scfg
from configs.configs_short import config_short_exit as sxcfg
from configs.configs_short import config_short_indicators as sicfg

from configs.configs_long import config_long_pause as lpcfg

from setup_indicators import *

# endregion

class SmoothYellowGreenBeaver(QCAlgorithm):

    def Initialize(self):
        # Setting start and end date for the backtest
        self.SetStartDate(cfg.BACKTEST_START_YEAR, cfg.BACKTEST_START_MONTH, cfg.BACKTEST_START_DAY)  
        self.SetEndDate(cfg.BACKTEST_END_YEAR, cfg.BACKTEST_END_MONTH, cfg.BACKTEST_END_DAY)   

        # Setting backtest account cash              
        self.SetCash(cfg.BACKTEST_ACCOUNT_CASH) 

        # Initialize stock and symbol data objects     
        self.stocks = cfg.STOCKS
        self.symbols = []
        self.symbol_instances_long = {}
        self.symbol_instances_short = {}
        self.enable_extended_hour_exit = xcfg.ENABLE_EXTENDED_HOUR_EXIT
        self.peak_portfolio_value = 0
        self.drawdowns = []
        self.current_drawdown = 0
        self._universe = None
        self.enable_bear_market_detection = lcfg.ENABLE_BEAR_MARKET_DETECTION
        if lawcfg.USE_ALLOCATION_WEIGHTS:
            self.universe_settings.schedule.on(self.date_rules.week_end())
            self.universe_settings.resolution = Resolution.DAILY
            # Add the SPY ETF.
            self._spy = self.add_equity("SPY", Resolution.DAILY).symbol
            # Add a universe of the SPY constituents.
            self._universe = self.add_universe(self.universe.etf(self._spy, universe_filter_func=self._etf_constituents_filter))
        
            self._constituent_symbols = set()
        # Add stocks to the algorithm and store their symbols 
        for stock in self.stocks:
            try:
                self.symbols.append(self.AddEquity(stock, Resolution.Minute, extendedMarketHours=self.enable_extended_hour_exit).Symbol)
            except:
                raise Exception(f"Unable to add stock of symbol {stock} to the algorithm")
        
        # Create symbol data objects for each stock symbol 
        for symbol in self.symbols:
            self.symbol_instances_long[symbol] = SymbolData(self, symbol)
            self.symbol_instances_short[symbol] = SymbolData(self, symbol)

            #// These have been moved out of the main file into their own setup function
            self.symbol_instances_long[symbol] = setup_indicators(self, symbol, self.symbol_instances_long, licfg, lxcfg, lcfg)
            self.symbol_instances_short[symbol] = setup_indicators(self, symbol, self.symbol_instances_short, sicfg, sxcfg, scfg)
            self.allocation_control = AllocationWeights(self, symbol, self.symbol_instances_long, self._universe)
            #// These have been consolidated into a single function call
            combined_warmup_info, combined_indicator_map, combined_receiver_map = setup_data(symbol, self.symbol_instances_long, self.symbol_instances_short)
            
    
            self.data_dist = DataDistributor(self, symbol, combined_warmup_info, combined_indicator_map, combined_receiver_map)

            total_max_weighting_long = 0
            total_max_weighting_short = 0

            #// This has been moved into it's own function
            total_max_weighting_long, total_max_weighting_short = self.get_max_weighting(symbol)

            self.long_weightings = CalculationWeights(self, symbol, lcfg.WEIGHT_SENSITIVITY_DAMPING, total_max_weighting_long+lcfg.TOTAL_MAX_WEIGHTING_INCREASE, lcfg.LOG_SCALING_THRESHOLD_PERCENTAGE_1, lcfg.LOG_SCALING_THRESHOLD_PERCENTAGE_2, lcfg.LOG_SCALING_THRESHOLD_PERCENTAGE_3)
            self.short_weightings = CalculationWeights(self, symbol, scfg.WEIGHT_SENSITIVITY_DAMPING, total_max_weighting_short+scfg.TOTAL_MAX_WEIGHTING_INCREASE, scfg.LOG_SCALING_THRESHOLD_PERCENTAGE_1, 50, 75)


            self.position_manager = PositionManager(self, symbol, cfg.ENABLE_LONG_TRADES, cfg.ENABLE_SHORT_TRADES, cfg.TRADING_START_TIME, cfg.TRADING_END_TIME, licfg.USE_CHANDELIER_EXITS, sicfg.USE_CHANDELIER_EXITS, total_max_weighting_long, total_max_weighting_short, self.allocation_control)
        self.limit_short = scfg.USE_LIMIT_SHORT_CASH
        self.max_short = scfg.MAX_SHORT_CASH_PERCENTAGE/100
        self.limit_long = lcfg.USE_LIMIT_LONG_CASH
        self.max_long = lcfg.MAX_LONG_CASH_PERCENTAGE/100
        self.reset_long_each_day = lpcfg.RESET_LOSS_COUNTER_EACH_DAY
        self.close_short_moc = sxcfg.EXIT_ON_MARKET_CLOSE
        self.Schedule.On(self.DateRules.EveryDay(), self.TimeRules.AfterMarketOpen(symbol, 1), self.rebal_short)
        self.Schedule.On(self.DateRules.EveryDay(), self.TimeRules.AfterMarketOpen(symbol, 0), self.reset_loss_counter)
        self.Schedule.On(self.DateRules.EveryDay(), self.TimeRules.BeforeMarketClose(symbol, 1), self.close_short)


    def close_short(self):
        for symbol in self.symbols:
            if self.close_short_moc and self.portfolio[symbol].is_short:
                self.liquidate(symbol, tag="Liquidating short on market close")

    def _etf_constituents_filter(self, constituents: List[ETFConstituentUniverse]) -> List[Symbol]:
        # Extract their symbols
        new_constituent_symbols = {c.symbol for c in constituents}
        
        # Check if the universe has changed
        if new_constituent_symbols != self._constituent_symbols:
            self._constituent_symbols = new_constituent_symbols



        return list(self._constituent_symbols)
        
    def AdjustPositionSizesBasedOnDrawdown(self):
        base_allocation = 1  # Start with the full base allocation

        if self.portfolio.invested and self.current_drawdown >= 0.15:
            # Once the drawdown reaches 15%, scale the allocation down linearly,
            # reaching zero allocation at a 25% drawdown.
            adjustment_factor = base_allocation - ((self.current_drawdown - 0.15) / 0.10) * base_allocation
            
            # Ensure the adjustment factor does not go below 0
            adjustment_factor = max(0, adjustment_factor)

            for symbol in self.symbols:
                # Adjust position size based on current drawdown
                self.SetHoldings(symbol, adjustment_factor, tag=f"{self.current_drawdown} {adjustment_factor} Adjusting position size based on drawdown")
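
    # Worked example of the linear scale-down above (assumed drawdowns):
    #   drawdown = 0.15 -> adjustment_factor = 1 - ((0.15 - 0.15) / 0.10) = 1.00  (full allocation)
    #   drawdown = 0.20 -> adjustment_factor = 1 - ((0.20 - 0.15) / 0.10) = 0.50
    #   drawdown = 0.25 -> adjustment_factor = 1 - ((0.25 - 0.15) / 0.10) = 0.00  (fully scaled out)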


    def reset_loss_counter(self):
        if self.reset_long_each_day:
            self.position_manager.loss_counter = 0

    def rebal_short(self):
        for symbol, symbolData in self.symbol_instances_long.items():
            if self.enable_bear_market_detection and symbolData.is_bear_market:
                holding_value = self.Portfolio[symbol].HoldingsValue
                portfolio_value = self.Portfolio.TotalPortfolioValue
                
                current_allocation = holding_value / portfolio_value if portfolio_value > 0 else 0
                if current_allocation > 0.7:
                    self.SetHoldings(symbol, lcfg.BEAR_MARKET_PERCENTAGE/100, tag=f"Adjusting position size due to bear market")
                #self.AdjustPositionSizesBasedOnDrawdown()
        if self.limit_short:
            for symbol, symbolData in self.symbol_instances_short.items():
                if self.Portfolio[symbol].IsShort:
                    self.SetHoldings(symbol, -self.max_short, tag=f"LIMITING SHORT TO {self.max_short}")
        if self.limit_long:
            for symbol, symbolData in self.symbol_instances_long.items():
                if self.Portfolio[symbol].IsLong:
                    self.SetHoldings(symbol, self.max_long, tag=f"LIMITING LONG TO {self.max_long}")

    def on_end_of_algorithm(self):
        # Calculate the average drawdown
        if len(self.drawdowns) > 0:
            average_drawdown = sum(self.drawdowns) / len(self.drawdowns)
            self.Debug(f"Average Drawdown: {average_drawdown:.4f}")

    def OnData(self, data: Slice):
        # Update the peak portfolio value
        self.peak_portfolio_value = max(self.peak_portfolio_value, self.Portfolio.TotalPortfolioValue)

        # Calculate the current drawdown
        self.current_drawdown = (self.peak_portfolio_value - self.Portfolio.TotalPortfolioValue) / self.peak_portfolio_value

        # Store the current drawdown
        self.drawdowns.append(self.current_drawdown)


        for symbol, symbolData in self.symbol_instances_long.items():
            if "SMA" in self.symbol_instances_long[symbol].indicators:
                self.symbol_instances_long[symbol].indicators["SMA"].calculate_weighting()
            if "SMA" in self.symbol_instances_short[symbol].indicators:
                self.symbol_instances_short[symbol].indicators["SMA"].calculate_weighting()
            
            current_long_weighting = self.long_weightings.get_current_total_weight(self.get_total_score(symbol, self.symbol_instances_long))
            current_short_weighting = self.short_weightings.get_current_total_weight(self.get_total_score(symbol, self.symbol_instances_short))
            
            #// This has also been cleaned up into a single function call
            long_stop, short_stop = self.get_stops(symbol)

            if self.Time.strftime("%H%M") >= self.position_manager.start_time.strftime("%H%M") and self.Time.strftime("%H%M") <= self.position_manager.end_time.strftime("%H%M"):
                #// The total scores and Chandelier stops have been taken out of the previous function calls;
                #// they are calculated once above and saved as local variables so they can be reused in multiple function calls
                #// get_total_score has also been refactored into a single function with dependency injection;
                #// before, it was 2 separate functions for long and short that functionally had the same code
                self.position_manager.entry(current_long_weighting, current_short_weighting, self.get_simple_entry(symbol), symbolData)

                #// This has also been cleaned up into a single function call
                self.position_manager.check_exits(symbol, long_stop, short_stop, current_long_weighting, current_short_weighting, symbolData.atr.Current.Value, symbolData.sma.Current.Value)
            elif self.enable_extended_hour_exit:
                #// This has also been cleaned up into a single function call
                self.position_manager.check_exits(symbol, long_stop, short_stop, current_long_weighting, current_short_weighting, symbolData.atr.Current.Value, symbolData.sma.Current.Value)
                self.position_manager.check_long_last_min()
            

    def get_stops(self, symbol):
        if "CHD" not in self.symbol_instances_long[symbol].indicators.keys():
                    long_stop = False
                    short_stop = False
        else:
            long_stop = self.symbol_instances_long[symbol].indicators["CHD"].sell_signal
            short_stop = self.symbol_instances_long[symbol].indicators["CHD"].buy_signal
        return long_stop, short_stop

    def get_total_score(self, symbol, symbol_instances):
        score = 0
        for name, indicator in symbol_instances[symbol].indicators.items():
            score += indicator.current_weighting
        return score

    
    
    def get_simple_entry(self, symbol):
        if scfg.USE_SIMPLE_ENTRY:
            score = 0
            for name, indicator in self.symbol_instances_short[symbol].indicators.items():
                if name in scfg.SIMPLE_ENTRY_CONFIG:
                    if indicator.simple_condition:
                        score += 1
            if score >= scfg.SIMPLE_ENTRY_MINIMUM_TO_TRIGGER:
                return True
            else:
                return False
        else:
            return False
    
    def get_max_weighting(self, symbol):
        total_max_weighting_long = 0
        total_max_weighting_short = 0
        for key, value in self.symbol_instances_long[symbol].indicators.items():
            if key != "CHD":
                total_max_weighting_long += value.max_weighting
        for key, value in self.symbol_instances_short[symbol].indicators.items():
            if key != "CHD":
                total_max_weighting_short += value.max_weighting
        return total_max_weighting_long, total_max_weighting_short
from AlgorithmImports import *
from indicators.new_high_new_low import NewHighsLowsIndicator
from indicators.advance_decline_ratio import AdvanceDeclineRatioIndicator
from indicators.bear_market_detection import BearMarketIndicator
from configs.configs_long import config_long_entry as lcfg
from configs.configs_long import config_long_allocation_weights as lawcfg
from collections import deque
import math

class AllocationWeights:
    def __init__(self, algo, symbol, symbol_instances_long, universe):
        self.algo = algo
        self.symbol = symbol
        self.symbol_instances_long = symbol_instances_long
        self.use_allocation_weights = lawcfg.USE_ALLOCATION_WEIGHTS  # Set to False to disable
        self._universe = universe
        self.enable_bear_market_detection = lawcfg.ENABLE_BEAR_MARKET_DETECTION
        self.enable_nh_nl = lawcfg.ENABLE_NH_NL
        self.enable_adr = lawcfg.ENABLE_ADR
        self.new_highs_lows_indicator = NewHighsLowsIndicator(algo)
        self.adr_indicator = AdvanceDeclineRatioIndicator(algo)
        # Define indicators with their weights and thresholds
        self.indicators = {
            'NH_NL': {
                'enabled': self.enable_nh_nl,
                'instance': NewHighsLowsIndicator(algo),
                'weight': lawcfg.NH_NL_WEIGHT,
                'threshold_lower': lawcfg.NH_NL_THRESHOLD_LOWER,
                'threshold_upper': lawcfg.NH_NL_THRESHOLD_UPPER
            },
            'ADL': {
                'enabled': self.enable_adr, 
                'instance': AdvanceDeclineRatioIndicator(algo),
                'weight': lawcfg.ADL_WEIGHT,
                'threshold_lower': lawcfg.ADL_THRESHOLD_LOWER,
                'threshold_upper': lawcfg.ADL_THRESHOLD_UPPER
            },
            'BEAR_MARKET_DETECTION': {
                'enabled': self.enable_bear_market_detection,
                'instance': BearMarketIndicator(algo, symbol, print_debug=False),  # Initialize the BearMarketIndicator
                'weight': lawcfg.BEAR_MARKET_DETECTION_WEIGHT,
                'threshold_lower': lawcfg.BEAR_MARKET_DETECTION_THRESHOLD_LOWER,
                'threshold_upper': lawcfg.BEAR_MARKET_DETECTION_THRESHOLD_UPPER
            }
        }

        
        # Define min and max allocation limits
        self.min_allocation = lawcfg.MIN_ALLOCATION
        self.max_allocation = lawcfg.MAX_ALLOCATION
        self.scaled_allocation = 0  # Initialize scaled_allocation as a class attribute
        self.algo.Schedule.On(self.algo.DateRules.EveryDay(), self.algo.TimeRules.AfterMarketOpen(symbol, 0), self.schedule_indicator_update)


    def schedule_indicator_update(self):
        if self.use_allocation_weights:
            for name, details in self.indicators.items():
                if details['enabled'] and details['instance'] and name != 'BEAR_MARKET_DETECTION':
                    details['instance'].update(self._universe.selected)
                    value = details['instance'].value
                    self.algo.Debug(f"{self.algo.Time} {name} Value: {value}")
                elif name == 'BEAR_MARKET_DETECTION':
                    value = details['instance'].value
                    #self.algo.Debug(f"{self.algo.Time} {name} Value: {value}")

    def calculate_allocation(self):
        total_contribution = 0
        max_possible_contribution = sum(details['weight'] for details in self.indicators.values() if details['enabled'])
        
        for name, details in self.indicators.items():
            if details['enabled']:
                weight = details['weight']
                lower = details['threshold_lower']
                upper = details['threshold_upper']
                current_value = details['instance'].value['value'] if details['instance'] else 0
                
                # Scale the value
                scaled_value = max(0, min((current_value - lower) / (upper - lower), 1)) if upper != lower else 0
                
                # Contribution
                contribution = scaled_value * weight
                total_contribution += contribution
                
                #self.algo.Debug(f"Indicator: {name}, Current Value: {current_value}, Scaled Value: {scaled_value}, Contribution: {contribution}")
        
        # Scale allocation
        allocation_range = self.max_allocation - self.min_allocation
        self.scaled_allocation = (total_contribution / max_possible_contribution) * allocation_range + self.min_allocation
        
        return self.scaled_allocation
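
    # Worked example of the allocation scaling (assumed settings, not the lawcfg values):
    #   one enabled indicator with weight = 1, threshold_lower = -50, threshold_upper = 50,
    #   min_allocation = 0.2, max_allocation = 1.0, current indicator value = 0:
    #     scaled_value       = (0 - (-50)) / (50 - (-50)) = 0.5
    #     total_contribution = 0.5 * 1 = 0.5, max_possible_contribution = 1
    #     scaled_allocation  = 0.5 * (1.0 - 0.2) + 0.2 = 0.6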

    
    def update_weights(self, new_weights):
        # Optionally allow for dynamic updating of weights and thresholds
        for indicator_name, params in new_weights.items():
            if indicator_name in self.indicators:
                self.indicators[indicator_name].update(params)
from AlgorithmImports import *
from configs import config_main as cfg
from collections import deque
import math


class CalculationWeights():


    def __init__(self, algo, symbol, damping, total_max_weighting, log_scaling_threshold_percentage_1, log_scaling_threshold_percentage_2, log_scaling_threshold_percentage_3):
        # Initialize the CalculationWeights object with algorithm, symbol, damping factor,
        # total maximum weighting, and log scaling threshold percentages.
        self.algo = algo
        self.symbol = symbol
        self.damping = damping
        self.total_max_weighting = total_max_weighting

        # Initialize log scaling threshold percentages
        self.log_scaling_threshold_percentage_1 = log_scaling_threshold_percentage_1
        self.log_scaling_threshold_percentage_2 = log_scaling_threshold_percentage_2
        self.log_scaling_threshold_percentage_3 = log_scaling_threshold_percentage_3


        # log_scaling_threshold_1/2/3 are derived from total_max_weighting and the corresponding
        # threshold percentages; they mark the points in the total weight range where the
        # logarithmic adjustment switches to the next scaling tier.
        self.log_scaling_threshold_1 = math.ceil(self.total_max_weighting * (self.log_scaling_threshold_percentage_1/100))

        self.log_scaling_threshold_2 = math.ceil(self.total_max_weighting * (self.log_scaling_threshold_percentage_2/100))

        self.log_scaling_threshold_3 = math.ceil(self.total_max_weighting * (self.log_scaling_threshold_percentage_3/100))

    
    def get_current_total_weight(self, current_weighting_score):
        # Store the original current weighting score.
        current_weighting_score_org = current_weighting_score

        # Ensure the score is positive for further calculations.
        current_weighting_score = abs(current_weighting_score)

        # Initialize the logarithmically adjusted score.
        logarithmically_adjusted_score = 0

        # Logarithmically scaling the current weighting score to the maximum possible weighting score
        if current_weighting_score_org >= self.log_scaling_threshold_1:
            logarithmically_adjusted_score = self.log_scaling_threshold_percentage_1 * math.log10(current_weighting_score / self.log_scaling_threshold_1 * self.damping + 1)

        # Logarithmically scaling the current weighting score to the maximum possible weighting score but with adjusted step in the
        # Logarithmic calculation based on threshold 2
        if current_weighting_score_org >= self.log_scaling_threshold_2:
            logarithmically_adjusted_score = self.log_scaling_threshold_percentage_1 + self.log_scaling_threshold_percentage_1 * math.log10((current_weighting_score - self.log_scaling_threshold_1) / (self.log_scaling_threshold_2 - self.log_scaling_threshold_1) * self.damping + 1)

        # Logarithmically scaling the current weighting score to the maximum possible weighting score but with adjusted step in the
        # Logarithmic calculation based on threshold 3
        if current_weighting_score_org >= self.log_scaling_threshold_3:
            logarithmically_adjusted_score = self.log_scaling_threshold_percentage_2 + self.log_scaling_threshold_percentage_1 * math.log10((current_weighting_score - self.log_scaling_threshold_2) / (self.log_scaling_threshold_3 - self.log_scaling_threshold_2) * self.damping + 1)

        # If the current score is at ceiling we fix the maximum adjusted score at 100
        if current_weighting_score_org >= self.total_max_weighting:
            logarithmically_adjusted_score = 100

        # Same as above but for short
        if current_weighting_score_org <= -self.log_scaling_threshold_1:
            logarithmically_adjusted_score = -(self.log_scaling_threshold_percentage_1 * math.log10(current_weighting_score / self.log_scaling_threshold_1 * self.damping + 1))

        if current_weighting_score_org <= -self.log_scaling_threshold_2:
            logarithmically_adjusted_score = -(self.log_scaling_threshold_percentage_1 + self.log_scaling_threshold_percentage_1 * math.log10((current_weighting_score - self.log_scaling_threshold_1) / (self.log_scaling_threshold_2 - self.log_scaling_threshold_1) * self.damping + 1))

        if current_weighting_score_org <= -self.log_scaling_threshold_3:
            logarithmically_adjusted_score = -(self.log_scaling_threshold_percentage_2 + self.log_scaling_threshold_percentage_1 * math.log10((current_weighting_score - self.log_scaling_threshold_2) / (self.log_scaling_threshold_3 - self.log_scaling_threshold_2) * self.damping + 1))

        if current_weighting_score_org <= -self.total_max_weighting:
            logarithmically_adjusted_score = -100


        
        return logarithmically_adjusted_score
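
# Worked example of the logarithmic scaling (assumed settings, not the config values):
#   total_max_weighting = 100, damping = 9, threshold percentages 25 / 50 / 75
#   -> thresholds 25 / 50 / 75
#   score = 25  -> 25 * log10(25 / 25 * 9 + 1) = 25 * log10(10) = 25
#   score = 50  -> 25 + 25 * log10((50 - 25) / (50 - 25) * 9 + 1) = 50
#   score >= 100 is pinned to 100; negative scores mirror the same curve on the short side.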
from AlgorithmImports import *
from collections import deque





class ChandelierExit():
    # This function is the constructor. It initializes the object when it's created.
    def __init__(self, algo, symbol, period, multiplier, use_close):
        # Store some initial values that are passed to the constructor.
        self.algo = algo
        self.symbol = symbol
        self.period = period
        self.multiplier = multiplier
        self.use_close = use_close
        
        # Create an AverageTrueRange object with the specified period.
        self.atr = AverageTrueRange(self.period)

        # Create queues to store price data (close, high, and low) for a certain period.
        self.close_queue = deque(maxlen=self.period)
        self.high_queue = deque(maxlen=self.period)
        self.low_queue = deque(maxlen=self.period)

        # Initialize some variables to keep track of the trading strategy.
        self.dir = 1
        self.dir_queue = deque(maxlen=2)

        self.long_stop = 0
        self.short_stop = 0

        self.buy_signal = False
        self.sell_signal = False
        self.current_weighting = 0

    # This function receives a price bar (e.g., OHLC data) and updates the strategy.
    def receive_bar(self, sender, bar):
        # Add the latest close, high, and low prices to their respective queues.
        self.close_queue.appendleft(bar.Close) 
        self.high_queue.appendleft(bar.High) 
        self.low_queue.appendleft(bar.Low)  
        self.atr.Update(bar)

        # If the Average True Range (ATR) is ready (calculated), calculate stop levels based on it.
        if self.atr.IsReady:
            atr = self.atr.Current.Value * self.multiplier
            
            # Calculate long and short stop levels based on the chosen method (close or high/low).
            if self.use_close:
                long_stop = max(self.close_queue) - atr
            else:
                long_stop = max(self.high_queue) - atr

            # Store the long stop level if it's the first time.
            if self.long_stop == 0:
                self.long_stop = long_stop
            
            # Adjust the long stop level if needed.
            if self.close_queue[1] > self.long_stop:
                long_stop = max(long_stop, self.long_stop)

            # Calculate short stop levels based on the chosen method.
            if self.use_close:
                short_stop = min(self.close_queue) + atr
            else:
                short_stop = min(self.low_queue) + atr

            # Store the short stop level if it's the first time.
            if self.short_stop == 0:
                self.short_stop = short_stop
            
            # Adjust the short stop level if needed.
            if self.close_queue[1] < self.short_stop:
                short_stop = min(short_stop, self.short_stop)

            # Determine the direction (buy/sell) based on price compared to stop levels.
            # If neither condition is met, the previous direction is kept unchanged.
            if bar.Close > self.short_stop and not self.sell_signal:
                self.dir = 1
            elif bar.Close < self.long_stop and not self.buy_signal:
                self.dir = -1
            
            # Add the current direction to a queue for further analysis.
            self.dir_queue.appendleft(self.dir)

            # Check for buy and sell signals based on the direction changes.
            if len(self.dir_queue) == 2:
                if self.dir == 1 and self.dir_queue[1] == -1:
                    self.buy_signal = True
                else:
                    self.buy_signal = False

                if self.dir == -1 and self.dir_queue[1] == 1:
                    self.sell_signal = True
                else:
                    self.sell_signal = False

            # Update the long and short stop levels for the next iteration.
            self.long_stop = long_stop
            self.short_stop = short_stop
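
# Worked example of the stop levels (assumed values): multiplier = 3, ATR = 2,
# highest high over the period = 110, lowest low = 100, use_close = False:
#   long_stop  = 110 - 3 * 2 = 104
#   short_stop = 100 + 3 * 2 = 106
# A close above short_stop sets the direction to +1, a close below long_stop sets it to -1,
# and buy_signal / sell_signal fire only on the bar where the direction flips.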
from AlgorithmImports import *
from configs import config_main as cfg
from configs import config_crossover as xcfg
from configs.configs_long import config_long_entry as lcfg
from configs.configs_short import config_short_entry as scfg





from configs.configs_long import config_long_exit as lxcfg
from configs.configs_short import config_short_exit as sxcfg
from configs.configs_long import config_long_pause as lpcfg
from data.csv_str import csv_string
from datetime import datetime
from io import StringIO




class PositionManager():

    # Constructor method that initializes the object with various parameters
    def __init__(self, algo, symbol, long_trades, short_trades, start_time, end_time, chd_exits_long, chd_exits_short, total_max_weighting_long, total_max_weighting_short, allocation_control):
        
        # Store the values passed as parameters in instance variables
        self.algo = algo  # Reference to the trading algorithm
        self.symbol = symbol  # Trading symbol

        self.allocation_control = allocation_control

        self.long_trades = long_trades  # Whether long trades are enabled
        self.short_trades = short_trades  # Whether short trades are enabled

        self.stop_percentage_long = lxcfg.STOP_PERCENTAGE/100
        self.stop_percentage_short = sxcfg.STOP_PERCENTAGE/100

        self.use_long_volatility_stops = lxcfg.USE_LONG_VOLATILITY_STOPS
        self.use_short_volatility_stops = sxcfg.USE_SHORT_VOLATILITY_STOPS


        self.stop_percentage_hv_long = lxcfg.HIGH_VOLATILITY_STOP_PERCENTAGE/100
        self.stop_percentage_lv_long = lxcfg.LOW_VOLATILITY_STOP_PERCENTAGE/100

        self.stop_percentage_hv_short = sxcfg.HIGH_VOLATILITY_STOP_PERCENTAGE/100
        self.stop_percentage_lv_short = sxcfg.LOW_VOLATILITY_STOP_PERCENTAGE/100

        self.enable_conditional_trading_end_time = cfg.ENABLE_CONDITIONAL_TRADING_END_TIME

        self.start_time = start_time  # Start time for trading
        self.end_time = end_time  # End time for trading

        self.latest_cross_over_time = xcfg.LATEST_CROSS_OVER_TIME
        # self.tp_factor = tp_factor  # Take profit factor
        self.chd_exits_long = chd_exits_long  # Whether Chandelier exits are used for long positions
        self.chd_exits_short = chd_exits_short  # Whether Chandelier exits are used for short positions

        self.validate_no_long = xcfg.VALIDATE_NO_LONG_POSITIONS
        # Convert low_vol_sl and high_vol_sl to decimal percentages
        # self.low_vol_sl = low_vol_sl / 100
        # self.high_vol_sl = high_vol_sl / 100

        # Convert trl_actv and trl_percent to decimal percentages
        # self.trl_actv = trl_actv / 100
        # self.trl_percent = trl_percent / 100

        self.total_max_weighting_long = total_max_weighting_long  # Total maximum weighting
        self.total_max_weighting_short = total_max_weighting_short # Total maximum weighting

        # Define minimum required scores for different allocation percentages
        self.min_score_entry_1_long = math.ceil(self.total_max_weighting_long * (lcfg.MIN_SCORE_PERCENTAGE_ENTRY_1/100))
        self.min_score_entry_2_long = math.ceil(self.total_max_weighting_long * (lcfg.MIN_SCORE_PERCENTAGE_ENTRY_2/100))
        self.min_score_entry_3_long = math.ceil(self.total_max_weighting_long * (lcfg.MIN_SCORE_PERCENTAGE_ENTRY_3/100))

        # Define minimum required scores for different allocation percentages
        self.min_score_entry_1_short = math.ceil(self.total_max_weighting_short * (scfg.MIN_SCORE_PERCENTAGE_ENTRY_1/100))
        self.min_score_entry_2_short = math.ceil(self.total_max_weighting_short * (25/100))
        self.min_score_entry_3_short = math.ceil(self.total_max_weighting_short * (65/100))

        self.exit_mode = "MARKET"  # Set the exit mode to "MARKET"

        # Set increase_time and increase_percent using values from cfg module (not shown here)
        self.increase_time = cfg.WEIGHTING_THRESHOLD_INCREASE_TIME
        self.increase_percent = 1 + (cfg.WEIGHTING_THRESHOLD_INCREASE/100)
        
        self.use_min_negative_score = xcfg.USE_NEGATIVE_MIN_SCORE_PERCENTAGE_THRESHOLD
        self.min_negative_score = xcfg.NEGATIVE_MIN_SCORE_PERCENTAGE_THRESHOLD

        # Set use_trail using a value from the cfg module (not shown here)
        self.use_trail_long = lxcfg.USE_TRAILING_STOP
        self.use_trail_short = sxcfg.USE_TRAILING_STOP


        # Set activation_threshold and trail_percent using values from cfg module (not shown here)
        self.activation_threshold_long = lxcfg.TRAILING_STOP_ACTIVATION_THRESHOLD/100
        self.trail_percent_long = lxcfg.TRAILING_STOP_PERCENTAGE/100

        self.activation_threshold_short = sxcfg.TRAILING_STOP_ACTIVATION_THRESHOLD/100
        self.trail_percent_short = sxcfg.TRAILING_STOP_PERCENTAGE/100

        # Initialize trailing values for long and short trades
        self.long_trailing_value = 0
        self.short_trailing_value = 0

        # Whether to use layered entries (the short side is currently hardcoded to False)
        self.use_layered_entries_long = lcfg.USE_LAYERED_ENTRIES
        self.use_layered_entries_short = False


        # Define layered entry percentages for different layers
        self.layered_percentage_1_long = lcfg.QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_1
        self.layered_percentage_2_long = lcfg.QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_2
        self.layered_percentage_3_long = lcfg.QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_3

        # Define the number of shares for each layer in layered entries
        self.layered_shares_1_long = lcfg.QUANTITY_TOTAL_SHARES_ENTRY_1 
        self.layered_shares_2_long = lcfg.QUANTITY_TOTAL_SHARES_ENTRY_2_LONG
        self.layered_shares_3_long = lcfg.QUANTITY_TOTAL_SHARES_ENTRY_3 

        # Define layered entry percentages for the short side
        # Note: layers 2 and 3 are currently hardcoded rather than read from scfg
        self.layered_percentage_1_short = scfg.QUANTITY_AVAILABLE_CASH_PERCENTAGE_ENTRY_1
        self.layered_percentage_2_short = 0.95
        self.layered_percentage_3_short = 1

        # Define the number of shares for each layer in layered short entries
        # Note: layers 2 and 3 are currently hardcoded rather than read from scfg
        self.layered_shares_1_short = scfg.QUANTITY_TOTAL_SHARES_ENTRY_1
        self.layered_shares_2_short = 3
        self.layered_shares_3_short = 4

        # Initialize a variable to keep track of the current score
        self.current_score_long = 0
        self.current_score_short = 0

        # Define the quantity mode for trading
        self.quantity_mode_long = lcfg.QUANTITY_MODE
        self.quantity_mode_short = scfg.QUANTITY_MODE


        # Initialize variables to store trailing values for long positions (positive and negative)
        self.long_trailing_value_p = 0
        self.long_trailing_value_n = 0

        self.short_trailing_value_p = 0
        self.short_trailing_value_n = 0

        # Determine whether to use ATR (Average True Range) for trailing stops
        self.use_atr_trail_long = lxcfg.USE_ATR_TRAILING_STOP
        self.use_atr_trail_short = sxcfg.USE_ATR_TRAILING_STOP


        # Define ATR offset values for positive and negative directions
        self.atr_offset_p_long = lxcfg.ATR_OFFSET_POSITIVE
        self.atr_offset_n_long = lxcfg.ATR_OFFSET_NEGATIVE

        self.atr_offset_p_short = sxcfg.ATR_OFFSET_POSITIVE
        self.atr_offset_n_short = sxcfg.ATR_OFFSET_NEGATIVE

        # Define tighten levels for positive and negative directions
        self.p_tighten_levels_long = lxcfg.POSITIVE_TIGHTEN_LEVELS
        self.n_tighten_levels_long = lxcfg.NEGATIVE_TIGHTEN_LEVELS

        self.p_tighten_levels_short = sxcfg.POSITIVE_TIGHTEN_LEVELS
        self.n_tighten_levels_short = sxcfg.NEGATIVE_TIGHTEN_LEVELS

        # Define minimum and maximum ATR values for scaling trailing stops
        self.scale_min_atr_long = lxcfg.SCALE_MIN_ATR
        self.scale_max_atr_long = lxcfg.SCALE_MAX_ATR

        self.scale_min_atr_short = sxcfg.SCALE_MIN_ATR
        self.scale_max_atr_short = sxcfg.SCALE_MAX_ATR


        # Define the minimum and maximum trailing percentages based on ATR
        self.min_trail_per_atr_long = lxcfg.MIN_TRAILING_PERCENT
        self.max_trail_per_atr_long = lxcfg.MAX_TRAILING_PERCENT

        self.min_trail_per_atr_short = sxcfg.MIN_TRAILING_PERCENT
        self.max_trail_per_atr_short = sxcfg.MAX_TRAILING_PERCENT

        self.n_is_active_long = False
        self.n_is_active_short = False

        self.loss_counter = 0
        self.long_backoff_timeslots = lpcfg.BACK_OFF_TIME_SLOTS
        self.long_trade_delay = lpcfg.LOSS_TIME_SLOT
        self.last_long_exit_time = None
        self.previous_long_delay = self.long_trade_delay 
        self.last_long_trade_result = 0
        self.last_trade_direction = None

        self.vix_pause_active = lpcfg.VIX_PAUSE_MINUTES_ACTIVE
        self.vix_period =  lpcfg.VIX_INCREASE_PERIOD_IN_MINUTES 
        self.vix_max_increase = lpcfg.VIX_MAX_PERCENTAGE_INCREASE 
        self.vix_pause = lpcfg.VIX_PAUSE_MINUTES
        self.vix_liquidate_active = lpcfg.LIQUIDATE_ON_VIX_TRIGGER

        self.vix_rc_active = lpcfg.VIX_REGRESSION_CHANNEL_ACTIVE
        self.vix_rc_minutes = lpcfg.VIX_REGRESSION_CHANNEL_PAUSE_MINUTES
        self.vix_rc_liq = lpcfg.LIQUIDATE_ON_VIX_RC_TRIGGER

        self.pause_ma_active = lpcfg.PAUSE_MA_RESTRICTION_ACTIVE 
        self.pause_ma_liq = lpcfg.LIQUIDATE_ON_PAUSE_MA_TRIGGER 
        self.pause_ma_pause_mins = lpcfg.PAUSE_MA_PAUSE_MINUTES
        self.pause_ma_scale_entry_active = lpcfg.PAUSE_MA_ENTRY_SCALE_ACTIVE
        self.pause_ma_scale_quantity = lpcfg.PAUSE_MA_ENTRY_SCALE_FACTOR

        self.no_long_entry_months = lcfg.NO_ENTRY_MONTHS
        self.number_to_month = {
            1: "JANUARY",
            2: "FEBRUARY",
            3: "MARCH",
            4: "APRIL",
            5: "MAY",
            6: "JUNE",
            7: "JULY",
            8: "AUGUST",
            9: "SEPTEMBER",
            10: "OCTOBER",
            11: "NOVEMBER",
            12: "DECEMBER"
        }

        self.no_long_entry_days = lcfg.NO_ENTRY_DAYS
        self.number_to_weekday = {
            0: "MONDAY",
            1: "TUESDAY",
            2: "WEDNESDAY",
            3: "THURSDAY",
            4: "FRIDAY",
            5: "SATURDAY",
            6: "SUNDAY"
        }
        self.blackout_periods_long = lcfg.BLACKOUT_PERIODS
        self.use_no_entry_months = lcfg.USE_NO_ENTRY_MONTHS
        self.use_no_entry_days = lcfg.USE_NO_ENTRY_DAYS
        self.use_blackout_periods = lcfg.USE_BLACKOUT_PERIODS

        self.liq_no_entry_months_long = lcfg.LIQUIDATE_MONTHS_PRIOR_MOC
        self.liq_no_entry_blackout_long = lcfg.LIQUIDATE_BLACKOUT_PERIOD_PRIOR_MOC
        self.liq_no_entry_days_long = lcfg.LIQUIDATE_DAY_PRIOR_DAYS

        # The encoded CSV string
        self.csv_string = csv_string 
        self.enable_nh_nl = lcfg.ENABLE_NH_NL
        self.enable_nh_nl_exit = lcfg.ENABLE_NH_NL_EXIT
        self.nh_nl_ma_period = lcfg.NH_NL_MA_PERIOD

        self.use_bear_market_detection = lcfg.ENABLE_BEAR_MARKET_DETECTION

        self.enable_vix_increase_print = lpcfg.ENABLE_PRINT_VIX_INCREASE
        self.enable_vix_channel_print = lpcfg.ENABLE_PRINT_VIX_CHANNEL
        self.old_pause_behavior = lpcfg.ENABLE_OLD_PAUSE_BEHAVIOR

        self.win_time_slot = lpcfg.WIN_TIME_SLOT

        # Load data from the encoded CSV string
        self.data = self.LoadCSVData(self.csv_string)


    def LoadCSVData(self, csv_string):
        # Load the CSV string into a pandas DataFrame
        df = pd.read_csv(StringIO(csv_string))
        # Convert the 'time' column to datetime
        df['time'] = pd.to_datetime(df['time'])
        # Calculate the moving average of 'close_diff'
        df['moving_avg'] = df['close_diff'].rolling(window=self.nh_nl_ma_period).mean()  # moving average over the configured NH_NL_MA_PERIOD
        return df
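    # Note on the expected CSV layout (assumed from the column accesses above, not verified against the
    # actual encoded data): at minimum a 'time' column parseable by pd.to_datetime and a numeric
    # 'close_diff' column, e.g.
    #   time,close_diff
    #   2023-01-03,12
    #   2023-01-04,-5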



    def scale_value_long(self, atr_value, distance, tighten_by):
        # Determine the maximum stop value by taking the minimum of max_trail_per_atr and distance
        max_stop = min(self.max_trail_per_atr_long, distance)
        
        # Calculate the scaled_value using a formula that considers the atr_value, scale_min_atr, scale_max_atr, min_trail_per_atr, and max_stop

        # Ensure scaled_value is between min_trail_per_atr and max_stop
        scaled_value = min(
            max_stop,
            # Use the formula to calculate the scaled_value
            max(
                # Ensure scaled_value is at least min_trail_per_atr
                self.min_trail_per_atr_long,
                # Use the linear interpolation formula to scale the value based on atr_value
                self.min_trail_per_atr_long + (max_stop - self.min_trail_per_atr_long) * (
                    (atr_value - self.scale_min_atr_long) / (self.scale_max_atr_long - self.scale_min_atr_long)
                )
            )
        )


        # Return the scaled_value divided by the tighten_by value
        return scaled_value / tighten_by

    
    def scale_value_short(self, atr_value, distance, tighten_by):
        # Determine the maximum stop value by taking the minimum of max_trail_per_atr and distance
        max_stop = min(self.max_trail_per_atr_short, distance)
        
        # Calculate the scaled_value using a formula that considers the atr_value, scale_min_atr, scale_max_atr, min_trail_per_atr, and max_stop

        # Ensure scaled_value is between min_trail_per_atr and max_stop
        scaled_value = min(
            max_stop,
            # Use the formula to calculate the scaled_value
            max(
                # Ensure scaled_value is at least min_trail_per_atr
                self.min_trail_per_atr_short,
                # Use the linear interpolation formula to scale the value based on atr_value
                self.min_trail_per_atr_short + (max_stop - self.min_trail_per_atr_short) * (
                    (atr_value - self.scale_min_atr_short) / (self.scale_max_atr_short - self.scale_min_atr_short)
                )
            )
        )


        # Return the scaled_value divided by the tighten_by value
        return scaled_value / tighten_by
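    # Worked example for scale_value_long / scale_value_short (hypothetical numbers, not from the config):
    # with min_trail_per_atr = 0.5, max_trail_per_atr = 2.0, scale_min_atr = 0.1, scale_max_atr = 0.5,
    # distance = 1.5 and atr_value = 0.3:
    #   max_stop      = min(2.0, 1.5) = 1.5
    #   interpolated  = 0.5 + (1.5 - 0.5) * (0.3 - 0.1) / (0.5 - 0.1) = 1.0
    #   scaled_value  = min(1.5, max(0.5, 1.0)) = 1.0, then divided by tighten_by (e.g. 2) -> 0.5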

    #// These trailing-state resets were previously duplicated inside several functions;
    #// those functions now call this single helper instead, reducing code duplication.
    def reset_trailing_values(self):
        self.current_score_long = 0
        self.current_score_short = 0
        self.long_trailing_value = 0
        self.short_trailing_value = 0
        self.long_trailing_value_p = 0
        self.long_trailing_value_n = 0
        self.short_trailing_value_p = 0
        self.short_trailing_value_n = 0
        self.n_is_active_long = False
        self.n_is_active_short = False

    # Function to calculate the trailing stop for short trades
    def calculate_short_trailing_stop(self, atr_value, exit_method='limit'):
        if not self.use_atr_trail_short:
            if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent > self.short_trailing_value:
                self.short_trailing_value = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent
            elif self.short_trailing_value >= self.activation_threshold_short:
                if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent + self.trail_percent_short <= self.short_trailing_value:
                    self.cancel_orders()
                    self.execute_exit(exit_method, tag="SHORT TRAILING EXIT")
                    self.reset_trailing_values()
        else:
            if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 > self.short_trailing_value_p:
                self.short_trailing_value_p = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100
            if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 < self.short_trailing_value_n and not self.n_is_active_short:
                self.short_trailing_value_n = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100
            if self.n_is_active_short:
                if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 > self.short_trailing_value_n:
                    self.short_trailing_value_n = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100

            if sxcfg.ENABLE_POSITIVE_TRAILING_STOP:
                distance = self.atr_offset_p_short
                tighten_by = 1
                for level, tighten in self.p_tighten_levels_short.items():
                    if self.short_trailing_value_p >= level:
                        tighten_by = tighten
                trailing_stop_percentage = self.scale_value_short(atr_value, distance, tighten_by)
                if self.short_trailing_value_p >= self.atr_offset_p_short:
                    if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 + trailing_stop_percentage <= self.short_trailing_value_p:
                        self.cancel_orders()
                        self.execute_exit(exit_method, tag="SHORT TRAILING EXIT")
                        self.reset_trailing_values()
            
            if sxcfg.ENABLE_NEGATIVE_TRAILING_STOP:
                distance = self.atr_offset_n_short
                tighten_by = 1
                for level, tighten in self.n_tighten_levels_short.items():
                    if self.short_trailing_value_n <= -level:
                        tighten_by = tighten
                trailing_stop_percentage = self.scale_value_short(atr_value, distance, tighten_by)
                if self.short_trailing_value_n <= -self.atr_offset_n_short or self.n_is_active_short:
                    if self.short_trailing_value_n <= -self.atr_offset_n_short:
                        self.n_is_active_short = True
                    if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 + trailing_stop_percentage <= self.short_trailing_value_n:
                        self.cancel_orders()
                        self.execute_exit(exit_method, tag="SHORT TRAILING EXIT")
                        self.reset_trailing_values()
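    # Worked example for the non-ATR (simple) trailing branch above (hypothetical numbers):
    # with activation_threshold_short = 0.01 (1%) and trail_percent_short = 0.005 (0.5%),
    # the peak unrealized profit is tracked in short_trailing_value; once that peak is >= 1%,
    # the position is exited when current profit + 0.5% <= peak, i.e. a 0.5% give-back from the high.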

    # Function to calculate the trailing stop for long trades
    def calculate_long_trailing_stop(self, atr_value, exit_method='limit'):
        if not self.use_atr_trail_long:
            if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent > self.long_trailing_value:
                self.long_trailing_value = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent
            elif self.long_trailing_value >= self.activation_threshold_long:
                if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent + self.trail_percent_long <= self.long_trailing_value:
                    self.cancel_orders()
                    self.execute_exit(exit_method, tag="LONG TRAILING EXIT")
                    self.last_long_exit_time = self.algo.Time
                    self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
                    self.reset_trailing_values()
                    trade_result = self.last_long_trade_result
                    #self.algo.debug(f"{self.algo.time} {trade_result}")
                    if trade_result < 0:
                        self.loss_counter += 1
                    else:
                        # If the trade was profitable, reset the counter and the back-off delay
                        self.loss_counter = 0
                        self.previous_long_delay = self.long_trade_delay

        else:
            if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 > self.long_trailing_value_p:
                self.long_trailing_value_p = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100
            if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 < self.long_trailing_value_n and not self.n_is_active_long:
                self.long_trailing_value_n = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100
            if self.n_is_active_long:
                if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 > self.long_trailing_value_n:
                    self.long_trailing_value_n = self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100

            if lxcfg.ENABLE_POSITIVE_TRAILING_STOP:
                distance = self.atr_offset_p_long
                tighten_by = 1
                for level, tighten in self.p_tighten_levels_long.items():
                    if self.long_trailing_value_p >= level:
                        tighten_by = tighten
                trailing_stop_percentage = self.scale_value_long(atr_value, distance, tighten_by)
                if self.long_trailing_value_p >= self.atr_offset_p_long:
                    if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 + trailing_stop_percentage <= self.long_trailing_value_p:
                        self.cancel_orders()
                        self.execute_exit(exit_method, tag="LONG TRAILING EXIT")
                        self.last_long_exit_time = self.algo.Time
                        self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
                        self.reset_trailing_values()

                        trade_result = self.last_long_trade_result
                        #self.algo.debug(f"{self.algo.time} {trade_result}")

                        if trade_result < 0:
                            self.loss_counter += 1
                        else:
                            # If the trade was profitable, reset the counter and the back-off delay
                            self.loss_counter = 0
                            self.previous_long_delay = self.long_trade_delay

            
            if lxcfg.ENABLE_NEGATIVE_TRAILING_STOP:
                distance = self.atr_offset_n_long
                tighten_by = 1
                for level, tighten in self.n_tighten_levels_long.items():
                    if self.long_trailing_value_n <= -level:
                        tighten_by = tighten
                trailing_stop_percentage = self.scale_value_long(atr_value, distance, tighten_by)
                if self.long_trailing_value_n <= -self.atr_offset_n_long or self.n_is_active_long:
                    if self.long_trailing_value_n <= -self.atr_offset_n_long:
                        self.n_is_active_long = True
                    if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent * 100 + trailing_stop_percentage <= self.long_trailing_value_n:
                        self.cancel_orders()
                        self.execute_exit(exit_method, tag="LONG TRAILING EXIT")
                        self.last_long_exit_time = self.algo.Time
                        self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
                        self.reset_trailing_values()

                        trade_result = self.last_long_trade_result
                        #self.algo.debug(f"{self.algo.time} {trade_result}")

                        if trade_result < 0:
                            self.loss_counter += 1
                        else:
                            # If the trade was profitable, reset the counter and the back-off delay
                            self.loss_counter = 0
                            self.previous_long_delay = self.long_trade_delay

    def execute_exit(self, exit_method, tag="EXIT"):
        if exit_method == 'liquidate':
            # Immediately liquidate the position
            self.algo.Liquidate(self.symbol, tag=tag)
        elif exit_method == 'limit':
            quantity = -self.algo.Portfolio[self.symbol].Quantity
            current_price = self.algo.Securities[self.symbol].Price
            if quantity < 0:
                # Closing a long: sell limit slightly below the current price so the order is marketable
                limit_price = current_price * 0.995  # 0.5% through the market to improve fill probability
            else:
                # Closing a short: buy limit slightly above the current price so the order is marketable
                limit_price = current_price * 1.005  # 0.5% through the market to improve fill probability
            self.algo.LimitOrder(self.symbol, quantity, limit_price, tag=tag)
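    # Worked example (hypothetical): closing a long of 100 shares with the security at 50.00 gives
    # quantity = -100 (< 0) and limit_price = 50.00 * 0.995 = 49.75, a sell limit 0.5% through the
    # market so it should fill like a protected market order even outside regular hours.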



    def update_after_long_exit(self):
        # Record the exit time
        self.last_long_exit_time = self.algo.Time
        
        # Record the result of the last trade
        self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
        
        # Update counters based on whether the trade was a win or a loss
        trade_result = self.last_long_trade_result
        if trade_result < 0:
            self.loss_counter += 1
        else:
            # If the trade was profitable, reset the loss counter
            self.loss_counter = 0
        
        # Prepare for the next trade (e.g., reset delay, adjust strategies, etc.)
        self.previous_long_delay = self.long_trade_delay
        
        # Reset any necessary trailing stop values
        self.reset_trailing_values()
        
        # Optionally log or print out details for analysis
        self.algo.Debug(f"Long trade exited with result: {trade_result}")


    #// Merging the long and short variants into one function via dependency injection was considered,
    #// but with the current overall design too many dependencies would have to be injected,
    #// which would make the combined function harder to read and maintain.
    def long_entry(self, current_weighting_long, symbolData):
        # Determine the entry thresholds from the MIN_SCORE_PERCENTAGE settings in the long-entry config module
        scale_quantity = 1
        if self.pause_ma_scale_entry_active and self.algo.Securities[self.symbol].Close < symbolData.hma_200.Current.Value and self.algo.Securities[self.symbol].Close < symbolData.hma_50.Current.Value:
            scale_quantity = self.pause_ma_scale_quantity
        threshold_long = self.min_score_entry_1_long
        threshold_2_long = self.min_score_entry_2_long
        threshold_3_long = self.min_score_entry_3_long
        if self.algo.Time.strftime("%H%M") >= self.increase_time.strftime("%H%M"):
            threshold_long *= self.increase_percent
            threshold_2_long *= self.increase_percent
            threshold_3_long *= self.increase_percent
        if self.long_trades and not self.algo.Portfolio[self.symbol].IsShort:
            if self.quantity_mode_long == "PERCENTAGE":
                if not self.use_layered_entries_long:
                    # Check if the symbol is not already in a Long position, current weighting is greater than or equal to the threshold,
                    # and there are no open orders for the symbol
                    if not self.algo.Portfolio[self.symbol].IsLong and current_weighting_long >= threshold_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders, and set holdings for the symbol to 100% Long
                        self.cancel_orders()
                        if not self.allocation_control.use_allocation_weights:
                            quantity = lcfg.MAX_LONG_CASH_PERCENTAGE/100
                            if self.use_bear_market_detection and symbolData.is_bear_market:
                                quantity = lcfg.BEAR_MARKET_PERCENTAGE/100
                            self.algo.SetHoldings(self.symbol, (quantity) *scale_quantity, tag=f"LONG, ALLOCATION {(quantity) *scale_quantity}")
                        else:
                            self.algo.SetHoldings(self.symbol, (self.allocation_control.scaled_allocation) *scale_quantity, tag=f"LONG, ALLOCATION {(self.allocation_control.scaled_allocation) *scale_quantity}")

                        return
                else:
                    # Check if the current score is below the entry-3 threshold and other conditions are met
                    if self.current_score_long < self.min_score_entry_3_long and current_weighting_long >= threshold_3_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders
                        self.cancel_orders()
                        # Set holdings with a specific percentage and update the current score
                        self.algo.SetHoldings(self.symbol, self.layered_percentage_3_long*scale_quantity, tag=f"LONG {self.layered_percentage_3_long*100}%")
                        self.current_score_long = self.min_score_entry_3_long
                        return
                    # Check if the current score is below the entry-2 threshold and other conditions are met
                    elif self.current_score_long < self.min_score_entry_2_long and current_weighting_long >= threshold_2_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders
                        self.cancel_orders()
                        # Set holdings with a specific percentage and update the current score
                        self.algo.SetHoldings(self.symbol, self.layered_percentage_2_long*scale_quantity, tag=f"LONG {self.layered_percentage_2_long*100}%")
                        self.current_score_long = self.min_score_entry_2_long
                        return
                    # Check if the current score is below the entry-1 threshold and other conditions are met
                    elif self.current_score_long < self.min_score_entry_1_long and current_weighting_long >= threshold_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders
                        self.cancel_orders()
                        # Set holdings with a specific percentage and update the current score
                        self.algo.SetHoldings(self.symbol, self.layered_percentage_1_long*scale_quantity, tag=f"LONG {self.layered_percentage_1_long*100}%")
                        self.current_score_long = self.min_score_entry_1_long
                        return
            else:
                if not self.use_layered_entries_long:
                    # Check if the symbol is not already in a Long position, current weighting is greater than or equal to the threshold,
                    # and there are no open orders for the symbol
                    if not self.algo.Portfolio[self.symbol].IsLong and current_weighting_long >= threshold_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders, and set holdings for the symbol Long
                        self.cancel_orders()
                        self.algo.MarketOrder(self.symbol, self.layered_shares_3_long, tag="LONG")
                        return
                else:
                    # Check if current score is less than the threshold for the third layer
                    if self.current_score_long < self.layered_shares_3_long and current_weighting_long >= threshold_3_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders
                        self.cancel_orders()
                        # Place a market order to go long with the remaining shares for the third layer
                        self.algo.MarketOrder(self.symbol, (self.layered_shares_3_long-self.current_score_long), tag=f"LONG {self.layered_percentage_3_long*100}%")
                        # Update the current score to reflect the new position
                        self.current_score_long = self.layered_shares_3_long
                        return
                    # If the condition for the third layer is not met, check for the second layer
                    elif self.current_score_long < self.layered_shares_2_long and current_weighting_long >= threshold_2_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders
                        self.cancel_orders()
                        # Place a market order to go long with the remaining shares for the second layer
                        self.algo.MarketOrder(self.symbol, (self.layered_shares_2_long-self.current_score_long), tag=f"LONG {self.layered_percentage_2_long*100}%")
                        # Update the current score to reflect the new position
                        self.current_score_long = self.layered_shares_2_long
                        return
                    # If the condition for the second layer is not met, check for the first layer
                    elif self.current_score_long < self.layered_shares_1_long and current_weighting_long >= threshold_long and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders
                        self.cancel_orders()
                        # Place a market order to go long with the remaining shares for the first layer
                        self.algo.MarketOrder(self.symbol, (self.layered_shares_1_long-self.current_score_long), tag=f"LONG {self.layered_percentage_1_long*100}%")
                        # Update the current score to reflect the new position
                        self.current_score_long = self.layered_shares_1_long
                        return
        return "no_entry"
    #// Merging the long and short variants into one function via dependency injection was considered,
    #// but with the current overall design too many dependencies would have to be injected,
    #// which would make the combined function harder to read and maintain.
    def short_entry(self, current_weighting_short, current_weighting_long, simple_entry_short):
        threshold_short = self.min_score_entry_1_short
        threshold_2_short = self.min_score_entry_2_short
        threshold_3_short = self.min_score_entry_3_short
        short_negative_score_con = True
        is_long = self.algo.Portfolio[self.symbol].IsLong
        if self.use_min_negative_score and current_weighting_long < self.min_negative_score:
            short_negative_score_con = False
        if self.algo.Time.strftime("%H%M") >= self.increase_time.strftime("%H%M"):
            threshold_short *= self.increase_percent
            threshold_2_short *= self.increase_percent
            threshold_3_short *= self.increase_percent
        if self.short_trades and short_negative_score_con:
            if self.quantity_mode_short == "PERCENTAGE":
                if not self.use_layered_entries_short:
                    # this "not self.algo.Portfolio[self.symbol].IsShort" part of the line is what I was removing in various places
                    # or adding it back in, where if removed the code will SetHoldings again 
                    # which results in these small rebalancing orders
                    if not self.algo.Portfolio[self.symbol].IsShort and (simple_entry_short or (not scfg.USE_SIMPLE_ENTRY and current_weighting_short <= -threshold_short)) and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders, and set holdings for the symbol to 100% Short
                        self.cancel_orders()
                        self.algo.SetHoldings(self.symbol, -scfg.MAX_SHORT_CASH_PERCENTAGE/100, tag="SHORT")
                        return
            else:
                if not self.use_layered_entries_short:
                    # Check if the symbol is not already in a Short position, current weighting is less than or equal to the negative threshold,
                    # and there are no open orders for the symbol
                    if not self.algo.Portfolio[self.symbol].IsShort and (simple_entry_short or current_weighting_short <= -threshold_short) and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                        # Cancel any existing orders, and set holdings for the symbol Short
                        self.cancel_orders()
                        if is_long:
                            self.algo.Liquidate(self.symbol)
                        self.algo.MarketOrder(self.symbol, -self.layered_shares_3_short, tag="SHORT")
        return "no_entry"
    
    #// This function previously did not exist
    #// Having this "entry" point function helps with code readability
    # Entry-point function that applies the various entry filters and then routes to long_entry / short_entry
    def entry(self, current_weighting_long, current_weighting_short, simple_entry_short, symbolData):

        no_nh_nl_entry = False
        if self.enable_nh_nl:
            current_date = self.algo.Time.date()
        
            # Filter the data for the current date
            current_value = self.data[self.data['time'].dt.date == current_date]
            
            if not current_value.empty:
                # Extract the relevant values
                close_diff = current_value['close_diff'].values[0]
                moving_avg = current_value['moving_avg'].values[0]  # currently unused
                if close_diff < 0:
                    no_nh_nl_entry = True
                    if self.enable_nh_nl_exit and self.algo.portfolio[self.symbol].is_long:
                        self.algo.liquidate()
                        #no_nh_nl_entry = True


        #is_in_danger_zone = symbolData.is_in_danger_zone()
        #if self.algo.time.year == 2022 and is_in_danger_zone and self.algo.portfolio[self.symbol].is_long:
            #self.algo.liquidate(tag="DANGER")
            #no_nh_nl_entry = True
        #if symbolData.is_bear_market:
            #if self.algo.portfolio[self.symbol].is_long:
                #self.algo.liquidate(tag="DANGER")
            #no_nh_nl_entry = True

        long_entry = "False"
        no_entry_long = False
        multiplier = self.long_backoff_timeslots.get(self.loss_counter, 1.5)  # Default to 1.5 if the loss counter has no entry in the back-off table
        if self.last_trade_direction == "LONG" and self.last_long_trade_result < 0:
            backoff_minutes = multiplier * self.previous_long_delay
        else:
            backoff_minutes = self.previous_long_delay
        if self.last_trade_direction == "LONG" and self.last_long_trade_result < 0 and self.algo.Time < self.last_long_exit_time + timedelta(minutes=backoff_minutes):
            long_entry = "no_entry"
            no_entry_long = True
        else:
            self.previous_long_delay = backoff_minutes
            self.last_long_exit_time = None
            self.last_long_trade_result = 0
            self.last_trade_direction = None


        if self.last_trade_direction == "LONG" and self.last_long_trade_result > 0:
            backoff_minutes_win = self.win_time_slot
       
            if self.last_trade_direction == "LONG" and self.last_long_trade_result > 0 and self.algo.Time < self.last_long_exit_time + timedelta(minutes=backoff_minutes_win):
                long_entry = "no_entry"
                no_entry_long = True
            else:
                self.last_long_exit_time = None
                self.last_long_trade_result = 0
                self.last_trade_direction = None
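        # Worked example of the back-off logic above (hypothetical numbers, not from the config):
        # after a losing long exit with loss_counter = 2, BACK_OFF_TIME_SLOTS maps 2 -> 2.0 and the
        # previous delay is 30 minutes, so no new long entry is allowed for 2.0 * 30 = 60 minutes;
        # after a winning exit, long entries pause for WIN_TIME_SLOT minutes instead.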

        # Check for VIX increase on the first minute of a new hour, but only if enough time has passed since the last increase
        current_time = self.algo.Time


        if self.pause_ma_active:
            if not hasattr(self, 'last_pause_ma_trigger_time'):
                self.last_pause_ma_trigger_time = self.algo.Time - timedelta(minutes=self.pause_ma_pause_mins)
            
            # Determine whether the pause period is still active
            pause_period_active = self.algo.Time < self.last_pause_ma_trigger_time + timedelta(minutes=self.pause_ma_pause_mins)
            
            if symbolData.is_below_pause_ma or pause_period_active:
                if symbolData.is_below_pause_ma:
                    self.last_pause_ma_trigger_time = self.algo.Time  # Update the trigger time every time it is still below
                
                no_entry_long = True
                if self.pause_ma_liq and self.algo.Portfolio[self.symbol].IsLong:
                    self.algo.Liquidate()
                    self.last_long_exit_time = self.algo.Time
                    self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
                    self.reset_trailing_values()
                    trade_result = self.last_long_trade_result
                    if trade_result < 0:
                        self.loss_counter += 1
                    else:
                        # If the trade was profitable, reset the counter and the back-off delay
                        self.loss_counter = 0
                        self.previous_long_delay = self.long_trade_delay


        if self.vix_rc_active:
            if not hasattr(self, 'last_rc_trigger_time'):
                self.last_rc_trigger_time = self.algo.Time - timedelta(minutes=self.vix_rc_minutes)

            # Check for regression channel breach
            current_time = self.algo.Time
            if symbolData.vix_rc:
                self.last_rc_trigger_time = current_time  # Record the time this trigger happened
                # VIX Channel Print
                if self.enable_vix_channel_print:
                    self.algo.Debug("VIX Regression Channel breach at {}.".format(current_time.strftime('%Y-%m-%d %H:%M:%S')))

            if self.algo.Time >= self.last_rc_trigger_time + timedelta(minutes=self.vix_rc_minutes):
                no_entry_long = False
                
            else:
                no_entry_long = True
                
                if self.vix_rc_liq and self.algo.Portfolio[self.symbol].IsLong:
                    self.algo.Liquidate()
                    self.last_long_exit_time = self.algo.Time
                    self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
                    self.reset_trailing_values()
                    trade_result = self.last_long_trade_result
                    if trade_result < 0:
                        self.loss_counter += 1
                    else:
                        # If the trade was profitable, reset the counter and the back-off delay
                        self.loss_counter = 0
                        self.previous_long_delay = self.long_trade_delay
        if self.vix_pause_active:
            enough_time_passed = not hasattr(self, 'last_vix_trigger_time') or (self.algo.Time >= self.last_vix_trigger_time + timedelta(minutes=self.vix_pause))
            if (not hasattr(self, 'last_vix_update') or current_time.hour != self.last_vix_update.hour) and enough_time_passed:
                vix_percentage_change = symbolData.get_percentage_change(self.vix_period)
                self.last_vix_update = current_time  # Update the last VIX update time

                if vix_percentage_change is not None and vix_percentage_change > self.vix_max_increase:
                    no_entry_long = True
                    long_entry = "no_entry"
                    self.last_vix_trigger_time = current_time  # Record the time of this trigger
                    if self.enable_vix_increase_print:
                        self.algo.Debug("Entry halted at {}: VIX change of {:.2f}% exceeds the threshold of {:.2f}%.".format(current_time.strftime('%Y-%m-%d %H:%M:%S'), vix_percentage_change, self.vix_max_increase))
                    if self.vix_liquidate_active and self.algo.Portfolio[self.symbol].IsLong:
                        self.algo.Liquidate()
                        self.last_long_exit_time = self.algo.Time
                        self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
                        self.reset_trailing_values()
                        trade_result = self.last_long_trade_result
                        if trade_result < 0:
                            self.loss_counter += 1
                        else:
                            # If the trade was profitable, reset the counter and the back-off delay
                            self.loss_counter = 0
                            self.previous_long_delay = self.long_trade_delay
        else:
            enough_time_passed = True
        self.allocation_control.calculate_allocation()
        if not no_nh_nl_entry and not no_entry_long and (long_entry != "no_entry" or self.old_pause_behavior) and enough_time_passed and not self.check_no_entry_months() and not self.check_no_entry_days() and not self.is_blackout(self.algo.time):
            #self.algo.debug(f"{self.last_trade_direction} {self.loss_counter} {backoff_minutes} {self.last_long_exit_time}")
            long_entry = self.long_entry(current_weighting_long, symbolData)
        if long_entry != "no_entry":
            self.last_trade_direction = "LONG"

        if long_entry == "no_entry" and (not self.algo.Portfolio[self.symbol].IsLong or not self.validate_no_long) and self.algo.Time.strftime("%H%M") <= self.latest_cross_over_time.strftime("%H%M"):
            short_entry = self.short_entry(current_weighting_short, current_weighting_long, simple_entry_short)
            if short_entry != "no_entry":
                self.last_trade_direction = "SHORT"
        else:
            self.last_trade_direction = "LONG"

    def check_long_last_min(self):
        # Called for the side effects of these checks (pre-close liquidation ahead of no-entry months, no-entry days and blackout periods)
        self.check_no_entry_months()
        self.check_no_entry_days()
        self.is_blackout(self.algo.time)


    def check_no_entry_months(self):
        if self.no_long_entry_months and self.use_no_entry_months:
            current_month = self.algo.time.month
            next_month = (current_month % 12) + 1  # wraps December (12) back to January (1)
            next_month_no_entry = self.number_to_month[next_month] in self.no_long_entry_months

            if next_month_no_entry and self.liq_no_entry_months_long and self.algo.portfolio[self.symbol].is_long and self.algo.time.hour == 15 and self.algo.time.minute == 59:
                self.algo.liquidate(self.symbol, tag=f"Liquidating due to the following month being {self.number_to_month[next_month]}.")

            if self.number_to_month[current_month] in self.no_long_entry_months:
                return True
        return False

    def check_no_entry_days(self):
        if self.no_long_entry_days and self.use_no_entry_days:
            current_day = self.number_to_weekday[self.algo.time.weekday()]
            # If today is Friday (4), the next trading day is Monday (0)
            if self.algo.time.weekday() == 4:  # Friday
                next_day = self.number_to_weekday[0]  # Monday
            else:
                next_day = self.number_to_weekday[(self.algo.time.weekday() + 1) % 7]
            
            next_day_no_entry = next_day in self.no_long_entry_days

            if next_day_no_entry and self.liq_no_entry_days_long and self.algo.portfolio[self.symbol].is_long and self.algo.time.hour == 15 and self.algo.time.minute == 59:
                self.algo.liquidate(self.symbol, tag=f"Liquidating due to the following day being {next_day}.")

            if current_day in self.no_long_entry_days:
                return True
        return False

    def is_blackout(self, current_time):
        if self.use_blackout_periods:
            for period in self.blackout_periods_long:
                start_date = datetime.strptime(period["start_date"], "%Y-%m-%d")
                end_date = datetime.strptime(period["end_date"], "%Y-%m-%d")
                day_before_end_date = end_date - timedelta(days=1)
                in_blackout_period = start_date <= current_time <= end_date
                is_day_before_end = current_time.date() == day_before_end_date.date()

                if is_day_before_end and self.liq_no_entry_blackout_long and self.algo.portfolio[self.symbol].is_long and self.algo.time.hour == 15 and self.algo.time.minute == 59:
                    self.algo.liquidate(self.symbol, tag=f"Liquidating ahead of the blackout period from {start_date} to {end_date}.")
                    
                if in_blackout_period:
                    return True
        return False
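    # Example of the expected BLACKOUT_PERIODS shape (assumed from the key names and date format used above,
    # not a confirmed sample from the config): [{"start_date": "2023-03-10", "end_date": "2023-03-17"}]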


    # Function to exit positions based on a timed exit condition
    def time_exit(self):
        # Check if the symbol is currently invested (in a Long or Short position)
        if self.algo.Portfolio[self.symbol].Invested:
            # Cancel any existing orders and liquidate the position
            self.cancel_orders()
            self.algo.Liquidate(self.symbol, tag="TIMED EXIT")
            self.last_long_exit_time = self.algo.Time
            self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
            self.reset_trailing_values()
            trade_result = self.last_long_trade_result
            if trade_result < 0:
                self.loss_counter += 1
            else:
                # If the trade was profitable, reset the counter and the back-off delay
                self.loss_counter = 0
                self.previous_long_delay = self.long_trade_delay

    # Function to cancel open orders for the symbol
    def cancel_orders(self):
        self.algo.Transactions.CancelOpenOrders(self.symbol)
    
    #// This function previously did not exist
    #// Having this "entry" point function helps with reducing clutter in main.py
    def check_exits(self, symbol, long_stop, short_stop, long_weighting, short_weighting, atr_value, sma_value):
        if self.enable_conditional_trading_end_time and self.algo.Securities[symbol].Close < sma_value:
            self.time_exit()
        self.check_long_exits(long_stop, long_weighting, atr_value)
        self.check_short_exits(short_stop, short_weighting, atr_value)

    # Function to check and exit Long positions based on the exit condition (long_stop) plus trailing and stop-loss logic
    def check_long_exits(self, long_stop, current_weighting, atr_value):
        # Check if the symbol is in a Long position
        if self.algo.Portfolio[self.symbol].IsLong:
            # Check if the long_stop condition is met and exit the position accordingly
            if long_stop:
                if self.exit_mode == "MARKET":
                    # If using "MARKET" exit mode, cancel any open orders and liquidate the Long position
                    self.cancel_orders()
                    self.algo.Liquidate(self.symbol, tag="CHANDELIER LONG EXIT")
                    self.last_long_exit_time = self.algo.Time
                    self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
                    self.reset_trailing_values()

                    trade_result = self.last_long_trade_result
                    if trade_result < 0:
                        self.loss_counter += 1
                    else:
                        # If the trade was profitable, reset the counter and the back-off delay
                        self.loss_counter = 0
                        self.previous_long_delay = self.long_trade_delay

            # Check if trailing stop and stop-loss conditions need to be applied
            if (self.use_trail_long or self.use_atr_trail_long) and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                # Apply trailing stop logic and stop-loss logic for Long positions
                # Determine if it is regular trading hours or extended hours
                if self.is_regular_trading_hours():
                    exit_method = 'liquidate'
                else:
                    exit_method = 'limit'

                # Call the calculate_long_trailing_stop function with the determined exit method
                self.calculate_long_trailing_stop(atr_value, exit_method=exit_method)

            self.calculate_long_stop()
            if self.use_long_volatility_stops:
                if atr_value >= 0.15:  # hardcoded ATR threshold separating high- and low-volatility stop levels
                    self.calculate_hv_long_stop()
                else:
                    self.calculate_lv_long_stop()
    
    def is_regular_trading_hours(self):
        # Define the regular trading hours (e.g., 9:30 AM to 4:00 PM)
        market_open = self.algo.Time.replace(hour=9, minute=30, second=0, microsecond=0)
        market_close = self.algo.Time.replace(hour=16, minute=0, second=0, microsecond=0)
        
        # Return True if the current time is within regular trading hours
        return market_open <= self.algo.Time <= market_close


    # Function to calculate the stop-loss for long trades
    def calculate_long_stop(self):
        # Check if the unrealized loss has reached the configured long stop percentage
        if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent <= -self.stop_percentage_long:
            # If yes, cancel any existing orders, liquidate the long trade, and reset the trailing values
            self.cancel_orders()
            self.algo.Liquidate(self.symbol, tag="LONG STOP EXIT")
            self.last_long_exit_time = self.algo.Time
            self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
            self.reset_trailing_values()
            trade_result = self.last_long_trade_result
            if trade_result < 0:
                self.loss_counter += 1
            else:
                # If the trade was profitable, reset the counter and the back-off delay
                self.loss_counter = 0
                self.previous_long_delay = self.long_trade_delay

    
    # Function to calculate the stop-loss for short trades
    def calculate_short_stop(self):
        # Check if the unrealized loss has reached the configured short stop percentage
        if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent <= -self.stop_percentage_short:
            # If yes, cancel any existing orders, liquidate the short trade, and reset the trailing values
            self.cancel_orders()
            self.algo.Liquidate(self.symbol, tag="SHORT STOP EXIT")
            self.reset_trailing_values()

    def calculate_hv_long_stop(self):
        if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent <= -self.stop_percentage_hv_long:
            # If yes, cancel any existing orders, liquidate the long trade, and reset the long trailing value
            self.cancel_orders()
            self.algo.Liquidate(self.symbol, tag="LONG STOP HV EXIT")
            self.last_long_exit_time = self.algo.Time
            self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
            self.reset_trailing_values()
            trade_result = self.last_long_trade_result
            if trade_result < 0:
                self.loss_counter += 1
            else:
                # If the trade was profitable, reset the counter and the back-off delay
                self.loss_counter = 0
                self.previous_long_delay = self.long_trade_delay

    def calculate_lv_long_stop(self):
        if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent <= -self.stop_percentage_lv_long:
            # If yes, cancel any existing orders, liquidate the long trade, and reset the long trailing value
            self.cancel_orders()
            self.algo.Liquidate(self.symbol, tag="LONG STOP LV EXIT")
            self.last_long_exit_time = self.algo.Time
            self.last_long_trade_result = self.algo.Portfolio[self.symbol].LastTradeProfit
            self.reset_trailing_values()
            trade_result = self.last_long_trade_result
            if trade_result < 0:
                self.loss_counter += 1
            else:
                # If the trade was profitable, reset the counter and the back-off delay
                self.loss_counter = 0
                self.previous_long_delay = self.long_trade_delay
    

    def calculate_hv_short_stop(self):
        if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent <= -self.stop_percentage_hv_short:
            # If yes, cancel any existing orders, liquidate the short trade, and reset the trailing values
            self.cancel_orders()
            self.algo.Liquidate(self.symbol, tag="SHORT STOP HV EXIT")
            self.reset_trailing_values()

    def calculate_lv_short_stop(self):
        if self.algo.Portfolio[self.symbol].UnrealizedProfitPercent <= -self.stop_percentage_lv_short:
            # If yes, cancel any existing orders, liquidate the short trade, and reset the trailing values
            self.cancel_orders()
            self.algo.Liquidate(self.symbol, tag="SHORT STOP LV EXIT")
            self.reset_trailing_values()


    # Function to check and exit Short positions based on the exit condition (short_stop) plus trailing and stop-loss logic
    def check_short_exits(self, short_stop, current_weighting, atr_value):
        # Check if the symbol is in a Short position
        if self.algo.Portfolio[self.symbol].IsShort:
            # Check if the short_stop condition is met and exit the position accordingly
            if short_stop:
                if self.exit_mode == "MARKET":
                    # If using "MARKET" exit mode, cancel any open orders and liquidate the Short position
                    self.cancel_orders()
                    self.algo.Liquidate(self.symbol, tag="CHANDELIER SHORT EXIT")
                    self.reset_trailing_values()
            # Check if trailing stop and stop-loss conditions need to be applied (mirrors the long-side check)
            if (self.use_trail_short or self.use_atr_trail_short) and len(self.algo.Transactions.GetOpenOrders(self.symbol)) == 0:
                # Apply trailing stop logic and stop-loss logic for Short positions
                # Determine if it is regular trading hours or extended hours
                if self.is_regular_trading_hours():
                    exit_method = 'liquidate'
                else:
                    exit_method = 'limit'

                # Call the calculate_short_trailing_stop function with the determined exit method
                self.calculate_short_trailing_stop(atr_value, exit_method=exit_method)

            self.calculate_short_stop()
            if self.use_short_volatility_stops:
                if atr_value >= 0.15:  # hardcoded ATR threshold separating high- and low-volatility stop levels
                    self.calculate_hv_short_stop()
                else:
                    self.calculate_lv_short_stop()