Overall Statistics
Total Orders: 220
Average Win: 2.70%
Average Loss: -3.29%
Compounding Annual Return: 99.831%
Drawdown: 28.700%
Expectancy: 0.173
Start Equity: 370000.00
End Equity: 612746.53
Net Profit: 65.607%
Sharpe Ratio: 1.766
Sortino Ratio: 2.024
Probabilistic Sharpe Ratio: 66.963%
Loss Rate: 36%
Win Rate: 64%
Profit-Loss Ratio: 0.82
Alpha: 0.578
Beta: -2.487
Annual Standard Deviation: 0.389
Annual Variance: 0.151
Information Ratio: 1.81
Tracking Error: 0.403
Treynor Ratio: -0.276
Total Fees: $0.00
Estimated Strategy Capacity: $3800000.00
Lowest Capacity Asset: USDJPY 8G
Portfolio Turnover: 378.32%
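
For reference, these figures hang together under the usual expectancy formula, expectancy ≈ win rate × profit-loss ratio − loss rate: 0.64 × 0.82 − 0.36 ≈ 0.16, in line with the reported 0.173 once rounding of the displayed inputs is taken into account.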


#region imports
from AlgorithmImports import *
#endregion


## General Settings
general_setting = {
    "tickers": {
        "EURUSD": {"type": "forex"},
        ##-##
        # "USDJPY": {"type": "forex"},
        # "GBPUSD": {"type": "forex"},
        # "AUDUSD": {"type": "forex"},
        ##-##
    },
    
    "model_name": "ForexLSTM_V1_0",  
    "consolidator_timeframes": ["D1", "W1"], 
    ##-##
    # "lstm_tickers": ['EURUSD','USDJPY','GBPUSD','AUDUSD'],
    "lstm_tickers": ['EURUSD'],
    ##-##
    "order_counter_diff": 3,

    ##-##
    "signals": [
        "FxLstm_Both_EURUSD",
        ], 

    # "signals": [
    #     "FxLstm_Both_EURUSD","FxLstm_Both_EURUSD_Trail",
    #     "FxLstm_Hybrid_EURUSD",
    #     "FxLstm_Both_USDJPY","FxLstm_Both_USDJPY_Trail",
    #     # "FxLstm_Hybrid_GBPUSD",
    #     "FxLstm_Hybrid_GBPUSD_Trail",
    #     "FxLstm_Both_AUDUSD",
    #     # "FxLstm_Both_AUDUSD_Trail",
    #     "FxLstm_Hybrid_AUDUSD", "FxLstm_Hybrid_AUDUSD_Trail",
    #     ], 
    ##-##


    "FxLstm_prediction_hour": 1,

    "external_data": {},
    # "external_data": {
    #     # SP500
    #     'spy': {
    #         'source': 'equity',
    #         'ticker': 'SPY',
    #     },

    #     # Global X DAX Germany ETF
    #     'dax': {
    #         'source': 'equity',
    #         'ticker': 'DAX',
    #     },

    #     # US Treasury
    #     'us_treasury': {
    #         'source': 'USTreasuryYieldCurveRate',
    #         'ref': 'USTYCR',
    #         'col_date': 'time',
    #         'col_val': 'onemonth',
    #     },

    #     # Consumer Price Index for Inflation Rate
    #     # https://data.nasdaq.com/data/RATEINF-inflation-rates
    #     'cpi_usa': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_USA",
    #     },
    #     'cpi_eur': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_EUR",
    #     },
    #     'cpi_deu': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_DEU",
    #     },

    #     # Federal Funds Effective Rate (DFF)
    #     # https://fred.stlouisfed.org/series/DFF
    #     'dff': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vT5lyey5dhfrZifoZFuDwlQDOz6oILyUyAHTLVe2eqiLv9jWkNeIFITIeKqwBOtS8oEUOoZ2zXX1De7/pub?gid=1400614786&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'dff',
    #         'lag_days': 2,
    #     },

    #     # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for the Euro Area (19 Countries)
    #     # https://fred.stlouisfed.org/series/IRLTLT01EZM156N

    #     'rate_eur_lt_gov': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSl_hxRnfcXnFly0Gh1vyZYNTRW6VTv-FQDlXuNUR1090RIst2a01nyhGl7tPR4VIcrgFfGBc3OSD72/pub?gid=1026565438&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IRLTLT01EZM156N'.lower(),
    #         'lag_days': 10,
    #     },

    #     # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for the Euro Area (19 Countries)
    #     # https://fred.stlouisfed.org/series/IR3TIB01EZM156N
    #     'rate_eur_3m_bank': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSkVfnj8N9AIsVF5PJN0JzU9ahw71nK_sTwY2qLKtNNxs1JI0STexUPEW15dY9bDUN8Fwql7_WUiKhK/pub?gid=2059310805&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IR3TIB01EZM156N'.lower(),
    #         'lag_days': 10,
    #     },

    #     # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for Germany
    #     # https://fred.stlouisfed.org/series/IRLTLT01DEM156N
    #     'rate_deu_lt_gov': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vToUOn242L-w9ZWUXz_fU59aUc6oN5tDJEG8fu207zO7jMyfy5y7VesxH0mzEKaqwuU7WGOq7_xxDSu/pub?gid=2099864712&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IRLTLT01DEM156N'.lower(),
    #         'lag_days': 10,
    #     },

    #     # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Germany
    #     # https://fred.stlouisfed.org/series/IR3TIB01DEM156N
    #     'rate_deu_3m_bank': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTswIuhg3-tLwgP6RWSSPRyyLDpvHNqdlSgSNk91_SkUjKAD9_lyvhI84MAHRHzYdrIho1Narccx_w1/pub?gid=1568788544&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IR3TIB01DEM156N'.lower(),
    #         'lag_days': 10,
    #     },

    # },

    "features": {
        "D1": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10",
            #"ULTOSC","CHOP","DX14","PHASE","CRSI","PSAR",
        ],
        # "W1": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        # "D1": [],
        "W1": [],
    },
    "features_val_map": {
        "SMA10": ["val"], 
        "MACD": ["macd", "macdsignal", "macdhist"],
        "ROC2": ["val"],   
        "MOM4": ["val"],    
        "RSI10": ["val"],
        "BB20": ["upper","lower","mid"],
        "CCI20": ["val"],

        "ULTOSC": ["val"],
        "CHOP": ["val"],
        "DX14": ["val"],
        "PHASE": ["val"],
        "CRSI": ["val"],
        "PSAR": ["val"],
    },

    
}
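

# A minimal sketch (not part of the original algorithm code) of how an algorithm's
# Initialize() might consume general_setting: subscribe each configured forex ticker
# and keep only the signal configurations named in "signals". The helper name
# `setup_from_general_setting` and the Oanda market choice are illustrative assumptions.
def setup_from_general_setting(algo, settings, all_signal_settings):
    symbols = {}
    for ticker, info in settings["tickers"].items():
        if info["type"] == "forex":
            # AddForex returns the security; keep its Symbol for consolidators and orders.
            symbols[ticker] = algo.AddForex(ticker, Resolution.Minute, Market.Oanda).Symbol
    # Restrict the signal dictionary to the names enabled in general_setting["signals"].
    active_signals = {name: all_signal_settings[name] for name in settings["signals"]}
    return symbols, active_signals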


## Consolidator Settings
consolidator_settings = {
    "D1": {
        "timeframe_minutes": 24 * 60,
        "consolidation_type": "quote",
        "indicators": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10","ATR14","ATR21",
            "SMA5","SMA20","SMA50","SMA100","SMA200",
            #"ULTOSC","CHOP","DX14","PHASE","CRSI","PSAR",
        ],
        # "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
    "W1": {
        "timeframe_minutes": 7 * 24 * 60,
        "consolidation_type": "quote",
        # "indicators": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
}
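

# Sketch: wiring a quote-bar consolidator for one timeframe key out of
# consolidator_settings. Assumes the symbol is already subscribed; `on_consolidated`
# is a placeholder handler supplied by the caller.
def register_consolidator(algo, symbol, timeframe_key, on_consolidated):
    cfg = consolidator_settings[timeframe_key]
    consolidator = QuoteBarConsolidator(timedelta(minutes=cfg["timeframe_minutes"]))
    consolidator.DataConsolidated += on_consolidated
    algo.SubscriptionManager.AddConsolidator(symbol, consolidator)
    return consolidator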


## Indicators Settings
indicator_settings = {
    "SMA5": {
        "type": "SMA",
        "lookback": 5,
        "field": "Close",
        "window": 3,
    }, 
    "SMA10": {
        "type": "SMA",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    }, 
    "SMA20": {
        "type": "SMA",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 
    "SMA50": {
        "type": "SMA",
        "lookback": 50,
        "field": "Close",
        "window": 3,
    }, 
    "SMA100": {
        "type": "SMA",
        "lookback": 100,
        "field": "Close",
        "window": 3,
    }, 
    "SMA200": {
        "type": "SMA",
        "lookback": 200,
        "field": "Close",
        "window": 3,
    }, 
    "MACD": {
        "type": "MACD",
        "window": 3,
    },
    "ROC2": {
        "type": "ROC",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "MOM4": {
        "type": "MOM",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "RSI10": {
        "type": "RSI",
        "lookback": 10,
        "ma_type": "Simple",
        "field": "Close",
        "window": 3,
    },
    "BB20": {
        "type": "BOLL",
        "lookback": 20,
        "ma_type": "Simple",
        "std": 2,
        "field": "Close",
        "window": 3,
    },
    "CCI20": {
        "type": "CCI",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 

    "ATR10": {
        "type": "ATR",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    },
    "ATR14": {
        "type": "ATR",
        "lookback": 14,
        "field": "Close",
        "window": 3,
    },
    "ATR21": {
        "type": "ATR",
        "lookback": 21,
        "field": "Close",
        "window": 3,
    },
    "ULTOSC": {
        "type": "ULTOSC",
        "window": 3,
    },
    "CHOP": {
        "type": "CHOP",
        "lookback": 52,
        "window": 3,
    },
    "DX14": {
        "type": "DX",
        "lookback": 14,
        "window": 3,
    },
    "PHASE": {
        "type": "PHASE",
        "lookback": 15,
        "window": 3,
    },
    "CRSI": {
        "type": "CRSI",
        "rsi_len": 15,
        "rsi_field": "Close",
        "rsi_window": 21,
        "window": 3,
    },
    "PSAR": {
        "type": "PSAR",
        "window": 3,
    },

}
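

# Sketch: building a LEAN indicator plus a rolling window from one indicator_settings
# entry. Only a few of the "type" codes are dispatched here, and the MACD periods
# (12/26/9) are illustrative defaults, not taken from this config.
def build_indicator(name):
    cfg = indicator_settings[name]
    kind = cfg["type"]
    if kind == "SMA":
        indicator = SimpleMovingAverage(cfg["lookback"])
    elif kind == "RSI":
        indicator = RelativeStrengthIndex(cfg["lookback"], MovingAverageType.Simple)
    elif kind == "BOLL":
        indicator = BollingerBands(cfg["lookback"], cfg["std"], MovingAverageType.Simple)
    elif kind == "ATR":
        indicator = AverageTrueRange(cfg["lookback"])
    elif kind == "MACD":
        indicator = MovingAverageConvergenceDivergence(12, 26, 9, MovingAverageType.Exponential)
    else:
        raise ValueError(f"unhandled indicator type: {kind}")
    # Consolidated indicator values are also kept in a short rolling window of length cfg["window"].
    return indicator, RollingWindow[float](cfg["window"])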


signal_settings = {
    "FxLstm_Both_EURUSD": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        # "risk_pct": 0.01,
        "risk_pct": 0.0125,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 21,
        "longStopMultiplier": 0.1,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.1,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_EURUSD_Trail": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        "risk_pct": 0.01,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 21,
        "longStopMultiplier": 0.1,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.1,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_EURUSD": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        # "risk_pct": 0.01,
        "risk_pct": 0.0125,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 5.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_EURUSD_Trail": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.01,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 5.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_USDJPY": {
        "lstm_ticker": "USDJPY",  
        "valid_tickers": ["USDJPY"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 1.0,
        "shortStopMultiplier": 0.5,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 4.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_USDJPY_Trail": {
        "lstm_ticker": "USDJPY",  
        "valid_tickers": ["USDJPY"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 1.0,
        "shortStopMultiplier": 0.5,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 4.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_GBPUSD": {
        "lstm_ticker": "GBPUSD",  
        "valid_tickers": ["GBPUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.40,
        "shortStopMultiplier": 0.75,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_GBPUSD_Trail": {
        "lstm_ticker": "GBPUSD",  
        "valid_tickers": ["GBPUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.40,
        "shortStopMultiplier": 0.75,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_AUDUSD": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.75,
        "shortStopMultiplier": 0.25,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.2,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_AUDUSD_Trail": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.75,
        "shortStopMultiplier": 0.25,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.2,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_AUDUSD": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 1.0,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_AUDUSD_Trail": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 1.0,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

}
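

# Sketch: the ATR-based stop, reward target, and risk_pct sizing implied by a signal
# entry, computed for a long trade. The function name and the sizing rule (risk a fixed
# fraction of equity between entry and stop) are assumed interpretations of these keys.
def long_trade_levels(signal_name, entry_price, atr_value, equity):
    cfg = signal_settings[signal_name]
    stop_distance = atr_value * cfg["longStopMultiplier"]
    stop_price = entry_price - stop_distance
    target_price = entry_price + stop_distance * cfg["longRiskRewardMultiplier"]
    # Size the position so that hitting the stop loses roughly risk_pct of current equity.
    quantity = (equity * cfg["risk_pct"]) / stop_distance if stop_distance > 0 else 0
    return stop_price, target_price, quantity

# Example with hypothetical numbers: long_trade_levels("FxLstm_Both_EURUSD", 1.1000, 0.0060, 370000)
# risks 1.25% of equity with a stop 0.1 x ATR below entry and a 3R profit target.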

#region imports
from AlgorithmImports import *
#endregion


## General Settings
general_setting = {
    "tickers": {
        "EURUSD": {"type": "forex"},
    },
    
    "model_name": "ForexLSTM_V1_0",  
    "consolidator_timeframes": ["D1", "W1"], 
    "lstm_tickers": ['EURUSD'],

    "order_counter_diff": 3,
    # "signals": ["FxLstm_Both"], 
    # "signals": ["FxLstm_Hybrid"], 
    "signals": ["FxLstm_Both","FxLstm_Hybrid"], 

    "FxLstm_prediction_hour": 1,


    "external_data": {},
    # "external_data": {
    #     # SP500
    #     'spy': {
    #         'source': 'equity',
    #         'ticker': 'SPY',
    #     },

    #     # Global X DAX Germany ETF
    #     'dax': {
    #         'source': 'equity',
    #         'ticker': 'DAX',
    #     },

    #     # US Treasury
    #     'us_treasury': {
    #         'source': 'USTreasuryYieldCurveRate',
    #         'ref': 'USTYCR',
    #         'col_date': 'time',
    #         'col_val': 'onemonth',
    #     },

    #     # Consumer Price Index for Inflation Rate
    #     # https://data.nasdaq.com/data/RATEINF-inflation-rates
    #     'cpi_usa': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_USA",
    #     },
    #     'cpi_eur': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_EUR",
    #     },
    #     'cpi_deu': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_DEU",
    #     },

    #     # Federal Funds Effective Rate (DFF)
    #     # https://fred.stlouisfed.org/series/DFF
    #     'dff': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vT5lyey5dhfrZifoZFuDwlQDOz6oILyUyAHTLVe2eqiLv9jWkNeIFITIeKqwBOtS8oEUOoZ2zXX1De7/pub?gid=1400614786&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'dff',
    #         'lag_days': 2,
    #     },

    #     # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for the Euro Area (19 Countries)
    #     # https://fred.stlouisfed.org/series/IRLTLT01EZM156N

    #     'rate_eur_lt_gov': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSl_hxRnfcXnFly0Gh1vyZYNTRW6VTv-FQDlXuNUR1090RIst2a01nyhGl7tPR4VIcrgFfGBc3OSD72/pub?gid=1026565438&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IRLTLT01EZM156N'.lower(),
    #         'lag_days': 10,
    #     },

    #     # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for the Euro Area (19 Countries)
    #     # https://fred.stlouisfed.org/series/IR3TIB01EZM156N
    #     'rate_eur_3m_bank': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSkVfnj8N9AIsVF5PJN0JzU9ahw71nK_sTwY2qLKtNNxs1JI0STexUPEW15dY9bDUN8Fwql7_WUiKhK/pub?gid=2059310805&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IR3TIB01EZM156N'.lower(),
    #         'lag_days': 10,
    #     },

    #     # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for Germany
    #     # https://fred.stlouisfed.org/series/IRLTLT01DEM156N
    #     'rate_deu_lt_gov': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vToUOn242L-w9ZWUXz_fU59aUc6oN5tDJEG8fu207zO7jMyfy5y7VesxH0mzEKaqwuU7WGOq7_xxDSu/pub?gid=2099864712&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IRLTLT01DEM156N'.lower(),
    #         'lag_days': 10,
    #     },

    #     # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Germany
    #     # https://fred.stlouisfed.org/series/IR3TIB01DEM156N
    #     'rate_deu_3m_bank': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTswIuhg3-tLwgP6RWSSPRyyLDpvHNqdlSgSNk91_SkUjKAD9_lyvhI84MAHRHzYdrIho1Narccx_w1/pub?gid=1568788544&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IR3TIB01DEM156N'.lower(),
    #         'lag_days': 10,
    #     },

    # },

    "features": {
        "D1": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10",
            #"ULTOSC","CHOP","DX14","PHASE","CRSI","PSAR",
        ],
        # "W1": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        # "D1": [],
        "W1": [],
    },
    "features_val_map": {
        "SMA10": ["val"], 
        "MACD": ["macd", "macdsignal", "macdhist"],
        "ROC2": ["val"],   
        "MOM4": ["val"],    
        "RSI10": ["val"],
        "BB20": ["upper","lower","mid"],
        "CCI20": ["val"],

        "ULTOSC": ["val"],
        "CHOP": ["val"],
        "DX14": ["val"],
        "PHASE": ["val"],
        "CRSI": ["val"],
        "PSAR": ["val"],
    },

    
}


## Consolidator Settings
consolidator_settings = {
    "D1": {
        "timeframe_minutes": 24 * 60,
        "consolidation_type": "quote",
        "indicators": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10","ATR14","ATR21",
            "SMA5","SMA20","SMA50","SMA100","SMA200",
            #"ULTOSC","CHOP","DX14","PHASE","CRSI","PSAR",
        ],
        # "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
    "W1": {
        "timeframe_minutes": 7 * 24 * 60,
        "consolidation_type": "quote",
        # "indicators": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
}


## Indicators Settings
indicator_settings = {
    "SMA5": {
        "type": "SMA",
        "lookback": 5,
        "field": "Close",
        "window": 3,
    }, 
    "SMA10": {
        "type": "SMA",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    }, 
    "SMA20": {
        "type": "SMA",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 
    "SMA50": {
        "type": "SMA",
        "lookback": 50,
        "field": "Close",
        "window": 3,
    }, 
    "SMA100": {
        "type": "SMA",
        "lookback": 100,
        "field": "Close",
        "window": 3,
    }, 
    "SMA200": {
        "type": "SMA",
        "lookback": 200,
        "field": "Close",
        "window": 3,
    }, 
    "MACD": {
        "type": "MACD",
        "window": 3,
    },
    "ROC2": {
        "type": "ROC",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "MOM4": {
        "type": "MOM",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "RSI10": {
        "type": "RSI",
        "lookback": 10,
        "ma_type": "Simple",
        "field": "Close",
        "window": 3,
    },
    "BB20": {
        "type": "BOLL",
        "lookback": 20,
        "ma_type": "Simple",
        "std": 2,
        "field": "Close",
        "window": 3,
    },
    "CCI20": {
        "type": "CCI",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 

    "ATR10": {
        "type": "ATR",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    },
    "ATR14": {
        "type": "ATR",
        "lookback": 14,
        "field": "Close",
        "window": 3,
    },
    "ATR21": {
        "type": "ATR",
        "lookback": 21,
        "field": "Close",
        "window": 3,
    },
    "ULTOSC": {
        "type": "ULTOSC",
        "window": 3,
    },
    "CHOP": {
        "type": "CHOP",
        "lookback": 52,
        "window": 3,
    },
    "DX14": {
        "type": "DX",
        "lookback": 14,
        "window": 3,
    },
    "PHASE": {
        "type": "PHASE",
        "lookback": 15,
        "window": 3,
    },
    "CRSI": {
        "type": "CRSI",
        "rsi_len": 15,
        "rsi_field": "Close",
        "rsi_window": 21,
        "window": 3,
    },
    "PSAR": {
        "type": "PSAR",
        "window": 3,
    },

}


signal_settings = {
    "FxLstm_Both": {
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        "risk_pct": 0.01,
        "enter_long_trades": True,
        "enter_short_trades": True,

        # "sma_filter_lookback_fast": 5,
        # "sma_filter_lookback_slow": 20,
        "sma_filter_lookback": 100,

        "atrLength": 21,
        "longStopMultiplier": 0.1,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,

        "useTralingStop": False,    
        "trailStopSize": 1,
        "movement_thres": 0.002,
        "use_movement_thres_for_stops": False,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid": {
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.01,
        "enter_long_trades": True,
        "enter_short_trades": True,

        # "sma_filter_lookback_fast": 5,
        # "sma_filter_lookback_slow": 20,
        "sma_filter_lookback": 100,

        "atrLength": 10,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,

        "useTralingStop": False,
        "trailStopSize": 1,
        "movement_thres": 0.002,
        "use_movement_thres_for_stops": False,
        "use_prediction_direction_to_exit": False,
    },

}
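

# Sketch: translating a model class prediction into a trade direction using
# prediction_direction_map_dict and the long/short entry gates. The helper name
# is illustrative; the original signal code is not shown here.
def prediction_to_direction(signal_name, predicted_class):
    cfg = signal_settings[signal_name]
    direction = cfg["prediction_direction_map_dict"].get(predicted_class, 0)  # -1 short, 0 flat, 1 long
    if direction > 0 and not cfg["enter_long_trades"]:
        return 0
    if direction < 0 and not cfg["enter_short_trades"]:
        return 0
    return direction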

#region imports
from AlgorithmImports import *
#endregion


## General Settings
general_setting = {
    "tickers": {
        "EURUSD": {"type": "forex"},
        "USDJPY": {"type": "forex"},
        "GBPUSD": {"type": "forex"},
        "AUDUSD": {"type": "forex"},
    },
    
    "model_name": "ForexLSTM_V1_05",
    "consolidator_timeframes": ["D1", "W1"],
    "order_counter_diff": 3,
    "model_types": ["both","hybrid"],

    "lstm_tickers": ['EURUSD','USDJPY','GBPUSD','AUDUSD'],

    "lstm_model_training_displace_days": {
        'EURUSD': 0,
        'AUDUSD': 0,
        'USDJPY': 1,
        'GBPUSD': 1,
    },

    "signals": [
        "FxLstm_Both_EURUSD","FxLstm_Both_EURUSD_Trail",
        "FxLstm_Hybrid_EURUSD",
        "FxLstm_Both_USDJPY","FxLstm_Both_USDJPY_Trail",
        # "FxLstm_Hybrid_GBPUSD",
        "FxLstm_Hybrid_GBPUSD_Trail",
        "FxLstm_Both_AUDUSD",
        # "FxLstm_Both_AUDUSD_Trail",
        "FxLstm_Hybrid_AUDUSD", "FxLstm_Hybrid_AUDUSD_Trail",
    ], 

    "FxLstm_prediction_hour": 1,

    "external_data": {
        # SP500
        'spy': {
            'source': 'equity',
            'ticker': 'SPY',
        },

        # Global X DAX Germany ETF
        'dax': {
            'source': 'equity',
            'ticker': 'DAX',
        },

        # US Treasury
        'us_treasury': {
            'source': 'USTreasuryYieldCurveRate',
            'ref': 'USTYCR',
            'col_date': 'time',
            'col_val': 'onemonth',
        },

        # Consumer Price Index for Inflation Rate
        # https://data.nasdaq.com/data/RATEINF-inflation-rates
        'cpi_usa': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_USA",
        },
        'cpi_eur': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_EUR",
        },
        'cpi_deu': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_DEU",
        },
        'cpi_gbr': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_GBR",
        },
        'cpi_chf': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_CHE",
        },
        'cpi_jpn': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_JPN",
        },
        'cpi_can': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_CAN",
        },
        'cpi_aus': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_AUS",
        },

        # Federal Funds Effective Rate (DFF)
        # https://fred.stlouisfed.org/series/DFF
        'dff': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vT5lyey5dhfrZifoZFuDwlQDOz6oILyUyAHTLVe2eqiLv9jWkNeIFITIeKqwBOtS8oEUOoZ2zXX1De7/pub?gid=1400614786&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'dff',
            'lag_days': 1,
        },

        # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for the Euro Area (19 Countries)
        # https://fred.stlouisfed.org/series/IRLTLT01EZM156N
        'rate_eur_lt_gov': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSl_hxRnfcXnFly0Gh1vyZYNTRW6VTv-FQDlXuNUR1090RIst2a01nyhGl7tPR4VIcrgFfGBc3OSD72/pub?gid=1026565438&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IRLTLT01EZM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for the Euro Area (19 Countries)
        # https://fred.stlouisfed.org/series/IR3TIB01EZM156N
        'rate_eur_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSkVfnj8N9AIsVF5PJN0JzU9ahw71nK_sTwY2qLKtNNxs1JI0STexUPEW15dY9bDUN8Fwql7_WUiKhK/pub?gid=2059310805&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01EZM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for Germany
        # https://fred.stlouisfed.org/series/IRLTLT01DEM156N
        'rate_deu_lt_gov': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vToUOn242L-w9ZWUXz_fU59aUc6oN5tDJEG8fu207zO7jMyfy5y7VesxH0mzEKaqwuU7WGOq7_xxDSu/pub?gid=2099864712&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IRLTLT01DEM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Germany
        # https://fred.stlouisfed.org/series/IR3TIB01DEM156N
        'rate_deu_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTswIuhg3-tLwgP6RWSSPRyyLDpvHNqdlSgSNk91_SkUjKAD9_lyvhI84MAHRHzYdrIho1Narccx_w1/pub?gid=1568788544&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01DEM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Switzerland
        # https://fred.stlouisfed.org/series/IR3TIB01CHM156N
        'rate_chf_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vRRVvpohXIZOGQ4HpAjTTMeZ6cTat0wZ1gOxpUR_5E3pDuDCHDppiRnV9GQNK33jWJ3pYxAjvOvmerO/pub?gid=1734297228&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01CHM156N'.lower(),
            'lag_days': 10,
        },

        # 3-Month or 90-Day Rates and Yields: Interbank Rates for Japan
        # https://fred.stlouisfed.org/series/IR3TIB01JPM156N
        'rate_jpn_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTjcVbe63Ea3BoVxTpTBNcaEICdI11DhVmZ6Qxb-_GcuP8VbemKreHWNEu5id0ZviHPk7PAtLHqdBGr/pub?gid=1682849610&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01JPM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Australia
        # https://fred.stlouisfed.org/series/IR3TIB01AUM156N
        'rate_aus_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vS73ca8pMDndu3lH5SjmrIJS-HwWfDdqS2mh1YQkQwhGj3UtIauP12xjhmLusXag9ibZJE3YZsWLERT/pub?gid=1639615970&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01AUM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Canada 
        # https://fred.stlouisfed.org/series/IR3TIB01CAM156N
        'rate_cnd_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vQ0Odb11l33qCVwPS6G2lxrUpfQ5DWXnGw8HFu6uUV_OUx7b-yBIQItN12TwLRTq3Bx3-fBe-pU86ve/pub?gid=483123093&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01CAM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for United Kingdom
        # https://fred.stlouisfed.org/series/IR3TIB01GBM156N
        'rate_gbp_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTtQSIEiSK1swVM_oElodv6YsjzojdxwfXZm-hDx68DQD6V3HjtuOYpHb4KUQC5uWDFoe9t09-Mibex/pub?gid=1120780951&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01GBM156N'.lower(),
            'lag_days': 10,
        },

    },

    "features": {
        "D1": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20","PSAR",
        ],
        "W1": [],
    },


    "features_val_map": {
        "SMA10": ["val"], 
        "MACD": ["macd", "macdsignal", "macdhist"],
        "ROC2": ["val"],   
        "MOM4": ["val"],    
        "RSI10": ["val"],
        "BB20": ["upper","lower","mid"],
        "CCI20": ["val"],
        "ULTOSC": ["val"],
        "CHOP": ["val"],
        "DX14": ["val"],
        "PHASE": ["val"],
        "CRSI": ["val"],
        "PSAR": ["val"],
    },

    
}
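

# Sketch: loading one 'gsheet' entry from external_data with pandas and applying its
# publication lag. pandas is available alongside AlgorithmImports; the exact handling of
# col_date / col_val / lag_days below is an assumption about how these keys are meant to be used.
import pandas as pd

def load_gsheet_series(key):
    cfg = general_setting["external_data"][key]
    df = pd.read_csv(cfg["link"])
    df[cfg["col_date"]] = pd.to_datetime(df[cfg["col_date"]])
    series = df.set_index(cfg["col_date"])[cfg["col_val"]].sort_index()
    # Shift timestamps forward by lag_days so each value only becomes visible after publication.
    series.index = series.index + pd.Timedelta(days=cfg.get("lag_days", 0))
    return series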


## Consolidator Settings
consolidator_settings = {
    "D1": {
        "timeframe_minutes": 24 * 60,
        "consolidation_type": "quote",
        "indicators": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10","ATR14","ATR21",
            "PSAR",
            "SMA100",
            "SMA200",
        ],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },

    "W1": {
        "timeframe_minutes": 7 * 24 * 60,
        "consolidation_type": "quote",
        "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },

}


## Indicators Settings
indicator_settings = {
    "SMA5": {
        "type": "SMA",
        "lookback": 5,
        "field": "Close",
        "window": 3,
    }, 
    "SMA10": {
        "type": "SMA",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    }, 
    "SMA20": {
        "type": "SMA",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 
    "SMA50": {
        "type": "SMA",
        "lookback": 50,
        "field": "Close",
        "window": 3,
    }, 
    "SMA100": {
        "type": "SMA",
        "lookback": 100,
        "field": "Close",
        "window": 3,
    }, 
    "SMA200": {
        "type": "SMA",
        "lookback": 200,
        "field": "Close",
        "window": 3,
    }, 
    "MACD": {
        "type": "MACD",
        "window": 3,
    },
    "ROC2": {
        "type": "ROC",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "MOM4": {
        "type": "MOM",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "RSI10": {
        "type": "RSI",
        "lookback": 10,
        "ma_type": "Simple",
        "field": "Close",
        "window": 3,
    },
    "BB20": {
        "type": "BOLL",
        "lookback": 20,
        "ma_type": "Simple",
        "std": 2,
        "field": "Close",
        "window": 3,
    },
    "CCI20": {
        "type": "CCI",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 

    "ATR10": {
        "type": "ATR",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    },
    "ATR14": {
        "type": "ATR",
        "lookback": 14,
        "field": "Close",
        "window": 3,
    },
    "ATR21": {
        "type": "ATR",
        "lookback": 21,
        "field": "Close",
        "window": 3,
    },
    "ULTOSC": {
        "type": "ULTOSC",
        "window": 3,
    },
    "CHOP": {
        "type": "CHOP",
        "lookback": 52,
        "window": 3,
    },
    "DX14": {
        "type": "DX",
        "lookback": 14,
        "window": 3,
    },
    "PHASE": {
        "type": "PHASE",
        "lookback": 15,
        "window": 3,
    },
    "CRSI": {
        "type": "CRSI",
        "rsi_len": 15,
        "rsi_field": "Close",
        "rsi_window": 21,
        "window": 3,
    },
    "PSAR": {
        "type": "PSAR",
        "window": 3,
    },
}

signal_settings = {
    "FxLstm_Both_EURUSD": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        # "risk_pct": 0.01,
        "risk_pct": 0.0125,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 21,
        "longStopMultiplier": 0.1,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.1,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_EURUSD_Trail": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        "risk_pct": 0.01,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 21,
        "longStopMultiplier": 0.1,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.1,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_EURUSD": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        # "risk_pct": 0.01,
        "risk_pct": 0.0125,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 5.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_EURUSD_Trail": {
        "lstm_ticker": "EURUSD",  
        "valid_tickers": ["EURUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.01,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 0.2,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 5.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_USDJPY": {
        "lstm_ticker": "USDJPY",  
        "valid_tickers": ["USDJPY"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 1.0,
        "shortStopMultiplier": 0.5,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 4.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_USDJPY_Trail": {
        "lstm_ticker": "USDJPY",  
        "valid_tickers": ["USDJPY"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 1.0,
        "shortStopMultiplier": 0.5,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 4.0,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_GBPUSD": {
        "lstm_ticker": "GBPUSD",  
        "valid_tickers": ["GBPUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.40,
        "shortStopMultiplier": 0.75,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_GBPUSD_Trail": {
        "lstm_ticker": "GBPUSD",  
        "valid_tickers": ["GBPUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 100,
        "atrLength": 10,
        "longStopMultiplier": 0.40,
        "shortStopMultiplier": 0.75,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_AUDUSD": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.75,
        "shortStopMultiplier": 0.25,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.2,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Both_AUDUSD_Trail": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'both',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.75,
        "shortStopMultiplier": 0.25,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.2,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_AUDUSD": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        # "risk_pct": 0.005, 
        "risk_pct": 0.0075,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 1.0,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

    "FxLstm_Hybrid_AUDUSD_Trail": {
        "lstm_ticker": "AUDUSD",  
        "valid_tickers": ["AUDUSD"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.005,
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 200,
        "atrLength": 21,
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 1.0,
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": True,    
        "trailStopSize": 0.5,
        "use_movement_thres_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },

}
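

# Sketch: updating an ATR-based trailing stop for the "_Trail" signal variants that set
# useTralingStop. The ratcheting rule (move the stop to price -/+ trailStopSize * ATR and
# never loosen it) is an assumed interpretation of trailStopSize.
def update_trailing_stop(signal_name, direction, price, atr_value, current_stop):
    cfg = signal_settings[signal_name]
    if not cfg["useTralingStop"]:
        return current_stop
    trail = atr_value * cfg["trailStopSize"]
    if direction > 0:   # long: the stop only moves up
        candidate = price - trail
        return candidate if current_stop is None else max(current_stop, candidate)
    else:               # short: the stop only moves down
        candidate = price + trail
        return candidate if current_stop is None else min(current_stop, candidate)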





model_settings = {
    "col_date": ['datetime'],
    "col_price": 'close_D1',
    "col_price_cur": 'price',
    "col_target": 'target',
    "start_year": 2013,
    "trade_hour": 1,
    "prediction_lookforward_days": 1,
    "max_window_size": 100,
    "scaled_tickers": ["USDJPY"],

    "inflation_map_dict": {
        'inflation_usa': 'cpi_usa',
        'inflation_eur': 'cpi_eur',
        'inflation_deu': 'cpi_deu',
        'inflation_gbr': 'cpi_gbr',
        'inflation_chf': 'cpi_chf',
        'inflation_jpn': 'cpi_jpn',
        'inflation_can': 'cpi_can',
        'inflation_aus': 'cpi_aus',
    },

    "cols_data": [
        'datetime', 'close_D1', 'price', 
        'spy', 'dax', 
        'dff',
        'cpi_usa', 'cpi_eur', 'cpi_deu', 'cpi_gbr','cpi_chf','cpi_jpn','cpi_can','cpi_aus',
        'rate_eur_3m_bank',
        'rate_deu_3m_bank',
        'rate_chf_3m_bank',
        'rate_jpn_3m_bank',
        'rate_aus_3m_bank',
        'rate_cnd_3m_bank',
        'rate_gbp_3m_bank',
        'D1-SMA10-val', 'D1-MACD-macd', 'D1-MACD-macdsignal', 'D1-MACD-macdhist', 'D1-ROC2-val',
        'D1-MOM4-val', 'D1-RSI10-val', 'D1-BB20-upper', 'D1-BB20-mid', 'D1-BB20-lower', 'D1-CCI20-val',
        'D1-PSAR-val',
    ],

    'col_fundamental': [
        'spy','dax',
        'dff',
        'cpi_usa','cpi_eur','cpi_deu','cpi_gbr','cpi_chf','cpi_jpn','cpi_can','cpi_aus',
        'rate_eur_3m_bank',
        'rate_deu_3m_bank',
        'rate_chf_3m_bank',
        'rate_jpn_3m_bank',
        'rate_aus_3m_bank',
        'rate_cnd_3m_bank',
        'rate_gbp_3m_bank',
    ],

    'col_technical': [
        'D1-SMA10-val',
        'D1-MACD-macd',
        'D1-MACD-macdsignal',
        'D1-MACD-macdhist',
        'D1-ROC2-val',
        'D1-MOM4-val',
        'D1-RSI10-val',
        'D1-BB20-upper',
        'D1-BB20-mid', 
        'D1-BB20-lower',
        'D1-CCI20-val',
        'D1-PSAR-val',
    ],

    "model_settings_both": {
        "use_gru_model": True,
        "use_dual_lstm": False,
        "epochs": 1,
        "hidden_size": 50,
        "window_size": 5,
        "thres_multiplier": 3,
        "use_early_stop": False,
        "learning_rate": 0.0005,
        "batch_size": 8,
        "use_weighted_sampler": False,
        "volatility_type": 'thres_auto_v1',
        "valid_lookback_months": 12,
        "train_lookback_months": 48,

        "col_feature_dict": {
            "EURUSD": [
                'dff',
                'inflation_usa_d30',
                'inflation_eur_d30',
                'inflation_deu_d30',
                'rate_eur_3m_bank',
                'rate_deu_3m_bank',
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
                'D1-PSAR-val',
            ],

            "USDJPY": [
                'dff',
                'inflation_usa_d30',
                'inflation_jpn_d30',
                'rate_jpn_3m_bank',
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
                'D1-PSAR-val',
            ],

            "GBPUSD": [
                'dff',
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
            ],

            "AUDUSD": [
                'dff',
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
                'D1-PSAR-val',
            ],

        },
    },



    "model_settings_hybrid": {
        "use_gru_model": True,
        "use_dual_lstm": False,
        "epochs": 1,
        "hidden_size": 50,
        "window_size": 20,
        "thres_multiplier": 3,
        "learning_rate": 0.001,
        "batch_size": 8,
        "use_weighted_sampler": False,
        "volatility_type": 'thres_auto_v1',
        "valid_lookback_months": 12,
        "train_lookback_months": 48,

        "col_feature_fundamental_dict": {
            "EURUSD": [
                'dff',
                'inflation_usa_d1',
                'inflation_eur_d1',
                'inflation_deu_d1',
                'rate_eur_3m_bank',
                'rate_deu_3m_bank',
            ],

            "USDJPY": [
                'dff',
                'rate_jpn_3m_bank',
            ],

            "GBPUSD": [
                'dff',
                'inflation_usa_d1',
                'inflation_gbr_d1',
                'rate_gbp_3m_bank',
            ],

            "AUDUSD": [
                'dff',
                'inflation_usa_d1',
                'inflation_aus_d1',
                'rate_aus_3m_bank',
            ],           
        },

        "col_feature_technical_dict": {
            "EURUSD": [
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
            ],

            "USDJPY": [
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
            ],

            "GBPUSD": [
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
            ],

            "AUDUSD": [
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
            ],

        },

    },


}
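

## The feature column names used in the col_feature_* lists above follow the pattern
## {timeframe}-{indicator}-{field} (e.g. 'D1-BB20-upper'), matching the features_val_map
## entries in the consolidator configs. A tiny illustration (hypothetical helper, not part
## of the project code):

def make_feature_column(timeframe, indicator, field):
    # ("D1", "BB20", "upper") -> "D1-BB20-upper"
    return f"{timeframe}-{indicator}-{field}"

assert make_feature_column("D1", "BB20", "upper") == "D1-BB20-upper"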



#region imports
from AlgorithmImports import *
#endregion


## General Settings
general_setting = {
    "tickers": {
        ##-##
        "EURUSD": {"type": "forex"},
        # "USDJPY": {"type": "forex"},
        # "USDCHF": {"type": "forex"},

        # "GBPUSD": {"type": "forex"},
        # "AUDUSD": {"type": "forex"},
        # "USDCAD": {"type": "forex"},
        ##-##
    },
    
    "model_name": "ForexLSTM_V1_0",  
    "consolidator_timeframes": ["D1", "W1"], 

    ##-##
    "lstm_tickers": ['EURUSD'],
    # "lstm_tickers": ['USDJPY'],
    # "lstm_tickers": ['USDCHF'],

    # "lstm_tickers": ['GBPUSD'],
    # "lstm_tickers": ['AUDUSD'],
    # "lstm_tickers": ['USDCAD'],
    ##-##

    "fx_lstm_signal_name": 'FxLstm',

    "order_counter_diff": 3,

    "external_data": {
        # SP500
        'spy': {
            'source': 'equity',
            'ticker': 'SPY',
        },

        # Global X DAX Germany ETF
        'dax': {
            'source': 'equity',
            'ticker': 'DAX',
        },

        # US Treasury
        'us_treasury': {
            'source': 'USTreasuryYieldCurveRate',
            'ref': 'USTYCR',
            'col_date': 'time',
            'col_val': 'onemonth',
        },

        # Consumer Price Index for Inflation Rate
        # https://data.nasdaq.com/data/RATEINF-inflation-rates
        'cpi_usa': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_USA",
        },
        'cpi_eur': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_EUR",
        },
        'cpi_deu': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_DEU",
        },
        'cpi_gbr': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_GBR",
        },
        'cpi_chf': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_CHE",
        },
        'cpi_jpn': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_JPN",
        },
        'cpi_can': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_CAN",
        },
        'cpi_aus': {
            'source': 'NasdaqDataLink',
            'ref': "RATEINF/CPI_AUS",
        },

        # Federal Funds Effective Rate (DFF)
        # https://fred.stlouisfed.org/series/DFF
        'dff': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vT5lyey5dhfrZifoZFuDwlQDOz6oILyUyAHTLVe2eqiLv9jWkNeIFITIeKqwBOtS8oEUOoZ2zXX1De7/pub?gid=1400614786&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'dff',
            'lag_days': 2,
        },

        # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for the Euro Area (19 Countries)
        # https://fred.stlouisfed.org/series/IRLTLT01EZM156N

        'rate_eur_lt_gov': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSl_hxRnfcXnFly0Gh1vyZYNTRW6VTv-FQDlXuNUR1090RIst2a01nyhGl7tPR4VIcrgFfGBc3OSD72/pub?gid=1026565438&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IRLTLT01EZM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for the Euro Area (19 Countries)
        # https://fred.stlouisfed.org/series/IR3TIB01EZM156N
        'rate_eur_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSkVfnj8N9AIsVF5PJN0JzU9ahw71nK_sTwY2qLKtNNxs1JI0STexUPEW15dY9bDUN8Fwql7_WUiKhK/pub?gid=2059310805&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01EZM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for Germany
        # https://fred.stlouisfed.org/series/IRLTLT01DEM156N
        'rate_deu_lt_gov': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vToUOn242L-w9ZWUXz_fU59aUc6oN5tDJEG8fu207zO7jMyfy5y7VesxH0mzEKaqwuU7WGOq7_xxDSu/pub?gid=2099864712&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IRLTLT01DEM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Germany
        # https://fred.stlouisfed.org/series/IR3TIB01DEM156N
        'rate_deu_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTswIuhg3-tLwgP6RWSSPRyyLDpvHNqdlSgSNk91_SkUjKAD9_lyvhI84MAHRHzYdrIho1Narccx_w1/pub?gid=1568788544&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01DEM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Switzerland
        # https://fred.stlouisfed.org/series/IR3TIB01CHM156N
        'rate_chf_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vRRVvpohXIZOGQ4HpAjTTMeZ6cTat0wZ1gOxpUR_5E3pDuDCHDppiRnV9GQNK33jWJ3pYxAjvOvmerO/pub?gid=1734297228&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01CHM156N'.lower(),
            'lag_days': 10,
        },

        #  3-Month or 90-day Rates and Yields: Interbank Rates for Japan
        # https://fred.stlouisfed.org/series/IR3TIB01JPM156N
        'rate_jpn_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTjcVbe63Ea3BoVxTpTBNcaEICdI11DhVmZ6Qxb-_GcuP8VbemKreHWNEu5id0ZviHPk7PAtLHqdBGr/pub?gid=1682849610&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01JPM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Australia
        # https://fred.stlouisfed.org/series/IR3TIB01AUM156N
        'rate_aus_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vS73ca8pMDndu3lH5SjmrIJS-HwWfDdqS2mh1YQkQwhGj3UtIauP12xjhmLusXag9ibZJE3YZsWLERT/pub?gid=1639615970&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01AUM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Canada 
        # https://fred.stlouisfed.org/series/IR3TIB01CAM156N
        'rate_cnd_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vQ0Odb11l33qCVwPS6G2lxrUpfQ5DWXnGw8HFu6uUV_OUx7b-yBIQItN12TwLRTq3Bx3-fBe-pU86ve/pub?gid=483123093&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01CAM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for United Kingdom
        # https://fred.stlouisfed.org/series/IR3TIB01GBM156N
        'rate_gbp_3m_bank': {
            'source': 'gsheet',
            'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTtQSIEiSK1swVM_oElodv6YsjzojdxwfXZm-hDx68DQD6V3HjtuOYpHb4KUQC5uWDFoe9t09-Mibex/pub?gid=1120780951&single=true&output=csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01GBM156N'.lower(),
            'lag_days': 10,
        },

    },

    "features": {
        # "D1": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        # "W1": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        "D1": [],
        "W1": [],
    },
    "features_val_map": {
        "SMA10": ["val"], 
        "MACD": ["macd", "macdsignal", "macdhist"],
        "ROC2": ["val"],   
        "MOM4": ["val"],    
        "RSI10": ["val"],
        "BB20": ["upper","lower","mid"],
        "CCI20": ["val"],
    },
}


## Consolidator Settings
consolidator_settings = {
    "D1": {
        "timeframe_minutes": 24 * 60,
        "consolidation_type": "quote",
        # "indicators": [
        #     "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
        # ],
        "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
    "W1": {
        "timeframe_minutes": 7 * 24 * 60,
        "consolidation_type": "quote",
        # "indicators": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
}


## Indicators Settings
indicator_settings = {
    "SMA10": {
        "type": "SMA",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    }, 
    "MACD": {
        "type": "MACD",
        "window": 3,
    },
    "ROC2": {
        "type": "ROC",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "MOM4": {
        "type": "MOM",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "RSI10": {
        "type": "RSI",
        "lookback": 10,
        "ma_type": "Simple",
        "field": "Close",
        "window": 3,
    },
    "BB20": {
        "type": "BOLL",
        "lookback": 20,
        "ma_type": "Simple",
        "std": 2,
        "field": "Close",
        "window": 3,
    },
    "CCI20": {
        "type": "CCI",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 

    "ATR10": {
        "type": "ATR",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    },
    "ATR14": {
        "type": "ATR",
        "lookback": 14,
        "field": "Close",
        "window": 3,
    },
    "ATR21": {
        "type": "ATR",
        "lookback": 21,
        "field": "Close",
        "window": 3,
    },
}
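

## A rough sketch (an assumption -- the real construction happens in data_classes.SymbolData,
## which is not shown here) of how an indicator_settings entry could be translated into a
## LEAN indicator object:

def build_indicator_sketch(name, setting):
    # Hypothetical helper mapping the "type" field to a LEAN indicator constructor.
    kind = setting["type"]
    if kind == "SMA":
        return SimpleMovingAverage(name, setting["lookback"])
    if kind == "ROC":
        return RateOfChange(name, setting["lookback"])
    if kind == "MOM":
        return Momentum(name, setting["lookback"])
    if kind == "RSI":
        return RelativeStrengthIndex(name, setting["lookback"], MovingAverageType.Simple)
    if kind == "BOLL":
        return BollingerBands(name, setting["lookback"], setting["std"], MovingAverageType.Simple)
    if kind == "CCI":
        return CommodityChannelIndex(name, setting["lookback"], MovingAverageType.Simple)
    if kind == "MACD":
        return MovingAverageConvergenceDivergence(name, 12, 26, 9, MovingAverageType.Exponential)
    if kind == "ATR":
        return AverageTrueRange(name, setting["lookback"])
    raise ValueError(f"Unhandled indicator type: {kind}")

# Example: build_indicator_sketch("D1-RSI10", indicator_settings["RSI10"])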

signal_settings = {
    "FxLstm": {
        ##-##
        "valid_tickers": ["EURUSD","USDJPY","USDCHF","GBPUSD","AUDUSD","USDCAD"],
        ##-##

        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
    },
}
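

## The LSTM emits a class index which prediction_direction_map_dict translates into a trade
## direction (0 -> -1, 1 -> 0, 2 -> +1, i.e. presumably short / flat / long). A tiny
## illustration (hypothetical helper, not part of the project code):

def map_prediction_to_direction(pred_class):
    # e.g. a predicted class of 2 becomes +1
    return signal_settings["FxLstm"]["prediction_direction_map_dict"][pred_class]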
#region imports
from AlgorithmImports import *
#endregion


## General Settings
general_setting = {
    "tickers": {
        ##-##
        "EURUSD": {"type": "forex"},
        # "USDJPY": {"type": "forex"},
        # "USDCHF": {"type": "forex"},

        # "GBPUSD": {"type": "forex"},
        # "AUDUSD": {"type": "forex"},
        # "USDCAD": {"type": "forex"},
        ##-##
    },
    
    "model_name": "ForexLSTM_V1_0",  
    "consolidator_timeframes": ["D1", "W1"], 

    ##-##
    "lstm_tickers": ['EURUSD'],
    # "lstm_tickers": ['USDJPY'],
    # "lstm_tickers": ['USDCHF'],

    # "lstm_tickers": ['GBPUSD'],
    # "lstm_tickers": ['AUDUSD'],
    # "lstm_tickers": ['USDCAD'],
    ##-##

    "fx_lstm_signal_name": 'FxLstm',

    "order_counter_diff": 3,

    "external_data": {},
    # "external_data": {
    #     # SP500
    #     'spy': {
    #         'source': 'equity',
    #         'ticker': 'SPY',
    #     },

    #     # Global X DAX Germany ETF
    #     'dax': {
    #         'source': 'equity',
    #         'ticker': 'DAX',
    #     },

    #     # US Treasury
    #     'us_treasury': {
    #         'source': 'USTreasuryYieldCurveRate',
    #         'ref': 'USTYCR',
    #         'col_date': 'time',
    #         'col_val': 'onemonth',
    #     },

    #     # Consumer Price Index for Inflation Rate
    #     # https://data.nasdaq.com/data/RATEINF-inflation-rates
    #     'cpi_usa': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_USA",
    #     },
    #     'cpi_eur': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_EUR",
    #     },
    #     'cpi_deu': {
    #         'source': 'NasdaqDataLink',
    #         'ref': "RATEINF/CPI_DEU",
    #     },

    #     # Federal Funds Effective Rate (DFF)
    #     # https://fred.stlouisfed.org/series/DFF
    #     'dff': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vT5lyey5dhfrZifoZFuDwlQDOz6oILyUyAHTLVe2eqiLv9jWkNeIFITIeKqwBOtS8oEUOoZ2zXX1De7/pub?gid=1400614786&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'dff',
    #         'lag_days': 2,
    #     },

    #     # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for the Euro Area (19 Countries)
    #     # https://fred.stlouisfed.org/series/IRLTLT01EZM156N

    #     'rate_eur_lt_gov': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSl_hxRnfcXnFly0Gh1vyZYNTRW6VTv-FQDlXuNUR1090RIst2a01nyhGl7tPR4VIcrgFfGBc3OSD72/pub?gid=1026565438&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IRLTLT01EZM156N'.lower(),
    #         'lag_days': 2,
    #     },

    #     # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for the Euro Area (19 Countries)
    #     # https://fred.stlouisfed.org/series/IR3TIB01EZM156N
    #     'rate_eur_3m_bank': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vSkVfnj8N9AIsVF5PJN0JzU9ahw71nK_sTwY2qLKtNNxs1JI0STexUPEW15dY9bDUN8Fwql7_WUiKhK/pub?gid=2059310805&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IR3TIB01EZM156N'.lower(),
    #         'lag_days': 2,
    #     },

    #     # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for Germany
    #     # https://fred.stlouisfed.org/series/IRLTLT01DEM156N
    #     'rate_deu_lt_gov': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vToUOn242L-w9ZWUXz_fU59aUc6oN5tDJEG8fu207zO7jMyfy5y7VesxH0mzEKaqwuU7WGOq7_xxDSu/pub?gid=2099864712&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IRLTLT01DEM156N'.lower(),
    #         'lag_days': 2,
    #     },

    #     # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Germany
    #     # https://fred.stlouisfed.org/series/IR3TIB01DEM156N
    #     'rate_deu_3m_bank': {
    #         'source': 'gsheet',
    #         'link': "https://docs.google.com/spreadsheets/d/e/2PACX-1vTswIuhg3-tLwgP6RWSSPRyyLDpvHNqdlSgSNk91_SkUjKAD9_lyvhI84MAHRHzYdrIho1Narccx_w1/pub?gid=1568788544&single=true&output=csv",
    #         'col_date': 'date',
    #         'col_val': 'IR3TIB01DEM156N'.lower(),
    #         'lag_days': 2,
    #     },

    # },

    "features": {
        "D1": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10","PSAR",
            # "ULTOSC","CHOP","DX14","PHASE","CRSI",
        ],
        # "W1": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        # "D1": [],
        "W1": [],
    },
    "features_val_map": {
        "SMA10": ["val"], 
        "MACD": ["macd", "macdsignal", "macdhist"],
        "ROC2": ["val"],   
        "MOM4": ["val"],    
        "RSI10": ["val"],
        "BB20": ["upper","lower","mid"],
        "CCI20": ["val"],

        "ULTOSC": ["val"],
        "CHOP": ["val"],
        "DX14": ["val"],
        "PHASE": ["val"],
        "CRSI": ["val"],
        "PSAR": ["val"],
    },
}


## Consolidator Settings
consolidator_settings = {
    "D1": {
        "timeframe_minutes": 24 * 60,
        "consolidation_type": "quote",
        "indicators": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10",
            "PSAR",
            # "ULTOSC","CHOP","DX14","PHASE","CRSI",
        ],
        # "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
    "W1": {
        "timeframe_minutes": 7 * 24 * 60,
        "consolidation_type": "quote",
        # "indicators": ["SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20"],
        "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },
}


## Indicators Settings
indicator_settings = {
    "SMA10": {
        "type": "SMA",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    }, 
    "MACD": {
        "type": "MACD",
        "window": 3,
    },
    "ROC2": {
        "type": "ROC",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "MOM4": {
        "type": "MOM",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "RSI10": {
        "type": "RSI",
        "lookback": 10,
        "ma_type": "Simple",
        "field": "Close",
        "window": 3,
    },
    "BB20": {
        "type": "BOLL",
        "lookback": 20,
        "ma_type": "Simple",
        "std": 2,
        "field": "Close",
        "window": 3,
    },
    "CCI20": {
        "type": "CCI",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 

    "ATR10": {
        "type": "ATR",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    },
    "ULTOSC": {
        "type": "ULTOSC",
        "window": 3,
    },
    "CHOP": {
        "type": "CHOP",
        "lookback": 52,
        "window": 3,
    },
    "DX14": {
        "type": "DX",
        "lookback": 14,
        "window": 3,
    },
    "PHASE": {
        "type": "PHASE",
        "lookback": 15,
        "window": 3,
    },
    "CRSI": {
        "type": "CRSI",
        "rsi_len": 15,
        "rsi_field": "Close",
        "rsi_window": 21,
        "window": 3,
    },
    "PSAR": {
        "type": "PSAR",
        "window": 3,
    },


}


signal_settings = {
    "FxLstm": {
        ##-##
        "valid_tickers": ["EURUSD","USDJPY","USDCHF","GBPUSD","AUDUSD","USDCAD"],
        ##-##
        
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
    },
}
from AlgorithmImports import *

import numpy as np
from datetime import datetime, timedelta
from collections import deque
import talib as ta


class CustomSimpleMovingAverage:
    def __init__(self, name, period):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.queue = deque(maxlen=period)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    # Update method is mandatory
    def Update(self, EndTime, Val):
        self.queue.appendleft(Val)
        count = len(self.queue)
        self.Time = EndTime
        self.Value = sum(self.queue) / count
        self.IsReady = count == self.queue.maxlen

    def Undo(self):
        del self.queue[0]
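

## A minimal usage sketch (illustrative only, not used by the algorithm): the rolling mean
## becomes IsReady once `period` values have been pushed through Update.
if __name__ == "__main__":
    sma = CustomSimpleMovingAverage("SMA3", 3)
    for i, px in enumerate([1.10, 1.11, 1.12, 1.13]):
        sma.Update(datetime(2020, 1, 1) + timedelta(days=i), px)
    print(sma)  # IsReady: True, Value ~= mean of the last 3 inputs (~1.12)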


class CustomPhase:
    def __init__(self, name, period):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.period = period
        self.close = deque(maxlen=period)
        self.high = deque(maxlen=period)
        self.low = deque(maxlen=period)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.Time = bar.EndTime

        RealPart = 0.0
        ImagPart = 0.0
        _count = min([len(self.close), len(self.high), len(self.low)])
        for J in range(_count):
            Weight = (
                self.close[J] + self.close[J] + self.high[J] + self.low[J]
            ) * 10000
            if self.period != 0:
                RealPart = RealPart + np.cos(90 * J / self.period) * Weight * 2
                ImagPart = (
                    (ImagPart + np.sin(90 * J / self.period) * Weight)
                    + (ImagPart + np.sin(180 * J / self.period) * Weight)
                ) / 2
        Phase = ((np.arctan(ImagPart / RealPart)) - 0.685) * 100
        self.Value = Phase
        self.IsReady = _count == self.period


class CustomCRSI:
    def __init__(self, name, rsi_len, rsi_field, rsi_window):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.rsi_len = rsi_len
        self.rsi_field = rsi_field
        self.rsi_window = rsi_window
        self.RSI = RelativeStrengthIndex(
            f"{name}-RSI", rsi_len, MovingAverageType.Exponential
        )
        self.RSIval = deque(maxlen=rsi_window)
        self.CRSIval = deque(maxlen=3)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.Time = bar.EndTime
        self.RSI.Update(bar.EndTime, self.get_update_val(bar))
        self.RSIval.appendleft(self.RSI.Current.Value)

        vibration = 10
        torque = 0.618 / (vibration + 1)
        phasingLag = (vibration - 1) / 0.618
        if len(self.RSIval) > int(phasingLag):
            if len(self.CRSIval) > 1:
                crsi1 = self.CRSIval[1]
            else:
                crsi1 = 0
            self.Value = (
                torque * (2 * self.RSIval[0] - self.RSIval[int(phasingLag)])
                + (1 - torque) * crsi1
            )
        else:
            self.Value = 0

        self.CRSIval.appendleft(self.Value)
        self.IsReady = (
            self.RSI.IsReady
            and (len(self.RSIval) == self.rsi_window)
            and (len(self.CRSIval) == 3)
        )

    def get_update_val(self, bar):
        if self.rsi_field == "Close":
            val = bar.Close
        elif self.rsi_field == "High":
            val = bar.High
        elif self.rsi_field == "Low":
            val = bar.Low
        elif self.rsi_field == "Open":
            val = bar.Open
        return val


class CustomPivot:
    def __init__(self, name, period):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.period = period
        self.close = deque(maxlen=period)
        self.high = deque(maxlen=period)
        self.low = deque(maxlen=period)
        self.arr_time = deque(maxlen=period)
        self.p = deque(maxlen=period)
        self.r1 = deque(maxlen=period)
        self.s1 = deque(maxlen=period)
        # self.r2 = deque(maxlen=period)
        # self.s2 = deque(maxlen=period)
        # self.r3 = deque(maxlen=period)
        # self.s3 = deque(maxlen=period)
        # self.r4 = deque(maxlen=period)
        # self.s4 = deque(maxlen=period)
        # self.r5 = deque(maxlen=period)
        # self.s5 = deque(maxlen=period)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.Time = bar.EndTime
        self.arr_time.appendleft(bar.EndTime)
        pivotX_Median = (self.high[0] + self.low[0] + self.close[0]) / 3
        self.Value = pivotX_Median
        self.p.appendleft(pivotX_Median)
        self.r1.appendleft(pivotX_Median * 2 - self.low[0])
        self.s1.appendleft(pivotX_Median * 2 - self.high[0])
        # self.r2.appendleft(pivotX_Median + 1 * (self.high[0] - self.low[0]))
        # self.s2.appendleft(pivotX_Median - 1 * (self.high[0] - self.low[0]))
        # self.r3.appendleft(pivotX_Median * 2 + (self.high[0] - 2 * self.low[0]))
        # self.s3.appendleft(pivotX_Median * 2 - (2 * self.high[0] - self.low[0]))
        # self.r4.appendleft(pivotX_Median * 3 + (self.high[0] - 3 * self.low[0]))
        # self.s4.appendleft(pivotX_Median * 3 - (3 * self.high[0] - self.low[0]))
        # self.r5.appendleft(pivotX_Median * 4 + (self.high[0] - 4 * self.low[0]))
        # self.s5.appendleft(pivotX_Median * 4 - (4 * self.high[0] - self.low[0]))
        self.IsReady = len(self.p) == self.period


class CustomChoppinessIndex:
    def __init__(self, name, period):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.period = period

        self.high = deque(maxlen=period)
        self.low = deque(maxlen=period)
        self.ATR = AverageTrueRange(f"{name}-ATR", 1)
        self.ATRval = deque(maxlen=period)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.Time = bar.EndTime
        self.ATR.Update(bar)
        self.ATRval.appendleft(self.ATR.Current.Value)
        if (max(self.high) - min(self.low)) == 0:
            self.Value = 0
        else:
            self.Value = (
                100
                * np.log10(sum(self.ATRval) / (max(self.high) - min(self.low)))
                / np.log10(self.period)
            )
        self.IsReady = self.ATR.IsReady and (len(self.ATRval) == self.period)


class CustomDX:
    def __init__(self, name, period):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.period = period
        self.window_len = period * 2 + 25
        self.high = deque(maxlen=self.window_len)
        self.low = deque(maxlen=self.window_len)
        self.close = deque(maxlen=self.window_len)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.window_len)

        if self.IsReady:
            # Deques are filled newest-first; reverse to chronological order for TA-Lib.
            ta_out = ta.DX(
                np.array(self.high)[::-1],
                np.array(self.low)[::-1],
                np.array(self.close)[::-1],
                timeperiod=self.period,
            )
            self.Value = ta_out[-1]
        else:
            self.Value = 0


class CustomMACD:
    def __init__(self, name):
        self.Name = name
        self.Time = datetime.min
        self.IsReady = False
        self.window_len = 100
        self.close = deque(maxlen=self.window_len)
        self.macd = 0
        self.macdsignal = 0
        self.macdhist = 0

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. MACD: {3}".format(
            self.Name, self.IsReady, self.Time, self.macd
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.window_len)

        if self.IsReady:
            # Deque is filled newest-first; reverse to chronological order for TA-Lib.
            macd, macdsignal, macdhist = ta.MACD(
                np.array(self.close)[::-1], fastperiod=12, slowperiod=26, signalperiod=9
            )
            self.macd = macd[-1]
            self.macdsignal = macdsignal[-1]
            self.macdhist = macdhist[-1]
        else:
            self.macd = 0
            self.macdsignal = 0
            self.macdhist = 0


class CustomULTOSC:
    def __init__(self, name):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.window_len = 100
        self.high = deque(maxlen=self.window_len)
        self.low = deque(maxlen=self.window_len)
        self.close = deque(maxlen=self.window_len)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.window_len)

        if self.IsReady:
            # Deques are filled newest-first; reverse to chronological order for TA-Lib.
            ta_out = ta.ULTOSC(
                np.array(self.high)[::-1],
                np.array(self.low)[::-1],
                np.array(self.close)[::-1],
                timeperiod1=7,
                timeperiod2=14,
                timeperiod3=28,
            )
            self.Value = ta_out[-1]
        else:
            self.Value = 0
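

## A minimal offline sketch (illustrative only) of how the TA-Lib-backed indicators above are
## driven: bars are buffered in deques and the TA-Lib call only runs once `window_len` bars have
## accumulated. The Bar namedtuple is a stand-in (assumption) for the consolidated bar objects
## the algorithm would normally pass in.
if __name__ == "__main__":
    from collections import namedtuple

    Bar = namedtuple("Bar", ["Open", "High", "Low", "Close", "EndTime"])
    ultosc = CustomULTOSC("D1-ULTOSC")
    start = datetime(2020, 1, 1)
    for i in range(120):
        px = 1.10 + 0.001 * np.sin(i / 5)
        bar = Bar(Open=px, High=px + 0.0005, Low=px - 0.0005, Close=px, EndTime=start + timedelta(days=i))
        ultosc.Update(bar)
    print(ultosc.IsReady, ultosc.Value)  # becomes ready only after the 100-bar buffer fills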
## Version
# Forex LSTM V 1.0

from AlgorithmImports import *
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from collections import deque
import pickle

from config import (
    general_setting,
    consolidator_settings,
    indicator_settings,
    signal_settings,
)

from data_classes import (
    SymbolData,
    MarketHours,
)

from signal_classes import (
    FxLstmSignal,
)

signal_mapping = {
    "FxLstm_Both_EURUSD": FxLstmSignal,
    "FxLstm_Both_EURUSD_Trail": FxLstmSignal,

    "FxLstm_Hybrid_EURUSD": FxLstmSignal,
    "FxLstm_Hybrid_EURUSD_Trail": FxLstmSignal, 

    "FxLstm_Both_USDJPY": FxLstmSignal,
    "FxLstm_Both_USDJPY_Trail": FxLstmSignal, 

    "FxLstm_Hybrid_GBPUSD": FxLstmSignal,
    "FxLstm_Hybrid_GBPUSD_Trail": FxLstmSignal,

    "FxLstm_Both_AUDUSD": FxLstmSignal,
    "FxLstm_Both_AUDUSD_Trail": FxLstmSignal,
    
    "FxLstm_Hybrid_AUDUSD": FxLstmSignal,
    "FxLstm_Hybrid_AUDUSD_Trail": FxLstmSignal,
}
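
## Note: every signal name above maps to the same FxLstmSignal class; what differentiates the
## signals is their per-name entry in signal_settings (for example the 'lstm_ticker', 'pred_type'
## and trailing-stop options), looked up by name when the signal objects are created in Initialize.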




from QuantConnect.DataSource import *


class FxLstmAlgo(QCAlgorithm):

    def Initialize(self):


        ## For Optimization

        # _signal = ["FxLstm_Both_Long","FxLstm_Both_Short","FxLstm_Hybrid_Long","FxLstm_Hybrid_Short"][3]
        # _signal = ["FxLstm_Both","FxLstm_Hybrid"][0]

        # general_setting["signals"] = [_signal]
        # StopMultiplier = [0.1, 0.2, 0.25, 0.5, 0.75, 1.0][int(self.GetParameter('multiplier'))]
        # signal_settings[_signal]["longStopMultiplier"] = StopMultiplier
        # signal_settings[_signal]["shortStopMultiplier"] = StopMultiplier


        # sma_filter_lookback = [10, 20, 50, 100, 200][int(self.GetParameter('multiplier'))]
        # for _signal in ["FxLstm_Both_Long","FxLstm_Both_Short","FxLstm_Hybrid_Long","FxLstm_Hybrid_Short"]:
        #     signal_settings[_signal]["sma_filter_lookback"] = sma_filter_lookback




        # _signal = [
        #     "FxLstm_Both_EURUSD",
        #     "FxLstm_Hybrid_EURUSD",
        #     "FxLstm_Both_USDJPY",
        #     "FxLstm_Hybrid_GBPUSD",
        #     "FxLstm_Both_AUDUSD",
        #     "FxLstm_Hybrid_AUDUSD"
        # ][5]

        # # if int(self.GetParameter('multiplier')) == 0:
        # #     signal_settings[_signal]["atrLength"] = 10

        # # if int(self.GetParameter('multiplier')) == 1:
        # #     signal_settings[_signal]["atrLength"] = 14

        # # if int(self.GetParameter('multiplier')) == 2:
        # #     signal_settings[_signal]["atrLength"] = 21


        # # StopMultiplier = [0.1, 0.2, 0.25, 0.5, 0.75, 1.0][int(self.GetParameter('multiplier'))]
        # # signal_settings[_signal]["longStopMultiplier"] = StopMultiplier
        # # signal_settings[_signal]["shortStopMultiplier"] = StopMultiplier


        # # RiskRewardMultiplier = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0][int(self.GetParameter('multiplier'))]
        # # signal_settings[_signal]["longRiskRewardMultiplier"] = RiskRewardMultiplier
        # # signal_settings[_signal]["shortRiskRewardMultiplier"] = RiskRewardMultiplier

        # trailStopSize = [0.1, 0.2, 0.5, 0.75, 1.0, 1.25, 1.5, 2.0, 2.5, 3.0, 4.0, 5.0][int(self.GetParameter('multiplier'))]
        # signal_settings[_signal]["useTralingStop"] = True
        # signal_settings[_signal]["trailStopSize"] = trailStopSize




## Full Baseline
# SR: 1.122, PSR: 58.145%
# SR: 1.172, PSR: 63.188%
# SR: 1.205, PSR: 66.028%
# SR: 1.215, PSR: 64.886%
# SR: 1.270, PSR: 69.364%
# SR: 1.320, PSR: 73.606%

# SR: 1.305, PSR: 71.873%
# SR: 1.317, PSR: 72.794%
# SR: 1.343, PSR: 73.131%




        ##

        self.SetTimeZone(TimeZones.Johannesburg)

        ##-##
        self.SetStartDate(2018, 1, 1) 
        ##-##

        self.SetCash(10000)

        # Warm up with roughly 12 months of minute bars: 12 months x 20 trading days x 24 hours x 60 minutes.
        self.SetWarmUp(int(12 * 20 * 24 * 60), Resolution.Minute)
        self.SetBrokerageModel(BrokerageName.OandaBrokerage, AccountType.Margin)
        self.to_plot = False

        self.general_setting = general_setting
        self.consolidator_settings = consolidator_settings
        self.indicator_settings = indicator_settings
        self.signal_settings = signal_settings
        self.ref_ticker = "EURUSD"

        ##-##
        self.prediction_dict_full_TEMP = {}
        for lstm_ticker in self.general_setting["lstm_tickers"]:
            self.prediction_dict_full_TEMP[lstm_ticker] = pickle.loads(bytes(self.ObjectStore.ReadBytes(f"PREDICTIONS_{lstm_ticker}"))) 
        ##-##
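
        ## Assumed layout of the pickled predictions (as consumed in OnData below):
        ##   prediction_dict_full_TEMP[lstm_ticker][pred_type][timestamp] -> predicted class index,
        ## which is then mapped to a trade direction via prediction_direction_map_dict.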

        self.prediction_dict = {}
        for _signal in self.general_setting["signals"]:
            self.prediction_dict[_signal] = {}
            for ticker in self.general_setting["lstm_tickers"]:
                if ticker in self.signal_settings[_signal]['valid_tickers']:
                    self.prediction_dict[_signal][ticker] = 1
                    

        # Data Initialization
        self.Data = {}
        self.Signal = {}
        self.Counter = {}
        self.SymbolMarketHours = {}
        self.symbol_ticker_map = {}     
        self.ticker_symbol_map = {}
        self.data_list = []  
        self.output_data_dict = {}

        for ticker in self.general_setting["tickers"]:
            if general_setting["tickers"][ticker]["type"] == "equity":
                symbol = self.AddEquity(
                    ticker,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.Raw,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "forex":
                symbol = self.AddForex(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "cfd":
                symbol = self.AddCfd(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol

            self.Data[symbol] = SymbolData(
                self,
                symbol,
                ticker,
                general_setting,
                consolidator_settings,
                indicator_settings,
            )

            self.Counter[symbol] = {}
            self.Counter[symbol]["counter"] = 0
            self.Counter[symbol]["last_order_counter"] = 0
            self.SymbolMarketHours[symbol] = MarketHours(self, symbol)
            self.symbol_ticker_map[symbol] = ticker
            self.ticker_symbol_map[ticker] = symbol

            self.Signal[symbol] = {}
            for _signal in self.general_setting["signals"]:
                self.Signal[symbol][_signal] = signal_mapping[_signal](
                    self, symbol, ticker, self.general_setting, self.signal_settings[_signal]
                )


        self.Schedule.On(
            self.DateRules.MonthEnd(self.ref_ticker),
            self.TimeRules.BeforeMarketClose(self.ref_ticker, 0),
            self.SaveData,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker),
            self.SOD,
        )

        ##-##
        # self.external_data = {}
        # for _dn in self.general_setting["external_data"]:
        #     self.external_data[_dn] = {}
        #     self.external_data[_dn]['time'] = None
        #     self.external_data[_dn]['value'] = None
        #     source = self.general_setting["external_data"][_dn]['source']

        #     if source == 'gsheet':
        #         link = self.general_setting["external_data"][_dn]['link']
        #         col_date = self.general_setting["external_data"][_dn]['col_date']
        #         col_val = self.general_setting["external_data"][_dn]['col_val']
        #         data = self.Download(link)
        #         rows = []
        #         for row in data.split('\n'):
        #             rows.append(row.replace('\r','').lower().split(','))
        #         data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
        #         data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
        #         data_df[col_val] = data_df[col_val].astype(float)
        #         self.external_data[_dn]['data'] = data_df.copy()

        #     if source == 'NasdaqDataLink':
        #         ref = self.general_setting["external_data"][_dn]['ref']
        #         self.external_data[_dn]['symbol'] = self.AddData(NasdaqDataLink, ref, Resolution.Daily).Symbol

        #     if source == 'equity':
        #         ticker = self.general_setting["external_data"][_dn]['ticker']
        #         self.external_data[_dn]['symbol'] = self.AddEquity(ticker, Resolution.Daily).Symbol

        #     if source == 'USTreasuryYieldCurveRate':
        #         ref = self.general_setting["external_data"][_dn]['ref']
        #         self.external_data[_dn]['symbol'] = self.AddData(USTreasuryYieldCurveRate, ref).Symbol
        ##-##


    def SOD(self):

        ##-##
        pass
        # for _dn in self.general_setting["external_data"]:
        #     source = self.general_setting["external_data"][_dn]['source']
        #     if source == 'gsheet':
        #         col_date = self.general_setting["external_data"][_dn]['col_date']
        #         col_val = self.general_setting["external_data"][_dn]['col_val']
        #         lag_days = self.general_setting["external_data"][_dn]['lag_days']

        #         data = self.external_data[_dn]['data'][self.external_data[_dn]['data'][col_date] < (self.Time - timedelta(days=lag_days))]
        #         if len(data) > 0:
        #             self.external_data[_dn]['time'] = data[col_date].values[-1]
        #             self.external_data[_dn]['value'] = data[col_val].values[-1]

        #     if source == 'USTreasuryYieldCurveRate':
        #         col_date = self.general_setting["external_data"][_dn]['col_date']
        #         col_val = self.general_setting["external_data"][_dn]['col_val']
        #         symbol = self.external_data[_dn]['symbol']
        #         history = self.History(USTreasuryYieldCurveRate, symbol, 1, Resolution.Daily)
        #         history = history.reset_index()
        #         if len(history) > 0:
        #             if col_val in history.columns:
        #                 self.external_data[_dn]['time'] = pd.to_datetime(history[col_date], utc=True).iloc[0].replace(tzinfo=None)
        #                 self.external_data[_dn]['value'] = history[col_val].values[0]
        ##-##



    def SaveData(self):
        ##-##
        pass
        # self.output_data_dict["data"] = self.data_list
        # self.output_data_dict["version"] = "2"
        # # self.ObjectStore.SaveBytes("FUNDAMENTAL_DATA", pickle.dumps(self.output_data_dict))
        # self.ObjectStore.SaveBytes("TECHNICAL_DATA", pickle.dumps(self.output_data_dict))
        ##-##



    def OnData(self, data):

        ##-##
        # for _dn in self.general_setting["external_data"]:
        #     source = self.general_setting["external_data"][_dn]['source']

        #     if source == 'NasdaqDataLink':
        #         symbol = self.external_data[_dn]['symbol']
        #         if data.ContainsKey(symbol):
        #             self.external_data[_dn]['value'] = data[symbol].Value

        #     if source == 'equity':
        #         symbol = self.external_data[_dn]['symbol']
        #         if data.ContainsKey(symbol):
        #             if data[symbol] is not None:
        #                 self.external_data[_dn]['time'] = data[symbol].Time
        #                 self.external_data[_dn]['value'] = data[symbol].Price

        ##-##


        FxLstm_SymbolQuantity = {}
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            ticker = self.symbol_ticker_map[symbol]

            # Predictions are refreshed once per day, on the first minute of the configured FxLstm_prediction_hour.
            is_valid_time = self.Time.minute == 0
            is_valid_time = is_valid_time and (self.Time.hour in [self.general_setting['FxLstm_prediction_hour']])

            if is_valid_time:
                if ticker in self.general_setting["lstm_tickers"]:

                    ##-##
                    # symbol = self.ticker_symbol_map[ticker]
                    # symbolData = self.Data[symbol]

                    # if not (
                    #     data.ContainsKey(symbol)
                    #     and data[symbol] is not None
                    #     and symbolData.IsReady
                    # ):
                    #     continue

                    # data_dict = {}
                    # data_dict["datetime"] = self.Time
                    # data_dict["ticker"] = ticker        
                    # data_dict["symbol"] = symbol.Value
                    # data_dict["isWarmingUp"] = self.IsWarmingUp
                    # data_dict["symbolTime"] = str(data[symbol].Time)
                    # data_dict["price"] = np.round(data[symbol].Price, 6)

                    # # Daily Data
                    # _consolidator = symbolData.consolidators["D1"]
                    # data_dict["close_D1"] = _consolidator.close[0]

                    # # External Data
                    # for _dn in self.general_setting["external_data"]:
                    #     data_dict[f"{_dn}_time"] = self.external_data[_dn]['time']       
                    #     data_dict[_dn] = self.external_data[_dn]['value']

                    # # Technical Features
                    # for _tf in self.general_setting["features"]:
                    #     _consolidator = symbolData.consolidators[_tf]
                    #     for _in in self.general_setting["features"][_tf]:
                    #         _indicator = _consolidator.indicators[_in]

                    #         if _in in self.general_setting["features_val_map"]:
                    #             for _v in self.general_setting["features_val_map"][
                    #                 _in
                    #             ]:
                    #                 data_dict[f"{_tf}-{_in}-{_v}"] = np.round(
                    #                     _indicator[_v][0], 5
                    #                 )

                
                    # # if not self.IsWarmingUp:    
                    # #     self.data_list += [data_dict]
                    # self.data_list += [data_dict]
                    ##-##

                    for _signal in self.general_setting["signals"]:
                        if ticker in self.signal_settings[_signal]['valid_tickers']:
                            pred_type = self.signal_settings[_signal]['pred_type']
                            lstm_ticker = self.signal_settings[_signal]['lstm_ticker']                          
                            if self.Time in self.prediction_dict_full_TEMP[lstm_ticker][pred_type]:
                                self.prediction_dict[_signal][ticker] = self.prediction_dict_full_TEMP[lstm_ticker][pred_type][self.Time]
                
                            self.Signal[symbol][_signal].update_prediction_direction(self.prediction_dict[_signal][ticker])


            symbolQuantity = 0
            for _signal in self.general_setting["signals"]:
                if ticker in self.signal_settings[_signal]['valid_tickers']:

                    to_exit = self.Signal[symbol][_signal].check_exit(symbolData, data[symbol].Price, data[symbol].Time)
                    if to_exit:
                        self.Signal[symbol][_signal].update_exit()

                    has_enter = self.Signal[symbol][_signal].enter(symbolData, data[symbol].Price, data[symbol].Time)

                    quantity = self.Signal[symbol][_signal].quantity * self.Signal[symbol][_signal].allocation_multiplier
                    quantity = int(np.ceil(quantity))
                    symbolQuantity += quantity

            FxLstm_SymbolQuantity[symbol] = symbolQuantity


        ## Aggregate symbol quantities across strategies
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            self.Counter[symbol]["counter"] += 1

            symbolQuantity = 0
            if symbol in FxLstm_SymbolQuantity:
                symbolQuantity += FxLstm_SymbolQuantity[symbol]

            if not self.IsWarmingUp:
                # In case orders take longer than one bar to fill, only send a market order every `order_counter_diff` bars.
                if (self.Counter[symbol]["counter"] - self.Counter[symbol]["last_order_counter"]) >= self.general_setting["order_counter_diff"]:
                    if (symbolQuantity - self.Portfolio[symbol].Quantity) != 0:
                        self.MarketOrder(symbol, symbolQuantity - self.Portfolio[symbol].Quantity)
                        self.Counter[symbol]["last_order_counter"] = self.Counter[symbol]["counter"]



        
## Version
# Forex LSTM V 1.0

from AlgorithmImports import *
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from collections import deque
import pickle

from config import (
    general_setting,
    consolidator_settings,
    indicator_settings,
    signal_settings,
)

from data_classes import (
    SymbolData,
    MarketHours,
)

from signal_classes import (
    FxLstmSignal,
)

signal_mapping = {
    "FxLstm_Both": FxLstmSignal,
    "FxLstm_Hybrid": FxLstmSignal,
    "FxLstm_Both_Long": FxLstmSignal,
    "FxLstm_Both_Short": FxLstmSignal,
    "FxLstm_Hybrid_Long": FxLstmSignal,
    "FxLstm_Hybrid_Short": FxLstmSignal,
}




from QuantConnect.DataSource import *


class FxLstmAlgo(QCAlgorithm):

    def Initialize(self):


        ## For Optimization

        # _signal = ["FxLstm_Both_Long","FxLstm_Both_Short","FxLstm_Hybrid_Long","FxLstm_Hybrid_Short"][3]
        # _signal = ["FxLstm_Both","FxLstm_Hybrid"][0]

        # general_setting["signals"] = [_signal]
        # StopMultiplier = [0.1, 0.2, 0.25, 0.5, 0.75, 1.0][int(self.GetParameter('multiplier'))]
        # signal_settings[_signal]["longStopMultiplier"] = StopMultiplier
        # signal_settings[_signal]["shortStopMultiplier"] = StopMultiplier


        # sma_filter_lookback = [10, 20, 50, 100, 200][int(self.GetParameter('multiplier'))]
        # for _signal in ["FxLstm_Both_Long","FxLstm_Both_Short","FxLstm_Hybrid_Long","FxLstm_Hybrid_Short"]:
        #     signal_settings[_signal]["sma_filter_lookback"] = sma_filter_lookback


        # _signal = ["FxLstm_Both","FxLstm_Hybrid"][1]

        # if int(self.GetParameter('multiplier')) == 0:
        #     signal_settings[f"{_signal}"]["longRiskRewardMultiplier"] = 2
        #     signal_settings[f"{_signal}"]["shortRiskRewardMultiplier"] = 3

        # if int(self.GetParameter('multiplier')) == 1:
        #     signal_settings[f"{_signal}"]["longRiskRewardMultiplier"] = 3
        #     signal_settings[f"{_signal}"]["shortRiskRewardMultiplier"] = 2

        # if int(self.GetParameter('multiplier')) == 2:
        #     signal_settings[f"{_signal}"]["longRiskRewardMultiplier"] = 2
        #     signal_settings[f"{_signal}"]["shortRiskRewardMultiplier"] = 2

        # if int(self.GetParameter('multiplier')) == 3:
        #     signal_settings[f"{_signal}"]["longRiskRewardMultiplier"] = 3
        #     signal_settings[f"{_signal}"]["shortRiskRewardMultiplier"] = 4

        # if int(self.GetParameter('multiplier')) == 4:
        #     signal_settings[f"{_signal}"]["longRiskRewardMultiplier"] = 4
        #     signal_settings[f"{_signal}"]["shortRiskRewardMultiplier"] = 3

        # if int(self.GetParameter('multiplier')) == 5:
        #     signal_settings[f"{_signal}"]["longRiskRewardMultiplier"] = 4
        #     signal_settings[f"{_signal}"]["shortRiskRewardMultiplier"] = 4



## 1.053


# FxLstm_Both_Long
# 0.5

# FxLstm_Both_Short
# 0.2

# FxLstm_Hybrid_Long
# 1.0

# FxLstm_Hybrid_Short
# 0.5


## Baseline: 0.542
## SMA 10 Long: 0.537
## SMA 20 Long: 0.537
## SMA 50 Long: 0.542

## SMA 10 / 50: 0.742
## SMA 10 / 100: 0.742
## SMA 10 / 200: 0.742
## SMA 20 / 50:


# FxLstm_Both:      SR: 0.512
# FxLstm_Hybrid:    SR: 0.22
## Both:             SR: 0.421

# SMA5: 0.457
# SMA20: 0.561
# SMA50: 0.572
# SMA100: 0.952
# SMA200: 0.852

# SMA 5/10: 0.421
# SMA 5/100: 0.474
# SMA 5/200: 0.76

# SMA 10/20: 0.311
# SMA 10/50: 0.519
# SMA 10/100: 0.431
# SMA 10/200: 0.532
# SMA 20/50: 0.646
# SMA 20/100: 0.459

        # RiskRewardMultiplier = [0.5, 0.75, 1.0, 1.2, 1.25, 1.5, 1.75, 2.0, 2.5, 3.0, 4.0, 5.0, 6.0, 7.0, 10.0][int(self.GetParameter('multiplier'))]








        # signal_settings["FxLstm"]["longRiskRewardMultiplier"] = RiskRewardMultiplier
        # signal_settings["FxLstm"]["shortRiskRewardMultiplier"] = RiskRewardMultiplier

        # trailStopSize = [0.1, 0.2, 0.25, 0.5, 0.75, 1.0, 1.2, 1.25, 1.5, 1.75, 2.0, 2.5, 3.0, 4.0, 5.0][int(self.GetParameter('multiplier'))]
        # signal_settings["FxLstm"]["trailStopSize"] = trailStopSize

        ##

        self.SetTimeZone(TimeZones.Johannesburg)

        ##-##
        self.SetStartDate(2018, 1, 1) 
        ##-##

        self.SetCash(10000)

        ##-## 
        self.SetWarmUp(int(12 * 20 * 24 * 60), Resolution.Minute)
        # self.SetWarmUp(int(1 * 20 * 24 * 60), Resolution.Minute)
        ##-## 

        self.SetBrokerageModel(BrokerageName.OandaBrokerage, AccountType.Margin)
        self.to_plot = False

        self.general_setting = general_setting
        self.consolidator_settings = consolidator_settings
        self.indicator_settings = indicator_settings
        self.signal_settings = signal_settings
        self.ref_ticker = "EURUSD"


        ##-##
        self.prediction_dict_full_TEMP = pickle.loads(bytes(self.ObjectStore.ReadBytes("PREDICTIONS")))
        ##-##

        self.prediction_dict = {}
        for _signal in self.general_setting["signals"]:
            self.prediction_dict[_signal] = {}
            for ticker in self.general_setting["lstm_tickers"]:
                if ticker in self.signal_settings[_signal]['valid_tickers']:
                    self.prediction_dict[_signal][ticker] = 1
                    
        self.threshold_dict = {}
        for ticker in self.general_setting["lstm_tickers"]:
            self.threshold_dict[ticker] = 0          


        # Data Initialization
        self.Data = {}
        self.Signal = {}
        self.Counter = {}
        self.SymbolMarketHours = {}
        self.symbol_ticker_map = {}     
        self.ticker_symbol_map = {}
        self.data_list = []  
        self.output_data_dict = {}

        for ticker in self.general_setting["tickers"]:
            if general_setting["tickers"][ticker]["type"] == "equity":
                symbol = self.AddEquity(
                    ticker,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.Raw,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "forex":
                symbol = self.AddForex(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "cfd":
                symbol = self.AddCfd(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol

            self.Data[symbol] = SymbolData(
                self,
                symbol,
                ticker,
                general_setting,
                consolidator_settings,
                indicator_settings,
            )

            self.Counter[symbol] = {}
            self.Counter[symbol]["counter"] = 0
            self.Counter[symbol]["last_order_counter"] = 0
            self.SymbolMarketHours[symbol] = MarketHours(self, symbol)
            self.symbol_ticker_map[symbol] = ticker
            self.ticker_symbol_map[ticker] = symbol

            self.Signal[symbol] = {}
            for _signal in self.general_setting["signals"]:
                self.Signal[symbol][_signal] = signal_mapping[_signal](
                    self, symbol, ticker, self.general_setting, self.signal_settings[_signal]
                )


        self.Schedule.On(
            self.DateRules.MonthEnd(self.ref_ticker),
            self.TimeRules.BeforeMarketClose(self.ref_ticker, 0),
            self.SaveData,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker),
            self.SOD,
        )

        ##-##
        # self.external_data = {}
        # for _dn in self.general_setting["external_data"]:
        #     self.external_data[_dn] = {}
        #     self.external_data[_dn]['time'] = None
        #     self.external_data[_dn]['value'] = None
        #     source = self.general_setting["external_data"][_dn]['source']

        #     if source == 'gsheet':
        #         link = self.general_setting["external_data"][_dn]['link']
        #         col_date = self.general_setting["external_data"][_dn]['col_date']
        #         col_val = self.general_setting["external_data"][_dn]['col_val']
        #         data = self.Download(link)
        #         rows = []
        #         for row in data.split('\n'):
        #             rows.append(row.replace('\r','').lower().split(','))
        #         data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
        #         data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
        #         data_df[col_val] = data_df[col_val].astype(float)
        #         self.external_data[_dn]['data'] = data_df.copy()

        #     if source == 'NasdaqDataLink':
        #         ref = self.general_setting["external_data"][_dn]['ref']
        #         self.external_data[_dn]['symbol'] = self.AddData(NasdaqDataLink, ref, Resolution.Daily).Symbol

        #     if source == 'equity':
        #         ticker = self.general_setting["external_data"][_dn]['ticker']
        #         self.external_data[_dn]['symbol'] = self.AddEquity(ticker, Resolution.Daily).Symbol

        #     if source == 'USTreasuryYieldCurveRate':
        #         ref = self.general_setting["external_data"][_dn]['ref']
        #         self.external_data[_dn]['symbol'] = self.AddData(USTreasuryYieldCurveRate, ref).Symbol
        ##-##


    def SOD(self):

        ##-##
        pass
        # for _dn in self.general_setting["external_data"]:
        #     source = self.general_setting["external_data"][_dn]['source']
        #     if source == 'gsheet':
        #         col_date = self.general_setting["external_data"][_dn]['col_date']
        #         col_val = self.general_setting["external_data"][_dn]['col_val']
        #         lag_days = self.general_setting["external_data"][_dn]['lag_days']

        #         data = self.external_data[_dn]['data'][self.external_data[_dn]['data'][col_date] < (self.Time - timedelta(days=lag_days))]
        #         if len(data) > 0:
        #             self.external_data[_dn]['time'] = data[col_date].values[-1]
        #             self.external_data[_dn]['value'] = data[col_val].values[-1]

        #     if source == 'USTreasuryYieldCurveRate':
        #         col_date = self.general_setting["external_data"][_dn]['col_date']
        #         col_val = self.general_setting["external_data"][_dn]['col_val']
        #         symbol = self.external_data[_dn]['symbol']
        #         history = self.History(USTreasuryYieldCurveRate, symbol, 1, Resolution.Daily)
        #         history = history.reset_index()
        #         if len(history) > 0:
        #             if col_val in history.columns:
        #                 self.external_data[_dn]['time'] = pd.to_datetime(history[col_date], utc=True).iloc[0].replace(tzinfo=None)
        #                 self.external_data[_dn]['value'] = history[col_val].values[0]
        ##-##



    def SaveData(self):
        ##-##
        pass
        # self.output_data_dict["data"] = self.data_list
        # self.output_data_dict["version"] = "2"
        # # self.ObjectStore.SaveBytes("FUNDAMENTAL_DATA", pickle.dumps(self.output_data_dict))
        # self.ObjectStore.SaveBytes("TECHNICAL_DATA", pickle.dumps(self.output_data_dict))
        ##-##



    def OnData(self, data):

        ##-##
        # for _dn in self.general_setting["external_data"]:
        #     source = self.general_setting["external_data"][_dn]['source']

        #     if source == 'NasdaqDataLink':
        #         symbol = self.external_data[_dn]['symbol']
        #         if data.ContainsKey(symbol):
        #             self.external_data[_dn]['value'] = data[symbol].Value

        #     if source == 'equity':
        #         symbol = self.external_data[_dn]['symbol']
        #         if data.ContainsKey(symbol):
        #             if data[symbol] is not None:
        #                 self.external_data[_dn]['time'] = data[symbol].Time
        #                 self.external_data[_dn]['value'] = data[symbol].Price

        ##-##


        FxLstm_SymbolQuantity = {}
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            ticker = self.symbol_ticker_map[symbol]

            is_valid_time = self.Time.minute == 0
            is_valid_time = is_valid_time and (self.Time.hour in [self.general_setting['FxLstm_prediction_hour']])

            if is_valid_time:
                if ticker in self.general_setting["lstm_tickers"]:

                    ##-##
                    # symbol = self.ticker_symbol_map[ticker]
                    # symbolData = self.Data[symbol]

                    # if not (
                    #     data.ContainsKey(symbol)
                    #     and data[symbol] is not None
                    #     and symbolData.IsReady
                    # ):
                    #     continue

                    # data_dict = {}
                    # data_dict["datetime"] = self.Time
                    # data_dict["ticker"] = ticker        
                    # data_dict["symbol"] = symbol.Value
                    # data_dict["isWarmingUp"] = self.IsWarmingUp
                    # data_dict["symbolTime"] = str(data[symbol].Time)
                    # data_dict["price"] = np.round(data[symbol].Price, 6)

                    # # Daily Data
                    # _consolidator = symbolData.consolidators["D1"]
                    # data_dict["close_D1"] = _consolidator.close[0]

                    # # External Data
                    # for _dn in self.general_setting["external_data"]:
                    #     data_dict[f"{_dn}_time"] = self.external_data[_dn]['time']       
                    #     data_dict[_dn] = self.external_data[_dn]['value']

                    # # Technical Features
                    # for _tf in self.general_setting["features"]:
                    #     _consolidator = symbolData.consolidators[_tf]
                    #     for _in in self.general_setting["features"][_tf]:
                    #         _indicator = _consolidator.indicators[_in]

                    #         if _in in self.general_setting["features_val_map"]:
                    #             for _v in self.general_setting["features_val_map"][
                    #                 _in
                    #             ]:
                    #                 data_dict[f"{_tf}-{_in}-{_v}"] = np.round(
                    #                     _indicator[_v][0], 5
                    #                 )

                
                    # # if not self.IsWarmingUp:    
                    # #     self.data_list += [data_dict]
                    # self.data_list += [data_dict]
                    ##-##
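                    # In this variant, per-timestamp predictions and thresholds are read from
                    # the precomputed prediction_dict_full_TEMP lookups rather than produced
                    # by a live model.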

                    if self.Time in self.prediction_dict_full_TEMP['threshold']:
                        self.threshold_dict[ticker] = self.prediction_dict_full_TEMP['threshold'][self.Time]

                    for _signal in self.general_setting["signals"]:
                        if ticker in self.signal_settings[_signal]['valid_tickers']:
                            pred_type = self.signal_settings[_signal]['pred_type']
                            if self.Time in self.prediction_dict_full_TEMP[pred_type]:
                                self.prediction_dict[_signal][ticker] = self.prediction_dict_full_TEMP[pred_type][self.Time]
                
                            self.Signal[symbol][_signal].update_prediction_direction(self.prediction_dict[_signal][ticker])
                            self.Signal[symbol][_signal].update_prediction_threshold(self.threshold_dict[ticker])



            symbolQuantity = 0
            for _signal in self.general_setting["signals"]:
                if ticker in self.signal_settings[_signal]['valid_tickers']:

                    to_exit = self.Signal[symbol][_signal].check_exit(symbolData, data[symbol].Price, data[symbol].Time)
                    if to_exit:
                        self.Signal[symbol][_signal].update_exit()

                    has_enter = self.Signal[symbol][_signal].enter(symbolData, data[symbol].Price, data[symbol].Time)

                    quantity = self.Signal[symbol][_signal].quantity * self.Signal[symbol][_signal].allocation_multiplier
                    quantity = int(np.ceil(quantity))
                    symbolQuantity += quantity

            FxLstm_SymbolQuantity[symbol] = symbolQuantity


        ## Aggregate symbol quantities across strategies
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            self.Counter[symbol]["counter"] += 1

            symbolQuantity = 0
            if symbol in FxLstm_SymbolQuantity:
                symbolQuantity += FxLstm_SymbolQuantity[symbol]

            if not self.IsWarmingUp:
                # Orders may take longer than one bar to fill, so only send a new market order every order_counter_diff bars (3 minutes at minute resolution)
                if (self.Counter[symbol]["counter"] - self.Counter[symbol]["last_order_counter"]) >= self.general_setting["order_counter_diff"]:
                    if (symbolQuantity - self.Portfolio[symbol].Quantity) != 0:
                        self.MarketOrder(symbol, symbolQuantity - self.Portfolio[symbol].Quantity)
                        self.Counter[symbol]["last_order_counter"] = self.Counter[symbol]["counter"]



        
## Version
# Forex LSTM V 1.0

##-##
IS_LIVE = True

# TO_SAVE_DATA = False
if IS_LIVE:
    TO_SAVE_DATA = False
else:
    TO_SAVE_DATA = True
##-##


from AlgorithmImports import *
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from collections import deque
import pickle
import torch
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(f"device: {device}")

from config import (
    general_setting,
    consolidator_settings,
    indicator_settings,
    signal_settings,
    model_settings,
)

from data_classes import (
    SymbolData,
    MarketHours,
)

from signal_classes import (
    FxLstmSignal,
)

from model_functions import (
    get_threshold,
    set_seed,
)

from model_classes_both import get_torch_rnn_dataloaders as get_torch_rnn_dataloaders_both
from model_classes_both import get_rnn_model as get_rnn_model_both
from model_classes_both import get_predictions as get_predictions_both

from model_classes_hybrid import get_torch_rnn_dataloaders as get_torch_rnn_dataloaders_hybrid
from model_classes_hybrid import get_rnn_model as get_rnn_model_hybrid
from model_classes_hybrid import get_predictions as get_predictions_hybrid
from model_classes_hybrid import get_regression_pred_decision, get_prediction_hybrid_regression
from sklearn.metrics import mean_squared_error


signal_mapping = {
    "FxLstm_Both_EURUSD": FxLstmSignal,
    "FxLstm_Both_EURUSD_Trail": FxLstmSignal,

    "FxLstm_Hybrid_EURUSD": FxLstmSignal,
    "FxLstm_Hybrid_EURUSD_Trail": FxLstmSignal, 

    "FxLstm_Both_USDJPY": FxLstmSignal,
    "FxLstm_Both_USDJPY_Trail": FxLstmSignal, 

    "FxLstm_Hybrid_GBPUSD": FxLstmSignal,
    "FxLstm_Hybrid_GBPUSD_Trail": FxLstmSignal,

    "FxLstm_Both_AUDUSD": FxLstmSignal,
    "FxLstm_Both_AUDUSD_Trail": FxLstmSignal,
    
    "FxLstm_Hybrid_AUDUSD": FxLstmSignal,
    "FxLstm_Hybrid_AUDUSD_Trail": FxLstmSignal,
}
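# Every configured signal name resolves to the same FxLstmSignal class; behaviour differs
# only through the per-signal parameters in signal_settings.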


from QuantConnect.DataSource import *


class FxLstmAlgo(QCAlgorithm):

    def Initialize(self):

        self.SetTimeZone(TimeZones.Johannesburg)

        if TO_SAVE_DATA:
            self.SetStartDate(2013, 1, 1)       ## SR: 0.574, 0.811, 0.714

            # self.SetEndDate(2017, 12, 31)
            # self.SetEndDate(2018, 12, 31)
            # self.SetEndDate(2020, 12, 31)
            # self.SetEndDate(2021, 12, 31)
        else:
            # self.SetStartDate(2018, 1, 1)         ## SR: 0.799, 1.094, 1.16, 1.054, 1.078, 1.097, 1.087
            # self.SetStartDate(2019, 1, 1)         ## SR: 0.573, 0.908, 0.986, 0.997, 1.028, 1.076
            # self.SetStartDate(2021, 1, 1)         ## SR: 0.489, 1.108, 1.256, 1.333, 1.333, 1.374
            self.SetStartDate(2022, 1, 1)         ## SR: 0.783, 1.49, 1.729, 1.674, 1.674, 1.696, 1.708

        self.SetCash(10000)

        # Warm-up in minute bars (~20 trading days x 24h x 60min per month):
        # 12 months when collecting training data, 6 months otherwise.
        if TO_SAVE_DATA:
            self.SetWarmUp(int(12 * 20 * 24 * 60), Resolution.Minute)
        else:
            self.SetWarmUp(int(6 * 20 * 24 * 60), Resolution.Minute)

        self.SetBrokerageModel(BrokerageName.OandaBrokerage, AccountType.Margin)
        self.to_plot = False

        self.general_setting = general_setting
        self.consolidator_settings = consolidator_settings
        self.indicator_settings = indicator_settings
        self.signal_settings = signal_settings
        self.model_settings = model_settings
        self.ref_ticker = "EURUSD"
        self.model_name = general_setting["model_name"]
        self.month_start_date = None
        self.IS_LIVE = IS_LIVE
        self.TO_SAVE_DATA = TO_SAVE_DATA

        self.prediction_dict = {}
        for _signal in self.general_setting["signals"]:
            self.prediction_dict[_signal] = {}
            for ticker in self.general_setting["lstm_tickers"]:
                if ticker in self.signal_settings[_signal]['valid_tickers']:
                    self.prediction_dict[_signal][ticker] = 1
                    
        # Data Initialization
        self.Data = {}
        self.Signal = {}
        self.Counter = {}
        self.SymbolMarketHours = {}
        self.symbol_ticker_map = {}     
        self.ticker_symbol_map = {}
        self.output_data_dict = {}

        for ticker in self.general_setting["tickers"]:
            if general_setting["tickers"][ticker]["type"] == "equity":
                symbol = self.AddEquity(
                    ticker,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.Raw,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "forex":
                symbol = self.AddForex(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "cfd":
                symbol = self.AddCfd(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol

            self.Data[symbol] = SymbolData(
                self,
                symbol,
                ticker,
                general_setting,
                consolidator_settings,
                indicator_settings,
            )

            self.Counter[symbol] = {}
            self.Counter[symbol]["counter"] = 0
            self.Counter[symbol]["last_order_counter"] = 0
            self.SymbolMarketHours[symbol] = MarketHours(self, symbol)
            self.symbol_ticker_map[symbol] = ticker
            self.ticker_symbol_map[ticker] = symbol

            self.Signal[symbol] = {}
            for _signal in self.general_setting["signals"]:
                self.Signal[symbol][_signal] = signal_mapping[_signal](
                    self, symbol, ticker, self.general_setting, self.signal_settings[_signal]
                )

        # Model Initialization
        self.Models = {}
        self.Scalers = {}
        self.ModelParams = {}   
        for lstm_ticker in self.general_setting["lstm_tickers"]:
            self.Models[lstm_ticker] = {}
            self.Scalers[lstm_ticker] = {}
            self.ModelParams[lstm_ticker] = {}
            for model_type in self.general_setting["model_types"]:
                self.Models[lstm_ticker][model_type] = {}
                self.Scalers[lstm_ticker][model_type] = {}
                self.ModelParams[lstm_ticker][model_type] = {}

                if model_type == 'both':
                    self.Models[lstm_ticker][model_type]['both'] = None
                    self.Scalers[lstm_ticker][model_type]['both'] = None 

                if model_type == 'hybrid':
                    self.Models[lstm_ticker][model_type]['fundamental'] = None
                    self.Models[lstm_ticker][model_type]['technical'] = None
                    self.Scalers[lstm_ticker][model_type]['fundamental'] = None
                    self.Scalers[lstm_ticker][model_type]['technical'] = None


        # Model Data Initialization
        self.data_list_tickers = {}
        self.has_initialized_model_data = {}
        if self.TO_SAVE_DATA:
            self.ModelData = {} 
        else:
            self.ModelData = pickle.loads(bytes(self.ObjectStore.ReadBytes(f"MODEL_DATA_{self.model_name}")))
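            # When not collecting data, the previously saved feature history is restored
            # from the Object Store under the key MODEL_DATA_<model_name>.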

        for lstm_ticker in self.general_setting["lstm_tickers"]:
            self.data_list_tickers[lstm_ticker] = []
            self.has_initialized_model_data[lstm_ticker] = False
            if lstm_ticker not in self.ModelData:
                self.ModelData[lstm_ticker] = pd.DataFrame()

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker),
            self.Start_Of_Day,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.At(self.general_setting['FxLstm_prediction_hour'], 5, 0),
            self.Prepare_Model_Data,
        )

        self.Schedule.On(
            self.DateRules.MonthStart(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker, 1),
            self.Get_Month_Start_Date,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker, 2),
            self.Train_Model_Both,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker, 3),
            self.Train_Model_Hybrid,
        )
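        # Daily pipeline: Start_Of_Day refreshes external data, Prepare_Model_Data (five
        # minutes past FxLstm_prediction_hour) folds newly collected rows into ModelData,
        # Get_Month_Start_Date marks the month start, and the two Train_Model_* callbacks
        # run daily but only retrain on each ticker's scheduled day of the month.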

        self.external_data = {}
        for _dn in self.general_setting["external_data"]:
            self.external_data[_dn] = {}
            self.external_data[_dn]['time'] = None
            self.external_data[_dn]['value'] = None
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'gsheet':
                self.Log(f"{str(self.Time)}: {_dn}: Loading Initial GSheet Data")
                link = self.general_setting["external_data"][_dn]['link']
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                to_run = True
                while to_run:
                    try:
                        data = self.Download(link)
                        rows = []
                        for row in data.split('\n'):
                            rows.append(row.replace('\r', '').lower().split(','))
                        data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
                        data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
                        data_df[col_val] = data_df[col_val].astype(float)
                        self.external_data[_dn]['data'] = data_df.copy()
                        to_run = False
                    except:
                        pass
                self.Log(f"{str(self.Time)}: {_dn}: Initial GSheet Data Loaded")

            if source == 'NasdaqDataLink':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(NasdaqDataLink, ref, Resolution.Daily).Symbol

            if source == 'equity':
                ticker = self.general_setting["external_data"][_dn]['ticker']
                self.external_data[_dn]['symbol'] = self.AddEquity(ticker, Resolution.Daily).Symbol

            if source == 'USTreasuryYieldCurveRate':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(USTreasuryYieldCurveRate, ref).Symbol


    def Start_Of_Day(self):
        if self.IS_LIVE and (not self.IsWarmingUp):
            for _dn in self.general_setting["external_data"]:
                source = self.general_setting["external_data"][_dn]['source']
                if source == 'gsheet':
                    self.Log(f"{str(self.Time)}: {_dn}: Loading GSheet Data")

                    link = self.general_setting["external_data"][_dn]['link']
                    col_date = self.general_setting["external_data"][_dn]['col_date']
                    col_val = self.general_setting["external_data"][_dn]['col_val']

                    to_run = True
                    while to_run:
                        try:
                            data = self.Download(link)
                            rows = []
                            for row in data.split('\n'):
                                rows.append(row.replace('\r', '').lower().split(','))
                            data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
                            data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
                            data_df[col_val] = data_df[col_val].astype(float)
                            self.external_data[_dn]['data'] = data_df.copy()
                            to_run = False
                        except:
                            pass

                    self.Log(f"{str(self.Time)}: {_dn}: GSheet Data Loaded")

        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']
            if source == 'gsheet':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                lag_days = self.general_setting["external_data"][_dn]['lag_days']
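                # Use only rows at least lag_days old so a value is not consumed before it
                # would have been published (avoids lookahead on the lagged series).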

                data = self.external_data[_dn]['data'][self.external_data[_dn]['data'][col_date] < (self.Time - timedelta(days=lag_days))]
                if len(data) > 0:
                    self.external_data[_dn]['time'] = data[col_date].values[-1]
                    self.external_data[_dn]['value'] = data[col_val].values[-1]

            if source == 'USTreasuryYieldCurveRate':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                symbol = self.external_data[_dn]['symbol']
                history = self.History(USTreasuryYieldCurveRate, symbol, 1, Resolution.Daily)
                history = history.reset_index()
                if len(history) > 0:
                    if col_val in history.columns:
                        self.external_data[_dn]['time'] = pd.to_datetime(history[col_date], utc=True).iloc[0].replace(tzinfo=None)
                        self.external_data[_dn]['value'] = history[col_val].values[0]


    def Prepare_Model_Data(self):
        # self.Log(f"{str(self.Time)}: Preparing Model Data")
        col_price = self.model_settings['col_price']
        col_price_cur = self.model_settings['col_price_cur']
        cols_data = self.model_settings['cols_data']
        col_fundamental = self.model_settings['col_fundamental']
        col_technical = self.model_settings['col_technical']
        start_year = self.model_settings['start_year']
        trade_hour = self.model_settings['trade_hour']
        scaled_tickers = self.model_settings['scaled_tickers']
        prediction_lookforward_days = self.model_settings['prediction_lookforward_days']
        col_target_gains = f"gains_N{self.model_settings['prediction_lookforward_days']}D"
        inflation_map_dict = self.model_settings['inflation_map_dict']

        to_save_data = False
        for lstm_ticker in self.general_setting["lstm_tickers"]:
            data_df = self.ModelData[lstm_ticker].copy()

            has_new_data = False
            if len(self.data_list_tickers[lstm_ticker]) > 0:
                has_new_data = True

                data_df_new = pd.DataFrame(self.data_list_tickers[lstm_ticker]).copy()

                if lstm_ticker in scaled_tickers:
                    data_df_new[col_price] = data_df_new[col_price] / 100
                    data_df_new[col_price_cur] = data_df_new[col_price_cur] / 100

                data_df_new = data_df_new[cols_data]

                data_df_new['year'] = data_df_new['datetime'].dt.year
                data_df_new['hour'] = data_df_new['datetime'].dt.hour
                data_df_new['month'] = data_df_new['datetime'].dt.month
                data_df_new['year_month'] = data_df_new['year'].astype(str) + "-" + data_df_new['month'].astype(str).apply(lambda s: s.zfill(2))

                data_df = pd.concat([data_df, data_df_new])

            if len(data_df) > 0:
                if (not self.has_initialized_model_data[lstm_ticker]) or has_new_data:
                    self.has_initialized_model_data[lstm_ticker] = True

                    data_df.reset_index(drop=True, inplace=True)
                    if self.TO_SAVE_DATA:
                        data_df.drop_duplicates('datetime', keep='last', inplace=True)
                    else:
                        data_df.drop_duplicates('datetime', keep='first', inplace=True)

                    data_df.reset_index(drop=True, inplace=True)
                    data_df.sort_values('datetime', ascending=True, inplace=True)
                    data_df.reset_index(drop=True, inplace=True)

                    for col in col_fundamental + col_technical:
                        data_df[col] = data_df[col].ffill()

                    data_df = data_df[data_df['year'] >= start_year]
                    data_df = data_df[data_df['hour'] == trade_hour]
                    data_df.reset_index(drop=True, inplace=True)

                    for col in inflation_map_dict:
                        col_cpi = inflation_map_dict[col]
                        data_df[f"{col}_d1"] = (data_df[col_cpi] - data_df[col_cpi].shift(1)) / data_df[col_cpi].shift(1)
                        data_df[f"{col}_d30"] = (data_df[col_cpi] - data_df[col_cpi].shift(30)) / data_df[col_cpi].shift(30)

                    data_df[col_target_gains] = data_df[col_price].shift(-prediction_lookforward_days) - data_df[col_price]
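                    # Training target: forward price change over the next
                    # prediction_lookforward_days rows; the trailing rows are NaN and are
                    # dropped later, just before model fitting.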

                    self.ModelData[lstm_ticker] = data_df.copy()
                    self.data_list_tickers[lstm_ticker] = []

                    to_save_data = True

        if to_save_data and self.TO_SAVE_DATA:
            self.ObjectStore.SaveBytes(f"MODEL_DATA_{self.model_name}", pickle.dumps(self.ModelData))
            self.Log(f"{str(self.Time)}: Model Data Saved")

        # self.Log(f"{str(self.Time)}: Model Data Prepared")


    def Get_Month_Start_Date(self):
        self.month_start_date = self.Time


    def Train_Model_Both(self):
        model_type = "both"
        # self.Log(f"{str(self.Time)}: {model_type}: Training Model")
        model_setting = self.model_settings[f"model_settings_{model_type}"]

        col_date = self.model_settings['col_date']
        col_price = self.model_settings['col_price']
        col_price_cur = self.model_settings['col_price_cur']
        col_target = self.model_settings['col_target']
        prediction_lookforward_days = self.model_settings['prediction_lookforward_days']
        col_target_gains = f"gains_N{self.model_settings['prediction_lookforward_days']}D"

        use_gru_model = model_setting['use_gru_model']
        use_dual_lstm = model_setting['use_dual_lstm']
        epochs = model_setting['epochs']
        hidden_size = model_setting['hidden_size']
        window_size = model_setting['window_size']
        thres_multiplier = model_setting['thres_multiplier']
        use_early_stop = model_setting['use_early_stop']
        learning_rate = model_setting['learning_rate']
        batch_size = model_setting['batch_size']
        use_weighted_sampler = model_setting['use_weighted_sampler']
        volatility_type = model_setting['volatility_type']
        valid_lookback_months = model_setting['valid_lookback_months']
        train_lookback_months = model_setting['train_lookback_months']
        inflation_map_dict = self.model_settings['inflation_map_dict']

        for lstm_ticker in self.general_setting["lstm_tickers"]:

            if self.month_start_date is None:
                continue
            else:
                month_start_year = self.month_start_date.year
                month_start_month = self.month_start_date.month
                month_start_day = self.month_start_date.day

            model_train_day = month_start_day + self.general_setting["lstm_model_training_displace_days"][lstm_ticker]
            to_train = (self.Time.year == month_start_year) and (self.Time.month == month_start_month) and (self.Time.day == model_train_day)
            if not to_train:
                continue

            data_df = self.ModelData[lstm_ticker].copy()
            if len(data_df) == 0:
                continue

            col_feature_both = model_setting["col_feature_dict"][lstm_ticker]
            year_month_list = sorted(list(set(data_df['year_month'])))
            year_month_vec = np.array(year_month_list)
            year_vec = np.array(sorted(list(set(data_df['year']))))

            test_year_month = f"{self.Time.year}-{str(self.Time.month).zfill(2)}"
            valid_year_month_list = list(year_month_vec[year_month_vec < test_year_month][-valid_lookback_months:])
            if len(valid_year_month_list) < valid_lookback_months:
                continue

            if np.sum(year_month_vec < min(valid_year_month_list)) == 0:
                continue

            train_year_month_list = list(year_month_vec[year_month_vec < min(valid_year_month_list)][-train_lookback_months:])
            if len(train_year_month_list) < train_lookback_months:
                continue

            data_df_temp = data_df.copy()

            if volatility_type == 'thres_v1':
                col_target_gains_thres = 0.00200
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2

            if volatility_type == 'thres_v2':
                col_target_gains_thres = 0.00235
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2

            if volatility_type == 'thres_auto_v1':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2

            if volatility_type == 'thres_auto_v2':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list+valid_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2
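            # Three-class direction target: 0 = forward loss beyond the threshold,
            # 1 = move within the threshold band, 2 = forward gain beyond the threshold.
            # e.g. with a threshold of 0.00200, a forward gain of +0.0031 -> 2,
            # -0.0005 -> 1, and -0.0024 -> 0.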

            self.ModelParams[lstm_ticker][model_type]['col_target_gains_thres'] = col_target_gains_thres

            data_df_temp = data_df_temp.dropna()
            data_df_temp.reset_index(drop=True, inplace=True)

            train_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
            valid_df = data_df_temp[data_df_temp['year_month'].isin(valid_year_month_list)].copy()

            valid_df_windowed = pd.concat([train_df, valid_df]).copy()
            valid_df_windowed = valid_df_windowed.tail(len(valid_df) + window_size - 1)
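            # The last window_size - 1 training rows are prepended so that the first
            # validation sample has a full lookback window.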

            set_seed(100)
            (train_loader, val_loader, 
            _, scaler, weighted_sampler, class_weights) = get_torch_rnn_dataloaders_both(
                [col_price] + col_feature_both, col_target, train_df, valid_df_windowed, None, window_size, batch_size,
                use_weighted_sampler=use_weighted_sampler,
                has_test_data=False,
            )
            self.Scalers[lstm_ticker][model_type]['both'] = None
            self.Scalers[lstm_ticker][model_type]['both'] = scaler
            self.Models[lstm_ticker][model_type]['both'] = None
            self.Models[lstm_ticker][model_type]['both'] = get_rnn_model_both(
                [col_price] + col_feature_both, train_loader, val_loader, 
                epochs, batch_size, learning_rate, window_size, hidden_size, device, 
                use_early_stop=use_early_stop, use_weighted_sampler=use_weighted_sampler, class_weights=class_weights,
                use_dual_lstm=use_dual_lstm, use_gru_model=use_gru_model,
            )
            self.Log(f"{str(self.Time)}: {model_type}: {lstm_ticker}: Model Trained")


    def Train_Model_Hybrid(self):
        model_type = "hybrid"
        # self.Log(f"{str(self.Time)}: {model_type}: Training Model")
        model_setting = self.model_settings[f"model_settings_{model_type}"]

        col_date = self.model_settings['col_date']
        col_price = self.model_settings['col_price']
        col_price_cur = self.model_settings['col_price_cur']
        col_target = self.model_settings['col_target']
        prediction_lookforward_days = self.model_settings['prediction_lookforward_days']
        col_target_gains = f"gains_N{self.model_settings['prediction_lookforward_days']}D"

        use_gru_model = model_setting['use_gru_model']
        use_dual_lstm = model_setting['use_dual_lstm']
        epochs = model_setting['epochs']
        hidden_size = model_setting['hidden_size']
        window_size = model_setting['window_size']
        thres_multiplier = model_setting['thres_multiplier']
        learning_rate = model_setting['learning_rate']
        batch_size = model_setting['batch_size']
        volatility_type = model_setting['volatility_type']
        valid_lookback_months = model_setting['valid_lookback_months']
        train_lookback_months = model_setting['train_lookback_months']
        inflation_map_dict = self.model_settings['inflation_map_dict']

        for lstm_ticker in self.general_setting["lstm_tickers"]:

            if self.month_start_date is None:
                continue
            else:
                month_start_year = self.month_start_date.year
                month_start_month = self.month_start_date.month
                month_start_day = self.month_start_date.day

            model_train_day = month_start_day + self.general_setting["lstm_model_training_displace_days"][lstm_ticker]
            to_train = (self.Time.year == month_start_year) and (self.Time.month == month_start_month) and (self.Time.day == model_train_day)
            if not to_train:
                continue

            data_df = self.ModelData[lstm_ticker]
            if len(data_df) == 0:
                continue

            col_feature_fundamental = model_setting["col_feature_fundamental_dict"][lstm_ticker]
            col_feature_technical = model_setting["col_feature_technical_dict"][lstm_ticker]

            year_month_list = sorted(list(set(data_df['year_month'])))
            year_month_vec = np.array(year_month_list)
            year_vec = np.array(sorted(list(set(data_df['year']))))

            test_year_month = f"{self.Time.year}-{str(self.Time.month).zfill(2)}"
            valid_year_month_list = list(year_month_vec[year_month_vec < test_year_month][-valid_lookback_months:])
            if len(valid_year_month_list) < valid_lookback_months:
                continue

            if np.sum(year_month_vec < min(valid_year_month_list)) == 0:
                continue

            train_year_month_list = list(year_month_vec[year_month_vec < min(valid_year_month_list)][-train_lookback_months:])
            if len(train_year_month_list) < train_lookback_months:
                continue

            data_df_temp = data_df.copy()

            if volatility_type == 'thres_v1':
                col_target_gains_thres = 0.00200

            if volatility_type == 'thres_v2':
                col_target_gains_thres = 0.00235

            if volatility_type == 'thres_auto_v1':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier

            if volatility_type == 'thres_auto_v2':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list+valid_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier

            self.ModelParams[lstm_ticker][model_type]['col_target_gains_thres'] = col_target_gains_thres

            data_df_temp[col_target] = data_df_temp[col_price].shift(-prediction_lookforward_days)
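            # The hybrid models are regressions: the target is the raw price
            # prediction_lookforward_days ahead, unlike the 3-class label used by 'both'.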
            data_df_temp = data_df_temp.dropna()
            data_df_temp.reset_index(drop=True, inplace=True)

            train_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
            valid_df = data_df_temp[data_df_temp['year_month'].isin(valid_year_month_list)].copy()

            set_seed(100)
            (train_loader, val_loader, _, scaler) = get_torch_rnn_dataloaders_hybrid(
                [col_price] + col_feature_fundamental, col_target, train_df, valid_df, None, window_size, batch_size,
                has_test_data=False,
            )

            self.Scalers[lstm_ticker][model_type]['fundamental'] = None
            self.Scalers[lstm_ticker][model_type]['fundamental'] = scaler

            self.Models[lstm_ticker][model_type]['fundamental'] = None
            self.Models[lstm_ticker][model_type]['fundamental'] = get_rnn_model_hybrid(
                [col_price] + col_feature_fundamental, train_loader, val_loader, 
                epochs, learning_rate, hidden_size, device, 
                use_dual_lstm=use_dual_lstm, use_gru_model=use_gru_model,
            )

            y_pred_val = get_predictions_hybrid(
                val_loader, 
                self.Models[lstm_ticker][model_type]['fundamental'], 
                self.Scalers[lstm_ticker][model_type]['fundamental'], 
                [col_price] + col_feature_fundamental, 
                device,
            )
            valid_df['pred_price_fundamental'] = y_pred_val
            valid_df['pred_fundamental'] = (valid_df['pred_price_fundamental'] - valid_df[col_price]).apply(get_regression_pred_decision, col_target_gains_thres=col_target_gains_thres)

            set_seed(100)
            (train_loader, val_loader, _, scaler) = get_torch_rnn_dataloaders_hybrid(
                [col_price] + col_feature_technical, col_target, train_df, valid_df, None, window_size, batch_size,
                has_test_data=False,
            )

            self.Scalers[lstm_ticker][model_type]['technical'] = scaler

            self.Models[lstm_ticker][model_type]['technical'] = get_rnn_model_hybrid(
                [col_price] + col_feature_technical, train_loader, val_loader, 
                epochs, learning_rate, hidden_size, device, 
                use_dual_lstm=use_dual_lstm, use_gru_model=use_gru_model,
            )

            y_pred_val = get_predictions_hybrid(
                val_loader, 
                self.Models[lstm_ticker][model_type]['technical'], 
                self.Scalers[lstm_ticker][model_type]['technical'], 
                [col_price] + col_feature_technical, 
                device,
            )
            valid_df['pred_price_technical'] = y_pred_val
            valid_df['pred_technical'] = (valid_df['pred_price_technical'] - valid_df[col_price]).apply(get_regression_pred_decision, col_target_gains_thres=col_target_gains_thres)

            fundamental_mse = mean_squared_error(valid_df['pred_price_fundamental'], valid_df[col_target])
            technical_mse = mean_squared_error(valid_df['pred_price_technical'], valid_df[col_target])

            self.ModelParams[lstm_ticker][model_type]['fundamental_mse'] = fundamental_mse
            self.ModelParams[lstm_ticker][model_type]['technical_mse'] = technical_mse
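            # The validation MSEs are stored so live predictions can arbitrate between the
            # fundamental and technical models (see get_prediction_hybrid_regression in OnData).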
            self.Log(f"{str(self.Time)}: {model_type}: {lstm_ticker}: Model Trained")


    def OnData(self, data):

        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'NasdaqDataLink':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    self.external_data[_dn]['value'] = data[symbol].Value

            if source == 'equity':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    if data[symbol] is not None:
                        self.external_data[_dn]['time'] = data[symbol].Time
                        self.external_data[_dn]['value'] = data[symbol].Price


        FxLstm_SymbolQuantity = {}
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            ticker = self.symbol_ticker_map[symbol]

            is_valid_time = self.Time.minute == 0
            is_valid_time = is_valid_time and (self.Time.hour in [self.general_setting['FxLstm_prediction_hour']])
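            # Features are sampled and model predictions refreshed only on the first minute
            # of the configured FxLstm_prediction_hour.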

            if is_valid_time:
                if ticker in self.general_setting["lstm_tickers"]:

                    data_dict = {}
                    data_dict["datetime"] = self.Time
                    # data_dict["ticker"] = ticker        
                    # data_dict["symbol"] = symbol.Value
                    # data_dict["isWarmingUp"] = self.IsWarmingUp
                    # data_dict["symbolTime"] = str(data[symbol].Time)
                    data_dict["price"] = np.round(data[symbol].Price, 6)

                    # Daily Data
                    _consolidator = symbolData.consolidators["D1"]
                    data_dict["close_D1"] = _consolidator.close[0]

                    # External Data
                    for _dn in self.general_setting["external_data"]:
                        # data_dict[f"{_dn}_time"] = self.external_data[_dn]['time']       
                        data_dict[_dn] = self.external_data[_dn]['value']

                    # Technical Features
                    for _tf in self.general_setting["features"]:
                        _consolidator = symbolData.consolidators[_tf]
                        for _in in self.general_setting["features"][_tf]:
                            _indicator = _consolidator.indicators[_in]

                            if _in in self.general_setting["features_val_map"]:
                                for _v in self.general_setting["features_val_map"][
                                    _in
                                ]:
                                    data_dict[f"{_tf}-{_in}-{_v}"] = np.round(
                                        _indicator[_v][0], 5
                                    )

                    if self.TO_SAVE_DATA:
                        if not self.IsWarmingUp:    
                            self.data_list_tickers[ticker] += [data_dict]
                    else:
                        self.data_list_tickers[ticker] += [data_dict]

                    col_price = self.model_settings['col_price']
                    col_price_cur = self.model_settings['col_price_cur']
                    cols_data = self.model_settings['cols_data']
                    col_fundamental = self.model_settings['col_fundamental']
                    col_technical = self.model_settings['col_technical']
                    start_year = self.model_settings['start_year']
                    trade_hour = self.model_settings['trade_hour']
                    col_target = self.model_settings['col_target']
                    scaled_tickers = self.model_settings['scaled_tickers']
                    inflation_map_dict = self.model_settings['inflation_map_dict']
                    max_window_size = self.model_settings['max_window_size']

                    test_df = pd.DataFrame()
                    if len(self.data_list_tickers[ticker]) > 0:
                        data_df_new = pd.DataFrame(self.data_list_tickers[ticker]).copy()
                        if ticker in scaled_tickers:
                            data_df_new[col_price] = data_df_new[col_price] / 100
                            data_df_new[col_price_cur] = data_df_new[col_price_cur] / 100

                        data_df_new = data_df_new[cols_data]

                        data_df_new['year'] = data_df_new['datetime'].dt.year
                        data_df_new['hour'] = data_df_new['datetime'].dt.hour
                        data_df_new['month'] = data_df_new['datetime'].dt.month
                        data_df_new['year_month'] = data_df_new['year'].astype(str) + "-" + data_df_new['month'].astype(str).apply(lambda s: s.zfill(2))

                        data_df = self.ModelData[ticker].copy()
                        if len(data_df) > 0:
                            idx = data_df['datetime'] < self.Time
                            data_df = data_df[idx]
                            data_df.reset_index(drop=True, inplace=True)

                        data_df = pd.concat([data_df, data_df_new])
                        data_df.reset_index(drop=True, inplace=True)

                        data_df.drop_duplicates('datetime', keep='last', inplace=True)
                        data_df.reset_index(drop=True, inplace=True)

                        data_df.sort_values('datetime', ascending=True, inplace=True)
                        data_df.reset_index(drop=True, inplace=True)

                        for col in col_fundamental + col_technical:
                            data_df[col] = data_df[col].ffill()

                        data_df = data_df[data_df['year'] >= start_year]
                        data_df = data_df[data_df['hour'] == trade_hour]
                        data_df.reset_index(drop=True, inplace=True)

                        for col in inflation_map_dict:
                            col_cpi = inflation_map_dict[col]
                            data_df[f"{col}_d1"] = (data_df[col_cpi] - data_df[col_cpi].shift(1)) / data_df[col_cpi].shift(1)
                            data_df[f"{col}_d30"] = (data_df[col_cpi] - data_df[col_cpi].shift(30)) / data_df[col_cpi].shift(30)

                        test_df = data_df.tail(max_window_size).copy()
                        test_df.reset_index(drop=True, inplace=True)
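                    # test_df now holds the most recent max_window_size feature rows
                    # (stored ModelData merged with rows collected today) for inference below.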


                    for model_type in self.general_setting["model_types"]:
                        if len(test_df) == 0:
                            continue

                        if model_type == 'both':
                            if self.Models[ticker][model_type]['both'] is None:
                                continue

                        if model_type == 'hybrid':
                            if self.Models[ticker][model_type]['fundamental'] is None:
                                continue
                            if self.Models[ticker][model_type]['technical'] is None:
                                continue

                        model_setting = self.model_settings[f"model_settings_{model_type}"]

                        test_df_windowed = test_df.tail(model_setting['window_size']).copy()
                        test_df_windowed.reset_index(drop=True, inplace=True)

                        if len(test_df_windowed) != model_setting['window_size']:
                            continue

                        if model_type == 'both':
                            col_feature_both = model_setting["col_feature_dict"][ticker]
                            test_df_windowed[col_target] = 1

                            (_, _, test_loader, _, _, _) = get_torch_rnn_dataloaders_both(
                                [col_price] + col_feature_both, col_target, None, None, test_df_windowed.copy(), 
                                model_setting['window_size'], model_setting['batch_size'],
                                use_weighted_sampler=False,
                                has_test_data=True,
                                is_training=False,
                                scaler=self.Scalers[ticker][model_type]['both'],
                            )

                            (y_pred_list, y_score_list) = get_predictions_both(test_loader, self.Models[ticker][model_type]['both'], device)
                            y_pred = y_pred_list[-1]

                        if model_type == 'hybrid':
                            col_feature_fundamental = model_setting["col_feature_fundamental_dict"][ticker]
                            col_feature_technical = model_setting["col_feature_technical_dict"][ticker]
                            test_df_windowed[col_target] = 1
                            ref_price = test_df_windowed[col_price].values[-1]
                            col_target_gains_thres = self.ModelParams[ticker][model_type]['col_target_gains_thres']
                            fundamental_mse = self.ModelParams[ticker][model_type]['fundamental_mse']
                            technical_mse = self.ModelParams[ticker][model_type]['technical_mse']

                            (_, _, test_loader, _) = get_torch_rnn_dataloaders_hybrid(
                                [col_price] + col_feature_fundamental, col_target, None, None, test_df_windowed.copy(), 
                                model_setting['window_size'], model_setting['batch_size'],
                                has_test_data=True,
                                is_training=False,
                                scaler=self.Scalers[ticker][model_type]['fundamental'],
                            )

                            y_pred_val = get_predictions_hybrid(
                                test_loader, 
                                self.Models[ticker][model_type]['fundamental'], 
                                self.Scalers[ticker][model_type]['fundamental'],
                                [col_price] + col_feature_fundamental,
                                device,
                            )
                            pred_price_fundamental = y_pred_val[-1]
                            pred_fundamental = get_regression_pred_decision(pred_price_fundamental - ref_price, col_target_gains_thres)

                            (_, _, test_loader, _) = get_torch_rnn_dataloaders_hybrid(
                                [col_price] + col_feature_technical, col_target, None, None, test_df_windowed.copy(), 
                                model_setting['window_size'], model_setting['batch_size'],
                                has_test_data=True,
                                is_training=False,
                                scaler=self.Scalers[ticker][model_type]['technical'],
                            )

                            y_pred_val = get_predictions_hybrid(
                                test_loader, 
                                self.Models[ticker][model_type]['technical'], 
                                self.Scalers[ticker][model_type]['technical'],
                                [col_price] + col_feature_technical,
                                device,
                            )
                            pred_price_technical = y_pred_val[-1]
                            pred_technical = get_regression_pred_decision(pred_price_technical - ref_price, col_target_gains_thres)
                            y_pred = get_prediction_hybrid_regression(pred_fundamental, pred_technical, fundamental_mse, technical_mse)
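                            # The final hybrid call arbitrates between the fundamental and
                            # technical directional predictions using their validation MSEs.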


                        for _signal in self.general_setting["signals"]:
                            if ticker in self.signal_settings[_signal]['valid_tickers']:
                                pred_type = self.signal_settings[_signal]['pred_type']
                                lstm_ticker = self.signal_settings[_signal]['lstm_ticker']   

                                if (pred_type == model_type) and (lstm_ticker == ticker):
                                    self.prediction_dict[_signal][ticker] = y_pred
                                    self.Signal[symbol][_signal].update_prediction_direction(self.prediction_dict[_signal][ticker])


            symbolQuantity = 0
            for _signal in self.general_setting["signals"]:
                if ticker in self.signal_settings[_signal]['valid_tickers']:

                    to_exit = self.Signal[symbol][_signal].check_exit(symbolData, data[symbol].Price, data[symbol].Time)
                    if to_exit:
                        self.Signal[symbol][_signal].update_exit()

                    has_enter = self.Signal[symbol][_signal].enter(symbolData, data[symbol].Price, data[symbol].Time)

                    quantity = self.Signal[symbol][_signal].quantity * self.Signal[symbol][_signal].allocation_multiplier
                    quantity = int(np.ceil(quantity))
                    symbolQuantity += quantity

            FxLstm_SymbolQuantity[symbol] = symbolQuantity


        ## Aggregate symbol quantities across strategies
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            self.Counter[symbol]["counter"] += 1

            symbolQuantity = 0
            if symbol in FxLstm_SymbolQuantity:
                symbolQuantity += FxLstm_SymbolQuantity[symbol]

            if not self.IsWarmingUp:
                # Orders may take longer than one bar to fill, so only send a new market order every order_counter_diff bars (3 minutes at minute resolution)
                if (self.Counter[symbol]["counter"] - self.Counter[symbol]["last_order_counter"]) >= self.general_setting["order_counter_diff"]:
                    if (symbolQuantity - self.Portfolio[symbol].Quantity) != 0:
                        self.MarketOrder(symbol, symbolQuantity - self.Portfolio[symbol].Quantity)
                        self.Counter[symbol]["last_order_counter"] = self.Counter[symbol]["counter"]



        
## Version
# Forex LSTM V 1.0
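# A separate variant of the algorithm follows: it registers the symbols and external data
# feeds and schedules SaveData/SOD, but imports no model/torch code.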

from AlgorithmImports import *
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from collections import deque
import pickle

from config import (
    general_setting,
    consolidator_settings,
    indicator_settings,
    signal_settings,
)

from data_classes import (
    SymbolData,
    MarketHours,
)

from signal_classes import (
    FxLstmSignal,
)



from QuantConnect.DataSource import *


class FxLstmAlgo(QCAlgorithm):

    def Initialize(self):


        self.SetTimeZone(TimeZones.Johannesburg)
        self.SetStartDate(2013, 1, 1) 
        self.SetCash(10000)
        self.SetWarmUp(int(12 * 20 * 24 * 60), Resolution.Minute)
        self.SetBrokerageModel(BrokerageName.OandaBrokerage, AccountType.Margin)
        self.to_plot = False

        self.general_setting = general_setting
        self.consolidator_settings = consolidator_settings
        self.indicator_settings = indicator_settings
        self.signal_settings = signal_settings

        ##-##
        self.ref_ticker = "EURUSD"
        # self.ref_ticker = "USDJPY"
        # self.ref_ticker = "USDCHF"

        # self.ref_ticker = "GBPUSD"
        # self.ref_ticker = "AUDUSD"
        # self.ref_ticker = "USDCAD"
        ##-##

        # Data Initialization
        self.Data = {}
        self.Counter = {}
        self.SymbolMarketHours = {}
        self.symbol_ticker_map = {}     
        self.ticker_symbol_map = {}
        self.data_list = []  
        self.output_data_dict = {}

        for ticker in self.general_setting["tickers"]:
            if general_setting["tickers"][ticker]["type"] == "equity":
                symbol = self.AddEquity(
                    ticker,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.Raw,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "forex":
                symbol = self.AddForex(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "cfd":
                symbol = self.AddCfd(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol

            self.Data[symbol] = SymbolData(
                self,
                symbol,
                ticker,
                general_setting,
                consolidator_settings,
                indicator_settings,
            )

            self.Counter[symbol] = {}
            self.Counter[symbol]["counter"] = 0
            self.Counter[symbol]["last_order_counter"] = 0
            self.SymbolMarketHours[symbol] = MarketHours(self, symbol)
            self.symbol_ticker_map[symbol] = ticker
            self.ticker_symbol_map[ticker] = symbol


        self.Schedule.On(
            self.DateRules.MonthEnd(self.ref_ticker),
            self.TimeRules.BeforeMarketClose(self.ref_ticker, 0),
            self.SaveData,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker),
            self.SOD,
        )

        self.external_data = {}
        for _dn in self.general_setting["external_data"]:
            self.external_data[_dn] = {}
            self.external_data[_dn]['time'] = None
            self.external_data[_dn]['value'] = None
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'gsheet':
                link = self.general_setting["external_data"][_dn]['link']
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
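                # Download the CSV once at initialization and parse it manually:
                # the first row is treated as the header, the rest as data rows.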
                data = self.Download(link)
                rows = []
                for row in data.split('\n'):
                    rows.append(row.replace('\r','').lower().split(','))
                data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
                data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
                data_df[col_val] = data_df[col_val].astype(float)
                self.external_data[_dn]['data'] = data_df.copy()

            if source == 'NasdaqDataLink':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(NasdaqDataLink, ref, Resolution.Daily).Symbol

            if source == 'equity':
                ticker = self.general_setting["external_data"][_dn]['ticker']
                self.external_data[_dn]['symbol'] = self.AddEquity(ticker, Resolution.Daily).Symbol

            if source == 'USTreasuryYieldCurveRate':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(USTreasuryYieldCurveRate, ref).Symbol


    def SOD(self):
        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']
            if source == 'gsheet':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                lag_days = self.general_setting["external_data"][_dn]['lag_days']

                data = self.external_data[_dn]['data'][self.external_data[_dn]['data'][col_date] < (self.Time - timedelta(days=lag_days))]
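                # i.e. keep only rows published at least `lag_days` before the current time;
                # the most recent remaining row is used below.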
                if len(data) > 0:
                    self.external_data[_dn]['time'] = data[col_date].values[-1]
                    self.external_data[_dn]['value'] = data[col_val].values[-1]

            if source == 'USTreasuryYieldCurveRate':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                symbol = self.external_data[_dn]['symbol']
                history = self.History(USTreasuryYieldCurveRate, symbol, 1, Resolution.Daily)
                history = history.reset_index()
                if len(history) > 0:
                    if col_val in history.columns:
                        self.external_data[_dn]['time'] = pd.to_datetime(history[col_date], utc=True).iloc[0].replace(tzinfo=None)
                        self.external_data[_dn]['value'] = history[col_val].values[0]


    def SaveData(self):
        self.output_data_dict["data"] = self.data_list
        self.output_data_dict["version"] = "1"
        self.ObjectStore.SaveBytes(f"FUNDAMENTAL_DATA_{self.ref_ticker}", pickle.dumps(self.output_data_dict))
        # self.ObjectStore.SaveBytes(f"TECHNICAL_DATA_{self.ref_ticker}", pickle.dumps(self.output_data_dict))
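        # The stored payload can be read back later (for example in the research
        # environment) with pickle.loads(ObjectStore.ReadBytes(<key>)).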


    def OnData(self, data):
        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'NasdaqDataLink':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    self.external_data[_dn]['value'] = data[symbol].Value

            if source == 'equity':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    if data[symbol] is not None:
                        self.external_data[_dn]['time'] = data[symbol].Time
                        self.external_data[_dn]['value'] = data[symbol].Price


        FxLstm_SymbolQuantity = {}
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            ticker = self.symbol_ticker_map[symbol]

            is_valid_time = self.Time.minute == 0
            # is_valid_time = is_valid_time and (self.Time.hour in [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23])
            is_valid_time = is_valid_time and (self.Time.hour in [1])
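            # i.e. sample once per day at 01:00 (hard-coded here; matches FxLstm_prediction_hour in the config)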

            if is_valid_time:

                if ticker in self.general_setting["lstm_tickers"]:
                    data_dict = {}
                    data_dict["datetime"] = self.Time
                    # data_dict["ticker"] = ticker        
                    # data_dict["symbol"] = symbol.Value
                    # data_dict["isWarmingUp"] = self.IsWarmingUp
                    # data_dict["symbolTime"] = str(data[symbol].Time)
                    # data_dict["price"] = np.round(data[symbol].Price, 6)

                    # Daily Data
                    _consolidator = symbolData.consolidators["D1"]
                    # data_dict["close_D1"] = _consolidator.close[0]

                    # External Data
                    for _dn in self.general_setting["external_data"]:
                        # data_dict[f"{_dn}_time"] = self.external_data[_dn]['time']       
                        data_dict[_dn] = self.external_data[_dn]['value']

                    # Technical Features
                    # for _tf in self.general_setting["features"]:
                    #     _consolidator = symbolData.consolidators[_tf]
                    #     for _in in self.general_setting["features"][_tf]:
                    #         _indicator = _consolidator.indicators[_in]

                    #         if _in in self.general_setting["features_val_map"]:
                    #             for _v in self.general_setting["features_val_map"][
                    #                 _in
                    #             ]:
                    #                 data_dict[f"{_tf}-{_in}-{_v}"] = np.round(
                    #                     _indicator[_v][0], 5
                    #                 )
                
                    if not self.IsWarmingUp:    
                        self.data_list += [data_dict]







        
## Version
# Forex LSTM V 1.0

from AlgorithmImports import *
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from collections import deque
import pickle

from config import (
    general_setting,
    consolidator_settings,
    indicator_settings,
    signal_settings,
)

from data_classes import (
    SymbolData,
    MarketHours,
)

from signal_classes import (
    FxLstmSignal,
)



from QuantConnect.DataSource import *


class FxLstmAlgo(QCAlgorithm):

    def Initialize(self):


        self.SetTimeZone(TimeZones.Johannesburg)
        self.SetStartDate(2013, 1, 1) 
        self.SetCash(10000)
        self.SetWarmUp(int(12 * 20 * 24 * 60), Resolution.Minute)
        self.SetBrokerageModel(BrokerageName.OandaBrokerage, AccountType.Margin)
        self.to_plot = False

        self.general_setting = general_setting
        self.consolidator_settings = consolidator_settings
        self.indicator_settings = indicator_settings
        self.signal_settings = signal_settings

        ##-##
        self.ref_ticker = "EURUSD"
        # self.ref_ticker = "USDJPY"
        # self.ref_ticker = "USDCHF"

        # self.ref_ticker = "GBPUSD"
        # self.ref_ticker = "AUDUSD"
        # self.ref_ticker = "USDCAD"
        ##-##

        # Data Initialization
        self.Data = {}
        self.Counter = {}
        self.SymbolMarketHours = {}
        self.symbol_ticker_map = {}     
        self.ticker_symbol_map = {}
        self.data_list = []  
        self.output_data_dict = {}

        for ticker in self.general_setting["tickers"]:
            if general_setting["tickers"][ticker]["type"] == "equity":
                symbol = self.AddEquity(
                    ticker,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.Raw,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "forex":
                symbol = self.AddForex(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "cfd":
                symbol = self.AddCfd(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol

            self.Data[symbol] = SymbolData(
                self,
                symbol,
                ticker,
                general_setting,
                consolidator_settings,
                indicator_settings,
            )

            self.Counter[symbol] = {}
            self.Counter[symbol]["counter"] = 0
            self.Counter[symbol]["last_order_counter"] = 0
            self.SymbolMarketHours[symbol] = MarketHours(self, symbol)
            self.symbol_ticker_map[symbol] = ticker
            self.ticker_symbol_map[ticker] = symbol


        self.Schedule.On(
            self.DateRules.MonthEnd(self.ref_ticker),
            self.TimeRules.BeforeMarketClose(self.ref_ticker, 0),
            self.SaveData,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.ref_ticker),
            self.TimeRules.AfterMarketOpen(self.ref_ticker),
            self.SOD,
        )

        self.external_data = {}
        for _dn in self.general_setting["external_data"]:
            self.external_data[_dn] = {}
            self.external_data[_dn]['time'] = None
            self.external_data[_dn]['value'] = None
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'gsheet':
                link = self.general_setting["external_data"][_dn]['link']
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                data = self.Download(link)
                rows = []
                for row in data.split('\n'):
                    rows.append(row.replace('\r','').lower().split(','))
                data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
                data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
                data_df[col_val] = data_df[col_val].astype(float)
                self.external_data[_dn]['data'] = data_df.copy()

            if source == 'NasdaqDataLink':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(NasdaqDataLink, ref, Resolution.Daily).Symbol

            if source == 'equity':
                ticker = self.general_setting["external_data"][_dn]['ticker']
                self.external_data[_dn]['symbol'] = self.AddEquity(ticker, Resolution.Daily).Symbol

            if source == 'USTreasuryYieldCurveRate':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(USTreasuryYieldCurveRate, ref).Symbol


    def SOD(self):

        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']
            if source == 'gsheet':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                lag_days = self.general_setting["external_data"][_dn]['lag_days']

                data = self.external_data[_dn]['data'][self.external_data[_dn]['data'][col_date] < (self.Time - timedelta(days=lag_days))]
                if len(data) > 0:
                    self.external_data[_dn]['time'] = data[col_date].values[-1]
                    self.external_data[_dn]['value'] = data[col_val].values[-1]

            if source == 'USTreasuryYieldCurveRate':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                symbol = self.external_data[_dn]['symbol']
                history = self.History(USTreasuryYieldCurveRate, symbol, 1, Resolution.Daily)
                history = history.reset_index()
                if len(history) > 0:
                    if col_val in history.columns:
                        self.external_data[_dn]['time'] = pd.to_datetime(history[col_date], utc=True).iloc[0].replace(tzinfo=None)
                        self.external_data[_dn]['value'] = history[col_val].values[0]


    def SaveData(self):
        self.output_data_dict["data"] = self.data_list
        self.output_data_dict["version"] = "1"
        # self.ObjectStore.SaveBytes(f"FUNDAMENTAL_DATA_{self.ref_ticker}", pickle.dumps(self.output_data_dict))
        self.ObjectStore.SaveBytes(f"TECHNICAL_DATA_{self.ref_ticker}", pickle.dumps(self.output_data_dict))


    def OnData(self, data):
        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'NasdaqDataLink':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    self.external_data[_dn]['value'] = data[symbol].Value

            if source == 'equity':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    if data[symbol] is not None:
                        self.external_data[_dn]['time'] = data[symbol].Time
                        self.external_data[_dn]['value'] = data[symbol].Price


        FxLstm_SymbolQuantity = {}
        for symbol, symbolData in self.Data.items():
            if not (
                data.ContainsKey(symbol)
                and data[symbol] is not None
                and symbolData.IsReady
            ):
                continue

            ticker = self.symbol_ticker_map[symbol]

            is_valid_time = self.Time.minute == 0
            # is_valid_time = is_valid_time and (self.Time.hour in [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23])
            is_valid_time = is_valid_time and (self.Time.hour in [1])

            if is_valid_time:

                if ticker in self.general_setting["lstm_tickers"]:
                    data_dict = {}
                    data_dict["datetime"] = self.Time
                    # data_dict["ticker"] = ticker        
                    # data_dict["symbol"] = symbol.Value
                    # data_dict["isWarmingUp"] = self.IsWarmingUp
                    # data_dict["symbolTime"] = str(data[symbol].Time)
                    data_dict["price"] = np.round(data[symbol].Price, 6)

                    # Daily Data
                    _consolidator = symbolData.consolidators["D1"]
                    data_dict["close_D1"] = _consolidator.close[0]

                    # External Data
                    for _dn in self.general_setting["external_data"]:
                        data_dict[f"{_dn}_time"] = self.external_data[_dn]['time']       
                        data_dict[_dn] = self.external_data[_dn]['value']

                    # Technical Features
                    for _tf in self.general_setting["features"]:
                        _consolidator = symbolData.consolidators[_tf]
                        for _in in self.general_setting["features"][_tf]:
                            _indicator = _consolidator.indicators[_in]

                            if _in in self.general_setting["features_val_map"]:
                                for _v in self.general_setting["features_val_map"][
                                    _in
                                ]:
                                    data_dict[f"{_tf}-{_in}-{_v}"] = np.round(
                                        _indicator[_v][0], 5
                                    )
                
                    if not self.IsWarmingUp:    
                        self.data_list += [data_dict]







        
# region imports
from AlgorithmImports import *
# endregion
from datetime import datetime
import numpy as np


TRADE_FUTURES = False
EXIT_2023 = True

class FuturesBasis(QCAlgorithm):

    def Initialize(self):
        ##-##
        # if EXIT_2023:
        #     self.SetStartDate(datetime(2020, 1, 1))
        #     self.SetEndDate(datetime(2023, 5, 30))
        # else:
        #     self.SetStartDate(datetime(2023, 1, 1))

        self.SetStartDate(datetime(2020, 1, 1))
        self.SetEndDate(datetime(2023, 5, 30))
        ##-##


        self.SetCash(100000)

        if TRADE_FUTURES:
            self.SetBrokerageModel(BrokerageName.InteractiveBrokersBrokerage, AccountType.Margin)
        else:
            self.SetBrokerageModel(BrokerageName.OandaBrokerage, AccountType.Margin)

        ##-##
        # self.ticker_list = ['EURUSD']
        # self.ticker_list = ['GBPUSD']
        # self.ticker_list = ['AUDUSD']
        self.ticker_list = ['USDJPY']

        self.future_units_dict = {
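            # Spot notionals used to proxy one futures contract per pair (micro
            # contract sizes for EUR/GBP/AUD; USDJPY is negative because a long
            # JPY future is approximated here by shorting roughly the matching
            # USDJPY notional in spot).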
            "EURUSD": 12500,
            "GBPUSD": 6250,
            "AUDUSD": 10000,
            "USDJPY": -50000,
        }
        ##-##


        self.symbol_ticker_map = {}     
        self.ticker_symbol_map = {}
        self.Futures = {}
        self.FuturesTracker = {}   
        self.FuturesSymbol = {}   
        self.FuturesRefSymbol = {}        
        for ticker in self.ticker_list:

            symbol = self.AddForex(
                ticker,
                Resolution.Minute,
                Market.Oanda,
            ).Symbol
            self.symbol_ticker_map[symbol] = ticker
            self.ticker_symbol_map[ticker] = symbol

            if ticker == "EURUSD":
                self.Futures[ticker] = self.AddFuture(

                    # Futures.Currencies.EUR,
                    # Futures.Currencies.EuroFXEmini,
                    Futures.Currencies.MicroEUR,

                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.BackwardsRatio,
                    dataMappingMode=DataMappingMode.FirstDayMonth
                )

            if ticker == "GBPUSD":
                self.Futures[ticker] = self.AddFuture(
                    Futures.Currencies.MicroGBP,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.BackwardsRatio,
                    dataMappingMode=DataMappingMode.FirstDayMonth
                )

            if ticker == "AUDUSD":
                self.Futures[ticker] = self.AddFuture(
                    Futures.Currencies.MicroAUD,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.BackwardsRatio,
                    dataMappingMode=DataMappingMode.FirstDayMonth
                )

            if ticker == "USDJPY":
                self.Futures[ticker] = self.AddFuture(

                    ## Futures.Currencies.MicroJPY,
                    ## Futures.Currencies.MicroUSDJPY,

                    Futures.Currencies.JapaneseYenEmini,
                    # Futures.Currencies.JPY,

                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.BackwardsRatio,
                    dataMappingMode=DataMappingMode.FirstDayMonth
                )

            self.Futures[ticker].SetFilter(0, 182)
            self.FuturesSymbol[ticker] = self.Futures[ticker].Symbol
            self.FuturesRefSymbol[ticker] = None
            self.FuturesTracker[ticker] = None

        self.traded = False


    def OnData(self, data):


        # to_continue = False
        # for ticker in self.ticker_list:
        #     if not data.ContainsKey(self.ticker_symbol_map[ticker]):
        #         to_continue = True
        #     else:
        #         if data[self.ticker_symbol_map[ticker]] is None:
        #             to_continue = True

        #     if not data.ContainsKey(self.FuturesSymbol[ticker]):
        #         to_continue = True
        #     else:
        #         if data[self.FuturesSymbol[ticker]] is None:
        #             to_continue = True

        # if to_continue:
        #     return



        for ticker in self.ticker_list:
            # self.Log(f"symbol: {self.FuturesSymbol[ticker]}")

            ##_##
            # if self.FuturesTracker[ticker] is not None: 
            #     quantity = self.Portfolio[self.FuturesTracker[ticker]].Quantity
            #     if quantity == 0:
            #         self.FuturesTracker[ticker] = None
            ##_##


            chain = data.FuturesChains.get(self.FuturesSymbol[ticker])
            if chain:
                # Select the contract with the greatest open interest
                ref_contract = sorted(chain, key=lambda contract: contract.OpenInterest, reverse=True)[0]
                self.FuturesRefSymbol[ticker] = ref_contract.Symbol


            if data.SymbolChangedEvents.ContainsKey(self.FuturesSymbol[ticker]):
                # self.Log("event change")
                changed_event = data.SymbolChangedEvents[self.FuturesSymbol[ticker]]
                old_symbol = changed_event.OldSymbol
                new_symbol = changed_event.NewSymbol
                if self.FuturesRefSymbol[ticker] != old_symbol:
                    rollover_symbol = self.FuturesRefSymbol[ticker]
                else:
                    rollover_symbol = new_symbol

                # tag = f"Rollover - Symbol changed at {self.Time}: {old_symbol} -> {new_symbol}"
                # self.Log(tag)

                if self.FuturesTracker[ticker] is not None: 
                    if old_symbol == self.FuturesTracker[ticker]: 
                        if TRADE_FUTURES:
                            quantity = self.Portfolio[old_symbol].Quantity
                            if quantity != 0:
                                self.MarketOrder(old_symbol, -quantity)
                                self.MarketOrder(rollover_symbol, quantity)
                        else:
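                            # Spot proxy: close and immediately re-open the same
                            # spot quantity so the futures roll shows up as two
                            # fills while net exposure is unchanged.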
                            quantity = self.Portfolio[self.ticker_symbol_map[ticker]].Quantity
                            if quantity != 0:
                                self.MarketOrder(self.ticker_symbol_map[ticker], -quantity)
                                self.MarketOrder(self.ticker_symbol_map[ticker], quantity)

                        self.FuturesTracker[ticker] = rollover_symbol                 

            ##-##
            if not self.traded and (self.FuturesRefSymbol[ticker] is not None):
            # if (self.FuturesTracker[ticker] is None) and (self.FuturesRefSymbol[ticker] is not None) and ((not EXIT_2023) or (EXIT_2023 and (self.Time.year < 2023))):
            ##-##

                self.traded = True
                self.FuturesTracker[ticker] = self.FuturesRefSymbol[ticker]

                if TRADE_FUTURES:
                    self.MarketOrder(self.FuturesTracker[ticker], 1)
                else:
                    self.MarketOrder(self.ticker_symbol_map[ticker], self.future_units_dict[ticker])


            if self.FuturesTracker[ticker] is not None: 
                ##-##
                # quantity = self.Portfolio[self.FuturesTracker[ticker]].Quantity
                # if quantity == 0:
                #     self.FuturesTracker[ticker] = None
                # else:

                #     if self.Time.year >= 2023:
                #         if TRADE_FUTURES:
                #             self.MarketOrder(self.FuturesTracker[ticker], -quantity)
                #         self.FuturesTracker[ticker] = None


                # if EXIT_2023 and (self.Time.year >= 2023):
                #     if TRADE_FUTURES:
                #         quantity = self.Portfolio[self.FuturesTracker[ticker]].Quantity
                #         self.MarketOrder(self.FuturesTracker[ticker], -quantity)
                #     else:
                #         quantity = self.Portfolio[self.ticker_symbol_map[ticker]].Quantity
                #         self.MarketOrder(self.ticker_symbol_map[ticker], -quantity)

                #     self.FuturesTracker[ticker] = None


                if self.Time.year >= 2023:
                    if TRADE_FUTURES:
                        quantity = self.Portfolio[self.FuturesTracker[ticker]].Quantity
                        self.MarketOrder(self.FuturesTracker[ticker], -quantity)
                    else:
                        quantity = self.Portfolio[self.ticker_symbol_map[ticker]].Quantity
                        self.MarketOrder(self.ticker_symbol_map[ticker], -quantity)

                    self.FuturesTracker[ticker] = None

                ##-##

Notebook too long to render.

#region imports
from AlgorithmImports import *
#endregion


## General Settings
general_setting = {
    "tickers": {
        "USDJPY": {"type": "forex"},
    },
    
    "model_name": "ForexLSTM_OANDA_V1_02",
    "consolidator_timeframes": ["D1", "W1"],
    "order_counter_diff": 3,
    "model_types": ["both","hybrid"],


    "lstm_tickers": ['USDJPY'],

    "lstm_model_training_displace_days": {
        'USDJPY': 0,
    },

    "signals": [
        "FxLstm_Hybrid_USDJPY_Trail",
    ], 

    "FxLstm_prediction_hour": 1,

    "external_data": {
        # SP500
        'spy': {
            'source': 'equity',
            'ticker': 'SPY',
        },

        # Global X DAX Germany ETF
        'dax': {
            'source': 'equity',
            'ticker': 'DAX',
        },

        # US Treasury
        'us_treasury': {
            'source': 'USTreasuryYieldCurveRate',
            'ref': 'USTYCR',
            'col_date': 'time',
            'col_val': 'onemonth',
        },

        # # Consumer Price Index for Inflation Rate
        # # https://data.nasdaq.com/data/RATEINF-inflation-rates
        # 'cpi_usa': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_USA",
        # },
        # 'cpi_eur': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_EUR",
        # },
        # 'cpi_deu': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_DEU",
        # },
        # 'cpi_gbr': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_GBR",
        # },
        # 'cpi_chf': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_CHE",
        # },
        # 'cpi_jpn': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_JPN",
        # },
        # 'cpi_can': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_CAN",
        # },
        # 'cpi_aus': {
        #     'source': 'NasdaqDataLink',
        #     'ref': "RATEINF/CPI_AUS",
        # },

        # Consumer Price Index: All Items: Total for United States
        # https://fred.stlouisfed.org/series/USACPALTT01CTGYM
        'cpi_usa': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_usa/data.csv",
            'col_date': 'date',
            'col_val': 'USACPALTT01CTGYM'.lower(),
            'lag_days': 10,
        },


        # Consumer Price Index: All Items: Total: Total for the Euro Area (19 Countries)
        # https://fred.stlouisfed.org/series/EA19CPALTT01GYM
        'cpi_eur': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_eur/data.csv",
            'col_date': 'date',
            'col_val': 'EA19CPALTT01GYM'.lower(),
            'lag_days': 10,
        },

        # Consumer Price Index: All Items: Total for Germany
        # https://fred.stlouisfed.org/series/DEUCPALTT01CTGYM
        'cpi_deu': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_deu/data.csv",
            'col_date': 'date',
            'col_val': 'DEUCPALTT01CTGYM'.lower(),
            'lag_days': 10,
        },

        # Consumer Price Index: All Items: Total for United Kingdom
        # https://fred.stlouisfed.org/series/GBRCPALTT01CTGYM
        'cpi_gbr': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_gbr/data.csv",
            'col_date': 'date',
            'col_val': 'GBRCPALTT01CTGYM'.lower(),
            'lag_days': 10,
        },

        # Consumer Price Index: All Items: Total for Switzerland 
        # https://fred.stlouisfed.org/series/CHECPALTT01CTGYM
        'cpi_chf': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_chf/data.csv",
            'col_date': 'date',
            'col_val': 'CHECPALTT01CTGYM'.lower(),
            'lag_days': 10,
        },

        # Consumer Price Index: All Items: Total for Japan 
        # https://fred.stlouisfed.org/series/CPALTT01JPM659N
        'cpi_jpn': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_jpn/data.csv",
            'col_date': 'date',
            'col_val': 'CPALTT01JPM659N'.lower(),
            'lag_days': 10,
        },

        # Consumer Price Index: All Items: Total for Canada
        # https://fred.stlouisfed.org/series/CANCPALTT01CTGYM
        'cpi_can': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_can/data.csv",
            'col_date': 'date',
            'col_val': 'CANCPALTT01CTGYM'.lower(),
            'lag_days': 10,
        },

        # Consumer Price Index: All Items: Total for Australia 
        # https://fred.stlouisfed.org/series/CPALTT01AUQ659N
        'cpi_aus': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/cpi_aus/data.csv",
            'col_date': 'date',
            'col_val': 'CPALTT01AUQ659N'.lower(),
            'lag_days': 10,
        },

        # Federal Funds Effective Rate (DFF)
        # https://fred.stlouisfed.org/series/DFF
        'dff': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/dff/data.csv",
            'col_date': 'date',
            'col_val': 'dff',
            'lag_days': 1,
        },

        # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for the Euro Area (19 Countries)
        # https://fred.stlouisfed.org/series/IRLTLT01EZM156N
        'rate_eur_lt_gov': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_eur_lt_gov/data.csv",
            'col_date': 'date',
            'col_val': 'IRLTLT01EZM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for the Euro Area (19 Countries)
        # https://fred.stlouisfed.org/series/IR3TIB01EZM156N
        'rate_eur_3m_bank': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_eur_3m_bank/data.csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01EZM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: Long-Term Government Bond Yields: 10-Year: Main (Including Benchmark) for Germany
        # https://fred.stlouisfed.org/series/IRLTLT01DEM156N
        'rate_deu_lt_gov': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_deu_lt_gov/data.csv",
            'col_date': 'date',
            'col_val': 'IRLTLT01DEM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Germany
        # https://fred.stlouisfed.org/series/IR3TIB01DEM156N
        'rate_deu_3m_bank': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_deu_3m_bank/data.csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01DEM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Switzerland
        # https://fred.stlouisfed.org/series/IR3TIB01CHM156N
        'rate_chf_3m_bank': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_chf_3m_bank/data.csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01CHM156N'.lower(),
            'lag_days': 10,
        },

        #  3-Month or 90-day Rates and Yields: Interbank Rates for Japan
        # https://fred.stlouisfed.org/series/IR3TIB01JPM156N
        'rate_jpn_3m_bank': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_jpn_3m_bank/data.csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01JPM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Australia
        # https://fred.stlouisfed.org/series/IR3TIB01AUM156N
        'rate_aus_3m_bank': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_aus_3m_bank/data.csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01AUM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for Canada 
        # https://fred.stlouisfed.org/series/IR3TIB01CAM156N
        'rate_cnd_3m_bank': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_cnd_3m_bank/data.csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01CAM156N'.lower(),
            'lag_days': 10,
        },

        # Interest Rates: 3-Month or 90-Day Rates and Yields: Interbank Rates: Total for United Kingdom
        # https://fred.stlouisfed.org/series/IR3TIB01GBM156N
        'rate_gbp_3m_bank': {
            'source': 'gsheet',
            'link': "https://raw.githubusercontent.com/deerfieldgreen/FRED_data/main/data/rate_gbp_3m_bank/data.csv",
            'col_date': 'date',
            'col_val': 'IR3TIB01GBM156N'.lower(),
            'lag_days': 10,
        },

    },

    "features": {
        "D1": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20","PSAR","AO","ROC10","TRIX10",
        ],
        "W1": [],
    },

    "features_val_map": {
        "SMA10": ["val"], 
        "MACD": ["macd", "macdsignal", "macdhist"],
        "ROC2": ["val"],   
        "MOM4": ["val"],    
        "RSI10": ["val"],
        "BB20": ["upper","lower","mid"],
        "CCI20": ["val"],
        "ULTOSC": ["val"],
        "CHOP": ["val"],
        "DX14": ["val"],
        "PHASE": ["val"],
        "CRSI": ["val"],
        "PSAR": ["val"],
        "AO": ["val"],
        "TRIX10": ["val"],
        "ROC10": ["val"],
    },

}
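

# Illustration only (not part of the algorithm): a minimal sketch showing how the
# technical feature column names consumed downstream are derived from `features`
# and `features_val_map` above, mirroring the f"{timeframe}-{indicator}-{value}"
# naming used when the data is collected.
def feature_column_names(settings=general_setting):
    cols = []
    for timeframe, indicator_names in settings["features"].items():
        for indicator_name in indicator_names:
            for value_name in settings["features_val_map"].get(indicator_name, []):
                cols.append(f"{timeframe}-{indicator_name}-{value_name}")
    return cols
# Example: feature_column_names() starts with ["D1-SMA10-val", "D1-MACD-macd", ...]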


## Consolidator Settings
consolidator_settings = {
    "D1": {
        "timeframe_minutes": 24 * 60,
        "consolidation_type": "quote",
        "indicators": [
            "SMA10","MACD","ROC2","MOM4","RSI10","BB20","CCI20",
            "ATR10","ATR14","ATR21",
            "PSAR",
            "SMA100",
            "SMA200",
            "SMA21" ,
            "AO",
            "ROC10",
            "ROC2",
            "TRIX10",
            "SMA50",
                    ],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },

    "W1": {
        "timeframe_minutes": 7 * 24 * 60,
        "consolidation_type": "quote",
        "indicators": [],
        "window": 5,
        "window_multiplier_dict": {
            "forex": 1,   
        },  
    },

}


## Indicators Settings
indicator_settings = {
    "SMA5": {
        "type": "SMA",
        "lookback": 5,
        "field": "Close",
        "window": 3,
    }, 
    "SMA10": {
        "type": "SMA",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    }, 
    "SMA20": {
        "type": "SMA",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 

    "SMA21": {
        "type": "SMA",
        "lookback": 21,
        "field": "Close",
        "window": 3,
    }, 

    "SMA50": {
        "type": "SMA",
        "lookback": 50,
        "field": "Close",
        "window": 3,
    }, 
    "SMA100": {
        "type": "SMA",
        "lookback": 100,
        "field": "Close",
        "window": 3,
    }, 
    "SMA200": {
        "type": "SMA",
        "lookback": 200,
        "field": "Close",
        "window": 3,
    }, 
    "MACD": {
        "type": "MACD",
        "window": 3,
    },
    "ROC2": {
        "type": "ROC",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 

    "ROC10": {
        "type": "ROC",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    }, 
    "MOM4": {
        "type": "MOM",
        "lookback": 2,
        "field": "Close",
        "window": 3,
    }, 
    "RSI10": {
        "type": "RSI",
        "lookback": 10,
        "ma_type": "Simple",
        "field": "Close",
        "window": 3,
    },
    "BB20": {
        "type": "BOLL",
        "lookback": 20,
        "ma_type": "Simple",
        "std": 2,
        "field": "Close",
        "window": 3,
    },
    "CCI20": {
        "type": "CCI",
        "lookback": 20,
        "field": "Close",
        "window": 3,
    }, 

    "ATR10": {
        "type": "ATR",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    },
    "ATR14": {
        "type": "ATR",
        "lookback": 14,
        "field": "Close",
        "window": 3,
    },
    "ATR21": {
        "type": "ATR",
        "lookback": 21,
        "field": "Close",
        "window": 3,
    },
    "ULTOSC": {
        "type": "ULTOSC",
        "window": 3,
    },
    "CHOP": {
        "type": "CHOP",
        "lookback": 52,
        "window": 3,
    },
    "DX14": {
        "type": "DX",
        "lookback": 14,
        "window": 3,
    },
    "PHASE": {
        "type": "PHASE",
        "lookback": 15,
        "window": 3,
    },
    "CRSI": {
        "type": "CRSI",
        "rsi_len": 15,
        "rsi_field": "Close",
        "rsi_window": 21,
        "window": 3,
    },
    "PSAR": {
        "type": "PSAR",
        "window": 3,
    },
    "AO": {
        "type": "AO",
        "field": "Close",
        "window": 3,
    },
    "TRIX10": {
        "type": "TRIX",
        "lookback": 10,
        "field": "Close",
        "window": 3,
    },
}

signal_settings = {
    "FxLstm_Hybrid_USDJPY_Trail": {
        "lstm_ticker": "USDJPY",  
        "valid_tickers": ["USDJPY"],
        "active_timeframe": "D1",
        "prediction_direction_map_dict": {
            0: -1,
            1: 0,
            2: 1,
        },
        "pred_type": 'hybrid',
        "exit_wait_period": 0,
        "risk_pct": 0.08,# 0.02
        "enter_long_trades": True,
        "enter_short_trades": True,
        "use_sma_filter": True,
        "sma_filter_lookback": 10,
        "use_ao_filter": True,
        "use_roc_filter": True,
        "roc_filter_lookback": 2,
        "use_trix_filter": True,
        "trix_filter_lookback": 10,
        "use_macd_filter": True,
        "atrLength": 10, 
        "longStopMultiplier": 0.5,
        "shortStopMultiplier": 0.2, # 0.2
        "longRiskRewardMultiplier": 3,
        "shortRiskRewardMultiplier": 3,
        "useTralingStop": False,
        "trailStopSize": 3.0,  # 5.0
        "use_movement_thres_for_stops": False,
        "use_trix_for_stops": False,
        "use_ao_for_stops": False,
        "use_macd_for_stops": False,
        "use_roc_for_stops": False,
        "use_sma_for_stops": False,
        "movement_thres": 0.002,
        "use_prediction_direction_to_exit": False,
    },
    
}
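

# Illustration only: a sketch (an assumption, since the signal classes live in
# signal_classes.py and are not shown here) of how the ATR-based stop/target
# parameters above would typically translate into price levels for a long entry.
def long_stop_and_target(entry_price, atr,
                         settings=signal_settings["FxLstm_Hybrid_USDJPY_Trail"]):
    stop = entry_price - settings["longStopMultiplier"] * atr
    target = entry_price + (settings["longRiskRewardMultiplier"]
                            * settings["longStopMultiplier"] * atr)
    return stop, target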

    




model_settings = {
    "col_date": ['datetime'],
    "col_price": 'close_D1',
    "col_price_cur": 'price',
    "col_target": 'target',
    "start_year": 2017,
    "trade_hour": 1,
    "prediction_lookforward_days": 1,
    "max_window_size": 100,
    "scaled_tickers": ["USDJPY"],

    "inflation_map_dict": {
        'inflation_usa': 'cpi_usa',
        'inflation_eur': 'cpi_eur',
        'inflation_deu': 'cpi_deu',
        'inflation_gbr': 'cpi_gbr',
        'inflation_chf': 'cpi_chf',
        'inflation_jpn': 'cpi_jpn',
        'inflation_aus': 'cpi_aus',
    },

    "cols_data": [
        'datetime', 'close_D1', 'price', 
        'spy', 'dax', 
        'dff',
        'cpi_usa', 'cpi_eur', 'cpi_deu', 'cpi_gbr','cpi_chf','cpi_jpn','cpi_aus',
        'rate_eur_3m_bank',
        'rate_deu_3m_bank',
        'rate_chf_3m_bank',
        'rate_jpn_3m_bank',
        'rate_aus_3m_bank',
        'rate_cnd_3m_bank',
        'rate_gbp_3m_bank',
        'D1-SMA10-val', 'D1-MACD-macd', 'D1-MACD-macdsignal', 'D1-MACD-macdhist', 'D1-ROC2-val',
        'D1-MOM4-val', 'D1-RSI10-val', 'D1-BB20-upper', 'D1-BB20-mid', 'D1-BB20-lower', 'D1-CCI20-val',
        'D1-PSAR-val',
    ],

    'col_fundamental': [
        'spy','dax',
        'dff',
        'cpi_usa','cpi_eur','cpi_deu','cpi_gbr','cpi_chf','cpi_jpn','cpi_aus',
        'rate_eur_3m_bank',
        'rate_deu_3m_bank',
        'rate_chf_3m_bank',
        'rate_jpn_3m_bank',
        'rate_aus_3m_bank',
        'rate_cnd_3m_bank',
        'rate_gbp_3m_bank',
    ],

    'col_technical': [
        'D1-SMA10-val',
        'D1-MACD-macd',
        'D1-MACD-macdsignal',
        'D1-MACD-macdhist',
        'D1-ROC2-val',
        'D1-MOM4-val',
        'D1-RSI10-val',
        'D1-BB20-upper',
        'D1-BB20-mid', 
        'D1-BB20-lower',
        'D1-CCI20-val',
        'D1-PSAR-val',
    ],

    "model_settings_both": {
        "use_gru_model": True,
        "use_dual_lstm": False,
        "epochs": 1,
        "hidden_size": 50,
        "window_size": 5,
        "thres_multiplier": 3,
        "use_early_stop": False,
        "learning_rate": 0.0005,
        "batch_size": 8,
        "use_weighted_sampler": False,
        "volatility_type": 'thres_auto_v1',
        "valid_lookback_months": 12,
        "train_lookback_months": 48,

        "col_feature_dict": {
            "USDJPY": [
                'dff',
                'inflation_usa_d30',
                'inflation_eur_d30',
                'inflation_deu_d30',
                'rate_eur_3m_bank',
                'rate_deu_3m_bank',
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
                'D1-PSAR-val',
            ],


        },
    },

    "model_settings_hybrid": {
        "use_gru_model": True,
        "use_dual_lstm": False,
        "epochs": 1,
        "hidden_size": 50,
        "window_size": 20,
        "thres_multiplier": 3,
        "learning_rate": 0.001,
        "batch_size": 8,
        "use_weighted_sampler": False,
        "volatility_type": 'thres_auto_v1',
        "valid_lookback_months": 12,
        "train_lookback_months": 48,

        "col_feature_fundamental_dict": {
            "USDJPY": [
                'dff',
                # 'inflation_usa_d1',

                # 'inflation_deu_d1',
                'rate_jpn_3m_bank',
                # 'rate_deu_3m_bank',
            ],

        },

        "col_feature_technical_dict": {
            "USDJPY": [
                'D1-SMA10-val',
                'D1-MACD-macd',
                'D1-MACD-macdsignal',
                'D1-MACD-macdhist',
                'D1-ROC2-val',
                'D1-MOM4-val',
                'D1-RSI10-val',
                'D1-BB20-upper',
                'D1-BB20-mid',
                'D1-BB20-lower',
                'D1-CCI20-val',
            ],

        },

    },


}
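

# Illustration only (an assumption about data preparation, not the project's
# training code): `window_size` would turn the chronologically ordered feature
# rows into overlapping sequences of shape (num_samples, window_size, num_features)
# for the LSTM/GRU described by the settings above.
import numpy as np

def make_sequences(feature_matrix, window_size):
    # feature_matrix: array-like of shape (num_rows, num_features), oldest row first
    feature_matrix = np.asarray(feature_matrix)
    return np.array([
        feature_matrix[i - window_size:i]
        for i in range(window_size, len(feature_matrix) + 1)
    ])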



from AlgorithmImports import *

import numpy as np
from datetime import datetime, timedelta
from collections import deque

from indicator_classes import (
    CustomPhase,
    CustomCRSI,
    CustomPivot,
    CustomChoppinessIndex,
    CustomDX,
    CustomMACD,
    CustomULTOSC,
)


class SymbolData:
    def __init__(
        self,
        algorithm,
        symbol,
        ticker,
        general_setting,
        consolidator_settings,
        indicator_settings,
    ):
        self.symbol = symbol
        self.ticker = ticker
        self.algorithm = algorithm
        self.general_setting = general_setting
        self.consolidator_settings = consolidator_settings
        self.indicator_settings = indicator_settings
        self.to_plot = algorithm.to_plot

        if general_setting["tickers"][ticker]["type"] in ["forex", "cfd"]:
            self.consolidator_type = "quote"
        elif general_setting["tickers"][ticker]["type"] in ["equity", "index"]:
            self.consolidator_type = "trade"

        self.consolidators = {}
        for timeframe in self.general_setting["consolidator_timeframes"]:
            self.consolidators[timeframe] = DataConsolidator(
                algorithm,
                symbol,
                ticker,
                general_setting,
                consolidator_settings[timeframe],
                indicator_settings,
                self.consolidator_type,
            )

    @property
    def IsReady(self):
        # Ready only once every configured consolidator is ready (product of the booleans equals 1)
        is_ready = (
            np.prod([self.consolidators[_t].IsReady for _t in self.general_setting["consolidator_timeframes"]]) == 1
        )
        return is_ready


class DataConsolidator:
    def __init__(
        self,
        algorithm,
        symbol,
        ticker,
        general_setting,
        consolidator_setting,
        indicator_settings,
        consolidator_type,
    ):
        self.symbol = symbol
        self.ticker = ticker
        self.algorithm = algorithm
        self.general_setting = general_setting
        self.consolidator_setting = consolidator_setting
        self.indicator_settings = indicator_settings
        self.consolidator_type = consolidator_type
        self.to_plot = algorithm.to_plot
        self.indicators = {}

        self.ticker_type = self.general_setting["tickers"][self.ticker]["type"]
        self.window_multiplier = self.consolidator_setting["window_multiplier_dict"][self.ticker_type]
        self.window_length = int(self.consolidator_setting["window"] * self.window_multiplier)
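        # Rolling history of consolidated bars; index 0 is always the most recent
        # completed bar (values are pushed with appendleft in ConHandler).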
        self.time = deque(maxlen=self.window_length)
        self.open = deque(maxlen=self.window_length)
        self.high = deque(maxlen=self.window_length)
        self.low = deque(maxlen=self.window_length)
        self.close = deque(maxlen=self.window_length)
        self.returns = deque(maxlen=self.window_length)

        if "window_large" in self.consolidator_setting:
            self.window_length_large = int(self.consolidator_setting["window_large"] * self.window_multiplier)
            self.time_large = deque(maxlen=self.window_length_large)           
            self.close_large = deque(maxlen=self.window_length_large)    

        self.BarCount = 0

        if self.consolidator_type == "quote":
            if self.consolidator_setting["timeframe_minutes"] in [5, 15, 30, 60]:
                self.Con = QuoteBarConsolidator(
                    TimeSpan.FromMinutes(self.consolidator_setting["timeframe_minutes"])
                )
            elif self.consolidator_setting["timeframe_minutes"] in [4 * 60]:
                self.Con = QuoteBarConsolidator(self.H4Timer)
            elif self.consolidator_setting["timeframe_minutes"] in [24 * 60]:
                self.Con = QuoteBarConsolidator(self.D1Timer)
            elif self.consolidator_setting["timeframe_minutes"] in [7 * 24 * 60]:
                self.Con = QuoteBarConsolidator(self.W1Timer)

        elif self.consolidator_type == "trade":
            if self.consolidator_setting["timeframe_minutes"] in [5, 15, 30, 60]:
                self.Con = TradeBarConsolidator(
                    TimeSpan.FromMinutes(self.consolidator_setting["timeframe_minutes"])
                )
            elif self.consolidator_setting["timeframe_minutes"] in [4 * 60]:
                self.Con = TradeBarConsolidator(self.H4Timer)
            elif self.consolidator_setting["timeframe_minutes"] in [24 * 60]:
                self.Con = TradeBarConsolidator(self.D1Timer)
            elif self.consolidator_setting["timeframe_minutes"] in [7 * 24 * 60]:
                self.Con = TradeBarConsolidator(self.W1Timer)

        self.Con.DataConsolidated += self.ConHandler
        algorithm.SubscriptionManager.AddConsolidator(symbol, self.Con)

        for _indicator in self.consolidator_setting["indicators"]:
            self.indicators[_indicator] = self.get_indicator(symbol, _indicator)

    def ConHandler(self, sender, bar):
        self.BarCount += 1
        self.time.appendleft(bar.Time)
        self.open.appendleft(bar.Open)
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.close.appendleft(bar.Close)

        if "window_large" in self.consolidator_setting:
            self.time_large.appendleft(bar.Time)
            self.close_large.appendleft(bar.Close)

        if len(self.close) > 1:
            self.returns.appendleft((self.close[0] / self.close[1]) - 1)
        else:
            self.returns.appendleft(0)

        for _indicator in self.consolidator_setting["indicators"]:
            self.update_indicator(bar, _indicator)
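
    # Custom calendar rules: the H4/D1/W1 bars below are anchored at 17:00
    # algorithm time rather than midnight, so each new period starts at 17:00
    # of the preceding day (or of Sunday, for the weekly bars).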

    def H4Timer(self, dt):
        start = (dt if dt.hour > 17 else dt - timedelta(1)).date()
        start = datetime.combine(start, datetime.min.time()) + timedelta(hours=17)
        return CalendarInfo(start, timedelta(hours=4))

    def D1Timer(self, dt):
        start = (dt if dt.hour > 17 else dt - timedelta(1)).date()
        start = datetime.combine(start, datetime.min.time()) + timedelta(hours=17)
        return CalendarInfo(start, timedelta(hours = 12))

    def W1Timer(self, dt):
        _date = dt.date()
        if _date.weekday() == 6:
            if dt.hour > 17:
                start = _date
            else:
                start = _date - timedelta(days=_date.weekday()) - timedelta(1)
        else:
            start = _date - timedelta(days=_date.weekday()) - timedelta(1)

        start = datetime.combine(start, datetime.min.time()) + timedelta(hours=17)
        return CalendarInfo(start, timedelta(7))

    def get_indicator(self, symbol, name):
        indicator_setting = self.indicator_settings[name]

        indicator_dict = {}
        if indicator_setting["type"] == "EMA":
            indicator_dict["model"] = ExponentialMovingAverage(
                symbol, indicator_setting["lookback"]
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "SMA":
            indicator_dict["model"] = SimpleMovingAverage(
                symbol, indicator_setting["lookback"]
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "ROC":
            indicator_dict["model"] = RateOfChange(
                symbol, indicator_setting["lookback"]
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "AO":
            indicator_dict["model"] = AwesomeOscillator(
                symbol, 10, 20, MovingAverageType.Simple)
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "TRIX":
            indicator_dict["model"] = Trix(
                symbol, indicator_setting["lookback"])
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "MOM":
            indicator_dict["model"] = Momentum(
                symbol, indicator_setting["lookback"]
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "CCI":
            indicator_dict["model"] = CommodityChannelIndex(
                symbol, indicator_setting["lookback"]
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "RSI":
            if indicator_setting["ma_type"] == "Exponential":
                indicator_dict["model"] = RelativeStrengthIndex(
                    symbol, indicator_setting["lookback"], MovingAverageType.Exponential
                )
            elif indicator_setting["ma_type"] == "Simple":
                indicator_dict["model"] = RelativeStrengthIndex(
                    symbol, indicator_setting["lookback"], MovingAverageType.Simple
                )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "ATR":
            indicator_dict["model"] = AverageTrueRange(
                symbol, indicator_setting["lookback"]
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "PSAR":
            indicator_dict["model"] = ParabolicStopAndReverse(symbol, 0.02, 0.02, 0.2)
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "BOLL":
            if indicator_setting["ma_type"] == "Exponential":
                indicator_dict["model"] = BollingerBands(
                    name,
                    indicator_setting["lookback"],
                    indicator_setting["std"],
                    MovingAverageType.Exponential,
                )
            elif indicator_setting["ma_type"] == "Simple":
                indicator_dict["model"] = BollingerBands(
                    name,
                    indicator_setting["lookback"],
                    indicator_setting["std"],
                    MovingAverageType.Simple,
                )

            indicator_dict["upper"] = deque(maxlen=indicator_setting["window"])
            indicator_dict["lower"] = deque(maxlen=indicator_setting["window"])
            indicator_dict["mid"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "PHASE":
            indicator_dict["model"] = CustomPhase(name, indicator_setting["lookback"])
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "CRSI":
            indicator_dict["model"] = CustomCRSI(
                name,
                indicator_setting["rsi_len"],
                indicator_setting["rsi_field"],
                indicator_setting["rsi_window"],
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "PIVOT":
            indicator_dict["model"] = CustomPivot(name, indicator_setting["period"])

        elif indicator_setting["type"] == "CHOP":
            indicator_dict["model"] = CustomChoppinessIndex(
                name, indicator_setting["lookback"]
            )
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "DX":
            indicator_dict["model"] = CustomDX(name, indicator_setting["lookback"])
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "MACD":
            indicator_dict["model"] = CustomMACD(name)
            indicator_dict["macd"] = deque(maxlen=indicator_setting["window"])
            indicator_dict["macdsignal"] = deque(maxlen=indicator_setting["window"])
            indicator_dict["macdhist"] = deque(maxlen=indicator_setting["window"])

        elif indicator_setting["type"] == "ULTOSC":
            indicator_dict["model"] = CustomULTOSC(name)
            indicator_dict["val"] = deque(maxlen=indicator_setting["window"])

        return indicator_dict

    def update_indicator(self, bar, name):
        def get_update_val(bar, indicator_setting):
            if indicator_setting["field"] == "Close":
                val = bar.Close
            elif indicator_setting["field"] == "High":
                val = bar.High
            elif indicator_setting["field"] == "Low":
                val = bar.Low
            elif indicator_setting["field"] == "Open":
                val = bar.Open
            return val

        indicator_setting = self.indicator_settings[name]

        if indicator_setting["type"] in ["EMA","SMA","ROC","MOM","RSI"]:
            self.indicators[name]["model"].Update(
                bar.EndTime, get_update_val(bar, indicator_setting)
            )
            self.indicators[name]["val"].appendleft(
                self.indicators[name]["model"].Current.Value
            )

        elif indicator_setting["type"] in ["ATR","PSAR","CCI"]:
            self.indicators[name]["model"].Update(bar)
            self.indicators[name]["val"].appendleft(
                self.indicators[name]["model"].Current.Value
            )

        elif indicator_setting["type"] in ["AO"]:
            self.indicators[name]["model"].Update(bar)
            self.indicators[name]["val"].appendleft(
                self.indicators[name]["model"].Current.Value
            )

        elif indicator_setting["type"] in ["TRIX"]:
            self.indicators[name]["model"].Update(
                 bar.EndTime, get_update_val(bar, indicator_setting)
            )
            self.indicators[name]["val"].appendleft(
                self.indicators[name]["model"].Current.Value
            )

        elif indicator_setting["type"] in ["PHASE", "CRSI", "CHOP", "DX", "ULTOSC"]:
            self.indicators[name]["model"].Update(bar)
            self.indicators[name]["val"].appendleft(
                self.indicators[name]["model"].Value
            )

        elif indicator_setting["type"] in ["PIVOT"]:
            self.indicators[name]["model"].Update(bar)

        elif indicator_setting["type"] in ["BOLL"]:
            self.indicators[name]["model"].Update(
                bar.EndTime, get_update_val(bar, indicator_setting)
            )
            self.indicators[name]["upper"].appendleft(
                self.indicators[name]["model"].UpperBand.Current.Value
            )
            self.indicators[name]["lower"].appendleft(
                self.indicators[name]["model"].LowerBand.Current.Value
            )
            self.indicators[name]["mid"].appendleft(
                self.indicators[name]["model"].MiddleBand.Current.Value
            )

        elif indicator_setting["type"] in ["MACD"]:
            self.indicators[name]["model"].Update(bar)
            self.indicators[name]["macd"].appendleft(
                self.indicators[name]["model"].macd
            )
            self.indicators[name]["macdsignal"].appendleft(
                self.indicators[name]["model"].macdsignal
            )
            self.indicators[name]["macdhist"].appendleft(
                self.indicators[name]["model"].macdhist
            )

    @property
    def IsReady(self):
        # All indicators are ready
        is_ready = all(self.indicators[_i]["model"].IsReady for _i in self.indicators)

        # self.close is fully populated
        is_ready = is_ready and (len(self.close) == self.window_length)

        if "window_large" in self.consolidator_setting:
            # self.close_large is fully populated
            is_ready = is_ready and (len(self.close_large) == self.window_length_large)

        return is_ready




class MarketHours:
    def __init__(self, algorithm, symbol):
        self.hours = algorithm.Securities[symbol].Exchange.Hours
        self.CurrentOpen = self.hours.GetNextMarketOpen(algorithm.Time, False)
        self.CurrentClose = self.hours.GetNextMarketClose(self.CurrentOpen, False)
        self.NextOpen = self.hours.GetNextMarketOpen(self.CurrentClose, False)

    def Update(self):
        self.CurrentOpen = self.NextOpen
        self.CurrentClose = self.hours.GetNextMarketClose(self.CurrentOpen, False)
        self.NextOpen = self.hours.GetNextMarketOpen(self.CurrentClose, False)


from AlgorithmImports import *

import numpy as np
from datetime import datetime, timedelta
from collections import deque
import talib as ta

import networkx as nx


class CustomSimpleMovingAverage:
    def __init__(self, name, lookback):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.queue = deque(maxlen=lookback)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    # Update method is mandatory
    def Update(self, EndTime, Val):
        self.queue.appendleft(Val)
        count = len(self.queue)
        self.Time = EndTime
        self.Value = sum(self.queue) / count
        self.IsReady = count == self.queue.maxlen

    def Undo(self):
        del self.queue[0]
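
# Minimal usage sketch (illustrative only; not used by the strategy). Every custom
# indicator in this module follows the same contract: feed it data via Update() and
# read Value / IsReady afterwards. The dates and prices below are made up.
if __name__ == "__main__":
    _demo = CustomSimpleMovingAverage("demo-SMA", 3)
    for _t, _px in [
        (datetime(2024, 1, 1), 1.10),
        (datetime(2024, 1, 2), 1.11),
        (datetime(2024, 1, 3), 1.12),
    ]:
        _demo.Update(_t, _px)
    assert _demo.IsReady and abs(_demo.Value - 1.11) < 1e-9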

class CustomAwesomeOscillator:
    def __init__(self, name, lookback1, lookback2):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.Value1 = 0
        self.Value2 = 0
        self.IsReady = False
        self.queue1 = deque(maxlen=lookback1)
        self.queue2 = deque(maxlen=lookback2)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    # Update method is mandatory
    def Update(self, EndTime, Val):
        self.queue1.appendleft(Val)
        self.queue2.appendleft(Val)
        count1 = len(self.queue1)
        count2 = len(self.queue2)
        self.Time = EndTime
        self.Value1 = sum(self.queue1) / count1
        self.Value2 = sum(self.queue2) / count2

        self.Value = self.Value1 - self.Value2

        self.IsReady = (count1 == self.queue1.maxlen) and (count2 == self.queue2.maxlen)

    def Undo(self):
        del self.queue1[0]
        del self.queue2[0]


class CustomPhase:
    ## Note: inputs are stored newest-first (descending time order); verify before live use
    ## whether the phase calculation below actually assumes that ordering.

    def __init__(self, name, lookback):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.lookback = lookback
        self.close = deque(maxlen=lookback)
        self.high = deque(maxlen=lookback)
        self.low = deque(maxlen=lookback)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.Time = bar.EndTime

        RealPart = 0.0
        ImagPart = 0.0
        _count = min([len(self.close), len(self.high), len(self.low)])
        for J in range(_count):
            Weight = (
                self.close[J] + self.close[J] + self.high[J] + self.low[J]
            ) * 10000
            if self.lookback != 0:
                RealPart = RealPart + np.cos(90 * J / self.lookback) * Weight * 2
                ImagPart = (
                    (ImagPart + np.sin(90 * J / self.lookback) * Weight)
                    + (ImagPart + np.sin(180 * J / self.lookback) * Weight)
                ) / 2
        Phase = ((np.arctan(ImagPart / RealPart)) - 0.685) * 100
        self.Value = Phase
        self.IsReady = _count == self.lookback


class CustomCRSI:
    ## Note: inputs are stored newest-first (descending time order); verify before live use
    ## whether the CRSI calculation below actually assumes that ordering.

    def __init__(self, name, rsi_len, rsi_field, rsi_window):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.rsi_len = rsi_len
        self.rsi_field = rsi_field
        self.rsi_window = rsi_window
        self.RSI = RelativeStrengthIndex(
            f"{name}-RSI", rsi_len, MovingAverageType.Exponential
        )
        self.RSIval = deque(maxlen=rsi_window)
        self.CRSIval = deque(maxlen=3)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.Time = bar.EndTime
        self.RSI.Update(bar.EndTime, self.get_update_val(bar))
        self.RSIval.appendleft(self.RSI.Current.Value)

        vibration = 10
        torque = 0.618 / (vibration + 1)
        phasingLag = (vibration - 1) / 0.618
        if len(self.RSIval) > int(phasingLag):
            if len(self.CRSIval) > 1:
                crsi1 = self.CRSIval[1]
            else:
                crsi1 = 0
            self.Value = (
                torque * (2 * self.RSIval[0] - self.RSIval[int(phasingLag)])
                + (1 - torque) * crsi1
            )
        else:
            self.Value = 0

        self.CRSIval.appendleft(self.Value)
        self.IsReady = (
            self.RSI.IsReady
            and (len(self.RSIval) == self.rsi_window)
            and (len(self.CRSIval) == 3)
        )

    def get_update_val(self, bar):
        if self.rsi_field == "Close":
            val = bar.Close
        elif self.rsi_field == "High":
            val = bar.High
        elif self.rsi_field == "Low":
            val = bar.Low
        elif self.rsi_field == "Open":
            val = bar.Open
        return val
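
# For reference, with vibration = 10 the CustomCRSI update above computes, once enough
# history has accumulated:
#     torque     = 0.618 / (vibration + 1)          # ~0.056
#     phasingLag = (vibration - 1) / 0.618           # ~14.56, truncated to 14 when indexing
#     Value      = torque * (2 * RSIval[0] - RSIval[14]) + (1 - torque) * CRSIval[1]
# where RSIval / CRSIval are stored newest-first, so RSIval[14] is the RSI from 14
# updates back and CRSIval[1] is the CRSI from two updates earlier (the current value
# is appended only after this computation).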


class CustomPivot:
    def __init__(self, name, lookback):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.lookback = lookback
        self.close = deque(maxlen=lookback)
        self.high = deque(maxlen=lookback)
        self.low = deque(maxlen=lookback)
        self.arr_time = deque(maxlen=lookback)
        self.p = deque(maxlen=lookback)
        self.r1 = deque(maxlen=lookback)
        self.s1 = deque(maxlen=lookback)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.Time = bar.EndTime
        self.arr_time.appendleft(bar.EndTime)
        pivotX_Median = (self.high[0] + self.low[0] + self.close[0]) / 3
        self.Value = pivotX_Median
        self.p.appendleft(pivotX_Median)
        self.r1.appendleft(pivotX_Median * 2 - self.low[0])
        self.s1.appendleft(pivotX_Median * 2 - self.high[0])
        self.IsReady = len(self.p) == self.lookback


class CustomChoppinessIndex:
    def __init__(self, name, lookback):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.lookback = lookback

        self.high = deque(maxlen=lookback)
        self.low = deque(maxlen=lookback)
        self.ATR = AverageTrueRange(f"{name}-ATR", 1)
        self.ATRval = deque(maxlen=lookback)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.Time = bar.EndTime
        self.ATR.Update(bar)
        self.ATRval.appendleft(self.ATR.Current.Value)
        if (max(self.high) - min(self.low)) == 0:
            self.Value = 0
        else:
            self.Value = (
                100
                * np.log10(sum(self.ATRval) / (max(self.high) - min(self.low)))
                / np.log10(self.lookback)
            )
        self.IsReady = self.ATR.IsReady and (len(self.ATRval) == self.lookback)


class CustomDX:
    def __init__(self, name, lookback):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.lookback = lookback
        self.window_len = lookback * 2 + 25  # extra history so ta.DX is well past its unstable warm-up period
        self.high = deque(maxlen=self.window_len)
        self.low = deque(maxlen=self.window_len)
        self.close = deque(maxlen=self.window_len)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.window_len)

        if self.IsReady:
            ta_out = ta.DX(
                np.array(self.high)[::-1],
                np.array(self.low)[::-1],
                np.array(self.close)[::-1],
                timeperiod=self.lookback,
            )
            self.Value = ta_out[-1]
        else:
            self.Value = 0


class CustomMACD:
    ## Note: the close deque is stored newest-first and is deliberately passed to talib
    ## without re-reversing; although talib assumes ascending chronological order, the
    ## reversed feed produced better results in testing, so it is kept as-is.

    def __init__(self, name):
        self.Name = name
        self.Time = datetime.min
        self.IsReady = False
        self.window_len = 100
        self.close = deque(maxlen=self.window_len)
        self.macd = 0
        self.macdsignal = 0
        self.macdhist = 0

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. MACD: {3}".format(
            self.Name, self.IsReady, self.Time, self.macd
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.window_len)

        if self.IsReady:
            macd, macdsignal, macdhist = ta.MACD(
                np.array(self.close), fastperiod=12, slowperiod=26, signalperiod=9
            )
            self.macd = macd[-1]
            self.macdsignal = macdsignal[-1]
            self.macdhist = macdhist[-1]
        else:
            self.macd = 0
            self.macdsignal = 0
            self.macdhist = 0



class CustomULTOSC:
    def __init__(self, name):
        self.Name = name
        self.Time = datetime.min
        self.Value = 0
        self.IsReady = False
        self.window_len = 100
        self.high = deque(maxlen=self.window_len)
        self.low = deque(maxlen=self.window_len)
        self.close = deque(maxlen=self.window_len)

    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}. Value: {3}".format(
            self.Name, self.IsReady, self.Time, self.Value
        )

    def Update(self, bar):
        self.high.appendleft(bar.High)
        self.low.appendleft(bar.Low)
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.window_len)

        if self.IsReady:
            ta_out = ta.ULTOSC(
                np.array(self.high)[::-1],
                np.array(self.low)[::-1],
                np.array(self.close)[::-1],
                timeperiod1=7,
                timeperiod2=14,
                timeperiod3=28,
            )
            self.Value = ta_out[-1]
        else:
            self.Value = 0





class CustomVMD:
    def __init__(self, name):
        self.Name = name
        self.Time = datetime.min
        self.IsReady = False
        self.window_len = 120
        self.alpha = 2000
        self.k = 4
        self.close = deque(maxlen=self.window_len)
        self.Values = {}
        for j in range(self.k):
            self.Values[f"c{j}"] = 0


    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}".format(
            self.Name, self.IsReady, self.Time
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.window_len)

        if self.IsReady:
            tsv = np.array(self.close)[::-1]
            # VMD() (variational mode decomposition) is expected to be provided by an
            # external helper module; it is not defined or imported in this file.
            tso = VMD(tsv, self.k, alpha=self.alpha, tau=0, DC=False, init=1, tol=1e-7)
            for j in range(self.k):
                self.Values[f"c{j}"] = tso[-1,j]



class CustomVG:
    def __init__(self, name, lookback, centrality_type):
        self.Name = name
        self.Time = datetime.min
        self.IsReady = False
        self.lookback = lookback
        self.centrality_type = centrality_type  
        self.max_k = 4
        self.centrality_type_map_dict = {
            'degree': nx.degree_centrality,
            'closeness': nx.closeness_centrality,
            'harmonic': nx.harmonic_centrality,
        }

        self.close = deque(maxlen=lookback)
        self.Values = {}
        for j in range(self.max_k):
            self.Values[f"c{j}"] = 0


    def __repr__(self):
        return "{0} -> IsReady: {1}. Time: {2}".format(
            self.Name, self.IsReady, self.Time
        )

    def Update(self, bar):
        self.close.appendleft(bar.Close)
        self.Time = bar.EndTime
        self.IsReady = len(self.close) == (self.lookback)

        if self.IsReady:
            tsv = np.array(self.close)[::-1]
            # visibility_graph() is expected to be provided by an external helper module
            # (see the illustrative sketch below); it is not defined or imported here.
            tsg = visibility_graph(tsv)
            for j in range(self.max_k):
                tsg_shell = nx.k_shell(tsg, j+1)
                centralities = list(self.centrality_type_map_dict[self.centrality_type](tsg_shell).values())
                self.Values[f"c{j}"] = np.mean(centralities)
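

# The visibility_graph() helper used by CustomVG above is provided elsewhere in the
# project. Below is a hypothetical, naive natural-visibility-graph construction
# (Lacasa et al. criterion), shown purely to illustrate what such a helper is assumed
# to do; it is not called by the strategy.
def _natural_visibility_graph_sketch(series):
    """Connect points (i, y_i) and (j, y_j) if no intermediate point blocks the line of sight."""
    g = nx.Graph()
    n = len(series)
    g.add_nodes_from(range(n))
    for i in range(n):
        for j in range(i + 1, n):
            visible = all(
                series[k] < series[j] + (series[i] - series[j]) * (j - k) / (j - i)
                for k in range(i + 1, j)
            )
            if visible:
                g.add_edge(i, j)
    return g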

























# Version
# Forex LSTM V2.0 Oanda

##-##
IS_LIVE = False
IS_PAPER = True
# notification_value = 'PAPER'

TO_SAVE_DATA = False
# if IS_LIVE:
#     TO_SAVE_DATA = False
# else:
#     TO_SAVE_DATA = True
##-##



    
from AlgorithmImports import *
import json  # used for the webhook notification payloads below
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from collections import deque
import pickle
import torch
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(f"device: {device}")

from config import (
    general_setting,
    consolidator_settings,
    indicator_settings,
    signal_settings,
    model_settings,
)

from data_classes import (
    SymbolData,
    MarketHours,
)

from signal_classes import (
    FxLstmSignal,
)

from model_functions import (
    get_threshold,
    set_seed,
)

from model_classes_both import get_torch_rnn_dataloaders as get_torch_rnn_dataloaders_both
from model_classes_both import get_rnn_model as get_rnn_model_both
from model_classes_both import get_predictions as get_predictions_both

from model_classes_hybrid import get_torch_rnn_dataloaders as get_torch_rnn_dataloaders_hybrid
from model_classes_hybrid import get_rnn_model as get_rnn_model_hybrid
from model_classes_hybrid import get_predictions as get_predictions_hybrid
from model_classes_hybrid import get_regression_pred_decision, get_prediction_hybrid_regression
from sklearn.metrics import mean_squared_error


# Every configured signal name must appear here; Initialize looks each one up by name.
# All signals currently resolve to the same FxLstmSignal class.
signal_mapping = {
    "FxLstm_Both_EURUSD": FxLstmSignal,
    "FxLstm_Hybrid_USDJPY_Trail": FxLstmSignal,
}


from QuantConnect.DataSource import *



class FxLstmAlgo(QCAlgorithm):

    def Initialize(self):

        # self.SetTimeZone(TimeZones.Johannesburg)
        self.set_time_zone(TimeZones.NEW_YORK)

        IS_LIVE = self.get_parameter("IS_LIVE") == 'True'
        IS_PAPER = self.get_parameter("IS_PAPER") == 'True'

        if IS_PAPER:
            self.notification_value = 'PAPER'
        else:
            self.notification_value = 'REAL'


        if TO_SAVE_DATA:
            self.SetStartDate(2013, 1, 1)

        else:
            self.SetStartDate(2024, 1, 1)           # SR: 1.332
            # self.set_end_date(2024, 1 ,10)

        self.SetCash(370000)

        if TO_SAVE_DATA:
            self.SetWarmUp(int(12 * 20 * 24 * 60), Resolution.Minute)
        else:
            self.SetWarmUp(int(6 * 20 * 24 * 60), Resolution.Minute)


        self.SetBrokerageModel(BrokerageName.OandaBrokerage, AccountType.Margin)
        self.to_plot = False
        self.general_setting = general_setting
        self.consolidator_settings = consolidator_settings
        self.indicator_settings = indicator_settings
        self.signal_settings = signal_settings
        self.model_settings = model_settings
        self.ref_ticker = "USDJPY"
        self.model_name = general_setting["model_name"]
        self.month_start_date = None
        self.IS_LIVE = IS_LIVE
        self.TO_SAVE_DATA = TO_SAVE_DATA
        self.signals = self.general_setting["signals"]
        self.lstm_tickers = self.general_setting["lstm_tickers"] 
        self.last_loop_minute = None


        self.counter = 0
        self.mdd = 0
        self.CanTrade = True
        self.peak_value = 0
        self.his_port_value = {}
        self.roll_mdd = 0
        
        self.add_equity('UUP', resolution = Resolution.DAILY)
        self.UUP__macd = self.macd("UUP", 12, 26, 9, MovingAverageType.EXPONENTIAL, Resolution.DAILY)
        self.UUP__trix = self.trix("UUP", 20)
        self.UUP__roc = self.roc("UUP", 10)
        self.UUP = {}

        self.prediction_dict = {}
        for _signal in self.signals:
            self.prediction_dict[_signal] = {}
            for ticker in self.lstm_tickers:
                if ticker in self.signal_settings[_signal]['valid_tickers']:
                    self.prediction_dict[_signal][ticker] = 1
                    
        # Data Initialization
        self.Data = {}
        self.Signal = {}
        self.Counter = {}
        self.SymbolMarketHours = {}
        self.symbol_ticker_map = {}     
        self.ticker_symbol_map = {}
        self.output_data_dict = {}
        self.Futures = {}
        self.FuturesTracker = {}   
        self.FuturesSymbol = {}   
        self.FuturesRefSymbol = {}  

        self.entry_ticket = None
        self.tp1_ticket = []
        self.tp2_ticket = []
        self.sl_ticket = []
        self.Margin_Call = False

        self.overweekend = 0
        self.leftover_qty = None
        self.leftover_ticker = None
        
        self.prev_equity = None
        self.freeze = 0

        self.SPY_equity = self.add_equity("SPY", Resolution.MINUTE)

        for ticker in self.general_setting["tickers"]:
            if general_setting["tickers"][ticker]["type"] == "equity":
                symbol = self.AddEquity(
                    ticker,
                    Resolution.Minute,
                    dataNormalizationMode=DataNormalizationMode.Raw,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "forex":
                symbol = self.AddForex(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol
            elif general_setting["tickers"][ticker]["type"] == "cfd":
                symbol = self.AddCfd(
                    ticker,
                    Resolution.Minute,
                    Market.Oanda,
                ).Symbol

            self.Data[symbol] = SymbolData(
                self,
                symbol,
                ticker,
                general_setting,
                consolidator_settings,
                indicator_settings,
            )

            self.Counter[symbol] = {}
            self.Counter[symbol]["counter"] = 0
            self.Counter[symbol]["last_order_counter"] = 0
            self.SymbolMarketHours[symbol] = MarketHours(self, symbol)
            self.symbol_ticker_map[symbol] = ticker
            self.ticker_symbol_map[ticker] = symbol

            self.Signal[symbol] = {}
            for _signal in self.signals:
                self.Signal[symbol][_signal] = signal_mapping[_signal](
                    self, symbol, ticker, self.general_setting, self.signal_settings[_signal]
                )


        # Model Initialization
        self.Models = {}
        self.Scalers = {}
        self.ModelParams = {}   
        for lstm_ticker in self.lstm_tickers:
            self.Models[lstm_ticker] = {}
            self.Scalers[lstm_ticker] = {}
            self.ModelParams[lstm_ticker] = {}
            for model_type in self.general_setting["model_types"]:
                self.Models[lstm_ticker][model_type] = {}
                self.Scalers[lstm_ticker][model_type] = {}
                self.ModelParams[lstm_ticker][model_type] = {}

                if model_type == 'both':
                    self.Models[lstm_ticker][model_type]['both'] = None
                    self.Scalers[lstm_ticker][model_type]['both'] = None 

                if model_type == 'hybrid':
                    self.Models[lstm_ticker][model_type]['fundamental'] = None
                    self.Models[lstm_ticker][model_type]['technical'] = None
                    self.Scalers[lstm_ticker][model_type]['fundamental'] = None
                    self.Scalers[lstm_ticker][model_type]['technical'] = None


        # Model Data Initialization
        self.data_list_tickers = {}
        self.has_initialized_model_data = {}
        if self.TO_SAVE_DATA:
            self.ModelData = {} 
        else:
            self.ModelData = pickle.loads(bytes(self.ObjectStore.ReadBytes(f"MODEL_DATA_{self.model_name}")))

        for lstm_ticker in self.lstm_tickers:
            self.data_list_tickers[lstm_ticker] = []
            self.has_initialized_model_data[lstm_ticker] = False
            if lstm_ticker not in self.ModelData:
                self.ModelData[lstm_ticker] = pd.DataFrame()
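
        # Scheduled pipeline (times relative to the SPY session unless stated otherwise):
        #   Start_Of_Day          - refresh external data snapshots after the open
        #   Prepare_Model_Data    - roll collected rows into ModelData at FxLstm_prediction_hour:05
        #   Get_Month_Start_Date  - record the first trading day of each month
        #   Train_Model_Both / Train_Model_Hybrid - run daily, but each trains only on the
        #                           ticker's displaced monthly training day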

        self.Schedule.On(
            self.DateRules.EveryDay(self.SPY_equity.symbol),
            self.TimeRules.AfterMarketOpen(self.SPY_equity.symbol),
            self.Start_Of_Day,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.SPY_equity.symbol),
            self.TimeRules.At(self.general_setting['FxLstm_prediction_hour'], 5, 0),
            self.Prepare_Model_Data,
        )

        self.Schedule.On(
            self.DateRules.MonthStart(self.SPY_equity.symbol),
            self.TimeRules.AfterMarketOpen(self.SPY_equity.symbol, 1),
            self.Get_Month_Start_Date,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.SPY_equity.symbol),
            self.TimeRules.AfterMarketOpen(self.SPY_equity.symbol, 2),
            self.Train_Model_Both,
        )

        self.Schedule.On(
            self.DateRules.EveryDay(self.SPY_equity.symbol),
            self.TimeRules.AfterMarketOpen(self.SPY_equity.symbol, 3),
            self.Train_Model_Hybrid,
        )

        self.external_data = {}
        for _dn in self.general_setting["external_data"]:
            self.external_data[_dn] = {}
            self.external_data[_dn]['time'] = None
            self.external_data[_dn]['value'] = None
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'gsheet':
                self.Log(f"{str(self.Time)}: {_dn}: Loading Initial GSheet Data")
                link = self.general_setting["external_data"][_dn]['link']
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                to_run = True
                while to_run:
                    try:
                        data = self.Download(link)
                        rows = []
                        for row in data.split('\n'):
                            content = row.replace('\r', '').lower().split(',')
                            if len(content) == 2:
                                rows.append(content)
                        data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
                        data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
                        data_df[col_val] = data_df[col_val].astype(float)
                        self.external_data[_dn]['data'] = data_df.copy()
                        to_run = False
                    except Exception:
                        # keep retrying until the sheet downloads and parses cleanly
                        pass

                self.Log(f"{str(self.Time)}: {_dn}: Initial GSheet Data Loaded")

            if source == 'NasdaqDataLink':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(NasdaqDataLink, ref, Resolution.Daily).Symbol

            if source == 'equity':
                ticker = self.general_setting["external_data"][_dn]['ticker']
                self.external_data[_dn]['symbol'] = self.AddEquity(ticker, Resolution.Daily).Symbol

            if source == 'USTreasuryYieldCurveRate':
                ref = self.general_setting["external_data"][_dn]['ref']
                self.external_data[_dn]['symbol'] = self.AddData(USTreasuryYieldCurveRate, ref).Symbol


    def Start_Of_Day(self):

        # if self.IS_LIVE and (not self.IsWarmingUp):
        #     for _dn in self.general_setting["external_data"]:
        #         source = self.general_setting["external_data"][_dn]['source']
        #         if source == 'gsheet':
        #             self.Log(f"{str(self.Time)}: {_dn}: Loading GSheet Data")

        #             link = self.general_setting["external_data"][_dn]['link']
        #             col_date = self.general_setting["external_data"][_dn]['col_date']
        #             col_val = self.general_setting["external_data"][_dn]['col_val']

        #             to_run = True
        #             while to_run:
        #                 try:
        #                     data = self.Download(link)
        #                     rows = []
        #                     for row in data.split('\n'):
        #                         content = row.replace('\r', '').lower().split(',')
        #                         if len(content) == 2:
        #                             rows.append(content)
        #                     data_df = pd.DataFrame(np.array(rows[1:]), columns=rows[0])
        #                     data_df[col_date] = data_df[col_date].apply(lambda s: datetime.strptime(s, '%Y-%m-%d'))
        #                     data_df[col_val] = data_df[col_val].astype(float)
        #                     self.external_data[_dn]['data'] = data_df.copy()
        #                     to_run = False
        #                 except:
        #                     pass

        #         self.Log(f"{str(self.Time)}: {_dn}: GSheet Data Loaded")

        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']
            if source == 'gsheet':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                lag_days = self.general_setting["external_data"][_dn]['lag_days']

                data = self.external_data[_dn]['data'][self.external_data[_dn]['data'][col_date] < (self.Time - timedelta(days=lag_days))]
                if len(data) > 0:
                    self.external_data[_dn]['time'] = data[col_date].values[-1]
                    self.external_data[_dn]['value'] = data[col_val].values[-1]

            if source == 'USTreasuryYieldCurveRate':
                col_date = self.general_setting["external_data"][_dn]['col_date']
                col_val = self.general_setting["external_data"][_dn]['col_val']
                symbol = self.external_data[_dn]['symbol']
                history = self.History(USTreasuryYieldCurveRate, symbol, 1, Resolution.Daily)
                history = history.reset_index()
                if len(history) > 0:
                    if col_val in history.columns:
                        self.external_data[_dn]['time'] = pd.to_datetime(history[col_date], utc=True).iloc[0].replace(tzinfo=None)
                        self.external_data[_dn]['value'] = history[col_val].values[0]


    def Prepare_Model_Data(self):
        # self.Log(f"{str(self.Time)}: Preparing Model Data")
        col_price = self.model_settings['col_price']
        col_price_cur = self.model_settings['col_price_cur']
        cols_data = self.model_settings['cols_data']
        col_fundamental = self.model_settings['col_fundamental']
        col_technical = self.model_settings['col_technical']
        start_year = self.model_settings['start_year']
        trade_hour = self.model_settings['trade_hour']
        scaled_tickers = self.model_settings['scaled_tickers']
        prediction_lookforward_days = self.model_settings['prediction_lookforward_days']
        col_target_gains = f"gains_N{self.model_settings['prediction_lookforward_days']}D"
        inflation_map_dict = self.model_settings['inflation_map_dict']

        to_save_data = False
        for lstm_ticker in self.lstm_tickers:
            data_df = self.ModelData[lstm_ticker].copy()

            has_new_data = False
            if len(self.data_list_tickers[lstm_ticker]) > 0:
                has_new_data = True

                data_df_new = pd.DataFrame(self.data_list_tickers[lstm_ticker]).copy()

                if lstm_ticker in scaled_tickers:
                    data_df_new[col_price] = data_df_new[col_price] / 100
                    data_df_new[col_price_cur] = data_df_new[col_price_cur] / 100

                data_df_new = data_df_new[cols_data]

                data_df_new['year'] = data_df_new['datetime'].dt.year
                data_df_new['hour'] = data_df_new['datetime'].dt.hour
                data_df_new['month'] = data_df_new['datetime'].dt.month
                data_df_new['year_month'] = data_df_new['year'].astype(str) + "-" + data_df_new['month'].astype(str).apply(lambda s: s.zfill(2))

                data_df = pd.concat([data_df, data_df_new])

            if len(data_df) > 0:
                if (not self.has_initialized_model_data[lstm_ticker]) or has_new_data:
                    self.has_initialized_model_data[lstm_ticker] = True

                    data_df.reset_index(drop=True, inplace=True)
                    if self.TO_SAVE_DATA:
                        data_df.drop_duplicates('datetime', keep='last', inplace=True)
                    else:
                        data_df.drop_duplicates('datetime', keep='first', inplace=True)

                    data_df.reset_index(drop=True, inplace=True)
                    data_df.sort_values('datetime', ascending=True, inplace=True)
                    data_df.reset_index(drop=True, inplace=True)

                    for col in col_fundamental + col_technical:
                        data_df[col] = data_df[col].ffill()

                    data_df = data_df[data_df['year'] >= start_year]
                    data_df = data_df[data_df['hour'] == trade_hour]
                    data_df.reset_index(drop=True, inplace=True)

                    for col in inflation_map_dict:
                        col_cpi = inflation_map_dict[col]

                        ## FRED CPI value is the contribution to inflation; tested using it directly
                        ## (no differencing) as well as with d1 / d30 differences.
                        data_df[f"{col}_d1"] = data_df[col_cpi] - data_df[col_cpi].shift(1)
                        # NOTE: despite the "_d30" name this is currently also a 1-day difference;
                        # use .shift(30) here if a true 30-day difference is intended.
                        data_df[f"{col}_d30"] = data_df[col_cpi] - data_df[col_cpi].shift(1)

                    data_df[col_target_gains] = data_df[col_price].shift(-prediction_lookforward_days) - data_df[col_price]

                    self.ModelData[lstm_ticker] = data_df.copy()
                    self.data_list_tickers[lstm_ticker] = []

                    to_save_data = True

        if to_save_data and self.TO_SAVE_DATA:
            self.ObjectStore.SaveBytes(f"MODEL_DATA_{self.model_name}", pickle.dumps(self.ModelData))
            self.Log(f"{str(self.Time)}: Model Data Saved At: MODEL_DATA_{self.model_name}")

        # self.Log(f"{str(self.Time)}: Model Data Prepared")


    def Get_Month_Start_Date(self):
        self.month_start_date = self.Time


    def Train_Model_Both(self):
        model_type = "both"
        # self.Log(f"{str(self.Time)}: {model_type}: Training Model")
        model_setting = self.model_settings[f"model_settings_{model_type}"]

        col_date = self.model_settings['col_date']
        col_price = self.model_settings['col_price']
        col_price_cur = self.model_settings['col_price_cur']
        col_target = self.model_settings['col_target']
        prediction_lookforward_days = self.model_settings['prediction_lookforward_days']
        col_target_gains = f"gains_N{self.model_settings['prediction_lookforward_days']}D"

        use_gru_model = model_setting['use_gru_model']
        use_dual_lstm = model_setting['use_dual_lstm']
        epochs = model_setting['epochs']
        hidden_size = model_setting['hidden_size']
        window_size = model_setting['window_size']
        thres_multiplier = model_setting['thres_multiplier']
        use_early_stop = model_setting['use_early_stop']
        learning_rate = model_setting['learning_rate']
        batch_size = model_setting['batch_size']
        use_weighted_sampler = model_setting['use_weighted_sampler']
        volatility_type = model_setting['volatility_type']
        valid_lookback_months = model_setting['valid_lookback_months']
        train_lookback_months = model_setting['train_lookback_months']
        inflation_map_dict = self.model_settings['inflation_map_dict']

        for lstm_ticker in self.lstm_tickers:

            if self.month_start_date is None:
                continue
            else:
                month_start_year = self.month_start_date.year
                month_start_month = self.month_start_date.month
                month_start_day = self.month_start_date.day

            model_train_day = month_start_day + self.general_setting["lstm_model_training_displace_days"][lstm_ticker]
            to_train = (self.Time.year == month_start_year) and (self.Time.month == month_start_month) and (self.Time.day == model_train_day)
            if not to_train:
                continue

            data_df = self.ModelData[lstm_ticker].copy()
            if len(data_df) == 0:
                continue

            col_feature_both = model_setting["col_feature_dict"][lstm_ticker]
            year_month_list = sorted(list(set(data_df['year_month'])))
            year_month_vec = np.array(year_month_list)
            year_vec = np.array(sorted(list(set(data_df['year']))))

            test_year_month = f"{self.Time.year}-{str(self.Time.month).zfill(2)}"
            valid_year_month_list = list(year_month_vec[year_month_vec < test_year_month][-valid_lookback_months:])
            if len(valid_year_month_list) < valid_lookback_months:
                continue

            if np.sum(year_month_vec < min(valid_year_month_list)) == 0:
                continue

            train_year_month_list = list(year_month_vec[year_month_vec < min(valid_year_month_list)][-train_lookback_months:])
            if len(train_year_month_list) < train_lookback_months:
                continue

            data_df_temp = data_df.copy()
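
            # Three-class target labeling over prediction_lookforward_days:
            #   0 = expected drop beyond the threshold, 1 = within the band (flat),
            #   2 = expected rise beyond the threshold. The 'thres_auto_*' variants derive
            #   the threshold from recent price history via get_threshold(); the others use
            #   fixed thresholds.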

            if volatility_type == 'thres_v1':
                col_target_gains_thres = 0.00200
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2

            if volatility_type == 'thres_v2':
                col_target_gains_thres = 0.00235
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2

            if volatility_type == 'thres_auto_v1':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2

            if volatility_type == 'thres_auto_v2':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list+valid_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier
                data_df_temp[col_target] = 1
                data_df_temp.loc[data_df_temp[col_target_gains] < -col_target_gains_thres, col_target] = 0
                data_df_temp.loc[data_df_temp[col_target_gains] > col_target_gains_thres, col_target] = 2

            self.ModelParams[lstm_ticker][model_type]['col_target_gains_thres'] = col_target_gains_thres

            data_df_temp = data_df_temp.dropna()
            data_df_temp.reset_index(drop=True, inplace=True)

            train_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
            valid_df = data_df_temp[data_df_temp['year_month'].isin(valid_year_month_list)].copy()

            valid_df_windowed = pd.concat([train_df, valid_df]).copy()
            valid_df_windowed = valid_df_windowed.tail(len(valid_df) + window_size - 1)
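            # Keeping the last (window_size - 1) training rows in front of the validation rows
            # ensures the first validation sample still has a complete lookback window when
            # sequences are built by the dataloader.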

            set_seed(100)
            (train_loader, val_loader, 
            _, scaler, weighted_sampler, class_weights) = get_torch_rnn_dataloaders_both(
                [col_price] + col_feature_both, col_target, train_df, valid_df_windowed, None, window_size, batch_size,
                use_weighted_sampler=use_weighted_sampler,
                has_test_data=False,
            )
            self.Scalers[lstm_ticker][model_type]['both'] = None
            self.Scalers[lstm_ticker][model_type]['both'] = scaler
            self.Models[lstm_ticker][model_type]['both'] = None
            self.Models[lstm_ticker][model_type]['both'] = get_rnn_model_both(
                [col_price] + col_feature_both, train_loader, val_loader, 
                epochs, batch_size, learning_rate, window_size, hidden_size, device, 
                use_early_stop=use_early_stop, use_weighted_sampler=use_weighted_sampler, class_weights=class_weights,
                use_dual_lstm=use_dual_lstm, use_gru_model=use_gru_model,
            )
            self.Log(f"{str(self.Time)}: {model_type}: {lstm_ticker}: Model Trained")


    def Train_Model_Hybrid(self):
        model_type = "hybrid"
        # self.Log(f"{str(self.Time)}: {model_type}: Training Model")
        model_setting = self.model_settings[f"model_settings_{model_type}"]

        col_date = self.model_settings['col_date']
        col_price = self.model_settings['col_price']
        col_price_cur = self.model_settings['col_price_cur']
        col_target = self.model_settings['col_target']
        prediction_lookforward_days = self.model_settings['prediction_lookforward_days']
        col_target_gains = f"gains_N{self.model_settings['prediction_lookforward_days']}D"

        use_gru_model = model_setting['use_gru_model']
        use_dual_lstm = model_setting['use_dual_lstm']
        epochs = model_setting['epochs']
        hidden_size = model_setting['hidden_size']
        window_size = model_setting['window_size']
        thres_multiplier = model_setting['thres_multiplier']
        learning_rate = model_setting['learning_rate']
        batch_size = model_setting['batch_size']
        volatility_type = model_setting['volatility_type']
        valid_lookback_months = model_setting['valid_lookback_months']
        train_lookback_months = model_setting['train_lookback_months']
        inflation_map_dict = self.model_settings['inflation_map_dict']

        for lstm_ticker in self.lstm_tickers:

            if self.month_start_date is None:
                # self.debug('condition 0 failed')
                continue
            else:
                month_start_year = self.month_start_date.year
                month_start_month = self.month_start_date.month
                month_start_day = self.month_start_date.day

            model_train_day = month_start_day + self.general_setting["lstm_model_training_displace_days"][lstm_ticker]
            to_train = (self.Time.year == month_start_year) and (self.Time.month == month_start_month) and (self.Time.day == model_train_day)
            if not to_train:
                continue

            data_df = self.ModelData[lstm_ticker]
            if len(data_df) == 0:
                continue

            col_feature_fundamental = model_setting["col_feature_fundamental_dict"][lstm_ticker]
            col_feature_technical = model_setting["col_feature_technical_dict"][lstm_ticker]

            year_month_list = sorted(list(set(data_df['year_month'])))
            year_month_vec = np.array(year_month_list)
            year_vec = np.array(sorted(list(set(data_df['year']))))

            test_year_month = f"{self.Time.year}-{str(self.Time.month).zfill(2)}"
            valid_year_month_list = list(year_month_vec[year_month_vec < test_year_month][-valid_lookback_months:])
            if len(valid_year_month_list) < valid_lookback_months:
                continue

            if np.sum(year_month_vec < min(valid_year_month_list)) == 0:
                continue

            train_year_month_list = list(year_month_vec[year_month_vec < min(valid_year_month_list)][-train_lookback_months:])
            if len(train_year_month_list) < train_lookback_months:
                continue

            data_df_temp = data_df.copy()

            if volatility_type == 'thres_v1':
                col_target_gains_thres = 0.00200

            if volatility_type == 'thres_v2':
                col_target_gains_thres = 0.00235

            if volatility_type == 'thres_auto_v1':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier

            if volatility_type == 'thres_auto_v2':
                thres_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list+valid_year_month_list)].copy()
                thres_df.reset_index(drop=True, inplace=True)
                col_target_gains_thres = get_threshold(thres_df[col_price]) * thres_multiplier

            self.ModelParams[lstm_ticker][model_type]['col_target_gains_thres'] = col_target_gains_thres

            data_df_temp[col_target] = data_df_temp[col_price].shift(-prediction_lookforward_days)
            data_df_temp = data_df_temp.dropna()
            data_df_temp.reset_index(drop=True, inplace=True)


            train_df = data_df_temp[data_df_temp['year_month'].isin(train_year_month_list)].copy()
            valid_df = data_df_temp[data_df_temp['year_month'].isin(valid_year_month_list)].copy()

            # self.Log(f"last date of train is {data_df_temp}")
            
            set_seed(100)
            (train_loader, val_loader, _, scaler) = get_torch_rnn_dataloaders_hybrid(
                [col_price] + col_feature_fundamental, col_target, train_df, valid_df, None, window_size, batch_size,
                has_test_data=False,
            )

            self.Scalers[lstm_ticker][model_type]['fundamental'] = None
            self.Scalers[lstm_ticker][model_type]['fundamental'] = scaler

            self.Models[lstm_ticker][model_type]['fundamental'] = None
            self.Models[lstm_ticker][model_type]['fundamental'] = get_rnn_model_hybrid(
                [col_price] + col_feature_fundamental, train_loader, val_loader, 
                epochs, learning_rate, hidden_size, device, 
                use_dual_lstm=use_dual_lstm, use_gru_model=use_gru_model,
            )

            y_pred_val = get_predictions_hybrid(
                val_loader, 
                self.Models[lstm_ticker][model_type]['fundamental'], 
                self.Scalers[lstm_ticker][model_type]['fundamental'], 
                [col_price] + col_feature_fundamental, 
                device,
            )
            valid_df['pred_price_fundamental'] = y_pred_val
            valid_df['pred_fundamental'] = (valid_df['pred_price_fundamental'] - valid_df[col_price]).apply(get_regression_pred_decision, col_target_gains_thres=col_target_gains_thres)

            set_seed(100)
            (train_loader, val_loader, _, scaler) = get_torch_rnn_dataloaders_hybrid(
                [col_price] + col_feature_technical, col_target, train_df, valid_df, None, window_size, batch_size,
                has_test_data=False,
            )

            self.Scalers[lstm_ticker][model_type]['technical'] = scaler

            self.Models[lstm_ticker][model_type]['technical'] = get_rnn_model_hybrid(
                [col_price] + col_feature_technical, train_loader, val_loader, 
                epochs, learning_rate, hidden_size, device, 
                use_dual_lstm=use_dual_lstm, use_gru_model=use_gru_model,
            )

            y_pred_val = get_predictions_hybrid(
                val_loader, 
                self.Models[lstm_ticker][model_type]['technical'], 
                self.Scalers[lstm_ticker][model_type]['technical'], 
                [col_price] + col_feature_technical, 
                device,
            )
            valid_df['pred_price_technical'] = y_pred_val
            valid_df['pred_technical'] = (valid_df['pred_price_technical'] - valid_df[col_price]).apply(get_regression_pred_decision, col_target_gains_thres=col_target_gains_thres)

            fundamental_mse = mean_squared_error(valid_df['pred_price_fundamental'], valid_df[col_target])
            technical_mse = mean_squared_error(valid_df['pred_price_technical'], valid_df[col_target])

            self.ModelParams[lstm_ticker][model_type]['fundamental_mse'] = fundamental_mse
            self.ModelParams[lstm_ticker][model_type]['technical_mse'] = technical_mse
            self.Log(f"{str(self.Time)}: {model_type}: {lstm_ticker}: Model Trained")


    def OnData(self, data):

        # Prepare external Data
        for _dn in self.general_setting["external_data"]:
            source = self.general_setting["external_data"][_dn]['source']

            if source == 'NasdaqDataLink':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    self.external_data[_dn]['value'] = data[symbol].Value

            if source == 'equity':
                symbol = self.external_data[_dn]['symbol']
                if data.ContainsKey(symbol):
                    if data[symbol] is not None:
                        self.external_data[_dn]['time'] = data[symbol].Time
                        self.external_data[_dn]['value'] = data[symbol].Price

        # if self.IS_LIVE:
        #     if self.mdd >= 0.2:
        #         self.CanTrade = False
        #         self.counter = 0
        #         self.liquidate()
        #         self.peak_value = 0
        #         self.mdd = 0
        #         self.Log("USDJPY Stop Trading")
        #         a = { "text": f"[USDJPY Real Money DONOT TRADE update] USDJPY Stop Trading" }
        #         payload = json.dumps(a)
        #         self.notify.web("https://hooks.slack.com/services/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", payload)

        #     if self.counter != 216:
        #         self.counter += 1
        #         self.peak_value = max(self.peak_value, self.portfolio.total_portfolio_value)
        #         self.mdd = max(self.mdd, 1- float(self.portfolio.total_portfolio_value / self.peak_value) )
        #     else:
        #         self.counter = 0
        #         self.peak_value = 0
        #         self.mdd = 0

        if not self.CanTrade:
            return

        if self.IS_LIVE and not self.IsWarmingUp:
            self.his_port_value[self.Time] = self.portfolio.total_portfolio_value
            curr_ports = [self.his_port_value[x] for x in self.his_port_value.keys() if x >= self.Time - timedelta(hours=36)]
            curr_peak = max(curr_ports)
            self.roll_mdd = 1 - self.portfolio.total_portfolio_value / curr_peak
            if self.roll_mdd >= 0.12:
                self.CanTrade = False
                self.Log("USDJPY Stop Trading")
                a = { "text": f"[US--{self.notification_value}--USDJPY DO NOT TRADE update] USDJPY Stop Trading" }
                payload = json.dumps(a)
                self.notify.web("https://hooks.slack.com/services/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", payload)


        if self.CanTrade:
            if self.Time.minute in (0, 10, 20, 30, 40, 50):
                if (not self.portfolio['USDJPY'].invested) and self.prev_equity:
                    if self.portfolio.total_portfolio_value < self.prev_equity:
                        self.freeze = 1
                        self.Log("Start to freeze")
                        a = { "text": f"[US--{self.notification_value}--USDJPY freeze update] Start to freeze" }
                        payload = json.dumps(a)
                        self.notify.web("https://hooks.slack.com/services/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", payload)
                    self.prev_equity = None

                if self.freeze != 0:
                    if self.Time.hour == 18:
                    # if self.freeze == 13 :
                        self.freeze = 0
                        self.Log("Freeze is over")
                        a = { "text": f"[US--{self.notification_value}--USDJPY freeze update] USDJPY freeze period over" }
                        payload = json.dumps(a)
                        self.notify.web("https://hooks.slack.com/services/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", payload)
                    else:   
                        # self.freeze +=1
                        return
                else:

                    for symbol, symbolData in self.Data.items():
                        if not (
                            data.ContainsKey(symbol)
                            and data[symbol] is not None
                            and symbolData.IsReady
                        ):
                            continue

                        ticker = self.symbol_ticker_map[symbol]

                        is_valid_time = (self.Time.minute == 0) and (
                            self.Time.hour == self.general_setting['FxLstm_prediction_hour']
                        )

                        if is_valid_time:
                            if ticker in self.lstm_tickers:

                                data_dict = {}
                                data_dict["datetime"] = self.Time
                                data_dict["price"] = np.round(data[symbol].Price, 6)

                                # Daily Data
                                _consolidator = symbolData.consolidators["D1"]
                                data_dict["close_D1"] = _consolidator.close[0]

                                # External Data
                                for _dn in self.general_setting["external_data"]:    
                                    data_dict[_dn] = self.external_data[_dn]['value']

                                # Technical Features
                                for _tf in self.general_setting["features"]:
                                    _consolidator = symbolData.consolidators[_tf]
                                    for _in in self.general_setting["features"][_tf]:
                                        _indicator = _consolidator.indicators[_in]

                                        if _in in self.general_setting["features_val_map"]:
                                            for _v in self.general_setting["features_val_map"][
                                                _in
                                            ]:
                                                data_dict[f"{_tf}-{_in}-{_v}"] = np.round(
                                                    _indicator[_v][0], 5
                                                )

                                if self.TO_SAVE_DATA:
                                    if not self.IsWarmingUp:    
                                        self.data_list_tickers[ticker] += [data_dict]
                                else:
                                    self.data_list_tickers[ticker] += [data_dict]

                                col_price = self.model_settings['col_price']
                                col_price_cur = self.model_settings['col_price_cur']
                                cols_data = self.model_settings['cols_data']
                                col_fundamental = self.model_settings['col_fundamental']
                                col_technical = self.model_settings['col_technical']
                                start_year = self.model_settings['start_year']
                                trade_hour = self.model_settings['trade_hour']
                                col_target = self.model_settings['col_target']
                                scaled_tickers = self.model_settings['scaled_tickers']
                                inflation_map_dict = self.model_settings['inflation_map_dict']
                                max_window_size = self.model_settings['max_window_size']

                                test_df = pd.DataFrame()
                                if len(self.data_list_tickers[ticker]) > 0:
                                    data_df_new = pd.DataFrame(self.data_list_tickers[ticker]).copy()
                                    if ticker in scaled_tickers:
                                        data_df_new[col_price] = data_df_new[col_price] / 100
                                        data_df_new[col_price_cur] = data_df_new[col_price_cur] / 100

                                    data_df_new = data_df_new[cols_data]

                                    data_df_new['year'] = data_df_new['datetime'].dt.year
                                    data_df_new['hour'] = data_df_new['datetime'].dt.hour
                                    data_df_new['month'] = data_df_new['datetime'].dt.month
                                    data_df_new['year_month'] = data_df_new['year'].astype(str) + "-" + data_df_new['month'].astype(str).apply(lambda s: s.zfill(2))

                                    data_df = self.ModelData[ticker].copy()
                                    if len(data_df) > 0:
                                        idx = data_df['datetime'] < self.Time
                                        data_df = data_df[idx]
                                        data_df.reset_index(drop=True, inplace=True)

                                    data_df = pd.concat([data_df, data_df_new])
                                    data_df.reset_index(drop=True, inplace=True)

                                    data_df.drop_duplicates('datetime', keep='last', inplace=True)
                                    data_df.reset_index(drop=True, inplace=True)

                                    data_df.sort_values('datetime', ascending=True, inplace=True)
                                    data_df.reset_index(drop=True, inplace=True)

                                    for col in col_fundamental + col_technical:
                                        data_df[col] = data_df[col].fillna(method='ffill')

                                    data_df = data_df[data_df['year'] >= start_year]
                                    data_df = data_df[data_df['hour'] == trade_hour]
                                    data_df.reset_index(drop=True, inplace=True)

                                    for col in inflation_map_dict:
                                        col_cpi = inflation_map_dict[col]

                                        ## FRED CPI value is contribution to inflation. To test using it directly without differencing, as well as d1, d30 differences on BOTH
                                        data_df[f"{col}_d1"] = data_df[col_cpi] - data_df[col_cpi].shift(1)
                                        data_df[f"{col}_d30"] = data_df[col_cpi] - data_df[col_cpi].shift(30)

                                    test_df = data_df.tail(max_window_size).copy()
                                    test_df.reset_index(drop=True, inplace=True)


                                for model_type in self.general_setting["model_types"]:
                                    if len(test_df) == 0:
                                        continue

                                    if model_type == 'both':
                                        if self.Models[ticker][model_type]['both'] is None:
                                            continue

                                    if model_type == 'hybrid':
                                        if self.Models[ticker][model_type]['fundamental'] is None:
                                            continue
                                        if self.Models[ticker][model_type]['technical'] is None:
                                            continue

                                    model_setting = self.model_settings[f"model_settings_{model_type}"]

                                    test_df_windowed = test_df.tail(model_setting['window_size']).copy()
                                    test_df_windowed.reset_index(drop=True, inplace=True)

                                    if len(test_df_windowed) != model_setting['window_size']:
                                        continue

                                    if model_type == 'both':
                                        col_feature_both = model_setting["col_feature_dict"][ticker]
                                        test_df_windowed[col_target] = 1

                                        (_, _, test_loader, _, _, _) = get_torch_rnn_dataloaders_both(
                                            [col_price] + col_feature_both, col_target, None, None, test_df_windowed.copy(), 
                                            model_setting['window_size'], model_setting['batch_size'],
                                            use_weighted_sampler=False,
                                            has_test_data=True,
                                            is_training=False,
                                            scaler=self.Scalers[ticker][model_type]['both'],
                                        )

                                        (y_pred_list, y_score_list) = get_predictions_both(test_loader, self.Models[ticker][model_type]['both'], device)
                                        y_pred = y_pred_list[-1]

                                    if model_type == 'hybrid':
                                        col_feature_fundamental = model_setting["col_feature_fundamental_dict"][ticker]
                                        col_feature_technical = model_setting["col_feature_technical_dict"][ticker]
                                        test_df_windowed[col_target] = 1
                                        ref_price = test_df_windowed[col_price].values[-1]
                                        col_target_gains_thres = self.ModelParams[ticker][model_type]['col_target_gains_thres']
                                        fundamental_mse = self.ModelParams[ticker][model_type]['fundamental_mse']
                                        technical_mse = self.ModelParams[ticker][model_type]['technical_mse']

                                        (_, _, test_loader, _) = get_torch_rnn_dataloaders_hybrid(
                                            [col_price] + col_feature_fundamental, col_target, None, None, test_df_windowed.copy(), 
                                            model_setting['window_size'], model_setting['batch_size'],
                                            has_test_data=True,
                                            is_training=False,
                                            scaler=self.Scalers[ticker][model_type]['fundamental'],
                                        )

                                        y_pred_val = get_predictions_hybrid(
                                            test_loader, 
                                            self.Models[ticker][model_type]['fundamental'], 
                                            self.Scalers[ticker][model_type]['fundamental'],
                                            [col_price] + col_feature_fundamental,
                                            device,
                                        )
                                        pred_price_fundamental = y_pred_val[-1]
                                        pred_fundamental = get_regression_pred_decision(pred_price_fundamental - ref_price, col_target_gains_thres)

                                        (_, _, test_loader, _) = get_torch_rnn_dataloaders_hybrid(
                                            [col_price] + col_feature_technical, col_target, None, None, test_df_windowed.copy(), 
                                            model_setting['window_size'], model_setting['batch_size'],
                                            has_test_data=True,
                                            is_training=False,
                                            scaler=self.Scalers[ticker][model_type]['technical'],
                                        )

                                        y_pred_val = get_predictions_hybrid(
                                            test_loader, 
                                            self.Models[ticker][model_type]['technical'], 
                                            self.Scalers[ticker][model_type]['technical'],
                                            [col_price] + col_feature_technical,
                                            device,
                                        )
                                        pred_price_technical = y_pred_val[-1]
                                        pred_technical = get_regression_pred_decision(pred_price_technical - ref_price, col_target_gains_thres)
                                        y_pred = get_prediction_hybrid_regression(pred_fundamental, pred_technical, fundamental_mse, technical_mse)


                                    for _signal in self.signals:
                                        if ticker in self.signal_settings[_signal]['valid_tickers']:
                                            pred_type = self.signal_settings[_signal]['pred_type']
                                            lstm_ticker = self.signal_settings[_signal]['lstm_ticker']   

                                            if (pred_type == model_type) and (lstm_ticker == ticker):
                                                self.prediction_dict[_signal][ticker] = y_pred
                                                self.Signal[symbol][_signal].update_prediction_direction(self.prediction_dict[_signal][ticker])


                        # symbolQuantity = 0
                        if self.IsWarmingUp:
                            continue

                        # if self.Time.minute ==0 & :
                        #     self.plot('rolling mdd','mdd', self.mdd)
                        #     self.debug(self.mdd)
                        for _signal in self.signals:
                            if ticker in self.signal_settings[_signal]['valid_tickers']:
                                to_exit = self.Signal[symbol][_signal].check_exit(symbolData, data[symbol].Price, data[symbol].Time)
                                if to_exit:
                                    self.Log(f"{str(self.Time)}: {ticker}: {_signal}: EXIT: {self.Signal[symbol][_signal].exitType}: Entry Price: {self.Signal[symbol][_signal].entryPrice:,.2f}: Exit Price: {data[symbol].Price:,.2f}: Quantity: {self.Signal[symbol][_signal].quantity}")
                                    self.Signal[symbol][_signal].update_exit()
                                    self.liquidate(symbol)
                                # elif (self.Time.weekday() == 4 and self.Time.hour == 16 and self.Time.minute == 30):
                                #     # self.Log(f"{str(self.Time)}: {ticker}: {_signal}: EXIT: {self.Signal[symbol][_signal].exitType}: Entry Price: {self.Signal[symbol][_signal].entryPrice:,.2f}: Exit Price: {data[symbol].Price:,.2f}: Quantity: {self.Signal[symbol][_signal].quantity}")
                                #     self.leftover_qty = self.Signal[symbol][_signal].quantity
                                #     self.Signal[symbol][_signal].update_exit()
                                #     self.liquidate(symbol)
                                #     self.overweekend = 1
                                #     self.leftover_ticker = symbol
                                self.UUP['trix'] = self.UUP__trix.current.value
                                self.UUP['macd'] = self.UUP__macd.current.value
                                self.UUP['roc'] = self.UUP__roc.current.value

                                has_enter = self.Signal[symbol][_signal].enter(symbolData, data[symbol].Price, self.portfolio[symbol].invested,data[symbol].Time, self.portfolio.total_portfolio_value, self.UUP)

                                if has_enter:
                                    self.prev_equity = self.portfolio.total_portfolio_value
                                    quantity = self.Signal[symbol][_signal].quantity * self.Signal[symbol][_signal].allocation_multiplier
                                    quantity = int(np.ceil(quantity))
                                    # self.Log(f"{str(self.Time)}: {ticker}: ENTRY Price: {data[symbol].Price:,.2f} | Quantity: {self.Signal[symbol][_signal].quantity} | Reason: {self.Signal[symbol][_signal].quote}")  
                                    price = self.Securities[symbol].Price
                                    if  quantity > 0:
                                        # split the orders
                                        self.entry_ticket = []
                                        i = 0
                                        while quantity > 10000000:

                                            self.entry_ticket.append(self.LimitOrder(symbol, 10000000, round(price*(1+0.001),5)))
                                            # self.Log(f"{str(self.Time)}: {ticker}: {_signal}: ENTRY: Price: {data[symbol].Price:,.2f}: Quantity: 10000000")  
                                            self.Log(f"{str(self.Time)}: {ticker}: ENTRY Price: {data[symbol].Price:,.2f} | Quantity: 10000000 | Reason: {self.Signal[symbol][_signal].quote}")  
                                            quantity -=  10000000
                                            i += 1
                                        self.entry_ticket.append(self.LimitOrder(symbol, quantity, round(price*(1+0.001),5)))
                                        # self.Log(f"{str(self.Time)}: {ticker}: {_signal}: ENTRY: Price: {data[symbol].Price:,.2f}: Quantity: {quantity}")  
                                        self.Log(f"{str(self.Time)}: {ticker}: ENTRY Price: {data[symbol].Price:,.2f} | Quantity: {self.Signal[symbol][_signal].quantity} | Reason: {self.Signal[symbol][_signal].quote}")  
                                    
                                    elif quantity < 0:
                                        self.entry_ticket = []
                                        i = 0
                                        while quantity < -10000000:
                                            self.entry_ticket.append(self.LimitOrder(symbol, -10000000, round(price*(1-0.001),5)))
                                            # self.Log(f"{str(self.Time)}: {ticker}: {_signal}: ENTRY: Price: {data[symbol].Price:,.2f}: Quantity: -10000000")  
                                            self.Log(f"{str(self.Time)}: {ticker}: ENTRY Price: {data[symbol].Price:,.2f} | Quantity: -10000000 | Reason: {self.Signal[symbol][_signal].quote}")  
                                            quantity += 10000000
                                            i += 1
                                        self.entry_ticket.append(self.LimitOrder(symbol, quantity, round(price*(1-0.001),5)))
                                        # self.Log(f"{str(self.Time)}: {ticker}: {_signal}: ENTRY: Price: {data[symbol].Price:,.2f}: Quantity: {quantity}")  
                                        self.Log(f"{str(self.Time)}: {ticker}: ENTRY Price: {data[symbol].Price:,.2f} | Quantity: {self.Signal[symbol][_signal].quantity} | Reason: {self.Signal[symbol][_signal].quote}")  

    def on_margin_call(self, requests):
        self.debug('Margin Call is coming')
        self.Margin_Call =  True

        a = { "text": f"[US--{self.notification_value}--USDJPY Margin Call update]Margin Call is coming" }
        payload = json.dumps(a)
        self.notify.web("https://hooks.slack.com/services/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", payload)

        return requests

    def OnOrderEvent(self, orderEvent):

        # self.Log(f'{orderEvent.OrderId}--{orderEvent.Status}--{orderEvent.quantity}')
        
        if orderEvent.Status != OrderStatus.Filled:
            return
        
        # if self.CanTrade == False:
        #     self.Transactions.CancelOpenOrders()
        IF_MARGIN = True
        # self.debug(f"The order id is {orderEvent.OrderId}, and the message is {orderEvent.message}")

        # Webhook Notification    
        price = orderEvent.FillPrice
        quantity = orderEvent.quantity
        self.debug(f"[US--{self.notification_value}--USDJPY order update] \nPrice: {price} \nQuantity: {quantity}")
        a = { "text": f"[US--{self.notification_value}--USDJPY order update] \nPrice: {price} \nQuantity: {quantity}" }
        payload = json.dumps(a)
        self.notify.web("https://hooks.slack.com/services/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", payload)


        if len(self.entry_ticket) > 0:
            # When entry order is filled, place TP and SL orders

            for i in range(len(self.entry_ticket)):
                if (orderEvent.OrderId == self.entry_ticket[i].OrderId):
                    IF_MARGIN = False
                    price = orderEvent.FillPrice
                    quantity = orderEvent.absolute_fill_quantity
                    direction = orderEvent.direction
                    q1 = int(quantity / 2)
                    q2 = quantity - q1
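                    # Bracket the filled entry: two take-profit legs of roughly half
                    # the fill each, plus one stop-loss order covering the full
                    # filled quantity.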
                    if direction == 0: # Long

                        # self.tp1_ticket.append((self.LimitOrder(orderEvent.Symbol, - q1, round(price*1.005,5))))
                        # self.tp2_ticket.append((self.LimitOrder(orderEvent.Symbol, - q2, round(price*1.0025,5))))
                        # self.sl_ticket.append((self.StopMarketOrder(orderEvent.Symbol, - quantity, round(price*0.997,5))))
                        self.tp1_ticket.append((self.LimitOrder(orderEvent.Symbol, - q1, round(price*1.008,5))))
                        self.tp2_ticket.append((self.LimitOrder(orderEvent.Symbol, - q2, round(price*1.004,5))))
                        self.sl_ticket.append((self.StopMarketOrder(orderEvent.Symbol, - quantity, round(price*0.993,5))))
                        # self.Log("TP/SL constructed")

                    if direction == 1: # Short
                        # self.tp1_ticket.append(self.LimitOrder(orderEvent.Symbol,q1, round(price*0.995,5)))
                        # self.tp2_ticket.append(self.LimitOrder(orderEvent.Symbol,q2, round(price*0.9975,5)))
                        # self.sl_ticket.append(self.StopMarketOrder(orderEvent.Symbol,quantity, round(price*1.0035,5)))
                        self.tp1_ticket.append(self.LimitOrder(orderEvent.Symbol,q1, round(price*0.992,5)))
                        self.tp2_ticket.append(self.LimitOrder(orderEvent.Symbol,q2, round(price*0.996,5)))
                        self.sl_ticket.append(self.StopMarketOrder(orderEvent.Symbol,quantity, round(price*1.003,5)))
                        # self.Log("TP/SL constructed")     
                        #        

                    return 


        if len(self.tp2_ticket) > 0:
            for i in range(len(self.tp2_ticket)):
                if (orderEvent.OrderId == self.tp2_ticket[i].OrderId):
                    IF_MARGIN = False
                    updateOrderFields = UpdateOrderFields()
                    updateOrderFields.Quantity = self.tp1_ticket[i].quantity
                    updateOrderFields.stop_price = self.entry_ticket[i].average_fill_price
                    self.sl_ticket[i].Update(updateOrderFields)

                    return


        if len(self.tp1_ticket) > 0:
            for i in range(len(self.tp1_ticket)):
                if (orderEvent.OrderId == self.tp1_ticket[i].OrderId):
                    IF_MARGIN = False
                    self.sl_ticket[i].Cancel()
                    if len(self.entry_ticket)>i:
                        del self.entry_ticket[i]
                    del self.tp1_ticket[i]
                    del self.tp2_ticket[i]
                    del self.sl_ticket[i]
                    return


        if len(self.sl_ticket) > 0:
            for i in range(len(self.sl_ticket)):
                
                if (orderEvent.OrderId == self.sl_ticket[i].OrderId):
                    IF_MARGIN = False
                    # self.Log(f'Stop Loss price is {orderEvent.FillPrice}')
                    self.tp1_ticket[i].Cancel()
                    self.tp2_ticket[i].Cancel()
                    if len(self.entry_ticket)>i:
                        del self.entry_ticket[i]
                    del self.sl_ticket[i]
                    del self.tp1_ticket[i]
                    del self.tp2_ticket[i]
                    # if self.portfolio['USDJPY'].invested:
                    #     self.liquidate('USDJPY')
                    return


        if self.Margin_Call:
            i = 0
            qty = orderEvent.quantity
            self.Margin_Call = False
            # self.debug(f'Hit margin call, the qty is {qty}')

            while abs(qty) > 0:  
                
                if abs(qty) < abs(self.sl_ticket[i].quantity):
                    updateOrderFields = UpdateOrderFields()
                    updateOrderFields.Quantity = self.sl_ticket[i].quantity - qty
                    tmp_qty = self.sl_ticket[i].quantity - qty

                    self.sl_ticket[i].Update(updateOrderFields)

                    updateOrderFields = UpdateOrderFields()
                    updateOrderFields.Quantity = int(tmp_qty/2)

                    self.tp1_ticket[i].Update(updateOrderFields)

                    updateOrderFields = UpdateOrderFields()
                    updateOrderFields.Quantity = tmp_qty - int(tmp_qty/2)
                    self.tp2_ticket[i].Update(updateOrderFields)
                    return


                elif abs(qty) == abs(self.sl_ticket[i].quantity):
                    self.sl_ticket[i].Cancel()
                    self.tp1_ticket[i].Cancel()
                    self.tp2_ticket[i].Cancel()
                    
                    if len(self.entry_ticket)>i:
                        del self.entry_ticket[i]
                    del self.sl_ticket[i]
                    del self.tp1_ticket[i]
                    del self.tp2_ticket[i]

                    return

                else:
                    # The liquidated quantity exceeds this bracket: record the stop
                    # order's quantity before its ticket is removed, then keep
                    # consuming the remainder against the next bracket.
                    sl_qty = self.sl_ticket[i].quantity

                    self.sl_ticket[i].Cancel()
                    self.tp1_ticket[i].Cancel()
                    self.tp2_ticket[i].Cancel()

                    if len(self.entry_ticket) > i:
                        del self.entry_ticket[i]
                    del self.sl_ticket[i]
                    del self.tp1_ticket[i]
                    del self.tp2_ticket[i]

                    qty -= sl_qty
                    # The deletions shift the next bracket into slot i, so the index
                    # is not advanced here.
            
        if self.CanTrade == False:
            self.Transactions.CancelOpenOrders()
            return
            

            
from AlgorithmImports import *

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import Dataset, DataLoader, WeightedRandomSampler, TensorDataset

import math
import random
from sklearn.preprocessing import MinMaxScaler    
from sklearn.metrics import confusion_matrix, classification_report



def get_class_distribution(obj):
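    """Count occurrences of each of the three class labels (0, 1, 2)."""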
    count_dict = {
        0: 0,
        1: 0,
        2: 0,
    }
    for i in obj:
        count_dict[i] += 1
    return count_dict


def get_weighted_sampler(y):
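    """Build a WeightedRandomSampler from inverse class frequencies so minority
    classes are oversampled during training; also return the per-class weights
    for use in a weighted loss."""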
    target_list = []
    for t in y:
        target_list.append(t)
    target_list = torch.tensor(target_list)

    class_count = [i for i in get_class_distribution(target_list.cpu().numpy()).values()]
    class_weights = 1./torch.tensor(class_count, dtype=torch.float) 
    class_weights_all = class_weights[target_list]
    weighted_sampler = WeightedRandomSampler(
        weights=class_weights_all,
        num_samples=len(class_weights_all),
        replacement=True
    )

    return (weighted_sampler, class_weights)


def multi_acc(y_pred, y_test):
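    """Batch accuracy in percent (rounded), taking the argmax of the logits."""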
    y_pred_softmax = torch.log_softmax(y_pred, dim = 1)
    _, y_pred_tags = torch.max(y_pred_softmax, dim = 1)    
    correct_pred = (y_pred_tags == y_test).float()
    acc = correct_pred.sum() / len(correct_pred)
    acc = torch.round(acc * 100)
    return acc


class EarlyStopping:
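    """Simple overfitting guard: count the epochs on which validation loss
    exceeds training loss by more than min_delta (the counter is never reset),
    and flag early_stop once the count reaches `tolerance`."""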
    def __init__(self, tolerance=5, min_delta=0):
        self.tolerance = tolerance
        self.min_delta = min_delta
        self.counter = 0
        self.early_stop = False

    def __call__(self, train_loss, validation_loss):
        if (validation_loss - train_loss) > self.min_delta:
            self.counter +=1
            if self.counter >= self.tolerance:
                self.early_stop = True



def get_rnn_dataloader_from_array(X_data, y_data, window_size, batch_size, is_test_loader=False, use_weighted_sampler=False):
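    """Slice (X_data, y_data) into overlapping lookback windows of length
    window_size; each sample's label is the target of the window's last bar.
    Test loaders keep order with batch_size=1, while training loaders either
    shuffle or draw from the class-balanced weighted sampler."""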

    weighted_sampler = None
    class_weights = None
    X, y = [], []
    for i in range(window_size, len(X_data)+1):
        feature = X_data[(i-window_size):i,:]
        target = y_data[i-1]
        X.append(feature)
        y.append(target)
    X = torch.tensor(X).float()
    y = torch.tensor(y).long()
    if is_test_loader:
        data_loader = DataLoader(TensorDataset(X, y), batch_size=1)
    else:
        if use_weighted_sampler:
            # (weighted_sampler, class_weights) = get_weighted_sampler(list(y_data), len(y))
            (weighted_sampler, class_weights) = get_weighted_sampler(y)
            data_loader = DataLoader(TensorDataset(X, y), sampler=weighted_sampler, batch_size=batch_size)
        else:
            data_loader = DataLoader(TensorDataset(X, y), shuffle=True, batch_size=batch_size)

    return (data_loader, weighted_sampler, class_weights)
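
# Illustrative only (synthetic shapes, not part of the strategy's data pipeline):
# build a class-balanced training loader from a hypothetical 100-row, 4-feature
# array using a 24-bar lookback window and batches of 32.
#   X_demo = np.random.rand(100, 4)
#   y_demo = np.random.randint(0, 3, size=100)
#   loader, sampler, weights = get_rnn_dataloader_from_array(
#       X_demo, y_demo, window_size=24, batch_size=32,
#       is_test_loader=False, use_weighted_sampler=True,
#   )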




def get_torch_rnn_dataloaders(
    col_feature, col_target, train_df, valid_df, test_df, window_size, batch_size, 
    use_weighted_sampler=False, 
    has_test_data=True,
    is_training=True,
    scaler=None,
):

    if is_training:
        X_train = train_df[col_feature].values
        y_train = train_df[col_target].values

        X_val = valid_df[col_feature].values
        y_val = valid_df[col_target].values

    if has_test_data:
        X_test = test_df[col_feature].values
        y_test = test_df[col_target].values

    if is_training:
        scaler = MinMaxScaler()
        X_train = scaler.fit_transform(X_train)
        X_val = scaler.transform(X_val)

    if has_test_data:
        X_test = scaler.transform(X_test)

    if is_training:
        X_train, y_train = np.array(X_train), np.array(y_train)
        X_val, y_val = np.array(X_val), np.array(y_val)

    if has_test_data:
        X_test, y_test = np.array(X_test), np.array(y_test)

    train_loader = None
    val_loader = None
    weighted_sampler = None
    class_weights = None
    if is_training:
        (train_loader, weighted_sampler, class_weights) = get_rnn_dataloader_from_array(X_train, y_train, window_size, batch_size, is_test_loader=False, use_weighted_sampler=use_weighted_sampler)
        (val_loader, _, _) = get_rnn_dataloader_from_array(X_val, y_val, window_size, batch_size, is_test_loader=False, use_weighted_sampler=False)

    test_loader = None
    if has_test_data:
        (test_loader, _, _) = get_rnn_dataloader_from_array(X_test, y_test, window_size, batch_size, is_test_loader=True)

    return (train_loader, val_loader, test_loader, scaler, weighted_sampler, class_weights)



def t2v(tau, f, out_features, w, b, w0, b0):
    v1 = f(torch.matmul(tau, w) + b)
    v2 = torch.matmul(tau, w0) + b0
    return torch.cat([v1, v2], -1)
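
# t2v / SineActivation / CosineActivation implement a Time2Vec-style feature
# embedding: one linear component (w0, b0) plus (out_features - 1) periodic
# sin/cos components, concatenated along the last dimension.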

class SineActivation(nn.Module):
    def __init__(self, in_features, out_features):
        super(SineActivation, self).__init__()
        self.out_features = out_features
        self.w0 = nn.parameter.Parameter(torch.randn(in_features, 1))
        self.b0 = nn.parameter.Parameter(torch.randn(1))
        self.w = nn.parameter.Parameter(torch.randn(in_features, out_features-1))
        self.b = nn.parameter.Parameter(torch.randn(out_features-1))
        self.f = torch.sin

    def forward(self, tau):
        return t2v(tau, self.f, self.out_features, self.w, self.b, self.w0, self.b0)

class CosineActivation(nn.Module):
    def __init__(self, in_features, out_features):
        super(CosineActivation, self).__init__()
        self.out_features = out_features
        self.w0 = nn.parameter.Parameter(torch.randn(in_features, 1))
        self.b0 = nn.parameter.Parameter(torch.randn(1))
        self.w = nn.parameter.Parameter(torch.randn(in_features, out_features-1))
        self.b = nn.parameter.Parameter(torch.randn(out_features-1))
        self.f = torch.cos

    def forward(self, tau):
        return t2v(tau, self.f, self.out_features, self.w, self.b, self.w0, self.b0)



class GRUmodel(nn.Module):
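    """GRU classifier: optional Time2Vec-style front end, a single GRU layer
    whose final hidden state feeds a fully connected head (depth chosen by
    arc_num), producing logits over output_size classes."""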
    def __init__(self, input_size, hidden_size, output_size, arc_num=1, use_t2v=True):
        super(GRUmodel, self).__init__()

        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.arc_num = arc_num
        self.use_t2v = use_t2v

        if self.use_t2v:
            self.t2v_layer = SineActivation(in_features=input_size, out_features=16)
            self.layer0 = nn.Linear(16, input_size)

        self.recurrent_layer = nn.GRU(input_size=input_size, hidden_size=hidden_size, num_layers=1, batch_first=True)

        if self.arc_num == 1:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, output_size)

        if self.arc_num == 2:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, output_size)

        if self.arc_num == 3:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 64)
            self.bn2 = nn.BatchNorm1d(64)
            self.layer3 = nn.Linear(64, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, output_size)

    def forward(self, x):
        if len(x.shape) < 3:
            x = x.unsqueeze(1)

        if self.use_t2v:
            x = self.t2v_layer(x)
            x = self.layer0(x)

        o, h = self.recurrent_layer(x)
        h = h.squeeze().unsqueeze(0) if len(h.squeeze().shape) < 2 else h.squeeze()

        if self.arc_num == 1:
            x = self.layer1(h)
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            output = self.layer3(x)


        if self.arc_num in [2,3]:
            x = self.layer1(h)
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            x = self.layer3(x)
            x = self.bn3(x)
            output = self.layer4(x)

        output = output if len(output.shape) > 1 else output.unsqueeze(0)

        return output




class LSTMmodel(nn.Module):
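    """LSTM classifier: optional Time2Vec-style front end, one or two stacked
    LSTM layers (use_dual_lstm), and a fully connected head (arc_num) mapping
    the last time step's output to output_size class logits."""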
    def __init__(self, input_size, hidden_size, output_size, 
                 use_dual_lstm=False, arc_num=0, use_t2v=True):
        super(LSTMmodel, self).__init__()

        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.use_dual_lstm = use_dual_lstm
        self.arc_num = arc_num
        self.use_t2v = use_t2v

        if self.use_t2v:
            self.t2v_layer = SineActivation(in_features=input_size, out_features=16)
            self.layer0 = nn.Linear(16, input_size)

        self.recurrent_layer1 = nn.LSTM(input_size=input_size, hidden_size=hidden_size, num_layers=1, batch_first=True)
        if self.use_dual_lstm:
            self.recurrent_layer2 = nn.LSTM(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True)

        if self.arc_num == 0:
            self.layer1 = nn.Linear(hidden_size, output_size)

        if self.arc_num == 1:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, output_size)

        if self.arc_num == 2:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, output_size)

        if self.arc_num == 3:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 64)
            self.bn2 = nn.BatchNorm1d(64)
            self.layer3 = nn.Linear(64, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, output_size)

    def forward(self, x):
        if len(x.shape) < 3:
            x = x.unsqueeze(1)

        if self.use_t2v:
            x = self.t2v_layer(x)
            x = self.layer0(x)    

        rx, (hn, cn) = self.recurrent_layer1(x)
        if self.use_dual_lstm:
            rx, (hn, cn) = self.recurrent_layer2(rx)

        if self.arc_num == 0:
            output = self.layer1(rx[:,-1])

        if self.arc_num == 1:
            x = self.layer1(rx[:,-1])
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            output = self.layer3(x)

        if self.arc_num in [2,3]:
            x = self.layer1(rx[:,-1])
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            x = self.layer3(x)
            x = self.bn3(x)
            output = self.layer4(x)

        output = output if len(output.shape) > 1 else output.unsqueeze(0)
        return output






def get_rnn_model(
    col_feature, train_loader, val_loader,
    epochs, batch_size, learning_rate, window_size, hidden_size, device,
    use_early_stop=False, use_weighted_sampler=False, class_weights=None,
    use_dual_lstm=False, use_gru_model=False,
):
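    """Train the classification RNN (LSTM or GRU) with Adam and cross-entropy
    loss, optionally weighted by the class weights from the sampler. Batches of
    size one are skipped, and an optional EarlyStopping monitor halts training
    on a persistent validation/train loss gap."""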

    input_size = len(col_feature)
    output_size = 3
    #use_early_stop = False

    if use_weighted_sampler:
        criterion = nn.CrossEntropyLoss(weight=class_weights.to(device))
    else:
        criterion = nn.CrossEntropyLoss()

    # encoder = RNNEncoder(input_size, hidden_size, device).to(device)
    # decoder = RNNDecoder(hidden_size, output_size).to(device)
    # model = RNNSeq2Seq(encoder, decoder).to(device)

    if use_gru_model:
        model = GRUmodel(input_size, hidden_size, output_size).to(device)
    else:
        model = LSTMmodel(input_size, hidden_size, output_size, use_dual_lstm=use_dual_lstm).to(device)

    optimizer = optim.Adam(model.parameters(), lr=learning_rate)

    accuracy_stats = {
        'train': [],
        "val": []
    }
    loss_stats = {
        'train': [],
        "val": []
    }

    if use_early_stop:
        early_stopping = EarlyStopping(tolerance=5, min_delta=0.01)

    # print("Begin training.")
    for e in range(1, epochs+1):
        # TRAINING
        train_epoch_loss = 0
        train_epoch_acc = 0

        model.train()
        for X_train_batch, y_train_batch in train_loader:
            if X_train_batch.shape[0] == 1:
                continue

            X_train_batch, y_train_batch = X_train_batch.to(device), y_train_batch.to(device)
            optimizer.zero_grad()
            y_train_pred = model(X_train_batch)
            train_loss = criterion(y_train_pred, y_train_batch)
            train_acc = multi_acc(y_train_pred, y_train_batch)
            train_loss.backward()
            optimizer.step()
            train_epoch_loss += train_loss.item()
            train_epoch_acc += train_acc.item()

        # VALIDATION
        model.eval()
        with torch.no_grad():
            val_epoch_loss = 0
            val_epoch_acc = 0
            for X_val_batch, y_val_batch in val_loader:
                X_val_batch, y_val_batch = X_val_batch.to(device), y_val_batch.to(device)
                y_val_pred = model(X_val_batch)      
                val_loss = criterion(y_val_pred, y_val_batch)
                val_acc = multi_acc(y_val_pred, y_val_batch)
                val_epoch_loss += val_loss.item()
                val_epoch_acc += val_acc.item()

        loss_stats['train'].append(train_epoch_loss/len(train_loader))
        loss_stats['val'].append(val_epoch_loss/len(val_loader))
        accuracy_stats['train'].append(train_epoch_acc/len(train_loader))
        accuracy_stats['val'].append(val_epoch_acc/len(val_loader))

        if use_early_stop:
            early_stopping(train_epoch_loss/len(train_loader), val_epoch_loss/len(val_loader))
        # print(f'Epoch {e+0:03}: | Train Loss: {train_epoch_loss/len(train_loader):.5f} | Val Loss: {val_epoch_loss/len(val_loader):.5f} | Train Acc: {train_epoch_acc/len(train_loader):.3f}| Val Acc: {val_epoch_acc/len(val_loader):.3f}')

        if use_early_stop and early_stopping.early_stop:
            break

    return model


def get_predictions(test_loader, model, device):
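    """Run the classifier in eval mode over the test loader and return, per
    test window, the predicted class (argmax over logits) and its raw max score."""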
    y_pred_list = []
    y_score_list = []
    with torch.no_grad():
        model.eval()
        for X_batch, _ in test_loader:
            X_batch = X_batch.to(device)
            y_test_pred = model(X_batch)
            y_pred_score, y_pred_tags = torch.max(y_test_pred, dim = 1)
            y_score_list.append(y_pred_score.cpu().numpy())
            y_pred_list.append(y_pred_tags.cpu().numpy())

    y_pred_list = [a.squeeze().tolist() for a in y_pred_list]
    y_score_list = [a.squeeze().tolist() for a in y_score_list]
    return (y_pred_list, y_score_list)




def get_prediction_hybrid(row, original=False):
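    """Combine the technical and fundamental classifier outputs for one row.
    Default mode: return the shared class when they agree, otherwise neutral (1).
    `original` mode: return neutral if either model predicts it, otherwise the
    agreed class or, on disagreement, the higher-scoring model's class."""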
    out = None

    if original:
        if (row['pred_technical'] == 1):
            out = 1
        elif (row['pred_fundamental'] == 1):
            out = 1
        elif row['pred_technical'] == row['pred_fundamental']:
            out = row['pred_technical']
        else:
            if row['score_technical'] >= row['score_fundamental']:
                out = row['pred_technical']
            else:
                out = row['pred_fundamental']
    else:
        if row['pred_technical'] == row['pred_fundamental']:
            out = row['pred_technical']
        else:
            out = 1

    return out


def get_prediction_hybrid_max(row):
    out = None
    if row['score_technical'] >= row['score_fundamental']:
        out = row['pred_technical']
    else:
        out = row['pred_fundamental']

    return out


def get_prediction_hybrid_greedy(row):
    out = None
    if row['pred_technical'] == row['pred_fundamental']:
        out = row['pred_technical']
    elif row['pred_technical'] == 1:
        out = row['pred_fundamental']
    elif row['pred_fundamental'] == 1:
        out = row['pred_technical']
    else:
        if row['score_technical'] >= row['score_fundamental']:
            out = row['pred_technical']
        else:
            out = row['pred_fundamental']

    return out








#region imports
from AlgorithmImports import *
#endregion
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import Dataset, DataLoader, WeightedRandomSampler, TensorDataset
from sklearn.preprocessing import MinMaxScaler
import torch.nn.functional as F
from sklearn.metrics import mean_squared_error

import math
import random
import pandas as pd
import numpy as np


def get_rnn_dataloader_from_array(X_data, y_data, window_size, batch_size, is_test_loader=False):
    X, y = [], []
    for i in range(window_size, len(X_data)+1):
        feature = X_data[(i-window_size):i,:]
        target = y_data[i-1]
        X.append(feature)
        y.append(target)
    X = torch.tensor(X).float()
    # y = torch.tensor(y).long()
    y = torch.tensor(y).float()
    
    if is_test_loader:
        data_loader = DataLoader(TensorDataset(X, y), batch_size=1)
    else:
        data_loader = DataLoader(TensorDataset(X, y), shuffle=True, batch_size=batch_size)

    return data_loader


def get_torch_rnn_dataloaders(
    col_feature, col_target, train_df, valid_df, test_df, window_size, batch_size,
    has_test_data=True,
    is_training=True,
    scaler=None,
):
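    """Regression variant of the dataloader builder: the target column is scaled
    together with the features by a single MinMaxScaler so predictions can be
    inverse-transformed later, and the validation frame is prefixed with the
    last window_size - 1 training rows so every validation bar has a full window."""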

    if is_training:
        train_data = train_df[[col_target]+col_feature].values

        valid_df_windowed = pd.concat([train_df,valid_df]).copy()
        valid_df_windowed = valid_df_windowed.tail(len(valid_df) + window_size-1)
        valid_data = valid_df_windowed[[col_target]+col_feature].values

    if has_test_data:
        test_data = test_df[[col_target]+col_feature].values

    if is_training:
        scaler = MinMaxScaler(feature_range=(0, 1))
        train_data = scaler.fit_transform(train_data)
        valid_data = scaler.transform(valid_data)

    if has_test_data:
        test_data = scaler.transform(test_data)

    if is_training:
        X_train = train_data[:, 1:]
        y_train = train_data[:, 0]
        X_val = valid_data[:, 1:]
        y_val = valid_data[:, 0]

    if has_test_data:
        X_test = test_data[:, 1:]
        y_test = test_data[:, 0]

    train_loader = None
    val_loader = None
    test_loader = None

    if is_training:
        train_loader = get_rnn_dataloader_from_array(X_train, y_train, window_size, batch_size, is_test_loader=False)
        val_loader = get_rnn_dataloader_from_array(X_val, y_val, window_size, batch_size, is_test_loader=True)

    if has_test_data:
        test_loader = get_rnn_dataloader_from_array(X_test, y_test, window_size, batch_size, is_test_loader=True)

    return (train_loader, val_loader, test_loader, scaler)




def t2v(tau, f, out_features, w, b, w0, b0):
    v1 = f(torch.matmul(tau, w) + b)
    v2 = torch.matmul(tau, w0) + b0
    return torch.cat([v1, v2], -1)

class SineActivation(nn.Module):
    def __init__(self, in_features, out_features):
        super(SineActivation, self).__init__()
        self.out_features = out_features
        self.w0 = nn.parameter.Parameter(torch.randn(in_features, 1))
        self.b0 = nn.parameter.Parameter(torch.randn(1))
        self.w = nn.parameter.Parameter(torch.randn(in_features, out_features-1))
        self.b = nn.parameter.Parameter(torch.randn(out_features-1))
        self.f = torch.sin

    def forward(self, tau):
        return t2v(tau, self.f, self.out_features, self.w, self.b, self.w0, self.b0)

class CosineActivation(nn.Module):
    def __init__(self, in_features, out_features):
        super(CosineActivation, self).__init__()
        self.out_features = out_features
        self.w0 = nn.parameter.Parameter(torch.randn(in_features, 1))
        self.b0 = nn.parameter.Parameter(torch.randn(1))
        self.w = nn.parameter.Parameter(torch.randn(in_features, out_features-1))
        self.b = nn.parameter.Parameter(torch.randn(out_features-1))
        self.f = torch.cos

    def forward(self, tau):
        return t2v(tau, self.f, self.out_features, self.w, self.b, self.w0, self.b0)



class GRUmodel(nn.Module):
    def __init__(self, input_size, hidden_size, arc_num=1, use_t2v=True):
        super(GRUmodel, self).__init__()

        self.input_size = input_size
        self.hidden_size = hidden_size
        self.arc_num = arc_num
        self.use_t2v = use_t2v

        if self.use_t2v:
            self.t2v_layer = SineActivation(in_features=input_size, out_features=16)
            self.layer0 = nn.Linear(16, input_size)

        self.recurrent_layer = nn.GRU(input_size=input_size, hidden_size=hidden_size, num_layers=1, batch_first=True)

        if self.arc_num == 1:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, 1)

        if self.arc_num == 2:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, 1)

        if self.arc_num == 3:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 64)
            self.bn2 = nn.BatchNorm1d(64)
            self.layer3 = nn.Linear(64, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, 1)

    def forward(self, x):
        if len(x.shape) < 3:
            x = x.unsqueeze(1)
            
        if self.use_t2v:
            x = self.t2v_layer(x)
            x = self.layer0(x)    
             
        o, h = self.recurrent_layer(x)
        h = h.squeeze().unsqueeze(0) if len(h.squeeze().shape) < 2 else h.squeeze()

        if self.arc_num == 1:
            x = self.layer1(h)
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            output = self.layer3(x)


        if self.arc_num in [2,3]:
            x = self.layer1(h)
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            x = self.layer3(x)
            x = self.bn3(x)
            output = self.layer4(x)

        output = output if len(output.shape) > 1 else output.unsqueeze(0)

        return output




class LSTMmodel(nn.Module):
    def __init__(self, input_size, hidden_size,
                 use_dual_lstm=False, arc_num=1, use_t2v=True):
        super(LSTMmodel, self).__init__()

        self.input_size = input_size
        self.hidden_size = hidden_size
        self.use_dual_lstm = use_dual_lstm
        self.arc_num = arc_num
        self.use_t2v = use_t2v

        if self.use_t2v:
            self.t2v_layer = SineActivation(in_features=input_size, out_features=16)
            self.layer0 = nn.Linear(16, input_size)

        self.recurrent_layer1 = nn.LSTM(input_size=input_size, hidden_size=hidden_size, num_layers=1, batch_first=True)
        if self.use_dual_lstm:
            self.recurrent_layer2 = nn.LSTM(input_size=hidden_size, hidden_size=hidden_size, num_layers=1, batch_first=True)

        if self.arc_num == 0:
            self.layer1 = nn.Linear(hidden_size, 1)

        if self.arc_num == 1:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, 1)

        if self.arc_num == 2:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 256)
            self.bn2 = nn.BatchNorm1d(256)
            self.layer3 = nn.Linear(256, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, 1)

        if self.arc_num == 3:
            self.layer1 = nn.Linear(hidden_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.layer2 = nn.Linear(128, 64)
            self.bn2 = nn.BatchNorm1d(64)
            self.layer3 = nn.Linear(64, 32)
            self.bn3 = nn.BatchNorm1d(32)
            self.layer4 = nn.Linear(32, 1)

    def forward(self, x):
        if len(x.shape) < 3:
            x = x.unsqueeze(1)

        if self.use_t2v:
            x = self.t2v_layer(x)
            x = self.layer0(x)    

        rx, (hn, cn) = self.recurrent_layer1(x)
        if self.use_dual_lstm:
            rx, (hn, cn) = self.recurrent_layer2(rx)

        if self.arc_num == 0:
            output = self.layer1(rx[:,-1])

        if self.arc_num == 1:
            x = self.layer1(rx[:,-1])
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            output = self.layer3(x)

        if self.arc_num in [2,3]:
            x = self.layer1(rx[:,-1])
            x = self.bn1(x)
            x = self.layer2(x)
            x = self.bn2(x)
            x = self.layer3(x)
            x = self.bn3(x)
            output = self.layer4(x)

        output = output if len(output.shape) > 1 else output.unsqueeze(0)
        return output




def get_rnn_model(
    col_feature, train_loader, val_loader,
    epochs, learning_rate, hidden_size, device,
    use_dual_lstm=False, use_gru_model=False,
):

    input_size = len(col_feature)

    if use_gru_model:
        model = GRUmodel(input_size, hidden_size).to(device)
    else:
        model = LSTMmodel(input_size, hidden_size, use_dual_lstm=use_dual_lstm).to(device)

    optimizer = optim.Adam(model.parameters(), lr=learning_rate)

    loss_stats = {
        'train': [],
        "val": []
    }

    for e in range(1, epochs+1):
        # TRAINING
        train_epoch_loss = 0
        model.train()
        for X_train_batch, y_train_batch in train_loader:
            if X_train_batch.shape[0] == 1:
                continue

            X_train_batch, y_train_batch = X_train_batch.to(device), y_train_batch.to(device)
            optimizer.zero_grad()
            y_train_pred = model(X_train_batch)
            train_loss = F.smooth_l1_loss(y_train_pred, y_train_batch.unsqueeze(1))
            train_loss.backward()
            optimizer.step()
            train_epoch_loss += train_loss.item()

        # VALIDATION
        model.eval()
        with torch.no_grad():
            val_epoch_loss = 0
            for X_val_batch, y_val_batch in val_loader:
                X_val_batch, y_val_batch = X_val_batch.to(device), y_val_batch.to(device)
                y_val_pred = model(X_val_batch)      
                val_loss = F.smooth_l1_loss(y_val_pred, y_val_batch.unsqueeze(1))
                val_epoch_loss += val_loss.item()

        loss_stats['train'].append(train_epoch_loss/len(train_loader))
        loss_stats['val'].append(val_epoch_loss/len(val_loader))

    return model



def get_predictions(test_loader, model, scaler, col_feature, device):
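    """Regression predictions: run the model in eval mode, then map the scaled
    outputs back to price space by writing them into the target column of a
    zero-padded matrix and applying scaler.inverse_transform."""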
    y_pred_list = []
    with torch.no_grad():
        model.eval()
        for X_batch, _ in test_loader:
            X_batch = X_batch.to(device)
            y_test_pred = model(X_batch)
            y_test_pred = y_test_pred.cpu().squeeze().numpy().item()
            y_pred_list.append(y_test_pred)

    def inverse_transform(y_pred, col_feature):
        extended = np.zeros((len(y_pred), len(col_feature)+1))
        extended[:, 0] = y_pred
        return scaler.inverse_transform(extended)[:, 0]
        
    y_pred = np.array(y_pred_list)
    y_pred = inverse_transform(y_pred, col_feature)
    return y_pred




def get_prediction_hybrid_regression(pred_fundamental, pred_technical, fundamental_mse, technical_mse):
    out = None

    # if (pred_technical == 1):
    #     out = 1
    # elif (pred_fundamental == 1):
    #     out = 1

    if pred_technical == pred_fundamental:
        out = pred_technical
    else:
        if technical_mse <= fundamental_mse:
            out = pred_technical
        else:
            out = 1
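    # NOTE: the technical prediction is returned directly below, which overrides
    # the combined decision computed in `out` above.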

    return pred_technical



def get_regression_pred_decision(diff, col_target_gains_thres):
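    """Map a predicted price change to a 3-class decision: 2 (long) when the
    gain exceeds the threshold, 0 (short) when the drop exceeds it, else 1
    (neutral)."""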
    if diff > col_target_gains_thres:
        return 2
    if -diff > col_target_gains_thres:
        return 0
    else:
        return 1
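
# Illustrative only (hypothetical threshold value): with a 0.002 gain threshold,
# a predicted move of +0.0035 maps to 2 (long), -0.0035 to 0 (short), and
# +0.0005 to 1 (neutral).
#   get_regression_pred_decision(0.0035, 0.002)   # -> 2
#   get_regression_pred_decision(-0.0035, 0.002)  # -> 0
#   get_regression_pred_decision(0.0005, 0.002)   # -> 1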


from AlgorithmImports import *

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import Dataset, DataLoader, WeightedRandomSampler, TensorDataset

import math
import os
import random
from sklearn.preprocessing import MinMaxScaler    
from sklearn.metrics import confusion_matrix, classification_report


def set_seed(seed: int = 100) -> None:
    np.random.seed(seed)
    random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    # When running on the CuDNN backend, two further options must be set
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    # Set a fixed value for the hash seed
    os.environ["PYTHONHASHSEED"] = str(seed)
    # print(f"Random seed set as {seed}")


##-##
# def get_upper_threshold(close):
#     difference = close.diff()
#     difference[0] = 0
#     difference = difference.abs()
#     bins = pd.cut(difference, bins=10)
#     bins = bins.value_counts().to_frame().reset_index()
#     bins["index"] = bins["index"].apply(lambda x: x.right)
#     bins = bins.to_numpy()
#     percentile_count = len(difference) * 0.85
#     count = 0
#     for i in range(10):
#         count += bins[i, 1]
#         if count > percentile_count:
#             return bins[i, 0]

def get_upper_threshold(close):
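    """Derive an upper bound for the threshold grid search from a 10-bin
    histogram of absolute bar-to-bar changes, returning the right edge of the
    bin at which the cumulative count first exceeds 85% of observations."""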
    difference = close.diff()
    difference[0] = 0
    difference = difference.abs()
    bins = pd.cut(difference, bins=10)
    bins = bins.value_counts().to_frame().reset_index()
    bins.columns = ['close','count']
    bins["close"] = bins["close"].apply(lambda x: x.right)
    bins = bins.to_numpy()
    percentile_count = len(difference) * 0.85
    count = 0
    for i in range(10):
        count += bins[i, 1]
        if count > percentile_count:
            return bins[i, 0]
##-##




def get_entropy(labels, base=None):
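    """Shannon entropy of the label distribution (natural log unless a base is given)."""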
    vc = pd.Series(labels).value_counts(normalize=True, sort=False)
    base = math.e if base is None else base
    return -(vc * np.log(vc)/np.log(base)).sum()


def get_threshold(close):
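    """Grid-search thresholds in 1e-5 steps up to the histogram-derived upper
    bound and return the one whose 3-way labelling of bar-to-bar changes has
    maximum entropy, i.e. the most balanced class split."""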
    difference = close.diff()
    difference = difference.drop(0)
    difference = difference.tolist()

    threshold = 0
    thres_upper_bound = get_upper_threshold(close)
    temp_thres = 0
    best_entropy = -float('inf')

    while temp_thres < thres_upper_bound:
        labels = []
        for diff in difference:
            if diff > temp_thres:
                labels.append(2)
            elif -diff > temp_thres:
                labels.append(1)
            else:
                labels.append(0)
        entropy = get_entropy(labels)
        if entropy > best_entropy:
            best_entropy = entropy
            threshold = temp_thres
        temp_thres = temp_thres + 0.00001
    return np.round(threshold,5)
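
# Illustrative only (synthetic data, not part of the strategy): derive the
# labelling threshold from a hypothetical hourly close series.
#   closes = pd.Series(1.10 + 0.001 * np.cumsum(np.random.randn(500)))
#   thres = get_threshold(closes)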











from AlgorithmImports import *

import json
import pandas as pd
import numpy as np
from datetime import datetime, timedelta
from collections import deque


from utils import (
    getFxPositionSize,
)


class FxLstmSignal:
    def __init__(
        self,
        algorithm,
        symbol,
        ticker,
        general_setting,
        signal_setting,
    ):

        # General Initializations
        self.algorithm = algorithm

        IS_PAPER = self.algorithm.get_parameter("IS_PAPER")

        if IS_PAPER == 'True':
            self.notification_value = 'PAPER'
        else:
            self.notification_value = 'REAL' 


        self.symbol = symbol
        self.ticker = ticker
        self.general_setting = general_setting
        self.signal_setting = signal_setting

        self.prediction_direction_map_dict = self.signal_setting['prediction_direction_map_dict']
        self.valid_tickers = signal_setting["valid_tickers"]
        self.enter_long_trades = signal_setting["enter_long_trades"]
        self.enter_short_trades = signal_setting["enter_short_trades"]

        self.prediction_direction = 0
        self.use_exit_reason = self.signal_setting["useTralingStop"]
        self.direction = 0
        self.quantity = 0
        self.entryBarCount = 0
        self.allocation_multiplier = 1
        self.entryPrice = None
        self.stopPrice = None
        self.stopDistance = None
        self.targetPrice = None
        self.targetDistance = None
        self.inTrade = False
        self.lookForExit = False
        self.trailingStop = False
        self.exitType = None
        self.macd = None
        self.quote = None
        self.max_qty = None



    def update_prediction_direction(self, prediction):
        self.prediction_direction = self.prediction_direction_map_dict[prediction]

    def enter(self, symbolData, price, position, market_time, total_port_value, uup):

        # Earlier sizing alternative:
        # max_qty = int(total_port_value/(0.02*price))-1000
        # max_qty = min(max_qty, 30000000)
        # self.max_qty = max_qty

        margin_qty = int(total_port_value/(0.05*(1/price))) - 1000  # margin ratio 20:1, so 1/20 = 0.05
        # margin_qty = 30000
        limit_qty = 30000000
        max_qty = min(margin_qty, limit_qty)
        self.max_qty = max_qty
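        # Sizing note: with the 20:1 margin ratio the cap above works out to
        # roughly total_port_value * price / 0.05, minus a 1000-unit buffer and
        # clipped at 30,000,000 units. With assumed numbers (not from the
        # backtest), total_port_value = 400,000 and price = 1.25 give
        # margin_qty ≈ 400000 * 1.25 / 0.05 - 1000 ≈ 9,999,000 units.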

        has_enter = False
        self.inTrade = position
        if self.inTrade : 
            active_consolidator = symbolData.consolidators[self.signal_setting["active_timeframe"]]
            ATR = active_consolidator.indicators[f"ATR{self.signal_setting['atrLength']}"]
            SMA = active_consolidator.indicators[f"SMA{self.signal_setting['sma_filter_lookback']}"]
            AO = active_consolidator.indicators[f"AO"]
            ROC = active_consolidator.indicators[f"ROC{self.signal_setting['roc_filter_lookback']}"]
            TRIX = active_consolidator.indicators[f"TRIX{self.signal_setting['trix_filter_lookback']}"]
            MACD = active_consolidator.indicators[f"MACD"]
            self.macd = MACD["macd"][0]

            if self.algorithm.Time.minute == 0:
                self.algorithm.Log(f"{self.notification_value} MACD is {MACD['macd'][0]} ; AO is {AO['val'][0]}; Prediction is {self.prediction_direction}; UUP_roc is {round(uup['roc'],5)}")

                a = {"text": f"[US--{self.notification_value}--{self.ticker} hourly update] MACD is {MACD['macd'][0]} ; AO is {AO['val'][0]}; Prediction is {self.prediction_direction}; UUP_roc is {round(uup['roc'],5)}"}
                payload = json.dumps(a)
                self.algorithm.notify.web("https://hooks.slack.com/services/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", payload)


            return False

        active_consolidator = symbolData.consolidators[self.signal_setting["active_timeframe"]]
        ATR = active_consolidator.indicators[f"ATR{self.signal_setting['atrLength']}"]

        SMA = active_consolidator.indicators[f"SMA{self.signal_setting['sma_filter_lookback']}"]
        AO = active_consolidator.indicators[f"AO"]
        ROC = active_consolidator.indicators[f"ROC{self.signal_setting['roc_filter_lookback']}"]
        TRIX = active_consolidator.indicators[f"TRIX{self.signal_setting['trix_filter_lookback']}"]
        MACD = active_consolidator.indicators[f"MACD"]
        self.macd = MACD["macd"][0]
        # SMA_fast = active_consolidator.indicators[f"SMA{self.signal_setting['sma_filter_lookback_fast']}"]
        # SMA_slow = active_consolidator.indicators[f"SMA{self.signal_setting['sma_filter_lookback_slow']}"]

        if self.algorithm.Time.minute == 0:
            self.algorithm.Log(f"{self.notification_value} TRIX is {TRIX['val'][0]}; Prediction is {self.prediction_direction}; UUP_roc is {round(uup['roc'],5)}")
            a = {"text": f"[US--{self.notification_value}--{self.ticker} hourly update] TRIX is {TRIX['val'][0]}; Prediction is {self.prediction_direction}; UUP_roc is {round(uup['roc'],5)}"}
            payload = json.dumps(a)
            self.algorithm.notify.web("https://hooks.slack.com/services/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", payload)
        if self.signal_setting["use_sma_filter"]:
            is_long_trend_sma = price < SMA["val"][0]
            is_short_trend_sma = price > SMA["val"][0]
        else:
            is_long_trend_sma = False
            is_short_trend_sma = False

        if self.signal_setting["use_ao_filter"]:
            is_long_trend_ao = (AO["val"][0] > 0)
            is_short_trend_ao = (AO["val"][0] < 0)

        else:
            is_long_trend_ao = False
            is_short_trend_ao = False
        
        if self.signal_setting["use_roc_filter"]:
            is_long_trend_roc = (ROC["val"][0] < 0)
            is_short_trend_roc = (ROC["val"][0] > 0 )
        else:
            is_long_trend_roc = False
            is_short_trend_roc = False
        
        if self.signal_setting["use_trix_filter"]:
            is_long_trend_trix = (TRIX["val"][0] < 0 )
            is_short_trend_trix = (TRIX["val"][0] > 0 )

        else:
            is_long_trend_trix = False
            is_short_trend_trix = False

        
        if self.signal_setting["use_macd_filter"]:
            is_long_trend_macd =  (MACD["macd"][0] < 0 )
            is_short_trend_macd = (MACD["macd"][0] > 0 )

        else:
            is_long_trend_macd = False
            is_short_trend_macd = False
                 

        if  is_short_trend_trix and (self.prediction_direction < 0) and uup['roc'] >0 :
            has_enter = True
            self.inTrade = True
            self.direction = 1

            if self.signal_setting["use_movement_thres_for_stops"]:
                stopSize = self.signal_setting["movement_thres"] * self.signal_setting["longStopMultiplier"]
                longStopPrice = active_consolidator.close[0] - stopSize
                longStopDistance = active_consolidator.close[0] - longStopPrice
            else:
                stopSize = ATR["val"][0] * self.signal_setting["longStopMultiplier"]
                longPriceSource = min(active_consolidator.low[0], active_consolidator.low[1])
                longStopPrice = longPriceSource - stopSize
                longStopDistance = active_consolidator.close[0] - longStopPrice
            self.targetPrice = active_consolidator.close[0] + longStopDistance * self.signal_setting["longRiskRewardMultiplier"]
            self.targetDistance = self.targetPrice - active_consolidator.close[0]

            self.quantity = self.direction * getFxPositionSize(
                longStopDistance,
                self.signal_setting["risk_pct"],
                self.algorithm,
                self.symbol,
            )

            self.algorithm.Log(f"Long position: TRIX is {TRIX['val'][0]} ; prediction is {self.prediction_direction} ")
            self.quote = 'Long TRIX & prediction'
            if abs(self.quantity) > max_qty:
                self.quantity = self.direction * max_qty

            self.entryPrice = price
            self.stopPrice = longStopPrice
            self.stopDistance = longStopDistance
            self.entryBarCount = active_consolidator.BarCount
            self.trailingStop = False
            self.lookForExit = False
        
        elif  is_short_trend_macd and is_short_trend_sma and uup['roc'] >0:
            has_enter = True
            self.inTrade = True
            self.direction = 1

            if self.signal_setting["use_movement_thres_for_stops"]:
                stopSize = self.signal_setting["movement_thres"] * self.signal_setting["longStopMultiplier"]
                longStopPrice = active_consolidator.close[0] - stopSize
                longStopDistance = active_consolidator.close[0] - longStopPrice
            else:
                stopSize = ATR["val"][0] * self.signal_setting["longStopMultiplier"]
                longPriceSource = min(active_consolidator.low[0], active_consolidator.low[1])
                longStopPrice = longPriceSource - stopSize
                longStopDistance = active_consolidator.close[0] - longStopPrice
            self.targetPrice = active_consolidator.close[0] + longStopDistance * self.signal_setting["longRiskRewardMultiplier"]
            self.targetDistance = self.targetPrice - active_consolidator.close[0]

            self.quantity = self.direction * getFxPositionSize(
                longStopDistance,
                self.signal_setting["risk_pct"],
                self.algorithm,
                self.symbol,
            )

            self.algorithm.Log(f"Long position: MACD is {MACD['macd'][0]} ; SMA is {SMA['val'][0]} ")
            self.quote = 'Long MACD & prediction'
            if abs(self.quantity) > max_qty:
                self.quantity = self.direction * max_qty

            self.entryPrice = price
            self.stopPrice = longStopPrice
            self.stopDistance = longStopDistance
            self.entryBarCount = active_consolidator.BarCount
            self.trailingStop = False
            self.lookForExit = False


        elif is_long_trend_trix and (self.prediction_direction > 0) and uup['roc'] < 0: 
            has_enter = True
            self.inTrade = True
            self.direction = -1

            if self.signal_setting["use_movement_thres_for_stops"]:
                stopSize = self.signal_setting["movement_thres"] * self.signal_setting["shortStopMultiplier"]
                shortStopPrice = active_consolidator.close[0] + stopSize
                shortStopDistance = shortStopPrice - active_consolidator.close[0]
            else:
                stopSize = ATR["val"][0] * self.signal_setting["shortStopMultiplier"]
                shortPriceSource = max(active_consolidator.high[0], active_consolidator.high[1])
                shortStopPrice = shortPriceSource + stopSize
                shortStopDistance = shortStopPrice - active_consolidator.close[0]
            self.targetPrice = active_consolidator.close[0] - shortStopDistance * self.signal_setting["shortRiskRewardMultiplier"]
            self.targetDistance = active_consolidator.close[0] - self.targetPrice

            self.quantity = self.direction * getFxPositionSize(
                shortStopDistance,
                self.signal_setting["risk_pct"],
                self.algorithm,
                self.symbol,
            )

            self.algorithm.Log(f"Short position: MACD is {MACD['macd'][0]} ; prediction is {self.prediction_direction} ")
            self.quote = 'Short MACD & prediction'

            if abs(self.quantity) > max_qty:
                self.quantity = self.direction * max_qty


            self.entryPrice = price
            self.stopPrice = shortStopPrice
            self.stopDistance = shortStopDistance
            self.entryBarCount = active_consolidator.BarCount
            self.trailingStop = False
            self.lookForExit = False




        return has_enter
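    # Entry summary: a position is opened only when the LSTM prediction
    # (self.prediction_direction), the enabled indicator filters (TRIX, or
    # MACD + SMA here) and the UUP (US dollar index ETF) rate of change line
    # up; long entries require uup['roc'] > 0 and the short entry requires
    # uup['roc'] < 0. Stops are sized either from the fixed movement threshold
    # or from ATR, and the take-profit target is stopDistance times the
    # configured risk/reward multiplier.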


    def check_exit(self, symbolData, price, market_time):

        if not self.inTrade:
            # Not in a trade: just refresh the cached MACD value and skip exit checks.
            active_consolidator = symbolData.consolidators[self.signal_setting["active_timeframe"]]
            MACD = active_consolidator.indicators[f"MACD"]
            self.macd = MACD["macd"][0]
            return False

        to_exit = False

        active_consolidator = symbolData.consolidators[self.signal_setting["active_timeframe"]]
        can_exit = (active_consolidator.BarCount - self.entryBarCount) > self.signal_setting["exit_wait_period"]
        ATR = active_consolidator.indicators[f"ATR{self.signal_setting['atrLength']}"]

        SMA = active_consolidator.indicators[f"SMA{self.signal_setting['sma_filter_lookback']}"]
        AO = active_consolidator.indicators[f"AO"]
        ROC = active_consolidator.indicators[f"ROC{self.signal_setting['roc_filter_lookback']}"]
        TRIX = active_consolidator.indicators[f"TRIX{self.signal_setting['trix_filter_lookback']}"]
        MACD = active_consolidator.indicators[f"MACD"]
        self.macd = MACD["macd"][0]

        if (self.direction > 0):
            
            if self.signal_setting["use_trix_for_stops"]:
                if (TRIX["val"][0] > 0 ):
                    to_exit = True     
                    self.exitType = 'DirectionChange' 

            if self.signal_setting["use_sma_for_stops"] :
                if (SMA["val"][0] > price ) & (self.prediction_direction < 0):
                    to_exit = True     
                    self.exitType = 'DirectionChange' 

            if self.signal_setting["use_roc_for_stops"]:
                if (ROC["val"][0] > 0 ):
                    to_exit = True     
                    self.exitType = 'DirectionChange' 

            if self.signal_setting["use_macd_for_stops"]:
                if self.signal_setting["use_ao_for_stops"]:
                    if (MACD["macd"][0] > 0 and AO["val"][0] < 0 ):
                        to_exit = True     
                        self.exitType = 'DirectionChange' 

        if (self.direction < 0):

            if self.signal_setting["use_trix_for_stops"]:
                if (TRIX["val"][0] < 0 ):
                    to_exit = True     
                    self.exitType = 'DirectionChange' 
            
            
            if self.signal_setting["use_sma_for_stops"] :
                if (SMA["val"][0] < price )& (self.prediction_direction > 0):
                    to_exit = True     
                    self.exitType = 'DirectionChange' 

            if self.signal_setting["use_roc_for_stops"]:
                if (ROC["val"][0] < 0 ):
                    to_exit = True     
                    self.exitType = 'DirectionChange' 

            if self.signal_setting["use_macd_for_stops"]:
                if self.signal_setting["use_ao_for_stops"]:
                    if (MACD["macd"][0] < 0 and AO["val"][0] > 0 ):
                        to_exit = True     
                        self.exitType = 'DirectionChange' 

        if to_exit and can_exit:
            return to_exit

        if (self.direction > 0):
            if (price >= self.targetPrice) and self.use_exit_reason:
                self.lookForExit = True

        if  (self.direction > 0):
            if self.signal_setting["useTralingStop"] and self.lookForExit:
                trail = active_consolidator.close[0] - ATR["val"][0] * self.signal_setting["trailStopSize"]
                if trail > self.stopPrice:
                    self.stopPrice = trail
                    self.trailingStop = True


        if (self.direction > 0):
            if (self.signal_setting["useTralingStop"] and self.lookForExit and (price <= self.stopPrice)):
                to_exit = True
                self.exitType = 'TrailingStopLoss'

        if (self.direction < 0):
            if (price <= self.targetPrice) and self.use_exit_reason:
                self.lookForExit = True


        if  (self.direction < 0):
            if self.signal_setting["useTralingStop"] and self.lookForExit:
                trail = active_consolidator.close[0] + ATR["val"][0] * self.signal_setting["trailStopSize"]
                if trail < self.stopPrice:
                    self.stopPrice = trail
                    self.trailingStop = True

        if (self.direction < 0):
            if (self.signal_setting["useTralingStop"] and self.lookForExit and (price >= self.stopPrice)):
                to_exit = True
                self.exitType = 'TrailingStopLoss'

        to_exit = to_exit and can_exit

        return to_exit
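    # Exit summary: direction-change exits (TRIX / SMA / ROC / MACD+AO flipping
    # against the position) and the trailing stop are both gated by
    # exit_wait_period bars since entry. Once price reaches targetPrice and the
    # useTralingStop setting is enabled, lookForExit arms an ATR-based trailing
    # stop (trailStopSize) that ratchets each bar and closes the trade as
    # 'TrailingStopLoss' when price crosses back through stopPrice.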


    def update_exit(self):
        self.direction = 0
        self.quantity = 0
        self.entryBarCount = 0
        self.allocation_multiplier = 1
        self.entryPrice = None
        self.stopPrice = None
        self.stopDistance = None
        self.targetPrice = None
        self.targetDistance = None
        self.inTrade = False
        self.lookForExit = False
        self.trailingStop = False
        self.exitType = None




from AlgorithmImports import *
import scipy
import math


def getFxPositionSize(stopPoints, riskPct, algorithm, symbol):
    # Risk-based sizing: choose units so a stop-out loses roughly riskPct of equity.
    Balance = algorithm.Portfolio.TotalPortfolioValue
    LotSize = algorithm.Securities[symbol].SymbolProperties.LotSize
    price = algorithm.Securities[symbol].Close
    conversionRate = algorithm.Securities[symbol].QuoteCurrency.ConversionRate
    pointValue = LotSize * conversionRate
    # pointValue = 0.0001*Balance/price

    # The pip-based form below is algebraically identical to
    # (Balance * riskPct) / (stopPoints * pointValue), since 10000 * 0.0001 = 1.
    units = int(np.ceil(10000*(Balance * riskPct) / ((stopPoints/0.0001) * pointValue)))
    # Earlier alternatives kept for reference:
    # units = int(np.ceil((Balance * riskPct) / (stopPoints * pointValue)))
    # units = 1000*int(np.ceil((Balance * riskPct) / ( 0.5 * pointValue )))  # default set stopPoints as 80, abandon longstopdistance
    # units = int(Balance * riskPct / price)
    # units = 10000*int(np.ceil((Balance * riskPct) / (40 * pointValue)))
    return units
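# Worked example with assumed inputs (not from the backtest): Balance = 370,000,
# riskPct = 0.01, a 40-pip stop (stopPoints = 0.0040) and pointValue = 1000
# (LotSize 1000, conversion rate 1.0) give
# ceil(370000 * 0.01 / (0.0040 * 1000)) = 925 units, which is then capped by
# max_qty in FxLstmSignal.enter().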