Overall Statistics

Total Trades                  7923
Average Win                   0.58%
Average Loss                  -0.24%
Compounding Annual Return     5.774%
Drawdown                      61.600%
Expectancy                    0.178
Net Profit                    216.330%
Sharpe Ratio                  0.318
Probabilistic Sharpe Ratio    0.009%
Loss Rate                     65%
Win Rate                      35%
Profit-Loss Ratio             2.39
Alpha                         0.062
Beta                          -0.005
Annual Standard Deviation     0.192
Annual Variance               0.037
Information Ratio             -0.503
Tracking Error                0.399
Treynor Ratio                 -12.955
Total Fees                    $0.00
using System;
using QuantConnect.Data.Market;
using QuantConnect.Indicators;
using QuantConnect.Algorithm.CSharp;

namespace QuantConnect.Indicators
{
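    /// <summary>
    /// Holt linear (double exponential) smoothing indicator: maintains a smoothed level and
    /// slope of the close price and emits a one-step-ahead forecast.
    /// </summary>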
    public class HoltIndicator : BarIndicator
    {
        public readonly RollingWindow<decimal> Observed;
        public readonly RollingWindow<decimal> Level;
        public readonly RollingWindow<decimal> Slope;
        public readonly RollingWindow<decimal> Forecast;

        decimal alpha = 0;
        decimal beta = 0;
        public double SquareError = 0;
        public HoltIndicator(string name, int length = 100, decimal alpha = 0.9m, decimal beta = 0.008m)
            : base(name)
        {
        	Observed = new RollingWindow<decimal>(length);
        	Level = new RollingWindow<decimal>(length);
        	Slope = new RollingWindow<decimal>(length);
        	Forecast = new RollingWindow<decimal>(length);
        	this.alpha = alpha;
        	this.beta = beta;
        }
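
        /*
         * Holt's linear (double exponential) smoothing, as implemented below
         * (RollingWindow index 0 is the most recent sample):
         *
         *   level_t  = alpha * observed_t + (1 - alpha) * (level_{t-1} + slope_{t-1})
         *   slope_t  = beta  * (level_t - level_{t-1}) + (1 - beta) * slope_{t-1}
         *   forecast = level + slope   (one-step-ahead forecast)
         *
         * The slope is initialised from the first observed change in the close price.
         */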


        /// <summary>
        /// Computes the next Holt forecast value
        /// </summary>
        /// <param name="data">The bar used to update the level, slope and forecast</param>
        /// <returns>The one-step-ahead forecast</returns>
        protected override decimal ComputeNextValue(IBaseDataBar data)
        {

			Observed.Add(data.Close);
			if(Level.Samples==0)
			{
				Level.Add(data.Close);
				Forecast.Add(0);
				return 0m;
			}
			else if(Level.Samples==1)
			{
				// initialize the slope from the first observed change (index 0 is the most recent sample)
				Slope.Add(Observed[0]-Observed[1]);
				// level = alpha*observed + (1-alpha)*(previous level + previous slope)
				Level.Add((alpha*Observed[0])+(1-alpha)*(Level[0]+Slope[0]));
				// slope = beta*(current level - previous level) + (1-beta)*previous slope
				Slope.Add(beta*(Level[0]-Level[1])+(1-beta)*Slope[0]);
				Forecast.Add(Level[1]+Slope[1]);
				return 0;
			}
			// level = alpha*observed + (1-alpha)*(previous level + previous slope)
			Level.Add((alpha*Observed[0])+(1-alpha)*(Level[0]+Slope[0]));
			// slope = beta*(current level - previous level) + (1-beta)*previous slope
			Slope.Add(beta*(Level[0]-Level[1])+(1-beta)*Slope[0]);
			Forecast.Add(Level[1]+Slope[1]);
			SquareError = Math.Pow((double)Observed[0]-(double)Forecast[1],2);
            return (decimal)Forecast[0];

        }


        /// <summary>
        /// Returns whether the indicator will return valid results
        /// </summary>
        public override bool IsReady
        {
            get { return Forecast.Samples>2; }
        }

        /// <summary>
        /// Resets the indicator to its initial state
        /// </summary>
        public override void Reset()
        {
            Observed.Reset();
            Level.Reset();
            Slope.Reset();
            Forecast.Reset();
            SquareError = 0;
            base.Reset();
        }

    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"); 
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections.Generic;
using QuantConnect.Data.Market;

namespace QuantConnect.Indicators
{
        /// <summary>
        /// Represents a rolling skewness indicator computed over closing prices
        /// </summary>
        public class SkewIndicator : BarIndicator
        {

        public RollingWindow<decimal> bars;

        /// <summary>
        /// Gets a flag indicating when this indicator is ready and fully initialized
        /// </summary>
        public override bool IsReady => bars.IsReady;

        int Period;
            /// <summary>
            /// Resets this indicator to its initial state
            /// </summary>
            public override void Reset()
            {
            bars.Reset();
                base.Reset();
            }

            /// <summary>
            /// Initializes a new instance of the SkewIndicator class with the specified name and period
            /// </summary>
            /// <param name="name">The name of this indicator</param>
            /// <param name="period">The lookback period used to compute the skewness</param>
            public SkewIndicator(string name, int period)
                : base(name)
            {
            bars = new RollingWindow<decimal>(period);
            Period = period;
        }

            /// <summary>
            /// Initializes a new instance of the SkewIndicator class with the default name and period
            /// </summary>
            /// <param name="period">The lookback period used to compute the skewness</param>
            public SkewIndicator(int period)
                : this("Skew" + period, period)
            {
            }

            /// <summary>
            /// Computes the next value for this indicator from the given state.
            /// </summary>
            /// <param name="window">The window of data held in this indicator</param>
            /// <param name="input">The input value to this indicator on this time step</param>
            /// <returns>A new value for this indicator</returns>
            protected override decimal ComputeNextValue(IBaseDataBar input)
            {
                bars.Add(input.Close);
                if (!bars.IsReady) return 0;
                return Skewness(ToIEnumerable());
            }


        public decimal Skewness(IEnumerable<double> list)
        {
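            // Sample skewness (asymmetry of the window's distribution): roughly (1/n) * sum(((x_i - mean)/stdDev)^3),
            // with MathNet.Numerics applying a small-sample bias correction. Out-of-range results are clamped
            // to the decimal range and overflow falls back to 0.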
            try
            {
                var d = MathNet.Numerics.Statistics.Statistics.Skewness(list);
                if (d >= (double)Decimal.MaxValue) return Decimal.MaxValue;
                if (d <= (double)Decimal.MinValue) return Decimal.MinValue;

                return Convert.ToDecimal(d);
            }
            catch(OverflowException)
            {
                return 0;
            }
        }

        private IEnumerable<double> ToIEnumerable()
        {
            var e = bars.GetEnumerator();
            while (e.MoveNext())
            {
                yield return (double)e.Current;
            }
        }

    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Data;
using QuantConnect.Data.Consolidators;
using QuantConnect.Data.UniverseSelection;
using QuantConnect.Indicators;
using QuantConnect.Securities;
using QuantConnect.Algorithm.CSharp;
namespace QuantConnect.Algorithm.Framework.Alphas
{
    /// <summary>
    /// Alpha model that ranks symbols by the skewness of their recent closing prices to create insights
    /// </summary>
    public class AlphaSkew : AlphaModel
    {
        private readonly int _fastPeriod;
        private readonly int _slowPeriod;
         private readonly int _minutes;
        private readonly Resolution _resolution;
        private readonly int _predictionInterval;
        private readonly Dictionary<Symbol, SymbolData> _symbolDataBySymbol;

        /// <summary>
        /// Initializes a new instance of the <see cref="EmaCrossAlphaModel"/> class
        /// </summary>
        /// <param name="fastPeriod">The fast EMA period</param>
        /// <param name="slowPeriod">The slow EMA period</param>
        /// <param name="resolution">The resolution of data sent into the EMA indicators</param>
        public AlphaSkew(
            int fastPeriod = 12,
            int slowPeriod = 26,
            Resolution resolution = Resolution.Hour,
            int minutes = 10
            )
        {
            _fastPeriod = fastPeriod;
            _slowPeriod = slowPeriod;
            _resolution = resolution;
            _minutes = minutes;
            _predictionInterval = minutes;
            _symbolDataBySymbol = new Dictionary<Symbol, SymbolData>();
            Name = $"{nameof(AlphaSkew)}({fastPeriod},{slowPeriod},{resolution})";
        }
        int count = 0;
        private List<Insight> lastinsights = new List<Insight>();
        private DateTime lastInsightDate;
        /// <summary>
        /// Updates this alpha model with the latest data from the algorithm.
        /// This is called each time the algorithm receives data for subscribed securities
        /// </summary>
        /// <param name="algorithm">The algorithm instance</param>
        /// <param name="data">The new data available</param>
        /// <returns>The new insights generated</returns>
        public override IEnumerable<Insight> Update(QCAlgorithm algorithm, Slice data)
        {
        	var insights = new List<Insight>();

            if (data == null) return insights;
            //if(algorithm.Time.Minute/10!=0) return insights;
            var vix = ((ResistanceModulatedSplitter)algorithm).vix;
            var vixROC = ((ResistanceModulatedSplitter)algorithm).vixROC*100;
   
            var insightPeriod = _resolution.ToTimeSpan().Multiply(_predictionInterval).Multiply(1440).Add(new TimeSpan(0,10,0));
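            // Insight horizon = resolution span * prediction interval * 1440, plus a 10-minute buffer
            // (roughly 600 days with the default Hour resolution and minutes = 10, i.e. effectively
            // open-ended until the insight is explicitly flattened or replaced).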
            if (securitiesChanging) return insights;
			/*
            if(vix (vixROC>15 && vix>15) || vix>20 )
            {
            	foreach (var symbolData in _symbolDataBySymbol.Values)
            	{
	            	if(algorithm.IsMarketOpen(symbolData.Symbol))
	                {
						if(algorithm.Time>=symbolData.insightDate && algorithm.Securities[symbolData.Symbol].Price>0)
						{
	                    	insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, InsightDirection.Flat, Math.Abs(symbolData.ROCValue()), null));
	                    	symbolData.insightDate = algorithm.Time.Add(insightPeriod);
						}
	            	}
            	}
            	return insights;
            }
            */
            
            if (lastInsightDate == default(DateTime) || algorithm.Time.Subtract(lastInsightDate).TotalDays > 2)
            {
                lastInsightDate = algorithm.Time;

			Dictionary<SymbolData, decimal> skews = new Dictionary<SymbolData, decimal>();
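            // Rank the universe by the skewness of recent closes: the two most positively skewed
            // symbols (skew > 1) receive Up insights, the two most negatively skewed (skew < -1)
            // receive Down insights, and the remaining symbols are flattened below.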
            foreach (var symbolData in _symbolDataBySymbol.Values)
            {
                if (symbolData.Skew.IsReady && algorithm.Securities[symbolData.Symbol].Price > 0)
                    skews[symbolData] = symbolData.Skew;
            }
            var ascskews = skews.Where(x => x.Value > 1m).OrderByDescending(pair => pair.Value).Take(2).ToDictionary(pair => pair.Key, pair => pair.Value).Keys;
            var desskews = skews.Where(x => x.Value < -1m).OrderBy(pair => pair.Value).Take(2).ToDictionary(pair => pair.Key, pair => pair.Value).Keys;
 //           var ascskews = skews.Where(x=> x.Value>1).OrderByDescending(pair => pair.Value).Take((skews.Count/3)).ToDictionary(pair => pair.Key, pair => pair.Value).Keys;
 //           var desskews = skews.Where(x=> x.Value<-1).OrderBy(pair => pair.Value).Take((skews.Count/3)).ToDictionary(pair => pair.Key, pair => pair.Value).Keys;

                
                     /*       

            foreach (var symbolData in _symbolDataBySymbol.Values)
            {
            	                if (symbolData == null) continue;
                var symbol = symbolData.Ticker;

    
                if(algorithm.IsMarketOpen(symbol))
                {

					if(IsDateOK(algorithm.Time,symbolData.insightDate))
					{
						
						if(symbolData.CurrentMeanInsightDirection()==InsightDirection.Up && ascskews.Where(x=> x.Ticker==symbol).Select(x=>x).Count()>0)
                    	{
                    	insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, symbolData.CurrentMeanInsightDirection(), Math.Abs(symbolData.ROCValue()), null));
                    	symbolData.insightDate = algorithm.Time.Add(insightPeriod);
                    	symbolData.TradePrice = algorithm.Securities[symbolData.Symbol].Price;
                    	}
                    	
                    	if(symbolData.CurrentMeanInsightDirection()==InsightDirection.Down && desskews.Where(x=> x.Ticker==symbol).Select(x=>x).Count()>0)
                    	{
                    	//insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, symbolData.CurrentMeanInsightDirection(), Math.Abs(symbolData.ROCValue()), null));
                    	//symbolData.insightDate = algorithm.Time.Add(insightPeriod);
                    	//symbolData.TradePrice = algorithm.Securities[symbolData.Symbol].Price;
                    	}
					}
                }

            }
                       	lastinsights = insights;
            return insights;
            */
            foreach (var symbolData in ascskews)
            {
                if (symbolData == null) continue;
                var symbol = symbolData.Ticker;

    
                if(algorithm.IsMarketOpen(symbol))
                {

					if(IsDateOK(algorithm.Time,symbolData.insightDate) )
					{
						
						//if(symbolData.CurrentMeanInsightDirection()==InsightDirection.Up)
						//if(symbolData.ema1>symbolData.ema2) /*&& symbolData.holt.Slope[0]>0*/ /*&& symbolData.CurrentMeanInsightDirection()==InsightDirection.Up)*/
                    	{
                    	insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, InsightDirection.Up, Math.Abs(symbolData.ROCValue()), null));
                    	symbolData.insightDate = algorithm.Time.Add(insightPeriod);
                    	symbolData.TradePrice = algorithm.Securities[symbolData.Symbol].Price;

                    		
                    	}
                    	/*
                    	else
                    	{
                    	insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, InsightDirection.Flat, Math.Abs(symbolData.ROCValue()), null));
                    	symbolData.insightDate = algorithm.Time.Add(insightPeriod);
                    	}
                    	*/
					}
                }

            }
            
            foreach (var symbolData in desskews)
            {
                if (symbolData == null) continue;
                var symbol = symbolData.Ticker;

    
                if(algorithm.IsMarketOpen(symbol))
                {

					if(IsDateOK(algorithm.Time,symbolData.insightDate) )
					{
						
						//if(symbolData.CurrentMeanInsightDirection()==InsightDirection.Down)
						//if( symbolData.ema1<symbolData.ema2 )
						/*&& symbolData.holt.Slope[0]>0*/ /*&& symbolData.CurrentMeanInsightDirection()==InsightDirection.Up)*/
                    	{
                    	insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, InsightDirection.Down, Math.Abs(symbolData.ROCValue()), null));
                    	symbolData.insightDate = algorithm.Time.Add(insightPeriod);
                    	symbolData.TradePrice = algorithm.Securities[symbolData.Symbol].Price;

                    	}
                    	/*
                    	else
                    	{
                    	insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, InsightDirection.Flat, Math.Abs(symbolData.ROCValue()), null));
                    	symbolData.insightDate = algorithm.Time.Add(insightPeriod);
                    	}
                    	*/
                    	
					}
                }

            }
            
            var toFlat = _symbolDataBySymbol.Values.ToList().Except(ascskews).ToList().Except(desskews).ToList();
            	foreach (var symbolData in toFlat)
            	{
	            	if(algorithm.IsMarketOpen(symbolData.Symbol))
	                {
						if(IsDateOK(algorithm.Time,symbolData.insightDate) && algorithm.Securities[symbolData.Symbol].Price>0)
						{
	                    	insights.Add(Insight.Price(symbolData.Symbol, insightPeriod, InsightDirection.Flat, Math.Abs(symbolData.ROCValue()), null));
	                    	symbolData.insightDate = algorithm.Time.Add(insightPeriod);
	                    	symbolData.TradePrice = 0;
	                    	symbolData.MaxPrice = 0;
						}
	            	}
            	}
            lastinsights = insights;
            return insights;
            }
            else
            {
                // push insights again
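                // Re-emit or cancel the previously generated insights: MaxPrice tracks the most
                // favourable price reached since entry. A retracement of more than 10% from that
                // extreme flattens the position; if the price has not moved more than ~2% against
                // the entry price the original insight is re-issued; any other non-flat insight is flattened.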
  	foreach (var i in lastinsights)
  	{
  		var symbolData = _symbolDataBySymbol[i.Symbol];
  		if(i.Direction==InsightDirection.Down)
	    			symbolData.MaxPrice = symbolData.MaxPrice==0?algorithm.Securities[i.Symbol].Price:Math.Min(symbolData.MaxPrice,algorithm.Securities[i.Symbol].Price);
	    		else if(i.Direction==InsightDirection.Up)
	    			symbolData.MaxPrice = Math.Max(symbolData.MaxPrice,algorithm.Securities[i.Symbol].Price);
                    if (_symbolDataBySymbol[i.Symbol].TradePrice > 0 &&
                        ((i.Direction == InsightDirection.Up && algorithm.Securities[i.Symbol].Price / _symbolDataBySymbol[i.Symbol].MaxPrice < 0.9m)
                        || (i.Direction == InsightDirection.Down && _symbolDataBySymbol[i.Symbol].MaxPrice / algorithm.Securities[i.Symbol].Price < 0.9m)))
                    //if(algorithm.Securities[i.Symbol].Invested && algorithm.Securities[i.Symbol].Holdings.UnrealizedProfitPercent<-0.1m)
                    {
                        insights.Add(Insight.Price(i.Symbol, insightPeriod, InsightDirection.Flat, i.Magnitude, null));
                        _symbolDataBySymbol[i.Symbol].insightDate = algorithm.Time.Add(insightPeriod);
                        _symbolDataBySymbol[i.Symbol].TradePrice = 0;
                        _symbolDataBySymbol[i.Symbol].MaxPrice = 0;
                    }
  		else if(_symbolDataBySymbol[i.Symbol].TradePrice>0 && 
  		((i.Direction==InsightDirection.Up && algorithm.Securities[i.Symbol].Price/_symbolDataBySymbol[i.Symbol].TradePrice>0.98m) 
  		|| (i.Direction==InsightDirection.Down && _symbolDataBySymbol[i.Symbol].TradePrice/algorithm.Securities[i.Symbol].Price>0.98m)))
	    	{

	    		if(IsDateOK(algorithm.Time,_symbolDataBySymbol[i.Symbol].insightDate) )
	    			insights.Add(Insight.Price(i.Symbol, insightPeriod, i.Direction, i.Magnitude, null));
	    	}
	    	else if (i.Direction!=InsightDirection.Flat )//&& ((i.Direction==InsightDirection.Up && _symbolDataBySymbol[i.Symbol].Skew.Current.Value<1)||(i.Direction==InsightDirection.Down && _symbolDataBySymbol[i.Symbol].Skew.Current.Value>-1)))
	    	{
	    		insights.Add(Insight.Price(i.Symbol, insightPeriod, InsightDirection.Flat, i.Magnitude, null));
	    		_symbolDataBySymbol[i.Symbol].insightDate = algorithm.Time.Add(insightPeriod);
	    		_symbolDataBySymbol[i.Symbol].TradePrice=0;
	    		_symbolDataBySymbol[i.Symbol].MaxPrice=0;
	    		
	    	}	

  	}

                lastinsights = insights;
                return insights;
            }
        }
        private bool securitiesChanging = false;
        /// <summary>
        /// Event fired each time we add or remove securities from the data feed
        /// </summary>
        /// <param name="algorithm">The algorithm instance that experienced the change in securities</param>
        /// <param name="changes">The security additions and removals from the algorithm</param>
        public override void OnSecuritiesChanged(QCAlgorithm algorithm, SecurityChanges changes)
        {
            var addedSymbols = new List<Symbol>();
            securitiesChanging=true;
            foreach (var added in changes.AddedSecurities)
            {
                SymbolData symbolData;
                if (!_symbolDataBySymbol.TryGetValue(added.Symbol, out symbolData))
                {
                    var ticker = added.Symbol;
                    var security = algorithm.AddSecurity(added.Symbol.SecurityType, ticker, _resolution);
                    security.SetLeverage(100);
                    symbolData = new SymbolData(algorithm, _resolution, security);
                    symbolData.insightDate = algorithm.Time;
                    symbolData.Security = added;
                    symbolData.ROC = new RateOfChange(ticker, 20);
                    symbolData.ema1 = algorithm.EMA(ticker,50);
                    symbolData.ema2 = algorithm.EMA(ticker,200);
                    /*
                    var bars = algorithm.History(added.Symbol, _resolution.ToTimeSpan().Multiply(20), _resolution);
                    foreach(var bar in bars)
                    {
                    	symbolData.ROC.Update(new IndicatorDataPoint(bar.EndTime,bar.Close));
                    	//symbolData.MEAN.Update(bar);
                    	symbolData.Update(bar.Close);
                    }
                    */
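                    // Skew and ROC are fed from a long consolidated bar (resolution span * minutes * 1440),
                    // matching the horizon used for the insight period above.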
                    algorithm.RegisterIndicator(ticker, symbolData.Skew, _resolution.ToTimeSpan().Multiply(_minutes).Multiply(1440));
                    algorithm.RegisterIndicator(ticker, symbolData.ROC, _resolution.ToTimeSpan().Multiply(_minutes).Multiply(1440));//_resolution);
                    //algorithm.RegisterIndicator(ticker, symbolData.MEAN, _resolution.ToTimeSpan().Multiply(_minutes));
                    //algorithm.RegisterIndicator(ticker, symbolData.MOM, _resolution.ToTimeSpan().Multiply(_minutes));
                    //algorithm.RegisterIndicator(ticker, symbolData.ema1, _resolution.ToTimeSpan().Multiply(_minutes));
                    //algorithm.RegisterIndicator(ticker, symbolData.ema2, _resolution.ToTimeSpan().Multiply(_minutes));
                    //algorithm.RegisterIndicator(ticker, symbolData.holt, _resolution.ToTimeSpan().Multiply(_minutes));
                    _symbolDataBySymbol[added.Symbol] = symbolData;
                    addedSymbols.Add(added.Symbol);

                }
   
            }
            foreach (var removed in changes.RemovedSecurities)
            {
                SymbolData symbolData;
                if (_symbolDataBySymbol.TryGetValue(removed.Symbol, out symbolData))
                {
                    _symbolDataBySymbol.Remove(removed.Symbol);
                    algorithm.SubscriptionManager.RemoveConsolidator(removed.Symbol, symbolData.Consolidator);
                }
                //algorithm.RemoveSecurity(removed.Symbol);
            }

            if (addedSymbols.Count > 0)
            {
                // warmup our indicators by pushing history through the consolidators
                algorithm.History(addedSymbols, 100, _resolution)
                .PushThrough(bar =>
                {
                    SymbolData symbolData;
                    if (_symbolDataBySymbol.TryGetValue(bar.Symbol, out symbolData))
                    {
                        symbolData.ROC.Update(bar.EndTime, bar.Value);
                        symbolData.Update(bar.Value);
                        symbolData.MEAN.Update(bar);
                        symbolData.holt.Update(bar);
                        symbolData.MOM.Update(new IndicatorDataPoint(bar.EndTime, bar.Value));
                        try
                        {
                        symbolData.ema1.Update(new IndicatorDataPoint(bar.EndTime, bar.Value));
                        symbolData.ema2.Update(new IndicatorDataPoint(bar.EndTime, bar.Value));
                        }
                        catch(Exception) {/*will happen if EMA already exists.. ERROR] FATAL UNHANDLED EXCEPTION:This is a forward only indicator: EMA(20, MO_min) Input: 2005-03-04 00:00:00Z Previous: 2005-03-04 09:31:00Z.*/ }
                       // symbolData.MEAN.Update(new IndicatorDataPoint(bar.EndTime, bar.Value));
                    }
                });
            }
            securitiesChanging=false;
        }
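
        // IsDateOK: returns true once 'time' is within 30 minutes of (or past) 'time2', i.e. the
        // previously scheduled insight date has (nearly) been reached and a new insight may be emitted.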
        
        private bool IsDateOK(DateTime time, DateTime time2)
        {
        	return time.Subtract(time2).TotalMinutes>=-30;
        }


        class SymbolData
        {
            public RateOfChange ROC;
            public Security Security { get; set; }
            public Symbol Symbol => Security.Symbol;
            public SwingSRIndicator MEAN;
             public SkewIndicator Skew;
            public Momentum MOM;
            public string Ticker;
            public long previous = 0;
            public int Day = 0;
            public ExponentialMovingAverage ema1;
            public ExponentialMovingAverage ema2;
            public RollingWindow<IndicatorDataPoint> ema;
            public MovingAverageConvergenceDivergence MACD;
            public RelativeStrengthIndex RSI;
            public HoltIndicator holt;
             public readonly IDataConsolidator Consolidator;
            public SymbolData(QCAlgorithm algorithm, Resolution resolution, Security security)
            {
                Ticker = security.Symbol;
                MEAN = new SwingSRIndicator(Ticker,50);
                Skew = new SkewIndicator(Ticker,100);//251);
                MOM = new Momentum(200);
                holt = new HoltIndicator(Ticker,100,0.9m,0.008m);
                Consolidator = algorithm.ResolveConsolidator(security.Symbol, resolution);
                algorithm.SubscriptionManager.AddConsolidator(security.Symbol, Consolidator);
            }

            public decimal _insight_price = 1;
            public decimal _price = 0;
            public InsightDirection _insight_dir = InsightDirection.Flat;
            public decimal pps = 0;
            public DateTime insightDate;
            bool emit = false;
            public decimal TradePrice = 0;
             public decimal MaxPrice = 0;

			public InsightDirection CurrentMeanInsightDirection()
			{
				return ((int)MEAN.Current.Value == 1)?  InsightDirection.Up : InsightDirection.Down;
			}

            public bool IsReady()
            {
                if (!Skew.IsReady) return false;
                return true;
            }
            public void Reset()
            {
            }
            public void Update(decimal value)
            {
                _price = value;
                emit = true;
            }
			public double ROCValue()
			{
				return double.IsNaN((double)ROC.Current.Value)?(double)0d: (double)ROC.Current.Value;
			}
            public bool CanEmit()
            {
               if (!MEAN.Updated()) return false;
                return ROC.IsReady;
            }
        }
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections.Generic;
using System.Linq;
using Accord.Math;
using Python.Runtime;
using QuantConnect.Algorithm.Framework.Alphas;
using QuantConnect.Data;
using QuantConnect.Data.UniverseSelection;
using QuantConnect.Scheduling;

namespace QuantConnect.Algorithm.Framework.Portfolio
{
    /// <summary>
    /// Provides an implementation of Mean-Variance portfolio optimization based on modern portfolio theory.
    /// The interval of weights in optimization method can be changed based on the long-short algorithm.
    /// The default model uses the last three months daily price to calculate the optimal weight
    /// with the weight range from -1 to 1 and minimize the portfolio variance with a target return of 2%
    /// </summary>
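    /// <remarks>
    /// Minimal usage sketch (assumed wiring, set from the algorithm's Initialize):
    /// <c>SetPortfolioConstruction(new MeanVarianceOptimizationPortfolioConstructionModel2(Resolution.Daily));</c>
    /// </remarks>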
    public class MeanVarianceOptimizationPortfolioConstructionModel2 : PortfolioConstructionModel
    {
        private readonly int _lookback;
        private readonly int _period;
        private readonly Resolution _resolution;
        private readonly PortfolioBias _portfolioBias;
        private readonly IPortfolioOptimizer _optimizer;
        private readonly Dictionary<Symbol, ReturnsSymbolData> _symbolDataDict;

        /// <summary>
        /// Initialize the model
        /// </summary>
        /// <param name="rebalancingDateRules">The date rules used to define the next expected rebalance time
        /// in UTC</param>
        /// <param name="portfolioBias">Specifies the bias of the portfolio (Short, Long/Short, Long)</param>
        /// <param name="lookback">Historical return lookback period</param>
        /// <param name="period">The time interval of history price to calculate the weight</param>
        /// <param name="resolution">The resolution of the history price</param>
        /// <param name="targetReturn">The target portfolio return</param>
        /// <param name="optimizer">The portfolio optimization algorithm. If the algorithm is not provided then the default will be mean-variance optimization.</param>
        public MeanVarianceOptimizationPortfolioConstructionModel2(IDateRule rebalancingDateRules,
            PortfolioBias portfolioBias = PortfolioBias.Long,
            int lookback = 1,
            int period = 63,
            Resolution resolution = Resolution.Daily,
            double targetReturn = 0.02,
            IPortfolioOptimizer optimizer = null)
            : this(rebalancingDateRules.ToFunc(), portfolioBias, lookback, period, resolution, targetReturn, optimizer)
        {
        }

        /// <summary>
        /// Initialize the model
        /// </summary>
        /// <param name="rebalanceResolution">Rebalancing frequency</param>
        /// <param name="portfolioBias">Specifies the bias of the portfolio (Short, Long/Short, Long)</param>
        /// <param name="lookback">Historical return lookback period</param>
        /// <param name="period">The time interval of history price to calculate the weight</param>
        /// <param name="resolution">The resolution of the history price</param>
        /// <param name="targetReturn">The target portfolio return</param>
        /// <param name="optimizer">The portfolio optimization algorithm. If the algorithm is not provided then the default will be mean-variance optimization.</param>
        public MeanVarianceOptimizationPortfolioConstructionModel2(Resolution rebalanceResolution = Resolution.Daily,
            PortfolioBias portfolioBias = PortfolioBias.LongShort,
            int lookback = 1,
            int period = 63,
            Resolution resolution = Resolution.Daily,
            double targetReturn = 0.02,
            IPortfolioOptimizer optimizer = null)
            : this(rebalanceResolution.ToTimeSpan(), portfolioBias, lookback, period, resolution, targetReturn, optimizer)
        {
        }

        /// <summary>
        /// Initialize the model
        /// </summary>
        /// <param name="timeSpan">Rebalancing frequency</param>
        /// <param name="portfolioBias">Specifies the bias of the portfolio (Short, Long/Short, Long)</param>
        /// <param name="lookback">Historical return lookback period</param>
        /// <param name="period">The time interval of history price to calculate the weight</param>
        /// <param name="resolution">The resolution of the history price</param>
        /// <param name="targetReturn">The target portfolio return</param>
        /// <param name="optimizer">The portfolio optimization algorithm. If the algorithm is not provided then the default will be mean-variance optimization.</param>
        public MeanVarianceOptimizationPortfolioConstructionModel2(TimeSpan timeSpan,
            PortfolioBias portfolioBias = PortfolioBias.LongShort,
            int lookback = 1,
            int period = 63,
            Resolution resolution = Resolution.Daily,
            double targetReturn = 0.02,
            IPortfolioOptimizer optimizer = null)
            : this(dt => dt.Add(timeSpan), portfolioBias, lookback, period, resolution, targetReturn, optimizer)
        {
        }

        /// <summary>
        /// Initialize the model
        /// </summary>
        /// <param name="rebalance">Rebalancing func or if a date rule, timedelta will be converted into func.
        /// For a given algorithm UTC DateTime the func returns the next expected rebalance time
        /// or null if unknown, in which case the function will be called again in the next loop. Returning current time
        /// will trigger rebalance. If null will be ignored</param>
        /// <param name="portfolioBias">Specifies the bias of the portfolio (Short, Long/Short, Long)</param>
        /// <param name="lookback">Historical return lookback period</param>
        /// <param name="period">The time interval of history price to calculate the weight</param>
        /// <param name="resolution">The resolution of the history price</param>
        /// <param name="targetReturn">The target portfolio return</param>
        /// <param name="optimizer">The portfolio optimization algorithm. If the algorithm is not provided then the default will be mean-variance optimization.</param>
        /// <remarks>This is required since python net can not convert python methods into func nor resolve the correct
        /// constructor for the date rules parameter.
        /// For performance we prefer python algorithms using the C# implementation</remarks>
        public MeanVarianceOptimizationPortfolioConstructionModel2(PyObject rebalance,
            PortfolioBias portfolioBias = PortfolioBias.LongShort,
            int lookback = 1,
            int period = 63,
            Resolution resolution = Resolution.Daily,
            double targetReturn = 0.02,
            IPortfolioOptimizer optimizer = null)
            : this((Func<DateTime, DateTime?>)null, portfolioBias, lookback, period, resolution, targetReturn, optimizer)
        {
            SetRebalancingFunc(rebalance);
        }

        /// <summary>
        /// Initialize the model
        /// </summary>
        /// <param name="rebalancingFunc">For a given algorithm UTC DateTime returns the next expected rebalance UTC time.
        /// Returning current time will trigger rebalance. If null will be ignored</param>
        /// <param name="portfolioBias">Specifies the bias of the portfolio (Short, Long/Short, Long)</param>
        /// <param name="lookback">Historical return lookback period</param>
        /// <param name="period">The time interval of history price to calculate the weight</param>
        /// <param name="resolution">The resolution of the history price</param>
        /// <param name="targetReturn">The target portfolio return</param>
        /// <param name="optimizer">The portfolio optimization algorithm. If the algorithm is not provided then the default will be mean-variance optimization.</param>
        public MeanVarianceOptimizationPortfolioConstructionModel2(Func<DateTime, DateTime> rebalancingFunc,
            PortfolioBias portfolioBias = PortfolioBias.LongShort,
            int lookback = 1,
            int period = 63,
            Resolution resolution = Resolution.Daily,
            double targetReturn = 0.02,
            IPortfolioOptimizer optimizer = null)
            : this(rebalancingFunc != null ? (Func<DateTime, DateTime?>)(timeUtc => rebalancingFunc(timeUtc)) : null,
                portfolioBias,
                lookback,
                period,
                resolution,
                targetReturn,
                optimizer)
        {
        }

        /// <summary>
        /// Initialize the model
        /// </summary>
        /// <param name="rebalancingFunc">For a given algorithm UTC DateTime returns the next expected rebalance time
        /// or null if unknown, in which case the function will be called again in the next loop. Returning current time
        /// will trigger rebalance.</param>
        /// <param name="portfolioBias">Specifies the bias of the portfolio (Short, Long/Short, Long)</param>
        /// <param name="lookback">Historical return lookback period</param>
        /// <param name="period">The time interval of history price to calculate the weight</param>
        /// <param name="resolution">The resolution of the history price</param>
        /// <param name="targetReturn">The target portfolio return</param>
        /// <param name="optimizer">The portfolio optimization algorithm. If the algorithm is not provided then the default will be mean-variance optimization.</param>
        public MeanVarianceOptimizationPortfolioConstructionModel2(Func<DateTime, DateTime?> rebalancingFunc,
            PortfolioBias portfolioBias = PortfolioBias.LongShort,
            int lookback = 1,
            int period = 63,
            Resolution resolution = Resolution.Daily,
            double targetReturn = 0.02,
            IPortfolioOptimizer optimizer = null)
            : base(rebalancingFunc)
        {
            _lookback = lookback;
            _period = period;
            _resolution = resolution;
            _portfolioBias = portfolioBias;

            var lower = portfolioBias == PortfolioBias.Long ? 0 : -1;
            var upper = portfolioBias == PortfolioBias.Short ? 0 : 1;
            _optimizer = optimizer ?? new MinimumVariancePortfolioOptimizer(lower, upper, targetReturn);

            _symbolDataDict = new Dictionary<Symbol, ReturnsSymbolData>();
        }

        /// <summary>
        /// Method that will determine if the portfolio construction model should create a
        /// target for this insight
        /// </summary>
        /// <param name="insight">The insight to create a target for</param>
        /// <returns>True if the portfolio should create a target for the insight</returns>
        protected override bool ShouldCreateTargetForInsight(Insight insight)
        {
            var filteredInsight = FilterInvalidInsightMagnitude(Algorithm, new[] { insight }).FirstOrDefault();
            if (filteredInsight == null)
            {
                return false;
            }

            ReturnsSymbolData data;
            if (_symbolDataDict.TryGetValue(insight.Symbol, out data))
            {
                if (!insight.Magnitude.HasValue)
                {
                    Algorithm.SetRunTimeError(
                        new ArgumentNullException(
                            insight.Symbol.Value,
                            "MeanVarianceOptimizationPortfolioConstructionModel does not accept 'null' as Insight.Magnitude. " +
                            "Please checkout the selected Alpha Model specifications: " + insight.SourceModel));
                    return false;
                }
                data.Add(Algorithm.Time, insight.Magnitude.Value.SafeDecimalCast());
            }

            return true;
        }

        /// <summary>
        /// Will determine the target percent for each insight
        /// </summary>
        /// <param name="activeInsights">The active insights to generate a target for</param>
        /// <returns>A target percent for each insight</returns>
        protected override Dictionary<Insight, double> DetermineTargetPercent(List<Insight> activeInsights)
        {
            var targets = new Dictionary<Insight, double>();

            // Get the last generated active insight for each symbol
            var lastActiveInsights = from insight in activeInsights
                                     group insight by insight.Symbol into g
                                     select g.OrderBy(x => x.GeneratedTimeUtc).Last();
             
            var symbols = lastActiveInsights.Where(x=> x.Direction !=InsightDirection.Flat).Select(x => x.Symbol).ToList();

            // Get symbols' returns
            var returns = _symbolDataDict.FormReturnsMatrix(symbols);

            // Annualize the period returns (compounded over 252 trading periods)
            var rreturns = returns.Apply(e => Math.Pow(1.0 + e, 252.0) - 1.0);

            // The optimization method processes the data frame
            var w = _optimizer.Optimize(rreturns);

            // process results
            if (w.Length > 0)
            {
                var sidx = 0;
                foreach (var symbol in symbols)
                {
                	double weight = 0;
                	if(sidx<w.Length) weight = w[sidx];
					if(double.IsNaN(weight))
					{
						weight = 0;
					}
                    // don't trust the optimizer
                    else if (_portfolioBias != PortfolioBias.LongShort
                        && Math.Sign(weight) != (int)_portfolioBias)
                    {
                        weight = 0;
                    }
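                    // Apply the insight's direction to the optimizer's absolute weight and double it,
                    // i.e. target roughly 2x exposure per position.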
                    var insightSymbol = activeInsights.First(insight => insight.Symbol == symbol);
                    targets[insightSymbol] = (int)insightSymbol.Direction * Math.Abs(weight) * 2;

                    sidx++;
                }
            }

            symbols = lastActiveInsights.Where(x => x.Direction == InsightDirection.Flat).Select(x => x.Symbol).ToList();
            foreach (var symbol in symbols)
            {
                targets[activeInsights.First(insight => insight.Symbol == symbol)] = 0;
            }
            return targets;
        }

        /// <summary>
        /// Event fired each time we add or remove securities from the data feed
        /// </summary>
        /// <param name="algorithm">The algorithm instance that experienced the change in securities</param>
        /// <param name="changes">The security additions and removals from the algorithm</param>
        public override void OnSecuritiesChanged(QCAlgorithm algorithm, SecurityChanges changes)
        {
            base.OnSecuritiesChanged(algorithm, changes);
            // clean up data for removed securities
            foreach (var removed in changes.RemovedSecurities)
            {
                ReturnsSymbolData data;
                if (_symbolDataDict.TryGetValue(removed.Symbol, out data))
                {
                    _symbolDataDict.Remove(removed.Symbol);
                }
            }

            if (changes.AddedSecurities.Count == 0)
                return;

            // initialize data for added securities
            foreach (var added in changes.AddedSecurities)
            {
                if (!_symbolDataDict.ContainsKey(added.Symbol))
                {
                    var symbolData = new ReturnsSymbolData(added.Symbol, _lookback, _period);
                    _symbolDataDict[added.Symbol] = symbolData;
                }
            }

            // warmup our indicators by pushing history through the consolidators
            algorithm.History(changes.AddedSecurities.Select(security => security.Symbol), _lookback * _period, _resolution)
                .PushThrough(bar =>
                {
                    ReturnsSymbolData symbolData;
                    if (_symbolDataDict.TryGetValue(bar.Symbol, out symbolData))
                    {
                        symbolData.Update(bar.EndTime, bar.Value);
                    }
                });
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Text;

/* Code adapted from a k-means implementation by Hardkjarni found online; extended to report the low, high and mean of each cluster */
namespace QuantConnect.Algorithm.CSharp
{
    public class KMeansStats { 
        public KMeansStats(double[] _low, double[] _mean, double[] _high, double[] _length)
        {
            lows = _low;
            means = _mean;
            highs = _high;
            lengths = _length;
        }

        public void Reset()
        {
               for (int i = 0; i < means.Length; i++)
                {
                    means[i] = 0;
                }   
                for (int i = 0; i < lows.Length; i++)
                {
                    lows[i] = 0;
                }  
                for (int i = 0; i < highs.Length; i++)
                {
                    highs[i] = 0;
                } 
                 for (int i = 0; i < lengths.Length; i++)
                {
                    lengths[i] = 0;
                }           
        }
        public double[] lows { get; private set; }
        public double[] means { get; private set; }
        public double[] highs { get; private set; }
        public double[] lengths { get; private set; }
        }
    /// <summary>
    /// Defines a property or field as an attribute to use for the k-means clustering
    /// </summary>
    [AttributeUsage(AttributeTargets.Property)]
    public sealed class KMeansValueAttribute : Attribute { }

    /// <summary>
    /// Delegate that can be passed in to the <see cref="KMeans.Cluster{T}"/> function that allows the caller to provide their own distance calculation function 
    /// for a point to a centroid.
    /// </summary>
    /// <param name="point">the point being calculated</param>
    /// <param name="centroid">the centroid that is being calculated against</param>
    /// <returns>the distance value between the point and the centroid</returns>
    public delegate double KMeansCalculateDistanceDelegate(double[] point, double[] centroid);

    /// <summary>
    /// Provides a simple implementation of the k-Means algorithm. This solution is quite simple and does not support any parallel execution as of yet.
    /// </summary>
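    /// <remarks>
    /// Minimal usage sketch (hypothetical inputs): cluster a set of prices wrapped in <see cref="DataK"/> into three groups, e.g.
    /// <c>var result = KMeans.Cluster(prices.Select(p => new DataK((double)p)).ToArray(), 3, 100);</c>
    /// <c>result.Stats</c> then exposes the low/mean/high of each price cluster.
    /// </remarks>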
    public static class KMeans
    {
        private static double[][] ConvertEntities<T>(T[] items)
        {
            var type = typeof(T);
            var data = new List<double[]>();

            // If the type is an array type
            if (type.IsArray && type.IsAssignableFrom(typeof(double[])))
            {
                foreach (var item in items)
                {
                    var val = item as double[];
                    data.Add(val);
                }
                return data.ToArray();
            }

            var getters = new List<MethodInfo>();

            // Iterate over the type and extract all the properties that have the KMeansValueAttribute set and use them as attributes
            var attribType = typeof(KMeansValueAttribute);
            foreach (var property in type.GetProperties(BindingFlags.Instance | BindingFlags.Public))
            {
                var attribs = property.GetCustomAttributes(attribType, false).OfType<KMeansValueAttribute>().ToArray();
                if (attribs.Length <= 0)
                    continue;

                var getter = property.GetGetMethod();
                if (getter == null)
                    throw new InvalidOperationException("No public getter for property '" + property.Name + "'. All properties marked with the KMeansValueAttribute must have a public getter");

                if (!property.PropertyType.IsAssignableFrom(typeof(double)) &&
                    !property.PropertyType.IsAssignableFrom(typeof(int)) &&
                    !property.PropertyType.IsAssignableFrom(typeof(float)) &&
                    !property.PropertyType.IsAssignableFrom(typeof(long)) &&
                    !property.PropertyType.IsAssignableFrom(typeof(decimal)) &&
                    !property.PropertyType.IsAssignableFrom(typeof(short)))
                    throw new InvalidOperationException("Property type '" + property.PropertyType.Name + "' for property '" + property.Name + "' cannot be assigned to System.Double. ");

                getters.Add(getter);
            }

            foreach (var item in items)
            {
                List<double> values = new List<double>(getters.Count);
                foreach (var getter in getters)
                    values.Add(Convert.ToDouble(getter.Invoke(item, null)));
                data.Add(values.ToArray());
            }

            return data.ToArray();
        }

        /// <summary>
        /// Clusters the given item set into the desired number of clusters. 
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="items">the list of data items that should be processed, this can be an array of primitive values such as <see cref="System.Double[]"/> 
        /// or a class struct that exposes properties using the <see cref="KMeansValueAttribute"/></param>
        /// <param name="clusterCount">the desired number of clusters</param>
        /// <param name="maxIterations">the maximum number of iterations to perform</param>
        /// <param name="calculateDistanceFunction">optional, custom distance function, if omitted then the euclidean distance will be used as default</param>
        /// <param name="randomSeed">optional, a seed for the random generator that initially arranges the clustering of the nodes (specify the same value to ensure that the start ordering will be the same)</param>
        /// <param name="initialCentroidIndices">optional, the initial centroid configuration (as indicies into the <see cref="items"/> array). When this is used the <see cref="randomSeed"/> has no effect.
        /// Experiment with this as the initial arrangements of the centroids has a huge impact on the final cluster arrangement.</param>
        /// <returns>a result containing the items arranged into clusters as well as the centroids converged on and the total distance value for the cluster nodes.</returns>
        public static KMeansResults<T> Cluster<T>(T[] items, int clusterCount, int maxIterations, KMeansCalculateDistanceDelegate calculateDistanceFunction = null, int randomSeed = 0, int[] initialCentroidIndices = null)
        {
            double[][] data = ConvertEntities(items);

            // Use the built in Euclidean distance calculation if no custom one is specified
            if (calculateDistanceFunction == null)
                calculateDistanceFunction = CalculateDistance;

            bool hasChanges = true;
            int iteration = 0;
            double totalDistance = 0;
            int numData = data.Length;
            int numAttributes = data[0].Length;

            // Create a random initial clustering assignment
            int[] clustering = InitializeClustering(numData, clusterCount, randomSeed);

            // Create cluster means and centroids
            KMeansStats[] stats = CreateMatrixCluster(clusterCount, numAttributes);
            int[] centroidIdx = new int[clusterCount];
            int[] clusterItemCount = new int[clusterCount];

            // If we specify initial centroid indices then let's assign clustering based on those immediately
            if (initialCentroidIndices != null && initialCentroidIndices.Length == clusterCount)
            {
                centroidIdx = initialCentroidIndices;
                AssignClustering(data, clustering, centroidIdx, clusterCount, calculateDistanceFunction);
                //                Debug.WriteLine("Pre-Seeded Centroids resulted in initial clustering: " + string.Join(",", clustering.Select(x => x.ToString()).ToArray()));
            }

            // Perform the clustering
            while (hasChanges && iteration < maxIterations)
            {
                clusterItemCount = new int[clusterCount];
                totalDistance = CalculateClusteringInformation(data, clustering, ref stats, ref centroidIdx, clusterCount, ref clusterItemCount, calculateDistanceFunction);

                //                Debug.WriteLine("------------- Iter: " + iteration);
                //                Debug.WriteLine("Clustering: " + string.Join(",", clustering.Select(x => x.ToString()).ToArray()));
                //                Debug.WriteLine("Means: " + string.Join(",", means.Select(x => "[" + string.Join(",", x.Select(y => y.ToString("#0.0")).ToArray()) + "]").ToArray()));
                //                Debug.WriteLine("Centroids: " + string.Join(",", centroidIdx.Select(x => x.ToString()).ToArray()));
                //                Debug.WriteLine("Cluster Counts: " + string.Join(",", clusterItemCount.Select(x => x.ToString()).ToArray()));

                hasChanges = AssignClustering(data, clustering, centroidIdx, clusterCount, calculateDistanceFunction);
                ++iteration;
            }

            
            // Create the final clusters
            T[][] clusters = new T[clusterCount][];
            try
            {
                for (int k = 0; k < clusters.Length; k++)
                    clusters[k] = new T[clusterItemCount[k]];

                int[] clustersCurIdx = new int[clusterCount];
                for (int i = 0; i < clustering.Length; i++)
                {
                    clusters[clustering[i]][clustersCurIdx[clustering[i]]] = items[i];
                    ++clustersCurIdx[clustering[i]];
                }
            }
            catch (Exception) { /* cluster counts can be stale if the final assignment pass changed membership; ignore and return what was built */ }

            // Return the results
            return new KMeansResults<T>(clusters, stats, centroidIdx, totalDistance);
        }

        private static int[] InitializeClustering(int numData, int clusterCount, int seed)
        {
            var rnd = new Random(seed);
            var clustering = new int[numData];

            for (int i = 0; i < numData; ++i)
                clustering[i] = rnd.Next(0, clusterCount);

            return clustering;
        }

        private static double[][] CreateMatrix(int rows, int columns)
        {
            var matrix = new double[rows][];

            for (int i = 0; i < matrix.Length; i++)
                matrix[i] = new double[columns];

            return matrix;
        }

        private static KMeansStats[] CreateMatrixCluster(int rows, int columns)
        {
            var matrix = new KMeansStats[rows];

            for (int i = 0; i < matrix.Length; i++)
            {
                var low = new double[columns];
                var mean = new double[columns];
                var high = new double[columns];
                var length = new double[columns];
                matrix[i] = new KMeansStats(low,mean,high,length);
            }
            return matrix;
        }

        

        private static double CalculateClusteringInformation(double[][] data, int[] clustering, ref KMeansStats[] stats, ref int[] centroidIdx,
                                                             int clusterCount, ref int[] clusterItemCount, KMeansCalculateDistanceDelegate calculateDistanceFunction)
        {

            for (int i = 0; i < stats.Length; i++)
            {
                stats[i].Reset();
            }

            // Calculate the means for each cluster
            // Do this in two phases, first sum them all up and then divide by the count in each cluster
            for (int i = 0; i < data.Length; i++)
            {
                // Sum up the means
                var row = data[i];
                var clusterIdx = clustering[i]; // What cluster is data i assigned to
                ++clusterItemCount[clusterIdx]; // Increment the count of the cluster that row i is assigned to

                for (int j = 0; j < row.Length; j++)
                {
                    stats[clusterIdx].means[j] += row[j];
                    stats[clusterIdx].lows[j] = stats[clusterIdx].lows[j] == 0 ? row[j] : Math.Min(stats[clusterIdx].lows[j], row[j]);
                    stats[clusterIdx].highs[j]=  Math.Max(stats[clusterIdx].highs[j], row[j]);
            		stats[clusterIdx].lengths[j]= clusterItemCount[clusterIdx];
                }
            }

            // Now divide to get the average
            for (int k = 0; k < stats.Length; k++)
            {
                for (int a = 0; a < stats[k].means.Length; a++)
                {
                    int itemCount = clusterItemCount[k];
                    stats[k].means[a] /= itemCount > 0 ? itemCount : 1;
                }
            }

            double totalDistance = 0;
            // Calc the centroids
            double[] minDistances = new double[clusterCount].Select(x => double.MaxValue).ToArray();
            for (int i = 0; i < data.Length; i++)
            {
                var clusterIdx = clustering[i]; // What cluster is data i assigned to
                //var distance = CalculateDistance(data[i], means[clusterIdx]);
                var distance = calculateDistanceFunction(data[i], stats[clusterIdx].means);
                totalDistance += distance;
                if (distance < minDistances[clusterIdx])
                {
                    minDistances[clusterIdx] = distance;
                    centroidIdx[clusterIdx] = i;
                }
            }
            //double totalCentroidDistance = minDistances.Sum();

            return totalDistance;
        }

        /// <summary>
        /// Calculates the distance for each point in <see cref="data"/> from each of the centroid in <see cref="centroidIdx"/> and 
        /// assigns the data item to the cluster with the minimum distance.
        /// </summary>
        /// <returns>true if any clustering arrangement has changed, false if clustering did not change.</returns>
        private static bool AssignClustering(double[][] data, int[] clustering, int[] centroidIdx, int clusterCount, KMeansCalculateDistanceDelegate calculateDistanceFunction)
        {
            bool changed = false;

            for (int i = 0; i < data.Length; i++)
            {
                double minDistance = double.MaxValue;
                int minClusterIndex = -1;

                for (int k = 0; k < clusterCount; k++)
                {
                    double distance = calculateDistanceFunction(data[i], data[centroidIdx[k]]);
                    if (distance < minDistance)
                    {
                        minDistance = distance;
                        minClusterIndex = k;
                    }
                    // todo: track outliers here as well and maintain an average and std calculation for the distances!
                }

                // Re-arrange the clustering for datapoint if needed
                if (minClusterIndex != -1 && clustering[i] != minClusterIndex)
                {
                    changed = true;
                    clustering[i] = minClusterIndex;
                }
            }

            return changed;
        }

        /// <summary>
        /// Calculates the Euclidean distance from <paramref name="point"/> to <paramref name="centroid"/>
        /// </summary>
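        /// <remarks>distance = sqrt of the sum over i of (centroid[i] - point[i])^2</remarks>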
        private static double CalculateDistance(double[] point, double[] centroid)
        {
            // For each attribute calculate the squared difference between the centroid and the point
            double sum = 0;
            for (int i = 0; i < point.Length; i++)
                sum += Math.Pow(centroid[i] - point[i], 2);

            return Math.Sqrt(sum);
            //return Math.Sqrt(point.Select((t, i) => Math.Pow(centroid[i] - t, 2)).Sum()); // LINQ is slower than doing the for-loop!
        }
    }

    /// <summary>
    /// Represents a single result from the <see cref="KMeans"/> algorithm. 
    /// Contains the original items arranged into the clusters converged on as well as the centroids chosen and the total distance of the converged solution.
    /// </summary>
    /// <typeparam name="T"></typeparam>
    public class KMeansResults<T>
    {
        /// <summary>
        /// The original items arranged into the clusters converged on
        /// </summary>
        public T[][] Clusters { get; private set; }

        /// <summary>
        /// The final low, high, mean, and length values for each cluster. Mostly useful for debugging.
        /// </summary>
        public KMeansStats[] Stats { get; private set; }

        /// <summary>
        /// The list of centroids used in the final solution. These are indices into the original data.
        /// </summary>
        public int[] Centroids { get; private set; }

        /// <summary>
        /// The total distance between all the nodes and their centroids in the final solution.
        /// This can be used as a reference point for how "good" the solution is when the algorithm is run repeatedly with different starting configurations.
        /// Lower is "usually" better.
        /// </summary>
        public double TotalDistance { get; private set; }

        public KMeansResults(T[][] clusters, KMeansStats[] stats, int[] centroids, double totalDistance)
        {
            Clusters = clusters;
            Stats = stats;
            Centroids = centroids;
            TotalDistance = totalDistance;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Data.Market;
using QuantConnect.Indicators;
using QuantConnect.Algorithm.CSharp;
namespace QuantConnect.Indicators
{
	public class DataK
	{
		public DataK(double _price) { this.price = _price; }

		[KMeansValue]
		public double price { get; set; }
	}
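
	// SwingSRIndicator clusters a rolling window of closing prices with KMeans and uses the lowest and
	// highest sufficiently-weighted clusters as swing support/resistance bands, emitting +1 when price
	// falls below the lower band's mean and -1 when it rises above the upper band's mean.
	//
	// Minimal usage sketch (hypothetical wiring; the project's actual alpha model is not shown here):
	//   var sr = new SwingSRIndicator("SR", 120);
	//   RegisterIndicator(symbol, sr, Resolution.Hour);
	//   if (sr.IsReady && sr.Updated()) { var direction = sr.Current.Value; /* +1 or -1 */ }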

    public class SwingSRIndicator : BarIndicator
    {
        public readonly RollingWindow<double> Prices;

        int _trend_dir = 0;
        bool changed = false;
        bool updated = false;

        public SwingSRIndicator(string name, int length = 20)
            : base(name)
        {
        	Prices = new RollingWindow<double>(length);
        }

        /// <summary>
        /// Clusters the rolling price window with k-means and derives a trend direction from the
        /// lowest and highest sufficiently-weighted clusters (support/resistance bands).
        /// </summary>
        /// <param name="data">The bar to process</param>
        /// <returns>+1 when price falls below the lowest band's mean, -1 when it rises above the highest band's mean, otherwise the most recent direction (0 before the window is ready)</returns>
        protected override decimal ComputeNextValue(IBaseDataBar data)
        {

			
			if (Prices.IsReady)
			{
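	            // Cluster the window of closes with k-means (clustersize clusters) and treat each cluster's
	            // [low, high] range as a candidate support/resistance band; the per-cluster item counts,
	            // normalised by the window size, give the weights used below to discard thin clusters.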
	            List<DataK> d = new List<DataK>();
	            foreach (var bar in Prices)
	            {
	                d.Add(new DataK(bar));
	            }
	            int clustersize = 6;
	            var clusters = KMeans.Cluster(d.ToArray(), clustersize, 30);
	            var _lowlow = 1000000d;
	            var _lowhigh = 1000000d;
	            var _lowmean = 1000000d;
	            var _highlow = 0d;
	            var _highhigh = 0d;
	            var _highmean = 0d;
	            int j=0;
	            int[] excludes =new int[clusters.Stats.Length];
	            double[] weight =new double[clusters.Stats.Length];
	            foreach(var stats in clusters.Stats)
	            {
	            	excludes[j] = (int)stats.lengths[0];
	            	weight[j++] = stats.lengths[0]/d.Count();
	            }
	            var price = data.Close;
	            int minIndex = Array.IndexOf(excludes, excludes.Min());
	            int maxIndex = Array.IndexOf(excludes, excludes.Max());
	            j = 0;
	            foreach (var stats in clusters.Stats)
	            {
	            	// require at least an average share of the window (use 1.0/clustersize to avoid integer division)
	            	if (weight[j] >= 1.0 / clustersize)
	            	{
	            		if (stats.lows[0] < _lowlow)
	            		{
	            			_lowlow = stats.lows[0];
	            			_lowhigh = stats.highs[0];
	            			_lowmean = stats.means[0];
	            		}
	            		if (stats.highs[0] > _highhigh)
	            		{
	            			_highlow = stats.lows[0];
	            			_highhigh = stats.highs[0];
	            			_highmean = stats.means[0];
	            		}
	            	}
	            	j++;
	            }
	            if (price < (decimal)_lowmean)
	            {
	            	if (_trend_dir != 1) { changed = true; }
	            	_trend_dir = 1;
	            }
	            else if (price > (decimal)_highmean)
	            {
	            	if (_trend_dir != -1) { changed = true; }
	            	_trend_dir = -1;
	            }
			}
			Prices.Add((double)data.Close);
            if (!Prices.IsReady) return 0;
			updated = true;
            return (decimal)_trend_dir;

        }

		public bool Updated()
		{
			if(updated) { updated = false; return true; }
			return false;
		}
		
		public bool ChangedDirection()
		{
			return changed;
		}

        /// <summary>
        /// Returns whether the indicator will return valid results
        /// </summary>
        public override bool IsReady
        {
            get { return Prices.IsReady; }
        }

        /// <summary>
        /// Resets the average to its initial state
        /// </summary>
        public override void Reset()
        {
			Prices.Reset();
            base.Reset();
        }

    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Linq;
using QuantConnect.Algorithm.Framework.Portfolio;
using QuantConnect.Data.UniverseSelection;

namespace QuantConnect.Algorithm.Framework.Execution
{
    /// <summary>
    /// Provides an implementation of <see cref="IExecutionModel"/> that immediately submits
    /// market orders to achieve the desired portfolio targets
    /// </summary>
    public class ImmediateExecutionModel2 : ExecutionModel
    {
        private readonly PortfolioTargetCollection _targetsCollection = new PortfolioTargetCollection();

        /// <summary>
        /// Immediately submits orders for the specified portfolio targets.
        /// </summary>
        /// <param name="algorithm">The algorithm instance</param>
        /// <param name="targets">The portfolio targets to be ordered</param>
        public override void Execute(QCAlgorithm algorithm, IPortfolioTarget[] targets)
        {
            _targetsCollection.AddRange(targets);

            foreach (var target in _targetsCollection.OrderByMarginImpact(algorithm))
            {
                // calculate remaining quantity to be ordered
                var quantity = OrderSizing.GetUnorderedQuantity(algorithm, target);

                /*
                var existing = algorithm.Securities[target.Symbol].Holdings.Quantity
                    + algorithm.Transactions.GetOpenOrders(target.Symbol)
                        .Aggregate(0m, (d, order) => d + order.Quantity);
                var quantity = target.Quantity - existing;
                */

                var lastData = algorithm.Securities[target.Symbol].GetLastData();
                // only submit an order when we have data, it is recent (within the last 5 minutes) and the market is open
                if (quantity != 0 && lastData != null && Math.Abs(algorithm.Time.Subtract(lastData.EndTime).TotalMinutes) < 5 && algorithm.IsMarketOpen(target.Symbol))
                {
                    algorithm.MarketOrder(target.Symbol, quantity);
                }
            }

            _targetsCollection.ClearFulfilled(algorithm);
        }

        /// <summary>
        /// Event fired each time we add or remove securities from the data feed
        /// </summary>
        /// <param name="algorithm">The algorithm instance that experienced the change in securities</param>
        /// <param name="changes">The security additions and removals from the algorithm</param>
        public override void OnSecuritiesChanged(QCAlgorithm algorithm, SecurityChanges changes)
        {
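            // this execution model keeps no per-security state, so there is nothing to do when the universe changes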
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Algorithm.Framework.Alphas;
using QuantConnect.Algorithm.Framework.Execution;
using QuantConnect.Algorithm.Framework.Portfolio;
using QuantConnect.Algorithm.Framework.Risk;
using QuantConnect.Algorithm.Framework.Selection;
using QuantConnect.Data;
using QuantConnect.Data.Fundamental;
using QuantConnect.Data.UniverseSelection;
using QuantConnect.Data.Custom.CBOE;
using QuantConnect.Indicators;
using System.Collections.Concurrent;

namespace QuantConnect.Algorithm.CSharp
{
	public class ResistanceModulatedSplitter : QCAlgorithm
	{

		public Symbol cboeVix;
		public decimal vix;
		public IEnumerable<Symbol> symbols;
		public RateOfChange vixROC;
		Resolution resolution = Resolution.Minute;
		int periods =1;
		public override void Initialize()
		{
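			// Backtest Jan 2000 - Jul 2020 with $10,000 starting cash over a manually selected basket of
			// Oanda CFDs. Signals come from the AlphaSkew alpha model; targets are built by a mean-variance
			// (maximum Sharpe ratio) portfolio construction model and filled by an immediate execution model,
			// with per-security maximum-unrealized-profit and maximum-drawdown risk management.
			// A CBOE VIX feed with a 9-period rate of change is also tracked.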
			//Start and End Date range for the backtest:
			SetStartDate(2000, 1, 1);
			SetEndDate(2020, 7, 1);
			SetCash(10000);
			SetBenchmark("AAPL");
            //  RegisterIndicator
           // SetWarmUp(60);
            UniverseSettings.Leverage = 100;

			cboeVix = AddData<CBOE>("VIX", Resolution.Daily).Symbol;
			SetAlpha(new AlphaSkew(1, 1, resolution, periods));
			vixROC = new RateOfChange(cboeVix, 9);
			RegisterIndicator(cboeVix, vixROC, Resolution.Daily);

			SetExecution(new ImmediateExecutionModel2());//VolumeWeightedAveragePriceExecutionModel());
			//SetPortfolioConstruction(new EqualWeightingPortfolioConstructionModel2(new TimeSpan(1,0,0)));
			//SetExecution(new ImmediateExecutionModel2());//VolumeWeightedAveragePriceExecutionModel());
			SetPortfolioConstruction(new MeanVarianceOptimizationPortfolioConstructionModel2( timeSpan:new TimeSpan(1,0,0), portfolioBias: PortfolioBias.LongShort,optimizer:new MaximumSharpeRatioPortfolioOptimizer()));
			//SetPortfolioConstruction(new EqualWeightingPortfolioConstructionModel());
//			SetUniverseSelection(new FineFundamentalUniverseSelectionModel(CoarseSelectionFunction, FineSelectionFunction));
	        //  AddUniverseSelection(new ManualUniverseSelectionModel(
            //   QuantConnect.Symbol.Create("AAPL", SecurityType.Equity, Market.USA)));

			AddUniverseSelection(new ManualUniverseSelectionModel(
				QuantConnect.Symbol.Create("BCOUSD", SecurityType.Cfd, Market.Oanda),
				//QuantConnect.Symbol.Create("WTIUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("XAGUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("XAUUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("XCUUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("SOYBNUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("WHEATUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("CORNUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("XPTUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("NATGASUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("XPDUSD", SecurityType.Cfd, Market.Oanda),
				QuantConnect.Symbol.Create("SUGARUSD", SecurityType.Cfd, Market.Oanda)
			));
			SetBrokerageModel(QuantConnect.Brokerages.BrokerageName.OandaBrokerage);

			AddRiskManagement(new MaximumUnrealizedProfitPercentPerSecurity());
			AddRiskManagement(new MaximumDrawdownPercentPerSecurity());

			//SetUniverseSelection(new FineFundamentalUniverseSelectionModel(CoarseSelectionFunction, FineSelectionFunction));
		}

		/// <summary>
		/// OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
		/// </summary>
		/// <param name="data">Slice object keyed by symbol containing the market data</param>
		public override void OnData(Slice data)
		{

			if (data.ContainsKey(cboeVix))
			{
				var vixBar = data.Get<CBOE>(cboeVix);
				vix = vixBar.Close;
				if (vix > 1)
				{
					vixROC.Update(new IndicatorDataPoint(vixBar.Time, vixBar.Close));
				}
			}
		}

		int lastMonth = -1;
		int lastYear = -1;
		// sort the data by daily dollar volume and take the top 'NumberOfSymbols'
		public IEnumerable<Symbol> CoarseSelectionFunction(IEnumerable<CoarseFundamental> coarse)
		{
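			// refresh the coarse universe at most once per calendar month; otherwise return the cached symbol list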
			if (symbols !=null  && lastMonth==Time.Month)// && Time.Month % 2 != 0 )//&& Time.DayOfWeek != DayOfWeek.Friday)
			{
	        	return symbols;
	        }
	        lastMonth=Time.Month;
	        lastYear=Time.Year;
			var numberOfSymbolsCoarse = 500;

			// select only symbols with fundamental data and sort descending by daily dollar volume
			var sortedByDollarVolume = coarse
				.Where(x => x.HasFundamentalData)
				.Where(x => x.Price>5)
				.OrderByDescending(x => x.DollarVolume);

			// take the top entries from our sorted collection
			var topByDollarVolume = sortedByDollarVolume.Take(numberOfSymbolsCoarse);

			// we need to return only the symbol objects
			return topByDollarVolume.Select(x => x.Symbol);
		}
		
		int finelastMonth=-1;
		int finelastYear=-1;
		// sort the data by P/E ratio and take the top 'numberOfSymbolsFine'
		public IEnumerable<Symbol> FineSelectionFunction(IEnumerable<FineFundamental> fine)
		{
			if (symbols !=null && finelastMonth==Time.Month)// && Time.Month % 2 != 0 )//&& Time.Month % 2 != 0 && Time.DayOfWeek != DayOfWeek.Friday){
	        {
	        	return symbols;
	        }
	        finelastMonth=Time.Month;
	        finelastYear=Time.Year;
			// sort by return on equity (OrderByDescending returns a new sequence, it does not sort in place)
			fine = fine.OrderByDescending(x => x.OperationRatios.ROE.Value);

			// rank the strongest names by momentum and return only the symbol objects
			symbols = Momentum(fine.Take(50)).Take(10).Select(x => x.Symbol);
			return symbols;
		}
		
		
	public static IEnumerable<FineFundamental> EVEBITDA(IEnumerable<FineFundamental> fine)
	{
			return fine
				.Where(x => x.ValuationRatios.EVToEBITDA > 0)
				.Where(x => x.EarningReports.BasicAverageShares.ThreeMonths > 0)
				.Where(x => x.EarningReports.BasicAverageShares.ThreeMonths * (x.EarningReports.BasicEPS.TwelveMonths*x.ValuationRatios.PERatio) > 5000000000);
	}
	
		public static IEnumerable<FineFundamental> Industry(IEnumerable<FineFundamental> fine, string industry, int numberOfSymbolsFine)
	{
			var I = fine
				.Where(x => x.CompanyReference.IndustryTemplateCode == industry)
				.OrderBy(x => x.ValuationRatios.EVToEBITDA);

			// take the top entries from our sorted collection
			 return I.Take(numberOfSymbolsFine);
	}
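    /// <summary>
    /// Altman Z-Score: Z = 1.2*X1 + 1.4*X2 + 3.3*X3 + 0.6*X4 + 1.0*X5, where X1 = working capital / total assets,
    /// X2 = retained earnings / total assets, X3 = EBIT / total assets, X4 = market value of equity / total liabilities,
    /// and X5 = total revenue / total assets. Scores below roughly 1.81 conventionally mark the distress zone,
    /// which is the cutoff <see cref="ZScore2"/> applies.
    /// </summary>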
    public static decimal ZScore(decimal totalassets, decimal totalliabilities,decimal workingcapital, decimal retainedearnings, decimal ebit, decimal totalrevenue,decimal shares, decimal price)
    {
    	if(totalassets==0 || totalliabilities==0) return 0;
        var X1 = 1.2m * (workingcapital / totalassets);
        var X2 = 1.4m * (retainedearnings / totalassets);
        var X3 = 3.3m * (ebit / totalassets);
        var X4 = 0.6m * ((shares * price) / totalliabilities);
        var X5 = 1.0m * (totalrevenue / totalassets);
        return X1 + X2 + X3 + X4 + X5;

	}
	
	public static IEnumerable<FineFundamental> NCAV(IEnumerable<FineFundamental> fine)
	{
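        // Graham-style net-net screen: NCAV per share = (current assets - total liabilities) / basic average shares;
        // keep only securities whose NCAV per share exceeds 1.5.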
        List <FineFundamental> f = new List <FineFundamental>();
  
        foreach (var x in fine)
        {
        	if(x.EarningReports.BasicAverageShares.Value==0) continue;
        	var total_liabilities = x.FinancialStatements.BalanceSheet.CurrentLiabilities.Value+
        							x.FinancialStatements.BalanceSheet.TotalNonCurrentLiabilitiesNetMinorityInterest.Value;
        	var ncav = (x.FinancialStatements.BalanceSheet.CurrentAssets.Value - total_liabilities)/x.EarningReports.BasicAverageShares.Value;
    		if(ncav > 1.5m)
            {
                f.Add(x);
            }
        }
        return f;
	}
	public static IEnumerable<FineFundamental> ZScore2(IEnumerable<FineFundamental> fine)
    {
        List <FineFundamental> f = new List <FineFundamental>();
  
        foreach (var x in fine)
        {
            if( ResistanceModulatedSplitter.ZScore(x.FinancialStatements.BalanceSheet.TotalAssets.TwelveMonths,
            x.FinancialStatements.BalanceSheet.CurrentLiabilities.Value,
            x.FinancialStatements.BalanceSheet.WorkingCapital.TwelveMonths,
            x.FinancialStatements.BalanceSheet.RetainedEarnings.TwelveMonths,
            x.FinancialStatements.IncomeStatement.EBIT.TwelveMonths,
            x.FinancialStatements.IncomeStatement.TotalRevenue.TwelveMonths,
            x.EarningReports.BasicAverageShares.TwelveMonths,
            x.Price) > 1.81m)
            {
                f.Add(x);
            }
        }
        return f;
	}
	
 
	public IEnumerable<FineFundamental> Momentum(IEnumerable<FineFundamental> fine)
    {
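        // Rank candidates by the ratio of the oldest close to the most recent close over the last
        // 126 trading days (History returns bars oldest-first), sorted descending.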
        List <FineFundamental> f = new List <FineFundamental>();
		List<MomentumSelection> s = new List<MomentumSelection>();
        foreach (var x in fine)
        {
        	var history = History(x.Symbol,126,Resolution.Daily);
        	if(history.Count()>0)
        	{
	        	var m = new MomentumSelection();
	        	m.symbol = x.Symbol;
	        	m.fine = x;
	        	m.rate = history.First().Close/history.Last().Close;
	        	s.Add(m);
        	}
        }
        if (s.Count() < 1) return f;
        // OrderByDescending does not sort the list in place, so order the sequence at the point of use
        return s.OrderByDescending(x => x.rate).Select(x => x.fine);
	}


	}
	
	/*
	.Where(x => x.SecurityReference.SecurityType == "ST00000001")
            .Where(x => x.SecurityReference.IsPrimaryShare)
            .Where(x => x.ValuationRatios.EVToEBITDA > 0)
            .Where(x => x.EarningReports.BasicAverageShares.ThreeMonths > 0)
            .Where(x => 
            {
                var averageShares = x.EarningReports.BasicAverageShares.ThreeMonths;
                var history = History(x.Symbol, 1, Resolution.Daily);
                var close = history.FirstOrDefault()?.Close;
                	    
                // If history is empty, close will be null
                // In this case, we will not consider the security
                if (close == null)
                {
                    return false;
                }
                	    
                return averageShares * close > 2 * 1000 * 1000 * 1000;
             })
             .OrderByDescending(x => x.ValuationRatios.EVToEBITDA)
             .Select(x => x.Symbol);
             */
	public class MomentumSelection
	{
		public Symbol symbol;
		public decimal rate;
		public FineFundamental fine;
	}
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Algorithm.Framework.Alphas;
using QuantConnect.Data.UniverseSelection;

namespace QuantConnect.Algorithm.Framework.Portfolio
{
    /// <summary>
    /// Provides an implementation of <see cref="IPortfolioConstructionModel"/> that gives equal weighting to all
    /// securities. The target percent holdings of each security is 1/N where N is the number of securities. For
    /// insights of direction <see cref="InsightDirection.Up"/>, long targets are returned and for insights of direction
    /// <see cref="InsightDirection.Down"/>, short targets are returned.
    /// </summary>
    public class EqualWeightingPortfolioConstructionModel2 : PortfolioConstructionModel
    {
        private DateTime _rebalancingTime;
        private readonly TimeSpan _rebalancingPeriod;
        private List<Symbol> _removedSymbols;
        private readonly InsightCollection _insightCollection = new InsightCollection();
        private DateTime? _nextExpiryTime;

        /// <summary>
        /// Initialize a new instance of <see cref="EqualWeightingPortfolioConstructionModel2"/>
        /// </summary>
        /// <param name="resolution">Rebalancing frequency</param>
        public EqualWeightingPortfolioConstructionModel2(Resolution resolution = Resolution.Daily)
        {
            _rebalancingPeriod = resolution.ToTimeSpan();
        }
        
        public EqualWeightingPortfolioConstructionModel2(TimeSpan timespan)
        {
            _rebalancingPeriod = timespan;
        }
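
        // Minimal usage sketch (mirrors the commented-out wiring in the main algorithm):
        //   SetPortfolioConstruction(new EqualWeightingPortfolioConstructionModel2(new TimeSpan(1, 0, 0)));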

        /// <summary>
        /// Create portfolio targets from the specified insights
        /// </summary>
        /// <param name="algorithm">The algorithm instance</param>
        /// <param name="insights">The insights to create portfolio targets from</param>
        /// <returns>An enumerable of portfolio targets to be sent to the execution model</returns>
        public override IEnumerable<IPortfolioTarget> CreateTargets(QCAlgorithm algorithm, Insight[] insights)
        {
            var targets = new List<IPortfolioTarget>();
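            // nothing to do until an active insight expires, the rebalancing period elapses,
            // new insights arrive, or securities have been removed from the universe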

            if (algorithm.UtcTime <= _nextExpiryTime &&
                algorithm.UtcTime <= _rebalancingTime &&
                insights.Length == 0 &&
                _removedSymbols == null)
            {
                return targets;
            }
            

            _insightCollection.AddRange(insights);

            // Create flatten target for each security that was removed from the universe
            if (_removedSymbols != null)
            {
                var universeDeselectionTargets = _removedSymbols.Select(symbol => new PortfolioTarget(symbol, 0));
                targets.AddRange(universeDeselectionTargets);
                _removedSymbols = null;
            }

            // Get insights that haven't expired for each symbol that is still in the universe
            var activeInsights = _insightCollection.GetActiveInsights(algorithm.UtcTime);

            // Get the last generated active insight for each symbol
            var lastActiveInsights = from insight in activeInsights
                                     group insight by insight.Symbol into g
                                     select g.OrderBy(x => x.GeneratedTimeUtc).Last();

            // give equal weighting to each security
            var count = lastActiveInsights.Count(x => x.Direction != InsightDirection.Flat);
            var percent = count == 0 ? 0 : 1m / count;
//percent = 0.1m;
 //           var percent = Math.Min(count == 0 ? 0 : 1m / count,0.1m);
 //           percent = 0.5m;
            var errorSymbols = new HashSet<Symbol>();

            foreach (var insight in lastActiveInsights)
            {
                var target = PortfolioTarget.Percent(algorithm, insight.Symbol, (int) insight.Direction * percent);
                if (target != null)
                {
                    targets.Add(target);
                }
                else
                {
                    errorSymbols.Add(insight.Symbol);
                }
            }

            // Get expired insights and create flatten targets for each symbol
            var expiredInsights = _insightCollection.RemoveExpiredInsights(algorithm.UtcTime);

            var expiredTargets = from insight in expiredInsights
                                 group insight.Symbol by insight.Symbol into g
                                 where !_insightCollection.HasActiveInsights(g.Key, algorithm.UtcTime) && !errorSymbols.Contains(g.Key)
                                 select new PortfolioTarget(g.Key, 0);

            targets.AddRange(expiredTargets);

            _nextExpiryTime = _insightCollection.GetNextExpiryTime();
            _rebalancingTime = algorithm.UtcTime.Add(_rebalancingPeriod);

            return targets;
        }

        /// <summary>
        /// Event fired each time we add or remove securities from the data feed
        /// </summary>
        /// <param name="algorithm">The algorithm instance that experienced the change in securities</param>
        /// <param name="changes">The security additions and removals from the algorithm</param>
        public override void OnSecuritiesChanged(QCAlgorithm algorithm, SecurityChanges changes)
        {
            // Get removed symbol and invalidate them in the insight collection
            _removedSymbols = changes.RemovedSecurities.Select(x => x.Symbol).ToList();
            _insightCollection.Clear(_removedSymbols.ToArray());
        }
    }
}