using System;
using System.Linq;
using Accord.Math;
using Accord.Statistics;
private double _lower = 1e-05;
private double _upper = Double.MaxValue;
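// The nullable constructor arguments below override these defaults when provided.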
_lower = lower ?? _lower;
_upper = upper ?? _upper;
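// Risk parity optimization: find weights whose risk contributions match the
// given budget (equal contributions by default), then normalize them to sum
// to one and clamp them into [_lower, _upper].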
public double[] Optimize(double[,] historicalReturns, double[] expectedReturns = null, double[,] covariance = null)
covariance = covariance ?? historicalReturns.Covariance();
var size = covariance.GetLength(0);
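// Default to an equal vector 1/n; it doubles as the risk budget b handed to
// the Newton-method solver below.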
expectedReturns = expectedReturns ?? Vector.Create(size, 1d / size);
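// The unnormalized solution comes from the Newton-method routine excerpted
// further down; the helper name used here is assumed for illustration.
var solution = RiskParityNewtonMethodOptimization(size, covariance, expectedReturns);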
solution = Elementwise.Divide(solution, solution.Sum());
return solution.Select(x => Math.Clamp(x, _lower, _upper)).ToArray();
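// The remaining lines belong to the Newton-method solver. Its declaration is
// not part of the excerpt; judging from the parameters used in the body it
// looks roughly like:
//   protected double[] RiskParityNewtonMethodOptimization(
//       int numberOfVariables, double[,] covariance, double[] budget,
//       double tolerance, int maximumIteration)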
if (numberOfVariables < 1 || numberOfVariables > 1000)
{
    throw new ArgumentException("Argument \"numberOfVariables\" must be a positive integer between 1 and 1000");
}
else if (numberOfVariables == 1)
{
    return new double[] { 1d };
}
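// Newton-Raphson iteration on the risk budgeting objective
//   f(x) = 1/2 * x^T.S.x - b^T.log(x),  x > 0
// with gradient df/dx = S.x - b/x and Hessian H(x) = S + Diag(b/x^2),
// where S is the covariance matrix and b the risk budget vector.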
Func<double[], double> objective = (x) => 0.5 * Matrix.Dot(Matrix.Dot(x, covariance), x) - Matrix.Dot(budget, Elementwise.Log(x));
Func<double[], double[]> gradient = (x) => Elementwise.Subtract(Matrix.Dot(covariance, x), Elementwise.Divide(budget, x));
Func<double[], double[,]> hessian = (x) => Elementwise.Add(covariance, Matrix.Diagonal(Elementwise.Divide(budget, Elementwise.Multiply(x, x))));
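// Start from equal weights; the extreme initial objective values ensure the
// first convergence check always passes.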
var weight = Vector.Create(numberOfVariables, 1d / numberOfVariables);
var newObjective = Double.MinValue;
var oldObjective = Double.MaxValue;
var iter = 0;
while (Math.Abs(newObjective - oldObjective) > tolerance && iter < maximumIteration)
{
    oldObjective = newObjective;
    // Newton step: w <- w - H(w)^-1 * grad f(w)
    var invHess = Matrix.Inverse(hessian(weight));
    var jacobian = gradient(weight);
    weight = Elementwise.Subtract(weight, Matrix.Dot(invHess, jacobian));
    newObjective = objective(weight);
    iter++;
}