Lean  $LEAN_TAG$
RiskParityPortfolioOptimizer.cs
1 /*
2  * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
3  * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
4  *
5  * Licensed under the Apache License, Version 2.0 (the "License");
6  * you may not use this file except in compliance with the License.
7  * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14 */
15 
16 using System;
17 using System.Linq;
18 using Accord.Math;
19 using Accord.Statistics;
20 
22 {
    /// <summary>
    /// Provides an implementation of a risk parity portfolio optimizer that calculates the optimal weights
    /// with the weight range from 0 to 1 and equalizes the risk carried by each asset
    /// </summary>
28  {
29  private double _lower = 1e-05;
30  private double _upper = Double.MaxValue;
31 
32  /// <summary>
33  /// Initialize a new instance of <see cref="RiskParityPortfolioOptimizer"/>
34  /// </summary>
35  /// <param name="lower">The lower bounds on portfolio weights</param>
36  /// <param name="upper">The upper bounds on portfolio weights</param>
37  public RiskParityPortfolioOptimizer(double? lower = null, double? upper = null)
38  {
39  _lower = lower ?? _lower; // has to be greater than or equal to 0
40  _upper = upper ?? _upper;
41  }
42 
43  /// <summary>
44  /// Perform portfolio optimization for a provided matrix of historical returns and an array of expected returns
45  /// </summary>
46  /// <param name="historicalReturns">Matrix of annualized historical returns where each column represents a security and each row returns for the given date/time (size: K x N).</param>
47  /// <param name="expectedReturns">Risk budget vector (size: K x 1).</param>
48  /// <param name="covariance">Multi-dimensional array of double with the portfolio covariance of annualized returns (size: K x K).</param>
49  /// <returns>Array of double with the portfolio weights (size: K x 1)</returns>
50  public double[] Optimize(double[,] historicalReturns, double[] expectedReturns = null, double[,] covariance = null)
51  {
52  covariance = covariance ?? historicalReturns.Covariance();
53  var size = covariance.GetLength(0);
54 
55  // Optimization Problem
56  // minimize_{x >= 0} f(x) = 1/2 * x^T.S.x - b^T.log(x)
57  // b = 1 / num_of_assets (equal budget of risk)
58  // df(x)/dx = S.x - b / x
59  // H(x) = S + Diag(b / x^2)
60  expectedReturns = expectedReturns ?? Vector.Create(size, 1d / size);
61  var solution = RiskParityNewtonMethodOptimization(size, covariance, expectedReturns);
62 
63  // Normalize weights: w = x / x^T.1
64  solution = Elementwise.Divide(solution, solution.Sum());
65  // Make sure the vector is within range
66  return solution.Select(x => Math.Clamp(x, _lower, _upper)).ToArray();
67 
68  }
69 
70  /// <summary>
71  /// Newton method of minimization
72  /// </summary>
73  /// <param name="numberOfVariables">The number of variables (size of weight vector).</param>
74  /// <param name="covariance">Covariance matrix (size: K x K).</param>
75  /// <param name="budget">The risk budget (size: K x 1).</param>
76  /// <param name="tolerance">Tolerance level of objective difference with previous steps to accept minimization result.</param>
77  /// <param name="maximumIteration">Maximum iteration per optimization.</param>
78  /// <returns>Array of double of argumented minimization</returns>
79  protected double[] RiskParityNewtonMethodOptimization(int numberOfVariables, double[,] covariance, double[] budget, double tolerance = 1e-11, int maximumIteration = 15000)
80  {
81  if (numberOfVariables < 1 || numberOfVariables > 1000)
82  {
83  throw new ArgumentException("Argument \"numberOfVariables\" must be a positive integer between 1 and 1000");
84  }
85  else if (numberOfVariables == 1)
86  {
87  return new double[]{1d};
88  }
89 
90  Func<double[], double> objective = (x) => 0.5 * Matrix.Dot(Matrix.Dot(x, covariance), x) - Matrix.Dot(budget, Elementwise.Log(x));
91  Func<double[], double[]> gradient = (x) => Elementwise.Subtract(Matrix.Dot(covariance, x), Elementwise.Divide(budget, x));
92  Func<double[], double[,]> hessian = (x) => Elementwise.Add(covariance, Matrix.Diagonal(Elementwise.Divide(budget, Elementwise.Multiply(x, x))));
93  var weight = Vector.Create(numberOfVariables, 1d / numberOfVariables);
94  var newObjective = Double.MinValue;
95  var oldObjective = Double.MaxValue;
96  var iter = 0;
97 
98  while (Math.Abs(newObjective - oldObjective) > tolerance && iter < maximumIteration)
99  {
100  // Store old objective value
101  oldObjective = newObjective;
102 
103  // Get parameters for Newton method gradient descend
104  var invHess = Matrix.Inverse(hessian(weight));
105  var jacobian = gradient(weight);
106 
107  // Get next weight vector
108  // x^{k + 1} = x^{k} - H^{-1}(x^{k}).df(x^{k}))
109  weight = Elementwise.Subtract(weight, Matrix.Dot(invHess, jacobian));
110 
111  // Store new objective value
112  newObjective = objective(weight);
113 
114  iter++;
115  }
116 
117  return weight;
118  }
119  }
120 }