Skip to content

Commit

Permalink
Refactored tests with base test class. Added Python wrapper tests
Browse files Browse the repository at this point in the history
Base test class and Python wrapper tests
  • Loading branch information
wtindall1 committed Jan 19, 2024
1 parent c7d773b commit fd46596
Show file tree
Hide file tree
Showing 5 changed files with 186 additions and 105 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,17 +22,12 @@
namespace QuantConnect.Tests.Algorithm.Framework.Portfolio
{
[TestFixture]
public class MaximumSharpeRatioPortfolioOptimizerTests
public class MaximumSharpeRatioPortfolioOptimizerTests : PortfolioOptimizerTestsBase
{
private List<double[,]> _historicalReturns;
private List<double[]> _expectedReturns;
private List<double[,]> _covariances;
private List<double[]> _expectedResults;

[OneTimeSetUp]
public void SetUp()
{
_historicalReturns = new List<double[,]>
HistoricalReturns = new List<double[,]>
{
new double[,] { { 0.02, -0.02, 0.28 }, { -0.50, -0.29, -0.13 }, { 0.81, 0.29, 0.31 }, { -0.03, -0.00, 0.01 } },
new double[,] { { 0.10, 0.20, 0.4 }, { 0.12, 0.25, 0.4 }, { 0.11, 0.22, 0.4 } },
Expand All @@ -44,7 +39,7 @@ public void SetUp()
new double[,] { { 0.31, 0.43, 1.22, 0.03 }, { 0.65, 0.52, 1.25, 0.67 }, { -0.39, -0.28, -0.50, -0.10 }, { 0.58, 0.58, 2.39, -0.41 }, { -0.01, -0.01, 0.04, 0.03 } }
};

_expectedReturns = new List<double[]>
ExpectedReturns = new List<double[]>
{
new double[] { 0.08, -0.01, 0.12 },
new double[] { 0.11, 0.23, 0.4 },
Expand All @@ -56,7 +51,7 @@ public void SetUp()
new double[] { 0.23, 0.25, 0.88, 0.04 }
};

_covariances = new List<double[,]>
Covariances = new List<double[,]>
{
new double[,] { { 0.29, 0.13, 0.10 }, { 0.13, 0.06, 0.04 }, { 0.10, 0.04, 0.05 } },
null,
Expand All @@ -68,7 +63,7 @@ public void SetUp()
new double[,] { { 0.19, 0.16, 0.44, 0.05 }, { 0.16, 0.14, 0.40, 0.02 }, { 0.44, 0.40, 1.29, -0.06 }, { 0.05, 0.02, -0.06, 0.15 } }
};

_expectedResults = new List<double[]>
ExpectedResults = new List<double[]>
{
new double[] { -0.562396, 0.608942, 0.953453 },
new double[] { 0.686025, -0.269589, 0.583023 },
Expand All @@ -81,6 +76,11 @@ public void SetUp()
};
}

// Supplies the optimizer under test to the shared base-class test cases.
protected override IPortfolioOptimizer CreateOptimizer() => new MaximumSharpeRatioPortfolioOptimizer();

[TestCase(0)]
[TestCase(1)]
[TestCase(2)]
Expand All @@ -89,25 +89,18 @@ public void SetUp()
[TestCase(5)]
[TestCase(6)]
[TestCase(7)]
public void TestOptimizeWeightings(int testCaseNumber)
public override void OptimizeWeightings(int testCaseNumber)
{
var testOptimizer = new MaximumSharpeRatioPortfolioOptimizer();

var result = testOptimizer.Optimize(
_historicalReturns[testCaseNumber],
_expectedReturns[testCaseNumber],
_covariances[testCaseNumber]);

Assert.AreEqual(_expectedResults[testCaseNumber], result.Select(x => Math.Round(x, 6)));
base.OptimizeWeightings(testCaseNumber);
}

[TestCase(0)]
public void TestOptimizeWeightingsSpecifyingLowerBoundAndRiskFreeRate(int testCaseNumber)
public void OptimizeWeightingsSpecifyingLowerBoundAndRiskFreeRate(int testCaseNumber)
{
var testOptimizer = new MaximumSharpeRatioPortfolioOptimizer(lower: 0, riskFreeRate: 0.04);
var expectedResult = new double[] { 0, 0.44898, 0.55102 };

var result = testOptimizer.Optimize(_historicalReturns[testCaseNumber]);
var result = testOptimizer.Optimize(HistoricalReturns[testCaseNumber]);

Assert.AreEqual(expectedResult, result.Select(x => Math.Round(x, 6)));
}
Expand All @@ -126,19 +119,6 @@ public void SingleSecurityPortfolioReturnsNaN()
Assert.AreEqual(result, expectedResult);
}

[Test]
public void EmptyPortfolioReturnsEmptyArrayOfDouble()
{
var testOptimizer = new MaximumSharpeRatioPortfolioOptimizer();
var historicalReturns = new double[,] { { } };

var expectedResult = Array.Empty<double>();

var result = testOptimizer.Optimize(historicalReturns);

Assert.AreEqual(result, expectedResult);
}

[Test]
public void EqualWeightingsWhenNoSolutionFound()
{
Expand All @@ -162,7 +142,7 @@ public void BoundariesAreNotViolated()
var upper = 0.5d;
var testOptimizer = new MaximumSharpeRatioPortfolioOptimizer(lower, upper);

var result = testOptimizer.Optimize(_historicalReturns[testCaseNumber], null, _covariances[testCaseNumber]);
var result = testOptimizer.Optimize(HistoricalReturns[testCaseNumber], null, Covariances[testCaseNumber]);

foreach (var x in result)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
* limitations under the License.
*/

using Accord;
using NUnit.Framework;
using QuantConnect.Algorithm.Framework.Portfolio;
using System;
Expand All @@ -23,12 +22,8 @@
namespace QuantConnect.Tests.Algorithm.Framework.Portfolio
{
[TestFixture]
public class MinimumVariancePortfolioOptimizerTests
public class MinimumVariancePortfolioOptimizerTests : PortfolioOptimizerTestsBase
{
private List<double[,]> _historicalReturns;
private List<double[]> _expectedReturns;
private List<double[,]> _covariances;
private List<double[]> _expectedResults;
private Dictionary<int, double> _targetReturns;

[OneTimeSetUp]
Expand All @@ -49,7 +44,7 @@ public void Setup()
var covariance3 = new double[,] { { 0.06, 0.09, 0.28 }, { 0.09, 0.25, 0.58 }, { 0.28, 0.58, 1.66 } };
var covariance4 = (double[,])null;

_historicalReturns = new List<double[,]>
HistoricalReturns = new List<double[,]>
{
historicalReturns1,
historicalReturns2,
Expand All @@ -61,7 +56,7 @@ public void Setup()
historicalReturns4
};

_expectedReturns = new List<double[]>
ExpectedReturns = new List<double[]>
{
expectedReturns1,
expectedReturns2,
Expand All @@ -73,7 +68,7 @@ public void Setup()
expectedReturns4
};

_covariances = new List<double[,]>
Covariances = new List<double[,]>
{
covariance1,
covariance2,
Expand All @@ -85,7 +80,7 @@ public void Setup()
covariance4
};

_expectedResults = new List<double[]>
ExpectedResults = new List<double[]>
{
new double[] { -0.089212, 0.23431, -0.040975, 0.635503 },
new double[] { 0.366812, -0.139738, 0.49345 },
Expand All @@ -106,37 +101,34 @@ public void Setup()
};
}

// Supplies the optimizer under test to the shared base-class test cases.
protected override IPortfolioOptimizer CreateOptimizer() => new MinimumVariancePortfolioOptimizer();

[TestCase(0)]
[TestCase(1)]
[TestCase(2)]
[TestCase(3)]
public void TestOptimizeWeightings(int testCaseNumber)
public override void OptimizeWeightings(int testCaseNumber)
{
var testOptimizer = new MinimumVariancePortfolioOptimizer();

var result = testOptimizer.Optimize(
_historicalReturns[testCaseNumber],
_expectedReturns[testCaseNumber],
_covariances[testCaseNumber]);

Assert.AreEqual(_expectedResults[testCaseNumber], result.Select(x => Math.Round(x, 6)));
Assert.AreEqual(1d, result.Select(x => Math.Round(Math.Abs(x), 6)).Sum());
base.OptimizeWeightings(testCaseNumber);
}

[TestCase(4)]
[TestCase(5)]
[TestCase(6)]
[TestCase(7)]
public void TestOptimizeWeightingsSpecifyingTargetReturns(int testCaseNumber)
public void OptimizeWeightingsSpecifyingTargetReturns(int testCaseNumber)
{
var testOptimizer = new MinimumVariancePortfolioOptimizer(targetReturn: _targetReturns[testCaseNumber]);

var result = testOptimizer.Optimize(
_historicalReturns[testCaseNumber],
_expectedReturns[testCaseNumber],
_covariances[testCaseNumber]);
HistoricalReturns[testCaseNumber],
ExpectedReturns[testCaseNumber],
Covariances[testCaseNumber]);

Assert.AreEqual(_expectedResults[testCaseNumber], result.Select(x => Math.Round(x, 6)));
Assert.AreEqual(ExpectedResults[testCaseNumber], result.Select(x => Math.Round(x, 6)));
Assert.AreEqual(1d, result.Select(x => Math.Round(Math.Abs(x), 6)).Sum());
}

Expand All @@ -146,7 +138,7 @@ public void EqualWeightingsWhenNoSolutionFound(int testCaseNumber)
var testOptimizer = new MinimumVariancePortfolioOptimizer(upper: -1);
var expectedResult = new double[] { 0.25, 0.25, 0.25, 0.25 };

var result = testOptimizer.Optimize(_historicalReturns[testCaseNumber]);
var result = testOptimizer.Optimize(HistoricalReturns[testCaseNumber]);

Assert.AreEqual(expectedResult, result);
}
Expand All @@ -162,9 +154,9 @@ public void BoundariesAreNotViolated(int testCaseNumber)
var testOptimizer = new MinimumVariancePortfolioOptimizer(lower, upper);

var result = testOptimizer.Optimize(
_historicalReturns[testCaseNumber],
_expectedReturns[testCaseNumber],
_covariances[testCaseNumber]);
HistoricalReturns[testCaseNumber],
ExpectedReturns[testCaseNumber],
Covariances[testCaseNumber]);

foreach (var x in result)
{
Expand All @@ -174,24 +166,16 @@ public void BoundariesAreNotViolated(int testCaseNumber)
};
}

public void EmptyPortfolioReturnsEmptyArrayOfDouble()
{
var testOptimizer = new MinimumVariancePortfolioOptimizer();
var historicalReturns = new double[,] { { } };

var result = testOptimizer.Optimize(historicalReturns);

Assert.AreEqual(Array.Empty<double>(), result);
}

[Test]
public void SingleSecurityPortfolioReturnsOne()
{
var testOptimizer = new MinimumVariancePortfolioOptimizer();
var historicalReturns = new double[,] { { 0.76 }, { 0.02 }, { -0.50 } };
var expectedResult = new double[] { 1 };

var result = testOptimizer.Optimize(historicalReturns);

Assert.AreEqual(1d, result);
Assert.AreEqual(expectedResult, result);
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

using NUnit.Framework;
using Python.Runtime;
using QuantConnect.Algorithm.Framework.Portfolio;
using System;

namespace QuantConnect.Tests.Algorithm.Framework.Portfolio;

[TestFixture]
// Tests for PortfolioOptimizerPythonWrapper: verifies that the wrapper delegates
// Optimize calls to a Python-defined optimizer, and that construction fails fast
// when the Python object does not expose the required Optimize method.
public class PortfolioOptimizerPythonWrapperTests
{
[Test]
public void OptimizeIsCalled()
{
// All interaction with the Python runtime must happen while holding the GIL.
using (Py.GIL())
{
// Define a minimal Python optimizer in-line; its Optimize method only
// records that it was invoked (it returns no weights).
var module = PyModule.FromString(Guid.NewGuid().ToString(),
@$"
from AlgorithmImports import *
class CustomPortfolioOptimizer:
    def __init__(self):
        self.OptimizeWasCalled = False
    def Optimize(self, historicalReturns, expectedReturns = None, covariance = None):
        self.OptimizeWasCalled= True");

var pyCustomOptimizer = module.GetAttr("CustomPortfolioOptimizer").Invoke();
var wrapper = new PortfolioOptimizerPythonWrapper(pyCustomOptimizer);
// Arbitrary 3x2 returns matrix; only used to trigger the delegation.
var historicalReturns = new double[,] { { -0.50, -0.13 }, { 0.81, 0.31 }, { -0.02, 0.01 } };

wrapper.Optimize(historicalReturns);
// Read the flag back from the Python instance to confirm delegation occurred.
pyCustomOptimizer
.GetAttr("OptimizeWasCalled")
.TryConvert(out bool optimizerWasCalled);

Assert.IsTrue(optimizerWasCalled);
}
}

[Test]
public void WrapperThrowsIfOptimizerDoesNotImplementInterface()
{
using (Py.GIL())
{
// This Python class deliberately lacks an Optimize method (only Calculate),
// so it does not satisfy the optimizer interface the wrapper expects.
var module = PyModule.FromString(Guid.NewGuid().ToString(),
@$"
from AlgorithmImports import *
class CustomPortfolioOptimizer:
    def __init__(self):
        self.OptimizeWasCalled = False
    def Calculate(self, historicalReturns, expectedReturns = None, covariance = None):
        pass");

var pyCustomOptimizer = module.GetAttr("CustomPortfolioOptimizer").Invoke();

// Constructing the wrapper around a non-conforming object must throw.
Assert.Throws<NotImplementedException>(() => new PortfolioOptimizerPythonWrapper(pyCustomOptimizer));
}
}
}
Loading

0 comments on commit fd46596

Please sign in to comment.