0.31.8.0: Ensure deterministic order of results from multithreaded optimizers (#131)

* Fix order of results returned by RandomSearchOptimizer. Results now always match the order of the generated parameter sets, whether running single- or multi-threaded (a sketch of the technique follows the commit details below)

* Refactor m_parallelOptions

* Fix deterministic order of results for GridSearchOptimizer

* Fix deterministic order of results when running GlobalizedBoundedNelderMeadOptimizer in parallel

* Add tests specifying all input arguments

* Specify all input arguments

* Add sut factory method (the pattern is sketched just before the file diffs)

* Fix deterministic results when running in parallel

* Add more test arguments

* 0.31.8.0

Co-authored-by: Mads Dabros <mada@ihfood.dk>
mdabros and Mads Dabros authored Jul 12, 2020
1 parent bdb4c1d commit 3f6063f
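
A minimal sketch of the technique the commit messages above describe (hypothetical names and signatures, not the actual SharpLearning internals): with a fixed seed the generated parameter sets are reproducible, so writing each result into a slot indexed by its parameter set's position makes the whole result sequence deterministic, regardless of which thread finishes first; appending to a shared unordered collection would not be.

using System;
using System.Threading.Tasks;

// Hypothetical sketch; names and signatures are illustrative only.
public static class OrderedParallelEvaluation
{
    public static double[] Evaluate(
        double[][] parameterSets,
        Func<double[], double> minimize,
        int maxDegreeOfParallelism = -1) // -1 = unlimited, mirroring the tests below
    {
        var results = new double[parameterSets.Length];
        var parallelOptions = new ParallelOptions
        {
            MaxDegreeOfParallelism = maxDegreeOfParallelism,
        };

        // The loop index doubles as the result slot, so results[i] always
        // corresponds to parameterSets[i] regardless of thread scheduling.
        Parallel.For(0, parameterSets.Length, parallelOptions,
            i => results[i] = minimize(parameterSets[i]));

        return results;
    }
}

Reusing a single ParallelOptions instance (compare the m_parallelOptions refactor listed above) also keeps the degree-of-parallelism setting in one place.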
Showing 11 changed files with 228 additions and 157 deletions.
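
The test diffs below share one refactoring pattern: each fixture gains a CreateSut factory that specifies every constructor argument and maps a nullable maxDegreeOfParallelism test argument onto the optimizer. A condensed, hypothetical sketch of that pattern (SomeOptimizer stands in for the real optimizers):

using Microsoft.VisualStudio.TestTools.UnitTesting;

// Stand-in for the optimizers under test; illustrative only.
public class SomeOptimizer
{
    public SomeOptimizer(int maxDegreeOfParallelism = -1) { }
    public void Optimize() { }
}

[TestClass]
public class SomeOptimizerTest
{
    [TestMethod]
    [DataRow(1)]    // single-threaded
    [DataRow(2)]    // capped parallelism
    [DataRow(-1)]   // unlimited parallelism
    [DataRow(null)] // fall back to the optimizer's default
    public void SomeOptimizer_Optimize(int? maxDegreeOfParallelism)
    {
        var sut = CreateSut(maxDegreeOfParallelism);
        sut.Optimize();
        // With deterministic result ordering, the same expected values
        // can be asserted for every parallelism setting.
    }

    static SomeOptimizer CreateSut(int? maybeMaxDegreeOfParallelism)
    {
        const int DefaultMaxDegreeOfParallelism = -1;
        var maxDegreeOfParallelism =
            maybeMaxDegreeOfParallelism ?? DefaultMaxDegreeOfParallelism;

        // Every argument is named explicitly, so the test keeps its
        // meaning even if a constructor default changes later.
        return new SomeOptimizer(
            maxDegreeOfParallelism: maxDegreeOfParallelism);
    }
}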
6 changes: 3 additions & 3 deletions src/Directory.Build.props
@@ -1,8 +1,8 @@
<Project>
<PropertyGroup>
-<Version>0.31.7.0</Version>
-<AssemblyVersion>0.31.7.0</AssemblyVersion>
-<FileVersion>0.31.7.0</FileVersion>
+<Version>0.31.8.0</Version>
+<AssemblyVersion>0.31.8.0</AssemblyVersion>
+<FileVersion>0.31.8.0</FileVersion>
<NeutralLanguage>en</NeutralLanguage>
<Authors>Mads Dabros</Authors>
<Copyright>Copyright © Mads Dabros 2014</Copyright>
92 changes: 53 additions & 39 deletions src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs
@@ -10,33 +10,31 @@ namespace SharpLearning.Optimization.Test
public class BayesianOptimizerTest
{
[TestMethod]
-[DataRow(false)]
-[DataRow(true)]
-public void BayesianOptimizer_OptimizeBest_SingleParameter(bool runParallel)
+[DataRow(1)]
+[DataRow(2)]
+[DataRow(-1)]
+[DataRow(null)]
+public void BayesianOptimizer_OptimizeBest_SingleParameter(int? maxDegreeOfParallelism)
{
var parameters = new MinMaxParameterSpec[]
{
new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
};

-var sut = new BayesianOptimizer(parameters,
-    iterations: 80,
-    randomStartingPointCount: 20,
-    functionEvaluationsPerIterationCount: 1,
-    randomSearchPointCount: 1000,
-    seed: 42,
-    runParallel: runParallel);
+var sut = CreateSut(maxDegreeOfParallelism, parameters);

var actual = sut.OptimizeBest(MinimizeWeightFromHeight);

-Assert.AreEqual(126.50056735005998, actual.Error, Delta);
-Assert.AreEqual(38.359608938153649, actual.ParameterSet.Single(), Delta);
+Assert.AreEqual(110.050490091126, actual.Error, Delta);
+Assert.AreEqual(37.84434624847227, actual.ParameterSet.Single(), Delta);
}

[TestMethod]
-[DataRow(false)]
-[DataRow(true)]
-public void BayesianOptimizer_OptimizeBest_MultipleParameters(bool runParallel)
+[DataRow(1)]
+[DataRow(2)]
+[DataRow(-1)]
+[DataRow(null)]
+public void BayesianOptimizer_OptimizeBest_MultipleParameters(int? maxDegreeOfParallelism)
{
var parameters = new MinMaxParameterSpec[]
{
@@ -45,49 +43,39 @@ public void BayesianOptimizer_OptimizeBest_MultipleParameters(bool runParallel)
new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
};

-var sut = new BayesianOptimizer(parameters,
-    iterations: 100,
-    randomStartingPointCount: 5,
-    functionEvaluationsPerIterationCount: 1,
-    randomSearchPointCount: 1000,
-    seed: 42,
-    runParallel: runParallel);
+var sut = CreateSut(maxDegreeOfParallelism, parameters);

var actual = sut.OptimizeBest(Minimize);

-Assert.AreEqual(-0.76070603822760785, actual.Error, Delta);
+Assert.AreEqual(-0.59028531714984367, actual.Error, Delta);
Assert.AreEqual(3, actual.ParameterSet.Length);

-Assert.AreEqual(1.6078245041928358, actual.ParameterSet[0], Delta);
-Assert.AreEqual(-8.9735394990879769, actual.ParameterSet[1], Delta);
-Assert.AreEqual(-0.18217921731163855, actual.ParameterSet[2], Delta);
+Assert.AreEqual(3.9675549296511132, actual.ParameterSet[0], Delta);
+Assert.AreEqual(6.8542302757754143, actual.ParameterSet[1], Delta);
+Assert.AreEqual(-0.047884811669533178, actual.ParameterSet[2], Delta);
}

[TestMethod]
-public void BayesianOptimizer_Optimize()
+[DataRow(1)]
+[DataRow(2)]
+[DataRow(-1)]
+[DataRow(null)]
+public void BayesianOptimizer_Optimize(int? maxDegreeOfParallelism)
{
var parameters = new MinMaxParameterSpec[]
{
new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
};

-var sut = new BayesianOptimizer(parameters,
-    iterations: 120,
-    randomStartingPointCount: 5,
-    functionEvaluationsPerIterationCount: 1,
-    randomSearchPointCount: 1000,
-    seed: 42,
-    runParallel: false); // Note, since the returned results are not ordered on error,
-                         // running with parallel computations will not return reproducible order of results,
-                         // so runParallel must be false for this test.
+var sut = CreateSut(maxDegreeOfParallelism, parameters);

var results = sut.Optimize(MinimizeWeightFromHeight);
var actual = new OptimizerResult[] { results.First(), results.Last() };

var expected = new OptimizerResult[]
{
-new OptimizerResult(new double[] { 43.216748276360683 }, 1352.8306605984087),
-new OptimizerResult(new double[] { 38.201425707992833 }, 119.1316225267316)
+new OptimizerResult(new double[] { 90.513222660177036 }, 114559.43191955783),
+new OptimizerResult(new double[] { 41.752538896050559 }, 779.196560786838)
};

Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
@@ -199,6 +187,30 @@ public void BayesianOptimizer_ArgumentCheck_RandomSearchPointCount()
10, 20, 30, 0);
}

+static BayesianOptimizer CreateSut(
+    int? maybeMaxDegreeOfParallelism,
+    MinMaxParameterSpec[] parameters)
+{
+    const int DefaultMaxDegreeOfParallelism = -1;
+
+    var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism.HasValue ?
+        maybeMaxDegreeOfParallelism.Value : DefaultMaxDegreeOfParallelism;
+
+    var runParallel = maybeMaxDegreeOfParallelism.HasValue;
+
+    var sut = new BayesianOptimizer(parameters,
+        iterations: 30,
+        randomStartingPointCount: 5,
+        functionEvaluationsPerIterationCount: 5,
+        randomSearchPointCount: 1000,
+        seed: 42,
+        runParallel: runParallel,
+        maxDegreeOfParallelism: maxDegreeOfParallelism);
+
+    return sut;
+}


OptimizerResult RunOpenLoopOptimizationTest(List<OptimizerResult> results)
{
var parameters = new MinMaxParameterSpec[]
@@ -217,7 +229,9 @@ OptimizerResult RunOpenLoopOptimizationTest(List<OptimizerResult> results)
randomStartingPointCount: randomStartingPointsCount,
functionEvaluationsPerIterationCount: functionEvaluationsPerIterationCount,
randomSearchPointCount: 1000,
-    seed: 42);
+    seed: 42,
+    runParallel: false,
+    maxDegreeOfParallelism: 1);

// Using BayesianOptimizer in an open loop.
var initialParameterSets = sut.ProposeParameterSets(randomStartingPointsCount, results);
src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs
@@ -21,19 +21,16 @@ public void GlobalizedBoundedNelderMeadOptimizer_OptimizeBest(int? maxDegreeOfParallelism)
new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
};

-var sut = maxDegreeOfParallelism.HasValue ?
-    new GlobalizedBoundedNelderMeadOptimizer(parameters, 5, 1e-5, 10,
-        maxDegreeOfParallelism: maxDegreeOfParallelism.Value) :
-    new GlobalizedBoundedNelderMeadOptimizer(parameters, 5, 1e-5, 10);
+var sut = CreateSut(maxDegreeOfParallelism, parameters);

var actual = sut.OptimizeBest(Minimize);

-Assert.AreEqual(actual.Error, -0.99999949547279676, Delta);
+Assert.AreEqual(actual.Error, -0.99999960731425908, Delta);
Assert.AreEqual(actual.ParameterSet.Length, 3);

-Assert.AreEqual(actual.ParameterSet[0], -7.8547285710964134, Delta);
-Assert.AreEqual(actual.ParameterSet[1], 6.2835515298977995, Delta);
-Assert.AreEqual(actual.ParameterSet[2], -1.5851024386788885E-07, Delta);
+Assert.AreEqual(actual.ParameterSet[0], -1.5711056814954487, Delta);
+Assert.AreEqual(actual.ParameterSet[1], -6.283490634742785, Delta);
+Assert.AreEqual(actual.ParameterSet[2], -2.9822323517533149E-07, Delta);
}

[TestMethod]
Expand All @@ -48,18 +45,15 @@ public void GlobalizedBoundedNelderMeadOptimizer_Optimize(int? maxDegreeOfParall
new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
};

-var sut = maxDegreeOfParallelism.HasValue ?
-    new GlobalizedBoundedNelderMeadOptimizer(parameters, 5, 1e-5, 10,
-        maxDegreeOfParallelism: maxDegreeOfParallelism.Value) :
-    new GlobalizedBoundedNelderMeadOptimizer(parameters, 5, 1e-5, 10);
+var sut = CreateSut(maxDegreeOfParallelism, parameters);

var results = sut.Optimize(MinimizeWeightFromHeight);
var actual = new OptimizerResult[] { results.First(), results.Last() };

var expected = new OptimizerResult[]
{
-new OptimizerResult(new double[] { 37.71314535727786 }, 109.34381396310141),
-new OptimizerResult(new double[] { 37.7131485180996 }, 109.34381396350526)
+new OptimizerResult(new double[] { 37.71314634450421 }, 109.3438139631394),
+new OptimizerResult(new double[] { 37.713142445047254 }, 109.34381396345546)
};

Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
@@ -70,5 +64,30 @@ public void GlobalizedBoundedNelderMeadOptimizer_Optimize(int? maxDegreeOfParallelism)
Assert.AreEqual(expected.Last().ParameterSet.First(),
actual.Last().ParameterSet.First(), Delta);
}

+static GlobalizedBoundedNelderMeadOptimizer CreateSut(
+    int? maybeMaxDegreeOfParallelism,
+    MinMaxParameterSpec[] parameters)
+{
+    const int DefaultMaxDegreeOfParallelism = -1;
+
+    var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism.HasValue ?
+        maybeMaxDegreeOfParallelism.Value : DefaultMaxDegreeOfParallelism;
+
+    var sut = new GlobalizedBoundedNelderMeadOptimizer(parameters,
+        maxRestarts: 50,
+        noImprovementThreshold: 1e-5,
+        maxIterationsWithoutImprovement: 10,
+        maxIterationsPrRestart: 0,
+        maxFunctionEvaluations: 0,
+        alpha: 1,
+        gamma: 2,
+        rho: -0.5,
+        sigma: 0.5,
+        seed: 324,
+        maxDegreeOfParallelism: maxDegreeOfParallelism);
+
+    return sut;
+}
}
}
src/SharpLearning.Optimization.Test/GridSearchOptimizerTest.cs
@@ -39,7 +39,7 @@ public void GridSearchOptimizer_Optimize(int? maxDegreeOfParallelism)
{
var parameters = new GridParameterSpec[]
{
-new GridParameterSpec(10.0, 37.5)
+new GridParameterSpec(10.0, 20.0, 30.0, 35.0, 37.5, 40.0, 50.0, 60.0)
};

var sut = maxDegreeOfParallelism.HasValue ?
@@ -50,8 +50,8 @@ public void GridSearchOptimizer_Optimize(int? maxDegreeOfParallelism)

var expected = new OptimizerResult[]
{
-new OptimizerResult(new double[] { 37.5 }, 111.20889999999987),
-new OptimizerResult(new double[] { 10 }, 31638.9579)
+new OptimizerResult(new double[] { 10 }, 31638.9579),
+new OptimizerResult(new double[] { 60 }, 20500.6279)
};

Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
28 changes: 22 additions & 6 deletions src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs
@@ -21,9 +21,7 @@ public void ParticleSwarmOptimizer_OptimizeBest(int? maxDegreeOfParallelism)
new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
};

-var sut = maxDegreeOfParallelism.HasValue ?
-    new ParticleSwarmOptimizer(parameters, 100, maxDegreeOfParallelism: maxDegreeOfParallelism.Value) :
-    new ParticleSwarmOptimizer(parameters, 100);
+var sut = CreateSut(maxDegreeOfParallelism, parameters);

var actual = sut.OptimizeBest(Minimize);

@@ -47,9 +45,7 @@ public void ParticleSwarmOptimizer_Optimize(int? maxDegreeOfParallelism)
new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
};

-var sut = maxDegreeOfParallelism.HasValue ?
-    new ParticleSwarmOptimizer(parameters, 100, maxDegreeOfParallelism: maxDegreeOfParallelism.Value) :
-    new ParticleSwarmOptimizer(parameters, 100);
+var sut = CreateSut(maxDegreeOfParallelism, parameters);

var results = sut.Optimize(MinimizeWeightFromHeight);

@@ -69,5 +65,25 @@ public void ParticleSwarmOptimizer_Optimize(int? maxDegreeOfParallelism)
Assert.AreEqual(expected.Last().ParameterSet.First(),
actual.Last().ParameterSet.First(), Delta);
}

+static ParticleSwarmOptimizer CreateSut(
+    int? maybeMaxDegreeOfParallelism,
+    MinMaxParameterSpec[] parameters)
+{
+    const int DefaultMaxDegreeOfParallelism = -1;
+
+    var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism.HasValue ?
+        maybeMaxDegreeOfParallelism.Value : DefaultMaxDegreeOfParallelism;
+
+    var sut = new ParticleSwarmOptimizer(parameters,
+        maxIterations: 100,
+        numberOfParticles: 10,
+        c1: 2,
+        c2: 2,
+        seed: 42,
+        maxDegreeOfParallelism: maxDegreeOfParallelism);
+
+    return sut;
+}
}
}
src/SharpLearning.Optimization.Test/RandomSearchOptimizerTest.cs
@@ -43,15 +43,15 @@ public void RandomSearchOptimizer_Optimize(int? maxDegreeOfParallelism)
};

var sut = maxDegreeOfParallelism.HasValue ?
-    new RandomSearchOptimizer(parameters, 2, 42, true, maxDegreeOfParallelism.Value) :
-    new RandomSearchOptimizer(parameters, 2);
+    new RandomSearchOptimizer(parameters, 100, 42, true, maxDegreeOfParallelism.Value) :
+    new RandomSearchOptimizer(parameters, 100);

var actual = sut.Optimize(MinimizeWeightFromHeight);

var expected = new OptimizerResult[]
{
new OptimizerResult(new double[] { 13.8749507052707 }, 23438.2157641635),
-new OptimizerResult(new double[] { 28.3729278125674 }, 3690.81119818742),
+new OptimizerResult(new double[] { 19.1529422843144 }, 14251.396910816733),
};

Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
39 changes: 15 additions & 24 deletions src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs
@@ -17,14 +17,7 @@ public void SmacOptimizer_OptimizeBest_SingleParameter()
new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
};

-var sut = new SmacOptimizer(parameters,
-    iterations: 80,
-    randomStartingPointCount: 20,
-    functionEvaluationsPerIterationCount: 1,
-    localSearchPointCount: 10,
-    randomSearchPointCount: 1000,
-    epsilon: 0.00001,
-    seed: 42);
+var sut = CreateSut(parameters);

var actual = sut.OptimizeBest(MinimizeWeightFromHeight);

@@ -42,14 +35,7 @@ public void SmacOptimizer_OptimizeBest_MultipleParameters()
new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
};

-var sut = new SmacOptimizer(parameters,
-    iterations: 80,
-    randomStartingPointCount: 20,
-    functionEvaluationsPerIterationCount: 1,
-    localSearchPointCount: 10,
-    randomSearchPointCount: 1000,
-    epsilon: 0.00001,
-    seed: 42);
+var sut = CreateSut(parameters);

var actual = sut.OptimizeBest(Minimize);

@@ -69,14 +55,7 @@ public void SmacOptimizer_Optimize()
new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
};

-var sut = new SmacOptimizer(parameters,
-    iterations: 80,
-    randomStartingPointCount: 20,
-    functionEvaluationsPerIterationCount: 1,
-    localSearchPointCount: 10,
-    randomSearchPointCount: 1000,
-    epsilon: 0.00001,
-    seed: 42);
+var sut = CreateSut(parameters);

var actual = sut.Optimize(MinimizeWeightFromHeight);

@@ -203,6 +182,18 @@ public void SmacOptimizer_ArgumentCheck_RandomSearchPointCount()
10, 20, 30, 40, 0);
}

+static SmacOptimizer CreateSut(MinMaxParameterSpec[] parameters)
+{
+    return new SmacOptimizer(parameters,
+        iterations: 80,
+        randomStartingPointCount: 20,
+        functionEvaluationsPerIterationCount: 1,
+        localSearchPointCount: 10,
+        randomSearchPointCount: 1000,
+        epsilon: 0.00001,
+        seed: 42);
+}

OptimizerResult RunOpenLoopOptimizationTest(List<OptimizerResult> results)
{
var parameters = new MinMaxParameterSpec[]