
Commit df6f817

Make double assertions compare with tolerance instead of precision (#6923)
Comparing at a fixed precision can make two nearly equal values round to different numbers and fail the assertion. Instead, compare with a tolerance, which is not sensitive to rounding.
1 parent 7bb71bb commit df6f817
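
For context: xUnit's Assert.Equal(double expected, double actual, int precision) rounds both values to the given number of decimal places and then compares the rounded results for equality, whereas the Assert.Equal(double expected, double actual, double tolerance) overload only requires the absolute difference to stay within the tolerance. The sketch below is an illustration of the failure mode this commit avoids, not code from the repository; the values are hypothetical and it assumes an xUnit version that ships the tolerance overload.

using System;
using Xunit;

public class PrecisionVsToleranceDemo
{
    [Fact]
    public void ToleranceIsNotSensitiveToRoundingBoundaries()
    {
        double expected = 0.625;     // rounds to 0.62 at two decimal places (banker's rounding)
        double actual = 0.62500001;  // rounds to 0.63, even though it differs by only 1e-8

        // Precision-based comparison: both values are rounded to 2 decimal places
        // before the equality check, so this would fail despite the tiny difference.
        // Assert.Equal(expected, actual, 2);

        // Tolerance-based comparison: passes whenever |expected - actual| is within
        // the tolerance, no matter where the rounding boundaries fall.
        Assert.Equal(expected, actual, 0.01);
    }
}

This is the pattern applied throughout the diffs below: a precision argument such as 2 or 5 becomes a tolerance such as 0.01 or 0.00001.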

File tree

27 files changed: +204 -204 lines changed


test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs

Lines changed: 10 additions & 10 deletions
@@ -94,7 +94,7 @@ public void EntryPointTrainTestSplit()
  int testRows = CountRows(splitOutput.TestData);

  Assert.Equal(totalRows, trainRows + testRows);
- Assert.Equal(0.9, (double)trainRows / totalRows, 1);
+ Assert.Equal(0.9, (double)trainRows / totalRows, 0.1);
  }

  private static int CountRows(IDataView dataView)

@@ -5005,7 +5005,7 @@ public void TestSimpleTrainExperiment()
  Assert.True(b);
  double auc = 0;
  getter(ref auc);
- Assert.Equal(0.93, auc, 2);
+ Assert.Equal(0.93, auc, 0.01);
  b = cursor.MoveNext();
  Assert.False(b);
  }

@@ -5210,7 +5210,7 @@ public void TestCrossValidationMacro()
  if (w == 1)
      Assert.Equal(1.585, stdev, .001);
  else
-     Assert.Equal(1.39, stdev, 2);
+     Assert.Equal(1.39, stdev, 0.01);
  isWeightedGetter(ref isWeighted);
  Assert.True(isWeighted == (w == 1));
  }

@@ -5379,7 +5379,7 @@ public void TestCrossValidationMacroWithMulticlass()
  getter(ref stdev);
  foldGetter(ref fold);
  Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
- Assert.Equal(0.024809923969586353, stdev, 3);
+ Assert.Equal(0.024809923969586353, stdev, 0.001);

  double sum = 0;
  double val = 0;

@@ -5788,7 +5788,7 @@ public void TestCrossValidationMacroWithStratification()
  getter(ref stdev);
  foldGetter(ref fold);
  Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
- Assert.Equal(0.02582, stdev, 5);
+ Assert.Equal(0.02582, stdev, 0.00001);

  double sum = 0;
  double val = 0;

@@ -6089,9 +6089,9 @@ public void TestCrossValidationMacroWithNonDefaultNames()
  foldGetter(ref fold);
  Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
  var stdevValues = stdev.GetValues();
- Assert.Equal(0.02462, stdevValues[0], 5);
- Assert.Equal(0.02763, stdevValues[1], 5);
- Assert.Equal(0.03273, stdevValues[2], 5);
+ Assert.Equal(0.02462, stdevValues[0], 0.00001);
+ Assert.Equal(0.02763, stdevValues[1], 0.00001);
+ Assert.Equal(0.03273, stdevValues[2], 0.00001);

  var sumBldr = new BufferBuilder<double>(R8Adder.Instance);
  sumBldr.Reset(avg.Length, true);

@@ -6291,7 +6291,7 @@ public void TestOvaMacro()
  Assert.True(b);
  double acc = 0;
  getter(ref acc);
- Assert.Equal(0.96, acc, 2);
+ Assert.Equal(0.96, acc, 0.01);
  b = cursor.MoveNext();
  Assert.False(b);
  }

@@ -6463,7 +6463,7 @@ public void TestOvaMacroWithUncalibratedLearner()
  Assert.True(b);
  double acc = 0;
  getter(ref acc);
- Assert.Equal(0.71, acc, 2);
+ Assert.Equal(0.71, acc, 0.01);
  b = cursor.MoveNext();
  Assert.False(b);
  }

test/Microsoft.ML.Core.Tests/UnitTests/TestLoss.cs

Lines changed: 3 additions & 3 deletions
@@ -39,16 +39,16 @@ private void TestHelper(IScalarLoss lossFunc, double label, double output, doubl
  {
  Double loss = lossFunc.Loss((float)output, (float)label);
  float derivative = lossFunc.Derivative((float)output, (float)label);
- Assert.Equal(expectedLoss, loss, 5);
- Assert.Equal(expectedUpdate, -derivative, 5);
+ Assert.Equal(expectedLoss, loss, 0.00001);
+ Assert.Equal(expectedUpdate, -derivative, 0.00001);

  if (differentiable)
  {
      // In principle, the update should be the negative of the first derivative of the loss.
      // Use a simple finite difference method to see if it's in the right ballpark.
      float almostOutput = Math.Max((float)output * (1 + _epsilon), (float)output + _epsilon);
      Double almostLoss = lossFunc.Loss(almostOutput, (float)label);
-     Assert.Equal((almostLoss - loss) / (almostOutput - output), derivative, 1);
+     Assert.Equal((almostLoss - loss) / (almostOutput - output), derivative, 0.1);
  }
  }

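The finite-difference check in TestHelper above only approximates the derivative: a one-sided difference quotient (L(x + eps) - L(x)) / eps carries a truncation error on the order of eps, which is why the assertion uses the coarse 0.1 tolerance rather than a tight one. Below is a minimal standalone sketch of that idea; it uses a simple squared loss as a stand-in for ML.NET's loss functions and is not code from the repository.

using System;

class FiniteDifferenceDemo
{
    // Squared loss L(p, y) = 0.5 * (p - y)^2 with analytic derivative dL/dp = p - y.
    static double Loss(double prediction, double label) => 0.5 * (prediction - label) * (prediction - label);
    static double Derivative(double prediction, double label) => prediction - label;

    static void Main()
    {
        double label = 1.0, output = 3.0, epsilon = 1e-3;

        // One-sided finite difference: (L(x + eps) - L(x)) / eps.
        double almostOutput = output + epsilon;
        double approx = (Loss(almostOutput, label) - Loss(output, label)) / (almostOutput - output);
        double exact = Derivative(output, label);

        // For this loss the error is about epsilon / 2 = 0.0005: far smaller than the
        // 0.1 tolerance used in the test, but far larger than exact double equality allows.
        Console.WriteLine($"approx={approx}, exact={exact}, error={Math.Abs(approx - exact)}");
    }
}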

test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs

Lines changed: 1 addition & 1 deletion
@@ -293,7 +293,7 @@ public void TestSparsifyNormalize(int startRange, bool normalize, float[] expect
  var actualValues = a.GetValues().ToArray();
  Assert.Equal(expectedValues.Length, actualValues.Length);
  for (int i = 0; i < expectedValues.Length; i++)
-     Assert.Equal(expectedValues[i], actualValues[i], precision: 6);
+     Assert.Equal(expectedValues[i], actualValues[i], 0.000001);
  }

  /// <summary>

test/Microsoft.ML.CpuMath.UnitTests/UnitTests.cs

Lines changed: 9 additions & 9 deletions
@@ -645,7 +645,7 @@ public void SumTest(string mode, string test, Dictionary<string, string> environ
  }

  var actual = CpuMathUtils.Sum(src);
- Assert.Equal((double)expected, (double)actual, 2);
+ Assert.Equal((double)expected, (double)actual, 0.01);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, options);
  }

@@ -668,7 +668,7 @@ public void SumSqUTest(string mode, string test, Dictionary<string, string> envi
  }

  var actual = CpuMathUtils.SumSq(src);
- Assert.Equal((double)expected, (double)actual, 2);
+ Assert.Equal((double)expected, (double)actual, 0.01);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, options);
  }

@@ -693,7 +693,7 @@ public void SumSqDiffUTest(string mode, string test, string scale, Dictionary<st
  expected += (src[i] - defaultScale) * (src[i] - defaultScale);
  }

- Assert.Equal((double)expected, (double)actual, 1);
+ Assert.Equal((double)expected, (double)actual, 0.1);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, scale, options);
  }

@@ -716,7 +716,7 @@ public void SumAbsUTest(string mode, string test, Dictionary<string, string> env
  }

  var actual = CpuMathUtils.SumAbs(src);
- Assert.Equal((double)expected, (double)actual, 2);
+ Assert.Equal((double)expected, (double)actual, 0.01);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, options);
  }

@@ -741,7 +741,7 @@ public void SumAbsDiffUTest(string mode, string test, string scale, Dictionary<s
  expected += Math.Abs(src[i] - defaultScale);
  }

- Assert.Equal((double)expected, (double)actual, 2);
+ Assert.Equal((double)expected, (double)actual, 0.01);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, scale, options);
  }

@@ -769,7 +769,7 @@ public void MaxAbsUTest(string mode, string test, Dictionary<string, string> env
  }
  }

- Assert.Equal((double)expected, (double)actual, 2);
+ Assert.Equal((double)expected, (double)actual, 0.01);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, options);
  }

@@ -797,7 +797,7 @@ public void MaxAbsDiffUTest(string mode, string test, string scale, Dictionary<s
  expected = abs;
  }
  }
- Assert.Equal((double)expected, (double)actual, 2);
+ Assert.Equal((double)expected, (double)actual, 0.01);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, scale, options);
  }

@@ -827,7 +827,7 @@ public void DotUTest(string mode, string test, Dictionary<string, string> enviro
  }

  var actual = CpuMathUtils.DotProductDense(src, dst, dst.Length);
- Assert.Equal((double)expected, (double)actual, 1);
+ Assert.Equal((double)expected, (double)actual, 0.1);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, options);
  }

@@ -861,7 +861,7 @@ public void DotSUTest(string mode, string test, Dictionary<string, string> envir
  }

  var actual = CpuMathUtils.DotProductSparse(src, dst, idx, limit);
- Assert.Equal((double)expected, (double)actual, 2);
+ Assert.Equal((double)expected, (double)actual, 0.01);
  return RemoteExecutor.SuccessExitCode;
  }, mode, test, options);
  }

test/Microsoft.ML.Fairlearn.Tests/MetricTest.cs

Lines changed: 11 additions & 11 deletions
@@ -38,17 +38,17 @@ public void RegressionMetricTest()
  {
  RegressionGroupMetric regressionMetric = mlContext.Fairlearn().Metric.Regression(eval: data, labelColumn: "Price", scoreColumn: "Score", sensitiveFeatureColumn: "Gender");
  var metricByGroup = regressionMetric.ByGroup();
- Assert.Equal(-2.30578, Convert.ToSingle(metricByGroup["RSquared"][0]), 3);
- Assert.Equal(-2039.81453, Convert.ToSingle(metricByGroup["RSquared"][1]), 3);
- Assert.Equal(1.00000, Convert.ToSingle(metricByGroup["RMS"][0]), 3);
- Assert.Equal(15.811388, Convert.ToSingle(metricByGroup["RMS"][1]), 3);
+ Assert.Equal(-2.30578, Convert.ToSingle(metricByGroup["RSquared"][0]), 0.001);
+ Assert.Equal(-2039.81453, Convert.ToSingle(metricByGroup["RSquared"][1]), 0.001);
+ Assert.Equal(1.00000, Convert.ToSingle(metricByGroup["RMS"][0]), 0.001);
+ Assert.Equal(15.811388, Convert.ToSingle(metricByGroup["RMS"][1]), 0.001);
  metricByGroup.Description();
  Dictionary<string, double> metricOverall = regressionMetric.Overall();
- Assert.Equal(125.5, metricOverall["MSE"], 1);
- Assert.Equal(11.202678, metricOverall["RMS"], 4);
+ Assert.Equal(125.5, metricOverall["MSE"], 0.1);
+ Assert.Equal(11.202678, metricOverall["RMS"], 0.0001);
  Dictionary<string, double> diff = regressionMetric.DifferenceBetweenGroups();
- Assert.Equal(14.81138, diff["RMS"], 4);
- Assert.Equal(2037.5, diff["RSquared"], 1);
+ Assert.Equal(14.81138, diff["RMS"], 0.0001);
+ Assert.Equal(2037.5, diff["RSquared"], 0.1);

  }

@@ -70,10 +70,10 @@ public void BinaryClassificationMetricTest()

  BinaryGroupMetric metrics = mlContext.Fairlearn().Metric.BinaryClassification(eval: df, labelColumn: "label", predictedColumn: "PredictedLabel", sensitiveFeatureColumn: "group_id");
  var metricByGroup = metrics.ByGroup();
- Assert.Equal(0.8, Convert.ToSingle(metricByGroup["Accuracy"][0]), 1);
- Assert.Equal(0.6, Convert.ToSingle(metricByGroup["Accuracy"][1]), 1);
+ Assert.Equal(0.8, Convert.ToSingle(metricByGroup["Accuracy"][0]), 0.1);
+ Assert.Equal(0.6, Convert.ToSingle(metricByGroup["Accuracy"][1]), 0.1);
  var metricOverall = metrics.Overall();
- Assert.Equal(0.7, Convert.ToSingle(metricOverall["Accuracy"]), 1);
+ Assert.Equal(0.7, Convert.ToSingle(metricOverall["Accuracy"]), 0.1);
  }
  }
  }

test/Microsoft.ML.Fairlearn.Tests/UtilityTest.cs

Lines changed: 4 additions & 4 deletions
@@ -31,10 +31,10 @@ public void DemographyParityTest()
  PrimitiveDataFrameColumn<float> ypred = new PrimitiveDataFrameColumn<float>("pred", fl);
  var gSinged = dp.Gamma(ypred);

- Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][0]), 1);
- Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][1]), 1);
- Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][2]), 1);
- Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][3]), 1);
+ Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][0]), 0.1);
+ Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][1]), 0.1);
+ Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][2]), 0.1);
+ Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][3]), 0.1);
  }
  }
  }

test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs

Lines changed: 2 additions & 2 deletions
@@ -119,8 +119,8 @@ public void InspectFastTreeModelParameters()
  var expectedThresholds = new float[] { 0.0911167f, 0.06509889f, 0.019873254f, 0.0361835f };
  for (int i = 0; i < finalTree.NumberOfNodes; ++i)
  {
-     Assert.Equal(expectedSplitGains[i], finalTree.SplitGains[i], 6);
-     Assert.Equal((double)expectedThresholds[i], (double)finalTree.NumericalSplitThresholds[i], 6);
+     Assert.Equal(expectedSplitGains[i], finalTree.SplitGains[i], 0.000001);
+     Assert.Equal((double)expectedThresholds[i], (double)finalTree.NumericalSplitThresholds[i], 0.000001);
  }
  }

test/Microsoft.ML.IntegrationTests/ONNX.cs

Lines changed: 2 additions & 2 deletions
@@ -71,7 +71,7 @@ public void SaveOnnxModelLoadAndScoreFastTree()
  var originalPrediction = originalPredictionEngine.Predict(row);
  var onnxPrediction = onnxPredictionEngine.Predict(row);
  // Check that the predictions are identical.
- Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], precision: 4);
+ Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], 0.0001);
  }
  }

@@ -170,7 +170,7 @@ public void SaveOnnxModelLoadAndScoreSDCA()
  var originalPrediction = originalPredictionEngine.Predict(row);
  var onnxPrediction = onnxPredictionEngine.Predict(row);
  // Check that the predictions are identical.
- Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], precision: 4);
+ Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], 0.0001);
  }
  }
  }

test/Microsoft.ML.IntegrationTests/Training.cs

Lines changed: 1 addition & 1 deletion
@@ -498,7 +498,7 @@ public void MetacomponentsFunctionWithKeyHandling()
  // Evaluate the model.
  var binaryClassificationMetrics = mlContext.MulticlassClassification.Evaluate(binaryClassificationPredictions);

- Assert.Equal(0.4367, binaryClassificationMetrics.LogLoss, 4);
+ Assert.Equal(0.4367, binaryClassificationMetrics.LogLoss, 0.0001);
  }
  }
  }

test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs

Lines changed: 3 additions & 3 deletions
@@ -476,9 +476,9 @@ public void TestOnnxNoneDimValue()
  var transformedValues = onnxTransformer.Transform(idv);
  var predictions = mlContext.Data.CreateEnumerable<PredictionNoneDimension>(transformedValues, reuseRowObject: false).ToArray();

- Assert.Equal(-0.080, Math.Round(predictions[0].variable[0], 3));
- Assert.Equal(1.204, Math.Round(predictions[1].variable[0], 3));
- Assert.Equal(2.27, Math.Round(predictions[2].variable[0], 3));
+ Assert.Equal(-0.080, predictions[0].variable[0], 0.001);
+ Assert.Equal(1.204, predictions[1].variable[0], 0.001);
+ Assert.Equal(2.27, predictions[2].variable[0], 0.001);
  }

  /// <summary>
