Make double assertions compare with tolerance instead of precision (#6923)

Precision might cause values with small differences to round to different numbers.
Instead, compare with a tolerance, which is not sensitive to rounding.
ericstj committed Dec 22, 2023
1 parent 2093331 commit d3c3127
Showing 27 changed files with 204 additions and 204 deletions.
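
For readers unfamiliar with the two xUnit overloads being swapped throughout this diff, here is a minimal sketch (not part of the commit; the names and values are illustrative, and it assumes an xUnit version that ships the double-tolerance overload). The precision overload compares the values after rounding each to the given number of decimal places, so two nearly equal numbers that land on opposite sides of a rounding boundary compare unequal; the tolerance overload only requires |expected - actual| <= tolerance.

using System;
using Xunit;

public class ToleranceVersusPrecisionSketch
{
    [Fact]
    public void ToleranceIsNotSensitiveToRoundingBoundaries()
    {
        double expected = 0.93499;
        double actual = 0.93501;   // differs from expected by only 0.00002

        // Precision overload: both values are rounded to 2 decimal places before comparing,
        // so this would compare 0.93 against 0.94 and fail despite the tiny difference.
        // Assert.Equal(expected, actual, 2);

        // Tolerance overload: passes because |expected - actual| <= 0.01.
        Assert.Equal(expected, actual, 0.01);
    }
}
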
20 changes: 10 additions & 10 deletions test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs
@@ -94,7 +94,7 @@ public void EntryPointTrainTestSplit()
int testRows = CountRows(splitOutput.TestData);

Assert.Equal(totalRows, trainRows + testRows);
Assert.Equal(0.9, (double)trainRows / totalRows, 1);
Assert.Equal(0.9, (double)trainRows / totalRows, 0.1);
}

private static int CountRows(IDataView dataView)
@@ -5005,7 +5005,7 @@ public void TestSimpleTrainExperiment()
Assert.True(b);
double auc = 0;
getter(ref auc);
Assert.Equal(0.93, auc, 2);
Assert.Equal(0.93, auc, 0.01);
b = cursor.MoveNext();
Assert.False(b);
}
@@ -5210,7 +5210,7 @@ public void TestCrossValidationMacro()
if (w == 1)
Assert.Equal(1.585, stdev, .001);
else
Assert.Equal(1.39, stdev, 2);
Assert.Equal(1.39, stdev, 0.01);
isWeightedGetter(ref isWeighted);
Assert.True(isWeighted == (w == 1));
}
@@ -5379,7 +5379,7 @@ public void TestCrossValidationMacroWithMulticlass()
getter(ref stdev);
foldGetter(ref fold);
Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
Assert.Equal(0.024809923969586353, stdev, 3);
Assert.Equal(0.024809923969586353, stdev, 0.001);

double sum = 0;
double val = 0;
@@ -5788,7 +5788,7 @@ public void TestCrossValidationMacroWithStratification()
getter(ref stdev);
foldGetter(ref fold);
Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
Assert.Equal(0.02582, stdev, 5);
Assert.Equal(0.02582, stdev, 0.00001);

double sum = 0;
double val = 0;
@@ -6089,9 +6089,9 @@ public void TestCrossValidationMacroWithNonDefaultNames()
foldGetter(ref fold);
Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
var stdevValues = stdev.GetValues();
Assert.Equal(0.02462, stdevValues[0], 5);
Assert.Equal(0.02763, stdevValues[1], 5);
Assert.Equal(0.03273, stdevValues[2], 5);
Assert.Equal(0.02462, stdevValues[0], 0.00001);
Assert.Equal(0.02763, stdevValues[1], 0.00001);
Assert.Equal(0.03273, stdevValues[2], 0.00001);

var sumBldr = new BufferBuilder<double>(R8Adder.Instance);
sumBldr.Reset(avg.Length, true);
@@ -6291,7 +6291,7 @@ public void TestOvaMacro()
Assert.True(b);
double acc = 0;
getter(ref acc);
Assert.Equal(0.96, acc, 2);
Assert.Equal(0.96, acc, 0.01);
b = cursor.MoveNext();
Assert.False(b);
}
@@ -6463,7 +6463,7 @@ public void TestOvaMacroWithUncalibratedLearner()
Assert.True(b);
double acc = 0;
getter(ref acc);
Assert.Equal(0.71, acc, 2);
Assert.Equal(0.71, acc, 0.01);
b = cursor.MoveNext();
Assert.False(b);
}
6 changes: 3 additions & 3 deletions test/Microsoft.ML.Core.Tests/UnitTests/TestLoss.cs
@@ -39,16 +39,16 @@ private void TestHelper(IScalarLoss lossFunc, double label, double output, doubl
{
Double loss = lossFunc.Loss((float)output, (float)label);
float derivative = lossFunc.Derivative((float)output, (float)label);
Assert.Equal(expectedLoss, loss, 5);
Assert.Equal(expectedUpdate, -derivative, 5);
Assert.Equal(expectedLoss, loss, 0.00001);
Assert.Equal(expectedUpdate, -derivative, 0.00001);

if (differentiable)
{
// In principle, the update should be the negative of the first derivative of the loss.
// Use a simple finite difference method to see if it's in the right ballpark.
float almostOutput = Math.Max((float)output * (1 + _epsilon), (float)output + _epsilon);
Double almostLoss = lossFunc.Loss(almostOutput, (float)label);
Assert.Equal((almostLoss - loss) / (almostOutput - output), derivative, 1);
Assert.Equal((almostLoss - loss) / (almostOutput - output), derivative, 0.1);
}
}

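
The TestLoss change above keeps the finite-difference sanity check but widens it to a 0.1 tolerance. As a standalone sketch of that technique (illustrative only; the squared loss below is not the ML.NET loss under test), a forward difference (f(x + h) - f(x)) / h should land close to the analytic derivative:

using System;
using Xunit;

public class FiniteDifferenceSketch
{
    // Illustrative squared loss and its analytic derivative with respect to the output.
    private static double Loss(double output, double label) => 0.5 * (output - label) * (output - label);
    private static double Derivative(double output, double label) => output - label;

    [Fact]
    public void ForwardDifferenceIsInTheRightBallpark()
    {
        const double epsilon = 1e-3;
        double label = 1.0, output = 0.25;

        double loss = Loss(output, label);
        double almostOutput = Math.Max(output * (1 + epsilon), output + epsilon);
        double almostLoss = Loss(almostOutput, label);

        // The forward difference approximates the derivative up to O(epsilon) error plus
        // floating-point noise, which is why a loose tolerance (0.1) is appropriate here.
        double approximation = (almostLoss - loss) / (almostOutput - output);
        Assert.Equal(approximation, Derivative(output, label), 0.1);
    }
}
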
2 changes: 1 addition & 1 deletion test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs
@@ -293,7 +293,7 @@ public void TestSparsifyNormalize(int startRange, bool normalize, float[] expect
var actualValues = a.GetValues().ToArray();
Assert.Equal(expectedValues.Length, actualValues.Length);
for (int i = 0; i < expectedValues.Length; i++)
Assert.Equal(expectedValues[i], actualValues[i], precision: 6);
Assert.Equal(expectedValues[i], actualValues[i], 0.000001);
}

/// <summary>
18 changes: 9 additions & 9 deletions test/Microsoft.ML.CpuMath.UnitTests/UnitTests.cs
@@ -645,7 +645,7 @@ public void SumTest(string mode, string test, Dictionary<string, string> environ
}
var actual = CpuMathUtils.Sum(src);
Assert.Equal((double)expected, (double)actual, 2);
Assert.Equal((double)expected, (double)actual, 0.01);
return RemoteExecutor.SuccessExitCode;
}, mode, test, options);
}
@@ -668,7 +668,7 @@ public void SumSqUTest(string mode, string test, Dictionary<string, string> envi
}
var actual = CpuMathUtils.SumSq(src);
Assert.Equal((double)expected, (double)actual, 2);
Assert.Equal((double)expected, (double)actual, 0.01);
return RemoteExecutor.SuccessExitCode;
}, mode, test, options);
}
@@ -693,7 +693,7 @@ public void SumSqDiffUTest(string mode, string test, string scale, Dictionary<st
expected += (src[i] - defaultScale) * (src[i] - defaultScale);
}
Assert.Equal((double)expected, (double)actual, 1);
Assert.Equal((double)expected, (double)actual, 0.1);
return RemoteExecutor.SuccessExitCode;
}, mode, test, scale, options);
}
@@ -716,7 +716,7 @@ public void SumAbsUTest(string mode, string test, Dictionary<string, string> env
}
var actual = CpuMathUtils.SumAbs(src);
Assert.Equal((double)expected, (double)actual, 2);
Assert.Equal((double)expected, (double)actual, 0.01);
return RemoteExecutor.SuccessExitCode;
}, mode, test, options);
}
@@ -741,7 +741,7 @@ public void SumAbsDiffUTest(string mode, string test, string scale, Dictionary<s
expected += Math.Abs(src[i] - defaultScale);
}
Assert.Equal((double)expected, (double)actual, 2);
Assert.Equal((double)expected, (double)actual, 0.01);
return RemoteExecutor.SuccessExitCode;
}, mode, test, scale, options);
}
@@ -769,7 +769,7 @@ public void MaxAbsUTest(string mode, string test, Dictionary<string, string> env
}
}
Assert.Equal((double)expected, (double)actual, 2);
Assert.Equal((double)expected, (double)actual, 0.01);
return RemoteExecutor.SuccessExitCode;
}, mode, test, options);
}
@@ -797,7 +797,7 @@ public void MaxAbsDiffUTest(string mode, string test, string scale, Dictionary<s
expected = abs;
}
}
Assert.Equal((double)expected, (double)actual, 2);
Assert.Equal((double)expected, (double)actual, 0.01);
return RemoteExecutor.SuccessExitCode;
}, mode, test, scale, options);
}
@@ -827,7 +827,7 @@ public void DotUTest(string mode, string test, Dictionary<string, string> enviro
}
var actual = CpuMathUtils.DotProductDense(src, dst, dst.Length);
Assert.Equal((double)expected, (double)actual, 1);
Assert.Equal((double)expected, (double)actual, 0.1);
return RemoteExecutor.SuccessExitCode;
}, mode, test, options);
}
@@ -861,7 +861,7 @@ public void DotSUTest(string mode, string test, Dictionary<string, string> envir
}
var actual = CpuMathUtils.DotProductSparse(src, dst, idx, limit);
Assert.Equal((double)expected, (double)actual, 2);
Assert.Equal((double)expected, (double)actual, 0.01);
return RemoteExecutor.SuccessExitCode;
}, mode, test, options);
}
22 changes: 11 additions & 11 deletions test/Microsoft.ML.Fairlearn.Tests/MetricTest.cs
@@ -38,17 +38,17 @@ public void RegressionMetricTest()
{
RegressionGroupMetric regressionMetric = mlContext.Fairlearn().Metric.Regression(eval: data, labelColumn: "Price", scoreColumn: "Score", sensitiveFeatureColumn: "Gender");
var metricByGroup = regressionMetric.ByGroup();
Assert.Equal(-2.30578, Convert.ToSingle(metricByGroup["RSquared"][0]), 3);
Assert.Equal(-2039.81453, Convert.ToSingle(metricByGroup["RSquared"][1]), 3);
Assert.Equal(1.00000, Convert.ToSingle(metricByGroup["RMS"][0]), 3);
Assert.Equal(15.811388, Convert.ToSingle(metricByGroup["RMS"][1]), 3);
Assert.Equal(-2.30578, Convert.ToSingle(metricByGroup["RSquared"][0]), 0.001);
Assert.Equal(-2039.81453, Convert.ToSingle(metricByGroup["RSquared"][1]), 0.001);
Assert.Equal(1.00000, Convert.ToSingle(metricByGroup["RMS"][0]), 0.001);
Assert.Equal(15.811388, Convert.ToSingle(metricByGroup["RMS"][1]), 0.001);
metricByGroup.Description();
Dictionary<string, double> metricOverall = regressionMetric.Overall();
Assert.Equal(125.5, metricOverall["MSE"], 1);
Assert.Equal(11.202678, metricOverall["RMS"], 4);
Assert.Equal(125.5, metricOverall["MSE"], 0.1);
Assert.Equal(11.202678, metricOverall["RMS"], 0.0001);
Dictionary<string, double> diff = regressionMetric.DifferenceBetweenGroups();
Assert.Equal(14.81138, diff["RMS"], 4);
Assert.Equal(2037.5, diff["RSquared"], 1);
Assert.Equal(14.81138, diff["RMS"], 0.0001);
Assert.Equal(2037.5, diff["RSquared"], 0.1);

}

@@ -70,10 +70,10 @@ public void BinaryClassificationMetricTest()

BinaryGroupMetric metrics = mlContext.Fairlearn().Metric.BinaryClassification(eval: df, labelColumn: "label", predictedColumn: "PredictedLabel", sensitiveFeatureColumn: "group_id");
var metricByGroup = metrics.ByGroup();
Assert.Equal(0.8, Convert.ToSingle(metricByGroup["Accuracy"][0]), 1);
Assert.Equal(0.6, Convert.ToSingle(metricByGroup["Accuracy"][1]), 1);
Assert.Equal(0.8, Convert.ToSingle(metricByGroup["Accuracy"][0]), 0.1);
Assert.Equal(0.6, Convert.ToSingle(metricByGroup["Accuracy"][1]), 0.1);
var metricOverall = metrics.Overall();
Assert.Equal(0.7, Convert.ToSingle(metricOverall["Accuracy"]), 1);
Assert.Equal(0.7, Convert.ToSingle(metricOverall["Accuracy"]), 0.1);
}
}
}
8 changes: 4 additions & 4 deletions test/Microsoft.ML.Fairlearn.Tests/UtilityTest.cs
@@ -31,10 +31,10 @@ public void DemographyParityTest()
PrimitiveDataFrameColumn<float> ypred = new PrimitiveDataFrameColumn<float>("pred", fl);
var gSinged = dp.Gamma(ypred);

Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][0]), 1);
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][1]), 1);
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][2]), 1);
Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][3]), 1);
Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][0]), 0.1);
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][1]), 0.1);
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][2]), 0.1);
Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][3]), 0.1);
}
}
}
4 changes: 2 additions & 2 deletions test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs
@@ -119,8 +119,8 @@ public void InspectFastTreeModelParameters()
var expectedThresholds = new float[] { 0.0911167f, 0.06509889f, 0.019873254f, 0.0361835f };
for (int i = 0; i < finalTree.NumberOfNodes; ++i)
{
Assert.Equal(expectedSplitGains[i], finalTree.SplitGains[i], 6);
Assert.Equal((double)expectedThresholds[i], (double)finalTree.NumericalSplitThresholds[i], 6);
Assert.Equal(expectedSplitGains[i], finalTree.SplitGains[i], 0.000001);
Assert.Equal((double)expectedThresholds[i], (double)finalTree.NumericalSplitThresholds[i], 0.000001);
}
}

4 changes: 2 additions & 2 deletions test/Microsoft.ML.IntegrationTests/ONNX.cs
@@ -71,7 +71,7 @@ public void SaveOnnxModelLoadAndScoreFastTree()
var originalPrediction = originalPredictionEngine.Predict(row);
var onnxPrediction = onnxPredictionEngine.Predict(row);
// Check that the predictions are identical.
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], precision: 4);
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], 0.0001);
}
}

@@ -170,7 +170,7 @@ public void SaveOnnxModelLoadAndScoreSDCA()
var originalPrediction = originalPredictionEngine.Predict(row);
var onnxPrediction = onnxPredictionEngine.Predict(row);
// Check that the predictions are identical.
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], precision: 4);
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], 0.0001);
}
}
}
2 changes: 1 addition & 1 deletion test/Microsoft.ML.IntegrationTests/Training.cs
@@ -498,7 +498,7 @@ public void MetacomponentsFunctionWithKeyHandling()
// Evaluate the model.
var binaryClassificationMetrics = mlContext.MulticlassClassification.Evaluate(binaryClassificationPredictions);

Assert.Equal(0.4367, binaryClassificationMetrics.LogLoss, 4);
Assert.Equal(0.4367, binaryClassificationMetrics.LogLoss, 0.0001);
}
}
}
6 changes: 3 additions & 3 deletions test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs
@@ -476,9 +476,9 @@ public void TestOnnxNoneDimValue()
var transformedValues = onnxTransformer.Transform(idv);
var predictions = mlContext.Data.CreateEnumerable<PredictionNoneDimension>(transformedValues, reuseRowObject: false).ToArray();

Assert.Equal(-0.080, Math.Round(predictions[0].variable[0], 3));
Assert.Equal(1.204, Math.Round(predictions[1].variable[0], 3));
Assert.Equal(2.27, Math.Round(predictions[2].variable[0], 3));
Assert.Equal(-0.080, predictions[0].variable[0], 0.001);
Assert.Equal(1.204, predictions[1].variable[0], 0.001);
Assert.Equal(2.27, predictions[2].variable[0], 0.001);
}

/// <summary>
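
The OnnxTransformTests change above also drops the explicit Math.Round before comparing. A hypothetical example of why round-then-compare is brittle (the value below is made up to sit just past a rounding boundary; it is not taken from the model output):

using System;
using Xunit;

public class RoundThenCompareSketch
{
    [Fact]
    public void RoundingBoundaryMakesExactComparisonFragile()
    {
        double expected = 1.204;
        double actual = 1.2045001;   // within 0.001 of expected...

        // ...but it rounds to 1.205, so an exact comparison against the rounded value fails
        // even though the prediction is as close as the test intends to require.
        // Assert.Equal(expected, Math.Round(actual, 3));

        // Comparing with a tolerance does not care which side of the boundary the value is on.
        Assert.Equal(expected, actual, 0.001);
    }
}
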
@@ -43,7 +43,7 @@ public void TestGamDirectInstantiation()
Assert.Equal(binUpperBounds.Length, gam.NumberOfShapeFunctions);

// Check the intercept
Assert.Equal(intercept, gam.Bias, 6);
Assert.Equal(intercept, gam.Bias, 0.000001);

// Check that the binUpperBounds were made correctly
CheckArrayOfArrayEquality(binUpperBounds, gam.GetBinUpperBounds());
6 changes: 3 additions & 3 deletions test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
Expand Up @@ -737,8 +737,8 @@ private void CombineAndTestTreeEnsembles(IDataView idv, PredictorModel[] fastTre
probGetters[i](ref probs[i]);
predGetters[i](ref preds[i]);
}
Assert.Equal(score, 0.4 * scores.Sum() / predCount, 5);
Assert.Equal(prob, 1 / (1 + Math.Exp(-score)), 6);
Assert.Equal(score, 0.4 * scores.Sum() / predCount, 0.00001);
Assert.Equal(prob, 1 / (1 + Math.Exp(-score)), 0.000001);
Assert.True(pred == score > 0);
}
}
@@ -953,7 +953,7 @@ public void TestMulticlassEnsembleCombiner()
for (int j = 0; j < predCount; j++)
sum += vectorScores[j].GetItemOrDefault(i);
if (float.IsNaN(sum))
Assert.Equal((double)vectorScore.GetItemOrDefault(i), (double)sum / predCount, 3);
Assert.Equal((double)vectorScore.GetItemOrDefault(i), (double)sum / predCount, 0.001);
}
Assert.Equal(probs.Count(p => p >= prob), probs.Count(p => p <= prob));
}
4 changes: 2 additions & 2 deletions test/Microsoft.ML.Tests/AnomalyDetectionTests.cs
@@ -33,8 +33,8 @@ public void RandomizedPcaTrainerBaselineTest()
// Evaluate
var metrics = ML.AnomalyDetection.Evaluate(transformedData, falsePositiveCount: 5);

Assert.Equal(0.98667, metrics.AreaUnderRocCurve, 5);
Assert.Equal(0.90000, metrics.DetectionRateAtFalsePositiveCount, 5);
Assert.Equal(0.98667, metrics.AreaUnderRocCurve, 0.00001);
Assert.Equal(0.90000, metrics.DetectionRateAtFalsePositiveCount, 0.00001);
}

/// <summary>
2 changes: 1 addition & 1 deletion test/Microsoft.ML.Tests/EvaluateTests.cs
@@ -65,7 +65,7 @@ public void MulticlassEvaluatorTopKArray()
var metrics2 = mlContext.MulticlassClassification.Evaluate(inputDV2, topKPredictionCount: 4);
var output2 = metrics2.TopKAccuracyForAllK.ToArray();
for (int i = 0; i < expectedTopKArray2.Length; i++)
Assert.Equal(expectedTopKArray2[i], output2[i], precision: 7);
Assert.Equal(expectedTopKArray2[i], output2[i], 0.0000001);
}
}
}
2 changes: 1 addition & 1 deletion test/Microsoft.ML.Tests/LearningRateSchedulerTest.cs
@@ -48,7 +48,7 @@ internal void TestPolynomialDecay(float[] expectedValues, bool cycle)
trainState.CurrentBatchIndex = i % trainState.BatchesPerEpoch;
trainState.CurrentEpoch = i / trainState.BatchesPerEpoch;
float decayedLR = learningRateScheduler.GetLearningRate(trainState);
Assert.Equal((double)expectedValues[i], (double)decayedLR, 4);
Assert.Equal((double)expectedValues[i], (double)decayedLR, 0.0001);
}
}
}
2 changes: 1 addition & 1 deletion test/Microsoft.ML.Tests/Scenarios/ClusteringTests.cs
@@ -87,7 +87,7 @@ public void PredictClusters()
Assert.Equal(double.NaN, metrics.NormalizedMutualInformation);
//Calculate dbi is false by default so Dbi would be 0
Assert.Equal(0d, metrics.DaviesBouldinIndex);
Assert.Equal(0d, metrics.AverageDistance, 5);
Assert.Equal(0d, metrics.AverageDistance, 0.00001);
}
}
}
