Add test for eta and mitigate float error. (#7446)
* Add eta test.
* Don't skip test.
trivialfis committed Nov 18, 2021
Commit 9fb4338 (parent 7cfb310)
Showing 2 changed files with 65 additions and 2 deletions.
tests/cpp/test_serialization.cc (7 additions, 2 deletions)
@@ -149,6 +149,7 @@ void TestLearnerSerialization(Args args, FeatureMap const& fmap, std::shared_ptr

Json m_0 = Json::Load(StringView{continued_model.c_str(), continued_model.size()});
Json m_1 = Json::Load(StringView{model_at_2kiter.c_str(), model_at_2kiter.size()});

CompareJSON(m_0, m_1);
}

@@ -610,7 +611,6 @@ TEST_F(MultiClassesSerializationTest, CPUCoordDescent) {

#if defined(XGBOOST_USE_CUDA)
TEST_F(MultiClassesSerializationTest, GpuHist) {
GTEST_SKIP() << "This test is broken for CUDA 11.0 + Windows combination, skipping";
TestLearnerSerialization({{"booster", "gbtree"},
{"num_class", std::to_string(kClasses)},
{"seed", "0"},
@@ -620,6 +620,9 @@ TEST_F(MultiClassesSerializationTest, GpuHist) {
// different result (1e-7) with CPU predictor for some
// entries.
{"predictor", "gpu_predictor"},
                           // Mitigate the difference caused by hardware fused multiply-add when
                           // applying the learning rate to tree weights in the prediction cache update.
{"learning_rate", "1.0"},
{"tree_method", "gpu_hist"}},
fmap_, p_dmat_);

@@ -630,14 +633,16 @@ TEST_F(MultiClassesSerializationTest, GpuHist) {
{"max_depth", std::to_string(kClasses)},
// GPU_Hist has higher floating point error. 1e-6 doesn't work
// after num_parallel_tree goes to 4
{"num_parallel_tree", "3"},
{"num_parallel_tree", "4"},
{"learning_rate", "1.0"},
{"tree_method", "gpu_hist"}},
fmap_, p_dmat_);

TestLearnerSerialization({{"booster", "dart"},
{"num_class", std::to_string(kClasses)},
{"seed", "0"},
{"nthread", "1"},
{"learning_rate", "1.0"},
{"max_depth", std::to_string(kClasses)},
{"tree_method", "gpu_hist"}},
fmap_, p_dmat_);
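Note on the learning_rate change above: with eta below 1.0, applying the learning rate to a leaf weight and adding the result to the cached prediction is a multiply followed by an add. A compiler or the GPU may contract that into a single fused multiply-add (FMA), which rounds once instead of twice, so the updated tree weights can differ in the last bits between code paths that should otherwise agree. Pinning the learning rate to 1.0 sidesteps this, since multiplying by 1.0 is exact and the fused and unfused forms then coincide. The snippet below is a minimal standalone sketch of the effect, not part of this commit; the values and the explicit std::fmaf call are illustrative only.

    #include <cmath>
    #include <cstdio>

    int main() {
      float weight = 0.3f, eta = 0.4f, base = 0.5f;
      // Two roundings: the product is rounded, then the sum is rounded.
      float separate = weight * eta + base;
      // One rounding: the fused form rounds only the final result.
      float fused = std::fmaf(weight, eta, base);
      // The two can differ in the last bits.  With eta == 1.0f the multiply is
      // exact, so both forms agree, which is what the test above relies on.
      std::printf("separate=%.9g fused=%.9g diff=%.9g\n",
                  separate, fused, separate - fused);
      return 0;
    }
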
tests/cpp/tree/test_tree_stat.cc (58 additions, 0 deletions)
@@ -56,4 +56,62 @@ TEST_F(UpdaterTreeStatTest, Exact) {
TEST_F(UpdaterTreeStatTest, Approx) {
this->RunTest("grow_histmaker");
}

class UpdaterEtaTest : public ::testing::Test {
protected:
std::shared_ptr<DMatrix> p_dmat_;
HostDeviceVector<GradientPair> gpairs_;
size_t constexpr static kRows = 10;
size_t constexpr static kCols = 10;
size_t constexpr static kClasses = 10;

void SetUp() override {
p_dmat_ = RandomDataGenerator(kRows, kCols, .5f).GenerateDMatrix(true, false, kClasses);
auto g = GenerateRandomGradients(kRows);
gpairs_.Resize(kRows);
gpairs_.Copy(g);
}

void RunTest(std::string updater) {
auto tparam = CreateEmptyGenericParam(0);
float eta = 0.4;
auto up_0 = std::unique_ptr<TreeUpdater>{
TreeUpdater::Create(updater, &tparam, ObjInfo{ObjInfo::kClassification})};
up_0->Configure(Args{{"eta", std::to_string(eta)}});

auto up_1 = std::unique_ptr<TreeUpdater>{
TreeUpdater::Create(updater, &tparam, ObjInfo{ObjInfo::kClassification})};
up_1->Configure(Args{{"eta", "1.0"}});

for (size_t iter = 0; iter < 4; ++iter) {
RegTree tree_0;
{
tree_0.param.num_feature = kCols;
up_0->Update(&gpairs_, p_dmat_.get(), {&tree_0});
}

RegTree tree_1;
{
tree_1.param.num_feature = kCols;
up_1->Update(&gpairs_, p_dmat_.get(), {&tree_1});
}
tree_0.WalkTree([&](bst_node_t nidx) {
if (tree_0[nidx].IsLeaf()) {
EXPECT_NEAR(tree_1[nidx].LeafValue() * eta, tree_0[nidx].LeafValue(), kRtEps);
}
return true;
});
}
}
};

TEST_F(UpdaterEtaTest, Hist) { this->RunTest("grow_quantile_histmaker"); }

TEST_F(UpdaterEtaTest, Exact) { this->RunTest("grow_colmaker"); }

TEST_F(UpdaterEtaTest, Approx) { this->RunTest("grow_histmaker"); }

#if defined(XGBOOST_USE_CUDA)
TEST_F(UpdaterEtaTest, GpuHist) { this->RunTest("grow_gpu_hist"); }
#endif // defined(XGBOOST_USE_CUDA)
} // namespace xgboost
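
What the new UpdaterEtaTest verifies: under the regularized second-order objective, a leaf weight scales linearly with eta (roughly w = -eta * G / (H + lambda)), so a tree trained with eta = 0.4 should have leaf values equal to the eta = 1.0 leaves multiplied by 0.4, up to floating-point tolerance, which is exactly the EXPECT_NEAR check in the WalkTree callback. The sketch below illustrates that relation with a simplified, assumed leaf formula; LeafWeight and the sample gradient sums are stand-ins for this example, not XGBoost internals.

    #include <cassert>
    #include <cmath>

    // Simplified leaf weight for illustration (assumed formula):
    // w = -eta * sum_grad / (sum_hess + lambda).
    float LeafWeight(float sum_grad, float sum_hess, float lambda, float eta) {
      return -eta * sum_grad / (sum_hess + lambda);
    }

    int main() {
      float G = 2.5f, H = 7.0f, lambda = 1.0f, eta = 0.4f;
      float w_eta = LeafWeight(G, H, lambda, eta);
      float w_one = LeafWeight(G, H, lambda, 1.0f);
      // Mirrors the test's EXPECT_NEAR(tree_1 leaf * eta, tree_0 leaf, kRtEps).
      assert(std::fabs(w_one * eta - w_eta) < 1e-6f);
      return 0;
    }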
