Remove unused parameters. (#7499)
trivialfis committed Dec 9, 2021
1 parent 1864fab commit 01152f8
Showing 2 changed files with 18 additions and 48 deletions.
src/tree/param.h: 33 changes (0 additions, 33 deletions)
@@ -47,8 +47,6 @@ struct TrainParam : public XGBoostParameter<TrainParam> {
   float reg_lambda;
   // L1 regularization factor
   float reg_alpha;
-  // default direction choice
-  int default_direction;
   // maximum delta update we can add in weight estimation
   // this parameter can be used to stabilize update
   // default=0 means no constraint on weight delta
@@ -77,22 +75,10 @@ struct TrainParam : public XGBoostParameter<TrainParam> {
   // Stored as a JSON string.
   std::string interaction_constraints;
 
-  // the criteria to use for ranking splits
-  std::string split_evaluator;
-
   // ------ From CPU quantile histogram -------.
   // percentage threshold for treating a feature as sparse
   // e.g. 0.2 indicates a feature with fewer than 20% nonzeros is considered sparse
   double sparse_threshold;
-  // when grouping features, how many "conflicts" to allow.
-  // conflict is when an instance has nonzero values for two or more features
-  // default is 0, meaning features should be strictly complementary
-  double max_conflict_rate;
-  // when grouping features, how much effort to expend to prevent singleton groups
-  // we'll try to insert each feature into existing groups before creating a new group
-  // for that feature; to save time, only up to (max_search_group) of existing groups
-  // will be considered. If set to zero, ALL existing groups will be examined
-  unsigned max_search_group;
 
   // declare the parameters
   DMLC_DECLARE_PARAMETER(TrainParam) {
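
Note: the "conflict" notion described in the removed comments is easy to state concretely: two features conflict on a row when both are nonzero there, and the conflict rate is the fraction of such rows. A minimal sketch of that computation (illustrative only; `ConflictRate` is not an xgboost function, and the feature-grouping code this parameter fed is exactly what this commit removes as unused):

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

// A "conflict": some row has nonzero values in both features.
// Returns conflicts / rows; assumes a and b are nonempty, equal-length columns.
double ConflictRate(const std::vector<int>& a, const std::vector<int>& b) {
  int conflicts = 0;
  for (std::size_t i = 0; i < a.size(); ++i) {
    if (a[i] != 0 && b[i] != 0) ++conflicts;
  }
  return static_cast<double>(conflicts) / static_cast<double>(a.size());
}

int main() {
  std::vector<int> f1 = {1, 0, 0, 1};
  std::vector<int> f2 = {0, 1, 0, 1};
  // Only row 3 has nonzeros in both features: 1 conflict out of 4 rows = 0.25.
  std::printf("%.2f\n", ConflictRate(f1, f2));
  return 0;
}
```
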
@@ -139,12 +125,6 @@ struct TrainParam : public XGBoostParameter<TrainParam> {
         .set_lower_bound(0.0f)
         .set_default(0.0f)
         .describe("L1 regularization on leaf weight");
-    DMLC_DECLARE_FIELD(default_direction)
-        .set_default(0)
-        .add_enum("learn", 0)
-        .add_enum("left", 1)
-        .add_enum("right", 2)
-        .describe("Default direction choice when encountering a missing value");
     DMLC_DECLARE_FIELD(max_delta_step)
         .set_lower_bound(0.0f)
         .set_default(0.0f)
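
Note: the removed field relied on dmlc-core's enum mechanism, where each `add_enum` call maps a user-facing string to an integer code, so `default_direction` accepted "learn"/"left"/"right" and stored 0/1/2. A minimal sketch of that behavior, assuming dmlc-core's standard `Parameter` API (`DemoParam` is a hypothetical struct, not part of this commit):

```cpp
#include <iostream>
#include <string>
#include <utility>
#include <vector>
#include <dmlc/parameter.h>

struct DemoParam : public dmlc::Parameter<DemoParam> {
  int default_direction;
  DMLC_DECLARE_PARAMETER(DemoParam) {
    DMLC_DECLARE_FIELD(default_direction)
        .set_default(0)
        .add_enum("learn", 0)   // each add_enum maps a string to an int code
        .add_enum("left", 1)
        .add_enum("right", 2)
        .describe("Default direction choice when encountering a missing value");
  }
};
DMLC_REGISTER_PARAMETER(DemoParam);

int main() {
  DemoParam p;
  // Users pass the string form; the parsed field holds the mapped integer.
  p.Init(std::vector<std::pair<std::string, std::string>>{
      {"default_direction", "right"}});
  std::cout << p.default_direction << "\n";  // prints 2
  return 0;
}
```
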
@@ -198,23 +178,10 @@ struct TrainParam : public XGBoostParameter<TrainParam> {
                   "e.g. [[0, 1], [2, 3, 4]], where each inner list is a group of"
                   "indices of features that are allowed to interact with each other."
                   "See tutorial for more information");
-    DMLC_DECLARE_FIELD(split_evaluator)
-        .set_default("elastic_net,monotonic")
-        .describe("The criteria to use for ranking splits");
 
     // ------ From cpu quantile histogram -------.
     DMLC_DECLARE_FIELD(sparse_threshold).set_range(0, 1.0).set_default(0.2)
         .describe("percentage threshold for treating a feature as sparse");
-    DMLC_DECLARE_FIELD(max_conflict_rate).set_range(0, 1.0).set_default(0)
-        .describe("when grouping features, how many \"conflicts\" to allow."
-                  "conflict is when an instance has nonzero values for two or more features."
-                  "default is 0, meaning features should be strictly complementary.");
-    DMLC_DECLARE_FIELD(max_search_group).set_lower_bound(0).set_default(100)
-        .describe("when grouping features, how much effort to expend to prevent "
-                  "singleton groups. We'll try to insert each feature into existing "
-                  "groups before creating a new group for that feature; to save time, "
-                  "only up to (max_search_group) of existing groups will be "
-                  "considered. If set to zero, ALL existing groups will be examined.");
 
     // add alias of parameters
     DMLC_DECLARE_ALIAS(reg_lambda, lambda);
src/tree/updater_colmaker.cc: 33 changes (18 additions, 15 deletions)
@@ -27,24 +27,29 @@ DMLC_REGISTRY_FILE_TAG(updater_colmaker);
 struct ColMakerTrainParam : XGBoostParameter<ColMakerTrainParam> {
   // speed optimization for dense column
   float opt_dense_col;
+  // default direction choice
+  int default_direction;
+
   DMLC_DECLARE_PARAMETER(ColMakerTrainParam) {
     DMLC_DECLARE_FIELD(opt_dense_col)
         .set_range(0.0f, 1.0f)
         .set_default(1.0f)
         .describe("EXP Param: speed optimization for dense column.");
+    DMLC_DECLARE_FIELD(default_direction)
+        .set_default(0)
+        .add_enum("learn", 0)
+        .add_enum("left", 1)
+        .add_enum("right", 2)
+        .describe("Default direction choice when encountering a missing value");
   }
 
   /*! \brief whether need forward small to big search: default right */
-  inline bool NeedForwardSearch(int default_direction, float col_density,
-                                bool indicator) const {
+  inline bool NeedForwardSearch(float col_density, bool indicator) const {
     return default_direction == 2 ||
-           (default_direction == 0 && (col_density < opt_dense_col) &&
-            !indicator);
+           (default_direction == 0 && (col_density < opt_dense_col) && !indicator);
   }
   /*! \brief whether need backward big to small search: default left */
-  inline bool NeedBackwardSearch(int default_direction) const {
-    return default_direction != 2;
-  }
+  inline bool NeedBackwardSearch() const { return default_direction != 2; }
 };
 
 DMLC_REGISTER_PARAMETER(ColMakerTrainParam);
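
Note: with the parameter now owned by `ColMakerTrainParam`, the two predicates read directly off the member. Their combined effect: "right" (2) scans forward only, "left" (1) scans backward only, and "learn" (0) always scans backward and additionally scans forward when the column is sparse enough (`col_density < opt_dense_col`) and not an indicator column. A standalone sketch of that truth table, mirroring the predicates above for illustration rather than reproducing the xgboost code itself:

```cpp
#include <cstdio>

struct DirectionParam {
  float opt_dense_col = 1.0f;  // density threshold below which a column counts as sparse
  int default_direction = 0;   // 0 = learn, 1 = left, 2 = right

  bool NeedForwardSearch(float col_density, bool indicator) const {
    // Forward (small-to-big) scan: missing values end up on the right.
    return default_direction == 2 ||
           (default_direction == 0 && col_density < opt_dense_col && !indicator);
  }
  bool NeedBackwardSearch() const {
    // Backward (big-to-small) scan: missing values end up on the left.
    return default_direction != 2;
  }
};

int main() {
  const char* names[] = {"learn", "left", "right"};
  for (int d = 0; d < 3; ++d) {
    DirectionParam p;
    p.default_direction = d;
    std::printf("%-5s forward=%d backward=%d\n", names[d],
                p.NeedForwardSearch(/*col_density=*/0.3f, /*indicator=*/false),
                p.NeedBackwardSearch());
  }
  // Output: learn forward=1 backward=1; left forward=0 backward=1;
  //         right forward=1 backward=0
  return 0;
}
```
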
@@ -465,15 +470,13 @@ class ColMaker: public TreeUpdater {
         auto c = page[fid];
         const bool ind =
             c.size() != 0 && c[0].fvalue == c[c.size() - 1].fvalue;
-        if (colmaker_train_param_.NeedForwardSearch(
-                param_.default_direction, column_densities_[fid], ind)) {
-          this->EnumerateSplit(c.data(), c.data() + c.size(), +1, fid,
-                               gpair, stemp_[tid], evaluator);
+        if (colmaker_train_param_.NeedForwardSearch(column_densities_[fid], ind)) {
+          this->EnumerateSplit(c.data(), c.data() + c.size(), +1, fid, gpair, stemp_[tid],
+                               evaluator);
         }
-        if (colmaker_train_param_.NeedBackwardSearch(
-                param_.default_direction)) {
-          this->EnumerateSplit(c.data() + c.size() - 1, c.data() - 1, -1,
-                               fid, gpair, stemp_[tid], evaluator);
+        if (colmaker_train_param_.NeedBackwardSearch()) {
+          this->EnumerateSplit(c.data() + c.size() - 1, c.data() - 1, -1, fid, gpair,
+                               stemp_[tid], evaluator);
         }
       });
   }
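
Note: the call sites hand `EnumerateSplit` a raw pointer range plus a step of `+1` or `-1`, so one routine can walk the sorted column either small-to-big or big-to-small. A hedged sketch of that iteration pattern (`Entry` is simplified and `ScanColumn` is a hypothetical stand-in; the real routine accumulates gradient statistics instead of printing):

```cpp
#include <cstddef>
#include <cstdio>

struct Entry { float fvalue; };

// Walks [begin, end) when step == +1, or backward when step == -1,
// mimicking how EnumerateSplit receives forward and backward ranges.
void ScanColumn(const Entry* begin, const Entry* end, int step) {
  for (const Entry* it = begin; it != end; it += step) {
    std::printf("%g ", it->fvalue);  // real code would update split statistics here
  }
  std::printf("\n");
}

int main() {
  Entry col[] = {{0.5f}, {1.0f}, {2.5f}};
  const std::size_t n = sizeof(col) / sizeof(col[0]);
  ScanColumn(col, col + n, +1);          // forward scan: 0.5 1 2.5
  // The one-before-begin sentinel mirrors the c.data() - 1 used at the call site.
  ScanColumn(col + n - 1, col - 1, -1);  // backward scan: 2.5 1 0.5
  return 0;
}
```
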
