Enable loading model from <1.0.0 trained with objective='binary:logitraw' (#6517)

* Enable loading model from <1.0.0 trained with objective='binary:logitraw'

* Add binary:logitraw in model compatibility testing suite

* Feedback from @trivialfis: Override ProbToMargin() for LogisticRaw

Co-authored-by: Jiaming Yuan <jm.yuan@outlook.com>
hcho3 and trivialfis committed Dec 17, 2020
1 parent bf6cfe3 commit ad1a527
Showing 5 changed files with 46 additions and 22 deletions.
16 changes: 10 additions & 6 deletions R-package/tests/helper_scripts/generate_models.R
@@ -2,7 +2,6 @@
# of saved model files from XGBoost version 0.90 and 1.0.x.
library(xgboost)
library(Matrix)
source('./generate_models_params.R')

set.seed(0)
metadata <- list(
@@ -53,11 +52,16 @@ generate_logistic_model <- function () {
y <- sample(0:1, size = metadata$kRows, replace = TRUE)
stopifnot(max(y) == 1, min(y) == 0)

data <- xgb.DMatrix(X, label = y, weight = w)
params <- list(tree_method = 'hist', num_parallel_tree = metadata$kForests,
max_depth = metadata$kMaxDepth, objective = 'binary:logistic')
booster <- xgb.train(params, data, nrounds = metadata$kRounds)
save_booster(booster, 'logit')
objective <- c('binary:logistic', 'binary:logitraw')
name <- c('logit', 'logitraw')

for (i in seq_len(length(objective))) {
data <- xgb.DMatrix(X, label = y, weight = w)
params <- list(tree_method = 'hist', num_parallel_tree = metadata$kForests,
max_depth = metadata$kMaxDepth, objective = objective[i])
booster <- xgb.train(params, data, nrounds = metadata$kRounds)
save_booster(booster, name[i])
}
}

generate_classification_model <- function () {
4 changes: 4 additions & 0 deletions R-package/tests/testthat/test_model_compatibility.R
@@ -39,6 +39,10 @@ run_booster_check <- function (booster, name) {
testthat::expect_equal(config$learner$learner_train_param$objective, 'multi:softmax')
testthat::expect_equal(as.numeric(config$learner$learner_model_param$num_class),
metadata$kClasses)
} else if (name == 'logitraw') {
testthat::expect_equal(get_num_tree(booster), metadata$kForests * metadata$kRounds)
testthat::expect_equal(as.numeric(config$learner$learner_model_param$num_class), 0)
testthat::expect_equal(config$learner$learner_train_param$objective, 'binary:logitraw')
} else if (name == 'logit') {
testthat::expect_equal(get_num_tree(booster), metadata$kForests * metadata$kRounds)
testthat::expect_equal(as.numeric(config$learner$learner_model_param$num_class), 0)
3 changes: 3 additions & 0 deletions src/objective/regression_loss.h
@@ -162,6 +162,9 @@ struct LogisticRaw : public LogisticRegression {
predt = common::Sigmoid(predt);
return std::max(predt * (T(1.0f) - predt), eps);
}
static bst_float ProbToMargin(bst_float base_score) {
return base_score;
}
static const char* DefaultEvalMetric() { return "auc"; }

static const char* Name() { return "binary:logitraw"; }
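Why the ProbToMargin() override matters: LogisticRaw inherits from LogisticRegression, whose ProbToMargin() treats the stored base_score as a probability and converts it to a logit margin. For binary:logitraw the base_score is already a raw margin rather than a probability, which is why this commit overrides the conversion to return the value unchanged. The following Python is a minimal illustrative sketch of the two conversions, not the C++ source.

import math

# Illustrative sketch (not the XGBoost source): the two base_score conversions.
# The base logistic objective maps a probability to a logit margin; the
# LogisticRaw override is the identity, since base_score is already a margin.
def logistic_prob_to_margin(base_score: float) -> float:
    return -math.log(1.0 / base_score - 1.0)  # inverse sigmoid (logit)

def logitraw_prob_to_margin(base_score: float) -> float:
    return base_score  # identity, mirroring the override added in this commit

print(logistic_prob_to_margin(0.5))  # 0.0 -- probability 0.5 is a zero margin
print(logitraw_prob_to_margin(0.5))  # 0.5 -- raw value passes through unchanged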
34 changes: 18 additions & 16 deletions tests/python/generate_models.py
@@ -64,22 +64,24 @@ def generate_logistic_model():
y = np.random.randint(0, 2, size=kRows)
assert y.max() == 1 and y.min() == 0

data = xgboost.DMatrix(X, label=y, weight=w)
booster = xgboost.train({'tree_method': 'hist',
'num_parallel_tree': kForests,
'max_depth': kMaxDepth,
'objective': 'binary:logistic'},
num_boost_round=kRounds, dtrain=data)
booster.save_model(booster_bin('logit'))
booster.save_model(booster_json('logit'))

reg = xgboost.XGBClassifier(tree_method='hist',
num_parallel_tree=kForests,
max_depth=kMaxDepth,
n_estimators=kRounds)
reg.fit(X, y, w)
reg.save_model(skl_bin('logit'))
reg.save_model(skl_json('logit'))
for objective, name in [('binary:logistic', 'logit'), ('binary:logitraw', 'logitraw')]:
data = xgboost.DMatrix(X, label=y, weight=w)
booster = xgboost.train({'tree_method': 'hist',
'num_parallel_tree': kForests,
'max_depth': kMaxDepth,
'objective': objective},
num_boost_round=kRounds, dtrain=data)
booster.save_model(booster_bin(name))
booster.save_model(booster_json(name))

reg = xgboost.XGBClassifier(tree_method='hist',
num_parallel_tree=kForests,
max_depth=kMaxDepth,
n_estimators=kRounds,
objective=objective)
reg.fit(X, y, w)
reg.save_model(skl_bin(name))
reg.save_model(skl_json(name))


def generate_classification_model():
11 changes: 11 additions & 0 deletions tests/python/test_model_compatibility.py
@@ -24,6 +24,10 @@ def run_booster_check(booster, name):
config['learner']['learner_model_param']['base_score']) == 0.5
assert config['learner']['learner_train_param'][
'objective'] == 'multi:softmax'
elif name.find('logitraw') != -1:
assert len(booster.get_dump()) == gm.kForests * gm.kRounds
assert config['learner']['learner_model_param']['num_class'] == str(0)
assert config['learner']['learner_train_param']['objective'] == 'binary:logitraw'
elif name.find('logit') != -1:
assert len(booster.get_dump()) == gm.kForests * gm.kRounds
assert config['learner']['learner_model_param']['num_class'] == str(0)
@@ -77,6 +81,13 @@ def run_scikit_model_check(name, path):
assert config['learner']['learner_train_param'][
'objective'] == 'rank:ndcg'
run_model_param_check(config)
elif name.find('logitraw') != -1:
logit = xgboost.XGBClassifier()
logit.load_model(path)
assert (len(logit.get_booster().get_dump()) ==
gm.kRounds * gm.kForests)
config = json.loads(logit.get_booster().save_config())
assert config['learner']['learner_train_param']['objective'] == 'binary:logitraw'
elif name.find('logit') != -1:
logit = xgboost.XGBClassifier()
logit.load_model(path)
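As a closing illustration, a hedged sketch of what this change enables from Python; the model path and data below are hypothetical and not part of the commit.

import numpy as np
import xgboost

# Hypothetical file: a model saved by XGBoost < 1.0.0 that was trained with
# objective='binary:logitraw'. With this fix it loads in the current version.
booster = xgboost.Booster()
booster.load_model('old_logitraw.model')

X = np.random.randn(8, 4)
margins = booster.predict(xgboost.DMatrix(X))  # raw margins, no sigmoid applied
print(margins)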
