diff --git a/demo/guide-python/cross_validation.py b/demo/guide-python/cross_validation.py
index 3eb9c45bec93..f81a138a1d1c 100644
--- a/demo/guide-python/cross_validation.py
+++ b/demo/guide-python/cross_validation.py
@@ -14,15 +14,15 @@
 # std_value is standard deviation of the metric
 xgb.cv(param, dtrain, num_round, nfold=5,
        metrics={'error'}, seed=0,
-       callbacks=[xgb.callback.print_evaluation(show_stdv=True)])
+       callbacks=[xgb.callback.EvaluationMonitor(show_stdv=True)])
 
 print('running cross validation, disable standard deviation display')
 # do cross validation, this will print result out as
 # [iteration]  metric_name:mean_value
 res = xgb.cv(param, dtrain, num_boost_round=10, nfold=5,
              metrics={'error'}, seed=0,
-             callbacks=[xgb.callback.print_evaluation(show_stdv=False),
-                        xgb.callback.early_stop(3)])
+             callbacks=[xgb.callback.EvaluationMonitor(show_stdv=False),
+                        xgb.callback.EarlyStopping(3)])
 print(res)
 print('running cross validation, with preprocessing function')
 # define the preprocessing function
diff --git a/doc/python/python_api.rst b/doc/python/python_api.rst
index 365cbb39d65e..0a419b17e684 100644
--- a/doc/python/python_api.rst
+++ b/doc/python/python_api.rst
@@ -69,13 +69,15 @@ Plotting API
 Callback API
 ------------
 
-.. autofunction:: xgboost.callback.print_evaluation
+.. autofunction:: xgboost.callback.TrainingCallback
 
-.. autofunction:: xgboost.callback.record_evaluation
+.. autofunction:: xgboost.callback.EvaluationMonitor
 
-.. autofunction:: xgboost.callback.reset_learning_rate
+.. autofunction:: xgboost.callback.EarlyStopping
 
-.. autofunction:: xgboost.callback.early_stop
+.. autofunction:: xgboost.callback.LearningRateScheduler
+
+.. autofunction:: xgboost.callback.TrainingCheckPoint
 
 .. _dask_api:
 
@@ -91,6 +93,8 @@ Dask API
 
 .. autofunction:: xgboost.dask.predict
 
+.. autofunction:: xgboost.dask.inplace_predict
+
 .. autofunction:: xgboost.dask.DaskXGBClassifier
 
 .. autofunction:: xgboost.dask.DaskXGBRegressor
diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py
index b199ff2d67f5..f42b7acf7df6 100644
--- a/python-package/xgboost/sklearn.py
+++ b/python-package/xgboost/sklearn.py
@@ -510,7 +510,8 @@ def fit(self, X, y, sample_weight=None, base_margin=None,
 
             .. code-block:: python
 
-                [xgb.callback.reset_learning_rate(custom_rates)]
+                callbacks = [xgb.callback.EarlyStopping(rounds=early_stopping_rounds,
+                                                        save_best=True)]
 
         """
         self.n_features_in_ = X.shape[1]
@@ -1249,7 +1250,8 @@ def fit(self, X, y, group, sample_weight=None, base_margin=None,
 
             .. code-block:: python
 
-                [xgb.callback.reset_learning_rate(custom_rates)]
+                callbacks = [xgb.callback.EarlyStopping(rounds=early_stopping_rounds,
+                                                        save_best=True)]
 
         """
         # check if group information is provided
diff --git a/tests/python/test_callback.py b/tests/python/test_callback.py
index 114f1203c88c..0d3a3d9e59b1 100644
--- a/tests/python/test_callback.py
+++ b/tests/python/test_callback.py
@@ -123,9 +123,10 @@ def test_early_stopping_custom_eval_skl(self):
         X, y = load_breast_cancer(return_X_y=True)
         cls = xgb.XGBClassifier()
         early_stopping_rounds = 5
+        early_stop = xgb.callback.EarlyStopping(rounds=early_stopping_rounds)
         cls.fit(X, y, eval_set=[(X, y)],
-                early_stopping_rounds=early_stopping_rounds,
-                eval_metric=tm.eval_error_metric)
+                eval_metric=tm.eval_error_metric,
+                callbacks=[early_stop])
         booster = cls.get_booster()
         dump = booster.get_dump(dump_format='json')
         assert len(dump) - booster.best_iteration == early_stopping_rounds + 1
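For reference, here is a minimal self-contained sketch of the class-based callback usage this change migrates to. The synthetic data and `param` dict are illustrative only and not part of the diff; the callback classes (`xgb.callback.EvaluationMonitor`, `xgb.callback.EarlyStopping`) and the `callbacks=` argument to `xgb.cv` are the ones exercised above.

```python
import numpy as np
import xgboost as xgb

# Illustrative synthetic binary-classification data (not from the demo script).
rng = np.random.RandomState(0)
X = rng.randn(1000, 10)
y = (X[:, 0] + 0.5 * rng.randn(1000) > 0).astype(int)
dtrain = xgb.DMatrix(X, label=y)

param = {'objective': 'binary:logistic', 'max_depth': 2, 'eta': 0.3}

# Class-based callbacks replace the old function-style helpers:
#   print_evaluation(...) -> EvaluationMonitor(...)
#   early_stop(...)       -> EarlyStopping(rounds=...)
res = xgb.cv(param, dtrain, num_boost_round=10, nfold=5,
             metrics={'error'}, seed=0,
             callbacks=[xgb.callback.EvaluationMonitor(show_stdv=False),
                        xgb.callback.EarlyStopping(rounds=3)])
print(res)
```

The same `callbacks` list can be passed to `XGBClassifier.fit`, which is what the updated test does instead of using the `early_stopping_rounds` keyword.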