From 97d758245763fb0a3d196b229cbea9b6fe0ae176 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan
Date: Fri, 12 Nov 2021 16:46:03 +0800
Subject: [PATCH] Delay breaking changes to 1.6. (#7420)

The patch is too big to be backported.
---
 doc/tutorials/custom_metric_obj.rst |  3 ++-
 python-package/xgboost/sklearn.py   | 16 ++++++++--------
 python-package/xgboost/training.py  |  8 ++++----
 3 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/doc/tutorials/custom_metric_obj.rst b/doc/tutorials/custom_metric_obj.rst
index 5dbab173b244..b84229599364 100644
--- a/doc/tutorials/custom_metric_obj.rst
+++ b/doc/tutorials/custom_metric_obj.rst
@@ -21,6 +21,7 @@ concepts should be readily applicable to other language bindings.
 .. note::
 
    * The ranking task does not support customized functions.
+   * Breaking change was made in XGBoost 1.6.
 
 In the following two sections, we will provide a step by step walk through of implementing
 ``Squared Log Error(SLE)`` objective function:
@@ -270,7 +271,7 @@ Scikit-Learn Interface
 
 The scikit-learn interface of XGBoost has some utilities to improve the integration with
-standard scikit-learn functions. For instance, after XGBoost 1.5.1 users can use the cost
+standard scikit-learn functions. For instance, after XGBoost 1.6.0 users can use the cost
 function (not scoring functions) from scikit-learn out of the box:
 
 .. code-block:: python
 
diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py
index d66bf077da65..26d5d34c9388 100644
--- a/python-package/xgboost/sklearn.py
+++ b/python-package/xgboost/sklearn.py
@@ -199,7 +199,7 @@ def inner(y_score: np.ndarray, dmatrix: DMatrix) -> Tuple[str, float]:
 
     eval_metric : Optional[Union[str, List[str], Callable]]
 
-        .. versionadded:: 1.5.1
+        .. versionadded:: 1.6.0
 
         Metric used for monitoring the training result and early stopping. It can be a
         string or list of strings as names of predefined metric in XGBoost (See
@@ -239,7 +239,7 @@ def inner(y_score: np.ndarray, dmatrix: DMatrix) -> Tuple[str, float]:
 
     early_stopping_rounds : Optional[int]
 
-        .. versionadded:: 1.5.1
+        .. versionadded:: 1.6.0
 
         Activates early stopping. Validation metric needs to improve at least once in
         every **early_stopping_rounds** round(s) to continue training. Requires at least
@@ -855,11 +855,11 @@ def fit(
             Validation metrics will help us track the performance of the model.
         eval_metric : str, list of str, or callable, optional
-            .. deprecated:: 1.5.1
+            .. deprecated:: 1.6.0
                 Use `eval_metric` in :py:meth:`__init__` or :py:meth:`set_params` instead.
 
         early_stopping_rounds : int
-            .. deprecated:: 1.5.1
+            .. deprecated:: 1.6.0
                 Use `early_stopping_rounds` in :py:meth:`__init__` or :py:meth:`set_params`
                 instead.
 
         verbose :
@@ -881,7 +881,7 @@ def fit(
             `exact` tree methods.
 
         callbacks :
-            .. deprecated: 1.5.1
+            .. deprecated: 1.6.0
                 Use `callbacks` in :py:meth:`__init__` or :py:methd:`set_params` instead.
         """
         evals_result: TrainingCallback.EvalsLog = {}
@@ -1693,11 +1693,11 @@ def fit(
             pair in **eval_set**.
 
         eval_metric : str, list of str, optional
-            .. deprecated:: 1.5.1
+            .. deprecated:: 1.6.0
                 use `eval_metric` in :py:meth:`__init__` or :py:meth:`set_params` instead.
 
         early_stopping_rounds : int
-            .. deprecated:: 1.5.1
+            .. deprecated:: 1.6.0
                 use `early_stopping_rounds` in :py:meth:`__init__` or :py:meth:`set_params`
                 instead.
@@ -1727,7 +1727,7 @@ def fit(
             `exact` tree methods.
 
         callbacks :
-            .. deprecated: 1.5.1
+            .. deprecated: 1.6.0
                 Use `callbacks` in :py:meth:`__init__` or :py:methd:`set_params` instead.
         """
         # check if group information is provided
diff --git a/python-package/xgboost/training.py b/python-package/xgboost/training.py
index c5cbc96322ed..1b75df372907 100644
--- a/python-package/xgboost/training.py
+++ b/python-package/xgboost/training.py
@@ -80,7 +80,7 @@ def train(
         `_ for
         details.
     feval :
-        .. deprecated:: 1.5.1
+        .. deprecated:: 1.6.0
             Use `custom_metric` instead.
     maximize : bool
         Whether to maximize feval.
@@ -132,7 +132,7 @@ def train(
 
     custom_metric:
 
-        .. versionadded 1.5.1
+        .. versionadded 1.6.0
 
         Custom metric function. See `Custom Metric
         `_ for
@@ -392,7 +392,7 @@ def cv(params, dtrain, num_boost_round=10, nfold=3, stratified=False, folds=None
         details.
     feval : function
-        .. deprecated:: 1.5.1
+        .. deprecated:: 1.6.0
             Use `custom_metric` instead.
 
     maximize : bool
        Whether to maximize feval.
@@ -432,7 +432,7 @@ def cv(params, dtrain, num_boost_round=10, nfold=3, stratified=False, folds=None
         Shuffle data before creating folds.
     custom_metric :
 
-        .. versionadded 1.5.1
+        .. versionadded 1.6.0
 
         Custom metric function. See `Custom Metric
         `_ for