diff --git a/python-package/xgboost/training.py b/python-package/xgboost/training.py
index 6b333e246d4a..ba53f0759b45 100644
--- a/python-package/xgboost/training.py
+++ b/python-package/xgboost/training.py
@@ -3,6 +3,8 @@
 # pylint: disable=too-many-branches, too-many-statements
 """Training Library containing training routines."""
 import warnings
+import copy
+
 import numpy as np
 from .core import Booster, XGBoostError
 from .compat import (SKLEARN_INSTALLED, XGBStratifiedKFold)
@@ -57,7 +59,7 @@ def _train_internal(params, dtrain,
                     evals_result=None, maximize=None,
                     verbose_eval=None, early_stopping_rounds=None):
     """internal training function"""
-    callbacks = [] if callbacks is None else callbacks
+    callbacks = [] if callbacks is None else copy.copy(callbacks)
     evals = list(evals)
 
     params = _configure_metrics(params.copy())
diff --git a/tests/python/test_callback.py b/tests/python/test_callback.py
index 2b482bb99c5e..712019880377 100644
--- a/tests/python/test_callback.py
+++ b/tests/python/test_callback.py
@@ -232,3 +232,16 @@ def test_check_point(self):
             for i in range(1, 10):
                 assert os.path.exists(
                     os.path.join(tmpdir, 'model_' + str(i) + '.pkl'))
+
+    def test_callback_list(self):
+        X, y = tm.get_boston()
+        m = xgb.DMatrix(X, y)
+        callbacks = [xgb.callback.EarlyStopping(rounds=10)]
+        for i in range(4):
+            xgb.train({'objective': 'reg:squarederror',
+                       'eval_metric': 'rmse'}, m,
+                      evals=[(m, 'Train')],
+                      num_boost_round=1,
+                      verbose_eval=True,
+                      callbacks=callbacks)
+        assert len(callbacks) == 1
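
Note (not part of the patch): _train_internal appends internally created callbacks, such as an evaluation monitor, to the list it receives, so a callbacks list reused across repeated xgb.train calls previously grew with each call, as the new test checks. The shallow copy keeps those internal appends from leaking back to the caller. A minimal sketch of the pattern, using hypothetical string stand-ins in place of real callback objects:

import copy

def _train_internal(callbacks=None):
    # Shallow-copy the caller's list so the append below stays internal.
    callbacks = [] if callbacks is None else copy.copy(callbacks)
    callbacks.append("internal evaluation monitor")  # hypothetical stand-in
    return callbacks

user_callbacks = ["early stopping"]  # hypothetical stand-in for a callback object
for _ in range(4):
    _train_internal(user_callbacks)
assert len(user_callbacks) == 1  # without the copy this would grow to 5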