Skip to content

Commit

Permalink
Fix mypy errors. (#8444)
Browse files Browse the repository at this point in the history
  • Loading branch information
trivialfis committed Nov 9, 2022
1 parent 0252d50 commit 9dd8d70
Show file tree
Hide file tree
Showing 5 changed files with 34 additions and 25 deletions.
4 changes: 1 addition & 3 deletions python-package/xgboost/callback.py
Expand Up @@ -135,7 +135,7 @@ class CallbackContainer:
def __init__(
self,
callbacks: Sequence[TrainingCallback],
metric: Callable = None,
metric: Optional[Callable] = None,
output_margin: bool = True,
is_cv: bool = False
) -> None:
Expand Down Expand Up @@ -391,8 +391,6 @@ def minimize(new: _Score, best: _Score) -> bool:
else:
improve_op = minimize

assert improve_op

if not self.stopping_history: # First round
self.current_rounds = 0
self.stopping_history[name] = {}
Expand Down
10 changes: 5 additions & 5 deletions python-package/xgboost/dask.py
Expand Up @@ -288,10 +288,10 @@ def __init__(
*,
weight: Optional[_DaskCollection] = None,
base_margin: Optional[_DaskCollection] = None,
missing: float = None,
missing: Optional[float] = None,
silent: bool = False, # pylint: disable=unused-argument
feature_names: Optional[FeatureNames] = None,
feature_types: FeatureTypes = None,
feature_types: Optional[FeatureTypes] = None,
group: Optional[_DaskCollection] = None,
qid: Optional[_DaskCollection] = None,
label_lower_bound: Optional[_DaskCollection] = None,
Expand All @@ -304,7 +304,7 @@ def __init__(

self.feature_names = feature_names
self.feature_types = feature_types
self.missing = missing
self.missing = missing if missing is not None else numpy.nan
self.enable_categorical = enable_categorical

if qid is not None and weight is not None:
Expand Down Expand Up @@ -651,7 +651,7 @@ def __init__(
*,
weight: Optional[_DaskCollection] = None,
base_margin: Optional[_DaskCollection] = None,
missing: float = None,
missing: Optional[float] = None,
silent: bool = False, # disable=unused-argument
feature_names: Optional[FeatureNames] = None,
feature_types: Optional[Union[Any, List[Any]]] = None,
Expand Down Expand Up @@ -2129,7 +2129,7 @@ def fit(
eval_group: Optional[Sequence[_DaskCollection]] = None,
eval_qid: Optional[Sequence[_DaskCollection]] = None,
eval_metric: Optional[Union[str, Sequence[str], Callable]] = None,
early_stopping_rounds: int = None,
early_stopping_rounds: Optional[int] = None,
verbose: Union[int, bool] = False,
xgb_model: Optional[Union[XGBModel, Booster]] = None,
sample_weight_eval_set: Optional[Sequence[_DaskCollection]] = None,
Expand Down
2 changes: 1 addition & 1 deletion python-package/xgboost/rabit.py
Expand Up @@ -152,7 +152,7 @@ def version_number() -> int:
class RabitContext:
"""A context controlling rabit initialization and finalization."""

def __init__(self, args: List[bytes] = None) -> None:
def __init__(self, args: Optional[List[bytes]] = None) -> None:
    """Record the rabit initialization arguments.

    Parameters
    ----------
    args :
        Encoded rabit arguments; ``None`` is treated as no arguments
        (an empty list), avoiding a mutable default.
    """
    # Normalize the optional argument in a single expression rather than
    # mutating the parameter in place.
    self.args = [] if args is None else args
Expand Down
4 changes: 2 additions & 2 deletions python-package/xgboost/sklearn.py
Expand Up @@ -233,7 +233,7 @@ def inner(y_score: np.ndarray, dmatrix: DMatrix) -> Tuple[str, float]:
should be used to specify categorical data type. Also, JSON/UBJSON
serialization format is required.
feature_types : FeatureTypes
feature_types : Optional[FeatureTypes]
.. versionadded:: 1.7.0
Expand Down Expand Up @@ -572,7 +572,7 @@ def __init__(
validate_parameters: Optional[bool] = None,
predictor: Optional[str] = None,
enable_categorical: bool = False,
feature_types: FeatureTypes = None,
feature_types: Optional[FeatureTypes] = None,
max_cat_to_onehot: Optional[int] = None,
max_cat_threshold: Optional[int] = None,
eval_metric: Optional[Union[str, List[str], Callable]] = None,
Expand Down
39 changes: 25 additions & 14 deletions python-package/xgboost/training.py
@@ -1,4 +1,3 @@
# coding: utf-8
# pylint: disable=too-many-locals, too-many-arguments, invalid-name
# pylint: disable=too-many-branches, too-many-statements
"""Training Library containing training routines."""
Expand Down Expand Up @@ -71,7 +70,7 @@ def train(
feval: Optional[Metric] = None,
maximize: Optional[bool] = None,
early_stopping_rounds: Optional[int] = None,
evals_result: TrainingCallback.EvalsLog = None,
evals_result: Optional[TrainingCallback.EvalsLog] = None,
verbose_eval: Optional[Union[bool, int]] = True,
xgb_model: Optional[Union[str, os.PathLike, Booster, bytearray]] = None,
callbacks: Optional[Sequence[TrainingCallback]] = None,
Expand Down Expand Up @@ -285,15 +284,20 @@ def groups_to_rows(groups: List[np.ndarray], boundaries: np.ndarray) -> np.ndarr
return np.concatenate([np.arange(boundaries[g], boundaries[g+1]) for g in groups])


def mkgroupfold(dall: DMatrix, nfold: int, param: BoosterParam,
evals: Sequence[str] = (), fpreproc: FPreProcCallable = None,
shuffle: bool = True) -> List[CVPack]:
def mkgroupfold(
dall: DMatrix,
nfold: int,
param: BoosterParam,
evals: Sequence[str] = (),
fpreproc: Optional[FPreProcCallable] = None,
shuffle: bool = True,
) -> List[CVPack]:
"""
Make n folds for cross-validation maintaining groups
:return: cross-validation folds
"""
# we have groups for pairwise ranking... get a list of the group indexes
group_boundaries = dall.get_uint_info('group_ptr')
group_boundaries = dall.get_uint_info("group_ptr")
group_sizes = np.diff(group_boundaries)

if shuffle is True:
Expand Down Expand Up @@ -327,10 +331,17 @@ def mkgroupfold(dall: DMatrix, nfold: int, param: BoosterParam,
return ret


def mknfold(dall: DMatrix, nfold: int, param: BoosterParam, seed: int,
evals: Sequence[str] = (), fpreproc: FPreProcCallable = None,
stratified: bool = False, folds: XGBStratifiedKFold = None, shuffle: bool = True
) -> List[CVPack]:
def mknfold(
dall: DMatrix,
nfold: int,
param: BoosterParam,
seed: int,
evals: Sequence[str] = (),
fpreproc: Optional[FPreProcCallable] = None,
stratified: Optional[bool] = False,
folds: Optional[XGBStratifiedKFold] = None,
shuffle: bool = True,
) -> List[CVPack]:
"""
Make an n-fold list of CVPack from random indices.
"""
Expand Down Expand Up @@ -393,14 +404,14 @@ def cv(
metrics: Sequence[str] = (),
obj: Optional[Objective] = None,
feval: Optional[Metric] = None,
maximize: bool = None,
early_stopping_rounds: int = None,
fpreproc: FPreProcCallable = None,
maximize: Optional[bool] = None,
early_stopping_rounds: Optional[int] = None,
fpreproc: Optional[FPreProcCallable] = None,
as_pandas: bool = True,
verbose_eval: Optional[Union[int, bool]] = None,
show_stdv: bool = True,
seed: int = 0,
callbacks: Sequence[TrainingCallback] = None,
callbacks: Optional[Sequence[TrainingCallback]] = None,
shuffle: bool = True,
custom_metric: Optional[Metric] = None,
) -> Union[Dict[str, float], DataFrame]:
Expand Down

0 comments on commit 9dd8d70

Please sign in to comment.