From 2c6bfea7fcd0d865667cacb0a23c03253f8af60f Mon Sep 17 00:00:00 2001 From: rajathbharadwaj Date: Tue, 23 Nov 2021 17:55:33 +0530 Subject: [PATCH 01/32] added UserWarnings if max_epochs not set in the Trainer class --- pytorch_lightning/trainer/trainer.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 1ccdb9ecaeca8..62449acf8af0c 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -444,6 +444,10 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) + # UserWarning for max_epochs if not set + + warnings.warn("max_epochs not set, defaulted to 1000 epochs until stop.") if self.max_epochs is None else print(f"max epochs set to {self.max_epochs}") + fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), max_epochs=( From e6956fb4afc66d6b00a1472d63bdd72f6fa89ac0 Mon Sep 17 00:00:00 2001 From: rajathbharadwaj Date: Tue, 23 Nov 2021 18:06:46 +0530 Subject: [PATCH 02/32] updated trainer docs with max_epochs userwarning --- docs/source/common/trainer.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/common/trainer.rst b/docs/source/common/trainer.rst index 7e336c758f621..1f3c40a3fab70 100644 --- a/docs/source/common/trainer.rst +++ b/docs/source/common/trainer.rst @@ -900,7 +900,7 @@ Stop training once this number of epochs is reached # default used by the Trainer trainer = Trainer(max_epochs=1000) -If both ``max_epochs`` and ``max_steps`` aren't specified, ``max_epochs`` will default to ``1000``. +If both ``max_epochs`` and ``max_steps`` aren't specified, ``max_epochs`` will default to ``1000`` and a ``UserWarning`` will be displayed stating the ``max_epochs`` is not set and defaulted to ``1000``. To enable infinite training, set ``max_epochs = -1``. 
min_epochs From 78301495f34d318788957ec1b7c07259765ae8e9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 23 Nov 2021 12:42:22 +0000 Subject: [PATCH 03/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/trainer/trainer.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 62449acf8af0c..b1b8e616bcc46 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -446,7 +446,9 @@ def __init__( # UserWarning for max_epochs if not set - warnings.warn("max_epochs not set, defaulted to 1000 epochs until stop.") if self.max_epochs is None else print(f"max epochs set to {self.max_epochs}") + warnings.warn("max_epochs not set, defaulted to 1000 epochs until stop.") if self.max_epochs is None else print( + f"max epochs set to {self.max_epochs}" + ) fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), From dd2dc499511492893f785305006f2a5b3feaa26a Mon Sep 17 00:00:00 2001 From: rajathbharadwaj Date: Wed, 24 Nov 2021 14:27:36 +0530 Subject: [PATCH 04/32] implemented suggested change --- pytorch_lightning/trainer/trainer.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 62449acf8af0c..a9a5ff981e41e 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -446,7 +446,9 @@ def __init__( # UserWarning for max_epochs if not set - warnings.warn("max_epochs not set, defaulted to 1000 epochs until stop.") if self.max_epochs is None else print(f"max epochs set to {self.max_epochs}") + if self.max_epochs is None and max_steps == -1 and max_time is None: + warnings.warn("`max_epochs` is not set. By default it will run for 1000 epochs.") + fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), From 860d758f5e96a75b3d7fea26645347489c43a43f Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Wed, 24 Nov 2021 14:38:16 +0530 Subject: [PATCH 05/32] Update trainer.py --- pytorch_lightning/trainer/trainer.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index a9a5ff981e41e..62449acf8af0c 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -446,9 +446,7 @@ def __init__( # UserWarning for max_epochs if not set - if self.max_epochs is None and max_steps == -1 and max_time is None: - warnings.warn("`max_epochs` is not set. 
By default it will run for 1000 epochs.") - + warnings.warn("max_epochs not set, defaulted to 1000 epochs until stop.") if self.max_epochs is None else print(f"max epochs set to {self.max_epochs}") fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), From b61213d9e41d9eb77b60ce74cc0f0e3a3543e876 Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Wed, 24 Nov 2021 14:40:39 +0530 Subject: [PATCH 06/32] Update trainer.py implemented suggested change --- pytorch_lightning/trainer/trainer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index b1b8e616bcc46..1ced6996f1a7d 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -446,9 +446,9 @@ def __init__( # UserWarning for max_epochs if not set - warnings.warn("max_epochs not set, defaulted to 1000 epochs until stop.") if self.max_epochs is None else print( - f"max epochs set to {self.max_epochs}" - ) + if self.max_epochs is None and max_steps == -1 and max_time is None: + warnings.warn("`max_epochs` is not set. By default it will run for 1000 epochs.") + fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), From d4d3de3c4932b8e2640fce76498cd964e67a843c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 24 Nov 2021 09:11:55 +0000 Subject: [PATCH 07/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/trainer/trainer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 1ced6996f1a7d..0ee6ac41859ff 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -448,7 +448,6 @@ def __init__( if self.max_epochs is None and max_steps == -1 and max_time is None: warnings.warn("`max_epochs` is not set. By default it will run for 1000 epochs.") - fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), From 09a14e3fb1be934ee11d10333371925e711dec97 Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Wed, 24 Nov 2021 14:45:13 +0530 Subject: [PATCH 08/32] Update trainer.py Updated docs --- docs/source/common/trainer.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/source/common/trainer.rst b/docs/source/common/trainer.rst index 1f3c40a3fab70..f67dfe7f55bd3 100644 --- a/docs/source/common/trainer.rst +++ b/docs/source/common/trainer.rst @@ -900,8 +900,7 @@ Stop training once this number of epochs is reached # default used by the Trainer trainer = Trainer(max_epochs=1000) -If both ``max_epochs`` and ``max_steps`` aren't specified, ``max_epochs`` will default to ``1000`` and a ``UserWarning`` will be displayed stating the ``max_epochs`` is not set and defaulted to ``1000``. -To enable infinite training, set ``max_epochs = -1``. +If both ``max_epochs`` and ``max_steps`` aren't specified, ``max_epochs`` will default to ``1000``. 
min_epochs ^^^^^^^^^^ From 31b4d3f6830070092d477519f40e10c002a189fc Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Thu, 25 Nov 2021 15:33:15 +0530 Subject: [PATCH 09/32] changed to rank_zero_warn --- pytorch_lightning/trainer/trainer.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 0ee6ac41859ff..f3b48cffba163 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -444,11 +444,13 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) - # UserWarning for max_epochs if not set - + #rank_zero_warning of type UserWarning for max_epochs if not set if self.max_epochs is None and max_steps == -1 and max_time is None: - warnings.warn("`max_epochs` is not set. By default it will run for 1000 epochs.") - + rank_zero_warn( + "`max_epochs` is not set. By default it will run for 1000 epochs.", + UserWarning, + ) + fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), max_epochs=( From 7add35da47c3a330bd0d488124d08caa8b96582a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 25 Nov 2021 10:04:35 +0000 Subject: [PATCH 10/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/trainer/trainer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index f3b48cffba163..a39e2da31bd6c 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -444,13 +444,13 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) - #rank_zero_warning of type UserWarning for max_epochs if not set + # rank_zero_warning of type UserWarning for max_epochs if not set if self.max_epochs is None and max_steps == -1 and max_time is None: rank_zero_warn( "`max_epochs` is not set. 
By default it will run for 1000 epochs.", UserWarning, ) - + fit_loop = FitLoop( min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), max_epochs=( From 36d069c916a3e1bc69bfd35c10bd39ce60b810e5 Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Fri, 26 Nov 2021 14:12:46 +0530 Subject: [PATCH 11/32] refactored with utility cleaned trainer with utility to parse max_epochs and added the utility function in loops/utility --- pytorch_lightning/loops/utilities.py | 15 ++++++++++++++- pytorch_lightning/trainer/trainer.py | 20 ++++++-------------- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 017945fc37749..176b36369d642 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -15,7 +15,7 @@ from contextlib import contextmanager from functools import lru_cache from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Tuple - +from pytorch_lightning.utilities import rank_zero_warn import numpy as np import torch from torch.optim import Optimizer @@ -61,6 +61,18 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens +# rank_zero_warning of type UserWarning for max_epochs if not set +def _parse_max_epochs_and_steps(max_steps, max_epochs, max_time, min_steps, min_epochs): + if max_epochs is None and max_steps == -1 and max_time is None: + rank_zero_warn( + "`max_epochs` is not set. By default it will run for 1000 epochs.", + UserWarning, + ) + min_epochs = (1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs) + max_epochs = max_epochs if max_epochs is not None else (1000 if (max_steps == -1 and max_time is None) else -1) + return min_epochs, max_epochs + + def _build_training_step_kwargs( lightning_module: "pl.LightningModule", @@ -170,6 +182,7 @@ def _get_active_optimizers( return [(opt_idx, optimizers[opt_idx])] + def _is_max_limit_reached(current: int, maximum: int = -1) -> bool: """Check if the limit has been reached (if enabled). diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index a39e2da31bd6c..ff65a00c0cb79 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -38,6 +38,7 @@ from pytorch_lightning.loops import PredictionLoop, TrainingBatchLoop, TrainingEpochLoop from pytorch_lightning.loops.dataloader.evaluation_loop import EvaluationLoop from pytorch_lightning.loops.fit_loop import FitLoop +from pytorch_lightning.loops.utilities import _parse_max_epochs_and_steps from pytorch_lightning.plugins import DDPSpawnPlugin, ParallelPlugin, PLUGIN_INPUT, PrecisionPlugin, TrainingTypePlugin from pytorch_lightning.plugins.environments.slurm_environment import SLURMEnvironment from pytorch_lightning.profiler import ( @@ -444,19 +445,10 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) - # rank_zero_warning of type UserWarning for max_epochs if not set - if self.max_epochs is None and max_steps == -1 and max_time is None: - rank_zero_warn( - "`max_epochs` is not set. 
By default it will run for 1000 epochs.", - UserWarning, - ) - - fit_loop = FitLoop( - min_epochs=(1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs), - max_epochs=( - max_epochs if max_epochs is not None else (1000 if (max_steps == -1 and max_time is None) else -1) - ), - ) + # raises rank_zero_warn if max_epochs is not set + min_epochs, max_epochs = _parse_max_epochs_and_steps(self.max_steps, self.max_epochs, max_time, self.min_steps, self.min_epochs) + + fit_loop = FitLoop(min_epochs=min_epochs, max_epochs=max_epochs) training_epoch_loop = TrainingEpochLoop(min_steps, max_steps) training_batch_loop = TrainingBatchLoop() training_validation_loop = EvaluationLoop() @@ -485,7 +477,7 @@ def __init__( # todo: remove in v1.7 self._weights_summary: Optional[str] = None - + # init callbacks # Declare attributes to be set in _callback_connector on_trainer_init self._callback_connector.on_trainer_init( From e64bc63c589404f12203c194fbbfd59c2f580d7b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 26 Nov 2021 08:44:20 +0000 Subject: [PATCH 12/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/loops/utilities.py | 8 ++++---- pytorch_lightning/trainer/trainer.py | 8 +++++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 176b36369d642..c1a0bbbc9a1ca 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -15,13 +15,14 @@ from contextlib import contextmanager from functools import lru_cache from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Tuple -from pytorch_lightning.utilities import rank_zero_warn + import numpy as np import torch from torch.optim import Optimizer import pytorch_lightning as pl from pytorch_lightning.plugins import ParallelPlugin +from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.fetching import AbstractDataFetcher, DataLoaderIterDataFetcher from pytorch_lightning.utilities.memory import recursive_detach @@ -61,6 +62,7 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens + # rank_zero_warning of type UserWarning for max_epochs if not set def _parse_max_epochs_and_steps(max_steps, max_epochs, max_time, min_steps, min_epochs): if max_epochs is None and max_steps == -1 and max_time is None: @@ -68,12 +70,11 @@ def _parse_max_epochs_and_steps(max_steps, max_epochs, max_time, min_steps, min_ "`max_epochs` is not set. By default it will run for 1000 epochs.", UserWarning, ) - min_epochs = (1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs) + min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs max_epochs = max_epochs if max_epochs is not None else (1000 if (max_steps == -1 and max_time is None) else -1) return min_epochs, max_epochs - def _build_training_step_kwargs( lightning_module: "pl.LightningModule", optimizers: Sequence[Optimizer], @@ -182,7 +183,6 @@ def _get_active_optimizers( return [(opt_idx, optimizers[opt_idx])] - def _is_max_limit_reached(current: int, maximum: int = -1) -> bool: """Check if the limit has been reached (if enabled). 
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index ff65a00c0cb79..3baf00e924022 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -446,8 +446,10 @@ def __init__( self.tuner = Tuner(self) # raises rank_zero_warn if max_epochs is not set - min_epochs, max_epochs = _parse_max_epochs_and_steps(self.max_steps, self.max_epochs, max_time, self.min_steps, self.min_epochs) - + min_epochs, max_epochs = _parse_max_epochs_and_steps( + self.max_steps, self.max_epochs, max_time, self.min_steps, self.min_epochs + ) + fit_loop = FitLoop(min_epochs=min_epochs, max_epochs=max_epochs) training_epoch_loop = TrainingEpochLoop(min_steps, max_steps) training_batch_loop = TrainingBatchLoop() @@ -477,7 +479,7 @@ def __init__( # todo: remove in v1.7 self._weights_summary: Optional[str] = None - + # init callbacks # Declare attributes to be set in _callback_connector on_trainer_init self._callback_connector.on_trainer_init( From 983395600746ea8bb70548224b3f412f93be5662 Mon Sep 17 00:00:00 2001 From: thomas chaton Date: Fri, 26 Nov 2021 13:25:44 +0000 Subject: [PATCH 13/32] Update pytorch_lightning/trainer/trainer.py --- pytorch_lightning/trainer/trainer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 3baf00e924022..72056d0dfbd43 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -445,7 +445,6 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) - # raises rank_zero_warn if max_epochs is not set min_epochs, max_epochs = _parse_max_epochs_and_steps( self.max_steps, self.max_epochs, max_time, self.min_steps, self.min_epochs ) From 4421b5a9dcc8fa9da4c28568e69777dda5da4290 Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Fri, 26 Nov 2021 19:19:29 +0530 Subject: [PATCH 14/32] added typing added typing to utility function --- pytorch_lightning/loops/utilities.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index c1a0bbbc9a1ca..00a76cbbc73c8 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -14,15 +14,15 @@ from collections import OrderedDict from contextlib import contextmanager from functools import lru_cache -from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Tuple - +from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Tuple, Union +from datetime import timedelta +from pytorch_lightning.utilities import rank_zero_warn import numpy as np import torch from torch.optim import Optimizer import pytorch_lightning as pl from pytorch_lightning.plugins import ParallelPlugin -from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.fetching import AbstractDataFetcher, DataLoaderIterDataFetcher from pytorch_lightning.utilities.memory import recursive_detach @@ -62,19 +62,19 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens - # rank_zero_warning of type UserWarning for max_epochs if not set -def _parse_max_epochs_and_steps(max_steps, max_epochs, max_time, min_steps, min_epochs): +def _parse_max_epochs_and_steps(max_steps: int, max_epochs: int, max_time: 
Union[str, timedelta, Dict[str, int]], min_steps: int, min_epochs: int) -> Tuple[int, int]: if max_epochs is None and max_steps == -1 and max_time is None: rank_zero_warn( "`max_epochs` is not set. By default it will run for 1000 epochs.", UserWarning, ) - min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs + min_epochs = (1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs) max_epochs = max_epochs if max_epochs is not None else (1000 if (max_steps == -1 and max_time is None) else -1) return min_epochs, max_epochs + def _build_training_step_kwargs( lightning_module: "pl.LightningModule", optimizers: Sequence[Optimizer], @@ -183,6 +183,7 @@ def _get_active_optimizers( return [(opt_idx, optimizers[opt_idx])] + def _is_max_limit_reached(current: int, maximum: int = -1) -> bool: """Check if the limit has been reached (if enabled). From 8b742f5c8f6f2f1b3526fca01796ee70072ae94e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:50:45 +0000 Subject: [PATCH 15/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/loops/utilities.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 00a76cbbc73c8..b0c10d7279767 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -13,16 +13,17 @@ # limitations under the License. from collections import OrderedDict from contextlib import contextmanager +from datetime import timedelta from functools import lru_cache from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Tuple, Union -from datetime import timedelta -from pytorch_lightning.utilities import rank_zero_warn + import numpy as np import torch from torch.optim import Optimizer import pytorch_lightning as pl from pytorch_lightning.plugins import ParallelPlugin +from pytorch_lightning.utilities import rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException from pytorch_lightning.utilities.fetching import AbstractDataFetcher, DataLoaderIterDataFetcher from pytorch_lightning.utilities.memory import recursive_detach @@ -62,19 +63,21 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens + # rank_zero_warning of type UserWarning for max_epochs if not set -def _parse_max_epochs_and_steps(max_steps: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]], min_steps: int, min_epochs: int) -> Tuple[int, int]: +def _parse_max_epochs_and_steps( + max_steps: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]], min_steps: int, min_epochs: int +) -> Tuple[int, int]: if max_epochs is None and max_steps == -1 and max_time is None: rank_zero_warn( "`max_epochs` is not set. 
By default it will run for 1000 epochs.", UserWarning, ) - min_epochs = (1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs) + min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs max_epochs = max_epochs if max_epochs is not None else (1000 if (max_steps == -1 and max_time is None) else -1) return min_epochs, max_epochs - def _build_training_step_kwargs( lightning_module: "pl.LightningModule", optimizers: Sequence[Optimizer], @@ -183,7 +186,6 @@ def _get_active_optimizers( return [(opt_idx, optimizers[opt_idx])] - def _is_max_limit_reached(current: int, maximum: int = -1) -> bool: """Check if the limit has been reached (if enabled). From af9ae928c75772b95da8c26615a3dac4375442e4 Mon Sep 17 00:00:00 2001 From: rajathbharadwaj Date: Wed, 24 Nov 2021 14:27:36 +0530 Subject: [PATCH 16/32] implemented suggested change --- pytorch_lightning/loops/utilities.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index b0c10d7279767..46abe1d631911 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -63,14 +63,12 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens - -# rank_zero_warning of type UserWarning for max_epochs if not set -def _parse_max_epochs_and_steps( - max_steps: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]], min_steps: int, min_epochs: int -) -> Tuple[int, int]: +def _parse_loop_limits( + min_steps: int, max_steps: int, min_epochs: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]] + ) -> Tuple[int, int]: if max_epochs is None and max_steps == -1 and max_time is None: rank_zero_warn( - "`max_epochs` is not set. By default it will run for 1000 epochs.", + "`max_epochs` was not set. Setting it to 1000 epochs.", UserWarning, ) min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs From 27e8aaee2d3560fd843cebfe351ca7d0fc6fcca8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 13:24:00 +0000 Subject: [PATCH 17/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/loops/utilities.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 46abe1d631911..4da22613e1a89 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -63,9 +63,10 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens + def _parse_loop_limits( min_steps: int, max_steps: int, min_epochs: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]] - ) -> Tuple[int, int]: +) -> Tuple[int, int]: if max_epochs is None and max_steps == -1 and max_time is None: rank_zero_warn( "`max_epochs` was not set. Setting it to 1000 epochs.", From d184226ca670164afa83dd2790da39c544fcf3e0 Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Mon, 29 Nov 2021 18:56:51 +0530 Subject: [PATCH 18/32] Updated with suggested changes Waiting for confirmation on if condition statements. 
--- pytorch_lightning/loops/utilities.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 4da22613e1a89..e9259082c871b 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -63,7 +63,6 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens - def _parse_loop_limits( min_steps: int, max_steps: int, min_epochs: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]] ) -> Tuple[int, int]: From 2d24f2e5a1da25d4cc2067098f880ec8b7f12aa5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 13:28:06 +0000 Subject: [PATCH 19/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/loops/utilities.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index e9259082c871b..4da22613e1a89 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -63,6 +63,7 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in hiddens = recursive_detach(training_step_output["hiddens"]) return hiddens + def _parse_loop_limits( min_steps: int, max_steps: int, min_epochs: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]] ) -> Tuple[int, int]: From 77c8fac0a7405c8dfda70602403f0acf64a15e4c Mon Sep 17 00:00:00 2001 From: Rajath Bharadwaj Date: Mon, 29 Nov 2021 18:59:53 +0530 Subject: [PATCH 20/32] Update trainer.rst --- docs/source/common/trainer.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/common/trainer.rst b/docs/source/common/trainer.rst index f67dfe7f55bd3..7e336c758f621 100644 --- a/docs/source/common/trainer.rst +++ b/docs/source/common/trainer.rst @@ -901,6 +901,7 @@ Stop training once this number of epochs is reached trainer = Trainer(max_epochs=1000) If both ``max_epochs`` and ``max_steps`` aren't specified, ``max_epochs`` will default to ``1000``. +To enable infinite training, set ``max_epochs = -1``. min_epochs ^^^^^^^^^^ From 68dbe3d81f091ffb7de543921502621ca9dc4cfc Mon Sep 17 00:00:00 2001 From: tchaton Date: Tue, 30 Nov 2021 14:00:33 +0100 Subject: [PATCH 21/32] update --- pytorch_lightning/loops/utilities.py | 23 +++++++++++++++-------- pytorch_lightning/trainer/trainer.py | 6 +++--- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 4da22613e1a89..386553ea95cc1 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -65,15 +65,22 @@ def _extract_hiddens(training_step_output: STEP_OUTPUT, truncated_bptt_steps: in def _parse_loop_limits( - min_steps: int, max_steps: int, min_epochs: int, max_epochs: int, max_time: Union[str, timedelta, Dict[str, int]] -) -> Tuple[int, int]: - if max_epochs is None and max_steps == -1 and max_time is None: - rank_zero_warn( - "`max_epochs` was not set. 
Setting it to 1000 epochs.", - UserWarning, - ) + min_steps: Optional[int], + max_steps: int, + min_epochs: Optional[int], + max_epochs: int, + max_time: Optional[Union[str, timedelta, Dict[str, int]]], +) -> Tuple[Optional[int], int]: + if max_epochs is None: + if max_steps == -1 and max_time is None: + rank_zero_warn( + "`max_epochs` was not set. Setting it to 1000 epochs.", + UserWarning, + ) + max_epochs = 1000 + else: + max_epochs = -1 min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs - max_epochs = max_epochs if max_epochs is not None else (1000 if (max_steps == -1 and max_time is None) else -1) return min_epochs, max_epochs diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 72056d0dfbd43..a353cb000529f 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -38,7 +38,7 @@ from pytorch_lightning.loops import PredictionLoop, TrainingBatchLoop, TrainingEpochLoop from pytorch_lightning.loops.dataloader.evaluation_loop import EvaluationLoop from pytorch_lightning.loops.fit_loop import FitLoop -from pytorch_lightning.loops.utilities import _parse_max_epochs_and_steps +from pytorch_lightning.loops.utilities import _parse_loop_limits from pytorch_lightning.plugins import DDPSpawnPlugin, ParallelPlugin, PLUGIN_INPUT, PrecisionPlugin, TrainingTypePlugin from pytorch_lightning.plugins.environments.slurm_environment import SLURMEnvironment from pytorch_lightning.profiler import ( @@ -445,8 +445,8 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) - min_epochs, max_epochs = _parse_max_epochs_and_steps( - self.max_steps, self.max_epochs, max_time, self.min_steps, self.min_epochs + min_epochs, max_epochs = _parse_loop_limits( + self.min_steps, self.max_steps, self.min_epochs, self.max_epochs, max_time ) fit_loop = FitLoop(min_epochs=min_epochs, max_epochs=max_epochs) From 3ffa76329545eb999bcd6acc9b5a923298d07734 Mon Sep 17 00:00:00 2001 From: tchaton Date: Tue, 30 Nov 2021 14:02:02 +0100 Subject: [PATCH 22/32] add default_max_epochs --- pytorch_lightning/loops/utilities.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 386553ea95cc1..fa7c0333a4680 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -70,6 +70,7 @@ def _parse_loop_limits( min_epochs: Optional[int], max_epochs: int, max_time: Optional[Union[str, timedelta, Dict[str, int]]], + default_max_epochs: int = 1000, ) -> Tuple[Optional[int], int]: if max_epochs is None: if max_steps == -1 and max_time is None: @@ -77,7 +78,7 @@ def _parse_loop_limits( "`max_epochs` was not set. 
Setting it to 1000 epochs.", UserWarning, ) - max_epochs = 1000 + max_epochs = default_max_epochs else: max_epochs = -1 min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs From d3f53f64970d1e4c25db1f9063d26f5df6342d29 Mon Sep 17 00:00:00 2001 From: tchaton Date: Tue, 30 Nov 2021 14:05:07 +0100 Subject: [PATCH 23/32] add docstring --- pytorch_lightning/loops/utilities.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index fa7c0333a4680..0c07264a21e0b 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -72,6 +72,20 @@ def _parse_loop_limits( max_time: Optional[Union[str, timedelta, Dict[str, int]]], default_max_epochs: int = 1000, ) -> Tuple[Optional[int], int]: + """This utility compute the number minimum and maximum number of epochs " based on minimum and maximum number + of steps and maximum time. + + Args: + min_steps: Minimum number of steps + max_steps: Maximum number of steps + min_epochs: Minimum number of epochs + max_epochs: Maximum number of epochs + max_time: Maximum number of time for the training. + default_max_epochs: Default value for the number of max_epochs. + + Returns: + A list of tuples (opt_idx, optimizer) of currently active optimizers. + """ if max_epochs is None: if max_steps == -1 and max_time is None: rank_zero_warn( From 8a0ff60325680872238fe06c125d011b71423327 Mon Sep 17 00:00:00 2001 From: Rohit Gupta Date: Sat, 4 Dec 2021 17:50:24 +0530 Subject: [PATCH 24/32] Update pytorch_lightning/loops/utilities.py --- pytorch_lightning/loops/utilities.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 0c07264a21e0b..801f6fb03b4b2 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -70,18 +70,16 @@ def _parse_loop_limits( min_epochs: Optional[int], max_epochs: int, max_time: Optional[Union[str, timedelta, Dict[str, int]]], - default_max_epochs: int = 1000, ) -> Tuple[Optional[int], int]: - """This utility compute the number minimum and maximum number of epochs " based on minimum and maximum number + """This utility computes the number minimum and maximum number of epochs " based on the minimum and maximum number of steps and maximum time. Args: - min_steps: Minimum number of steps - max_steps: Maximum number of steps - min_epochs: Minimum number of epochs - max_epochs: Maximum number of epochs - max_time: Maximum number of time for the training. - default_max_epochs: Default value for the number of max_epochs. + min_steps: Minimum number of steps. + max_steps: Maximum number of steps. + min_epochs: Minimum number of epochs. + max_epochs: Maximum number of epochs. + max_time: Maximum time for the training. Returns: A list of tuples (opt_idx, optimizer) of currently active optimizers. @@ -92,7 +90,7 @@ def _parse_loop_limits( "`max_epochs` was not set. 
Setting it to 1000 epochs.", UserWarning, ) - max_epochs = default_max_epochs + max_epochs = 1000 else: max_epochs = -1 min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs From af308bbc7d1d533a496967f98369c3e6795c0384 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 4 Dec 2021 12:22:33 +0000 Subject: [PATCH 25/32] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- pytorch_lightning/loops/utilities.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 1a5e043e15b1a..212919d240d3b 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -70,8 +70,8 @@ def _parse_loop_limits( max_epochs: int, max_time: Optional[Union[str, timedelta, Dict[str, int]]], ) -> Tuple[Optional[int], int]: - """This utility computes the number minimum and maximum number of epochs " based on the minimum and maximum number - of steps and maximum time. + """This utility computes the number minimum and maximum number of epochs " based on the minimum and maximum + number of steps and maximum time. Args: min_steps: Minimum number of steps. From 90a90a3f0ad40e0112b0070a5ee41afbf2cf28ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 6 Dec 2021 08:28:11 +0100 Subject: [PATCH 26/32] fix bug with access to self.max_epochs etc. --- pytorch_lightning/trainer/trainer.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index d3bdf7c6cbc5e..0b60c77ab9cb2 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -456,12 +456,9 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) - min_epochs, max_epochs = _parse_loop_limits( - self.min_steps, self.max_steps, self.min_epochs, self.max_epochs, max_time - ) - + min_epochs, max_epochs = _parse_loop_limits(min_steps, max_steps, min_epochs, max_epochs, max_time) fit_loop = FitLoop(min_epochs=min_epochs, max_epochs=max_epochs) - training_epoch_loop = TrainingEpochLoop(min_steps, max_steps) + training_epoch_loop = TrainingEpochLoop(min_steps=min_steps, max_steps=max_steps) training_batch_loop = TrainingBatchLoop() training_validation_loop = EvaluationLoop() training_epoch_loop.connect(batch_loop=training_batch_loop, val_loop=training_validation_loop) From 5e829697f0a104bcff43240ecfa0b77baf84a6f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 6 Dec 2021 08:28:22 +0100 Subject: [PATCH 27/32] format warning --- pytorch_lightning/loops/utilities.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 212919d240d3b..9458f1086e382 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -29,6 +29,7 @@ from pytorch_lightning.utilities.memory import recursive_detach from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature from pytorch_lightning.utilities.types import STEP_OUTPUT +from pytorch_lightning.utilities.warnings import PossibleUserWarning def check_finite_loss(loss: Optional[torch.Tensor]) -> None: @@ -85,10 +86,7 @@ def _parse_loop_limits( """ if max_epochs is None: if max_steps == -1 and max_time 
is None: - rank_zero_warn( - "`max_epochs` was not set. Setting it to 1000 epochs.", - UserWarning, - ) + rank_zero_warn("`max_epochs` was not set. Setting it to 1000 epochs.", category=PossibleUserWarning) max_epochs = 1000 else: max_epochs = -1 From a7d472a9f5acc2cd009fe64f3eb2c18e36e76a48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 6 Dec 2021 08:39:06 +0100 Subject: [PATCH 28/32] return all settings and update docstring --- pytorch_lightning/loops/utilities.py | 10 +++++----- pytorch_lightning/trainer/trainer.py | 4 +++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index 9458f1086e382..cc217c8cabffe 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -70,9 +70,9 @@ def _parse_loop_limits( min_epochs: Optional[int], max_epochs: int, max_time: Optional[Union[str, timedelta, Dict[str, int]]], -) -> Tuple[Optional[int], int]: - """This utility computes the number minimum and maximum number of epochs " based on the minimum and maximum - number of steps and maximum time. +) -> Tuple[Optional[int], int, Optional[int], int, Optional[Union[str, timedelta, Dict[str, int]]]]: + """This utility computes the default values for the minimum and maximum number of steps and epochs given the + values the user has selected. Args: min_steps: Minimum number of steps. @@ -82,7 +82,7 @@ def _parse_loop_limits( max_time: Maximum time for the training. Returns: - A list of tuples (opt_idx, optimizer) of currently active optimizers. + The parsed limits, with default values being set for the ones that the user did not specify. """ if max_epochs is None: if max_steps == -1 and max_time is None: @@ -91,7 +91,7 @@ def _parse_loop_limits( else: max_epochs = -1 min_epochs = 1 if (min_epochs is None and min_steps is None and max_time is None) else min_epochs - return min_epochs, max_epochs + return min_steps, max_steps, min_epochs, max_epochs, max_time def _build_training_step_kwargs( diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 0b60c77ab9cb2..40368737e21ad 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -456,7 +456,9 @@ def __init__( self.signal_connector = SignalConnector(self) self.tuner = Tuner(self) - min_epochs, max_epochs = _parse_loop_limits(min_steps, max_steps, min_epochs, max_epochs, max_time) + min_steps, max_steps, min_epochs, max_epochs, max_time = _parse_loop_limits( + min_steps, max_steps, min_epochs, max_epochs, max_time + ) fit_loop = FitLoop(min_epochs=min_epochs, max_epochs=max_epochs) training_epoch_loop = TrainingEpochLoop(min_steps=min_steps, max_steps=max_steps) training_batch_loop = TrainingBatchLoop() From 5b5692bcd8ca455300cf62e09cdf6cbe21b27adf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 6 Dec 2021 08:40:42 +0100 Subject: [PATCH 29/32] update changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f49707b296f1b..dc11391e981c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -39,6 +39,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- Added `LightningCLI.configure_optimizers` to override the `configure_optimizers` return value ([#10860](https://github.com/PyTorchLightning/pytorch-lightning/issues/10860)) + +- Added a warning that shows when `max_epochs` in the `Trainer` is not set ([#10700](https://github.com/PyTorchLightning/pytorch-lightning/issues/10700)) + + ### Changed - Raised exception in `init_dist_connection()` when torch distibuted is not available ([#10418](https://github.com/PyTorchLightning/pytorch-lightning/issues/10418)) From 4606cdda2660cbb9648caea1c5498b19641aac6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 6 Dec 2021 08:43:42 +0100 Subject: [PATCH 30/32] improve warning --- pytorch_lightning/loops/utilities.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/loops/utilities.py b/pytorch_lightning/loops/utilities.py index cc217c8cabffe..dfbccfadcc859 100644 --- a/pytorch_lightning/loops/utilities.py +++ b/pytorch_lightning/loops/utilities.py @@ -86,7 +86,11 @@ def _parse_loop_limits( """ if max_epochs is None: if max_steps == -1 and max_time is None: - rank_zero_warn("`max_epochs` was not set. Setting it to 1000 epochs.", category=PossibleUserWarning) + rank_zero_warn( + "`max_epochs` was not set. Setting it to 1000 epochs. To train without an epoch limit," + " set `max_epochs=-1`.", + category=PossibleUserWarning, + ) max_epochs = 1000 else: max_epochs = -1 From e60300cc4dbd8aaae538f93a5651a2249db9b8ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 6 Dec 2021 08:48:31 +0100 Subject: [PATCH 31/32] add test --- tests/trainer/flags/test_min_max_epochs.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/trainer/flags/test_min_max_epochs.py b/tests/trainer/flags/test_min_max_epochs.py index 989dde6e79360..e410e806ea2db 100644 --- a/tests/trainer/flags/test_min_max_epochs.py +++ b/tests/trainer/flags/test_min_max_epochs.py @@ -1,6 +1,7 @@ import pytest from pytorch_lightning import Trainer +from pytorch_lightning.utilities.warnings import PossibleUserWarning from tests.helpers import BoringModel @@ -33,3 +34,10 @@ def test_min_max_steps_epochs(tmpdir, min_epochs, max_epochs, min_steps, max_ste # check training stopped at max_epochs or max_steps if trainer.max_steps and not trainer.max_epochs: assert trainer.global_step == trainer.max_steps + + +def test_max_epochs_not_set_warning(): + """Test that a warning is emitted when `max_epochs` was not set by the user.""" + with pytest.warns(PossibleUserWarning, match="`max_epochs` was not set. Setting it to 1000 epochs."): + trainer = Trainer(max_epochs=None) + assert trainer.max_epochs == 1000 From 6ca175be4a0448bbd3562dc9b3529d7dd9bde90b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adrian=20W=C3=A4lchli?= Date: Mon, 6 Dec 2021 08:59:49 +0100 Subject: [PATCH 32/32] prevent warning in test --- tests/accelerators/test_ipu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/accelerators/test_ipu.py b/tests/accelerators/test_ipu.py index bb9ef53e968b2..87154efbd478a 100644 --- a/tests/accelerators/test_ipu.py +++ b/tests/accelerators/test_ipu.py @@ -120,7 +120,7 @@ def test_warning_if_ipus_not_used(tmpdir): @RunIf(ipu=True) def test_no_warning_plugin(tmpdir): with pytest.warns(None) as record: - Trainer(default_root_dir=tmpdir, strategy=IPUPlugin(training_opts=poptorch.Options())) + Trainer(default_root_dir=tmpdir, max_epochs=1, strategy=IPUPlugin(training_opts=poptorch.Options())) assert len(record) == 0
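
A minimal sketch of the behavior this series converges on (patches 26-32), for reference. The warning path is the one `_parse_loop_limits` implements: `max_epochs=None` together with `max_steps=-1` and `max_time=None` raises a `PossibleUserWarning` and defaults to 1000 epochs. The first block mirrors the test added in patch 31; the `max_steps`-only case is inferred from the utility's else-branch rather than from an added test::

    import pytest

    from pytorch_lightning import Trainer
    from pytorch_lightning.utilities.warnings import PossibleUserWarning

    # No `max_epochs`, no `max_steps`, no `max_time`: warn and default to 1000 epochs.
    with pytest.warns(PossibleUserWarning, match="`max_epochs` was not set. Setting it to 1000 epochs."):
        trainer = Trainer(max_epochs=None)
    assert trainer.max_epochs == 1000

    # An explicit limit silences the warning: infinite training via `max_epochs=-1`,
    assert Trainer(max_epochs=-1).max_epochs == -1
    # or a step budget, which leaves `max_epochs` at -1 per `_parse_loop_limits`.
    assert Trainer(max_steps=100).max_epochs == -1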