Skip to content

Commit

Permalink
fix
Browse files Browse the repository at this point in the history
  • Loading branch information
rohitgr7 committed Jan 14, 2022
1 parent 5fe24a0 commit f7200d9
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 12 deletions.
1 change: 0 additions & 1 deletion CHANGELOG.md
Expand Up @@ -75,7 +75,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Added a warning when using `DistributedSampler` during evaluation ([#11479](https://github.com/PyTorchLightning/pytorch-lightning/pull/11479))



### Changed

- Raised exception in `init_dist_connection()` when torch distributed is not available ([#10418](https://github.com/PyTorchLightning/pytorch-lightning/issues/10418))
Expand Down
4 changes: 2 additions & 2 deletions docs/source/common/test_set.rst
Expand Up @@ -146,10 +146,10 @@ Apart from this ``.validate`` has same API as ``.test``, but would rely respecti

.. automethod:: pytorch_lightning.trainer.Trainer.validate
:noindex:

.. warning::

It is recommended to test on single device since Distributed Training such as DDP internally
It is recommended to validate on single device since Distributed Training such as DDP internally
uses :class:`~torch.utils.data.distributed.DistributedSampler` which replicates some samples to
make sure all devices have same batch size in case of uneven inputs. This is helpful to make sure
benchmarking for research papers is done the right way.
6 changes: 4 additions & 2 deletions pytorch_lightning/trainer/connectors/data_connector.py
Expand Up @@ -396,10 +396,12 @@ def _resolve_sampler(self, dataloader: DataLoader, shuffle: bool, mode: Optional
trainer_fn = self.trainer.state.fn
if isinstance(sampler, DistributedSampler) and trainer_fn in (TrainerFn.VALIDATING, TrainerFn.TESTING):
rank_zero_warn(
'Using `DistributedSampler` with the dataloaders. It is recommended to'
' use single device strategy with evaluation.'
"Using `DistributedSampler` with the dataloaders. It is recommended to"
" use single device strategy with evaluation."
)

return sampler

return dataloader.sampler

@staticmethod
Expand Down
12 changes: 5 additions & 7 deletions tests/trainer/connectors/test_data_connector.py
Expand Up @@ -14,12 +14,11 @@
from unittest.mock import Mock

import pytest
from pytorch_lightning.accelerators import accelerator
from pytorch_lightning.trainer.states import TrainerFn
from torch.utils.data import DataLoader

from pytorch_lightning import Trainer, strategies
from pytorch_lightning import Trainer
from pytorch_lightning.trainer.connectors.data_connector import _DataLoaderSource
from pytorch_lightning.trainer.states import TrainerFn
from tests.helpers import BoringDataModule, BoringModel


Expand Down Expand Up @@ -74,16 +73,15 @@ def test_eval_distributed_sampler_warning(tmpdir):
"""Test that a warning is raised when `DistributedSampler` is used with evaluation."""

model = BoringModel()
trainer = Trainer(strategy='ddp', devices=2, accelerator='cpu')
trainer = Trainer(strategy="ddp", devices=2, accelerator="cpu")
trainer._data_connector.attach_data(model)

# validation
with pytest.warns(UserWarning, match = 'It is recommended to use single device strategy'):
with pytest.warns(UserWarning, match="It is recommended to use single device strategy"):
trainer.state.fn = TrainerFn.VALIDATING
trainer.reset_val_dataloader(model)

# testing
with pytest.warns(UserWarning, match = 'It is recommended to use single device strategy'):
with pytest.warns(UserWarning, match="It is recommended to use single device strategy"):
trainer.state.fn = TrainerFn.TESTING
trainer.reset_test_dataloader(model)

0 comments on commit f7200d9

Please sign in to comment.