Convert generator in Sampler back to lazy construction #63646

Closed · wants to merge 8 commits
test/test_dataloader.py (13 additions & 0 deletions)

@@ -1495,6 +1495,19 @@ def test_sampler_reproducibility(self):
         ):
             self.assertEqual(list(fn()), list(fn()))

+        for sampler in (
+            RandomSampler(self.dataset, num_samples=5, replacement=True),
+            RandomSampler(self.dataset, replacement=False),
+            WeightedRandomSampler(weights, num_samples=5, replacement=True),
+            WeightedRandomSampler(weights, num_samples=5, replacement=False),
+            SubsetRandomSampler(range(10)),
+        ):
+            torch.manual_seed(0)
+            l1 = list(sampler) + list(sampler)
+
+            torch.manual_seed(0)
+            l2 = list(sampler) + list(sampler)
+            self.assertEqual(l1, l2)
+
     def _test_sampler(self, **kwargs):
         indices = range(2, 12)  # using a regular iterable
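For context, the property this new test pins down, written out as a standalone sketch (assuming the patched sampler, where a fresh internal generator is re-seeded from the global RNG on every `__iter__`; `range(10)` is just a stand-in dataset):

import torch
from torch.utils.data import RandomSampler

sampler = RandomSampler(range(10))

torch.manual_seed(0)
l1 = list(sampler) + list(sampler)   # each epoch re-seeds the internal
                                     # generator from the global RNG, so the
                                     # pair depends only on the global seed

torch.manual_seed(0)
l2 = list(sampler) + list(sampler)
assert l1 == l2                      # this is exactly what breaks when
                                     # __iter__ caches the generator on self
                                     # and never re-seeds it across runs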
torch/utils/data/sampler.py (10 additions & 5 deletions)

@@ -89,6 +89,7 @@ def __init__(self, data_source: Sized, replacement: bool = False,
         self.replacement = replacement
         self._num_samples = num_samples
         self.generator = generator
+        self._gen: Optional[torch.Generator] = None

         if not isinstance(self.replacement, bool):
             raise TypeError("replacement should be a boolean value, but got "
@@ -112,15 +113,19 @@ def num_samples(self) -> int:
     def __iter__(self) -> Iterator[int]:
         n = len(self.data_source)
         if self.generator is None:
-            self.generator = torch.Generator()
-            self.generator.manual_seed(int(torch.empty((), dtype=torch.int64).random_().item()))
+            if self._gen is None:
+                self._gen = torch.Generator()
+                self._gen.manual_seed(int(torch.empty((), dtype=torch.int64).random_().item()))
+        else:
+            self._gen = self.generator

         if self.replacement:
             for _ in range(self.num_samples // 32):
-                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self.generator).tolist()
-            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self.generator).tolist()
+                yield from torch.randint(high=n, size=(32,), dtype=torch.int64, generator=self._gen).tolist()
+            yield from torch.randint(high=n, size=(self.num_samples % 32,), dtype=torch.int64, generator=self._gen).tolist()
         else:
-            yield from torch.randperm(n, generator=self.generator).tolist()
+            yield from torch.randperm(n, generator=self._gen).tolist()
+        self._gen = None
Contributor commented on `self._gen = None`:

This will badly impact all currently running iterators; consider creating two individual iterators in the test and advancing them in random order.
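A minimal sketch of that failure mode, assuming the patch as written above (shared instance state `self._gen`, cleared whenever any iterator finishes):

import torch
from torch.utils.data import RandomSampler

sampler = RandomSampler(range(100), replacement=True, num_samples=64)

it_a = iter(sampler)
it_b = iter(sampler)
next(it_b)         # it_b seeds self._gen and draws its first chunk of 32

list(it_a)         # it_a piggybacks on the same self._gen, then runs
                   # `self._gen = None` on exhaustion
rest = list(it_b)  # it_b's next torch.randint call now sees
                   # generator=self._gen == None and silently falls back
                   # to the global RNG mid-stream

Beyond the fallback, `it_a` and `it_b` also share one RNG stream while both are live, so their samples are correlated rather than independent.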

Contributor (author) replied:

Hmm, you are right. So we have to create an iterator-local generator. But then we are back to the problem that we can no longer serialize the state of that iterator-local generator: when resuming from a snapshot, we cannot fast-forward the generator's state, and instead have to iterate it forward a certain number of steps.
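A sketch of the trade-off being described, contrasting an externally held generator with an iterator-local one (`resume_by_replay` is a hypothetical helper, not a torch API):

import torch
from torch.utils.data import RandomSampler

# With an externally held generator, its state is reachable and can be
# serialized exactly with get_state()/set_state().
g = torch.Generator()
g.manual_seed(0)
state = g.get_state()              # snapshot before the epoch
sampler = RandomSampler(range(100), generator=g)
epoch1 = list(sampler)

g.set_state(state)                 # restore: the next epoch replays epoch1
assert list(sampler) == epoch1

# With an iterator-local generator there is no state to serialize from the
# outside; resuming mid-epoch means replaying already-consumed indices.
def resume_by_replay(sampler, num_consumed):
    # hypothetical helper: O(num_consumed) fast-forward by iteration,
    # assuming the surrounding RNG state was first restored to its value
    # at the start of the epoch
    it = iter(sampler)
    for _ in range(num_consumed):
        next(it)
    return it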


     def __len__(self) -> int:
         return self.num_samples