Fix length of IterableDatasetShard and add test #13792

Merged: 2 commits, Sep 29, 2021
4 changes: 2 additions & 2 deletions src/transformers/trainer_pt_utils.py
@@ -775,9 +775,9 @@ def __iter__(self):
     def __len__(self):
         # Will raise an error if the underlying dataset is not sized.
         if self.drop_last:
-            return len(self.dataset) // self.num_processes
+            return (len(self.dataset) // (self.batch_size * self.num_processes)) * self.batch_size
         else:
-            return math.ceil(len(self.dataset) / self.num_processes)
+            return math.ceil(len(self.dataset) / (self.batch_size * self.num_processes)) * self.batch_size
 
 
 # In order to keep `trainer.py` compact and easy to understand, place any secondary PT Trainer
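For context: each process consumes whole batches of `batch_size` elements, so the length reported per process must be a multiple of `batch_size`. The old formula, `len(self.dataset) // self.num_processes`, would report 50 for a dataset of 100 examples with 2 processes and `batch_size=4`, a length no shard can actually yield. A minimal sanity check of the corrected formula (illustrative, not part of the diff):

import math

def shard_len(dataset_len, batch_size, num_processes, drop_last):
    # Mirrors the corrected IterableDatasetShard.__len__ above.
    if drop_last:
        return (dataset_len // (batch_size * num_processes)) * batch_size
    return math.ceil(dataset_len / (batch_size * num_processes)) * batch_size

# 100 examples over 2 processes with batch_size=4:
assert shard_len(100, 4, 2, drop_last=True) == 48   # iteration stops at index 96 = (100 // 8) * 8
assert shard_len(100, 4, 2, drop_last=False) == 52  # rounds up: math.ceil(100 / 8) * 4

These are exactly the shard lengths the new test below checks.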
28 changes: 28 additions & 0 deletions tests/test_trainer_utils.py
@@ -355,6 +355,34 @@ def test_iterable_dataset_shard(self):
         self.check_iterable_dataset_shard(dataset, 4, drop_last=True, num_processes=3, epoch=42)
         self.check_iterable_dataset_shard(dataset, 4, drop_last=False, num_processes=3, epoch=42)
 
+    def test_iterable_dataset_shard_with_length(self):
+        sampler_shards = [
+            IterableDatasetShard(list(range(100)), batch_size=4, drop_last=True, num_processes=2, process_index=i)
+            for i in range(2)
+        ]
+
+        # Build expected shards: each process will have batches of size 4 until there are not enough elements
+        # left to form two full batches (so we stop at 96 = (100 // (4 * 2)) * 4 * 2).
+        expected_shards = [[], []]
+        current_shard = 0
+        for i in range(0, 96, 4):
+            expected_shards[current_shard].extend(list(range(i, i + 4)))
+            current_shard = 1 - current_shard
+
+        self.assertListEqual([list(shard) for shard in sampler_shards], expected_shards)
+        self.assertListEqual([len(shard) for shard in sampler_shards], [len(shard) for shard in expected_shards])
+
+        sampler_shards = [
+            IterableDatasetShard(list(range(100)), batch_size=4, drop_last=False, num_processes=2, process_index=i)
+            for i in range(2)
+        ]
+        # When drop_last=False, we get the two last full batches by looping back to the beginning.
+        expected_shards[0].extend(list(range(96, 100)))
+        expected_shards[1].extend(list(range(0, 4)))
+
+        self.assertListEqual([list(shard) for shard in sampler_shards], expected_shards)
+        self.assertListEqual([len(shard) for shard in sampler_shards], [len(shard) for shard in expected_shards])
+
     def check_shard_sampler(self, dataset, batch_size, drop_last, num_processes=2):
         shards = [
             ShardSampler(
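As a side note on the drop_last=False branch tested above: when the dataset runs out mid-batch, IterableDatasetShard loops back to the beginning so that every process still receives a full final batch. A small sketch of the wrap-around the test expects (illustrative only, not code from the PR):

# Illustrative only: reproduce the wrap-around for the 100-example case.
dataset = list(range(100))
batch_size, num_processes = 4, 2
span = batch_size * num_processes  # one "global" batch covers 8 consecutive indices

start = (len(dataset) // span) * span  # 96, where the last (incomplete) global batch begins
last_global_batch = [dataset[i % len(dataset)] for i in range(start, start + span)]
print(last_global_batch)  # [96, 97, 98, 99, 0, 1, 2, 3]
# Process 0 receives [96, 97, 98, 99] and process 1 receives [0, 1, 2, 3],
# matching the extensions of expected_shards in the test above.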
1 change: 1 addition & 0 deletions utils/tests_fetcher.py
@@ -281,6 +281,7 @@ def create_reverse_dependency_map():
"test_trainer_distributed.py",
"test_trainer_tpu.py",
],
"train_pt_utils.py": "test_trainer_utils.py",
"utils/versions.py": "test_versions_utils.py",
}

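The new entry maps trainer_pt_utils.py to its test file so the test fetcher schedules test_trainer_utils.py whenever that module changes. A hypothetical sketch of how such a reverse-dependency map can be consulted (the helper below is illustrative, not the repository's actual implementation; values may be a single test file or a list):

def tests_to_run(modified_files, reverse_map):
    # Hypothetical helper: collect the tests mapped to each modified file.
    tests = set()
    for name in modified_files:
        mapped = reverse_map.get(name, [])
        if isinstance(mapped, str):
            mapped = [mapped]
        tests.update(mapped)
    return sorted(tests)

reverse_map = {"trainer_pt_utils.py": "test_trainer_utils.py"}
print(tests_to_run(["trainer_pt_utils.py"], reverse_map))  # ['test_trainer_utils.py']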