Remove TPU Availability check from parse devices #12326

Merged
merged 6 commits on Mar 29, 2022
6 changes: 1 addition & 5 deletions pytorch_lightning/utilities/device_parser.py
@@ -17,7 +17,6 @@
 
 from pytorch_lightning.plugins.environments import TorchElasticEnvironment
 from pytorch_lightning.tuner.auto_gpu_select import pick_multiple_gpus
-from pytorch_lightning.utilities import _TPU_AVAILABLE
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.types import _DEVICE
 
@@ -122,7 +121,7 @@ def parse_tpu_cores(tpu_cores: Optional[Union[int, str, List[int]]]) -> Optional
 
     Raises:
         MisconfigurationException:
-            If TPU cores aren't 1 or 8 cores, or no TPU devices are found
+            If TPU cores aren't 1, 8 or [<1-8>]
     """
     _check_data_type(tpu_cores)
 
@@ -132,9 +131,6 @@ def parse_tpu_cores(tpu_cores: Optional[Union[int, str, List[int]]]) -> Optional
     if not _tpu_cores_valid(tpu_cores):
         raise MisconfigurationException("`tpu_cores` can only be 1, 8 or [<1-8>]")
 
-    if tpu_cores is not None and not _TPU_AVAILABLE:
-        raise MisconfigurationException("No TPU devices were found.")
-
     return tpu_cores
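
For context, `parse_tpu_cores` is now pure value validation; checking for actual hardware is left to `TPUAccelerator.is_available`. A minimal sketch of the post-change behavior, using only names visible in this diff (running it assumes an install that includes this patch):

    from pytorch_lightning.utilities.device_parser import parse_tpu_cores
    from pytorch_lightning.utilities.exceptions import MisconfigurationException

    # No TPU needs to be present for any of these calls to succeed now.
    assert parse_tpu_cores(8) == 8      # all eight cores
    assert parse_tpu_cores(1) == 1      # a single core
    assert parse_tpu_cores([5]) == [5]  # one specific core index

    try:
        parse_tpu_cores(4)  # anything else fails _tpu_cores_valid
    except MisconfigurationException as err:
        print(err)  # `tpu_cores` can only be 1, 8 or [<1-8>]
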
6 changes: 2 additions & 4 deletions tests/accelerators/test_accelerator_connector.py
@@ -446,8 +446,7 @@ def test_ipython_compatible_dp_strategy_gpu(_, monkeypatch):
 
 
 @mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True)
-@mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.parse_devices", return_value=8)
-def test_ipython_compatible_strategy_tpu(mock_devices, mock_tpu_acc_avail, monkeypatch):
+def test_ipython_compatible_strategy_tpu(mock_tpu_acc_avail, monkeypatch):
     monkeypatch.setattr(pytorch_lightning.utilities, "_IS_INTERACTIVE", True)
     trainer = Trainer(accelerator="tpu")
     assert trainer.strategy.launcher is None or trainer.strategy.launcher.is_interactive_compatible
@@ -894,8 +893,7 @@ def test_strategy_choice_ddp_cpu_slurm(device_count_mock, setup_distributed_mock
 
 
 @mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.is_available", return_value=True)
-@mock.patch("pytorch_lightning.accelerators.tpu.TPUAccelerator.parse_devices", return_value=8)
-def test_unsupported_tpu_choice(mock_devices, mock_tpu_acc_avail):
+def test_unsupported_tpu_choice(mock_tpu_acc_avail):
 
     with pytest.raises(MisconfigurationException, match=r"accelerator='tpu', precision=64\)` is not implemented"):
         Trainer(accelerator="tpu", precision=64)
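
Because the `parse_devices` patch is gone, each test also loses one injected mock parameter: stacked `mock.patch` decorators pass their mocks innermost-first, so dropping a decorator means dropping the matching argument. A standalone sketch of that mechanic, using a hypothetical `Service` class rather than Lightning code:

    from unittest import mock

    class Service:
        @staticmethod
        def ping() -> bool:
            return False

    # Each @mock.patch.object supplies one argument (innermost decorator
    # first); removing a patch removes the matching parameter.
    @mock.patch.object(Service, "ping", return_value=True)
    def check(mock_ping):
        assert Service.ping() is True  # patched inside the test only
        mock_ping.assert_called_once()

    check()
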
5 changes: 2 additions & 3 deletions tests/deprecated_api/test_remove_1-8.py
@@ -1139,9 +1139,8 @@ def test_trainer_gpus(monkeypatch, trainer_kwargs):
 
 
 def test_trainer_tpu_cores(monkeypatch):
-    monkeypatch.setattr(pytorch_lightning.accelerators.tpu.TPUAccelerator, "is_available", lambda: True)
-    monkeypatch.setattr(pytorch_lightning.accelerators.tpu.TPUAccelerator, "parse_devices", lambda: 8)
-    trainer = Trainer(accelerator="TPU", devices=8)
+    monkeypatch.setattr(pytorch_lightning.accelerators.tpu.TPUAccelerator, "is_available", lambda _: True)
+    trainer = Trainer(accelerator="tpu", devices=8)
     with pytest.deprecated_call(
         match="`Trainer.tpu_cores` is deprecated in v1.6 and will be removed in v1.8. "
         "Please use `Trainer.num_devices` instead."
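
One detail in this hunk: the replacement for `is_available` changes from `lambda: True` to `lambda _: True`. In plain Python, a function assigned onto a class is bound when looked up through an instance, so the instance arrives as an implicit first argument; the extra parameter presumably absorbs that. A standalone sketch with a hypothetical `FakeAccelerator`, not the real class:

    class FakeAccelerator:
        def is_available(self) -> bool:
            return False

    acc = FakeAccelerator()
    # Mirrors the monkeypatch.setattr above: a plain function on the class.
    FakeAccelerator.is_available = lambda _: True
    assert acc.is_available() is True  # `acc` is passed in as `_`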