diff --git a/CHANGELOG.md b/CHANGELOG.md index a79f57d27e0e5..11e74873506da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -539,6 +539,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Deprecated `Trainer.num_gpus` in favor of `Trainer.num_devices` when GPU is used ([#12384](https://github.com/PyTorchLightning/pytorch-lightning/pull/12384)) +- Deprecated `Trainer.ipus` in favor of `Trainer.num_devices` when IPU is used ([#12386](https://github.com/PyTorchLightning/pytorch-lightning/pull/12386)) + + - Deprecated `Trainer.num_processes` in favor of `Trainer.num_devices` ([#12388](https://github.com/PyTorchLightning/pytorch-lightning/pull/12388)) @@ -738,6 +741,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Removed `AcceleratorConnector.num_gpus` property ([#12384](https://github.com/PyTorchLightning/pytorch-lightning/pull/12384)) +- Removed `AcceleratorConnector.num_ipus` property ([#12386](https://github.com/PyTorchLightning/pytorch-lightning/pull/12386)) + + - Removed `AcceleratorConnector.num_processes` property ([#12388](https://github.com/PyTorchLightning/pytorch-lightning/pull/12388)) diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py index 3ab7b1cbd51df..63c975bc8ef3f 100644 --- a/pytorch_lightning/trainer/connectors/accelerator_connector.py +++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py @@ -798,6 +798,13 @@ def tpu_cores(self) -> Optional[Union[List[int], int]]: return self._tpu_cores # type: ignore return 0 + @property + def tpu_id(self) -> Optional[int]: + if isinstance(self.accelerator, TPUAccelerator): + if isinstance(self._tpu_cores, list): + return self._tpu_cores[0] + return None + @property def num_ipus(self) -> int: if isinstance(self.accelerator, IPUAccelerator): diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 
5a9bda0cd2ff7..287ed2f4d1a3d 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -1811,7 +1811,7 @@ def _log_device_info(self) -> None: ) rank_zero_info(f"TPU available: {_TPU_AVAILABLE}, using: {num_tpu_cores} TPU cores") - num_ipus = self.ipus if self.ipus is not None else 0 + num_ipus = self.num_devices if isinstance(self.accelerator, IPUAccelerator) else 0 rank_zero_info(f"IPU available: {_IPU_AVAILABLE}, using: {num_ipus} IPUs") if torch.cuda.is_available() and not isinstance(self.accelerator, GPUAccelerator): @@ -2090,7 +2090,11 @@ def tpu_cores(self) -> int: @property def ipus(self) -> int: - return self._accelerator_connector.num_ipus + rank_zero_deprecation( + "`Trainer.ipus` was deprecated in v1.6 and will be removed in v1.8." + " Please use `Trainer.num_devices` instead." + ) + return self.num_devices if isinstance(self.accelerator, IPUAccelerator) else 0 @property def num_gpus(self) -> int: diff --git a/tests/accelerators/test_ipu.py b/tests/accelerators/test_ipu.py index 5a09d654bf437..fd8b4ae1dd97a 100644 --- a/tests/accelerators/test_ipu.py +++ b/tests/accelerators/test_ipu.py @@ -464,7 +464,8 @@ def test_replication_factor(tmpdir): plugin = IPUStrategy() trainer = Trainer(ipus=2, default_root_dir=tmpdir, fast_dev_run=True, strategy=plugin) - assert trainer.ipus == 2 + assert isinstance(trainer.accelerator, IPUAccelerator) + assert trainer.num_devices == 2 assert trainer.strategy.replication_factor == 2 model = BoringModel() @@ -561,16 +562,16 @@ def test_accelerator_ipu_with_devices(): trainer = Trainer(accelerator="ipu", devices=8) - assert trainer.ipus == 8 assert isinstance(trainer.strategy, IPUStrategy) assert isinstance(trainer.accelerator, IPUAccelerator) + assert trainer.num_devices == 8 @RunIf(ipu=True) def test_accelerator_auto_with_devices_ipu(): trainer = Trainer(accelerator="auto", devices=8) assert isinstance(trainer.accelerator, IPUAccelerator) - assert trainer.ipus == 8 + assert 
trainer.num_devices == 8 @RunIf(ipu=True) @@ -581,7 +582,8 @@ def test_accelerator_ipu_with_ipus_priority(): with pytest.warns(UserWarning, match="The flag `devices=1` will be ignored,"): trainer = Trainer(accelerator="ipu", devices=1, ipus=ipus) - assert trainer.ipus == ipus + assert isinstance(trainer.accelerator, IPUAccelerator) + assert trainer.num_devices == ipus @RunIf(ipu=True) @@ -632,4 +634,4 @@ def test_poptorch_models_at_different_stages(tmpdir): def test_devices_auto_choice_ipu(): trainer = Trainer(accelerator="auto", devices="auto") assert trainer.num_devices == 4 - assert trainer.ipus == 4 + assert isinstance(trainer.accelerator, IPUAccelerator) diff --git a/tests/deprecated_api/test_remove_1-8.py b/tests/deprecated_api/test_remove_1-8.py index 8b2b6fa3c542b..32e89f62c65d0 100644 --- a/tests/deprecated_api/test_remove_1-8.py +++ b/tests/deprecated_api/test_remove_1-8.py @@ -22,6 +22,7 @@ import torch from torch import optim +import pytorch_lightning from pytorch_lightning import Callback, Trainer from pytorch_lightning.loggers import CSVLogger, LightningLoggerBase, LoggerCollection from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin @@ -986,6 +987,26 @@ def stop(self, action_name: str) -> None: CustomProfiler2() +@pytest.mark.parametrize( + ["trainer_kwargs", "expected_ipus"], + [ + ({}, 0), + ({"devices": 1}, 0), + ({"accelerator": "ipu", "devices": 1}, 1), + ({"accelerator": "ipu", "devices": 8}, 8), + ], +) +def test_trainer_config_ipus(monkeypatch, trainer_kwargs, expected_ipus): + monkeypatch.setattr(pytorch_lightning.accelerators.ipu.IPUAccelerator, "is_available", lambda _: True) + monkeypatch.setattr(pytorch_lightning.strategies.ipu, "_IPU_AVAILABLE", lambda: True) + trainer = Trainer(**trainer_kwargs) + with pytest.deprecated_call( + match="`Trainer.ipus` was deprecated in v1.6 and will be removed in v1.8." + " Please use `Trainer.num_devices` instead." 
+ ): + assert trainer.ipus == expected_ipus + + @pytest.mark.parametrize( ["trainer_kwargs", "expected_num_processes"], [