@@ -27,7 +27,7 @@
 from torch.optim import Optimizer
 
 import pytorch_lightning as pl
-from pytorch_lightning.accelerators import Accelerator, IPUAccelerator
+from pytorch_lightning.accelerators import Accelerator, GPUAccelerator, IPUAccelerator, TPUAccelerator
 from pytorch_lightning.callbacks import Callback, EarlyStopping, ModelCheckpoint, ProgressBarBase
 from pytorch_lightning.callbacks.prediction_writer import BasePredictionWriter
 from pytorch_lightning.core.datamodule import LightningDataModule
@@ -1626,33 +1626,31 @@ def __setup_profiler(self) -> None:
         self.profiler.setup(stage=self.state.fn._setup_fn, local_rank=local_rank, log_dir=self.log_dir)
 
     def _log_device_info(self) -> None:
-        rank_zero_info(f"GPU available: {torch.cuda.is_available()}, used: {self._device_type == _AcceleratorType.GPU}")
+        rank_zero_info(
+            f"GPU available: {torch.cuda.is_available()}, used: {isinstance(self.accelerator, GPUAccelerator)}"
+        )
 
         num_tpu_cores = (
-            self.tpu_cores if self.tpu_cores is not None and self._device_type == _AcceleratorType.TPU else 0
+            self.tpu_cores if self.tpu_cores is not None and isinstance(self.accelerator, TPUAccelerator) else 0
         )
         rank_zero_info(f"TPU available: {_TPU_AVAILABLE}, using: {num_tpu_cores} TPU cores")
 
         num_ipus = self.ipus if self.ipus is not None else 0
         rank_zero_info(f"IPU available: {_IPU_AVAILABLE}, using: {num_ipus} IPUs")
 
-        if torch.cuda.is_available() and self._device_type != _AcceleratorType.GPU:
+        if torch.cuda.is_available() and not isinstance(self.accelerator, GPUAccelerator):
             rank_zero_warn(
                 "GPU available but not used. Set the gpus flag in your trainer `Trainer(gpus=1)` or script `--gpus=1`.",
                 category=PossibleUserWarning,
             )
 
-        if _TPU_AVAILABLE and self._device_type != _AcceleratorType.TPU:
+        if _TPU_AVAILABLE and not isinstance(self.accelerator, TPUAccelerator):
             rank_zero_warn(
                 "TPU available but not used. Set the `tpu_cores` flag in your trainer"
                 " `Trainer(tpu_cores=8)` or script `--tpu_cores=8`."
             )
 
-        if (
-            _IPU_AVAILABLE
-            and self._device_type != _AcceleratorType.IPU
-            and not isinstance(self.accelerator, IPUAccelerator)
-        ):
+        if _IPU_AVAILABLE and not isinstance(self.accelerator, IPUAccelerator):
             rank_zero_warn(
                 "IPU available but not used. Set the `ipus` flag in your trainer"
                 " `Trainer(ipus=8)` or script `--ipus=8`."
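For context, the change swaps the private `self._device_type == _AcceleratorType.X` enum comparisons for `isinstance` checks against the resolved accelerator object, which also collapses the redundant double check in the old IPU branch. Below is a minimal sketch of the same pattern from user code, assuming a pytorch_lightning version where `Trainer.accelerator` exposes the active `Accelerator` instance (as this diff itself relies on); the `Trainer()` call and the prints are illustrative only:

```python
# Sketch: detect the resolved accelerator by class rather than by private enum.
# Assumes `Trainer.accelerator` returns the active Accelerator instance.
import pytorch_lightning as pl
from pytorch_lightning.accelerators import GPUAccelerator, TPUAccelerator

trainer = pl.Trainer()  # accelerator is resolved from flags/environment

if isinstance(trainer.accelerator, GPUAccelerator):
    print("training on GPU")
elif isinstance(trainer.accelerator, TPUAccelerator):
    print("training on TPU")
else:
    print("training on CPU or another accelerator")
```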