@@ -27,7 +27,7 @@
 from torch.optim import Optimizer

 import pytorch_lightning as pl
-from pytorch_lightning.accelerators import Accelerator, IPUAccelerator
+from pytorch_lightning.accelerators import Accelerator, GPUAccelerator, IPUAccelerator, TPUAccelerator
 from pytorch_lightning.callbacks import Callback, EarlyStopping, ModelCheckpoint, ProgressBarBase
 from pytorch_lightning.callbacks.prediction_writer import BasePredictionWriter
 from pytorch_lightning.core.datamodule import LightningDataModule
@@ -1650,33 +1650,31 @@ def __setup_profiler(self) -> None:
         self.profiler.setup(stage=self.state.fn._setup_fn, local_rank=local_rank, log_dir=self.log_dir)

     def _log_device_info(self) -> None:
-        rank_zero_info(f"GPU available: {torch.cuda.is_available()}, used: {self._device_type == _AcceleratorType.GPU}")
+        rank_zero_info(
+            f"GPU available: {torch.cuda.is_available()}, used: {isinstance(self.accelerator, GPUAccelerator)}"
+        )

         num_tpu_cores = (
-            self.tpu_cores if self.tpu_cores is not None and self._device_type == _AcceleratorType.TPU else 0
+            self.tpu_cores if self.tpu_cores is not None and isinstance(self.accelerator, TPUAccelerator) else 0
         )
         rank_zero_info(f"TPU available: {_TPU_AVAILABLE}, using: {num_tpu_cores} TPU cores")

         num_ipus = self.ipus if self.ipus is not None else 0
         rank_zero_info(f"IPU available: {_IPU_AVAILABLE}, using: {num_ipus} IPUs")

-        if torch.cuda.is_available() and self._device_type != _AcceleratorType.GPU:
+        if torch.cuda.is_available() and not isinstance(self.accelerator, GPUAccelerator):
             rank_zero_warn(
                 "GPU available but not used. Set the gpus flag in your trainer `Trainer(gpus=1)` or script `--gpus=1`.",
                 category=PossibleUserWarning,
             )

-        if _TPU_AVAILABLE and self._device_type != _AcceleratorType.TPU:
+        if _TPU_AVAILABLE and not isinstance(self.accelerator, TPUAccelerator):
             rank_zero_warn(
                 "TPU available but not used. Set the `tpu_cores` flag in your trainer"
                 " `Trainer(tpu_cores=8)` or script `--tpu_cores=8`."
             )

-        if (
-            _IPU_AVAILABLE
-            and self._device_type != _AcceleratorType.IPU
-            and not isinstance(self.accelerator, IPUAccelerator)
-        ):
+        if _IPU_AVAILABLE and not isinstance(self.accelerator, IPUAccelerator):
             rank_zero_warn(
                 "IPU available but not used. Set the `ipus` flag in your trainer"
                 " `Trainer(ipus=8)` or script `--ipus=8`."
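
The pattern this diff adopts makes the trainer's accelerator instance the single source of truth for device checks, replacing comparisons against the removed `_AcceleratorType` enum. Below is a minimal sketch (not part of this commit) of how the same `isinstance` checks look from user code; it assumes only names visible in the diff (`GPUAccelerator`, `TPUAccelerator`, and the trainer's `accelerator` attribute).

```python
import torch

import pytorch_lightning as pl
from pytorch_lightning.accelerators import GPUAccelerator, TPUAccelerator

# Build a default (CPU) trainer; passing e.g. `gpus=1` would select the GPU accelerator.
trainer = pl.Trainer()

# Query the accelerator instance directly instead of comparing an enum member.
on_gpu = isinstance(trainer.accelerator, GPUAccelerator)
on_tpu = isinstance(trainer.accelerator, TPUAccelerator)

# Mirrors the warning condition in `_log_device_info`: CUDA hardware is
# present, but this run is not using the GPU accelerator.
if torch.cuda.is_available() and not on_gpu:
    print("GPU available but not used. Set the gpus flag, e.g. `Trainer(gpus=1)`.")
```

One consequence of the `isinstance` form: subclasses of `GPUAccelerator` or `TPUAccelerator` are recognized automatically, which an equality check on an enum member could not do.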