
Commit e840a82

committed
deprecate trainer.gpus
1 parent dcc973e commit e840a82

File tree

3 files changed, +22 -3 lines changed


pytorch_lightning/trainer/trainer.py

Lines changed: 4 additions & 0 deletions
@@ -2181,6 +2181,10 @@ def scaler(self) -> Optional[Any]:
 
     @property
     def gpus(self) -> Optional[Union[List[int], str, int]]:
+        rank_zero_deprecation(
+            "`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8."
+            " Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
+        )
         return self._accelerator_connector.gpus
 
     @property
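
For readers migrating their own code, a minimal sketch of the switch the warning asks for, assuming a PyTorch Lightning 1.6+ Trainer where `num_devices` and `device_ids` are the replacements named in the deprecation message; the `accelerator`/`devices` arguments are illustrative and assume two visible GPUs.

# Minimal migration sketch (not part of this commit); assumes PyTorch
# Lightning >= 1.6 and a machine with at least two GPUs available.
from pytorch_lightning import Trainer

trainer = Trainer(accelerator="gpu", devices=2)

# Before: reading the deprecated property warns from v1.6 and stops
# working in v1.8.
# gpus = trainer.gpus

# After: query the resolved device information instead.
num_devices = trainer.num_devices  # number of devices the Trainer will use
device_ids = trainer.device_ids    # the concrete device indices, e.g. [0, 1]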

tests/accelerators/test_accelerator_connector.py

Lines changed: 14 additions & 2 deletions
@@ -545,7 +545,19 @@ def test_accelerator_gpu_with_devices(devices, plugin):
 def test_accelerator_auto_with_devices_gpu():
     trainer = Trainer(accelerator="auto", devices=1)
     assert isinstance(trainer.accelerator, GPUAccelerator)
-    assert trainer.gpus == 1
+    assert trainer.num_devices == 1
+
+
+@RunIf(min_gpus=1)
+def test_accelerator_gpu_with_gpus_priority():
+    """Test for checking `gpus` flag takes priority over `devices`."""
+
+    gpus = 1
+    with pytest.warns(UserWarning, match="The flag `devices=4` will be ignored,"):
+        trainer = Trainer(accelerator="gpu", devices=4, gpus=gpus)
+
+    assert isinstance(trainer.accelerator, GPUAccelerator)
+    assert trainer.num_devices == gpus
 
 
 def test_validate_accelerator_and_devices():
@@ -934,8 +946,8 @@ def test_devices_auto_choice_cpu(is_ipu_available_mock, is_tpu_available_mock, i
 @mock.patch("torch.cuda.device_count", return_value=2)
 def test_devices_auto_choice_gpu(is_gpu_available_mock, device_count_mock):
     trainer = Trainer(accelerator="auto", devices="auto")
+    assert isinstance(trainer.accelerator, GPUAccelerator)
     assert trainer.num_devices == 2
-    assert trainer.gpus == 2
 
 
 @pytest.mark.parametrize(
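
Not part of this commit, but a hedged sketch of how the new deprecation path itself could be asserted in a test, assuming the warning emitted by `rank_zero_deprecation` is a `DeprecationWarning` subclass that `pytest.deprecated_call` can catch; the test name is hypothetical.

# Hypothetical companion test (an assumption, not in this diff): accessing
# `Trainer.gpus` should now emit the deprecation warning added above.
import pytest

from pytorch_lightning import Trainer


def test_trainer_gpus_property_is_deprecated():
    trainer = Trainer(accelerator="cpu", devices=1)
    with pytest.deprecated_call(match="`Trainer.gpus` was deprecated in v1.6"):
        _ = trainer.gpus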

tests/models/test_gpu.py

Lines changed: 4 additions & 1 deletion
@@ -23,6 +23,7 @@
 import tests.helpers.pipelines as tpipes
 import tests.helpers.utils as tutils
 from pytorch_lightning import Trainer
+from pytorch_lightning.accelerators import CPUAccelerator, GPUAccelerator
 from pytorch_lightning.plugins.environments import TorchElasticEnvironment
 from pytorch_lightning.utilities import device_parser
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
@@ -190,10 +191,12 @@ def test_torchelastic_gpu_parsing(mocked_device_count, mocked_is_available, gpus
     sanitizing the gpus as only one of the GPUs is visible."""
     trainer = Trainer(gpus=gpus)
     assert isinstance(trainer._accelerator_connector.cluster_environment, TorchElasticEnvironment)
-    assert trainer.gpus == gpus
     # when use gpu
     if device_parser.parse_gpu_ids(gpus) is not None:
         assert trainer.device_ids == device_parser.parse_gpu_ids(gpus)
+        assert isinstance(trainer.accelerator, GPUAccelerator)
+    else:
+        assert isinstance(trainer.accelerator, CPUAccelerator)
 
 
 @RunIf(min_gpus=1)
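
As a closing usage note, a minimal sketch (not from this commit) of replacing a `trainer.gpus` check in downstream code with the accelerator instance plus the resolved device information, mirroring what the updated tests assert; the `accelerator="auto"` configuration is illustrative.

# Sketch of a post-deprecation device check: inspect the accelerator type
# and resolved devices instead of the removed `trainer.gpus` property.
from pytorch_lightning import Trainer
from pytorch_lightning.accelerators import CPUAccelerator, GPUAccelerator

trainer = Trainer(accelerator="auto", devices="auto")
if isinstance(trainer.accelerator, GPUAccelerator):
    print(f"Using {trainer.num_devices} GPU(s): {trainer.device_ids}")
else:
    assert isinstance(trainer.accelerator, CPUAccelerator)
    print(f"Using CPU with {trainer.num_devices} device(s)")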
