Skip to content

Commit 5d156f4

Browse files
DuYicong515 and Borda authored
Remove AcceleratorConnector.tpu_id (#12387)
Co-authored-by: Jirka Borovec <[email protected]>
1 parent 5fbe467 commit 5d156f4

File tree

3 files changed

+6
-46
lines changed

3 files changed

+6
-46
lines changed

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -729,6 +729,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
729729
- Removed `AcceleratorConnector.root_gpu` property ([#12262](https://github.com/PyTorchLightning/pytorch-lightning/pull/12262))
730730

731731

732+
- Removed `AcceleratorConnector.tpu_id` property ([#12387](https://github.com/PyTorchLightning/pytorch-lightning/pull/12387))
733+
734+
732735
- Removed `AcceleratorConnector.num_gpus` property ([#12384](https://github.com/PyTorchLightning/pytorch-lightning/pull/12384))
733736

734737

pytorch_lightning/trainer/connectors/accelerator_connector.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -798,13 +798,6 @@ def tpu_cores(self) -> Optional[Union[List[int], int]]:
798798
return self._tpu_cores # type: ignore
799799
return 0
800800

801-
@property
802-
def tpu_id(self) -> Optional[int]:
803-
if isinstance(self.accelerator, TPUAccelerator):
804-
if isinstance(self._tpu_cores, list):
805-
return self._tpu_cores[0]
806-
return None
807-
808801
@property
809802
def num_ipus(self) -> int:
810803
if isinstance(self.accelerator, IPUAccelerator):

tests/models/test_tpu.py

Lines changed: 3 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -242,20 +242,11 @@ def test_dataloaders_passed_to_fit(tmpdir):
242242
assert trainer.state.finished, f"Training failed with {trainer.state}"
243243

244244

245-
@pytest.mark.parametrize(
246-
["tpu_cores", "expected_tpu_id"],
247-
[(1, None), (8, None), ([1], 1), ([8], 8)],
248-
)
249245
@RunIf(tpu=True)
250-
def test_tpu_id_to_be_as_expected(tpu_cores, expected_tpu_id):
251-
"""Test if trainer.tpu_id is set as expected."""
252-
assert Trainer(tpu_cores=tpu_cores)._accelerator_connector.tpu_id == expected_tpu_id
253-
254-
255-
def test_tpu_misconfiguration():
256-
"""Test if trainer.tpu_id is set as expected."""
246+
@pytest.mark.parametrize("tpu_cores", [[1, 8], "9, ", [9], [0], 2, 10])
247+
def test_tpu_misconfiguration(tpu_cores):
257248
with pytest.raises(MisconfigurationException, match="`tpu_cores` can only be"):
258-
Trainer(tpu_cores=[1, 8])
249+
Trainer(tpu_cores=tpu_cores)
259250

260251

261252
@pytest.mark.skipif(_TPU_AVAILABLE, reason="test requires missing TPU")
@@ -289,33 +280,6 @@ def test_broadcast(rank):
289280
xmp.spawn(test_broadcast, nprocs=8, start_method="fork")
290281

291282

292-
@pytest.mark.parametrize(
293-
["tpu_cores", "expected_tpu_id", "error_expected"],
294-
[
295-
(1, None, False),
296-
(8, None, False),
297-
([1], 1, False),
298-
([8], 8, False),
299-
("1,", 1, False),
300-
("1", None, False),
301-
("9, ", 9, True),
302-
([9], 9, True),
303-
([0], 0, True),
304-
(2, None, True),
305-
(10, None, True),
306-
],
307-
)
308-
@RunIf(tpu=True)
309-
@pl_multi_process_test
310-
def test_tpu_choice(tmpdir, tpu_cores, expected_tpu_id, error_expected):
311-
if error_expected:
312-
with pytest.raises(MisconfigurationException, match=r".*tpu_cores` can only be 1, 8 or [<1-8>]*"):
313-
Trainer(default_root_dir=tmpdir, tpu_cores=tpu_cores)
314-
else:
315-
trainer = Trainer(default_root_dir=tmpdir, tpu_cores=tpu_cores)
316-
assert trainer._accelerator_connector.tpu_id == expected_tpu_id
317-
318-
319283
@pytest.mark.parametrize(
320284
["cli_args", "expected"],
321285
[("--tpu_cores=8", {"tpu_cores": 8}), ("--tpu_cores=1,", {"tpu_cores": "1,"})],

0 commit comments

Comments
 (0)