
Commit 2a6582f

carmocca and Raalsky authored and committed
Skip strategy=ddp_spawn, accelerator=cpu, python>=3.9 tests (Lightning-AI#10550)
1 parent e772a4a commit 2a6582f

18 files changed (+39 -27 lines)

tests/callbacks/test_early_stopping.py

Lines changed: 5 additions & 5 deletions
@@ -381,16 +381,16 @@ def on_train_end(self) -> None:
 
 _ES_CHECK = dict(check_on_train_epoch_end=True)
 _ES_CHECK_P3 = dict(patience=3, check_on_train_epoch_end=True)
-_NO_WIN = dict(marks=RunIf(skip_windows=True))
+_SPAWN_MARK = dict(marks=RunIf(skip_windows=True, skip_49370=True))
 
 
 @pytest.mark.parametrize(
     "callbacks, expected_stop_epoch, check_on_train_epoch_end, strategy, num_processes",
     [
         ([EarlyStopping("abc"), EarlyStopping("cba", patience=3)], 3, False, None, 1),
         ([EarlyStopping("cba", patience=3), EarlyStopping("abc")], 3, False, None, 1),
-        pytest.param([EarlyStopping("abc"), EarlyStopping("cba", patience=3)], 3, False, "ddp_spawn", 2, **_NO_WIN),
-        pytest.param([EarlyStopping("cba", patience=3), EarlyStopping("abc")], 3, False, "ddp_spawn", 2, **_NO_WIN),
+        pytest.param([EarlyStopping("abc"), EarlyStopping("cba", patience=3)], 3, False, "ddp_spawn", 2, **_SPAWN_MARK),
+        pytest.param([EarlyStopping("cba", patience=3), EarlyStopping("abc")], 3, False, "ddp_spawn", 2, **_SPAWN_MARK),
         ([EarlyStopping("abc", **_ES_CHECK), EarlyStopping("cba", **_ES_CHECK_P3)], 3, True, None, 1),
         ([EarlyStopping("cba", **_ES_CHECK_P3), EarlyStopping("abc", **_ES_CHECK)], 3, True, None, 1),
         pytest.param(
@@ -399,15 +399,15 @@ def on_train_end(self) -> None:
             True,
             "ddp_spawn",
             2,
-            **_NO_WIN,
+            **_SPAWN_MARK,
         ),
         pytest.param(
             [EarlyStopping("cba", **_ES_CHECK_P3), EarlyStopping("abc", **_ES_CHECK)],
             3,
             True,
             "ddp_spawn",
             2,
-            **_NO_WIN,
+            **_SPAWN_MARK,
         ),
     ],
 )
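Note on the pattern above: _SPAWN_MARK works because a dict with a marks entry can be unpacked straight into pytest.param(...), so a single definition gates every spawn-based case in the table. A minimal self-contained sketch of the idiom (the needs_posix name and the toy test are illustrative, not from this commit):

import sys

import pytest

# One shared marker definition, unpacked into each parametrized case.
needs_posix = dict(marks=pytest.mark.skipif(sys.platform == "win32", reason="requires a POSIX platform"))


@pytest.mark.parametrize(
    "num_processes",
    [
        1,  # plain case, no extra marks
        pytest.param(2, **needs_posix),  # equivalent to pytest.param(2, marks=...)
    ],
)
def test_num_processes(num_processes):
    assert num_processes in (1, 2)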

tests/callbacks/test_pruning.py

Lines changed: 1 addition & 1 deletion
@@ -187,7 +187,7 @@ def test_pruning_callback_ddp_spawn(tmpdir):
     train_with_pruning_callback(tmpdir, use_global_unstructured=True, strategy="ddp_spawn", gpus=2)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_pruning_callback_ddp_cpu(tmpdir):
     train_with_pruning_callback(tmpdir, parameters_to_prune=True, strategy="ddp_spawn", num_processes=2)

tests/callbacks/test_stochastic_weight_avg.py

Lines changed: 1 addition & 1 deletion
@@ -148,7 +148,7 @@ def test_swa_callback_ddp_spawn(tmpdir):
     train_with_swa(tmpdir, strategy="ddp_spawn", gpus=2)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_swa_callback_ddp_cpu(tmpdir):
     train_with_swa(tmpdir, strategy="ddp_spawn", num_processes=2)

tests/checkpointing/test_model_checkpoint.py

Lines changed: 1 addition & 1 deletion
@@ -385,7 +385,7 @@ def on_train_end(self, trainer, pl_module):
         assert torch.save.call_count == 0
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_model_checkpoint_no_extraneous_invocations(tmpdir):
     """Test to ensure that the model callback saves the checkpoints only once in distributed mode."""
     model = LogInTwoMethods()

tests/checkpointing/test_torch_saving.py

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ def test_model_torch_save(tmpdir):
     trainer = torch.load(temp_path)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_model_torch_save_ddp_cpu(tmpdir):
     """Test to ensure torch save does not fail for model and trainer using cpu ddp."""
     model = BoringModel()

tests/deprecated_api/test_remove_1-7.py

Lines changed: 1 addition & 1 deletion
@@ -245,7 +245,7 @@ def get_from_queue(self, queue: torch.multiprocessing.SimpleQueue) -> None:
         return super().get_from_queue(queue)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_v1_7_0_deprecate_add_get_queue(tmpdir):
     model = BoringCallbackDDPSpawnModel()
     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, num_processes=2, strategy="ddp_spawn")

tests/helpers/runif.py

Lines changed: 11 additions & 0 deletions
@@ -70,6 +70,7 @@ def __new__(
         fairscale_fully_sharded: bool = False,
         deepspeed: bool = False,
         rich: bool = False,
+        skip_49370: bool = False,
         **kwargs,
     ):
         """
@@ -91,6 +92,7 @@ def __new__(
             fairscale_fully_sharded: if `fairscale` fully sharded module is required to run the test
             deepspeed: if `deepspeed` module is required to run the test
             rich: if `rich` module is required to run the test
+            skip_49370: Skip the test as it's impacted by https://github.com/pytorch/pytorch/issues/49370.
             kwargs: native pytest.mark.skipif keyword arguments
         """
         conditions = []
@@ -165,6 +167,15 @@ def __new__(
             conditions.append(not _RICH_AVAILABLE)
             reasons.append("Rich")
 
+        if skip_49370:
+            # strategy=ddp_spawn, accelerator=cpu, python>=3.9, torch<1.8 does not work
+            py_version = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
+            ge_3_9 = Version(py_version) >= Version("3.9")
+            torch_version = get_distribution("torch").version
+            old_torch = Version(torch_version) < Version("1.8")
+            conditions.append(ge_3_9 and old_torch)
+            reasons.append("Impacted by https://github.com/pytorch/pytorch/issues/49370")
+
         reasons = [rs for cond, rs in zip(conditions, reasons) if cond]
         return pytest.mark.skipif(
             *args, condition=any(conditions), reason=f"Requires: [{' + '.join(reasons)}]", **kwargs
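Note on the check above: the skip fires only when both halves hold, Python at least 3.9 and torch older than 1.8. A standalone sketch of the same condition, runnable outside the RunIf machinery (the import locations for Version and get_distribution are assumptions; the diff does not show the file's import block):

import sys

from packaging.version import Version  # assumed import location
from pkg_resources import get_distribution  # assumed import location

# Python >= 3.9 combined with torch < 1.8 hits pytorch/pytorch#49370
# for strategy=ddp_spawn on the CPU accelerator.
py_version = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
ge_3_9 = Version(py_version) >= Version("3.9")
old_torch = Version(get_distribution("torch").version) < Version("1.8")

if ge_3_9 and old_torch:
    print("affected: tests marked with skip_49370=True would be skipped")
else:
    print("not affected: tests marked with skip_49370=True would run")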

tests/loggers/test_all.py

Lines changed: 1 addition & 1 deletion
@@ -321,8 +321,8 @@ def on_train_batch_start(self, trainer, pl_module, batch, batch_idx):
         assert pl_module.logger.experiment.something(foo="bar") is None
 
 
+@RunIf(skip_windows=True, skip_49370=True)
 @pytest.mark.parametrize("logger_class", [CometLogger, CSVLogger, MLFlowLogger, TensorBoardLogger, TestTubeLogger])
-@RunIf(skip_windows=True)
 def test_logger_created_on_rank_zero_only(tmpdir, monkeypatch, logger_class):
     """Test that loggers get replaced by dummy loggers on global rank > 0."""
     _patch_comet_atexit(monkeypatch)

tests/models/test_cpu.py

Lines changed: 1 addition & 1 deletion
@@ -122,7 +122,7 @@ def validation_step(self, *args, **kwargs):
     model.unfreeze()
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_multi_cpu_model_ddp(tmpdir):
     """Make sure DDP works."""
     tutils.set_random_main_port()

tests/models/test_horovod.py

Lines changed: 3 additions & 3 deletions
@@ -66,7 +66,7 @@ def _run_horovod(trainer_options, on_gpu=False):
     assert exit_code == 0
 
 
-@RunIf(skip_windows=True, horovod=True)
+@RunIf(skip_windows=True, horovod=True, skip_49370=True)
 def test_horovod_cpu(tmpdir):
     """Test Horovod running multi-process on CPU."""
     trainer_options = dict(
@@ -82,7 +82,7 @@ def test_horovod_cpu(tmpdir):
     _run_horovod(trainer_options)
 
 
-@RunIf(skip_windows=True, horovod=True)
+@RunIf(skip_windows=True, horovod=True, skip_49370=True)
 def test_horovod_cpu_clip_grad_by_value(tmpdir):
     """Test Horovod running multi-process on CPU."""
     trainer_options = dict(
@@ -99,7 +99,7 @@ def test_horovod_cpu_clip_grad_by_value(tmpdir):
     _run_horovod(trainer_options)
 
 
-@RunIf(skip_windows=True, horovod=True)
+@RunIf(skip_windows=True, horovod=True, skip_49370=True)
 def test_horovod_cpu_implicit(tmpdir):
     """Test Horovod without specifying a backend, inferring from env set by `horovodrun`."""
     trainer_options = dict(

tests/plugins/test_ddp_spawn_plugin.py

Lines changed: 3 additions & 3 deletions
@@ -46,7 +46,7 @@ def get_from_queue(self, queue: torch.multiprocessing.SimpleQueue) -> None:
         return super().get_from_queue(queue)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_ddp_cpu():
     """Tests if device is set correctly when training for DDPSpawnPlugin."""
     trainer = Trainer(num_processes=2, fast_dev_run=True)
@@ -91,7 +91,7 @@ def get_from_queue(self, trainer: Trainer, queue: torch.multiprocessing.SimpleQueue) -> None:
         return super().get_from_queue(trainer, queue)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_ddp_spawn_add_get_queue(tmpdir):
     """Tests add_to_queue/get_from_queue with DDPSpawnPlugin."""
 
@@ -128,7 +128,7 @@ def on_predict_start(self) -> None:
         assert isinstance(self.trainer.model, LightningModule)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_ddp_spawn_configure_ddp(tmpdir):
     """Tests with ddp spawn plugin."""
     trainer = Trainer(default_root_dir=tmpdir, num_processes=2, strategy="ddp_spawn", fast_dev_run=True)

tests/profiler/test_profiler.py

Lines changed: 2 additions & 1 deletion
@@ -161,7 +161,7 @@ def test_simple_profiler_with_nonexisting_dirpath(tmpdir):
     assert nonexisting_tmpdir.join("fit-profiler.txt").exists()
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_simple_profiler_distributed_files(tmpdir):
     """Ensure the proper files are saved in distributed."""
     profiler = SimpleProfiler(dirpath=tmpdir, filename="profiler")
@@ -226,6 +226,7 @@ def test_advanced_profiler_iterable_durations(advanced_profiler, action: str, expected):
     np.testing.assert_allclose(recored_total_duration, expected_total_duration, rtol=0.2)
 
 
+@pytest.mark.flaky(reruns=3)
 def test_advanced_profiler_overhead(advanced_profiler, n_iter=5):
     """ensure that the profiler doesn't introduce too much overhead during training."""
     for _ in range(n_iter):
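Note on the second hunk: the flaky(reruns=3) marker is presumably provided by the pytest-rerunfailures plugin (an assumption from the marker name; the diff does not show the test requirements). A failing run is retried up to three times before pytest reports the failure, which absorbs timing noise in the profiler-overhead assertion. A minimal usage sketch:

import time

import pytest


@pytest.mark.flaky(reruns=3)  # requires the pytest-rerunfailures plugin
def test_timing_sensitive():
    # Timing bounds like the profiler-overhead check can fail sporadically on
    # a loaded CI machine; reruns give the test a few more chances to pass.
    start = time.monotonic()
    time.sleep(0.01)
    assert time.monotonic() - start < 0.5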

tests/trainer/logging_/test_distributed_logging.py

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ def on_train_end(self):
         assert self.log_name.format(rank=self.local_rank) in self.logger.logs, "Expected rank to be logged"
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_all_rank_logging_ddp_cpu(tmpdir):
     """Check that all ranks can be logged from."""
     model = TestModel()

tests/trainer/logging_/test_train_loop_logging.py

Lines changed: 1 addition & 1 deletion
@@ -395,7 +395,7 @@ def validation_step(self, batch, batch_idx):
         return super().validation_step(batch, batch_idx)
 
 
-@pytest.mark.parametrize("devices", [1, pytest.param(2, marks=RunIf(skip_windows=True))])
+@pytest.mark.parametrize("devices", [1, pytest.param(2, marks=RunIf(skip_windows=True, skip_49370=True))])
 def test_logging_sync_dist_true(tmpdir, devices):
     """Tests to ensure that the sync_dist flag works (should just return the original value)"""
     fake_result = 1

tests/trainer/properties/test_get_model.py

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ def test_get_model(tmpdir):
     trainer.fit(model)
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_get_model_ddp_cpu(tmpdir):
     """Tests that `trainer.lightning_module` extracts the model correctly when using ddp on cpu."""
 

tests/trainer/test_data_loading.py

Lines changed: 1 addition & 1 deletion
@@ -133,7 +133,7 @@ def _get_warning_msg():
     assert warn_str in msg
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 @pytest.mark.parametrize("num_workers", [0, 1])
 def test_dataloader_warnings(tmpdir, num_workers):
     trainer = Trainer(default_root_dir=tmpdir, strategy="ddp_spawn", num_processes=2, fast_dev_run=4)

tests/trainer/test_trainer.py

Lines changed: 2 additions & 2 deletions
@@ -1809,7 +1809,7 @@ def on_predict_start(self) -> None:
 
 
 @pytest.mark.parametrize(
-    "strategy,num_processes", [(None, 1), pytest.param("ddp_spawn", 2, marks=RunIf(skip_windows=True))]
+    "strategy,num_processes", [(None, 1), pytest.param("ddp_spawn", 2, marks=RunIf(skip_windows=True, skip_49370=True))]
 )
 def test_model_in_correct_mode_during_stages(tmpdir, strategy, num_processes):
     model = TrainerStagesModel()
@@ -1830,7 +1830,7 @@ def validation_epoch_end(self, outputs) -> None:
         pass
 
 
-@RunIf(skip_windows=True)
+@RunIf(skip_windows=True, skip_49370=True)
 def test_fit_test_synchronization(tmpdir):
     """Test that the trainer synchronizes processes before returning control back to the caller."""
     tutils.set_random_main_port()

tests/utilities/test_all_gather_grad.py

Lines changed: 2 additions & 2 deletions
@@ -41,8 +41,8 @@ def _test_all_gather_ddp(rank, world_size):
     assert torch.allclose(grad2, tensor2.grad)
 
 
-@RunIf(skip_windows=True)
-def test_all_gather_ddp():
+@RunIf(skip_windows=True, skip_49370=True)
+def test_all_gather_ddp_spawn():
     world_size = 3
     torch.multiprocessing.spawn(_test_all_gather_ddp, args=(world_size,), nprocs=world_size)
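Note on the renamed test: it drives torch.multiprocessing.spawn directly instead of going through the Trainer, using gloo, the CPU process-group backend behind ddp_spawn and the combination hit by pytorch/pytorch#49370 on Python >= 3.9 with torch < 1.8. A minimal sketch of the shape such a worker takes (the env-var rendezvous and the toy all_gather are assumptions; the diff does not show the body of _test_all_gather_ddp):

import os

import torch
import torch.distributed as dist
import torch.multiprocessing as mp


def _worker(rank, world_size):
    # Each spawned process joins a gloo process group over localhost.
    os.environ["MASTER_ADDR"] = "127.0.0.1"
    os.environ["MASTER_PORT"] = "29501"
    dist.init_process_group("gloo", rank=rank, world_size=world_size)
    gathered = [torch.zeros(1) for _ in range(world_size)]
    dist.all_gather(gathered, torch.ones(1) * rank)  # collect every rank's tensor
    assert torch.cat(gathered).sum() == sum(range(world_size))
    dist.destroy_process_group()


if __name__ == "__main__":
    mp.spawn(_worker, args=(3,), nprocs=3)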
