
Commit 7eb2edf

awaelchli and tchaton authored
rename set_random_master_port (#10104)
Co-authored-by: tchaton <[email protected]>
1 parent 64fc0d4 commit 7eb2edf

16 files changed with 23 additions and 23 deletions

tests/accelerators/test_common.py

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@
     ),
 )
 def test_evaluate(tmpdir, trainer_kwargs):
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
     seed_everything(1)
     dm = ClassifDataModule()
     model = CustomClassificationModelDP()

tests/accelerators/test_ddp_spawn.py

Lines changed: 3 additions & 3 deletions
@@ -24,7 +24,7 @@
 
 @RunIf(min_gpus=2)
 def test_multi_gpu_early_stop_ddp_spawn(tmpdir):
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     trainer_options = dict(
         default_root_dir=tmpdir,
@@ -43,7 +43,7 @@ def test_multi_gpu_early_stop_ddp_spawn(tmpdir):
 
 @RunIf(min_gpus=2)
 def test_multi_gpu_model_ddp_spawn(tmpdir):
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     trainer_options = dict(
         default_root_dir=tmpdir,
@@ -66,7 +66,7 @@ def test_multi_gpu_model_ddp_spawn(tmpdir):
 @RunIf(min_gpus=2)
 def test_ddp_all_dataloaders_passed_to_fit(tmpdir):
     """Make sure DDP works with dataloaders passed to fit()"""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     model = BoringModel()
     fit_options = dict(train_dataloader=model.train_dataloader(), val_dataloaders=model.val_dataloader())

tests/accelerators/test_dp.py

Lines changed: 2 additions & 2 deletions
@@ -59,7 +59,7 @@ def test_multi_gpu_early_stop_dp(tmpdir):
 
     with early stopping
     """
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     dm = ClassifDataModule()
     model = CustomClassificationModelDP()
@@ -79,7 +79,7 @@ def test_multi_gpu_early_stop_dp(tmpdir):
 
 @RunIf(min_gpus=2)
 def test_multi_gpu_model_dp(tmpdir):
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     trainer_options = dict(
         default_root_dir=tmpdir,

tests/core/test_metric_result_integration.py

Lines changed: 1 addition & 1 deletion
@@ -103,7 +103,7 @@ def _ddp_test_fn(rank, worldsize):
 @RunIf(skip_windows=True, min_gpus=2)
 def test_result_reduce_ddp():
     """Make sure result logging works with DDP."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     worldsize = 2
     mp.spawn(_ddp_test_fn, args=(worldsize,), nprocs=worldsize)

tests/core/test_results.py

Lines changed: 1 addition & 1 deletion
@@ -41,6 +41,6 @@ def _ddp_test_fn(rank, worldsize):
 @RunIf(skip_windows=True)
 def test_result_reduce_ddp():
     """Make sure result logging works with DDP."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
     worldsize = 2
     mp.spawn(_ddp_test_fn, args=(worldsize,), nprocs=worldsize)
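The spawn-based tests touched in this commit share one pattern: the parent pytest process calls `tutils.set_random_main_port()` to pick a fresh MASTER_PORT, then `mp.spawn` launches the workers, which rendezvous through that port. A minimal self-contained sketch of the pattern, with a stand-in worker body rather than the real `_ddp_test_fn`:

import os

import torch
import torch.distributed as dist
import torch.multiprocessing as mp


def _worker(rank, worldsize):
    # Workers rendezvous via the MASTER_ADDR/MASTER_PORT env vars set by the
    # parent before spawning (set_random_main_port handles the port).
    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    dist.init_process_group("gloo", rank=rank, world_size=worldsize)
    tensor = torch.tensor([float(rank)])
    dist.all_reduce(tensor)  # defaults to SUM across all ranks
    assert tensor.item() == sum(range(worldsize))
    dist.destroy_process_group()


if __name__ == "__main__":
    os.environ["MASTER_PORT"] = "12910"  # set_random_main_port() picks this at random
    worldsize = 2
    mp.spawn(_worker, args=(worldsize,), nprocs=worldsize)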

tests/helpers/utils.py

Lines changed: 1 addition & 1 deletion
@@ -73,7 +73,7 @@ def reset_seed(seed=0):
     seed_everything(seed)
 
 
-def set_random_master_port():
+def set_random_main_port():
     reset_seed()
     port = RANDOM_PORTS.pop()
     os.environ["MASTER_PORT"] = str(port)

tests/models/data/horovod/train_default_model.py

Lines changed: 2 additions & 2 deletions
@@ -37,7 +37,7 @@
     print("You requested to import Horovod which is missing or not supported for your OS.")
 
 from tests.helpers import BoringModel  # noqa: E402
-from tests.helpers.utils import reset_seed, set_random_master_port  # noqa: E402
+from tests.helpers.utils import reset_seed, set_random_main_port  # noqa: E402
 
 parser = argparse.ArgumentParser()
 parser.add_argument("--trainer-options", required=True)
@@ -46,7 +46,7 @@
 
 def run_test_from_config(trainer_options, on_gpu, check_size=True):
     """Trains the default model with the given config."""
-    set_random_master_port()
+    set_random_main_port()
     reset_seed()
 
     ckpt_path = trainer_options["weights_save_path"]

tests/models/test_amp.py

Lines changed: 1 addition & 1 deletion
@@ -129,7 +129,7 @@ def test_amp_gpus(tmpdir, strategy, precision, gpus):
 def test_amp_gpu_ddp_slurm_managed(tmpdir):
     """Make sure DDP + AMP work."""
     # simulate setting slurm flags
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     model = AMPTestModel()
 
tests/models/test_cpu.py

Lines changed: 1 addition & 1 deletion
@@ -125,7 +125,7 @@ def validation_step(self, *args, **kwargs):
 @RunIf(skip_windows=True)
 def test_multi_cpu_model_ddp(tmpdir):
     """Make sure DDP works."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     trainer_options = dict(
         default_root_dir=tmpdir,

tests/models/test_gpu.py

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@
 @RunIf(min_gpus=2)
 def test_multi_gpu_none_backend(tmpdir):
     """Make sure when using multiple GPUs the user can't use `accelerator = None`."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
     trainer_options = dict(
         default_root_dir=tmpdir,
         enable_progress_bar=False,

tests/models/test_horovod.py

Lines changed: 1 addition & 1 deletion
@@ -273,7 +273,7 @@ def test_result_reduce_horovod(tmpdir):
     This test mirrors tests/core/test_results.py::_ddp_test_fn
     """
     tutils.reset_seed()
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     def hvd_test_fn():
         path_here = os.path.abspath(os.path.dirname(__file__))

tests/models/test_onnx.py

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ def test_model_saves_with_example_input_array(tmpdir, modelclass, input_sample):
 @RunIf(min_gpus=2)
 def test_model_saves_on_multi_gpu(tmpdir):
     """Test that ONNX model saves on a distributed backend."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     trainer_options = dict(
         default_root_dir=tmpdir,

tests/models/test_restore.py

Lines changed: 2 additions & 2 deletions
@@ -304,7 +304,7 @@ def test_callbacks_references_resume_from_checkpoint(tmpdir):
 def test_running_test_pretrained_model_distrib_dp(tmpdir):
     """Verify `test()` on pretrained model."""
 
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
 
     dm = ClassifDataModule()
     model = CustomClassificationModelDP(lr=0.1)
@@ -351,7 +351,7 @@ def test_running_test_pretrained_model_distrib_dp(tmpdir):
 @RunIf(min_gpus=2)
 def test_running_test_pretrained_model_distrib_ddp_spawn(tmpdir):
     """Verify `test()` on pretrained model."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
     dm = ClassifDataModule()
     model = ClassificationModel()
 

tests/models/test_sync_batchnorm.py

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@
 from pytorch_lightning.utilities import FLOAT16_EPSILON
 from tests.helpers.datamodules import MNISTDataModule
 from tests.helpers.runif import RunIf
-from tests.helpers.utils import set_random_master_port
+from tests.helpers.utils import set_random_main_port
 
 
 class SyncBNModule(LightningModule):
@@ -70,7 +70,7 @@ def configure_optimizers(self):
 @RunIf(min_gpus=2, special=True)
 def test_sync_batchnorm_ddp(tmpdir):
     seed_everything(234)
-    set_random_master_port()
+    set_random_main_port()
 
     # define datamodule and dataloader
     dm = MNISTDataModule()

tests/trainer/test_trainer.py

Lines changed: 1 addition & 1 deletion
@@ -1834,7 +1834,7 @@ def validation_epoch_end(self, outputs) -> None:
 @RunIf(skip_windows=True)
 def test_fit_test_synchronization(tmpdir):
     """Test that the trainer synchronizes processes before returning control back to the caller."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
     model = TestDummyModelForCheckpoint()
     checkpoint = ModelCheckpoint(dirpath=tmpdir, monitor="x", mode="min", save_top_k=1)
     trainer = Trainer(

tests/utilities/test_auto_restart.py

Lines changed: 2 additions & 2 deletions
@@ -358,7 +358,7 @@ def _test_fast_forward_sampler_with_distributed_sampler(rank, worldsize):
 @RunIf(skip_windows=True)
 def test_fast_forward_sampler_with_distributed_sampler():
     """Make sure result logging works with DDP."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
     worldsize = 2
     mp.spawn(_test_fast_forward_sampler_with_distributed_sampler, args=(worldsize,), nprocs=worldsize)
 
@@ -632,7 +632,7 @@ def test_fast_forward_sampler_iterative_dataset():
 @RunIf(skip_windows=True)
 def test_fast_forward_sampler_with_distributed_sampler_and_iterative_dataset():
     """Make sure result logging works with DDP."""
-    tutils.set_random_master_port()
+    tutils.set_random_main_port()
     worldsize = 2
     mp.spawn(
         _test_fast_forward_sampler_with_distributed_sampler_and_iterative_dataset, args=(worldsize,), nprocs=worldsize
