2 files changed in `pytorch_lightning/plugins/collective`: +9 / -8 lines.

First file:

```diff
@@ -32,14 +32,14 @@ def __init__(
         self,
         on_gpu: Optional[bool] = False,
         local_rank: int = 0,
-    ):
+    ) -> None:
         self.on_gpu = on_gpu
         self.local_rank = local_rank

     def join(self) -> None:
         """Horovod function that indicates that the rank finished processing data.

-        All ranks that did not call join() continue to process allreduce operations. This function blocks Python thread
+        All ranks that did not call join() continue to process allreduce operations. This function blocks the Python thread
         until all ranks join.
         """
         if self.on_gpu:
```
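For context, a minimal sketch of the join() pattern this docstring describes, assuming Horovod's PyTorch bindings; the uneven per-rank workload below is contrived for illustration and is not part of this PR:

```python
# Sketch only: assumes Horovod with PyTorch support is installed.
import horovod.torch as hvd
import torch

hvd.init()

# Give each rank a different number of "batches" so they finish at
# different times; early finishers must keep answering allreduce calls
# from the slower ranks until everyone is done.
num_batches = 2 + hvd.rank()
for _ in range(num_batches):
    local = torch.ones(1)
    # Collective op: every rank that has not yet joined participates.
    avg = hvd.allreduce(local, name="loss")

# Blocks this Python thread until every rank has called join(). On GPU,
# pass the local device index so Horovod stages the sync on that device.
device = hvd.local_rank() if torch.cuda.is_available() else -1
last_rank_to_join = hvd.join(device)
```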
Second file:

```diff
@@ -36,12 +36,12 @@ def __init__(
         device: Optional[Union[str, torch.device]] = torch.device("cpu"),
         device_id: Optional[int] = None,
         world_size: int = 1,
-    ):
-        """.. note::
-
-            DDP and DDPSpawn sync accross multiple nodes/devices, local_reduce = False
-            DP run reduce in on node, local_reduce = True
-            DDP2 behaves like DP in one node, local_reduce = True
+    ) -> None:
+        """
+        Note:
+            DDP and DDPSpawn sync across multiple nodes/devices, local_reduce = False
+            DP runs reduce in one node, local_reduce = True
+            DDP2 behaves like DP in one node, local_reduce = True

         local_reduce set in Plugins.setup() functions
         """
```
Later in the same file, the reduce() docstring gains a separating blank line:

```diff
@@ -84,6 +84,7 @@ def reduce(
         If local_reduce = True (dp and ddp2), reduces tensor from all local processes.

         If local_reduce = False (ddp, ddpspawning and extentions), reduces a tensor from several distributed processes
+
         Args:
             tensor: the tensor to sync and reduce
             group: the process group to gather results from. Defaults to all processes (world)
```
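A hedged usage sketch for these two arguments, written against plain `torch.distributed` rather than the plugin API; the two-rank subgroup is an assumption, and `init_process_group()` is presumed to have run already:

```python
# Hedged sketch of the tensor/group arguments, not the plugin's own code.
import torch
import torch.distributed as dist

subgroup = dist.new_group(ranks=[0, 1])      # gather results from two ranks
value = torch.tensor([float(dist.get_rank())])

# group=None would mean the default (world) group, matching the docstring's
# "Defaults to all processes (world)".
if dist.get_rank() in (0, 1):
    dist.all_reduce(value, op=dist.ReduceOp.SUM, group=subgroup)
```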