Skip to content

Commit 0040f01

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 400bc59 commit 0040f01

File tree

2 files changed: +7 additions, −6 deletions

src/lightning/fabric/strategies/launchers/multiprocessing.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,16 +22,16 @@
2222

2323
from lightning.fabric.strategies.launchers.launcher import _Launcher
2424
from lightning.fabric.utilities.apply_func import move_data_to_device
25-
from lightning.fabric.utilities.imports import _IS_INTERACTIVE
25+
from lightning.fabric.utilities.imports import _IS_INTERACTIVE, _LIGHTNING_XPU_AVAILABLE
2626
from lightning.fabric.utilities.seed import _collect_rng_states, _set_rng_states
27-
from lightning.fabric.utilities.imports import _LIGHTNING_XPU_AVAILABLE
2827

2928
if TYPE_CHECKING:
3029
from lightning.fabric.strategies import ParallelStrategy
31-
30+
3231
if _LIGHTNING_XPU_AVAILABLE:
3332
from lightning_xpu.fabric import XPUAccelerator
3433

34+
3535
class _MultiProcessingLauncher(_Launcher):
3636
r"""Launches processes that run a given function in parallel, and joins them all at the end.
3737
@@ -193,6 +193,7 @@ def _check_bad_cuda_fork() -> None:
193193
message += " You will have to restart the Python kernel."
194194
raise RuntimeError(message)
195195

196+
196197
def _check_bad_xpu_fork() -> None:
197198
"""Checks whether it is safe to fork and initialize XPU in the new processes, and raises an exception if not.
198199
@@ -209,4 +210,4 @@ def _check_bad_xpu_fork() -> None:
209210
)
210211
if _IS_INTERACTIVE:
211212
message += " You will have to restart the Python kernel."
212-
raise RuntimeError(message)
213+
raise RuntimeError(message)

src/lightning/pytorch/strategies/launchers/multiprocessing.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,12 +34,12 @@
3434
from lightning.pytorch.strategies.launchers.launcher import _Launcher
3535
from lightning.pytorch.trainer.connectors.signal_connector import _SIGNUM
3636
from lightning.pytorch.trainer.states import TrainerFn, TrainerState
37-
from lightning.pytorch.utilities.rank_zero import rank_zero_debug
3837
from lightning.pytorch.utilities.imports import _LIGHTNING_XPU_AVAILABLE
38+
from lightning.pytorch.utilities.rank_zero import rank_zero_debug
3939

4040
if _LIGHTNING_XPU_AVAILABLE:
4141
from lightning_xpu.pytorch import XPUAccelerator
42-
42+
4343
log = logging.getLogger(__name__)
4444

4545

0 commit comments

Comments (0)