Skip to content

Introduce the graveyard 🪦 #15061

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 16 commits into from
Oct 11, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions src/pytorch_lightning/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,9 @@ def _detail(self: Any, message: str, *args: Any, **kwargs: Any) -> None:
from pytorch_lightning.core import LightningDataModule, LightningModule # noqa: E402
from pytorch_lightning.trainer import Trainer # noqa: E402

# this import needs to go last as it will patch other modules
import pytorch_lightning._graveyard # noqa: E402, F401 # isort: skip

__all__ = ["Trainer", "LightningDataModule", "LightningModule", "Callback", "seed_everything"]

# for compatibility with namespace packages
Expand Down
1 change: 1 addition & 0 deletions src/pytorch_lightning/_graveyard/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
import pytorch_lightning._graveyard.training_type # noqa: F401
74 changes: 74 additions & 0 deletions src/pytorch_lightning/_graveyard/training_type.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
import sys
from typing import Any

import pytorch_lightning as pl


def _patch_sys_modules() -> None:
self = sys.modules[__name__]
sys.modules["pytorch_lightning.plugins.training_type"] = self
sys.modules["pytorch_lightning.plugins.training_type.ddp"] = self
sys.modules["pytorch_lightning.plugins.training_type.ddp2"] = self
sys.modules["pytorch_lightning.plugins.training_type.ddp_spawn"] = self
sys.modules["pytorch_lightning.plugins.training_type.deepspeed"] = self
sys.modules["pytorch_lightning.plugins.training_type.dp"] = self
sys.modules["pytorch_lightning.plugins.training_type.fully_sharded"] = self
sys.modules["pytorch_lightning.plugins.training_type.horovod"] = self
sys.modules["pytorch_lightning.plugins.training_type.ipu"] = self
sys.modules["pytorch_lightning.plugins.training_type.parallel"] = self
sys.modules["pytorch_lightning.plugins.training_type.sharded"] = self
sys.modules["pytorch_lightning.plugins.training_type.sharded_spawn"] = self
sys.modules["pytorch_lightning.plugins.training_type.single_device"] = self
sys.modules["pytorch_lightning.plugins.training_type.single_tpu"] = self
sys.modules["pytorch_lightning.plugins.training_type.tpu_spawn"] = self
sys.modules["pytorch_lightning.plugins.training_type.training_type_plugin"] = self
sys.modules["pytorch_lightning.plugins.training_type.utils"] = self


def _ttp_constructor(self: Any, *_: Any, **__: Any) -> None:
raise RuntimeError(
f"The `pl.plugins.{self._name}Plugin` class was removed in v1.8. Use `pl.strategies.{self._name}Strategy`"
" instead."
)


def _patch_plugin_classes() -> None:
    """Create raising stand-ins for each removed ``*Plugin`` class.

    Each stub is attached both to this module (which shadows the old submodule paths via
    ``_patch_sys_modules``) and to ``pl.plugins``, so any of the old import spellings resolves
    to a class whose instantiation raises a removal error.
    """
    graveyard = sys.modules[__name__]
    stems = (
        "DDP",
        "DDP2",
        "DDPSpawn",
        "DeepSpeed",
        "DataParallel",
        "DDPFullySharded",
        "Horovod",
        "IPU",
        "Parallel",
        "DDPSharded",
        "DDPSpawnSharded",
        "SingleDevice",
        "SingleTPU",
        "TPUSpawn",
        "TrainingType",
    )
    for stem in stems:
        cls_name = f"{stem}Plugin"
        stub = type(cls_name, (object,), {"__init__": _ttp_constructor, "_name": stem})
        setattr(graveyard, cls_name, stub)
        # `pl.plugins` is still a real, live module, so patch its attributes in place
        # rather than replacing its sys.modules entry.
        setattr(pl.plugins, cls_name, stub)


def on_colab_kaggle() -> None:
    """Removed in v1.8 — kept under the old import path only to raise a helpful error."""
    message = (
        "`pl.plugins.training_type.utils.on_colab_kaggle` was removed in v1.8."
        " Use `pl.strategies.utils.on_colab_kaggle` instead."
    )
    raise RuntimeError(message)


def _training_type_plugin(_: pl.Trainer) -> None:
    """Property getter installed on ``Trainer``; any attribute access raises the removal error."""
    message = "`Trainer.training_type_plugin` was removed in v1.8. Use `Trainer.strategy` instead."
    raise RuntimeError(message)


# Apply all graveyard patches at import time: alias the removed module paths and
# install the raising plugin-class stubs.
_patch_sys_modules()
_patch_plugin_classes()
# Installed as a property so that any attribute *access* (not just a call) raises.
pl.Trainer.training_type_plugin = property(_training_type_plugin)
93 changes: 93 additions & 0 deletions tests/tests_pytorch/graveyard/test_training_type.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
from importlib import import_module

import pytest

from pytorch_lightning import Trainer


def test_removed_training_type_plugin_property():
    """Accessing the removed ``Trainer.training_type_plugin`` property must raise."""
    with pytest.raises(RuntimeError, match="training_type_plugin` was removed"):
        _ = Trainer().training_type_plugin


@pytest.mark.parametrize(
    "name",
    [
        "DDPPlugin",
        "DDP2Plugin",
        "DDPSpawnPlugin",
        "DeepSpeedPlugin",
        "DataParallelPlugin",
        "DDPFullyShardedPlugin",
        "HorovodPlugin",
        "IPUPlugin",
        "ParallelPlugin",
        "DDPShardedPlugin",
        "DDPSpawnShardedPlugin",
        "SingleDevicePlugin",
        "SingleTPUPlugin",
        "TPUSpawnPlugin",
        "TrainingTypePlugin",
    ],
)
@pytest.mark.parametrize("import_path", ("pytorch_lightning.plugins", "pytorch_lightning.plugins.training_type"))
def test_removed_training_type_plugin_classes(name, import_path):
    """Each removed plugin class resolves from both old import paths but raises on instantiation."""
    plugin_cls = getattr(import_module(import_path), name)
    with pytest.raises(RuntimeError, match=f"{name}` class was removed"):
        plugin_cls()


def test_removed_training_type_plugin_classes_inner_import():
    """The removed classes stay importable from their old deep module paths but raise on instantiation."""
    # The function-local imports are the point of this test: they exercise the
    # sys.modules aliasing done by the graveyard patching.
    from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
    from pytorch_lightning.plugins.training_type.ddp2 import DDP2Plugin
    from pytorch_lightning.plugins.training_type.ddp_spawn import DDPSpawnPlugin
    from pytorch_lightning.plugins.training_type.deepspeed import DeepSpeedPlugin
    from pytorch_lightning.plugins.training_type.dp import DataParallelPlugin
    from pytorch_lightning.plugins.training_type.fully_sharded import DDPFullyShardedPlugin
    from pytorch_lightning.plugins.training_type.horovod import HorovodPlugin
    from pytorch_lightning.plugins.training_type.ipu import IPUPlugin
    from pytorch_lightning.plugins.training_type.parallel import ParallelPlugin
    from pytorch_lightning.plugins.training_type.sharded import DDPShardedPlugin
    from pytorch_lightning.plugins.training_type.sharded_spawn import DDPSpawnShardedPlugin
    from pytorch_lightning.plugins.training_type.single_device import SingleDevicePlugin
    from pytorch_lightning.plugins.training_type.single_tpu import SingleTPUPlugin
    from pytorch_lightning.plugins.training_type.tpu_spawn import TPUSpawnPlugin
    from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin

    removed_classes = (
        DDPPlugin,
        DDP2Plugin,
        DDPSpawnPlugin,
        DeepSpeedPlugin,
        DataParallelPlugin,
        DDPFullyShardedPlugin,
        HorovodPlugin,
        IPUPlugin,
        ParallelPlugin,
        DDPShardedPlugin,
        DDPSpawnShardedPlugin,
        SingleDevicePlugin,
        SingleTPUPlugin,
        TPUSpawnPlugin,
        TrainingTypePlugin,
    )
    for plugin_cls in removed_classes:
        # The graveyard builds each stub with ``type(name + "Plugin", ...)``,
        # so ``__name__`` matches the old public class name.
        with pytest.raises(RuntimeError, match=f"{plugin_cls.__name__}` class was removed"):
            plugin_cls()

    from pytorch_lightning.plugins.training_type.utils import on_colab_kaggle

    with pytest.raises(RuntimeError, match="on_colab_kaggle` was removed"):
        on_colab_kaggle()