Skip to content

Commit 4bd135a

Browse files
awaelchli, rohitgr7, and Borda
authored
Remove deprecated LoggerCollection (#14283)
Co-authored-by: Rohit Gupta <[email protected]> Co-authored-by: Jirka Borovec <[email protected]>
1 parent f73b31b commit 4bd135a

File tree

10 files changed

+21
-271
lines changed

10 files changed

+21
-271
lines changed

src/pytorch_lightning/CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
128128
- Removed the experimental `pytorch_lightning.utilities.meta` functions in favor of built-in https://github.com/pytorch/torchdistx support ([#13868](https://github.com/Lightning-AI/lightning/pull/13868))
129129

130130

131+
- Removed the deprecated `LoggerCollection`; `Trainer.logger` and `LightningModule.logger` now return the first logger when more than one gets passed to the Trainer ([#14283](https://github.com/Lightning-AI/lightning/pull/14283))
132+
133+
131134
- Removed the deprecated `trainer.lr_schedulers` ([#14408](https://github.com/Lightning-AI/lightning/pull/14408))
132135

133136

src/pytorch_lightning/core/module.py

Lines changed: 2 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
import numbers
1919
import os
2020
import tempfile
21-
import warnings
2221
import weakref
2322
from contextlib import contextmanager
2423
from pathlib import Path
@@ -43,7 +42,7 @@
4342
from pytorch_lightning.core.mixins import HyperparametersMixin
4443
from pytorch_lightning.core.optimizer import LightningOptimizer
4544
from pytorch_lightning.core.saving import ModelIO
46-
from pytorch_lightning.loggers import Logger, LoggerCollection
45+
from pytorch_lightning.loggers import Logger
4746
from pytorch_lightning.trainer.connectors.logger_connector.fx_validator import _FxValidator
4847
from pytorch_lightning.utilities import _IS_WINDOWS, _TORCH_GREATER_EQUAL_1_10, GradClipAlgorithmType
4948
from pytorch_lightning.utilities.exceptions import MisconfigurationException
@@ -267,26 +266,7 @@ def truncated_bptt_steps(self, truncated_bptt_steps: int) -> None:
267266
@property
268267
def logger(self) -> Optional[Logger]:
269268
"""Reference to the logger object in the Trainer."""
270-
# this should match the implementation of `trainer.logger`
271-
# we don't reuse it so we can properly set the deprecation stacklevel
272-
if self._trainer is None:
273-
return None
274-
loggers = self.trainer.loggers
275-
if len(loggers) == 0:
276-
return None
277-
if len(loggers) == 1:
278-
return loggers[0]
279-
else:
280-
if not self._running_torchscript:
281-
rank_zero_deprecation(
282-
"Using `lightning_module.logger` when multiple loggers are configured."
283-
" This behavior will change in v1.8 when `LoggerCollection` is removed, and"
284-
" `lightning_module.logger` will return the first logger available.",
285-
stacklevel=5,
286-
)
287-
with warnings.catch_warnings():
288-
warnings.simplefilter("ignore")
289-
return LoggerCollection(loggers)
269+
return self._trainer.logger if self._trainer is not None else None
290270

291271
@property
292272
def loggers(self) -> List[Logger]:

src/pytorch_lightning/loggers/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,13 @@
1717
from pytorch_lightning.loggers.base import LightningLoggerBase
1818
from pytorch_lightning.loggers.comet import _COMET_AVAILABLE, CometLogger # noqa: F401
1919
from pytorch_lightning.loggers.csv_logs import CSVLogger
20-
from pytorch_lightning.loggers.logger import Logger, LoggerCollection
20+
from pytorch_lightning.loggers.logger import Logger
2121
from pytorch_lightning.loggers.mlflow import _MLFLOW_AVAILABLE, MLFlowLogger # noqa: F401
2222
from pytorch_lightning.loggers.neptune import NeptuneLogger # noqa: F401
2323
from pytorch_lightning.loggers.tensorboard import TensorBoardLogger
2424
from pytorch_lightning.loggers.wandb import WandbLogger # noqa: F401
2525

26-
__all__ = ["CSVLogger", "LightningLoggerBase", "Logger", "LoggerCollection", "TensorBoardLogger"]
26+
__all__ = ["CSVLogger", "LightningLoggerBase", "Logger", "TensorBoardLogger"]
2727

2828
if _COMET_AVAILABLE:
2929
__all__.append("CometLogger")

src/pytorch_lightning/loggers/base.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -57,11 +57,6 @@ def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def]
5757
super().__init__(*args, **kwargs)
5858

5959

60-
class LoggerCollection(logger.LoggerCollection):
61-
def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def]
62-
super().__init__(*args, **kwargs)
63-
64-
6560
class DummyExperiment(logger.DummyExperiment):
6661
def __init__(self, *args, **kwargs) -> None: # type: ignore[no-untyped-def]
6762
rank_zero_deprecation(

src/pytorch_lightning/loggers/logger.py

Lines changed: 1 addition & 96 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
from argparse import Namespace
2121
from collections import defaultdict
2222
from functools import wraps
23-
from typing import Any, Callable, Dict, Generator, Iterable, List, Mapping, Optional, Sequence, Union
23+
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Union
2424
from weakref import ReferenceType
2525

2626
import numpy as np
@@ -212,97 +212,6 @@ def version(self) -> Optional[Union[int, str]]:
212212
"""Return the experiment version."""
213213

214214

215-
class LoggerCollection(Logger):
216-
"""The :class:`LoggerCollection` class is used to iterate all logging actions over the given `logger_iterable`.
217-
218-
.. deprecated:: v1.6
219-
`LoggerCollection` is deprecated in v1.6 and will be removed in v1.8.
220-
Directly pass a list of loggers to the Trainer and access the list via the `trainer.loggers` attribute.
221-
222-
Args:
223-
logger_iterable: An iterable collection of loggers
224-
"""
225-
226-
def __init__(self, logger_iterable: Iterable[Logger]):
227-
super().__init__()
228-
self._logger_iterable = logger_iterable
229-
rank_zero_deprecation(
230-
"`LoggerCollection` is deprecated in v1.6 and will be removed in v1.8. Directly pass a list of loggers"
231-
" to the Trainer and access the list via the `trainer.loggers` attribute."
232-
)
233-
234-
def __getitem__(self, index: int) -> Logger:
235-
return list(self._logger_iterable)[index]
236-
237-
def after_save_checkpoint(self, checkpoint_callback: "ReferenceType[Checkpoint]") -> None:
238-
for logger in self._logger_iterable:
239-
logger.after_save_checkpoint(checkpoint_callback)
240-
241-
def update_agg_funcs(
242-
self,
243-
agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
244-
agg_default_func: Callable[[Sequence[float]], float] = np.mean,
245-
) -> None:
246-
for logger in self._logger_iterable:
247-
logger.update_agg_funcs(agg_key_funcs, agg_default_func)
248-
249-
@property
250-
def experiment(self) -> List[Any]:
251-
"""Returns a list of experiment objects for all the loggers in the logger collection."""
252-
return [logger.experiment for logger in self._logger_iterable]
253-
254-
def agg_and_log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
255-
for logger in self._logger_iterable:
256-
logger.agg_and_log_metrics(metrics=metrics, step=step)
257-
258-
def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
259-
for logger in self._logger_iterable:
260-
logger.log_metrics(metrics=metrics, step=step)
261-
262-
def log_hyperparams(self, params: Union[Dict[str, Any], Namespace], *args: Any, **kwargs: Any) -> None:
263-
for logger in self._logger_iterable:
264-
logger.log_hyperparams(params, *args, **kwargs)
265-
266-
def log_graph(self, model: "pl.LightningModule", input_array: Optional[Tensor] = None) -> None:
267-
for logger in self._logger_iterable:
268-
logger.log_graph(model, input_array)
269-
270-
def log_text(self, *args: Any, **kwargs: Any) -> None:
271-
for logger in self._logger_iterable:
272-
logger.log_text(*args, **kwargs)
273-
274-
def log_image(self, *args: Any, **kwargs: Any) -> None:
275-
for logger in self._logger_iterable:
276-
logger.log_image(*args, **kwargs)
277-
278-
def save(self) -> None:
279-
for logger in self._logger_iterable:
280-
logger.save()
281-
282-
def finalize(self, status: str) -> None:
283-
for logger in self._logger_iterable:
284-
logger.finalize(status)
285-
286-
@property
287-
def save_dir(self) -> Optional[str]:
288-
"""Returns ``None`` as checkpoints should be saved to default / chosen location when using multiple
289-
loggers."""
290-
# Checkpoints should be saved to default / chosen location when using multiple loggers
291-
return None
292-
293-
@property
294-
def name(self) -> str:
295-
"""Returns the unique experiment names for all the loggers in the logger collection joined by an
296-
underscore."""
297-
return "_".join(dict.fromkeys(str(logger.name) for logger in self._logger_iterable))
298-
299-
@property
300-
def version(self) -> str:
301-
"""Returns the unique experiment versions for all the loggers in the logger collection joined by an
302-
underscore."""
303-
return "_".join(dict.fromkeys(str(logger.version) for logger in self._logger_iterable))
304-
305-
306215
class DummyExperiment:
307216
"""Dummy experiment."""
308217

@@ -355,10 +264,6 @@ def __getitem__(self, idx: int) -> "DummyLogger":
355264
# enables self.logger[0].experiment.add_image(...)
356265
return self
357266

358-
def __iter__(self) -> Generator[None, None, None]:
359-
# if DummyLogger is substituting a logger collection, pretend it is empty
360-
yield from ()
361-
362267
def __getattr__(self, name: str) -> Callable:
363268
"""Allows the DummyLogger to be called with arbitrary methods, to avoid AttributeErrors."""
364269

src/pytorch_lightning/trainer/trainer.py

Lines changed: 2 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@
5656
from pytorch_lightning.core.datamodule import LightningDataModule
5757
from pytorch_lightning.core.optimizer import LightningOptimizer
5858
from pytorch_lightning.loggers import Logger
59-
from pytorch_lightning.loggers.logger import DummyLogger, LoggerCollection
59+
from pytorch_lightning.loggers.logger import DummyLogger
6060
from pytorch_lightning.loggers.tensorboard import TensorBoardLogger
6161
from pytorch_lightning.loops import PredictionLoop, TrainingEpochLoop
6262
from pytorch_lightning.loops.dataloader.evaluation_loop import EvaluationLoop
@@ -2604,28 +2604,12 @@ def _active_loop(self) -> Optional[Union[FitLoop, EvaluationLoop, PredictionLoop
26042604

26052605
@property
26062606
def logger(self) -> Optional[Logger]:
2607-
loggers = self.loggers
2608-
if len(loggers) == 0:
2609-
return None
2610-
if len(loggers) == 1:
2611-
return loggers[0]
2612-
else:
2613-
rank_zero_deprecation(
2614-
"Using `trainer.logger` when multiple loggers are configured."
2615-
" This behavior will change in v1.8 when `LoggerCollection` is removed, and"
2616-
" `trainer.logger` will return the first logger available.",
2617-
stacklevel=5,
2618-
)
2619-
with warnings.catch_warnings():
2620-
warnings.simplefilter("ignore")
2621-
return LoggerCollection(loggers)
2607+
return self.loggers[0] if len(self.loggers) > 0 else None
26222608

26232609
@logger.setter
26242610
def logger(self, logger: Optional[Logger]) -> None:
26252611
if not logger:
26262612
self.loggers = []
2627-
elif isinstance(logger, LoggerCollection):
2628-
self.loggers = list(logger)
26292613
else:
26302614
self.loggers = [logger]
26312615

tests/tests_pytorch/core/test_lightning_module.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ def test_property_logger(tmpdir):
7979
assert model.logger is None
8080

8181
logger = TensorBoardLogger(tmpdir)
82-
trainer = Mock(loggers=[logger])
82+
trainer = Trainer(logger=logger)
8383
model.trainer = trainer
8484
assert model.logger == logger
8585

@@ -94,6 +94,12 @@ def test_property_loggers(tmpdir):
9494
model.trainer = trainer
9595
assert model.loggers == [logger]
9696

97+
logger0 = TensorBoardLogger(tmpdir)
98+
logger1 = TensorBoardLogger(tmpdir)
99+
trainer = Trainer(logger=[logger0, logger1])
100+
model.trainer = trainer
101+
assert model.loggers == [logger0, logger1]
102+
97103

98104
def test_1_optimizer_toggle_model():
99105
"""Test toggle_model runs when only one optimizer is used."""

tests/tests_pytorch/deprecated_api/test_remove_1-8.py

Lines changed: 1 addition & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
from pytorch_lightning import Callback, Trainer
2727
from pytorch_lightning.callbacks import ModelCheckpoint
2828
from pytorch_lightning.demos.boring_classes import BoringDataModule, BoringModel
29-
from pytorch_lightning.loggers import CSVLogger, Logger, LoggerCollection
29+
from pytorch_lightning.loggers import CSVLogger, Logger
3030
from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin
3131
from pytorch_lightning.profiler import AbstractProfiler, BaseProfiler
3232
from pytorch_lightning.profilers import AdvancedProfiler, Profiler, SimpleProfiler
@@ -441,30 +441,6 @@ def _get_python_cprofile_total_duration(profile):
441441
np.testing.assert_allclose(recorded_total_duration, expected_total_duration, rtol=0.2)
442442

443443

444-
def test_v1_8_0_logger_collection(tmpdir):
445-
logger1 = CSVLogger(tmpdir)
446-
logger2 = CSVLogger(tmpdir)
447-
448-
trainer1 = Trainer(logger=logger1)
449-
trainer2 = Trainer(logger=[logger1, logger2])
450-
451-
# Should have no deprecation warning
452-
trainer1.logger
453-
trainer1.loggers
454-
trainer2.loggers
455-
456-
with pytest.deprecated_call(match="logger` will return the first logger"):
457-
_ = trainer2.logger
458-
with pytest.deprecated_call(match="`LoggerCollection` is deprecated in v1.6"):
459-
_ = LoggerCollection([logger1, logger2])
460-
461-
model = BoringModel()
462-
trainer = Trainer(logger=[logger1, logger2])
463-
model.trainer = trainer
464-
with pytest.deprecated_call(match="logger` will return the first logger"):
465-
_ = model.logger
466-
467-
468444
def test_v1_8_0_precision_plugin_checkpoint_hooks(tmpdir):
469445
class PrecisionPluginSaveHook(PrecisionPlugin):
470446
def on_save_checkpoint(self, checkpoint):

0 commit comments

Comments
 (0)