Skip to content

Commit 7fce183

Browse files
committed
Add typing for trainer.logger (#11114)
1 parent 15a4959 commit 7fce183

File tree

5 files changed

+8
-1
lines changed

5 files changed

+8
-1
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
1313
- Avoid the deprecated `onnx.export(example_outputs=...)` in torch 1.10 ([#11116](https://github.com/PyTorchLightning/pytorch-lightning/pull/11116))
1414
- Fixed an issue when torch-scripting a `LightningModule` after training with `Trainer(sync_batchnorm=True)` ([#11078](https://github.com/PyTorchLightning/pytorch-lightning/pull/11078))
1515
- Fixed an `AttributeError` occurring when using a `CombinedLoader` (multiple dataloaders) for prediction ([#11111](https://github.com/PyTorchLightning/pytorch-lightning/pull/11111))
16+
- Fixed bug where `Trainer(track_grad_norm=..., logger=False)` would fail ([#11114](https://github.com/PyTorchLightning/pytorch-lightning/pull/11114))
1617

1718
### Changed
1819

pytorch_lightning/callbacks/device_stats_monitor.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ def on_train_batch_start(
5959

6060
device_stats = trainer.accelerator.get_device_stats(pl_module.device)
6161
prefixed_device_stats = prefix_metrics_keys(device_stats, "on_train_batch_start")
62+
assert trainer.logger is not None
6263
trainer.logger.log_metrics(prefixed_device_stats, step=trainer.global_step)
6364

6465
def on_train_batch_end(
@@ -75,6 +76,7 @@ def on_train_batch_end(
7576

7677
device_stats = trainer.accelerator.get_device_stats(pl_module.device)
7778
prefixed_device_stats = prefix_metrics_keys(device_stats, "on_train_batch_end")
79+
assert trainer.logger is not None
7880
trainer.logger.log_metrics(prefixed_device_stats, step=trainer.global_step)
7981

8082

pytorch_lightning/callbacks/gpu_stats_monitor.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,7 @@ def on_train_batch_start(
161161
# First log at beginning of second step
162162
logs["batch_time/inter_step (ms)"] = (time.time() - self._snap_inter_step_time) * 1000
163163

164+
assert trainer.logger is not None
164165
trainer.logger.log_metrics(logs, step=trainer.global_step)
165166

166167
@rank_zero_only
@@ -185,6 +186,7 @@ def on_train_batch_end(
185186
if self._log_stats.intra_step_time and self._snap_intra_step_time:
186187
logs["batch_time/intra_step (ms)"] = (time.time() - self._snap_intra_step_time) * 1000
187188

189+
assert trainer.logger is not None
188190
trainer.logger.log_metrics(logs, step=trainer.global_step)
189191

190192
@staticmethod

pytorch_lightning/plugins/precision/precision_plugin.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -165,7 +165,8 @@ def optimizer_step(
165165
def _track_grad_norm(self, trainer: "pl.Trainer") -> None:
166166
if trainer.track_grad_norm == -1:
167167
return
168-
grad_norm_dict = grad_norm(trainer.lightning_module, trainer.track_grad_norm, trainer.logger.group_separator)
168+
kwargs = {"group_separator": trainer.logger.group_separator} if trainer.logger is not None else {}
169+
grad_norm_dict = grad_norm(trainer.lightning_module, trainer.track_grad_norm, **kwargs)
169170
if grad_norm_dict:
170171
prev_fx = trainer.lightning_module._current_fx_name
171172
trainer.lightning_module._current_fx_name = "on_before_optimizer_step"

pytorch_lightning/trainer/trainer.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -569,6 +569,7 @@ def __init__(
569569
self.__init_profiler(profiler)
570570

571571
# init logger flags
572+
self.logger: Optional[LightningLoggerBase]
572573
self.logger_connector.on_trainer_init(logger, flush_logs_every_n_steps, log_every_n_steps, move_metrics_to_cpu)
573574

574575
# init debugging flags

0 commit comments

Comments
 (0)