Skip to content

Commit bb2dc68

Browse files
authored
Simplify track grad norm condition (#9992)
1 parent 321502f commit bb2dc68

File tree

1 file changed

+1
-3
lines changed

1 file changed

+1
-3
lines changed

pytorch_lightning/loops/optimization/optimizer_loop.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -478,9 +478,7 @@ def _track_and_norm_grad(self, optimizer: torch.optim.Optimizer, opt_idx: int) -
478478
"""
479479
# track gradient norms
480480
grad_norm_dict = {}
481-
can_log = (self.trainer.global_step + 1) % self.trainer.log_every_n_steps == 0
482-
should_track = float(self.trainer.track_grad_norm) > 0
483-
if should_track and can_log:
481+
if self.trainer.track_grad_norm != -1:
484482
grad_norm_dict = grad_norm(self.trainer.lightning_module, self.trainer.track_grad_norm)
485483

486484
# clip gradients

0 commit comments

Comments (0)