1 parent 9826de2 commit 787f41e
pytorch_lightning/core/lightning.py
@@ -1564,15 +1564,15 @@ def optimizer_step(
                 using_native_amp,
                 using_lbfgs,
             ):
-                # warm up lr
+                # update params
+                optimizer.step(closure=optimizer_closure)
+
+                # manually warm up lr without a scheduler
                 if self.trainer.global_step < 500:
                     lr_scale = min(1.0, float(self.trainer.global_step + 1) / 500.0)
                     for pg in optimizer.param_groups:
                         pg["lr"] = lr_scale * self.learning_rate
-                # update params
-                optimizer.step(closure=optimizer_closure)
-
         """
         optimizer.step(closure=optimizer_closure)
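For context, here is a minimal sketch of how the corrected docstring example would look inside a full `LightningModule` override. The `optimizer_step` body mirrors the lines added in the diff; the hook signature matches the Lightning 1.x-era API implied by the hunk header, and `WarmupModel`, its `Linear` layer, and `training_step` are hypothetical scaffolding added only to make the snippet runnable:

```python
import torch
import pytorch_lightning as pl


class WarmupModel(pl.LightningModule):
    """Hypothetical module illustrating the corrected warm-up example."""

    def __init__(self, learning_rate: float = 1e-3):
        super().__init__()
        self.learning_rate = learning_rate
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.cross_entropy(self.layer(x), y)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=self.learning_rate)

    def optimizer_step(
        self,
        epoch,
        batch_idx,
        optimizer,
        optimizer_idx,
        optimizer_closure,
        on_tpu,
        using_native_amp,
        using_lbfgs,
    ):
        # update params: the closure runs forward + backward, then step() applies it
        optimizer.step(closure=optimizer_closure)

        # manually warm up lr without a scheduler; the rescaled rate
        # takes effect on the next step, which is why step() comes first
        if self.trainer.global_step < 500:
            lr_scale = min(1.0, float(self.trainer.global_step + 1) / 500.0)
            for pg in optimizer.param_groups:
                pg["lr"] = lr_scale * self.learning_rate
```

The reordering in the commit matters: calling `optimizer.step()` before rescaling means each step uses the learning rate computed on the previous iteration, rather than mutating `param_groups` mid-cycle before the update has been applied.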