import pytorch_lightning as pl
from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin
from pytorch_lightning.utilities import GradClipAlgorithmType
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.model_helpers import is_overridden
from pytorch_lightning.utilities.warnings import WarningCache
@@ -44,12 +45,14 @@ def pre_optimizer_step(
        """Hook to do something before each optimizer step."""
        result = lambda_closure()  # DeepSpeed does not support closures
        super().pre_optimizer_step(model, optimizer, optimizer_idx, lambda_closure, **kwargs)
-        skipped_backward = result is None
        # in manual optimization, the closure does not return a value
-        if not model.automatic_optimization or not skipped_backward:
-            # the following should be in a `optimizer_step` hook but we don't have one in the precision plugin.
-            deepspeed_engine = model.trainer.model
-            deepspeed_engine.step()
+        if model.automatic_optimization and result is None:
+            raise MisconfigurationException(
+                "Skipping backward by returning `None` from your `training_step` is not supported by `DeepSpeed`"
+            )
+        # the following should be in a `optimizer_step` hook but we don't have one in the precision plugin.
+        deepspeed_engine = model.trainer.model
+        deepspeed_engine.step()
        return False

    def backward(self, model: "pl.LightningModule", closure_loss: Tensor, *args: Any, **kwargs: Any) -> None:
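Usage note (not part of the diff): with this change, returning `None` from `training_step` under automatic optimization raises a `MisconfigurationException` when running with DeepSpeed, instead of silently proceeding to `deepspeed_engine.step()`. Below is a minimal sketch of the pattern that is now rejected; the module name `SkipNonFiniteModel` and the non-finite-loss check are illustrative assumptions, not part of this PR.

import torch
import pytorch_lightning as pl


class SkipNonFiniteModel(pl.LightningModule):  # hypothetical example module
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        loss = self.layer(batch).sum()
        if not torch.isfinite(loss):
            # With non-DeepSpeed strategies, returning `None` skips backward and the
            # optimizer step. Under DeepSpeed with automatic optimization, this now
            # raises MisconfigurationException instead of stepping the engine anyway.
            return None
        return loss

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)

In manual optimization the closure legitimately returns `None` (as the in-code comment notes), which is why the new check is gated on `model.automatic_optimization`.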