Commit 492fc62 — "TypeError and other fix"
Parent: 7e8ed03

File tree

5 files changed: +2 additions, −6 deletions

pytorch_lightning/loops/epoch/evaluation_epoch_loop.py

Lines changed: 0 additions & 2 deletions
@@ -218,10 +218,8 @@ def _evaluation_step(self, **kwargs: Any) -> Optional[STEP_OUTPUT]:
             the outputs of the step
         """
         if self.trainer.testing:
-            self.trainer.lightning_module._current_fx_name = "test_step"
             output = self.trainer._call_accelerator_hook("test_step", *kwargs.values())
         else:
-            self.trainer.lightning_module._current_fx_name = "validation_step"
             output = self.trainer._call_accelerator_hook("validation_step", *kwargs.values())

         return output

pytorch_lightning/loops/epoch/prediction_epoch_loop.py

Lines changed: 0 additions & 1 deletion
@@ -130,7 +130,6 @@ def _predict_step(self, batch: Any, batch_idx: int, dataloader_idx: int) -> None

         self.batch_progress.increment_started()

-        self.trainer.lightning_module._current_fx_name = "predict_step"
         predictions = self.trainer._call_accelerator_hook("predict_step", *step_kwargs.values())

         self.batch_progress.increment_processed()

pytorch_lightning/loops/optimization/manual_loop.py

Lines changed: 0 additions & 1 deletion
@@ -102,7 +102,6 @@ def advance(self, batch: Any, batch_idx: int) -> None:  # type: ignore[override]
         )

         # manually capture logged metrics
-        lightning_module._current_fx_name = "training_step"
         training_step_output = self.trainer._call_accelerator_hook("training_step", *step_kwargs.values())
         self.trainer.training_type_plugin.post_training_step()
pytorch_lightning/loops/optimization/optimizer_loop.py

Lines changed: 0 additions & 1 deletion
@@ -427,7 +427,6 @@ def _training_step(self, split_batch: Any, batch_idx: int, opt_idx: int) -> Clos
         )

         # manually capture logged metrics
-        lightning_module._current_fx_name = "training_step"
         training_step_output = self.trainer._call_accelerator_hook("training_step", *step_kwargs.values())
         self.trainer.training_type_plugin.post_training_step()

pytorch_lightning/trainer/trainer.py

Lines changed: 2 additions & 1 deletion
@@ -1457,7 +1457,7 @@ def _call_lightning_module_hook(
         pl_module = pl_module or self.lightning_module

         if pl_module is None:
-            raise Exception("No Lightning Module is available to call hooks on")
+            raise TypeError("No Lightning Module is available to call hooks on")

         fn = getattr(pl_module, hook_name)
         if not callable(fn):
@@ -1538,6 +1538,7 @@ def _call_accelerator_hook(
         *args: Any,
         **kwargs: Any,
     ) -> Optional[Any]:
+        self.lightning_module._current_fx_name = hook_name
         fn = getattr(self.accelerator, hook_name)
         if not callable(fn):
             return None

0 commit comments

Comments
 (0)