1 parent 32bfe51 commit 798dd99
pytorch_lightning/trainer/trainer.py
@@ -1036,7 +1036,7 @@ def _pre_dispatch(self):
         self.accelerator.pre_dispatch(self)
         self._log_hyperparams()
 
-    def _log_hyperparams(self):
+    def _log_hyperparams(self) -> None:
         # log hyper-parameters
         hparams_initial = None
 
@@ -1058,7 +1058,7 @@ def _log_hyperparams(self):
                 raise MisconfigurationException(
                     f"Error while merging hparams: the keys {inconsistent_keys} are present "
                     "in both the LightningModule's and LightningDataModule's hparams "
-                    "and have different values."
+                    "but have different values."
                 )
             hparams_initial = {**lightning_hparams, **datamodule_hparams}
         elif self.lightning_module._log_hyperparams:
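For context, a minimal, self-contained sketch of the merge check this diff touches. The hparams contents are hypothetical, ValueError stands in for Lightning's MisconfigurationException, and only the conflict detection and merge are shown; it is a simplified illustration, not the full method.

# Sketch of the hparams merge guarded by the error message edited above.
# The dict contents are hypothetical; ValueError stands in for
# pytorch_lightning's MisconfigurationException.
lightning_hparams = {"learning_rate": 1e-3, "batch_size": 32}
datamodule_hparams = {"batch_size": 64, "num_workers": 4}

# A key is inconsistent if both hparams define it with different values.
inconsistent_keys = [
    key
    for key in lightning_hparams.keys() & datamodule_hparams.keys()
    if lightning_hparams[key] != datamodule_hparams[key]
]

if inconsistent_keys:
    raise ValueError(
        f"Error while merging hparams: the keys {inconsistent_keys} are present "
        "in both the LightningModule's and LightningDataModule's hparams "
        "but have different values."
    )

# With no conflicts, the merged dict is what gets logged.
hparams_initial = {**lightning_hparams, **datamodule_hparams}

Run as-is, this raises on the conflicting batch_size key; with the conflict removed, hparams_initial becomes the union of both dicts.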