Skip to content

Commit 06f5bde

Browse files
committed
Configure our logger level
1 parent 9250f2c commit 06f5bde

File tree

1 file changed

+6
-6
lines changed

1 file changed

+6
-6
lines changed

pytorch_lightning/plugins/training_type/deepspeed.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -312,6 +312,7 @@ def __init__(
312312
)
313313
self._config_initialized = False
314314
deepspeed.utils.logging.logger.setLevel(logging_level)
315+
pl._logger.setLevel(logging_level)
315316

316317
self.remote_device = remote_device
317318
self.load_full_weights = load_full_weights
@@ -634,12 +635,11 @@ def _auto_select_batch_size(self):
634635
if hasattr(train_dataloader, "batch_sampler"):
635636
batch_size = train_dataloader.batch_sampler.batch_size
636637
except Exception:
637-
if deepspeed.utils.logging.logger.level < logging.WARN:
638-
rank_zero_warn(
639-
"Tried to Infer the batch size for internal deepspeed logging from the `train_dataloader()`. "
640-
"To ensure DeepSpeed logging remains correct, please manually pass the plugin with the "
641-
"batch size, `Trainer(strategy=DeepSpeedPlugin(logging_batch_size_per_gpu=batch_size))`."
642-
)
638+
rank_zero_warn(
639+
"Tried to Infer the batch size for internal deepspeed logging from the `train_dataloader()`. "
640+
"To ensure DeepSpeed logging remains correct, please manually pass the plugin with the "
641+
"batch size, `Trainer(strategy=DeepSpeedPlugin(logging_batch_size_per_gpu=batch_size))`."
642+
)
643643
return batch_size
644644

645645
def _format_precision_config(self):

0 commit comments

Comments (0)