
Commit 20e4b6a

[LoRA] change from info to warning when notifying users about a LoRA no-op (#11044)
* move to warning.
* test related changes.
1 parent: 4ea9f89
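
The user-visible effect, sketched below: loading a LoRA state dict whose keys match no pipeline component used to log at INFO (hidden under the default verbosity) and now logs at WARNING. A minimal sketch; the no-op state dict mirrors the updated test, and the checkpoint id is illustrative only.

import torch
from diffusers import DiffusionPipeline

# Illustrative checkpoint id; any LoRA-capable pipeline behaves the same.
pipe = DiffusionPipeline.from_pretrained("stable-diffusion-v1-5/stable-diffusion-v1-5")

# Neither key carries a recognized component prefix (e.g. "unet" or
# "text_encoder"), so the load is a no-op and now emits a warning.
no_op_state_dict = {"lora_foo": torch.tensor(2.0), "lora_bar": torch.tensor(3.0)}
pipe.load_lora_weights(no_op_state_dict)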

File tree: 3 files changed (+14, -6 lines)

src/diffusers/loaders/lora_base.py (+6, -2)

@@ -423,8 +423,12 @@ def _load_lora_into_text_encoder(
     if prefix is not None and not state_dict:
-        logger.info(
-            f"No LoRA keys associated to {text_encoder.__class__.__name__} found with the {prefix=}. This is safe to ignore if LoRA state dict didn't originally have any {text_encoder.__class__.__name__} related params. Open an issue if you think it's unexpected: https://github.com/huggingface/diffusers/issues/new"
+        logger.warning(
+            f"No LoRA keys associated to {text_encoder.__class__.__name__} found with the {prefix=}. "
+            "This is safe to ignore if LoRA state dict didn't originally have any "
+            f"{text_encoder.__class__.__name__} related params. You can also try specifying `prefix=None` "
+            "to resolve the warning. Otherwise, open an issue if you think it's unexpected: "
+            "https://github.com/huggingface/diffusers/issues/new"
         )
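
Since the notice is now emitted at WARNING, it shows up under diffusers' default verbosity. A minimal sketch of silencing it globally with the library's logging helpers; note this raises the threshold for all diffusers warnings, not just this one:

from diffusers.utils import logging

logging.set_verbosity_error()  # hide WARNING-level messages, including this notice
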
src/diffusers/loaders/peft.py (+6, -2)

@@ -354,8 +354,12 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
         if prefix is not None and not state_dict:
-            logger.info(
-                f"No LoRA keys associated to {self.__class__.__name__} found with the {prefix=}. This is safe to ignore if LoRA state dict didn't originally have any {self.__class__.__name__} related params. Open an issue if you think it's unexpected: https://github.com/huggingface/diffusers/issues/new"
+            logger.warning(
+                f"No LoRA keys associated to {self.__class__.__name__} found with the {prefix=}. "
+                "This is safe to ignore if LoRA state dict didn't originally have any "
+                f"{self.__class__.__name__} related params. You can also try specifying `prefix=None` "
+                "to resolve the warning. Otherwise, open an issue if you think it's unexpected: "
+                "https://github.com/huggingface/diffusers/issues/new"
             )

     def save_lora_adapter(
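
A sketch of what now triggers the warning at the model level, assuming local access to an illustrative checkpoint; per the new message, passing `prefix=None` instead makes the loader use the state-dict keys as-is rather than filtering (and warning) on the prefix:

import torch
from diffusers import UNet2DConditionModel

# Illustrative checkpoint; any model with the PEFT mixin exposes load_lora_adapter.
unet = UNet2DConditionModel.from_pretrained(
    "stable-diffusion-v1-5/stable-diffusion-v1-5", subfolder="unet"
)

# No key starts with "unet.", so prefix filtering empties the state dict:
# the call is a no-op and, after this commit, logs a WARNING instead of INFO.
no_op_state_dict = {"lora_foo": torch.tensor(2.0), "lora_bar": torch.tensor(3.0)}
unet.load_lora_adapter(no_op_state_dict, prefix="unet")
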

tests/lora/utils.py (+2, -2)

@@ -1961,7 +1961,7 @@ def test_logs_info_when_no_lora_keys_found(self):
         no_op_state_dict = {"lora_foo": torch.tensor(2.0), "lora_bar": torch.tensor(3.0)}
         logger = logging.get_logger("diffusers.loaders.peft")
-        logger.setLevel(logging.INFO)
+        logger.setLevel(logging.WARNING)

         with CaptureLogger(logger) as cap_logger:
             pipe.load_lora_weights(no_op_state_dict)
@@ -1981,7 +1981,7 @@ def test_logs_info_when_no_lora_keys_found(self):
         prefix = "text_encoder_2"

         logger = logging.get_logger("diffusers.loaders.lora_base")
-        logger.setLevel(logging.INFO)
+        logger.setLevel(logging.WARNING)

         with CaptureLogger(logger) as cap_logger:
             self.pipeline_class.load_lora_into_text_encoder(
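
For completeness, a sketch of how the updated test exercises the new level. It assumes `pipe` is an already-loaded pipeline; `CaptureLogger` comes from diffusers' test utilities, and the asserted substring mirrors the message above:

import torch
from diffusers.utils import logging
from diffusers.utils.testing_utils import CaptureLogger

logger = logging.get_logger("diffusers.loaders.peft")
logger.setLevel(logging.WARNING)  # capture at the new level

no_op_state_dict = {"lora_foo": torch.tensor(2.0), "lora_bar": torch.tensor(3.0)}
with CaptureLogger(logger) as cap_logger:
    pipe.load_lora_weights(no_op_state_dict)  # `pipe` assumed loaded above

assert "No LoRA keys associated to" in cap_logger.out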
