
Commit 438845b

Apply suggestions

1 parent bcd9aa9 commit 438845b

File tree

2 files changed: +5 -6 lines changed

src/diffusers/loaders.py (+5 -5)

@@ -322,7 +322,7 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict
 
         # fill attn processors
         attn_processors = {}
-        ff_layers = []
+        non_attn_lora_layers = []
 
         is_lora = all("lora" in k for k in state_dict.keys())
         is_custom_diffusion = any("custom_diffusion" in k for k in state_dict.keys())
@@ -350,7 +350,7 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict
                 for sub_key in key.split("."):
                     attn_processor = getattr(attn_processor, sub_key)
 
-                # Process FF layers
+                # Process non-attention layers
                 if "lora.down.weight" in value_dict:
                     rank = value_dict["lora.down.weight"].shape[0]
                     hidden_size = value_dict["lora.up.weight"].shape[0]
@@ -366,7 +366,7 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict
 
                     value_dict = {k.replace("lora.", ""): v for k, v in value_dict.items()}
                     lora.load_state_dict(value_dict)
-                    ff_layers.append((attn_processor, lora))
+                    non_attn_lora_layers.append((attn_processor, lora))
                     continue
 
                 rank = value_dict["to_k_lora.down.weight"].shape[0]
@@ -428,13 +428,13 @@ def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict
 
         # set correct dtype & device
        attn_processors = {k: v.to(device=self.device, dtype=self.dtype) for k, v in attn_processors.items()}
-        ff_layers = [(t, l.to(device=self.device, dtype=self.dtype)) for t, l in ff_layers]
+        non_attn_lora_layers = [(t, l.to(device=self.device, dtype=self.dtype)) for t, l in non_attn_lora_layers]
 
         # set layers
         self.set_attn_processor(attn_processors)
 
         # set ff layers
-        for target_module, lora_layer in ff_layers:
+        for target_module, lora_layer in non_attn_lora_layers:
             if hasattr(target_module, "set_lora_layer"):
                 target_module.set_lora_layer(lora_layer)
 
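Note: the loop at the end of this hunk only attaches a LoRA layer when the target module exposes set_lora_layer. A minimal sketch of that contract, assuming a toy module: only LoRALinearLayer and the hasattr(..., "set_lora_layer") check come from this diff, while ToyLoRACompatibleLinear and its shapes are hypothetical stand-ins, not diffusers classes.

# Sketch of the set_lora_layer contract checked by the loop above.
# `ToyLoRACompatibleLinear` is a hypothetical stand-in; only `LoRALinearLayer`
# and the hasattr(..., "set_lora_layer") check appear in this commit.
import torch
from torch import nn

from diffusers.models.lora import LoRALinearLayer


class ToyLoRACompatibleLinear(nn.Linear):
    """A plain linear layer that can optionally carry a LoRA correction."""

    def __init__(self, in_features, out_features):
        super().__init__(in_features, out_features)
        self.lora_layer = None

    def set_lora_layer(self, lora_layer):
        # Hook that the loader looks for via hasattr(target_module, "set_lora_layer").
        self.lora_layer = lora_layer

    def forward(self, hidden_states):
        out = super().forward(hidden_states)
        if self.lora_layer is not None:
            # Add the low-rank correction on top of the base projection.
            out = out + self.lora_layer(hidden_states)
        return out


linear = ToyLoRACompatibleLinear(320, 1280)
lora_layer = LoRALinearLayer(in_features=320, out_features=1280, rank=4)
if hasattr(linear, "set_lora_layer"):
    linear.set_lora_layer(lora_layer)

sample = torch.randn(2, 320)
print(linear(sample).shape)  # torch.Size([2, 1280])
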
src/diffusers/models/lora.py (-1)

@@ -17,7 +17,6 @@
 from torch import nn
 
 
-# moved from attention_processor.py
 class LoRALinearLayer(nn.Module):
     def __init__(self, in_features, out_features, rank=4, network_alpha=None, device=None, dtype=None):
         super().__init__()
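
For reference, LoRALinearLayer can be instantiated directly from the signature shown above; a minimal usage sketch follows, where the shapes, rank, and network_alpha values are arbitrary examples, not taken from this commit.

# Minimal usage sketch for LoRALinearLayer; shapes, rank and network_alpha
# are arbitrary example values, not taken from this commit.
import torch

from diffusers.models.lora import LoRALinearLayer

lora_layer = LoRALinearLayer(in_features=768, out_features=768, rank=8, network_alpha=8)
hidden_states = torch.randn(1, 77, 768)
delta = lora_layer(hidden_states)  # low-rank update with the same trailing dimension
print(delta.shape)  # torch.Size([1, 77, 768])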
