
Commit 0d96a89

Fix copied from comment in Mochi lora loader (#10255)
update
1 parent: 6fb94d5

1 file changed (+2, -2 lines)

src/diffusers/loaders/lora_pipeline.py

@@ -3104,7 +3104,7 @@ def load_lora_weights(
         )

     @classmethod
-    # Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->CogVideoXTransformer3DModel
+    # Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->MochiTransformer3DModel
     def load_lora_into_transformer(
         cls, state_dict, transformer, adapter_name=None, _pipeline=None, low_cpu_mem_usage=False
     ):
@@ -3116,7 +3116,7 @@ def load_lora_into_transformer(
                 A standard state dict containing the lora layer parameters. The keys can either be indexed directly
                 into the unet or prefixed with an additional `unet` which can be used to distinguish between text
                 encoder lora layers.
-            transformer (`CogVideoXTransformer3DModel`):
+            transformer (`MochiTransformer3DModel`):
                 The Transformer model to load the LoRA layers into.
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
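
For context: the `# Copied from` annotations in diffusers are consistency markers validated by the repository's copy-checking tooling, so the comment should name the class this copy is specialized for; in the Mochi loader that is `MochiTransformer3DModel`, not `CogVideoXTransformer3DModel`. Below is a minimal, hypothetical sketch of how this loader path is typically reached from user code; the checkpoint id, LoRA path, and adapter name are placeholder assumptions, not values taken from this commit.

import torch
from diffusers import MochiPipeline

# Hypothetical usage sketch: load a Mochi LoRA through the pipeline-level API.
# The model id and LoRA path below are placeholders, not part of this commit.
pipe = MochiPipeline.from_pretrained("genmo/mochi-1-preview", torch_dtype=torch.bfloat16)

# load_lora_weights() routes the checkpoint through the classmethod shown in the
# diff, load_lora_into_transformer(), which injects the LoRA layers into the
# pipeline's MochiTransformer3DModel.
pipe.load_lora_weights("path/to/mochi_lora", adapter_name="example_adapter")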
