Skip to content

Commit cb44c79

Browse files
chore: remove unused test setup
1 parent 1d05060 commit cb44c79

File tree

1 file changed

+0
-11
lines changed

1 file changed

+0
-11
lines changed

tests/backend/model_manager/load/model_cache/torch_module_autocast/custom_modules/test_all_custom_modules.py

-11
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
from invokeai.backend.patches.layer_patcher import LayerPatcher
1515
from invokeai.backend.patches.layers.base_layer_patch import BaseLayerPatch
1616
from invokeai.backend.patches.layers.flux_control_lora_layer import FluxControlLoRALayer
17-
from invokeai.backend.patches.layers.diffusers_ada_ln_lora_layer import DiffusersAdaLN_LoRALayer
1817
from invokeai.backend.patches.layers.lokr_layer import LoKRLayer
1918
from invokeai.backend.patches.layers.lora_layer import LoRALayer
2019
from invokeai.backend.patches.layers.merged_layer_patch import MergedLayerPatch, Range
@@ -372,16 +371,6 @@ def patch_under_test(request: pytest.FixtureRequest) -> PatchUnderTest:
372371
)
373372
input = torch.randn(1, in_features)
374373
return ([(lokr_layer, 0.7)], input)
375-
elif layer_type == "diffusers_adaLN_lora":
376-
lora_layer = DiffusersAdaLN_LoRALayer(
377-
up=torch.randn(out_features, rank),
378-
mid=None,
379-
down=torch.randn(rank, in_features),
380-
alpha=1.0,
381-
bias=torch.randn(out_features),
382-
)
383-
input = torch.randn(1, in_features)
384-
return ([(lora_layer, 0.7)], input)
385374
else:
386375
raise ValueError(f"Unsupported layer_type: {layer_type}")
387376

0 commit comments

Comments (0)