
Commit aa58da9

chore: remove unused test setup
1 parent: 1d05060

File tree

1 file changed: +0 -12 lines


tests/backend/model_manager/load/model_cache/torch_module_autocast/custom_modules/test_all_custom_modules.py

-12
@@ -14,7 +14,6 @@
 from invokeai.backend.patches.layer_patcher import LayerPatcher
 from invokeai.backend.patches.layers.base_layer_patch import BaseLayerPatch
 from invokeai.backend.patches.layers.flux_control_lora_layer import FluxControlLoRALayer
-from invokeai.backend.patches.layers.diffusers_ada_ln_lora_layer import DiffusersAdaLN_LoRALayer
 from invokeai.backend.patches.layers.lokr_layer import LoKRLayer
 from invokeai.backend.patches.layers.lora_layer import LoRALayer
 from invokeai.backend.patches.layers.merged_layer_patch import MergedLayerPatch, Range
@@ -284,7 +283,6 @@ def test_inference_autocast_from_cpu_to_device(device: str, layer_under_test: La
         "multiple_loras",
         "concatenated_lora",
         "flux_control_lora",
-        "diffusers_adaLN_lora",
         "single_lokr",
     ]
 )
@@ -372,16 +370,6 @@ def patch_under_test(request: pytest.FixtureRequest) -> PatchUnderTest:
         )
         input = torch.randn(1, in_features)
         return ([(lokr_layer, 0.7)], input)
-    elif layer_type == "diffusers_adaLN_lora":
-        lora_layer = DiffusersAdaLN_LoRALayer(
-            up=torch.randn(out_features, rank),
-            mid=None,
-            down=torch.randn(rank, in_features),
-            alpha=1.0,
-            bias=torch.randn(out_features),
-        )
-        input = torch.randn(1, in_features)
-        return ([(lora_layer, 0.7)], input)
     else:
         raise ValueError(f"Unsupported layer_type: {layer_type}")
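For context, the removed "diffusers_adaLN_lora" fixture branch built its patch roughly as below. This is a sketch reconstructed from the deleted hunk; the in_features, out_features, and rank values are placeholders assumed for illustration, not taken from the original file.

import torch

from invokeai.backend.patches.layers.diffusers_ada_ln_lora_layer import DiffusersAdaLN_LoRALayer

# Placeholder dimensions (assumed; in the fixture these are defined earlier in the function).
in_features, out_features, rank = 32, 16, 4

# The removed branch constructed a single AdaLN LoRA layer with random weights
# and paired it with a matching random input tensor.
lora_layer = DiffusersAdaLN_LoRALayer(
    up=torch.randn(out_features, rank),
    mid=None,
    down=torch.randn(rank, in_features),
    alpha=1.0,
    bias=torch.randn(out_features),
)
input = torch.randn(1, in_features)
patch_under_test = ([(lora_layer, 0.7)], input)  # (list of (layer, weight) pairs, input tensor)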
