Skip to content

Commit 09186fa

Browse files
authored
llama : remove check flash_attn with lora (#11104)
1 parent 96a1dc2 commit 09186fa

File tree

1 file changed

+0
-6
lines changed

1 file changed

+0
-6
lines changed

src/llama.cpp

-6
Original file line numberDiff line numberDiff line change
@@ -11519,13 +11519,7 @@ int32_t llama_lora_adapter_set(
             struct llama_context * ctx,
             struct llama_lora_adapter * adapter,
             float scale) {
-    if (ctx->cparams.flash_attn) {
-        LLAMA_LOG_ERROR("%s: flash_attn is not compatible with LoRA\n", __func__);
-        return -1;
-    }
-
     ctx->lora_adapters[adapter] = scale;
-
     return 0;
 }

0 commit comments

Comments
 (0)