llama : remove check flash_attn with lora (#11104)

Authored by Xuan Son Nguyen on 2025-01-06 13:41:12 +01:00, committed by GitHub
parent 96a1dc27c3
commit 09186fabbe


@@ -11519,13 +11519,7 @@ int32_t llama_lora_adapter_set(
             struct llama_context * ctx,
             struct llama_lora_adapter * adapter,
             float scale) {
-    if (ctx->cparams.flash_attn) {
-        LLAMA_LOG_ERROR("%s: flash_attn is not compatible with LoRA\n", __func__);
-        return -1;
-    }
     ctx->lora_adapters[adapter] = scale;
     return 0;
 }
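
A minimal caller-side sketch (not part of the commit) of what this change enables: setting a LoRA adapter on a context created with flash attention enabled, which previously failed with -1. It assumes the C API names of this period (llama_load_model_from_file, llama_new_context_with_model, llama_lora_adapter_init); the file paths are placeholders.

// sketch.cpp -- assumes llama.cpp's C API as of early 2025; paths are placeholders
#include "llama.h"
#include <cstdio>

int main() {
    llama_backend_init();

    llama_model_params mparams = llama_model_default_params();
    llama_model * model = llama_load_model_from_file("model.gguf", mparams); // placeholder path
    if (model == nullptr) { return 1; }

    llama_context_params cparams = llama_context_default_params();
    cparams.flash_attn = true; // the combination that used to be rejected
    llama_context * ctx = llama_new_context_with_model(model, cparams);
    if (ctx == nullptr) { return 1; }

    llama_lora_adapter * adapter = llama_lora_adapter_init(model, "lora.gguf"); // placeholder path
    if (adapter == nullptr) { return 1; }

    // Before this commit: returned -1 here and logged
    // "flash_attn is not compatible with LoRA".
    // After this commit: the check is gone, the adapter is registered
    // with the context, and 0 is returned.
    int32_t res = llama_lora_adapter_set(ctx, adapter, 1.0f);
    printf("llama_lora_adapter_set -> %d\n", res);

    llama_free(ctx);
    llama_free_model(model);
    llama_backend_free();
    return 0;
}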