Properly free llama_context on failure

Georgi Gerganov 2023-03-24 17:21:01 +02:00
parent 481044d50c
commit afd220d9c6


@@ -1432,16 +1432,16 @@ struct llama_context * llama_init_from_file(
     if (!llama_model_load(path_model, *ctx, params.n_ctx, params.n_parts, type_memory,
                           params.vocab_only)) {
         fprintf(stderr, "%s: failed to load model\n", __func__);
-        delete ctx;
+        llama_free(ctx);
         return nullptr;
     }

     if (params.use_mlock) {
         char *err;
         if (!ggml_mlock(ctx->model.ctx, &err)) {
             fprintf(stderr, "%s\n", err);
             free(err);
-            delete ctx;
+            llama_free(ctx);
             return nullptr;
         }
     }
@@ -1464,7 +1464,9 @@ struct llama_context * llama_init_from_file(
 }

 void llama_free(struct llama_context * ctx) {
-    ggml_free(ctx->model.ctx);
+    if (ctx->model.ctx) {
+        ggml_free(ctx->model.ctx);
+    }

     delete ctx;
 }
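
For context, a minimal caller-side sketch of why the failure paths now go through llama_free: llama_init_from_file returns nullptr on failure, so the partially constructed context (and its ggml buffers) must be released inside the library; the caller only ever sees a fully initialized context or nullptr. This assumes the llama.h API of this revision (llama_context_default_params, llama_init_from_file, llama_free); the model path is a placeholder.

// Minimal usage sketch, assuming the llama.h C API of this revision.
#include <stdio.h>
#include "llama.h"

int main(void) {
    struct llama_context_params params = llama_context_default_params();

    // On failure the library frees the partially constructed context
    // itself and returns NULL, so the caller never has to clean up a
    // half-initialized llama_context.
    struct llama_context * ctx = llama_init_from_file("models/7B/ggml-model.bin", params);
    if (ctx == NULL) {
        fprintf(stderr, "failed to initialize llama_context\n");
        return 1;
    }

    // ... run inference with ctx ...

    llama_free(ctx);
    return 0;
}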