From 05cd6e5036d72d0930de4d8f6be7bce09e8dda24 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Wed, 6 Dec 2023 20:21:59 +0200
Subject: [PATCH] server : recognize cache_prompt parameter in OAI API (#4347)

---
 examples/server/server.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 911f7bbe1..369f81a84 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -2387,6 +2387,7 @@ json oaicompat_completion_params_parse(
 
     // Map OpenAI parameters to llama.cpp parameters
     llama_params["prompt"]            = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt'
+    llama_params["cache_prompt"]      = json_value(body, "cache_prompt", false);
     llama_params["temperature"]       = json_value(body, "temperature", 0.8);
     llama_params["top_k"]             = json_value(body, "top_k", 40);
     llama_params["top_p"]             = json_value(body, "top_p", 0.95);
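
For context (not part of the patch): a minimal client-side sketch showing how a caller could opt into prompt caching through the OAI-compatible endpoint once this change is in place. It assumes a locally running llama.cpp server on the default port 8080, and uses cpp-httplib and nlohmann/json, which llama.cpp already vendors for its examples; treat it as an illustration rather than a supported client.

// client_sketch.cpp -- hypothetical example, assumes a server at localhost:8080
#include "httplib.h"
#include "json.hpp"
#include <iostream>

using json = nlohmann::json;

int main() {
    httplib::Client cli("http://localhost:8080");

    json body = {
        {"messages", json::array({
            {{"role", "user"}, {"content", "Hello!"}}
        })},
        // With this patch, cache_prompt is forwarded to the llama.cpp
        // completion params; it defaults to false when omitted.
        {"cache_prompt", true},
        {"temperature", 0.8}
    };

    auto res = cli.Post("/v1/chat/completions", body.dump(), "application/json");
    if (res && res->status == 200) {
        std::cout << res->body << std::endl;
    } else {
        std::cerr << "request failed" << std::endl;
    }
    return 0;
}

With cache_prompt set to true, repeated requests that share a common prefix can reuse the server's cached prompt evaluation instead of reprocessing it; leaving it out preserves the previous behavior, since the parse falls back to the default of false.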