From ab25e3fbf93c777831c9578e14c45a5e5a4bf7fe Mon Sep 17 00:00:00 2001
From: ochafik
Date: Thu, 26 Sep 2024 02:19:04 +0100
Subject: [PATCH] `tool-call`: allow empty message content when there's
 tool_calls in format_chat

---
 examples/server/utils.hpp | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp
index f28f7086d..b124f0771 100644
--- a/examples/server/utils.hpp
+++ b/examples/server/utils.hpp
@@ -77,8 +77,8 @@ inline std::string format_chat(const struct llama_model * model, const std::stri
                         msg.content += "\n" + part["text"].get<std::string>();
                     }
                 }
-            } else {
-                throw std::runtime_error("Invalid 'content' type (ref: https://github.com/ggerganov/llama.cpp/issues/8367)");
+            } else if (!(curr_msg["content"].is_null() && curr_msg.contains("tool_calls"))) {
+                throw std::runtime_error("Invalid 'content' type (ref: https://github.com/ggerganov/llama.cpp/issues/8367): " + curr_msg.dump());
             }
         } else {
             throw std::runtime_error("Missing 'content' (ref: https://github.com/ggerganov/llama.cpp/issues/8367)");
@@ -474,6 +474,7 @@ static json format_final_response_oaicompat(const json & request, const json & r
     auto tools = json_value(request, "tools", json::array());
     json tool_calls;
     json message_content;
+    printf("# CONTENT: %s\n\n", content.c_str());
     if (json_value(request, "parse_tool_calls", false)
             && !(parsed_tool_calls = parse_tool_calls(tools, chat_template, content)).tool_calls.empty()) {
         finish_reason = "tool";
@@ -513,6 +514,7 @@ static json format_final_response_oaicompat(const json & request, const json & r
         }},
         {"id", completion_id}
     };
+    printf("# RES: %s\n\n", res.dump(2).c_str());
 
     // extra fields for debugging purposes
     if (verbose) {