From 532c1737a14bb4b99747e6f460874947df37e450 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Tue, 16 Apr 2024 23:50:38 +0300
Subject: [PATCH] llama : make general.name optional (#6709)

---
 llama.cpp | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/llama.cpp b/llama.cpp
index 579986d1a..f4f4063cf 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -4136,9 +4136,11 @@ static void llm_load_vocab(
             // CodeGemma (LLM_ARCH_GEMMA). This can potentially be removed once
             // new versions of these models have been published.
             std::string gen_name;
-            ml.get_key(LLM_KV_GENERAL_NAME, gen_name);
+            ml.get_key(LLM_KV_GENERAL_NAME, gen_name, false);
+
             std::transform(gen_name.begin(), gen_name.end(), gen_name.begin(),
                 [](unsigned char c){ return std::tolower(c); });
+
             if (gen_name.find("code") != std::string::npos) {
                 if (model.arch == LLM_ARCH_LLAMA) {
                     vocab.special_prefix_id = 32007;
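
Note (not part of the patch): the third argument added to ml.get_key above is the loader's "required" flag; passing false makes a missing general.name key non-fatal instead of aborting model load. Below is a minimal, hypothetical sketch of that optional-key pattern, using a stand-in map-based get_key rather than the real llama_model_loader API:

// Hypothetical stand-in for llama_model_loader::get_key: looks up a metadata
// key and either throws (required) or quietly returns false (optional).
#include <algorithm>
#include <cctype>
#include <map>
#include <stdexcept>
#include <string>

static bool get_key(const std::map<std::string, std::string> & meta,
                    const std::string & key, std::string & out, bool required = true) {
    const auto it = meta.find(key);
    if (it == meta.end()) {
        if (required) {
            throw std::runtime_error("key not found in model: " + key);
        }
        return false; // optional key missing: leave `out` untouched
    }
    out = it->second;
    return true;
}

int main() {
    std::map<std::string, std::string> meta; // model metadata lacking general.name
    std::string gen_name;

    // With required == false the lookup is non-fatal and gen_name stays empty,
    // so the lowercase "code" heuristic used by the patched code simply does not match.
    get_key(meta, "general.name", gen_name, /*required =*/ false);
    std::transform(gen_name.begin(), gen_name.end(), gen_name.begin(),
        [](unsigned char c){ return std::tolower(c); });

    return gen_name.find("code") != std::string::npos ? 1 : 0;
}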