Update src/llama.cpp: only treat a template as glm-edge style when it does not contain <|end|> or </s>
Co-authored-by: Xuan Son Nguyen <thichthat@gmail.com>
parent 692880535a
commit 5ff563257c
@@ -22085,7 +22085,7 @@ static int32_t llama_chat_apply_template_internal(
         if (add_ass) {
             ss << "<|assistant|>";
         }
-    } else if(tmpl == "glm-edge" || tmpl_contains("<|assistant|>")){
+    } else if(tmpl == "glm-edge" || (tmpl_contains("<|assistant|>") && !tmpl_contains("<|end|>") && !tmpl_contains("</s>"))){
         for (auto message : chat) {
             std::string role(message->role);
             ss << "<|" << role << "|>" << "\n" << message->content;
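
For context, here is a minimal standalone sketch of the tightened condition. It is not the llama.cpp source: tmpl_contains below is a hypothetical stand-in for the lambda of the same name inside llama_chat_apply_template_internal, and is_glm_edge_style is an illustrative helper. The point of the change is that a template which uses <|assistant|> together with <|end|> or </s> (for example a Phi-3-style template) no longer falls into the glm-edge branch.

#include <iostream>
#include <string>

// Hypothetical stand-in for the tmpl_contains lambda defined inside
// llama_chat_apply_template_internal in src/llama.cpp.
static bool tmpl_contains(const std::string & tmpl, const std::string & needle) {
    return tmpl.find(needle) != std::string::npos;
}

// Sketch of the updated branch condition: glm-edge style is assumed only
// when <|assistant|> is present and neither <|end|> nor </s> appears.
static bool is_glm_edge_style(const std::string & tmpl) {
    return tmpl == "glm-edge"
        || (tmpl_contains(tmpl, "<|assistant|>")
            && !tmpl_contains(tmpl, "<|end|>")
            && !tmpl_contains(tmpl, "</s>"));
}

int main() {
    // Uses <|assistant|> without <|end|> or </s>: matches the glm-edge branch.
    std::cout << is_glm_edge_style("<|user|>\nHi<|assistant|>\n") << "\n";          // 1
    // Uses <|assistant|> together with <|end|>: excluded after this change.
    std::cout << is_glm_edge_style("<|user|>\nHi<|end|>\n<|assistant|>\n") << "\n"; // 0
}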