llama : the WPM vocabs use the CLS token as BOS (#10930)
* llama : the WPM vocabs use the CLS token as BOS

ggml-ci

* llama : add comment
parent 60cfa728e2
commit 30caac3a68
@@ -1657,7 +1657,7 @@ bool llama_token_is_control_impl(const struct llama_vocab & vocab, llama_token t
 }
 
 llama_token llama_token_bos_impl(const struct llama_vocab & vocab) {
-    return vocab.special_bos_id;
+    return vocab.type != LLAMA_VOCAB_TYPE_WPM ? vocab.special_bos_id : vocab.special_cls_id;
 }
 
 llama_token llama_token_eos_impl(const struct llama_vocab & vocab) {
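The practical effect of the hunk above is that the BOS accessor no longer reports special_bos_id for WPM (BERT-style) vocabs, which conventionally define a [CLS] token rather than a BOS token; it falls back to the CLS id instead. Below is a minimal standalone sketch of that selection rule. It uses mock types rather than the real llama.cpp headers, and the concrete token id (101 for [CLS]) is only an illustrative assumption.

// Minimal standalone sketch: mirrors the selection logic of
// llama_token_bos_impl after this commit, using mock types instead of the
// real llama.cpp headers. The id 101 for [CLS] is illustrative only.
#include <cstdint>
#include <cstdio>

typedef int32_t llama_token;
static const llama_token LLAMA_TOKEN_NULL = -1;

enum llama_vocab_type {
    LLAMA_VOCAB_TYPE_SPM,
    LLAMA_VOCAB_TYPE_BPE,
    LLAMA_VOCAB_TYPE_WPM,
};

struct mock_vocab {
    llama_vocab_type type;
    llama_token special_bos_id = LLAMA_TOKEN_NULL;
    llama_token special_cls_id = LLAMA_TOKEN_NULL;
};

// Same rule as the patched accessor: WPM vocabs report the CLS token as
// their BOS token, every other vocab type keeps special_bos_id.
static llama_token token_bos(const mock_vocab & vocab) {
    return vocab.type != LLAMA_VOCAB_TYPE_WPM ? vocab.special_bos_id : vocab.special_cls_id;
}

int main() {
    mock_vocab bert_vocab;
    bert_vocab.type           = LLAMA_VOCAB_TYPE_WPM;
    bert_vocab.special_cls_id = 101; // typical [CLS] id in BERT vocabs

    // Prints "bos = 101": the CLS token doubles as BOS for WPM vocabs.
    printf("bos = %d\n", token_bos(bert_vocab));
    return 0;
}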
@@ -45,7 +45,7 @@ struct llama_vocab {
     id special_unk_id = 0;
     id special_sep_id = LLAMA_TOKEN_NULL;
     id special_pad_id = LLAMA_TOKEN_NULL;
-    id special_cls_id = LLAMA_TOKEN_NULL;
+    id special_cls_id = LLAMA_TOKEN_NULL; // TODO: revisit if this is really needed https://github.com/ggerganov/llama.cpp/pull/10930
     id special_mask_id = LLAMA_TOKEN_NULL;
 
     id linefeed_id = 13;
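The second hunk only annotates the header: special_cls_id is kept for now, and the TODO pointing at #10930 marks it as a field to revisit, since the BOS accessor above already resolves to the CLS id for WPM vocabs and a separate slot may turn out to be unnecessary.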