Fix: `sentencepiece` tokenizers with added tokens failed with an incorrect assertion
Author: goerch, 2023-10-07 06:57:01 +02:00 (committed by GitHub)
parent 1faaae8c2b
commit 3a716b4dae

@@ -8200,7 +8200,9 @@ int llama_token_to_piece(const struct llama_model * model, llama_token token, ch
                 buf[0] = llama_token_to_byte(model->vocab, token);
                 return 1;
             } else {
-                GGML_ASSERT(false);
+                // TODO: for now we accept all unsupported token types,
+                // suppressing them like CONTROL tokens.
+                // GGML_ASSERT(false);
             }
             break;
         }
@@ -8216,7 +8218,9 @@ int llama_token_to_piece(const struct llama_model * model, llama_token token, ch
             } else if (llama_is_control_token(model->vocab, token)) {
                 ;
             } else {
-                GGML_ASSERT(false);
+                // TODO: for now we accept all unsupported token types,
+                // suppressing them like CONTROL tokens.
+                // GGML_ASSERT(false);
             }
             break;
         }
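
For context, the standalone C++ sketch below (not llama.cpp code; the toy_* names and the toy vocab are hypothetical) mirrors the branch structure of llama_token_to_piece to show what the patch changes: a token type the function does not recognize, such as an added user-defined sentencepiece token, used to hit GGML_ASSERT(false) and abort the process; after this commit it is rendered as an empty piece, the same treatment CONTROL tokens get.

#include <cstring>
#include <iostream>
#include <string>

enum toy_token_type { TOY_NORMAL, TOY_BYTE, TOY_CONTROL, TOY_USER_DEFINED };

struct toy_token {
    std::string    text;
    toy_token_type type;
};

// Mirrors the patched control flow: returns bytes written to buf,
// negative if buf is too small, 0 for tokens that render as nothing.
static int toy_token_to_piece(const toy_token & tok, char * buf, int length) {
    if (tok.type == TOY_NORMAL) {
        const int n = (int) tok.text.size();
        if (n > length) {
            return -n;
        }
        std::memcpy(buf, tok.text.data(), n);
        return n;
    } else if (tok.type == TOY_BYTE) {
        if (length < 1) {
            return -1;
        }
        buf[0] = tok.text.empty() ? '\0' : tok.text[0]; // stand-in for llama_token_to_byte
        return 1;
    } else if (tok.type == TOY_CONTROL) {
        return 0; // control tokens render as the empty string
    } else {
        // Before this commit the matching branch was GGML_ASSERT(false):
        // any unsupported token type aborted the whole process. The patch
        // suppresses such tokens like CONTROL tokens instead.
        return 0;
    }
}

int main() {
    char buf[16];
    const toy_token added = { "<added>", TOY_USER_DEFINED };
    const int n = toy_token_to_piece(added, buf, (int) sizeof(buf));
    // Prints "wrote 0 bytes" instead of aborting on the assertion.
    std::cout << "wrote " << n << " bytes\n";
    return 0;
}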