Missing tokenizer.model error during gguf conversion (#6443)
Co-authored-by: Jared Van Bortel <jared@nomic.ai>
parent 1ff4d9f3d6
commit db214fa578
@@ -323,8 +323,7 @@ class Model(ABC):
         toktypes: list[int] = []
 
         if not tokenizer_path.is_file():
-            print(f'Error: Missing {tokenizer_path}', file=sys.stderr)
-            sys.exit(1)
+            raise FileNotFoundError(f"File not found: {tokenizer_path}")
 
         tokenizer = SentencePieceProcessor(str(tokenizer_path))
         vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size())
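The patch replaces a print-to-stderr-plus-sys.exit(1) error path with a raised FileNotFoundError when tokenizer.model is absent. Below is a minimal sketch of the resulting behavior outside the converter; load_tokenizer, the models/mymodel path, and the try/except wrapper are hypothetical illustrations and not part of the patch. Only the is_file() check, the FileNotFoundError message, and the SentencePieceProcessor call mirror the diff.

from pathlib import Path

from sentencepiece import SentencePieceProcessor


def load_tokenizer(model_dir: str) -> SentencePieceProcessor:
    # Same check as the patched convert script: fail with a catchable
    # exception instead of killing the process with sys.exit(1).
    tokenizer_path = Path(model_dir) / "tokenizer.model"
    if not tokenizer_path.is_file():
        raise FileNotFoundError(f"File not found: {tokenizer_path}")
    return SentencePieceProcessor(str(tokenizer_path))


if __name__ == "__main__":
    try:
        load_tokenizer("models/mymodel")  # hypothetical model directory
    except FileNotFoundError as err:
        # A caller (test harness, wrapper script) can now recover or report;
        # the old print + sys.exit(1) path gave it no chance to do either.
        print(f"conversion aborted: {err}")

When the exception is left unhandled, Python still terminates with a non-zero exit status and a traceback pointing at the missing file, so command-line use continues to fail loudly.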