mirror of https://github.com/ggerganov/llama.cpp.git
convert-llama-hf-to-gguf.py : special tokens
parent 28b8c265eb
commit 76b46627e2
@@ -173,18 +173,20 @@ if Path(dir_model + "/tokenizer.model").is_file():
     gguf_writer.add_token_scores(scores)
     gguf_writer.add_token_types(toktypes)

+
+print("gguf: get special token ids")
+
 if Path(dir_model + "/tokenizer.json").is_file():
+    # Look for special tokens in tokenizer.json if it exists
+
     with open(dir_model + "/tokenizer.json", "r", encoding="utf-8") as f:
         tokenizer = json.load(f)

     if "added_tokens" in tokenizer and Path(dir_model + "/tokenizer_config.json").is_file():
-        print("gguf: get special token ids")

         with open(dir_model + "/tokenizer_config.json", "r", encoding="utf-8") as f:
             tokenizer_config = json.load(f)

-        # find special token ids
-
         if "bos_token" in tokenizer_config and tokenizer_config["bos_token"] != None:
             for key in tokenizer["added_tokens"]:
                 if key["content"] == tokenizer_config["bos_token"]["content"]:
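Note: the lookup in this hunk relies on tokenizer_config.json naming each special token by its text and tokenizer.json listing the added tokens together with their ids. A minimal standalone sketch of that matching, assuming a hypothetical model directory and the same shape the script expects (bos_token stored as an object with a "content" field):

import json
from pathlib import Path

dir_model = Path("./my-llama-model")  # hypothetical model directory

# tokenizer_config.json typically names the special token text, e.g.
#   {"bos_token": {"content": "<s>"}, "eos_token": {"content": "</s>"}}
with open(dir_model / "tokenizer_config.json", "r", encoding="utf-8") as f:
    tokenizer_config = json.load(f)

# tokenizer.json lists added tokens with their ids, e.g.
#   {"added_tokens": [{"id": 1, "content": "<s>"}, {"id": 2, "content": "</s>"}]}
with open(dir_model / "tokenizer.json", "r", encoding="utf-8") as f:
    tokenizer = json.load(f)

# Find the id whose content matches the configured BOS text (None if absent).
bos_content = tokenizer_config["bos_token"]["content"]
bos_id = next((tok["id"] for tok in tokenizer["added_tokens"] if tok["content"] == bos_content), None)
print("bos token id:", bos_id)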
@@ -209,6 +211,23 @@ if Path(dir_model + "/tokenizer.json").is_file():
             for key in tokenizer["added_tokens"]:
                 if key["content"] == tokenizer_config["pad_token"]["content"]:
                     gguf_writer.add_pad_token_id(key["id"])
+else:
+    # If no tokenizer.json: Look for special tokens in config.json
+
+    if "bos_token_id" in hparams and hparams["bos_token_id"] != None:
+        gguf_writer.add_bos_token_id(hparams["bos_token_id"])
+
+    if "eos_token_id" in hparams and hparams["eos_token_id"] != None:
+        gguf_writer.add_eos_token_id(hparams["eos_token_id"])
+
+    if "unk_token_id" in hparams and hparams["unk_token_id"] != None:
+        gguf_writer.add_unk_token_id(hparams["unk_token_id"])
+
+    if "sep_token_id" in hparams and hparams["sep_token_id"] != None:
+        gguf_writer.add_sep_token_id(hparams["sep_token_id"])
+
+    if "pad_token_id" in hparams and hparams["pad_token_id"] != None:
+        gguf_writer.add_pad_token_id(hparams["pad_token_id"])


 # TENSORS
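Note: the new else branch covers checkpoints that ship no tokenizer.json; the ids are then read directly from config.json, which the script loads into hparams earlier on. A minimal standalone sketch of that fallback, assuming a hypothetical path and field values:

import json

# config.json for many HF LLaMA checkpoints carries the ids directly, e.g.
#   {"bos_token_id": 1, "eos_token_id": 2, "pad_token_id": 0, ...}
with open("./my-llama-model/config.json", "r", encoding="utf-8") as f:  # hypothetical path
    hparams = json.load(f)

# Mirror the commit's checks: only forward ids that are present and not null.
for key in ("bos_token_id", "eos_token_id", "unk_token_id", "sep_token_id", "pad_token_id"):
    token_id = hparams.get(key)
    if token_id is not None:
        print(f"{key} = {token_id}")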