Mirror of https://github.com/ggerganov/llama.cpp.git, synced 2024-11-11 21:39:52 +00:00
convert.py: Support models which are stored in a single pytorch_model.bin (#1469)
* Support models in a single pytorch_model.bin
* Remove spurious line with typo
This commit is contained in:
parent 42627421ec
commit 2b2646931b
@@ -121,7 +121,6 @@ def make_tensors_list() -> List[str]:
             f'layers.{i}.feed_forward.w1.weight',
             f'layers.{i}.feed_forward.w2.weight',
             f'layers.{i}.feed_forward.w3.weight',
-            f'layers.{i}.atttention_norm.weight',
             f'layers.{i}.ffn_norm.weight',
         ]
     return ret
@@ -1055,7 +1054,7 @@ def load_some_model(path: Path) -> ModelPlus:
         files = list(path.glob("model-00001-of-*.safetensors"))
         if not files:
             # Try the PyTorch patterns too, with lower priority
-            globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt"]
+            globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt", "pytorch_model.bin" ]
             files = [file for glob in globs for file in path.glob(glob)]
         if not files:
             # Try GGML too, but with lower priority, since if both a non-GGML
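For context, here is a minimal standalone sketch of the lookup order this hunk changes. The helper name find_model_files is illustrative only; in convert.py this logic lives inside load_some_model. Safetensors shards are tried first, then the PyTorch patterns, which after this commit also match a model stored as a single pytorch_model.bin.

from pathlib import Path
from typing import List

def find_model_files(path: Path) -> List[Path]:
    # Highest priority: sharded safetensors checkpoints.
    files = list(path.glob("model-00001-of-*.safetensors"))
    if not files:
        # Lower priority: PyTorch patterns, including (after this commit)
        # a model stored as a single pytorch_model.bin.
        globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin",
                 "*.pt", "pytorch_model.bin"]
        files = [file for glob in globs for file in path.glob(glob)]
    if not files:
        raise FileNotFoundError(f"Can't find model in directory {path}")
    return files

With this change, a directory containing only pytorch_model.bin (the common single-file Hugging Face layout) is matched by the last glob instead of falling through to the lower-priority GGML lookup.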