convert.py: Support models which are stored in a single pytorch_model.bin (#1469)

* Support models in a single pytorch_model.bin

* Remove spurious line with typo
This commit is contained in:
Tom Jobbins 2023-05-16 23:04:35 +01:00 committed by GitHub
parent 42627421ec
commit 2b2646931b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -121,7 +121,6 @@ def make_tensors_list() -> List[str]:
         f'layers.{i}.feed_forward.w1.weight',
         f'layers.{i}.feed_forward.w2.weight',
         f'layers.{i}.feed_forward.w3.weight',
-        f'layers.{i}.atttention_norm.weight',
         f'layers.{i}.ffn_norm.weight',
     ]
     return ret
@@ -1055,7 +1054,7 @@ def load_some_model(path: Path) -> ModelPlus:
         files = list(path.glob("model-00001-of-*.safetensors"))
         if not files:
             # Try the PyTorch patterns too, with lower priority
-            globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt"]
+            globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt", "pytorch_model.bin" ]
            files = [file for glob in globs for file in path.glob(glob)]
        if not files:
            # Try GGML too, but with lower priority, since if both a non-GGML