From 8403152257b3e0c405f88a0cbf08dc640e29e206 Mon Sep 17 00:00:00 2001
From: HideLord
Date: Sun, 12 Mar 2023 17:28:15 +0200
Subject: [PATCH] Fixing compatibility with GPTQ repo commit
 2f667f7da051967566a5fb0546f8614bcd3a1ccd. Expects a string and breaks on
 Path objects.

---
 modules/quantized_LLaMA.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/quantized_LLaMA.py b/modules/quantized_LLaMA.py
index ca4eebf2..9ab7f333 100644
--- a/modules/quantized_LLaMA.py
+++ b/modules/quantized_LLaMA.py
@@ -41,7 +41,7 @@ def load_quantized_LLaMA(model_name):
         print(f"Could not find {pt_model}, exiting...")
         exit()
 
-    model = load_quant(path_to_model, pt_path, bits)
+    model = load_quant(path_to_model, str(pt_path), bits)
 
     # Multi-GPU setup
     if shared.args.gpu_memory:
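
A minimal sketch of the failure mode this patch works around, assuming the GPTQ repo's load_quant applies string operations (for example .endswith()) to the checkpoint path; the exact check and the example filename below are hypothetical, not taken from the GPTQ code:

    from pathlib import Path

    pt_path = Path("models/llama-7b-4bit.pt")  # hypothetical checkpoint path

    # pathlib.Path has no string methods, so a callee that assumes a plain
    # str path fails:
    #   pt_path.endswith(".pt")  -> AttributeError
    # Converting to str first keeps such checks working:
    print(str(pt_path).endswith(".pt"))  # True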