Add error message when GPTQ-for-LLaMa import fails (#1871)

---------

Co-authored-by: oobabooga <112222186+oobabooga@users.noreply.github.com>
This commit is contained in:
Matthew McAllister 2023-05-08 18:29:09 -07:00 committed by GitHub
parent 99d2dd8d0d
commit d78b04f0b4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -12,7 +12,13 @@ from transformers import AutoConfig, AutoModelForCausalLM
import modules.shared as shared
sys.path.insert(0, str(Path("repositories/GPTQ-for-LLaMa")))
try:
    import llama_inference_offload
except ImportError:
logging.error('Failed to load GPTQ-for-LLaMa')
logging.error('See https://github.com/oobabooga/text-generation-webui/blob/main/docs/GPTQ-models-(4-bit-mode).md')
sys.exit(-1)
try:
    from modelutils import find_layers