Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-11-22 08:07:56 +01:00
Add error message when GPTQ-for-LLaMa import fails (#1871)
Co-authored-by: oobabooga <112222186+oobabooga@users.noreply.github.com>
parent 99d2dd8d0d
commit d78b04f0b4
@@ -12,7 +12,13 @@ from transformers import AutoConfig, AutoModelForCausalLM
 import modules.shared as shared
 
 sys.path.insert(0, str(Path("repositories/GPTQ-for-LLaMa")))
-import llama_inference_offload
+
+try:
+    import llama_inference_offload
+except ImportError:
+    logging.error('Failed to load GPTQ-for-LLaMa')
+    logging.error('See https://github.com/oobabooga/text-generation-webui/blob/main/docs/GPTQ-models-(4-bit-mode).md')
+    sys.exit(-1)
 
 try:
     from modelutils import find_layers
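For reference, a minimal, self-contained sketch of the guarded-import pattern this patch introduces. In the real module, sys, logging, and pathlib.Path are already imported above this hunk; the repositories/GPTQ-for-LLaMa path assumes the repository has been cloned there as described in the linked docs.

import logging
import sys
from pathlib import Path

# Make the cloned GPTQ-for-LLaMa repository importable.
sys.path.insert(0, str(Path("repositories/GPTQ-for-LLaMa")))

try:
    import llama_inference_offload
except ImportError:
    # Replace the bare ImportError traceback with a pointer to the setup docs,
    # then exit instead of failing later with a less obvious error.
    logging.error('Failed to load GPTQ-for-LLaMa')
    logging.error('See https://github.com/oobabooga/text-generation-webui/blob/main/docs/GPTQ-models-(4-bit-mode).md')
    sys.exit(-1)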