Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-22 16:17:57 +01:00)
Add error message when GPTQ-for-LLaMa import fails (#1871)
Co-authored-by: oobabooga <112222186+oobabooga@users.noreply.github.com>
parent 99d2dd8d0d
commit d78b04f0b4
@@ -12,7 +12,13 @@ from transformers import AutoConfig, AutoModelForCausalLM
 import modules.shared as shared
 
 sys.path.insert(0, str(Path("repositories/GPTQ-for-LLaMa")))
-import llama_inference_offload
+
+try:
+    import llama_inference_offload
+except ImportError:
+    logging.error('Failed to load GPTQ-for-LLaMa')
+    logging.error('See https://github.com/oobabooga/text-generation-webui/blob/main/docs/GPTQ-models-(4-bit-mode).md')
+    sys.exit(-1)
 
 try:
     from modelutils import find_layers
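
For reference, a minimal standalone sketch of the import-guard pattern this commit introduces in modules/GPTQ_loader.py. The module and path names are taken from the diff above; the explicit imports of logging, sys, and pathlib are assumptions here, standing in for whatever is already at the top of the upstream file.

import logging
import sys
from pathlib import Path

# Make the local GPTQ-for-LLaMa checkout importable.
sys.path.insert(0, str(Path("repositories/GPTQ-for-LLaMa")))

try:
    import llama_inference_offload
except ImportError:
    # Fail early with a readable message and a pointer to the setup docs
    # instead of an unhandled ImportError traceback.
    logging.error('Failed to load GPTQ-for-LLaMa')
    logging.error('See https://github.com/oobabooga/text-generation-webui/blob/main/docs/GPTQ-models-(4-bit-mode).md')
    sys.exit(-1)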