Mirror of https://github.com/oobabooga/text-generation-webui.git
Fix is_ccl_available & is_xpu_available imports
parent 778a010df8
commit 839a87bac8
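The change is small: with the accelerate 0.24 series pinned below, the XPU and oneCCL availability helpers are imported from accelerate.utils rather than from the accelerate package root, and every loader that used the root import is updated to match. A minimal sketch of the corrected import in use; pick_device is a hypothetical helper for illustration, not code from this repository:

# Sketch only (assumes accelerate 0.24.* is installed): the helpers now come
# from accelerate.utils; pick_device is a hypothetical illustration, not a
# function in this repository.
import torch
from accelerate.utils import is_ccl_available, is_xpu_available


def pick_device() -> str:
    """Return a device string using the corrected availability checks."""
    if torch.cuda.is_available():
        return "cuda"
    if is_xpu_available():
        # Intel XPU build of PyTorch; is_ccl_available() additionally reports
        # whether the oneCCL bindings needed for multi-process XPU runs exist.
        return "xpu"
    return "cpu"


print(pick_device(), "| oneCCL bindings:", is_ccl_available())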
@@ -1,6 +1,6 @@
 from pathlib import Path
 
-from accelerate import is_xpu_available
+from accelerate.utils import is_xpu_available
 from auto_gptq import AutoGPTQForCausalLM, BaseQuantizeConfig
 
 import modules.shared as shared
@@ -5,7 +5,7 @@ from pathlib import Path
 import accelerate
 import torch
 import transformers
-from accelerate import is_xpu_available
+from accelerate.utils import is_xpu_available
 from gptq_for_llama import llama_inference_offload
 from gptq_for_llama.modelutils import find_layers
 from gptq_for_llama.quant import make_quant
@@ -7,12 +7,8 @@ from pathlib import Path
 
 import torch
 import transformers
-from accelerate import (
-    infer_auto_device_map,
-    init_empty_weights,
-    is_ccl_available,
-    is_xpu_available
-)
+from accelerate import infer_auto_device_map, init_empty_weights
+from accelerate.utils import is_ccl_available, is_xpu_available
 from transformers import (
     AutoConfig,
     AutoModel,
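This hunk keeps infer_auto_device_map and init_empty_weights at the accelerate package root and moves only the two availability checks to accelerate.utils. A short sketch of how the four names are typically combined; the model id and memory budget are placeholders, not values taken from this commit:

# Sketch under stated assumptions: "facebook/opt-125m" and the memory limits
# below are placeholders; the repository's real loading path is more involved.
from accelerate import infer_auto_device_map, init_empty_weights
from accelerate.utils import is_ccl_available, is_xpu_available
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("facebook/opt-125m")

# Build a weight-free skeleton of the model, then let accelerate propose a
# placement for it within the given memory budget.
with init_empty_weights():
    model = AutoModelForCausalLM.from_config(config)
device_map = infer_auto_device_map(model, max_memory={0: "8GiB", "cpu": "32GiB"})

print("XPU available:", is_xpu_available())
print("oneCCL bindings available:", is_ccl_available())
print(device_map)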
@@ -1,4 +1,4 @@
-accelerate==0.23.*
+accelerate==0.24.*
 colorama
 datasets
 einops
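Finally, the accelerate pin moves from 0.23.* to 0.24.*, the series the accelerate.utils import path above is written against. An optional sanity check, not part of the commit, assuming Python 3.8+ for importlib.metadata:

# Optional check (not part of the commit): confirm the installed accelerate
# matches the new 0.24.* pin before relying on the accelerate.utils path.
from importlib.metadata import version

installed = version("accelerate")
if not installed.startswith("0.24"):
    raise RuntimeError(f"Expected accelerate 0.24.*, found {installed}")

from accelerate.utils import is_ccl_available, is_xpu_available  # noqa: E402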