Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-22 08:07:56 +01:00)
Fix is_ccl_available & is_xpu_available imports

Parent: 778a010df8
Commit: 839a87bac8
@@ -1,6 +1,6 @@
 from pathlib import Path
 
-from accelerate import is_xpu_available
+from accelerate.utils import is_xpu_available
 from auto_gptq import AutoGPTQForCausalLM, BaseQuantizeConfig
 
 import modules.shared as shared
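The hunk above moves is_xpu_available from the top-level accelerate namespace to accelerate.utils. A minimal defensive sketch (an assumption, not code from this commit) that prefers the new location and falls back to the old one for accelerate releases that still export the helper at the top level:

    try:
        from accelerate.utils import is_xpu_available
    except ImportError:
        # Older accelerate layouts exported the helper at the top level.
        from accelerate import is_xpu_available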
@@ -5,7 +5,7 @@ from pathlib import Path
 import accelerate
 import torch
 import transformers
-from accelerate import is_xpu_available
+from accelerate.utils import is_xpu_available
 from gptq_for_llama import llama_inference_offload
 from gptq_for_llama.modelutils import find_layers
 from gptq_for_llama.quant import make_quant
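As in the previous hunk, only the import location changes here. A hedged usage sketch (assumed, not taken from the loader code) showing how the helper typically gates device selection once imported from accelerate.utils:

    import torch
    from accelerate.utils import is_xpu_available

    # Prefer an Intel XPU when accelerate reports one, then CUDA, then CPU.
    if is_xpu_available():
        device = torch.device("xpu:0")
    elif torch.cuda.is_available():
        device = torch.device("cuda:0")
    else:
        device = torch.device("cpu")
    print(device)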
@@ -7,12 +7,8 @@ from pathlib import Path
 
 import torch
 import transformers
-from accelerate import (
-    infer_auto_device_map,
-    init_empty_weights,
-    is_ccl_available,
-    is_xpu_available
-)
+from accelerate import infer_auto_device_map, init_empty_weights
+from accelerate.utils import is_ccl_available, is_xpu_available
 from transformers import (
     AutoConfig,
     AutoModel,
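This hunk splits the grouped import so that is_ccl_available and is_xpu_available come from accelerate.utils, while infer_auto_device_map and init_empty_weights stay at the top level. A hedged sketch (an assumption, not the project's code) of how the two utils helpers can pick a torch.distributed backend, "ccl" being Intel's oneCCL backend for XPU devices:

    import torch
    from accelerate.utils import is_ccl_available, is_xpu_available

    if is_xpu_available() and is_ccl_available():
        backend = "ccl"    # oneCCL for Intel XPU
    elif torch.cuda.is_available():
        backend = "nccl"   # NCCL for NVIDIA GPUs
    else:
        backend = "gloo"   # CPU fallback
    print(f"distributed backend: {backend}")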
@@ -1,4 +1,4 @@
-accelerate==0.23.*
+accelerate==0.24.*
 colorama
 datasets
 einops
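The requirements hunk bumps the accelerate pin from 0.23.* to 0.24.*. A small hedged check (an assumption, not part of the commit) for confirming the installed release matches the new pin:

    from importlib.metadata import version

    installed = version("accelerate")
    if not installed.startswith("0.24."):
        print(f"accelerate {installed} is installed, but requirements.txt pins 0.24.*")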