Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-11-22 08:07:56 +01:00.
Don't install AutoAWQ on CUDA 11.8
This commit is contained in:
parent
14584fda36
commit
d581334a41
@@ -395,7 +395,11 @@ def update_requirements(initial_installation=False, pull=True):

     textgen_requirements = open(requirements_file).read().splitlines()
     if is_cuda118:
-        textgen_requirements = [req.replace('+cu121', '+cu118').replace('+cu122', '+cu118') for req in textgen_requirements if "auto-gptq" not in req]
+        textgen_requirements = [
+            req.replace('+cu121', '+cu118').replace('+cu122', '+cu118')
+            for req in textgen_requirements
+            if "auto-gptq" not in req.lower() and "autoawq" not in req.lower()
+        ]
     if is_windows() and is_cuda118:  # No flash-attention on Windows for CUDA 11
         textgen_requirements = [req for req in textgen_requirements if 'oobabooga/flash-attention' not in req]
Loading…
Reference in New Issue
Block a user