Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-10-31 22:50:15 +01:00)

Merge pull request #6 from jllllll/oobabooga-windows: Attempt to Improve Reliability

Commit 75465fa041
@@ -2,7 +2,7 @@ Thank you for downloading oobabooga/text-generation-webui.
 Here is how to get it up and running:
 
 1. Run the "install" script to install the web UI and its requirements in this folder.
-2. Run the "download" script to download a model of your choice. Change TextOnly variable at top of script to download only config files.
+2. Run the "download-model" script to download a model of your choice. Change TextOnly variable at top of script to download only config files.
 3. Run the "start-webui" script to launch the web UI.
 
 To add flags like --chat, --notebook, --extensions, etc, edit the
@@ -11,3 +11,9 @@ to the line that says "python server.py...".
 
 To get the latest updates in the future, just re-run the "install" script.
 This will only install the updates, so it should be much faster.
+May need to delete '\text-generation-webui\repositories\GPTQ-for-LLaMa' folder if GPTQ-for-LLaMa needs to be updated.
+
+You can open a command-line attached to the virtual environment by running the "micromamba-cmd" script.
+
+This installer uses a custom-built Windows-compatible version of bitsandbytes. Source: https://github.com/acpopescu/bitsandbytes/tree/cmake_windows
+When starting the webui, you may encounter an error referencing cuda 116. Starting the webui again should allow bitsandbytes to detect the correct version.
install.bat
@@ -5,6 +5,9 @@
 @rem using micromamba (an 8mb static-linked single-file binary, conda replacement).
 @rem This enables a user to install this project without manually installing conda and git.
 
+echo WARNING: This script relies on Micromamba which may have issues on some systems when installed under a path with spaces.
+echo May also have issues with long paths.&& echo.
+
 echo What is your GPU?
 echo.
 echo A) NVIDIA
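Editor's sketch (not part of the diff): the new warning above concerns spaces in the install path, which Micromamba may not handle well. A minimal, self-contained way to detect that condition in batch, assuming the script is run from the folder it lives in, is to strip spaces from the path and compare:

    @echo off
    @rem Hedged sketch: warn if the folder containing this script has a space in its path.
    cd /D "%~dp0"
    set "INSTALL_DIR=%cd%"
    @rem "%INSTALL_DIR: =%" is INSTALL_DIR with all spaces removed; inequality means a space exists.
    if not "%INSTALL_DIR%"=="%INSTALL_DIR: =%" (
        echo WARNING: the install path "%INSTALL_DIR%" contains spaces. Micromamba may have issues here.
    )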
@@ -26,7 +29,7 @@ if /I "%gpuchoice%" == "A" (
 
 cd /D "%~dp0"
 
-set PATH=%SystemRoot%\system32;%PATH%
+set PATH=%PATH%;%SystemRoot%\system32
 
 set MAMBA_ROOT_PREFIX=%cd%\installer_files\mamba
 set INSTALL_ENV_DIR=%cd%\installer_files\env
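Editor's note (not part of the diff): cmd.exe searches PATH entries left to right, so the change above, which appends system32 instead of prepending it, lets binaries that appear earlier in PATH (for example ones provided by the installer's own environment) take precedence while still guaranteeing that standard utilities in system32 resolve. A hedged sketch of how to observe the search order:

    @rem Hedged sketch: "where" lists matches in PATH order; the first entry is the one cmd runs.
    set "PATH=%PATH%;%SystemRoot%\system32"
    where curl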
@@ -45,7 +48,7 @@ if "%PACKAGES_TO_INSTALL%" NEQ "" (
 echo "Downloading Micromamba from %MICROMAMBA_DOWNLOAD_URL% to %MAMBA_ROOT_PREFIX%\micromamba.exe"
 
 mkdir "%MAMBA_ROOT_PREFIX%"
-call curl -L "%MICROMAMBA_DOWNLOAD_URL%" > "%MAMBA_ROOT_PREFIX%\micromamba.exe"
+call curl -L "%MICROMAMBA_DOWNLOAD_URL%" > "%MAMBA_ROOT_PREFIX%\micromamba.exe" || ( echo Micromamba failed to download. && goto end )
 
 @rem test the mamba binary
 echo Micromamba version:
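Editor's sketch (not part of the diff): the reliability improvement here is the batch fail-fast idiom "command || ( echo message && goto end )", which this commit applies to the Micromamba download and to the wheel installs below. "A || B" runs B only when A exits with a non-zero errorlevel, so a failed step jumps straight to the :end label instead of continuing with a broken install. A self-contained illustration, with a deliberately unreachable placeholder URL:

    @echo off
    @rem Hedged sketch of the fail-fast pattern; example.invalid is a placeholder URL.
    call curl -L "https://example.invalid/micromamba.exe" > "micromamba.exe" || ( echo Download failed. && goto end )
    echo Download succeeded.
    :end
    pause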
@@ -73,6 +76,7 @@ if exist text-generation-webui\ (
 git pull
 ) else (
 git clone https://github.com/oobabooga/text-generation-webui.git
+call python -m pip install https://github.com/jllllll/bitsandbytes-windows-webui/raw/main/bitsandbytes-0.37.2-py3-none-any.whl
 cd text-generation-webui || goto end
 )
 call python -m pip install -r requirements.txt --upgrade
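Editor's note (not part of the diff): pip can install a prebuilt wheel directly from a URL, which is how the commit pulls in a Windows-compatible bitsandbytes build instead of patching the Linux package afterwards (see the removed ":bandaid" block in the next hunk). A hedged sketch, assuming the installer's Python environment is already active:

    @rem Hedged sketch: install the wheel from the URL used in the diff, then check that it imports.
    call python -m pip install https://github.com/jllllll/bitsandbytes-windows-webui/raw/main/bitsandbytes-0.37.2-py3-none-any.whl || ( echo bitsandbytes wheel installation failed. && goto end )
    call python -c "import bitsandbytes"
    :end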
@@ -97,20 +101,9 @@ if not exist GPTQ-for-LLaMa\ (
 call python setup_cuda.py install
 if not exist "%INSTALL_ENV_DIR%\lib\site-packages\quant_cuda-0.0.0-py3.10-win-amd64.egg" (
 echo CUDA kernal compilation failed. Will try to install from wheel.
-curl -LO https://github.com/jllllll/GPTQ-for-LLaMa-Wheels/raw/main/quant_cuda-0.0.0-cp310-cp310-win_amd64.whl
-call python -m pip install quant_cuda-0.0.0-cp310-cp310-win_amd64.whl || ( echo Wheel installation failed. && goto end )
+call python -m pip install https://github.com/jllllll/GPTQ-for-LLaMa-Wheels/raw/main/quant_cuda-0.0.0-cp310-cp310-win_amd64.whl || ( echo Wheel installation failed. && goto end )
 )
-cd ..
 )
-cd ..\..
-
-:bandaid
-curl -LO https://github.com/DeXtmL/bitsandbytes-win-prebuilt/raw/main/libbitsandbytes_cpu.dll
-curl -LO https://github.com/james-things/bitsandbytes-prebuilt-all_arch/raw/main/0.37.0/libbitsandbytes_cudaall.dll
-mv libbitsandbytes_cpu.dll "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes"
-mv libbitsandbytes_cudaall.dll "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes"
-sed -i "s/if not torch.cuda.is_available(): return 'libsbitsandbytes_cpu.so', None, None, None, None/if torch.cuda.is_available(): return 'libbitsandbytes_cudaall.dll', None, None, None, None\n else: return 'libbitsandbytes_cpu.dll', None, None, None, None/g" "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes\cuda_setup\main.py"
-sed -i "s/ct.cdll.LoadLibrary(binary_path)/ct.cdll.LoadLibrary(str(binary_path))/g" "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes\cuda_setup\main.py"
 
 :end
 pause
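Editor's sketch (not part of the diff): the pattern in the final hunk is compile, verify, fall back: build the GPTQ-for-LLaMa CUDA kernel, check whether the expected egg landed in site-packages, and if it did not, install the prebuilt wheel straight from its URL, aborting on failure. A condensed, hedged restatement of that flow, assuming the environment variables and working directory set earlier in install.bat:

    @rem Hedged sketch of the compile-then-fallback flow; INSTALL_ENV_DIR is assumed to be set.
    call python setup_cuda.py install
    if not exist "%INSTALL_ENV_DIR%\lib\site-packages\quant_cuda-0.0.0-py3.10-win-amd64.egg" (
        echo CUDA kernel compilation failed. Will try to install from wheel.
        call python -m pip install https://github.com/jllllll/GPTQ-for-LLaMa-Wheels/raw/main/quant_cuda-0.0.0-cp310-cp310-win_amd64.whl || ( echo Wheel installation failed. && goto end )
    )
    :end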