@echo off

@rem Based on the installer found here: https://github.com/Sygil-Dev/sygil-webui
@rem This script will install git and all dependencies
@rem using micromamba (an 8mb static-linked single-file binary, conda replacement).
@rem This enables a user to install this project without manually installing conda and git.

echo What is your GPU?
echo.
echo A) NVIDIA
echo B) None (I want to run in CPU mode)
echo.

@rem NOTE: quotes must hug the variable name (set "var=value"); a space inside the
@rem quotes becomes part of the variable name or value in cmd.exe.
set /p "gpuchoice=Input> "
set gpuchoice=%gpuchoice:~0,1%

if /I "%gpuchoice%" == "A" (
  set "PACKAGES_TO_INSTALL=python=3.10.9 pytorch torchvision torchaudio pytorch-cuda=11.7 cuda-toolkit conda-forge::ninja conda-forge::git"
  set "CHANNEL=-c pytorch -c nvidia/label/cuda-11.7.0 -c nvidia"
) else if /I "%gpuchoice%" == "B" (
  set "PACKAGES_TO_INSTALL=pytorch torchvision torchaudio cpuonly git"
  set "CHANNEL=-c conda-forge -c pytorch"
) else (
  echo Invalid choice. Exiting...
  exit
)

@rem run from the directory this script lives in, regardless of how it was launched
cd /D "%~dp0"

@rem ensure system tools (curl, findstr, ...) win over anything odd earlier in PATH
set PATH=%SystemRoot%\system32;%PATH%

set MAMBA_ROOT_PREFIX=%cd%\installer_files\mamba
set INSTALL_ENV_DIR=%cd%\installer_files\env
set MICROMAMBA_DOWNLOAD_URL=https://github.com/cmdr2/stable-diffusion-ui/releases/download/v1.1/micromamba.exe
set REPO_URL=https://github.com/oobabooga/text-generation-webui.git
set umamba_exists=F

@rem figure out whether git and conda needs to be installed
call "%MAMBA_ROOT_PREFIX%\micromamba.exe" --version >nul 2>&1
if "%ERRORLEVEL%" EQU "0" set umamba_exists=T

@rem (if necessary) install git and conda into a contained environment
if "%PACKAGES_TO_INSTALL%" NEQ "" (
  @rem download micromamba
  if "%umamba_exists%" == "F" (
    echo "Downloading Micromamba from %MICROMAMBA_DOWNLOAD_URL% to %MAMBA_ROOT_PREFIX%\micromamba.exe"

    mkdir "%MAMBA_ROOT_PREFIX%"
    call curl -L "%MICROMAMBA_DOWNLOAD_URL%" > "%MAMBA_ROOT_PREFIX%\micromamba.exe"

    @rem test the mamba binary
    echo Micromamba version:
    call "%MAMBA_ROOT_PREFIX%\micromamba.exe" --version || ( echo Micromamba not found. && goto end )
  )

  @rem create micromamba hook
  if not exist "%MAMBA_ROOT_PREFIX%\condabin\micromamba.bat" (
    call "%MAMBA_ROOT_PREFIX%\micromamba.exe" shell hook >nul 2>&1
  )

  @rem create the installer env
  if not exist "%INSTALL_ENV_DIR%" (
    echo Packages to install: %PACKAGES_TO_INSTALL%
    call "%MAMBA_ROOT_PREFIX%\micromamba.exe" create -y --prefix "%INSTALL_ENV_DIR%" %CHANNEL% %PACKAGES_TO_INSTALL%
  )
)

@rem activate installer env
call "%MAMBA_ROOT_PREFIX%\condabin\micromamba.bat" activate "%INSTALL_ENV_DIR%" || ( echo MicroMamba hook not found. && goto end )

@rem clone the repository and install the pip requirements
if exist text-generation-webui\ (
  cd text-generation-webui
  git pull
) else (
  git clone https://github.com/oobabooga/text-generation-webui.git
  cd text-generation-webui || goto end
)
call python -m pip install -r requirements.txt --upgrade
call python -m pip install -r extensions\api\requirements.txt --upgrade
call python -m pip install -r extensions\elevenlabs_tts\requirements.txt --upgrade
call python -m pip install -r extensions\google_translate\requirements.txt --upgrade
call python -m pip install -r extensions\silero_tts\requirements.txt --upgrade
call python -m pip install -r extensions\whisper_stt\requirements.txt --upgrade

@rem skip gptq install if cpu only
if /I not "%gpuchoice%" == "A" goto bandaid

@rem download gptq and compile locally and if compile fails, install from wheel
if not exist repositories\ (
  mkdir repositories
)
cd repositories || goto end
if not exist GPTQ-for-LLaMa\ (
  git clone https://github.com/qwopqwop200/GPTQ-for-LLaMa.git
  cd GPTQ-for-LLaMa || goto end
  call python -m pip install -r requirements.txt
  call python setup_cuda.py install
  if not exist "%INSTALL_ENV_DIR%\lib\site-packages\quant_cuda-0.0.0-py3.10-win-amd64.egg" (
    echo CUDA kernel compilation failed. Will try to install from wheel.
    curl -LO https://github.com/jllllll/GPTQ-for-LLaMa-Wheels/raw/main/quant_cuda-0.0.0-cp310-cp310-win_amd64.whl
    call python -m pip install quant_cuda-0.0.0-cp310-cp310-win_amd64.whl || ( echo Wheel installation failed. && goto end )
  )
  cd ..
)
cd ..\..

:bandaid
@rem drop in prebuilt bitsandbytes DLLs and patch its loader for Windows
@rem (mv/sed are available here because git, installed into the env above, ships them)
curl -LO https://github.com/DeXtmL/bitsandbytes-win-prebuilt/raw/main/libbitsandbytes_cpu.dll
curl -LO https://github.com/james-things/bitsandbytes-prebuilt-all_arch/raw/main/0.37.0/libbitsandbytes_cudaall.dll
mv libbitsandbytes_cpu.dll "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes"
mv libbitsandbytes_cudaall.dll "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes"
sed -i "s/if not torch.cuda.is_available(): return 'libsbitsandbytes_cpu.so', None, None, None, None/if torch.cuda.is_available(): return 'libbitsandbytes_cudaall.dll', None, None, None, None\n    else: return 'libbitsandbytes_cpu.dll', None, None, None, None/g" "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes\cuda_setup\main.py"
sed -i "s/ct.cdll.LoadLibrary(binary_path)/ct.cdll.LoadLibrary(str(binary_path))/g" "%INSTALL_ENV_DIR%\lib\site-packages\bitsandbytes\cuda_setup\main.py"
sed -i "s/make_quant(model, layers, wbits, groupsize, faster=args.faster_kernel)/make_quant(model, layers, wbits, groupsize, faster=(\"args\" in globals() and args.faster_kernel))/g" "%INSTALL_ENV_DIR%\..\..\text-generation-webui\repositories\GPTQ-for-LLaMa\llama.py"

:end
pause