Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-21 23:57:58 +01:00)

Make the Google Colab notebook functional again (attempt)
parent e777b73349
commit 8b52b93e85
@@ -22,7 +22,7 @@
 "source": [
 "# oobabooga/text-generation-webui\n",
 "\n",
-"After running both cells, a public gradio URL will appear at the bottom in a few minutes. You can optionally generate an API link.\n",
+"After running both cells, a public gradio URL will appear at the bottom in around 10 minutes. You can optionally generate an API link.\n",
 "\n",
 "* Project page: https://github.com/oobabooga/text-generation-webui\n",
 "* Gradio server status: https://status.gradio.app/"
@@ -53,43 +53,27 @@
 "\n",
 "#@markdown If unsure about the branch, write \"main\" or leave it blank.\n",
 "\n",
-"import torch\n",
+"import os\n",
 "from pathlib import Path\n",
 "\n",
+"os.environ.pop('PYTHONPATH', None)\n",
+"\n",
 "if Path.cwd().name != 'text-generation-webui':\n",
-"  print(\"Installing the webui...\")\n",
+"  print(\"\\033[1;32;1m\\n --> Installing the web UI. This will take a while, but after the initial setup, you can download and test as many models as you like.\\033[0;37;0m\\n\")\n",
 "\n",
 "  !git clone https://github.com/oobabooga/text-generation-webui\n",
 "  %cd text-generation-webui\n",
 "\n",
-"  torver = torch.__version__\n",
-"  print(f\"TORCH: {torver}\")\n",
-"  is_cuda118 = '+cu118' in torver  # 2.1.0+cu118\n",
-"\n",
-"  if is_cuda118:\n",
-"    !python -m pip install --upgrade torch==2.2.1 torchvision==0.17.1 torchaudio==2.2.1 --index-url https://download.pytorch.org/whl/cu118\n",
-"  else:\n",
-"    !python -m pip install --upgrade torch==2.2.1 torchvision==0.17.1 torchaudio==2.2.1 --index-url https://download.pytorch.org/whl/cu121\n",
-"\n",
-"  textgen_requirements = open('requirements.txt').read().splitlines()\n",
-"  if is_cuda118:\n",
-"    textgen_requirements = [req.replace('+cu121', '+cu118').replace('+cu122', '+cu118') for req in textgen_requirements]\n",
-"  with open('temp_requirements.txt', 'w') as file:\n",
-"    file.write('\\n'.join(textgen_requirements))\n",
-"\n",
-"  !pip install -r temp_requirements.txt --upgrade\n",
-"\n",
-"  print(\"\\033[1;32;1m\\n --> If you see a warning about \\\"previously imported packages\\\", just ignore it.\\033[0;37;0m\")\n",
-"  print(\"\\033[1;32;1m\\n --> There is no need to restart the runtime.\\n\\033[0;37;0m\")\n",
-"\n",
-"  try:\n",
-"    import flash_attn\n",
-"  except:\n",
-"    !pip uninstall -y flash_attn\n",
+"  # Install the project in an isolated environment\n",
+"  !GPU_CHOICE=A \\\n",
+"  USE_CUDA118=FALSE \\\n",
+"  LAUNCH_AFTER_INSTALL=FALSE \\\n",
+"  INSTALL_EXTENSIONS=FALSE \\\n",
+"  ./start_linux.sh\n",
 "\n",
 "# Parameters\n",
-"model_url = \"https://huggingface.co/TheBloke/MythoMax-L2-13B-GPTQ\" #@param {type:\"string\"}\n",
-"branch = \"gptq-4bit-32g-actorder_True\" #@param {type:\"string\"}\n",
+"model_url = \"https://huggingface.co/turboderp/gemma-2-9b-it-exl2\" #@param {type:\"string\"}\n",
+"branch = \"8.0bpw\" #@param {type:\"string\"}\n",
 "command_line_flags = \"--n-gpu-layers 128 --load-in-4bit --use_double_quant\" #@param {type:\"string\"}\n",
 "api = False #@param {type:\"boolean\"}\n",
 "\n",
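Flattened out of the notebook JSON, the new install step in this hunk amounts to roughly the following Colab cell (a sketch assembled only from the added lines above; the comment about PYTHONPATH is an interpretation, not part of the commit):

import os
from pathlib import Path

# Colab sets PYTHONPATH; clearing it avoids leaking Colab's package paths
# into the isolated environment that start_linux.sh sets up (assumed rationale).
os.environ.pop('PYTHONPATH', None)

if Path.cwd().name != 'text-generation-webui':
  !git clone https://github.com/oobabooga/text-generation-webui
  %cd text-generation-webui

  # Drive the one-click installer non-interactively via environment variables
  !GPU_CHOICE=A \
  USE_CUDA118=FALSE \
  LAUNCH_AFTER_INSTALL=FALSE \
  INSTALL_EXTENSIONS=FALSE \
  ./start_linux.sh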
@@ -116,11 +100,10 @@
 "  output_folder = \"\"\n",
 "\n",
 "# Start the web UI\n",
-"cmd = f\"python server.py --share\"\n",
+"cmd = f\"./start_linux.sh {command_line_flags} --share\"\n",
 "if output_folder != \"\":\n",
 "  cmd += f\" --model {output_folder}\"\n",
-"cmd += f\" {command_line_flags}\"\n",
 "print(cmd)\n",
 "\n",
 "!$cmd"
 ],
 "metadata": {
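With the default parameters above, and assuming no model has been downloaded yet (so output_folder is left empty; the model-download logic sits outside this hunk), the print(cmd) line would show something like:

./start_linux.sh --n-gpu-layers 128 --load-in-4bit --use_double_quant --share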
@@ -131,4 +114,4 @@
 "outputs": []
 }
 ]
 }
 }