# text-generation-webui/requirements.txt
accelerate==0.20.3
colorama
datasets
einops
flexgen==0.1.7
gradio_client==0.2.5
gradio==3.33.1
markdown
numpy
pandas
Pillow>=9.5.0
pyyaml
requests
safetensors==0.3.1
sentencepiece
tqdm
scipy
transformers==4.30.2
git+https://github.com/huggingface/peft@03eb378eb914fbee709ff7c86ba5b1d033b89524
bitsandbytes==0.39.1; platform_system != "Windows"
https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.39.1-py3-none-win_amd64.whl; platform_system == "Windows"
llama-cpp-python==0.1.64; platform_system != "Windows"
https://github.com/abetlen/llama-cpp-python/releases/download/v0.1.64/llama_cpp_python-0.1.64-cp310-cp310-win_amd64.whl; platform_system == "Windows"
https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.2.2/auto_gptq-0.2.2+cu117-cp310-cp310-win_amd64.whl; platform_system == "Windows"
https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.2.2/auto_gptq-0.2.2+cu117-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
--find-links=https://jllllll.github.io/exllama/cu117
exllama; platform_machine == "x86_64" or platform_machine == "AMD64"