Mirror of https://github.com/oobabooga/text-generation-webui.git
Bump llama-cpp-python

parent 20740ab16e
commit c6cae106e7
@@ -19,8 +19,8 @@ transformers==4.30.2
 git+https://github.com/huggingface/peft@03eb378eb914fbee709ff7c86ba5b1d033b89524
 bitsandbytes==0.39.1; platform_system != "Windows"
 https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.39.1-py3-none-win_amd64.whl; platform_system == "Windows"
-llama-cpp-python==0.1.64; platform_system != "Windows"
-https://github.com/abetlen/llama-cpp-python/releases/download/v0.1.64/llama_cpp_python-0.1.64-cp310-cp310-win_amd64.whl; platform_system == "Windows"
+llama-cpp-python==0.1.66; platform_system != "Windows"
+https://github.com/abetlen/llama-cpp-python/releases/download/v0.1.66/llama_cpp_python-0.1.66-cp310-cp310-win_amd64.whl; platform_system == "Windows"
 https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.2.2/auto_gptq-0.2.2+cu117-cp310-cp310-win_amd64.whl; platform_system == "Windows"
 https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.2.2/auto_gptq-0.2.2+cu117-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
 https://github.com/jllllll/exllama/releases/download/0.0.4/exllama-0.0.4+cu117-cp310-cp310-win_amd64.whl; platform_system == "Windows"
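The "; platform_system ..." suffixes on these requirement lines are PEP 508 environment markers: pip evaluates them at install time, so the plain llama-cpp-python 0.1.66 source/PyPI build is used on non-Windows systems, while Windows pulls the prebuilt cp310 wheel from the release URL instead. A minimal sketch of that marker evaluation, using the third-party packaging library (not part of this repo's requirements, shown only as an illustration) with the marker strings copied from the diff above:

    # Sketch: how pip decides which llama-cpp-python line applies on this machine.
    # Assumes the `packaging` library is installed (pip ships with a vendored copy).
    from packaging.markers import Marker

    non_windows = Marker('platform_system != "Windows"')
    windows = Marker('platform_system == "Windows"')

    # Each marker is evaluated against the current environment; pip installs
    # only the requirements whose marker evaluates to True.
    print("use PyPI build of llama-cpp-python:", non_windows.evaluate())
    print("use prebuilt Windows wheel:        ", windows.evaluate())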