From 9626f57721c6df25683bf2ccf2a384b56e75ffd3 Mon Sep 17 00:00:00 2001
From: jllllll <3887729+jllllll@users.noreply.github.com>
Date: Wed, 30 Aug 2023 11:43:38 -0500
Subject: [PATCH] Bump exllama to 0.0.14 (#3758)

---
 requirements.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 43db99e8..f6b11cdc 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -31,8 +31,8 @@ https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.4.2/auto_gptq-0.4.2+cu
 https://github.com/PanQiWei/AutoGPTQ/releases/download/v0.4.2/auto_gptq-0.4.2+cu117-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
 
 # ExLlama
-https://github.com/jllllll/exllama/releases/download/0.0.10/exllama-0.0.10+cu117-cp310-cp310-win_amd64.whl; platform_system == "Windows"
-https://github.com/jllllll/exllama/releases/download/0.0.10/exllama-0.0.10+cu117-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
+https://github.com/jllllll/exllama/releases/download/0.0.14/exllama-0.0.14+cu117-cp310-cp310-win_amd64.whl; platform_system == "Windows"
+https://github.com/jllllll/exllama/releases/download/0.0.14/exllama-0.0.14+cu117-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64"
 
 # llama-cpp-python without GPU support
 llama-cpp-python==0.1.83; platform_system != "Windows"