accelerate==1.2.*
bitsandbytes==0.45.*
colorama
datasets
einops
fastapi==0.112.4
gradio==4.37.*
jinja2==3.1.4
markdown
numba==0.59.*
numpy==1.26.*
pandas
peft==0.12.*
Pillow>=9.5.0
psutil
pydantic==2.8.2
pyyaml
requests
rich
safetensors==0.4.*
scipy
sentencepiece
tensorboard
transformers==4.47.*
tqdm
wandb

# API
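# SSE streaming, speech-to-text, optional Cloudflare tunnelling, and token
# counting for the API endpoints.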
SpeechRecognition==3.10.0
flask_cloudflared==0.0.14
sse-starlette==1.6.5
tiktoken

# llama-cpp-python (CPU only, AVX2)
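# The ";" clause on each wheel below is a PEP 508 environment marker: pip
# installs only the build whose OS, architecture, and Python version match
# the current environment, and skips every other line.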
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.3.5+cpuavx2-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.3.5+cpuavx2-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.3.5+cpuavx2-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.3.5+cpuavx2-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"

# llama-cpp-python (CUDA, no tensor cores)
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.3.5+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.3.5+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.3.5+cu121-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.3.5+cu121-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"

# llama-cpp-python (CUDA, tensor cores)
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.3.5+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.3.5+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.3.5+cu121-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.3.5+cu121-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"

# CUDA wheels
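# Prebuilt exllamav2 wheels (CUDA 12.1, torch 2.4.1); the platform-agnostic
# py3-none-any wheel serves as the fallback for non-x86_64 Linux.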
https://github.com/oobabooga/exllamav2/releases/download/v0.2.6/exllamav2-0.2.6+cu121.torch2.4.1-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/oobabooga/exllamav2/releases/download/v0.2.6/exllamav2-0.2.6+cu121.torch2.4.1-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
https://github.com/oobabooga/exllamav2/releases/download/v0.2.6/exllamav2-0.2.6+cu121.torch2.4.1-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/oobabooga/exllamav2/releases/download/v0.2.6/exllamav2-0.2.6+cu121.torch2.4.1-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
https://github.com/oobabooga/exllamav2/releases/download/v0.2.6/exllamav2-0.2.6-py3-none-any.whl; platform_system == "Linux" and platform_machine != "x86_64"
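# Prebuilt flash-attention wheels: Windows builds from oobabooga/flash-attention,
# Linux builds from the upstream Dao-AILab releases.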
https://github.com/oobabooga/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu122torch2.4.1cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/oobabooga/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu122torch2.4.1cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu12torch2.4cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu12torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"