accelerate==0.19.0
colorama
datasets
flexgen==0.1.7
gradio_client==0.2.5
gradio==3.31.0
markdown
numpy
pandas
Pillow>=9.5.0
pyyaml
requests
rwkv==0.7.3
safetensors==0.3.1
sentencepiece
tqdm
git+https://github.com/huggingface/peft
transformers==4.29.1
bitsandbytes==0.38.1; platform_system != "Windows"
llama-cpp-python==0.1.50; platform_system != "Windows"
https://github.com/abetlen/llama-cpp-python/releases/download/v0.1.50/llama_cpp_python-0.1.50-cp310-cp310-win_amd64.whl; platform_system == "Windows"