accelerate==0.19.0
colorama
datasets
flexgen==0.1.7
gradio_client==0.2.5
gradio==3.31.0
markdown
numpy
pandas
Pillow>=9.5.0
pyyaml
requests
rwkv==0.7.3
safetensors==0.3.1
sentencepiece
tqdm
git+https://github.com/huggingface/peft@4fd374e80d670781c0d82c96ce94d1215ff23306
transformers==4.29.1
bitsandbytes==0.38.1; platform_system != "Windows"
llama-cpp-python==0.1.51; platform_system != "Windows"
https://github.com/abetlen/llama-cpp-python/releases/download/v0.1.51/llama_cpp_python-0.1.51-cp310-cp310-win_amd64.whl; platform_system == "Windows"