mirror of
https://github.com/ggerganov/llama.cpp.git
synced 2025-01-27 04:23:06 +01:00
py : use cpu-only torch in requirements.txt
This commit is contained in:
parent
87e25a1d1b
commit
a44f22e7d3
@@ -1,3 +1,4 @@
 -r ../../requirements/requirements-convert_legacy_llama.txt
+--extra-index-url https://download.pytorch.org/whl/cpu
 pillow~=10.2.0
 torch~=2.2.1
|
@@ -1,2 +1,3 @@
 -r ./requirements-convert_legacy_llama.txt
+--extra-index-url https://download.pytorch.org/whl/cpu
 torch~=2.2.1
|
@@ -1,2 +1,3 @@
 -r ./requirements-convert_legacy_llama.txt
+--extra-index-url https://download.pytorch.org/whl/cpu
 torch~=2.2.1
|
Loading…
Reference in New Issue
Block a user