Mirror of https://github.com/ggerganov/llama.cpp.git
cann: dockerfile and doc adjustment (#10302)
Co-authored-by: noemotiovon <noemotiovon@gmail.com>
parent 4802ad350b
commit 231f9360d9
@@ -1,6 +1,6 @@
 ARG ASCEND_VERSION=8.0.rc2.alpha003-910b-openeuler22.03-py3.8
 
-FROM cosdt/cann:$ASCEND_VERSION AS build
+FROM ascendai/cann:$ASCEND_VERSION AS build
 
 WORKDIR /app
 
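For reference, a minimal sketch of how this build stage could be exercised after the base-image switch; the Dockerfile path (.devops/llama-cli-cann.Dockerfile) and the image tag are assumptions, not taken from this diff:

```bash
# Hypothetical build command; the Dockerfile path and tag are assumptions.
# ASCEND_VERSION defaults to the value declared by ARG above and can be
# overridden at build time.
docker build \
  --build-arg ASCEND_VERSION=8.0.rc2.alpha003-910b-openeuler22.03-py3.8 \
  -f .devops/llama-cli-cann.Dockerfile \
  -t llama-cli-cann:local .
```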
@@ -26,7 +26,7 @@ RUN echo "Building with static libs" && \
     cmake --build build --config Release --target llama-cli
 
 # TODO: use image with NNRT
-FROM cosdt/cann:$ASCEND_VERSION AS runtime
+FROM ascendai/cann:$ASCEND_VERSION AS runtime
 COPY --from=build /app/build/bin/llama-cli /llama-cli
 
 ENV LC_ALL=C.utf8
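A hedged sketch of running the resulting runtime image on an Ascend host; the device nodes, driver mounts, and model path below follow common CANN container setups and are assumptions, not part of this commit:

```bash
# Hypothetical run command; device nodes and driver mounts depend on the
# host's Ascend NPU installation and are assumptions here.
docker run --rm -it \
  --entrypoint /llama-cli \
  --device /dev/davinci0 \
  --device /dev/davinci_manager \
  --device /dev/devmm_svm \
  --device /dev/hisi_hdc \
  -v /usr/local/dcmi:/usr/local/dcmi \
  -v /usr/local/Ascend/driver:/usr/local/Ascend/driver \
  -v /path/to/models:/models \
  llama-cli-cann:local \
  -m /models/model.gguf -p "Building a website can be done in 10 steps:" -ngl 32
```

Passing `--entrypoint /llama-cli` points directly at the binary copied in by the COPY line above, so the sketch works whether or not the Dockerfile also declares an ENTRYPOINT.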
@@ -375,7 +375,7 @@ cmake --build build --config release
 
 You can test with:
 
-`./build/llama-cli -m PATH_TO_MODEL -p "Building a website can be done in 10 steps:" -ngl 32`
+`./build/bin/llama-cli -m PATH_TO_MODEL -p "Building a website can be done in 10 steps:" -ngl 32`
 
 If the fllowing info is output on screen, you are using `llama.cpp by CANN backend`:
 ```bash
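To place the doc fix above in context, a hedged sketch of the native build that puts the binary under build/bin/; the -DGGML_CANN flag follows the CANN backend's usual build instructions and is an assumption here, not part of this diff:

```bash
# Hypothetical native build; the GGML_CANN option is assumed from the CANN
# backend's documented build flow, not from this diff.
cmake -B build -DGGML_CANN=on -DCMAKE_BUILD_TYPE=Release
cmake --build build --config Release --target llama-cli

# The corrected invocation path: llama-cli is emitted under build/bin/.
./build/bin/llama-cli -m PATH_TO_MODEL -p "Building a website can be done in 10 steps:" -ngl 32
```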