Commit 201294ae17 (mirror of https://github.com/ggerganov/llama.cpp.git):
Exposes a few attributes demonstrating how to build [singularity](https://docs.sylabs.io/guides/latest/user-guide/)/[apptainer](https://apptainer.org/) and Docker images, re-using llama.cpp's Nix expression. Built locally on `x86_64-linux` with `nix build github:someoneserge/llama.cpp/feat/nix/images#llamaPackages.{docker,docker-min,sif,llama-cpp}`; the builds are fast and effective.
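For context, a minimal sketch of what a Docker-image counterpart could look like when re-using the same `llama-cpp` derivation. It uses nixpkgs' `dockerTools.buildLayeredImage` and is an illustration only, not the commit's actual `docker`/`docker-min` expression:

```nix
# Hypothetical sketch (assumption: the commit's real expression may differ):
# a Docker image that re-uses the llama-cpp derivation, mirroring the
# singularity expression below.
{
  dockerTools,
  llama-cpp,
}:

dockerTools.buildLayeredImage {
  # Re-use the package name as the image name, as the singularity image does.
  name = llama-cpp.name;
  contents = [ llama-cpp ];
}
```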
28 lines · 731 B · Nix
```nix
{
  lib,
  singularity-tools,
  llama-cpp,
  bashInteractive,
  interactive ? false,
}:

let
  optionalInt = cond: x: if cond then x else 0;
in
singularity-tools.buildImage rec {
  inherit (llama-cpp) name;
  contents = [ llama-cpp ] ++ lib.optionals interactive [ bashInteractive ];

  # These are excessive (but safe) for most variants. Building singularity
  # images requires superuser privileges, so we build them inside a VM in a
  # writable image of pre-determined size.
  #
  # ROCm is currently affected by https://github.com/NixOS/nixpkgs/issues/276846
  #
  # Expected image sizes:
  # - cpu/blas: 150M,
  # - cuda, all gencodes: 560M,

  diskSize = 4096 + optionalInt llama-cpp.useRocm 16384;
  memSize = diskSize;
}
```
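A usage sketch, under two assumptions: the attribute path `llamaPackages.sif` from the commit message above, and that this file is instantiated via `callPackage`, which supplies the `.override` function:

```nix
# Hypothetical fragment, evaluated within the flake's llamaPackages scope:
# rebuild the image with an interactive bash shell included in `contents`.
llamaPackages.sif.override { interactive = true; }
```

Overriding `interactive = true` only appends `bashInteractive` to `contents`; the disk and memory sizing (4096 base, plus 16384 when `llama-cpp.useRocm` is set) is unaffected.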