ggerganov/llama.cpp: add Vulkan support to Nix flake
commit 60ecf099ed (parent e920ed393d)
@@ -13,18 +13,22 @@
   cudaPackages,
   darwin,
   rocmPackages,
+  vulkan-headers,
+  vulkan-loader,
   clblast,
   useBlas ? builtins.all (x: !x) [
     useCuda
     useMetalKit
     useOpenCL
     useRocm
+    useVulkan
   ],
   useCuda ? config.cudaSupport,
   useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin && !useOpenCL,
   useMpi ? false, # Increases the runtime closure size by ~700M
   useOpenCL ? false,
   useRocm ? config.rocmSupport,
+  useVulkan ? false,
   llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake
 }@inputs:

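Aside (not part of the diff): with useVulkan added to the guard above, the BLAS fallback stays on by default only when no GPU backend at all is requested. A minimal sketch of that guard with illustrative values, evaluable in nix repl:

# Illustrative values only; the names mirror the package arguments above.
let
  useCuda = false;
  useMetalKit = false;
  useOpenCL = false;
  useRocm = false;
  useVulkan = true; # newly part of the guard in this commit
in
builtins.all (x: !x) [ useCuda useMetalKit useOpenCL useRocm useVulkan ]
# => false, so requesting Vulkan now switches the useBlas default off, as the other backends already did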
@@ -48,7 +52,8 @@ let
     ++ lib.optionals useMetalKit [ "MetalKit" ]
     ++ lib.optionals useMpi [ "MPI" ]
     ++ lib.optionals useOpenCL [ "OpenCL" ]
-    ++ lib.optionals useRocm [ "ROCm" ];
+    ++ lib.optionals useRocm [ "ROCm" ]
+    ++ lib.optionals useVulkan [ "Vulkan" ];

   pnameSuffix =
     strings.optionalString (suffices != [ ])
@@ -108,6 +113,11 @@ let
     hipblas
     rocblas
   ];
+
+  vulkanBuildInputs = [
+    vulkan-headers
+    vulkan-loader
+  ];
 in

 effectiveStdenv.mkDerivation (
@@ -164,7 +174,8 @@ effectiveStdenv.mkDerivation (
       ++ optionals useCuda cudaBuildInputs
       ++ optionals useMpi [ mpi ]
       ++ optionals useOpenCL [ clblast ]
-      ++ optionals useRocm rocmBuildInputs;
+      ++ optionals useRocm rocmBuildInputs
+      ++ optionals useVulkan vulkanBuildInputs;

     cmakeFlags =
       [
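Aside: a self-contained sketch of how this concatenation resolves when only Vulkan is enabled; the strings stand in for the real derivations and optionals mimics lib.optionals:

let
  optionals = cond: xs: if cond then xs else [ ]; # same shape as lib.optionals
  useRocm = false;
  useVulkan = true;
  rocmBuildInputs = [ "clr" "hipblas" "rocblas" ];
  vulkanBuildInputs = [ "vulkan-headers" "vulkan-loader" ];
in
optionals useRocm rocmBuildInputs ++ optionals useVulkan vulkanBuildInputs
# => [ "vulkan-headers" "vulkan-loader" ], i.e. the Vulkan loader and headers land in buildInputs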
@@ -178,6 +189,7 @@ effectiveStdenv.mkDerivation (
         (cmakeBool "LLAMA_HIPBLAS" useRocm)
         (cmakeBool "LLAMA_METAL" useMetalKit)
         (cmakeBool "LLAMA_MPI" useMpi)
+        (cmakeBool "LLAMA_VULKAN" useVulkan)
       ]
       ++ optionals useCuda [
         (
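Aside: cmakeBool here comes from nixpkgs' lib; the sketch below shows the kind of flag it is expected to emit for the new option (the exact string form is defined by nixpkgs and may differ slightly):

let
  cmakeBool = name: value: "-D${name}:BOOL=${if value then "TRUE" else "FALSE"}"; # rough stand-in for lib.cmakeBool
in
cmakeBool "LLAMA_VULKAN" true
# => "-DLLAMA_VULKAN:BOOL=TRUE", the flag that enables the Vulkan backend in the CMake build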
@@ -218,6 +230,7 @@ effectiveStdenv.mkDerivation (
         useMpi
         useOpenCL
         useRocm
+        useVulkan
         ;

       shell = mkShell {
@@ -242,11 +255,11 @@ effectiveStdenv.mkDerivation (
       # Configurations we don't want even the CI to evaluate. Results in the
       # "unsupported platform" messages. This is mostly a no-op, because
       # cudaPackages would've refused to evaluate anyway.
-      badPlatforms = optionals (useCuda || useOpenCL) lib.platforms.darwin;
+      badPlatforms = optionals (useCuda || useOpenCL || useVulkan) lib.platforms.darwin;

       # Configurations that are known to result in build failures. Can be
       # overridden by importing Nixpkgs with `allowBroken = true`.
-      broken = (useMetalKit && !effectiveStdenv.isDarwin);
+      broken = (useMetalKit && !effectiveStdenv.isDarwin) || (useVulkan && effectiveStdenv.isDarwin);

       description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}";
       homepage = "https://github.com/ggerganov/llama.cpp/";
@@ -157,6 +157,7 @@

           mpi-cpu = config.packages.default.override { useMpi = true; };
           mpi-cuda = config.packages.default.override { useMpi = true; };
+          vulkan = config.packages.default.override { useVulkan = true; };
         }
         // lib.optionalAttrs (system == "x86_64-linux") {
           rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp;
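Aside: this last hunk appears to sit in the flake's per-system package set, so downstream users should be able to pick the Vulkan variant directly. A hypothetical consumer flake (attribute paths are illustrative, not from this commit):

{
  inputs.llama-cpp.url = "github:ggerganov/llama.cpp";

  outputs = { self, llama-cpp, ... }: {
    # Reuse the Vulkan-enabled build added above (shown for x86_64-linux only).
    packages.x86_64-linux.default = llama-cpp.packages.x86_64-linux.vulkan;
  };
}

From a checkout of the repository, `nix build .#vulkan` should select the same variant.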