# LlamaConfig.cmake.in — CMake package configuration template for llama.cpp
# Source: mirror of https://github.com/ggerganov/llama.cpp.git
# Synced 2024-12-25 05:48:47 +01:00, commit 554c247caf (ggml-ci)
# Version and build metadata, substituted by configure_file() at install time.
set(LLAMA_VERSION      @LLAMA_INSTALL_VERSION@)
set(LLAMA_BUILD_COMMIT @LLAMA_BUILD_COMMIT@)
set(LLAMA_BUILD_NUMBER @LLAMA_BUILD_NUMBER@)
set(LLAMA_SHARED_LIB   @BUILD_SHARED_LIBS@)

# Backend feature flags recorded from the build that produced this package;
# they drive the dependency lookups further down in this file.
set(LLAMA_BLAS       @LLAMA_BLAS@)
set(LLAMA_CUDA       @LLAMA_CUDA@)
set(LLAMA_METAL      @LLAMA_METAL@)
set(LLAMA_HIPBLAS    @LLAMA_HIPBLAS@)
set(LLAMA_ACCELERATE @LLAMA_ACCELERATE@)

# Expanded by configure_package_config_file(); provides the set_and_check()
# and check_required_components() helpers used below.
@PACKAGE_INIT@
|
# Installed locations; set_and_check() fails at find_package() time if a
# recorded path does not exist in the installed tree.
set_and_check(LLAMA_INCLUDE_DIR "@PACKAGE_LLAMA_INCLUDE_INSTALL_DIR@")
set_and_check(LLAMA_LIB_DIR     "@PACKAGE_LLAMA_LIB_INSTALL_DIR@")
set_and_check(LLAMA_BIN_DIR     "@PACKAGE_LLAMA_BIN_INSTALL_DIR@")
|
# Ensure transitive dependencies are satisfied: resolve every external
# package/framework the installed llama library was built against, as
# recorded in the LLAMA_* feature flags above.

find_package(Threads REQUIRED)

if (APPLE AND LLAMA_ACCELERATE)
    find_library(ACCELERATE_FRAMEWORK Accelerate REQUIRED)
endif()

if (LLAMA_BLAS)
    find_package(BLAS REQUIRED)
endif()

if (LLAMA_CUDA)
    find_package(CUDAToolkit REQUIRED)
endif()

if (LLAMA_METAL)
    find_library(FOUNDATION_LIBRARY Foundation REQUIRED)
    find_library(METAL_FRAMEWORK    Metal      REQUIRED)
    find_library(METALKIT_FRAMEWORK MetalKit   REQUIRED)
endif()

if (LLAMA_HIPBLAS)
    find_package(hip     REQUIRED)
    find_package(hipblas REQUIRED)
    find_package(rocblas REQUIRED)
endif()
|
# Locate the installed llama library (shared or static, per LLAMA_SHARED_LIB)
# and expose it to consumers as the imported target `llama`.
find_library(llama_LIBRARY llama
    REQUIRED
    HINTS ${LLAMA_LIB_DIR})

# Link dependencies and compile definitions captured from the original build;
# @LLAMA_EXTRA_LIBS@ / @LLAMA_TRANSIENT_DEFINES@ are filled in at install time.
set(_llama_link_deps "Threads::Threads" "@LLAMA_EXTRA_LIBS@")
set(_llama_transient_defines "@LLAMA_TRANSIENT_DEFINES@")

# UNKNOWN IMPORTED: the library type (shared/static) is resolved from the
# file found above rather than declared here.
add_library(llama UNKNOWN IMPORTED)
set_target_properties(llama
    PROPERTIES
        INTERFACE_INCLUDE_DIRECTORIES "${LLAMA_INCLUDE_DIR}"
        INTERFACE_LINK_LIBRARIES "${_llama_link_deps}"
        INTERFACE_COMPILE_DEFINITIONS "${_llama_transient_defines}"
        IMPORTED_LINK_INTERFACE_LANGUAGES "CXX"
        IMPORTED_LOCATION "${llama_LIBRARY}"
        INTERFACE_COMPILE_FEATURES cxx_std_11
        POSITION_INDEPENDENT_CODE ON)
|
# Report failure for any components requested via
# find_package(Llama COMPONENTS ...) that this package does not provide.
check_required_components(Llama)
|