From bb62e796eba0bfe4610ac879bf7b2f390d96cf92 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 14 Oct 2024 13:24:13 -0700
Subject: [PATCH] Fix locally compiled llama-cpp-python failing to import

---
 modules/llama_cpp_python_hijack.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/modules/llama_cpp_python_hijack.py b/modules/llama_cpp_python_hijack.py
index 2a9c10da..f3872a74 100644
--- a/modules/llama_cpp_python_hijack.py
+++ b/modules/llama_cpp_python_hijack.py
@@ -9,10 +9,11 @@ from modules import shared
 from modules.cache_utils import process_llamacpp_cache
 
 imported_module = None
+not_available_modules = set()
 
 
 def llama_cpp_lib():
-    global imported_module
+    global imported_module, not_available_modules
 
     # Determine the platform
     is_macos = platform.system() == 'Darwin'
@@ -31,6 +32,9 @@ def llama_cpp_lib():
     ]
 
     for arg, lib_name in lib_names:
+        if lib_name in not_available_modules:
+            continue
+
         should_import = (arg is None or getattr(shared.args, arg))
 
         if should_import:
@@ -44,6 +48,7 @@ def llama_cpp_lib():
             monkey_patch_llama_cpp_python(return_lib)
             return return_lib
         except ImportError:
+            not_available_modules.add(lib_name)
             continue
 
     return None