From 69b0aedd95d96cb0f1a0f7869e2b8a00df9aeaf8 Mon Sep 17 00:00:00 2001
From: jllllll <3887729+jllllll@users.noreply.github.com>
Date: Fri, 22 Sep 2023 01:12:08 -0500
Subject: [PATCH] Fix missing models warning

---
 webui.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/webui.py b/webui.py
index 19847879..d5071c80 100644
--- a/webui.py
+++ b/webui.py
@@ -251,7 +251,7 @@ if __name__ == "__main__":
     os.chdir(script_dir)
 
     # Check if a model has been downloaded yet
-    if len([item for item in glob.glob('text-generation-webui/models/*') if not item.endswith(('.txt', '.yaml'))]) == 0:
+    if len([item for item in glob.glob('models/*') if not item.endswith(('.txt', '.yaml'))]) == 0:
         print_big_message("WARNING: You haven't downloaded any model yet.\nOnce the web UI launches, head over to the \"Model\" tab and download one.")
 
     # Workaround for llama-cpp-python loading paths in CUDA env vars even if they do not exist