From 1b44204bd734749d86c897a27692265449a99f43 Mon Sep 17 00:00:00 2001
From: Artificiangel
Date: Mon, 29 Apr 2024 07:21:09 -0400
Subject: [PATCH] Use custom model/lora download folder in model downloader

---
 download-model.py        | 3 ++-
 modules/ui_model_menu.py | 5 -----
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/download-model.py b/download-model.py
index c38e79fb..37ddf70a 100644
--- a/download-model.py
+++ b/download-model.py
@@ -22,6 +22,7 @@ import tqdm
 from requests.adapters import HTTPAdapter
 from requests.exceptions import ConnectionError, RequestException, Timeout
 from tqdm.contrib.concurrent import thread_map
+from modules import shared
 
 base = os.environ.get("HF_ENDPOINT") or "https://huggingface.co"
 
@@ -165,7 +166,7 @@ class ModelDownloader:
         return links, sha256, is_lora, is_llamacpp
 
     def get_output_folder(self, model, branch, is_lora, is_llamacpp=False):
-        base_folder = 'models' if not is_lora else 'loras'
+        base_folder = shared.args.model_dir if not is_lora else shared.args.lora_dir
 
         # If the model is of type GGUF, save directly in the base_folder
         if is_llamacpp:
diff --git a/modules/ui_model_menu.py b/modules/ui_model_menu.py
index 5c4b2f89..7969e2f6 100644
--- a/modules/ui_model_menu.py
+++ b/modules/ui_model_menu.py
@@ -290,11 +290,6 @@ def download_model_wrapper(repo_id, specific_file, progress=gr.Progress(), retur
         yield ("Getting the output folder")
         output_folder = downloader.get_output_folder(model, branch, is_lora, is_llamacpp=is_llamacpp)
 
-        if output_folder == Path("models"):
-            output_folder = Path(shared.args.model_dir)
-        elif output_folder == Path("loras"):
-            output_folder = Path(shared.args.lora_dir)
-
         if check:
             progress(0.5)
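
For context, a rough sketch of the effect of this patch. The helper, the Args stand-in, and the sample paths below are hypothetical and only illustrate the intent: the downloader now resolves its base folder from shared.args.model_dir / shared.args.lora_dir directly, so the remapping previously done in ui_model_menu.py is no longer needed.

from pathlib import Path

def pick_base_folder(args, is_lora: bool) -> Path:
    # Mirrors the new line in ModelDownloader.get_output_folder:
    # use the configured directories instead of hardcoded 'models'/'loras'.
    return Path(args.lora_dir if is_lora else args.model_dir)

class Args:  # hypothetical stand-in for shared.args
    model_dir = '/data/models'
    lora_dir = '/data/loras'

# A non-LoRA download now lands under the custom model directory.
print(pick_base_folder(Args(), is_lora=False) / 'facebook_opt-1.3b')
# -> /data/models/facebook_opt-1.3b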