Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2025-02-05 16:30:44 +01:00)
Revert "Use custom model/lora download folder in model downloader"
parent d9fdb3db71
commit 0270af4101
@@ -22,7 +22,6 @@ import tqdm
 from requests.adapters import HTTPAdapter
 from requests.exceptions import ConnectionError, RequestException, Timeout
 from tqdm.contrib.concurrent import thread_map
-from modules import shared
 
 base = os.environ.get("HF_ENDPOINT") or "https://huggingface.co"
 
@@ -169,7 +168,7 @@ class ModelDownloader:
         return links, sha256, is_lora, is_llamacpp
 
     def get_output_folder(self, model, branch, is_lora, is_llamacpp=False):
-        base_folder = shared.args.model_dir if not is_lora else shared.args.lora_dir
+        base_folder = 'models' if not is_lora else 'loras'
 
         # If the model is of type GGUF, save directly in the base_folder
         if is_llamacpp:
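For readers skimming the revert, here is a minimal sketch of how get_output_folder resolves the target directory once base_folder is the literal 'models'/'loras' name again. Only the base_folder line and the GGUF comment come from the hunk above; the per-model subfolder naming and the Path import are assumptions made for illustration, not the exact method body.

from pathlib import Path

def get_output_folder(model, branch, is_lora, is_llamacpp=False):
    # Reverted behavior: a plain relative folder name, with no dependency
    # on the web UI's shared.args configuration.
    base_folder = 'models' if not is_lora else 'loras'

    # If the model is of type GGUF, save directly in the base_folder
    # (comment taken from the hunk above).
    if is_llamacpp:
        return Path(base_folder)

    # Illustrative subfolder naming; the real method derives the name
    # from the repo id and branch.
    subfolder = model.replace('/', '_')
    if branch != 'main':
        subfolder += f'_{branch}'

    return Path(base_folder) / subfolder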
@@ -292,6 +292,11 @@ def download_model_wrapper(repo_id, specific_file, progress=gr.Progress(), retur
         yield ("Getting the output folder")
         output_folder = downloader.get_output_folder(model, branch, is_lora, is_llamacpp=is_llamacpp)
 
+        if output_folder == Path("models"):
+            output_folder = Path(shared.args.model_dir)
+        elif output_folder == Path("loras"):
+            output_folder = Path(shared.args.lora_dir)
+
         if check:
             progress(0.5)
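The added lines above put the remapping back in the web UI wrapper: the downloader keeps returning the default 'models'/'loras' names, and the wrapper swaps those for the user-configured model/lora directories just before downloading. Below is a standalone sketch of that remapping, with a SimpleNamespace standing in for shared.args; the directory values are made up for the example.

from pathlib import Path
from types import SimpleNamespace

# Stand-in for shared.args; in the web UI these values come from its
# command-line configuration.
shared_args = SimpleNamespace(model_dir='/data/models', lora_dir='/data/loras')

def remap_output_folder(output_folder: Path) -> Path:
    # Same logic as the added lines: only the default names are redirected,
    # so any other path returned by the downloader passes through unchanged.
    if output_folder == Path("models"):
        return Path(shared_args.model_dir)
    elif output_folder == Path("loras"):
        return Path(shared_args.lora_dir)
    return output_folder

print(remap_output_folder(Path("models")))  # /data/models
print(remap_output_folder(Path("loras")))   # /data/loras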