From 706a03b2cb5bf3c0667d8c13b3a47f1a6e33cc81 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Fri, 10 Mar 2023 11:02:25 -0300
Subject: [PATCH] Minor changes

---
 modules/models.py         |  8 +++-----
 presets/LLaMA-Default.txt | 12 ------------
 2 files changed, 3 insertions(+), 17 deletions(-)
 delete mode 100644 presets/LLaMA-Default.txt

diff --git a/modules/models.py b/modules/models.py
index a2256b98..a23f1fa9 100644
--- a/modules/models.py
+++ b/modules/models.py
@@ -104,13 +104,11 @@ def load_model(model_name):
         elif path_to_model.name.lower().startswith('llama-65b'):
             pt_model = 'llama-65b-4bit.pt'
         else:
-            print(f"Could not find the .pt model for {model_name}, exiting...")
-            exit()
+            pt_model = f'{model_name}-4bit.pt'

-        # check root of models folder, and model path root
-        paths = [ f"{path_to_model}/{pt_model}", f"models/{pt_model}" ]
+        # Try to find the .pt both in models/ and in the subfolder
         pt_path = None
-        for path in [ Path(p) for p in paths ]:
+        for path in [Path(p) for p in [f"models/{pt_model}", f"{path_to_model}/{pt_model}"]]:
             if path.exists():
                 pt_path = path

diff --git a/presets/LLaMA-Default.txt b/presets/LLaMA-Default.txt
deleted file mode 100644
index 3df8209a..00000000
--- a/presets/LLaMA-Default.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-do_sample=False
-temperature=0.7
-top_p=0
-typical_p=1
-repetition_penalty=1.15
-top_k=40
-num_beams=1
-penalty_alpha=0
-min_length=0
-length_penalty=1
-no_repeat_ngram_size=0
-early_stopping=True
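
For context, the patched lookup in load_model() behaves roughly as sketched below. This is a minimal standalone sketch, assuming model_name and path_to_model are set as in the hunk's context lines; find_pt_path is a hypothetical helper name, not a function in the repository, and the llama-7b/13b/30b branches above the hunk are omitted.

    from pathlib import Path

    def find_pt_path(model_name):
        # Sketch of the patched logic only, not the full load_model().
        path_to_model = Path(f"models/{model_name}")

        # After the patch, unrecognized model names fall back to
        # "<model_name>-4bit.pt" instead of printing an error and exiting.
        if path_to_model.name.lower().startswith('llama-65b'):
            pt_model = 'llama-65b-4bit.pt'
        else:
            pt_model = f'{model_name}-4bit.pt'

        # The .pt file is searched in models/ first, then in the model's
        # own subfolder; the loop keeps overwriting pt_path, so the
        # subfolder copy wins when both exist.
        pt_path = None
        for path in [Path(p) for p in [f"models/{pt_model}", f"{path_to_model}/{pt_model}"]]:
            if path.exists():
                pt_path = path
        return pt_path

Note that the loop has no break, so when the file is present in both locations, the later candidate (the model's subfolder) takes precedence over the copy in models/.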