From f39c99fa14901f649ef9fffa00caa80155921d3e Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Tue, 25 Apr 2023 22:58:48 -0300
Subject: [PATCH] Load more than one LoRA with --lora, fix a bug

---
 README.md         | 2 +-
 modules/LoRA.py   | 2 +-
 modules/shared.py | 2 +-
 server.py         | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 3b0c6936..3bbaf84e 100644
--- a/README.md
+++ b/README.md
@@ -190,7 +190,7 @@ Optionally, you can use the following command-line flags:
 | `--chat` | Launch the web UI in chat mode. |
 | `--character CHARACTER` | The name of the character to load in chat mode by default. |
 | `--model MODEL` | Name of the model to load by default. |
-| `--lora LORA` | Name of the LoRA to apply to the model by default. |
+| `--lora LORA [LORA ...]` | The list of LoRAs to load. If you want to load more than one LoRA, write the names separated by spaces. |
 | `--model-dir MODEL_DIR` | Path to directory with all the models. |
 | `--lora-dir LORA_DIR` | Path to directory with all the loras. |
 | `--model-menu` | Show a model menu in the terminal when the web UI is first launched. |
diff --git a/modules/LoRA.py b/modules/LoRA.py
index a4ebe208..f734f3cd 100644
--- a/modules/LoRA.py
+++ b/modules/LoRA.py
@@ -7,10 +7,10 @@ import modules.shared as shared
 
 
 def add_lora_to_model(lora_names):
-    shared.lora_names = list(lora_names)
     prior_set = set(shared.lora_names)
     added_set = set(lora_names) - prior_set
     removed_set = prior_set - set(lora_names)
+    shared.lora_names = list(lora_names)
 
     # If no LoRA needs to be added or removed, exit
     if len(added_set) == 0 and len(removed_set) == 0:
diff --git a/modules/shared.py b/modules/shared.py
index a3b867de..4881ffa5 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -96,7 +96,7 @@ parser.add_argument('--chat', action='store_true', help='Launch the web UI in ch
 parser.add_argument('--cai-chat', action='store_true', help='DEPRECATED: use --chat instead.')
 parser.add_argument('--character', type=str, help='The name of the character to load in chat mode by default.')
 parser.add_argument('--model', type=str, help='Name of the model to load by default.')
-parser.add_argument('--lora', type=str, help='Name of the LoRA to apply to the model by default.')
+parser.add_argument('--lora', type=str, nargs="+", help='The list of LoRAs to load. If you want to load more than one LoRA, write the names separated by spaces.')
 parser.add_argument("--model-dir", type=str, default='models/', help="Path to directory with all the models")
 parser.add_argument("--lora-dir", type=str, default='loras/', help="Path to directory with all the loras")
 parser.add_argument('--model-menu', action='store_true', help='Show a model menu in the terminal when the web UI is first launched.')
diff --git a/server.py b/server.py
index dc804a98..b7d6aa34 100644
--- a/server.py
+++ b/server.py
@@ -913,7 +913,7 @@ if __name__ == "__main__":
     # Load the model
     shared.model, shared.tokenizer = load_model(shared.model_name)
     if shared.args.lora:
-        add_lora_to_model([shared.args.lora])
+        add_lora_to_model(shared.args.lora)
 
     # Force a character to be loaded
     if shared.is_chat():
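
Note on the LoRA.py hunk: before the patch, shared.lora_names was overwritten at the top of add_lora_to_model(), so prior_set was computed from the already-updated state and added_set/removed_set came out empty; moving the assignment below the set arithmetic fixes that. The sketch below is a minimal standalone illustration of that ordering, not code from the patch; reconcile_loras and current_lora_names are hypothetical stand-ins for add_lora_to_model and shared.lora_names, and the LoRA names are made up.

# Standalone sketch of the reconciliation ordering fixed by this patch.
current_lora_names = []  # stands in for shared.lora_names

def reconcile_loras(lora_names):
    global current_lora_names

    # Compute the diff against the *previous* state first...
    prior_set = set(current_lora_names)
    added_set = set(lora_names) - prior_set
    removed_set = prior_set - set(lora_names)

    # ...and only then record the new state. Assigning before the set
    # arithmetic (as the pre-patch code did) makes prior_set equal to
    # set(lora_names), so nothing is ever detected as added or removed.
    current_lora_names = list(lora_names)

    # If no LoRA needs to be added or removed, exit
    if not added_set and not removed_set:
        return

    print(f"adding: {sorted(added_set)}, removing: {sorted(removed_set)}")

# First call adds two LoRAs; the second swaps one of them (hypothetical names).
reconcile_loras(["lora-a", "lora-b"])
reconcile_loras(["lora-a", "lora-c"])

On the CLI side, nargs="+" makes argparse collect one or more space-separated names into a list (e.g. --lora lora-a lora-b, with hypothetical names), which is why server.py now passes shared.args.lora directly instead of wrapping a single string in a list.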