diff --git a/modules/LoRA.py b/modules/LoRA.py
index c0c8ae08..e54d5337 100644
--- a/modules/LoRA.py
+++ b/modules/LoRA.py
@@ -106,8 +106,10 @@ def add_lora_transformers(lora_names):
 
     # If any LoRA needs to be removed, start over
     if len(removed_set) > 0:
-        shared.model.disable_adapter()
-        shared.model = shared.model.base_model.model
+        # shared.model may no longer be PeftModel
+        if hasattr(shared.model, 'disable_adapter'):
+            shared.model.disable_adapter()
+            shared.model = shared.model.base_model.model
 
     if len(lora_names) > 0:
         params = {}
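
For context, a minimal sketch of why the `hasattr` guard matters: if `shared.model` has already been unwrapped (or was never a `PeftModel`), calling `disable_adapter()` unconditionally raises `AttributeError`. The classes below (`FakePeftModel`, `FakeBaseModel`, `unwrap`) are hypothetical stand-ins for illustration only, not the real `peft`/`transformers` objects used in `modules/LoRA.py`.

```python
from types import SimpleNamespace


class FakeBaseModel:
    """Stand-in for a plain transformers model (no adapter methods)."""


class FakePeftModel:
    """Stand-in for a PeftModel-like wrapper around a base model."""

    def __init__(self, base):
        # Mimic PeftModel's .base_model.model chain back to the raw model
        self.base_model = SimpleNamespace(model=base)

    def disable_adapter(self):
        print("adapters disabled")


def unwrap(model):
    # Only PeftModel-like objects expose disable_adapter(); a model that
    # was never wrapped (or was already unwrapped) has no such attribute,
    # so the guard skips the unwrap instead of raising AttributeError.
    if hasattr(model, 'disable_adapter'):
        model.disable_adapter()
        model = model.base_model.model
    return model


plain = FakeBaseModel()
wrapped = FakePeftModel(plain)

assert unwrap(wrapped) is plain   # unwrapped back to the base model
assert unwrap(plain) is plain     # no-op: guard skips the unwrap
```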