Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-23 00:18:20 +01:00)
Add AutoGPTQ LoRA support

commit 11f38b5c2b (parent 3a5cfe96f0)
@@ -1,10 +1,13 @@
 from pathlib import Path
 
 import torch
+from auto_gptq import get_gptq_peft_model
+from auto_gptq.utils.peft_utils import GPTQLoraConfig
 from peft import PeftModel
 
 import modules.shared as shared
 from modules.logging_colors import logger
+from modules.models import reload_model
 
 
 def add_lora_to_model(lora_names):
@@ -13,6 +16,31 @@ def add_lora_to_model(lora_names):
     removed_set = prior_set - set(lora_names)
     shared.lora_names = list(lora_names)
 
+    is_autogptq = 'GPTQForCausalLM' in shared.model.__class__.__name__
+
+    # AutoGPTQ case. It doesn't use the peft functions.
+    # Copied from https://github.com/Ph0rk0z/text-generation-webui-testing
+    if is_autogptq:
+        if len(prior_set) > 0:
+            reload_model()
+
+        if len(shared.lora_names) == 0:
+            return
+        else:
+            if len(shared.lora_names) > 1:
+                logger.warning('AutoGPTQ can only work with 1 LoRA at the moment. Only the first one in the list will be loaded')
+
+            peft_config = GPTQLoraConfig(
+                inference_mode=True,
+            )
+
+            lora_path = Path(f"{shared.args.lora_dir}/{shared.lora_names[0]}")
+            logger.info("Applying the following LoRAs to {}: {}".format(shared.model_name, ', '.join([lora_names[0]])))
+            shared.model = get_gptq_peft_model(shared.model, peft_config, lora_path)
+            return
+
+    # Transformers case
+    else:
-    # If no LoRA needs to be added or removed, exit
-    if len(added_set) == 0 and len(removed_set) == 0:
-        return
+        # If no LoRA needs to be added or removed, exit
+        if len(added_set) == 0 and len(removed_set) == 0:
+            return
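In short, the commit detects an AutoGPTQ-quantized model by class name and, instead of wrapping it in peft's PeftModel, hands it to auto_gptq's get_gptq_peft_model with a GPTQLoraConfig. A minimal standalone sketch of that path follows; the model and LoRA paths are placeholders, and the from_quantized call stands in for however the webui actually loaded the model:

    from pathlib import Path

    from auto_gptq import AutoGPTQForCausalLM, get_gptq_peft_model
    from auto_gptq.utils.peft_utils import GPTQLoraConfig

    # Load a GPTQ-quantized model; "models/my-gptq-model" is a placeholder.
    model = AutoGPTQForCausalLM.from_quantized("models/my-gptq-model")

    # Same config the commit uses: inference only, no training.
    peft_config = GPTQLoraConfig(inference_mode=True)

    # Apply a single adapter; per the warning in the diff, AutoGPTQ can only
    # work with one LoRA at the moment, so the commit takes the first name.
    lora_path = Path("loras/my-lora")  # placeholder
    model = get_gptq_peft_model(model, peft_config, lora_path)

Note that when LoRAs were previously applied (prior_set is non-empty), the new code calls reload_model() first, presumably because this path has no way to unload an adapter in place: a fresh base model is reloaded before the new LoRA is attached.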