Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-11-25 09:19:23 +01:00
Merge remote-tracking branch 'refs/remotes/origin/dev' into dev
commit 42f816312d
.gitignore (vendored): 1 change
@@ -26,6 +26,7 @@
.DS_Store
.eslintrc.js
.idea
.env
.venv
venv
.vscode
@ -45,9 +45,6 @@
|
||||
.*starchat-beta:
|
||||
instruction_template: 'Starchat-Beta'
|
||||
custom_stopping_strings: '"<|end|>"'
|
||||
.*(openorca-platypus2):
|
||||
instruction_template: 'OpenOrca-Platypus2'
|
||||
custom_stopping_strings: '"### Instruction:", "### Response:"'
|
||||
(?!.*v0)(?!.*1.1)(?!.*1_1)(?!.*stable)(?!.*chinese).*vicuna:
|
||||
instruction_template: 'Vicuna-v0'
|
||||
.*vicuna.*v0:
|
||||
@@ -152,6 +149,9 @@
   instruction_template: 'Orca Mini'
 .*(platypus|gplatty|superplatty):
   instruction_template: 'Alpaca'
+.*(openorca-platypus2):
+  instruction_template: 'OpenOrca-Platypus2'
+  custom_stopping_strings: '"### Instruction:", "### Response:"'
 .*longchat:
   instruction_template: 'Vicuna-v1.1'
 .*vicuna-33b:
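These entries look like the webui's per-model settings YAML (models/config.yaml upstream), where a model name is tested against each regex in order and settings from later matches presumably overwrite earlier ones; moving the more specific .*(openorca-platypus2) block below the generic platypus patterns would then let its template win. A minimal sketch of that last-match-wins idea, with made-up names (MODEL_PATTERNS, resolve_model_settings) rather than the real loader:

import re

# Hypothetical, trimmed-down version of the pattern -> settings mapping from
# the model-settings YAML (illustrative; the real file is much larger).
MODEL_PATTERNS = {
    r'.*(platypus|gplatty|superplatty)': {
        'instruction_template': 'Alpaca',
    },
    r'.*(openorca-platypus2)': {
        'instruction_template': 'OpenOrca-Platypus2',
        'custom_stopping_strings': '"### Instruction:", "### Response:"',
    },
}


def resolve_model_settings(model_name: str) -> dict:
    """Apply every pattern that matches; later entries override earlier ones."""
    settings = {}
    for pattern, values in MODEL_PATTERNS.items():
        if re.match(pattern, model_name.lower()):
            settings.update(values)
    return settings


# 'openorca-platypus2-13b' matches both patterns above; with the specific
# entry listed last, its instruction template is the one that sticks.
print(resolve_model_settings('OpenOrca-Platypus2-13B')['instruction_template'])
# OpenOrca-Platypus2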
@@ -62,7 +62,7 @@ def _load_quant(model, checkpoint, wbits, groupsize=-1, faster_kernel=False, exc
         from safetensors.torch import load_file as safe_load
         model.load_state_dict(safe_load(checkpoint), strict=False)
     else:
-        model.load_state_dict(torch.load(checkpoint), strict=False)
+        model.load_state_dict(torch.load(checkpoint, weights_only=True), strict=False)
 
     model.seqlen = 2048
     return model
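The only functional change in this hunk is passing weights_only=True to torch.load, which swaps in a restricted unpickler that accepts tensors and plain containers but refuses arbitrary Python objects; newer PyTorch releases have been moving toward making this the default. A tiny self-contained round trip showing that an ordinary state dict loads unchanged (file name is illustrative):

import torch

# Save a plain tensor state dict, the same shape of data a quantized
# checkpoint file contains.
torch.save({"weight": torch.zeros(2, 2)}, "checkpoint.pt")

# The restricted unpickler behind weights_only=True handles tensors, numbers,
# strings and containers, so a well-formed checkpoint loads exactly as before.
state_dict = torch.load("checkpoint.pt", weights_only=True)
print(state_dict["weight"].shape)  # torch.Size([2, 2])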
@@ -544,7 +544,7 @@ def do_train(lora_name: str, always_override: bool, q_proj_en: bool, v_proj_en:
         lora_model = get_peft_model(shared.model, config)
         if not always_override and Path(f"{lora_file_path}/adapter_model.bin").is_file():
             logger.info("Loading existing LoRA data...")
-            state_dict_peft = torch.load(f"{lora_file_path}/adapter_model.bin")
+            state_dict_peft = torch.load(f"{lora_file_path}/adapter_model.bin", weights_only=True)
             set_peft_model_state_dict(lora_model, state_dict_peft)
     except:
         yield traceback.format_exc().replace('\n', '\n\n')
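The LoRA-resume path gets the same one-argument hardening: adapter_model.bin is just a dict of tensors, so it still loads, while a file that tries to smuggle in an arbitrary pickled object is rejected instead of silently executing code at load time. A small, deliberately contrived illustration of that rejection (the Sneaky class and file name are made up for the demo):

import torch


class Sneaky:
    # __reduce__ tells pickle to rebuild this object by calling print(...),
    # i.e. to run a callable of the attacker's choosing at load time.
    def __reduce__(self):
        return (print, ("this would run arbitrary code",))


torch.save(Sneaky(), "bad_checkpoint.pt")

try:
    torch.load("bad_checkpoint.pt", weights_only=True)
except Exception as err:
    # The restricted unpickler refuses the non-tensor payload.
    print(f"rejected: {type(err).__name__}")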