Added instruct-following template for Metharme (#1679)

This commit is contained in:
minipasila 2023-05-10 04:29:22 +03:00 committed by GitHub
parent 1aaa47070a
commit 334486f527
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 20 additions and 5 deletions

View File

@@ -0,0 +1,4 @@
name: "<|model|>"
your_name: "<|user|>"
context: "<|system|>"
turn_template: "<|user|><|user-message|><|bot|><|bot-message|>"

View File

@@ -74,6 +74,9 @@
.*chatglm:
mode: 'instruct'
instruction_template: 'ChatGLM'
.*metharme:
mode: 'instruct'
instruction_template: 'Metharme'
.*llava:
mode: 'instruct'
model_type: 'llama'

View File

@@ -147,9 +147,9 @@ def load_quantized(model_name):
name = model_name.lower()
if any((k in name for k in ['opt-', 'opt_', 'opt1', 'opt3', 'optfor', 'galactica', 'galpaca', 'pygmalion-350m'])):
model_type = 'opt'
elif any((k in name for k in ['gpt-j', 'gptj', 'gpt4all-j', 'malion-6b', 'pygway'])):
elif any((k in name for k in ['gpt-j', 'gptj', 'gpt4all-j', 'malion-6b', 'pygway', 'pygmalion-6b'])):
model_type = 'gptj'
elif any((k in name for k in ['llama', 'alpac', 'vicuna', 'guanaco', 'koala', 'llava', 'wizardlm'])):
elif any((k in name for k in ['llama', 'alpac', 'vicuna', 'guanaco', 'koala', 'llava', 'wizardlm', 'metharme'])):
model_type = 'llama'
else:
logging.error("Can't determine model type from model name. Please specify it manually using --model_type argument")

View File

@@ -78,6 +78,7 @@ settings = {
".*vicuna.*v0": "Vicuna v0",
".*vicuna.*(1.1|1_1)": "Vicuna v1.1",
".*stable.*vicuna": "StableVicuna",
'.*metharme': 'Metharme',
".*guanaco": "Guanaco-Chat",
".*koala": "Koala",
".*stablelm-tuned": "StableLM",

5
prompts/Metharme.txt Normal file
View File

@@ -0,0 +1,5 @@
<|system|>This is a text adventure game. Describe the scenario to the user and give him three options to pick from on each turn.<|user|>Start!<|model|>You are standing in front of an old, abandoned house. The windows are boarded up, and there's no sign of life around it. As you approach, you notice a strange feeling emanating from within. Suddenly, you hear a voice calling out to you... 'Come inside!'
- Go inside the house.
- Ignore the call and move away.
- Run as fast as you can.<|user|>go inside<|model|>

View File

@@ -35,17 +35,20 @@
"default": "Default",
".*(alpaca|llama|llava)": "LLaMA-Precise",
".*pygmalion": "NovelAI-Storywriter",
".*RWKV": "Naive"
".*RWKV": "Naive",
".*moss": "MOSS"
},
"prompts": {
"default": "QA",
".*(gpt4chan|gpt-4chan|4chan)": "GPT-4chan",
".*(oasst|stablelm-7b-sft-v7-epoch-3)": "Open Assistant",
".*(alpac|dolly)": "Alpaca",
".*mpt-.*instruct": "Alpaca",
"(?!.*v0)(?!.*1.1)(?!.*1_1)(?!.*stable).*vicuna": "Vicuna v0",
".*vicuna.*v0": "Vicuna v0",
".*vicuna.*(1.1|1_1)": "Vicuna v1.1",
".*stable.*vicuna": "StableVicuna",
".*metharme": "Metharme",
".*guanaco": "Guanaco-Chat",
".*koala": "Koala",
".*stablelm-tuned": "StableLM",
@@ -54,7 +57,6 @@
".*galactica.*-v2": "Galactica v2",
"(?!.*finetuned)(?!.*-v2).*galactica": "Galactica",
".*baize": "Baize",
".*mpt-.*instruct": "Alpaca",
".*mpt-.*chat": "MPT-Chat",
"(?!.*-flan-)(?!.*-t5-).*lamini-": "Alpaca",
".*incite.*chat": "INCITE-Chat",
@@ -62,7 +64,7 @@
},
"lora_prompts": {
"default": "QA",
".*(alpaca-lora-7b|alpaca-lora-13b|alpaca-lora-30b)": "Alpaca",
".*alpaca": "Alpaca",
".*baize": "Baize"
}
}