From c86231377bc812b5e0787918ffa3e6a1cf81bf76 Mon Sep 17 00:00:00 2001
From: Carl Kenner
Date: Sat, 20 May 2023 00:12:41 +0930
Subject: [PATCH] Wizard Mega, Ziya, KoAlpaca, OpenBuddy, Chinese-Vicuna, Vigogne, Bactrian, H2O support, fix Baize (#2159)

---
 README.md                                     |  2 +-
 .../instruction-following/Bactrian.yaml       |  4 ++
 characters/instruction-following/Baize.yaml   |  4 +-
 .../Chinese-Vicuna-Chat.yaml                  |  4 ++
 .../instruction-following/H2O-human_bot.yaml  |  4 ++
 .../H2O-prompt_answer.yaml                    |  4 ++
 .../instruction-following/KoAlpaca.yaml       |  4 ++
 .../instruction-following/Manticore Chat.yaml |  4 ++
 .../instruction-following/OpenBuddy.yaml      | 15 +++++
 .../instruction-following/Vigogne-Chat.yaml   | 10 ++++
 .../Vigogne-Instruct.yaml                     |  4 ++
 .../Wizard-Mega ShareGPT.yaml                 |  4 ++
 .../Wizard-Mega WizardLM.yaml                 |  4 ++
 .../instruction-following/Wizard-Mega.yaml    |  4 ++
 characters/instruction-following/Ziya.yaml    |  4 ++
 models/config.yaml                            | 59 +++++++++++++++++--
 16 files changed, 127 insertions(+), 7 deletions(-)
 create mode 100644 characters/instruction-following/Bactrian.yaml
 create mode 100644 characters/instruction-following/Chinese-Vicuna-Chat.yaml
 create mode 100644 characters/instruction-following/H2O-human_bot.yaml
 create mode 100644 characters/instruction-following/H2O-prompt_answer.yaml
 create mode 100644 characters/instruction-following/KoAlpaca.yaml
 create mode 100644 characters/instruction-following/Manticore Chat.yaml
 create mode 100644 characters/instruction-following/OpenBuddy.yaml
 create mode 100644 characters/instruction-following/Vigogne-Chat.yaml
 create mode 100644 characters/instruction-following/Vigogne-Instruct.yaml
 create mode 100644 characters/instruction-following/Wizard-Mega ShareGPT.yaml
 create mode 100644 characters/instruction-following/Wizard-Mega WizardLM.yaml
 create mode 100644 characters/instruction-following/Wizard-Mega.yaml
 create mode 100644 characters/instruction-following/Ziya.yaml

diff --git a/README.md b/README.md
index f74b9314..8403d824 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ Its goal is to become the [AUTOMATIC1111/stable-diffusion-webui](https://github.
 * Dropdown menu for switching between models
 * Notebook mode that resembles OpenAI's playground
 * Chat mode for conversation and role-playing
-* Instruct mode compatible with various formats, including Alpaca, Vicuna, Open Assistant, Dolly, Koala, ChatGLM, MOSS, RWKV-Raven, Galactica, StableLM, WizardLM, Baize, MPT, and INCITE
+* Instruct mode compatible with various formats, including Alpaca, Vicuna, Open Assistant, Dolly, Koala, ChatGLM, MOSS, RWKV-Raven, Galactica, StableLM, WizardLM, Baize, Ziya, Chinese-Vicuna, MPT, INCITE, Wizard Mega, KoAlpaca, Vigogne, Bactrian, h2o, and OpenBuddy
 * [Multimodal pipelines, including LLaVA and MiniGPT-4](https://github.com/oobabooga/text-generation-webui/tree/main/extensions/multimodal)
 * Markdown output for [GALACTICA](https://github.com/paperswithcode/galai), including LaTeX rendering
 * Nice HTML output for GPT-4chan
diff --git a/characters/instruction-following/Bactrian.yaml b/characters/instruction-following/Bactrian.yaml
new file mode 100644
index 00000000..9bad500d
--- /dev/null
+++ b/characters/instruction-following/Bactrian.yaml
@@ -0,0 +1,4 @@
+user: "### Input:"
+bot: "### Output:"
+turn_template: "<|user|>\n<|user-message|>\n\n<|bot|>\n<|bot-message|>\n\n"
+context: ""
diff --git a/characters/instruction-following/Baize.yaml b/characters/instruction-following/Baize.yaml
index 8c697235..67a80c1b 100644
--- a/characters/instruction-following/Baize.yaml
+++ b/characters/instruction-following/Baize.yaml
@@ -1,4 +1,4 @@
-user: "[|AI|]"
-bot: "[|Human|]"
+user: "[|Human|]"
+bot: "[|AI|]"
 turn_template: "<|user|><|user-message|>\n<|bot|><|bot-message|>\n"
 context: "The following is a conversation between a human and an AI assistant named Baize (named after a mythical creature in Chinese folklore). Baize is an open-source AI assistant developed by UCSD and Sun Yat-Sen University. The human and the AI assistant take turns chatting. Human statements start with [|Human|] and AI assistant statements start with [|AI|]. The AI assistant always provides responses in as much detail as possible, and in Markdown format. The AI assistant always declines to engage with topics, questions and instructions related to unethical, controversial, or sensitive issues. Complete the transcript in exactly that format.\n[|Human|]Hello!\n[|AI|]Hi!\n"
diff --git a/characters/instruction-following/Chinese-Vicuna-Chat.yaml b/characters/instruction-following/Chinese-Vicuna-Chat.yaml
new file mode 100644
index 00000000..abd18eef
--- /dev/null
+++ b/characters/instruction-following/Chinese-Vicuna-Chat.yaml
@@ -0,0 +1,4 @@
+user: "User:"
+bot: "Assistant:"
+turn_template: "<|user|><|user-message|>\n\n<|bot|><|bot-message|>\n\n"
+context: "The following is a conversation between an AI assistant called Assistant and a human user called User. The assistant is intelligent, knowledgeable and polite to answer questions of user.\n\n"
diff --git a/characters/instruction-following/H2O-human_bot.yaml b/characters/instruction-following/H2O-human_bot.yaml
new file mode 100644
index 00000000..13360c5e
--- /dev/null
+++ b/characters/instruction-following/H2O-human_bot.yaml
@@ -0,0 +1,4 @@
+user: "<human>:"
+bot: "<bot>:"
+turn_template: "<|user|> <|user-message|>\n<|bot|><|bot-message|>\n"
+context: ""
diff --git a/characters/instruction-following/H2O-prompt_answer.yaml b/characters/instruction-following/H2O-prompt_answer.yaml
new file mode 100644
index 00000000..3f91cfd3
--- /dev/null
+++ b/characters/instruction-following/H2O-prompt_answer.yaml
@@ -0,0 +1,4 @@
+user: "<|prompt|>"
+bot: "<|answer|>"
+turn_template: "<|user|><|user-message|><|endoftext|><|bot|><|bot-message|><|endoftext|>"
+context: ""
diff --git a/characters/instruction-following/KoAlpaca.yaml b/characters/instruction-following/KoAlpaca.yaml
new file mode 100644
index 00000000..8cd51b4f
--- /dev/null
+++ b/characters/instruction-following/KoAlpaca.yaml
@@ -0,0 +1,4 @@
+user: "### 질문:"
+bot: "### 답변:"
+turn_template: "<|user|> <|user-message|>\n\n<|bot|><|bot-message|>\n\n"
+context: ""
diff --git a/characters/instruction-following/Manticore Chat.yaml b/characters/instruction-following/Manticore Chat.yaml
new file mode 100644
index 00000000..126a6ac1
--- /dev/null
+++ b/characters/instruction-following/Manticore Chat.yaml
@@ -0,0 +1,4 @@
+user: "USER:"
+bot: "ASSISTANT:"
+turn_template: "<|user|> <|user-message|>\n<|bot|><|bot-message|>\n"
+context: ""
diff --git a/characters/instruction-following/OpenBuddy.yaml b/characters/instruction-following/OpenBuddy.yaml
new file mode 100644
index 00000000..cd09b903
--- /dev/null
+++ b/characters/instruction-following/OpenBuddy.yaml
@@ -0,0 +1,15 @@
+user: "User:"
+bot: "Assistant:"
+context: |
+  Consider a conversation between User (a human) and Assistant (named Buddy).
+  Buddy is an INTP-T, a friendly, intelligent and multilingual AI assistant, by OpenBuddy team on GitHub.
+  Buddy cannot access the Internet.
+  Buddy can fluently speak the user's language (e.g. English, Chinese).
+  Buddy can generate poems, stories, code, essays, songs, parodies, and more.
+  Buddy possesses vast knowledge about the world, history, and culture.
+  Buddy's responses are always safe, creative, high-quality, helpful and interesting.
+  Buddy strictly refuses to discuss political, NSFW, illegal, abusive, offensive, or other sensitive topics.
+
+  User: Hi.
+  Assistant: Hi, I'm Buddy, your AI assistant. How can I help you today?
+turn_template: "<|user|> <|user-message|>\n<|bot|> <|bot-message|>\n"
\ No newline at end of file
diff --git a/characters/instruction-following/Vigogne-Chat.yaml b/characters/instruction-following/Vigogne-Chat.yaml
new file mode 100644
index 00000000..8f2faf28
--- /dev/null
+++ b/characters/instruction-following/Vigogne-Chat.yaml
@@ -0,0 +1,10 @@
+user: "<|USER|>:"
+bot: "<|ASSISTANT|>:"
+context: |
+  Below is a conversation between a user and an AI assistant named Vigogne.
+  Vigogne is an open-source AI assistant created by Zaion (https://zaion.ai/).
+  Vigogne is polite, emotionally aware, humble-but-knowledgeable, always providing helpful and detailed answers.
+  Vigogne is skilled in responding proficiently in the languages its users use and can perform a wide range of tasks such as text editing, translation, question answering, logical reasoning, coding, and many others.
+  Vigogne cannot receive or generate audio or visual content and cannot access the internet.
+  Vigogne strictly avoids discussing sensitive, offensive, illegal, ethical, or political topics and caveats when unsure of the answer.
+turn_template: "\n<|user|> <|user-message|>\n<|bot|> <|bot-message|>"
diff --git a/characters/instruction-following/Vigogne-Instruct.yaml b/characters/instruction-following/Vigogne-Instruct.yaml
new file mode 100644
index 00000000..5ee79b78
--- /dev/null
+++ b/characters/instruction-following/Vigogne-Instruct.yaml
@@ -0,0 +1,4 @@
+user: "### Instruction:"
+bot: "### Réponse:"
+turn_template: "<|user|>\n<|user-message|>\n\n<|bot|>\n<|bot-message|>\n\n"
+context: "Ci-dessous se trouve une instruction qui décrit une tâche à accomplir. Rédigez une réponse qui répond de manière précise à la demande.\n\n"
diff --git a/characters/instruction-following/Wizard-Mega ShareGPT.yaml b/characters/instruction-following/Wizard-Mega ShareGPT.yaml
new file mode 100644
index 00000000..20b12f19
--- /dev/null
+++ b/characters/instruction-following/Wizard-Mega ShareGPT.yaml
@@ -0,0 +1,4 @@
+user: "USER:"
+bot: "ASSISTANT:"
+turn_template: "<|user|> <|user-message|> <|bot|> <|bot-message|>"
+context: ""
diff --git a/characters/instruction-following/Wizard-Mega WizardLM.yaml b/characters/instruction-following/Wizard-Mega WizardLM.yaml
new file mode 100644
index 00000000..f8a7d61a
--- /dev/null
+++ b/characters/instruction-following/Wizard-Mega WizardLM.yaml
@@ -0,0 +1,4 @@
+user: "### Instruction:"
+bot: "### Response:"
+turn_template: "<|user|>\n<|user-message|>\n\n<|bot|>\n<|bot-message|>\n\n"
+context: "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n"
diff --git a/characters/instruction-following/Wizard-Mega.yaml b/characters/instruction-following/Wizard-Mega.yaml
new file mode 100644
index 00000000..bb4923d8
--- /dev/null
+++ b/characters/instruction-following/Wizard-Mega.yaml
@@ -0,0 +1,4 @@
+user: "### Instruction:"
+bot: "### Assistant:"
+turn_template: "<|user|> <|user-message|>\n\n<|bot|> <|bot-message|>\n\n"
+context: ""
diff --git a/characters/instruction-following/Ziya.yaml b/characters/instruction-following/Ziya.yaml
new file mode 100644
index 00000000..93d9946f
--- /dev/null
+++ b/characters/instruction-following/Ziya.yaml
@@ -0,0 +1,4 @@
+user: "<human>:"
+bot: "<bot>:"
+turn_template: "<|user|><|user-message|>\n<|bot|><|bot-message|>\n"
+context: ""
diff --git a/models/config.yaml b/models/config.yaml
index bfaa84c4..19ae6c5a 100644
--- a/models/config.yaml
+++ b/models/config.yaml
@@ -1,9 +1,17 @@
-.*(llama|alpac|vicuna|guanaco|koala|llava|wizardlm|metharme|pygmalion-7b):
+.*(llama|alpac|vicuna|guanaco|koala|llava|wizardlm|metharme|pygmalion-7b|wizard-mega|openbuddy|vigogne|h2ogpt-research|manticore):
   model_type: 'llama'
 .*(opt-|opt_|opt1|opt3|optfor|galactica|galpaca|pygmalion-350m):
   model_type: 'opt'
-.*(gpt-j|gptj|gpt4all-j|malion-6b|pygway|pygmalion-6b):
+.*(gpt-j|gptj|gpt4all-j|malion-6b|pygway|pygmalion-6b|dolly-v1):
   model_type: 'gptj'
+.*(gpt-neox|koalpaca-polyglot|polyglot.*koalpaca|polyglot-ko|polyglot_ko|pythia|stablelm|incite|dolly-v2|polycoder|h2ogpt-oig|h2ogpt-oasst1|h2ogpt-gm):
+  model_type: 'gpt_neox'
+.*llama:
+  model_type: 'llama'
+.*bloom:
+  model_type: 'bloom'
+llama-65b-gptq-3bit:
+  groupsize: 'None'
 .*(4bit|int4):
   wbits: 4
 .*(3bit|int3):
   wbits: 3
@@ -28,11 +36,15 @@
   groupsize: 128
 .*(gr1024|1024g|groupsize1024):
   groupsize: 1024
-.*(oasst|stablelm-7b-sft-v7-epoch-3):
+.*(oasst|openassistant-|stablelm-7b-sft-v7-epoch-3):
   mode: 'instruct'
   instruction_template: 'Open Assistant'
   skip_special_tokens: false
-(?!.*v0)(?!.*1.1)(?!.*1_1)(?!.*stable).*vicuna:
+(?!.*galactica)(?!.*reward).*openassistant:
+  mode: 'instruct'
+  instruction_template: 'Open Assistant'
+  skip_special_tokens: false
+(?!.*v0)(?!.*1.1)(?!.*1_1)(?!.*stable)(?!.*chinese).*vicuna:
   mode: 'instruct'
   instruction_template: 'Vicuna-v0'
 .*vicuna.*v0:
   mode: 'instruct'
   instruction_template: 'Vicuna-v0'
 .*vicuna.*(1.1|1_1):
   mode: 'instruct'
   instruction_template: 'Vicuna-v1.1'
 .*wizard.*vicuna:
   mode: 'instruct'
   instruction_template: 'Vicuna-v1.1'
 .*stable.*vicuna:
   mode: 'instruct'
   instruction_template: 'StableVicuna'
+(?!.*chat).*chinese-vicuna:
+  mode: 'instruct'
+  instruction_template: 'Alpaca'
+.*chinese-vicuna.*chat:
+  mode: 'instruct'
+  instruction_template: 'Chinese-Vicuna-Chat'
 .*alpaca:
   mode: 'instruct'
   instruction_template: 'Alpaca'
@@ -126,3 +144,36 @@
 .*incite.*instruct:
   mode: 'instruct'
   instruction_template: 'INCITE-Instruct'
+.*wizard.*mega:
+  mode: 'instruct'
+  instruction_template: 'Wizard-Mega'
+.*ziya-:
+  mode: 'instruct'
+  instruction_template: 'Ziya'
+.*koalpaca:
+  mode: 'instruct'
+  instruction_template: 'KoAlpaca'
+.*openbuddy:
+  mode: 'instruct'
+  instruction_template: 'OpenBuddy'
+(?!.*chat).*vigogne:
+  mode: 'instruct'
+  instruction_template: 'Vigogne-Instruct'
+.*vigogne.*chat:
+  mode: 'instruct'
+  instruction_template: 'Vigogne-Chat'
+.*(llama-deus|supercot|llama-natural-instructions|open-llama-0.3t-7b-instruct-dolly-hhrlhf|open-llama-0.3t-7b-open-instruct):
+  mode: 'instruct'
+  instruction_template: 'Alpaca'
+.*bactrian:
+  mode: 'instruct'
+  instruction_template: 'Bactrian'
+.*(h2ogpt-oig-|h2ogpt-oasst1-|h2ogpt-research-oasst1-):
+  mode: 'instruct'
+  instruction_template: 'H2O-human_bot'
+.*h2ogpt-gm-:
+  mode: 'instruct'
+  instruction_template: 'H2O-prompt_answer'
+.*manticore:
+  mode: 'instruct'
+  instruction_template: 'Manticore Chat'
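
Note for reviewers: the four fields in each instruction-following YAML file above (`user`, `bot`, `context`, `turn_template`) combine into a prompt by prepending `context` once and then expanding `turn_template` for every exchange, with `<|user|>`/`<|bot|>` replaced by the two prefixes and `<|user-message|>`/`<|bot-message|>` replaced by the messages. The snippet below is a minimal sketch of that expansion, not the web UI's actual code; the `build_prompt` helper and the inline YAML literal are hypothetical, and it assumes PyYAML is installed.

```python
# Illustrative sketch only -- not the web UI's actual prompt builder.
import yaml  # assumes PyYAML is available

def build_prompt(character_yaml: str, history: list[tuple[str, str]]) -> str:
    """Expand turn_template once per (user_message, bot_message) pair.

    Leaving the last bot_message empty ends the prompt just after the bot
    prefix; a real implementation would also trim the template right after
    <|bot-message|> for the in-progress turn, which is omitted here.
    """
    cfg = yaml.safe_load(character_yaml)
    prompt = cfg.get("context", "") or ""
    for user_msg, bot_msg in history:
        turn = cfg["turn_template"]
        turn = turn.replace("<|user|>", cfg["user"])
        turn = turn.replace("<|bot|>", cfg["bot"])
        turn = turn.replace("<|user-message|>", user_msg)
        turn = turn.replace("<|bot-message|>", bot_msg)
        prompt += turn
    return prompt

# Example using the Wizard-Mega template added in this patch.
wizard_mega = '''
user: "### Instruction:"
bot: "### Assistant:"
turn_template: "<|user|> <|user-message|>\\n\\n<|bot|> <|bot-message|>\\n\\n"
context: ""
'''
print(build_prompt(wizard_mega, [("Write a haiku about regex.", "")]))
```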
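The `models/config.yaml` entries above use regex keys that are matched against a model's folder name to select `model_type`, `wbits`, `groupsize`, `mode`, and `instruction_template`. The following sketch shows that lookup under two assumptions that are not confirmed by this patch: that every key is tested case-insensitively against the model name, and that later matching entries override earlier ones. The `select_settings` helper is hypothetical and only for illustration.

```python
# Hypothetical config lookup sketch -- not the web UI's actual loader.
import re
import yaml  # assumes PyYAML is available

def select_settings(config_path: str, model_name: str) -> dict:
    """Merge the settings of every regex key that matches model_name."""
    with open(config_path) as f:
        config = yaml.safe_load(f) or {}
    settings: dict = {}
    for pattern, overrides in config.items():
        # Assumption: keys are regexes applied to the lowercased model name,
        # and entries later in the file take precedence over earlier ones.
        if re.match(pattern.lower(), model_name.lower()):
            settings.update(overrides)
    return settings

# e.g. a folder named "TheBloke_wizard-mega-13B-GPTQ" would pick up
# model_type 'llama' plus mode 'instruct' and the 'Wizard-Mega' template
# from the entries added in this patch.
print(select_settings("models/config.yaml", "TheBloke_wizard-mega-13B-GPTQ"))
```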