From c40932eb39140e039a8a3574497ca94567188ddb Mon Sep 17 00:00:00 2001
From: MikoAL <66015876+MikoAL@users.noreply.github.com>
Date: Tue, 20 Jun 2023 12:03:44 +0800
Subject: [PATCH] Added Falcon LoRA training support (#2684)

I am 50% sure this will work
---
 modules/training.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/modules/training.py b/modules/training.py
index d039e807..65f1668a 100644
--- a/modules/training.py
+++ b/modules/training.py
@@ -30,12 +30,14 @@ try:
     MODEL_CLASSES = {v: k for k, v in MODEL_FOR_CAUSAL_LM_MAPPING_NAMES}
 except:
     standard_modules = ["q_proj", "v_proj"]
-    model_to_lora_modules = {"llama": standard_modules, "opt": standard_modules, "gptj": standard_modules, "gpt_neox": ["query_key_value"]}
+    model_to_lora_modules = {"llama": standard_modules, "opt": standard_modules, "gptj": standard_modules, "gpt_neox": ["query_key_value"], "rw":["query_key_value"]}
     MODEL_CLASSES = {
         "LlamaForCausalLM": "llama",
         "OPTForCausalLM": "opt",
         "GPTJForCausalLM": "gptj",
-        "GPTNeoXForCausalLM": "gpt_neox"
+        "GPTNeoXForCausalLM": "gpt_neox",
+        "RWForCausalLM": "rw"
+
     }
 
 train_log = {}