From abef355ed0272bc5108542c655a6d08b596be2d9 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Sat, 15 Apr 2023 01:21:19 -0300
Subject: [PATCH] Remove deprecated flag

---
 modules/training.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/training.py b/modules/training.py
index f6033d60..9dfe0a23 100644
--- a/modules/training.py
+++ b/modules/training.py
@@ -124,7 +124,7 @@ def do_train(lora_name: str, micro_batch_size: int, batch_size: int, epochs: int
         print(f"Warning: LoRA training has only currently been validated for LLaMA models. (Found model type: {model_type})")
         time.sleep(5)
 
-    if shared.args.wbits > 0 or shared.args.gptq_bits > 0:
+    if shared.args.wbits > 0:
         yield "LoRA training does not yet support 4bit. Please use `--load-in-8bit` for now."
         return