From 9ab90d8b608170fe57d893c2150eda3bc11a8b06 Mon Sep 17 00:00:00 2001
From: Forkoz <59298527+Ph0rk0z@users.noreply.github.com>
Date: Tue, 30 May 2023 09:09:18 -0500
Subject: [PATCH] Fix warning for qlora (#2438)

---
 modules/training.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/training.py b/modules/training.py
index 6af26520..75ba82ca 100644
--- a/modules/training.py
+++ b/modules/training.py
@@ -237,7 +237,7 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
         yield "LoRA training with GPTQ models requires loading with `--monkey-patch`"
         return
 
-    elif not shared.args.load_in_8bit and shared.args.wbits <= 0:
+    elif not (shared.args.load_in_8bit or shared.args.load_in_4bit) and shared.args.wbits <= 0:
         yield "It is highly recommended you use `--load-in-8bit` for LoRA training. *(Will continue anyway in 2 seconds, press `Interrupt` to stop.)*"
         logger.warning("It is highly recommended you use `--load-in-8bit` for LoRA training.")
         time.sleep(2)  # Give it a moment for the message to show in UI before continuing
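
The sketch below (not part of the patch) illustrates the effect of the changed condition: the warning should only fire when the model is loaded in neither 8-bit nor 4-bit (qlora) and no GPTQ bit width is in use. The `should_warn` helper and the `SimpleNamespace` stand-in for `shared.args` are hypothetical, introduced only for illustration.

```python
from types import SimpleNamespace


def should_warn(args) -> bool:
    """Return True when the LoRA-training quantization warning should be shown."""
    # Patched check: 8-bit and 4-bit loading are treated the same way,
    # and a positive `wbits` (GPTQ) also suppresses the warning.
    return not (args.load_in_8bit or args.load_in_4bit) and args.wbits <= 0


# Before the patch, loading in 4-bit still triggered the warning;
# with the patched condition it no longer does.
qlora_args = SimpleNamespace(load_in_8bit=False, load_in_4bit=True, wbits=0)
assert should_warn(qlora_args) is False

fp16_args = SimpleNamespace(load_in_8bit=False, load_in_4bit=False, wbits=0)
assert should_warn(fp16_args) is True
```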