diff --git a/modules/models.py b/modules/models.py
index f4bb11fd..a934514b 100644
--- a/modules/models.py
+++ b/modules/models.py
@@ -98,7 +98,7 @@ def load_model(model_name):
         command = "AutoModelForCausalLM.from_pretrained"
         params = ["low_cpu_mem_usage=True"]
         if not shared.args.cpu and not torch.cuda.is_available():
-            print("Warning: no GPU has been detected.\nFalling back to CPU mode.\n")
+            print("Warning: torch.cuda.is_available() returned False.\nThis means that no GPU has been detected.\nFalling back to CPU mode.\n")
             shared.args.cpu = True
 
         if shared.args.cpu:
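
For context, below is a minimal, self-contained sketch of the detect-and-fall-back pattern this hunk touches: if CPU mode was not requested but `torch.cuda.is_available()` reports no usable GPU, the code warns and forces CPU mode. The `Args` class is a hypothetical stand-in for the project's `shared.args` namespace, which is not shown in the diff; only the `cpu` flag is assumed.

```python
import torch


class Args:
    """Hypothetical stand-in for shared.args; only the cpu flag is modeled."""
    cpu = False


args = Args()

# If the user didn't ask for CPU mode but CUDA reports no usable device,
# warn (naming the actual check that failed) and force CPU mode.
if not args.cpu and not torch.cuda.is_available():
    print(
        "Warning: torch.cuda.is_available() returned False.\n"
        "This means that no GPU has been detected.\n"
        "Falling back to CPU mode.\n"
    )
    args.cpu = True
```

The wording change itself is the point of the hunk: naming `torch.cuda.is_available()` in the warning tells users which check failed, so they can debug their CUDA setup instead of assuming the application simply ignored their GPU.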