From 5d3f15b91568cae984baeb4216df273e7077f7b4 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Sat, 11 Feb 2023 23:17:06 -0300
Subject: [PATCH] Use the CPU if no GPU is detected

---
 server.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/server.py b/server.py
index f7391487..6bc3673b 100644
--- a/server.py
+++ b/server.py
@@ -120,6 +120,9 @@ def load_model(model_name):
     else:
         command = "AutoModelForCausalLM.from_pretrained"
         params = ["low_cpu_mem_usage=True"]
+        if not args.cpu and not torch.cuda.is_available():
+            print("Warning: no GPU has been detected.\nFalling back to CPU mode.\n")
+            args.cpu = True
 
     if args.cpu:
         params.append("low_cpu_mem_usage=True")
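
For reference, a minimal standalone sketch of the fallback behaviour this patch introduces. The argparse setup and the `device` variable below are illustrative assumptions, not part of server.py; only the GPU check and the warning mirror the added lines.

```python
# Minimal sketch of the CPU-fallback logic (assumed surrounding code).
import argparse

import torch

parser = argparse.ArgumentParser()
parser.add_argument("--cpu", action="store_true", help="Force CPU inference")
args = parser.parse_args()

# If the user did not request CPU mode but no CUDA device is visible,
# switch to CPU instead of failing later when tensors are moved to the GPU.
if not args.cpu and not torch.cuda.is_available():
    print("Warning: no GPU has been detected.\nFalling back to CPU mode.\n")
    args.cpu = True

device = "cpu" if args.cpu else "cuda"
print(f"Loading model on: {device}")
```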