From 8ea554bc19bf7df2a08ab7a23322f69829b140db Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 16 Oct 2023 12:53:40 -0700
Subject: [PATCH] Check for torch.xpu.is_available()

---
 modules/models.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/modules/models.py b/modules/models.py
index c376c808..087adada 100644
--- a/modules/models.py
+++ b/modules/models.py
@@ -151,8 +151,15 @@ def huggingface_loader(model_name):
 
     # Load with quantization and/or offloading
     else:
-        if not any((shared.args.cpu, torch.cuda.is_available(), torch.backends.mps.is_available())):
-            logger.warning('torch.cuda.is_available() returned False. This means that no GPU has been detected. Falling back to CPU mode.')
+        conditions = [
+            shared.args.cpu,
+            torch.cuda.is_available(),
+            torch.backends.mps.is_available(),
+            hasattr(torch, 'xpu') and torch.xpu.is_available(),
+        ]
+
+        if not any(conditions):
+            logger.warning('No GPU has been detected by Pytorch. Falling back to CPU mode.')
             shared.args.cpu = True
 
         if shared.args.cpu:
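
Note: torch.xpu (Intel's XPU backend) is only present on PyTorch builds that ship it, which is why the new condition is guarded with hasattr. Below is a minimal standalone sketch of the same multi-backend detection logic; gpu_available is a hypothetical helper name used only for illustration, not something from the repository.

import torch

def gpu_available() -> bool:
    # Mirrors the patched check in modules/models.py: any supported
    # accelerator backend counts as an available GPU.
    return any((
        torch.cuda.is_available(),                            # NVIDIA (and ROCm) GPUs
        torch.backends.mps.is_available(),                    # Apple Silicon (MPS)
        hasattr(torch, 'xpu') and torch.xpu.is_available(),   # Intel XPU, if this build exposes it
    ))

if __name__ == '__main__':
    if not gpu_available():
        print('No GPU has been detected by PyTorch. Falling back to CPU mode.')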