mirror of
https://github.com/oobabooga/text-generation-webui.git
synced 2024-11-25 09:19:23 +01:00
Experimental Intel Arc transformers support (untested)
This commit is contained in:
parent
8cce1f1126
commit
b88b2b74a6
@@ -137,6 +137,8 @@ def huggingface_loader(model_name):
         if torch.backends.mps.is_available():
             device = torch.device('mps')
             model = model.to(device)
+        elif hasattr(torch, 'xpu') and torch.xpu.is_available():
+            model = model.to('xpu')
         else:
             model = model.cuda()
@@ -132,6 +132,8 @@ def encode(prompt, add_special_tokens=True, add_bos_token=True, truncation_lengt
     elif torch.backends.mps.is_available():
         device = torch.device('mps')
         return input_ids.to(device)
+    elif hasattr(torch, 'xpu') and torch.xpu.is_available():
+        return input_ids.to('xpu')
     else:
         return input_ids.cuda()
Loading…
Reference in New Issue
Block a user