Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-22 16:17:57 +01:00)
Fix trust_remote_code in wrong location (#1953)
This commit is contained in:
parent 13e7ebfc77
commit a2b25322f0
@@ -160,7 +160,11 @@ def load_model(model_name):
 
     # Custom
     else:
-        params = {"low_cpu_mem_usage": True}
+        params = {
+            "low_cpu_mem_usage": True,
+            "trust_remote_code": trust_remote_code
+        }
+
         if not any((shared.args.cpu, torch.cuda.is_available(), torch.has_mps)):
             logging.warning("torch.cuda.is_available() returned False. This means that no GPU has been detected. Falling back to CPU mode.")
             shared.args.cpu = True
@@ -169,7 +173,6 @@ def load_model(model_name):
             params["torch_dtype"] = torch.float32
         else:
             params["device_map"] = 'auto'
-            params["trust_remote_code"] = trust_remote_code
             if shared.args.load_in_8bit and any((shared.args.auto_devices, shared.args.gpu_memory)):
                 params['quantization_config'] = BitsAndBytesConfig(load_in_8bit=True, llm_int8_enable_fp32_cpu_offload=True)
             elif shared.args.load_in_8bit:
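Taken together, the two hunks above move trust_remote_code out of the GPU-only else branch and into the params dict that every code path shares, so models that ship custom modeling code can also be loaded in CPU mode. Below is a minimal sketch of the resulting flow; load_custom_model, model_path and the cpu argument are illustrative stand-ins rather than the project's API (the real load_model reads these values from shared.args).

import torch
from transformers import AutoModelForCausalLM


def load_custom_model(model_path, trust_remote_code=False, cpu=False):
    # Base kwargs shared by every branch; mirrors the new params dict,
    # so trust_remote_code reaches from_pretrained() on CPU and GPU alike.
    params = {
        "low_cpu_mem_usage": True,
        "trust_remote_code": trust_remote_code,
    }

    if cpu or not torch.cuda.is_available():
        # CPU fallback: before the fix, trust_remote_code was only set in
        # the GPU branch, so this path silently dropped it.
        params["torch_dtype"] = torch.float32
    else:
        params["device_map"] = "auto"

    return AutoModelForCausalLM.from_pretrained(model_path, **params)

Building the dict once and branching only on device-specific keys keeps the two paths from drifting apart, which is exactly the class of bug this commit fixes.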
@@ -285,6 +288,7 @@ def load_soft_prompt(name):
                         logging.info(f"{field}: {', '.join(j[field])}")
                     else:
                         logging.info(f"{field}: {j[field]}")
+
             logging.info()
             tensor = np.load('tensor.npy')
             Path('tensor.npy').unlink()