Show a warning if two quantized models are found

oobabooga 2023-04-13 12:04:27 -03:00
parent fbb448ce4f
commit ca293bb713


@@ -131,8 +131,12 @@ def load_quantized(model_name):
     pt_path = None
     if len(found_pts) > 0:
+        if len(found_pts) > 1:
+            print('Warning: more than one .pt model has been found. The last one will be selected. It could be wrong.')
         pt_path = found_pts[-1]
     elif len(found_safetensors) > 0:
+        if len(found_safetensors) > 1:
+            print('Warning: more than one .safetensors model has been found. The last one will be selected. It could be wrong.')
         pt_path = found_safetensors[-1]
     if not pt_path:
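
For reference, a minimal standalone sketch of the selection logic this diff implements: prefer a .pt checkpoint over .safetensors, warn when more than one candidate of the chosen type exists, and pick the last one. The find_quantized_model_file helper, the pathlib-based globbing, and the example directory name are illustrative assumptions, not code from the repository.

    from pathlib import Path
    from typing import Optional


    def find_quantized_model_file(model_dir: str) -> Optional[Path]:
        """Pick a quantized checkpoint from model_dir, preferring .pt over
        .safetensors, and warn when more than one candidate is found."""
        directory = Path(model_dir)
        found_pts = sorted(directory.glob('*.pt'))
        found_safetensors = sorted(directory.glob('*.safetensors'))

        pt_path = None
        if len(found_pts) > 0:
            if len(found_pts) > 1:
                print('Warning: more than one .pt model has been found. The last one will be selected. It could be wrong.')
            pt_path = found_pts[-1]
        elif len(found_safetensors) > 0:
            if len(found_safetensors) > 1:
                print('Warning: more than one .safetensors model has been found. The last one will be selected. It could be wrong.')
            pt_path = found_safetensors[-1]

        return pt_path


    if __name__ == '__main__':
        # Placeholder directory name for the usage example.
        path = find_quantized_model_file('models/llama-7b-4bit')
        if not path:
            print('Could not find a quantized model file.')
        else:
            print(f'Selected: {path}')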