oobabooga 2023-11-28 18:43:33 -08:00
parent 6e51bae2e0
commit a7670c31ca

@@ -85,6 +85,15 @@ loaders_and_params = OrderedDict({
         'no_use_fast',
         'autogptq_info',
     ],
+    'AutoAWQ': [
+        'cpu_memory',
+        'gpu_memory',
+        'auto_devices',
+        'max_seq_len',
+        'no_inject_fused_attention',
+        'trust_remote_code',
+        'no_use_fast',
+    ],
     'GPTQ-for-LLaMa': [
         'wbits',
         'groupsize',
@@ -125,15 +134,6 @@ loaders_and_params = OrderedDict({
         'alpha_value',
         'compress_pos_emb',
     ],
-    'AutoAWQ': [
-        'cpu_memory',
-        'gpu_memory',
-        'auto_devices',
-        'max_seq_len',
-        'no_inject_fused_attention',
-        'trust_remote_code',
-        'no_use_fast',
-    ],
     'ctransformers': [
         'n_ctx',
         'n_gpu_layers',
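
For context, a minimal sketch of how a mapping like loaders_and_params is typically consumed: each loader name maps to the list of parameter widgets that should be visible when that loader is selected, so moving the 'AutoAWQ' entry only changes its position in the ordered mapping, not which parameters it exposes. The visible_params_for helper below is illustrative only, not part of the actual codebase; the structure of the dict fragment is taken from the diff above.

from collections import OrderedDict

# Fragment mirroring the structure seen in the diff: loader name -> parameter
# element names to show in the UI for that loader.
loaders_and_params = OrderedDict({
    'AutoAWQ': [
        'cpu_memory',
        'gpu_memory',
        'auto_devices',
        'max_seq_len',
        'no_inject_fused_attention',
        'trust_remote_code',
        'no_use_fast',
    ],
    'GPTQ-for-LLaMa': [
        'wbits',
        'groupsize',
    ],
})


def visible_params_for(loader: str) -> list:
    # Hypothetical helper: return the parameter names to display for a loader,
    # or an empty list if the loader is unknown.
    return loaders_and_params.get(loader, [])


if __name__ == '__main__':
    print(visible_params_for('AutoAWQ'))
    # ['cpu_memory', 'gpu_memory', 'auto_devices', 'max_seq_len',
    #  'no_inject_fused_attention', 'trust_remote_code', 'no_use_fast']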