From 6e51bae2e0fe8a871625ff232d85bd8af17ed651 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Tue, 28 Nov 2023 18:41:11 -0800
Subject: [PATCH] Sort the loaders menu

---
 modules/loaders.py | 80 +++++++++++++++++++++++-----------------------
 1 file changed, 40 insertions(+), 40 deletions(-)

diff --git a/modules/loaders.py b/modules/loaders.py
index 5d9836c9..12c30e78 100644
--- a/modules/loaders.py
+++ b/modules/loaders.py
@@ -27,6 +27,26 @@ loaders_and_params = OrderedDict({
         'disable_exllama',
         'transformers_info'
     ],
+    'llamacpp_HF': [
+        'n_ctx',
+        'n_gpu_layers',
+        'tensor_split',
+        'n_batch',
+        'threads',
+        'threads_batch',
+        'no_mmap',
+        'mlock',
+        'no_mul_mat_q',
+        'alpha_value',
+        'rope_freq_base',
+        'compress_pos_emb',
+        'cpu',
+        'numa',
+        'cfg_cache',
+        'no_use_fast',
+        'logits_all',
+        'llamacpp_HF_info',
+    ],
     'ExLlama_HF': [
         'gpu_split',
         'max_seq_len',
@@ -47,22 +67,6 @@ loaders_and_params = OrderedDict({
         'compress_pos_emb',
         'no_use_fast',
     ],
-    'ExLlama': [
-        'gpu_split',
-        'max_seq_len',
-        'alpha_value',
-        'rope_freq_base',
-        'compress_pos_emb',
-        'exllama_info',
-    ],
-    'ExLlamav2': [
-        'gpu_split',
-        'max_seq_len',
-        'no_flash_attn',
-        'cache_8bit',
-        'alpha_value',
-        'compress_pos_emb',
-    ],
     'AutoGPTQ': [
         'triton',
         'no_inject_fused_attention',
@@ -105,25 +109,30 @@ loaders_and_params = OrderedDict({
         'cpu',
         'numa',
     ],
-    'llamacpp_HF': [
-        'n_ctx',
-        'n_gpu_layers',
-        'tensor_split',
-        'n_batch',
-        'threads',
-        'threads_batch',
-        'no_mmap',
-        'mlock',
-        'no_mul_mat_q',
+    'ExLlama': [
+        'gpu_split',
+        'max_seq_len',
         'alpha_value',
         'rope_freq_base',
         'compress_pos_emb',
-        'cpu',
-        'numa',
-        'cfg_cache',
+        'exllama_info',
+    ],
+    'ExLlamav2': [
+        'gpu_split',
+        'max_seq_len',
+        'no_flash_attn',
+        'cache_8bit',
+        'alpha_value',
+        'compress_pos_emb',
+    ],
+    'AutoAWQ': [
+        'cpu_memory',
+        'gpu_memory',
+        'auto_devices',
+        'max_seq_len',
+        'no_inject_fused_attention',
+        'trust_remote_code',
         'no_use_fast',
-        'logits_all',
-        'llamacpp_HF_info',
     ],
     'ctransformers': [
         'n_ctx',
@@ -134,15 +143,6 @@ loaders_and_params = OrderedDict({
         'no_mmap',
         'mlock'
     ],
-    'AutoAWQ': [
-        'cpu_memory',
-        'gpu_memory',
-        'auto_devices',
-        'max_seq_len',
-        'no_inject_fused_attention',
-        'trust_remote_code',
-        'no_use_fast',
-    ]
 })
 
 loaders_samplers = {