2023-06-17 00:00:37 +02:00
|
|
|
import functools
|
2023-08-11 19:41:33 +02:00
|
|
|
from collections import OrderedDict
|
2023-06-17 00:00:37 +02:00
|
|
|
|
|
|
|
import gradio as gr
|
|
|
|
|
|
|
|
from modules import shared
|
|
|
|
|
2023-08-11 19:41:33 +02:00
|
|
|
# Maps each model loader to the list of UI element names (keys into
# shared.gradio) that should be visible when that loader is selected.
# Entries ending in `_info` are informational markdown widgets, not options.
# NOTE(review): an OrderedDict is used presumably so the loader dropdown
# preserves this ordering — plain dicts also preserve insertion order, but
# callers may rely on the OrderedDict type; confirm before changing.
loaders_and_params = OrderedDict({
    'Transformers': [
        'cpu_memory',
        'gpu_memory',
        'load_in_8bit',
        'bf16',
        'cpu',
        'disk',
        'auto_devices',
        'load_in_4bit',
        'use_double_quant',
        'quant_type',
        'compute_dtype',
        'trust_remote_code',
        'no_use_fast',
        'use_flash_attention_2',
        'alpha_value',
        'rope_freq_base',
        'compress_pos_emb',
        'disable_exllama',
        'disable_exllamav2',
        'transformers_info',
    ],
    'llama.cpp': [
        'n_ctx',
        'n_gpu_layers',
        'tensor_split',
        'n_batch',
        'threads',
        'threads_batch',
        'no_mmap',
        'mlock',
        'no_mul_mat_q',
        'alpha_value',
        'rope_freq_base',
        'compress_pos_emb',
        'cpu',
        'numa',
        'no_offload_kqv',
        'row_split',
        'tensorcores',
        'streaming_llm',
        'attention_sink_size',
    ],
    # llamacpp_HF: llama.cpp weights behind a HF tokenizer/sampling stack,
    # hence the extra tokenizer-related options (trust_remote_code, no_use_fast).
    'llamacpp_HF': [
        'n_ctx',
        'n_gpu_layers',
        'tensor_split',
        'n_batch',
        'threads',
        'threads_batch',
        'no_mmap',
        'mlock',
        'no_mul_mat_q',
        'alpha_value',
        'rope_freq_base',
        'compress_pos_emb',
        'cpu',
        'numa',
        'cfg_cache',
        'trust_remote_code',
        'no_use_fast',
        'logits_all',
        'no_offload_kqv',
        'row_split',
        'tensorcores',
        'streaming_llm',
        'attention_sink_size',
        'llamacpp_HF_info',
    ],
    'ExLlamav2_HF': [
        'gpu_split',
        'max_seq_len',
        'cfg_cache',
        'no_flash_attn',
        'num_experts_per_token',
        'cache_8bit',
        'cache_4bit',
        'autosplit',
        'alpha_value',
        'compress_pos_emb',
        'trust_remote_code',
        'no_use_fast',
    ],
    'ExLlamav2': [
        'gpu_split',
        'max_seq_len',
        'no_flash_attn',
        'num_experts_per_token',
        'cache_8bit',
        'cache_4bit',
        'autosplit',
        'alpha_value',
        'compress_pos_emb',
        'exllamav2_info',
    ],
    'AutoGPTQ': [
        'triton',
        'no_inject_fused_attention',
        'no_inject_fused_mlp',
        'no_use_cuda_fp16',
        'wbits',
        'groupsize',
        'desc_act',
        'disable_exllama',
        'disable_exllamav2',
        'gpu_memory',
        'cpu_memory',
        'cpu',
        'disk',
        'auto_devices',
        'trust_remote_code',
        'no_use_fast',
        'autogptq_info',
    ],
    'AutoAWQ': [
        'cpu_memory',
        'gpu_memory',
        'auto_devices',
        'max_seq_len',
        'no_inject_fused_attention',
        'trust_remote_code',
        'no_use_fast',
    ],
    'GPTQ-for-LLaMa': [
        'wbits',
        'groupsize',
        'model_type',
        'pre_layer',
        'trust_remote_code',
        'no_use_fast',
        'gptq_for_llama_info',
    ],
    'ctransformers': [
        'n_ctx',
        'n_gpu_layers',
        'n_batch',
        'threads',
        'model_type',
        'no_mmap',
        'mlock'
    ],
    'QuIP#': [
        'trust_remote_code',
        'no_use_fast',
        'no_flash_attn',
        'quipsharp_info',
    ],
    'HQQ': [
        'hqq_backend',
        'trust_remote_code',
        'no_use_fast',
    ]
})
|
2023-06-17 00:00:37 +02:00
|
|
|
|
2023-12-21 04:18:07 +01:00
|
|
|
|
|
|
|
def transformers_samplers():
    """Return the set of sampler/generation parameter names supported by
    Transformers-based loaders.

    Each call returns a fresh set, so callers may safely take ownership of
    the result.
    """
    # Kept as a flat tuple for readability; converted to a set on return so
    # membership tests in the UI code stay O(1).
    supported = (
        'temperature',
        'temperature_last',
        'dynamic_temperature',
        'dynatemp_low',
        'dynatemp_high',
        'dynatemp_exponent',
        'smoothing_factor',
        'smoothing_curve',
        'top_p',
        'min_p',
        'top_k',
        'typical_p',
        'epsilon_cutoff',
        'eta_cutoff',
        'tfs',
        'top_a',
        'repetition_penalty',
        'presence_penalty',
        'frequency_penalty',
        'repetition_penalty_range',
        'encoder_repetition_penalty',
        'no_repeat_ngram_size',
        'seed',
        'do_sample',
        'penalty_alpha',
        'mirostat_mode',
        'mirostat_tau',
        'mirostat_eta',
        'grammar_file_row',
        'grammar_string',
        'guidance_scale',
        'negative_prompt',
        'ban_eos_token',
        'custom_token_bans',
        'sampler_priority',
        'add_bos_token',
        'skip_special_tokens',
        'auto_max_new_tokens',
        'prompt_lookup_num_tokens',
    )
    return set(supported)
|
|
|
|
|
|
|
|
|
|
|
|
# Maps each loader to the set of sampler parameter names it supports.
# Loaders that generate through the Transformers sampling stack all share
# transformers_samplers(); pure-backend loaders (ExLlamav2, llama.cpp,
# ctransformers) expose only the samplers implemented natively by that
# backend, and the *_HF variants get the full HF sampler set minus a few
# options.  Used by blacklist_samplers() to show/hide UI widgets.
loaders_samplers = {
    'Transformers': transformers_samplers(),
    'AutoGPTQ': transformers_samplers(),
    'GPTQ-for-LLaMa': transformers_samplers(),
    'AutoAWQ': transformers_samplers(),
    'QuIP#': transformers_samplers(),
    'HQQ': transformers_samplers(),
    'ExLlamav2': {
        'temperature',
        'temperature_last',
        'top_p',
        'min_p',
        'top_k',
        'typical_p',
        'tfs',
        'top_a',
        'repetition_penalty',
        'presence_penalty',
        'frequency_penalty',
        'repetition_penalty_range',
        'seed',
        'mirostat_mode',
        'mirostat_tau',
        'mirostat_eta',
        'ban_eos_token',
        'add_bos_token',
        'custom_token_bans',
        'skip_special_tokens',
        'auto_max_new_tokens',
    },
    'ExLlamav2_HF': {
        'temperature',
        'temperature_last',
        'dynamic_temperature',
        'dynatemp_low',
        'dynatemp_high',
        'dynatemp_exponent',
        'smoothing_factor',
        'smoothing_curve',
        'top_p',
        'min_p',
        'top_k',
        'typical_p',
        'epsilon_cutoff',
        'eta_cutoff',
        'tfs',
        'top_a',
        'repetition_penalty',
        'presence_penalty',
        'frequency_penalty',
        'repetition_penalty_range',
        'encoder_repetition_penalty',
        'no_repeat_ngram_size',
        'seed',
        'do_sample',
        'mirostat_mode',
        'mirostat_tau',
        'mirostat_eta',
        'grammar_file_row',
        'grammar_string',
        'guidance_scale',
        'negative_prompt',
        'ban_eos_token',
        'custom_token_bans',
        'sampler_priority',
        'add_bos_token',
        'skip_special_tokens',
        'auto_max_new_tokens',
    },
    'llama.cpp': {
        'temperature',
        'top_p',
        'min_p',
        'top_k',
        'typical_p',
        'tfs',
        'repetition_penalty',
        'presence_penalty',
        'frequency_penalty',
        'seed',
        'mirostat_mode',
        'mirostat_tau',
        'mirostat_eta',
        'grammar_file_row',
        'grammar_string',
        'ban_eos_token',
        'custom_token_bans',
    },
    'llamacpp_HF': {
        'temperature',
        'temperature_last',
        'dynamic_temperature',
        'dynatemp_low',
        'dynatemp_high',
        'dynatemp_exponent',
        'smoothing_factor',
        'smoothing_curve',
        'top_p',
        'min_p',
        'top_k',
        'typical_p',
        'epsilon_cutoff',
        'eta_cutoff',
        'tfs',
        'top_a',
        'repetition_penalty',
        'presence_penalty',
        'frequency_penalty',
        'repetition_penalty_range',
        'encoder_repetition_penalty',
        'no_repeat_ngram_size',
        'seed',
        'do_sample',
        'mirostat_mode',
        'mirostat_tau',
        'mirostat_eta',
        'grammar_file_row',
        'grammar_string',
        'guidance_scale',
        'negative_prompt',
        'ban_eos_token',
        'custom_token_bans',
        'sampler_priority',
        'add_bos_token',
        'skip_special_tokens',
        'auto_max_new_tokens',
    },
    'ctransformers': {
        'temperature',
        'top_p',
        'top_k',
        'repetition_penalty',
        'repetition_penalty_range',
    },
}
|
|
|
|
|
|
|
|
# Model-architecture choices offered per loader; "None" means auto-detect /
# unspecified.  Loaders absent from this dict take the default ["None"]
# (see get_model_types()).
loaders_model_types = {
    'GPTQ-for-LLaMa': [
        "None",
        "llama",
        "opt",
        "gptj"
    ],
    'ctransformers': [
        "None",
        "gpt2",
        "gptj",
        "gptneox",
        "llama",
        "mpt",
        "dollyv2",
        "replit",
        "starcoder",
        "gptbigcode",
        "falcon"
    ],
}
|
|
|
|
|
|
|
|
|
|
|
|
@functools.cache
def list_all_samplers():
    """Return a sorted list of every sampler name supported by any loader.

    The result is cached: `loaders_samplers` is a module-level constant, so
    the union only ever needs to be computed once.
    """
    # Union the per-loader sampler sets.  Iterating .values() with
    # set.update() replaces the original key-then-index double loop
    # (`for k in d: for s in d[k]: add(s)`), which did a redundant dict
    # lookup per loader and a Python-level add per element.
    all_samplers = set()
    for samplers in loaders_samplers.values():
        all_samplers.update(samplers)

    return sorted(all_samplers)
|
|
|
|
|
|
|
|
|
2024-01-09 04:42:31 +01:00
|
|
|
def blacklist_samplers(loader, dynamic_temperature):
    """Return one gr.update(visible=...) per known sampler, hiding the
    samplers the selected loader does not support.

    loader: loader name, or 'All' to treat every sampler as supported.
    dynamic_temperature: visibility to use for the 'dynatemp*' widgets,
        which are only shown while dynamic temperature is enabled.
    """
    def visibility(sampler):
        # Unsupported by this loader -> always hidden.
        if loader != 'All' and sampler not in loaders_samplers[loader]:
            return False

        # The dynatemp_* widgets additionally follow the checkbox state.
        if sampler.startswith('dynatemp'):
            return dynamic_temperature

        return True

    return [gr.update(visible=visibility(sampler)) for sampler in list_all_samplers()]
|
2023-08-01 03:44:00 +02:00
|
|
|
|
2023-06-17 00:00:37 +02:00
|
|
|
|
2023-08-11 19:41:33 +02:00
|
|
|
def get_model_types(loader):
    """Return the model-type choices for `loader`.

    Falls back to ["None"] for loaders with no model-type selector.
    """
    return loaders_model_types.get(loader, ["None"])
|
|
|
|
|
|
|
|
|
2023-06-17 00:00:37 +02:00
|
|
|
def get_gpu_memory_keys():
    """Return the shared.gradio keys for the per-GPU memory sliders
    (one 'gpu_memory*' entry per detected device)."""
    prefix = 'gpu_memory'
    return [name for name in shared.gradio if name.startswith(prefix)]
|
|
|
|
|
|
|
|
|
|
|
|
@functools.cache
def get_all_params():
    """Return a sorted list of every UI parameter used by any loader.

    The placeholder 'gpu_memory' entry is expanded into the concrete
    per-device slider keys.  Cached: the inputs are module-level constants.
    """
    params = {element for elements in loaders_and_params.values() for element in elements}

    # Replace the generic 'gpu_memory' placeholder with one key per GPU.
    if 'gpu_memory' in params:
        params.discard('gpu_memory')
        params.update(get_gpu_memory_keys())

    return sorted(params)
|
|
|
|
|
|
|
|
|
|
|
|
def make_loader_params_visible(loader):
    """Return one gr.update(visible=...) per UI parameter, showing only the
    parameters relevant to the selected loader.

    The updates are ordered to match get_all_params().
    """
    params = []
    all_params = get_all_params()
    if loader in loaders_and_params:
        # Copy the registry list: the expansion below removes 'gpu_memory'
        # and appends the per-device keys, and doing that on the original
        # list (as the previous code did via `params.remove(...)` and
        # `params += ...` on the aliased list) permanently mutated the
        # shared loaders_and_params entry on first call.
        params = list(loaders_and_params[loader])

    # Expand the 'gpu_memory' placeholder into the concrete slider keys.
    if 'gpu_memory' in params:
        params.remove('gpu_memory')
        params += get_gpu_memory_keys()

    return [gr.update(visible=True) if k in params else gr.update(visible=False) for k in all_params]
|