Sort the loaders menu

oobabooga 2023-11-28 18:41:11 -08:00
parent f4b956b47c
commit 6e51bae2e0


@@ -27,6 +27,26 @@ loaders_and_params = OrderedDict({
'disable_exllama',
'transformers_info'
],
'llamacpp_HF': [
'n_ctx',
'n_gpu_layers',
'tensor_split',
'n_batch',
'threads',
'threads_batch',
'no_mmap',
'mlock',
'no_mul_mat_q',
'alpha_value',
'rope_freq_base',
'compress_pos_emb',
'cpu',
'numa',
'cfg_cache',
'no_use_fast',
'logits_all',
'llamacpp_HF_info',
],
'ExLlama_HF': [
'gpu_split',
'max_seq_len',
@@ -47,22 +67,6 @@ loaders_and_params = OrderedDict({
'compress_pos_emb',
'no_use_fast',
],
'ExLlama': [
'gpu_split',
'max_seq_len',
'alpha_value',
'rope_freq_base',
'compress_pos_emb',
'exllama_info',
],
'ExLlamav2': [
'gpu_split',
'max_seq_len',
'no_flash_attn',
'cache_8bit',
'alpha_value',
'compress_pos_emb',
],
'AutoGPTQ': [
'triton',
'no_inject_fused_attention',
@@ -105,25 +109,30 @@ loaders_and_params = OrderedDict({
'cpu',
'numa',
],
'llamacpp_HF': [
'n_ctx',
'n_gpu_layers',
'tensor_split',
'n_batch',
'threads',
'threads_batch',
'no_mmap',
'mlock',
'no_mul_mat_q',
'ExLlama': [
'gpu_split',
'max_seq_len',
'alpha_value',
'rope_freq_base',
'compress_pos_emb',
'cpu',
'numa',
'cfg_cache',
'exllama_info',
],
'ExLlamav2': [
'gpu_split',
'max_seq_len',
'no_flash_attn',
'cache_8bit',
'alpha_value',
'compress_pos_emb',
],
'AutoAWQ': [
'cpu_memory',
'gpu_memory',
'auto_devices',
'max_seq_len',
'no_inject_fused_attention',
'trust_remote_code',
'no_use_fast',
'logits_all',
'llamacpp_HF_info',
],
'ctransformers': [
'n_ctx',
@@ -134,15 +143,6 @@ loaders_and_params = OrderedDict({
'no_mmap',
'mlock'
],
'AutoAWQ': [
'cpu_memory',
'gpu_memory',
'auto_devices',
'max_seq_len',
'no_inject_fused_attention',
'trust_remote_code',
'no_use_fast',
]
})
loaders_samplers = {
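
The commit only moves entries around inside the loaders_and_params literal; since Python dicts (and OrderedDict) preserve insertion order, reordering the literal is what re-sorts the loaders dropdown. Below is a minimal sketch of that idea, not the project's actual code: the parameter lists are abbreviated, and loader_choices / params_for are hypothetical helpers standing in for however the UI consumes this dict.

from collections import OrderedDict

# Illustrative stand-in for the full dict shown in the diff above.
loaders_and_params = OrderedDict({
    'Transformers': ['cpu_memory', 'gpu_memory', 'transformers_info'],
    'llama.cpp': ['n_ctx', 'n_gpu_layers', 'tensor_split'],
    'llamacpp_HF': ['n_ctx', 'n_gpu_layers', 'cfg_cache', 'llamacpp_HF_info'],
    'ExLlama_HF': ['gpu_split', 'max_seq_len'],
    'ExLlamav2_HF': ['gpu_split', 'max_seq_len', 'cache_8bit'],
})


def loader_choices():
    # Hypothetical helper: the dropdown lists the keys in insertion order,
    # so moving an entry in the literal moves it in the menu.
    return list(loaders_and_params.keys())


def params_for(loader):
    # Hypothetical helper: which input widgets to show for the selected loader.
    return loaders_and_params.get(loader, [])


print(loader_choices())
# ['Transformers', 'llama.cpp', 'llamacpp_HF', 'ExLlama_HF', 'ExLlamav2_HF']
print(params_for('llamacpp_HF'))
# ['n_ctx', 'n_gpu_layers', 'cfg_cache', 'llamacpp_HF_info']

Under this assumption, no sorting code is needed anywhere else: grouping llamacpp_HF next to llama.cpp, and ExLlama/ExLlamav2 next to their _HF variants, in the literal is sufficient to change the menu order.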