text-generation-webui/modules/ui.py

283 lines
8.1 KiB
Python
Raw Normal View History

import copy
from pathlib import Path

import gradio as gr
import torch
import yaml
from transformers import is_torch_xpu_available

import extensions
from modules import shared


def _read_asset(relative_path):
    """Read a css/js asset shipped next to this module and return it as text.

    The explicit UTF-8 encoding matters: these assets contain non-ASCII
    characters, and the previous bare open() used the locale default
    encoding, which is not UTF-8 on some platforms (e.g. Windows cp1252).
    """
    return (Path(__file__).resolve().parent / relative_path).read_text(encoding='utf-8')


# Stylesheets, concatenated in load order: fonts first, then the app's own
# styles, then bundled third-party assets (KaTeX, highlight.js).
css = _read_asset('../css/NotoSans/stylesheet.css')
css += _read_asset('../css/main.css')
css += _read_asset('../css/katex/katex.min.css')
css += _read_asset('../css/highlightjs/github-dark.min.css')
css += _read_asset('../css/highlightjs/highlightjs-copy.min.css')

# Standalone JS snippets injected into the Gradio app.
js = _read_asset('../js/main.js')
save_files_js = _read_asset('../js/save_files.js')
switch_tabs_js = _read_asset('../js/switch_tabs.js')
show_controls_js = _read_asset('../js/show_controls.js')
update_big_picture_js = _read_asset('../js/update_big_picture.js')

# Button glyphs shared across the UI.
refresh_symbol = '🔄'
delete_symbol = '🗑️'
save_symbol = '💾'
# Gradio theme: Noto Sans for body text and IBM Plex Mono for code, with a
# few light-mode color/padding overrides applied on top of the defaults.
_base_theme = gr.themes.Default(
    font=['Noto Sans', 'Helvetica', 'ui-sans-serif', 'system-ui', 'sans-serif'],
    font_mono=['IBM Plex Mono', 'ui-monospace', 'Consolas', 'monospace'],
)
theme = _base_theme.set(
    border_color_primary='#c5c5d2',
    button_large_padding='6px 12px',
    body_text_color_subdued='#484848',
    background_fill_secondary='#eaeaea',
    background_fill_primary='var(--neutral-50)',
)
# JS snippet that plays the optional notification sound; empty (a no-op)
# when the user has not placed a notification.mp3 in the working directory.
_has_notification_sound = Path("notification.mp3").exists()
audio_notification_js = (
    "document.querySelector('#audio_notification audio')?.play();"
    if _has_notification_sound
    else ""
)
def list_model_elements():
    """Return the names of all model-loading UI input elements.

    The trailing 'gpu_memory_<i>' entries are generated dynamically, one
    per visible accelerator device (XPU when available, otherwise CUDA).
    """
    elements = [
        'loader',
        'filter_by_loader',
        'cpu_memory',
        'auto_devices',
        'disk',
        'cpu',
        'bf16',
        'load_in_8bit',
        'trust_remote_code',
        'no_use_fast',
        'use_flash_attention_2',
        'load_in_4bit',
        'compute_dtype',
        'quant_type',
        'use_double_quant',
        'wbits',
        'groupsize',
        'triton',
        'desc_act',
        'no_inject_fused_attention',
        'no_inject_fused_mlp',
        'no_use_cuda_fp16',
        'disable_exllama',
        'disable_exllamav2',
        'cfg_cache',
        'no_flash_attn',
        'num_experts_per_token',
        'cache_8bit',
        'cache_4bit',
        'autosplit',
        'threads',
        'threads_batch',
        'n_batch',
        'no_mmap',
        'mlock',
        'no_mul_mat_q',
        'n_gpu_layers',
        'tensor_split',
        'n_ctx',
        'gpu_split',
        'max_seq_len',
        'compress_pos_emb',
        'alpha_value',
        'rope_freq_base',
        'numa',
        'logits_all',
        'no_offload_kqv',
        'row_split',
        'tensorcores',
        'flash-attn',
        'streaming_llm',
        'attention_sink_size',
        'hqq_backend',
    ]

    # One per-device memory slider; XPU takes precedence over CUDA.
    if is_torch_xpu_available():
        device_count = torch.xpu.device_count()
    else:
        device_count = torch.cuda.device_count()

    elements.extend(f'gpu_memory_{i}' for i in range(device_count))
    return elements
def list_interface_input_elements():
    """Return the names of every UI input element tracked in the interface
    state, in the fixed order that gather_interface_values() expects its
    positional arguments to arrive in.
    """
    # Generation / sampling parameters
    generation_elements = [
        'max_new_tokens',
        'auto_max_new_tokens',
        'max_tokens_second',
        'max_updates_second',
        'prompt_lookup_num_tokens',
        'seed',
        'temperature',
        'temperature_last',
        'dynamic_temperature',
        'dynatemp_low',
        'dynatemp_high',
        'dynatemp_exponent',
        'smoothing_factor',
        'smoothing_curve',
        'top_p',
        'min_p',
        'top_k',
        'typical_p',
        'epsilon_cutoff',
        'eta_cutoff',
        'repetition_penalty',
        'presence_penalty',
        'frequency_penalty',
        'repetition_penalty_range',
        'encoder_repetition_penalty',
        'no_repeat_ngram_size',
        'dry_multiplier',
        'dry_base',
        'dry_allowed_length',
        'dry_sequence_breakers',
        'do_sample',
        'penalty_alpha',
        'mirostat_mode',
        'mirostat_tau',
        'mirostat_eta',
        'grammar_string',
        'negative_prompt',
        'guidance_scale',
        'add_bos_token',
        'ban_eos_token',
        'custom_token_bans',
        'sampler_priority',
        'truncation_length',
        'custom_stopping_strings',
        'skip_special_tokens',
        'stream',
        'tfs',
        'top_a',
    ]

    # Chat tab elements
    chat_elements = [
        'textbox',
        'start_with',
        'character_menu',
        'history',
        'name1',
        'user_bio',
        'name2',
        'greeting',
        'context',
        'mode',
        'custom_system_message',
        'instruction_template_str',
        'chat_template_str',
        'chat_style',
        'chat-instruct_command',
    ]

    # Notebook/default tab elements
    notebook_elements = [
        'textbox-notebook',
        'textbox-default',
        'output_textbox',
        'prompt_menu-default',
        'prompt_menu-notebook',
    ]

    # Model-loading elements come last.
    return generation_elements + chat_elements + notebook_elements + list_model_elements()
def gather_interface_values(*args):
    """Zip the positional values coming from Gradio into a dict keyed by
    element name (order defined by list_interface_input_elements()).

    Unless running in multi-user mode, the result is also cached in
    shared.persistent_interface_state so it can be restored later.
    """
    names = list_interface_input_elements()
    # Indexing into args (rather than zip) deliberately raises IndexError
    # if fewer values than elements are supplied.
    output = {name: args[idx] for idx, name in enumerate(names)}

    if not shared.args.multi_user:
        shared.persistent_interface_state = output

    return output
def apply_interface_values(state, use_persistent=False):
    """Produce the list of Gradio updates that restores a saved interface
    state, one entry per element in list_interface_input_elements().

    With use_persistent=True, the given state is ignored in favor of
    shared.persistent_interface_state. Elements missing from the state
    receive a no-op gr.update().
    """
    if use_persistent:
        state = shared.persistent_interface_state

    element_names = list_interface_input_elements()
    if not state:
        # Nothing stored yet: emit a no-op update for every element.
        return [gr.update() for _ in element_names]

    return [state.get(name, gr.update()) for name in element_names]
def save_settings(state, preset, extensions_list, show_controls, theme_state):
    """Serialize the current UI state into a YAML settings string.

    Starts from a deep copy of shared.settings, overlays the values from
    `state` (plus the explicit preset/prompt/character/extension fields),
    then strips anything still equal to the application defaults so the
    output contains only user-modified settings.
    """
    output = copy.deepcopy(shared.settings)
    exclude = ['name2', 'greeting', 'context', 'turn_template', 'truncation_length']
    for key, value in state.items():
        if key in shared.settings and key not in exclude:
            output[key] = value

    output['preset'] = preset
    output['prompt-default'] = state['prompt_menu-default']
    output['prompt-notebook'] = state['prompt_menu-notebook']
    output['character'] = state['character_menu']
    output['default_extensions'] = extensions_list
    output['seed'] = int(output['seed'])
    output['show_controls'] = show_controls
    output['dark_theme'] = theme_state == 'dark'

    # Extension parameters shown in the UI, saved under "<extension>-<param>"
    # keys — but only when they differ from their defaults.
    for extension_name in extensions_list:
        extension = getattr(extensions, extension_name, None)
        if not extension:
            continue

        script = extension.script
        if not hasattr(script, 'params'):
            continue

        params = script.params
        for param, value in params.items():
            if param not in shared.default_settings or value != shared.default_settings[param]:
                output[f"{extension_name}-{param}"] = value

    # Do not save unchanged settings.
    for key in list(output.keys()):
        if key in shared.default_settings and output[key] == shared.default_settings[key]:
            del output[key]

    return yaml.dump(output, sort_keys=False, width=float("inf"))
def create_refresh_button(refresh_component, refresh_method, refreshed_args, elem_class, interactive=True):
    """
    Copied from https://github.com/AUTOMATIC1111/stable-diffusion-webui

    Create a 🔄 button that, when clicked, calls `refresh_method()` and then
    applies `refreshed_args` (a dict of component properties, or a callable
    returning one) to `refresh_component` via gr.update().
    """
    def refresh():
        # Refresh the underlying data first, then compute the new component
        # properties (e.g. an updated list of dropdown choices).
        refresh_method()
        args = refreshed_args() if callable(refreshed_args) else refreshed_args

        return gr.update(**(args or {}))

    refresh_button = gr.Button(refresh_symbol, elem_classes=elem_class, interactive=interactive)
    refresh_button.click(
        # Convert list-valued properties to tuples — presumably so Gradio
        # treats them as a fresh value; confirm against gradio's update
        # handling. BUGFIX: the original tested `type(k) is list`, but the
        # keys of a gr.update() dict are always strings, so the intended
        # conversion of the *values* never fired.
        fn=lambda: {k: tuple(v) if isinstance(v, list) else v for k, v in refresh().items()},
        inputs=[],
        outputs=[refresh_component]
    )

    return refresh_button