mirror of https://github.com/oobabooga/text-generation-webui.git
synced 2024-10-01 01:26:03 -04:00
import functools
from pathlib import Path

import yaml


# Baseline sampling parameters; a preset only needs to override the keys it changes.
def default_preset():
    return {
        'do_sample': True,
        'temperature': 1,
        'top_p': 1,
        'typical_p': 1,
        'epsilon_cutoff': 0,
        'eta_cutoff': 0,
        'tfs': 1,
        'top_a': 0,
        'repetition_penalty': 1,
        'repetition_penalty_range': 0,
        'encoder_repetition_penalty': 1,
        'top_k': 0,
        'num_beams': 1,
        'penalty_alpha': 0,
        'min_length': 0,
        'length_penalty': 1,
        'no_repeat_ngram_size': 0,
        'early_stopping': False,
        'mirostat_mode': 0,
        'mirostat_tau': 5.0,
        'mirostat_eta': 0.1,
    }


def load_preset(name):
    generate_params = default_preset()
    if name not in ['None', None, '']:
        with open(Path(f'presets/{name}.yaml'), 'r') as infile:
            preset = yaml.safe_load(infile)

        # Overlay the preset's values on top of the defaults
        for k in preset:
            generate_params[k] = preset[k]

    # Cap temperature at 1.99
    generate_params['temperature'] = min(1.99, generate_params['temperature'])
    return generate_params
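
# Illustrative sketch, not part of the original file: a preset read by
# load_preset() only lists the keys it overrides; everything else keeps the
# value from default_preset(). A hypothetical presets/MyPreset.yaml could
# look like this:
#
#   do_sample: false
#   top_k: 4
#   penalty_alpha: 0.3
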
# Cached wrapper: repeated requests for the same preset name skip the disk read.
@functools.cache
def load_preset_memoized(name):
    return load_preset(name)


def load_preset_for_ui(name, state):
    generate_params = load_preset(name)
    state.update(generate_params)
    # Return the updated state dict followed by each parameter value individually
    return state, *[generate_params[k] for k in ['do_sample', 'temperature', 'top_p', 'typical_p', 'epsilon_cutoff', 'eta_cutoff', 'repetition_penalty', 'repetition_penalty_range', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping', 'mirostat_mode', 'mirostat_tau', 'mirostat_eta', 'tfs', 'top_a']]


def generate_preset_yaml(state):
    defaults = default_preset()
    data = {k: state[k] for k in ['do_sample', 'temperature', 'top_p', 'typical_p', 'epsilon_cutoff', 'eta_cutoff', 'repetition_penalty', 'repetition_penalty_range', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping', 'mirostat_mode', 'mirostat_tau', 'mirostat_eta', 'tfs', 'top_a']}

    # Remove entries that are identical to the defaults
    for k in list(data.keys()):
        if data[k] == defaults[k]:
            del data[k]

    return yaml.dump(data, sort_keys=False)
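

# Illustrative usage sketch, not part of the original module. load_preset('None')
# never touches the disk (that name is in the skip list), so this runs anywhere
# the module can be imported:
if __name__ == '__main__':
    params = load_preset('None')          # start from the defaults
    params['temperature'] = 0.7           # override a single value
    print(generate_preset_yaml(params))   # prints only the non-default keys, e.g. "temperature: 0.7"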