UI: update context upper limit to 200000

oobabooga 2023-12-04 15:48:34 -08:00
parent f7145544f9
commit 131a5212ce
3 changed files with 3 additions and 3 deletions

@@ -43,7 +43,7 @@ settings = {
     'seed': -1,
     'truncation_length': 2048,
     'truncation_length_min': 0,
-    'truncation_length_max': 32768,
+    'truncation_length_max': 200000,
     'max_tokens_second': 0,
     'custom_stopping_strings': '',
     'custom_token_bans': '',
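
Note: 'truncation_length' is the token budget prompts are truncated to, while 'truncation_length_max' only sets the upper bound of the corresponding UI slider (raised here to 200000). A minimal sketch of the kind of truncation this setting governs, as an illustration rather than the project's own code (the token list and function name are hypothetical):

# Minimal sketch (assumption): keep only the most recent tokens of a prompt,
# up to the configured truncation length.
def truncate_prompt(prompt_ids: list[int], truncation_length: int = 2048) -> list[int]:
    # Keep the tail of the conversation so the newest context survives.
    return prompt_ids[-truncation_length:]

# Example: a 3000-token prompt is cut down to its last 2048 tokens.
ids = list(range(3000))
assert len(truncate_prompt(ids)) == 2048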

@@ -165,7 +165,7 @@ def create_ui():
                 stride_length = gr.Slider(label='Stride', minimum=0, maximum=32768, value=512, step=256, info='Used to make the evaluation faster at the cost of accuracy. 1 = slowest but most accurate. 512 is a common value.')
             with gr.Column():
-                max_length = gr.Slider(label='max_length', minimum=0, maximum=32768, value=0, step=256, info='The context for each evaluation. If set to 0, the maximum context length for the model will be used.')
+                max_length = gr.Slider(label='max_length', minimum=0, maximum=shared.settings['truncation_length_max'], value=0, step=256, info='The context for each evaluation. If set to 0, the maximum context length for the model will be used.')
         with gr.Row():
             start_current_evaluation = gr.Button("Evaluate loaded model", interactive=not mu)
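
This hunk replaces the hardcoded 32768 ceiling with the shared 'truncation_length_max' setting, so the evaluation slider follows the same upper limit as the rest of the UI. A minimal standalone sketch of that pattern; the settings dict and block names are hypothetical, and only the gr.Slider keyword arguments already used in the hunk above are assumed:

# Minimal sketch (hypothetical settings dict): bound a slider's maximum to a
# shared setting instead of a hardcoded constant.
import gradio as gr

settings = {'truncation_length_max': 200000}  # mirrors the new default above

with gr.Blocks() as demo:
    max_length = gr.Slider(
        label='max_length',
        minimum=0,
        maximum=settings['truncation_length_max'],  # previously a fixed 32768
        value=0,
        step=256,
    )

if __name__ == '__main__':
    demo.launch()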

@@ -13,7 +13,7 @@ seed: -1
 negative_prompt: ''
 truncation_length: 2048
 truncation_length_min: 0
-truncation_length_max: 32768
+truncation_length_max: 200000
 custom_stopping_strings: ''
 auto_max_new_tokens: false
 max_tokens_second: 0
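
The template values mirror the Python defaults in the first hunk, and a user-supplied YAML file can override them. A minimal sketch of that override pattern, as an assumption rather than the project's exact loader (the 'settings.yaml' file name here is illustrative), using PyYAML:

# Minimal sketch (assumption): merge user-provided YAML settings over the
# built-in defaults shown in the hunks above.
import yaml

settings = {
    'truncation_length': 2048,
    'truncation_length_min': 0,
    'truncation_length_max': 200000,
}

with open('settings.yaml') as f:       # hypothetical user override file
    user_settings = yaml.safe_load(f) or {}

settings.update(user_settings)         # user values take precedence
print(settings['truncation_length_max'])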