text-generation-webui/settings-template.json


{
    "max_new_tokens": 200,
    "max_new_tokens_min": 1,
    "max_new_tokens_max": 2000,
    "seed": -1,
    "name1": "You",
    "name2": "Assistant",
    "context": "This is a conversation with your Assistant. The Assistant is very helpful and is eager to chat with you and answer your questions.",
    "greeting": "",
    "end_of_turn": "",
    "custom_stopping_strings": "",
    "stop_at_newline": false,
    "add_bos_token": true,
    "ban_eos_token": false,
    "truncation_length": 2048,
    "truncation_length_min": 0,
    "truncation_length_max": 4096,
    "mode": "cai-chat",
    "chat_prompt_size": 2048,
    "chat_prompt_size_min": 0,
    "chat_prompt_size_max": 2048,
    "chat_generation_attempts": 1,
    "chat_generation_attempts_min": 1,
    "chat_generation_attempts_max": 5,
    "default_extensions": [],
    "chat_default_extensions": [
        "gallery"
    ],
    "presets": {
        "default": "Default",
        ".*(alpaca|llama)": "LLaMA-Precise",
        ".*pygmalion": "NovelAI-Storywriter",
        ".*RWKV": "Naive"
    },
    "prompts": {
        "default": "QA",
        ".*(gpt4chan|gpt-4chan|4chan)": "GPT-4chan",
        ".*oasst": "Open Assistant",
        ".*alpaca": "Alpaca"
    },
    "lora_prompts": {
        "default": "QA",
        ".*(alpaca-lora-7b|alpaca-lora-13b|alpaca-lora-30b)": "Alpaca"
    }
}