text-generation-webui/settings-template.json

{
    "autoload_model": true,
    "max_new_tokens": 200,
    "max_new_tokens_min": 1,
    "max_new_tokens_max": 2000,
    "seed": -1,
    "character": "None",
    "name1": "You",
    "name2": "Assistant",
    "context": "This is a conversation with your Assistant. The Assistant is very helpful and is eager to chat with you and answer your questions.",
    "greeting": "",
    "turn_template": "",
    "custom_stopping_strings": "",
    "stop_at_newline": false,
    "add_bos_token": true,
    "ban_eos_token": false,
    "skip_special_tokens": true,
    "truncation_length": 2048,
    "truncation_length_min": 0,
    "truncation_length_max": 8192,
    "mode": "chat",
    "chat_style": "cai-chat",
    "instruction_template": "None",
    "chat_prompt_size": 2048,
    "chat_prompt_size_min": 0,
    "chat_prompt_size_max": 2048,
    "chat_generation_attempts": 1,
    "chat_generation_attempts_min": 1,
    "chat_generation_attempts_max": 5,
    "default_extensions": [],
    "chat_default_extensions": [
        "gallery"
    ],
    "presets": {
        "default": "Default",
        ".*(alpaca|llama|llava)": "LLaMA-Precise",
        ".*pygmalion": "NovelAI-Storywriter",
        ".*RWKV": "Naive",
        ".*moss": "MOSS"
    },
    "prompts": {
        "default": "QA",
        ".*(gpt4chan|gpt-4chan|4chan)": "GPT-4chan",
        ".*(oasst|stablelm-7b-sft-v7-epoch-3)": "Open Assistant",
        ".*(alpac|dolly)": "Alpaca",
        ".*mpt-.*instruct": "Alpaca",
        "(?!.*v0)(?!.*1.1)(?!.*1_1)(?!.*stable).*vicuna": "Vicuna v0",
        ".*vicuna.*v0": "Vicuna v0",
        ".*vicuna.*(1.1|1_1)": "Vicuna v1.1",
        ".*stable.*vicuna": "StableVicuna",
        ".*metharme": "Metharme",
        ".*guanaco": "Guanaco-Chat",
        ".*koala": "Koala",
        ".*stablelm-tuned": "StableLM",
        ".*wizardlm": "WizardLM",
        ".*galactica.*finetuned": "Galactica Finetuned",
        ".*galactica.*-v2": "Galactica v2",
        "(?!.*finetuned)(?!.*-v2).*galactica": "Galactica",
        ".*baize": "Baize",
        ".*mpt-.*chat": "MPT-Chat",
        "(?!.*-flan-)(?!.*-t5-).*lamini-": "Alpaca",
        ".*incite.*chat": "INCITE-Chat",
        ".*incite.*instruct": "INCITE-Instruct"
    },
    "lora_prompts": {
        "default": "QA",
        ".*alpaca": "Alpaca",
        ".*baize": "Baize"
    }
}
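
The "presets", "prompts", and "lora_prompts" objects map regular-expression keys to the preset or prompt to load when the model (or LoRA) name matches, with "default" as the fallback entry. The Python sketch below shows one way such a map could be resolved for a given model name; the helper name resolve_by_pattern, the case-insensitive matching, and the first-match-wins order are illustrative assumptions, not the webui's actual implementation.

import json
import re

def resolve_by_pattern(mapping: dict, model_name: str) -> str:
    """Return the value whose regex key matches model_name, else the 'default' entry."""
    for pattern, value in mapping.items():
        if pattern == "default":
            continue
        # Assumption: keys are anchored at the start of the name and matched
        # case-insensitively; the first matching pattern wins.
        if re.match(pattern, model_name, flags=re.IGNORECASE):
            return value
    return mapping.get("default")

# The template is typically copied to settings.json before use.
with open("settings.json") as f:
    settings = json.load(f)

# "llama-7b" matches ".*(alpaca|llama|llava)" -> "LLaMA-Precise";
# "gpt-j-6b" matches no pattern -> falls back to "Default".
print(resolve_by_pattern(settings["presets"], "llama-7b"))
print(resolve_by_pattern(settings["presets"], "gpt-j-6b"))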