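# Per-model settings. Each top-level key is a regular expression matched against
# the model name; the catch-all '.*' entry below supplies the defaults, and
# matching entries further down override individual fields.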
.*:
  wbits: 'None'
  model_type: 'None'
  groupsize: 'None'
  pre_layer: 0
  mode: 'cai-chat'
  skip_special_tokens: true
  custom_stopping_strings: ''
llama-[0-9]*b-4bit$:
  wbits: 4
  model_type: 'llama'
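# 4-bit and 3-bit quantized (GPTQ-style) checkpoints that encode a group size of 128 in their name.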
.*-(4bit|int4)-(gr128|128g):
  wbits: 4
  groupsize: 128
.*-(gr128|128g)-(4bit|int4):
  wbits: 4
  groupsize: 128
.*-3bit-(gr128|128g):
  wbits: 3
  groupsize: 128
.*-(gr128|128g)-3bit:
  wbits: 3
  groupsize: 128
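# Instruction-following models: use instruct mode with the matching prompt template.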
.*oasst-sft-1-pythia-12b:
  mode: 'instruct'
  instruction_template: 'Open Assistant'
.*vicuna:
  mode: 'instruct'
  instruction_template: 'Vicuna-v0'
.*alpaca:
  mode: 'instruct'
  instruction_template: 'Alpaca'
.*alpaca-native-4bit:
  mode: 'instruct'
  instruction_template: 'Alpaca'
  wbits: 4
  groupsize: 128
.*(galactica|oasst):
  skip_special_tokens: false
.*dolly-v[0-9]-[0-9]*b:
  mode: 'instruct'
  instruction_template: 'Alpaca'
  skip_special_tokens: false
  custom_stopping_strings: '"### End"'
.*koala:
  mode: 'instruct'
  instruction_template: 'Koala'
.*chatglm:
  mode: 'instruct'
  instruction_template: 'ChatGLM'
.*llava:
  mode: 'instruct'
  model_type: 'llama'
  instruction_template: 'LLaVA'
  custom_stopping_strings: '"\n###"'
.*raven:
  mode: 'instruct'
  instruction_template: 'RWKV-Raven'