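# Per-model settings keyed by regular expressions matched against the model
# name; this appears to be the text-generation-webui models/config.yaml format.
# Every matching entry is applied, so the specific patterns below are assumed
# to override the catch-all defaults defined under '.*'.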
.*:
  wbits: 'None'
  model_type: 'None'
  groupsize: 'None'
  pre_layer: 0
  mode: 'cai-chat'
  skip_special_tokens: true
  custom_stopping_strings: ''
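# Quantized-model name patterns: derive GPTQ parameters (wbits, groupsize,
# model_type) from naming conventions such as '4bit', 'int4', '3bit', '128g'.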
llama-[0-9]*b-4bit$:
  wbits: 4
  model_type: 'llama'
.*-(4bit|int4)-(gr128|128g):
  wbits: 4
  groupsize: 128
.*-(gr128|128g)-(4bit|int4):
  wbits: 4
  groupsize: 128
.*-3bit-(gr128|128g):
  wbits: 3
  groupsize: 128
.*-(gr128|128g)-3bit:
  wbits: 3
  groupsize: 128
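# Instruction-following models: switch to instruct mode and select the
# matching prompt template (plus quantization or decoding settings where needed).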
.*oasst-sft-1-pythia-12b:
  mode: 'instruct'
  instruction_template: 'Open Assistant'
.*vicuna:
  mode: 'instruct'
  instruction_template: 'Vicuna-v0'
.*alpaca:
  mode: 'instruct'
  instruction_template: 'Alpaca'
.*alpaca-native-4bit:
  mode: 'instruct'
  instruction_template: 'Alpaca'
  wbits: 4
  groupsize: 128
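# Keep special tokens in the decoded output for these model families.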
.*(galactica|oasst):
  skip_special_tokens: false
.*dolly-v[0-9]-[0-9]*b:
  mode: 'instruct'
  instruction_template: 'Alpaca'
  skip_special_tokens: false
  custom_stopping_strings: '"### End"'
.*koala:
  mode: 'instruct'
  instruction_template: 'Koala'
.*chatglm:
  mode: 'instruct'
  instruction_template: 'ChatGLM'
.*llava:
  mode: 'instruct'
  model_type: 'llama'
  instruction_template: 'LLaVA'
  custom_stopping_strings: '"\n###"'
.*raven:
  mode: 'instruct'
  instruction_template: 'RWKV-Raven'