Remove unused parameter in AutoAWQ

oobabooga 2023-10-23 20:45:43 -07:00
parent 1edf321362
commit ef1489cd4d
3 changed files with 1 addition and 4 deletions


@@ -20,8 +20,6 @@
   model_type: 'dollyv2'
 .*replit:
   model_type: 'replit'
-.*AWQ:
-  n_batch: 1
 .*(oasst|openassistant-|stablelm-7b-sft-v7-epoch-3):
   instruction_template: 'Open Assistant'
   skip_special_tokens: false

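The keys in this YAML file are regular expressions matched against the model name, so removing the .*AWQ entry means AWQ models no longer pick up a per-model n_batch override. Below is a minimal sketch of how regex-keyed settings like these could be applied; the settings_for_model helper and the models/config.yaml path are illustrative assumptions, not the project's actual code.

# Illustrative sketch only; the real webui applies these settings differently.
import re

import yaml


def settings_for_model(model_name, config_path='models/config.yaml'):  # path is an assumption
    with open(config_path) as f:
        config = yaml.safe_load(f)

    settings = {}
    for pattern, values in config.items():
        # Each top-level key (e.g. '.*replit') is a regex tested against the model name.
        if re.match(pattern.lower(), model_name.lower()):
            settings.update(values)

    return settings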

@@ -135,7 +135,6 @@ loaders_and_params = OrderedDict({
         'gpu_memory',
         'auto_devices',
         'max_seq_len',
-        'n_batch',
         'no_inject_fused_attention',
         'trust_remote_code',
         'use_fast',

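After this hunk, the loader no longer exposes an n_batch option in the UI. The entry then has roughly the shape sketched below; only the parameters visible in the hunk are listed, and the 'AutoAWQ' key plus any omitted entries are assumptions based on context.

# Rough shape of the loader entry after the change (truncated to the lines shown above).
from collections import OrderedDict

loaders_and_params = OrderedDict({
    'AutoAWQ': [  # key name assumed from context
        'gpu_memory',
        'auto_devices',
        'max_seq_len',
        # 'n_batch' removed: AutoAWQ now always loads with batch_size=1.
        'no_inject_fused_attention',
        'trust_remote_code',
        'use_fast',
    ],
})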

@@ -298,7 +298,7 @@ def AutoAWQ_loader(model_name):
         trust_remote_code=shared.args.trust_remote_code,
         fuse_layers=not shared.args.no_inject_fused_attention,
         max_memory=get_max_memory_dict(),
-        batch_size=shared.args.n_batch,
+        batch_size=1,
         safetensors=any(model_dir.glob('*.safetensors')),
     )
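
For context, this is roughly how AutoAWQ_loader reads after the change. Only the keyword arguments visible in the hunk come from the diff; the lazy awq import, the model_dir construction, the quant_path and max_new_tokens arguments, and the return statement are assumptions sketched in for completeness, and get_max_memory_dict is a helper defined elsewhere in the same module.

# Rough reconstruction, not the verbatim file contents.
from pathlib import Path

from modules import shared  # webui's parsed command-line args (assumed import path)


def AutoAWQ_loader(model_name):
    # AutoAWQ is an optional dependency, so import it lazily (assumption).
    from awq import AutoAWQForCausalLM

    model_dir = Path(f'{shared.args.model_dir}/{model_name}')

    model = AutoAWQForCausalLM.from_quantized(
        quant_path=model_dir.as_posix(),                 # assumed, not visible in the hunk
        max_new_tokens=shared.args.max_seq_len,          # assumed, not visible in the hunk
        trust_remote_code=shared.args.trust_remote_code,
        fuse_layers=not shared.args.no_inject_fused_attention,
        max_memory=get_max_memory_dict(),                # helper defined in the same module
        batch_size=1,                                    # was shared.args.n_batch before this commit
        safetensors=any(model_dir.glob('*.safetensors')),
    )

    return model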