From ee4547cd34c0bb3ebbb862db159734135276baaf Mon Sep 17 00:00:00 2001
From: OWKenobi
Date: Tue, 4 Apr 2023 18:23:27 +0200
Subject: [PATCH] Detect "vicuna" as llama model type (#772)

---
 modules/GPTQ_loader.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/GPTQ_loader.py b/modules/GPTQ_loader.py
index e7877de7..917f58f3 100644
--- a/modules/GPTQ_loader.py
+++ b/modules/GPTQ_loader.py
@@ -52,7 +52,7 @@ def load_quantized(model_name):
     if not shared.args.model_type:
         # Try to determine model type from model name
         name = model_name.lower()
-        if any((k in name for k in ['llama', 'alpaca'])):
+        if any((k in name for k in ['llama', 'alpaca', 'vicuna'])):
             model_type = 'llama'
         elif any((k in name for k in ['opt-', 'galactica'])):
             model_type = 'opt'
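
Note: below is a minimal, self-contained sketch of the name-based detection
that the patched hunk performs, assuming only what the hunk itself shows.
The helper name infer_model_type is hypothetical and is not part of
modules/GPTQ_loader.py, where this check runs inline inside load_quantized().

def infer_model_type(model_name):
    # Mirrors the keyword check in the hunk above: any of these substrings
    # in the lowercased model name selects the corresponding loader path.
    name = model_name.lower()
    if any(k in name for k in ['llama', 'alpaca', 'vicuna']):
        return 'llama'
    elif any(k in name for k in ['opt-', 'galactica']):
        return 'opt'
    return None

# With this patch, a model folder name such as "vicuna-13b-GPTQ-4bit-128g"
# (hypothetical example) resolves to 'llama' automatically, instead of
# requiring shared.args.model_type to be set by the user.
print(infer_model_type("vicuna-13b-GPTQ-4bit-128g"))  # -> 'llama'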