From 1f8cae14f9f3088e72f8a8a3fc78ae54ec68061d Mon Sep 17 00:00:00 2001
From: FartyPants
Date: Mon, 3 Jul 2023 16:41:18 -0400
Subject: [PATCH] Update training.py - correct use of lora_names (#2988)

---
 modules/training.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/training.py b/modules/training.py
index 7ead4b47..cdf7c591 100644
--- a/modules/training.py
+++ b/modules/training.py
@@ -283,7 +283,7 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
     else:
         model_id = "llama"
         if model_type == "PeftModelForCausalLM":
-            if len(shared.args.lora_names) > 0:
+            if len(shared.lora_names) > 0:
                 yield "You are trying to train a LoRA while you already have another LoRA loaded. This will work, but may have unexpected effects. *(Will continue anyway in 5 seconds, press `Interrupt` to stop.)*"
                 logger.warning("Training LoRA over top of another LoRA. May have unexpected effects.")
         else:
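
Note on the one-attribute change: in text-generation-webui, shared.args is the
argparse Namespace of launch flags (the relevant flag is --lora), while
shared.lora_names is a module-level list that tracks the LoRAs actually loaded
at runtime. Below is a minimal, self-contained sketch of that distinction; the
flag name and default are reconstructed from the project's conventions and the
exact shared.py layout is an assumption, not a quote of the real module.

    # Sketch only: stands in for modules/shared.py state (assumed layout).
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--lora', nargs='+', default=[])  # launch-time flag
    args = parser.parse_args([])       # stands in for shared.args

    lora_names = ['my-previous-lora']  # stands in for shared.lora_names,
                                       # updated as LoRAs load/unload at runtime

    # Old check: the Namespace has no `lora_names` attribute, so
    # len(args.lora_names) raises AttributeError instead of warning.
    # New check: reads the runtime list the warning is actually about.
    print(len(lora_names) > 0)

Checking the runtime list rather than a launch flag matters because LoRAs can
be loaded and unloaded through the UI after startup, so only shared.lora_names
reflects what is attached to the model when do_train() runs.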