diff --git a/modules/LoRA.py b/modules/LoRA.py
index 97027eb4..be2a7c75 100644
--- a/modules/LoRA.py
+++ b/modules/LoRA.py
@@ -53,7 +53,15 @@ def add_lora_exllama(lora_names):
 
     lora_path = get_lora_path(lora_names[0])
     lora_config_path = lora_path / "adapter_config.json"
-    lora_adapter_path = lora_path / "adapter_model.bin"
+    # Prefer the newer safetensors adapter file; fall back to the legacy .bin.
+    # Default to the original .bin path so that when neither file exists the
+    # variable is still bound (no NameError) and the missing file is reported
+    # downstream exactly as before this change.
+    lora_adapter_path = lora_path / "adapter_model.bin"
+    for file_name in ["adapter_model.safetensors", "adapter_model.bin"]:
+        file_path = lora_path / file_name
+        if file_path.is_file():
+            lora_adapter_path = file_path
+            break
 
     logger.info("Applying the following LoRAs to {}: {}".format(shared.model_name, ', '.join([lora_names[0]])))
     if shared.model.__class__.__name__ == 'ExllamaModel':