Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-10-01 01:26:03 -04:00.
Commit 280ae720d7 ("Organize"); parent commit 49e5eecce4.
@ -8,14 +8,6 @@ from modules.logging_colors import logger
|
||||
from modules.models import reload_model
|
||||
|
||||
|
||||
def merge_loras():
    """Merge all loaded LoRA adapters into a single "__merged" adapter and activate it.

    PEFT cannot merge adapters whose ranks differ: in that case a warning is
    logged, no merge is attempted, and only the first adapter stays active.
    """
    # Distinct ranks across every loaded PEFT adapter (iterating the dict
    # yields its keys directly; no need for .keys() or a list() wrapper).
    ranks = {shared.model.peft_config[adapter].r for adapter in shared.model.peft_config}
    if len(ranks) > 1:
        logger.warning("The loaded LoRAs cannot be merged, as they have dissimilar ranks. Only the first one will be active.")
        return

    # Merge with equal weight (1.0) for each adapter, then switch to the result.
    shared.model.add_weighted_adapter(shared.lora_names, [1] * len(shared.lora_names), "__merged")
    shared.model.set_adapter("__merged")
|
||||
|
||||
def add_lora_to_model(lora_names):
|
||||
if 'GPTQForCausalLM' in shared.model.__class__.__name__ or shared.args.loader == 'AutoGPTQ':
|
||||
add_lora_autogptq(lora_names)
|
||||
@ -189,3 +181,12 @@ def add_lora_transformers(lora_names):
|
||||
shared.model = shared.model.to(device)
|
||||
else:
|
||||
shared.model = shared.model.cuda()
|
||||
|
||||
|
||||
def merge_loras():
    """Combine every loaded LoRA into one "__merged" adapter and switch to it.

    Bails out with a warning when the loaded adapters have dissimilar ranks,
    since such adapters cannot be merged; the first adapter then remains the
    active one.
    """
    adapter_ranks = [shared.model.peft_config[name].r for name in shared.model.peft_config.keys()]
    if len(set(adapter_ranks)) > 1:
        logger.warning("The loaded LoRAs cannot be merged, as they have dissimilar ranks. Only the first one will be active.")
        return

    equal_weights = [1] * len(shared.lora_names)
    shared.model.add_weighted_adapter(shared.lora_names, equal_weights, "__merged")
    shared.model.set_adapter("__merged")
|
||||
|
Loading…
Reference in New Issue
Block a user