Make custom LoRAs work by default #385

This commit is contained in:
oobabooga 2023-03-19 12:11:35 -03:00
parent 7073e96093
commit a78b6508fc
2 changed files with 2 additions and 2 deletions

View File

@@ -17,6 +17,6 @@ def add_lora_to_model(lora_name):
print(f"Adding the LoRA {lora_name} to the model...")
params = {}
#params['device_map'] = {'': 0}
params['device_map'] = {'': 0}
#params['dtype'] = shared.model.dtype
shared.model = PeftModel.from_pretrained(shared.model, Path(f"loras/{lora_name}"), **params)

View File

@@ -56,7 +56,7 @@ settings = {
},
'lora_prompts': {
'default': 'Common sense questions and answers\n\nQuestion: \nFactual answer:',
'alpaca-lora-7b': "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction:\nWrite a poem about the transformers Python library. \nMention the word \"large language models\" in that poem.\n### Response:\n"
'(alpaca-lora-7b|alpaca-lora-13b|alpaca-lora-30b)': "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction:\nWrite a poem about the transformers Python library. \nMention the word \"large language models\" in that poem.\n### Response:\n"
}
}