Mirror of https://github.com/tloen/alpaca-lora.git (synced 2024-10-01 01:05:56 -04:00)
Fix LoRa weight merging
This commit is contained in:
parent b8c32be806
commit dde89950f3
@@ -21,7 +21,12 @@ lora_model = PeftModel.from_pretrained(
     torch_dtype=torch.float16,
 )
 
-lora_model.eval() # merge weights
+# merge weights
+for layer in lora_model.base_model.model.model.layers:
+    layer.self_attn.q_proj.merge_weights = True
+    layer.self_attn.v_proj.merge_weights = True
+
+lora_model.train(False)
 
 lora_model_sd = lora_model.state_dict()
 
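Why the change: in the peft LoRA layers of that era, switching out of training mode folded the low-rank update into the base weight only when a layer's merge_weights flag was set, and that flag was off for a freshly loaded PeftModel. Plain lora_model.eval() therefore merged nothing; the fix enables the flag on the adapted q_proj/v_proj modules first, then calls lora_model.train(False) to trigger the merge. Below is a minimal sketch of the merge arithmetic itself, in plain PyTorch rather than peft; the names lora_A, lora_B and the toy shapes are illustrative assumptions, not alpaca-lora's code.

import torch
import torch.nn as nn

torch.manual_seed(0)

# Toy shapes standing in for an attention projection; real alpaca-lora
# operates on LLaMA's q_proj/v_proj. r and alpha mirror common LoRA
# hyperparameters (assumed values, for illustration only).
d, r, alpha = 16, 4, 8
scaling = alpha / r

base = nn.Linear(d, d, bias=False)     # frozen base weight W
lora_A = torch.randn(r, d) * 0.01      # LoRA down-projection A (r x d)
lora_B = torch.randn(d, r) * 0.01      # LoRA up-projection B (d x r)

x = torch.randn(2, d)

# Unmerged forward pass: base path plus low-rank delta, as during training.
y_unmerged = base(x) + (x @ lora_A.T @ lora_B.T) * scaling

# Merging folds the delta into the base weight in place:
#   W <- W + scaling * (B @ A)
# This is the operation that train(False) triggers on a LoRA layer whose
# merge_weights flag is set.
with torch.no_grad():
    base.weight += (lora_B @ lora_A) * scaling

# After the merge, the plain Linear reproduces the adapted output.
y_merged = base(x)
assert torch.allclose(y_unmerged, y_merged, atol=1e-6)

Once the merge has run, lora_model.state_dict() holds ordinary dense weights, which is presumably what lets the surrounding export code save a checkpoint loadable without peft.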