Fix label masking length when setting add_eos_token=True and train_on_inputs=False (#306)

Co-authored-by: muximus3 <muximus10@gmail.com>
Toshiro Mifune 2023-04-14 04:37:44 +08:00 committed by GitHub
parent a472672459
commit 179f3974f8


@@ -47,6 +47,7 @@ def train(
     ],
     # llm hyperparams
     train_on_inputs: bool = True,  # if False, masks out inputs in loss
+    add_eos_token: bool = False,
     group_by_length: bool = False,  # faster, but produces an odd training loss curve
     # wandb params
     wandb_project: str = "",
@@ -73,6 +74,7 @@ def train(
         f"lora_dropout: {lora_dropout}\n"
         f"lora_target_modules: {lora_target_modules}\n"
         f"train_on_inputs: {train_on_inputs}\n"
+        f"add_eos_token: {add_eos_token}\n"
         f"group_by_length: {group_by_length}\n"
         f"wandb_project: {wandb_project}\n"
         f"wandb_run_name: {wandb_run_name}\n"
@@ -154,9 +156,12 @@ def train(
             user_prompt = prompter.generate_prompt(
                 data_point["instruction"], data_point["input"]
             )
-            tokenized_user_prompt = tokenize(user_prompt, add_eos_token=False)
+            tokenized_user_prompt = tokenize(user_prompt, add_eos_token=add_eos_token)
             user_prompt_len = len(tokenized_user_prompt["input_ids"])
+            if add_eos_token:
+                user_prompt_len -= 1
             tokenized_full_prompt["labels"] = [
                 -100
             ] * user_prompt_len + tokenized_full_prompt["labels"][
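
For context, a minimal, self-contained sketch of the masking logic this hunk fixes (the helper name and EOS id below are illustrative, not the repository's exact code): when add_eos_token=True the tokenized user prompt also gets a trailing EOS, so using its raw length as the mask length would hide the first response token from the loss; subtracting one keeps the -100 mask covering only the prompt.

# Minimal sketch, assuming a hypothetical mask_prompt_labels helper.
EOS_TOKEN_ID = 2  # assumed LLaMA EOS id

def mask_prompt_labels(full_ids, prompt_ids, add_eos_token):
    """Return labels with prompt positions set to -100 (ignored by the loss)."""
    prompt_len = len(prompt_ids)
    # With add_eos_token=True the tokenized prompt carries a trailing EOS that
    # is not a prompt token inside the full sequence, so drop it from the
    # masked length; otherwise the first response token would be masked too.
    if add_eos_token:
        prompt_len -= 1
    return [-100] * prompt_len + full_ids[prompt_len:]

# Toy example: prompt ids [10, 11], response ids [20, 21], EOS appended to both.
full_ids = [10, 11, 20, 21, EOS_TOKEN_ID]
prompt_ids_with_eos = [10, 11, EOS_TOKEN_ID]
print(mask_prompt_labels(full_ids, prompt_ids_with_eos, add_eos_token=True))
# -> [-100, -100, 20, 21, 2]  (response tokens and final EOS still supervised)

Without the one-token adjustment, the same call would return [-100, -100, -100, 21, 2], silently dropping the first response token from training whenever add_eos_token=True and train_on_inputs=False.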