# gpt4all/configs/train/finetune.yaml
# model/tokenizer
model_name: # add model here (a Hugging Face model id or local path)
tokenizer_name: # add tokenizer here (usually the same id as model_name)
gradient_checkpointing: true # trades extra compute for lower activation memory
save_name: "nomic-ai/gpt4all-full-multi-turn"
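# The fields above are typically consumed via Hugging Face transformers; a
# minimal sketch (assuming the standard transformers API, not necessarily this
# repo's exact trainer code):
#   from transformers import AutoModelForCausalLM, AutoTokenizer
#   model = AutoModelForCausalLM.from_pretrained(config["model_name"], use_cache=False)
#   tokenizer = AutoTokenizer.from_pretrained(config["tokenizer_name"])
#   model.gradient_checkpointing_enable()  # use_cache must be off when checkpointing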
# dataset
streaming: false # set true to stream the dataset rather than load it in full
num_proc: 64 # worker processes for dataset preprocessing
dataset_path: # update
max_length: 1024 # maximum sequence length in tokens
batch_size: 32 # train batch size
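# At these settings each step sees at most batch_size * max_length
# = 32 * 1024 = 32,768 tokens per device (fewer if examples are not
# packed or padded to full length).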
# train dynamics
lr: 5.0e-5
eval_every: 800 # run evaluation every N optimizer steps
eval_steps: 100 # batches per evaluation pass
save_every: 800 # checkpoint every N optimizer steps
output_dir: "ckpts/gpt4all-full-multi"
checkpoint: null # path to resume from, or null to start fresh
lora: false # full finetune; set true to train low-rank adapters instead
warmup_steps: 100 # LR warmup steps
num_epochs: 2
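# Note: eval_every and save_every are both 800, so each evaluation is
# immediately followed by a checkpoint; after the 100 warmup steps the LR
# schedule is whatever the training script defaults to.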
# logging
wandb: true # log metrics to Weights & Biases
wandb_entity: # update
wandb_project_name: # update
seed: 42
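# Example launch (a hypothetical invocation; the actual script name and flags
# depend on this repo's trainer):
#   accelerate launch train.py --config configs/train/finetune.yaml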