# gpt4all/configs/train/finetune_gptj_lora.yaml

# model/tokenizer
model_name: "EleutherAI/gpt-j-6b"
tokenizer_name: "EleutherAI/gpt-j-6b"
gradient_checkpointing: false  # trade extra compute for lower memory when true
save_name: "nomic-ai/gpt4all-mosaic"  # name used when saving the finetuned model
# dataset
streaming: false  # load the full dataset rather than streaming from the Hub
num_proc: 64  # worker processes for dataset preprocessing
dataset_path: "nomic-ai/turbo-500k-multi"
max_length: 1024  # maximum sequence length in tokens
batch_size: 2
# train dynamics
lr: 2.0e-5  # peak learning rate
min_lr: 0  # floor for learning-rate decay
weight_decay: 0.0
eval_every: 500  # steps between evaluation passes
eval_steps: 105  # batches per evaluation pass
save_every: 500  # steps between checkpoints
log_grads_every: 500  # steps between gradient logs
output_dir: "ckpts/gpt4all-gptj-multinode"
checkpoint: null  # optional checkpoint path to resume from
lora: true  # train low-rank adapters instead of all weights
warmup_steps: 500  # learning-rate warmup steps
num_epochs: 2
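# A minimal sketch (kept as comments so this file stays valid YAML) of how
# the dynamics above could map onto an optimizer and scheduler. This is an
# assumption, not the repo's verified training loop: the cosine schedule is
# one plausible choice consistent with min_lr: 0, and `model` and
# `num_training_steps` are hypothetical names supplied by the trainer.
#
#   import torch
#   from transformers import get_cosine_schedule_with_warmup
#
#   # lr is the peak rate; weight_decay matches the key above
#   optimizer = torch.optim.AdamW(
#       model.parameters(), lr=2.0e-5, weight_decay=0.0
#   )
#
#   # warmup_steps: 500, then cosine decay toward min_lr: 0
#   scheduler = get_cosine_schedule_with_warmup(
#       optimizer, num_warmup_steps=500, num_training_steps=num_training_steps
#   )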
# logging
wandb: true
wandb_entity: zanussbaum
wandb_project_name: mosaic
seed: 42
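# A minimal usage sketch (again as comments) showing one way this config
# might be consumed: load the YAML, build the model and tokenizer, and
# attach a LoRA adapter with `peft`. The LoRA rank, alpha, dropout, and
# target modules below are assumptions; this file does not specify them.
#
#   import yaml
#   import torch
#   from transformers import AutoModelForCausalLM, AutoTokenizer
#   from peft import LoraConfig, get_peft_model
#
#   with open("configs/train/finetune_gptj_lora.yaml") as f:
#       config = yaml.safe_load(f)
#
#   tokenizer = AutoTokenizer.from_pretrained(config["tokenizer_name"])
#   model = AutoModelForCausalLM.from_pretrained(
#       config["model_name"], torch_dtype=torch.float16
#   )
#   if config["gradient_checkpointing"]:
#       model.gradient_checkpointing_enable()
#
#   if config["lora"]:
#       lora_config = LoraConfig(
#           r=8, lora_alpha=32, lora_dropout=0.05,
#           target_modules=["q_proj", "v_proj"],  # GPT-J attention projections
#           task_type="CAUSAL_LM",
#       )
#       model = get_peft_model(model, lora_config)
#       model.print_trainable_parameters()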