{
  "model": "unsloth/gemma-2-9b",
  "training_type": "SFT and DPO",
  "max_seq_length": 1024,
  "dtype": "float16",
  "training_args": {
    "sft": {
      "learning_rate": 2e-06,
      "batch_size": 4,
      "gradient_accumulation_steps": 4,
      "num_train_epochs": 3,
      "scheduler": "cosine",
      "optim": "adamw_8bit",
      "seed": 2802
    }
  }
}