{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 14688,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.14,
      "learning_rate": 2.897875816993464e-05,
      "loss": 2.0084,
      "step": 500
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.795751633986928e-05,
      "loss": 1.5875,
      "step": 1000
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.6936274509803924e-05,
      "loss": 1.5189,
      "step": 1500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.5915032679738562e-05,
      "loss": 1.447,
      "step": 2000
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.48937908496732e-05,
      "loss": 1.4006,
      "step": 2500
    },
    {
      "epoch": 0.82,
      "learning_rate": 2.3872549019607843e-05,
      "loss": 1.35,
      "step": 3000
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.2851307189542485e-05,
      "loss": 1.3541,
      "step": 3500
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.1830065359477124e-05,
      "loss": 1.0592,
      "step": 4000
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.0808823529411766e-05,
      "loss": 0.9374,
      "step": 4500
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.9787581699346405e-05,
      "loss": 0.9286,
      "step": 5000
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.8766339869281047e-05,
      "loss": 0.9295,
      "step": 5500
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.7745098039215686e-05,
      "loss": 0.9148,
      "step": 6000
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.6723856209150328e-05,
      "loss": 0.9295,
      "step": 6500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.5702614379084967e-05,
      "loss": 0.8957,
      "step": 7000
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.4681372549019607e-05,
      "loss": 0.7651,
      "step": 7500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.366013071895425e-05,
      "loss": 0.4515,
      "step": 8000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.2638888888888888e-05,
      "loss": 0.4702,
      "step": 8500
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.161764705882353e-05,
      "loss": 0.4578,
      "step": 9000
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.059640522875817e-05,
      "loss": 0.4478,
      "step": 9500
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.575163398692811e-06,
      "loss": 0.4501,
      "step": 10000
    },
    {
      "epoch": 2.86,
      "learning_rate": 8.553921568627451e-06,
      "loss": 0.4619,
      "step": 10500
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.532679738562092e-06,
      "loss": 0.4468,
      "step": 11000
    },
    {
      "epoch": 3.13,
      "learning_rate": 6.511437908496732e-06,
      "loss": 0.1485,
      "step": 11500
    },
    {
      "epoch": 3.27,
      "learning_rate": 5.490196078431373e-06,
      "loss": 0.1502,
      "step": 12000
    },
    {
      "epoch": 3.4,
      "learning_rate": 4.468954248366013e-06,
      "loss": 0.1412,
      "step": 12500
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.447712418300654e-06,
      "loss": 0.1327,
      "step": 13000
    },
    {
      "epoch": 3.68,
      "learning_rate": 2.4264705882352943e-06,
      "loss": 0.1521,
      "step": 13500
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.4052287581699345e-06,
      "loss": 0.1475,
      "step": 14000
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.839869281045752e-07,
      "loss": 0.1381,
      "step": 14500
    },
    {
      "epoch": 4.0,
      "step": 14688,
      "total_flos": 3.682861839160443e+17,
      "train_loss": 0.7583477986664034,
      "train_runtime": 15779.9103,
      "train_samples_per_second": 33.507,
      "train_steps_per_second": 0.931
    }
  ],
  "max_steps": 14688,
  "num_train_epochs": 4,
  "total_flos": 3.682861839160443e+17,
  "trial_name": null,
  "trial_params": null
}
|