{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 480,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03125,
      "grad_norm": 2.25,
      "learning_rate": 5.208333333333334e-06,
      "loss": 2.3536,
      "step": 5
    },
    {
      "epoch": 0.0625,
      "grad_norm": 1.546875,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 2.3694,
      "step": 10
    },
    {
      "epoch": 0.09375,
      "grad_norm": 1.7421875,
      "learning_rate": 1.5625e-05,
      "loss": 2.3358,
      "step": 15
    },
    {
      "epoch": 0.125,
      "grad_norm": 1.5078125,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 2.3675,
      "step": 20
    },
    {
      "epoch": 0.15625,
      "grad_norm": 1.4375,
      "learning_rate": 2.604166666666667e-05,
      "loss": 2.339,
      "step": 25
    },
    {
      "epoch": 0.1875,
      "grad_norm": 1.5390625,
      "learning_rate": 3.125e-05,
      "loss": 2.3121,
      "step": 30
    },
    {
      "epoch": 0.21875,
      "grad_norm": 1.4609375,
      "learning_rate": 3.6458333333333336e-05,
      "loss": 2.3198,
      "step": 35
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.59375,
      "learning_rate": 4.166666666666667e-05,
      "loss": 2.3085,
      "step": 40
    },
    {
      "epoch": 0.28125,
      "grad_norm": 1.859375,
      "learning_rate": 4.6875e-05,
      "loss": 2.3422,
      "step": 45
    },
    {
      "epoch": 0.3125,
      "grad_norm": 1.640625,
      "learning_rate": 4.976851851851852e-05,
      "loss": 2.2933,
      "step": 50
    },
    {
      "epoch": 0.34375,
      "grad_norm": 1.7265625,
      "learning_rate": 4.9189814814814815e-05,
      "loss": 2.278,
      "step": 55
    },
    {
      "epoch": 0.375,
      "grad_norm": 1.8046875,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 2.2502,
      "step": 60
    },
    {
      "epoch": 0.40625,
      "grad_norm": 1.8515625,
      "learning_rate": 4.803240740740741e-05,
      "loss": 2.3312,
      "step": 65
    },
    {
      "epoch": 0.4375,
      "grad_norm": 1.7109375,
      "learning_rate": 4.745370370370371e-05,
      "loss": 2.2619,
      "step": 70
    },
    {
      "epoch": 0.46875,
      "grad_norm": 1.5546875,
      "learning_rate": 4.6875e-05,
      "loss": 2.2527,
      "step": 75
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.5234375,
      "learning_rate": 4.62962962962963e-05,
      "loss": 2.3142,
      "step": 80
    },
    {
      "epoch": 0.53125,
      "grad_norm": 1.4609375,
      "learning_rate": 4.5717592592592594e-05,
      "loss": 2.2887,
      "step": 85
    },
    {
      "epoch": 0.5625,
      "grad_norm": 1.609375,
      "learning_rate": 4.5138888888888894e-05,
      "loss": 2.2449,
      "step": 90
    },
    {
      "epoch": 0.59375,
      "grad_norm": 1.453125,
      "learning_rate": 4.456018518518519e-05,
      "loss": 2.2605,
      "step": 95
    },
    {
      "epoch": 0.625,
      "grad_norm": 1.65625,
      "learning_rate": 4.3981481481481486e-05,
      "loss": 2.2888,
      "step": 100
    },
    {
      "epoch": 0.65625,
      "grad_norm": 1.53125,
      "learning_rate": 4.340277777777778e-05,
      "loss": 2.3326,
      "step": 105
    },
    {
      "epoch": 0.6875,
      "grad_norm": 1.46875,
      "learning_rate": 4.282407407407408e-05,
      "loss": 2.2427,
      "step": 110
    },
    {
      "epoch": 0.71875,
      "grad_norm": 1.484375,
      "learning_rate": 4.224537037037037e-05,
      "loss": 2.2691,
      "step": 115
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.3828125,
      "learning_rate": 4.166666666666667e-05,
      "loss": 2.274,
      "step": 120
    },
    {
      "epoch": 0.78125,
      "grad_norm": 1.46875,
      "learning_rate": 4.1087962962962965e-05,
      "loss": 2.2769,
      "step": 125
    },
    {
      "epoch": 0.8125,
      "grad_norm": 1.515625,
      "learning_rate": 4.0509259259259265e-05,
      "loss": 2.1991,
      "step": 130
    },
    {
      "epoch": 0.84375,
      "grad_norm": 1.4140625,
      "learning_rate": 3.993055555555556e-05,
      "loss": 2.2433,
      "step": 135
    },
    {
      "epoch": 0.875,
      "grad_norm": 1.546875,
      "learning_rate": 3.935185185185186e-05,
      "loss": 2.2614,
      "step": 140
    },
    {
      "epoch": 0.90625,
      "grad_norm": 1.46875,
      "learning_rate": 3.877314814814815e-05,
      "loss": 2.2938,
      "step": 145
    },
    {
      "epoch": 0.9375,
      "grad_norm": 1.3515625,
      "learning_rate": 3.8194444444444444e-05,
      "loss": 2.2696,
      "step": 150
    },
    {
      "epoch": 0.96875,
      "grad_norm": 1.3828125,
      "learning_rate": 3.7615740740740744e-05,
      "loss": 2.2715,
      "step": 155
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.4453125,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 2.2377,
      "step": 160
    },
    {
      "epoch": 1.03125,
      "grad_norm": 2.375,
      "learning_rate": 3.6458333333333336e-05,
      "loss": 2.0091,
      "step": 165
    },
    {
      "epoch": 1.0625,
      "grad_norm": 1.4609375,
      "learning_rate": 3.587962962962963e-05,
      "loss": 1.9842,
      "step": 170
    },
    {
      "epoch": 1.09375,
      "grad_norm": 1.453125,
      "learning_rate": 3.530092592592593e-05,
      "loss": 1.9515,
      "step": 175
    },
    {
      "epoch": 1.125,
      "grad_norm": 1.7109375,
      "learning_rate": 3.472222222222222e-05,
      "loss": 1.9648,
      "step": 180
    },
    {
      "epoch": 1.15625,
      "grad_norm": 1.4453125,
      "learning_rate": 3.414351851851852e-05,
      "loss": 1.9811,
      "step": 185
    },
    {
      "epoch": 1.1875,
      "grad_norm": 1.4375,
      "learning_rate": 3.3564814814814815e-05,
      "loss": 1.9638,
      "step": 190
    },
    {
      "epoch": 1.21875,
      "grad_norm": 1.4921875,
      "learning_rate": 3.2986111111111115e-05,
      "loss": 1.9788,
      "step": 195
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.4375,
      "learning_rate": 3.240740740740741e-05,
      "loss": 2.014,
      "step": 200
    },
    {
      "epoch": 1.28125,
      "grad_norm": 1.4140625,
      "learning_rate": 3.182870370370371e-05,
      "loss": 1.9551,
      "step": 205
    },
    {
      "epoch": 1.3125,
      "grad_norm": 1.4296875,
      "learning_rate": 3.125e-05,
      "loss": 1.995,
      "step": 210
    },
    {
      "epoch": 1.34375,
      "grad_norm": 1.4296875,
      "learning_rate": 3.06712962962963e-05,
      "loss": 1.9717,
      "step": 215
    },
    {
      "epoch": 1.375,
      "grad_norm": 1.4375,
      "learning_rate": 3.0092592592592593e-05,
      "loss": 2.0301,
      "step": 220
    },
    {
      "epoch": 1.40625,
      "grad_norm": 1.34375,
      "learning_rate": 2.951388888888889e-05,
      "loss": 1.9311,
      "step": 225
    },
    {
      "epoch": 1.4375,
      "grad_norm": 1.390625,
      "learning_rate": 2.8935185185185186e-05,
      "loss": 2.0162,
      "step": 230
    },
    {
      "epoch": 1.46875,
      "grad_norm": 1.421875,
      "learning_rate": 2.8356481481481483e-05,
      "loss": 1.9227,
      "step": 235
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.4140625,
      "learning_rate": 2.777777777777778e-05,
      "loss": 1.9816,
      "step": 240
    },
    {
      "epoch": 1.53125,
      "grad_norm": 1.421875,
      "learning_rate": 2.7199074074074076e-05,
      "loss": 1.9684,
      "step": 245
    },
    {
      "epoch": 1.5625,
      "grad_norm": 1.3828125,
      "learning_rate": 2.6620370370370372e-05,
      "loss": 1.9591,
      "step": 250
    },
    {
      "epoch": 1.59375,
      "grad_norm": 2.734375,
      "learning_rate": 2.604166666666667e-05,
      "loss": 1.997,
      "step": 255
    },
    {
      "epoch": 1.625,
      "grad_norm": 1.421875,
      "learning_rate": 2.5462962962962965e-05,
      "loss": 1.9312,
      "step": 260
    },
    {
      "epoch": 1.65625,
      "grad_norm": 1.375,
      "learning_rate": 2.488425925925926e-05,
      "loss": 1.9073,
      "step": 265
    },
    {
      "epoch": 1.6875,
      "grad_norm": 1.328125,
      "learning_rate": 2.4305555555555558e-05,
      "loss": 1.9367,
      "step": 270
    },
    {
      "epoch": 1.71875,
      "grad_norm": 1.34375,
      "learning_rate": 2.3726851851851854e-05,
      "loss": 1.9933,
      "step": 275
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.40625,
      "learning_rate": 2.314814814814815e-05,
      "loss": 1.9843,
      "step": 280
    },
    {
      "epoch": 1.78125,
      "grad_norm": 1.40625,
      "learning_rate": 2.2569444444444447e-05,
      "loss": 1.9439,
      "step": 285
    },
    {
      "epoch": 1.8125,
      "grad_norm": 1.359375,
      "learning_rate": 2.1990740740740743e-05,
      "loss": 1.9441,
      "step": 290
    },
    {
      "epoch": 1.84375,
      "grad_norm": 1.3359375,
      "learning_rate": 2.141203703703704e-05,
      "loss": 1.9521,
      "step": 295
    },
    {
      "epoch": 1.875,
      "grad_norm": 1.359375,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 1.9585,
      "step": 300
    },
    {
      "epoch": 1.90625,
      "grad_norm": 1.296875,
      "learning_rate": 2.0254629629629632e-05,
      "loss": 1.9464,
      "step": 305
    },
    {
      "epoch": 1.9375,
      "grad_norm": 1.3125,
      "learning_rate": 1.967592592592593e-05,
      "loss": 1.8949,
      "step": 310
    },
    {
      "epoch": 1.96875,
      "grad_norm": 1.390625,
      "learning_rate": 1.9097222222222222e-05,
      "loss": 1.9655,
      "step": 315
    },
    {
      "epoch": 2.0,
      "grad_norm": 3.1875,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 1.9846,
      "step": 320
    },
    {
      "epoch": 2.03125,
      "grad_norm": 1.75,
      "learning_rate": 1.7939814814814815e-05,
      "loss": 1.7054,
      "step": 325
    },
    {
      "epoch": 2.0625,
      "grad_norm": 2.6875,
      "learning_rate": 1.736111111111111e-05,
      "loss": 1.7147,
      "step": 330
    },
    {
      "epoch": 2.09375,
      "grad_norm": 1.8125,
      "learning_rate": 1.6782407407407408e-05,
      "loss": 1.7225,
      "step": 335
    },
    {
      "epoch": 2.125,
      "grad_norm": 1.71875,
      "learning_rate": 1.6203703703703704e-05,
      "loss": 1.7715,
      "step": 340
    },
    {
      "epoch": 2.15625,
      "grad_norm": 1.6953125,
      "learning_rate": 1.5625e-05,
      "loss": 1.6884,
      "step": 345
    },
    {
      "epoch": 2.1875,
      "grad_norm": 1.6171875,
      "learning_rate": 1.5046296296296297e-05,
      "loss": 1.7029,
      "step": 350
    },
    {
      "epoch": 2.21875,
      "grad_norm": 1.640625,
      "learning_rate": 1.4467592592592593e-05,
      "loss": 1.6946,
      "step": 355
    },
    {
      "epoch": 2.25,
      "grad_norm": 1.765625,
      "learning_rate": 1.388888888888889e-05,
      "loss": 1.7329,
      "step": 360
    },
    {
      "epoch": 2.28125,
      "grad_norm": 1.546875,
      "learning_rate": 1.3310185185185186e-05,
      "loss": 1.7032,
      "step": 365
    },
    {
      "epoch": 2.3125,
      "grad_norm": 1.6953125,
      "learning_rate": 1.2731481481481482e-05,
      "loss": 1.7113,
      "step": 370
    },
    {
      "epoch": 2.34375,
      "grad_norm": 1.5703125,
      "learning_rate": 1.2152777777777779e-05,
      "loss": 1.6568,
      "step": 375
    },
    {
      "epoch": 2.375,
      "grad_norm": 1.5703125,
      "learning_rate": 1.1574074074074075e-05,
      "loss": 1.7525,
      "step": 380
    },
    {
      "epoch": 2.40625,
      "grad_norm": 1.53125,
      "learning_rate": 1.0995370370370372e-05,
      "loss": 1.6914,
      "step": 385
    },
    {
      "epoch": 2.4375,
      "grad_norm": 1.5078125,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 1.6791,
      "step": 390
    },
    {
      "epoch": 2.46875,
      "grad_norm": 1.53125,
      "learning_rate": 9.837962962962964e-06,
      "loss": 1.7092,
      "step": 395
    },
    {
      "epoch": 2.5,
      "grad_norm": 1.6015625,
      "learning_rate": 9.259259259259259e-06,
      "loss": 1.6663,
      "step": 400
    },
    {
      "epoch": 2.53125,
      "grad_norm": 1.5625,
      "learning_rate": 8.680555555555556e-06,
      "loss": 1.6959,
      "step": 405
    },
    {
      "epoch": 2.5625,
      "grad_norm": 1.53125,
      "learning_rate": 8.101851851851852e-06,
      "loss": 1.6847,
      "step": 410
    },
    {
      "epoch": 2.59375,
      "grad_norm": 1.5234375,
      "learning_rate": 7.523148148148148e-06,
      "loss": 1.6527,
      "step": 415
    },
    {
      "epoch": 2.625,
      "grad_norm": 1.546875,
      "learning_rate": 6.944444444444445e-06,
      "loss": 1.6578,
      "step": 420
    },
    {
      "epoch": 2.65625,
      "grad_norm": 1.4765625,
      "learning_rate": 6.365740740740741e-06,
      "loss": 1.7115,
      "step": 425
    },
    {
      "epoch": 2.6875,
      "grad_norm": 1.484375,
      "learning_rate": 5.787037037037038e-06,
      "loss": 1.7117,
      "step": 430
    },
    {
      "epoch": 2.71875,
      "grad_norm": 1.5078125,
      "learning_rate": 5.208333333333334e-06,
      "loss": 1.7297,
      "step": 435
    },
    {
      "epoch": 2.75,
      "grad_norm": 1.515625,
      "learning_rate": 4.6296296296296296e-06,
      "loss": 1.7166,
      "step": 440
    },
    {
      "epoch": 2.78125,
      "grad_norm": 1.5078125,
      "learning_rate": 4.050925925925926e-06,
      "loss": 1.6857,
      "step": 445
    },
    {
      "epoch": 2.8125,
      "grad_norm": 1.5,
      "learning_rate": 3.4722222222222224e-06,
      "loss": 1.6657,
      "step": 450
    },
    {
      "epoch": 2.84375,
      "grad_norm": 1.5,
      "learning_rate": 2.893518518518519e-06,
      "loss": 1.708,
      "step": 455
    },
    {
      "epoch": 2.875,
      "grad_norm": 1.5078125,
      "learning_rate": 2.3148148148148148e-06,
      "loss": 1.7472,
      "step": 460
    },
    {
      "epoch": 2.90625,
      "grad_norm": 1.5625,
      "learning_rate": 1.7361111111111112e-06,
      "loss": 1.6997,
      "step": 465
    },
    {
      "epoch": 2.9375,
      "grad_norm": 1.5546875,
      "learning_rate": 1.1574074074074074e-06,
      "loss": 1.7339,
      "step": 470
    },
    {
      "epoch": 2.96875,
      "grad_norm": 1.5078125,
      "learning_rate": 5.787037037037037e-07,
      "loss": 1.7217,
      "step": 475
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.6328125,
      "learning_rate": 0.0,
      "loss": 1.6548,
      "step": 480
    }
  ],
  "logging_steps": 5,
  "max_steps": 480,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1887812691715686e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}