|
{ |
|
"best_metric": 0.0004703931335825473, |
|
 "best_model_checkpoint": "./vit-base-beans-demo-v5/checkpoint-10000",
|
"epoch": 70.92198581560284, |
|
"global_step": 10000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.992907801418441e-05, |
|
"loss": 2.1762, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.98581560283688e-05, |
|
"loss": 2.1295, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.97872340425532e-05, |
|
"loss": 2.068, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.971631205673759e-05, |
|
"loss": 2.0647, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.9645390070922e-05, |
|
"loss": 1.9875, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.95744680851064e-05, |
|
"loss": 1.9116, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.950354609929079e-05, |
|
"loss": 1.7607, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.943262411347518e-05, |
|
"loss": 1.7694, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.936170212765959e-05, |
|
"loss": 1.6876, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.929078014184398e-05, |
|
"loss": 1.5852, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"eval_accuracy": 0.4691358024691358, |
|
"eval_loss": 1.652011752128601, |
|
"eval_runtime": 4.9705, |
|
"eval_samples_per_second": 114.072, |
|
"eval_steps_per_second": 14.284, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.921985815602838e-05, |
|
"loss": 1.5447, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.914893617021277e-05, |
|
"loss": 1.4957, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.907801418439718e-05, |
|
"loss": 1.4538, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.900709219858157e-05, |
|
"loss": 1.4942, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.893617021276596e-05, |
|
"loss": 1.3494, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.886524822695036e-05, |
|
"loss": 1.3974, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.879432624113476e-05, |
|
"loss": 1.455, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.872340425531916e-05, |
|
"loss": 1.2304, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 9.865248226950355e-05, |
|
"loss": 1.3776, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 9.858156028368794e-05, |
|
"loss": 1.2438, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"eval_accuracy": 0.5767195767195767, |
|
"eval_loss": 1.2383878231048584, |
|
"eval_runtime": 5.082, |
|
"eval_samples_per_second": 111.569, |
|
"eval_steps_per_second": 13.971, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 9.851063829787235e-05, |
|
"loss": 1.1531, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.843971631205675e-05, |
|
"loss": 1.2229, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.836879432624114e-05, |
|
"loss": 1.1538, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.829787234042553e-05, |
|
"loss": 1.0427, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 9.822695035460994e-05, |
|
"loss": 1.0046, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 9.815602836879433e-05, |
|
"loss": 1.1033, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.808510638297873e-05, |
|
"loss": 0.809, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 9.801418439716312e-05, |
|
"loss": 0.8627, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.794326241134753e-05, |
|
"loss": 0.8803, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.787234042553192e-05, |
|
"loss": 1.0439, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"eval_accuracy": 0.7336860670194003, |
|
"eval_loss": 0.8531990647315979, |
|
"eval_runtime": 5.0605, |
|
"eval_samples_per_second": 112.043, |
|
"eval_steps_per_second": 14.03, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.780141843971632e-05, |
|
"loss": 1.0166, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 9.773049645390071e-05, |
|
"loss": 0.7573, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 9.765957446808512e-05, |
|
"loss": 0.8116, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.758865248226951e-05, |
|
"loss": 0.7621, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 9.75177304964539e-05, |
|
"loss": 0.7075, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 9.74468085106383e-05, |
|
"loss": 0.6867, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.73758865248227e-05, |
|
"loss": 0.7251, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.73049645390071e-05, |
|
"loss": 0.9261, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 9.723404255319149e-05, |
|
"loss": 0.8155, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 9.716312056737588e-05, |
|
"loss": 0.695, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"eval_accuracy": 0.800705467372134, |
|
"eval_loss": 0.659205436706543, |
|
"eval_runtime": 5.1, |
|
"eval_samples_per_second": 111.176, |
|
"eval_steps_per_second": 13.921, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 9.709219858156029e-05, |
|
"loss": 0.729, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 9.702127659574469e-05, |
|
"loss": 0.658, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 9.695035460992908e-05, |
|
"loss": 0.7035, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 9.687943262411347e-05, |
|
"loss": 0.6528, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 9.680851063829788e-05, |
|
"loss": 0.7444, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 9.673758865248227e-05, |
|
"loss": 0.6091, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 0.7101, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 9.659574468085106e-05, |
|
"loss": 0.5602, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 9.652482269503547e-05, |
|
"loss": 0.6144, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 9.645390070921986e-05, |
|
"loss": 0.5892, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"eval_accuracy": 0.8059964726631393, |
|
"eval_loss": 0.6282046437263489, |
|
"eval_runtime": 5.29, |
|
"eval_samples_per_second": 107.182, |
|
"eval_steps_per_second": 13.421, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 9.638297872340426e-05, |
|
"loss": 0.5771, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 9.631205673758865e-05, |
|
"loss": 0.5012, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 9.624113475177306e-05, |
|
"loss": 0.532, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 9.617021276595745e-05, |
|
"loss": 0.5639, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 9.609929078014184e-05, |
|
"loss": 0.6251, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 9.602836879432625e-05, |
|
"loss": 0.4537, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 9.595744680851064e-05, |
|
"loss": 0.4307, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 9.588652482269505e-05, |
|
"loss": 0.446, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 9.581560283687943e-05, |
|
"loss": 0.4428, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 9.574468085106384e-05, |
|
"loss": 0.4763, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"eval_accuracy": 0.8359788359788359, |
|
"eval_loss": 0.5606168508529663, |
|
"eval_runtime": 5.2615, |
|
"eval_samples_per_second": 107.763, |
|
"eval_steps_per_second": 13.494, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 9.567375886524823e-05, |
|
"loss": 0.4822, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 9.560283687943264e-05, |
|
"loss": 0.5021, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 9.553191489361702e-05, |
|
"loss": 0.4713, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 9.546099290780143e-05, |
|
"loss": 0.5357, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 9.539007092198582e-05, |
|
"loss": 0.5435, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 9.531914893617023e-05, |
|
"loss": 0.3855, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 9.524822695035461e-05, |
|
"loss": 0.4365, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 9.517730496453901e-05, |
|
"loss": 0.4521, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 9.510638297872341e-05, |
|
"loss": 0.4258, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 9.503546099290782e-05, |
|
"loss": 0.4664, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"eval_accuracy": 0.8447971781305115, |
|
"eval_loss": 0.4649847149848938, |
|
"eval_runtime": 5.0845, |
|
"eval_samples_per_second": 111.514, |
|
"eval_steps_per_second": 13.964, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 9.49645390070922e-05, |
|
"loss": 0.3395, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 9.48936170212766e-05, |
|
"loss": 0.3529, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 9.4822695035461e-05, |
|
"loss": 0.3969, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 9.47517730496454e-05, |
|
"loss": 0.3368, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 9.468085106382978e-05, |
|
"loss": 0.2199, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 9.460992907801419e-05, |
|
"loss": 0.3191, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 9.453900709219858e-05, |
|
"loss": 0.3199, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 9.446808510638299e-05, |
|
"loss": 0.3181, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 9.439716312056737e-05, |
|
"loss": 0.3619, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 9.432624113475178e-05, |
|
"loss": 0.2486, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"eval_accuracy": 0.8800705467372134, |
|
"eval_loss": 0.3964751958847046, |
|
"eval_runtime": 4.927, |
|
"eval_samples_per_second": 115.079, |
|
"eval_steps_per_second": 14.41, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 9.425531914893617e-05, |
|
"loss": 0.4254, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 9.418439716312058e-05, |
|
"loss": 0.3612, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 9.411347517730497e-05, |
|
"loss": 0.3508, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 9.404255319148937e-05, |
|
"loss": 0.2778, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 9.397163120567376e-05, |
|
"loss": 0.1774, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 9.390070921985817e-05, |
|
"loss": 0.2778, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 9.382978723404256e-05, |
|
"loss": 0.2184, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 9.375886524822695e-05, |
|
"loss": 0.3215, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 9.368794326241135e-05, |
|
"loss": 0.3755, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 9.361702127659576e-05, |
|
"loss": 0.2726, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"eval_accuracy": 0.9047619047619048, |
|
"eval_loss": 0.34163007140159607, |
|
"eval_runtime": 4.777, |
|
"eval_samples_per_second": 118.693, |
|
"eval_steps_per_second": 14.863, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 9.354609929078015e-05, |
|
"loss": 0.2767, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 9.34822695035461e-05, |
|
"loss": 0.3285, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 9.34113475177305e-05, |
|
"loss": 0.3168, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 9.33404255319149e-05, |
|
"loss": 0.1633, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 9.326950354609929e-05, |
|
"loss": 0.3204, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 9.319858156028369e-05, |
|
"loss": 0.2753, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 9.312765957446809e-05, |
|
"loss": 0.2775, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 9.30567375886525e-05, |
|
"loss": 0.2352, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 9.298581560283687e-05, |
|
"loss": 0.2665, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 9.291489361702128e-05, |
|
"loss": 0.2587, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"eval_accuracy": 0.9241622574955908, |
|
"eval_loss": 0.25383827090263367, |
|
"eval_runtime": 4.9285, |
|
"eval_samples_per_second": 115.044, |
|
"eval_steps_per_second": 14.406, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 9.284397163120567e-05, |
|
"loss": 0.3072, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 9.277304964539008e-05, |
|
"loss": 0.3872, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 9.270212765957446e-05, |
|
"loss": 0.2625, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 9.263120567375887e-05, |
|
"loss": 0.3353, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 9.256028368794328e-05, |
|
"loss": 0.1992, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 9.248936170212767e-05, |
|
"loss": 0.244, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 9.241843971631206e-05, |
|
"loss": 0.1355, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 9.234751773049646e-05, |
|
"loss": 0.1959, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 9.227659574468086e-05, |
|
"loss": 0.3748, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 9.220567375886526e-05, |
|
"loss": 0.2134, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"eval_accuracy": 0.9347442680776014, |
|
"eval_loss": 0.21870087087154388, |
|
"eval_runtime": 4.96, |
|
"eval_samples_per_second": 114.314, |
|
"eval_steps_per_second": 14.314, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 9.213475177304965e-05, |
|
"loss": 0.2255, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 9.206382978723404e-05, |
|
"loss": 0.2098, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 9.199290780141845e-05, |
|
"loss": 0.1824, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 9.192198581560285e-05, |
|
"loss": 0.1689, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 9.185106382978724e-05, |
|
"loss": 0.1869, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 9.178014184397163e-05, |
|
"loss": 0.3218, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 9.170921985815604e-05, |
|
"loss": 0.2401, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 9.163829787234043e-05, |
|
"loss": 0.2202, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 9.156737588652483e-05, |
|
"loss": 0.1199, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 9.149645390070922e-05, |
|
"loss": 0.1366, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"eval_accuracy": 0.9365079365079365, |
|
"eval_loss": 0.25872817635536194, |
|
"eval_runtime": 5.0905, |
|
"eval_samples_per_second": 111.383, |
|
"eval_steps_per_second": 13.947, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 9.142553191489363e-05, |
|
"loss": 0.2913, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 9.135460992907802e-05, |
|
"loss": 0.1504, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 9.128368794326241e-05, |
|
"loss": 0.1568, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 9.121276595744681e-05, |
|
"loss": 0.2478, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 9.114184397163122e-05, |
|
"loss": 0.1182, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 9.107092198581561e-05, |
|
"loss": 0.1879, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 9.1e-05, |
|
"loss": 0.0922, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 9.09290780141844e-05, |
|
"loss": 0.1083, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 9.08581560283688e-05, |
|
"loss": 0.1389, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 9.07872340425532e-05, |
|
"loss": 0.1062, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"eval_accuracy": 0.9329805996472663, |
|
"eval_loss": 0.23322318494319916, |
|
"eval_runtime": 4.9, |
|
"eval_samples_per_second": 115.713, |
|
"eval_steps_per_second": 14.49, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 9.071631205673759e-05, |
|
"loss": 0.1666, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 9.064539007092198e-05, |
|
"loss": 0.143, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 9.057446808510639e-05, |
|
"loss": 0.1701, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 9.050354609929079e-05, |
|
"loss": 0.1869, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 9.043262411347518e-05, |
|
"loss": 0.2483, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 9.036170212765957e-05, |
|
"loss": 0.1374, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 9.029078014184398e-05, |
|
"loss": 0.2322, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 9.021985815602837e-05, |
|
"loss": 0.2282, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 9.014893617021277e-05, |
|
"loss": 0.1862, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 9.007801418439716e-05, |
|
"loss": 0.1873, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"eval_accuracy": 0.9382716049382716, |
|
"eval_loss": 0.19933444261550903, |
|
"eval_runtime": 4.8755, |
|
"eval_samples_per_second": 116.295, |
|
"eval_steps_per_second": 14.562, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 9.000709219858157e-05, |
|
"loss": 0.0941, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 8.993617021276596e-05, |
|
"loss": 0.1053, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 8.986524822695035e-05, |
|
"loss": 0.1271, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 8.979432624113475e-05, |
|
"loss": 0.1664, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 8.972340425531916e-05, |
|
"loss": 0.123, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 8.965248226950355e-05, |
|
"loss": 0.2243, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 8.958156028368794e-05, |
|
"loss": 0.1105, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 8.951063829787234e-05, |
|
"loss": 0.128, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 8.943971631205674e-05, |
|
"loss": 0.177, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 8.936879432624114e-05, |
|
"loss": 0.0827, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"eval_accuracy": 0.9435626102292769, |
|
"eval_loss": 0.2013310194015503, |
|
"eval_runtime": 4.8795, |
|
"eval_samples_per_second": 116.2, |
|
"eval_steps_per_second": 14.551, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 8.929787234042553e-05, |
|
"loss": 0.0708, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 8.922695035460992e-05, |
|
"loss": 0.0474, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 8.915602836879433e-05, |
|
"loss": 0.2452, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 8.908510638297873e-05, |
|
"loss": 0.1033, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 8.901418439716312e-05, |
|
"loss": 0.1123, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 8.894326241134751e-05, |
|
"loss": 0.0597, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 8.887234042553192e-05, |
|
"loss": 0.1401, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 8.880141843971633e-05, |
|
"loss": 0.1481, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"learning_rate": 8.873049645390071e-05, |
|
"loss": 0.0823, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"learning_rate": 8.865957446808511e-05, |
|
"loss": 0.2071, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"eval_accuracy": 0.9188712522045855, |
|
"eval_loss": 0.2737657129764557, |
|
"eval_runtime": 4.986, |
|
"eval_samples_per_second": 113.717, |
|
"eval_steps_per_second": 14.24, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 8.858865248226951e-05, |
|
"loss": 0.1693, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 8.852482269503546e-05, |
|
"loss": 0.1344, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 8.845390070921986e-05, |
|
"loss": 0.0623, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 8.838297872340426e-05, |
|
"loss": 0.1087, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 8.831205673758866e-05, |
|
"loss": 0.0971, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 8.824113475177305e-05, |
|
"loss": 0.0733, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"learning_rate": 8.817021276595744e-05, |
|
"loss": 0.0471, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 8.809929078014185e-05, |
|
"loss": 0.2112, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 8.802836879432625e-05, |
|
"loss": 0.1347, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 8.795744680851065e-05, |
|
"loss": 0.0477, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"eval_accuracy": 0.9735449735449735, |
|
"eval_loss": 0.09033568948507309, |
|
"eval_runtime": 5.073, |
|
"eval_samples_per_second": 111.767, |
|
"eval_steps_per_second": 13.996, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 8.788652482269503e-05, |
|
"loss": 0.105, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 12.2, |
|
"learning_rate": 8.781560283687944e-05, |
|
"loss": 0.046, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 8.774468085106383e-05, |
|
"loss": 0.0763, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 8.767375886524824e-05, |
|
"loss": 0.1138, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"learning_rate": 8.760283687943262e-05, |
|
"loss": 0.0569, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 12.48, |
|
"learning_rate": 8.753191489361703e-05, |
|
"loss": 0.1693, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 8.746099290780142e-05, |
|
"loss": 0.0377, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 12.62, |
|
"learning_rate": 8.739007092198583e-05, |
|
"loss": 0.0173, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 8.731914893617021e-05, |
|
"loss": 0.0383, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 8.724822695035462e-05, |
|
"loss": 0.081, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"eval_accuracy": 0.9753086419753086, |
|
"eval_loss": 0.10142536461353302, |
|
"eval_runtime": 5.0755, |
|
"eval_samples_per_second": 111.712, |
|
"eval_steps_per_second": 13.989, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 8.717730496453901e-05, |
|
"loss": 0.011, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 8.710638297872342e-05, |
|
"loss": 0.0556, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 12.98, |
|
"learning_rate": 8.70354609929078e-05, |
|
"loss": 0.0798, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"learning_rate": 8.69645390070922e-05, |
|
"loss": 0.0156, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 8.68936170212766e-05, |
|
"loss": 0.05, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"learning_rate": 8.6822695035461e-05, |
|
"loss": 0.1144, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"learning_rate": 8.675177304964538e-05, |
|
"loss": 0.1024, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 8.668085106382979e-05, |
|
"loss": 0.0993, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 8.660992907801419e-05, |
|
"loss": 0.0832, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"learning_rate": 8.653900709219859e-05, |
|
"loss": 0.0191, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"eval_accuracy": 0.9523809523809523, |
|
"eval_loss": 0.1780620813369751, |
|
"eval_runtime": 4.8325, |
|
"eval_samples_per_second": 117.33, |
|
"eval_steps_per_second": 14.692, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 13.55, |
|
"learning_rate": 8.646808510638297e-05, |
|
"loss": 0.0464, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 13.62, |
|
"learning_rate": 8.639716312056738e-05, |
|
"loss": 0.0368, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"learning_rate": 8.632624113475177e-05, |
|
"loss": 0.0582, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"learning_rate": 8.625531914893618e-05, |
|
"loss": 0.0272, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 13.83, |
|
"learning_rate": 8.618439716312056e-05, |
|
"loss": 0.0241, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 8.611347517730497e-05, |
|
"loss": 0.1776, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 8.604255319148936e-05, |
|
"loss": 0.108, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 14.04, |
|
"learning_rate": 8.597163120567377e-05, |
|
"loss": 0.1158, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"learning_rate": 8.590070921985815e-05, |
|
"loss": 0.0953, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"learning_rate": 8.582978723404256e-05, |
|
"loss": 0.1016, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"eval_accuracy": 0.9365079365079365, |
|
"eval_loss": 0.23041455447673798, |
|
"eval_runtime": 5.125, |
|
"eval_samples_per_second": 110.633, |
|
"eval_steps_per_second": 13.854, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"learning_rate": 8.575886524822695e-05, |
|
"loss": 0.0435, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 8.568794326241136e-05, |
|
"loss": 0.1088, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"learning_rate": 8.561702127659574e-05, |
|
"loss": 0.0868, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"learning_rate": 8.554609929078014e-05, |
|
"loss": 0.0725, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 8.547517730496454e-05, |
|
"loss": 0.1031, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 8.540425531914894e-05, |
|
"loss": 0.1031, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"learning_rate": 8.533333333333334e-05, |
|
"loss": 0.0604, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 14.75, |
|
"learning_rate": 8.526241134751773e-05, |
|
"loss": 0.1943, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 14.82, |
|
"learning_rate": 8.519148936170214e-05, |
|
"loss": 0.106, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"learning_rate": 8.512056737588653e-05, |
|
"loss": 0.0465, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"eval_accuracy": 0.9594356261022927, |
|
"eval_loss": 0.14327698945999146, |
|
"eval_runtime": 5.158, |
|
"eval_samples_per_second": 109.925, |
|
"eval_steps_per_second": 13.765, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 14.96, |
|
"learning_rate": 8.504964539007093e-05, |
|
"loss": 0.0911, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 15.04, |
|
"learning_rate": 8.497872340425532e-05, |
|
"loss": 0.0861, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"learning_rate": 8.490780141843973e-05, |
|
"loss": 0.0557, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 15.18, |
|
"learning_rate": 8.483687943262412e-05, |
|
"loss": 0.103, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 15.25, |
|
"learning_rate": 8.476595744680851e-05, |
|
"loss": 0.0555, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"learning_rate": 8.469503546099291e-05, |
|
"loss": 0.0473, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 15.39, |
|
"learning_rate": 8.462411347517732e-05, |
|
"loss": 0.1456, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 15.46, |
|
"learning_rate": 8.455319148936171e-05, |
|
"loss": 0.0771, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"learning_rate": 8.44822695035461e-05, |
|
"loss": 0.0529, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 15.6, |
|
"learning_rate": 8.44113475177305e-05, |
|
"loss": 0.0412, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 15.6, |
|
"eval_accuracy": 0.9735449735449735, |
|
"eval_loss": 0.12634754180908203, |
|
"eval_runtime": 4.8775, |
|
"eval_samples_per_second": 116.247, |
|
"eval_steps_per_second": 14.557, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 15.67, |
|
"learning_rate": 8.43404255319149e-05, |
|
"loss": 0.0639, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"learning_rate": 8.42695035460993e-05, |
|
"loss": 0.0561, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 8.419858156028369e-05, |
|
"loss": 0.0537, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"learning_rate": 8.412765957446808e-05, |
|
"loss": 0.0372, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 15.96, |
|
"learning_rate": 8.405673758865249e-05, |
|
"loss": 0.0293, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"learning_rate": 8.398581560283688e-05, |
|
"loss": 0.0203, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"learning_rate": 8.391489361702128e-05, |
|
"loss": 0.0838, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"learning_rate": 8.384397163120567e-05, |
|
"loss": 0.015, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"learning_rate": 8.377304964539008e-05, |
|
"loss": 0.0496, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 16.31, |
|
"learning_rate": 8.370212765957447e-05, |
|
"loss": 0.0449, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 16.31, |
|
"eval_accuracy": 0.9594356261022927, |
|
"eval_loss": 0.17587855458259583, |
|
"eval_runtime": 4.748, |
|
"eval_samples_per_second": 119.418, |
|
"eval_steps_per_second": 14.954, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 16.38, |
|
"learning_rate": 8.363120567375887e-05, |
|
"loss": 0.1219, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 16.45, |
|
"learning_rate": 8.356028368794326e-05, |
|
"loss": 0.0748, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"learning_rate": 8.348936170212767e-05, |
|
"loss": 0.1141, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 16.6, |
|
"learning_rate": 8.341843971631206e-05, |
|
"loss": 0.0559, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 8.334751773049645e-05, |
|
"loss": 0.0261, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 16.74, |
|
"learning_rate": 8.327659574468085e-05, |
|
"loss": 0.0896, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"learning_rate": 8.320567375886526e-05, |
|
"loss": 0.0891, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 16.88, |
|
"learning_rate": 8.313475177304965e-05, |
|
"loss": 0.0454, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 16.95, |
|
"learning_rate": 8.306382978723404e-05, |
|
"loss": 0.0323, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 17.02, |
|
"learning_rate": 8.299290780141844e-05, |
|
"loss": 0.0614, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 17.02, |
|
"eval_accuracy": 0.9682539682539683, |
|
"eval_loss": 0.11620796471834183, |
|
"eval_runtime": 5.1925, |
|
"eval_samples_per_second": 109.195, |
|
"eval_steps_per_second": 13.673, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"learning_rate": 8.292198581560284e-05, |
|
"loss": 0.0402, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 17.16, |
|
"learning_rate": 8.285106382978724e-05, |
|
"loss": 0.0472, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 17.23, |
|
"learning_rate": 8.278014184397163e-05, |
|
"loss": 0.0331, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"learning_rate": 8.270921985815602e-05, |
|
"loss": 0.1932, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 17.38, |
|
"learning_rate": 8.263829787234043e-05, |
|
"loss": 0.0786, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 17.45, |
|
"learning_rate": 8.256737588652482e-05, |
|
"loss": 0.1532, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 17.52, |
|
"learning_rate": 8.249645390070922e-05, |
|
"loss": 0.028, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"learning_rate": 8.242553191489361e-05, |
|
"loss": 0.0448, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"learning_rate": 8.235460992907802e-05, |
|
"loss": 0.0867, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 17.73, |
|
"learning_rate": 8.228368794326241e-05, |
|
"loss": 0.019, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 17.73, |
|
"eval_accuracy": 0.9841269841269841, |
|
"eval_loss": 0.057109106332063675, |
|
"eval_runtime": 4.9535, |
|
"eval_samples_per_second": 114.464, |
|
"eval_steps_per_second": 14.333, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 17.8, |
|
"learning_rate": 8.221276595744682e-05, |
|
"loss": 0.0047, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 17.87, |
|
"learning_rate": 8.21418439716312e-05, |
|
"loss": 0.0665, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 17.94, |
|
"learning_rate": 8.207092198581561e-05, |
|
"loss": 0.1127, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 18.01, |
|
"learning_rate": 8.2e-05, |
|
"loss": 0.073, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 18.09, |
|
"learning_rate": 8.192907801418441e-05, |
|
"loss": 0.0051, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 18.16, |
|
"learning_rate": 8.185815602836879e-05, |
|
"loss": 0.1039, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"learning_rate": 8.17872340425532e-05, |
|
"loss": 0.117, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 18.3, |
|
"learning_rate": 8.171631205673759e-05, |
|
"loss": 0.0763, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 8.1645390070922e-05, |
|
"loss": 0.0678, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 8.157446808510638e-05, |
|
"loss": 0.1211, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"eval_accuracy": 0.9541446208112875, |
|
"eval_loss": 0.21715059876441956, |
|
"eval_runtime": 5.0345, |
|
"eval_samples_per_second": 112.622, |
|
"eval_steps_per_second": 14.103, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 18.51, |
|
"learning_rate": 8.150354609929078e-05, |
|
"loss": 0.1104, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 18.58, |
|
"learning_rate": 8.143262411347519e-05, |
|
"loss": 0.0693, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"learning_rate": 8.136170212765958e-05, |
|
"loss": 0.0279, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 18.72, |
|
"learning_rate": 8.129078014184398e-05, |
|
"loss": 0.0757, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 18.79, |
|
"learning_rate": 8.121985815602837e-05, |
|
"loss": 0.0996, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 18.87, |
|
"learning_rate": 8.114893617021278e-05, |
|
"loss": 0.0508, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 18.94, |
|
"learning_rate": 8.107801418439717e-05, |
|
"loss": 0.0114, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"learning_rate": 8.100709219858157e-05, |
|
"loss": 0.0097, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 19.08, |
|
"learning_rate": 8.093617021276596e-05, |
|
"loss": 0.0695, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"learning_rate": 8.086524822695037e-05, |
|
"loss": 0.12, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"eval_accuracy": 0.9664902998236331, |
|
"eval_loss": 0.14609535038471222, |
|
"eval_runtime": 4.9415, |
|
"eval_samples_per_second": 114.741, |
|
"eval_steps_per_second": 14.368, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"learning_rate": 8.079432624113476e-05, |
|
"loss": 0.0491, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 19.29, |
|
"learning_rate": 8.072340425531915e-05, |
|
"loss": 0.0297, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 19.36, |
|
"learning_rate": 8.065248226950355e-05, |
|
"loss": 0.0691, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"learning_rate": 8.058156028368795e-05, |
|
"loss": 0.0482, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 8.051063829787235e-05, |
|
"loss": 0.0036, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"learning_rate": 8.043971631205674e-05, |
|
"loss": 0.0894, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 19.65, |
|
"learning_rate": 8.036879432624114e-05, |
|
"loss": 0.0737, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 19.72, |
|
"learning_rate": 8.029787234042554e-05, |
|
"loss": 0.0783, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"learning_rate": 8.022695035460994e-05, |
|
"loss": 0.069, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 19.86, |
|
"learning_rate": 8.015602836879433e-05, |
|
"loss": 0.0688, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 19.86, |
|
"eval_accuracy": 0.9488536155202821, |
|
"eval_loss": 0.2023719996213913, |
|
"eval_runtime": 5.025, |
|
"eval_samples_per_second": 112.835, |
|
"eval_steps_per_second": 14.129, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 19.93, |
|
"learning_rate": 8.008510638297872e-05, |
|
"loss": 0.2303, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 8.001418439716313e-05, |
|
"loss": 0.0436, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 20.07, |
|
"learning_rate": 7.994326241134752e-05, |
|
"loss": 0.0297, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 20.14, |
|
"learning_rate": 7.987234042553192e-05, |
|
"loss": 0.0584, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 20.21, |
|
"learning_rate": 7.980141843971631e-05, |
|
"loss": 0.0388, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 20.28, |
|
"learning_rate": 7.973049645390072e-05, |
|
"loss": 0.0745, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 20.35, |
|
"learning_rate": 7.965957446808511e-05, |
|
"loss": 0.0029, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 20.43, |
|
"learning_rate": 7.95886524822695e-05, |
|
"loss": 0.0432, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 20.5, |
|
"learning_rate": 7.95177304964539e-05, |
|
"loss": 0.0085, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 20.57, |
|
"learning_rate": 7.94468085106383e-05, |
|
"loss": 0.0462, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 20.57, |
|
"eval_accuracy": 0.982363315696649, |
|
"eval_loss": 0.04924876615405083, |
|
"eval_runtime": 4.8635, |
|
"eval_samples_per_second": 116.582, |
|
"eval_steps_per_second": 14.598, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 20.64, |
|
"learning_rate": 7.93758865248227e-05, |
|
"loss": 0.0076, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 20.71, |
|
"learning_rate": 7.93049645390071e-05, |
|
"loss": 0.0285, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 20.78, |
|
"learning_rate": 7.923404255319149e-05, |
|
"loss": 0.0394, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 20.85, |
|
"learning_rate": 7.91631205673759e-05, |
|
"loss": 0.2017, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 20.92, |
|
"learning_rate": 7.909219858156029e-05, |
|
"loss": 0.1279, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 20.99, |
|
"learning_rate": 7.902127659574468e-05, |
|
"loss": 0.1861, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 21.06, |
|
"learning_rate": 7.895035460992908e-05, |
|
"loss": 0.0247, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 21.13, |
|
"learning_rate": 7.887943262411348e-05, |
|
"loss": 0.0872, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 21.21, |
|
"learning_rate": 7.880851063829788e-05, |
|
"loss": 0.0511, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 21.28, |
|
"learning_rate": 7.873758865248227e-05, |
|
"loss": 0.0147, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 21.28, |
|
"eval_accuracy": 0.9788359788359788, |
|
"eval_loss": 0.08856220543384552, |
|
"eval_runtime": 4.799, |
|
"eval_samples_per_second": 118.149, |
|
"eval_steps_per_second": 14.795, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 21.35, |
|
"learning_rate": 7.866666666666666e-05, |
|
"loss": 0.0564, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 21.42, |
|
"learning_rate": 7.859574468085107e-05, |
|
"loss": 0.0178, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 21.49, |
|
"learning_rate": 7.852482269503546e-05, |
|
"loss": 0.0616, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 21.56, |
|
"learning_rate": 7.845390070921986e-05, |
|
"loss": 0.0285, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 21.63, |
|
"learning_rate": 7.838297872340425e-05, |
|
"loss": 0.0139, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 21.7, |
|
"learning_rate": 7.831205673758866e-05, |
|
"loss": 0.0465, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 21.77, |
|
"learning_rate": 7.824113475177305e-05, |
|
"loss": 0.0542, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 21.84, |
|
"learning_rate": 7.817021276595745e-05, |
|
"loss": 0.0835, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 21.91, |
|
"learning_rate": 7.809929078014184e-05, |
|
"loss": 0.0596, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 21.99, |
|
"learning_rate": 7.802836879432625e-05, |
|
"loss": 0.012, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 21.99, |
|
"eval_accuracy": 0.9894179894179894, |
|
"eval_loss": 0.03947828337550163, |
|
"eval_runtime": 4.8825, |
|
"eval_samples_per_second": 116.128, |
|
"eval_steps_per_second": 14.542, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 22.06, |
|
"learning_rate": 7.795744680851064e-05, |
|
"loss": 0.0741, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 22.13, |
|
"learning_rate": 7.788652482269503e-05, |
|
"loss": 0.0112, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 22.2, |
|
"learning_rate": 7.781560283687943e-05, |
|
"loss": 0.015, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 22.27, |
|
"learning_rate": 7.774468085106383e-05, |
|
"loss": 0.0037, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 22.34, |
|
"learning_rate": 7.767375886524824e-05, |
|
"loss": 0.0021, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 22.41, |
|
"learning_rate": 7.760283687943262e-05, |
|
"loss": 0.0854, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 22.48, |
|
"learning_rate": 7.753191489361703e-05, |
|
"loss": 0.0451, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 22.55, |
|
"learning_rate": 7.746099290780142e-05, |
|
"loss": 0.0276, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 22.62, |
|
"learning_rate": 7.739007092198583e-05, |
|
"loss": 0.0297, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"learning_rate": 7.731914893617021e-05, |
|
"loss": 0.0059, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"eval_accuracy": 0.982363315696649, |
|
"eval_loss": 0.0684400424361229, |
|
"eval_runtime": 4.958, |
|
"eval_samples_per_second": 114.36, |
|
"eval_steps_per_second": 14.32, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 22.77, |
|
"learning_rate": 7.724822695035462e-05, |
|
"loss": 0.038, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 22.84, |
|
"learning_rate": 7.717730496453901e-05, |
|
"loss": 0.0066, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 22.91, |
|
"learning_rate": 7.710638297872342e-05, |
|
"loss": 0.0023, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 22.98, |
|
"learning_rate": 7.70354609929078e-05, |
|
"loss": 0.0053, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 23.05, |
|
"learning_rate": 7.69645390070922e-05, |
|
"loss": 0.0833, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 23.12, |
|
"learning_rate": 7.68936170212766e-05, |
|
"loss": 0.0016, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 23.19, |
|
"learning_rate": 7.6822695035461e-05, |
|
"loss": 0.0563, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 23.26, |
|
"learning_rate": 7.675177304964539e-05, |
|
"loss": 0.0051, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 23.33, |
|
"learning_rate": 7.668085106382979e-05, |
|
"loss": 0.0029, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 23.4, |
|
"learning_rate": 7.660992907801419e-05, |
|
"loss": 0.0657, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 23.4, |
|
"eval_accuracy": 0.9735449735449735, |
|
"eval_loss": 0.10394405573606491, |
|
"eval_runtime": 4.9825, |
|
"eval_samples_per_second": 113.797, |
|
"eval_steps_per_second": 14.25, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 23.48, |
|
"learning_rate": 7.65390070921986e-05, |
|
"loss": 0.0159, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 23.55, |
|
"learning_rate": 7.646808510638299e-05, |
|
"loss": 0.0245, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 23.62, |
|
"learning_rate": 7.639716312056738e-05, |
|
"loss": 0.0262, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 23.69, |
|
"learning_rate": 7.632624113475177e-05, |
|
"loss": 0.0342, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 23.76, |
|
"learning_rate": 7.625531914893618e-05, |
|
"loss": 0.002, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 23.83, |
|
"learning_rate": 7.618439716312058e-05, |
|
"loss": 0.005, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 23.9, |
|
"learning_rate": 7.611347517730497e-05, |
|
"loss": 0.0269, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 23.97, |
|
"learning_rate": 7.604255319148936e-05, |
|
"loss": 0.0055, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 24.04, |
|
"learning_rate": 7.597163120567377e-05, |
|
"loss": 0.0762, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 24.11, |
|
"learning_rate": 7.590070921985816e-05, |
|
"loss": 0.0198, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 24.11, |
|
"eval_accuracy": 0.9805996472663139, |
|
"eval_loss": 0.09220729023218155, |
|
"eval_runtime": 4.826, |
|
"eval_samples_per_second": 117.488, |
|
"eval_steps_per_second": 14.712, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 24.18, |
|
"learning_rate": 7.582978723404256e-05, |
|
"loss": 0.0705, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 24.26, |
|
"learning_rate": 7.575886524822695e-05, |
|
"loss": 0.0074, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 24.33, |
|
"learning_rate": 7.568794326241136e-05, |
|
"loss": 0.0536, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 24.4, |
|
"learning_rate": 7.561702127659575e-05, |
|
"loss": 0.0031, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 24.47, |
|
"learning_rate": 7.554609929078014e-05, |
|
"loss": 0.036, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 24.54, |
|
"learning_rate": 7.547517730496454e-05, |
|
"loss": 0.0025, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 24.61, |
|
"learning_rate": 7.540425531914895e-05, |
|
"loss": 0.0025, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 24.68, |
|
"learning_rate": 7.533333333333334e-05, |
|
"loss": 0.0637, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 24.75, |
|
"learning_rate": 7.526241134751773e-05, |
|
"loss": 0.0017, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 24.82, |
|
"learning_rate": 7.519148936170213e-05, |
|
"loss": 0.0657, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 24.82, |
|
"eval_accuracy": 0.9858906525573192, |
|
"eval_loss": 0.048500701785087585, |
|
"eval_runtime": 5.0205, |
|
"eval_samples_per_second": 112.936, |
|
"eval_steps_per_second": 14.142, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 24.89, |
|
"learning_rate": 7.512056737588653e-05, |
|
"loss": 0.0545, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 24.96, |
|
"learning_rate": 7.504964539007093e-05, |
|
"loss": 0.0184, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 25.04, |
|
"learning_rate": 7.497872340425532e-05, |
|
"loss": 0.1491, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 25.11, |
|
"learning_rate": 7.490780141843971e-05, |
|
"loss": 0.0862, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 25.18, |
|
"learning_rate": 7.483687943262412e-05, |
|
"loss": 0.0532, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 25.25, |
|
"learning_rate": 7.476595744680852e-05, |
|
"loss": 0.0437, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 25.32, |
|
"learning_rate": 7.469503546099291e-05, |
|
"loss": 0.0189, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 25.39, |
|
"learning_rate": 7.46241134751773e-05, |
|
"loss": 0.013, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 25.46, |
|
"learning_rate": 7.455319148936171e-05, |
|
"loss": 0.0178, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 25.53, |
|
"learning_rate": 7.44822695035461e-05, |
|
"loss": 0.0749, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 25.53, |
|
"eval_accuracy": 0.9858906525573192, |
|
"eval_loss": 0.05182398855686188, |
|
"eval_runtime": 4.88, |
|
"eval_samples_per_second": 116.187, |
|
"eval_steps_per_second": 14.549, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 25.6, |
|
"learning_rate": 7.44113475177305e-05, |
|
"loss": 0.0469, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 25.67, |
|
"learning_rate": 7.434042553191489e-05, |
|
"loss": 0.0346, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 25.74, |
|
"learning_rate": 7.42695035460993e-05, |
|
"loss": 0.0324, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 25.82, |
|
"learning_rate": 7.419858156028369e-05, |
|
"loss": 0.0021, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 25.89, |
|
"learning_rate": 7.412765957446809e-05, |
|
"loss": 0.0149, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 25.96, |
|
"learning_rate": 7.405673758865248e-05, |
|
"loss": 0.115, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 26.03, |
|
"learning_rate": 7.398581560283689e-05, |
|
"loss": 0.0316, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 26.1, |
|
"learning_rate": 7.391489361702128e-05, |
|
"loss": 0.0417, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 26.17, |
|
"learning_rate": 7.384397163120567e-05, |
|
"loss": 0.0943, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 26.24, |
|
"learning_rate": 7.377304964539008e-05, |
|
"loss": 0.0909, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 26.24, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.0322270467877388, |
|
"eval_runtime": 4.85, |
|
"eval_samples_per_second": 116.906, |
|
"eval_steps_per_second": 14.639, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 26.31, |
|
"learning_rate": 7.370212765957447e-05, |
|
"loss": 0.0028, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 26.38, |
|
"learning_rate": 7.363120567375887e-05, |
|
"loss": 0.0625, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 26.45, |
|
"learning_rate": 7.356028368794326e-05, |
|
"loss": 0.0526, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 26.52, |
|
"learning_rate": 7.348936170212767e-05, |
|
"loss": 0.0272, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 26.6, |
|
"learning_rate": 7.341843971631206e-05, |
|
"loss": 0.0791, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 26.67, |
|
"learning_rate": 7.334751773049647e-05, |
|
"loss": 0.0018, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 26.74, |
|
"learning_rate": 7.327659574468085e-05, |
|
"loss": 0.1004, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 26.81, |
|
"learning_rate": 7.320567375886526e-05, |
|
"loss": 0.0096, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 26.88, |
|
"learning_rate": 7.313475177304965e-05, |
|
"loss": 0.0094, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 26.95, |
|
"learning_rate": 7.306382978723406e-05, |
|
"loss": 0.0688, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 26.95, |
|
"eval_accuracy": 0.9876543209876543, |
|
"eval_loss": 0.0609063059091568, |
|
"eval_runtime": 4.94, |
|
"eval_samples_per_second": 114.776, |
|
"eval_steps_per_second": 14.372, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 27.02, |
|
"learning_rate": 7.299290780141844e-05, |
|
"loss": 0.0015, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 27.09, |
|
"learning_rate": 7.292198581560284e-05, |
|
"loss": 0.007, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 27.16, |
|
"learning_rate": 7.285106382978724e-05, |
|
"loss": 0.0021, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 27.23, |
|
"learning_rate": 7.278014184397164e-05, |
|
"loss": 0.0342, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 27.3, |
|
"learning_rate": 7.270921985815603e-05, |
|
"loss": 0.0551, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 27.38, |
|
"learning_rate": 7.263829787234043e-05, |
|
"loss": 0.0155, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 27.45, |
|
"learning_rate": 7.256737588652483e-05, |
|
"loss": 0.0038, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 27.52, |
|
"learning_rate": 7.249645390070923e-05, |
|
"loss": 0.0548, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 27.59, |
|
"learning_rate": 7.242553191489361e-05, |
|
"loss": 0.0674, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 27.66, |
|
"learning_rate": 7.235460992907802e-05, |
|
"loss": 0.0163, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 27.66, |
|
"eval_accuracy": 0.9700176366843033, |
|
"eval_loss": 0.12531960010528564, |
|
"eval_runtime": 4.8785, |
|
"eval_samples_per_second": 116.223, |
|
"eval_steps_per_second": 14.554, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 27.73, |
|
"learning_rate": 7.228368794326241e-05, |
|
"loss": 0.0656, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 27.8, |
|
"learning_rate": 7.221276595744682e-05, |
|
"loss": 0.0163, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 27.87, |
|
"learning_rate": 7.21418439716312e-05, |
|
"loss": 0.0037, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 27.94, |
|
"learning_rate": 7.207092198581561e-05, |
|
"loss": 0.0409, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 28.01, |
|
"learning_rate": 7.2e-05, |
|
"loss": 0.0033, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 28.09, |
|
"learning_rate": 7.192907801418441e-05, |
|
"loss": 0.026, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 28.16, |
|
"learning_rate": 7.185815602836879e-05, |
|
"loss": 0.0373, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 28.23, |
|
"learning_rate": 7.17872340425532e-05, |
|
"loss": 0.0323, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 28.3, |
|
"learning_rate": 7.171631205673759e-05, |
|
"loss": 0.0354, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 28.37, |
|
"learning_rate": 7.1645390070922e-05, |
|
"loss": 0.0458, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 28.37, |
|
"eval_accuracy": 0.9982363315696648, |
|
"eval_loss": 0.006850986275821924, |
|
"eval_runtime": 4.764, |
|
"eval_samples_per_second": 119.017, |
|
"eval_steps_per_second": 14.903, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 28.44, |
|
"learning_rate": 7.157446808510638e-05, |
|
"loss": 0.0027, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 28.51, |
|
"learning_rate": 7.150354609929078e-05, |
|
"loss": 0.1157, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 28.58, |
|
"learning_rate": 7.143262411347518e-05, |
|
"loss": 0.0919, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 28.65, |
|
"learning_rate": 7.136170212765958e-05, |
|
"loss": 0.0063, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 28.72, |
|
"learning_rate": 7.129078014184397e-05, |
|
"loss": 0.069, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 28.79, |
|
"learning_rate": 7.121985815602837e-05, |
|
"loss": 0.0034, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 28.87, |
|
"learning_rate": 7.114893617021277e-05, |
|
"loss": 0.0278, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 28.94, |
|
"learning_rate": 7.107801418439717e-05, |
|
"loss": 0.0084, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 29.01, |
|
"learning_rate": 7.100709219858155e-05, |
|
"loss": 0.012, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 29.08, |
|
"learning_rate": 7.093617021276596e-05, |
|
"loss": 0.0728, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 29.08, |
|
"eval_accuracy": 0.9894179894179894, |
|
"eval_loss": 0.03627217188477516, |
|
"eval_runtime": 4.911, |
|
"eval_samples_per_second": 115.454, |
|
"eval_steps_per_second": 14.457, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 29.15, |
|
"learning_rate": 7.086524822695035e-05, |
|
"loss": 0.0034, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 29.22, |
|
"learning_rate": 7.079432624113476e-05, |
|
"loss": 0.0015, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 29.29, |
|
"learning_rate": 7.072340425531914e-05, |
|
"loss": 0.0416, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 29.36, |
|
"learning_rate": 7.065248226950355e-05, |
|
"loss": 0.0031, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 29.43, |
|
"learning_rate": 7.058156028368794e-05, |
|
"loss": 0.0031, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 29.5, |
|
"learning_rate": 7.051063829787235e-05, |
|
"loss": 0.0256, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 29.57, |
|
"learning_rate": 7.043971631205674e-05, |
|
"loss": 0.001, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 29.65, |
|
"learning_rate": 7.036879432624114e-05, |
|
"loss": 0.0613, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 29.72, |
|
"learning_rate": 7.029787234042553e-05, |
|
"loss": 0.0031, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 29.79, |
|
"learning_rate": 7.022695035460994e-05, |
|
"loss": 0.1474, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 29.79, |
|
"eval_accuracy": 0.9894179894179894, |
|
"eval_loss": 0.08292630314826965, |
|
"eval_runtime": 4.8845, |
|
"eval_samples_per_second": 116.08, |
|
"eval_steps_per_second": 14.536, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 29.86, |
|
"learning_rate": 7.015602836879433e-05, |
|
"loss": 0.001, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 29.93, |
|
"learning_rate": 7.008510638297872e-05, |
|
"loss": 0.0717, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"learning_rate": 7.001418439716312e-05, |
|
"loss": 0.0246, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 30.07, |
|
"learning_rate": 6.994326241134753e-05, |
|
"loss": 0.0333, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 30.14, |
|
"learning_rate": 6.987234042553192e-05, |
|
"loss": 0.0012, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 30.21, |
|
"learning_rate": 6.980141843971631e-05, |
|
"loss": 0.0737, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 30.28, |
|
"learning_rate": 6.973049645390072e-05, |
|
"loss": 0.001, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 30.35, |
|
"learning_rate": 6.965957446808511e-05, |
|
"loss": 0.0071, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 30.43, |
|
"learning_rate": 6.95886524822695e-05, |
|
"loss": 0.0252, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 30.5, |
|
"learning_rate": 6.95177304964539e-05, |
|
"loss": 0.0011, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 30.5, |
|
"eval_accuracy": 0.9858906525573192, |
|
"eval_loss": 0.07146803289651871, |
|
"eval_runtime": 4.945, |
|
"eval_samples_per_second": 114.66, |
|
"eval_steps_per_second": 14.358, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 30.57, |
|
"learning_rate": 6.944680851063831e-05, |
|
"loss": 0.0327, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 30.64, |
|
"learning_rate": 6.93758865248227e-05, |
|
"loss": 0.021, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 30.71, |
|
"learning_rate": 6.93049645390071e-05, |
|
"loss": 0.0422, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 30.78, |
|
"learning_rate": 6.923404255319149e-05, |
|
"loss": 0.1148, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 30.85, |
|
"learning_rate": 6.91631205673759e-05, |
|
"loss": 0.0802, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 30.92, |
|
"learning_rate": 6.909219858156029e-05, |
|
"loss": 0.0624, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 30.99, |
|
"learning_rate": 6.902127659574468e-05, |
|
"loss": 0.0206, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 31.06, |
|
"learning_rate": 6.895035460992908e-05, |
|
"loss": 0.0265, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 31.13, |
|
"learning_rate": 6.887943262411348e-05, |
|
"loss": 0.0684, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 31.21, |
|
"learning_rate": 6.880851063829788e-05, |
|
"loss": 0.1199, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 31.21, |
|
"eval_accuracy": 0.9735449735449735, |
|
"eval_loss": 0.14859944581985474, |
|
"eval_runtime": 4.6535, |
|
"eval_samples_per_second": 121.843, |
|
"eval_steps_per_second": 15.257, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 31.28, |
|
"learning_rate": 6.873758865248227e-05, |
|
"loss": 0.1303, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 31.35, |
|
"learning_rate": 6.866666666666666e-05, |
|
"loss": 0.0416, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 31.42, |
|
"learning_rate": 6.859574468085107e-05, |
|
"loss": 0.0012, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 31.49, |
|
"learning_rate": 6.852482269503547e-05, |
|
"loss": 0.0821, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 31.56, |
|
"learning_rate": 6.845390070921986e-05, |
|
"loss": 0.0271, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 31.63, |
|
"learning_rate": 6.838297872340425e-05, |
|
"loss": 0.0103, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 31.7, |
|
"learning_rate": 6.831205673758866e-05, |
|
"loss": 0.0016, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 31.77, |
|
"learning_rate": 6.824113475177305e-05, |
|
"loss": 0.036, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 31.84, |
|
"learning_rate": 6.817021276595745e-05, |
|
"loss": 0.0018, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 31.91, |
|
"learning_rate": 6.809929078014184e-05, |
|
"loss": 0.003, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 31.91, |
|
"eval_accuracy": 0.9841269841269841, |
|
"eval_loss": 0.08768551796674728, |
|
"eval_runtime": 4.81, |
|
"eval_samples_per_second": 117.879, |
|
"eval_steps_per_second": 14.761, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 31.99, |
|
"learning_rate": 6.802836879432625e-05, |
|
"loss": 0.0019, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 32.06, |
|
"learning_rate": 6.795744680851064e-05, |
|
"loss": 0.0646, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 32.13, |
|
"learning_rate": 6.788652482269503e-05, |
|
"loss": 0.0018, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 32.2, |
|
"learning_rate": 6.781560283687943e-05, |
|
"loss": 0.0381, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 32.27, |
|
"learning_rate": 6.774468085106384e-05, |
|
"loss": 0.0053, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 32.34, |
|
"learning_rate": 6.767375886524823e-05, |
|
"loss": 0.0357, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 32.41, |
|
"learning_rate": 6.760283687943262e-05, |
|
"loss": 0.008, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 32.48, |
|
"learning_rate": 6.753191489361702e-05, |
|
"loss": 0.0378, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 32.55, |
|
"learning_rate": 6.746099290780142e-05, |
|
"loss": 0.001, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 32.62, |
|
"learning_rate": 6.739007092198582e-05, |
|
"loss": 0.0098, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 32.62, |
|
"eval_accuracy": 0.9858906525573192, |
|
"eval_loss": 0.06747796386480331, |
|
"eval_runtime": 4.862, |
|
"eval_samples_per_second": 116.618, |
|
"eval_steps_per_second": 14.603, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 32.7, |
|
"learning_rate": 6.731914893617022e-05, |
|
"loss": 0.0011, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 32.77, |
|
"learning_rate": 6.72482269503546e-05, |
|
"loss": 0.0434, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 32.84, |
|
"learning_rate": 6.717730496453901e-05, |
|
"loss": 0.0545, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 32.91, |
|
"learning_rate": 6.71063829787234e-05, |
|
"loss": 0.0032, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 32.98, |
|
"learning_rate": 6.703546099290781e-05, |
|
"loss": 0.0259, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 33.05, |
|
"learning_rate": 6.696453900709219e-05, |
|
"loss": 0.0068, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 33.12, |
|
"learning_rate": 6.68936170212766e-05, |
|
"loss": 0.0493, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 33.19, |
|
"learning_rate": 6.682269503546099e-05, |
|
"loss": 0.0999, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 33.26, |
|
"learning_rate": 6.67517730496454e-05, |
|
"loss": 0.0049, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 33.33, |
|
"learning_rate": 6.668085106382978e-05, |
|
"loss": 0.0028, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 33.33, |
|
"eval_accuracy": 0.9876543209876543, |
|
"eval_loss": 0.08373798429965973, |
|
"eval_runtime": 4.809, |
|
"eval_samples_per_second": 117.903, |
|
"eval_steps_per_second": 14.764, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 33.4, |
|
"learning_rate": 6.660992907801419e-05, |
|
"loss": 0.0795, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 33.48, |
|
"learning_rate": 6.653900709219858e-05, |
|
"loss": 0.0605, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 33.55, |
|
"learning_rate": 6.646808510638299e-05, |
|
"loss": 0.0758, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 33.62, |
|
"learning_rate": 6.639716312056737e-05, |
|
"loss": 0.0798, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 33.69, |
|
"learning_rate": 6.632624113475178e-05, |
|
"loss": 0.0036, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 33.76, |
|
"learning_rate": 6.625531914893617e-05, |
|
"loss": 0.0013, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 33.83, |
|
"learning_rate": 6.618439716312058e-05, |
|
"loss": 0.0289, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 33.9, |
|
"learning_rate": 6.611347517730497e-05, |
|
"loss": 0.0408, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 33.97, |
|
"learning_rate": 6.604255319148936e-05, |
|
"loss": 0.0031, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 34.04, |
|
"learning_rate": 6.597163120567377e-05, |
|
"loss": 0.0269, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 34.04, |
|
"eval_accuracy": 0.9805996472663139, |
|
"eval_loss": 0.08315479755401611, |
|
"eval_runtime": 4.926, |
|
"eval_samples_per_second": 115.103, |
|
"eval_steps_per_second": 14.413, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 34.11, |
|
"learning_rate": 6.590070921985816e-05, |
|
"loss": 0.0023, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 34.18, |
|
"learning_rate": 6.582978723404256e-05, |
|
"loss": 0.0211, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 34.26, |
|
"learning_rate": 6.575886524822695e-05, |
|
"loss": 0.0012, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 34.33, |
|
"learning_rate": 6.568794326241136e-05, |
|
"loss": 0.0007, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 34.4, |
|
"learning_rate": 6.561702127659575e-05, |
|
"loss": 0.0007, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 34.47, |
|
"learning_rate": 6.554609929078015e-05, |
|
"loss": 0.0007, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 34.54, |
|
"learning_rate": 6.547517730496454e-05, |
|
"loss": 0.038, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 34.61, |
|
"learning_rate": 6.540425531914895e-05, |
|
"loss": 0.0072, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 34.68, |
|
"learning_rate": 6.533333333333334e-05, |
|
"loss": 0.0007, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 34.75, |
|
"learning_rate": 6.526241134751773e-05, |
|
"loss": 0.0028, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 34.75, |
|
"eval_accuracy": 0.9876543209876543, |
|
"eval_loss": 0.06866313517093658, |
|
"eval_runtime": 4.82, |
|
"eval_samples_per_second": 117.634, |
|
"eval_steps_per_second": 14.73, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 34.82, |
|
"learning_rate": 6.519148936170213e-05, |
|
"loss": 0.0202, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 34.89, |
|
"learning_rate": 6.512056737588653e-05, |
|
"loss": 0.0592, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 34.96, |
|
"learning_rate": 6.504964539007093e-05, |
|
"loss": 0.0196, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 35.04, |
|
"learning_rate": 6.497872340425532e-05, |
|
"loss": 0.0336, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 35.11, |
|
"learning_rate": 6.490780141843972e-05, |
|
"loss": 0.0212, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 35.18, |
|
"learning_rate": 6.483687943262412e-05, |
|
"loss": 0.0013, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 35.25, |
|
"learning_rate": 6.476595744680852e-05, |
|
"loss": 0.0024, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 35.32, |
|
"learning_rate": 6.469503546099291e-05, |
|
"loss": 0.0427, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 35.39, |
|
"learning_rate": 6.46241134751773e-05, |
|
"loss": 0.0008, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 35.46, |
|
"learning_rate": 6.455319148936171e-05, |
|
"loss": 0.0016, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 35.46, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.0424884632229805, |
|
"eval_runtime": 4.8085, |
|
"eval_samples_per_second": 117.915, |
|
"eval_steps_per_second": 14.765, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 35.53, |
|
"learning_rate": 6.44822695035461e-05, |
|
"loss": 0.0147, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 35.6, |
|
"learning_rate": 6.44113475177305e-05, |
|
"loss": 0.0009, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 35.67, |
|
"learning_rate": 6.434042553191489e-05, |
|
"loss": 0.0012, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 35.74, |
|
"learning_rate": 6.42695035460993e-05, |
|
"loss": 0.0008, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 35.82, |
|
"learning_rate": 6.419858156028369e-05, |
|
"loss": 0.001, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 35.89, |
|
"learning_rate": 6.412765957446809e-05, |
|
"loss": 0.0712, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 35.96, |
|
"learning_rate": 6.405673758865248e-05, |
|
"loss": 0.076, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 36.03, |
|
"learning_rate": 6.398581560283689e-05, |
|
"loss": 0.0092, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 36.1, |
|
"learning_rate": 6.391489361702128e-05, |
|
"loss": 0.1197, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 36.17, |
|
"learning_rate": 6.384397163120567e-05, |
|
"loss": 0.0262, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 36.17, |
|
"eval_accuracy": 0.9841269841269841, |
|
"eval_loss": 0.08268722891807556, |
|
"eval_runtime": 4.865, |
|
"eval_samples_per_second": 116.546, |
|
"eval_steps_per_second": 14.594, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 36.24, |
|
"learning_rate": 6.377304964539007e-05, |
|
"loss": 0.0717, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 36.31, |
|
"learning_rate": 6.370212765957447e-05, |
|
"loss": 0.0486, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 36.38, |
|
"learning_rate": 6.363120567375887e-05, |
|
"loss": 0.049, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 36.45, |
|
"learning_rate": 6.356737588652482e-05, |
|
"loss": 0.0872, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 36.52, |
|
"learning_rate": 6.349645390070922e-05, |
|
"loss": 0.0394, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 36.6, |
|
"learning_rate": 6.342553191489362e-05, |
|
"loss": 0.0489, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 36.67, |
|
"learning_rate": 6.335460992907802e-05, |
|
"loss": 0.0121, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 36.74, |
|
"learning_rate": 6.328368794326241e-05, |
|
"loss": 0.0255, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 36.81, |
|
"learning_rate": 6.32127659574468e-05, |
|
"loss": 0.0584, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 36.88, |
|
"learning_rate": 6.314184397163121e-05, |
|
"loss": 0.0397, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 36.88, |
|
"eval_accuracy": 0.9753086419753086, |
|
"eval_loss": 0.15599842369556427, |
|
"eval_runtime": 4.811, |
|
"eval_samples_per_second": 117.854, |
|
"eval_steps_per_second": 14.758, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 36.95, |
|
"learning_rate": 6.30709219858156e-05, |
|
"loss": 0.0908, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 37.02, |
|
"learning_rate": 6.3e-05, |
|
"loss": 0.0488, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 37.09, |
|
"learning_rate": 6.29290780141844e-05, |
|
"loss": 0.0014, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 37.16, |
|
"learning_rate": 6.28581560283688e-05, |
|
"loss": 0.003, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 37.23, |
|
"learning_rate": 6.27872340425532e-05, |
|
"loss": 0.0039, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 37.3, |
|
"learning_rate": 6.271631205673759e-05, |
|
"loss": 0.0154, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 37.38, |
|
"learning_rate": 6.2645390070922e-05, |
|
"loss": 0.0412, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 37.45, |
|
"learning_rate": 6.257446808510639e-05, |
|
"loss": 0.0007, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 37.52, |
|
"learning_rate": 6.250354609929078e-05, |
|
"loss": 0.0178, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 37.59, |
|
"learning_rate": 6.243262411347518e-05, |
|
"loss": 0.0204, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 37.59, |
|
"eval_accuracy": 0.9876543209876543, |
|
"eval_loss": 0.07057899981737137, |
|
"eval_runtime": 4.7545, |
|
"eval_samples_per_second": 119.254, |
|
"eval_steps_per_second": 14.933, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 37.66, |
|
"learning_rate": 6.236170212765958e-05, |
|
"loss": 0.0513, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 37.73, |
|
"learning_rate": 6.229078014184398e-05, |
|
"loss": 0.0071, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 37.8, |
|
"learning_rate": 6.221985815602837e-05, |
|
"loss": 0.0691, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 37.87, |
|
"learning_rate": 6.214893617021276e-05, |
|
"loss": 0.0135, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 37.94, |
|
"learning_rate": 6.207801418439717e-05, |
|
"loss": 0.0033, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 38.01, |
|
"learning_rate": 6.200709219858156e-05, |
|
"loss": 0.0695, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 38.09, |
|
"learning_rate": 6.193617021276596e-05, |
|
"loss": 0.064, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 38.16, |
|
"learning_rate": 6.186524822695035e-05, |
|
"loss": 0.0013, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 38.23, |
|
"learning_rate": 6.179432624113476e-05, |
|
"loss": 0.0264, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 38.3, |
|
"learning_rate": 6.172340425531915e-05, |
|
"loss": 0.0146, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 38.3, |
|
"eval_accuracy": 0.9858906525573192, |
|
"eval_loss": 0.06010299175977707, |
|
"eval_runtime": 4.902, |
|
"eval_samples_per_second": 115.666, |
|
"eval_steps_per_second": 14.484, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 38.37, |
|
"learning_rate": 6.165248226950356e-05, |
|
"loss": 0.0185, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 38.44, |
|
"learning_rate": 6.158156028368794e-05, |
|
"loss": 0.0145, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 38.51, |
|
"learning_rate": 6.151063829787235e-05, |
|
"loss": 0.003, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 38.58, |
|
"learning_rate": 6.143971631205674e-05, |
|
"loss": 0.0207, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 38.65, |
|
"learning_rate": 6.136879432624115e-05, |
|
"loss": 0.1072, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 38.72, |
|
"learning_rate": 6.129787234042553e-05, |
|
"loss": 0.0283, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 38.79, |
|
"learning_rate": 6.122695035460994e-05, |
|
"loss": 0.0006, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 38.87, |
|
"learning_rate": 6.115602836879433e-05, |
|
"loss": 0.0007, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 38.94, |
|
"learning_rate": 6.108510638297874e-05, |
|
"loss": 0.0094, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 39.01, |
|
"learning_rate": 6.101418439716312e-05, |
|
"loss": 0.0288, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 39.01, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.03392454981803894, |
|
"eval_runtime": 4.849, |
|
"eval_samples_per_second": 116.93, |
|
"eval_steps_per_second": 14.642, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 39.08, |
|
"learning_rate": 6.094326241134752e-05, |
|
"loss": 0.0007, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 39.15, |
|
"learning_rate": 6.087234042553192e-05, |
|
"loss": 0.001, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 39.22, |
|
"learning_rate": 6.080141843971632e-05, |
|
"loss": 0.0709, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 39.29, |
|
"learning_rate": 6.073049645390071e-05, |
|
"loss": 0.0006, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 39.36, |
|
"learning_rate": 6.065957446808511e-05, |
|
"loss": 0.0255, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 39.43, |
|
"learning_rate": 6.0588652482269505e-05, |
|
"loss": 0.0448, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 39.5, |
|
"learning_rate": 6.0517730496453905e-05, |
|
"loss": 0.0056, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 39.57, |
|
"learning_rate": 6.04468085106383e-05, |
|
"loss": 0.0006, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 39.65, |
|
"learning_rate": 6.03758865248227e-05, |
|
"loss": 0.0006, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 39.72, |
|
"learning_rate": 6.030496453900709e-05, |
|
"loss": 0.0009, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 39.72, |
|
"eval_accuracy": 0.9911816578483245, |
|
"eval_loss": 0.05390332266688347, |
|
"eval_runtime": 4.9225, |
|
"eval_samples_per_second": 115.184, |
|
"eval_steps_per_second": 14.423, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 39.79, |
|
"learning_rate": 6.023404255319149e-05, |
|
"loss": 0.0287, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 39.86, |
|
"learning_rate": 6.016312056737589e-05, |
|
"loss": 0.0005, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 39.93, |
|
"learning_rate": 6.009219858156029e-05, |
|
"loss": 0.0243, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"learning_rate": 6.002127659574468e-05, |
|
"loss": 0.0007, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 40.07, |
|
"learning_rate": 5.995035460992908e-05, |
|
"loss": 0.0372, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 40.14, |
|
"learning_rate": 5.9879432624113475e-05, |
|
"loss": 0.0366, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 40.21, |
|
"learning_rate": 5.9808510638297875e-05, |
|
"loss": 0.0693, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 40.28, |
|
"learning_rate": 5.973758865248227e-05, |
|
"loss": 0.0025, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 40.35, |
|
"learning_rate": 5.966666666666667e-05, |
|
"loss": 0.0091, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 40.43, |
|
"learning_rate": 5.959574468085106e-05, |
|
"loss": 0.0492, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 40.43, |
|
"eval_accuracy": 0.9876543209876543, |
|
"eval_loss": 0.04930723458528519, |
|
"eval_runtime": 4.869, |
|
"eval_samples_per_second": 116.45, |
|
"eval_steps_per_second": 14.582, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 40.5, |
|
"learning_rate": 5.952482269503546e-05, |
|
"loss": 0.0008, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 40.57, |
|
"learning_rate": 5.945390070921986e-05, |
|
"loss": 0.0086, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 40.64, |
|
"learning_rate": 5.938297872340426e-05, |
|
"loss": 0.0328, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 40.71, |
|
"learning_rate": 5.931205673758865e-05, |
|
"loss": 0.0011, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 40.78, |
|
"learning_rate": 5.924113475177305e-05, |
|
"loss": 0.0077, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 40.85, |
|
"learning_rate": 5.9170212765957445e-05, |
|
"loss": 0.0641, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 40.92, |
|
"learning_rate": 5.9099290780141845e-05, |
|
"loss": 0.0005, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 40.99, |
|
"learning_rate": 5.902836879432624e-05, |
|
"loss": 0.0007, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 41.06, |
|
"learning_rate": 5.895744680851064e-05, |
|
"loss": 0.0722, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 41.13, |
|
"learning_rate": 5.888652482269503e-05, |
|
"loss": 0.0275, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 41.13, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.03137438744306564, |
|
"eval_runtime": 4.7505, |
|
"eval_samples_per_second": 119.355, |
|
"eval_steps_per_second": 14.946, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 41.21, |
|
"learning_rate": 5.881560283687943e-05, |
|
"loss": 0.0332, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 41.28, |
|
"learning_rate": 5.874468085106384e-05, |
|
"loss": 0.0467, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 41.35, |
|
"learning_rate": 5.867375886524823e-05, |
|
"loss": 0.0108, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 41.42, |
|
"learning_rate": 5.8602836879432634e-05, |
|
"loss": 0.0548, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 41.49, |
|
"learning_rate": 5.853191489361702e-05, |
|
"loss": 0.0569, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 41.56, |
|
"learning_rate": 5.846099290780143e-05, |
|
"loss": 0.0009, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 41.63, |
|
"learning_rate": 5.8390070921985815e-05, |
|
"loss": 0.0383, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 41.7, |
|
"learning_rate": 5.831914893617022e-05, |
|
"loss": 0.0294, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 41.77, |
|
"learning_rate": 5.824822695035461e-05, |
|
"loss": 0.014, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 41.84, |
|
"learning_rate": 5.8177304964539016e-05, |
|
"loss": 0.0192, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 41.84, |
|
"eval_accuracy": 0.9805996472663139, |
|
"eval_loss": 0.1111595556139946, |
|
"eval_runtime": 4.7365, |
|
"eval_samples_per_second": 119.708, |
|
"eval_steps_per_second": 14.99, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 41.91, |
|
"learning_rate": 5.81063829787234e-05, |
|
"loss": 0.0577, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 41.99, |
|
"learning_rate": 5.803546099290781e-05, |
|
"loss": 0.0911, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 42.06, |
|
"learning_rate": 5.79645390070922e-05, |
|
"loss": 0.0007, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 42.13, |
|
"learning_rate": 5.7893617021276604e-05, |
|
"loss": 0.0281, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 42.2, |
|
"learning_rate": 5.782269503546099e-05, |
|
"loss": 0.0429, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 42.27, |
|
"learning_rate": 5.77517730496454e-05, |
|
"loss": 0.0224, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 42.34, |
|
"learning_rate": 5.7680851063829785e-05, |
|
"loss": 0.0216, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 42.41, |
|
"learning_rate": 5.760992907801419e-05, |
|
"loss": 0.0015, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 42.48, |
|
"learning_rate": 5.753900709219858e-05, |
|
"loss": 0.0052, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 42.55, |
|
"learning_rate": 5.7468085106382986e-05, |
|
"loss": 0.0013, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 42.55, |
|
"eval_accuracy": 0.9894179894179894, |
|
"eval_loss": 0.06109577417373657, |
|
"eval_runtime": 4.7055, |
|
"eval_samples_per_second": 120.496, |
|
"eval_steps_per_second": 15.089, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 42.62, |
|
"learning_rate": 5.739716312056738e-05, |
|
"loss": 0.0009, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 42.7, |
|
"learning_rate": 5.732624113475178e-05, |
|
"loss": 0.0016, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 42.77, |
|
"learning_rate": 5.7255319148936174e-05, |
|
"loss": 0.0014, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 42.84, |
|
"learning_rate": 5.7184397163120574e-05, |
|
"loss": 0.0007, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 42.91, |
|
"learning_rate": 5.711347517730497e-05, |
|
"loss": 0.0062, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 42.98, |
|
"learning_rate": 5.704255319148937e-05, |
|
"loss": 0.0006, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 43.05, |
|
"learning_rate": 5.697163120567376e-05, |
|
"loss": 0.0023, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 43.12, |
|
"learning_rate": 5.690070921985816e-05, |
|
"loss": 0.0004, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 43.19, |
|
"learning_rate": 5.6829787234042556e-05, |
|
"loss": 0.0005, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 43.26, |
|
"learning_rate": 5.6758865248226956e-05, |
|
"loss": 0.0208, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 43.26, |
|
"eval_accuracy": 0.9947089947089947, |
|
"eval_loss": 0.013094047084450722, |
|
"eval_runtime": 4.8125, |
|
"eval_samples_per_second": 117.817, |
|
"eval_steps_per_second": 14.753, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 43.33, |
|
"learning_rate": 5.668794326241135e-05, |
|
"loss": 0.0005, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 43.4, |
|
"learning_rate": 5.661702127659575e-05, |
|
"loss": 0.0007, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 43.48, |
|
"learning_rate": 5.6546099290780144e-05, |
|
"loss": 0.0018, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 43.55, |
|
"learning_rate": 5.6475177304964544e-05, |
|
"loss": 0.0005, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 43.62, |
|
"learning_rate": 5.640425531914894e-05, |
|
"loss": 0.0405, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 43.69, |
|
"learning_rate": 5.633333333333334e-05, |
|
"loss": 0.0089, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 43.76, |
|
"learning_rate": 5.626241134751773e-05, |
|
"loss": 0.0127, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 43.83, |
|
"learning_rate": 5.619148936170213e-05, |
|
"loss": 0.0506, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 43.9, |
|
"learning_rate": 5.6120567375886526e-05, |
|
"loss": 0.0835, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 43.97, |
|
"learning_rate": 5.6049645390070926e-05, |
|
"loss": 0.0018, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 43.97, |
|
"eval_accuracy": 0.9805996472663139, |
|
"eval_loss": 0.09741748869419098, |
|
"eval_runtime": 4.787, |
|
"eval_samples_per_second": 118.445, |
|
"eval_steps_per_second": 14.832, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 44.04, |
|
"learning_rate": 5.597872340425532e-05, |
|
"loss": 0.031, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 44.11, |
|
"learning_rate": 5.590780141843972e-05, |
|
"loss": 0.0296, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 44.18, |
|
"learning_rate": 5.5836879432624114e-05, |
|
"loss": 0.0049, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 44.26, |
|
"learning_rate": 5.5765957446808514e-05, |
|
"loss": 0.0814, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 44.33, |
|
"learning_rate": 5.569503546099291e-05, |
|
"loss": 0.0165, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 44.4, |
|
"learning_rate": 5.562411347517731e-05, |
|
"loss": 0.0414, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 44.47, |
|
"learning_rate": 5.55531914893617e-05, |
|
"loss": 0.0137, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 44.54, |
|
"learning_rate": 5.54822695035461e-05, |
|
"loss": 0.0005, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 44.61, |
|
"learning_rate": 5.5411347517730496e-05, |
|
"loss": 0.011, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 44.68, |
|
"learning_rate": 5.5340425531914896e-05, |
|
"loss": 0.0285, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 44.68, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.008973962627351284, |
|
"eval_runtime": 4.7045, |
|
"eval_samples_per_second": 120.522, |
|
"eval_steps_per_second": 15.092, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 44.75, |
|
"learning_rate": 5.526950354609929e-05, |
|
"loss": 0.0006, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 44.82, |
|
"learning_rate": 5.519858156028369e-05, |
|
"loss": 0.0011, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 44.89, |
|
"learning_rate": 5.5127659574468084e-05, |
|
"loss": 0.0005, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 44.96, |
|
"learning_rate": 5.5056737588652484e-05, |
|
"loss": 0.0027, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 45.04, |
|
"learning_rate": 5.4985815602836885e-05, |
|
"loss": 0.0155, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 45.11, |
|
"learning_rate": 5.491489361702128e-05, |
|
"loss": 0.0139, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 45.18, |
|
"learning_rate": 5.484397163120568e-05, |
|
"loss": 0.0019, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 45.25, |
|
"learning_rate": 5.477304964539007e-05, |
|
"loss": 0.084, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 45.32, |
|
"learning_rate": 5.470212765957447e-05, |
|
"loss": 0.0038, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 45.39, |
|
"learning_rate": 5.4631205673758866e-05, |
|
"loss": 0.0365, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 45.39, |
|
"eval_accuracy": 0.9876543209876543, |
|
"eval_loss": 0.040918827056884766, |
|
"eval_runtime": 4.769, |
|
"eval_samples_per_second": 118.892, |
|
"eval_steps_per_second": 14.888, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 45.46, |
|
"learning_rate": 5.456028368794327e-05, |
|
"loss": 0.0014, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 45.53, |
|
"learning_rate": 5.448936170212766e-05, |
|
"loss": 0.0566, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 45.6, |
|
"learning_rate": 5.441843971631206e-05, |
|
"loss": 0.0243, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 45.67, |
|
"learning_rate": 5.4347517730496454e-05, |
|
"loss": 0.0018, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 45.74, |
|
"learning_rate": 5.427659574468086e-05, |
|
"loss": 0.1125, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 45.82, |
|
"learning_rate": 5.420567375886525e-05, |
|
"loss": 0.0053, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 45.89, |
|
"learning_rate": 5.4134751773049656e-05, |
|
"loss": 0.0143, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 45.96, |
|
"learning_rate": 5.406382978723404e-05, |
|
"loss": 0.0023, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 46.03, |
|
"learning_rate": 5.399290780141845e-05, |
|
"loss": 0.0005, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 46.1, |
|
"learning_rate": 5.3921985815602836e-05, |
|
"loss": 0.0007, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 46.1, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.04250045865774155, |
|
"eval_runtime": 4.579, |
|
"eval_samples_per_second": 123.825, |
|
"eval_steps_per_second": 15.505, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 46.17, |
|
"learning_rate": 5.3851063829787244e-05, |
|
"loss": 0.0431, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 46.24, |
|
"learning_rate": 5.378014184397163e-05, |
|
"loss": 0.0006, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 46.31, |
|
"learning_rate": 5.370921985815604e-05, |
|
"loss": 0.0006, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 46.38, |
|
"learning_rate": 5.3638297872340424e-05, |
|
"loss": 0.0007, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 46.45, |
|
"learning_rate": 5.356737588652483e-05, |
|
"loss": 0.041, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 46.52, |
|
"learning_rate": 5.349645390070922e-05, |
|
"loss": 0.0242, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 46.6, |
|
"learning_rate": 5.3425531914893626e-05, |
|
"loss": 0.041, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 46.67, |
|
"learning_rate": 5.335460992907801e-05, |
|
"loss": 0.0233, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 46.74, |
|
"learning_rate": 5.328368794326242e-05, |
|
"loss": 0.0155, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 46.81, |
|
"learning_rate": 5.3212765957446806e-05, |
|
"loss": 0.0094, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 46.81, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.04256556183099747, |
|
"eval_runtime": 4.876, |
|
"eval_samples_per_second": 116.283, |
|
"eval_steps_per_second": 14.561, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 46.88, |
|
"learning_rate": 5.3141843971631214e-05, |
|
"loss": 0.0216, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 46.95, |
|
"learning_rate": 5.30709219858156e-05, |
|
"loss": 0.0003, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 47.02, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 0.0089, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 47.09, |
|
"learning_rate": 5.2929078014184394e-05, |
|
"loss": 0.0327, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 47.16, |
|
"learning_rate": 5.28581560283688e-05, |
|
"loss": 0.0008, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 47.23, |
|
"learning_rate": 5.278723404255319e-05, |
|
"loss": 0.1177, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 47.3, |
|
"learning_rate": 5.2716312056737596e-05, |
|
"loss": 0.0005, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 47.38, |
|
"learning_rate": 5.264539007092198e-05, |
|
"loss": 0.0582, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 47.45, |
|
"learning_rate": 5.257446808510639e-05, |
|
"loss": 0.0611, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 47.52, |
|
"learning_rate": 5.2503546099290776e-05, |
|
"loss": 0.0278, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 47.52, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.02249726839363575, |
|
"eval_runtime": 4.833, |
|
"eval_samples_per_second": 117.317, |
|
"eval_steps_per_second": 14.691, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 47.59, |
|
"learning_rate": 5.2432624113475184e-05, |
|
"loss": 0.0208, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 47.66, |
|
"learning_rate": 5.236170212765957e-05, |
|
"loss": 0.0485, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 47.73, |
|
"learning_rate": 5.229078014184398e-05, |
|
"loss": 0.0007, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 47.8, |
|
"learning_rate": 5.2219858156028364e-05, |
|
"loss": 0.0082, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 47.87, |
|
"learning_rate": 5.214893617021277e-05, |
|
"loss": 0.0151, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 47.94, |
|
"learning_rate": 5.207801418439716e-05, |
|
"loss": 0.0006, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 48.01, |
|
"learning_rate": 5.2007092198581566e-05, |
|
"loss": 0.0007, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 48.09, |
|
"learning_rate": 5.193617021276595e-05, |
|
"loss": 0.0211, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 48.16, |
|
"learning_rate": 5.186524822695036e-05, |
|
"loss": 0.0202, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 48.23, |
|
"learning_rate": 5.1794326241134746e-05, |
|
"loss": 0.0062, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 48.23, |
|
"eval_accuracy": 0.9947089947089947, |
|
"eval_loss": 0.031184891238808632, |
|
"eval_runtime": 4.7515, |
|
"eval_samples_per_second": 119.33, |
|
"eval_steps_per_second": 14.943, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 48.3, |
|
"learning_rate": 5.1723404255319154e-05, |
|
"loss": 0.0005, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 48.37, |
|
"learning_rate": 5.165248226950354e-05, |
|
"loss": 0.0538, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 48.44, |
|
"learning_rate": 5.158156028368795e-05, |
|
"loss": 0.0006, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 48.51, |
|
"learning_rate": 5.151063829787234e-05, |
|
"loss": 0.023, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 48.58, |
|
"learning_rate": 5.143971631205674e-05, |
|
"loss": 0.0009, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 48.65, |
|
"learning_rate": 5.1368794326241135e-05, |
|
"loss": 0.0005, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 48.72, |
|
"learning_rate": 5.1297872340425536e-05, |
|
"loss": 0.0025, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 48.79, |
|
"learning_rate": 5.122695035460993e-05, |
|
"loss": 0.0353, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 48.87, |
|
"learning_rate": 5.115602836879433e-05, |
|
"loss": 0.0091, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 48.94, |
|
"learning_rate": 5.108510638297873e-05, |
|
"loss": 0.0128, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 48.94, |
|
"eval_accuracy": 0.9911816578483245, |
|
"eval_loss": 0.029220430180430412, |
|
"eval_runtime": 4.8855, |
|
"eval_samples_per_second": 116.057, |
|
"eval_steps_per_second": 14.533, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 49.01, |
|
"learning_rate": 5.1014184397163124e-05, |
|
"loss": 0.0011, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 49.08, |
|
"learning_rate": 5.0943262411347524e-05, |
|
"loss": 0.0494, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 49.15, |
|
"learning_rate": 5.087234042553192e-05, |
|
"loss": 0.0007, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 49.22, |
|
"learning_rate": 5.080141843971632e-05, |
|
"loss": 0.0325, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 49.29, |
|
"learning_rate": 5.073049645390071e-05, |
|
"loss": 0.0009, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 49.36, |
|
"learning_rate": 5.065957446808511e-05, |
|
"loss": 0.0564, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 49.43, |
|
"learning_rate": 5.0588652482269506e-05, |
|
"loss": 0.0004, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 49.5, |
|
"learning_rate": 5.0517730496453906e-05, |
|
"loss": 0.0344, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 49.57, |
|
"learning_rate": 5.04468085106383e-05, |
|
"loss": 0.0244, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 49.65, |
|
"learning_rate": 5.03758865248227e-05, |
|
"loss": 0.0559, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 49.65, |
|
"eval_accuracy": 0.9911816578483245, |
|
"eval_loss": 0.04575134441256523, |
|
"eval_runtime": 4.7935, |
|
"eval_samples_per_second": 118.284, |
|
"eval_steps_per_second": 14.812, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 49.72, |
|
"learning_rate": 5.0304964539007094e-05, |
|
"loss": 0.0502, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 49.79, |
|
"learning_rate": 5.0234042553191494e-05, |
|
"loss": 0.0074, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 49.86, |
|
"learning_rate": 5.016312056737589e-05, |
|
"loss": 0.0005, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 49.93, |
|
"learning_rate": 5.009219858156029e-05, |
|
"loss": 0.001, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"learning_rate": 5.002127659574468e-05, |
|
"loss": 0.0151, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 50.07, |
|
"learning_rate": 4.995035460992908e-05, |
|
"loss": 0.0018, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 50.14, |
|
"learning_rate": 4.9879432624113476e-05, |
|
"loss": 0.0278, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 50.21, |
|
"learning_rate": 4.9808510638297876e-05, |
|
"loss": 0.0346, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 50.28, |
|
"learning_rate": 4.973758865248227e-05, |
|
"loss": 0.0004, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 50.35, |
|
"learning_rate": 4.966666666666667e-05, |
|
"loss": 0.0042, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 50.35, |
|
"eval_accuracy": 0.982363315696649, |
|
"eval_loss": 0.063356913626194, |
|
"eval_runtime": 4.855, |
|
"eval_samples_per_second": 116.786, |
|
"eval_steps_per_second": 14.624, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 50.43, |
|
"learning_rate": 4.9595744680851064e-05, |
|
"loss": 0.0006, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 50.5, |
|
"learning_rate": 4.9524822695035464e-05, |
|
"loss": 0.0005, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 50.57, |
|
"learning_rate": 4.945390070921986e-05, |
|
"loss": 0.0004, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 50.64, |
|
"learning_rate": 4.938297872340426e-05, |
|
"loss": 0.0555, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 50.71, |
|
"learning_rate": 4.931205673758865e-05, |
|
"loss": 0.0236, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 50.78, |
|
"learning_rate": 4.924113475177305e-05, |
|
"loss": 0.0131, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 50.85, |
|
"learning_rate": 4.917021276595745e-05, |
|
"loss": 0.0021, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 50.92, |
|
"learning_rate": 4.9099290780141846e-05, |
|
"loss": 0.0013, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 50.99, |
|
"learning_rate": 4.9028368794326246e-05, |
|
"loss": 0.0007, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 51.06, |
|
"learning_rate": 4.895744680851064e-05, |
|
"loss": 0.0005, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 51.06, |
|
"eval_accuracy": 1.0, |
|
"eval_loss": 0.0010965272085741162, |
|
"eval_runtime": 4.691, |
|
"eval_samples_per_second": 120.869, |
|
"eval_steps_per_second": 15.135, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 51.13, |
|
"learning_rate": 4.888652482269504e-05, |
|
"loss": 0.0005, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 51.21, |
|
"learning_rate": 4.8815602836879434e-05, |
|
"loss": 0.0003, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 51.28, |
|
"learning_rate": 4.8744680851063834e-05, |
|
"loss": 0.0003, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 51.35, |
|
"learning_rate": 4.867375886524823e-05, |
|
"loss": 0.0004, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 51.42, |
|
"learning_rate": 4.860283687943263e-05, |
|
"loss": 0.0111, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 51.49, |
|
"learning_rate": 4.853191489361702e-05, |
|
"loss": 0.0003, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 51.56, |
|
"learning_rate": 4.846099290780142e-05, |
|
"loss": 0.0562, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 51.63, |
|
"learning_rate": 4.839007092198582e-05, |
|
"loss": 0.0004, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 51.7, |
|
"learning_rate": 4.8319148936170216e-05, |
|
"loss": 0.0003, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 51.77, |
|
"learning_rate": 4.824822695035462e-05, |
|
"loss": 0.0003, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 51.77, |
|
"eval_accuracy": 0.9947089947089947, |
|
"eval_loss": 0.030498141422867775, |
|
"eval_runtime": 4.635, |
|
"eval_samples_per_second": 122.329, |
|
"eval_steps_per_second": 15.318, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 51.84, |
|
"learning_rate": 4.817730496453901e-05, |
|
"loss": 0.0049, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 51.91, |
|
"learning_rate": 4.810638297872341e-05, |
|
"loss": 0.0071, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 51.99, |
|
"learning_rate": 4.8035460992907804e-05, |
|
"loss": 0.0004, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 52.06, |
|
"learning_rate": 4.7964539007092205e-05, |
|
"loss": 0.0004, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 52.13, |
|
"learning_rate": 4.78936170212766e-05, |
|
"loss": 0.0237, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 52.2, |
|
"learning_rate": 4.7822695035461e-05, |
|
"loss": 0.0005, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 52.27, |
|
"learning_rate": 4.775177304964539e-05, |
|
"loss": 0.0036, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 52.34, |
|
"learning_rate": 4.768085106382979e-05, |
|
"loss": 0.0502, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 52.41, |
|
"learning_rate": 4.7609929078014186e-05, |
|
"loss": 0.0133, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 52.48, |
|
"learning_rate": 4.753900709219859e-05, |
|
"loss": 0.0003, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 52.48, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.044626634567976, |
|
"eval_runtime": 4.8305, |
|
"eval_samples_per_second": 117.378, |
|
"eval_steps_per_second": 14.698, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 52.55, |
|
"learning_rate": 4.746808510638298e-05, |
|
"loss": 0.0003, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 52.62, |
|
"learning_rate": 4.739716312056738e-05, |
|
"loss": 0.0175, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 52.7, |
|
"learning_rate": 4.7326241134751774e-05, |
|
"loss": 0.0013, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 52.77, |
|
"learning_rate": 4.7255319148936175e-05, |
|
"loss": 0.0581, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 52.84, |
|
"learning_rate": 4.718439716312057e-05, |
|
"loss": 0.0116, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 52.91, |
|
"learning_rate": 4.711347517730497e-05, |
|
"loss": 0.0003, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 52.98, |
|
"learning_rate": 4.704255319148936e-05, |
|
"loss": 0.0054, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 53.05, |
|
"learning_rate": 4.697163120567376e-05, |
|
"loss": 0.0429, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 53.12, |
|
"learning_rate": 4.6900709219858156e-05, |
|
"loss": 0.0294, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 53.19, |
|
"learning_rate": 4.682978723404256e-05, |
|
"loss": 0.0032, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 53.19, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.046005308628082275, |
|
"eval_runtime": 4.868, |
|
"eval_samples_per_second": 116.474, |
|
"eval_steps_per_second": 14.585, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 53.26, |
|
"learning_rate": 4.675886524822695e-05, |
|
"loss": 0.0003, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 53.33, |
|
"learning_rate": 4.668794326241135e-05, |
|
"loss": 0.0505, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 53.4, |
|
"learning_rate": 4.6617021276595744e-05, |
|
"loss": 0.0034, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 53.48, |
|
"learning_rate": 4.6546099290780145e-05, |
|
"loss": 0.0102, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 53.55, |
|
"learning_rate": 4.647517730496454e-05, |
|
"loss": 0.0003, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 53.62, |
|
"learning_rate": 4.640425531914894e-05, |
|
"loss": 0.0101, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 53.69, |
|
"learning_rate": 4.633333333333333e-05, |
|
"loss": 0.0705, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 53.76, |
|
"learning_rate": 4.626241134751773e-05, |
|
"loss": 0.0007, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 53.83, |
|
"learning_rate": 4.6191489361702126e-05, |
|
"loss": 0.0875, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 53.9, |
|
"learning_rate": 4.612056737588653e-05, |
|
"loss": 0.0893, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 53.9, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.03069092519581318, |
|
"eval_runtime": 4.635, |
|
"eval_samples_per_second": 122.329, |
|
"eval_steps_per_second": 15.318, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 53.97, |
|
"learning_rate": 4.604964539007092e-05, |
|
"loss": 0.0006, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 54.04, |
|
"learning_rate": 4.597872340425532e-05, |
|
"loss": 0.0604, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 54.11, |
|
"learning_rate": 4.5907801418439714e-05, |
|
"loss": 0.0007, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 54.18, |
|
"learning_rate": 4.5836879432624115e-05, |
|
"loss": 0.0064, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 54.26, |
|
"learning_rate": 4.576595744680851e-05, |
|
"loss": 0.0005, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 54.33, |
|
"learning_rate": 4.569503546099291e-05, |
|
"loss": 0.0195, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 54.4, |
|
"learning_rate": 4.56241134751773e-05, |
|
"loss": 0.0255, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 54.47, |
|
"learning_rate": 4.55531914893617e-05, |
|
"loss": 0.0018, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 54.54, |
|
"learning_rate": 4.5482269503546096e-05, |
|
"loss": 0.0747, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 54.61, |
|
"learning_rate": 4.54113475177305e-05, |
|
"loss": 0.0506, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 54.61, |
|
"eval_accuracy": 0.9858906525573192, |
|
"eval_loss": 0.08691811561584473, |
|
"eval_runtime": 4.666, |
|
"eval_samples_per_second": 121.516, |
|
"eval_steps_per_second": 15.216, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 54.68, |
|
"learning_rate": 4.53404255319149e-05, |
|
"loss": 0.0006, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 54.75, |
|
"learning_rate": 4.52695035460993e-05, |
|
"loss": 0.0301, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 54.82, |
|
"learning_rate": 4.519858156028369e-05, |
|
"loss": 0.0571, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 54.89, |
|
"learning_rate": 4.512765957446809e-05, |
|
"loss": 0.0067, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 54.96, |
|
"learning_rate": 4.5056737588652485e-05, |
|
"loss": 0.0014, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 55.04, |
|
"learning_rate": 4.4985815602836886e-05, |
|
"loss": 0.0408, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 55.11, |
|
"learning_rate": 4.491489361702128e-05, |
|
"loss": 0.0127, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 55.18, |
|
"learning_rate": 4.4851063829787235e-05, |
|
"loss": 0.0532, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 55.25, |
|
"learning_rate": 4.4780141843971635e-05, |
|
"loss": 0.0005, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 55.32, |
|
"learning_rate": 4.470921985815603e-05, |
|
"loss": 0.0321, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 55.32, |
|
"eval_accuracy": 0.9894179894179894, |
|
"eval_loss": 0.06382487714290619, |
|
"eval_runtime": 4.709, |
|
"eval_samples_per_second": 120.407, |
|
"eval_steps_per_second": 15.077, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 55.39, |
|
"learning_rate": 4.463829787234043e-05, |
|
"loss": 0.0004, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 55.46, |
|
"learning_rate": 4.456737588652482e-05, |
|
"loss": 0.0036, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 55.53, |
|
"learning_rate": 4.449645390070922e-05, |
|
"loss": 0.0005, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 55.6, |
|
"learning_rate": 4.442553191489362e-05, |
|
"loss": 0.0004, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 55.67, |
|
"learning_rate": 4.435460992907802e-05, |
|
"loss": 0.0549, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 55.74, |
|
"learning_rate": 4.428368794326241e-05, |
|
"loss": 0.0272, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 55.82, |
|
"learning_rate": 4.421276595744681e-05, |
|
"loss": 0.0036, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 55.89, |
|
"learning_rate": 4.4141843971631205e-05, |
|
"loss": 0.0113, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 55.96, |
|
"learning_rate": 4.4070921985815605e-05, |
|
"loss": 0.0232, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 56.03, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 0.0004, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 56.03, |
|
"eval_accuracy": 0.9894179894179894, |
|
"eval_loss": 0.06995675712823868, |
|
"eval_runtime": 4.5765, |
|
"eval_samples_per_second": 123.893, |
|
"eval_steps_per_second": 15.514, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 56.1, |
|
"learning_rate": 4.39290780141844e-05, |
|
"loss": 0.0058, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 56.17, |
|
"learning_rate": 4.38581560283688e-05, |
|
"loss": 0.0234, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 56.24, |
|
"learning_rate": 4.378723404255319e-05, |
|
"loss": 0.0524, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 56.31, |
|
"learning_rate": 4.3716312056737594e-05, |
|
"loss": 0.038, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 56.38, |
|
"learning_rate": 4.364539007092199e-05, |
|
"loss": 0.0047, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 56.45, |
|
"learning_rate": 4.357446808510639e-05, |
|
"loss": 0.0114, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 56.52, |
|
"learning_rate": 4.350354609929078e-05, |
|
"loss": 0.0129, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 56.6, |
|
"learning_rate": 4.343262411347518e-05, |
|
"loss": 0.0081, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 56.67, |
|
"learning_rate": 4.3361702127659575e-05, |
|
"loss": 0.0011, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 56.74, |
|
"learning_rate": 4.3290780141843976e-05, |
|
"loss": 0.0162, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 56.74, |
|
"eval_accuracy": 0.9858906525573192, |
|
"eval_loss": 0.05421828478574753, |
|
"eval_runtime": 4.7415, |
|
"eval_samples_per_second": 119.581, |
|
"eval_steps_per_second": 14.974, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 56.81, |
|
"learning_rate": 4.321985815602837e-05, |
|
"loss": 0.0244, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 56.88, |
|
"learning_rate": 4.314893617021277e-05, |
|
"loss": 0.007, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 56.95, |
|
"learning_rate": 4.307801418439716e-05, |
|
"loss": 0.0306, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 57.02, |
|
"learning_rate": 4.3007092198581564e-05, |
|
"loss": 0.0004, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 57.09, |
|
"learning_rate": 4.293617021276596e-05, |
|
"loss": 0.0215, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 57.16, |
|
"learning_rate": 4.286524822695036e-05, |
|
"loss": 0.0607, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 57.23, |
|
"learning_rate": 4.279432624113475e-05, |
|
"loss": 0.0018, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 57.3, |
|
"learning_rate": 4.272340425531915e-05, |
|
"loss": 0.0304, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 57.38, |
|
"learning_rate": 4.2652482269503545e-05, |
|
"loss": 0.0389, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 57.45, |
|
"learning_rate": 4.2581560283687946e-05, |
|
"loss": 0.0093, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 57.45, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.04040713235735893, |
|
"eval_runtime": 4.616, |
|
"eval_samples_per_second": 122.833, |
|
"eval_steps_per_second": 15.381, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 57.52, |
|
"learning_rate": 4.251063829787234e-05, |
|
"loss": 0.0174, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 57.59, |
|
"learning_rate": 4.243971631205674e-05, |
|
"loss": 0.0023, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 57.66, |
|
"learning_rate": 4.236879432624113e-05, |
|
"loss": 0.0448, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 57.73, |
|
"learning_rate": 4.2297872340425534e-05, |
|
"loss": 0.0004, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 57.8, |
|
"learning_rate": 4.222695035460993e-05, |
|
"loss": 0.0004, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 57.87, |
|
"learning_rate": 4.215602836879433e-05, |
|
"loss": 0.0842, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 57.94, |
|
"learning_rate": 4.208510638297872e-05, |
|
"loss": 0.0196, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 58.01, |
|
"learning_rate": 4.201418439716312e-05, |
|
"loss": 0.0234, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 58.09, |
|
"learning_rate": 4.1943262411347515e-05, |
|
"loss": 0.0214, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 58.16, |
|
"learning_rate": 4.187234042553192e-05, |
|
"loss": 0.0004, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 58.16, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.02249576337635517, |
|
"eval_runtime": 4.65, |
|
"eval_samples_per_second": 121.934, |
|
"eval_steps_per_second": 15.269, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 58.23, |
|
"learning_rate": 4.1801418439716316e-05, |
|
"loss": 0.0003, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 58.3, |
|
"learning_rate": 4.1730496453900716e-05, |
|
"loss": 0.0003, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 58.37, |
|
"learning_rate": 4.165957446808511e-05, |
|
"loss": 0.0006, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 58.44, |
|
"learning_rate": 4.158865248226951e-05, |
|
"loss": 0.0003, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 58.51, |
|
"learning_rate": 4.1517730496453904e-05, |
|
"loss": 0.0003, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 58.58, |
|
"learning_rate": 4.1446808510638304e-05, |
|
"loss": 0.0003, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 58.65, |
|
"learning_rate": 4.13758865248227e-05, |
|
"loss": 0.0003, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 58.72, |
|
"learning_rate": 4.13049645390071e-05, |
|
"loss": 0.0003, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 58.79, |
|
"learning_rate": 4.123404255319149e-05, |
|
"loss": 0.0004, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 58.87, |
|
"learning_rate": 4.116312056737589e-05, |
|
"loss": 0.0023, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 58.87, |
|
"eval_accuracy": 0.9947089947089947, |
|
"eval_loss": 0.023073412477970123, |
|
"eval_runtime": 4.6315, |
|
"eval_samples_per_second": 122.422, |
|
"eval_steps_per_second": 15.33, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 58.94, |
|
"learning_rate": 4.1092198581560286e-05, |
|
"loss": 0.0004, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 59.01, |
|
"learning_rate": 4.1021276595744686e-05, |
|
"loss": 0.0023, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 59.08, |
|
"learning_rate": 4.095035460992908e-05, |
|
"loss": 0.0002, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 59.15, |
|
"learning_rate": 4.087943262411348e-05, |
|
"loss": 0.0004, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 59.22, |
|
"learning_rate": 4.0808510638297874e-05, |
|
"loss": 0.0424, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 59.29, |
|
"learning_rate": 4.0737588652482274e-05, |
|
"loss": 0.0003, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 59.36, |
|
"learning_rate": 4.066666666666667e-05, |
|
"loss": 0.0008, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 59.43, |
|
"learning_rate": 4.059574468085107e-05, |
|
"loss": 0.0003, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 59.5, |
|
"learning_rate": 4.052482269503546e-05, |
|
"loss": 0.0029, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 59.57, |
|
"learning_rate": 4.045390070921986e-05, |
|
"loss": 0.0017, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 59.57, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.03826703876256943, |
|
"eval_runtime": 4.6215, |
|
"eval_samples_per_second": 122.686, |
|
"eval_steps_per_second": 15.363, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 59.65, |
|
"learning_rate": 4.0382978723404256e-05, |
|
"loss": 0.0002, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 59.72, |
|
"learning_rate": 4.0312056737588656e-05, |
|
"loss": 0.0007, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 59.79, |
|
"learning_rate": 4.024113475177305e-05, |
|
"loss": 0.0187, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 59.86, |
|
"learning_rate": 4.017021276595745e-05, |
|
"loss": 0.0002, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 59.93, |
|
"learning_rate": 4.0099290780141844e-05, |
|
"loss": 0.0003, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"learning_rate": 4.0028368794326244e-05, |
|
"loss": 0.0003, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 60.07, |
|
"learning_rate": 3.995744680851064e-05, |
|
"loss": 0.0023, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 60.14, |
|
"learning_rate": 3.988652482269504e-05, |
|
"loss": 0.0003, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 60.21, |
|
"learning_rate": 3.981560283687943e-05, |
|
"loss": 0.0003, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 60.28, |
|
"learning_rate": 3.974468085106383e-05, |
|
"loss": 0.0002, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 60.28, |
|
"eval_accuracy": 0.9894179894179894, |
|
"eval_loss": 0.0411054752767086, |
|
"eval_runtime": 4.6025, |
|
"eval_samples_per_second": 123.193, |
|
"eval_steps_per_second": 15.426, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 60.35, |
|
"learning_rate": 3.9673758865248226e-05, |
|
"loss": 0.0007, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 60.43, |
|
"learning_rate": 3.9602836879432626e-05, |
|
"loss": 0.0002, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 60.5, |
|
"learning_rate": 3.953191489361702e-05, |
|
"loss": 0.0002, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 60.57, |
|
"learning_rate": 3.946099290780142e-05, |
|
"loss": 0.0002, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 60.64, |
|
"learning_rate": 3.9390070921985814e-05, |
|
"loss": 0.0008, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 60.71, |
|
"learning_rate": 3.9319148936170214e-05, |
|
"loss": 0.0092, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 60.78, |
|
"learning_rate": 3.924822695035461e-05, |
|
"loss": 0.0023, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 60.85, |
|
"learning_rate": 3.917730496453901e-05, |
|
"loss": 0.0106, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 60.92, |
|
"learning_rate": 3.91063829787234e-05, |
|
"loss": 0.0004, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 60.99, |
|
"learning_rate": 3.90354609929078e-05, |
|
"loss": 0.0002, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 60.99, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.0303778238594532, |
|
"eval_runtime": 4.6325, |
|
"eval_samples_per_second": 122.395, |
|
"eval_steps_per_second": 15.326, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 61.06, |
|
"learning_rate": 3.8964539007092196e-05, |
|
"loss": 0.0362, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 61.13, |
|
"learning_rate": 3.8893617021276596e-05, |
|
"loss": 0.0026, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 61.21, |
|
"learning_rate": 3.882269503546099e-05, |
|
"loss": 0.0186, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 61.28, |
|
"learning_rate": 3.875177304964539e-05, |
|
"loss": 0.0002, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 61.35, |
|
"learning_rate": 3.8680851063829784e-05, |
|
"loss": 0.0002, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 61.42, |
|
"learning_rate": 3.8609929078014184e-05, |
|
"loss": 0.0002, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 61.49, |
|
"learning_rate": 3.853900709219858e-05, |
|
"loss": 0.0003, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 61.56, |
|
"learning_rate": 3.846808510638298e-05, |
|
"loss": 0.0002, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 61.63, |
|
"learning_rate": 3.839716312056737e-05, |
|
"loss": 0.0002, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 61.7, |
|
"learning_rate": 3.832624113475177e-05, |
|
"loss": 0.0002, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 61.7, |
|
"eval_accuracy": 0.9964726631393298, |
|
"eval_loss": 0.01797201670706272, |
|
"eval_runtime": 4.6115, |
|
"eval_samples_per_second": 122.953, |
|
"eval_steps_per_second": 15.396, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 61.77, |
|
"learning_rate": 3.825531914893617e-05, |
|
"loss": 0.0507, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 61.84, |
|
"learning_rate": 3.8184397163120567e-05, |
|
"loss": 0.0002, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 61.91, |
|
"learning_rate": 3.811347517730497e-05, |
|
"loss": 0.0002, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 61.99, |
|
"learning_rate": 3.804255319148937e-05, |
|
"loss": 0.0004, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 62.06, |
|
"learning_rate": 3.797163120567376e-05, |
|
"loss": 0.0002, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 62.13, |
|
"learning_rate": 3.790070921985816e-05, |
|
"loss": 0.0002, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 62.2, |
|
"learning_rate": 3.7829787234042555e-05, |
|
"loss": 0.0184, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 62.27, |
|
"learning_rate": 3.7758865248226955e-05, |
|
"loss": 0.0002, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 62.34, |
|
"learning_rate": 3.768794326241135e-05, |
|
"loss": 0.0003, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 62.41, |
|
"learning_rate": 3.761702127659575e-05, |
|
"loss": 0.0002, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 62.41, |
|
"eval_accuracy": 0.9911816578483245, |
|
"eval_loss": 0.06262991577386856, |
|
"eval_runtime": 4.583, |
|
"eval_samples_per_second": 123.717, |
|
"eval_steps_per_second": 15.492, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 62.48, |
|
"learning_rate": 3.754609929078014e-05, |
|
"loss": 0.0002, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 62.55, |
|
"learning_rate": 3.747517730496454e-05, |
|
"loss": 0.0002, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 62.62, |
|
"learning_rate": 3.740425531914894e-05, |
|
"loss": 0.0002, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 62.7, |
|
"learning_rate": 3.733333333333334e-05, |
|
"loss": 0.0002, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 62.77, |
|
"learning_rate": 3.726241134751773e-05, |
|
"loss": 0.0002, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 62.84, |
|
"learning_rate": 3.719148936170213e-05, |
|
"loss": 0.0547, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 62.91, |
|
"learning_rate": 3.7120567375886525e-05, |
|
"loss": 0.0002, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 62.98, |
|
"learning_rate": 3.7049645390070925e-05, |
|
"loss": 0.0058, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 63.05, |
|
"learning_rate": 3.697872340425532e-05, |
|
"loss": 0.0002, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 63.12, |
|
"learning_rate": 3.690780141843972e-05, |
|
"loss": 0.0002, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 63.12, |
|
"eval_accuracy": 0.9964726631393298, |
|
"eval_loss": 0.034689877182245255, |
|
"eval_runtime": 4.55, |
|
"eval_samples_per_second": 124.614, |
|
"eval_steps_per_second": 15.604, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 63.19, |
|
"learning_rate": 3.683687943262411e-05, |
|
"loss": 0.0002, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 63.26, |
|
"learning_rate": 3.676595744680851e-05, |
|
"loss": 0.0003, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 63.33, |
|
"learning_rate": 3.6695035460992914e-05, |
|
"loss": 0.0002, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 63.4, |
|
"learning_rate": 3.662411347517731e-05, |
|
"loss": 0.0002, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 63.48, |
|
"learning_rate": 3.655319148936171e-05, |
|
"loss": 0.0002, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 63.55, |
|
"learning_rate": 3.64822695035461e-05, |
|
"loss": 0.0002, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 63.62, |
|
"learning_rate": 3.64113475177305e-05, |
|
"loss": 0.0002, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 63.69, |
|
"learning_rate": 3.6340425531914895e-05, |
|
"loss": 0.0005, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 63.76, |
|
"learning_rate": 3.6269503546099296e-05, |
|
"loss": 0.0002, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 63.83, |
|
"learning_rate": 3.619858156028369e-05, |
|
"loss": 0.0021, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 63.83, |
|
"eval_accuracy": 0.9947089947089947, |
|
"eval_loss": 0.040755413472652435, |
|
"eval_runtime": 4.651, |
|
"eval_samples_per_second": 121.908, |
|
"eval_steps_per_second": 15.265, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 63.9, |
|
"learning_rate": 3.612765957446809e-05, |
|
"loss": 0.0002, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 63.97, |
|
"learning_rate": 3.605673758865248e-05, |
|
"loss": 0.0002, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 64.04, |
|
"learning_rate": 3.5985815602836884e-05, |
|
"loss": 0.0679, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 64.11, |
|
"learning_rate": 3.591489361702128e-05, |
|
"loss": 0.0064, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 64.18, |
|
"learning_rate": 3.584397163120568e-05, |
|
"loss": 0.0003, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 64.26, |
|
"learning_rate": 3.577304964539007e-05, |
|
"loss": 0.0044, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 64.33, |
|
"learning_rate": 3.570212765957447e-05, |
|
"loss": 0.0296, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 64.4, |
|
"learning_rate": 3.5631205673758865e-05, |
|
"loss": 0.0498, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 64.47, |
|
"learning_rate": 3.5560283687943266e-05, |
|
"loss": 0.0003, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 64.54, |
|
"learning_rate": 3.548936170212766e-05, |
|
"loss": 0.0002, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 64.54, |
|
"eval_accuracy": 0.9947089947089947, |
|
"eval_loss": 0.022945033386349678, |
|
"eval_runtime": 4.681, |
|
"eval_samples_per_second": 121.127, |
|
"eval_steps_per_second": 15.168, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 64.61, |
|
"learning_rate": 3.541843971631206e-05, |
|
"loss": 0.0002, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 64.68, |
|
"learning_rate": 3.534751773049645e-05, |
|
"loss": 0.0234, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 64.75, |
|
"learning_rate": 3.5276595744680854e-05, |
|
"loss": 0.0002, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 64.82, |
|
"learning_rate": 3.520567375886525e-05, |
|
"loss": 0.0122, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 64.89, |
|
"learning_rate": 3.513475177304965e-05, |
|
"loss": 0.0002, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 64.96, |
|
"learning_rate": 3.506382978723404e-05, |
|
"loss": 0.0431, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 65.04, |
|
"learning_rate": 3.499290780141844e-05, |
|
"loss": 0.0003, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 65.11, |
|
"learning_rate": 3.4921985815602835e-05, |
|
"loss": 0.0629, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 65.18, |
|
"learning_rate": 3.4851063829787236e-05, |
|
"loss": 0.0004, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 65.25, |
|
"learning_rate": 3.478014184397163e-05, |
|
"loss": 0.0003, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 65.25, |
|
"eval_accuracy": 0.9911816578483245, |
|
"eval_loss": 0.02761363796889782, |
|
"eval_runtime": 4.5695, |
|
"eval_samples_per_second": 124.083, |
|
"eval_steps_per_second": 15.538, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 65.32, |
|
"learning_rate": 3.470921985815603e-05, |
|
"loss": 0.0011, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 65.39, |
|
"learning_rate": 3.463829787234042e-05, |
|
"loss": 0.0002, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 65.46, |
|
"learning_rate": 3.4567375886524824e-05, |
|
"loss": 0.0011, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 65.53, |
|
"learning_rate": 3.449645390070922e-05, |
|
"loss": 0.0005, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 65.6, |
|
"learning_rate": 3.442553191489362e-05, |
|
"loss": 0.0745, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 65.67, |
|
"learning_rate": 3.435460992907801e-05, |
|
"loss": 0.0002, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 65.74, |
|
"learning_rate": 3.428368794326241e-05, |
|
"loss": 0.0002, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 65.82, |
|
"learning_rate": 3.421276595744681e-05, |
|
"loss": 0.0002, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 65.89, |
|
"learning_rate": 3.414184397163121e-05, |
|
"loss": 0.0431, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 65.96, |
|
"learning_rate": 3.4070921985815606e-05, |
|
"loss": 0.0004, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 65.96, |
|
"eval_accuracy": 0.9947089947089947, |
|
"eval_loss": 0.03187382593750954, |
|
"eval_runtime": 4.623, |
|
"eval_samples_per_second": 122.647, |
|
"eval_steps_per_second": 15.358, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 66.03, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 0.0336, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 66.1, |
|
"learning_rate": 3.39290780141844e-05, |
|
"loss": 0.0002, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 66.17, |
|
"learning_rate": 3.38581560283688e-05, |
|
"loss": 0.0259, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 66.24, |
|
"learning_rate": 3.3787234042553194e-05, |
|
"loss": 0.0002, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 66.31, |
|
"learning_rate": 3.372340425531915e-05, |
|
"loss": 0.0056, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 66.38, |
|
"learning_rate": 3.365248226950355e-05, |
|
"loss": 0.0313, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 66.45, |
|
"learning_rate": 3.3581560283687944e-05, |
|
"loss": 0.0003, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 66.52, |
|
"learning_rate": 3.3510638297872344e-05, |
|
"loss": 0.0002, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 66.6, |
|
"learning_rate": 3.343971631205674e-05, |
|
"loss": 0.0004, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 66.67, |
|
"learning_rate": 3.336879432624114e-05, |
|
"loss": 0.0002, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 66.67, |
|
"eval_accuracy": 0.9964726631393298, |
|
"eval_loss": 0.016740096732974052, |
|
"eval_runtime": 4.8155, |
|
"eval_samples_per_second": 117.744, |
|
"eval_steps_per_second": 14.744, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 66.74, |
|
"learning_rate": 3.329787234042553e-05, |
|
"loss": 0.0015, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 66.81, |
|
"learning_rate": 3.322695035460993e-05, |
|
"loss": 0.0002, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 66.88, |
|
"learning_rate": 3.3156028368794326e-05, |
|
"loss": 0.0002, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 66.95, |
|
"learning_rate": 3.3085106382978726e-05, |
|
"loss": 0.0002, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 67.02, |
|
"learning_rate": 3.301418439716312e-05, |
|
"loss": 0.0002, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 67.09, |
|
"learning_rate": 3.294326241134752e-05, |
|
"loss": 0.0002, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 67.16, |
|
"learning_rate": 3.2872340425531914e-05, |
|
"loss": 0.0002, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 67.23, |
|
"learning_rate": 3.2801418439716314e-05, |
|
"loss": 0.0003, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 67.3, |
|
"learning_rate": 3.273049645390071e-05, |
|
"loss": 0.0002, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 67.38, |
|
"learning_rate": 3.265957446808511e-05, |
|
"loss": 0.0036, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 67.38, |
|
"eval_accuracy": 0.9964726631393298, |
|
"eval_loss": 0.01307621132582426, |
|
"eval_runtime": 4.6715, |
|
"eval_samples_per_second": 121.373, |
|
"eval_steps_per_second": 15.198, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 67.45, |
|
"learning_rate": 3.25886524822695e-05, |
|
"loss": 0.0002, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 67.52, |
|
"learning_rate": 3.25177304964539e-05, |
|
"loss": 0.0002, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 67.59, |
|
"learning_rate": 3.2446808510638296e-05, |
|
"loss": 0.0002, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 67.66, |
|
"learning_rate": 3.2375886524822696e-05, |
|
"loss": 0.0004, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 67.73, |
|
"learning_rate": 3.2304964539007097e-05, |
|
"loss": 0.0002, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 67.8, |
|
"learning_rate": 3.223404255319149e-05, |
|
"loss": 0.0002, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 67.87, |
|
"learning_rate": 3.216312056737589e-05, |
|
"loss": 0.0002, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 67.94, |
|
"learning_rate": 3.2092198581560284e-05, |
|
"loss": 0.0002, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 68.01, |
|
"learning_rate": 3.2021276595744685e-05, |
|
"loss": 0.0004, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 68.09, |
|
"learning_rate": 3.195035460992908e-05, |
|
"loss": 0.0007, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 68.09, |
|
"eval_accuracy": 0.9929453262786596, |
|
"eval_loss": 0.0501413494348526, |
|
"eval_runtime": 4.575, |
|
"eval_samples_per_second": 123.933, |
|
"eval_steps_per_second": 15.519, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 68.16, |
|
"learning_rate": 3.187943262411348e-05, |
|
"loss": 0.0158, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 68.23, |
|
"learning_rate": 3.180851063829787e-05, |
|
"loss": 0.0002, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 68.3, |
|
"learning_rate": 3.173758865248227e-05, |
|
"loss": 0.0007, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 68.37, |
|
"learning_rate": 3.1666666666666666e-05, |
|
"loss": 0.0002, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 68.44, |
|
"learning_rate": 3.1595744680851067e-05, |
|
"loss": 0.0002, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 68.51, |
|
"learning_rate": 3.152482269503546e-05, |
|
"loss": 0.0003, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 68.58, |
|
"learning_rate": 3.145390070921986e-05, |
|
"loss": 0.1066, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 68.65, |
|
"learning_rate": 3.1382978723404254e-05, |
|
"loss": 0.0002, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 68.72, |
|
"learning_rate": 3.1312056737588655e-05, |
|
"loss": 0.0003, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 68.79, |
|
"learning_rate": 3.124113475177305e-05, |
|
"loss": 0.0002, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 68.79, |
|
"eval_accuracy": 0.9982363315696648, |
|
"eval_loss": 0.003756000893190503, |
|
"eval_runtime": 4.627, |
|
"eval_samples_per_second": 122.541, |
|
"eval_steps_per_second": 15.345, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 68.87, |
|
"learning_rate": 3.117021276595745e-05, |
|
"loss": 0.002, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 68.94, |
|
"learning_rate": 3.109929078014184e-05, |
|
"loss": 0.0002, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 69.01, |
|
"learning_rate": 3.102836879432624e-05, |
|
"loss": 0.0181, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 69.08, |
|
"learning_rate": 3.0957446808510636e-05, |
|
"loss": 0.0084, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 69.15, |
|
"learning_rate": 3.0886524822695037e-05, |
|
"loss": 0.0223, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 69.22, |
|
"learning_rate": 3.081560283687943e-05, |
|
"loss": 0.0009, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 69.29, |
|
"learning_rate": 3.074468085106384e-05, |
|
"loss": 0.0573, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 69.36, |
|
"learning_rate": 3.067375886524823e-05, |
|
"loss": 0.0084, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 69.43, |
|
"learning_rate": 3.060283687943263e-05, |
|
"loss": 0.0039, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 69.5, |
|
"learning_rate": 3.0531914893617025e-05, |
|
"loss": 0.0009, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 69.5, |
|
"eval_accuracy": 0.9982363315696648, |
|
"eval_loss": 0.012646459974348545, |
|
"eval_runtime": 4.6265, |
|
"eval_samples_per_second": 122.554, |
|
"eval_steps_per_second": 15.346, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 69.57, |
|
"learning_rate": 3.0460992907801422e-05, |
|
"loss": 0.0002, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 69.65, |
|
"learning_rate": 3.039007092198582e-05, |
|
"loss": 0.0002, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 69.72, |
|
"learning_rate": 3.0319148936170216e-05, |
|
"loss": 0.047, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 69.79, |
|
"learning_rate": 3.0248226950354613e-05, |
|
"loss": 0.0007, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 69.86, |
|
"learning_rate": 3.017730496453901e-05, |
|
"loss": 0.0007, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 69.93, |
|
"learning_rate": 3.0106382978723407e-05, |
|
"loss": 0.0038, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 70.0, |
|
"learning_rate": 3.0035460992907804e-05, |
|
"loss": 0.0004, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 70.07, |
|
"learning_rate": 2.99645390070922e-05, |
|
"loss": 0.0008, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 70.14, |
|
"learning_rate": 2.9893617021276598e-05, |
|
"loss": 0.0003, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 70.21, |
|
"learning_rate": 2.9822695035460995e-05, |
|
"loss": 0.0002, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 70.21, |
|
"eval_accuracy": 0.9964726631393298, |
|
"eval_loss": 0.016881294548511505, |
|
"eval_runtime": 4.685, |
|
"eval_samples_per_second": 121.024, |
|
"eval_steps_per_second": 15.155, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 70.28, |
|
"learning_rate": 2.9751773049645392e-05, |
|
"loss": 0.0001, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 70.35, |
|
"learning_rate": 2.968085106382979e-05, |
|
"loss": 0.0002, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 70.43, |
|
"learning_rate": 2.9609929078014186e-05, |
|
"loss": 0.0002, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 70.5, |
|
"learning_rate": 2.9539007092198583e-05, |
|
"loss": 0.0005, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 70.57, |
|
"learning_rate": 2.946808510638298e-05, |
|
"loss": 0.0003, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 70.64, |
|
"learning_rate": 2.9397163120567377e-05, |
|
"loss": 0.0002, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 70.71, |
|
"learning_rate": 2.9326241134751774e-05, |
|
"loss": 0.0001, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 70.78, |
|
"learning_rate": 2.925531914893617e-05, |
|
"loss": 0.0003, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 70.85, |
|
"learning_rate": 2.9184397163120568e-05, |
|
"loss": 0.0002, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 70.92, |
|
"learning_rate": 2.9113475177304965e-05, |
|
"loss": 0.0002, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 70.92, |
|
"eval_accuracy": 1.0, |
|
"eval_loss": 0.0004703931335825473, |
|
"eval_runtime": 4.594, |
|
"eval_samples_per_second": 123.421, |
|
"eval_steps_per_second": 15.455, |
|
"step": 10000 |
|
} |
|
], |
|
"max_steps": 14100, |
|
"num_train_epochs": 100, |
|
"total_flos": 1.2366947541295227e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|