{
  "best_metric": 0.31727737305643144,
  "best_model_checkpoint": "./save/wietsedv/wav2vec2-large-xlsr-53-frisian/checkpoint-7086",
  "epoch": 11.0,
  "global_step": 12991,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.999703640982218e-05, "loss": 8.2316, "step": 10 },
    { "epoch": 0.02, "learning_rate": 4.9992802709568166e-05, "loss": 3.9359, "step": 20 },
    { "epoch": 0.03, "learning_rate": 4.998856900931414e-05, "loss": 3.571, "step": 30 },
    { "epoch": 0.03, "learning_rate": 4.998433530906012e-05, "loss": 3.3783, "step": 40 },
    { "epoch": 0.04, "learning_rate": 4.99801016088061e-05, "loss": 3.2272, "step": 50 },
    { "epoch": 0.05, "learning_rate": 4.9975867908552076e-05, "loss": 3.1061, "step": 60 },
    { "epoch": 0.06, "learning_rate": 4.9971634208298054e-05, "loss": 3.0072, "step": 70 },
    { "epoch": 0.07, "learning_rate": 4.996740050804403e-05, "loss": 2.8691, "step": 80 },
    { "epoch": 0.08, "learning_rate": 4.9963166807790016e-05, "loss": 2.8989, "step": 90 },
    { "epoch": 0.08, "learning_rate": 4.995893310753599e-05, "loss": 2.8434, "step": 100 },
    { "epoch": 0.09, "learning_rate": 4.995469940728197e-05, "loss": 2.7761, "step": 110 },
    { "epoch": 0.1, "learning_rate": 4.995046570702794e-05, "loss": 2.689, "step": 120 },
    { "epoch": 0.11, "learning_rate": 4.9946232006773926e-05, "loss": 2.6679, "step": 130 },
    { "epoch": 0.12, "learning_rate": 4.99419983065199e-05, "loss": 2.5585, "step": 140 },
    { "epoch": 0.13, "learning_rate": 4.993776460626588e-05, "loss": 2.5666, "step": 150 },
    { "epoch": 0.14, "learning_rate": 4.993353090601185e-05, "loss": 2.359, "step": 160 },
    { "epoch": 0.14, "learning_rate": 4.992929720575784e-05, "loss": 2.2344, "step": 170 },
    { "epoch": 0.15, "learning_rate": 4.992506350550381e-05, "loss": 2.107, "step": 180 },
    { "epoch": 0.16, "learning_rate": 4.992082980524979e-05, "loss": 1.9212, "step": 190 },
    { "epoch": 0.17, "learning_rate": 4.991659610499577e-05, "loss": 1.8697, "step": 200 },
    { "epoch": 0.18, "learning_rate": 4.991236240474175e-05, "loss": 1.6646, "step": 210 },
    { "epoch": 0.19, "learning_rate": 4.9908128704487725e-05, "loss": 1.5922, "step": 220 },
    { "epoch": 0.19, "learning_rate": 4.99038950042337e-05, "loss": 1.6642, "step": 230 },
    { "epoch": 0.2, "learning_rate": 4.989966130397968e-05, "loss": 1.5205, "step": 240 },
    { "epoch": 0.21, "learning_rate": 4.989542760372566e-05, "loss": 1.6621, "step": 250 },
    { "epoch": 0.22, "learning_rate": 4.989119390347164e-05, "loss": 1.3049, "step": 260 },
    { "epoch": 0.23, "learning_rate": 4.988696020321761e-05, "loss": 1.2634, "step": 270 },
    { "epoch": 0.24, "learning_rate": 4.98827265029636e-05, "loss": 1.2498, "step": 280 },
    { "epoch": 0.25, "learning_rate": 4.987849280270957e-05, "loss": 1.3302, "step": 290 },
    { "epoch": 0.25, "learning_rate": 4.987425910245555e-05, "loss": 1.3514, "step": 300 },
    { "epoch": 0.26, "learning_rate": 4.9870025402201523e-05, "loss": 1.1561, "step": 310 },
    { "epoch": 0.27, "learning_rate": 4.986579170194751e-05, "loss": 1.0196, "step": 320 },
    { "epoch": 0.28, "learning_rate": 4.986155800169348e-05, "loss": 1.1086, "step": 330 },
    { "epoch": 0.29, "learning_rate": 4.985732430143946e-05, "loss": 1.1578, "step": 340 },
    { "epoch": 0.3, "learning_rate": 4.9853090601185434e-05, "loss": 1.2532, "step": 350 },
    { "epoch": 0.3, "learning_rate": 4.984885690093142e-05, "loss": 0.9676, "step": 360 },
    { "epoch": 0.31, "learning_rate": 4.9844623200677396e-05, "loss": 1.0227, "step": 370 },
    { "epoch": 0.32, "learning_rate": 4.9840389500423373e-05, "loss": 1.0093, "step": 380 },
    { "epoch": 0.33, "learning_rate": 4.983615580016935e-05, "loss": 1.0484, "step": 390 },
    { "epoch": 0.34, "learning_rate": 4.983192209991533e-05, "loss": 1.1028, "step": 400 },
    { "epoch": 0.35, "learning_rate": 4.9827688399661306e-05, "loss": 0.9524, "step": 410 },
    { "epoch": 0.36, "learning_rate": 4.9823454699407284e-05, "loss": 0.9216, "step": 420 },
    { "epoch": 0.36, "learning_rate": 4.981922099915326e-05, "loss": 0.8981, "step": 430 },
    { "epoch": 0.37, "learning_rate": 4.981498729889924e-05, "loss": 0.9876, "step": 440 },
    { "epoch": 0.38, "learning_rate": 4.981075359864522e-05, "loss": 1.1131, "step": 450 },
    { "epoch": 0.39, "learning_rate": 4.9806519898391194e-05, "loss": 0.8188, "step": 460 },
    { "epoch": 0.4, "learning_rate": 4.980228619813718e-05, "loss": 0.8327, "step": 470 },
    { "epoch": 0.41, "learning_rate": 4.979805249788315e-05, "loss": 0.9766, "step": 480 },
    { "epoch": 0.41, "learning_rate": 4.9793818797629134e-05, "loss": 0.9445, "step": 490 },
    { "epoch": 0.42, "learning_rate": 4.9789585097375105e-05, "loss": 1.0943, "step": 500 },
    { "epoch": 0.43, "learning_rate": 4.978535139712109e-05, "loss": 0.8088, "step": 510 },
    { "epoch": 0.44, "learning_rate": 4.978111769686706e-05, "loss": 0.8138, "step": 520 },
    { "epoch": 0.45, "learning_rate": 4.9776883996613045e-05, "loss": 0.8536, "step": 530 },
    { "epoch": 0.46, "learning_rate": 4.977265029635902e-05, "loss": 0.9564, "step": 540 },
    { "epoch": 0.47, "learning_rate": 4.97688399661304e-05, "loss": 0.9857, "step": 550 },
    { "epoch": 0.47, "learning_rate": 4.976460626587638e-05, "loss": 0.7244, "step": 560 },
    { "epoch": 0.48, "learning_rate": 4.976037256562235e-05, "loss": 0.8323, "step": 570 },
    { "epoch": 0.49, "learning_rate": 4.975613886536834e-05, "loss": 0.8926, "step": 580 },
    { "epoch": 0.5, "learning_rate": 4.975190516511431e-05, "loss": 0.855, "step": 590 },
    { "epoch": 0.51, "learning_rate": 4.974767146486029e-05, "loss": 1.063, "step": 600 },
    { "epoch": 0.52, "learning_rate": 4.974343776460627e-05, "loss": 0.6973, "step": 610 },
    { "epoch": 0.52, "learning_rate": 4.973920406435225e-05, "loss": 0.8564, "step": 620 },
    { "epoch": 0.53, "learning_rate": 4.9734970364098226e-05, "loss": 0.8261, "step": 630 },
    { "epoch": 0.54, "learning_rate": 4.97307366638442e-05, "loss": 0.8945, "step": 640 },
    { "epoch": 0.55, "learning_rate": 4.972650296359018e-05, "loss": 0.9791, "step": 650 },
    { "epoch": 0.56, "learning_rate": 4.972226926333616e-05, "loss": 0.7355, "step": 660 },
    { "epoch": 0.57, "learning_rate": 4.9718035563082136e-05, "loss": 0.7749, "step": 670 },
    { "epoch": 0.58, "learning_rate": 4.9713801862828114e-05, "loss": 0.8765, "step": 680 },
    { "epoch": 0.58, "learning_rate": 4.970956816257409e-05, "loss": 0.914, "step": 690 },
    { "epoch": 0.59, "learning_rate": 4.970533446232007e-05, "loss": 1.0133, "step": 700 },
    { "epoch": 0.6, "learning_rate": 4.970110076206605e-05, "loss": 0.6857, "step": 710 },
    { "epoch": 0.61, "learning_rate": 4.9696867061812024e-05, "loss": 0.6903, "step": 720 },
    { "epoch": 0.62, "learning_rate": 4.969263336155801e-05, "loss": 0.7509, "step": 730 },
    { "epoch": 0.63, "learning_rate": 4.968839966130398e-05, "loss": 0.8627, "step": 740 },
    { "epoch": 0.64, "learning_rate": 4.9684165961049964e-05, "loss": 1.0669, "step": 750 },
    { "epoch": 0.64, "learning_rate": 4.9679932260795935e-05, "loss": 0.6301, "step": 760 },
    { "epoch": 0.65, "learning_rate": 4.967569856054192e-05, "loss": 0.7653, "step": 770 },
    { "epoch": 0.66, "learning_rate": 4.967146486028789e-05, "loss": 0.7241, "step": 780 },
    { "epoch": 0.67, "learning_rate": 4.9667231160033874e-05, "loss": 0.8716, "step": 790 },
    { "epoch": 0.68, "learning_rate": 4.9662997459779845e-05, "loss": 0.9682, "step": 800 },
    { "epoch": 0.69, "learning_rate": 4.965876375952583e-05, "loss": 0.6231, "step": 810 },
    { "epoch": 0.69, "learning_rate": 4.96545300592718e-05, "loss": 0.7077, "step": 820 },
    { "epoch": 0.7, "learning_rate": 4.9650296359017785e-05, "loss": 0.6977, "step": 830 },
    { "epoch": 0.71, "learning_rate": 4.964606265876376e-05, "loss": 0.77, "step": 840 },
    { "epoch": 0.72, "learning_rate": 4.9642252328535145e-05, "loss": 0.8742, "step": 850 },
    { "epoch": 0.73, "learning_rate": 4.9638018628281116e-05, "loss": 0.725, "step": 860 },
    { "epoch": 0.74, "learning_rate": 4.96337849280271e-05, "loss": 0.7597, "step": 870 },
    { "epoch": 0.75, "learning_rate": 4.962955122777308e-05, "loss": 0.7368, "step": 880 },
    { "epoch": 0.75, "learning_rate": 4.9625317527519055e-05, "loss": 0.8343, "step": 890 },
    { "epoch": 0.76, "learning_rate": 4.962108382726503e-05, "loss": 1.0221, "step": 900 },
    { "epoch": 0.77, "learning_rate": 4.961685012701101e-05, "loss": 0.6496, "step": 910 },
    { "epoch": 0.78, "learning_rate": 4.961261642675699e-05, "loss": 0.7248, "step": 920 },
    { "epoch": 0.79, "learning_rate": 4.9608382726502966e-05, "loss": 0.7301, "step": 930 },
    { "epoch": 0.8, "learning_rate": 4.9604149026248944e-05, "loss": 0.8114, "step": 940 },
    { "epoch": 0.8, "learning_rate": 4.959991532599492e-05, "loss": 0.8109, "step": 950 },
    { "epoch": 0.81, "learning_rate": 4.95956816257409e-05, "loss": 0.6532, "step": 960 },
    { "epoch": 0.82, "learning_rate": 4.9591447925486876e-05, "loss": 0.7072, "step": 970 },
    { "epoch": 0.83, "learning_rate": 4.958721422523286e-05, "loss": 0.7874, "step": 980 },
    { "epoch": 0.84, "learning_rate": 4.958298052497883e-05, "loss": 0.7866, "step": 990 },
    { "epoch": 0.85, "learning_rate": 4.9578746824724816e-05, "loss": 1.0286, "step": 1000 },
    { "epoch": 0.86, "learning_rate": 4.957451312447079e-05, "loss": 0.6527, "step": 1010 },
    { "epoch": 0.86, "learning_rate": 4.957027942421677e-05, "loss": 0.697, "step": 1020 },
    { "epoch": 0.87, "learning_rate": 4.956604572396274e-05, "loss": 0.7519, "step": 1030 },
    { "epoch": 0.88, "learning_rate": 4.9561812023708727e-05, "loss": 0.7881, "step": 1040 },
    { "epoch": 0.89, "learning_rate": 4.9557578323454704e-05, "loss": 1.1226, "step": 1050 },
    { "epoch": 0.9, "learning_rate": 4.955334462320068e-05, "loss": 0.6177, "step": 1060 },
    { "epoch": 0.91, "learning_rate": 4.954911092294666e-05, "loss": 0.6793, "step": 1070 },
    { "epoch": 0.91, "learning_rate": 4.954487722269264e-05, "loss": 0.7337, "step": 1080 },
    { "epoch": 0.92, "learning_rate": 4.9540643522438615e-05, "loss": 0.7287, "step": 1090 },
    { "epoch": 0.93, "learning_rate": 4.953640982218459e-05, "loss": 0.805, "step": 1100 },
    { "epoch": 0.94, "learning_rate": 4.953217612193057e-05, "loss": 0.6376, "step": 1110 },
    { "epoch": 0.95, "learning_rate": 4.952794242167655e-05, "loss": 0.6818, "step": 1120 },
    { "epoch": 0.96, "learning_rate": 4.9523708721422525e-05, "loss": 0.7406, "step": 1130 },
    { "epoch": 0.97, "learning_rate": 4.95194750211685e-05, "loss": 0.7519, "step": 1140 },
    { "epoch": 0.97, "learning_rate": 4.951524132091448e-05, "loss": 0.9363, "step": 1150 },
    { "epoch": 0.98, "learning_rate": 4.951100762066046e-05, "loss": 0.6559, "step": 1160 },
    { "epoch": 0.99, "learning_rate": 4.9506773920406436e-05, "loss": 0.6731, "step": 1170 },
    { "epoch": 1.0, "learning_rate": 4.950254022015241e-05, "loss": 0.9067, "step": 1180 },
    { "epoch": 1.0, "eval_cer": 0.1576126742082073, "eval_loss": 0.6544902324676514, "eval_mer": 0.4400347939545504, "eval_runtime": 29.5295, "eval_samples_per_second": 73.757, "eval_steps_per_second": 4.639, "step": 1181 },
    { "epoch": 1.01, "learning_rate": 4.949830651989839e-05, "loss": 0.5277, "step": 1190 },
    { "epoch": 1.02, "learning_rate": 4.949407281964437e-05, "loss": 0.5804, "step": 1200 },
    { "epoch": 1.02, "learning_rate": 4.948983911939035e-05, "loss": 0.6302, "step": 1210 },
    { "epoch": 1.03, "learning_rate": 4.948560541913633e-05, "loss": 0.6828, "step": 1220 },
    { "epoch": 1.04, "learning_rate": 4.948137171888231e-05, "loss": 0.8391, "step": 1230 },
    { "epoch": 1.05, "learning_rate": 4.9477138018628286e-05, "loss": 0.6364, "step": 1240 },
    { "epoch": 1.06, "learning_rate": 4.947290431837426e-05, "loss": 0.5684, "step": 1250 },
    { "epoch": 1.07, "learning_rate": 4.946867061812024e-05, "loss": 0.6349, "step": 1260 },
    { "epoch": 1.08, "learning_rate": 4.946443691786622e-05, "loss": 0.7817, "step": 1270 },
    { "epoch": 1.08, "learning_rate": 4.94606265876376e-05, "loss": 0.8709, "step": 1280 },
    { "epoch": 1.09, "learning_rate": 4.945639288738357e-05, "loss": 0.5876, "step": 1290 },
    { "epoch": 1.1, "learning_rate": 4.9452159187129556e-05, "loss": 0.5962, "step": 1300 },
    { "epoch": 1.11, "learning_rate": 4.944792548687553e-05, "loss": 0.6397, "step": 1310 },
    { "epoch": 1.12, "learning_rate": 4.944369178662151e-05, "loss": 0.6253, "step": 1320 },
    { "epoch": 1.13, "learning_rate": 4.943945808636748e-05, "loss": 0.7778, "step": 1330 },
    { "epoch": 1.13, "learning_rate": 4.943522438611347e-05, "loss": 0.5529, "step": 1340 },
    { "epoch": 1.14, "learning_rate": 4.9430990685859444e-05, "loss": 0.5595, "step": 1350 },
    { "epoch": 1.15, "learning_rate": 4.942675698560542e-05, "loss": 0.6319, "step": 1360 },
    { "epoch": 1.16, "learning_rate": 4.94225232853514e-05, "loss": 0.6649, "step": 1370 },
    { "epoch": 1.17, "learning_rate": 4.941828958509738e-05, "loss": 0.8097, "step": 1380 },
    { "epoch": 1.18, "learning_rate": 4.9414055884843355e-05, "loss": 0.6178, "step": 1390 },
    { "epoch": 1.19, "learning_rate": 4.940982218458933e-05, "loss": 0.5642, "step": 1400 },
    { "epoch": 1.19, "learning_rate": 4.940558848433531e-05, "loss": 0.5683, "step": 1410 },
    { "epoch": 1.2, "learning_rate": 4.940135478408129e-05, "loss": 0.5921, "step": 1420 },
    { "epoch": 1.21, "learning_rate": 4.939712108382727e-05, "loss": 0.757, "step": 1430 },
    { "epoch": 1.22, "learning_rate": 4.939288738357324e-05, "loss": 0.6185, "step": 1440 },
    { "epoch": 1.23, "learning_rate": 4.938865368331923e-05, "loss": 0.5439, "step": 1450 },
    { "epoch": 1.24, "learning_rate": 4.93844199830652e-05, "loss": 0.6081, "step": 1460 },
    { "epoch": 1.24, "learning_rate": 4.938018628281118e-05, "loss": 0.6448, "step": 1470 },
    { "epoch": 1.25, "learning_rate": 4.9375952582557153e-05, "loss": 0.7704, "step": 1480 },
    { "epoch": 1.26, "learning_rate": 4.937171888230314e-05, "loss": 0.5645, "step": 1490 },
    { "epoch": 1.27, "learning_rate": 4.936748518204911e-05, "loss": 0.5591, "step": 1500 },
    { "epoch": 1.28, "learning_rate": 4.936325148179509e-05, "loss": 0.6333, "step": 1510 },
    { "epoch": 1.29, "learning_rate": 4.9359017781541064e-05, "loss": 0.6027, "step": 1520 },
    { "epoch": 1.3, "learning_rate": 4.935478408128705e-05, "loss": 0.8533, "step": 1530 },
    { "epoch": 1.3, "learning_rate": 4.9350550381033026e-05, "loss": 0.5244, "step": 1540 },
    { "epoch": 1.31, "learning_rate": 4.9346316680779004e-05, "loss": 0.5265, "step": 1550 },
    { "epoch": 1.32, "learning_rate": 4.934208298052498e-05, "loss": 0.605, "step": 1560 },
    { "epoch": 1.33, "learning_rate": 4.933784928027096e-05, "loss": 0.6919, "step": 1570 },
    { "epoch": 1.34, "learning_rate": 4.9333615580016936e-05, "loss": 0.8943, "step": 1580 },
    { "epoch": 1.35, "learning_rate": 4.9329381879762914e-05, "loss": 0.5891, "step": 1590 },
    { "epoch": 1.35, "learning_rate": 4.93251481795089e-05, "loss": 0.5561, "step": 1600 },
    { "epoch": 1.36, "learning_rate": 4.932091447925487e-05, "loss": 0.5738, "step": 1610 },
    { "epoch": 1.37, "learning_rate": 4.9316680779000854e-05, "loss": 0.7086, "step": 1620 },
    { "epoch": 1.38, "learning_rate": 4.9312447078746824e-05, "loss": 0.8479, "step": 1630 },
    { "epoch": 1.39, "learning_rate": 4.930821337849281e-05, "loss": 0.5043, "step": 1640 },
    { "epoch": 1.4, "learning_rate": 4.930397967823878e-05, "loss": 0.5307, "step": 1650 },
    { "epoch": 1.41, "learning_rate": 4.9299745977984764e-05, "loss": 0.5975, "step": 1660 },
    { "epoch": 1.41, "learning_rate": 4.9295512277730735e-05, "loss": 0.6344, "step": 1670 },
    { "epoch": 1.42, "learning_rate": 4.929127857747672e-05, "loss": 0.7336, "step": 1680 },
    { "epoch": 1.43, "learning_rate": 4.928704487722269e-05, "loss": 0.5094, "step": 1690 },
    { "epoch": 1.44, "learning_rate": 4.9282811176968675e-05, "loss": 0.5233, "step": 1700 },
    { "epoch": 1.45, "learning_rate": 4.927857747671465e-05, "loss": 0.5785, "step": 1710 },
    { "epoch": 1.46, "learning_rate": 4.927434377646063e-05, "loss": 0.6206, "step": 1720 },
    { "epoch": 1.46, "learning_rate": 4.927011007620661e-05, "loss": 0.7518, "step": 1730 },
    { "epoch": 1.47, "learning_rate": 4.9265876375952585e-05, "loss": 0.498, "step": 1740 },
    { "epoch": 1.48, "learning_rate": 4.926164267569856e-05, "loss": 0.5847, "step": 1750 },
    { "epoch": 1.49, "learning_rate": 4.925740897544454e-05, "loss": 0.5772, "step": 1760 },
    { "epoch": 1.5, "learning_rate": 4.925317527519052e-05, "loss": 0.5796, "step": 1770 },
    { "epoch": 1.51, "learning_rate": 4.9248941574936496e-05, "loss": 0.6816, "step": 1780 },
    { "epoch": 1.52, "learning_rate": 4.924470787468248e-05, "loss": 0.6114, "step": 1790 },
    { "epoch": 1.52, "learning_rate": 4.924047417442845e-05, "loss": 0.5652, "step": 1800 },
    { "epoch": 1.53, "learning_rate": 4.9236240474174435e-05, "loss": 0.5561, "step": 1810 },
    { "epoch": 1.54, "learning_rate": 4.9232006773920406e-05, "loss": 0.6507, "step": 1820 },
    { "epoch": 1.55, "learning_rate": 4.922777307366639e-05, "loss": 0.8466, "step": 1830 },
    { "epoch": 1.56, "learning_rate": 4.922353937341236e-05, "loss": 0.5228, "step": 1840 },
    { "epoch": 1.57, "learning_rate": 4.9219305673158346e-05, "loss": 0.5455, "step": 1850 },
    { "epoch": 1.57, "learning_rate": 4.9215071972904316e-05, "loss": 0.5846, "step": 1860 },
    { "epoch": 1.58, "learning_rate": 4.92108382726503e-05, "loss": 0.533, "step": 1870 },
    { "epoch": 1.59, "learning_rate": 4.920660457239628e-05, "loss": 0.7392, "step": 1880 },
    { "epoch": 1.6, "learning_rate": 4.9202370872142256e-05, "loss": 0.4746, "step": 1890 },
    { "epoch": 1.61, "learning_rate": 4.9198137171888234e-05, "loss": 0.5498, "step": 1900 },
    { "epoch": 1.62, "learning_rate": 4.919390347163421e-05, "loss": 0.5825, "step": 1910 },
    { "epoch": 1.63, "learning_rate": 4.918966977138019e-05, "loss": 0.6736, "step": 1920 },
    { "epoch": 1.63, "learning_rate": 4.9185436071126167e-05, "loss": 0.8107, "step": 1930 },
    { "epoch": 1.64, "learning_rate": 4.9181202370872144e-05, "loss": 0.4814, "step": 1940 },
    { "epoch": 1.65, "learning_rate": 4.917696867061812e-05, "loss": 0.4699, "step": 1950 },
    { "epoch": 1.66, "learning_rate": 4.91727349703641e-05, "loss": 0.5519, "step": 1960 },
    { "epoch": 1.67, "learning_rate": 4.916850127011008e-05, "loss": 0.6432, "step": 1970 },
    { "epoch": 1.68, "learning_rate": 4.9164267569856055e-05, "loss": 0.7772, "step": 1980 },
    { "epoch": 1.69, "learning_rate": 4.916003386960203e-05, "loss": 0.4573, "step": 1990 },
    { "epoch": 1.69, "learning_rate": 4.915580016934802e-05, "loss": 0.5186, "step": 2000 },
    { "epoch": 1.7, "learning_rate": 4.915156646909399e-05, "loss": 0.5835, "step": 2010 },
    { "epoch": 1.71, "learning_rate": 4.914733276883997e-05, "loss": 0.6779, "step": 2020 },
    { "epoch": 1.72, "learning_rate": 4.914309906858594e-05, "loss": 0.6225, "step": 2030 },
    { "epoch": 1.73, "learning_rate": 4.9139288738357325e-05, "loss": 0.3982, "step": 2040 },
    { "epoch": 1.74, "learning_rate": 4.91350550381033e-05, "loss": 0.4857, "step": 2050 },
    { "epoch": 1.74, "learning_rate": 4.913082133784928e-05, "loss": 0.6068, "step": 2060 },
    { "epoch": 1.75, "learning_rate": 4.9126587637595265e-05, "loss": 0.5961, "step": 2070 },
    { "epoch": 1.76, "learning_rate": 4.9122353937341236e-05, "loss": 0.767, "step": 2080 },
    { "epoch": 1.77, "learning_rate": 4.911812023708722e-05, "loss": 0.504, "step": 2090 },
    { "epoch": 1.78, "learning_rate": 4.911388653683319e-05, "loss": 0.5235, "step": 2100 },
    { "epoch": 1.79, "learning_rate": 4.9109652836579175e-05, "loss": 0.5895, "step": 2110 },
    { "epoch": 1.8, "learning_rate": 4.9105419136325146e-05, "loss": 0.6355, "step": 2120 },
    { "epoch": 1.8, "learning_rate": 4.910118543607113e-05, "loss": 0.7225, "step": 2130 },
    { "epoch": 1.81, "learning_rate": 4.90969517358171e-05, "loss": 0.5133, "step": 2140 },
    { "epoch": 1.82, "learning_rate": 4.9092718035563086e-05, "loss": 0.4604, "step": 2150 },
    { "epoch": 1.83, "learning_rate": 4.9088484335309063e-05, "loss": 0.5613, "step": 2160 },
    { "epoch": 1.84, "learning_rate": 4.908425063505504e-05, "loss": 0.6061, "step": 2170 },
    { "epoch": 1.85, "learning_rate": 4.908001693480102e-05, "loss": 0.693, "step": 2180 },
    { "epoch": 1.85, "learning_rate": 4.9075783234546996e-05, "loss": 0.4893, "step": 2190 },
    { "epoch": 1.86, "learning_rate": 4.9071549534292974e-05, "loss": 0.4893, "step": 2200 },
    { "epoch": 1.87, "learning_rate": 4.906731583403895e-05, "loss": 0.5351, "step": 2210 },
    { "epoch": 1.88, "learning_rate": 4.906308213378493e-05, "loss": 0.5383, "step": 2220 },
    { "epoch": 1.89, "learning_rate": 4.905884843353091e-05, "loss": 0.7868, "step": 2230 },
    { "epoch": 1.9, "learning_rate": 4.905461473327689e-05, "loss": 0.4928, "step": 2240 },
    { "epoch": 1.91, "learning_rate": 4.905038103302286e-05, "loss": 0.5192, "step": 2250 },
    { "epoch": 1.91, "learning_rate": 4.9046147332768846e-05, "loss": 0.5743, "step": 2260 },
    { "epoch": 1.92, "learning_rate": 4.904191363251482e-05, "loss": 0.595, "step": 2270 },
    { "epoch": 1.93, "learning_rate": 4.90376799322608e-05, "loss": 0.7781, "step": 2280 },
    { "epoch": 1.94, "learning_rate": 4.903344623200677e-05, "loss": 0.437, "step": 2290 },
    { "epoch": 1.95, "learning_rate": 4.902921253175276e-05, "loss": 0.4474, "step": 2300 },
    { "epoch": 1.96, "learning_rate": 4.902497883149873e-05, "loss": 0.5298, "step": 2310 },
    { "epoch": 1.96, "learning_rate": 4.902074513124471e-05, "loss": 0.628, "step": 2320 },
    { "epoch": 1.97, "learning_rate": 4.901651143099068e-05, "loss": 0.5911, "step": 2330 },
    { "epoch": 1.98, "learning_rate": 4.901227773073667e-05, "loss": 0.4738, "step": 2340 },
    { "epoch": 1.99, "learning_rate": 4.900804403048264e-05, "loss": 0.5894, "step": 2350 },
    { "epoch": 2.0, "learning_rate": 4.900381033022862e-05, "loss": 0.5701, "step": 2360 },
    { "epoch": 2.0, "eval_cer": 0.13196314866227435, "eval_loss": 0.576825737953186, "eval_mer": 0.3741437425247363, "eval_runtime": 24.0365, "eval_samples_per_second": 90.612, "eval_steps_per_second": 5.7, "step": 2362 },
    { "epoch": 2.01, "learning_rate": 4.89995766299746e-05, "loss": 0.5197, "step": 2370 },
    { "epoch": 2.02, "learning_rate": 4.899534292972058e-05, "loss": 0.4524, "step": 2380 },
    { "epoch": 2.02, "learning_rate": 4.8991109229466555e-05, "loss": 0.4972, "step": 2390 },
    { "epoch": 2.03, "learning_rate": 4.898687552921253e-05, "loss": 0.4893, "step": 2400 },
    { "epoch": 2.04, "learning_rate": 4.898264182895852e-05, "loss": 0.5848, "step": 2410 },
    { "epoch": 2.05, "learning_rate": 4.897840812870449e-05, "loss": 0.5342, "step": 2420 },
    { "epoch": 2.06, "learning_rate": 4.897417442845047e-05, "loss": 0.4377, "step": 2430 },
    { "epoch": 2.07, "learning_rate": 4.8969940728196444e-05, "loss": 0.4364, "step": 2440 },
    { "epoch": 2.07, "learning_rate": 4.896570702794243e-05, "loss": 0.4539, "step": 2450 },
    { "epoch": 2.08, "learning_rate": 4.89614733276884e-05, "loss": 0.6036, "step": 2460 },
    { "epoch": 2.09, "learning_rate": 4.895723962743438e-05, "loss": 0.5118, "step": 2470 },
    { "epoch": 2.1, "learning_rate": 4.8953005927180354e-05, "loss": 0.4158, "step": 2480 },
    { "epoch": 2.11, "learning_rate": 4.894877222692634e-05, "loss": 0.511, "step": 2490 },
    { "epoch": 2.12, "learning_rate": 4.894453852667231e-05, "loss": 0.4624, "step": 2500 },
    { "epoch": 2.13, "learning_rate": 4.8940304826418294e-05, "loss": 0.6149, "step": 2510 },
    { "epoch": 2.13, "learning_rate": 4.893607112616427e-05, "loss": 0.3919, "step": 2520 },
    { "epoch": 2.14, "learning_rate": 4.893183742591025e-05, "loss": 0.3736, "step": 2530 },
    { "epoch": 2.15, "learning_rate": 4.8927603725656227e-05, "loss": 0.4507, "step": 2540 },
    { "epoch": 2.16, "learning_rate": 4.8923370025402204e-05, "loss": 0.4847, "step": 2550 },
    { "epoch": 2.17, "learning_rate": 4.891913632514818e-05, "loss": 0.6876, "step": 2560 },
    { "epoch": 2.18, "learning_rate": 4.891490262489416e-05, "loss": 0.5018, "step": 2570 },
    { "epoch": 2.18, "learning_rate": 4.891066892464014e-05, "loss": 0.4007, "step": 2580 },
    { "epoch": 2.19, "learning_rate": 4.8906435224386115e-05, "loss": 0.4385, "step": 2590 },
    { "epoch": 2.2, "learning_rate": 4.89022015241321e-05, "loss": 0.5196, "step": 2600 },
    { "epoch": 2.21, "learning_rate": 4.889796782387807e-05, "loss": 0.5398, "step": 2610 },
    { "epoch": 2.22, "learning_rate": 4.8893734123624054e-05, "loss": 0.4602, "step": 2620 },
    { "epoch": 2.23, "learning_rate": 4.8889500423370025e-05, "loss": 0.3691, "step": 2630 },
    { "epoch": 2.24, "learning_rate": 4.888526672311601e-05, "loss": 0.4319, "step": 2640 },
    { "epoch": 2.24, "learning_rate": 4.888103302286198e-05, "loss": 0.5262, "step": 2650 },
    { "epoch": 2.25, "learning_rate": 4.8876799322607965e-05, "loss": 0.6359, "step": 2660 },
    { "epoch": 2.26, "learning_rate": 4.8872565622353936e-05, "loss": 0.4502, "step": 2670 },
    { "epoch": 2.27, "learning_rate": 4.886833192209992e-05, "loss": 0.4397, "step": 2680 },
    { "epoch": 2.28, "learning_rate": 4.88640982218459e-05, "loss": 0.4362, "step": 2690 },
    { "epoch": 2.29, "learning_rate": 4.8859864521591875e-05, "loss": 0.4408, "step": 2700 },
    { "epoch": 2.29, "learning_rate": 4.885563082133785e-05, "loss": 0.5903, "step": 2710 },
    { "epoch": 2.3, "learning_rate": 4.885139712108383e-05, "loss": 0.5097, "step": 2720 },
    { "epoch": 2.31, "learning_rate": 4.884716342082981e-05, "loss": 0.4245, "step": 2730 },
    { "epoch": 2.32, "learning_rate": 4.8842929720575786e-05, "loss": 0.496, "step": 2740 },
    { "epoch": 2.33, "learning_rate": 4.883869602032176e-05, "loss": 0.5596, "step": 2750 },
    { "epoch": 2.34, "learning_rate": 4.883446232006774e-05, "loss": 0.5434, "step": 2760 },
    { "epoch": 2.35, "learning_rate": 4.883022861981372e-05, "loss": 0.4734, "step": 2770 },
    { "epoch": 2.35, "learning_rate": 4.8825994919559696e-05, "loss": 0.4264, "step": 2780 },
    { "epoch": 2.36, "learning_rate": 4.8821761219305674e-05, "loss": 0.4084, "step": 2790 },
    { "epoch": 2.37, "learning_rate": 4.881752751905165e-05, "loss": 0.4768, "step": 2800 },
    { "epoch": 2.38, "learning_rate": 4.8813293818797636e-05, "loss": 0.566, "step": 2810 },
    { "epoch": 2.39, "learning_rate": 4.8809060118543607e-05, "loss": 0.4692, "step": 2820 },
    { "epoch": 2.4, "learning_rate": 4.880482641828959e-05, "loss": 0.428, "step": 2830 },
    { "epoch": 2.4, "learning_rate": 4.880059271803556e-05, "loss": 0.4359, "step": 2840 },
    { "epoch": 2.41, "learning_rate": 4.8796359017781546e-05, "loss": 0.5232, "step": 2850 },
    { "epoch": 2.42, "learning_rate": 4.8792125317527524e-05, "loss": 0.6228, "step": 2860 },
    { "epoch": 2.43, "learning_rate": 4.87883149872989e-05, "loss": 0.3815, "step": 2870 },
    { "epoch": 2.44, "learning_rate": 4.878408128704488e-05, "loss": 0.4012, "step": 2880 },
    { "epoch": 2.45, "learning_rate": 4.8779847586790855e-05, "loss": 0.3818, "step": 2890 },
    { "epoch": 2.46, "learning_rate": 4.877561388653684e-05, "loss": 0.456, "step": 2900 },
    { "epoch": 2.46, "learning_rate": 4.877138018628281e-05, "loss": 0.4737, "step": 2910 },
    { "epoch": 2.47, "learning_rate": 4.8767146486028795e-05, "loss": 0.4481, "step": 2920 },
    { "epoch": 2.48, "learning_rate": 4.8762912785774765e-05, "loss": 0.4737, "step": 2930 },
    { "epoch": 2.49, "learning_rate": 4.875867908552075e-05, "loss": 0.5637, "step": 2940 },
    { "epoch": 2.5, "learning_rate": 4.875444538526673e-05, "loss": 0.5322, "step": 2950 },
    { "epoch": 2.51, "learning_rate": 4.8750211685012705e-05, "loss": 0.6033, "step": 2960 },
    { "epoch": 2.51, "learning_rate": 4.874597798475868e-05, "loss": 0.4817, "step": 2970 },
    { "epoch": 2.52, "learning_rate": 4.874174428450466e-05, "loss": 0.449, "step": 2980 },
    { "epoch": 2.53, "learning_rate": 4.873751058425064e-05, "loss": 0.4926, "step": 2990 },
    { "epoch": 2.54, "learning_rate": 4.8733276883996615e-05, "loss": 0.5486, "step": 3000 },
    { "epoch": 2.55, "learning_rate": 4.872904318374259e-05, "loss": 0.6407, "step": 3010 },
    { "epoch": 2.56, "learning_rate": 4.872480948348857e-05, "loss": 0.376, "step": 3020 },
    { "epoch": 2.57, "learning_rate": 4.872057578323455e-05, "loss": 0.3779, "step": 3030 },
    { "epoch": 2.57, "learning_rate": 4.8716342082980526e-05, "loss": 0.486, "step": 3040 },
    { "epoch": 2.58, "learning_rate": 4.8712108382726504e-05, "loss": 0.5398, "step": 3050 },
    { "epoch": 2.59, "learning_rate": 4.870787468247248e-05, "loss": 0.5042, "step": 3060 },
    { "epoch": 2.6, "learning_rate": 4.8703640982218466e-05, "loss": 0.4882, "step": 3070 },
    { "epoch": 2.61, "learning_rate": 4.8699407281964436e-05, "loss": 0.4227, "step": 3080 },
    { "epoch": 2.62, "learning_rate": 4.869517358171042e-05, "loss": 0.4296, "step": 3090 },
    { "epoch": 2.62, "learning_rate": 4.869093988145639e-05, "loss": 0.5495, "step": 3100 },
    { "epoch": 2.63, "learning_rate": 4.8686706181202376e-05, "loss": 0.5862, "step": 3110 },
    { "epoch": 2.64, "learning_rate": 4.868247248094835e-05, "loss": 0.4743, "step": 3120 },
    { "epoch": 2.65, "learning_rate": 4.867823878069433e-05, "loss": 0.4568, "step": 3130 },
    { "epoch": 2.66, "learning_rate": 4.86740050804403e-05, "loss": 0.4377, "step": 3140 },
    { "epoch": 2.67, "learning_rate": 4.8669771380186286e-05, "loss": 0.4263, "step": 3150 },
    { "epoch": 2.68, "learning_rate": 4.866553767993226e-05, "loss": 0.5528, "step": 3160 },
    { "epoch": 2.68, "learning_rate": 4.866130397967824e-05, "loss": 0.4947, "step": 3170 },
    { "epoch": 2.69, "learning_rate": 4.865707027942422e-05, "loss": 0.3768, "step": 3180 },
    { "epoch": 2.7, "learning_rate": 4.86528365791702e-05, "loss": 0.4159, "step": 3190 },
    { "epoch": 2.71, "learning_rate": 4.8648602878916175e-05, "loss": 0.464, "step": 3200 },
    { "epoch": 2.72, "learning_rate": 4.864436917866215e-05, "loss": 0.6557, "step": 3210 },
    { "epoch": 2.73, "learning_rate": 4.8640558848433535e-05, "loss": 0.4162, "step": 3220 },
    { "epoch": 2.73, "learning_rate": 4.863632514817951e-05, "loss": 0.3844, "step": 3230 },
    { "epoch": 2.74, "learning_rate": 4.863209144792549e-05, "loss": 0.5124, "step": 3240 },
    { "epoch": 2.75, "learning_rate": 4.862785774767147e-05, "loss": 0.561, "step": 3250 },
    { "epoch": 2.76, "learning_rate": 4.8623624047417445e-05, "loss": 0.5773, "step": 3260 },
    { "epoch": 2.77, "learning_rate": 4.861939034716342e-05, "loss": 0.4254, "step": 3270 },
    { "epoch": 2.78, "learning_rate": 4.86151566469094e-05, "loss": 0.4155, "step": 3280 },
    { "epoch": 2.79, "learning_rate": 4.861092294665538e-05, "loss": 0.4501, "step": 3290 },
    { "epoch": 2.79, "learning_rate": 4.8606689246401356e-05, "loss": 0.4316, "step": 3300 },
    { "epoch": 2.8, "learning_rate": 4.860245554614733e-05, "loss": 0.5491, "step": 3310 },
    { "epoch": 2.81, "learning_rate": 4.859822184589332e-05, "loss": 0.5051, "step": 3320 },
    { "epoch": 2.82, "learning_rate": 4.859398814563929e-05, "loss": 0.3897, "step": 3330 },
    { "epoch": 2.83, "learning_rate": 4.858975444538527e-05, "loss": 0.4812, "step": 3340 },
    { "epoch": 2.84, "learning_rate": 4.8585520745131244e-05, "loss": 0.4686, "step": 3350 },
    { "epoch": 2.85, "learning_rate": 4.858128704487723e-05, "loss": 0.5991, "step": 3360 },
    { "epoch": 2.85, "learning_rate": 4.85770533446232e-05, "loss": 0.4779, "step": 3370 },
    { "epoch": 2.86, "learning_rate": 4.8572819644369183e-05, "loss": 0.3563, "step": 3380 },
    { "epoch": 2.87, "learning_rate": 4.856858594411516e-05, "loss": 0.4934, "step": 3390 },
    { "epoch": 2.88, "learning_rate": 4.856435224386114e-05, "loss": 0.4977, "step": 3400 },
    { "epoch": 2.89, "learning_rate": 4.8560118543607116e-05, "loss": 0.581, "step": 3410 },
    { "epoch": 2.9, "learning_rate": 4.8555884843353094e-05, "loss": 0.4515, "step": 3420 },
    { "epoch": 2.9, "learning_rate": 4.855165114309907e-05, "loss": 0.394, "step": 3430 },
    { "epoch": 2.91, "learning_rate": 4.854741744284505e-05, "loss": 0.4212, "step": 3440 },
    { "epoch": 2.92, "learning_rate": 4.854318374259103e-05, "loss": 0.4282, "step": 3450 },
    { "epoch": 2.93, "learning_rate": 4.8538950042337004e-05, "loss": 0.4853, "step": 3460 },
    { "epoch": 2.94, "learning_rate": 4.853471634208298e-05, "loss": 0.3892, "step": 3470 },
    { "epoch": 2.95, "learning_rate": 4.853048264182896e-05, "loss": 0.3855, "step": 3480 },
    { "epoch": 2.96, "learning_rate": 4.852624894157494e-05, "loss": 0.4771, "step": 3490 },
    { "epoch": 2.96, "learning_rate": 4.8522015241320915e-05, "loss": 0.5244, "step": 3500 },
    { "epoch": 2.97, "learning_rate": 4.851778154106689e-05, "loss": 0.5556, "step": 3510 },
    { "epoch": 2.98, "learning_rate": 4.851354784081287e-05, "loss": 0.3799, "step": 3520 },
    { "epoch": 2.99, "learning_rate": 4.850931414055885e-05, "loss": 0.3802, "step": 3530 },
    { "epoch": 3.0, "learning_rate": 4.8505080440304825e-05, "loss": 0.5161, "step": 3540 },
    { "epoch": 3.0, "eval_cer": 0.12360667965027142, "eval_loss": 0.5676856637001038, "eval_mer": 0.3453843644666739, "eval_runtime": 24.0168, "eval_samples_per_second": 90.686, "eval_steps_per_second": 5.704, "step": 3543 },
    { "epoch": 3.01, "learning_rate": 4.850084674005081e-05, "loss": 0.3992, "step": 3550 },
    { "epoch": 3.01, "learning_rate": 4.849661303979679e-05, "loss": 0.3129, "step": 3560 },
    { "epoch": 3.02, "learning_rate": 4.8492379339542765e-05, "loss": 0.3575, "step": 3570 },
    { "epoch": 3.03, "learning_rate": 4.848814563928874e-05, "loss": 0.3663, "step": 3580 },
    { "epoch": 3.04, "learning_rate": 4.848391193903472e-05, "loss": 0.3684, "step": 3590 },
    { "epoch": 3.05, "learning_rate": 4.84796782387807e-05, "loss": 0.4295, "step": 3600 },
    { "epoch": 3.06, "learning_rate": 4.8475444538526675e-05, "loss": 0.3392, "step": 3610 },
    { "epoch": 3.07, "learning_rate": 4.847121083827265e-05, "loss": 0.3861, "step": 3620 },
    { "epoch": 3.07, "learning_rate": 4.846697713801863e-05, "loss": 0.4343, "step": 3630 },
    { "epoch": 3.08, "learning_rate": 4.846274343776461e-05, "loss": 0.4075, "step": 3640 },
    { "epoch": 3.09, "learning_rate": 4.8458509737510586e-05, "loss": 0.4533, "step": 3650 },
    { "epoch": 3.1, "learning_rate": 4.8454276037256564e-05, "loss": 0.3654, "step": 3660 },
    { "epoch": 3.11, "learning_rate": 4.845004233700254e-05, "loss": 0.3483, "step": 3670 },
    { "epoch": 3.12, "learning_rate": 4.844580863674852e-05, "loss": 0.3918, "step": 3680 },
    { "epoch": 3.12, "learning_rate": 4.8441574936494496e-05, "loss": 0.4442, "step": 3690 },
    { "epoch": 3.13, "learning_rate": 4.8437341236240474e-05, "loss": 0.3351, "step": 3700 },
    { "epoch": 3.14, "learning_rate": 4.843310753598646e-05, "loss": 0.3112, "step": 3710 },
    { "epoch": 3.15, "learning_rate": 4.842887383573243e-05, "loss": 0.3461, "step": 3720 },
    { "epoch": 3.16, "learning_rate": 4.8424640135478414e-05, "loss": 0.3967, "step": 3730 },
    { "epoch": 3.17, "learning_rate": 4.8420406435224384e-05, "loss": 0.5231, "step": 3740 },
    { "epoch": 3.18, "learning_rate": 4.841617273497037e-05, "loss": 0.3954, "step": 3750 },
    { "epoch": 3.18, "learning_rate": 4.8411939034716346e-05, "loss": 0.3153, "step": 3760 },
    { "epoch": 3.19, "learning_rate": 4.8407705334462324e-05, "loss": 0.3667, "step": 3770 },
    { "epoch": 3.2, "learning_rate": 4.84034716342083e-05, "loss": 0.4518, "step": 3780 },
    { "epoch": 3.21, "learning_rate": 4.839923793395428e-05, "loss": 0.5369, "step": 3790 },
    { "epoch": 3.22, "learning_rate": 4.839500423370026e-05, "loss": 0.3811, "step": 3800 },
    { "epoch": 3.23, "learning_rate": 4.8390770533446235e-05, "loss": 0.3276, "step": 3810 },
    { "epoch": 3.23, "learning_rate": 4.838653683319221e-05, "loss": 0.3428, "step": 3820 },
    { "epoch": 3.24, "learning_rate": 4.838230313293819e-05, "loss": 0.4002, "step": 3830 },
    { "epoch": 3.25, "learning_rate": 4.837806943268417e-05, "loss": 0.512, "step": 3840 },
    { "epoch": 3.26, "learning_rate": 4.8373835732430145e-05, "loss": 0.4051, "step": 3850 },
    { "epoch": 3.27, "learning_rate": 4.836960203217612e-05, "loss": 0.3545, "step": 3860 },
    { "epoch": 3.28, "learning_rate": 4.83653683319221e-05, "loss": 0.4291, "step": 3870 },
    { "epoch": 3.29, "learning_rate": 4.8361134631668085e-05, "loss": 0.3827, "step": 3880 },
    { "epoch": 3.29, "learning_rate": 4.8356900931414055e-05, "loss": 0.4711, "step": 3890 },
    { "epoch": 3.3, "learning_rate": 4.835266723116004e-05, "loss": 0.4549, "step": 3900 },
    { "epoch": 3.31, "learning_rate": 4.834843353090601e-05, "loss": 0.3589, "step": 3910 },
    { "epoch": 3.32, "learning_rate": 4.8344199830651995e-05, "loss": 0.3354, "step": 3920 },
    { "epoch": 3.33, "learning_rate": 4.8339966130397966e-05, "loss": 0.4284, "step": 3930 },
    { "epoch": 3.34, "learning_rate": 4.833573243014395e-05, "loss": 0.5216, "step": 3940 },
    { "epoch": 3.34, "learning_rate": 4.833149872988992e-05, "loss": 0.4689, "step": 3950 },
    { "epoch": 3.35, "learning_rate": 4.8327265029635906e-05, "loss": 0.3412, "step": 3960 },
    { "epoch": 3.36, "learning_rate": 4.832303132938188e-05, "loss": 0.3283, "step": 3970 },
    { "epoch": 3.37, "learning_rate": 4.831879762912786e-05, "loss": 0.3946, "step": 3980 },
    { "epoch": 3.38, "learning_rate": 4.831456392887384e-05, "loss": 0.504, "step": 3990 },
    { "epoch": 3.39, "learning_rate": 4.8310330228619816e-05, "loss": 0.4394, "step": 4000 },
    { "epoch": 3.4, "learning_rate": 4.8306096528365794e-05, "loss": 0.3218, "step": 4010 },
    { "epoch": 3.4, "learning_rate": 4.830186282811177e-05, "loss": 0.4287, "step": 4020 },
    { "epoch": 3.41, "learning_rate": 4.829762912785775e-05, "loss": 0.3832, "step": 4030 },
    { "epoch": 3.42, "learning_rate": 4.8293395427603727e-05, "loss": 0.5022, "step": 4040 },
    { "epoch": 3.43, "learning_rate": 4.828916172734971e-05, "loss": 0.4091, "step": 4050 },
    { "epoch": 3.44, "learning_rate": 4.828492802709568e-05, "loss": 0.2818, "step": 4060 },
    { "epoch": 3.45, "learning_rate": 4.8280694326841666e-05, "loss": 0.4073, "step": 4070 },
    { "epoch": 3.45, "learning_rate": 4.827646062658764e-05, "loss": 0.3177, "step": 4080 },
    { "epoch": 3.46, "learning_rate": 4.827222692633362e-05, "loss": 0.4037, "step": 4090 },
    { "epoch": 3.47, "learning_rate": 4.826799322607959e-05, "loss": 0.4102, "step": 4100 },
    { "epoch": 3.48, "learning_rate": 4.826375952582558e-05, "loss": 0.3053, "step": 4110 },
    { "epoch": 3.49, "learning_rate": 4.825952582557155e-05, "loss": 0.4242, "step": 4120 },
    { "epoch": 3.5, "learning_rate": 4.825529212531753e-05, "loss": 0.4177, "step": 4130 },
    { "epoch": 3.51, "learning_rate": 4.82510584250635e-05, "loss": 0.4545, "step": 4140 },
    { "epoch": 3.51, "learning_rate": 4.824682472480949e-05, "loss": 0.358, "step": 4150 },
    { "epoch": 3.52, "learning_rate": 4.824259102455546e-05, "loss": 0.3987, "step": 4160 },
    { "epoch": 3.53, "learning_rate": 4.823835732430144e-05, "loss": 0.3892, "step": 4170 },
    { "epoch": 3.54, "learning_rate": 4.823412362404742e-05, "loss": 0.4229, "step": 4180 },
    { "epoch": 3.55, "learning_rate": 4.82298899237934e-05, "loss": 0.4626, "step": 4190 },
    { "epoch": 3.56, "learning_rate": 4.822607959356478e-05, "loss": 0.3527, "step": 4200 },
    { "epoch": 3.56, "learning_rate": 4.822184589331076e-05, "loss": 0.3111, "step": 4210 },
    { "epoch": 3.57, "learning_rate": 4.8217612193056735e-05, "loss": 0.3451, "step": 4220 },
    { "epoch": 3.58, "learning_rate": 4.821337849280271e-05, "loss": 0.3782, "step": 4230 },
    { "epoch": 3.59, "learning_rate": 4.820914479254869e-05, "loss": 0.4518, "step": 4240 },
    { "epoch": 3.6, "learning_rate": 4.820491109229467e-05, "loss": 0.375, "step": 4250 },
    { "epoch": 3.61, "learning_rate": 4.8200677392040646e-05, "loss": 0.3501, "step": 4260 },
    { "epoch": 3.62, "learning_rate": 4.8196443691786623e-05, "loss": 0.3453, "step": 4270 },
    { "epoch": 3.62, "learning_rate": 4.81922099915326e-05, "loss": 0.3315, "step": 4280 },
    { "epoch": 3.63, "learning_rate": 4.818797629127858e-05, "loss": 0.5497, "step": 4290 },
    { "epoch": 3.64, "learning_rate": 4.8183742591024556e-05, "loss": 0.3749, "step": 4300 },
    { "epoch": 3.65, "learning_rate": 4.8179508890770534e-05, "loss": 0.312, "step": 4310 },
    { "epoch": 3.66, "learning_rate": 4.817527519051651e-05, "loss": 0.3664, "step": 4320 },
    { "epoch": 3.67, "learning_rate": 4.817104149026249e-05, "loss": 0.4353, "step": 4330 },
    { "epoch": 3.67, "learning_rate": 4.8166807790008474e-05, "loss": 0.4016, "step": 4340 },
    { "epoch": 3.68, "learning_rate": 4.8162574089754444e-05, "loss": 0.3658, "step": 4350 },
    { "epoch": 3.69, "learning_rate": 4.815834038950043e-05, "loss": 0.2703, "step": 4360 },
    { "epoch": 3.7, "learning_rate": 4.8154106689246406e-05, "loss": 0.3408, "step": 4370 },
    { "epoch": 3.71, "learning_rate": 4.8149872988992384e-05, "loss": 0.3901, "step": 4380 },
    { "epoch": 3.72, "learning_rate": 4.814563928873836e-05, "loss": 0.4561, "step": 4390 },
    { "epoch": 3.73, "learning_rate": 4.814140558848434e-05, "loss": 0.4306, "step": 4400 },
    { "epoch": 3.73, "learning_rate": 4.813717188823032e-05, "loss": 0.3023, "step": 4410 },
    { "epoch": 3.74, "learning_rate": 4.8132938187976295e-05, "loss": 0.3275, "step": 4420 },
    { "epoch": 3.75, "learning_rate": 4.812870448772227e-05, "loss": 0.3832, "step": 4430 },
    { "epoch": 3.76, "learning_rate": 4.812447078746825e-05, "loss": 0.452, "step": 4440 },
    { "epoch": 3.77, "learning_rate": 4.812066045723963e-05, "loss": 0.453, "step": 4450 },
    { "epoch": 3.78, "learning_rate": 4.81164267569856e-05, "loss": 0.3208, "step": 4460 },
    { "epoch": 3.78, "learning_rate": 4.811219305673159e-05, "loss": 0.3845, "step": 4470 },
    { "epoch": 3.79, "learning_rate": 4.810795935647756e-05, "loss": 0.4085, "step": 4480 },
    { "epoch": 3.8, "learning_rate": 4.810372565622354e-05, "loss": 0.4681, "step": 4490 },
    { "epoch": 3.81, "learning_rate": 4.809949195596952e-05, "loss": 0.3528, "step": 4500 },
    { "epoch": 3.82, "learning_rate": 4.80952582557155e-05, "loss": 0.3221, "step": 4510 },
    { "epoch": 3.83, "learning_rate": 4.8091024555461476e-05, "loss": 0.3381, "step": 4520 },
    { "epoch": 3.84, "learning_rate": 4.808679085520745e-05, "loss": 0.3763, "step": 4530 },
    { "epoch": 3.84, "learning_rate": 4.808255715495343e-05, "loss": 0.5083, "step": 4540 },
    { "epoch": 3.85, "learning_rate": 4.807832345469941e-05, "loss": 0.4185, "step": 4550 },
    { "epoch": 3.86, "learning_rate": 4.8074089754445386e-05, "loss": 0.3625, "step": 4560 },
    { "epoch": 3.87, "learning_rate": 4.8069856054191364e-05, "loss": 0.3564, "step": 4570 },
    { "epoch": 3.88, "learning_rate": 4.806562235393735e-05, "loss": 0.3963, "step": 4580 },
    { "epoch": 3.89, "learning_rate": 4.806138865368332e-05, "loss": 0.4047, "step": 4590 },
    { "epoch": 3.9, "learning_rate": 4.80571549534293e-05, "loss": 0.4063, "step": 4600 },
    { "epoch": 3.9, "learning_rate": 4.8052921253175274e-05, "loss": 0.3512, "step": 4610 },
    { "epoch": 3.91, "learning_rate": 4.804868755292126e-05, "loss": 0.3988, "step": 4620 },
    { "epoch": 3.92, "learning_rate": 4.804445385266723e-05, "loss": 0.4998, "step": 4630 },
    { "epoch": 3.93, "learning_rate": 4.8040220152413214e-05, "loss": 0.4857, "step": 4640 },
    { "epoch": 3.94, "learning_rate": 4.8035986452159185e-05, "loss": 0.4727, "step": 4650 },
    { "epoch": 3.95, "learning_rate": 4.803175275190517e-05, "loss": 0.3506, "step": 4660 },
    { "epoch": 3.95, "learning_rate": 4.802751905165114e-05, "loss": 0.3694, "step": 4670 },
    { "epoch": 3.96, "learning_rate": 4.8023285351397124e-05, "loss": 0.3642, "step": 4680 },
    { "epoch": 3.97, "learning_rate": 4.80190516511431e-05, "loss": 0.4493, "step": 4690 },
    { "epoch": 3.98, "learning_rate": 4.801481795088908e-05, "loss": 0.4068, "step": 4700 },
    { "epoch": 3.99, "learning_rate": 4.801058425063506e-05, "loss": 0.3905, "step": 4710 },
    { "epoch": 4.0, "learning_rate": 4.8006350550381035e-05, "loss": 0.4209, "step": 4720 },
    { "epoch": 4.0, "eval_cer": 0.12201825992071713, "eval_loss": 0.5889235138893127, "eval_mer": 0.3426117212134392, "eval_runtime": 27.1757, "eval_samples_per_second": 80.145, "eval_steps_per_second": 5.041, "step": 4724 },
    { "epoch": 4.01, "learning_rate": 4.800211685012701e-05, "loss": 0.404, "step": 4730 },
    { "epoch": 4.01, "learning_rate": 4.799788314987299e-05, "loss": 0.2647, "step": 4740 },
    { "epoch": 4.02, "learning_rate": 4.7993649449618974e-05, "loss": 0.2844, "step": 4750 },
    { "epoch": 4.03, "learning_rate": 4.7989415749364945e-05, "loss": 0.2902, "step": 4760 },
    { "epoch": 4.04, "learning_rate": 4.798518204911093e-05, "loss": 0.416, "step": 4770 },
    { "epoch": 4.05, "learning_rate": 4.79809483488569e-05, "loss": 0.2932, "step": 4780 },
    { "epoch": 4.06, "learning_rate": 4.7976714648602885e-05, "loss": 0.3144, "step": 4790 },
    { "epoch": 4.06, "learning_rate": 4.7972480948348856e-05, "loss": 0.2721, "step": 4800 },
    { "epoch": 4.07, "learning_rate": 4.796824724809484e-05, "loss": 0.3158, "step": 4810 },
    { "epoch": 4.08, "learning_rate": 4.796401354784081e-05, "loss": 0.4105, "step": 4820 },
    { "epoch": 4.09, "learning_rate": 4.7959779847586795e-05, "loss": 0.43, "step": 4830 },
    { "epoch": 4.1, "learning_rate": 4.7955546147332766e-05, "loss": 0.2806, "step": 4840 },
    { "epoch": 4.11, "learning_rate": 4.795131244707875e-05, "loss": 0.2901, "step": 4850 },
    { "epoch": 4.12, "learning_rate": 4.794707874682473e-05, "loss": 0.3427, "step": 4860 },
    { "epoch": 4.12, "learning_rate": 4.7942845046570706e-05, "loss": 0.3866, "step": 4870 },
    { "epoch": 4.13, "learning_rate": 4.7938611346316683e-05, "loss": 0.338, "step": 4880 },
    { "epoch": 4.14, "learning_rate": 4.793437764606266e-05, "loss": 0.2824, "step": 4890 },
    { "epoch": 4.15, "learning_rate": 4.793014394580864e-05, "loss": 0.3001, "step": 4900 },
    { "epoch": 4.16, "learning_rate": 4.7925910245554616e-05, "loss": 0.3255, "step": 4910 },
    { "epoch": 4.17, "learning_rate": 4.7921676545300594e-05, "loss": 0.3247, "step": 4920 },
    { "epoch": 4.17, "learning_rate": 4.791744284504657e-05, "loss": 0.3804, "step": 4930 },
    { "epoch": 4.18, "learning_rate": 4.7913209144792556e-05, "loss": 0.2978, "step": 4940 },
    { "epoch": 4.19, "learning_rate": 4.790897544453853e-05, "loss": 0.279, "step": 4950 },
    { "epoch": 4.2, "learning_rate": 4.790474174428451e-05, "loss": 0.3198, "step": 4960 },
    { "epoch": 4.21, "learning_rate": 4.790050804403048e-05, "loss": 0.3622, "step": 4970 },
    { "epoch": 4.22, "learning_rate": 4.7896274343776466e-05, "loss": 0.4556, "step": 4980 },
    { "epoch": 4.23, "learning_rate": 4.789204064352244e-05, "loss": 0.275, "step": 4990 },
    { "epoch": 4.23, "learning_rate": 4.788780694326842e-05, "loss": 0.339, "step": 5000 },
    { "epoch": 4.24, "learning_rate": 4.788357324301439e-05, "loss": 0.3579, "step": 5010 },
    { "epoch": 4.25, "learning_rate": 4.787933954276038e-05, "loss": 0.3492, "step": 5020 },
    { "epoch": 4.26, "learning_rate": 4.787552921253175e-05, "loss": 0.5051, "step": 5030 },
    { "epoch": 4.27, "learning_rate": 4.787129551227773e-05, "loss": 0.2782, "step": 5040 },
    { "epoch": 4.28, "learning_rate": 4.7867061812023715e-05, "loss": 0.3414, "step": 5050 },
    { "epoch": 4.28, "learning_rate": 4.7862828111769686e-05, "loss": 0.3327, "step": 5060 },
    { "epoch": 4.29, "learning_rate": 4.785859441151567e-05, "loss": 0.3754, "step": 5070 },
    { "epoch": 4.3, "learning_rate": 4.785436071126165e-05, "loss": 0.3647, "step": 5080 },
    { "epoch": 4.31, "learning_rate": 4.7850127011007625e-05, "loss": 0.2644, "step": 5090 },
    { "epoch": 4.32, "learning_rate": 4.78458933107536e-05, "loss": 0.2835, "step": 5100 },
    { "epoch": 4.33, "learning_rate": 4.784165961049958e-05, "loss": 0.3444, "step": 5110 },
    { "epoch": 4.34, "learning_rate": 4.783742591024556e-05, "loss": 0.3817, "step": 5120 },
    { "epoch": 4.34, "learning_rate": 4.7833192209991536e-05, "loss": 0.3676, "step": 5130 },
    { "epoch": 4.35, "learning_rate": 4.782895850973751e-05, "loss": 0.2442, "step": 5140 },
    { "epoch": 4.36, "learning_rate": 4.782472480948349e-05, "loss": 0.2907, "step": 5150 },
    { "epoch": 4.37, "learning_rate": 4.782049110922947e-05, "loss": 0.3163, "step": 5160 },
    { "epoch": 4.38, "learning_rate": 4.7816257408975446e-05, "loss": 0.338, "step": 5170 },
    { "epoch": 4.39, "learning_rate": 4.7812023708721424e-05, "loss": 0.4044, "step": 5180 },
    { "epoch": 4.39, "learning_rate": 4.78077900084674e-05, "loss": 0.3073, "step": 5190 },
    { "epoch": 4.4, "learning_rate": 4.780355630821338e-05, "loss": 0.2753, "step": 5200 },
    { "epoch": 4.41, "learning_rate": 4.7799322607959357e-05, "loss": 0.3416, "step": 5210 },
    { "epoch": 4.42, "learning_rate": 4.779508890770534e-05, "loss": 0.3171, "step": 5220 },
    { "epoch": 4.43, "learning_rate": 4.779085520745131e-05, "loss": 0.4188, "step": 5230 },
    { "epoch": 4.44, "learning_rate": 4.7786621507197296e-05, "loss": 0.2775,
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 4.778238780694327e-05, |
|
"loss": 0.3707, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 4.777815410668925e-05, |
|
"loss": 0.3064, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 4.777392040643522e-05, |
|
"loss": 0.3798, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 4.776968670618121e-05, |
|
"loss": 0.3791, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 4.7765453005927184e-05, |
|
"loss": 0.2691, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 4.776121930567316e-05, |
|
"loss": 0.2965, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 4.775698560541914e-05, |
|
"loss": 0.3141, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 4.775275190516512e-05, |
|
"loss": 0.4067, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 4.7748518204911095e-05, |
|
"loss": 0.3959, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 4.774428450465707e-05, |
|
"loss": 0.2693, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.774005080440305e-05, |
|
"loss": 0.2673, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.773581710414903e-05, |
|
"loss": 0.2952, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.7731583403895005e-05, |
|
"loss": 0.3988, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.772734970364098e-05, |
|
"loss": 0.309, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.772311600338697e-05, |
|
"loss": 0.2741, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.771888230313294e-05, |
|
"loss": 0.3079, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.771464860287892e-05, |
|
"loss": 0.3819, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 4.771041490262489e-05, |
|
"loss": 0.3842, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.770618120237088e-05, |
|
"loss": 0.4219, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.770194750211685e-05, |
|
"loss": 0.2819, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.769771380186283e-05, |
|
"loss": 0.2979, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 4.7693480101608804e-05, |
|
"loss": 0.2718, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 4.768924640135479e-05, |
|
"loss": 0.3435, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.768501270110076e-05, |
|
"loss": 0.3944, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 4.7680779000846743e-05, |
|
"loss": 0.2934, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 4.7676545300592714e-05, |
|
"loss": 0.3293, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 4.76723116003387e-05, |
|
"loss": 0.3617, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 4.7668077900084676e-05, |
|
"loss": 0.3764, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 4.7663844199830654e-05, |
|
"loss": 0.4426, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 4.765961049957663e-05, |
|
"loss": 0.255, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 4.765537679932261e-05, |
|
"loss": 0.2891, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 4.7651143099068594e-05, |
|
"loss": 0.311, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 4.7646909398814564e-05, |
|
"loss": 0.3547, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 4.764267569856055e-05, |
|
"loss": 0.3187, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 4.763844199830652e-05, |
|
"loss": 0.2431, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 4.7634208298052504e-05, |
|
"loss": 0.2886, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 4.7629974597798475e-05, |
|
"loss": 0.3226, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 4.762574089754446e-05, |
|
"loss": 0.3693, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 4.762150719729043e-05, |
|
"loss": 0.3709, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 4.7617273497036414e-05, |
|
"loss": 0.297, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 4.7613039796782385e-05, |
|
"loss": 0.2794, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 4.760880609652837e-05, |
|
"loss": 0.3495, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 4.760457239627435e-05, |
|
"loss": 0.3761, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 4.7600338696020325e-05, |
|
"loss": 0.3361, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 4.75961049957663e-05, |
|
"loss": 0.2881, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 4.759187129551228e-05, |
|
"loss": 0.3044, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 4.758763759525826e-05, |
|
"loss": 0.3407, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 4.7583403895004235e-05, |
|
"loss": 0.3124, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 4.757917019475022e-05, |
|
"loss": 0.3587, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.757493649449619e-05, |
|
"loss": 0.2854, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 4.7570702794242175e-05, |
|
"loss": 0.2995, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 4.7566469093988146e-05, |
|
"loss": 0.3462, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 4.756223539373413e-05, |
|
"loss": 0.3729, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 4.75580016934801e-05, |
|
"loss": 0.3514, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 4.7553767993226086e-05, |
|
"loss": 0.2361, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 4.7549534292972056e-05, |
|
"loss": 0.2972, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 4.754530059271804e-05, |
|
"loss": 0.3605, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.754106689246401e-05, |
|
"loss": 0.3621, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 4.7536833192209996e-05, |
|
"loss": 0.3325, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 4.7532599491955974e-05, |
|
"loss": 0.2995, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 4.752836579170195e-05, |
|
"loss": 0.3203, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 4.752413209144793e-05, |
|
"loss": 0.3237, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 4.7519898391193906e-05, |
|
"loss": 0.4474, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 4.7515664690939884e-05, |
|
"loss": 0.2895, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 4.751143099068586e-05, |
|
"loss": 0.3015, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 4.750719729043184e-05, |
|
"loss": 0.3663, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_cer": 0.11584414149366704, |
|
"eval_loss": 0.5903986096382141, |
|
"eval_mer": 0.32532347504621073, |
|
"eval_runtime": 24.1423, |
|
"eval_samples_per_second": 90.215, |
|
"eval_steps_per_second": 5.675, |
|
"step": 5905 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 4.750338696020322e-05, |
|
"loss": 0.2998, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 4.74991532599492e-05, |
|
"loss": 0.222, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 4.749491955969518e-05, |
|
"loss": 0.2321, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 4.7490685859441155e-05, |
|
"loss": 0.2734, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 4.748645215918713e-05, |
|
"loss": 0.2485, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 4.748221845893311e-05, |
|
"loss": 0.3122, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 4.747798475867909e-05, |
|
"loss": 0.2434, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 4.7473751058425065e-05, |
|
"loss": 0.2556, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 4.746951735817104e-05, |
|
"loss": 0.3138, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 4.746528365791702e-05, |
|
"loss": 0.2655, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 4.7461049957663e-05, |
|
"loss": 0.3957, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 4.7456816257408976e-05, |
|
"loss": 0.2179, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 4.745258255715495e-05, |
|
"loss": 0.2319, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 4.744834885690093e-05, |
|
"loss": 0.2649, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 4.7444115156646915e-05, |
|
"loss": 0.2678, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 4.7439881456392886e-05, |
|
"loss": 0.2765, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 4.743564775613887e-05, |
|
"loss": 0.221, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 4.743141405588484e-05, |
|
"loss": 0.3079, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 4.7427180355630826e-05, |
|
"loss": 0.2536, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 4.7422946655376803e-05, |
|
"loss": 0.3262, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 4.741871295512278e-05, |
|
"loss": 0.3531, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 4.741447925486876e-05, |
|
"loss": 0.2218, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 4.7410245554614736e-05, |
|
"loss": 0.2826, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 4.7406011854360714e-05, |
|
"loss": 0.3085, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 4.740177815410669e-05, |
|
"loss": 0.2716, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 4.739754445385267e-05, |
|
"loss": 0.3887, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 4.739331075359865e-05, |
|
"loss": 0.2004, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 4.7389077053344624e-05, |
|
"loss": 0.2359, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 4.73848433530906e-05, |
|
"loss": 0.2551, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 4.738060965283658e-05, |
|
"loss": 0.3348, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 4.737637595258256e-05, |
|
"loss": 0.2479, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 4.737214225232854e-05, |
|
"loss": 0.2186, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 4.736790855207451e-05, |
|
"loss": 0.2655, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 4.73636748518205e-05, |
|
"loss": 0.2573, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 4.735944115156647e-05, |
|
"loss": 0.3231, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 4.735520745131245e-05, |
|
"loss": 0.4099, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 4.735097375105842e-05, |
|
"loss": 0.227, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 4.734674005080441e-05, |
|
"loss": 0.2678, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 4.734250635055038e-05, |
|
"loss": 0.266, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 4.733827265029636e-05, |
|
"loss": 0.342, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 4.733403895004234e-05, |
|
"loss": 0.3919, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 4.732980524978832e-05, |
|
"loss": 0.2578, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 4.7325571549534295e-05, |
|
"loss": 0.2261, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 4.732133784928027e-05, |
|
"loss": 0.305, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 4.731710414902625e-05, |
|
"loss": 0.2893, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 4.731287044877223e-05, |
|
"loss": 0.3134, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 4.7308636748518206e-05, |
|
"loss": 0.2319, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 4.7304403048264183e-05, |
|
"loss": 0.2853, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 4.730016934801017e-05, |
|
"loss": 0.2884, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 4.729593564775614e-05, |
|
"loss": 0.3469, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 4.729170194750212e-05, |
|
"loss": 0.34, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 4.7287468247248094e-05, |
|
"loss": 0.2632, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 4.728323454699408e-05, |
|
"loss": 0.2544, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 4.727900084674005e-05, |
|
"loss": 0.3574, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 4.7274767146486034e-05, |
|
"loss": 0.3068, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 4.7270533446232004e-05, |
|
"loss": 0.3268, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 4.726629974597799e-05, |
|
"loss": 0.2279, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 4.726206604572396e-05, |
|
"loss": 0.2484, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 4.7257832345469944e-05, |
|
"loss": 0.2426, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 4.725359864521592e-05, |
|
"loss": 0.2924, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 4.72493649449619e-05, |
|
"loss": 0.3111, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 4.724513124470788e-05, |
|
"loss": 0.2546, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 4.7240897544453855e-05, |
|
"loss": 0.2674, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 4.723666384419983e-05, |
|
"loss": 0.3439, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 4.723243014394581e-05, |
|
"loss": 0.3073, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 4.7228196443691794e-05, |
|
"loss": 0.3552, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 4.7223962743437765e-05, |
|
"loss": 0.2658, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 4.721972904318375e-05, |
|
"loss": 0.2772, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 4.721549534292972e-05, |
|
"loss": 0.2463, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 4.7211261642675705e-05, |
|
"loss": 0.2514, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 4.7207027942421675e-05, |
|
"loss": 0.286, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 4.720279424216766e-05, |
|
"loss": 0.2621, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 4.719856054191363e-05, |
|
"loss": 0.209, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 4.7194326841659615e-05, |
|
"loss": 0.296, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 4.7190093141405586e-05, |
|
"loss": 0.3293, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 4.718585944115157e-05, |
|
"loss": 0.3592, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 4.718162574089755e-05, |
|
"loss": 0.2537, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 4.7177392040643526e-05, |
|
"loss": 0.224, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 4.71731583403895e-05, |
|
"loss": 0.2843, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 4.716892464013548e-05, |
|
"loss": 0.3668, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 4.716511430990686e-05, |
|
"loss": 0.2834, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 4.716088060965284e-05, |
|
"loss": 0.217, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 4.715664690939882e-05, |
|
"loss": 0.2405, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 4.7152413209144796e-05, |
|
"loss": 0.3263, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 4.7148179508890774e-05, |
|
"loss": 0.3204, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 4.714394580863675e-05, |
|
"loss": 0.3493, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 4.713971210838273e-05, |
|
"loss": 0.2501, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 4.713547840812871e-05, |
|
"loss": 0.2726, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 4.7131244707874684e-05, |
|
"loss": 0.2663, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 4.712701100762066e-05, |
|
"loss": 0.3557, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 4.712277730736664e-05, |
|
"loss": 0.329, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 4.711854360711262e-05, |
|
"loss": 0.221, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 4.7114309906858595e-05, |
|
"loss": 0.2512, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 4.711007620660457e-05, |
|
"loss": 0.3038, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 4.710584250635055e-05, |
|
"loss": 0.3058, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 4.7101608806096534e-05, |
|
"loss": 0.2861, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 4.7097375105842505e-05, |
|
"loss": 0.2352, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 4.709314140558849e-05, |
|
"loss": 0.2317, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 4.708890770533446e-05, |
|
"loss": 0.3027, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 4.7084674005080445e-05, |
|
"loss": 0.3641, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 4.708086367485182e-05, |
|
"loss": 0.2341, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 4.7076629974597805e-05, |
|
"loss": 0.2346, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 4.7072396274343776e-05, |
|
"loss": 0.2628, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 4.706816257408976e-05, |
|
"loss": 0.281, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 4.706392887383573e-05, |
|
"loss": 0.2478, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 4.7059695173581716e-05, |
|
"loss": 0.3587, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 4.7055461473327686e-05, |
|
"loss": 0.2311, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 4.705122777307367e-05, |
|
"loss": 0.2168, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 4.704699407281964e-05, |
|
"loss": 0.2599, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 4.7042760372565626e-05, |
|
"loss": 0.3293, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 4.7038526672311604e-05, |
|
"loss": 0.4742, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 4.703429297205758e-05, |
|
"loss": 0.2366, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 4.703005927180356e-05, |
|
"loss": 0.2503, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 4.7025825571549536e-05, |
|
"loss": 0.3153, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 4.7021591871295514e-05, |
|
"loss": 0.2815, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 4.701735817104149e-05, |
|
"loss": 0.3547, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 4.7013124470787476e-05, |
|
"loss": 0.2523, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 4.700889077053345e-05, |
|
"loss": 0.3156, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_cer": 0.11519496125637095, |
|
"eval_loss": 0.619243323802948, |
|
"eval_mer": 0.31727737305643144, |
|
"eval_runtime": 24.0991, |
|
"eval_samples_per_second": 90.377, |
|
"eval_steps_per_second": 5.685, |
|
"step": 7086 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 4.700465707027943e-05, |
|
"loss": 0.346, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 4.70004233700254e-05, |
|
"loss": 0.1886, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 4.6996189669771387e-05, |
|
"loss": 0.2036, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 4.699195596951736e-05, |
|
"loss": 0.2303, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 4.698772226926334e-05, |
|
"loss": 0.2681, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 4.698348856900931e-05, |
|
"loss": 0.2554, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 4.69792548687553e-05, |
|
"loss": 0.2108, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 4.697502116850127e-05, |
|
"loss": 0.2299, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 4.697078746824725e-05, |
|
"loss": 0.2759, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 4.696655376799323e-05, |
|
"loss": 0.2758, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 4.696232006773921e-05, |
|
"loss": 0.2705, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 4.6958086367485185e-05, |
|
"loss": 0.2431, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 4.695385266723116e-05, |
|
"loss": 0.2274, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 4.694961896697714e-05, |
|
"loss": 0.2633, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 4.694538526672312e-05, |
|
"loss": 0.2357, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 4.6941151566469096e-05, |
|
"loss": 0.3212, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 4.693691786621507e-05, |
|
"loss": 0.2035, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 4.693268416596105e-05, |
|
"loss": 0.2362, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 4.692845046570703e-05, |
|
"loss": 0.2695, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 4.692421676545301e-05, |
|
"loss": 0.2789, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 4.6919983065198984e-05, |
|
"loss": 0.2722, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 4.691574936494497e-05, |
|
"loss": 0.2175, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 4.691151566469094e-05, |
|
"loss": 0.2006, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 4.690728196443692e-05, |
|
"loss": 0.2645, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 4.6903048264182894e-05, |
|
"loss": 0.2411, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 4.689881456392888e-05, |
|
"loss": 0.286, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 4.6894580863674856e-05, |
|
"loss": 0.2099, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 4.6890347163420834e-05, |
|
"loss": 0.2343, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 4.688611346316681e-05, |
|
"loss": 0.2238, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 4.688187976291279e-05, |
|
"loss": 0.2739, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 4.687764606265877e-05, |
|
"loss": 0.2951, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 4.6873412362404744e-05, |
|
"loss": 0.1936, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 4.686917866215072e-05, |
|
"loss": 0.2228, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 4.68649449618967e-05, |
|
"loss": 0.2278, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 4.686071126164268e-05, |
|
"loss": 0.2854, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 4.685690093141406e-05, |
|
"loss": 0.3059, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 4.685266723116004e-05, |
|
"loss": 0.193, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 4.6848433530906015e-05, |
|
"loss": 0.2073, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 4.684419983065199e-05, |
|
"loss": 0.2896, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 4.683996613039797e-05, |
|
"loss": 0.2773, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 4.683573243014395e-05, |
|
"loss": 0.22, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 4.6831498729889925e-05, |
|
"loss": 0.2059, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 4.68272650296359e-05, |
|
"loss": 0.2365, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 4.682303132938188e-05, |
|
"loss": 0.2368, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 4.681879762912786e-05, |
|
"loss": 0.2624, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 4.681498729889924e-05, |
|
"loss": 0.2982, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 4.681075359864522e-05, |
|
"loss": 0.2593, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 4.6806519898391196e-05, |
|
"loss": 0.2347, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 4.6802286198137174e-05, |
|
"loss": 0.253, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 4.679805249788316e-05, |
|
"loss": 0.2896, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 4.679381879762913e-05, |
|
"loss": 0.3142, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 4.678958509737511e-05, |
|
"loss": 0.2309, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 4.6785351397121084e-05, |
|
"loss": 0.2358, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 4.678111769686707e-05, |
|
"loss": 0.2623, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 4.677688399661304e-05, |
|
"loss": 0.335, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 4.6772650296359024e-05, |
|
"loss": 0.317, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 4.6768416596104995e-05, |
|
"loss": 0.2199, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 4.676418289585098e-05, |
|
"loss": 0.2316, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 4.675994919559695e-05, |
|
"loss": 0.2344, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 4.6755715495342934e-05, |
|
"loss": 0.2517, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 4.6751481795088905e-05, |
|
"loss": 0.2638, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 4.674724809483489e-05, |
|
"loss": 0.2674, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 4.674301439458087e-05, |
|
"loss": 0.2267, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 4.6738780694326845e-05, |
|
"loss": 0.2819, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 4.673454699407282e-05, |
|
"loss": 0.2611, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 4.67303132938188e-05, |
|
"loss": 0.4463, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 4.672607959356478e-05, |
|
"loss": 0.2092, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 4.6721845893310755e-05, |
|
"loss": 0.2554, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 4.671761219305673e-05, |
|
"loss": 0.2662, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 4.671337849280271e-05, |
|
"loss": 0.3036, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 4.6709144792548695e-05, |
|
"loss": 0.3882, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 4.6704911092294666e-05, |
|
"loss": 0.2478, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 4.670067739204065e-05, |
|
"loss": 0.276, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 4.669644369178662e-05, |
|
"loss": 0.2807, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 4.6692209991532605e-05, |
|
"loss": 0.3415, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 4.6687976291278576e-05, |
|
"loss": 0.3372, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 4.668374259102456e-05, |
|
"loss": 0.2488, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 4.667950889077053e-05, |
|
"loss": 0.2932, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 4.6675275190516516e-05, |
|
"loss": 0.3033, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 4.667104149026249e-05, |
|
"loss": 0.2999, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 4.666680779000847e-05, |
|
"loss": 0.4221, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 4.666257408975445e-05, |
|
"loss": 0.2158, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 4.6658340389500426e-05, |
|
"loss": 0.2835, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 4.6654106689246404e-05, |
|
"loss": 0.2663, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 4.664987298899238e-05, |
|
"loss": 0.2763, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 4.664563928873836e-05, |
|
"loss": 0.3322, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 4.664140558848434e-05, |
|
"loss": 0.2148, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 4.6637171888230314e-05, |
|
"loss": 0.232, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 4.663293818797629e-05, |
|
"loss": 0.3275, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 4.662870448772227e-05, |
|
"loss": 0.318, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 4.662447078746825e-05, |
|
"loss": 0.3615, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 4.662023708721423e-05, |
|
"loss": 0.2483, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 4.66160033869602e-05, |
|
"loss": 0.232, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 4.661176968670619e-05, |
|
"loss": 0.3508, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 4.660753598645216e-05, |
|
"loss": 0.3212, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 4.660330228619814e-05, |
|
"loss": 0.3302, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 4.659906858594412e-05, |
|
"loss": 0.2584, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 4.65948348856901e-05, |
|
"loss": 0.2511, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 4.6590601185436075e-05, |
|
"loss": 0.2861, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 4.658636748518205e-05, |
|
"loss": 0.24, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 4.658213378492803e-05, |
|
"loss": 0.3855, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 4.657790008467401e-05, |
|
"loss": 0.2516, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 4.6573666384419985e-05, |
|
"loss": 0.243, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 4.656943268416596e-05, |
|
"loss": 0.264, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 4.656519898391194e-05, |
|
"loss": 0.2724, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 4.656096528365792e-05, |
|
"loss": 0.3747, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 4.6556731583403896e-05, |
|
"loss": 0.2474, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 4.6552497883149873e-05, |
|
"loss": 0.2911, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 4.654826418289585e-05, |
|
"loss": 0.3067, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 4.654403048264183e-05, |
|
"loss": 0.2923, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 4.6539796782387806e-05, |
|
"loss": 0.3718, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 4.653556308213379e-05, |
|
"loss": 0.2335, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 4.653132938187976e-05, |
|
"loss": 0.2691, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 4.6527095681625746e-05, |
|
"loss": 0.2714, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 4.6522861981371724e-05, |
|
"loss": 0.318, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 4.65186282811177e-05, |
|
"loss": 0.2996, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 4.651439458086368e-05, |
|
"loss": 0.2504, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 4.6510160880609656e-05, |
|
"loss": 0.3216, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_cer": 0.12025027969999585, |
|
"eval_loss": 0.6333013772964478, |
|
"eval_mer": 0.3283679460693704, |
|
"eval_runtime": 28.1996, |
|
"eval_samples_per_second": 77.235, |
|
"eval_steps_per_second": 4.858, |
|
"step": 8267 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 4.6505927180355634e-05, |
|
"loss": 0.4237, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 4.650169348010161e-05, |
|
"loss": 0.211, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 4.649745977984759e-05, |
|
"loss": 0.2188, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 4.649322607959357e-05, |
|
"loss": 0.2625, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 4.6488992379339545e-05, |
|
"loss": 0.2426, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 4.648475867908552e-05, |
|
"loss": 0.3566, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 4.64805249788315e-05, |
|
"loss": 0.2016, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 4.647629127857748e-05, |
|
"loss": 0.2261, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 4.6472057578323455e-05, |
|
"loss": 0.2851, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 4.646782387806943e-05, |
|
"loss": 0.2439, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 4.646359017781542e-05, |
|
"loss": 0.2758, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 4.645935647756139e-05, |
|
"loss": 0.2368, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 4.645512277730737e-05, |
|
"loss": 0.2133, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 4.645088907705334e-05, |
|
"loss": 0.2877, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 4.644665537679933e-05, |
|
"loss": 0.3397, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 4.64424216765453e-05, |
|
"loss": 0.2874, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 4.643818797629128e-05, |
|
"loss": 0.2254, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 4.643395427603726e-05, |
|
"loss": 0.2248, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 4.642972057578324e-05, |
|
"loss": 0.3419, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 4.6425486875529216e-05, |
|
"loss": 0.298, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 4.642125317527519e-05, |
|
"loss": 0.3014, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 4.641701947502117e-05, |
|
"loss": 0.2305, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 4.641278577476715e-05, |
|
"loss": 0.2746, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 4.6408552074513126e-05, |
|
"loss": 0.2775, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 4.6404318374259104e-05, |
|
"loss": 0.2395, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 4.6400508044030486e-05, |
|
"loss": 0.3842, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 4.6396274343776464e-05, |
|
"loss": 0.2504, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 4.639204064352244e-05, |
|
"loss": 0.2445, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 4.638780694326842e-05, |
|
"loss": 0.3471, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 4.63835732430144e-05, |
|
"loss": 0.2972, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 4.6379339542760374e-05, |
|
"loss": 0.3146, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 4.637510584250635e-05, |
|
"loss": 0.2575, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 4.637087214225233e-05, |
|
"loss": 0.2692, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 4.6366638441998314e-05, |
|
"loss": 0.3045, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 4.6362404741744285e-05, |
|
"loss": 0.3537, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 4.635817104149027e-05, |
|
"loss": 0.3779, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 4.635393734123624e-05, |
|
"loss": 0.2749, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 4.6349703640982224e-05, |
|
"loss": 0.2731, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 4.6345469940728195e-05, |
|
"loss": 0.3241, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 4.634123624047418e-05, |
|
"loss": 0.3026, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 4.633700254022015e-05, |
|
"loss": 0.3854, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 4.6332768839966135e-05, |
|
"loss": 0.3231, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 4.632853513971211e-05, |
|
"loss": 0.3154, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 4.632430143945809e-05, |
|
"loss": 0.3299, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 4.632006773920407e-05, |
|
"loss": 0.3534, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 4.6316257408975444e-05, |
|
"loss": 0.3238, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 4.631202370872143e-05, |
|
"loss": 0.2916, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 4.6307790008467406e-05, |
|
"loss": 0.2876, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 4.630355630821338e-05, |
|
"loss": 0.3361, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 4.629932260795936e-05, |
|
"loss": 0.412, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 4.629508890770534e-05, |
|
"loss": 0.4966, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 4.6290855207451316e-05, |
|
"loss": 0.3582, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 4.6286621507197294e-05, |
|
"loss": 0.3851, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 4.628238780694327e-05, |
|
"loss": 0.4106, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 4.627815410668925e-05, |
|
"loss": 0.3664, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 4.6273920406435226e-05, |
|
"loss": 0.411, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 4.6269686706181204e-05, |
|
"loss": 0.3432, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 4.626545300592718e-05, |
|
"loss": 0.3722, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 4.626121930567316e-05, |
|
"loss": 0.3891, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 4.625698560541914e-05, |
|
"loss": 0.4339, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 4.6252751905165115e-05, |
|
"loss": 0.4677, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 4.624851820491109e-05, |
|
"loss": 0.4089, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 4.624428450465707e-05, |
|
"loss": 0.3883, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 4.6240050804403054e-05, |
|
"loss": 0.4075, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 4.6235817104149025e-05, |
|
"loss": 0.4336, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 4.623158340389501e-05, |
|
"loss": 0.404, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 4.622734970364098e-05, |
|
"loss": 0.3651, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 4.6223116003386965e-05, |
|
"loss": 0.3829, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 4.6218882303132936e-05, |
|
"loss": 0.4203, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 4.621464860287892e-05, |
|
"loss": 0.4489, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 4.62104149026249e-05, |
|
"loss": 0.5394, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 4.6206181202370875e-05, |
|
"loss": 0.4269, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 4.620194750211685e-05, |
|
"loss": 0.3924, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 4.619771380186283e-05, |
|
"loss": 0.3954, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 4.619348010160881e-05, |
|
"loss": 0.4603, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 4.6189246401354786e-05, |
|
"loss": 0.5261, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 4.618501270110076e-05, |
|
"loss": 0.38, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 4.618077900084674e-05, |
|
"loss": 0.411, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 4.617654530059272e-05, |
|
"loss": 0.4202, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 4.6172311600338696e-05, |
|
"loss": 0.4901, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 4.616807790008468e-05, |
|
"loss": 0.515, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 4.616384419983065e-05, |
|
"loss": 0.3756, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 4.6159610499576636e-05, |
|
"loss": 0.4191, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 4.6155376799322607e-05, |
|
"loss": 0.4276, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 4.615114309906859e-05, |
|
"loss": 0.4326, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 4.614690939881456e-05, |
|
"loss": 0.6135, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 4.6142675698560546e-05, |
|
"loss": 0.4903, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 4.613844199830652e-05, |
|
"loss": 0.4487, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 4.61342082980525e-05, |
|
"loss": 0.4621, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 4.612997459779847e-05, |
|
"loss": 0.4555, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 4.612574089754446e-05, |
|
"loss": 0.4764, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 4.6121507197290434e-05, |
|
"loss": 0.3884, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 4.611727349703641e-05, |
|
"loss": 0.4318, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 4.611303979678239e-05, |
|
"loss": 0.4756, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 4.610880609652837e-05, |
|
"loss": 0.4576, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 4.6104572396274345e-05, |
|
"loss": 0.5186, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 4.610033869602032e-05, |
|
"loss": 0.4117, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 4.609610499576631e-05, |
|
"loss": 0.4868, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 4.609187129551228e-05, |
|
"loss": 0.5135, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 4.608763759525826e-05, |
|
"loss": 0.5431, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 4.608340389500423e-05, |
|
"loss": 0.5298, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 4.607917019475022e-05, |
|
"loss": 0.464, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 4.607493649449619e-05, |
|
"loss": 0.4526, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 4.607070279424217e-05, |
|
"loss": 0.4326, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 4.606646909398814e-05, |
|
"loss": 0.5202, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 4.606223539373413e-05, |
|
"loss": 0.5257, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 4.60580016934801e-05, |
|
"loss": 0.398, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 4.605376799322608e-05, |
|
"loss": 0.4207, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 4.604953429297206e-05, |
|
"loss": 0.4808, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 4.604530059271804e-05, |
|
"loss": 0.4916, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 4.6041066892464016e-05, |
|
"loss": 0.514, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 4.6036833192209993e-05, |
|
"loss": 0.4566, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 4.603259949195598e-05, |
|
"loss": 0.4996, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 4.602836579170195e-05, |
|
"loss": 0.5469, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 4.602413209144793e-05, |
|
"loss": 0.5833, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 4.6019898391193904e-05, |
|
"loss": 0.5203, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 4.601566469093989e-05, |
|
"loss": 0.4557, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 4.601143099068586e-05, |
|
"loss": 0.4486, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_cer": 0.12729457589193222, |
|
"eval_loss": 0.6445448398590088, |
|
"eval_mer": 0.3521256931608133, |
|
"eval_runtime": 24.3275, |
|
"eval_samples_per_second": 89.528, |
|
"eval_steps_per_second": 5.631, |
|
"step": 9448 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 4.6007197290431844e-05, |
|
"loss": 0.4439, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 4.6002963590177814e-05, |
|
"loss": 0.416, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 4.59987298899238e-05, |
|
"loss": 0.4454, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 4.599449618966977e-05, |
|
"loss": 0.4962, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 4.5990262489415754e-05, |
|
"loss": 0.5252, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 4.5986028789161725e-05, |
|
"loss": 0.5922, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 4.598179508890771e-05, |
|
"loss": 0.4395, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 4.597756138865369e-05, |
|
"loss": 0.4726, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 4.5973327688399664e-05, |
|
"loss": 0.4738, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 4.596909398814564e-05, |
|
"loss": 0.5014, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 4.5965283657917025e-05, |
|
"loss": 0.4696, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 4.5961049957663e-05, |
|
"loss": 0.4771, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 4.595681625740898e-05, |
|
"loss": 0.4974, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 4.595258255715496e-05, |
|
"loss": 0.4569, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 4.5948348856900935e-05, |
|
"loss": 0.4682, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 4.594411515664691e-05, |
|
"loss": 0.5194, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 4.593988145639289e-05, |
|
"loss": 0.5246, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 4.593564775613887e-05, |
|
"loss": 0.4881, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 4.5931414055884846e-05, |
|
"loss": 0.5594, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 4.592718035563082e-05, |
|
"loss": 0.5379, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 4.59229466553768e-05, |
|
"loss": 0.5215, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 4.591871295512278e-05, |
|
"loss": 0.5857, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 4.5914479254868756e-05, |
|
"loss": 0.6261, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 4.5910245554614734e-05, |
|
"loss": 0.653, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 4.590601185436071e-05, |
|
"loss": 0.6165, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 4.590177815410669e-05, |
|
"loss": 0.6577, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 4.589754445385267e-05, |
|
"loss": 0.6304, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 4.5893310753598644e-05, |
|
"loss": 0.6166, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 4.588907705334463e-05, |
|
"loss": 0.6116, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 4.58848433530906e-05, |
|
"loss": 0.6094, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 4.5880609652836584e-05, |
|
"loss": 0.6384, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 4.587637595258256e-05, |
|
"loss": 0.6939, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 4.587214225232854e-05, |
|
"loss": 0.6348, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 4.586790855207452e-05, |
|
"loss": 0.6529, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 4.5863674851820494e-05, |
|
"loss": 0.6509, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 4.585944115156647e-05, |
|
"loss": 0.7135, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 4.585520745131245e-05, |
|
"loss": 0.6971, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 4.585097375105843e-05, |
|
"loss": 0.6923, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 4.5846740050804405e-05, |
|
"loss": 0.703, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 4.584250635055038e-05, |
|
"loss": 0.7819, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 4.583827265029636e-05, |
|
"loss": 0.8499, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 4.583403895004234e-05, |
|
"loss": 0.7978, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 4.5829805249788315e-05, |
|
"loss": 0.8724, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 4.58255715495343e-05, |
|
"loss": 0.8589, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 4.582133784928027e-05, |
|
"loss": 0.8798, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 4.5817104149026255e-05, |
|
"loss": 0.8424, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 4.5812870448772226e-05, |
|
"loss": 0.8478, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 4.580863674851821e-05, |
|
"loss": 0.8032, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 4.580440304826418e-05, |
|
"loss": 0.8084, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 4.5800169348010165e-05, |
|
"loss": 0.8081, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 4.5795935647756136e-05, |
|
"loss": 0.8007, |
|
"step": 9950 |
|
}, |
|
    {
      "epoch": 8.43,
      "learning_rate": 4.579170194750212e-05,
      "loss": 0.838,
      "step": 9960
    },
    {
      "epoch": 8.44,
      "learning_rate": 4.57874682472481e-05,
      "loss": 0.8209,
      "step": 9970
    },
    {
      "epoch": 8.45,
      "learning_rate": 4.5783234546994076e-05,
      "loss": 0.8775,
      "step": 9980
    },
    {
      "epoch": 8.46,
      "learning_rate": 4.577900084674005e-05,
      "loss": 0.8605,
      "step": 9990
    },
    {
      "epoch": 8.47,
      "learning_rate": 4.577476714648603e-05,
      "loss": 0.8787,
      "step": 10000
    },
    {
      "epoch": 8.48,
      "learning_rate": 4.577053344623201e-05,
      "loss": 0.8102,
      "step": 10010
    },
    {
      "epoch": 8.48,
      "learning_rate": 4.5766299745977986e-05,
      "loss": 0.8055,
      "step": 10020
    },
    {
      "epoch": 8.49,
      "learning_rate": 4.5762066045723964e-05,
      "loss": 0.8361,
      "step": 10030
    },
    {
      "epoch": 8.5,
      "learning_rate": 4.575783234546994e-05,
      "loss": 0.8053,
      "step": 10040
    },
    {
      "epoch": 8.51,
      "learning_rate": 4.5753598645215926e-05,
      "loss": 0.8641,
      "step": 10050
    },
    {
      "epoch": 8.52,
      "learning_rate": 4.57493649449619e-05,
      "loss": 0.8012,
      "step": 10060
    },
    {
      "epoch": 8.53,
      "learning_rate": 4.574513124470788e-05,
      "loss": 0.7983,
      "step": 10070
    },
    {
      "epoch": 8.54,
      "learning_rate": 4.574089754445385e-05,
      "loss": 0.8566,
      "step": 10080
    },
    {
      "epoch": 8.54,
      "learning_rate": 4.5736663844199836e-05,
      "loss": 0.8913,
      "step": 10090
    },
    {
      "epoch": 8.55,
      "learning_rate": 4.573243014394581e-05,
      "loss": 0.9211,
      "step": 10100
    },
    {
      "epoch": 8.56,
      "learning_rate": 4.572819644369179e-05,
      "loss": 0.8904,
      "step": 10110
    },
    {
      "epoch": 8.57,
      "learning_rate": 4.572396274343776e-05,
      "loss": 0.9624,
      "step": 10120
    },
    {
      "epoch": 8.58,
      "learning_rate": 4.571972904318375e-05,
      "loss": 0.9416,
      "step": 10130
    },
    {
      "epoch": 8.59,
      "learning_rate": 4.571549534292972e-05,
      "loss": 0.9605,
      "step": 10140
    },
    {
      "epoch": 8.59,
      "learning_rate": 4.57112616426757e-05,
      "loss": 0.9472,
      "step": 10150
    },
    {
      "epoch": 8.6,
      "learning_rate": 4.570702794242168e-05,
      "loss": 0.9249,
      "step": 10160
    },
    {
      "epoch": 8.61,
      "learning_rate": 4.570279424216766e-05,
      "loss": 0.9371,
      "step": 10170
    },
    {
      "epoch": 8.62,
      "learning_rate": 4.5698560541913635e-05,
      "loss": 1.0134,
      "step": 10180
    },
    {
      "epoch": 8.63,
      "learning_rate": 4.569432684165961e-05,
      "loss": 1.0054,
      "step": 10190
    },
    {
      "epoch": 8.64,
      "learning_rate": 4.569009314140559e-05,
      "loss": 0.9591,
      "step": 10200
    },
    {
      "epoch": 8.65,
      "learning_rate": 4.568585944115157e-05,
      "loss": 1.004,
      "step": 10210
    },
    {
      "epoch": 8.65,
      "learning_rate": 4.568162574089755e-05,
      "loss": 0.9699,
      "step": 10220
    },
    {
      "epoch": 8.66,
      "learning_rate": 4.567739204064352e-05,
      "loss": 0.9819,
      "step": 10230
    },
    {
      "epoch": 8.67,
      "learning_rate": 4.567315834038951e-05,
      "loss": 0.9193,
      "step": 10240
    },
    {
      "epoch": 8.68,
      "learning_rate": 4.566892464013548e-05,
      "loss": 1.008,
      "step": 10250
    },
    {
      "epoch": 8.69,
      "learning_rate": 4.566469093988146e-05,
      "loss": 1.0062,
      "step": 10260
    },
    {
      "epoch": 8.7,
      "learning_rate": 4.5660457239627433e-05,
      "loss": 0.9635,
      "step": 10270
    },
    {
      "epoch": 8.7,
      "learning_rate": 4.565622353937342e-05,
      "loss": 0.956,
      "step": 10280
    },
    {
      "epoch": 8.71,
      "learning_rate": 4.565198983911939e-05,
      "loss": 0.9055,
      "step": 10290
    },
    {
      "epoch": 8.72,
      "learning_rate": 4.564775613886537e-05,
      "loss": 0.9496,
      "step": 10300
    },
    {
      "epoch": 8.73,
      "learning_rate": 4.5643522438611344e-05,
      "loss": 0.9319,
      "step": 10310
    },
    {
      "epoch": 8.74,
      "learning_rate": 4.563928873835733e-05,
      "loss": 0.874,
      "step": 10320
    },
    {
      "epoch": 8.75,
      "learning_rate": 4.5635055038103306e-05,
      "loss": 0.9066,
      "step": 10330
    },
    {
      "epoch": 8.76,
      "learning_rate": 4.5630821337849284e-05,
      "loss": 0.9554,
      "step": 10340
    },
    {
      "epoch": 8.76,
      "learning_rate": 4.562658763759526e-05,
      "loss": 0.9359,
      "step": 10350
    },
    {
      "epoch": 8.77,
      "learning_rate": 4.562235393734124e-05,
      "loss": 0.9179,
      "step": 10360
    },
    {
      "epoch": 8.78,
      "learning_rate": 4.5618120237087216e-05,
      "loss": 0.8902,
      "step": 10370
    },
    {
      "epoch": 8.79,
      "learning_rate": 4.5613886536833194e-05,
      "loss": 0.9614,
      "step": 10380
    },
    {
      "epoch": 8.8,
      "learning_rate": 4.560965283657917e-05,
      "loss": 0.8877,
      "step": 10390
    },
    {
      "epoch": 8.81,
      "learning_rate": 4.560541913632515e-05,
      "loss": 0.8941,
      "step": 10400
    },
    {
      "epoch": 8.81,
      "learning_rate": 4.5601185436071134e-05,
      "loss": 0.9478,
      "step": 10410
    },
    {
      "epoch": 8.82,
      "learning_rate": 4.5596951735817104e-05,
      "loss": 0.9337,
      "step": 10420
    },
    {
      "epoch": 8.83,
      "learning_rate": 4.559271803556309e-05,
      "loss": 0.9109,
      "step": 10430
    },
    {
      "epoch": 8.84,
      "learning_rate": 4.558848433530906e-05,
      "loss": 0.9358,
      "step": 10440
    },
    {
      "epoch": 8.85,
      "learning_rate": 4.558467400508044e-05,
      "loss": 0.9414,
      "step": 10450
    },
    {
      "epoch": 8.86,
      "learning_rate": 4.558044030482642e-05,
      "loss": 0.9808,
      "step": 10460
    },
    {
      "epoch": 8.87,
      "learning_rate": 4.55762066045724e-05,
      "loss": 0.9936,
      "step": 10470
    },
    {
      "epoch": 8.87,
      "learning_rate": 4.5571972904318375e-05,
      "loss": 0.9651,
      "step": 10480
    },
    {
      "epoch": 8.88,
      "learning_rate": 4.556773920406435e-05,
      "loss": 1.0022,
      "step": 10490
    },
    {
      "epoch": 8.89,
      "learning_rate": 4.556350550381033e-05,
      "loss": 0.9491,
      "step": 10500
    },
    {
      "epoch": 8.9,
      "learning_rate": 4.555927180355631e-05,
      "loss": 1.0564,
      "step": 10510
    },
    {
      "epoch": 8.91,
      "learning_rate": 4.5555038103302286e-05,
      "loss": 0.9787,
      "step": 10520
    },
    {
      "epoch": 8.92,
      "learning_rate": 4.555080440304826e-05,
      "loss": 0.9995,
      "step": 10530
    },
    {
      "epoch": 8.92,
      "learning_rate": 4.554657070279425e-05,
      "loss": 0.9776,
      "step": 10540
    },
    {
      "epoch": 8.93,
      "learning_rate": 4.554233700254022e-05,
      "loss": 0.8444,
      "step": 10550
    },
    {
      "epoch": 8.94,
      "learning_rate": 4.55381033022862e-05,
      "loss": 0.9355,
      "step": 10560
    },
    {
      "epoch": 8.95,
      "learning_rate": 4.553386960203218e-05,
      "loss": 0.9401,
      "step": 10570
    },
    {
      "epoch": 8.96,
      "learning_rate": 4.552963590177816e-05,
      "loss": 0.9758,
      "step": 10580
    },
    {
      "epoch": 8.97,
      "learning_rate": 4.5525402201524136e-05,
      "loss": 0.9573,
      "step": 10590
    },
    {
      "epoch": 8.98,
      "learning_rate": 4.552116850127011e-05,
      "loss": 0.9769,
      "step": 10600
    },
    {
      "epoch": 8.98,
      "learning_rate": 4.551693480101609e-05,
      "loss": 1.0239,
      "step": 10610
    },
    {
      "epoch": 8.99,
      "learning_rate": 4.551270110076207e-05,
      "loss": 0.959,
      "step": 10620
    },
    {
      "epoch": 9.0,
      "eval_cer": 0.1727233801571845,
      "eval_loss": 0.9587426781654358,
      "eval_mer": 0.48635424594976623,
      "eval_runtime": 23.7449,
      "eval_samples_per_second": 91.725,
      "eval_steps_per_second": 5.77,
      "step": 10629
    },
    {
      "epoch": 9.0,
      "learning_rate": 4.5508467400508046e-05,
      "loss": 1.0417,
      "step": 10630
    },
    {
      "epoch": 9.01,
      "learning_rate": 4.5504233700254024e-05,
      "loss": 1.0691,
      "step": 10640
    },
    {
      "epoch": 9.02,
      "learning_rate": 4.55e-05,
      "loss": 1.0545,
      "step": 10650
    },
    {
      "epoch": 9.03,
      "learning_rate": 4.549576629974598e-05,
      "loss": 1.169,
      "step": 10660
    },
    {
      "epoch": 9.03,
      "learning_rate": 4.549153259949196e-05,
      "loss": 1.1724,
      "step": 10670
    },
    {
      "epoch": 9.04,
      "learning_rate": 4.5487298899237934e-05,
      "loss": 1.1514,
      "step": 10680
    },
    {
      "epoch": 9.05,
      "learning_rate": 4.548306519898391e-05,
      "loss": 1.2339,
      "step": 10690
    },
    {
      "epoch": 9.06,
      "learning_rate": 4.547883149872989e-05,
      "loss": 1.2019,
      "step": 10700
    },
    {
      "epoch": 9.07,
      "learning_rate": 4.5474597798475874e-05,
      "loss": 1.2867,
      "step": 10710
    },
    {
      "epoch": 9.08,
      "learning_rate": 4.5470364098221845e-05,
      "loss": 1.2397,
      "step": 10720
    },
    {
      "epoch": 9.09,
      "learning_rate": 4.546613039796783e-05,
      "loss": 1.2359,
      "step": 10730
    },
    {
      "epoch": 9.09,
      "learning_rate": 4.54618966977138e-05,
      "loss": 1.2958,
      "step": 10740
    },
    {
      "epoch": 9.1,
      "learning_rate": 4.5457662997459784e-05,
      "loss": 1.2138,
      "step": 10750
    },
    {
      "epoch": 9.11,
      "learning_rate": 4.5453429297205755e-05,
      "loss": 1.2546,
      "step": 10760
    },
    {
      "epoch": 9.12,
      "learning_rate": 4.544919559695174e-05,
      "loss": 1.2149,
      "step": 10770
    },
    {
      "epoch": 9.13,
      "learning_rate": 4.544496189669772e-05,
      "loss": 1.2046,
      "step": 10780
    },
    {
      "epoch": 9.14,
      "learning_rate": 4.5440728196443695e-05,
      "loss": 1.2594,
      "step": 10790
    },
    {
      "epoch": 9.14,
      "learning_rate": 4.543649449618967e-05,
      "loss": 1.2634,
      "step": 10800
    },
    {
      "epoch": 9.15,
      "learning_rate": 4.543226079593565e-05,
      "loss": 1.2014,
      "step": 10810
    },
    {
      "epoch": 9.16,
      "learning_rate": 4.542802709568163e-05,
      "loss": 1.1851,
      "step": 10820
    },
    {
      "epoch": 9.17,
      "learning_rate": 4.5423793395427605e-05,
      "loss": 1.1358,
      "step": 10830
    },
    {
      "epoch": 9.18,
      "learning_rate": 4.541955969517358e-05,
      "loss": 1.2123,
      "step": 10840
    },
    {
      "epoch": 9.19,
      "learning_rate": 4.541532599491956e-05,
      "loss": 1.1453,
      "step": 10850
    },
    {
      "epoch": 9.2,
      "learning_rate": 4.541109229466554e-05,
      "loss": 1.1274,
      "step": 10860
    },
    {
      "epoch": 9.2,
      "learning_rate": 4.5406858594411516e-05,
      "loss": 1.0398,
      "step": 10870
    },
    {
      "epoch": 9.21,
      "learning_rate": 4.54026248941575e-05,
      "loss": 1.0025,
      "step": 10880
    },
    {
      "epoch": 9.22,
      "learning_rate": 4.539839119390347e-05,
      "loss": 1.0871,
      "step": 10890
    },
    {
      "epoch": 9.23,
      "learning_rate": 4.5394157493649455e-05,
      "loss": 1.1312,
      "step": 10900
    },
    {
      "epoch": 9.24,
      "learning_rate": 4.5389923793395426e-05,
      "loss": 1.0552,
      "step": 10910
    },
    {
      "epoch": 9.25,
      "learning_rate": 4.538569009314141e-05,
      "loss": 1.0676,
      "step": 10920
    },
    {
      "epoch": 9.25,
      "learning_rate": 4.538145639288738e-05,
      "loss": 0.9928,
      "step": 10930
    },
    {
      "epoch": 9.26,
      "learning_rate": 4.5377222692633366e-05,
      "loss": 1.1595,
      "step": 10940
    },
    {
      "epoch": 9.27,
      "learning_rate": 4.537298899237934e-05,
      "loss": 1.187,
      "step": 10950
    },
    {
      "epoch": 9.28,
      "learning_rate": 4.536875529212532e-05,
      "loss": 1.1794,
      "step": 10960
    },
    {
      "epoch": 9.29,
      "learning_rate": 4.536452159187129e-05,
      "loss": 1.2095,
      "step": 10970
    },
    {
      "epoch": 9.3,
      "learning_rate": 4.5360287891617276e-05,
      "loss": 1.1446,
      "step": 10980
    },
    {
      "epoch": 9.31,
      "learning_rate": 4.5356054191363254e-05,
      "loss": 1.2508,
      "step": 10990
    },
    {
      "epoch": 9.31,
      "learning_rate": 4.535182049110923e-05,
      "loss": 1.2618,
      "step": 11000
    },
    {
      "epoch": 9.32,
      "learning_rate": 4.534758679085521e-05,
      "loss": 1.2381,
      "step": 11010
    },
    {
      "epoch": 9.33,
      "learning_rate": 4.534335309060119e-05,
      "loss": 1.2213,
      "step": 11020
    },
    {
      "epoch": 9.34,
      "learning_rate": 4.5339119390347164e-05,
      "loss": 1.0845,
      "step": 11030
    },
    {
      "epoch": 9.35,
      "learning_rate": 4.533488569009314e-05,
      "loss": 1.2409,
      "step": 11040
    },
    {
      "epoch": 9.36,
      "learning_rate": 4.5330651989839126e-05,
      "loss": 1.2389,
      "step": 11050
    },
    {
      "epoch": 9.36,
      "learning_rate": 4.53264182895851e-05,
      "loss": 1.2307,
      "step": 11060
    },
    {
      "epoch": 9.37,
      "learning_rate": 4.532218458933108e-05,
      "loss": 1.223,
      "step": 11070
    },
    {
      "epoch": 9.38,
      "learning_rate": 4.531795088907705e-05,
      "loss": 1.1984,
      "step": 11080
    },
    {
      "epoch": 9.39,
      "learning_rate": 4.531371718882304e-05,
      "loss": 1.2973,
      "step": 11090
    },
    {
      "epoch": 9.4,
      "learning_rate": 4.530948348856901e-05,
      "loss": 1.2701,
      "step": 11100
    },
    {
      "epoch": 9.41,
      "learning_rate": 4.530524978831499e-05,
      "loss": 1.2675,
      "step": 11110
    },
    {
      "epoch": 9.42,
      "learning_rate": 4.530101608806096e-05,
      "loss": 1.228,
      "step": 11120
    },
    {
      "epoch": 9.42,
      "learning_rate": 4.529678238780695e-05,
      "loss": 1.1648,
      "step": 11130
    },
    {
      "epoch": 9.43,
      "learning_rate": 4.529254868755292e-05,
      "loss": 1.2387,
      "step": 11140
    },
    {
      "epoch": 9.44,
      "learning_rate": 4.52883149872989e-05,
      "loss": 1.2762,
      "step": 11150
    },
    {
      "epoch": 9.45,
      "learning_rate": 4.528408128704488e-05,
      "loss": 1.2244,
      "step": 11160
    },
    {
      "epoch": 9.46,
      "learning_rate": 4.527984758679086e-05,
      "loss": 1.2044,
      "step": 11170
    },
    {
      "epoch": 9.47,
      "learning_rate": 4.5275613886536836e-05,
      "loss": 1.2404,
      "step": 11180
    },
    {
      "epoch": 9.48,
      "learning_rate": 4.527138018628281e-05,
      "loss": 1.3162,
      "step": 11190
    },
    {
      "epoch": 9.48,
      "learning_rate": 4.526714648602879e-05,
      "loss": 1.3218,
      "step": 11200
    },
    {
      "epoch": 9.49,
      "learning_rate": 4.526291278577477e-05,
      "loss": 1.3126,
      "step": 11210
    },
    {
      "epoch": 9.5,
      "learning_rate": 4.525867908552075e-05,
      "loss": 1.322,
      "step": 11220
    },
    {
      "epoch": 9.51,
      "learning_rate": 4.5254445385266724e-05,
      "loss": 1.2755,
      "step": 11230
    },
    {
      "epoch": 9.52,
      "learning_rate": 4.525021168501271e-05,
      "loss": 1.3486,
      "step": 11240
    },
    {
      "epoch": 9.53,
      "learning_rate": 4.524597798475868e-05,
      "loss": 1.3322,
      "step": 11250
    },
    {
      "epoch": 9.53,
      "learning_rate": 4.524174428450466e-05,
      "loss": 1.3325,
      "step": 11260
    },
    {
      "epoch": 9.54,
      "learning_rate": 4.5237510584250634e-05,
      "loss": 1.2634,
      "step": 11270
    },
    {
      "epoch": 9.55,
      "learning_rate": 4.523327688399662e-05,
      "loss": 1.2853,
      "step": 11280
    },
    {
      "epoch": 9.56,
      "learning_rate": 4.522904318374259e-05,
      "loss": 1.3415,
      "step": 11290
    },
    {
      "epoch": 9.57,
      "learning_rate": 4.5224809483488574e-05,
      "loss": 1.3399,
      "step": 11300
    },
    {
      "epoch": 9.58,
      "learning_rate": 4.5220575783234545e-05,
      "loss": 1.364,
      "step": 11310
    },
    {
      "epoch": 9.59,
      "learning_rate": 4.521634208298053e-05,
      "loss": 1.3059,
      "step": 11320
    },
    {
      "epoch": 9.59,
      "learning_rate": 4.5212108382726507e-05,
      "loss": 1.281,
      "step": 11330
    },
    {
      "epoch": 9.6,
      "learning_rate": 4.5207874682472484e-05,
      "loss": 1.3479,
      "step": 11340
    },
    {
      "epoch": 9.61,
      "learning_rate": 4.520364098221846e-05,
      "loss": 1.3463,
      "step": 11350
    },
    {
      "epoch": 9.62,
      "learning_rate": 4.519940728196444e-05,
      "loss": 1.3277,
      "step": 11360
    },
    {
      "epoch": 9.63,
      "learning_rate": 4.519517358171042e-05,
      "loss": 1.3,
      "step": 11370
    },
    {
      "epoch": 9.64,
      "learning_rate": 4.5190939881456395e-05,
      "loss": 1.2584,
      "step": 11380
    },
    {
      "epoch": 9.64,
      "learning_rate": 4.518670618120237e-05,
      "loss": 1.3805,
      "step": 11390
    },
    {
      "epoch": 9.65,
      "learning_rate": 4.518247248094835e-05,
      "loss": 1.3249,
      "step": 11400
    },
    {
      "epoch": 9.66,
      "learning_rate": 4.517823878069433e-05,
      "loss": 1.3617,
      "step": 11410
    },
    {
      "epoch": 9.67,
      "learning_rate": 4.5174005080440305e-05,
      "loss": 1.4079,
      "step": 11420
    },
    {
      "epoch": 9.68,
      "learning_rate": 4.516977138018629e-05,
      "loss": 1.2564,
      "step": 11430
    },
    {
      "epoch": 9.69,
      "learning_rate": 4.516553767993226e-05,
      "loss": 1.357,
      "step": 11440
    },
    {
      "epoch": 9.7,
      "learning_rate": 4.5161303979678245e-05,
      "loss": 1.3412,
      "step": 11450
    },
    {
      "epoch": 9.7,
      "learning_rate": 4.5157070279424216e-05,
      "loss": 1.339,
      "step": 11460
    },
    {
      "epoch": 9.71,
      "learning_rate": 4.51528365791702e-05,
      "loss": 1.355,
      "step": 11470
    },
    {
      "epoch": 9.72,
      "learning_rate": 4.514860287891617e-05,
      "loss": 1.3243,
      "step": 11480
    },
    {
      "epoch": 9.73,
      "learning_rate": 4.5144369178662155e-05,
      "loss": 1.3956,
      "step": 11490
    },
    {
      "epoch": 9.74,
      "learning_rate": 4.514013547840813e-05,
      "loss": 1.4206,
      "step": 11500
    },
    {
      "epoch": 9.75,
      "learning_rate": 4.513590177815411e-05,
      "loss": 1.3762,
      "step": 11510
    },
    {
      "epoch": 9.75,
      "learning_rate": 4.513166807790009e-05,
      "loss": 1.3001,
      "step": 11520
    },
    {
      "epoch": 9.76,
      "learning_rate": 4.5127434377646066e-05,
      "loss": 1.3047,
      "step": 11530
    },
    {
      "epoch": 9.77,
      "learning_rate": 4.512320067739204e-05,
      "loss": 1.44,
      "step": 11540
    },
    {
      "epoch": 9.78,
      "learning_rate": 4.511896697713802e-05,
      "loss": 1.4547,
      "step": 11550
    },
    {
      "epoch": 9.79,
      "learning_rate": 4.5114733276884e-05,
      "loss": 1.3994,
      "step": 11560
    },
    {
      "epoch": 9.8,
      "learning_rate": 4.5110499576629976e-05,
      "loss": 1.4244,
      "step": 11570
    },
    {
      "epoch": 9.81,
      "learning_rate": 4.510668924640136e-05,
      "loss": 1.2827,
      "step": 11580
    },
    {
      "epoch": 9.81,
      "learning_rate": 4.5102455546147336e-05,
      "loss": 1.5738,
      "step": 11590
    },
    {
      "epoch": 9.82,
      "learning_rate": 4.5098221845893314e-05,
      "loss": 1.5026,
      "step": 11600
    },
    {
      "epoch": 9.83,
      "learning_rate": 4.509398814563929e-05,
      "loss": 1.5072,
      "step": 11610
    },
    {
      "epoch": 9.84,
      "learning_rate": 4.508975444538527e-05,
      "loss": 1.5236,
      "step": 11620
    },
    {
      "epoch": 9.85,
      "learning_rate": 4.508552074513125e-05,
      "loss": 1.4025,
      "step": 11630
    },
    {
      "epoch": 9.86,
      "learning_rate": 4.5081287044877224e-05,
      "loss": 1.4741,
      "step": 11640
    },
    {
      "epoch": 9.86,
      "learning_rate": 4.50770533446232e-05,
      "loss": 1.4503,
      "step": 11650
    },
    {
      "epoch": 9.87,
      "learning_rate": 4.507281964436918e-05,
      "loss": 1.4235,
      "step": 11660
    },
    {
      "epoch": 9.88,
      "learning_rate": 4.506858594411516e-05,
      "loss": 1.397,
      "step": 11670
    },
    {
      "epoch": 9.89,
      "learning_rate": 4.5064352243861135e-05,
      "loss": 1.3583,
      "step": 11680
    },
    {
      "epoch": 9.9,
      "learning_rate": 4.506011854360712e-05,
      "loss": 1.4387,
      "step": 11690
    },
    {
      "epoch": 9.91,
      "learning_rate": 4.505588484335309e-05,
      "loss": 1.4338,
      "step": 11700
    },
    {
      "epoch": 9.92,
      "learning_rate": 4.5051651143099075e-05,
      "loss": 1.4273,
      "step": 11710
    },
    {
      "epoch": 9.92,
      "learning_rate": 4.5047417442845045e-05,
      "loss": 1.4278,
      "step": 11720
    },
    {
      "epoch": 9.93,
      "learning_rate": 4.504318374259103e-05,
      "loss": 1.4686,
      "step": 11730
    },
    {
      "epoch": 9.94,
      "learning_rate": 4.5038950042337e-05,
      "loss": 1.4621,
      "step": 11740
    },
    {
      "epoch": 9.95,
      "learning_rate": 4.5034716342082985e-05,
      "loss": 1.5398,
      "step": 11750
    },
    {
      "epoch": 9.96,
      "learning_rate": 4.5030482641828956e-05,
      "loss": 1.4768,
      "step": 11760
    },
    {
      "epoch": 9.97,
      "learning_rate": 4.502624894157494e-05,
      "loss": 1.4836,
      "step": 11770
    },
    {
      "epoch": 9.97,
      "learning_rate": 4.5022438611346316e-05,
      "loss": 1.479,
      "step": 11780
    },
    {
      "epoch": 9.98,
      "learning_rate": 4.50182049110923e-05,
      "loss": 1.5087,
      "step": 11790
    },
    {
      "epoch": 9.99,
      "learning_rate": 4.501397121083827e-05,
      "loss": 1.4942,
      "step": 11800
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.5009737510584256e-05,
      "loss": 1.5324,
      "step": 11810
    },
    {
      "epoch": 10.0,
      "eval_cer": 0.24601168524427133,
      "eval_loss": 1.2874042987823486,
      "eval_mer": 0.66684788517995,
      "eval_runtime": 28.2038,
      "eval_samples_per_second": 77.224,
      "eval_steps_per_second": 4.857,
      "step": 11810
    },
    {
      "epoch": 10.01,
      "learning_rate": 4.5005503810330227e-05,
      "loss": 1.5366,
      "step": 11820
    },
    {
      "epoch": 10.02,
      "learning_rate": 4.500127011007621e-05,
      "loss": 1.5396,
      "step": 11830
    },
    {
      "epoch": 10.03,
      "learning_rate": 4.499703640982219e-05,
      "loss": 1.541,
      "step": 11840
    },
    {
      "epoch": 10.03,
      "learning_rate": 4.4992802709568166e-05,
      "loss": 1.5522,
      "step": 11850
    },
    {
      "epoch": 10.04,
      "learning_rate": 4.4988569009314144e-05,
      "loss": 1.7348,
      "step": 11860
    },
    {
      "epoch": 10.05,
      "learning_rate": 4.498433530906012e-05,
      "loss": 1.5764,
      "step": 11870
    },
    {
      "epoch": 10.06,
      "learning_rate": 4.49801016088061e-05,
      "loss": 1.5444,
      "step": 11880
    },
    {
      "epoch": 10.07,
      "learning_rate": 4.4975867908552077e-05,
      "loss": 1.6274,
      "step": 11890
    },
    {
      "epoch": 10.08,
      "learning_rate": 4.4971634208298054e-05,
      "loss": 1.7053,
      "step": 11900
    },
    {
      "epoch": 10.08,
      "learning_rate": 4.496740050804403e-05,
      "loss": 1.7295,
      "step": 11910
    },
    {
      "epoch": 10.09,
      "learning_rate": 4.496316680779001e-05,
      "loss": 1.6501,
      "step": 11920
    },
    {
      "epoch": 10.1,
      "learning_rate": 4.495893310753599e-05,
      "loss": 1.7354,
      "step": 11930
    },
    {
      "epoch": 10.11,
      "learning_rate": 4.4954699407281965e-05,
      "loss": 1.761,
      "step": 11940
    },
    {
      "epoch": 10.12,
      "learning_rate": 4.495046570702794e-05,
      "loss": 1.9456,
      "step": 11950
    },
    {
      "epoch": 10.13,
      "learning_rate": 4.494623200677393e-05,
      "loss": 2.3037,
      "step": 11960
    },
    {
      "epoch": 10.14,
      "learning_rate": 4.49419983065199e-05,
      "loss": 1.8925,
      "step": 11970
    },
    {
      "epoch": 10.14,
      "learning_rate": 4.493776460626588e-05,
      "loss": 1.9024,
      "step": 11980
    },
    {
      "epoch": 10.15,
      "learning_rate": 4.493353090601185e-05,
      "loss": 1.8458,
      "step": 11990
    },
    {
      "epoch": 10.16,
      "learning_rate": 4.492929720575784e-05,
      "loss": 1.8047,
      "step": 12000
    },
    {
      "epoch": 10.17,
      "learning_rate": 4.4925063505503815e-05,
      "loss": 2.0453,
      "step": 12010
    },
    {
      "epoch": 10.18,
      "learning_rate": 4.492082980524979e-05,
      "loss": 1.7405,
      "step": 12020
    },
    {
      "epoch": 10.19,
      "learning_rate": 4.491659610499577e-05,
      "loss": 1.8958,
      "step": 12030
    },
    {
      "epoch": 10.19,
      "learning_rate": 4.491236240474175e-05,
      "loss": 2.0114,
      "step": 12040
    },
    {
      "epoch": 10.2,
      "learning_rate": 4.4908128704487725e-05,
      "loss": 1.9778,
      "step": 12050
    },
    {
      "epoch": 10.21,
      "learning_rate": 4.49038950042337e-05,
      "loss": 2.3987,
      "step": 12060
    },
    {
      "epoch": 10.22,
      "learning_rate": 4.489966130397968e-05,
      "loss": 1.948,
      "step": 12070
    },
    {
      "epoch": 10.23,
      "learning_rate": 4.489542760372566e-05,
      "loss": 1.8516,
      "step": 12080
    },
    {
      "epoch": 10.24,
      "learning_rate": 4.4891193903471636e-05,
      "loss": 2.0011,
      "step": 12090
    },
    {
      "epoch": 10.25,
      "learning_rate": 4.488696020321761e-05,
      "loss": 2.0376,
      "step": 12100
    },
    {
      "epoch": 10.25,
      "learning_rate": 4.4883149872988996e-05,
      "loss": 2.3684,
      "step": 12110
    },
    {
      "epoch": 10.26,
      "learning_rate": 4.4878916172734974e-05,
      "loss": 1.8877,
      "step": 12120
    },
    {
      "epoch": 10.27,
      "learning_rate": 4.487468247248095e-05,
      "loss": 2.0353,
      "step": 12130
    },
    {
      "epoch": 10.28,
      "learning_rate": 4.487044877222693e-05,
      "loss": 2.001,
      "step": 12140
    },
    {
      "epoch": 10.29,
      "learning_rate": 4.4866215071972906e-05,
      "loss": 2.2172,
      "step": 12150
    },
    {
      "epoch": 10.3,
      "learning_rate": 4.4861981371718884e-05,
      "loss": 2.6229,
      "step": 12160
    },
    {
      "epoch": 10.3,
      "learning_rate": 4.485774767146486e-05,
      "loss": 1.9102,
      "step": 12170
    },
    {
      "epoch": 10.31,
      "learning_rate": 4.485351397121084e-05,
      "loss": 1.9907,
      "step": 12180
    },
    {
      "epoch": 10.32,
      "learning_rate": 4.484928027095682e-05,
      "loss": 2.0939,
      "step": 12190
    },
    {
      "epoch": 10.33,
      "learning_rate": 4.4845046570702794e-05,
      "loss": 2.1526,
      "step": 12200
    },
    {
      "epoch": 10.34,
      "learning_rate": 4.484081287044877e-05,
      "loss": 2.4638,
      "step": 12210
    },
    {
      "epoch": 10.35,
      "learning_rate": 4.4836579170194757e-05,
      "loss": 2.013,
      "step": 12220
    },
    {
      "epoch": 10.36,
      "learning_rate": 4.483234546994073e-05,
      "loss": 1.9353,
      "step": 12230
    },
    {
      "epoch": 10.36,
      "learning_rate": 4.482811176968671e-05,
      "loss": 2.0008,
      "step": 12240
    },
    {
      "epoch": 10.37,
      "learning_rate": 4.482387806943268e-05,
      "loss": 2.1421,
      "step": 12250
    },
    {
      "epoch": 10.38,
      "learning_rate": 4.481964436917867e-05,
      "loss": 2.3906,
      "step": 12260
    },
    {
      "epoch": 10.39,
      "learning_rate": 4.481541066892464e-05,
      "loss": 1.9363,
      "step": 12270
    },
    {
      "epoch": 10.4,
      "learning_rate": 4.481117696867062e-05,
      "loss": 1.9357,
      "step": 12280
    },
    {
      "epoch": 10.41,
      "learning_rate": 4.480694326841659e-05,
      "loss": 2.0629,
      "step": 12290
    },
    {
      "epoch": 10.41,
      "learning_rate": 4.480270956816258e-05,
      "loss": 2.108,
      "step": 12300
    },
    {
      "epoch": 10.42,
      "learning_rate": 4.4798475867908555e-05,
      "loss": 2.5918,
      "step": 12310
    },
    {
      "epoch": 10.43,
      "learning_rate": 4.479424216765453e-05,
      "loss": 1.9791,
      "step": 12320
    },
    {
      "epoch": 10.44,
      "learning_rate": 4.479000846740051e-05,
      "loss": 1.9266,
      "step": 12330
    },
    {
      "epoch": 10.45,
      "learning_rate": 4.478577476714649e-05,
      "loss": 2.0217,
      "step": 12340
    },
    {
      "epoch": 10.46,
      "learning_rate": 4.4781541066892466e-05,
      "loss": 2.2233,
      "step": 12350
    },
    {
      "epoch": 10.47,
      "learning_rate": 4.477730736663844e-05,
      "loss": 2.4653,
      "step": 12360
    },
    {
      "epoch": 10.47,
      "learning_rate": 4.477307366638442e-05,
      "loss": 1.985,
      "step": 12370
    },
    {
      "epoch": 10.48,
      "learning_rate": 4.47688399661304e-05,
      "loss": 1.9862,
      "step": 12380
    },
    {
      "epoch": 10.49,
      "learning_rate": 4.476460626587638e-05,
      "loss": 2.0553,
      "step": 12390
    },
    {
      "epoch": 10.5,
      "learning_rate": 4.4760372565622354e-05,
      "loss": 2.1504,
      "step": 12400
    },
    {
      "epoch": 10.51,
      "learning_rate": 4.475613886536834e-05,
      "loss": 2.4646,
      "step": 12410
    },
    {
      "epoch": 10.52,
      "learning_rate": 4.475190516511431e-05,
      "loss": 1.9892,
      "step": 12420
    },
    {
      "epoch": 10.52,
      "learning_rate": 4.474767146486029e-05,
      "loss": 1.9598,
      "step": 12430
    },
    {
      "epoch": 10.53,
      "learning_rate": 4.4743437764606264e-05,
      "loss": 1.9647,
      "step": 12440
    },
    {
      "epoch": 10.54,
      "learning_rate": 4.473920406435225e-05,
      "loss": 2.1751,
      "step": 12450
    },
    {
      "epoch": 10.55,
      "learning_rate": 4.473497036409822e-05,
      "loss": 2.6336,
      "step": 12460
    },
    {
      "epoch": 10.56,
      "learning_rate": 4.4730736663844204e-05,
      "loss": 1.9717,
      "step": 12470
    },
    {
      "epoch": 10.57,
      "learning_rate": 4.4726502963590175e-05,
      "loss": 1.9569,
      "step": 12480
    },
    {
      "epoch": 10.58,
      "learning_rate": 4.472226926333616e-05,
      "loss": 2.0809,
      "step": 12490
    },
    {
      "epoch": 10.58,
      "learning_rate": 4.4718035563082137e-05,
      "loss": 2.2418,
      "step": 12500
    },
    {
      "epoch": 10.59,
      "learning_rate": 4.4713801862828114e-05,
      "loss": 2.5448,
      "step": 12510
    },
    {
      "epoch": 10.6,
      "learning_rate": 4.470956816257409e-05,
      "loss": 1.9617,
      "step": 12520
    },
    {
      "epoch": 10.61,
      "learning_rate": 4.470533446232007e-05,
      "loss": 1.9676,
      "step": 12530
    },
    {
      "epoch": 10.62,
      "learning_rate": 4.4701100762066054e-05,
      "loss": 2.0919,
      "step": 12540
    },
    {
      "epoch": 10.63,
      "learning_rate": 4.4696867061812025e-05,
      "loss": 2.2593,
      "step": 12550
    },
    {
      "epoch": 10.64,
      "learning_rate": 4.469263336155801e-05,
      "loss": 2.4808,
      "step": 12560
    },
    {
      "epoch": 10.64,
      "learning_rate": 4.468839966130398e-05,
      "loss": 2.0293,
      "step": 12570
    },
    {
      "epoch": 10.65,
      "learning_rate": 4.4684165961049964e-05,
      "loss": 1.9555,
      "step": 12580
    },
    {
      "epoch": 10.66,
      "learning_rate": 4.4679932260795935e-05,
      "loss": 2.0828,
      "step": 12590
    },
    {
      "epoch": 10.67,
      "learning_rate": 4.467569856054192e-05,
      "loss": 2.2231,
      "step": 12600
    },
    {
      "epoch": 10.68,
      "learning_rate": 4.467146486028789e-05,
      "loss": 2.6046,
      "step": 12610
    },
    {
      "epoch": 10.69,
      "learning_rate": 4.4667231160033875e-05,
      "loss": 1.9986,
      "step": 12620
    },
    {
      "epoch": 10.69,
      "learning_rate": 4.4662997459779846e-05,
      "loss": 1.9526,
      "step": 12630
    },
    {
      "epoch": 10.7,
      "learning_rate": 4.465876375952583e-05,
      "loss": 2.1019,
      "step": 12640
    },
    {
      "epoch": 10.71,
      "learning_rate": 4.46545300592718e-05,
      "loss": 2.1972,
      "step": 12650
    },
    {
      "epoch": 10.72,
      "learning_rate": 4.4650296359017785e-05,
      "loss": 2.6334,
      "step": 12660
    },
    {
      "epoch": 10.73,
      "learning_rate": 4.464606265876376e-05,
      "loss": 1.9876,
      "step": 12670
    },
    {
      "epoch": 10.74,
      "learning_rate": 4.464182895850974e-05,
      "loss": 1.9677,
      "step": 12680
    },
    {
      "epoch": 10.75,
      "learning_rate": 4.463759525825572e-05,
      "loss": 2.0477,
      "step": 12690
    },
    {
      "epoch": 10.75,
      "learning_rate": 4.4633361558001696e-05,
      "loss": 2.188,
      "step": 12700
    },
    {
      "epoch": 10.76,
      "learning_rate": 4.462955122777308e-05,
      "loss": 2.6296,
      "step": 12710
    },
    {
      "epoch": 10.77,
      "learning_rate": 4.4625317527519056e-05,
      "loss": 1.9847,
      "step": 12720
    },
    {
      "epoch": 10.78,
      "learning_rate": 4.4621083827265034e-05,
      "loss": 1.97,
      "step": 12730
    },
    {
      "epoch": 10.79,
      "learning_rate": 4.461685012701101e-05,
      "loss": 1.9851,
      "step": 12740
    },
    {
      "epoch": 10.8,
      "learning_rate": 4.461261642675699e-05,
      "loss": 2.2719,
      "step": 12750
    },
    {
      "epoch": 10.8,
      "learning_rate": 4.4608382726502966e-05,
      "loss": 2.4926,
      "step": 12760
    },
    {
      "epoch": 10.81,
      "learning_rate": 4.4604149026248944e-05,
      "loss": 1.9853,
      "step": 12770
    },
    {
      "epoch": 10.82,
      "learning_rate": 4.459991532599492e-05,
      "loss": 2.0494,
      "step": 12780
    },
    {
      "epoch": 10.83,
      "learning_rate": 4.45956816257409e-05,
      "loss": 2.0386,
      "step": 12790
    },
    {
      "epoch": 10.84,
      "learning_rate": 4.459144792548688e-05,
      "loss": 2.2637,
      "step": 12800
    },
    {
      "epoch": 10.85,
      "learning_rate": 4.4587214225232854e-05,
      "loss": 2.7218,
      "step": 12810
    },
    {
      "epoch": 10.86,
      "learning_rate": 4.458298052497883e-05,
      "loss": 1.9578,
      "step": 12820
    },
    {
      "epoch": 10.86,
      "learning_rate": 4.457874682472481e-05,
      "loss": 1.9637,
      "step": 12830
    },
    {
      "epoch": 10.87,
      "learning_rate": 4.457451312447079e-05,
      "loss": 2.055,
      "step": 12840
    },
    {
      "epoch": 10.88,
      "learning_rate": 4.4570279424216765e-05,
      "loss": 2.2645,
      "step": 12850
    },
    {
      "epoch": 10.89,
      "learning_rate": 4.456604572396275e-05,
      "loss": 2.7069,
      "step": 12860
    },
    {
      "epoch": 10.9,
      "learning_rate": 4.456181202370872e-05,
      "loss": 1.9068,
      "step": 12870
    },
    {
      "epoch": 10.91,
      "learning_rate": 4.4557578323454705e-05,
      "loss": 1.9396,
      "step": 12880
    },
    {
      "epoch": 10.91,
      "learning_rate": 4.4553344623200675e-05,
      "loss": 2.1007,
      "step": 12890
    },
    {
      "epoch": 10.92,
      "learning_rate": 4.454911092294666e-05,
      "loss": 2.2855,
      "step": 12900
    },
    {
      "epoch": 10.93,
      "learning_rate": 4.454487722269264e-05,
      "loss": 2.5381,
      "step": 12910
    },
    {
      "epoch": 10.94,
      "learning_rate": 4.4540643522438615e-05,
      "loss": 2.0182,
      "step": 12920
    },
    {
      "epoch": 10.95,
      "learning_rate": 4.453640982218459e-05,
      "loss": 2.0102,
      "step": 12930
    },
    {
      "epoch": 10.96,
      "learning_rate": 4.453217612193057e-05,
      "loss": 1.9872,
      "step": 12940
    },
    {
      "epoch": 10.97,
      "learning_rate": 4.452794242167655e-05,
      "loss": 2.0364,
      "step": 12950
    },
    {
      "epoch": 10.97,
      "learning_rate": 4.4523708721422526e-05,
      "loss": 2.5555,
      "step": 12960
    },
    {
      "epoch": 10.98,
      "learning_rate": 4.45194750211685e-05,
      "loss": 2.0951,
      "step": 12970
    },
    {
      "epoch": 10.99,
      "learning_rate": 4.451524132091448e-05,
      "loss": 2.1552,
      "step": 12980
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.451100762066046e-05,
      "loss": 2.4549,
      "step": 12990
    },
    {
      "epoch": 11.0,
      "eval_cer": 0.3198386718048592,
      "eval_loss": 1.721533179283142,
      "eval_mer": 0.6885397412199631,
      "eval_runtime": 27.2327,
      "eval_samples_per_second": 79.977,
      "eval_steps_per_second": 5.031,
      "step": 12991
    }
  ],
  "max_steps": 118100,
  "num_train_epochs": 100,
  "total_flos": 1.4197154668742943e+19,
  "trial_name": null,
  "trial_params": null
}