{
  "best_metric": 0.9985528219971056,
  "best_model_checkpoint": "videomae-base-finetuned-isl-numbers_aug/checkpoint-4180",
  "epoch": 4.2,
  "eval_steps": 500,
  "global_step": 4180,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0023923444976076554,
      "grad_norm": 14.015558242797852,
      "learning_rate": 1.1961722488038277e-06,
      "loss": 1.36,
      "step": 10
    },
    {
      "epoch": 0.004784688995215311,
      "grad_norm": 13.480476379394531,
      "learning_rate": 2.3923444976076554e-06,
      "loss": 1.4805,
      "step": 20
    },
    {
      "epoch": 0.007177033492822967,
      "grad_norm": 24.253854751586914,
      "learning_rate": 3.5885167464114835e-06,
      "loss": 1.1355,
      "step": 30
    },
    {
      "epoch": 0.009569377990430622,
      "grad_norm": 24.031822204589844,
      "learning_rate": 4.784688995215311e-06,
      "loss": 1.2175,
      "step": 40
    },
    {
      "epoch": 0.011961722488038277,
      "grad_norm": 33.942161560058594,
      "learning_rate": 5.980861244019139e-06,
      "loss": 1.5014,
      "step": 50
    },
    {
      "epoch": 0.014354066985645933,
      "grad_norm": 25.417530059814453,
      "learning_rate": 7.177033492822967e-06,
      "loss": 1.0382,
      "step": 60
    },
    {
      "epoch": 0.01674641148325359,
      "grad_norm": 33.37525939941406,
      "learning_rate": 8.373205741626795e-06,
      "loss": 0.818,
      "step": 70
    },
    {
      "epoch": 0.019138755980861243,
      "grad_norm": 11.674866676330566,
      "learning_rate": 9.569377990430622e-06,
      "loss": 1.0039,
      "step": 80
    },
    {
      "epoch": 0.0215311004784689,
      "grad_norm": 29.53816795349121,
      "learning_rate": 1.0765550239234451e-05,
      "loss": 1.0634,
      "step": 90
    },
    {
      "epoch": 0.023923444976076555,
      "grad_norm": 32.427513122558594,
      "learning_rate": 1.1961722488038278e-05,
      "loss": 1.1812,
      "step": 100
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 47.19343566894531,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.9474,
      "step": 110
    },
    {
      "epoch": 0.028708133971291867,
      "grad_norm": 21.350784301757812,
      "learning_rate": 1.4354066985645934e-05,
      "loss": 0.7303,
      "step": 120
    },
    {
      "epoch": 0.03110047846889952,
      "grad_norm": 27.72856903076172,
      "learning_rate": 1.555023923444976e-05,
      "loss": 0.7567,
      "step": 130
    },
    {
      "epoch": 0.03349282296650718,
      "grad_norm": 24.274192810058594,
      "learning_rate": 1.674641148325359e-05,
      "loss": 0.5606,
      "step": 140
    },
    {
      "epoch": 0.03588516746411483,
      "grad_norm": 52.520484924316406,
      "learning_rate": 1.7942583732057417e-05,
      "loss": 0.718,
      "step": 150
    },
    {
      "epoch": 0.03827751196172249,
      "grad_norm": 37.529579162597656,
      "learning_rate": 1.9138755980861243e-05,
      "loss": 0.8521,
      "step": 160
    },
    {
      "epoch": 0.04066985645933014,
      "grad_norm": 4.263017177581787,
      "learning_rate": 2.0334928229665073e-05,
      "loss": 0.6431,
      "step": 170
    },
    {
      "epoch": 0.0430622009569378,
      "grad_norm": 15.213534355163574,
      "learning_rate": 2.1531100478468903e-05,
      "loss": 0.7901,
      "step": 180
    },
    {
      "epoch": 0.045454545454545456,
      "grad_norm": 31.36748504638672,
      "learning_rate": 2.272727272727273e-05,
      "loss": 0.6664,
      "step": 190
    },
    {
      "epoch": 0.04784688995215311,
      "grad_norm": 16.556751251220703,
      "learning_rate": 2.3923444976076556e-05,
      "loss": 0.5235,
      "step": 200
    },
    {
      "epoch": 0.050239234449760764,
      "grad_norm": 46.87775421142578,
      "learning_rate": 2.5119617224880382e-05,
      "loss": 0.8653,
      "step": 210
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 26.508085250854492,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.8489,
      "step": 220
    },
    {
      "epoch": 0.05502392344497608,
      "grad_norm": 25.13356590270996,
      "learning_rate": 2.751196172248804e-05,
      "loss": 0.6788,
      "step": 230
    },
    {
      "epoch": 0.05741626794258373,
      "grad_norm": 31.49532699584961,
      "learning_rate": 2.8708133971291868e-05,
      "loss": 0.9036,
      "step": 240
    },
    {
      "epoch": 0.05980861244019139,
      "grad_norm": 42.16746520996094,
      "learning_rate": 2.9904306220095695e-05,
      "loss": 0.9568,
      "step": 250
    },
    {
      "epoch": 0.06220095693779904,
      "grad_norm": 22.292442321777344,
      "learning_rate": 3.110047846889952e-05,
      "loss": 0.3915,
      "step": 260
    },
    {
      "epoch": 0.0645933014354067,
      "grad_norm": 31.647417068481445,
      "learning_rate": 3.229665071770335e-05,
      "loss": 0.8292,
      "step": 270
    },
    {
      "epoch": 0.06698564593301436,
      "grad_norm": 31.727983474731445,
      "learning_rate": 3.349282296650718e-05,
      "loss": 0.8209,
      "step": 280
    },
    {
      "epoch": 0.06937799043062201,
      "grad_norm": 24.073183059692383,
      "learning_rate": 3.4688995215311004e-05,
      "loss": 0.4971,
      "step": 290
    },
    {
      "epoch": 0.07177033492822966,
      "grad_norm": 19.445539474487305,
      "learning_rate": 3.5885167464114834e-05,
      "loss": 0.7874,
      "step": 300
    },
    {
      "epoch": 0.07416267942583732,
      "grad_norm": 19.681711196899414,
      "learning_rate": 3.7081339712918663e-05,
      "loss": 0.6186,
      "step": 310
    },
    {
      "epoch": 0.07655502392344497,
      "grad_norm": 19.926891326904297,
      "learning_rate": 3.8277511961722486e-05,
      "loss": 0.5922,
      "step": 320
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 26.6019229888916,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 0.4758,
      "step": 330
    },
    {
      "epoch": 0.08133971291866028,
      "grad_norm": 29.542890548706055,
      "learning_rate": 4.0669856459330146e-05,
      "loss": 0.6328,
      "step": 340
    },
    {
      "epoch": 0.08373205741626795,
      "grad_norm": 27.611955642700195,
      "learning_rate": 4.1866028708133976e-05,
      "loss": 0.8429,
      "step": 350
    },
    {
      "epoch": 0.0861244019138756,
      "grad_norm": 26.79853057861328,
      "learning_rate": 4.3062200956937806e-05,
      "loss": 0.7488,
      "step": 360
    },
    {
      "epoch": 0.08851674641148326,
      "grad_norm": 20.399744033813477,
      "learning_rate": 4.425837320574163e-05,
      "loss": 0.9143,
      "step": 370
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 31.50543785095215,
      "learning_rate": 4.545454545454546e-05,
      "loss": 0.9285,
      "step": 380
    },
    {
      "epoch": 0.09330143540669857,
      "grad_norm": 12.433365821838379,
      "learning_rate": 4.665071770334928e-05,
      "loss": 0.729,
      "step": 390
    },
    {
      "epoch": 0.09569377990430622,
      "grad_norm": 5.882745742797852,
      "learning_rate": 4.784688995215311e-05,
      "loss": 0.6221,
      "step": 400
    },
    {
      "epoch": 0.09808612440191387,
      "grad_norm": 47.791988372802734,
      "learning_rate": 4.904306220095694e-05,
      "loss": 0.697,
      "step": 410
    },
    {
      "epoch": 0.10047846889952153,
      "grad_norm": 32.664573669433594,
      "learning_rate": 4.997341839447103e-05,
      "loss": 0.6012,
      "step": 420
    },
    {
      "epoch": 0.10287081339712918,
      "grad_norm": 5.583775997161865,
      "learning_rate": 4.984051036682616e-05,
      "loss": 0.4423,
      "step": 430
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 19.180892944335938,
      "learning_rate": 4.970760233918128e-05,
      "loss": 0.7457,
      "step": 440
    },
    {
      "epoch": 0.1076555023923445,
      "grad_norm": 16.862192153930664,
      "learning_rate": 4.9574694311536416e-05,
      "loss": 0.8563,
      "step": 450
    },
    {
      "epoch": 0.11004784688995216,
      "grad_norm": 36.542415618896484,
      "learning_rate": 4.944178628389155e-05,
      "loss": 0.6835,
      "step": 460
    },
    {
      "epoch": 0.11244019138755981,
      "grad_norm": 26.89089584350586,
      "learning_rate": 4.930887825624668e-05,
      "loss": 0.5772,
      "step": 470
    },
    {
      "epoch": 0.11483253588516747,
      "grad_norm": 64.69689178466797,
      "learning_rate": 4.917597022860181e-05,
      "loss": 0.6574,
      "step": 480
    },
    {
      "epoch": 0.11722488038277512,
      "grad_norm": 32.71666717529297,
      "learning_rate": 4.904306220095694e-05,
      "loss": 0.7411,
      "step": 490
    },
    {
      "epoch": 0.11961722488038277,
      "grad_norm": 28.70256805419922,
      "learning_rate": 4.8910154173312074e-05,
      "loss": 0.7735,
      "step": 500
    },
    {
      "epoch": 0.12200956937799043,
      "grad_norm": 47.490455627441406,
      "learning_rate": 4.87772461456672e-05,
      "loss": 0.9173,
      "step": 510
    },
    {
      "epoch": 0.12440191387559808,
      "grad_norm": 0.40462446212768555,
      "learning_rate": 4.8644338118022334e-05,
      "loss": 0.3499,
      "step": 520
    },
    {
      "epoch": 0.12679425837320574,
      "grad_norm": 13.079553604125977,
      "learning_rate": 4.8511430090377467e-05,
      "loss": 0.6362,
      "step": 530
    },
    {
      "epoch": 0.1291866028708134,
      "grad_norm": 16.84446907043457,
      "learning_rate": 4.837852206273259e-05,
      "loss": 0.6439,
      "step": 540
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 1.5127249956130981,
      "learning_rate": 4.824561403508772e-05,
      "loss": 0.688,
      "step": 550
    },
    {
      "epoch": 0.1339712918660287,
      "grad_norm": 5.6877923011779785,
      "learning_rate": 4.811270600744285e-05,
      "loss": 0.4104,
      "step": 560
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 12.39535140991211,
      "learning_rate": 4.797979797979798e-05,
      "loss": 0.5339,
      "step": 570
    },
    {
      "epoch": 0.13875598086124402,
      "grad_norm": 55.42013168334961,
      "learning_rate": 4.784688995215311e-05,
      "loss": 0.4111,
      "step": 580
    },
    {
      "epoch": 0.14114832535885166,
      "grad_norm": 26.363779067993164,
      "learning_rate": 4.7713981924508244e-05,
      "loss": 0.8168,
      "step": 590
    },
    {
      "epoch": 0.14354066985645933,
      "grad_norm": 22.93970489501953,
      "learning_rate": 4.758107389686337e-05,
      "loss": 0.6611,
      "step": 600
    },
    {
      "epoch": 0.145933014354067,
      "grad_norm": 17.63263511657715,
      "learning_rate": 4.7448165869218504e-05,
      "loss": 0.6952,
      "step": 610
    },
    {
      "epoch": 0.14832535885167464,
      "grad_norm": 42.79911804199219,
      "learning_rate": 4.731525784157364e-05,
      "loss": 0.6323,
      "step": 620
    },
    {
      "epoch": 0.1507177033492823,
      "grad_norm": 52.291927337646484,
      "learning_rate": 4.718234981392876e-05,
      "loss": 0.5826,
      "step": 630
    },
    {
      "epoch": 0.15311004784688995,
      "grad_norm": 5.49782133102417,
      "learning_rate": 4.7049441786283896e-05,
      "loss": 0.6368,
      "step": 640
    },
    {
      "epoch": 0.15550239234449761,
      "grad_norm": 24.484716415405273,
      "learning_rate": 4.691653375863902e-05,
      "loss": 0.767,
      "step": 650
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 43.46366882324219,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.6198,
      "step": 660
    },
    {
      "epoch": 0.16028708133971292,
      "grad_norm": 0.9531241059303284,
      "learning_rate": 4.665071770334928e-05,
      "loss": 0.3393,
      "step": 670
    },
    {
      "epoch": 0.16267942583732056,
      "grad_norm": 23.464101791381836,
      "learning_rate": 4.6517809675704415e-05,
      "loss": 0.3039,
      "step": 680
    },
    {
      "epoch": 0.16507177033492823,
      "grad_norm": 61.715240478515625,
      "learning_rate": 4.638490164805955e-05,
      "loss": 0.635,
      "step": 690
    },
    {
      "epoch": 0.1674641148325359,
      "grad_norm": 4.636231422424316,
      "learning_rate": 4.6251993620414674e-05,
      "loss": 0.4028,
      "step": 700
    },
    {
      "epoch": 0.16985645933014354,
      "grad_norm": 19.922252655029297,
      "learning_rate": 4.611908559276981e-05,
      "loss": 0.4871,
      "step": 710
    },
    {
      "epoch": 0.1722488038277512,
      "grad_norm": 25.78838348388672,
      "learning_rate": 4.598617756512494e-05,
      "loss": 0.7949,
      "step": 720
    },
    {
      "epoch": 0.17464114832535885,
      "grad_norm": 8.514424324035645,
      "learning_rate": 4.5853269537480066e-05,
      "loss": 0.6953,
      "step": 730
    },
    {
      "epoch": 0.17703349282296652,
      "grad_norm": 44.999107360839844,
      "learning_rate": 4.57203615098352e-05,
      "loss": 0.7568,
      "step": 740
    },
    {
      "epoch": 0.17942583732057416,
      "grad_norm": 33.99626541137695,
      "learning_rate": 4.5587453482190326e-05,
      "loss": 0.3274,
      "step": 750
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 1.6210479736328125,
      "learning_rate": 4.545454545454546e-05,
      "loss": 0.2306,
      "step": 760
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 6.679223537445068,
      "learning_rate": 4.5321637426900585e-05,
      "loss": 0.6284,
      "step": 770
    },
    {
      "epoch": 0.18660287081339713,
      "grad_norm": 43.11864471435547,
      "learning_rate": 4.518872939925572e-05,
      "loss": 0.9184,
      "step": 780
    },
    {
      "epoch": 0.18899521531100477,
      "grad_norm": 39.860008239746094,
      "learning_rate": 4.5055821371610844e-05,
      "loss": 0.3346,
      "step": 790
    },
    {
      "epoch": 0.19138755980861244,
      "grad_norm": 58.848724365234375,
      "learning_rate": 4.492291334396598e-05,
      "loss": 0.7516,
      "step": 800
    },
    {
      "epoch": 0.1937799043062201,
      "grad_norm": 1.0923501253128052,
      "learning_rate": 4.479000531632111e-05,
      "loss": 0.5148,
      "step": 810
    },
    {
      "epoch": 0.19617224880382775,
      "grad_norm": 24.75638198852539,
      "learning_rate": 4.4657097288676236e-05,
      "loss": 0.5507,
      "step": 820
    },
    {
      "epoch": 0.19856459330143542,
      "grad_norm": 0.345415860414505,
      "learning_rate": 4.452418926103137e-05,
      "loss": 0.1649,
      "step": 830
    },
    {
      "epoch": 0.2,
      "eval_accuracy": 0.8552821997105644,
      "eval_loss": 0.4412595331668854,
      "eval_runtime": 761.629,
      "eval_samples_per_second": 0.907,
      "eval_steps_per_second": 0.227,
      "step": 836
    },
    {
      "epoch": 1.000956937799043,
      "grad_norm": 55.6067008972168,
      "learning_rate": 4.43912812333865e-05,
      "loss": 0.3236,
      "step": 840
    },
    {
      "epoch": 1.0033492822966508,
      "grad_norm": 0.7264971733093262,
      "learning_rate": 4.425837320574163e-05,
      "loss": 0.3854,
      "step": 850
    },
    {
      "epoch": 1.0057416267942583,
      "grad_norm": 0.12801001965999603,
      "learning_rate": 4.412546517809676e-05,
      "loss": 0.2788,
      "step": 860
    },
    {
      "epoch": 1.008133971291866,
      "grad_norm": 29.689430236816406,
      "learning_rate": 4.399255715045189e-05,
      "loss": 0.5362,
      "step": 870
    },
    {
      "epoch": 1.0105263157894737,
      "grad_norm": 3.776660442352295,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.2869,
      "step": 880
    },
    {
      "epoch": 1.0129186602870814,
      "grad_norm": 0.310320645570755,
      "learning_rate": 4.372674109516215e-05,
      "loss": 0.3163,
      "step": 890
    },
    {
      "epoch": 1.015311004784689,
      "grad_norm": 25.65765953063965,
      "learning_rate": 4.359383306751728e-05,
      "loss": 0.1512,
      "step": 900
    },
    {
      "epoch": 1.0177033492822967,
      "grad_norm": 68.732666015625,
      "learning_rate": 4.346092503987241e-05,
      "loss": 0.7086,
      "step": 910
    },
    {
      "epoch": 1.0200956937799044,
      "grad_norm": 18.452295303344727,
      "learning_rate": 4.332801701222754e-05,
      "loss": 0.3176,
      "step": 920
    },
    {
      "epoch": 1.022488038277512,
      "grad_norm": 1.5882939100265503,
      "learning_rate": 4.319510898458267e-05,
      "loss": 0.3374,
      "step": 930
    },
    {
      "epoch": 1.0248803827751196,
      "grad_norm": 22.042537689208984,
      "learning_rate": 4.3062200956937806e-05,
      "loss": 0.1728,
      "step": 940
    },
    {
      "epoch": 1.0272727272727273,
      "grad_norm": 1.0320727825164795,
      "learning_rate": 4.292929292929293e-05,
      "loss": 0.2954,
      "step": 950
    },
    {
      "epoch": 1.0296650717703348,
      "grad_norm": 27.908615112304688,
      "learning_rate": 4.2796384901648065e-05,
      "loss": 0.1296,
      "step": 960
    },
    {
      "epoch": 1.0320574162679426,
      "grad_norm": 39.791996002197266,
      "learning_rate": 4.266347687400319e-05,
      "loss": 0.0841,
      "step": 970
    },
    {
      "epoch": 1.0344497607655503,
      "grad_norm": 1.0737611055374146,
      "learning_rate": 4.253056884635832e-05,
      "loss": 0.4375,
      "step": 980
    },
    {
      "epoch": 1.0368421052631578,
      "grad_norm": 40.09695053100586,
      "learning_rate": 4.239766081871345e-05,
      "loss": 0.2464,
      "step": 990
    },
    {
      "epoch": 1.0392344497607655,
      "grad_norm": 5.695016384124756,
      "learning_rate": 4.2264752791068584e-05,
      "loss": 0.3341,
      "step": 1000
    },
    {
      "epoch": 1.0416267942583732,
      "grad_norm": 12.370247840881348,
      "learning_rate": 4.213184476342371e-05,
      "loss": 0.3143,
      "step": 1010
    },
    {
      "epoch": 1.044019138755981,
      "grad_norm": 0.9093998670578003,
      "learning_rate": 4.199893673577884e-05,
      "loss": 0.2729,
      "step": 1020
    },
    {
      "epoch": 1.0464114832535885,
      "grad_norm": 7.610515117645264,
      "learning_rate": 4.1866028708133976e-05,
      "loss": 0.1896,
      "step": 1030
    },
    {
      "epoch": 1.0488038277511962,
      "grad_norm": 7.180859088897705,
      "learning_rate": 4.17331206804891e-05,
      "loss": 0.4265,
      "step": 1040
    },
    {
      "epoch": 1.051196172248804,
      "grad_norm": 0.1555383950471878,
      "learning_rate": 4.1600212652844235e-05,
      "loss": 0.0456,
      "step": 1050
    },
    {
      "epoch": 1.0535885167464114,
      "grad_norm": 0.6277172565460205,
      "learning_rate": 4.146730462519937e-05,
      "loss": 0.3013,
      "step": 1060
    },
    {
      "epoch": 1.0559808612440191,
      "grad_norm": 26.010774612426758,
      "learning_rate": 4.1334396597554494e-05,
      "loss": 0.1184,
      "step": 1070
    },
    {
      "epoch": 1.0583732057416269,
      "grad_norm": 20.830724716186523,
      "learning_rate": 4.120148856990963e-05,
      "loss": 0.2435,
      "step": 1080
    },
    {
      "epoch": 1.0607655502392344,
      "grad_norm": 43.08734893798828,
      "learning_rate": 4.1068580542264754e-05,
      "loss": 0.1626,
      "step": 1090
    },
    {
      "epoch": 1.063157894736842,
      "grad_norm": 1.0129386186599731,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.1131,
      "step": 1100
    },
    {
      "epoch": 1.0655502392344498,
      "grad_norm": 74.8167495727539,
      "learning_rate": 4.080276448697501e-05,
      "loss": 0.3853,
      "step": 1110
    },
    {
      "epoch": 1.0679425837320573,
      "grad_norm": 0.1025879979133606,
      "learning_rate": 4.0669856459330146e-05,
      "loss": 0.1161,
      "step": 1120
    },
    {
      "epoch": 1.070334928229665,
      "grad_norm": 37.73963165283203,
      "learning_rate": 4.053694843168527e-05,
      "loss": 0.4214,
      "step": 1130
    },
    {
      "epoch": 1.0727272727272728,
      "grad_norm": 0.028422629460692406,
      "learning_rate": 4.0404040404040405e-05,
      "loss": 0.2753,
      "step": 1140
    },
    {
      "epoch": 1.0751196172248805,
      "grad_norm": 71.2274398803711,
      "learning_rate": 4.027113237639554e-05,
      "loss": 0.2009,
      "step": 1150
    },
    {
      "epoch": 1.077511961722488,
      "grad_norm": 39.377342224121094,
      "learning_rate": 4.0138224348750665e-05,
      "loss": 0.3117,
      "step": 1160
    },
    {
      "epoch": 1.0799043062200957,
      "grad_norm": 0.112571582198143,
      "learning_rate": 4.00053163211058e-05,
      "loss": 0.1175,
      "step": 1170
    },
    {
      "epoch": 1.0822966507177034,
      "grad_norm": 25.220449447631836,
      "learning_rate": 3.987240829346093e-05,
      "loss": 0.3254,
      "step": 1180
    },
    {
      "epoch": 1.084688995215311,
      "grad_norm": 50.95032501220703,
      "learning_rate": 3.973950026581606e-05,
      "loss": 0.2824,
      "step": 1190
    },
    {
      "epoch": 1.0870813397129186,
      "grad_norm": 24.743955612182617,
      "learning_rate": 3.960659223817118e-05,
      "loss": 0.2875,
      "step": 1200
    },
    {
      "epoch": 1.0894736842105264,
      "grad_norm": 57.72587966918945,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 0.2018,
      "step": 1210
    },
    {
      "epoch": 1.0918660287081339,
      "grad_norm": 0.6570279598236084,
      "learning_rate": 3.934077618288145e-05,
      "loss": 0.4795,
      "step": 1220
    },
    {
      "epoch": 1.0942583732057416,
      "grad_norm": 2.577521324157715,
      "learning_rate": 3.9207868155236576e-05,
      "loss": 0.117,
      "step": 1230
    },
    {
      "epoch": 1.0966507177033493,
      "grad_norm": 0.06854643672704697,
      "learning_rate": 3.907496012759171e-05,
      "loss": 0.209,
      "step": 1240
    },
    {
      "epoch": 1.0990430622009568,
      "grad_norm": 0.17914560437202454,
      "learning_rate": 3.894205209994684e-05,
      "loss": 0.5758,
      "step": 1250
    },
    {
      "epoch": 1.1014354066985645,
      "grad_norm": 6.280195236206055,
      "learning_rate": 3.880914407230197e-05,
      "loss": 0.1779,
      "step": 1260
    },
    {
      "epoch": 1.1038277511961723,
      "grad_norm": 1.3312019109725952,
      "learning_rate": 3.86762360446571e-05,
      "loss": 0.1311,
      "step": 1270
    },
    {
      "epoch": 1.10622009569378,
      "grad_norm": 9.676103591918945,
      "learning_rate": 3.8543328017012234e-05,
      "loss": 0.1207,
      "step": 1280
    },
    {
      "epoch": 1.1086124401913875,
      "grad_norm": 104.29061889648438,
      "learning_rate": 3.841041998936736e-05,
      "loss": 0.3504,
      "step": 1290
    },
    {
      "epoch": 1.1110047846889952,
      "grad_norm": 0.15878012776374817,
      "learning_rate": 3.8277511961722486e-05,
      "loss": 0.1365,
      "step": 1300
    },
    {
      "epoch": 1.113397129186603,
      "grad_norm": 0.07822709530591965,
      "learning_rate": 3.814460393407762e-05,
      "loss": 0.4698,
      "step": 1310
    },
    {
      "epoch": 1.1157894736842104,
      "grad_norm": 0.5035278797149658,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.2533,
      "step": 1320
    },
    {
      "epoch": 1.1181818181818182,
      "grad_norm": 35.283233642578125,
      "learning_rate": 3.787878787878788e-05,
      "loss": 0.3578,
      "step": 1330
    },
    {
      "epoch": 1.120574162679426,
      "grad_norm": 55.772911071777344,
      "learning_rate": 3.774587985114301e-05,
      "loss": 0.3235,
      "step": 1340
    },
    {
      "epoch": 1.1229665071770334,
      "grad_norm": 0.025184158235788345,
      "learning_rate": 3.761297182349814e-05,
      "loss": 0.0543,
      "step": 1350
    },
    {
      "epoch": 1.1253588516746411,
      "grad_norm": 0.04470227286219597,
      "learning_rate": 3.748006379585327e-05,
      "loss": 0.0378,
      "step": 1360
    },
    {
      "epoch": 1.1277511961722488,
      "grad_norm": 0.10474701225757599,
      "learning_rate": 3.7347155768208404e-05,
      "loss": 0.2598,
      "step": 1370
    },
    {
      "epoch": 1.1301435406698563,
      "grad_norm": 0.1666097193956375,
      "learning_rate": 3.721424774056353e-05,
      "loss": 0.3193,
      "step": 1380
    },
    {
      "epoch": 1.132535885167464,
      "grad_norm": 0.2347351312637329,
      "learning_rate": 3.7081339712918663e-05,
      "loss": 0.1546,
      "step": 1390
    },
    {
      "epoch": 1.1349282296650718,
      "grad_norm": 2.922492027282715,
      "learning_rate": 3.6948431685273796e-05,
      "loss": 0.0459,
      "step": 1400
    },
    {
      "epoch": 1.1373205741626795,
      "grad_norm": 90.11852264404297,
      "learning_rate": 3.681552365762892e-05,
      "loss": 0.1136,
      "step": 1410
    },
    {
      "epoch": 1.139712918660287,
      "grad_norm": 0.0664602667093277,
      "learning_rate": 3.668261562998405e-05,
      "loss": 0.1917,
      "step": 1420
    },
    {
      "epoch": 1.1421052631578947,
      "grad_norm": 28.271459579467773,
      "learning_rate": 3.654970760233918e-05,
      "loss": 0.0633,
      "step": 1430
    },
    {
      "epoch": 1.1444976076555025,
      "grad_norm": 48.338539123535156,
      "learning_rate": 3.6416799574694315e-05,
      "loss": 0.1383,
      "step": 1440
    },
    {
      "epoch": 1.14688995215311,
      "grad_norm": 1.0220551490783691,
      "learning_rate": 3.628389154704944e-05,
      "loss": 0.2675,
      "step": 1450
    },
    {
      "epoch": 1.1492822966507177,
      "grad_norm": 0.07435596734285355,
      "learning_rate": 3.6150983519404574e-05,
      "loss": 0.0058,
      "step": 1460
    },
    {
      "epoch": 1.1516746411483254,
      "grad_norm": 0.025288909673690796,
      "learning_rate": 3.601807549175971e-05,
      "loss": 0.3105,
      "step": 1470
    },
    {
      "epoch": 1.1540669856459331,
      "grad_norm": 3.0553793907165527,
      "learning_rate": 3.5885167464114834e-05,
      "loss": 0.1359,
      "step": 1480
    },
    {
      "epoch": 1.1564593301435406,
      "grad_norm": 12.173036575317383,
      "learning_rate": 3.5752259436469967e-05,
      "loss": 0.1923,
      "step": 1490
    },
    {
      "epoch": 1.1588516746411484,
      "grad_norm": 0.060364119708538055,
      "learning_rate": 3.56193514088251e-05,
      "loss": 0.0117,
      "step": 1500
    },
    {
      "epoch": 1.1612440191387559,
      "grad_norm": 0.03194255381822586,
      "learning_rate": 3.5486443381180226e-05,
      "loss": 0.004,
      "step": 1510
    },
    {
      "epoch": 1.1636363636363636,
      "grad_norm": 81.4512710571289,
      "learning_rate": 3.535353535353535e-05,
      "loss": 0.0776,
      "step": 1520
    },
    {
      "epoch": 1.1660287081339713,
      "grad_norm": 0.016529429703950882,
      "learning_rate": 3.5220627325890485e-05,
      "loss": 0.3877,
      "step": 1530
    },
    {
      "epoch": 1.168421052631579,
      "grad_norm": 0.6734187602996826,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.1606,
      "step": 1540
    },
    {
      "epoch": 1.1708133971291865,
      "grad_norm": 0.019191304221749306,
      "learning_rate": 3.4954811270600744e-05,
      "loss": 0.1287,
      "step": 1550
    },
    {
      "epoch": 1.1732057416267943,
      "grad_norm": 53.84104919433594,
      "learning_rate": 3.482190324295588e-05,
      "loss": 0.1832,
      "step": 1560
    },
    {
      "epoch": 1.175598086124402,
      "grad_norm": 30.083900451660156,
      "learning_rate": 3.4688995215311004e-05,
      "loss": 0.3451,
      "step": 1570
    },
    {
      "epoch": 1.1779904306220095,
      "grad_norm": 9.459929466247559,
      "learning_rate": 3.455608718766614e-05,
      "loss": 0.5572,
      "step": 1580
    },
    {
      "epoch": 1.1803827751196172,
      "grad_norm": 0.8345646262168884,
      "learning_rate": 3.442317916002127e-05,
      "loss": 0.0222,
      "step": 1590
    },
    {
      "epoch": 1.182775119617225,
      "grad_norm": 29.698089599609375,
      "learning_rate": 3.4290271132376396e-05,
      "loss": 0.1476,
      "step": 1600
    },
    {
      "epoch": 1.1851674641148326,
      "grad_norm": 40.125308990478516,
      "learning_rate": 3.415736310473153e-05,
      "loss": 0.1169,
      "step": 1610
    },
    {
      "epoch": 1.1875598086124401,
      "grad_norm": 0.015308327041566372,
      "learning_rate": 3.402445507708666e-05,
      "loss": 0.1322,
      "step": 1620
    },
    {
      "epoch": 1.1899521531100479,
      "grad_norm": 57.31826400756836,
      "learning_rate": 3.389154704944179e-05,
      "loss": 0.2188,
      "step": 1630
    },
    {
      "epoch": 1.1923444976076556,
      "grad_norm": 2.116442918777466,
      "learning_rate": 3.3758639021796915e-05,
      "loss": 0.0865,
      "step": 1640
    },
    {
      "epoch": 1.194736842105263,
      "grad_norm": 2.048318862915039,
      "learning_rate": 3.362573099415205e-05,
      "loss": 0.0313,
      "step": 1650
    },
    {
      "epoch": 1.1971291866028708,
      "grad_norm": 1.6264055967330933,
      "learning_rate": 3.349282296650718e-05,
      "loss": 0.0748,
      "step": 1660
    },
    {
      "epoch": 1.1995215311004785,
      "grad_norm": 0.020111849531531334,
      "learning_rate": 3.335991493886231e-05,
      "loss": 0.1242,
      "step": 1670
    },
    {
      "epoch": 1.2,
      "eval_accuracy": 0.9681620839363242,
      "eval_loss": 0.11400265246629715,
      "eval_runtime": 833.2657,
      "eval_samples_per_second": 0.829,
      "eval_steps_per_second": 0.208,
      "step": 1672
    },
    {
      "epoch": 2.001913875598086,
      "grad_norm": 0.04623245820403099,
      "learning_rate": 3.322700691121744e-05,
      "loss": 0.2269,
      "step": 1680
    },
    {
      "epoch": 2.004306220095694,
      "grad_norm": 0.04105829820036888,
      "learning_rate": 3.309409888357257e-05,
      "loss": 0.0314,
      "step": 1690
    },
    {
      "epoch": 2.0066985645933015,
      "grad_norm": 0.012316164560616016,
      "learning_rate": 3.29611908559277e-05,
      "loss": 0.0812,
      "step": 1700
    },
    {
      "epoch": 2.0090909090909093,
      "grad_norm": 0.014452405273914337,
      "learning_rate": 3.282828282828283e-05,
      "loss": 0.095,
      "step": 1710
    },
    {
      "epoch": 2.0114832535885165,
      "grad_norm": 0.02369452267885208,
      "learning_rate": 3.2695374800637965e-05,
      "loss": 0.0393,
      "step": 1720
    },
    {
      "epoch": 2.0138755980861243,
      "grad_norm": 0.05732015147805214,
      "learning_rate": 3.256246677299309e-05,
      "loss": 0.0019,
      "step": 1730
    },
    {
      "epoch": 2.016267942583732,
      "grad_norm": 0.23258601129055023,
      "learning_rate": 3.242955874534822e-05,
      "loss": 0.1842,
      "step": 1740
    },
    {
      "epoch": 2.0186602870813397,
      "grad_norm": 0.09864845871925354,
      "learning_rate": 3.229665071770335e-05,
      "loss": 0.0921,
      "step": 1750
    },
    {
      "epoch": 2.0210526315789474,
      "grad_norm": 472.58856201171875,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.1973,
      "step": 1760
    },
    {
      "epoch": 2.023444976076555,
      "grad_norm": 0.010467225685715675,
      "learning_rate": 3.203083466241361e-05,
      "loss": 0.0142,
      "step": 1770
    },
    {
      "epoch": 2.025837320574163,
      "grad_norm": 34.93497848510742,
      "learning_rate": 3.189792663476874e-05,
      "loss": 0.1638,
      "step": 1780
    },
    {
      "epoch": 2.02822966507177,
      "grad_norm": 108.92484283447266,
      "learning_rate": 3.176501860712387e-05,
      "loss": 0.2049,
      "step": 1790
    },
    {
      "epoch": 2.030622009569378,
      "grad_norm": 0.12743550539016724,
      "learning_rate": 3.1632110579479e-05,
      "loss": 0.0541,
      "step": 1800
    },
    {
      "epoch": 2.0330143540669856,
      "grad_norm": 0.01830146089196205,
      "learning_rate": 3.1499202551834136e-05,
      "loss": 0.0266,
      "step": 1810
    },
    {
      "epoch": 2.0354066985645933,
      "grad_norm": 0.07744389772415161,
      "learning_rate": 3.136629452418926e-05,
      "loss": 0.2081,
      "step": 1820
    },
    {
      "epoch": 2.037799043062201,
      "grad_norm": 0.09580251574516296,
      "learning_rate": 3.1233386496544395e-05,
      "loss": 0.1809,
      "step": 1830
    },
    {
      "epoch": 2.0401913875598088,
      "grad_norm": 1.139581561088562,
      "learning_rate": 3.110047846889952e-05,
      "loss": 0.0047,
      "step": 1840
    },
    {
      "epoch": 2.042583732057416,
      "grad_norm": 109.73816680908203,
      "learning_rate": 3.0967570441254654e-05,
      "loss": 0.1755,
      "step": 1850
    },
    {
      "epoch": 2.044976076555024,
      "grad_norm": 0.021538248285651207,
      "learning_rate": 3.083466241360978e-05,
      "loss": 0.146,
      "step": 1860
    },
    {
      "epoch": 2.0473684210526315,
      "grad_norm": 0.06147049739956856,
      "learning_rate": 3.0701754385964913e-05,
      "loss": 0.1163,
      "step": 1870
    },
    {
      "epoch": 2.0497607655502392,
      "grad_norm": 0.05214022099971771,
      "learning_rate": 3.056884635832004e-05,
      "loss": 0.0021,
      "step": 1880
    },
    {
      "epoch": 2.052153110047847,
      "grad_norm": 0.006269617471843958,
      "learning_rate": 3.0435938330675173e-05,
      "loss": 0.0012,
      "step": 1890
    },
    {
      "epoch": 2.0545454545454547,
      "grad_norm": 3.528353691101074,
      "learning_rate": 3.0303030303030306e-05,
      "loss": 0.0097,
      "step": 1900
    },
    {
      "epoch": 2.0569377990430624,
      "grad_norm": 0.43716534972190857,
      "learning_rate": 3.017012227538544e-05,
      "loss": 0.0138,
      "step": 1910
    },
    {
      "epoch": 2.0593301435406697,
      "grad_norm": 0.008898725733160973,
      "learning_rate": 3.0037214247740565e-05,
      "loss": 0.1143,
      "step": 1920
    },
    {
      "epoch": 2.0617224880382774,
      "grad_norm": 18.777921676635742,
      "learning_rate": 2.9904306220095695e-05,
      "loss": 0.0059,
      "step": 1930
    },
    {
      "epoch": 2.064114832535885,
      "grad_norm": 0.010017622262239456,
      "learning_rate": 2.9771398192450828e-05,
      "loss": 0.0021,
      "step": 1940
    },
    {
      "epoch": 2.066507177033493,
      "grad_norm": 0.028644366189837456,
      "learning_rate": 2.9638490164805954e-05,
      "loss": 0.0024,
      "step": 1950
    },
    {
      "epoch": 2.0688995215311006,
      "grad_norm": 0.018241288140416145,
      "learning_rate": 2.9505582137161087e-05,
      "loss": 0.2653,
      "step": 1960
    },
    {
      "epoch": 2.0712918660287083,
      "grad_norm": 0.027866262942552567,
      "learning_rate": 2.937267410951622e-05,
      "loss": 0.0659,
      "step": 1970
    },
    {
      "epoch": 2.0736842105263156,
      "grad_norm": 0.0228541512042284,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.0024,
      "step": 1980
    },
    {
      "epoch": 2.0760765550239233,
      "grad_norm": 1.270308494567871,
      "learning_rate": 2.9106858054226476e-05,
      "loss": 0.1811,
      "step": 1990
    },
    {
      "epoch": 2.078468899521531,
      "grad_norm": 0.03892623633146286,
      "learning_rate": 2.897395002658161e-05,
      "loss": 0.2084,
      "step": 2000
    },
    {
      "epoch": 2.0808612440191387,
      "grad_norm": 145.8848876953125,
      "learning_rate": 2.8841041998936735e-05,
      "loss": 0.064,
      "step": 2010
    },
    {
      "epoch": 2.0832535885167465,
      "grad_norm": 0.01369253359735012,
      "learning_rate": 2.8708133971291868e-05,
      "loss": 0.0026,
      "step": 2020
    },
    {
      "epoch": 2.085645933014354,
      "grad_norm": 0.06184624508023262,
      "learning_rate": 2.8575225943646998e-05,
      "loss": 0.0449,
      "step": 2030
    },
    {
      "epoch": 2.088038277511962,
      "grad_norm": 0.018334228545427322,
      "learning_rate": 2.8442317916002124e-05,
      "loss": 0.039,
      "step": 2040
    },
    {
      "epoch": 2.090430622009569,
      "grad_norm": 0.01396853942424059,
      "learning_rate": 2.8309409888357257e-05,
      "loss": 0.0009,
      "step": 2050
    },
    {
      "epoch": 2.092822966507177,
      "grad_norm": 9.39311408996582,
      "learning_rate": 2.817650186071239e-05,
      "loss": 0.0502,
      "step": 2060
    },
    {
      "epoch": 2.0952153110047846,
      "grad_norm": 0.01102653332054615,
      "learning_rate": 2.8043593833067516e-05,
      "loss": 0.0142,
      "step": 2070
    },
    {
      "epoch": 2.0976076555023924,
      "grad_norm": 0.030104391276836395,
      "learning_rate": 2.791068580542265e-05,
      "loss": 0.0014,
      "step": 2080
    },
    {
      "epoch": 2.1,
      "grad_norm": 2.8127565383911133,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.0858,
      "step": 2090
    },
    {
      "epoch": 2.102392344497608,
      "grad_norm": 0.022678395733237267,
      "learning_rate": 2.7644869750132905e-05,
      "loss": 0.0855,
      "step": 2100
    },
    {
      "epoch": 2.104784688995215,
      "grad_norm": 0.011784915812313557,
      "learning_rate": 2.751196172248804e-05,
      "loss": 0.2841,
      "step": 2110
    },
    {
      "epoch": 2.107177033492823,
      "grad_norm": 0.01566784456372261,
      "learning_rate": 2.737905369484317e-05,
      "loss": 0.4591,
      "step": 2120
    },
    {
      "epoch": 2.1095693779904305,
      "grad_norm": 0.22584687173366547,
      "learning_rate": 2.7246145667198298e-05,
      "loss": 0.1164,
      "step": 2130
    },
    {
      "epoch": 2.1119617224880383,
      "grad_norm": 0.293266624212265,
      "learning_rate": 2.711323763955343e-05,
      "loss": 0.0014,
      "step": 2140
    },
    {
      "epoch": 2.114354066985646,
      "grad_norm": 0.04173046350479126,
      "learning_rate": 2.698032961190856e-05,
      "loss": 0.0024,
      "step": 2150
    },
    {
      "epoch": 2.1167464114832537,
      "grad_norm": 0.014709273353219032,
      "learning_rate": 2.6847421584263693e-05,
      "loss": 0.0234,
      "step": 2160
    },
    {
      "epoch": 2.1191387559808614,
      "grad_norm": 0.060059864073991776,
      "learning_rate": 2.671451355661882e-05,
      "loss": 0.0406,
      "step": 2170
    },
    {
      "epoch": 2.1215311004784687,
      "grad_norm": 0.025863196700811386,
      "learning_rate": 2.6581605528973953e-05,
      "loss": 0.1082,
      "step": 2180
    },
    {
      "epoch": 2.1239234449760764,
      "grad_norm": 0.011012042872607708,
      "learning_rate": 2.6448697501329082e-05,
      "loss": 0.1153,
      "step": 2190
    },
    {
      "epoch": 2.126315789473684,
      "grad_norm": 0.009336967021226883,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.1899,
      "step": 2200
    },
    {
      "epoch": 2.128708133971292,
      "grad_norm": 0.022180695086717606,
      "learning_rate": 2.618288144603934e-05,
      "loss": 0.0947,
      "step": 2210
    },
    {
      "epoch": 2.1311004784688996,
      "grad_norm": 0.06437038630247116,
      "learning_rate": 2.6049973418394475e-05,
      "loss": 0.046,
      "step": 2220
    },
    {
      "epoch": 2.1334928229665073,
      "grad_norm": 0.023490458726882935,
      "learning_rate": 2.59170653907496e-05,
      "loss": 0.0028,
      "step": 2230
    },
    {
      "epoch": 2.1358851674641146,
      "grad_norm": 0.010341249406337738,
      "learning_rate": 2.5784157363104734e-05,
      "loss": 0.0011,
      "step": 2240
    },
    {
      "epoch": 2.1382775119617223,
      "grad_norm": 0.019244810566306114,
      "learning_rate": 2.5651249335459864e-05,
      "loss": 0.001,
      "step": 2250
    },
    {
      "epoch": 2.14066985645933,
      "grad_norm": 2.1345505714416504,
      "learning_rate": 2.551834130781499e-05,
      "loss": 0.1981,
      "step": 2260
    },
    {
      "epoch": 2.143062200956938,
      "grad_norm": 0.06913293898105621,
      "learning_rate": 2.5385433280170123e-05,
      "loss": 0.0117,
      "step": 2270
    },
    {
      "epoch": 2.1454545454545455,
      "grad_norm": 17.53659439086914,
      "learning_rate": 2.5252525252525256e-05,
      "loss": 0.0048,
      "step": 2280
    },
    {
      "epoch": 2.1478468899521532,
      "grad_norm": 1.950246810913086,
      "learning_rate": 2.5119617224880382e-05,
      "loss": 0.0045,
      "step": 2290
    },
    {
      "epoch": 2.150239234449761,
      "grad_norm": 0.15338365733623505,
      "learning_rate": 2.4986709197235515e-05,
      "loss": 0.0287,
      "step": 2300
    },
    {
      "epoch": 2.1526315789473682,
      "grad_norm": 0.08566514402627945,
      "learning_rate": 2.485380116959064e-05,
      "loss": 0.0009,
      "step": 2310
    },
    {
      "epoch": 2.155023923444976,
      "grad_norm": 0.005700881592929363,
      "learning_rate": 2.4720893141945774e-05,
      "loss": 0.0013,
      "step": 2320
    },
    {
      "epoch": 2.1574162679425837,
      "grad_norm": 0.010471828281879425,
      "learning_rate": 2.4587985114300904e-05,
      "loss": 0.04,
      "step": 2330
    },
    {
      "epoch": 2.1598086124401914,
      "grad_norm": 0.15442727506160736,
      "learning_rate": 2.4455077086656037e-05,
      "loss": 0.0032,
      "step": 2340
    },
    {
      "epoch": 2.162200956937799,
      "grad_norm": 0.013687992468476295,
      "learning_rate": 2.4322169059011167e-05,
      "loss": 0.0006,
      "step": 2350
    },
    {
      "epoch": 2.164593301435407,
      "grad_norm": 15.276226043701172,
      "learning_rate": 2.4189261031366296e-05,
      "loss": 0.1538,
      "step": 2360
    },
    {
      "epoch": 2.166985645933014,
      "grad_norm": 0.011922297067940235,
      "learning_rate": 2.4056353003721426e-05,
      "loss": 0.004,
      "step": 2370
    },
    {
      "epoch": 2.169377990430622,
      "grad_norm": 0.012630579993128777,
      "learning_rate": 2.3923444976076556e-05,
      "loss": 0.0029,
      "step": 2380
    },
    {
      "epoch": 2.1717703349282296,
      "grad_norm": 2.478867530822754,
      "learning_rate": 2.3790536948431685e-05,
      "loss": 0.0054,
      "step": 2390
    },
    {
      "epoch": 2.1741626794258373,
      "grad_norm": 0.032478444278240204,
      "learning_rate": 2.365762892078682e-05,
      "loss": 0.0009,
      "step": 2400
    },
    {
      "epoch": 2.176555023923445,
      "grad_norm": 1.2129992246627808,
      "learning_rate": 2.3524720893141948e-05,
      "loss": 0.0015,
      "step": 2410
    },
    {
      "epoch": 2.1789473684210527,
      "grad_norm": 68.73790740966797,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.0066,
      "step": 2420
    },
    {
      "epoch": 2.1813397129186605,
      "grad_norm": 0.03823688626289368,
      "learning_rate": 2.3258904837852207e-05,
      "loss": 0.0034,
      "step": 2430
    },
    {
      "epoch": 2.1837320574162677,
      "grad_norm": 0.010348823852837086,
      "learning_rate": 2.3125996810207337e-05,
      "loss": 0.0034,
      "step": 2440
    },
    {
      "epoch": 2.1861244019138755,
      "grad_norm": 0.03327915072441101,
      "learning_rate": 2.299308878256247e-05,
      "loss": 0.0044,
      "step": 2450
    },
    {
      "epoch": 2.188516746411483,
      "grad_norm": 0.0061185345984995365,
      "learning_rate": 2.28601807549176e-05,
      "loss": 0.001,
      "step": 2460
    },
    {
      "epoch": 2.190909090909091,
      "grad_norm": 0.019545989111065865,
      "learning_rate": 2.272727272727273e-05,
      "loss": 0.0545,
      "step": 2470
    },
    {
      "epoch": 2.1933014354066986,
      "grad_norm": 0.027101656422019005,
      "learning_rate": 2.259436469962786e-05,
      "loss": 0.0021,
      "step": 2480
    },
    {
      "epoch": 2.1956937799043064,
      "grad_norm": 99.50630950927734,
      "learning_rate": 2.246145667198299e-05,
      "loss": 0.0218,
      "step": 2490
    },
    {
      "epoch": 2.1980861244019136,
      "grad_norm": 0.009054621681571007,
      "learning_rate": 2.2328548644338118e-05,
      "loss": 0.0008,
      "step": 2500
    },
    {
      "epoch": 2.2,
      "eval_accuracy": 0.9797395079594791,
      "eval_loss": 0.09082657098770142,
      "eval_runtime": 794.713,
      "eval_samples_per_second": 0.869,
      "eval_steps_per_second": 0.218,
      "step": 2508
    },
    {
      "epoch": 3.0004784688995216,
      "grad_norm": 0.006668519228696823,
      "learning_rate": 2.219564061669325e-05,
      "loss": 0.2009,
      "step": 2510
    },
    {
      "epoch": 3.0028708133971294,
      "grad_norm": 38.766822814941406,
      "learning_rate": 2.206273258904838e-05,
      "loss": 0.2188,
      "step": 2520
    },
    {
      "epoch": 3.0052631578947366,
      "grad_norm": 0.01620459370315075,
      "learning_rate": 2.1929824561403507e-05,
      "loss": 0.0007,
      "step": 2530
    },
    {
      "epoch": 3.0076555023923444,
      "grad_norm": 0.010571425780653954,
      "learning_rate": 2.179691653375864e-05,
      "loss": 0.0011,
      "step": 2540
    },
    {
      "epoch": 3.010047846889952,
      "grad_norm": 0.007847270928323269,
      "learning_rate": 2.166400850611377e-05,
      "loss": 0.0019,
      "step": 2550
    },
    {
      "epoch": 3.01244019138756,
      "grad_norm": 2.9406144618988037,
      "learning_rate": 2.1531100478468903e-05,
      "loss": 0.008,
      "step": 2560
    },
    {
      "epoch": 3.0148325358851675,
      "grad_norm": 0.16233409941196442,
      "learning_rate": 2.1398192450824032e-05,
      "loss": 0.001,
      "step": 2570
    },
    {
      "epoch": 3.0172248803827753,
      "grad_norm": 0.034926705062389374,
      "learning_rate": 2.126528442317916e-05,
      "loss": 0.0007,
      "step": 2580
    },
    {
      "epoch": 3.019617224880383,
      "grad_norm": 0.007242594845592976,
      "learning_rate": 2.1132376395534292e-05,
      "loss": 0.2104,
      "step": 2590
    },
    {
      "epoch": 3.0220095693779903,
      "grad_norm": 0.007837713696062565,
      "learning_rate": 2.099946836788942e-05,
      "loss": 0.2077,
      "step": 2600
    },
    {
      "epoch": 3.024401913875598,
      "grad_norm": 0.006022488698363304,
      "learning_rate": 2.086656034024455e-05,
      "loss": 0.0011,
      "step": 2610
    },
    {
      "epoch": 3.0267942583732057,
      "grad_norm": 0.08888078480958939,
      "learning_rate": 2.0733652312599684e-05,
      "loss": 0.0017,
      "step": 2620
    },
    {
      "epoch": 3.0291866028708134,
      "grad_norm": 0.05022843927145004,
      "learning_rate": 2.0600744284954814e-05,
      "loss": 0.1537,
      "step": 2630
    },
    {
      "epoch": 3.031578947368421,
      "grad_norm": 0.020734012126922607,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.0015,
      "step": 2640
    },
    {
      "epoch": 3.033971291866029,
      "grad_norm": 97.93255615234375,
      "learning_rate": 2.0334928229665073e-05,
      "loss": 0.0167,
      "step": 2650
    },
    {
      "epoch": 3.036363636363636,
      "grad_norm": 0.018566004931926727,
      "learning_rate": 2.0202020202020203e-05,
      "loss": 0.0012,
      "step": 2660
    },
    {
      "epoch": 3.038755980861244,
      "grad_norm": 0.005064102355390787,
      "learning_rate": 2.0069112174375332e-05,
      "loss": 0.0005,
      "step": 2670
    },
    {
      "epoch": 3.0411483253588516,
      "grad_norm": 0.019417481496930122,
      "learning_rate": 1.9936204146730465e-05,
      "loss": 0.0049,
      "step": 2680
    },
    {
      "epoch": 3.0435406698564593,
      "grad_norm": 0.007396561559289694,
      "learning_rate": 1.980329611908559e-05,
      "loss": 0.0025,
      "step": 2690
    },
    {
      "epoch": 3.045933014354067,
      "grad_norm": 61.74516677856445,
      "learning_rate": 1.9670388091440725e-05,
      "loss": 0.0991,
      "step": 2700
    },
    {
      "epoch": 3.0483253588516748,
      "grad_norm": 0.021895471960306168,
      "learning_rate": 1.9537480063795854e-05,
      "loss": 0.0007,
      "step": 2710
    },
    {
      "epoch": 3.0507177033492825,
      "grad_norm": 0.011149121448397636,
      "learning_rate": 1.9404572036150984e-05,
      "loss": 0.0006,
      "step": 2720
    },
    {
      "epoch": 3.0531100478468898,
      "grad_norm": 0.004441165365278721,
      "learning_rate": 1.9271664008506117e-05,
      "loss": 0.1103,
      "step": 2730
    },
    {
      "epoch": 3.0555023923444975,
      "grad_norm": 0.01931576617062092,
      "learning_rate": 1.9138755980861243e-05,
      "loss": 0.0029,
      "step": 2740
    },
    {
      "epoch": 3.057894736842105,
      "grad_norm": 0.005823149345815182,
      "learning_rate": 1.9005847953216373e-05,
      "loss": 0.1682,
      "step": 2750
    },
    {
      "epoch": 3.060287081339713,
      "grad_norm": 0.00678264070302248,
      "learning_rate": 1.8872939925571506e-05,
      "loss": 0.1651,
      "step": 2760
    },
    {
      "epoch": 3.0626794258373207,
      "grad_norm": 0.010020887479186058,
      "learning_rate": 1.8740031897926636e-05,
      "loss": 0.0011,
      "step": 2770
    },
    {
      "epoch": 3.0650717703349284,
      "grad_norm": 0.005996669642627239,
      "learning_rate": 1.8607123870281765e-05,
      "loss": 0.0018,
      "step": 2780
    },
    {
      "epoch": 3.0674641148325357,
      "grad_norm": 0.09342297166585922,
      "learning_rate": 1.8474215842636898e-05,
      "loss": 0.0008,
      "step": 2790
    },
    {
      "epoch": 3.0698564593301434,
      "grad_norm": 0.00897641945630312,
      "learning_rate": 1.8341307814992024e-05,
      "loss": 0.0108,
      "step": 2800
    },
    {
      "epoch": 3.072248803827751,
      "grad_norm": 0.1280699372291565,
      "learning_rate": 1.8208399787347157e-05,
      "loss": 0.0066,
      "step": 2810
    },
    {
      "epoch": 3.074641148325359,
      "grad_norm": 0.0044610267505049706,
      "learning_rate": 1.8075491759702287e-05,
      "loss": 0.0006,
      "step": 2820
    },
    {
      "epoch": 3.0770334928229666,
      "grad_norm": 0.006019377615302801,
      "learning_rate": 1.7942583732057417e-05,
      "loss": 0.0005,
      "step": 2830
    },
    {
      "epoch": 3.0794258373205743,
      "grad_norm": 0.0070389206521213055,
      "learning_rate": 1.780967570441255e-05,
      "loss": 0.006,
      "step": 2840
    },
    {
      "epoch": 3.081818181818182,
      "grad_norm": 0.010742398910224438,
      "learning_rate": 1.7676767676767676e-05,
      "loss": 0.0012,
      "step": 2850
    },
    {
      "epoch": 3.0842105263157893,
      "grad_norm": 0.019535677507519722,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.0005,
      "step": 2860
    },
    {
      "epoch": 3.086602870813397,
      "grad_norm": 0.004338996019214392,
      "learning_rate": 1.741095162147794e-05,
      "loss": 0.0006,
      "step": 2870
    },
    {
      "epoch": 3.0889952153110047,
      "grad_norm": 0.004541581030935049,
      "learning_rate": 1.727804359383307e-05,
      "loss": 0.0873,
      "step": 2880
    },
    {
      "epoch": 3.0913875598086125,
      "grad_norm": 0.006278311833739281,
      "learning_rate": 1.7145135566188198e-05,
      "loss": 0.0012,
      "step": 2890
    },
    {
      "epoch": 3.09377990430622,
      "grad_norm": 1.023903250694275,
      "learning_rate": 1.701222753854333e-05,
      "loss": 0.0007,
      "step": 2900
    },
    {
      "epoch": 3.096172248803828,
      "grad_norm": 0.005157362204045057,
      "learning_rate": 1.6879319510898457e-05,
      "loss": 0.0004,
      "step": 2910
    },
    {
      "epoch": 3.098564593301435,
      "grad_norm": 0.006036865524947643,
      "learning_rate": 1.674641148325359e-05,
      "loss": 0.0041,
      "step": 2920
    },
    {
      "epoch": 3.100956937799043,
      "grad_norm": 0.7861006259918213,
      "learning_rate": 1.661350345560872e-05,
      "loss": 0.003,
      "step": 2930
    },
    {
      "epoch": 3.1033492822966506,
      "grad_norm": 0.4006275534629822,
      "learning_rate": 1.648059542796385e-05,
      "loss": 0.0253,
      "step": 2940
    },
    {
      "epoch": 3.1057416267942584,
      "grad_norm": 0.13688111305236816,
      "learning_rate": 1.6347687400318983e-05,
      "loss": 0.0006,
      "step": 2950
    },
    {
      "epoch": 3.108133971291866,
      "grad_norm": 0.007709797937422991,
      "learning_rate": 1.621477937267411e-05,
      "loss": 0.0006,
      "step": 2960
    },
    {
      "epoch": 3.110526315789474,
      "grad_norm": 0.004617704544216394,
      "learning_rate": 1.608187134502924e-05,
      "loss": 0.0008,
      "step": 2970
    },
    {
      "epoch": 3.1129186602870815,
      "grad_norm": 0.011701893992722034,
      "learning_rate": 1.594896331738437e-05,
      "loss": 0.0582,
      "step": 2980
    },
    {
      "epoch": 3.115311004784689,
      "grad_norm": 0.20932549238204956,
      "learning_rate": 1.58160552897395e-05,
      "loss": 0.0012,
      "step": 2990
    },
    {
      "epoch": 3.1177033492822965,
      "grad_norm": 0.008711748756468296,
      "learning_rate": 1.568314726209463e-05,
      "loss": 0.0938,
      "step": 3000
    },
    {
      "epoch": 3.1200956937799043,
      "grad_norm": 0.004468224477022886,
      "learning_rate": 1.555023923444976e-05,
      "loss": 0.0006,
      "step": 3010
    },
    {
      "epoch": 3.122488038277512,
      "grad_norm": 0.061067596077919006,
      "learning_rate": 1.541733120680489e-05,
      "loss": 0.0007,
      "step": 3020
    },
    {
      "epoch": 3.1248803827751197,
      "grad_norm": 0.025924433022737503,
      "learning_rate": 1.528442317916002e-05,
      "loss": 0.0147,
      "step": 3030
    },
    {
      "epoch": 3.1272727272727274,
      "grad_norm": 0.33229324221611023,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.0006,
      "step": 3040
    },
    {
      "epoch": 3.1296650717703347,
      "grad_norm": 0.019822843372821808,
      "learning_rate": 1.5018607123870283e-05,
      "loss": 0.0017,
      "step": 3050
    },
    {
      "epoch": 3.1320574162679424,
      "grad_norm": 0.009158506989479065,
      "learning_rate": 1.4885699096225414e-05,
      "loss": 0.0179,
      "step": 3060
    },
    {
      "epoch": 3.13444976076555,
      "grad_norm": 104.4133071899414,
      "learning_rate": 1.4752791068580543e-05,
      "loss": 0.1185,
      "step": 3070
    },
    {
      "epoch": 3.136842105263158,
      "grad_norm": 0.013615977019071579,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.0012,
      "step": 3080
    },
    {
      "epoch": 3.1392344497607656,
      "grad_norm": 0.007452609483152628,
      "learning_rate": 1.4486975013290804e-05,
      "loss": 0.0006,
      "step": 3090
    },
    {
      "epoch": 3.1416267942583733,
      "grad_norm": 0.017694944515824318,
      "learning_rate": 1.4354066985645934e-05,
      "loss": 0.0889,
      "step": 3100
    },
    {
      "epoch": 3.144019138755981,
      "grad_norm": 0.0034980145283043385,
      "learning_rate": 1.4221158958001062e-05,
      "loss": 0.0137,
      "step": 3110
    },
    {
      "epoch": 3.1464114832535883,
      "grad_norm": 0.006997366435825825,
      "learning_rate": 1.4088250930356195e-05,
      "loss": 0.1826,
      "step": 3120
    },
    {
      "epoch": 3.148803827751196,
      "grad_norm": 0.20292145013809204,
      "learning_rate": 1.3955342902711325e-05,
      "loss": 0.0007,
      "step": 3130
    },
    {
      "epoch": 3.1511961722488038,
      "grad_norm": 0.009792889468371868,
      "learning_rate": 1.3822434875066453e-05,
      "loss": 0.0338,
      "step": 3140
    },
    {
      "epoch": 3.1535885167464115,
      "grad_norm": 0.006899695377796888,
      "learning_rate": 1.3689526847421586e-05,
      "loss": 0.0006,
      "step": 3150
    },
    {
      "epoch": 3.155980861244019,
      "grad_norm": 0.004146776627749205,
      "learning_rate": 1.3556618819776715e-05,
      "loss": 0.0019,
      "step": 3160
    },
    {
      "epoch": 3.158373205741627,
      "grad_norm": 0.003076452063396573,
      "learning_rate": 1.3423710792131847e-05,
      "loss": 0.0004,
      "step": 3170
    },
    {
      "epoch": 3.1607655502392342,
      "grad_norm": 0.022380296140909195,
      "learning_rate": 1.3290802764486976e-05,
      "loss": 0.0011,
      "step": 3180
    },
    {
      "epoch": 3.163157894736842,
      "grad_norm": 0.005187951028347015,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.0005,
      "step": 3190
    },
    {
      "epoch": 3.1655502392344497,
      "grad_norm": 0.004998629912734032,
      "learning_rate": 1.3024986709197237e-05,
      "loss": 0.0006,
      "step": 3200
    },
    {
      "epoch": 3.1679425837320574,
      "grad_norm": 0.016898494213819504,
      "learning_rate": 1.2892078681552367e-05,
      "loss": 0.0007,
      "step": 3210
    },
    {
      "epoch": 3.170334928229665,
      "grad_norm": 0.43881699442863464,
      "learning_rate": 1.2759170653907495e-05,
      "loss": 0.0041,
      "step": 3220
    },
    {
      "epoch": 3.172727272727273,
      "grad_norm": 0.10537323355674744,
      "learning_rate": 1.2626262626262628e-05,
      "loss": 0.0009,
      "step": 3230
    },
    {
      "epoch": 3.1751196172248806,
      "grad_norm": 0.00541999377310276,
      "learning_rate": 1.2493354598617758e-05,
      "loss": 0.0004,
      "step": 3240
    },
    {
      "epoch": 3.177511961722488,
      "grad_norm": 0.0035200186539441347,
      "learning_rate": 1.2360446570972887e-05,
      "loss": 0.0007,
      "step": 3250
    },
    {
      "epoch": 3.1799043062200956,
      "grad_norm": 0.003413601079955697,
      "learning_rate": 1.2227538543328019e-05,
      "loss": 0.0004,
      "step": 3260
    },
    {
      "epoch": 3.1822966507177033,
      "grad_norm": 0.003805672749876976,
      "learning_rate": 1.2094630515683148e-05,
      "loss": 0.0004,
      "step": 3270
    },
    {
      "epoch": 3.184688995215311,
      "grad_norm": 0.008552059531211853,
      "learning_rate": 1.1961722488038278e-05,
      "loss": 0.0048,
      "step": 3280
    },
    {
      "epoch": 3.1870813397129187,
      "grad_norm": 0.003063785843551159,
      "learning_rate": 1.182881446039341e-05,
      "loss": 0.0003,
      "step": 3290
    },
    {
      "epoch": 3.1894736842105265,
      "grad_norm": 0.41841933131217957,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.0005,
      "step": 3300
    },
    {
      "epoch": 3.1918660287081337,
      "grad_norm": 0.003419842105358839,
      "learning_rate": 1.1562998405103668e-05,
      "loss": 0.0048,
      "step": 3310
    },
    {
      "epoch": 3.1942583732057415,
      "grad_norm": 0.007587193511426449,
      "learning_rate": 1.14300903774588e-05,
      "loss": 0.0004,
      "step": 3320
    },
    {
      "epoch": 3.196650717703349,
      "grad_norm": 0.014273949898779392,
      "learning_rate": 1.129718234981393e-05,
      "loss": 0.0004,
      "step": 3330
    },
    {
      "epoch": 3.199043062200957,
      "grad_norm": 0.003971030004322529,
      "learning_rate": 1.1164274322169059e-05,
      "loss": 0.0004,
      "step": 3340
    },
    {
      "epoch": 3.2,
      "eval_accuracy": 0.9956584659913169,
      "eval_loss": 0.011571008712053299,
      "eval_runtime": 802.1767,
      "eval_samples_per_second": 0.861,
      "eval_steps_per_second": 0.216,
      "step": 3344
    },
    {
      "epoch": 4.0014354066985645,
      "grad_norm": 0.0065171197056770325,
      "learning_rate": 1.103136629452419e-05,
      "loss": 0.0004,
      "step": 3350
    },
    {
      "epoch": 4.003827751196172,
      "grad_norm": 0.005604379344731569,
      "learning_rate": 1.089845826687932e-05,
      "loss": 0.0013,
      "step": 3360
    },
    {
      "epoch": 4.00622009569378,
      "grad_norm": 0.009186330251395702,
      "learning_rate": 1.0765550239234451e-05,
      "loss": 0.0004,
      "step": 3370
    },
    {
      "epoch": 4.008612440191388,
      "grad_norm": 0.004106089938431978,
      "learning_rate": 1.063264221158958e-05,
      "loss": 0.0013,
      "step": 3380
    },
    {
      "epoch": 4.011004784688995,
      "grad_norm": 0.01519398856908083,
      "learning_rate": 1.049973418394471e-05,
      "loss": 0.0004,
      "step": 3390
    },
    {
      "epoch": 4.013397129186603,
      "grad_norm": 0.025932341814041138,
      "learning_rate": 1.0366826156299842e-05,
      "loss": 0.0005,
      "step": 3400
    },
    {
      "epoch": 4.015789473684211,
      "grad_norm": 0.003742667380720377,
      "learning_rate": 1.023391812865497e-05,
      "loss": 0.0015,
      "step": 3410
    },
    {
      "epoch": 4.0181818181818185,
      "grad_norm": 0.011091819033026695,
      "learning_rate": 1.0101010101010101e-05,
      "loss": 0.0005,
      "step": 3420
    },
    {
      "epoch": 4.020574162679426,
      "grad_norm": 0.14429621398448944,
      "learning_rate": 9.968102073365233e-06,
      "loss": 0.0008,
      "step": 3430
    },
    {
      "epoch": 4.022966507177033,
      "grad_norm": 0.006444338243454695,
      "learning_rate": 9.835194045720362e-06,
      "loss": 0.0004,
      "step": 3440
    },
    {
      "epoch": 4.025358851674641,
      "grad_norm": 0.003463227301836014,
      "learning_rate": 9.702286018075492e-06,
      "loss": 0.0008,
      "step": 3450
    },
    {
      "epoch": 4.0277511961722485,
      "grad_norm": 0.003970850259065628,
      "learning_rate": 9.569377990430622e-06,
      "loss": 0.0003,
      "step": 3460
    },
    {
      "epoch": 4.030143540669856,
      "grad_norm": 0.004473558627068996,
      "learning_rate": 9.436469962785753e-06,
      "loss": 0.0325,
      "step": 3470
    },
    {
      "epoch": 4.032535885167464,
      "grad_norm": 0.0038171063642948866,
      "learning_rate": 9.303561935140883e-06,
      "loss": 0.0004,
      "step": 3480
    },
    {
      "epoch": 4.034928229665072,
      "grad_norm": 0.0045017702504992485,
      "learning_rate": 9.170653907496012e-06,
      "loss": 0.0003,
      "step": 3490
    },
    {
      "epoch": 4.037320574162679,
      "grad_norm": 0.0030059998389333487,
      "learning_rate": 9.037745879851144e-06,
      "loss": 0.0004,
      "step": 3500
    },
    {
      "epoch": 4.039712918660287,
      "grad_norm": 0.03430689498782158,
      "learning_rate": 8.904837852206275e-06,
      "loss": 0.0005,
      "step": 3510
    },
    {
      "epoch": 4.042105263157895,
      "grad_norm": 0.0038995530921965837,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.0004,
      "step": 3520
    },
    {
      "epoch": 4.044497607655503,
      "grad_norm": 0.005899826996028423,
      "learning_rate": 8.639021796916534e-06,
      "loss": 0.0004,
      "step": 3530
    },
    {
      "epoch": 4.04688995215311,
      "grad_norm": 0.01342977024614811,
      "learning_rate": 8.506113769271666e-06,
      "loss": 0.0003,
      "step": 3540
    },
    {
      "epoch": 4.049282296650718,
      "grad_norm": 0.0030721963848918676,
      "learning_rate": 8.373205741626795e-06,
      "loss": 0.0005,
      "step": 3550
    },
    {
      "epoch": 4.051674641148326,
      "grad_norm": 0.017302511259913445,
      "learning_rate": 8.240297713981925e-06,
      "loss": 0.0005,
      "step": 3560
    },
    {
      "epoch": 4.054066985645933,
      "grad_norm": 0.018140502274036407,
      "learning_rate": 8.107389686337054e-06,
      "loss": 0.0003,
      "step": 3570
    },
    {
      "epoch": 4.05645933014354,
      "grad_norm": 0.0029472303576767445,
      "learning_rate": 7.974481658692186e-06,
      "loss": 0.0003,
      "step": 3580
    },
    {
      "epoch": 4.058851674641148,
      "grad_norm": 0.005908080376684666,
      "learning_rate": 7.841573631047315e-06,
      "loss": 0.0003,
      "step": 3590
    },
    {
      "epoch": 4.061244019138756,
      "grad_norm": 0.004262011032551527,
      "learning_rate": 7.708665603402445e-06,
      "loss": 0.0005,
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 4.0636363636363635, |
|
"grad_norm": 0.004099706653505564, |
|
"learning_rate": 7.5757575757575764e-06, |
|
"loss": 0.0003, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 4.066028708133971, |
|
"grad_norm": 0.00331715471111238, |
|
"learning_rate": 7.442849548112707e-06, |
|
"loss": 0.0003, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 4.068421052631579, |
|
"grad_norm": 0.0059309545904397964, |
|
"learning_rate": 7.3099415204678366e-06, |
|
"loss": 0.0003, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 4.070813397129187, |
|
"grad_norm": 0.004388094414025545, |
|
"learning_rate": 7.177033492822967e-06, |
|
"loss": 0.0015, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 4.073205741626794, |
|
"grad_norm": 0.029676349833607674, |
|
"learning_rate": 7.0441254651780975e-06, |
|
"loss": 0.0004, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 4.075598086124402, |
|
"grad_norm": 0.003281336510553956, |
|
"learning_rate": 6.911217437533226e-06, |
|
"loss": 0.0003, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 4.07799043062201, |
|
"grad_norm": 0.02144763059914112, |
|
"learning_rate": 6.778309409888358e-06, |
|
"loss": 0.001, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 4.0803827751196176, |
|
"grad_norm": 0.0047360495664179325, |
|
"learning_rate": 6.645401382243488e-06, |
|
"loss": 0.0004, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 4.082775119617225, |
|
"grad_norm": 0.004007202573120594, |
|
"learning_rate": 6.512493354598619e-06, |
|
"loss": 0.0003, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 4.085167464114832, |
|
"grad_norm": 0.01407600287348032, |
|
"learning_rate": 6.3795853269537475e-06, |
|
"loss": 0.083, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 4.08755980861244, |
|
"grad_norm": 0.004998557735234499, |
|
"learning_rate": 6.246677299308879e-06, |
|
"loss": 0.0004, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 4.089952153110048, |
|
"grad_norm": 0.003679374000057578, |
|
"learning_rate": 6.113769271664009e-06, |
|
"loss": 0.0003, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 4.092344497607655, |
|
"grad_norm": 0.004649085458368063, |
|
"learning_rate": 5.980861244019139e-06, |
|
"loss": 0.0003, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 4.094736842105263, |
|
"grad_norm": 0.009950079955160618, |
|
"learning_rate": 5.8479532163742686e-06, |
|
"loss": 0.0003, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 4.097129186602871, |
|
"grad_norm": 0.002897342899814248, |
|
"learning_rate": 5.7150451887294e-06, |
|
"loss": 0.0004, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 4.0995215311004785, |
|
"grad_norm": 0.002834454644471407, |
|
"learning_rate": 5.5821371610845296e-06, |
|
"loss": 0.0004, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 4.101913875598086, |
|
"grad_norm": 0.0034763123840093613, |
|
"learning_rate": 5.44922913343966e-06, |
|
"loss": 0.0003, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 4.104306220095694, |
|
"grad_norm": 0.01105444598942995, |
|
"learning_rate": 5.31632110579479e-06, |
|
"loss": 0.0004, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 4.106698564593302, |
|
"grad_norm": 0.0030907553154975176, |
|
"learning_rate": 5.183413078149921e-06, |
|
"loss": 0.0019, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 4.109090909090909, |
|
"grad_norm": 0.00505461310967803, |
|
"learning_rate": 5.050505050505051e-06, |
|
"loss": 0.0004, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 4.111483253588517, |
|
"grad_norm": 0.0030872768256813288, |
|
"learning_rate": 4.917597022860181e-06, |
|
"loss": 0.0003, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 4.113875598086125, |
|
"grad_norm": 0.0036562816239893436, |
|
"learning_rate": 4.784688995215311e-06, |
|
"loss": 0.0004, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 4.116267942583732, |
|
"grad_norm": 0.00387106416746974, |
|
"learning_rate": 4.651780967570441e-06, |
|
"loss": 0.0004, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 4.118660287081339, |
|
"grad_norm": 0.00350612192414701, |
|
"learning_rate": 4.518872939925572e-06, |
|
"loss": 0.0004, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 4.121052631578947, |
|
"grad_norm": 0.015147331170737743, |
|
"learning_rate": 4.3859649122807014e-06, |
|
"loss": 0.0003, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 4.123444976076555, |
|
"grad_norm": 0.003980391658842564, |
|
"learning_rate": 4.253056884635833e-06, |
|
"loss": 0.0003, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 4.1258373205741625, |
|
"grad_norm": 0.00292576034553349, |
|
"learning_rate": 4.120148856990962e-06, |
|
"loss": 0.0003, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 4.12822966507177, |
|
"grad_norm": 0.003524401690810919, |
|
"learning_rate": 3.987240829346093e-06, |
|
"loss": 0.0003, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 4.130622009569378, |
|
"grad_norm": 0.003340751165524125, |
|
"learning_rate": 3.8543328017012225e-06, |
|
"loss": 0.0003, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 4.133014354066986, |
|
"grad_norm": 0.00597594678401947, |
|
"learning_rate": 3.7214247740563535e-06, |
|
"loss": 0.0003, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 4.135406698564593, |
|
"grad_norm": 0.40342918038368225, |
|
"learning_rate": 3.5885167464114835e-06, |
|
"loss": 0.0004, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 4.137799043062201, |
|
"grad_norm": 0.0027185771614313126, |
|
"learning_rate": 3.455608718766613e-06, |
|
"loss": 0.0003, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 4.140191387559809, |
|
"grad_norm": 0.003001193981617689, |
|
"learning_rate": 3.322700691121744e-06, |
|
"loss": 0.0003, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 4.142583732057417, |
|
"grad_norm": 0.011745316907763481, |
|
"learning_rate": 3.1897926634768737e-06, |
|
"loss": 0.0004, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 4.144976076555024, |
|
"grad_norm": 0.004574726335704327, |
|
"learning_rate": 3.0568846358320046e-06, |
|
"loss": 0.0003, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 4.147368421052631, |
|
"grad_norm": 0.00624841870740056, |
|
"learning_rate": 2.9239766081871343e-06, |
|
"loss": 0.0003, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 4.149760765550239, |
|
"grad_norm": 0.005558318924158812, |
|
"learning_rate": 2.7910685805422648e-06, |
|
"loss": 0.0003, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 4.152153110047847, |
|
"grad_norm": 0.0035703121684491634, |
|
"learning_rate": 2.658160552897395e-06, |
|
"loss": 0.0003, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 4.154545454545454, |
|
"grad_norm": 0.0034527035895735025, |
|
"learning_rate": 2.5252525252525253e-06, |
|
"loss": 0.0003, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 4.156937799043062, |
|
"grad_norm": 0.0036090209614485502, |
|
"learning_rate": 2.3923444976076554e-06, |
|
"loss": 0.0003, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 4.15933014354067, |
|
"grad_norm": 0.0026591310743242502, |
|
"learning_rate": 2.259436469962786e-06, |
|
"loss": 0.004, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 4.1617224880382775, |
|
"grad_norm": 0.0031338431872427464, |
|
"learning_rate": 2.1265284423179164e-06, |
|
"loss": 0.0003, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 4.164114832535885, |
|
"grad_norm": 0.006377640645951033, |
|
"learning_rate": 1.9936204146730465e-06, |
|
"loss": 0.0003, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 4.166507177033493, |
|
"grad_norm": 0.003080443711951375, |
|
"learning_rate": 1.8607123870281767e-06, |
|
"loss": 0.0003, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 4.168899521531101, |
|
"grad_norm": 0.1149815171957016, |
|
"learning_rate": 1.7278043593833066e-06, |
|
"loss": 0.0004, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 4.171291866028708, |
|
"grad_norm": 0.0024256748147308826, |
|
"learning_rate": 1.5948963317384369e-06, |
|
"loss": 0.1869, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 4.173684210526316, |
|
"grad_norm": 0.007068783510476351, |
|
"learning_rate": 1.4619883040935671e-06, |
|
"loss": 0.0003, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 4.176076555023924, |
|
"grad_norm": 0.034253306686878204, |
|
"learning_rate": 1.3290802764486974e-06, |
|
"loss": 0.0004, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 4.1784688995215316, |
|
"grad_norm": 0.003701072186231613, |
|
"learning_rate": 1.1961722488038277e-06, |
|
"loss": 0.0003, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 4.180861244019138, |
|
"grad_norm": 0.0036447285674512386, |
|
"learning_rate": 1.0632642211589582e-06, |
|
"loss": 0.0003, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 4.183253588516746, |
|
"grad_norm": 0.011764708906412125, |
|
"learning_rate": 9.303561935140884e-07, |
|
"loss": 0.0003, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 4.185645933014354, |
|
"grad_norm": 0.005143681075423956, |
|
"learning_rate": 7.974481658692184e-07, |
|
"loss": 0.0025, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 4.188038277511962, |
|
"grad_norm": 0.005259453784674406, |
|
"learning_rate": 6.645401382243487e-07, |
|
"loss": 0.0003, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 4.190430622009569, |
|
"grad_norm": 0.00764945475384593, |
|
"learning_rate": 5.316321105794791e-07, |
|
"loss": 0.0003, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 4.192822966507177, |
|
"grad_norm": 0.004675131756812334, |
|
"learning_rate": 3.987240829346092e-07, |
|
"loss": 0.0003, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 4.195215311004785, |
|
"grad_norm": 0.003127172589302063, |
|
"learning_rate": 2.6581605528973955e-07, |
|
"loss": 0.0003, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 4.1976076555023925, |
|
"grad_norm": 0.003473317250609398, |
|
"learning_rate": 1.3290802764486977e-07, |
|
"loss": 0.0009, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"grad_norm": 0.006573684047907591, |
|
"learning_rate": 0.0, |
|
"loss": 0.0004, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"eval_accuracy": 0.9985528219971056, |
|
"eval_loss": 0.004572847858071327, |
|
"eval_runtime": 792.1261, |
|
"eval_samples_per_second": 0.872, |
|
"eval_steps_per_second": 0.218, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"step": 4180, |
|
"total_flos": 2.0835872156866314e+19, |
|
"train_loss": 0.2077333271433265, |
|
"train_runtime": 25357.3809, |
|
"train_samples_per_second": 0.659, |
|
"train_steps_per_second": 0.165 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"eval_accuracy": 0.9929988331388565, |
|
"eval_loss": 0.025693388655781746, |
|
"eval_runtime": 985.7172, |
|
"eval_samples_per_second": 0.869, |
|
"eval_steps_per_second": 0.218, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"eval_accuracy": 0.9929988331388565, |
|
"eval_loss": 0.025693388655781746, |
|
"eval_runtime": 981.9678, |
|
"eval_samples_per_second": 0.873, |
|
"eval_steps_per_second": 0.219, |
|
"step": 4180 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4180, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 500, |
|
"total_flos": 2.0835872156866314e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
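
For reference, the object above is the trainer_state.json that the Hugging Face Trainer writes alongside each checkpoint. Below is a minimal sketch of how it can be consumed, not a definitive tool: it assumes only that the JSON above is saved as "trainer_state.json" in the working directory, and it relies on the key layout visible in the log, where training-step entries carry "loss", evaluation entries carry "eval_loss", and the single end-of-training summary carries "train_loss".

import json

# Load the trainer state saved next to a checkpoint
# (assumption: the JSON above lives at ./trainer_state.json).
with open("trainer_state.json") as f:
    state = json.load(f)

# Training-step entries have a "loss" key; evaluation entries have
# "eval_loss"; the end-of-training summary has "train_loss" instead,
# so the two filters below select disjoint sets of entries.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print("best metric:", state["best_metric"])
print("final training loss at step", train_logs[-1]["step"], "=", train_logs[-1]["loss"])
for e in eval_logs:
    print("epoch {:.2f}, step {}: eval_accuracy={:.4f}, eval_loss={:.4f}".format(
        e["epoch"], e["step"], e["eval_accuracy"], e["eval_loss"]))

Run against this file, the loop prints one line per evaluation pass, which makes the gap between the best in-training accuracy (0.9986 at step 4180) and the two final held-out evaluations (0.9930) easy to spot at a glance.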