{ "best_metric": null, "best_model_checkpoint": null, "epoch": 1.0, "eval_steps": 500, "global_step": 625, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0032, "grad_norm": 5.2346367835998535, "learning_rate": 3.1746031746031746e-06, "loss": 5.8721, "step": 2 }, { "epoch": 0.0064, "grad_norm": 6.022397994995117, "learning_rate": 6.349206349206349e-06, "loss": 5.775, "step": 4 }, { "epoch": 0.0096, "grad_norm": 4.282025337219238, "learning_rate": 9.523809523809523e-06, "loss": 5.6253, "step": 6 }, { "epoch": 0.0128, "grad_norm": 3.7371106147766113, "learning_rate": 1.2698412698412699e-05, "loss": 5.1153, "step": 8 }, { "epoch": 0.016, "grad_norm": 8.722667694091797, "learning_rate": 1.5873015873015872e-05, "loss": 5.3364, "step": 10 }, { "epoch": 0.0192, "grad_norm": 4.185220718383789, "learning_rate": 1.9047619047619046e-05, "loss": 5.3102, "step": 12 }, { "epoch": 0.0224, "grad_norm": 3.5511889457702637, "learning_rate": 2.2222222222222223e-05, "loss": 4.9739, "step": 14 }, { "epoch": 0.0256, "grad_norm": 3.152355194091797, "learning_rate": 2.5396825396825397e-05, "loss": 4.5878, "step": 16 }, { "epoch": 0.0288, "grad_norm": 2.925100326538086, "learning_rate": 2.857142857142857e-05, "loss": 4.7562, "step": 18 }, { "epoch": 0.032, "grad_norm": 3.192331552505493, "learning_rate": 3.1746031746031745e-05, "loss": 4.6627, "step": 20 }, { "epoch": 0.0352, "grad_norm": 2.958089590072632, "learning_rate": 3.492063492063492e-05, "loss": 4.6227, "step": 22 }, { "epoch": 0.0384, "grad_norm": 4.2280592918396, "learning_rate": 3.809523809523809e-05, "loss": 4.4283, "step": 24 }, { "epoch": 0.0416, "grad_norm": 2.748825788497925, "learning_rate": 4.126984126984127e-05, "loss": 4.2503, "step": 26 }, { "epoch": 0.0448, "grad_norm": 2.705292224884033, "learning_rate": 4.4444444444444447e-05, "loss": 4.4007, "step": 28 }, { "epoch": 0.048, "grad_norm": 2.5890371799468994, "learning_rate": 4.761904761904762e-05, "loss": 4.3219, "step": 30 }, { "epoch": 0.0512, "grad_norm": 2.485609769821167, "learning_rate": 5.0793650793650794e-05, "loss": 4.2298, "step": 32 }, { "epoch": 0.0544, "grad_norm": 2.5438082218170166, "learning_rate": 5.396825396825397e-05, "loss": 4.2862, "step": 34 }, { "epoch": 0.0576, "grad_norm": 2.5402183532714844, "learning_rate": 5.714285714285714e-05, "loss": 4.0492, "step": 36 }, { "epoch": 0.0608, "grad_norm": 2.5497469902038574, "learning_rate": 6.0317460317460316e-05, "loss": 4.0702, "step": 38 }, { "epoch": 0.064, "grad_norm": 2.3628146648406982, "learning_rate": 6.349206349206349e-05, "loss": 3.9895, "step": 40 }, { "epoch": 0.0672, "grad_norm": 2.201223611831665, "learning_rate": 6.666666666666667e-05, "loss": 3.9743, "step": 42 }, { "epoch": 0.0704, "grad_norm": 2.448514938354492, "learning_rate": 6.984126984126984e-05, "loss": 4.076, "step": 44 }, { "epoch": 0.0736, "grad_norm": 2.386176824569702, "learning_rate": 7.301587301587302e-05, "loss": 3.912, "step": 46 }, { "epoch": 0.0768, "grad_norm": 2.2783148288726807, "learning_rate": 7.619047619047618e-05, "loss": 3.8678, "step": 48 }, { "epoch": 0.08, "grad_norm": 2.472463369369507, "learning_rate": 7.936507936507937e-05, "loss": 3.9058, "step": 50 }, { "epoch": 0.0832, "grad_norm": 2.548892021179199, "learning_rate": 8.253968253968255e-05, "loss": 3.7761, "step": 52 }, { "epoch": 0.0864, "grad_norm": 2.4397833347320557, "learning_rate": 8.571428571428571e-05, "loss": 3.9437, "step": 54 }, { "epoch": 0.0896, "grad_norm": 2.532597303390503, 
"learning_rate": 8.888888888888889e-05, "loss": 3.8437, "step": 56 }, { "epoch": 0.0928, "grad_norm": 2.45221209526062, "learning_rate": 9.206349206349206e-05, "loss": 3.8429, "step": 58 }, { "epoch": 0.096, "grad_norm": 2.645132064819336, "learning_rate": 9.523809523809524e-05, "loss": 3.7965, "step": 60 }, { "epoch": 0.0992, "grad_norm": 2.385370969772339, "learning_rate": 9.841269841269841e-05, "loss": 3.7968, "step": 62 }, { "epoch": 0.1024, "grad_norm": 2.2052712440490723, "learning_rate": 9.99998248790669e-05, "loss": 3.7794, "step": 64 }, { "epoch": 0.1056, "grad_norm": 2.3219361305236816, "learning_rate": 9.999842391896222e-05, "loss": 3.6841, "step": 66 }, { "epoch": 0.1088, "grad_norm": 2.5465614795684814, "learning_rate": 9.999562203800676e-05, "loss": 3.5312, "step": 68 }, { "epoch": 0.112, "grad_norm": 2.497755527496338, "learning_rate": 9.999141931470729e-05, "loss": 3.6803, "step": 70 }, { "epoch": 0.1152, "grad_norm": 2.157752752304077, "learning_rate": 9.998581586682116e-05, "loss": 3.5856, "step": 72 }, { "epoch": 0.1184, "grad_norm": 2.1215410232543945, "learning_rate": 9.997881185135307e-05, "loss": 3.5429, "step": 74 }, { "epoch": 0.1216, "grad_norm": 3.784062385559082, "learning_rate": 9.997040746455062e-05, "loss": 3.6365, "step": 76 }, { "epoch": 0.1248, "grad_norm": 2.2366509437561035, "learning_rate": 9.996060294189887e-05, "loss": 3.5129, "step": 78 }, { "epoch": 0.128, "grad_norm": 2.240877628326416, "learning_rate": 9.994939855811362e-05, "loss": 3.6185, "step": 80 }, { "epoch": 0.1312, "grad_norm": 2.176579475402832, "learning_rate": 9.993679462713395e-05, "loss": 3.4999, "step": 82 }, { "epoch": 0.1344, "grad_norm": 2.3514060974121094, "learning_rate": 9.992279150211314e-05, "loss": 3.466, "step": 84 }, { "epoch": 0.1376, "grad_norm": 2.2598917484283447, "learning_rate": 9.990738957540896e-05, "loss": 3.6413, "step": 86 }, { "epoch": 0.1408, "grad_norm": 2.0476276874542236, "learning_rate": 9.989058927857263e-05, "loss": 3.5978, "step": 88 }, { "epoch": 0.144, "grad_norm": 2.1762521266937256, "learning_rate": 9.987239108233668e-05, "loss": 3.6667, "step": 90 }, { "epoch": 0.1472, "grad_norm": 2.274958848953247, "learning_rate": 9.985279549660185e-05, "loss": 3.6054, "step": 92 }, { "epoch": 0.1504, "grad_norm": 2.249992847442627, "learning_rate": 9.983180307042274e-05, "loss": 3.6287, "step": 94 }, { "epoch": 0.1536, "grad_norm": 2.23592209815979, "learning_rate": 9.980941439199246e-05, "loss": 3.5967, "step": 96 }, { "epoch": 0.1568, "grad_norm": 2.1270549297332764, "learning_rate": 9.97856300886261e-05, "loss": 3.4583, "step": 98 }, { "epoch": 0.16, "grad_norm": 2.140577554702759, "learning_rate": 9.976045082674319e-05, "loss": 3.4091, "step": 100 }, { "epoch": 0.1632, "grad_norm": 2.1698827743530273, "learning_rate": 9.973387731184902e-05, "loss": 3.5535, "step": 102 }, { "epoch": 0.1664, "grad_norm": 2.188966751098633, "learning_rate": 9.97059102885149e-05, "loss": 3.4673, "step": 104 }, { "epoch": 0.1696, "grad_norm": 2.015054702758789, "learning_rate": 9.967655054035727e-05, "loss": 3.5025, "step": 106 }, { "epoch": 0.1728, "grad_norm": 2.42785906791687, "learning_rate": 9.964579889001569e-05, "loss": 3.3789, "step": 108 }, { "epoch": 0.176, "grad_norm": 3.828245162963867, "learning_rate": 9.961365619912989e-05, "loss": 3.3673, "step": 110 }, { "epoch": 0.1792, "grad_norm": 2.0954813957214355, "learning_rate": 9.95801233683156e-05, "loss": 3.501, "step": 112 }, { "epoch": 0.1824, "grad_norm": 2.17081618309021, "learning_rate": 9.954520133713924e-05, 
"loss": 3.3926, "step": 114 }, { "epoch": 0.1856, "grad_norm": 2.04852557182312, "learning_rate": 9.950889108409172e-05, "loss": 3.391, "step": 116 }, { "epoch": 0.1888, "grad_norm": 2.426689386367798, "learning_rate": 9.947119362656092e-05, "loss": 3.4257, "step": 118 }, { "epoch": 0.192, "grad_norm": 3.680421829223633, "learning_rate": 9.94321100208032e-05, "loss": 3.2982, "step": 120 }, { "epoch": 0.1952, "grad_norm": 2.1409482955932617, "learning_rate": 9.939164136191384e-05, "loss": 3.4619, "step": 122 }, { "epoch": 0.1984, "grad_norm": 1.9399126768112183, "learning_rate": 9.934978878379636e-05, "loss": 3.3362, "step": 124 }, { "epoch": 0.2016, "grad_norm": 1.954500675201416, "learning_rate": 9.930655345913071e-05, "loss": 3.1957, "step": 126 }, { "epoch": 0.2048, "grad_norm": 2.1550300121307373, "learning_rate": 9.926193659934043e-05, "loss": 3.4578, "step": 128 }, { "epoch": 0.208, "grad_norm": 2.44838547706604, "learning_rate": 9.921593945455869e-05, "loss": 3.3975, "step": 130 }, { "epoch": 0.2112, "grad_norm": 2.087881565093994, "learning_rate": 9.916856331359335e-05, "loss": 3.3682, "step": 132 }, { "epoch": 0.2144, "grad_norm": 2.253127336502075, "learning_rate": 9.911980950389067e-05, "loss": 3.2451, "step": 134 }, { "epoch": 0.2176, "grad_norm": 2.3103411197662354, "learning_rate": 9.906967939149831e-05, "loss": 3.3999, "step": 136 }, { "epoch": 0.2208, "grad_norm": 2.2471373081207275, "learning_rate": 9.901817438102695e-05, "loss": 3.2925, "step": 138 }, { "epoch": 0.224, "grad_norm": 2.1333861351013184, "learning_rate": 9.896529591561093e-05, "loss": 3.3604, "step": 140 }, { "epoch": 0.2272, "grad_norm": 1.914016842842102, "learning_rate": 9.891104547686782e-05, "loss": 3.1728, "step": 142 }, { "epoch": 0.2304, "grad_norm": 2.061126232147217, "learning_rate": 9.8855424584857e-05, "loss": 3.2384, "step": 144 }, { "epoch": 0.2336, "grad_norm": 2.153684139251709, "learning_rate": 9.879843479803691e-05, "loss": 3.2965, "step": 146 }, { "epoch": 0.2368, "grad_norm": 2.109224557876587, "learning_rate": 9.874007771322151e-05, "loss": 3.2568, "step": 148 }, { "epoch": 0.24, "grad_norm": 3.588501214981079, "learning_rate": 9.868035496553546e-05, "loss": 3.223, "step": 150 }, { "epoch": 0.2432, "grad_norm": 2.2085981369018555, "learning_rate": 9.86192682283684e-05, "loss": 3.3506, "step": 152 }, { "epoch": 0.2464, "grad_norm": 1.9722400903701782, "learning_rate": 9.855681921332793e-05, "loss": 3.2184, "step": 154 }, { "epoch": 0.2496, "grad_norm": 2.0837275981903076, "learning_rate": 9.849300967019175e-05, "loss": 3.2083, "step": 156 }, { "epoch": 0.2528, "grad_norm": 1.799812912940979, "learning_rate": 9.84278413868586e-05, "loss": 3.2038, "step": 158 }, { "epoch": 0.256, "grad_norm": 2.0242528915405273, "learning_rate": 9.836131618929819e-05, "loss": 3.3193, "step": 160 }, { "epoch": 0.2592, "grad_norm": 1.948160171508789, "learning_rate": 9.82934359415e-05, "loss": 3.2132, "step": 162 }, { "epoch": 0.2624, "grad_norm": 2.1192405223846436, "learning_rate": 9.822420254542108e-05, "loss": 3.2444, "step": 164 }, { "epoch": 0.2656, "grad_norm": 1.9564788341522217, "learning_rate": 9.815361794093272e-05, "loss": 3.105, "step": 166 }, { "epoch": 0.2688, "grad_norm": 2.1899731159210205, "learning_rate": 9.808168410576617e-05, "loss": 3.1558, "step": 168 }, { "epoch": 0.272, "grad_norm": 2.0988922119140625, "learning_rate": 9.800840305545715e-05, "loss": 3.2485, "step": 170 }, { "epoch": 0.2752, "grad_norm": 2.2193591594696045, "learning_rate": 9.793377684328939e-05, "loss": 3.296, "step": 
172 }, { "epoch": 0.2784, "grad_norm": 2.007413864135742, "learning_rate": 9.785780756023714e-05, "loss": 3.1287, "step": 174 }, { "epoch": 0.2816, "grad_norm": 1.9749376773834229, "learning_rate": 9.778049733490655e-05, "loss": 3.0076, "step": 176 }, { "epoch": 0.2848, "grad_norm": 2.059288263320923, "learning_rate": 9.770184833347606e-05, "loss": 3.1663, "step": 178 }, { "epoch": 0.288, "grad_norm": 1.96829354763031, "learning_rate": 9.762186275963563e-05, "loss": 3.2163, "step": 180 }, { "epoch": 0.2912, "grad_norm": 2.0208017826080322, "learning_rate": 9.754054285452506e-05, "loss": 3.2242, "step": 182 }, { "epoch": 0.2944, "grad_norm": 1.88913094997406, "learning_rate": 9.745789089667121e-05, "loss": 3.0072, "step": 184 }, { "epoch": 0.2976, "grad_norm": 1.996383547782898, "learning_rate": 9.737390920192408e-05, "loss": 3.2108, "step": 186 }, { "epoch": 0.3008, "grad_norm": 2.0667550563812256, "learning_rate": 9.7288600123392e-05, "loss": 3.108, "step": 188 }, { "epoch": 0.304, "grad_norm": 1.9526984691619873, "learning_rate": 9.720196605137565e-05, "loss": 3.067, "step": 190 }, { "epoch": 0.3072, "grad_norm": 2.0643012523651123, "learning_rate": 9.71140094133011e-05, "loss": 3.2143, "step": 192 }, { "epoch": 0.3104, "grad_norm": 2.187326431274414, "learning_rate": 9.702473267365182e-05, "loss": 3.1007, "step": 194 }, { "epoch": 0.3136, "grad_norm": 2.1299145221710205, "learning_rate": 9.693413833389956e-05, "loss": 3.1868, "step": 196 }, { "epoch": 0.3168, "grad_norm": 1.7644037008285522, "learning_rate": 9.684222893243431e-05, "loss": 2.9406, "step": 198 }, { "epoch": 0.32, "grad_norm": 2.112617015838623, "learning_rate": 9.674900704449324e-05, "loss": 3.1198, "step": 200 }, { "epoch": 0.3232, "grad_norm": 1.8327059745788574, "learning_rate": 9.665447528208836e-05, "loss": 3.1278, "step": 202 }, { "epoch": 0.3264, "grad_norm": 1.8569375276565552, "learning_rate": 9.655863629393351e-05, "loss": 3.2069, "step": 204 }, { "epoch": 0.3296, "grad_norm": 1.7960104942321777, "learning_rate": 9.64614927653701e-05, "loss": 3.0708, "step": 206 }, { "epoch": 0.3328, "grad_norm": 1.888593316078186, "learning_rate": 9.636304741829181e-05, "loss": 3.1365, "step": 208 }, { "epoch": 0.336, "grad_norm": 1.8564034700393677, "learning_rate": 9.626330301106837e-05, "loss": 3.0059, "step": 210 }, { "epoch": 0.3392, "grad_norm": 1.9591517448425293, "learning_rate": 9.616226233846828e-05, "loss": 2.9778, "step": 212 }, { "epoch": 0.3424, "grad_norm": 2.025777816772461, "learning_rate": 9.605992823158046e-05, "loss": 3.0969, "step": 214 }, { "epoch": 0.3456, "grad_norm": 1.8839352130889893, "learning_rate": 9.595630355773501e-05, "loss": 3.1342, "step": 216 }, { "epoch": 0.3488, "grad_norm": 5.388115882873535, "learning_rate": 9.585139122042274e-05, "loss": 3.1961, "step": 218 }, { "epoch": 0.352, "grad_norm": 2.056678533554077, "learning_rate": 9.574519415921396e-05, "loss": 3.1183, "step": 220 }, { "epoch": 0.3552, "grad_norm": 3.0575530529022217, "learning_rate": 9.5637715349676e-05, "loss": 3.1446, "step": 222 }, { "epoch": 0.3584, "grad_norm": 1.8165247440338135, "learning_rate": 9.552895780328987e-05, "loss": 3.0338, "step": 224 }, { "epoch": 0.3616, "grad_norm": 1.845023512840271, "learning_rate": 9.541892456736595e-05, "loss": 3.194, "step": 226 }, { "epoch": 0.3648, "grad_norm": 1.9389755725860596, "learning_rate": 9.530761872495849e-05, "loss": 3.0054, "step": 228 }, { "epoch": 0.368, "grad_norm": 1.9471769332885742, "learning_rate": 9.519504339477932e-05, "loss": 3.1499, "step": 230 }, { "epoch": 
0.3712, "grad_norm": 1.9367070198059082, "learning_rate": 9.508120173111039e-05, "loss": 3.0068, "step": 232 }, { "epoch": 0.3744, "grad_norm": 2.018630027770996, "learning_rate": 9.496609692371548e-05, "loss": 3.1722, "step": 234 }, { "epoch": 0.3776, "grad_norm": 2.0086734294891357, "learning_rate": 9.484973219775074e-05, "loss": 3.2773, "step": 236 }, { "epoch": 0.3808, "grad_norm": 1.9771322011947632, "learning_rate": 9.473211081367436e-05, "loss": 3.0502, "step": 238 }, { "epoch": 0.384, "grad_norm": 1.9231762886047363, "learning_rate": 9.46132360671552e-05, "loss": 3.0415, "step": 240 }, { "epoch": 0.3872, "grad_norm": 1.924302101135254, "learning_rate": 9.449311128898049e-05, "loss": 3.0794, "step": 242 }, { "epoch": 0.3904, "grad_norm": 1.967323899269104, "learning_rate": 9.437173984496246e-05, "loss": 3.1527, "step": 244 }, { "epoch": 0.3936, "grad_norm": 1.9681285619735718, "learning_rate": 9.424912513584401e-05, "loss": 3.1767, "step": 246 }, { "epoch": 0.3968, "grad_norm": 1.908687710762024, "learning_rate": 9.412527059720352e-05, "loss": 2.9755, "step": 248 }, { "epoch": 0.4, "grad_norm": 1.9891773462295532, "learning_rate": 9.400017969935848e-05, "loss": 2.9644, "step": 250 }, { "epoch": 0.4032, "grad_norm": 1.8200337886810303, "learning_rate": 9.387385594726829e-05, "loss": 3.0334, "step": 252 }, { "epoch": 0.4064, "grad_norm": 1.9553104639053345, "learning_rate": 9.374630288043614e-05, "loss": 3.1011, "step": 254 }, { "epoch": 0.4096, "grad_norm": 3.5282905101776123, "learning_rate": 9.361752407280965e-05, "loss": 2.9859, "step": 256 }, { "epoch": 0.4128, "grad_norm": 1.9712797403335571, "learning_rate": 9.348752313268093e-05, "loss": 2.9472, "step": 258 }, { "epoch": 0.416, "grad_norm": 1.927635908126831, "learning_rate": 9.335630370258533e-05, "loss": 3.1396, "step": 260 }, { "epoch": 0.4192, "grad_norm": 1.9233123064041138, "learning_rate": 9.322386945919946e-05, "loss": 3.1889, "step": 262 }, { "epoch": 0.4224, "grad_norm": 1.870160460472107, "learning_rate": 9.309022411323816e-05, "loss": 3.0916, "step": 264 }, { "epoch": 0.4256, "grad_norm": 1.7860538959503174, "learning_rate": 9.295537140935049e-05, "loss": 3.1584, "step": 266 }, { "epoch": 0.4288, "grad_norm": 1.723097801208496, "learning_rate": 9.281931512601485e-05, "loss": 2.8587, "step": 268 }, { "epoch": 0.432, "grad_norm": 1.976706862449646, "learning_rate": 9.26820590754331e-05, "loss": 2.9942, "step": 270 }, { "epoch": 0.4352, "grad_norm": 1.8147152662277222, "learning_rate": 9.254360710342371e-05, "loss": 3.087, "step": 272 }, { "epoch": 0.4384, "grad_norm": 1.8946576118469238, "learning_rate": 9.240396308931407e-05, "loss": 3.0101, "step": 274 }, { "epoch": 0.4416, "grad_norm": 1.8432953357696533, "learning_rate": 9.226313094583173e-05, "loss": 3.0351, "step": 276 }, { "epoch": 0.4448, "grad_norm": 1.8600575923919678, "learning_rate": 9.212111461899479e-05, "loss": 3.0027, "step": 278 }, { "epoch": 0.448, "grad_norm": 1.7912688255310059, "learning_rate": 9.197791808800135e-05, "loss": 3.0568, "step": 280 }, { "epoch": 0.4512, "grad_norm": 2.005932569503784, "learning_rate": 9.183354536511803e-05, "loss": 2.9778, "step": 282 }, { "epoch": 0.4544, "grad_norm": 1.8989531993865967, "learning_rate": 9.168800049556747e-05, "loss": 2.9711, "step": 284 }, { "epoch": 0.4576, "grad_norm": 1.7888331413269043, "learning_rate": 9.154128755741509e-05, "loss": 2.9901, "step": 286 }, { "epoch": 0.4608, "grad_norm": 1.9094816446304321, "learning_rate": 9.139341066145472e-05, "loss": 3.0248, "step": 288 }, { "epoch": 0.464, 
"grad_norm": 1.7940737009048462, "learning_rate": 9.124437395109353e-05, "loss": 3.0141, "step": 290 }, { "epoch": 0.4672, "grad_norm": 1.7626845836639404, "learning_rate": 9.109418160223585e-05, "loss": 2.9531, "step": 292 }, { "epoch": 0.4704, "grad_norm": 1.9440515041351318, "learning_rate": 9.094283782316619e-05, "loss": 2.9732, "step": 294 }, { "epoch": 0.4736, "grad_norm": 1.7515082359313965, "learning_rate": 9.079034685443133e-05, "loss": 2.8, "step": 296 }, { "epoch": 0.4768, "grad_norm": 1.8595532178878784, "learning_rate": 9.063671296872149e-05, "loss": 2.9873, "step": 298 }, { "epoch": 0.48, "grad_norm": 1.9954842329025269, "learning_rate": 9.048194047075069e-05, "loss": 2.9793, "step": 300 }, { "epoch": 0.4832, "grad_norm": 1.8819364309310913, "learning_rate": 9.032603369713596e-05, "loss": 2.8904, "step": 302 }, { "epoch": 0.4864, "grad_norm": 1.75027596950531, "learning_rate": 9.016899701627604e-05, "loss": 2.9811, "step": 304 }, { "epoch": 0.4896, "grad_norm": 1.9617975950241089, "learning_rate": 9.00108348282288e-05, "loss": 3.0418, "step": 306 }, { "epoch": 0.4928, "grad_norm": 1.8097938299179077, "learning_rate": 8.985155156458811e-05, "loss": 3.0068, "step": 308 }, { "epoch": 0.496, "grad_norm": 2.008989095687866, "learning_rate": 8.969115168835954e-05, "loss": 2.8913, "step": 310 }, { "epoch": 0.4992, "grad_norm": 1.8119149208068848, "learning_rate": 8.952963969383538e-05, "loss": 3.004, "step": 312 }, { "epoch": 0.5024, "grad_norm": 1.9150359630584717, "learning_rate": 8.93670201064687e-05, "loss": 2.9404, "step": 314 }, { "epoch": 0.5056, "grad_norm": 1.8803378343582153, "learning_rate": 8.920329748274649e-05, "loss": 2.9279, "step": 316 }, { "epoch": 0.5088, "grad_norm": 1.7308014631271362, "learning_rate": 8.903847641006218e-05, "loss": 2.9482, "step": 318 }, { "epoch": 0.512, "grad_norm": 2.0764575004577637, "learning_rate": 8.887256150658684e-05, "loss": 2.9425, "step": 320 }, { "epoch": 0.5152, "grad_norm": 1.8219692707061768, "learning_rate": 8.870555742113998e-05, "loss": 3.0176, "step": 322 }, { "epoch": 0.5184, "grad_norm": 1.8642444610595703, "learning_rate": 8.85374688330592e-05, "loss": 3.0474, "step": 324 }, { "epoch": 0.5216, "grad_norm": 1.9277112483978271, "learning_rate": 8.836830045206911e-05, "loss": 2.9487, "step": 326 }, { "epoch": 0.5248, "grad_norm": 1.793232798576355, "learning_rate": 8.81980570181494e-05, "loss": 2.8907, "step": 328 }, { "epoch": 0.528, "grad_norm": 1.8513329029083252, "learning_rate": 8.802674330140192e-05, "loss": 2.9645, "step": 330 }, { "epoch": 0.5312, "grad_norm": 1.7978984117507935, "learning_rate": 8.785436410191714e-05, "loss": 2.9939, "step": 332 }, { "epoch": 0.5344, "grad_norm": 1.7157683372497559, "learning_rate": 8.76809242496396e-05, "loss": 2.8079, "step": 334 }, { "epoch": 0.5376, "grad_norm": 1.7450584173202515, "learning_rate": 8.750642860423262e-05, "loss": 2.9477, "step": 336 }, { "epoch": 0.5408, "grad_norm": 1.812904715538025, "learning_rate": 8.733088205494205e-05, "loss": 2.9842, "step": 338 }, { "epoch": 0.544, "grad_norm": 1.878509759902954, "learning_rate": 8.715428952045936e-05, "loss": 2.8992, "step": 340 }, { "epoch": 0.5472, "grad_norm": 1.8090356588363647, "learning_rate": 8.697665594878382e-05, "loss": 2.9507, "step": 342 }, { "epoch": 0.5504, "grad_norm": 1.8601405620574951, "learning_rate": 8.679798631708375e-05, "loss": 2.8263, "step": 344 }, { "epoch": 0.5536, "grad_norm": 1.8406038284301758, "learning_rate": 8.661828563155727e-05, "loss": 2.8991, "step": 346 }, { "epoch": 0.5568, 
"grad_norm": 1.7687346935272217, "learning_rate": 8.643755892729179e-05, "loss": 2.8437, "step": 348 }, { "epoch": 0.56, "grad_norm": 1.9318656921386719, "learning_rate": 8.625581126812312e-05, "loss": 3.0261, "step": 350 }, { "epoch": 0.5632, "grad_norm": 1.8095970153808594, "learning_rate": 8.607304774649349e-05, "loss": 2.9269, "step": 352 }, { "epoch": 0.5664, "grad_norm": 1.8424136638641357, "learning_rate": 8.588927348330887e-05, "loss": 2.7917, "step": 354 }, { "epoch": 0.5696, "grad_norm": 1.9472522735595703, "learning_rate": 8.57044936277955e-05, "loss": 2.7557, "step": 356 }, { "epoch": 0.5728, "grad_norm": 1.8774663209915161, "learning_rate": 8.551871335735565e-05, "loss": 2.8426, "step": 358 }, { "epoch": 0.576, "grad_norm": 2.0363616943359375, "learning_rate": 8.533193787742251e-05, "loss": 2.8605, "step": 360 }, { "epoch": 0.5792, "grad_norm": 1.802148699760437, "learning_rate": 8.51441724213143e-05, "loss": 2.8999, "step": 362 }, { "epoch": 0.5824, "grad_norm": 1.9002522230148315, "learning_rate": 8.495542225008771e-05, "loss": 2.854, "step": 364 }, { "epoch": 0.5856, "grad_norm": 1.715409755706787, "learning_rate": 8.476569265239046e-05, "loss": 2.8574, "step": 366 }, { "epoch": 0.5888, "grad_norm": 1.9336328506469727, "learning_rate": 8.457498894431311e-05, "loss": 2.7513, "step": 368 }, { "epoch": 0.592, "grad_norm": 1.8150614500045776, "learning_rate": 8.438331646924013e-05, "loss": 2.8648, "step": 370 }, { "epoch": 0.5952, "grad_norm": 1.745450735092163, "learning_rate": 8.419068059770011e-05, "loss": 2.8102, "step": 372 }, { "epoch": 0.5984, "grad_norm": 1.7649202346801758, "learning_rate": 8.399708672721539e-05, "loss": 2.943, "step": 374 }, { "epoch": 0.6016, "grad_norm": 1.9029461145401, "learning_rate": 8.380254028215076e-05, "loss": 2.9549, "step": 376 }, { "epoch": 0.6048, "grad_norm": 1.6569948196411133, "learning_rate": 8.360704671356145e-05, "loss": 2.762, "step": 378 }, { "epoch": 0.608, "grad_norm": 1.8082654476165771, "learning_rate": 8.341061149904045e-05, "loss": 2.8673, "step": 380 }, { "epoch": 0.6112, "grad_norm": 2.0130746364593506, "learning_rate": 8.321324014256504e-05, "loss": 2.8208, "step": 382 }, { "epoch": 0.6144, "grad_norm": 1.9243208169937134, "learning_rate": 8.30149381743425e-05, "loss": 2.8175, "step": 384 }, { "epoch": 0.6176, "grad_norm": 1.7602218389511108, "learning_rate": 8.28157111506552e-05, "loss": 2.8133, "step": 386 }, { "epoch": 0.6208, "grad_norm": 1.9011287689208984, "learning_rate": 8.261556465370493e-05, "loss": 2.915, "step": 388 }, { "epoch": 0.624, "grad_norm": 1.8260935544967651, "learning_rate": 8.24145042914565e-05, "loss": 2.7879, "step": 390 }, { "epoch": 0.6272, "grad_norm": 2.0104498863220215, "learning_rate": 8.221253569748055e-05, "loss": 2.9628, "step": 392 }, { "epoch": 0.6304, "grad_norm": 1.821583867073059, "learning_rate": 8.200966453079575e-05, "loss": 2.8391, "step": 394 }, { "epoch": 0.6336, "grad_norm": 1.8291980028152466, "learning_rate": 8.180589647571023e-05, "loss": 2.8921, "step": 396 }, { "epoch": 0.6368, "grad_norm": 1.8733659982681274, "learning_rate": 8.16012372416623e-05, "loss": 2.8988, "step": 398 }, { "epoch": 0.64, "grad_norm": 1.9064126014709473, "learning_rate": 8.13956925630605e-05, "loss": 2.8047, "step": 400 }, { "epoch": 0.6432, "grad_norm": 1.7694967985153198, "learning_rate": 8.118926819912287e-05, "loss": 2.8175, "step": 402 }, { "epoch": 0.6464, "grad_norm": 1.9278019666671753, "learning_rate": 8.098196993371565e-05, "loss": 2.7233, "step": 404 }, { "epoch": 0.6496, "grad_norm": 
1.7496165037155151, "learning_rate": 8.077380357519115e-05, "loss": 2.8196, "step": 406 }, { "epoch": 0.6528, "grad_norm": 1.8212895393371582, "learning_rate": 8.056477495622511e-05, "loss": 2.8749, "step": 408 }, { "epoch": 0.656, "grad_norm": 1.8443467617034912, "learning_rate": 8.035488993365312e-05, "loss": 2.8373, "step": 410 }, { "epoch": 0.6592, "grad_norm": 1.804992437362671, "learning_rate": 8.014415438830667e-05, "loss": 2.8248, "step": 412 }, { "epoch": 0.6624, "grad_norm": 1.7744520902633667, "learning_rate": 7.993257422484826e-05, "loss": 2.8403, "step": 414 }, { "epoch": 0.6656, "grad_norm": 1.9854934215545654, "learning_rate": 7.972015537160602e-05, "loss": 2.9588, "step": 416 }, { "epoch": 0.6688, "grad_norm": 1.7830610275268555, "learning_rate": 7.950690378040758e-05, "loss": 2.7737, "step": 418 }, { "epoch": 0.672, "grad_norm": 1.8114231824874878, "learning_rate": 7.929282542641325e-05, "loss": 2.7003, "step": 420 }, { "epoch": 0.6752, "grad_norm": 1.931700348854065, "learning_rate": 7.907792630794876e-05, "loss": 2.8088, "step": 422 }, { "epoch": 0.6784, "grad_norm": 1.8684518337249756, "learning_rate": 7.886221244633703e-05, "loss": 2.878, "step": 424 }, { "epoch": 0.6816, "grad_norm": 1.8997987508773804, "learning_rate": 7.864568988572947e-05, "loss": 2.9374, "step": 426 }, { "epoch": 0.6848, "grad_norm": 1.7682809829711914, "learning_rate": 7.842836469293673e-05, "loss": 2.7694, "step": 428 }, { "epoch": 0.688, "grad_norm": 1.8019146919250488, "learning_rate": 7.821024295725865e-05, "loss": 2.8153, "step": 430 }, { "epoch": 0.6912, "grad_norm": 1.8119292259216309, "learning_rate": 7.79913307903136e-05, "loss": 2.8072, "step": 432 }, { "epoch": 0.6944, "grad_norm": 1.8016608953475952, "learning_rate": 7.777163432586734e-05, "loss": 2.7276, "step": 434 }, { "epoch": 0.6976, "grad_norm": 1.8160144090652466, "learning_rate": 7.755115971966104e-05, "loss": 2.8539, "step": 436 }, { "epoch": 0.7008, "grad_norm": 1.825020670890808, "learning_rate": 7.732991314923891e-05, "loss": 2.7796, "step": 438 }, { "epoch": 0.704, "grad_norm": 1.7632222175598145, "learning_rate": 7.710790081377502e-05, "loss": 2.7914, "step": 440 }, { "epoch": 0.7072, "grad_norm": 1.8498951196670532, "learning_rate": 7.688512893389964e-05, "loss": 2.6861, "step": 442 }, { "epoch": 0.7104, "grad_norm": 2.0129451751708984, "learning_rate": 7.666160375152496e-05, "loss": 2.8217, "step": 444 }, { "epoch": 0.7136, "grad_norm": 1.780062198638916, "learning_rate": 7.643733152967019e-05, "loss": 2.8554, "step": 446 }, { "epoch": 0.7168, "grad_norm": 1.726577877998352, "learning_rate": 7.621231855228604e-05, "loss": 2.831, "step": 448 }, { "epoch": 0.72, "grad_norm": 1.7651227712631226, "learning_rate": 7.598657112407865e-05, "loss": 2.8193, "step": 450 }, { "epoch": 0.7232, "grad_norm": 1.7961740493774414, "learning_rate": 7.576009557033304e-05, "loss": 2.8149, "step": 452 }, { "epoch": 0.7264, "grad_norm": 1.813366174697876, "learning_rate": 7.553289823673568e-05, "loss": 2.855, "step": 454 }, { "epoch": 0.7296, "grad_norm": 1.8143000602722168, "learning_rate": 7.530498548919693e-05, "loss": 2.8651, "step": 456 }, { "epoch": 0.7328, "grad_norm": 1.7585805654525757, "learning_rate": 7.507636371367246e-05, "loss": 3.0031, "step": 458 }, { "epoch": 0.736, "grad_norm": 2.2777135372161865, "learning_rate": 7.484703931598445e-05, "loss": 2.8548, "step": 460 }, { "epoch": 0.7392, "grad_norm": 1.8288154602050781, "learning_rate": 7.461701872164204e-05, "loss": 2.7425, "step": 462 }, { "epoch": 0.7424, "grad_norm": 
1.8734841346740723, "learning_rate": 7.438630837566133e-05, "loss": 2.8703, "step": 464 }, { "epoch": 0.7456, "grad_norm": 1.742242455482483, "learning_rate": 7.415491474238475e-05, "loss": 2.772, "step": 466 }, { "epoch": 0.7488, "grad_norm": 1.7874287366867065, "learning_rate": 7.39228443053e-05, "loss": 2.7379, "step": 468 }, { "epoch": 0.752, "grad_norm": 1.8021794557571411, "learning_rate": 7.369010356685833e-05, "loss": 2.9262, "step": 470 }, { "epoch": 0.7552, "grad_norm": 1.7524378299713135, "learning_rate": 7.345669904829237e-05, "loss": 2.6676, "step": 472 }, { "epoch": 0.7584, "grad_norm": 1.8277724981307983, "learning_rate": 7.32226372894334e-05, "loss": 2.771, "step": 474 }, { "epoch": 0.7616, "grad_norm": 1.7385722398757935, "learning_rate": 7.298792484852808e-05, "loss": 2.7508, "step": 476 }, { "epoch": 0.7648, "grad_norm": 1.927331805229187, "learning_rate": 7.27525683020548e-05, "loss": 2.8279, "step": 478 }, { "epoch": 0.768, "grad_norm": 1.7165371179580688, "learning_rate": 7.251657424453928e-05, "loss": 2.6799, "step": 480 }, { "epoch": 0.7712, "grad_norm": 1.6585590839385986, "learning_rate": 7.227994928836988e-05, "loss": 2.6849, "step": 482 }, { "epoch": 0.7744, "grad_norm": 1.7066259384155273, "learning_rate": 7.204270006361228e-05, "loss": 2.7189, "step": 484 }, { "epoch": 0.7776, "grad_norm": 1.8811277151107788, "learning_rate": 7.180483321782374e-05, "loss": 2.7771, "step": 486 }, { "epoch": 0.7808, "grad_norm": 1.790667176246643, "learning_rate": 7.156635541586682e-05, "loss": 2.6777, "step": 488 }, { "epoch": 0.784, "grad_norm": 1.7074140310287476, "learning_rate": 7.132727333972265e-05, "loss": 2.7974, "step": 490 }, { "epoch": 0.7872, "grad_norm": 1.6692975759506226, "learning_rate": 7.108759368830371e-05, "loss": 2.7194, "step": 492 }, { "epoch": 0.7904, "grad_norm": 1.759386420249939, "learning_rate": 7.084732317726611e-05, "loss": 2.7475, "step": 494 }, { "epoch": 0.7936, "grad_norm": 1.7140787839889526, "learning_rate": 7.060646853882145e-05, "loss": 2.7576, "step": 496 }, { "epoch": 0.7968, "grad_norm": 1.6590884923934937, "learning_rate": 7.036503652154812e-05, "loss": 2.8157, "step": 498 }, { "epoch": 0.8, "grad_norm": 1.7052589654922485, "learning_rate": 7.012303389020234e-05, "loss": 2.7951, "step": 500 }, { "epoch": 0.8032, "grad_norm": 1.730635166168213, "learning_rate": 6.988046742552845e-05, "loss": 2.8279, "step": 502 }, { "epoch": 0.8064, "grad_norm": 1.7786180973052979, "learning_rate": 6.963734392406907e-05, "loss": 2.6559, "step": 504 }, { "epoch": 0.8096, "grad_norm": 1.901053547859192, "learning_rate": 6.93936701979746e-05, "loss": 2.8896, "step": 506 }, { "epoch": 0.8128, "grad_norm": 1.7321664094924927, "learning_rate": 6.914945307481228e-05, "loss": 2.795, "step": 508 }, { "epoch": 0.816, "grad_norm": 1.7901755571365356, "learning_rate": 6.890469939737506e-05, "loss": 2.7142, "step": 510 }, { "epoch": 0.8192, "grad_norm": 1.7946327924728394, "learning_rate": 6.865941602348966e-05, "loss": 2.781, "step": 512 }, { "epoch": 0.8224, "grad_norm": 1.8260494470596313, "learning_rate": 6.841360982582463e-05, "loss": 2.6868, "step": 514 }, { "epoch": 0.8256, "grad_norm": 1.8089832067489624, "learning_rate": 6.816728769169757e-05, "loss": 2.7845, "step": 516 }, { "epoch": 0.8288, "grad_norm": 1.711962342262268, "learning_rate": 6.792045652288234e-05, "loss": 2.7037, "step": 518 }, { "epoch": 0.832, "grad_norm": 1.7422336339950562, "learning_rate": 6.767312323541555e-05, "loss": 2.7938, "step": 520 }, { "epoch": 0.8352, "grad_norm": 
1.7964292764663696, "learning_rate": 6.742529475940284e-05, "loss": 2.6584, "step": 522 }, { "epoch": 0.8384, "grad_norm": 1.7422146797180176, "learning_rate": 6.717697803882467e-05, "loss": 2.735, "step": 524 }, { "epoch": 0.8416, "grad_norm": 1.7856199741363525, "learning_rate": 6.692818003134184e-05, "loss": 2.7753, "step": 526 }, { "epoch": 0.8448, "grad_norm": 1.7519943714141846, "learning_rate": 6.667890770810035e-05, "loss": 2.7173, "step": 528 }, { "epoch": 0.848, "grad_norm": 1.705423355102539, "learning_rate": 6.64291680535363e-05, "loss": 2.7212, "step": 530 }, { "epoch": 0.8512, "grad_norm": 1.787747859954834, "learning_rate": 6.617896806518005e-05, "loss": 2.7499, "step": 532 }, { "epoch": 0.8544, "grad_norm": 1.652585744857788, "learning_rate": 6.592831475346018e-05, "loss": 2.5542, "step": 534 }, { "epoch": 0.8576, "grad_norm": 1.8116321563720703, "learning_rate": 6.56772151415071e-05, "loss": 2.8155, "step": 536 }, { "epoch": 0.8608, "grad_norm": 1.7901153564453125, "learning_rate": 6.542567626495619e-05, "loss": 2.7472, "step": 538 }, { "epoch": 0.864, "grad_norm": 1.7034342288970947, "learning_rate": 6.517370517175081e-05, "loss": 2.7116, "step": 540 }, { "epoch": 0.8672, "grad_norm": 1.832322597503662, "learning_rate": 6.492130892194461e-05, "loss": 2.7618, "step": 542 }, { "epoch": 0.8704, "grad_norm": 1.7125661373138428, "learning_rate": 6.466849458750394e-05, "loss": 2.6383, "step": 544 }, { "epoch": 0.8736, "grad_norm": 1.801355004310608, "learning_rate": 6.441526925210949e-05, "loss": 2.5274, "step": 546 }, { "epoch": 0.8768, "grad_norm": 1.7398046255111694, "learning_rate": 6.416164001095799e-05, "loss": 2.7207, "step": 548 }, { "epoch": 0.88, "grad_norm": 1.6901566982269287, "learning_rate": 6.390761397056328e-05, "loss": 2.7043, "step": 550 }, { "epoch": 0.8832, "grad_norm": 1.6404509544372559, "learning_rate": 6.365319824855727e-05, "loss": 2.6003, "step": 552 }, { "epoch": 0.8864, "grad_norm": 1.6571403741836548, "learning_rate": 6.339839997349045e-05, "loss": 2.7515, "step": 554 }, { "epoch": 0.8896, "grad_norm": 1.64620041847229, "learning_rate": 6.314322628463219e-05, "loss": 2.619, "step": 556 }, { "epoch": 0.8928, "grad_norm": 1.7471263408660889, "learning_rate": 6.288768433177068e-05, "loss": 2.6689, "step": 558 }, { "epoch": 0.896, "grad_norm": 1.7717193365097046, "learning_rate": 6.26317812750126e-05, "loss": 2.7607, "step": 560 }, { "epoch": 0.8992, "grad_norm": 1.6764250993728638, "learning_rate": 6.237552428458256e-05, "loss": 2.6201, "step": 562 }, { "epoch": 0.9024, "grad_norm": 1.6406267881393433, "learning_rate": 6.21189205406221e-05, "loss": 2.7044, "step": 564 }, { "epoch": 0.9056, "grad_norm": 1.7228118181228638, "learning_rate": 6.186197723298855e-05, "loss": 2.8106, "step": 566 }, { "epoch": 0.9088, "grad_norm": 1.6297177076339722, "learning_rate": 6.160470156105362e-05, "loss": 2.7442, "step": 568 }, { "epoch": 0.912, "grad_norm": 1.691156268119812, "learning_rate": 6.134710073350156e-05, "loss": 2.6915, "step": 570 }, { "epoch": 0.9152, "grad_norm": 1.7338085174560547, "learning_rate": 6.108918196812734e-05, "loss": 2.6418, "step": 572 }, { "epoch": 0.9184, "grad_norm": 1.7589360475540161, "learning_rate": 6.083095249163424e-05, "loss": 2.7577, "step": 574 }, { "epoch": 0.9216, "grad_norm": 1.6277837753295898, "learning_rate": 6.057241953943154e-05, "loss": 2.6624, "step": 576 }, { "epoch": 0.9248, "grad_norm": 1.7026268243789673, "learning_rate": 6.031359035543158e-05, "loss": 2.6401, "step": 578 }, { "epoch": 0.928, "grad_norm": 
1.781171202659607, "learning_rate": 6.005447219184702e-05, "loss": 2.7218, "step": 580 }, { "epoch": 0.9312, "grad_norm": 1.7018693685531616, "learning_rate": 5.9795072308987485e-05, "loss": 2.5756, "step": 582 }, { "epoch": 0.9344, "grad_norm": 1.710750699043274, "learning_rate": 5.9535397975056154e-05, "loss": 2.738, "step": 584 }, { "epoch": 0.9376, "grad_norm": 1.7480794191360474, "learning_rate": 5.927545646594617e-05, "loss": 2.5714, "step": 586 }, { "epoch": 0.9408, "grad_norm": 1.7700692415237427, "learning_rate": 5.901525506503668e-05, "loss": 2.8079, "step": 588 }, { "epoch": 0.944, "grad_norm": 1.7823493480682373, "learning_rate": 5.87548010629889e-05, "loss": 2.6776, "step": 590 }, { "epoch": 0.9472, "grad_norm": 1.6947530508041382, "learning_rate": 5.8494101757541676e-05, "loss": 2.6956, "step": 592 }, { "epoch": 0.9504, "grad_norm": 3.6984357833862305, "learning_rate": 5.8233164453307156e-05, "loss": 2.7138, "step": 594 }, { "epoch": 0.9536, "grad_norm": 1.7368056774139404, "learning_rate": 5.797199646156596e-05, "loss": 2.6646, "step": 596 }, { "epoch": 0.9568, "grad_norm": 1.7944828271865845, "learning_rate": 5.7710605100062485e-05, "loss": 2.7675, "step": 598 }, { "epoch": 0.96, "grad_norm": 1.6918281316757202, "learning_rate": 5.7448997692799764e-05, "loss": 2.5737, "step": 600 }, { "epoch": 0.9632, "grad_norm": 1.6261357069015503, "learning_rate": 5.718718156983428e-05, "loss": 2.6307, "step": 602 }, { "epoch": 0.9664, "grad_norm": 1.7582976818084717, "learning_rate": 5.69251640670706e-05, "loss": 2.6687, "step": 604 }, { "epoch": 0.9696, "grad_norm": 1.6695537567138672, "learning_rate": 5.6662952526055793e-05, "loss": 2.5779, "step": 606 }, { "epoch": 0.9728, "grad_norm": 1.7661280632019043, "learning_rate": 5.6400554293773744e-05, "loss": 2.7142, "step": 608 }, { "epoch": 0.976, "grad_norm": 1.8265255689620972, "learning_rate": 5.61379767224393e-05, "loss": 2.749, "step": 610 }, { "epoch": 0.9792, "grad_norm": 1.6744146347045898, "learning_rate": 5.587522716929228e-05, "loss": 2.5499, "step": 612 }, { "epoch": 0.9824, "grad_norm": 1.6788204908370972, "learning_rate": 5.561231299639127e-05, "loss": 2.7511, "step": 614 }, { "epoch": 0.9856, "grad_norm": 1.8337587118148804, "learning_rate": 5.534924157040745e-05, "loss": 2.7018, "step": 616 }, { "epoch": 0.9888, "grad_norm": 1.706388235092163, "learning_rate": 5.508602026241807e-05, "loss": 2.6037, "step": 618 }, { "epoch": 0.992, "grad_norm": 1.699271559715271, "learning_rate": 5.482265644769998e-05, "loss": 2.7222, "step": 620 }, { "epoch": 0.9952, "grad_norm": 1.67972993850708, "learning_rate": 5.4559157505522985e-05, "loss": 2.4653, "step": 622 }, { "epoch": 0.9984, "grad_norm": 1.6633968353271484, "learning_rate": 5.429553081894304e-05, "loss": 2.6239, "step": 624 } ], "logging_steps": 2, "max_steps": 1250, "num_input_tokens_seen": 0, "num_train_epochs": 2, "save_steps": 625, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 1.0577401470989107e+17, "train_batch_size": 8, "trial_name": null, "trial_params": null }
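The object above is in the format written by the Hugging Face `Trainer` as `trainer_state.json`: `log_history` holds one record per logging interval (`logging_steps` is 2 here) with the running `loss`, `grad_norm`, and scheduled `learning_rate`, and the state was saved at `global_step` 625 of `max_steps` 1250, i.e. the end of epoch 1 of 2. A minimal sketch for inspecting such a file follows; it assumes the JSON is saved locally as `trainer_state.json` and that `matplotlib` is installed — the file path and the plotting choices are illustrative, not part of the checkpoint itself.

# Sketch: load a saved trainer_state.json and plot the logged training
# loss together with the learning-rate schedule. Purely illustrative;
# the path "trainer_state.json" is an assumption.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only records that carry a training loss (evaluation records, if any, would not).
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, label="training loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

# Second y-axis for the learning rate (linear warmup followed by a decay in this log).
ax_lr = ax_loss.twinx()
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
plt.show()

Run against this checkpoint, the plot would show the loss falling from roughly 5.9 to about 2.6 over the first epoch while the learning rate ramps up to just under 1e-4 by step ~63 and then decays.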