diff --git "a/checkpoint-1250/trainer_state.json" "b/checkpoint-1250/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1250/trainer_state.json" @@ -0,0 +1,4408 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.0, + "eval_steps": 500, + "global_step": 1250, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0032, + "grad_norm": 5.2346367835998535, + "learning_rate": 3.1746031746031746e-06, + "loss": 5.8721, + "step": 2 + }, + { + "epoch": 0.0064, + "grad_norm": 6.022397994995117, + "learning_rate": 6.349206349206349e-06, + "loss": 5.775, + "step": 4 + }, + { + "epoch": 0.0096, + "grad_norm": 4.282025337219238, + "learning_rate": 9.523809523809523e-06, + "loss": 5.6253, + "step": 6 + }, + { + "epoch": 0.0128, + "grad_norm": 3.7371106147766113, + "learning_rate": 1.2698412698412699e-05, + "loss": 5.1153, + "step": 8 + }, + { + "epoch": 0.016, + "grad_norm": 8.722667694091797, + "learning_rate": 1.5873015873015872e-05, + "loss": 5.3364, + "step": 10 + }, + { + "epoch": 0.0192, + "grad_norm": 4.185220718383789, + "learning_rate": 1.9047619047619046e-05, + "loss": 5.3102, + "step": 12 + }, + { + "epoch": 0.0224, + "grad_norm": 3.5511889457702637, + "learning_rate": 2.2222222222222223e-05, + "loss": 4.9739, + "step": 14 + }, + { + "epoch": 0.0256, + "grad_norm": 3.152355194091797, + "learning_rate": 2.5396825396825397e-05, + "loss": 4.5878, + "step": 16 + }, + { + "epoch": 0.0288, + "grad_norm": 2.925100326538086, + "learning_rate": 2.857142857142857e-05, + "loss": 4.7562, + "step": 18 + }, + { + "epoch": 0.032, + "grad_norm": 3.192331552505493, + "learning_rate": 3.1746031746031745e-05, + "loss": 4.6627, + "step": 20 + }, + { + "epoch": 0.0352, + "grad_norm": 2.958089590072632, + "learning_rate": 3.492063492063492e-05, + "loss": 4.6227, + "step": 22 + }, + { + "epoch": 0.0384, + "grad_norm": 4.2280592918396, + "learning_rate": 3.809523809523809e-05, + "loss": 4.4283, + "step": 24 + }, + { + "epoch": 0.0416, + "grad_norm": 2.748825788497925, + "learning_rate": 4.126984126984127e-05, + "loss": 4.2503, + "step": 26 + }, + { + "epoch": 0.0448, + "grad_norm": 2.705292224884033, + "learning_rate": 4.4444444444444447e-05, + "loss": 4.4007, + "step": 28 + }, + { + "epoch": 0.048, + "grad_norm": 2.5890371799468994, + "learning_rate": 4.761904761904762e-05, + "loss": 4.3219, + "step": 30 + }, + { + "epoch": 0.0512, + "grad_norm": 2.485609769821167, + "learning_rate": 5.0793650793650794e-05, + "loss": 4.2298, + "step": 32 + }, + { + "epoch": 0.0544, + "grad_norm": 2.5438082218170166, + "learning_rate": 5.396825396825397e-05, + "loss": 4.2862, + "step": 34 + }, + { + "epoch": 0.0576, + "grad_norm": 2.5402183532714844, + "learning_rate": 5.714285714285714e-05, + "loss": 4.0492, + "step": 36 + }, + { + "epoch": 0.0608, + "grad_norm": 2.5497469902038574, + "learning_rate": 6.0317460317460316e-05, + "loss": 4.0702, + "step": 38 + }, + { + "epoch": 0.064, + "grad_norm": 2.3628146648406982, + "learning_rate": 6.349206349206349e-05, + "loss": 3.9895, + "step": 40 + }, + { + "epoch": 0.0672, + "grad_norm": 2.201223611831665, + "learning_rate": 6.666666666666667e-05, + "loss": 3.9743, + "step": 42 + }, + { + "epoch": 0.0704, + "grad_norm": 2.448514938354492, + "learning_rate": 6.984126984126984e-05, + "loss": 4.076, + "step": 44 + }, + { + "epoch": 0.0736, + "grad_norm": 2.386176824569702, + "learning_rate": 7.301587301587302e-05, + "loss": 3.912, + "step": 46 + }, + { + "epoch": 
0.0768, + "grad_norm": 2.2783148288726807, + "learning_rate": 7.619047619047618e-05, + "loss": 3.8678, + "step": 48 + }, + { + "epoch": 0.08, + "grad_norm": 2.472463369369507, + "learning_rate": 7.936507936507937e-05, + "loss": 3.9058, + "step": 50 + }, + { + "epoch": 0.0832, + "grad_norm": 2.548892021179199, + "learning_rate": 8.253968253968255e-05, + "loss": 3.7761, + "step": 52 + }, + { + "epoch": 0.0864, + "grad_norm": 2.4397833347320557, + "learning_rate": 8.571428571428571e-05, + "loss": 3.9437, + "step": 54 + }, + { + "epoch": 0.0896, + "grad_norm": 2.532597303390503, + "learning_rate": 8.888888888888889e-05, + "loss": 3.8437, + "step": 56 + }, + { + "epoch": 0.0928, + "grad_norm": 2.45221209526062, + "learning_rate": 9.206349206349206e-05, + "loss": 3.8429, + "step": 58 + }, + { + "epoch": 0.096, + "grad_norm": 2.645132064819336, + "learning_rate": 9.523809523809524e-05, + "loss": 3.7965, + "step": 60 + }, + { + "epoch": 0.0992, + "grad_norm": 2.385370969772339, + "learning_rate": 9.841269841269841e-05, + "loss": 3.7968, + "step": 62 + }, + { + "epoch": 0.1024, + "grad_norm": 2.2052712440490723, + "learning_rate": 9.99998248790669e-05, + "loss": 3.7794, + "step": 64 + }, + { + "epoch": 0.1056, + "grad_norm": 2.3219361305236816, + "learning_rate": 9.999842391896222e-05, + "loss": 3.6841, + "step": 66 + }, + { + "epoch": 0.1088, + "grad_norm": 2.5465614795684814, + "learning_rate": 9.999562203800676e-05, + "loss": 3.5312, + "step": 68 + }, + { + "epoch": 0.112, + "grad_norm": 2.497755527496338, + "learning_rate": 9.999141931470729e-05, + "loss": 3.6803, + "step": 70 + }, + { + "epoch": 0.1152, + "grad_norm": 2.157752752304077, + "learning_rate": 9.998581586682116e-05, + "loss": 3.5856, + "step": 72 + }, + { + "epoch": 0.1184, + "grad_norm": 2.1215410232543945, + "learning_rate": 9.997881185135307e-05, + "loss": 3.5429, + "step": 74 + }, + { + "epoch": 0.1216, + "grad_norm": 3.784062385559082, + "learning_rate": 9.997040746455062e-05, + "loss": 3.6365, + "step": 76 + }, + { + "epoch": 0.1248, + "grad_norm": 2.2366509437561035, + "learning_rate": 9.996060294189887e-05, + "loss": 3.5129, + "step": 78 + }, + { + "epoch": 0.128, + "grad_norm": 2.240877628326416, + "learning_rate": 9.994939855811362e-05, + "loss": 3.6185, + "step": 80 + }, + { + "epoch": 0.1312, + "grad_norm": 2.176579475402832, + "learning_rate": 9.993679462713395e-05, + "loss": 3.4999, + "step": 82 + }, + { + "epoch": 0.1344, + "grad_norm": 2.3514060974121094, + "learning_rate": 9.992279150211314e-05, + "loss": 3.466, + "step": 84 + }, + { + "epoch": 0.1376, + "grad_norm": 2.2598917484283447, + "learning_rate": 9.990738957540896e-05, + "loss": 3.6413, + "step": 86 + }, + { + "epoch": 0.1408, + "grad_norm": 2.0476276874542236, + "learning_rate": 9.989058927857263e-05, + "loss": 3.5978, + "step": 88 + }, + { + "epoch": 0.144, + "grad_norm": 2.1762521266937256, + "learning_rate": 9.987239108233668e-05, + "loss": 3.6667, + "step": 90 + }, + { + "epoch": 0.1472, + "grad_norm": 2.274958848953247, + "learning_rate": 9.985279549660185e-05, + "loss": 3.6054, + "step": 92 + }, + { + "epoch": 0.1504, + "grad_norm": 2.249992847442627, + "learning_rate": 9.983180307042274e-05, + "loss": 3.6287, + "step": 94 + }, + { + "epoch": 0.1536, + "grad_norm": 2.23592209815979, + "learning_rate": 9.980941439199246e-05, + "loss": 3.5967, + "step": 96 + }, + { + "epoch": 0.1568, + "grad_norm": 2.1270549297332764, + "learning_rate": 9.97856300886261e-05, + "loss": 3.4583, + "step": 98 + }, + { + "epoch": 0.16, + "grad_norm": 2.140577554702759, + 
"learning_rate": 9.976045082674319e-05, + "loss": 3.4091, + "step": 100 + }, + { + "epoch": 0.1632, + "grad_norm": 2.1698827743530273, + "learning_rate": 9.973387731184902e-05, + "loss": 3.5535, + "step": 102 + }, + { + "epoch": 0.1664, + "grad_norm": 2.188966751098633, + "learning_rate": 9.97059102885149e-05, + "loss": 3.4673, + "step": 104 + }, + { + "epoch": 0.1696, + "grad_norm": 2.015054702758789, + "learning_rate": 9.967655054035727e-05, + "loss": 3.5025, + "step": 106 + }, + { + "epoch": 0.1728, + "grad_norm": 2.42785906791687, + "learning_rate": 9.964579889001569e-05, + "loss": 3.3789, + "step": 108 + }, + { + "epoch": 0.176, + "grad_norm": 3.828245162963867, + "learning_rate": 9.961365619912989e-05, + "loss": 3.3673, + "step": 110 + }, + { + "epoch": 0.1792, + "grad_norm": 2.0954813957214355, + "learning_rate": 9.95801233683156e-05, + "loss": 3.501, + "step": 112 + }, + { + "epoch": 0.1824, + "grad_norm": 2.17081618309021, + "learning_rate": 9.954520133713924e-05, + "loss": 3.3926, + "step": 114 + }, + { + "epoch": 0.1856, + "grad_norm": 2.04852557182312, + "learning_rate": 9.950889108409172e-05, + "loss": 3.391, + "step": 116 + }, + { + "epoch": 0.1888, + "grad_norm": 2.426689386367798, + "learning_rate": 9.947119362656092e-05, + "loss": 3.4257, + "step": 118 + }, + { + "epoch": 0.192, + "grad_norm": 3.680421829223633, + "learning_rate": 9.94321100208032e-05, + "loss": 3.2982, + "step": 120 + }, + { + "epoch": 0.1952, + "grad_norm": 2.1409482955932617, + "learning_rate": 9.939164136191384e-05, + "loss": 3.4619, + "step": 122 + }, + { + "epoch": 0.1984, + "grad_norm": 1.9399126768112183, + "learning_rate": 9.934978878379636e-05, + "loss": 3.3362, + "step": 124 + }, + { + "epoch": 0.2016, + "grad_norm": 1.954500675201416, + "learning_rate": 9.930655345913071e-05, + "loss": 3.1957, + "step": 126 + }, + { + "epoch": 0.2048, + "grad_norm": 2.1550300121307373, + "learning_rate": 9.926193659934043e-05, + "loss": 3.4578, + "step": 128 + }, + { + "epoch": 0.208, + "grad_norm": 2.44838547706604, + "learning_rate": 9.921593945455869e-05, + "loss": 3.3975, + "step": 130 + }, + { + "epoch": 0.2112, + "grad_norm": 2.087881565093994, + "learning_rate": 9.916856331359335e-05, + "loss": 3.3682, + "step": 132 + }, + { + "epoch": 0.2144, + "grad_norm": 2.253127336502075, + "learning_rate": 9.911980950389067e-05, + "loss": 3.2451, + "step": 134 + }, + { + "epoch": 0.2176, + "grad_norm": 2.3103411197662354, + "learning_rate": 9.906967939149831e-05, + "loss": 3.3999, + "step": 136 + }, + { + "epoch": 0.2208, + "grad_norm": 2.2471373081207275, + "learning_rate": 9.901817438102695e-05, + "loss": 3.2925, + "step": 138 + }, + { + "epoch": 0.224, + "grad_norm": 2.1333861351013184, + "learning_rate": 9.896529591561093e-05, + "loss": 3.3604, + "step": 140 + }, + { + "epoch": 0.2272, + "grad_norm": 1.914016842842102, + "learning_rate": 9.891104547686782e-05, + "loss": 3.1728, + "step": 142 + }, + { + "epoch": 0.2304, + "grad_norm": 2.061126232147217, + "learning_rate": 9.8855424584857e-05, + "loss": 3.2384, + "step": 144 + }, + { + "epoch": 0.2336, + "grad_norm": 2.153684139251709, + "learning_rate": 9.879843479803691e-05, + "loss": 3.2965, + "step": 146 + }, + { + "epoch": 0.2368, + "grad_norm": 2.109224557876587, + "learning_rate": 9.874007771322151e-05, + "loss": 3.2568, + "step": 148 + }, + { + "epoch": 0.24, + "grad_norm": 3.588501214981079, + "learning_rate": 9.868035496553546e-05, + "loss": 3.223, + "step": 150 + }, + { + "epoch": 0.2432, + "grad_norm": 2.2085981369018555, + "learning_rate": 
9.86192682283684e-05, + "loss": 3.3506, + "step": 152 + }, + { + "epoch": 0.2464, + "grad_norm": 1.9722400903701782, + "learning_rate": 9.855681921332793e-05, + "loss": 3.2184, + "step": 154 + }, + { + "epoch": 0.2496, + "grad_norm": 2.0837275981903076, + "learning_rate": 9.849300967019175e-05, + "loss": 3.2083, + "step": 156 + }, + { + "epoch": 0.2528, + "grad_norm": 1.799812912940979, + "learning_rate": 9.84278413868586e-05, + "loss": 3.2038, + "step": 158 + }, + { + "epoch": 0.256, + "grad_norm": 2.0242528915405273, + "learning_rate": 9.836131618929819e-05, + "loss": 3.3193, + "step": 160 + }, + { + "epoch": 0.2592, + "grad_norm": 1.948160171508789, + "learning_rate": 9.82934359415e-05, + "loss": 3.2132, + "step": 162 + }, + { + "epoch": 0.2624, + "grad_norm": 2.1192405223846436, + "learning_rate": 9.822420254542108e-05, + "loss": 3.2444, + "step": 164 + }, + { + "epoch": 0.2656, + "grad_norm": 1.9564788341522217, + "learning_rate": 9.815361794093272e-05, + "loss": 3.105, + "step": 166 + }, + { + "epoch": 0.2688, + "grad_norm": 2.1899731159210205, + "learning_rate": 9.808168410576617e-05, + "loss": 3.1558, + "step": 168 + }, + { + "epoch": 0.272, + "grad_norm": 2.0988922119140625, + "learning_rate": 9.800840305545715e-05, + "loss": 3.2485, + "step": 170 + }, + { + "epoch": 0.2752, + "grad_norm": 2.2193591594696045, + "learning_rate": 9.793377684328939e-05, + "loss": 3.296, + "step": 172 + }, + { + "epoch": 0.2784, + "grad_norm": 2.007413864135742, + "learning_rate": 9.785780756023714e-05, + "loss": 3.1287, + "step": 174 + }, + { + "epoch": 0.2816, + "grad_norm": 1.9749376773834229, + "learning_rate": 9.778049733490655e-05, + "loss": 3.0076, + "step": 176 + }, + { + "epoch": 0.2848, + "grad_norm": 2.059288263320923, + "learning_rate": 9.770184833347606e-05, + "loss": 3.1663, + "step": 178 + }, + { + "epoch": 0.288, + "grad_norm": 1.96829354763031, + "learning_rate": 9.762186275963563e-05, + "loss": 3.2163, + "step": 180 + }, + { + "epoch": 0.2912, + "grad_norm": 2.0208017826080322, + "learning_rate": 9.754054285452506e-05, + "loss": 3.2242, + "step": 182 + }, + { + "epoch": 0.2944, + "grad_norm": 1.88913094997406, + "learning_rate": 9.745789089667121e-05, + "loss": 3.0072, + "step": 184 + }, + { + "epoch": 0.2976, + "grad_norm": 1.996383547782898, + "learning_rate": 9.737390920192408e-05, + "loss": 3.2108, + "step": 186 + }, + { + "epoch": 0.3008, + "grad_norm": 2.0667550563812256, + "learning_rate": 9.7288600123392e-05, + "loss": 3.108, + "step": 188 + }, + { + "epoch": 0.304, + "grad_norm": 1.9526984691619873, + "learning_rate": 9.720196605137565e-05, + "loss": 3.067, + "step": 190 + }, + { + "epoch": 0.3072, + "grad_norm": 2.0643012523651123, + "learning_rate": 9.71140094133011e-05, + "loss": 3.2143, + "step": 192 + }, + { + "epoch": 0.3104, + "grad_norm": 2.187326431274414, + "learning_rate": 9.702473267365182e-05, + "loss": 3.1007, + "step": 194 + }, + { + "epoch": 0.3136, + "grad_norm": 2.1299145221710205, + "learning_rate": 9.693413833389956e-05, + "loss": 3.1868, + "step": 196 + }, + { + "epoch": 0.3168, + "grad_norm": 1.7644037008285522, + "learning_rate": 9.684222893243431e-05, + "loss": 2.9406, + "step": 198 + }, + { + "epoch": 0.32, + "grad_norm": 2.112617015838623, + "learning_rate": 9.674900704449324e-05, + "loss": 3.1198, + "step": 200 + }, + { + "epoch": 0.3232, + "grad_norm": 1.8327059745788574, + "learning_rate": 9.665447528208836e-05, + "loss": 3.1278, + "step": 202 + }, + { + "epoch": 0.3264, + "grad_norm": 1.8569375276565552, + "learning_rate": 9.655863629393351e-05, 
+ "loss": 3.2069, + "step": 204 + }, + { + "epoch": 0.3296, + "grad_norm": 1.7960104942321777, + "learning_rate": 9.64614927653701e-05, + "loss": 3.0708, + "step": 206 + }, + { + "epoch": 0.3328, + "grad_norm": 1.888593316078186, + "learning_rate": 9.636304741829181e-05, + "loss": 3.1365, + "step": 208 + }, + { + "epoch": 0.336, + "grad_norm": 1.8564034700393677, + "learning_rate": 9.626330301106837e-05, + "loss": 3.0059, + "step": 210 + }, + { + "epoch": 0.3392, + "grad_norm": 1.9591517448425293, + "learning_rate": 9.616226233846828e-05, + "loss": 2.9778, + "step": 212 + }, + { + "epoch": 0.3424, + "grad_norm": 2.025777816772461, + "learning_rate": 9.605992823158046e-05, + "loss": 3.0969, + "step": 214 + }, + { + "epoch": 0.3456, + "grad_norm": 1.8839352130889893, + "learning_rate": 9.595630355773501e-05, + "loss": 3.1342, + "step": 216 + }, + { + "epoch": 0.3488, + "grad_norm": 5.388115882873535, + "learning_rate": 9.585139122042274e-05, + "loss": 3.1961, + "step": 218 + }, + { + "epoch": 0.352, + "grad_norm": 2.056678533554077, + "learning_rate": 9.574519415921396e-05, + "loss": 3.1183, + "step": 220 + }, + { + "epoch": 0.3552, + "grad_norm": 3.0575530529022217, + "learning_rate": 9.5637715349676e-05, + "loss": 3.1446, + "step": 222 + }, + { + "epoch": 0.3584, + "grad_norm": 1.8165247440338135, + "learning_rate": 9.552895780328987e-05, + "loss": 3.0338, + "step": 224 + }, + { + "epoch": 0.3616, + "grad_norm": 1.845023512840271, + "learning_rate": 9.541892456736595e-05, + "loss": 3.194, + "step": 226 + }, + { + "epoch": 0.3648, + "grad_norm": 1.9389755725860596, + "learning_rate": 9.530761872495849e-05, + "loss": 3.0054, + "step": 228 + }, + { + "epoch": 0.368, + "grad_norm": 1.9471769332885742, + "learning_rate": 9.519504339477932e-05, + "loss": 3.1499, + "step": 230 + }, + { + "epoch": 0.3712, + "grad_norm": 1.9367070198059082, + "learning_rate": 9.508120173111039e-05, + "loss": 3.0068, + "step": 232 + }, + { + "epoch": 0.3744, + "grad_norm": 2.018630027770996, + "learning_rate": 9.496609692371548e-05, + "loss": 3.1722, + "step": 234 + }, + { + "epoch": 0.3776, + "grad_norm": 2.0086734294891357, + "learning_rate": 9.484973219775074e-05, + "loss": 3.2773, + "step": 236 + }, + { + "epoch": 0.3808, + "grad_norm": 1.9771322011947632, + "learning_rate": 9.473211081367436e-05, + "loss": 3.0502, + "step": 238 + }, + { + "epoch": 0.384, + "grad_norm": 1.9231762886047363, + "learning_rate": 9.46132360671552e-05, + "loss": 3.0415, + "step": 240 + }, + { + "epoch": 0.3872, + "grad_norm": 1.924302101135254, + "learning_rate": 9.449311128898049e-05, + "loss": 3.0794, + "step": 242 + }, + { + "epoch": 0.3904, + "grad_norm": 1.967323899269104, + "learning_rate": 9.437173984496246e-05, + "loss": 3.1527, + "step": 244 + }, + { + "epoch": 0.3936, + "grad_norm": 1.9681285619735718, + "learning_rate": 9.424912513584401e-05, + "loss": 3.1767, + "step": 246 + }, + { + "epoch": 0.3968, + "grad_norm": 1.908687710762024, + "learning_rate": 9.412527059720352e-05, + "loss": 2.9755, + "step": 248 + }, + { + "epoch": 0.4, + "grad_norm": 1.9891773462295532, + "learning_rate": 9.400017969935848e-05, + "loss": 2.9644, + "step": 250 + }, + { + "epoch": 0.4032, + "grad_norm": 1.8200337886810303, + "learning_rate": 9.387385594726829e-05, + "loss": 3.0334, + "step": 252 + }, + { + "epoch": 0.4064, + "grad_norm": 1.9553104639053345, + "learning_rate": 9.374630288043614e-05, + "loss": 3.1011, + "step": 254 + }, + { + "epoch": 0.4096, + "grad_norm": 3.5282905101776123, + "learning_rate": 9.361752407280965e-05, + "loss": 
2.9859, + "step": 256 + }, + { + "epoch": 0.4128, + "grad_norm": 1.9712797403335571, + "learning_rate": 9.348752313268093e-05, + "loss": 2.9472, + "step": 258 + }, + { + "epoch": 0.416, + "grad_norm": 1.927635908126831, + "learning_rate": 9.335630370258533e-05, + "loss": 3.1396, + "step": 260 + }, + { + "epoch": 0.4192, + "grad_norm": 1.9233123064041138, + "learning_rate": 9.322386945919946e-05, + "loss": 3.1889, + "step": 262 + }, + { + "epoch": 0.4224, + "grad_norm": 1.870160460472107, + "learning_rate": 9.309022411323816e-05, + "loss": 3.0916, + "step": 264 + }, + { + "epoch": 0.4256, + "grad_norm": 1.7860538959503174, + "learning_rate": 9.295537140935049e-05, + "loss": 3.1584, + "step": 266 + }, + { + "epoch": 0.4288, + "grad_norm": 1.723097801208496, + "learning_rate": 9.281931512601485e-05, + "loss": 2.8587, + "step": 268 + }, + { + "epoch": 0.432, + "grad_norm": 1.976706862449646, + "learning_rate": 9.26820590754331e-05, + "loss": 2.9942, + "step": 270 + }, + { + "epoch": 0.4352, + "grad_norm": 1.8147152662277222, + "learning_rate": 9.254360710342371e-05, + "loss": 3.087, + "step": 272 + }, + { + "epoch": 0.4384, + "grad_norm": 1.8946576118469238, + "learning_rate": 9.240396308931407e-05, + "loss": 3.0101, + "step": 274 + }, + { + "epoch": 0.4416, + "grad_norm": 1.8432953357696533, + "learning_rate": 9.226313094583173e-05, + "loss": 3.0351, + "step": 276 + }, + { + "epoch": 0.4448, + "grad_norm": 1.8600575923919678, + "learning_rate": 9.212111461899479e-05, + "loss": 3.0027, + "step": 278 + }, + { + "epoch": 0.448, + "grad_norm": 1.7912688255310059, + "learning_rate": 9.197791808800135e-05, + "loss": 3.0568, + "step": 280 + }, + { + "epoch": 0.4512, + "grad_norm": 2.005932569503784, + "learning_rate": 9.183354536511803e-05, + "loss": 2.9778, + "step": 282 + }, + { + "epoch": 0.4544, + "grad_norm": 1.8989531993865967, + "learning_rate": 9.168800049556747e-05, + "loss": 2.9711, + "step": 284 + }, + { + "epoch": 0.4576, + "grad_norm": 1.7888331413269043, + "learning_rate": 9.154128755741509e-05, + "loss": 2.9901, + "step": 286 + }, + { + "epoch": 0.4608, + "grad_norm": 1.9094816446304321, + "learning_rate": 9.139341066145472e-05, + "loss": 3.0248, + "step": 288 + }, + { + "epoch": 0.464, + "grad_norm": 1.7940737009048462, + "learning_rate": 9.124437395109353e-05, + "loss": 3.0141, + "step": 290 + }, + { + "epoch": 0.4672, + "grad_norm": 1.7626845836639404, + "learning_rate": 9.109418160223585e-05, + "loss": 2.9531, + "step": 292 + }, + { + "epoch": 0.4704, + "grad_norm": 1.9440515041351318, + "learning_rate": 9.094283782316619e-05, + "loss": 2.9732, + "step": 294 + }, + { + "epoch": 0.4736, + "grad_norm": 1.7515082359313965, + "learning_rate": 9.079034685443133e-05, + "loss": 2.8, + "step": 296 + }, + { + "epoch": 0.4768, + "grad_norm": 1.8595532178878784, + "learning_rate": 9.063671296872149e-05, + "loss": 2.9873, + "step": 298 + }, + { + "epoch": 0.48, + "grad_norm": 1.9954842329025269, + "learning_rate": 9.048194047075069e-05, + "loss": 2.9793, + "step": 300 + }, + { + "epoch": 0.4832, + "grad_norm": 1.8819364309310913, + "learning_rate": 9.032603369713596e-05, + "loss": 2.8904, + "step": 302 + }, + { + "epoch": 0.4864, + "grad_norm": 1.75027596950531, + "learning_rate": 9.016899701627604e-05, + "loss": 2.9811, + "step": 304 + }, + { + "epoch": 0.4896, + "grad_norm": 1.9617975950241089, + "learning_rate": 9.00108348282288e-05, + "loss": 3.0418, + "step": 306 + }, + { + "epoch": 0.4928, + "grad_norm": 1.8097938299179077, + "learning_rate": 8.985155156458811e-05, + "loss": 3.0068, + 
"step": 308 + }, + { + "epoch": 0.496, + "grad_norm": 2.008989095687866, + "learning_rate": 8.969115168835954e-05, + "loss": 2.8913, + "step": 310 + }, + { + "epoch": 0.4992, + "grad_norm": 1.8119149208068848, + "learning_rate": 8.952963969383538e-05, + "loss": 3.004, + "step": 312 + }, + { + "epoch": 0.5024, + "grad_norm": 1.9150359630584717, + "learning_rate": 8.93670201064687e-05, + "loss": 2.9404, + "step": 314 + }, + { + "epoch": 0.5056, + "grad_norm": 1.8803378343582153, + "learning_rate": 8.920329748274649e-05, + "loss": 2.9279, + "step": 316 + }, + { + "epoch": 0.5088, + "grad_norm": 1.7308014631271362, + "learning_rate": 8.903847641006218e-05, + "loss": 2.9482, + "step": 318 + }, + { + "epoch": 0.512, + "grad_norm": 2.0764575004577637, + "learning_rate": 8.887256150658684e-05, + "loss": 2.9425, + "step": 320 + }, + { + "epoch": 0.5152, + "grad_norm": 1.8219692707061768, + "learning_rate": 8.870555742113998e-05, + "loss": 3.0176, + "step": 322 + }, + { + "epoch": 0.5184, + "grad_norm": 1.8642444610595703, + "learning_rate": 8.85374688330592e-05, + "loss": 3.0474, + "step": 324 + }, + { + "epoch": 0.5216, + "grad_norm": 1.9277112483978271, + "learning_rate": 8.836830045206911e-05, + "loss": 2.9487, + "step": 326 + }, + { + "epoch": 0.5248, + "grad_norm": 1.793232798576355, + "learning_rate": 8.81980570181494e-05, + "loss": 2.8907, + "step": 328 + }, + { + "epoch": 0.528, + "grad_norm": 1.8513329029083252, + "learning_rate": 8.802674330140192e-05, + "loss": 2.9645, + "step": 330 + }, + { + "epoch": 0.5312, + "grad_norm": 1.7978984117507935, + "learning_rate": 8.785436410191714e-05, + "loss": 2.9939, + "step": 332 + }, + { + "epoch": 0.5344, + "grad_norm": 1.7157683372497559, + "learning_rate": 8.76809242496396e-05, + "loss": 2.8079, + "step": 334 + }, + { + "epoch": 0.5376, + "grad_norm": 1.7450584173202515, + "learning_rate": 8.750642860423262e-05, + "loss": 2.9477, + "step": 336 + }, + { + "epoch": 0.5408, + "grad_norm": 1.812904715538025, + "learning_rate": 8.733088205494205e-05, + "loss": 2.9842, + "step": 338 + }, + { + "epoch": 0.544, + "grad_norm": 1.878509759902954, + "learning_rate": 8.715428952045936e-05, + "loss": 2.8992, + "step": 340 + }, + { + "epoch": 0.5472, + "grad_norm": 1.8090356588363647, + "learning_rate": 8.697665594878382e-05, + "loss": 2.9507, + "step": 342 + }, + { + "epoch": 0.5504, + "grad_norm": 1.8601405620574951, + "learning_rate": 8.679798631708375e-05, + "loss": 2.8263, + "step": 344 + }, + { + "epoch": 0.5536, + "grad_norm": 1.8406038284301758, + "learning_rate": 8.661828563155727e-05, + "loss": 2.8991, + "step": 346 + }, + { + "epoch": 0.5568, + "grad_norm": 1.7687346935272217, + "learning_rate": 8.643755892729179e-05, + "loss": 2.8437, + "step": 348 + }, + { + "epoch": 0.56, + "grad_norm": 1.9318656921386719, + "learning_rate": 8.625581126812312e-05, + "loss": 3.0261, + "step": 350 + }, + { + "epoch": 0.5632, + "grad_norm": 1.8095970153808594, + "learning_rate": 8.607304774649349e-05, + "loss": 2.9269, + "step": 352 + }, + { + "epoch": 0.5664, + "grad_norm": 1.8424136638641357, + "learning_rate": 8.588927348330887e-05, + "loss": 2.7917, + "step": 354 + }, + { + "epoch": 0.5696, + "grad_norm": 1.9472522735595703, + "learning_rate": 8.57044936277955e-05, + "loss": 2.7557, + "step": 356 + }, + { + "epoch": 0.5728, + "grad_norm": 1.8774663209915161, + "learning_rate": 8.551871335735565e-05, + "loss": 2.8426, + "step": 358 + }, + { + "epoch": 0.576, + "grad_norm": 2.0363616943359375, + "learning_rate": 8.533193787742251e-05, + "loss": 2.8605, + "step": 
360 + }, + { + "epoch": 0.5792, + "grad_norm": 1.802148699760437, + "learning_rate": 8.51441724213143e-05, + "loss": 2.8999, + "step": 362 + }, + { + "epoch": 0.5824, + "grad_norm": 1.9002522230148315, + "learning_rate": 8.495542225008771e-05, + "loss": 2.854, + "step": 364 + }, + { + "epoch": 0.5856, + "grad_norm": 1.715409755706787, + "learning_rate": 8.476569265239046e-05, + "loss": 2.8574, + "step": 366 + }, + { + "epoch": 0.5888, + "grad_norm": 1.9336328506469727, + "learning_rate": 8.457498894431311e-05, + "loss": 2.7513, + "step": 368 + }, + { + "epoch": 0.592, + "grad_norm": 1.8150614500045776, + "learning_rate": 8.438331646924013e-05, + "loss": 2.8648, + "step": 370 + }, + { + "epoch": 0.5952, + "grad_norm": 1.745450735092163, + "learning_rate": 8.419068059770011e-05, + "loss": 2.8102, + "step": 372 + }, + { + "epoch": 0.5984, + "grad_norm": 1.7649202346801758, + "learning_rate": 8.399708672721539e-05, + "loss": 2.943, + "step": 374 + }, + { + "epoch": 0.6016, + "grad_norm": 1.9029461145401, + "learning_rate": 8.380254028215076e-05, + "loss": 2.9549, + "step": 376 + }, + { + "epoch": 0.6048, + "grad_norm": 1.6569948196411133, + "learning_rate": 8.360704671356145e-05, + "loss": 2.762, + "step": 378 + }, + { + "epoch": 0.608, + "grad_norm": 1.8082654476165771, + "learning_rate": 8.341061149904045e-05, + "loss": 2.8673, + "step": 380 + }, + { + "epoch": 0.6112, + "grad_norm": 2.0130746364593506, + "learning_rate": 8.321324014256504e-05, + "loss": 2.8208, + "step": 382 + }, + { + "epoch": 0.6144, + "grad_norm": 1.9243208169937134, + "learning_rate": 8.30149381743425e-05, + "loss": 2.8175, + "step": 384 + }, + { + "epoch": 0.6176, + "grad_norm": 1.7602218389511108, + "learning_rate": 8.28157111506552e-05, + "loss": 2.8133, + "step": 386 + }, + { + "epoch": 0.6208, + "grad_norm": 1.9011287689208984, + "learning_rate": 8.261556465370493e-05, + "loss": 2.915, + "step": 388 + }, + { + "epoch": 0.624, + "grad_norm": 1.8260935544967651, + "learning_rate": 8.24145042914565e-05, + "loss": 2.7879, + "step": 390 + }, + { + "epoch": 0.6272, + "grad_norm": 2.0104498863220215, + "learning_rate": 8.221253569748055e-05, + "loss": 2.9628, + "step": 392 + }, + { + "epoch": 0.6304, + "grad_norm": 1.821583867073059, + "learning_rate": 8.200966453079575e-05, + "loss": 2.8391, + "step": 394 + }, + { + "epoch": 0.6336, + "grad_norm": 1.8291980028152466, + "learning_rate": 8.180589647571023e-05, + "loss": 2.8921, + "step": 396 + }, + { + "epoch": 0.6368, + "grad_norm": 1.8733659982681274, + "learning_rate": 8.16012372416623e-05, + "loss": 2.8988, + "step": 398 + }, + { + "epoch": 0.64, + "grad_norm": 1.9064126014709473, + "learning_rate": 8.13956925630605e-05, + "loss": 2.8047, + "step": 400 + }, + { + "epoch": 0.6432, + "grad_norm": 1.7694967985153198, + "learning_rate": 8.118926819912287e-05, + "loss": 2.8175, + "step": 402 + }, + { + "epoch": 0.6464, + "grad_norm": 1.9278019666671753, + "learning_rate": 8.098196993371565e-05, + "loss": 2.7233, + "step": 404 + }, + { + "epoch": 0.6496, + "grad_norm": 1.7496165037155151, + "learning_rate": 8.077380357519115e-05, + "loss": 2.8196, + "step": 406 + }, + { + "epoch": 0.6528, + "grad_norm": 1.8212895393371582, + "learning_rate": 8.056477495622511e-05, + "loss": 2.8749, + "step": 408 + }, + { + "epoch": 0.656, + "grad_norm": 1.8443467617034912, + "learning_rate": 8.035488993365312e-05, + "loss": 2.8373, + "step": 410 + }, + { + "epoch": 0.6592, + "grad_norm": 1.804992437362671, + "learning_rate": 8.014415438830667e-05, + "loss": 2.8248, + "step": 412 + }, + { + 
"epoch": 0.6624, + "grad_norm": 1.7744520902633667, + "learning_rate": 7.993257422484826e-05, + "loss": 2.8403, + "step": 414 + }, + { + "epoch": 0.6656, + "grad_norm": 1.9854934215545654, + "learning_rate": 7.972015537160602e-05, + "loss": 2.9588, + "step": 416 + }, + { + "epoch": 0.6688, + "grad_norm": 1.7830610275268555, + "learning_rate": 7.950690378040758e-05, + "loss": 2.7737, + "step": 418 + }, + { + "epoch": 0.672, + "grad_norm": 1.8114231824874878, + "learning_rate": 7.929282542641325e-05, + "loss": 2.7003, + "step": 420 + }, + { + "epoch": 0.6752, + "grad_norm": 1.931700348854065, + "learning_rate": 7.907792630794876e-05, + "loss": 2.8088, + "step": 422 + }, + { + "epoch": 0.6784, + "grad_norm": 1.8684518337249756, + "learning_rate": 7.886221244633703e-05, + "loss": 2.878, + "step": 424 + }, + { + "epoch": 0.6816, + "grad_norm": 1.8997987508773804, + "learning_rate": 7.864568988572947e-05, + "loss": 2.9374, + "step": 426 + }, + { + "epoch": 0.6848, + "grad_norm": 1.7682809829711914, + "learning_rate": 7.842836469293673e-05, + "loss": 2.7694, + "step": 428 + }, + { + "epoch": 0.688, + "grad_norm": 1.8019146919250488, + "learning_rate": 7.821024295725865e-05, + "loss": 2.8153, + "step": 430 + }, + { + "epoch": 0.6912, + "grad_norm": 1.8119292259216309, + "learning_rate": 7.79913307903136e-05, + "loss": 2.8072, + "step": 432 + }, + { + "epoch": 0.6944, + "grad_norm": 1.8016608953475952, + "learning_rate": 7.777163432586734e-05, + "loss": 2.7276, + "step": 434 + }, + { + "epoch": 0.6976, + "grad_norm": 1.8160144090652466, + "learning_rate": 7.755115971966104e-05, + "loss": 2.8539, + "step": 436 + }, + { + "epoch": 0.7008, + "grad_norm": 1.825020670890808, + "learning_rate": 7.732991314923891e-05, + "loss": 2.7796, + "step": 438 + }, + { + "epoch": 0.704, + "grad_norm": 1.7632222175598145, + "learning_rate": 7.710790081377502e-05, + "loss": 2.7914, + "step": 440 + }, + { + "epoch": 0.7072, + "grad_norm": 1.8498951196670532, + "learning_rate": 7.688512893389964e-05, + "loss": 2.6861, + "step": 442 + }, + { + "epoch": 0.7104, + "grad_norm": 2.0129451751708984, + "learning_rate": 7.666160375152496e-05, + "loss": 2.8217, + "step": 444 + }, + { + "epoch": 0.7136, + "grad_norm": 1.780062198638916, + "learning_rate": 7.643733152967019e-05, + "loss": 2.8554, + "step": 446 + }, + { + "epoch": 0.7168, + "grad_norm": 1.726577877998352, + "learning_rate": 7.621231855228604e-05, + "loss": 2.831, + "step": 448 + }, + { + "epoch": 0.72, + "grad_norm": 1.7651227712631226, + "learning_rate": 7.598657112407865e-05, + "loss": 2.8193, + "step": 450 + }, + { + "epoch": 0.7232, + "grad_norm": 1.7961740493774414, + "learning_rate": 7.576009557033304e-05, + "loss": 2.8149, + "step": 452 + }, + { + "epoch": 0.7264, + "grad_norm": 1.813366174697876, + "learning_rate": 7.553289823673568e-05, + "loss": 2.855, + "step": 454 + }, + { + "epoch": 0.7296, + "grad_norm": 1.8143000602722168, + "learning_rate": 7.530498548919693e-05, + "loss": 2.8651, + "step": 456 + }, + { + "epoch": 0.7328, + "grad_norm": 1.7585805654525757, + "learning_rate": 7.507636371367246e-05, + "loss": 3.0031, + "step": 458 + }, + { + "epoch": 0.736, + "grad_norm": 2.2777135372161865, + "learning_rate": 7.484703931598445e-05, + "loss": 2.8548, + "step": 460 + }, + { + "epoch": 0.7392, + "grad_norm": 1.8288154602050781, + "learning_rate": 7.461701872164204e-05, + "loss": 2.7425, + "step": 462 + }, + { + "epoch": 0.7424, + "grad_norm": 1.8734841346740723, + "learning_rate": 7.438630837566133e-05, + "loss": 2.8703, + "step": 464 + }, + { + 
"epoch": 0.7456, + "grad_norm": 1.742242455482483, + "learning_rate": 7.415491474238475e-05, + "loss": 2.772, + "step": 466 + }, + { + "epoch": 0.7488, + "grad_norm": 1.7874287366867065, + "learning_rate": 7.39228443053e-05, + "loss": 2.7379, + "step": 468 + }, + { + "epoch": 0.752, + "grad_norm": 1.8021794557571411, + "learning_rate": 7.369010356685833e-05, + "loss": 2.9262, + "step": 470 + }, + { + "epoch": 0.7552, + "grad_norm": 1.7524378299713135, + "learning_rate": 7.345669904829237e-05, + "loss": 2.6676, + "step": 472 + }, + { + "epoch": 0.7584, + "grad_norm": 1.8277724981307983, + "learning_rate": 7.32226372894334e-05, + "loss": 2.771, + "step": 474 + }, + { + "epoch": 0.7616, + "grad_norm": 1.7385722398757935, + "learning_rate": 7.298792484852808e-05, + "loss": 2.7508, + "step": 476 + }, + { + "epoch": 0.7648, + "grad_norm": 1.927331805229187, + "learning_rate": 7.27525683020548e-05, + "loss": 2.8279, + "step": 478 + }, + { + "epoch": 0.768, + "grad_norm": 1.7165371179580688, + "learning_rate": 7.251657424453928e-05, + "loss": 2.6799, + "step": 480 + }, + { + "epoch": 0.7712, + "grad_norm": 1.6585590839385986, + "learning_rate": 7.227994928836988e-05, + "loss": 2.6849, + "step": 482 + }, + { + "epoch": 0.7744, + "grad_norm": 1.7066259384155273, + "learning_rate": 7.204270006361228e-05, + "loss": 2.7189, + "step": 484 + }, + { + "epoch": 0.7776, + "grad_norm": 1.8811277151107788, + "learning_rate": 7.180483321782374e-05, + "loss": 2.7771, + "step": 486 + }, + { + "epoch": 0.7808, + "grad_norm": 1.790667176246643, + "learning_rate": 7.156635541586682e-05, + "loss": 2.6777, + "step": 488 + }, + { + "epoch": 0.784, + "grad_norm": 1.7074140310287476, + "learning_rate": 7.132727333972265e-05, + "loss": 2.7974, + "step": 490 + }, + { + "epoch": 0.7872, + "grad_norm": 1.6692975759506226, + "learning_rate": 7.108759368830371e-05, + "loss": 2.7194, + "step": 492 + }, + { + "epoch": 0.7904, + "grad_norm": 1.759386420249939, + "learning_rate": 7.084732317726611e-05, + "loss": 2.7475, + "step": 494 + }, + { + "epoch": 0.7936, + "grad_norm": 1.7140787839889526, + "learning_rate": 7.060646853882145e-05, + "loss": 2.7576, + "step": 496 + }, + { + "epoch": 0.7968, + "grad_norm": 1.6590884923934937, + "learning_rate": 7.036503652154812e-05, + "loss": 2.8157, + "step": 498 + }, + { + "epoch": 0.8, + "grad_norm": 1.7052589654922485, + "learning_rate": 7.012303389020234e-05, + "loss": 2.7951, + "step": 500 + }, + { + "epoch": 0.8032, + "grad_norm": 1.730635166168213, + "learning_rate": 6.988046742552845e-05, + "loss": 2.8279, + "step": 502 + }, + { + "epoch": 0.8064, + "grad_norm": 1.7786180973052979, + "learning_rate": 6.963734392406907e-05, + "loss": 2.6559, + "step": 504 + }, + { + "epoch": 0.8096, + "grad_norm": 1.901053547859192, + "learning_rate": 6.93936701979746e-05, + "loss": 2.8896, + "step": 506 + }, + { + "epoch": 0.8128, + "grad_norm": 1.7321664094924927, + "learning_rate": 6.914945307481228e-05, + "loss": 2.795, + "step": 508 + }, + { + "epoch": 0.816, + "grad_norm": 1.7901755571365356, + "learning_rate": 6.890469939737506e-05, + "loss": 2.7142, + "step": 510 + }, + { + "epoch": 0.8192, + "grad_norm": 1.7946327924728394, + "learning_rate": 6.865941602348966e-05, + "loss": 2.781, + "step": 512 + }, + { + "epoch": 0.8224, + "grad_norm": 1.8260494470596313, + "learning_rate": 6.841360982582463e-05, + "loss": 2.6868, + "step": 514 + }, + { + "epoch": 0.8256, + "grad_norm": 1.8089832067489624, + "learning_rate": 6.816728769169757e-05, + "loss": 2.7845, + "step": 516 + }, + { + "epoch": 0.8288, 
+ "grad_norm": 1.711962342262268, + "learning_rate": 6.792045652288234e-05, + "loss": 2.7037, + "step": 518 + }, + { + "epoch": 0.832, + "grad_norm": 1.7422336339950562, + "learning_rate": 6.767312323541555e-05, + "loss": 2.7938, + "step": 520 + }, + { + "epoch": 0.8352, + "grad_norm": 1.7964292764663696, + "learning_rate": 6.742529475940284e-05, + "loss": 2.6584, + "step": 522 + }, + { + "epoch": 0.8384, + "grad_norm": 1.7422146797180176, + "learning_rate": 6.717697803882467e-05, + "loss": 2.735, + "step": 524 + }, + { + "epoch": 0.8416, + "grad_norm": 1.7856199741363525, + "learning_rate": 6.692818003134184e-05, + "loss": 2.7753, + "step": 526 + }, + { + "epoch": 0.8448, + "grad_norm": 1.7519943714141846, + "learning_rate": 6.667890770810035e-05, + "loss": 2.7173, + "step": 528 + }, + { + "epoch": 0.848, + "grad_norm": 1.705423355102539, + "learning_rate": 6.64291680535363e-05, + "loss": 2.7212, + "step": 530 + }, + { + "epoch": 0.8512, + "grad_norm": 1.787747859954834, + "learning_rate": 6.617896806518005e-05, + "loss": 2.7499, + "step": 532 + }, + { + "epoch": 0.8544, + "grad_norm": 1.652585744857788, + "learning_rate": 6.592831475346018e-05, + "loss": 2.5542, + "step": 534 + }, + { + "epoch": 0.8576, + "grad_norm": 1.8116321563720703, + "learning_rate": 6.56772151415071e-05, + "loss": 2.8155, + "step": 536 + }, + { + "epoch": 0.8608, + "grad_norm": 1.7901153564453125, + "learning_rate": 6.542567626495619e-05, + "loss": 2.7472, + "step": 538 + }, + { + "epoch": 0.864, + "grad_norm": 1.7034342288970947, + "learning_rate": 6.517370517175081e-05, + "loss": 2.7116, + "step": 540 + }, + { + "epoch": 0.8672, + "grad_norm": 1.832322597503662, + "learning_rate": 6.492130892194461e-05, + "loss": 2.7618, + "step": 542 + }, + { + "epoch": 0.8704, + "grad_norm": 1.7125661373138428, + "learning_rate": 6.466849458750394e-05, + "loss": 2.6383, + "step": 544 + }, + { + "epoch": 0.8736, + "grad_norm": 1.801355004310608, + "learning_rate": 6.441526925210949e-05, + "loss": 2.5274, + "step": 546 + }, + { + "epoch": 0.8768, + "grad_norm": 1.7398046255111694, + "learning_rate": 6.416164001095799e-05, + "loss": 2.7207, + "step": 548 + }, + { + "epoch": 0.88, + "grad_norm": 1.6901566982269287, + "learning_rate": 6.390761397056328e-05, + "loss": 2.7043, + "step": 550 + }, + { + "epoch": 0.8832, + "grad_norm": 1.6404509544372559, + "learning_rate": 6.365319824855727e-05, + "loss": 2.6003, + "step": 552 + }, + { + "epoch": 0.8864, + "grad_norm": 1.6571403741836548, + "learning_rate": 6.339839997349045e-05, + "loss": 2.7515, + "step": 554 + }, + { + "epoch": 0.8896, + "grad_norm": 1.64620041847229, + "learning_rate": 6.314322628463219e-05, + "loss": 2.619, + "step": 556 + }, + { + "epoch": 0.8928, + "grad_norm": 1.7471263408660889, + "learning_rate": 6.288768433177068e-05, + "loss": 2.6689, + "step": 558 + }, + { + "epoch": 0.896, + "grad_norm": 1.7717193365097046, + "learning_rate": 6.26317812750126e-05, + "loss": 2.7607, + "step": 560 + }, + { + "epoch": 0.8992, + "grad_norm": 1.6764250993728638, + "learning_rate": 6.237552428458256e-05, + "loss": 2.6201, + "step": 562 + }, + { + "epoch": 0.9024, + "grad_norm": 1.6406267881393433, + "learning_rate": 6.21189205406221e-05, + "loss": 2.7044, + "step": 564 + }, + { + "epoch": 0.9056, + "grad_norm": 1.7228118181228638, + "learning_rate": 6.186197723298855e-05, + "loss": 2.8106, + "step": 566 + }, + { + "epoch": 0.9088, + "grad_norm": 1.6297177076339722, + "learning_rate": 6.160470156105362e-05, + "loss": 2.7442, + "step": 568 + }, + { + "epoch": 0.912, + 
"grad_norm": 1.691156268119812, + "learning_rate": 6.134710073350156e-05, + "loss": 2.6915, + "step": 570 + }, + { + "epoch": 0.9152, + "grad_norm": 1.7338085174560547, + "learning_rate": 6.108918196812734e-05, + "loss": 2.6418, + "step": 572 + }, + { + "epoch": 0.9184, + "grad_norm": 1.7589360475540161, + "learning_rate": 6.083095249163424e-05, + "loss": 2.7577, + "step": 574 + }, + { + "epoch": 0.9216, + "grad_norm": 1.6277837753295898, + "learning_rate": 6.057241953943154e-05, + "loss": 2.6624, + "step": 576 + }, + { + "epoch": 0.9248, + "grad_norm": 1.7026268243789673, + "learning_rate": 6.031359035543158e-05, + "loss": 2.6401, + "step": 578 + }, + { + "epoch": 0.928, + "grad_norm": 1.781171202659607, + "learning_rate": 6.005447219184702e-05, + "loss": 2.7218, + "step": 580 + }, + { + "epoch": 0.9312, + "grad_norm": 1.7018693685531616, + "learning_rate": 5.9795072308987485e-05, + "loss": 2.5756, + "step": 582 + }, + { + "epoch": 0.9344, + "grad_norm": 1.710750699043274, + "learning_rate": 5.9535397975056154e-05, + "loss": 2.738, + "step": 584 + }, + { + "epoch": 0.9376, + "grad_norm": 1.7480794191360474, + "learning_rate": 5.927545646594617e-05, + "loss": 2.5714, + "step": 586 + }, + { + "epoch": 0.9408, + "grad_norm": 1.7700692415237427, + "learning_rate": 5.901525506503668e-05, + "loss": 2.8079, + "step": 588 + }, + { + "epoch": 0.944, + "grad_norm": 1.7823493480682373, + "learning_rate": 5.87548010629889e-05, + "loss": 2.6776, + "step": 590 + }, + { + "epoch": 0.9472, + "grad_norm": 1.6947530508041382, + "learning_rate": 5.8494101757541676e-05, + "loss": 2.6956, + "step": 592 + }, + { + "epoch": 0.9504, + "grad_norm": 3.6984357833862305, + "learning_rate": 5.8233164453307156e-05, + "loss": 2.7138, + "step": 594 + }, + { + "epoch": 0.9536, + "grad_norm": 1.7368056774139404, + "learning_rate": 5.797199646156596e-05, + "loss": 2.6646, + "step": 596 + }, + { + "epoch": 0.9568, + "grad_norm": 1.7944828271865845, + "learning_rate": 5.7710605100062485e-05, + "loss": 2.7675, + "step": 598 + }, + { + "epoch": 0.96, + "grad_norm": 1.6918281316757202, + "learning_rate": 5.7448997692799764e-05, + "loss": 2.5737, + "step": 600 + }, + { + "epoch": 0.9632, + "grad_norm": 1.6261357069015503, + "learning_rate": 5.718718156983428e-05, + "loss": 2.6307, + "step": 602 + }, + { + "epoch": 0.9664, + "grad_norm": 1.7582976818084717, + "learning_rate": 5.69251640670706e-05, + "loss": 2.6687, + "step": 604 + }, + { + "epoch": 0.9696, + "grad_norm": 1.6695537567138672, + "learning_rate": 5.6662952526055793e-05, + "loss": 2.5779, + "step": 606 + }, + { + "epoch": 0.9728, + "grad_norm": 1.7661280632019043, + "learning_rate": 5.6400554293773744e-05, + "loss": 2.7142, + "step": 608 + }, + { + "epoch": 0.976, + "grad_norm": 1.8265255689620972, + "learning_rate": 5.61379767224393e-05, + "loss": 2.749, + "step": 610 + }, + { + "epoch": 0.9792, + "grad_norm": 1.6744146347045898, + "learning_rate": 5.587522716929228e-05, + "loss": 2.5499, + "step": 612 + }, + { + "epoch": 0.9824, + "grad_norm": 1.6788204908370972, + "learning_rate": 5.561231299639127e-05, + "loss": 2.7511, + "step": 614 + }, + { + "epoch": 0.9856, + "grad_norm": 1.8337587118148804, + "learning_rate": 5.534924157040745e-05, + "loss": 2.7018, + "step": 616 + }, + { + "epoch": 0.9888, + "grad_norm": 1.706388235092163, + "learning_rate": 5.508602026241807e-05, + "loss": 2.6037, + "step": 618 + }, + { + "epoch": 0.992, + "grad_norm": 1.699271559715271, + "learning_rate": 5.482265644769998e-05, + "loss": 2.7222, + "step": 620 + }, + { + "epoch": 0.9952, + 
"grad_norm": 1.67972993850708, + "learning_rate": 5.4559157505522985e-05, + "loss": 2.4653, + "step": 622 + }, + { + "epoch": 0.9984, + "grad_norm": 1.6633968353271484, + "learning_rate": 5.429553081894304e-05, + "loss": 2.6239, + "step": 624 + }, + { + "epoch": 1.0016, + "grad_norm": 1.6871411800384521, + "learning_rate": 5.4031783774595455e-05, + "loss": 2.4794, + "step": 626 + }, + { + "epoch": 1.0048, + "grad_norm": 1.5942273139953613, + "learning_rate": 5.3767923762487824e-05, + "loss": 2.1013, + "step": 628 + }, + { + "epoch": 1.008, + "grad_norm": 1.6011604070663452, + "learning_rate": 5.3503958175793055e-05, + "loss": 2.139, + "step": 630 + }, + { + "epoch": 1.0112, + "grad_norm": 1.642553687095642, + "learning_rate": 5.323989441064216e-05, + "loss": 2.0493, + "step": 632 + }, + { + "epoch": 1.0144, + "grad_norm": 1.7297475337982178, + "learning_rate": 5.2975739865917074e-05, + "loss": 1.9964, + "step": 634 + }, + { + "epoch": 1.0176, + "grad_norm": 1.7036877870559692, + "learning_rate": 5.271150194304326e-05, + "loss": 2.0491, + "step": 636 + }, + { + "epoch": 1.0208, + "grad_norm": 1.6151326894760132, + "learning_rate": 5.244718804578246e-05, + "loss": 2.1586, + "step": 638 + }, + { + "epoch": 1.024, + "grad_norm": 1.6182959079742432, + "learning_rate": 5.218280558002506e-05, + "loss": 2.0155, + "step": 640 + }, + { + "epoch": 1.0272, + "grad_norm": 1.5469590425491333, + "learning_rate": 5.191836195358278e-05, + "loss": 2.0697, + "step": 642 + }, + { + "epoch": 1.0304, + "grad_norm": 1.574875831604004, + "learning_rate": 5.165386457598099e-05, + "loss": 2.0498, + "step": 644 + }, + { + "epoch": 1.0336, + "grad_norm": 1.6611751317977905, + "learning_rate": 5.13893208582511e-05, + "loss": 2.0674, + "step": 646 + }, + { + "epoch": 1.0368, + "grad_norm": 1.622523546218872, + "learning_rate": 5.1124738212722966e-05, + "loss": 2.1196, + "step": 648 + }, + { + "epoch": 1.04, + "grad_norm": 1.5410690307617188, + "learning_rate": 5.086012405281717e-05, + "loss": 2.1537, + "step": 650 + }, + { + "epoch": 1.0432, + "grad_norm": 1.564372181892395, + "learning_rate": 5.0595485792837305e-05, + "loss": 1.9727, + "step": 652 + }, + { + "epoch": 1.0464, + "grad_norm": 1.5646798610687256, + "learning_rate": 5.033083084776222e-05, + "loss": 2.1061, + "step": 654 + }, + { + "epoch": 1.0496, + "grad_norm": 1.661833643913269, + "learning_rate": 5.0066166633038305e-05, + "loss": 2.075, + "step": 656 + }, + { + "epoch": 1.0528, + "grad_norm": 1.6063848733901978, + "learning_rate": 4.980150056437163e-05, + "loss": 2.0384, + "step": 658 + }, + { + "epoch": 1.056, + "grad_norm": 1.7570315599441528, + "learning_rate": 4.9536840057520224e-05, + "loss": 2.1477, + "step": 660 + }, + { + "epoch": 1.0592, + "grad_norm": 1.6103792190551758, + "learning_rate": 4.927219252808631e-05, + "loss": 2.025, + "step": 662 + }, + { + "epoch": 1.0624, + "grad_norm": 1.6376656293869019, + "learning_rate": 4.900756539130846e-05, + "loss": 2.0121, + "step": 664 + }, + { + "epoch": 1.0656, + "grad_norm": 1.6290217638015747, + "learning_rate": 4.874296606185387e-05, + "loss": 1.9497, + "step": 666 + }, + { + "epoch": 1.0688, + "grad_norm": 1.622162938117981, + "learning_rate": 4.847840195361058e-05, + "loss": 2.1003, + "step": 668 + }, + { + "epoch": 1.072, + "grad_norm": 1.6676520109176636, + "learning_rate": 4.821388047947979e-05, + "loss": 2.0269, + "step": 670 + }, + { + "epoch": 1.0752, + "grad_norm": 1.6200871467590332, + "learning_rate": 4.7949409051168085e-05, + "loss": 2.186, + "step": 672 + }, + { + "epoch": 1.0784, + 
"grad_norm": 1.6458507776260376, + "learning_rate": 4.768499507897981e-05, + "loss": 2.1137, + "step": 674 + }, + { + "epoch": 1.0816, + "grad_norm": 1.5950709581375122, + "learning_rate": 4.742064597160948e-05, + "loss": 1.9582, + "step": 676 + }, + { + "epoch": 1.0848, + "grad_norm": 1.7151204347610474, + "learning_rate": 4.715636913593404e-05, + "loss": 2.0608, + "step": 678 + }, + { + "epoch": 1.088, + "grad_norm": 1.6758900880813599, + "learning_rate": 4.689217197680554e-05, + "loss": 1.9813, + "step": 680 + }, + { + "epoch": 1.0912, + "grad_norm": 1.7488733530044556, + "learning_rate": 4.6628061896843474e-05, + "loss": 1.983, + "step": 682 + }, + { + "epoch": 1.0944, + "grad_norm": 1.6792566776275635, + "learning_rate": 4.6364046296227484e-05, + "loss": 2.0934, + "step": 684 + }, + { + "epoch": 1.0976, + "grad_norm": 1.595950722694397, + "learning_rate": 4.6100132572489915e-05, + "loss": 2.0593, + "step": 686 + }, + { + "epoch": 1.1008, + "grad_norm": 1.6133977174758911, + "learning_rate": 4.5836328120308674e-05, + "loss": 2.1856, + "step": 688 + }, + { + "epoch": 1.104, + "grad_norm": 1.6371673345565796, + "learning_rate": 4.5572640331299875e-05, + "loss": 1.8256, + "step": 690 + }, + { + "epoch": 1.1072, + "grad_norm": 1.6217482089996338, + "learning_rate": 4.530907659381086e-05, + "loss": 2.0228, + "step": 692 + }, + { + "epoch": 1.1104, + "grad_norm": 1.8364999294281006, + "learning_rate": 4.504564429271311e-05, + "loss": 2.2268, + "step": 694 + }, + { + "epoch": 1.1136, + "grad_norm": 1.6585414409637451, + "learning_rate": 4.478235080919536e-05, + "loss": 2.1736, + "step": 696 + }, + { + "epoch": 1.1168, + "grad_norm": 1.7598919868469238, + "learning_rate": 4.451920352055678e-05, + "loss": 2.0017, + "step": 698 + }, + { + "epoch": 1.12, + "grad_norm": 1.659216284751892, + "learning_rate": 4.425620980000026e-05, + "loss": 2.1184, + "step": 700 + }, + { + "epoch": 1.1232, + "grad_norm": 1.6339902877807617, + "learning_rate": 4.39933770164258e-05, + "loss": 2.0396, + "step": 702 + }, + { + "epoch": 1.1264, + "grad_norm": 1.713364839553833, + "learning_rate": 4.373071253422408e-05, + "loss": 2.0963, + "step": 704 + }, + { + "epoch": 1.1296, + "grad_norm": 1.728036642074585, + "learning_rate": 4.346822371307009e-05, + "loss": 2.026, + "step": 706 + }, + { + "epoch": 1.1328, + "grad_norm": 1.6496070623397827, + "learning_rate": 4.320591790771691e-05, + "loss": 2.0528, + "step": 708 + }, + { + "epoch": 1.1360000000000001, + "grad_norm": 1.7058520317077637, + "learning_rate": 4.294380246778966e-05, + "loss": 1.9498, + "step": 710 + }, + { + "epoch": 1.1392, + "grad_norm": 1.5882078409194946, + "learning_rate": 4.2681884737579524e-05, + "loss": 1.9813, + "step": 712 + }, + { + "epoch": 1.1424, + "grad_norm": 1.7214953899383545, + "learning_rate": 4.242017205583805e-05, + "loss": 2.0316, + "step": 714 + }, + { + "epoch": 1.1456, + "grad_norm": 1.6648966073989868, + "learning_rate": 4.215867175557142e-05, + "loss": 2.0019, + "step": 716 + }, + { + "epoch": 1.1488, + "grad_norm": 1.6206002235412598, + "learning_rate": 4.189739116383506e-05, + "loss": 1.9073, + "step": 718 + }, + { + "epoch": 1.152, + "grad_norm": 1.6015231609344482, + "learning_rate": 4.163633760152834e-05, + "loss": 2.013, + "step": 720 + }, + { + "epoch": 1.1552, + "grad_norm": 1.6537818908691406, + "learning_rate": 4.137551838318936e-05, + "loss": 2.1472, + "step": 722 + }, + { + "epoch": 1.1584, + "grad_norm": 1.9408031702041626, + "learning_rate": 4.1114940816790135e-05, + "loss": 2.1239, + "step": 724 + }, + { + 
"epoch": 1.1616, + "grad_norm": 1.7310969829559326, + "learning_rate": 4.08546122035317e-05, + "loss": 2.1907, + "step": 726 + }, + { + "epoch": 1.1648, + "grad_norm": 1.6550313234329224, + "learning_rate": 4.059453983763967e-05, + "loss": 2.0719, + "step": 728 + }, + { + "epoch": 1.168, + "grad_norm": 1.8182940483093262, + "learning_rate": 4.03347310061597e-05, + "loss": 2.0437, + "step": 730 + }, + { + "epoch": 1.1712, + "grad_norm": 1.6281683444976807, + "learning_rate": 4.007519298875347e-05, + "loss": 2.0488, + "step": 732 + }, + { + "epoch": 1.1743999999999999, + "grad_norm": 3.110891342163086, + "learning_rate": 3.98159330574946e-05, + "loss": 2.0592, + "step": 734 + }, + { + "epoch": 1.1776, + "grad_norm": 1.7148778438568115, + "learning_rate": 3.955695847666494e-05, + "loss": 2.0754, + "step": 736 + }, + { + "epoch": 1.1808, + "grad_norm": 1.6069791316986084, + "learning_rate": 3.929827650255104e-05, + "loss": 1.9366, + "step": 738 + }, + { + "epoch": 1.184, + "grad_norm": 1.7344155311584473, + "learning_rate": 3.903989438324077e-05, + "loss": 2.1932, + "step": 740 + }, + { + "epoch": 1.1872, + "grad_norm": 1.7402360439300537, + "learning_rate": 3.878181935842033e-05, + "loss": 2.1352, + "step": 742 + }, + { + "epoch": 1.1904, + "grad_norm": 1.7004384994506836, + "learning_rate": 3.85240586591713e-05, + "loss": 1.9779, + "step": 744 + }, + { + "epoch": 1.1936, + "grad_norm": 1.8418961763381958, + "learning_rate": 3.8266619507768126e-05, + "loss": 2.2648, + "step": 746 + }, + { + "epoch": 1.1968, + "grad_norm": 1.7444120645523071, + "learning_rate": 3.800950911747565e-05, + "loss": 2.0055, + "step": 748 + }, + { + "epoch": 1.2, + "grad_norm": 1.709296703338623, + "learning_rate": 3.775273469234712e-05, + "loss": 2.1322, + "step": 750 + }, + { + "epoch": 1.2032, + "grad_norm": 1.6528595685958862, + "learning_rate": 3.749630342702221e-05, + "loss": 2.0173, + "step": 752 + }, + { + "epoch": 1.2064, + "grad_norm": 1.86497163772583, + "learning_rate": 3.724022250652557e-05, + "loss": 2.2187, + "step": 754 + }, + { + "epoch": 1.2096, + "grad_norm": 1.6813569068908691, + "learning_rate": 3.698449910606536e-05, + "loss": 1.9149, + "step": 756 + }, + { + "epoch": 1.2128, + "grad_norm": 1.4694783687591553, + "learning_rate": 3.672914039083233e-05, + "loss": 1.9928, + "step": 758 + }, + { + "epoch": 1.216, + "grad_norm": 1.6934654712677002, + "learning_rate": 3.6474153515799e-05, + "loss": 2.0018, + "step": 760 + }, + { + "epoch": 1.2192, + "grad_norm": 1.7023409605026245, + "learning_rate": 3.6219545625519145e-05, + "loss": 1.9795, + "step": 762 + }, + { + "epoch": 1.2224, + "grad_norm": 1.5944123268127441, + "learning_rate": 3.596532385392772e-05, + "loss": 1.9711, + "step": 764 + }, + { + "epoch": 1.2256, + "grad_norm": 1.665781855583191, + "learning_rate": 3.5711495324140845e-05, + "loss": 2.0697, + "step": 766 + }, + { + "epoch": 1.2288000000000001, + "grad_norm": 1.6670796871185303, + "learning_rate": 3.54580671482563e-05, + "loss": 2.0016, + "step": 768 + }, + { + "epoch": 1.232, + "grad_norm": 1.81361985206604, + "learning_rate": 3.520504642715424e-05, + "loss": 2.1093, + "step": 770 + }, + { + "epoch": 1.2352, + "grad_norm": 1.6931331157684326, + "learning_rate": 3.495244025029822e-05, + "loss": 2.0959, + "step": 772 + }, + { + "epoch": 1.2384, + "grad_norm": 1.6072664260864258, + "learning_rate": 3.470025569553653e-05, + "loss": 1.9525, + "step": 774 + }, + { + "epoch": 1.2416, + "grad_norm": 1.5714666843414307, + "learning_rate": 3.444849982890393e-05, + "loss": 1.9701, + "step": 
776 + }, + { + "epoch": 1.2448, + "grad_norm": 1.7283622026443481, + "learning_rate": 3.4197179704423656e-05, + "loss": 2.1773, + "step": 778 + }, + { + "epoch": 1.248, + "grad_norm": 1.5926114320755005, + "learning_rate": 3.39463023639097e-05, + "loss": 1.9916, + "step": 780 + }, + { + "epoch": 1.2511999999999999, + "grad_norm": 1.6143758296966553, + "learning_rate": 3.36958748367696e-05, + "loss": 2.0623, + "step": 782 + }, + { + "epoch": 1.2544, + "grad_norm": 1.5428004264831543, + "learning_rate": 3.3445904139807405e-05, + "loss": 1.966, + "step": 784 + }, + { + "epoch": 1.2576, + "grad_norm": 1.7031697034835815, + "learning_rate": 3.319639727702716e-05, + "loss": 2.1779, + "step": 786 + }, + { + "epoch": 1.2608, + "grad_norm": 1.5835998058319092, + "learning_rate": 3.2947361239436525e-05, + "loss": 1.9549, + "step": 788 + }, + { + "epoch": 1.264, + "grad_norm": 1.7775769233703613, + "learning_rate": 3.2698803004851026e-05, + "loss": 1.9312, + "step": 790 + }, + { + "epoch": 1.2671999999999999, + "grad_norm": 1.583977460861206, + "learning_rate": 3.245072953769844e-05, + "loss": 1.8452, + "step": 792 + }, + { + "epoch": 1.2704, + "grad_norm": 1.5791876316070557, + "learning_rate": 3.2203147788823764e-05, + "loss": 2.0222, + "step": 794 + }, + { + "epoch": 1.2736, + "grad_norm": 1.6620211601257324, + "learning_rate": 3.1956064695294305e-05, + "loss": 2.0612, + "step": 796 + }, + { + "epoch": 1.2768, + "grad_norm": 1.6324514150619507, + "learning_rate": 3.170948718020546e-05, + "loss": 2.0428, + "step": 798 + }, + { + "epoch": 1.28, + "grad_norm": 1.6789875030517578, + "learning_rate": 3.1463422152486674e-05, + "loss": 1.967, + "step": 800 + }, + { + "epoch": 1.2832, + "grad_norm": 2.000178337097168, + "learning_rate": 3.121787650670781e-05, + "loss": 1.9851, + "step": 802 + }, + { + "epoch": 1.2864, + "grad_norm": 1.7736194133758545, + "learning_rate": 3.097285712288605e-05, + "loss": 1.8732, + "step": 804 + }, + { + "epoch": 1.2896, + "grad_norm": 1.7309801578521729, + "learning_rate": 3.072837086629305e-05, + "loss": 2.0212, + "step": 806 + }, + { + "epoch": 1.2928, + "grad_norm": 1.7109277248382568, + "learning_rate": 3.0484424587262655e-05, + "loss": 2.0203, + "step": 808 + }, + { + "epoch": 1.296, + "grad_norm": 1.6125346422195435, + "learning_rate": 3.024102512099889e-05, + "loss": 2.075, + "step": 810 + }, + { + "epoch": 1.2992, + "grad_norm": 1.661042332649231, + "learning_rate": 2.9998179287384485e-05, + "loss": 2.1594, + "step": 812 + }, + { + "epoch": 1.3024, + "grad_norm": 1.6753617525100708, + "learning_rate": 2.975589389078975e-05, + "loss": 2.0397, + "step": 814 + }, + { + "epoch": 1.3056, + "grad_norm": 1.6913373470306396, + "learning_rate": 2.9514175719881966e-05, + "loss": 2.0017, + "step": 816 + }, + { + "epoch": 1.3088, + "grad_norm": 1.6717087030410767, + "learning_rate": 2.9273031547435114e-05, + "loss": 2.1042, + "step": 818 + }, + { + "epoch": 1.312, + "grad_norm": 1.7565637826919556, + "learning_rate": 2.9032468130140168e-05, + "loss": 2.1539, + "step": 820 + }, + { + "epoch": 1.3152, + "grad_norm": 1.6280590295791626, + "learning_rate": 2.8792492208415768e-05, + "loss": 1.9744, + "step": 822 + }, + { + "epoch": 1.3184, + "grad_norm": 1.5978885889053345, + "learning_rate": 2.8553110506219283e-05, + "loss": 2.0044, + "step": 824 + }, + { + "epoch": 1.3216, + "grad_norm": 1.6840837001800537, + "learning_rate": 2.831432973085848e-05, + "loss": 2.062, + "step": 826 + }, + { + "epoch": 1.3248, + "grad_norm": 1.5866726636886597, + "learning_rate": 
2.8076156572803635e-05, + "loss": 1.9896, + "step": 828 + }, + { + "epoch": 1.328, + "grad_norm": 1.5741474628448486, + "learning_rate": 2.783859770549996e-05, + "loss": 1.9465, + "step": 830 + }, + { + "epoch": 1.3312, + "grad_norm": 1.683282732963562, + "learning_rate": 2.760165978518067e-05, + "loss": 2.0189, + "step": 832 + }, + { + "epoch": 1.3344, + "grad_norm": 1.6024737358093262, + "learning_rate": 2.7365349450680466e-05, + "loss": 1.941, + "step": 834 + }, + { + "epoch": 1.3376000000000001, + "grad_norm": 1.6115479469299316, + "learning_rate": 2.7129673323249604e-05, + "loss": 1.9948, + "step": 836 + }, + { + "epoch": 1.3408, + "grad_norm": 1.6458511352539062, + "learning_rate": 2.689463800636824e-05, + "loss": 2.0941, + "step": 838 + }, + { + "epoch": 1.3439999999999999, + "grad_norm": 1.5999996662139893, + "learning_rate": 2.6660250085561457e-05, + "loss": 2.081, + "step": 840 + }, + { + "epoch": 1.3472, + "grad_norm": 1.6597297191619873, + "learning_rate": 2.6426516128214807e-05, + "loss": 1.8927, + "step": 842 + }, + { + "epoch": 1.3504, + "grad_norm": 1.7434853315353394, + "learning_rate": 2.619344268339021e-05, + "loss": 2.0059, + "step": 844 + }, + { + "epoch": 1.3536000000000001, + "grad_norm": 2.4289755821228027, + "learning_rate": 2.5961036281642493e-05, + "loss": 2.1133, + "step": 846 + }, + { + "epoch": 1.3568, + "grad_norm": 1.623465657234192, + "learning_rate": 2.572930343483637e-05, + "loss": 1.9284, + "step": 848 + }, + { + "epoch": 1.3599999999999999, + "grad_norm": 1.9334876537322998, + "learning_rate": 2.54982506359641e-05, + "loss": 2.0351, + "step": 850 + }, + { + "epoch": 1.3632, + "grad_norm": 1.7356194257736206, + "learning_rate": 2.526788435896339e-05, + "loss": 2.0995, + "step": 852 + }, + { + "epoch": 1.3664, + "grad_norm": 1.581726312637329, + "learning_rate": 2.5038211058536133e-05, + "loss": 1.9878, + "step": 854 + }, + { + "epoch": 1.3696, + "grad_norm": 1.7448982000350952, + "learning_rate": 2.4809237169967458e-05, + "loss": 2.0996, + "step": 856 + }, + { + "epoch": 1.3728, + "grad_norm": 1.6610596179962158, + "learning_rate": 2.4580969108945533e-05, + "loss": 1.9928, + "step": 858 + }, + { + "epoch": 1.376, + "grad_norm": 1.6751465797424316, + "learning_rate": 2.435341327138168e-05, + "loss": 2.0282, + "step": 860 + }, + { + "epoch": 1.3792, + "grad_norm": 1.7888554334640503, + "learning_rate": 2.4126576033231208e-05, + "loss": 1.9525, + "step": 862 + }, + { + "epoch": 1.3824, + "grad_norm": 1.66560959815979, + "learning_rate": 2.3900463750314834e-05, + "loss": 1.984, + "step": 864 + }, + { + "epoch": 1.3856, + "grad_norm": 1.6778205633163452, + "learning_rate": 2.3675082758140475e-05, + "loss": 1.8466, + "step": 866 + }, + { + "epoch": 1.3888, + "grad_norm": 1.6759799718856812, + "learning_rate": 2.3450439371725825e-05, + "loss": 2.0435, + "step": 868 + }, + { + "epoch": 1.392, + "grad_norm": 1.6375926733016968, + "learning_rate": 2.3226539885421343e-05, + "loss": 1.9285, + "step": 870 + }, + { + "epoch": 1.3952, + "grad_norm": 1.7163138389587402, + "learning_rate": 2.3003390572734006e-05, + "loss": 2.0509, + "step": 872 + }, + { + "epoch": 1.3984, + "grad_norm": 1.7290489673614502, + "learning_rate": 2.2780997686151378e-05, + "loss": 2.0219, + "step": 874 + }, + { + "epoch": 1.4016, + "grad_norm": 1.5673465728759766, + "learning_rate": 2.255936745696652e-05, + "loss": 2.0706, + "step": 876 + }, + { + "epoch": 1.4048, + "grad_norm": 1.7429379224777222, + "learning_rate": 2.2338506095103334e-05, + "loss": 1.9849, + "step": 878 + }, + { + "epoch": 
1.408, + "grad_norm": 1.6174511909484863, + "learning_rate": 2.2118419788942672e-05, + "loss": 1.958, + "step": 880 + }, + { + "epoch": 1.4112, + "grad_norm": 1.653412938117981, + "learning_rate": 2.189911470514881e-05, + "loss": 1.9639, + "step": 882 + }, + { + "epoch": 1.4144, + "grad_norm": 1.7575905323028564, + "learning_rate": 2.1680596988496705e-05, + "loss": 2.0275, + "step": 884 + }, + { + "epoch": 1.4176, + "grad_norm": 1.4820189476013184, + "learning_rate": 2.1462872761699905e-05, + "loss": 1.8751, + "step": 886 + }, + { + "epoch": 1.4208, + "grad_norm": 1.558645248413086, + "learning_rate": 2.1245948125238867e-05, + "loss": 1.8586, + "step": 888 + }, + { + "epoch": 1.424, + "grad_norm": 1.6014423370361328, + "learning_rate": 2.1029829157190117e-05, + "loss": 1.9532, + "step": 890 + }, + { + "epoch": 1.4272, + "grad_norm": 1.5966588258743286, + "learning_rate": 2.081452191305587e-05, + "loss": 1.9198, + "step": 892 + }, + { + "epoch": 1.4304000000000001, + "grad_norm": 1.5865297317504883, + "learning_rate": 2.06000324255945e-05, + "loss": 1.907, + "step": 894 + }, + { + "epoch": 1.4336, + "grad_norm": 1.6410948038101196, + "learning_rate": 2.0386366704651315e-05, + "loss": 2.1222, + "step": 896 + }, + { + "epoch": 1.4368, + "grad_norm": 1.6076593399047852, + "learning_rate": 2.0173530736990304e-05, + "loss": 1.8874, + "step": 898 + }, + { + "epoch": 1.44, + "grad_norm": 1.610167384147644, + "learning_rate": 1.9961530486126327e-05, + "loss": 2.074, + "step": 900 + }, + { + "epoch": 1.4432, + "grad_norm": 1.7789791822433472, + "learning_rate": 1.9750371892158103e-05, + "loss": 2.0396, + "step": 902 + }, + { + "epoch": 1.4464000000000001, + "grad_norm": 1.6089529991149902, + "learning_rate": 1.9540060871601646e-05, + "loss": 2.0706, + "step": 904 + }, + { + "epoch": 1.4496, + "grad_norm": 1.6056288480758667, + "learning_rate": 1.933060331722457e-05, + "loss": 1.8598, + "step": 906 + }, + { + "epoch": 1.4527999999999999, + "grad_norm": 1.5929008722305298, + "learning_rate": 1.9122005097881014e-05, + "loss": 1.9494, + "step": 908 + }, + { + "epoch": 1.456, + "grad_norm": 1.6866933107376099, + "learning_rate": 1.8914272058347088e-05, + "loss": 1.8477, + "step": 910 + }, + { + "epoch": 1.4592, + "grad_norm": 1.614665150642395, + "learning_rate": 1.8707410019157196e-05, + "loss": 1.907, + "step": 912 + }, + { + "epoch": 1.4624, + "grad_norm": 1.5032063722610474, + "learning_rate": 1.8501424776440907e-05, + "loss": 1.9152, + "step": 914 + }, + { + "epoch": 1.4656, + "grad_norm": 1.6080886125564575, + "learning_rate": 1.829632210176061e-05, + "loss": 1.9224, + "step": 916 + }, + { + "epoch": 1.4687999999999999, + "grad_norm": 1.6803348064422607, + "learning_rate": 1.809210774194971e-05, + "loss": 1.9741, + "step": 918 + }, + { + "epoch": 1.472, + "grad_norm": 1.7053711414337158, + "learning_rate": 1.7888787418951645e-05, + "loss": 2.0472, + "step": 920 + }, + { + "epoch": 1.4752, + "grad_norm": 1.6643372774124146, + "learning_rate": 1.7686366829659628e-05, + "loss": 2.0729, + "step": 922 + }, + { + "epoch": 1.4784, + "grad_norm": 1.6798537969589233, + "learning_rate": 1.74848516457569e-05, + "loss": 1.9989, + "step": 924 + }, + { + "epoch": 1.4816, + "grad_norm": 1.6798433065414429, + "learning_rate": 1.72842475135579e-05, + "loss": 2.0111, + "step": 926 + }, + { + "epoch": 1.4848, + "grad_norm": 1.5733171701431274, + "learning_rate": 1.7084560053850024e-05, + "loss": 2.0498, + "step": 928 + }, + { + "epoch": 1.488, + "grad_norm": 1.6396392583847046, + "learning_rate": 
1.6885794861736183e-05, + "loss": 1.9701, + "step": 930 + }, + { + "epoch": 1.4912, + "grad_norm": 1.628389596939087, + "learning_rate": 1.668795750647796e-05, + "loss": 2.0453, + "step": 932 + }, + { + "epoch": 1.4944, + "grad_norm": 1.580734372138977, + "learning_rate": 1.6491053531339607e-05, + "loss": 1.908, + "step": 934 + }, + { + "epoch": 1.4976, + "grad_norm": 1.6267027854919434, + "learning_rate": 1.62950884534327e-05, + "loss": 1.949, + "step": 936 + }, + { + "epoch": 1.5008, + "grad_norm": 1.5968527793884277, + "learning_rate": 1.6100067763561626e-05, + "loss": 1.9075, + "step": 938 + }, + { + "epoch": 1.504, + "grad_norm": 1.6220203638076782, + "learning_rate": 1.5905996926069628e-05, + "loss": 1.8555, + "step": 940 + }, + { + "epoch": 1.5072, + "grad_norm": 1.6823129653930664, + "learning_rate": 1.5712881378685755e-05, + "loss": 2.083, + "step": 942 + }, + { + "epoch": 1.5104, + "grad_norm": 1.634656548500061, + "learning_rate": 1.5520726532372537e-05, + "loss": 1.9641, + "step": 944 + }, + { + "epoch": 1.5135999999999998, + "grad_norm": 1.5676469802856445, + "learning_rate": 1.532953777117429e-05, + "loss": 1.9694, + "step": 946 + }, + { + "epoch": 1.5168, + "grad_norm": 1.652653455734253, + "learning_rate": 1.5139320452066313e-05, + "loss": 1.9754, + "step": 948 + }, + { + "epoch": 1.52, + "grad_norm": 1.5929044485092163, + "learning_rate": 1.4950079904804759e-05, + "loss": 1.9062, + "step": 950 + }, + { + "epoch": 1.5232, + "grad_norm": 1.6427743434906006, + "learning_rate": 1.4761821431777373e-05, + "loss": 2.0017, + "step": 952 + }, + { + "epoch": 1.5264, + "grad_norm": 1.5246950387954712, + "learning_rate": 1.4574550307854817e-05, + "loss": 1.9427, + "step": 954 + }, + { + "epoch": 1.5295999999999998, + "grad_norm": 1.6303575038909912, + "learning_rate": 1.4388271780242929e-05, + "loss": 1.8824, + "step": 956 + }, + { + "epoch": 1.5328, + "grad_norm": 1.690637469291687, + "learning_rate": 1.4202991068335697e-05, + "loss": 2.0095, + "step": 958 + }, + { + "epoch": 1.536, + "grad_norm": 1.7552894353866577, + "learning_rate": 1.4018713363569035e-05, + "loss": 2.0734, + "step": 960 + }, + { + "epoch": 1.5392000000000001, + "grad_norm": 1.633931279182434, + "learning_rate": 1.3835443829275268e-05, + "loss": 1.8231, + "step": 962 + }, + { + "epoch": 1.5424, + "grad_norm": 1.586737036705017, + "learning_rate": 1.365318760053848e-05, + "loss": 1.9087, + "step": 964 + }, + { + "epoch": 1.5455999999999999, + "grad_norm": 1.5858091115951538, + "learning_rate": 1.3471949784050702e-05, + "loss": 1.8308, + "step": 966 + }, + { + "epoch": 1.5488, + "grad_norm": 1.6560001373291016, + "learning_rate": 1.3291735457968701e-05, + "loss": 2.0853, + "step": 968 + }, + { + "epoch": 1.552, + "grad_norm": 1.6715130805969238, + "learning_rate": 1.3112549671771796e-05, + "loss": 2.0848, + "step": 970 + }, + { + "epoch": 1.5552000000000001, + "grad_norm": 1.6843191385269165, + "learning_rate": 1.2934397446120306e-05, + "loss": 2.0182, + "step": 972 + }, + { + "epoch": 1.5584, + "grad_norm": 1.616097092628479, + "learning_rate": 1.2757283772714957e-05, + "loss": 1.9008, + "step": 974 + }, + { + "epoch": 1.5615999999999999, + "grad_norm": 1.5122194290161133, + "learning_rate": 1.2581213614156928e-05, + "loss": 1.7905, + "step": 976 + }, + { + "epoch": 1.5648, + "grad_norm": 1.6870348453521729, + "learning_rate": 1.2406191903808844e-05, + "loss": 2.1301, + "step": 978 + }, + { + "epoch": 1.568, + "grad_norm": 1.5509247779846191, + "learning_rate": 1.2232223545656552e-05, + "loss": 1.8702, + "step": 
980 + }, + { + "epoch": 1.5712000000000002, + "grad_norm": 1.5840715169906616, + "learning_rate": 1.205931341417173e-05, + "loss": 2.0163, + "step": 982 + }, + { + "epoch": 1.5744, + "grad_norm": 1.659145712852478, + "learning_rate": 1.1887466354175253e-05, + "loss": 1.8171, + "step": 984 + }, + { + "epoch": 1.5776, + "grad_norm": 1.7107597589492798, + "learning_rate": 1.1716687180701474e-05, + "loss": 2.0593, + "step": 986 + }, + { + "epoch": 1.5808, + "grad_norm": 1.5526190996170044, + "learning_rate": 1.1546980678863361e-05, + "loss": 1.8897, + "step": 988 + }, + { + "epoch": 1.584, + "grad_norm": 1.6252529621124268, + "learning_rate": 1.1378351603718312e-05, + "loss": 1.9001, + "step": 990 + }, + { + "epoch": 1.5872000000000002, + "grad_norm": 1.64818274974823, + "learning_rate": 1.1210804680135022e-05, + "loss": 2.0044, + "step": 992 + }, + { + "epoch": 1.5904, + "grad_norm": 1.6926898956298828, + "learning_rate": 1.1044344602661034e-05, + "loss": 2.1088, + "step": 994 + }, + { + "epoch": 1.5936, + "grad_norm": 1.7737877368927002, + "learning_rate": 1.0878976035391252e-05, + "loss": 1.9949, + "step": 996 + }, + { + "epoch": 1.5968, + "grad_norm": 1.6282092332839966, + "learning_rate": 1.0714703611837201e-05, + "loss": 1.9759, + "step": 998 + }, + { + "epoch": 1.6, + "grad_norm": 1.5869003534317017, + "learning_rate": 1.0551531934797243e-05, + "loss": 2.0077, + "step": 1000 + }, + { + "epoch": 1.6032, + "grad_norm": 1.5880354642868042, + "learning_rate": 1.0389465576227558e-05, + "loss": 2.0529, + "step": 1002 + }, + { + "epoch": 1.6064, + "grad_norm": 1.796076774597168, + "learning_rate": 1.0228509077114146e-05, + "loss": 1.9833, + "step": 1004 + }, + { + "epoch": 1.6096, + "grad_norm": 1.6073834896087646, + "learning_rate": 1.0068666947345456e-05, + "loss": 1.8307, + "step": 1006 + }, + { + "epoch": 1.6128, + "grad_norm": 1.637624740600586, + "learning_rate": 9.909943665586102e-06, + "loss": 1.9477, + "step": 1008 + }, + { + "epoch": 1.616, + "grad_norm": 1.6284286975860596, + "learning_rate": 9.752343679151399e-06, + "loss": 1.9163, + "step": 1010 + }, + { + "epoch": 1.6192, + "grad_norm": 1.5366007089614868, + "learning_rate": 9.595871403882661e-06, + "loss": 1.8232, + "step": 1012 + }, + { + "epoch": 1.6223999999999998, + "grad_norm": 1.5945929288864136, + "learning_rate": 9.440531224023552e-06, + "loss": 1.9313, + "step": 1014 + }, + { + "epoch": 1.6256, + "grad_norm": 1.589713454246521, + "learning_rate": 9.286327492097196e-06, + "loss": 1.8511, + "step": 1016 + }, + { + "epoch": 1.6288, + "grad_norm": 1.6770306825637817, + "learning_rate": 9.133264528784274e-06, + "loss": 2.1287, + "step": 1018 + }, + { + "epoch": 1.6320000000000001, + "grad_norm": 1.5880622863769531, + "learning_rate": 8.981346622801905e-06, + "loss": 1.9876, + "step": 1020 + }, + { + "epoch": 1.6352, + "grad_norm": 1.6425656080245972, + "learning_rate": 8.830578030783493e-06, + "loss": 1.9793, + "step": 1022 + }, + { + "epoch": 1.6383999999999999, + "grad_norm": 1.7399017810821533, + "learning_rate": 8.680962977159502e-06, + "loss": 1.9023, + "step": 1024 + }, + { + "epoch": 1.6416, + "grad_norm": 1.6427820920944214, + "learning_rate": 8.53250565403903e-06, + "loss": 1.897, + "step": 1026 + }, + { + "epoch": 1.6448, + "grad_norm": 1.5444337129592896, + "learning_rate": 8.385210221092382e-06, + "loss": 1.8902, + "step": 1028 + }, + { + "epoch": 1.6480000000000001, + "grad_norm": 1.6862332820892334, + "learning_rate": 8.239080805434513e-06, + "loss": 1.9339, + "step": 1030 + }, + { + "epoch": 1.6512, + 
"grad_norm": 1.5926483869552612, + "learning_rate": 8.094121501509399e-06, + "loss": 1.8791, + "step": 1032 + }, + { + "epoch": 1.6543999999999999, + "grad_norm": 1.634626865386963, + "learning_rate": 7.950336370975304e-06, + "loss": 1.9781, + "step": 1034 + }, + { + "epoch": 1.6576, + "grad_norm": 1.6866514682769775, + "learning_rate": 7.80772944259096e-06, + "loss": 1.9539, + "step": 1036 + }, + { + "epoch": 1.6608, + "grad_norm": 1.58961820602417, + "learning_rate": 7.666304712102695e-06, + "loss": 1.669, + "step": 1038 + }, + { + "epoch": 1.6640000000000001, + "grad_norm": 1.800206184387207, + "learning_rate": 7.526066142132521e-06, + "loss": 1.9265, + "step": 1040 + }, + { + "epoch": 1.6672, + "grad_norm": 1.6069746017456055, + "learning_rate": 7.3870176620670194e-06, + "loss": 1.8683, + "step": 1042 + }, + { + "epoch": 1.6703999999999999, + "grad_norm": 1.639652132987976, + "learning_rate": 7.249163167947287e-06, + "loss": 1.8093, + "step": 1044 + }, + { + "epoch": 1.6736, + "grad_norm": 1.6031360626220703, + "learning_rate": 7.1125065223598076e-06, + "loss": 2.0082, + "step": 1046 + }, + { + "epoch": 1.6768, + "grad_norm": 1.6937788724899292, + "learning_rate": 6.9770515543281455e-06, + "loss": 2.0, + "step": 1048 + }, + { + "epoch": 1.6800000000000002, + "grad_norm": 1.6093850135803223, + "learning_rate": 6.842802059205727e-06, + "loss": 2.0147, + "step": 1050 + }, + { + "epoch": 1.6832, + "grad_norm": 1.541329026222229, + "learning_rate": 6.709761798569442e-06, + "loss": 1.8566, + "step": 1052 + }, + { + "epoch": 1.6864, + "grad_norm": 1.5949456691741943, + "learning_rate": 6.577934500114335e-06, + "loss": 1.873, + "step": 1054 + }, + { + "epoch": 1.6896, + "grad_norm": 1.6678704023361206, + "learning_rate": 6.44732385754902e-06, + "loss": 1.8975, + "step": 1056 + }, + { + "epoch": 1.6928, + "grad_norm": 1.7647327184677124, + "learning_rate": 6.3179335304923095e-06, + "loss": 1.932, + "step": 1058 + }, + { + "epoch": 1.696, + "grad_norm": 1.6539088487625122, + "learning_rate": 6.189767144370645e-06, + "loss": 1.9846, + "step": 1060 + }, + { + "epoch": 1.6992, + "grad_norm": 2.910256862640381, + "learning_rate": 6.062828290316469e-06, + "loss": 2.0498, + "step": 1062 + }, + { + "epoch": 1.7024, + "grad_norm": 1.6432437896728516, + "learning_rate": 5.937120525067641e-06, + "loss": 1.9539, + "step": 1064 + }, + { + "epoch": 1.7056, + "grad_norm": 1.6407873630523682, + "learning_rate": 5.812647370867763e-06, + "loss": 1.9476, + "step": 1066 + }, + { + "epoch": 1.7088, + "grad_norm": 1.617720603942871, + "learning_rate": 5.689412315367543e-06, + "loss": 1.9902, + "step": 1068 + }, + { + "epoch": 1.712, + "grad_norm": 1.5690410137176514, + "learning_rate": 5.567418811526981e-06, + "loss": 1.8755, + "step": 1070 + }, + { + "epoch": 1.7151999999999998, + "grad_norm": 1.602350115776062, + "learning_rate": 5.4466702775186785e-06, + "loss": 1.9992, + "step": 1072 + }, + { + "epoch": 1.7184, + "grad_norm": 1.511695384979248, + "learning_rate": 5.327170096632089e-06, + "loss": 1.749, + "step": 1074 + }, + { + "epoch": 1.7216, + "grad_norm": 1.547643780708313, + "learning_rate": 5.208921617178641e-06, + "loss": 1.8993, + "step": 1076 + }, + { + "epoch": 1.7248, + "grad_norm": 1.7196307182312012, + "learning_rate": 5.091928152397984e-06, + "loss": 1.9801, + "step": 1078 + }, + { + "epoch": 1.728, + "grad_norm": 1.7015469074249268, + "learning_rate": 4.976192980365124e-06, + "loss": 1.7825, + "step": 1080 + }, + { + "epoch": 1.7311999999999999, + "grad_norm": 1.688491702079773, + "learning_rate": 
4.861719343898613e-06, + "loss": 2.0279, + "step": 1082 + }, + { + "epoch": 1.7344, + "grad_norm": 1.6017239093780518, + "learning_rate": 4.748510450469623e-06, + "loss": 1.8703, + "step": 1084 + }, + { + "epoch": 1.7376, + "grad_norm": 1.6276230812072754, + "learning_rate": 4.63656947211214e-06, + "loss": 1.9359, + "step": 1086 + }, + { + "epoch": 1.7408000000000001, + "grad_norm": 1.5692756175994873, + "learning_rate": 4.525899545334023e-06, + "loss": 1.9388, + "step": 1088 + }, + { + "epoch": 1.744, + "grad_norm": 1.6018142700195312, + "learning_rate": 4.416503771029201e-06, + "loss": 1.8337, + "step": 1090 + }, + { + "epoch": 1.7471999999999999, + "grad_norm": 1.6205910444259644, + "learning_rate": 4.308385214390709e-06, + "loss": 1.9551, + "step": 1092 + }, + { + "epoch": 1.7504, + "grad_norm": 1.5405246019363403, + "learning_rate": 4.2015469048248375e-06, + "loss": 1.824, + "step": 1094 + }, + { + "epoch": 1.7536, + "grad_norm": 1.6863925457000732, + "learning_rate": 4.095991835866275e-06, + "loss": 2.0388, + "step": 1096 + }, + { + "epoch": 1.7568000000000001, + "grad_norm": 1.59629225730896, + "learning_rate": 3.99172296509418e-06, + "loss": 1.9307, + "step": 1098 + }, + { + "epoch": 1.76, + "grad_norm": 1.6163777112960815, + "learning_rate": 3.888743214049346e-06, + "loss": 1.9041, + "step": 1100 + }, + { + "epoch": 1.7631999999999999, + "grad_norm": 1.6829643249511719, + "learning_rate": 3.7870554681523287e-06, + "loss": 1.9971, + "step": 1102 + }, + { + "epoch": 1.7664, + "grad_norm": 1.614225149154663, + "learning_rate": 3.6866625766226293e-06, + "loss": 1.9732, + "step": 1104 + }, + { + "epoch": 1.7696, + "grad_norm": 1.631516933441162, + "learning_rate": 3.587567352398796e-06, + "loss": 1.8115, + "step": 1106 + }, + { + "epoch": 1.7728000000000002, + "grad_norm": 1.5938745737075806, + "learning_rate": 3.489772572059674e-06, + "loss": 1.9675, + "step": 1108 + }, + { + "epoch": 1.776, + "grad_norm": 1.580931544303894, + "learning_rate": 3.393280975746588e-06, + "loss": 1.9522, + "step": 1110 + }, + { + "epoch": 1.7792, + "grad_norm": 1.5829232931137085, + "learning_rate": 3.2980952670865317e-06, + "loss": 1.7989, + "step": 1112 + }, + { + "epoch": 1.7824, + "grad_norm": 1.695078730583191, + "learning_rate": 3.2042181131164528e-06, + "loss": 1.9354, + "step": 1114 + }, + { + "epoch": 1.7856, + "grad_norm": 1.579819679260254, + "learning_rate": 3.11165214420851e-06, + "loss": 1.8468, + "step": 1116 + }, + { + "epoch": 1.7888, + "grad_norm": 1.5718368291854858, + "learning_rate": 3.020399953996389e-06, + "loss": 1.8217, + "step": 1118 + }, + { + "epoch": 1.792, + "grad_norm": 1.5997322797775269, + "learning_rate": 2.9304640993025988e-06, + "loss": 1.788, + "step": 1120 + }, + { + "epoch": 1.7952, + "grad_norm": 1.5769128799438477, + "learning_rate": 2.8418471000668523e-06, + "loss": 1.8006, + "step": 1122 + }, + { + "epoch": 1.7984, + "grad_norm": 1.602059006690979, + "learning_rate": 2.7545514392754437e-06, + "loss": 1.9391, + "step": 1124 + }, + { + "epoch": 1.8016, + "grad_norm": 1.7443532943725586, + "learning_rate": 2.6685795628917266e-06, + "loss": 2.0419, + "step": 1126 + }, + { + "epoch": 1.8048, + "grad_norm": 2.4460225105285645, + "learning_rate": 2.5839338797875036e-06, + "loss": 2.0401, + "step": 1128 + }, + { + "epoch": 1.808, + "grad_norm": 1.5434504747390747, + "learning_rate": 2.500616761675578e-06, + "loss": 1.8181, + "step": 1130 + }, + { + "epoch": 1.8112, + "grad_norm": 1.6132147312164307, + "learning_rate": 2.41863054304331e-06, + "loss": 2.0401, + "step": 
1132 + }, + { + "epoch": 1.8144, + "grad_norm": 1.6048423051834106, + "learning_rate": 2.3379775210871648e-06, + "loss": 1.8892, + "step": 1134 + }, + { + "epoch": 1.8176, + "grad_norm": 1.5809746980667114, + "learning_rate": 2.2586599556483734e-06, + "loss": 1.8272, + "step": 1136 + }, + { + "epoch": 1.8208, + "grad_norm": 1.7967019081115723, + "learning_rate": 2.180680069149621e-06, + "loss": 1.9311, + "step": 1138 + }, + { + "epoch": 1.8239999999999998, + "grad_norm": 1.6240350008010864, + "learning_rate": 2.104040046532768e-06, + "loss": 1.9291, + "step": 1140 + }, + { + "epoch": 1.8272, + "grad_norm": 1.6555086374282837, + "learning_rate": 2.0287420351976063e-06, + "loss": 1.9264, + "step": 1142 + }, + { + "epoch": 1.8304, + "grad_norm": 1.6264870166778564, + "learning_rate": 1.954788144941727e-06, + "loss": 1.8148, + "step": 1144 + }, + { + "epoch": 1.8336000000000001, + "grad_norm": 1.6269564628601074, + "learning_rate": 1.8821804479013772e-06, + "loss": 1.7891, + "step": 1146 + }, + { + "epoch": 1.8368, + "grad_norm": 1.6172057390213013, + "learning_rate": 1.81092097849343e-06, + "loss": 2.0688, + "step": 1148 + }, + { + "epoch": 1.8399999999999999, + "grad_norm": 1.6283061504364014, + "learning_rate": 1.7410117333583498e-06, + "loss": 1.9176, + "step": 1150 + }, + { + "epoch": 1.8432, + "grad_norm": 1.6128772497177124, + "learning_rate": 1.6724546713042577e-06, + "loss": 1.7971, + "step": 1152 + }, + { + "epoch": 1.8464, + "grad_norm": 1.603771686553955, + "learning_rate": 1.6052517132520651e-06, + "loss": 1.9016, + "step": 1154 + }, + { + "epoch": 1.8496000000000001, + "grad_norm": 1.5155688524246216, + "learning_rate": 1.5394047421816327e-06, + "loss": 1.8073, + "step": 1156 + }, + { + "epoch": 1.8528, + "grad_norm": 1.6564611196517944, + "learning_rate": 1.4749156030790024e-06, + "loss": 1.9445, + "step": 1158 + }, + { + "epoch": 1.8559999999999999, + "grad_norm": 1.7882254123687744, + "learning_rate": 1.4117861028847267e-06, + "loss": 2.0257, + "step": 1160 + }, + { + "epoch": 1.8592, + "grad_norm": 1.6352617740631104, + "learning_rate": 1.3500180104432325e-06, + "loss": 2.1369, + "step": 1162 + }, + { + "epoch": 1.8624, + "grad_norm": 1.5756018161773682, + "learning_rate": 1.2896130564532427e-06, + "loss": 1.7884, + "step": 1164 + }, + { + "epoch": 1.8656000000000001, + "grad_norm": 1.6122503280639648, + "learning_rate": 1.2305729334192994e-06, + "loss": 1.9276, + "step": 1166 + }, + { + "epoch": 1.8688, + "grad_norm": 1.888556718826294, + "learning_rate": 1.1728992956043238e-06, + "loss": 2.0542, + "step": 1168 + }, + { + "epoch": 1.8719999999999999, + "grad_norm": 1.6685359477996826, + "learning_rate": 1.1165937589833087e-06, + "loss": 1.9068, + "step": 1170 + }, + { + "epoch": 1.8752, + "grad_norm": 1.647849202156067, + "learning_rate": 1.061657901197971e-06, + "loss": 1.905, + "step": 1172 + }, + { + "epoch": 1.8784, + "grad_norm": 1.5802327394485474, + "learning_rate": 1.008093261512616e-06, + "loss": 1.8536, + "step": 1174 + }, + { + "epoch": 1.8816000000000002, + "grad_norm": 1.6293805837631226, + "learning_rate": 9.559013407709595e-07, + "loss": 1.9312, + "step": 1176 + }, + { + "epoch": 1.8848, + "grad_norm": 1.5595163106918335, + "learning_rate": 9.050836013541009e-07, + "loss": 1.9464, + "step": 1178 + }, + { + "epoch": 1.888, + "grad_norm": 1.518751621246338, + "learning_rate": 8.55641467139534e-07, + "loss": 1.8809, + "step": 1180 + }, + { + "epoch": 1.8912, + "grad_norm": 1.6101138591766357, + "learning_rate": 8.075763234612622e-07, + "loss": 1.8115, + "step": 
1182 + }, + { + "epoch": 1.8944, + "grad_norm": 1.6096612215042114, + "learning_rate": 7.60889517070984e-07, + "loss": 1.9574, + "step": 1184 + }, + { + "epoch": 1.8976, + "grad_norm": 1.5596234798431396, + "learning_rate": 7.155823561003361e-07, + "loss": 1.9247, + "step": 1186 + }, + { + "epoch": 1.9008, + "grad_norm": 1.63005530834198, + "learning_rate": 6.716561100242658e-07, + "loss": 1.9093, + "step": 1188 + }, + { + "epoch": 1.904, + "grad_norm": 1.932976245880127, + "learning_rate": 6.291120096254433e-07, + "loss": 1.9061, + "step": 1190 + }, + { + "epoch": 1.9072, + "grad_norm": 1.571678638458252, + "learning_rate": 5.879512469598058e-07, + "loss": 1.9505, + "step": 1192 + }, + { + "epoch": 1.9104, + "grad_norm": 1.6436798572540283, + "learning_rate": 5.481749753231124e-07, + "loss": 1.977, + "step": 1194 + }, + { + "epoch": 1.9136, + "grad_norm": 1.6677478551864624, + "learning_rate": 5.097843092186583e-07, + "loss": 1.9259, + "step": 1196 + }, + { + "epoch": 1.9167999999999998, + "grad_norm": 1.6268632411956787, + "learning_rate": 4.7278032432604425e-07, + "loss": 1.9393, + "step": 1198 + }, + { + "epoch": 1.92, + "grad_norm": 1.673553228378296, + "learning_rate": 4.371640574710345e-07, + "loss": 1.9702, + "step": 1200 + }, + { + "epoch": 1.9232, + "grad_norm": 1.6786904335021973, + "learning_rate": 4.0293650659650184e-07, + "loss": 1.9319, + "step": 1202 + }, + { + "epoch": 1.9264000000000001, + "grad_norm": 1.6758121252059937, + "learning_rate": 3.7009863073446673e-07, + "loss": 1.9173, + "step": 1204 + }, + { + "epoch": 1.9296, + "grad_norm": 1.7078176736831665, + "learning_rate": 3.386513499792354e-07, + "loss": 2.1281, + "step": 1206 + }, + { + "epoch": 1.9327999999999999, + "grad_norm": 1.6237930059432983, + "learning_rate": 3.0859554546160965e-07, + "loss": 1.8814, + "step": 1208 + }, + { + "epoch": 1.936, + "grad_norm": 1.6799558401107788, + "learning_rate": 2.7993205932420053e-07, + "loss": 1.891, + "step": 1210 + }, + { + "epoch": 1.9392, + "grad_norm": 1.6336385011672974, + "learning_rate": 2.5266169469783105e-07, + "loss": 1.9337, + "step": 1212 + }, + { + "epoch": 1.9424000000000001, + "grad_norm": 1.6081047058105469, + "learning_rate": 2.2678521567903176e-07, + "loss": 1.8411, + "step": 1214 + }, + { + "epoch": 1.9456, + "grad_norm": 1.64824640750885, + "learning_rate": 2.023033473086411e-07, + "loss": 1.9833, + "step": 1216 + }, + { + "epoch": 1.9487999999999999, + "grad_norm": 1.6368495225906372, + "learning_rate": 1.7921677555147177e-07, + "loss": 1.8514, + "step": 1218 + }, + { + "epoch": 1.952, + "grad_norm": 1.6810824871063232, + "learning_rate": 1.5752614727712057e-07, + "loss": 2.0633, + "step": 1220 + }, + { + "epoch": 1.9552, + "grad_norm": 1.5862808227539062, + "learning_rate": 1.3723207024180507e-07, + "loss": 1.9975, + "step": 1222 + }, + { + "epoch": 1.9584000000000001, + "grad_norm": 3.4000141620635986, + "learning_rate": 1.1833511307136613e-07, + "loss": 2.0093, + "step": 1224 + }, + { + "epoch": 1.9616, + "grad_norm": 1.5674700736999512, + "learning_rate": 1.0083580524531955e-07, + "loss": 1.6954, + "step": 1226 + }, + { + "epoch": 1.9647999999999999, + "grad_norm": 1.700727105140686, + "learning_rate": 8.473463708202345e-08, + "loss": 2.0088, + "step": 1228 + }, + { + "epoch": 1.968, + "grad_norm": 1.6896897554397583, + "learning_rate": 7.003205972494486e-08, + "loss": 2.1039, + "step": 1230 + }, + { + "epoch": 1.9712, + "grad_norm": 1.693572998046875, + "learning_rate": 5.672848513000873e-08, + "loss": 1.942, + "step": 1232 + }, + { + "epoch": 
1.9744000000000002, + "grad_norm": 1.662209153175354, + "learning_rate": 4.482428605407374e-08, + "loss": 1.8457, + "step": 1234 + }, + { + "epoch": 1.9776, + "grad_norm": 1.6514451503753662, + "learning_rate": 3.431979604445745e-08, + "loss": 1.9565, + "step": 1236 + }, + { + "epoch": 1.9808, + "grad_norm": 1.5132412910461426, + "learning_rate": 2.521530942962702e-08, + "loss": 1.66, + "step": 1238 + }, + { + "epoch": 1.984, + "grad_norm": 1.609984040260315, + "learning_rate": 1.7511081310922495e-08, + "loss": 1.8459, + "step": 1240 + }, + { + "epoch": 1.9872, + "grad_norm": 1.6186810731887817, + "learning_rate": 1.1207327555429192e-08, + "loss": 1.9105, + "step": 1242 + }, + { + "epoch": 1.9904, + "grad_norm": 1.6339943408966064, + "learning_rate": 6.304224789910329e-09, + "loss": 1.867, + "step": 1244 + }, + { + "epoch": 1.9936, + "grad_norm": 1.6734284162521362, + "learning_rate": 2.801910395877627e-09, + "loss": 1.936, + "step": 1246 + }, + { + "epoch": 1.9968, + "grad_norm": 1.6463286876678467, + "learning_rate": 7.004825057277398e-10, + "loss": 2.0955, + "step": 1248 + }, + { + "epoch": 2.0, + "grad_norm": 1.7489490509033203, + "learning_rate": 0.0, + "loss": 1.8566, + "step": 1250 + } + ], + "logging_steps": 2, + "max_steps": 1250, + "num_input_tokens_seen": 0, + "num_train_epochs": 2, + "save_steps": 625, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 2.1154802941978214e+17, + "train_batch_size": 8, + "trial_name": null, + "trial_params": null +}