{ |
|
"best_metric": 0.6768385171890259, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-400", |
|
"epoch": 0.07969318125217911, |
|
"eval_steps": 100, |
|
"global_step": 400, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00019923295313044778, |
|
"grad_norm": 0.42145729064941406, |
|
"learning_rate": 5e-06, |
|
"loss": 0.6572, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00019923295313044778, |
|
"eval_loss": 1.0046130418777466, |
|
"eval_runtime": 262.6844, |
|
"eval_samples_per_second": 32.183, |
|
"eval_steps_per_second": 8.048, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00039846590626089557, |
|
"grad_norm": 0.8600642681121826, |
|
"learning_rate": 1e-05, |
|
"loss": 0.9715, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0005976988593913434, |
|
"grad_norm": 0.6406773924827576, |
|
"learning_rate": 1.5e-05, |
|
"loss": 0.8492, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0007969318125217911, |
|
"grad_norm": 0.6716526746749878, |
|
"learning_rate": 2e-05, |
|
"loss": 0.9895, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0009961647656522388, |
|
"grad_norm": 0.49085474014282227, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.8941, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0011953977187826868, |
|
"grad_norm": 0.4583233594894409, |
|
"learning_rate": 3e-05, |
|
"loss": 0.8405, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0013946306719131345, |
|
"grad_norm": 0.3206585645675659, |
|
"learning_rate": 3.5e-05, |
|
"loss": 0.8055, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0015938636250435823, |
|
"grad_norm": 0.36268213391304016, |
|
"learning_rate": 4e-05, |
|
"loss": 0.8796, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.00179309657817403, |
|
"grad_norm": 0.3162664771080017, |
|
"learning_rate": 4.5e-05, |
|
"loss": 0.8962, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0019923295313044776, |
|
"grad_norm": 0.3157712519168854, |
|
"learning_rate": 5e-05, |
|
"loss": 0.9107, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0021915624844349255, |
|
"grad_norm": 0.2892014980316162, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 0.931, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0023907954375653735, |
|
"grad_norm": 0.28385069966316223, |
|
"learning_rate": 6e-05, |
|
"loss": 0.8569, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.002590028390695821, |
|
"grad_norm": 0.3141295909881592, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 0.9154, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.002789261343826269, |
|
"grad_norm": 0.2827235162258148, |
|
"learning_rate": 7e-05, |
|
"loss": 0.7631, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0029884942969567166, |
|
"grad_norm": 0.31552836298942566, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.8641, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0031877272500871645, |
|
"grad_norm": 0.2979770600795746, |
|
"learning_rate": 8e-05, |
|
"loss": 0.8825, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.003386960203217612, |
|
"grad_norm": 0.30036237835884094, |
|
"learning_rate": 8.5e-05, |
|
"loss": 0.8403, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.00358619315634806, |
|
"grad_norm": 0.30925682187080383, |
|
"learning_rate": 9e-05, |
|
"loss": 0.8187, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0037854261094785076, |
|
"grad_norm": 0.3207469582557678, |
|
"learning_rate": 9.5e-05, |
|
"loss": 0.9215, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.003984659062608955, |
|
"grad_norm": 0.25850704312324524, |
|
"learning_rate": 0.0001, |
|
"loss": 0.7215, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0041838920157394035, |
|
"grad_norm": 0.2752808630466461, |
|
"learning_rate": 9.999892908320647e-05, |
|
"loss": 0.7553, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.004383124968869851, |
|
"grad_norm": 0.30526119470596313, |
|
"learning_rate": 9.999571637870036e-05, |
|
"loss": 0.7467, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.004582357922000299, |
|
"grad_norm": 0.35731807351112366, |
|
"learning_rate": 9.999036202410325e-05, |
|
"loss": 0.832, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.004781590875130747, |
|
"grad_norm": 0.34211745858192444, |
|
"learning_rate": 9.998286624877786e-05, |
|
"loss": 0.8119, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0049808238282611945, |
|
"grad_norm": 0.294607549905777, |
|
"learning_rate": 9.997322937381829e-05, |
|
"loss": 0.7457, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.005180056781391642, |
|
"grad_norm": 0.3165847957134247, |
|
"learning_rate": 9.996145181203615e-05, |
|
"loss": 0.8323, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.00537928973452209, |
|
"grad_norm": 0.316105455160141, |
|
"learning_rate": 9.994753406794301e-05, |
|
"loss": 0.7302, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.005578522687652538, |
|
"grad_norm": 0.3156639635562897, |
|
"learning_rate": 9.99314767377287e-05, |
|
"loss": 0.8405, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.005777755640782986, |
|
"grad_norm": 0.28763553500175476, |
|
"learning_rate": 9.991328050923581e-05, |
|
"loss": 0.7269, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.005976988593913433, |
|
"grad_norm": 0.3319118916988373, |
|
"learning_rate": 9.989294616193017e-05, |
|
"loss": 0.7607, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.006176221547043881, |
|
"grad_norm": 0.32836800813674927, |
|
"learning_rate": 9.98704745668676e-05, |
|
"loss": 0.867, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.006375454500174329, |
|
"grad_norm": 0.3175886273384094, |
|
"learning_rate": 9.98458666866564e-05, |
|
"loss": 0.7733, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.006574687453304777, |
|
"grad_norm": 0.31788307428359985, |
|
"learning_rate": 9.981912357541627e-05, |
|
"loss": 0.7723, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.006773920406435224, |
|
"grad_norm": 0.28625956177711487, |
|
"learning_rate": 9.97902463787331e-05, |
|
"loss": 0.7911, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.0069731533595656725, |
|
"grad_norm": 0.28825896978378296, |
|
"learning_rate": 9.975923633360985e-05, |
|
"loss": 0.7694, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.00717238631269612, |
|
"grad_norm": 0.29436683654785156, |
|
"learning_rate": 9.972609476841367e-05, |
|
"loss": 0.7453, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.007371619265826568, |
|
"grad_norm": 0.29670408368110657, |
|
"learning_rate": 9.969082310281891e-05, |
|
"loss": 0.7294, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.007570852218957015, |
|
"grad_norm": 0.27974042296409607, |
|
"learning_rate": 9.965342284774632e-05, |
|
"loss": 0.7269, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.0077700851720874636, |
|
"grad_norm": 0.32981932163238525, |
|
"learning_rate": 9.961389560529836e-05, |
|
"loss": 0.8839, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.00796931812521791, |
|
"grad_norm": 0.2603093683719635, |
|
"learning_rate": 9.957224306869053e-05, |
|
"loss": 0.7584, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.008168551078348359, |
|
"grad_norm": 0.2658812403678894, |
|
"learning_rate": 9.952846702217886e-05, |
|
"loss": 0.6587, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.008367784031478807, |
|
"grad_norm": 0.2873687446117401, |
|
"learning_rate": 9.948256934098352e-05, |
|
"loss": 0.7437, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.008567016984609254, |
|
"grad_norm": 0.29043227434158325, |
|
"learning_rate": 9.943455199120837e-05, |
|
"loss": 0.7514, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.008766249937739702, |
|
"grad_norm": 0.2623406648635864, |
|
"learning_rate": 9.938441702975689e-05, |
|
"loss": 0.6613, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.00896548289087015, |
|
"grad_norm": 0.33088845014572144, |
|
"learning_rate": 9.933216660424395e-05, |
|
"loss": 0.7904, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.009164715844000597, |
|
"grad_norm": 0.3347594439983368, |
|
"learning_rate": 9.927780295290389e-05, |
|
"loss": 0.8104, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.009363948797131046, |
|
"grad_norm": 0.29131022095680237, |
|
"learning_rate": 9.922132840449459e-05, |
|
"loss": 0.7463, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.009563181750261494, |
|
"grad_norm": 0.30381011962890625, |
|
"learning_rate": 9.916274537819775e-05, |
|
"loss": 0.8213, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.00976241470339194, |
|
"grad_norm": 0.29724815487861633, |
|
"learning_rate": 9.91020563835152e-05, |
|
"loss": 0.7977, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.009961647656522389, |
|
"grad_norm": 0.3157656490802765, |
|
"learning_rate": 9.903926402016153e-05, |
|
"loss": 0.8148, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.010160880609652836, |
|
"grad_norm": 0.23612086474895477, |
|
"learning_rate": 9.897437097795257e-05, |
|
"loss": 0.5049, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.010360113562783284, |
|
"grad_norm": 0.33656045794487, |
|
"learning_rate": 9.890738003669029e-05, |
|
"loss": 0.7237, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.010559346515913733, |
|
"grad_norm": 0.301807701587677, |
|
"learning_rate": 9.883829406604363e-05, |
|
"loss": 0.6572, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.01075857946904418, |
|
"grad_norm": 0.2367606908082962, |
|
"learning_rate": 9.876711602542563e-05, |
|
"loss": 0.637, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.010957812422174628, |
|
"grad_norm": 0.2271997034549713, |
|
"learning_rate": 9.869384896386668e-05, |
|
"loss": 0.6629, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.011157045375305076, |
|
"grad_norm": 0.2069195657968521, |
|
"learning_rate": 9.861849601988383e-05, |
|
"loss": 0.6359, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.011356278328435523, |
|
"grad_norm": 0.20633797347545624, |
|
"learning_rate": 9.854106042134641e-05, |
|
"loss": 0.6114, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.011555511281565971, |
|
"grad_norm": 0.23585379123687744, |
|
"learning_rate": 9.846154548533773e-05, |
|
"loss": 0.6855, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.01175474423469642, |
|
"grad_norm": 0.21923767030239105, |
|
"learning_rate": 9.837995461801299e-05, |
|
"loss": 0.6436, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.011953977187826866, |
|
"grad_norm": 0.26621073484420776, |
|
"learning_rate": 9.829629131445342e-05, |
|
"loss": 0.7332, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.012153210140957315, |
|
"grad_norm": 0.21650448441505432, |
|
"learning_rate": 9.821055915851647e-05, |
|
"loss": 0.6092, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.012352443094087761, |
|
"grad_norm": 0.22831426560878754, |
|
"learning_rate": 9.812276182268236e-05, |
|
"loss": 0.7218, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.01255167604721821, |
|
"grad_norm": 0.21681798994541168, |
|
"learning_rate": 9.803290306789676e-05, |
|
"loss": 0.7292, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.012750909000348658, |
|
"grad_norm": 0.27890080213546753, |
|
"learning_rate": 9.794098674340965e-05, |
|
"loss": 0.7812, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.012950141953479105, |
|
"grad_norm": 0.21760623157024384, |
|
"learning_rate": 9.784701678661045e-05, |
|
"loss": 0.6858, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.013149374906609553, |
|
"grad_norm": 0.2346140742301941, |
|
"learning_rate": 9.775099722285935e-05, |
|
"loss": 0.6121, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.013348607859740002, |
|
"grad_norm": 0.24936628341674805, |
|
"learning_rate": 9.765293216531486e-05, |
|
"loss": 0.7378, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.013547840812870448, |
|
"grad_norm": 0.22810809314250946, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.718, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.013747073766000897, |
|
"grad_norm": 0.2293727546930313, |
|
"learning_rate": 9.74506824594107e-05, |
|
"loss": 0.7069, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.013946306719131345, |
|
"grad_norm": 0.23354655504226685, |
|
"learning_rate": 9.73465064747553e-05, |
|
"loss": 0.72, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.014145539672261792, |
|
"grad_norm": 0.2324572503566742, |
|
"learning_rate": 9.724030232334391e-05, |
|
"loss": 0.6631, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.01434477262539224, |
|
"grad_norm": 0.273137629032135, |
|
"learning_rate": 9.713207455460894e-05, |
|
"loss": 0.7187, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.014544005578522687, |
|
"grad_norm": 0.25326138734817505, |
|
"learning_rate": 9.702182780466775e-05, |
|
"loss": 0.706, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.014743238531653135, |
|
"grad_norm": 0.2347702831029892, |
|
"learning_rate": 9.690956679612421e-05, |
|
"loss": 0.7365, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.014942471484783584, |
|
"grad_norm": 0.21022625267505646, |
|
"learning_rate": 9.67952963378663e-05, |
|
"loss": 0.6422, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.01514170443791403, |
|
"grad_norm": 0.2170475721359253, |
|
"learning_rate": 9.667902132486009e-05, |
|
"loss": 0.685, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.015340937391044479, |
|
"grad_norm": 0.27755966782569885, |
|
"learning_rate": 9.656074673794018e-05, |
|
"loss": 0.8184, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.015540170344174927, |
|
"grad_norm": 0.22904907166957855, |
|
"learning_rate": 9.644047764359622e-05, |
|
"loss": 0.6883, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.015739403297305374, |
|
"grad_norm": 0.23313535749912262, |
|
"learning_rate": 9.631821919375591e-05, |
|
"loss": 0.6408, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.01593863625043582, |
|
"grad_norm": 0.2328289896249771, |
|
"learning_rate": 9.619397662556435e-05, |
|
"loss": 0.668, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.01613786920356627, |
|
"grad_norm": 0.22517356276512146, |
|
"learning_rate": 9.606775526115963e-05, |
|
"loss": 0.6917, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.016337102156696717, |
|
"grad_norm": 0.22593192756175995, |
|
"learning_rate": 9.593956050744492e-05, |
|
"loss": 0.6252, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.016536335109827164, |
|
"grad_norm": 0.2786768078804016, |
|
"learning_rate": 9.580939785585681e-05, |
|
"loss": 0.847, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.016735568062957614, |
|
"grad_norm": 0.25397562980651855, |
|
"learning_rate": 9.567727288213005e-05, |
|
"loss": 0.7431, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.01693480101608806, |
|
"grad_norm": 0.26364976167678833, |
|
"learning_rate": 9.554319124605879e-05, |
|
"loss": 0.7749, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.017134033969218507, |
|
"grad_norm": 0.301891952753067, |
|
"learning_rate": 9.540715869125407e-05, |
|
"loss": 0.6903, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.017333266922348958, |
|
"grad_norm": 0.25629404187202454, |
|
"learning_rate": 9.526918104489777e-05, |
|
"loss": 0.7569, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.017532499875479404, |
|
"grad_norm": 0.24842076003551483, |
|
"learning_rate": 9.512926421749304e-05, |
|
"loss": 0.7618, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.01773173282860985, |
|
"grad_norm": 0.26040297746658325, |
|
"learning_rate": 9.498741420261108e-05, |
|
"loss": 0.8032, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.0179309657817403, |
|
"grad_norm": 0.2626326084136963, |
|
"learning_rate": 9.484363707663442e-05, |
|
"loss": 0.7924, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.018130198734870748, |
|
"grad_norm": 0.22737815976142883, |
|
"learning_rate": 9.469793899849661e-05, |
|
"loss": 0.6443, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.018329431688001194, |
|
"grad_norm": 0.24532002210617065, |
|
"learning_rate": 9.45503262094184e-05, |
|
"loss": 0.7, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.018528664641131645, |
|
"grad_norm": 0.28320467472076416, |
|
"learning_rate": 9.440080503264037e-05, |
|
"loss": 0.783, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.01872789759426209, |
|
"grad_norm": 0.257158100605011, |
|
"learning_rate": 9.42493818731521e-05, |
|
"loss": 0.7044, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.018927130547392538, |
|
"grad_norm": 0.23965127766132355, |
|
"learning_rate": 9.409606321741775e-05, |
|
"loss": 0.6876, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.019126363500522988, |
|
"grad_norm": 0.29275670647621155, |
|
"learning_rate": 9.394085563309827e-05, |
|
"loss": 0.8103, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.019325596453653435, |
|
"grad_norm": 0.27652475237846375, |
|
"learning_rate": 9.378376576876999e-05, |
|
"loss": 0.7313, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.01952482940678388, |
|
"grad_norm": 0.2445044219493866, |
|
"learning_rate": 9.362480035363986e-05, |
|
"loss": 0.6936, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.01972406235991433, |
|
"grad_norm": 0.2570478916168213, |
|
"learning_rate": 9.34639661972572e-05, |
|
"loss": 0.6871, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.019923295313044778, |
|
"grad_norm": 0.28658804297447205, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 0.7063, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.019923295313044778, |
|
"eval_loss": 0.7203736305236816, |
|
"eval_runtime": 264.9972, |
|
"eval_samples_per_second": 31.902, |
|
"eval_steps_per_second": 7.977, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.020122528266175225, |
|
"grad_norm": 0.21482613682746887, |
|
"learning_rate": 9.31367192988896e-05, |
|
"loss": 0.5356, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.02032176121930567, |
|
"grad_norm": 0.28820475935935974, |
|
"learning_rate": 9.297032057507264e-05, |
|
"loss": 0.6606, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.02052099417243612, |
|
"grad_norm": 0.22919978201389313, |
|
"learning_rate": 9.280208114573859e-05, |
|
"loss": 0.6681, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.02072022712556657, |
|
"grad_norm": 0.20591995120048523, |
|
"learning_rate": 9.263200821770461e-05, |
|
"loss": 0.6523, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.020919460078697015, |
|
"grad_norm": 0.20084571838378906, |
|
"learning_rate": 9.246010907632895e-05, |
|
"loss": 0.629, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.021118693031827465, |
|
"grad_norm": 0.25101473927497864, |
|
"learning_rate": 9.228639108519868e-05, |
|
"loss": 0.7384, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.021317925984957912, |
|
"grad_norm": 0.23734250664710999, |
|
"learning_rate": 9.211086168581433e-05, |
|
"loss": 0.6895, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.02151715893808836, |
|
"grad_norm": 0.22536040842533112, |
|
"learning_rate": 9.193352839727121e-05, |
|
"loss": 0.7137, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.02171639189121881, |
|
"grad_norm": 0.2355283945798874, |
|
"learning_rate": 9.175439881593716e-05, |
|
"loss": 0.7662, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.021915624844349255, |
|
"grad_norm": 0.2228918820619583, |
|
"learning_rate": 9.157348061512727e-05, |
|
"loss": 0.6534, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.022114857797479702, |
|
"grad_norm": 0.2184191644191742, |
|
"learning_rate": 9.139078154477512e-05, |
|
"loss": 0.7054, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.022314090750610152, |
|
"grad_norm": 0.21408338844776154, |
|
"learning_rate": 9.120630943110077e-05, |
|
"loss": 0.6103, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.0225133237037406, |
|
"grad_norm": 0.25427061319351196, |
|
"learning_rate": 9.102007217627568e-05, |
|
"loss": 0.8366, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.022712556656871045, |
|
"grad_norm": 0.21583066880702972, |
|
"learning_rate": 9.083207775808396e-05, |
|
"loss": 0.7165, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.022911789610001496, |
|
"grad_norm": 0.2396174967288971, |
|
"learning_rate": 9.064233422958077e-05, |
|
"loss": 0.6794, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.023111022563131942, |
|
"grad_norm": 0.2334282398223877, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.7945, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.02331025551626239, |
|
"grad_norm": 0.20607277750968933, |
|
"learning_rate": 9.025763242814291e-05, |
|
"loss": 0.5739, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.02350948846939284, |
|
"grad_norm": 0.2125677615404129, |
|
"learning_rate": 9.006269063455304e-05, |
|
"loss": 0.6976, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.023708721422523286, |
|
"grad_norm": 0.24233980476856232, |
|
"learning_rate": 8.986603268863536e-05, |
|
"loss": 0.7768, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.023907954375653732, |
|
"grad_norm": 0.23903363943099976, |
|
"learning_rate": 8.966766701456177e-05, |
|
"loss": 0.7925, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.024107187328784183, |
|
"grad_norm": 0.2270466685295105, |
|
"learning_rate": 8.94676021096575e-05, |
|
"loss": 0.679, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.02430642028191463, |
|
"grad_norm": 0.22134599089622498, |
|
"learning_rate": 8.926584654403724e-05, |
|
"loss": 0.7155, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.024505653235045076, |
|
"grad_norm": 0.2539938986301422, |
|
"learning_rate": 8.906240896023794e-05, |
|
"loss": 0.7156, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.024704886188175523, |
|
"grad_norm": 0.24332107603549957, |
|
"learning_rate": 8.885729807284856e-05, |
|
"loss": 0.6988, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.024904119141305973, |
|
"grad_norm": 0.23084275424480438, |
|
"learning_rate": 8.865052266813685e-05, |
|
"loss": 0.7142, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.02510335209443642, |
|
"grad_norm": 0.24665014445781708, |
|
"learning_rate": 8.844209160367299e-05, |
|
"loss": 0.7471, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.025302585047566866, |
|
"grad_norm": 0.23389537632465363, |
|
"learning_rate": 8.823201380795001e-05, |
|
"loss": 0.676, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.025501818000697316, |
|
"grad_norm": 0.2225302904844284, |
|
"learning_rate": 8.802029828000156e-05, |
|
"loss": 0.6702, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.025701050953827763, |
|
"grad_norm": 0.2194124162197113, |
|
"learning_rate": 8.780695408901613e-05, |
|
"loss": 0.7173, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.02590028390695821, |
|
"grad_norm": 0.22123487293720245, |
|
"learning_rate": 8.759199037394887e-05, |
|
"loss": 0.679, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.02609951686008866, |
|
"grad_norm": 0.2465553730726242, |
|
"learning_rate": 8.737541634312985e-05, |
|
"loss": 0.7151, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.026298749813219106, |
|
"grad_norm": 0.2150459736585617, |
|
"learning_rate": 8.715724127386972e-05, |
|
"loss": 0.7106, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.026497982766349553, |
|
"grad_norm": 0.1966014802455902, |
|
"learning_rate": 8.693747451206232e-05, |
|
"loss": 0.7174, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.026697215719480003, |
|
"grad_norm": 0.25692620873451233, |
|
"learning_rate": 8.671612547178428e-05, |
|
"loss": 0.6709, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.02689644867261045, |
|
"grad_norm": 0.19534507393836975, |
|
"learning_rate": 8.649320363489179e-05, |
|
"loss": 0.5871, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.027095681625740897, |
|
"grad_norm": 0.25263822078704834, |
|
"learning_rate": 8.626871855061438e-05, |
|
"loss": 0.7815, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.027294914578871347, |
|
"grad_norm": 0.24761663377285004, |
|
"learning_rate": 8.604267983514594e-05, |
|
"loss": 0.7253, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.027494147532001793, |
|
"grad_norm": 0.2336164116859436, |
|
"learning_rate": 8.581509717123273e-05, |
|
"loss": 0.6871, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.02769338048513224, |
|
"grad_norm": 0.2253238558769226, |
|
"learning_rate": 8.558598030775857e-05, |
|
"loss": 0.6806, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.02789261343826269, |
|
"grad_norm": 0.2473248988389969, |
|
"learning_rate": 8.535533905932738e-05, |
|
"loss": 0.8147, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.028091846391393137, |
|
"grad_norm": 0.2515900731086731, |
|
"learning_rate": 8.51231833058426e-05, |
|
"loss": 0.8055, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.028291079344523583, |
|
"grad_norm": 0.2372109442949295, |
|
"learning_rate": 8.488952299208401e-05, |
|
"loss": 0.6404, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.028490312297654034, |
|
"grad_norm": 0.24354617297649384, |
|
"learning_rate": 8.46543681272818e-05, |
|
"loss": 0.7355, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.02868954525078448, |
|
"grad_norm": 0.2426522821187973, |
|
"learning_rate": 8.44177287846877e-05, |
|
"loss": 0.7561, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.028888778203914927, |
|
"grad_norm": 0.24764999747276306, |
|
"learning_rate": 8.417961510114356e-05, |
|
"loss": 0.7203, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.029088011157045374, |
|
"grad_norm": 0.2602037191390991, |
|
"learning_rate": 8.39400372766471e-05, |
|
"loss": 0.7111, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.029287244110175824, |
|
"grad_norm": 0.25291872024536133, |
|
"learning_rate": 8.36990055739149e-05, |
|
"loss": 0.7011, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.02948647706330627, |
|
"grad_norm": 0.2726818323135376, |
|
"learning_rate": 8.345653031794292e-05, |
|
"loss": 0.8132, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.029685710016436717, |
|
"grad_norm": 0.2830042839050293, |
|
"learning_rate": 8.321262189556409e-05, |
|
"loss": 0.7577, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.029884942969567167, |
|
"grad_norm": 0.2831919193267822, |
|
"learning_rate": 8.296729075500344e-05, |
|
"loss": 0.7566, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.030084175922697614, |
|
"grad_norm": 0.33581486344337463, |
|
"learning_rate": 8.272054740543052e-05, |
|
"loss": 0.7664, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.03028340887582806, |
|
"grad_norm": 0.3191780149936676, |
|
"learning_rate": 8.247240241650918e-05, |
|
"loss": 0.6583, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.03048264182895851, |
|
"grad_norm": 0.22133736312389374, |
|
"learning_rate": 8.222286641794488e-05, |
|
"loss": 0.6328, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.030681874782088957, |
|
"grad_norm": 0.21831941604614258, |
|
"learning_rate": 8.197195009902924e-05, |
|
"loss": 0.698, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.030881107735219404, |
|
"grad_norm": 0.1761123090982437, |
|
"learning_rate": 8.171966420818228e-05, |
|
"loss": 0.5441, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.031080340688349854, |
|
"grad_norm": 0.20375514030456543, |
|
"learning_rate": 8.146601955249188e-05, |
|
"loss": 0.6309, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.0312795736414803, |
|
"grad_norm": 0.2410995066165924, |
|
"learning_rate": 8.121102699725089e-05, |
|
"loss": 0.6337, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.03147880659461075, |
|
"grad_norm": 0.2229624092578888, |
|
"learning_rate": 8.095469746549172e-05, |
|
"loss": 0.7014, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.0316780395477412, |
|
"grad_norm": 0.23791897296905518, |
|
"learning_rate": 8.069704193751832e-05, |
|
"loss": 0.6582, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.03187727250087164, |
|
"grad_norm": 0.22524884343147278, |
|
"learning_rate": 8.043807145043604e-05, |
|
"loss": 0.7178, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.03207650545400209, |
|
"grad_norm": 0.21897757053375244, |
|
"learning_rate": 8.017779709767858e-05, |
|
"loss": 0.7032, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.03227573840713254, |
|
"grad_norm": 0.19019927084445953, |
|
"learning_rate": 7.991623002853296e-05, |
|
"loss": 0.5842, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.032474971360262984, |
|
"grad_norm": 0.21591834723949432, |
|
"learning_rate": 7.965338144766186e-05, |
|
"loss": 0.7243, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.032674204313393435, |
|
"grad_norm": 0.20649899542331696, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.6814, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.032873437266523885, |
|
"grad_norm": 0.24540702998638153, |
|
"learning_rate": 7.912388484339012e-05, |
|
"loss": 0.7356, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.03307267021965433, |
|
"grad_norm": 0.22577622532844543, |
|
"learning_rate": 7.88572595018617e-05, |
|
"loss": 0.6468, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.03327190317278478, |
|
"grad_norm": 0.2168670892715454, |
|
"learning_rate": 7.858939801138061e-05, |
|
"loss": 0.6448, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.03347113612591523, |
|
"grad_norm": 0.22892935574054718, |
|
"learning_rate": 7.832031184624164e-05, |
|
"loss": 0.6375, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.03367036907904567, |
|
"grad_norm": 0.2086174190044403, |
|
"learning_rate": 7.80500125332005e-05, |
|
"loss": 0.6993, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.03386960203217612, |
|
"grad_norm": 0.19050797820091248, |
|
"learning_rate": 7.777851165098012e-05, |
|
"loss": 0.5909, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.03406883498530657, |
|
"grad_norm": 0.22635716199874878, |
|
"learning_rate": 7.750582082977467e-05, |
|
"loss": 0.6799, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.034268067938437015, |
|
"grad_norm": 0.2369690090417862, |
|
"learning_rate": 7.723195175075136e-05, |
|
"loss": 0.6823, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.034467300891567465, |
|
"grad_norm": 0.2258961796760559, |
|
"learning_rate": 7.695691614555003e-05, |
|
"loss": 0.6768, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.034666533844697915, |
|
"grad_norm": 0.2175053060054779, |
|
"learning_rate": 7.668072579578058e-05, |
|
"loss": 0.6501, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.03486576679782836, |
|
"grad_norm": 0.23859256505966187, |
|
"learning_rate": 7.64033925325184e-05, |
|
"loss": 0.7198, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.03506499975095881, |
|
"grad_norm": 0.2090621143579483, |
|
"learning_rate": 7.612492823579745e-05, |
|
"loss": 0.6075, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.03526423270408926, |
|
"grad_norm": 0.23009976744651794, |
|
"learning_rate": 7.584534483410137e-05, |
|
"loss": 0.6993, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.0354634656572197, |
|
"grad_norm": 0.19365736842155457, |
|
"learning_rate": 7.55646543038526e-05, |
|
"loss": 0.61, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.03566269861035015, |
|
"grad_norm": 0.24506577849388123, |
|
"learning_rate": 7.528286866889924e-05, |
|
"loss": 0.68, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.0358619315634806, |
|
"grad_norm": 0.2776792049407959, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.7224, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.036061164516611045, |
|
"grad_norm": 0.21556320786476135, |
|
"learning_rate": 7.471606041430723e-05, |
|
"loss": 0.6742, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.036260397469741495, |
|
"grad_norm": 0.2549598217010498, |
|
"learning_rate": 7.443106207484776e-05, |
|
"loss": 0.7866, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.036459630422871946, |
|
"grad_norm": 0.2427287995815277, |
|
"learning_rate": 7.414501719000187e-05, |
|
"loss": 0.7006, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.03665886337600239, |
|
"grad_norm": 0.2485671192407608, |
|
"learning_rate": 7.385793801298042e-05, |
|
"loss": 0.678, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.03685809632913284, |
|
"grad_norm": 0.23628251254558563, |
|
"learning_rate": 7.35698368412999e-05, |
|
"loss": 0.7176, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.03705732928226329, |
|
"grad_norm": 0.245437353849411, |
|
"learning_rate": 7.328072601625557e-05, |
|
"loss": 0.6784, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.03725656223539373, |
|
"grad_norm": 0.26534438133239746, |
|
"learning_rate": 7.2990617922393e-05, |
|
"loss": 0.7389, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.03745579518852418, |
|
"grad_norm": 0.26061758399009705, |
|
"learning_rate": 7.269952498697734e-05, |
|
"loss": 0.637, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.03765502814165463, |
|
"grad_norm": 0.247264102101326, |
|
"learning_rate": 7.240745967946113e-05, |
|
"loss": 0.7007, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.037854261094785076, |
|
"grad_norm": 0.2501027584075928, |
|
"learning_rate": 7.211443451095007e-05, |
|
"loss": 0.7552, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.038053494047915526, |
|
"grad_norm": 0.24374301731586456, |
|
"learning_rate": 7.18204620336671e-05, |
|
"loss": 0.7168, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.038252727001045976, |
|
"grad_norm": 0.2584417760372162, |
|
"learning_rate": 7.152555484041476e-05, |
|
"loss": 0.6993, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.03845195995417642, |
|
"grad_norm": 0.2678215205669403, |
|
"learning_rate": 7.122972556403567e-05, |
|
"loss": 0.7069, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.03865119290730687, |
|
"grad_norm": 0.27493688464164734, |
|
"learning_rate": 7.09329868768714e-05, |
|
"loss": 0.7271, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.03885042586043732, |
|
"grad_norm": 0.28842246532440186, |
|
"learning_rate": 7.063535149021973e-05, |
|
"loss": 0.7254, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.03904965881356776, |
|
"grad_norm": 0.26705753803253174, |
|
"learning_rate": 7.033683215379002e-05, |
|
"loss": 0.7115, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.03924889176669821, |
|
"grad_norm": 0.25876060128211975, |
|
"learning_rate": 7.003744165515705e-05, |
|
"loss": 0.7228, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.03944812471982866, |
|
"grad_norm": 0.30024516582489014, |
|
"learning_rate": 6.973719281921335e-05, |
|
"loss": 0.7615, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.039647357672959106, |
|
"grad_norm": 0.27740225195884705, |
|
"learning_rate": 6.943609850761979e-05, |
|
"loss": 0.7928, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.039846590626089556, |
|
"grad_norm": 0.3024348020553589, |
|
"learning_rate": 6.91341716182545e-05, |
|
"loss": 0.7479, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.039846590626089556, |
|
"eval_loss": 0.7008334398269653, |
|
"eval_runtime": 265.1826, |
|
"eval_samples_per_second": 31.88, |
|
"eval_steps_per_second": 7.972, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.04004582357922, |
|
"grad_norm": 0.17536719143390656, |
|
"learning_rate": 6.883142508466054e-05, |
|
"loss": 0.5068, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.04024505653235045, |
|
"grad_norm": 0.2694966197013855, |
|
"learning_rate": 6.852787187549182e-05, |
|
"loss": 0.6759, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.0404442894854809, |
|
"grad_norm": 0.23542173206806183, |
|
"learning_rate": 6.82235249939575e-05, |
|
"loss": 0.7302, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.04064352243861134, |
|
"grad_norm": 0.2004457414150238, |
|
"learning_rate": 6.7918397477265e-05, |
|
"loss": 0.6047, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.04084275539174179, |
|
"grad_norm": 0.217708557844162, |
|
"learning_rate": 6.761250239606169e-05, |
|
"loss": 0.5939, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.04104198834487224, |
|
"grad_norm": 0.2038157731294632, |
|
"learning_rate": 6.730585285387465e-05, |
|
"loss": 0.6299, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.04124122129800269, |
|
"grad_norm": 0.2198810577392578, |
|
"learning_rate": 6.699846198654971e-05, |
|
"loss": 0.6102, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.04144045425113314, |
|
"grad_norm": 0.21492627263069153, |
|
"learning_rate": 6.669034296168855e-05, |
|
"loss": 0.7281, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.04163968720426359, |
|
"grad_norm": 0.20070651173591614, |
|
"learning_rate": 6.638150897808468e-05, |
|
"loss": 0.6202, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.04183892015739403, |
|
"grad_norm": 0.20803692936897278, |
|
"learning_rate": 6.607197326515808e-05, |
|
"loss": 0.6648, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.04203815311052448, |
|
"grad_norm": 0.21368266642093658, |
|
"learning_rate": 6.57617490823885e-05, |
|
"loss": 0.6737, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.04223738606365493, |
|
"grad_norm": 0.22453172504901886, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.71, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.042436619016785374, |
|
"grad_norm": 0.20329400897026062, |
|
"learning_rate": 6.513928849212873e-05, |
|
"loss": 0.6541, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.042635851969915824, |
|
"grad_norm": 0.18720655143260956, |
|
"learning_rate": 6.482707874877854e-05, |
|
"loss": 0.6628, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.042835084923046274, |
|
"grad_norm": 0.20335181057453156, |
|
"learning_rate": 6.451423386272312e-05, |
|
"loss": 0.7075, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.04303431787617672, |
|
"grad_norm": 0.21466787159442902, |
|
"learning_rate": 6.420076723519614e-05, |
|
"loss": 0.6608, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.04323355082930717, |
|
"grad_norm": 0.19446536898612976, |
|
"learning_rate": 6.388669229406462e-05, |
|
"loss": 0.6451, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.04343278378243762, |
|
"grad_norm": 0.202213317155838, |
|
"learning_rate": 6.357202249325371e-05, |
|
"loss": 0.631, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.04363201673556806, |
|
"grad_norm": 0.20673999190330505, |
|
"learning_rate": 6.32567713121704e-05, |
|
"loss": 0.6313, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.04383124968869851, |
|
"grad_norm": 0.22817325592041016, |
|
"learning_rate": 6.294095225512603e-05, |
|
"loss": 0.6956, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.04403048264182896, |
|
"grad_norm": 0.23594947159290314, |
|
"learning_rate": 6.26245788507579e-05, |
|
"loss": 0.6675, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.044229715594959404, |
|
"grad_norm": 0.2236422449350357, |
|
"learning_rate": 6.230766465144967e-05, |
|
"loss": 0.6337, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.044428948548089854, |
|
"grad_norm": 0.19244728982448578, |
|
"learning_rate": 6.199022323275083e-05, |
|
"loss": 0.6323, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.044628181501220304, |
|
"grad_norm": 0.23294833302497864, |
|
"learning_rate": 6.167226819279528e-05, |
|
"loss": 0.6741, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.04482741445435075, |
|
"grad_norm": 0.2407350391149521, |
|
"learning_rate": 6.135381315171867e-05, |
|
"loss": 0.7436, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.0450266474074812, |
|
"grad_norm": 0.21430183947086334, |
|
"learning_rate": 6.103487175107507e-05, |
|
"loss": 0.6578, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.04522588036061165, |
|
"grad_norm": 0.23648200929164886, |
|
"learning_rate": 6.071545765325254e-05, |
|
"loss": 0.7116, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.04542511331374209, |
|
"grad_norm": 0.22720520198345184, |
|
"learning_rate": 6.0395584540887963e-05, |
|
"loss": 0.6536, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.04562434626687254, |
|
"grad_norm": 0.2226054072380066, |
|
"learning_rate": 6.007526611628086e-05, |
|
"loss": 0.684, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.04582357922000299, |
|
"grad_norm": 0.25174853205680847, |
|
"learning_rate": 5.9754516100806423e-05, |
|
"loss": 0.6755, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.046022812173133434, |
|
"grad_norm": 0.22537098824977875, |
|
"learning_rate": 5.9433348234327765e-05, |
|
"loss": 0.6145, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.046222045126263885, |
|
"grad_norm": 0.26092636585235596, |
|
"learning_rate": 5.911177627460739e-05, |
|
"loss": 0.7068, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.046421278079394335, |
|
"grad_norm": 0.24872590601444244, |
|
"learning_rate": 5.8789813996717736e-05, |
|
"loss": 0.7006, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.04662051103252478, |
|
"grad_norm": 0.27163049578666687, |
|
"learning_rate": 5.8467475192451226e-05, |
|
"loss": 0.6737, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.04681974398565523, |
|
"grad_norm": 0.24463686347007751, |
|
"learning_rate": 5.814477366972945e-05, |
|
"loss": 0.6807, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.04701897693878568, |
|
"grad_norm": 0.2596883177757263, |
|
"learning_rate": 5.782172325201155e-05, |
|
"loss": 0.7027, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.04721820989191612, |
|
"grad_norm": 0.24346858263015747, |
|
"learning_rate": 5.749833777770225e-05, |
|
"loss": 0.6946, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.04741744284504657, |
|
"grad_norm": 0.21713268756866455, |
|
"learning_rate": 5.717463109955896e-05, |
|
"loss": 0.6445, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.04761667579817702, |
|
"grad_norm": 0.22738954424858093, |
|
"learning_rate": 5.685061708409841e-05, |
|
"loss": 0.6333, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.047815908751307465, |
|
"grad_norm": 0.27048802375793457, |
|
"learning_rate": 5.6526309611002594e-05, |
|
"loss": 0.7271, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.048015141704437915, |
|
"grad_norm": 0.26063868403434753, |
|
"learning_rate": 5.6201722572524275e-05, |
|
"loss": 0.6432, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.048214374657568365, |
|
"grad_norm": 0.25063076615333557, |
|
"learning_rate": 5.587686987289189e-05, |
|
"loss": 0.7654, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.04841360761069881, |
|
"grad_norm": 0.2545402944087982, |
|
"learning_rate": 5.5551765427713884e-05, |
|
"loss": 0.683, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.04861284056382926, |
|
"grad_norm": 0.26064079999923706, |
|
"learning_rate": 5.522642316338268e-05, |
|
"loss": 0.6294, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.0488120735169597, |
|
"grad_norm": 0.2834579348564148, |
|
"learning_rate": 5.490085701647805e-05, |
|
"loss": 0.7894, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.04901130647009015, |
|
"grad_norm": 0.27373987436294556, |
|
"learning_rate": 5.457508093317013e-05, |
|
"loss": 0.7169, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.0492105394232206, |
|
"grad_norm": 0.24024784564971924, |
|
"learning_rate": 5.4249108868622086e-05, |
|
"loss": 0.7348, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.049409772376351045, |
|
"grad_norm": 0.24063633382320404, |
|
"learning_rate": 5.392295478639225e-05, |
|
"loss": 0.6727, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.049609005329481495, |
|
"grad_norm": 0.24858322739601135, |
|
"learning_rate": 5.359663265783598e-05, |
|
"loss": 0.6676, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.049808238282611945, |
|
"grad_norm": 0.32603856921195984, |
|
"learning_rate": 5.327015646150716e-05, |
|
"loss": 0.8165, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.05000747123574239, |
|
"grad_norm": 0.20693735778331757, |
|
"learning_rate": 5.294354018255945e-05, |
|
"loss": 0.7082, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.05020670418887284, |
|
"grad_norm": 0.2583768367767334, |
|
"learning_rate": 5.26167978121472e-05, |
|
"loss": 0.6606, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.05040593714200329, |
|
"grad_norm": 0.20723305642604828, |
|
"learning_rate": 5.228994334682604e-05, |
|
"loss": 0.5572, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.05060517009513373, |
|
"grad_norm": 0.21328355371952057, |
|
"learning_rate": 5.196299078795344e-05, |
|
"loss": 0.6035, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.05080440304826418, |
|
"grad_norm": 0.17065133154392242, |
|
"learning_rate": 5.1635954141088813e-05, |
|
"loss": 0.5368, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.05100363600139463, |
|
"grad_norm": 0.1970246285200119, |
|
"learning_rate": 5.1308847415393666e-05, |
|
"loss": 0.6758, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.051202868954525076, |
|
"grad_norm": 0.19214090704917908, |
|
"learning_rate": 5.0981684623031415e-05, |
|
"loss": 0.6448, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.051402101907655526, |
|
"grad_norm": 0.2105257362127304, |
|
"learning_rate": 5.0654479778567223e-05, |
|
"loss": 0.6485, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.051601334860785976, |
|
"grad_norm": 0.1960887312889099, |
|
"learning_rate": 5.0327246898367597e-05, |
|
"loss": 0.6057, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.05180056781391642, |
|
"grad_norm": 0.16844411194324493, |
|
"learning_rate": 5e-05, |
|
"loss": 0.5688, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.05199980076704687, |
|
"grad_norm": 0.20722134411334991, |
|
"learning_rate": 4.9672753101632415e-05, |
|
"loss": 0.5989, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.05219903372017732, |
|
"grad_norm": 0.20455193519592285, |
|
"learning_rate": 4.934552022143279e-05, |
|
"loss": 0.6179, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.05239826667330776, |
|
"grad_norm": 0.21068598330020905, |
|
"learning_rate": 4.901831537696859e-05, |
|
"loss": 0.6609, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.05259749962643821, |
|
"grad_norm": 0.2080422341823578, |
|
"learning_rate": 4.869115258460635e-05, |
|
"loss": 0.6534, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.05279673257956866, |
|
"grad_norm": 0.2010425478219986, |
|
"learning_rate": 4.83640458589112e-05, |
|
"loss": 0.6078, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.052995965532699106, |
|
"grad_norm": 0.1816807985305786, |
|
"learning_rate": 4.8037009212046586e-05, |
|
"loss": 0.5583, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.053195198485829556, |
|
"grad_norm": 0.21408969163894653, |
|
"learning_rate": 4.7710056653173976e-05, |
|
"loss": 0.6408, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.053394431438960006, |
|
"grad_norm": 0.23079368472099304, |
|
"learning_rate": 4.738320218785281e-05, |
|
"loss": 0.692, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.05359366439209045, |
|
"grad_norm": 0.24382252991199493, |
|
"learning_rate": 4.7056459817440544e-05, |
|
"loss": 0.7265, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.0537928973452209, |
|
"grad_norm": 0.23294223845005035, |
|
"learning_rate": 4.6729843538492847e-05, |
|
"loss": 0.7042, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.05399213029835135, |
|
"grad_norm": 0.22150427103042603, |
|
"learning_rate": 4.640336734216403e-05, |
|
"loss": 0.6907, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.05419136325148179, |
|
"grad_norm": 0.21380844712257385, |
|
"learning_rate": 4.607704521360776e-05, |
|
"loss": 0.6596, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.05439059620461224, |
|
"grad_norm": 0.22134555876255035, |
|
"learning_rate": 4.575089113137792e-05, |
|
"loss": 0.7081, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.05458982915774269, |
|
"grad_norm": 0.237199068069458, |
|
"learning_rate": 4.542491906682989e-05, |
|
"loss": 0.677, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.05478906211087314, |
|
"grad_norm": 0.2207615077495575, |
|
"learning_rate": 4.509914298352197e-05, |
|
"loss": 0.6395, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.05498829506400359, |
|
"grad_norm": 0.24687650799751282, |
|
"learning_rate": 4.477357683661734e-05, |
|
"loss": 0.6936, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.05518752801713404, |
|
"grad_norm": 0.271902859210968, |
|
"learning_rate": 4.444823457228612e-05, |
|
"loss": 0.7742, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.05538676097026448, |
|
"grad_norm": 0.23451142013072968, |
|
"learning_rate": 4.412313012710813e-05, |
|
"loss": 0.7369, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.05558599392339493, |
|
"grad_norm": 0.24319756031036377, |
|
"learning_rate": 4.379827742747575e-05, |
|
"loss": 0.6709, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.05578522687652538, |
|
"grad_norm": 0.24758942425251007, |
|
"learning_rate": 4.347369038899744e-05, |
|
"loss": 0.6954, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.055984459829655824, |
|
"grad_norm": 0.20816007256507874, |
|
"learning_rate": 4.3149382915901606e-05, |
|
"loss": 0.6465, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.056183692782786274, |
|
"grad_norm": 0.22767412662506104, |
|
"learning_rate": 4.282536890044104e-05, |
|
"loss": 0.6318, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.056382925735916724, |
|
"grad_norm": 0.24410390853881836, |
|
"learning_rate": 4.250166222229774e-05, |
|
"loss": 0.7362, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.05658215868904717, |
|
"grad_norm": 0.25331801176071167, |
|
"learning_rate": 4.2178276747988446e-05, |
|
"loss": 0.7286, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.05678139164217762, |
|
"grad_norm": 0.2666231393814087, |
|
"learning_rate": 4.185522633027057e-05, |
|
"loss": 0.681, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.05698062459530807, |
|
"grad_norm": 0.2353515923023224, |
|
"learning_rate": 4.153252480754877e-05, |
|
"loss": 0.7308, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.05717985754843851, |
|
"grad_norm": 0.23132577538490295, |
|
"learning_rate": 4.1210186003282275e-05, |
|
"loss": 0.6479, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.05737909050156896, |
|
"grad_norm": 0.23580260574817657, |
|
"learning_rate": 4.088822372539263e-05, |
|
"loss": 0.638, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.057578323454699404, |
|
"grad_norm": 0.2322821170091629, |
|
"learning_rate": 4.0566651765672246e-05, |
|
"loss": 0.6389, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.057777556407829854, |
|
"grad_norm": 0.269782692193985, |
|
"learning_rate": 4.0245483899193595e-05, |
|
"loss": 0.7522, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.057976789360960304, |
|
"grad_norm": 0.24970482289791107, |
|
"learning_rate": 3.992473388371915e-05, |
|
"loss": 0.7166, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.05817602231409075, |
|
"grad_norm": 0.25823989510536194, |
|
"learning_rate": 3.960441545911204e-05, |
|
"loss": 0.7155, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.0583752552672212, |
|
"grad_norm": 0.23043105006217957, |
|
"learning_rate": 3.928454234674747e-05, |
|
"loss": 0.6239, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.05857448822035165, |
|
"grad_norm": 0.26879456639289856, |
|
"learning_rate": 3.896512824892495e-05, |
|
"loss": 0.7149, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.05877372117348209, |
|
"grad_norm": 0.26333490014076233, |
|
"learning_rate": 3.864618684828134e-05, |
|
"loss": 0.7298, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.05897295412661254, |
|
"grad_norm": 0.26092275977134705, |
|
"learning_rate": 3.832773180720475e-05, |
|
"loss": 0.6897, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.05917218707974299, |
|
"grad_norm": 0.2566492557525635, |
|
"learning_rate": 3.800977676724919e-05, |
|
"loss": 0.6571, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.059371420032873434, |
|
"grad_norm": 0.27104929089546204, |
|
"learning_rate": 3.769233534855035e-05, |
|
"loss": 0.7303, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.059570652986003884, |
|
"grad_norm": 0.2734347879886627, |
|
"learning_rate": 3.73754211492421e-05, |
|
"loss": 0.7918, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.059769885939134335, |
|
"grad_norm": 0.24449193477630615, |
|
"learning_rate": 3.705904774487396e-05, |
|
"loss": 0.6002, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.059769885939134335, |
|
"eval_loss": 0.6857987642288208, |
|
"eval_runtime": 265.3385, |
|
"eval_samples_per_second": 31.861, |
|
"eval_steps_per_second": 7.967, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.05996911889226478, |
|
"grad_norm": 0.1829785406589508, |
|
"learning_rate": 3.6743228687829595e-05, |
|
"loss": 0.4575, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.06016835184539523, |
|
"grad_norm": 0.2460087090730667, |
|
"learning_rate": 3.642797750674629e-05, |
|
"loss": 0.5853, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.06036758479852568, |
|
"grad_norm": 0.25549712777137756, |
|
"learning_rate": 3.6113307705935396e-05, |
|
"loss": 0.6608, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.06056681775165612, |
|
"grad_norm": 0.25257304310798645, |
|
"learning_rate": 3.579923276480387e-05, |
|
"loss": 0.6838, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.06076605070478657, |
|
"grad_norm": 0.27871885895729065, |
|
"learning_rate": 3.5485766137276894e-05, |
|
"loss": 0.7465, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.06096528365791702, |
|
"grad_norm": 0.21810035407543182, |
|
"learning_rate": 3.5172921251221455e-05, |
|
"loss": 0.6733, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.061164516611047465, |
|
"grad_norm": 0.19901888072490692, |
|
"learning_rate": 3.486071150787128e-05, |
|
"loss": 0.6081, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.061363749564177915, |
|
"grad_norm": 0.21920450031757355, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.6168, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.061562982517308365, |
|
"grad_norm": 0.22061915695667267, |
|
"learning_rate": 3.423825091761153e-05, |
|
"loss": 0.6026, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.06176221547043881, |
|
"grad_norm": 0.22976446151733398, |
|
"learning_rate": 3.392802673484193e-05, |
|
"loss": 0.7188, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.06196144842356926, |
|
"grad_norm": 0.23608386516571045, |
|
"learning_rate": 3.361849102191533e-05, |
|
"loss": 0.6482, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.06216068137669971, |
|
"grad_norm": 0.24407218396663666, |
|
"learning_rate": 3.330965703831146e-05, |
|
"loss": 0.6767, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.06235991432983015, |
|
"grad_norm": 0.2779025137424469, |
|
"learning_rate": 3.300153801345028e-05, |
|
"loss": 0.7787, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.0625591472829606, |
|
"grad_norm": 0.21833088994026184, |
|
"learning_rate": 3.2694147146125345e-05, |
|
"loss": 0.5775, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.06275838023609105, |
|
"grad_norm": 0.2857174277305603, |
|
"learning_rate": 3.2387497603938326e-05, |
|
"loss": 0.7358, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.0629576131892215, |
|
"grad_norm": 0.2404899299144745, |
|
"learning_rate": 3.2081602522734986e-05, |
|
"loss": 0.6752, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.06315684614235194, |
|
"grad_norm": 0.25618627667427063, |
|
"learning_rate": 3.177647500604252e-05, |
|
"loss": 0.7334, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.0633560790954824, |
|
"grad_norm": 0.21781964600086212, |
|
"learning_rate": 3.147212812450819e-05, |
|
"loss": 0.5937, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.06355531204861284, |
|
"grad_norm": 0.21672700345516205, |
|
"learning_rate": 3.116857491533947e-05, |
|
"loss": 0.6095, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.06375454500174328, |
|
"grad_norm": 0.2422553449869156, |
|
"learning_rate": 3.086582838174551e-05, |
|
"loss": 0.6426, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.06395377795487374, |
|
"grad_norm": 0.2380169779062271, |
|
"learning_rate": 3.056390149238022e-05, |
|
"loss": 0.6648, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.06415301090800418, |
|
"grad_norm": 0.224848210811615, |
|
"learning_rate": 3.0262807180786647e-05, |
|
"loss": 0.6811, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.06435224386113463, |
|
"grad_norm": 0.2206374555826187, |
|
"learning_rate": 2.996255834484296e-05, |
|
"loss": 0.6629, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.06455147681426508, |
|
"grad_norm": 0.22571969032287598, |
|
"learning_rate": 2.9663167846209998e-05, |
|
"loss": 0.641, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.06475070976739553, |
|
"grad_norm": 0.22498847544193268, |
|
"learning_rate": 2.936464850978027e-05, |
|
"loss": 0.642, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.06494994272052597, |
|
"grad_norm": 0.26433423161506653, |
|
"learning_rate": 2.9067013123128613e-05, |
|
"loss": 0.7015, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.06514917567365643, |
|
"grad_norm": 0.2343292236328125, |
|
"learning_rate": 2.8770274435964355e-05, |
|
"loss": 0.6088, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.06534840862678687, |
|
"grad_norm": 0.2766936123371124, |
|
"learning_rate": 2.8474445159585235e-05, |
|
"loss": 0.6763, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.06554764157991731, |
|
"grad_norm": 0.2363848239183426, |
|
"learning_rate": 2.8179537966332887e-05, |
|
"loss": 0.7461, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.06574687453304777, |
|
"grad_norm": 0.2353663295507431, |
|
"learning_rate": 2.7885565489049946e-05, |
|
"loss": 0.7216, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.06594610748617821, |
|
"grad_norm": 0.2601001262664795, |
|
"learning_rate": 2.759254032053888e-05, |
|
"loss": 0.7722, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.06614534043930866, |
|
"grad_norm": 0.23950740694999695, |
|
"learning_rate": 2.7300475013022663e-05, |
|
"loss": 0.5964, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.06634457339243911, |
|
"grad_norm": 0.23393948376178741, |
|
"learning_rate": 2.700938207760701e-05, |
|
"loss": 0.6738, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.06654380634556956, |
|
"grad_norm": 0.2648164629936218, |
|
"learning_rate": 2.671927398374443e-05, |
|
"loss": 0.6643, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.0667430392987, |
|
"grad_norm": 0.263930082321167, |
|
"learning_rate": 2.6430163158700115e-05, |
|
"loss": 0.6688, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.06694227225183046, |
|
"grad_norm": 0.2163466513156891, |
|
"learning_rate": 2.6142061987019577e-05, |
|
"loss": 0.5839, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.0671415052049609, |
|
"grad_norm": 0.23970170319080353, |
|
"learning_rate": 2.5854982809998153e-05, |
|
"loss": 0.6826, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.06734073815809134, |
|
"grad_norm": 0.26360487937927246, |
|
"learning_rate": 2.556893792515227e-05, |
|
"loss": 0.7177, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.0675399711112218, |
|
"grad_norm": 0.2591302692890167, |
|
"learning_rate": 2.5283939585692783e-05, |
|
"loss": 0.6781, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.06773920406435224, |
|
"grad_norm": 0.26808708906173706, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.6558, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.06793843701748269, |
|
"grad_norm": 0.2927316725254059, |
|
"learning_rate": 2.471713133110078e-05, |
|
"loss": 0.715, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.06813766997061314, |
|
"grad_norm": 0.25598007440567017, |
|
"learning_rate": 2.4435345696147403e-05, |
|
"loss": 0.6857, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.06833690292374359, |
|
"grad_norm": 0.27012765407562256, |
|
"learning_rate": 2.4154655165898627e-05, |
|
"loss": 0.6704, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.06853613587687403, |
|
"grad_norm": 0.27995193004608154, |
|
"learning_rate": 2.3875071764202563e-05, |
|
"loss": 0.6795, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.06873536883000449, |
|
"grad_norm": 0.2844606935977936, |
|
"learning_rate": 2.3596607467481603e-05, |
|
"loss": 0.7176, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.06893460178313493, |
|
"grad_norm": 0.3271996080875397, |
|
"learning_rate": 2.3319274204219428e-05, |
|
"loss": 0.7862, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.06913383473626537, |
|
"grad_norm": 0.24104076623916626, |
|
"learning_rate": 2.3043083854449988e-05, |
|
"loss": 0.6574, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.06933306768939583, |
|
"grad_norm": 0.27659985423088074, |
|
"learning_rate": 2.2768048249248648e-05, |
|
"loss": 0.7289, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.06953230064252627, |
|
"grad_norm": 0.29960310459136963, |
|
"learning_rate": 2.2494179170225333e-05, |
|
"loss": 0.6818, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.06973153359565672, |
|
"grad_norm": 0.3256206810474396, |
|
"learning_rate": 2.2221488349019903e-05, |
|
"loss": 0.7484, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.06993076654878717, |
|
"grad_norm": 0.21653087437152863, |
|
"learning_rate": 2.194998746679952e-05, |
|
"loss": 0.6332, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.07012999950191762, |
|
"grad_norm": 0.24491187930107117, |
|
"learning_rate": 2.167968815375837e-05, |
|
"loss": 0.6758, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.07032923245504806, |
|
"grad_norm": 0.2615233361721039, |
|
"learning_rate": 2.1410601988619394e-05, |
|
"loss": 0.6832, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.07052846540817852, |
|
"grad_norm": 0.3030376136302948, |
|
"learning_rate": 2.1142740498138324e-05, |
|
"loss": 0.7022, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.07072769836130896, |
|
"grad_norm": 0.24537107348442078, |
|
"learning_rate": 2.08761151566099e-05, |
|
"loss": 0.6833, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.0709269313144394, |
|
"grad_norm": 0.22852526605129242, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.6045, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.07112616426756986, |
|
"grad_norm": 0.2126518040895462, |
|
"learning_rate": 2.034661855233815e-05, |
|
"loss": 0.6393, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.0713253972207003, |
|
"grad_norm": 0.2170926332473755, |
|
"learning_rate": 2.008376997146705e-05, |
|
"loss": 0.636, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.07152463017383075, |
|
"grad_norm": 0.24603933095932007, |
|
"learning_rate": 1.982220290232143e-05, |
|
"loss": 0.6854, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.0717238631269612, |
|
"grad_norm": 0.25937581062316895, |
|
"learning_rate": 1.9561928549563968e-05, |
|
"loss": 0.6989, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.07192309608009165, |
|
"grad_norm": 0.2045435905456543, |
|
"learning_rate": 1.9302958062481673e-05, |
|
"loss": 0.5754, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.07212232903322209, |
|
"grad_norm": 0.24368344247341156, |
|
"learning_rate": 1.9045302534508297e-05, |
|
"loss": 0.6537, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.07232156198635255, |
|
"grad_norm": 0.2470693588256836, |
|
"learning_rate": 1.8788973002749112e-05, |
|
"loss": 0.675, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.07252079493948299, |
|
"grad_norm": 0.23468363285064697, |
|
"learning_rate": 1.8533980447508137e-05, |
|
"loss": 0.675, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.07272002789261343, |
|
"grad_norm": 0.23341692984104156, |
|
"learning_rate": 1.8280335791817733e-05, |
|
"loss": 0.6899, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.07291926084574389, |
|
"grad_norm": 0.24296964704990387, |
|
"learning_rate": 1.8028049900970767e-05, |
|
"loss": 0.6898, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.07311849379887433, |
|
"grad_norm": 0.21605587005615234, |
|
"learning_rate": 1.777713358205514e-05, |
|
"loss": 0.615, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.07331772675200478, |
|
"grad_norm": 0.23035632073879242, |
|
"learning_rate": 1.7527597583490822e-05, |
|
"loss": 0.7135, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.07351695970513523, |
|
"grad_norm": 0.24987879395484924, |
|
"learning_rate": 1.7279452594569483e-05, |
|
"loss": 0.6646, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.07371619265826568, |
|
"grad_norm": 0.23275192081928253, |
|
"learning_rate": 1.703270924499656e-05, |
|
"loss": 0.6405, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.07391542561139612, |
|
"grad_norm": 0.2441992163658142, |
|
"learning_rate": 1.678737810443593e-05, |
|
"loss": 0.7028, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.07411465856452658, |
|
"grad_norm": 0.21933166682720184, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 0.5545, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.07431389151765702, |
|
"grad_norm": 0.24288050830364227, |
|
"learning_rate": 1.6300994426085103e-05, |
|
"loss": 0.6971, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.07451312447078746, |
|
"grad_norm": 0.2461194097995758, |
|
"learning_rate": 1.605996272335291e-05, |
|
"loss": 0.6359, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.07471235742391792, |
|
"grad_norm": 0.25669926404953003, |
|
"learning_rate": 1.5820384898856434e-05, |
|
"loss": 0.6917, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.07491159037704836, |
|
"grad_norm": 0.23437441885471344, |
|
"learning_rate": 1.5582271215312294e-05, |
|
"loss": 0.6353, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.07511082333017881, |
|
"grad_norm": 0.2659025192260742, |
|
"learning_rate": 1.5345631872718214e-05, |
|
"loss": 0.7663, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.07531005628330927, |
|
"grad_norm": 0.23349608480930328, |
|
"learning_rate": 1.5110477007916001e-05, |
|
"loss": 0.6564, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.07550928923643971, |
|
"grad_norm": 0.24196575582027435, |
|
"learning_rate": 1.4876816694157419e-05, |
|
"loss": 0.6607, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.07570852218957015, |
|
"grad_norm": 0.23814047873020172, |
|
"learning_rate": 1.4644660940672627e-05, |
|
"loss": 0.6354, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.07590775514270061, |
|
"grad_norm": 0.23850150406360626, |
|
"learning_rate": 1.4414019692241437e-05, |
|
"loss": 0.6643, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.07610698809583105, |
|
"grad_norm": 0.22468359768390656, |
|
"learning_rate": 1.4184902828767287e-05, |
|
"loss": 0.6246, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.0763062210489615, |
|
"grad_norm": 0.23000527918338776, |
|
"learning_rate": 1.3957320164854059e-05, |
|
"loss": 0.6283, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.07650545400209195, |
|
"grad_norm": 0.2583581507205963, |
|
"learning_rate": 1.373128144938563e-05, |
|
"loss": 0.6927, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.0767046869552224, |
|
"grad_norm": 0.2393674999475479, |
|
"learning_rate": 1.3506796365108232e-05, |
|
"loss": 0.668, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.07690391990835284, |
|
"grad_norm": 0.2238411009311676, |
|
"learning_rate": 1.3283874528215733e-05, |
|
"loss": 0.6291, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.0771031528614833, |
|
"grad_norm": 0.22692270576953888, |
|
"learning_rate": 1.3062525487937699e-05, |
|
"loss": 0.6488, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.07730238581461374, |
|
"grad_norm": 0.29837989807128906, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 0.7459, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.07750161876774418, |
|
"grad_norm": 0.26298198103904724, |
|
"learning_rate": 1.2624583656870154e-05, |
|
"loss": 0.6305, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.07770085172087464, |
|
"grad_norm": 0.2501063644886017, |
|
"learning_rate": 1.2408009626051137e-05, |
|
"loss": 0.6475, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.07790008467400508, |
|
"grad_norm": 0.26658689975738525, |
|
"learning_rate": 1.2193045910983863e-05, |
|
"loss": 0.6945, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.07809931762713553, |
|
"grad_norm": 0.27355584502220154, |
|
"learning_rate": 1.1979701719998453e-05, |
|
"loss": 0.7023, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.07829855058026598, |
|
"grad_norm": 0.24372565746307373, |
|
"learning_rate": 1.1767986192049984e-05, |
|
"loss": 0.6686, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.07849778353339643, |
|
"grad_norm": 0.28914278745651245, |
|
"learning_rate": 1.1557908396327028e-05, |
|
"loss": 0.7444, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.07869701648652687, |
|
"grad_norm": 0.26138660311698914, |
|
"learning_rate": 1.134947733186315e-05, |
|
"loss": 0.6624, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.07889624943965733, |
|
"grad_norm": 0.25570860505104065, |
|
"learning_rate": 1.1142701927151456e-05, |
|
"loss": 0.6746, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.07909548239278777, |
|
"grad_norm": 0.2897097170352936, |
|
"learning_rate": 1.0937591039762085e-05, |
|
"loss": 0.7515, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.07929471534591821, |
|
"grad_norm": 0.2693999111652374, |
|
"learning_rate": 1.0734153455962765e-05, |
|
"loss": 0.7385, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.07949394829904867, |
|
"grad_norm": 0.2895209491252899, |
|
"learning_rate": 1.0532397890342505e-05, |
|
"loss": 0.7491, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.07969318125217911, |
|
"grad_norm": 0.2964557707309723, |
|
"learning_rate": 1.0332332985438248e-05, |
|
"loss": 0.7342, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.07969318125217911, |
|
"eval_loss": 0.6768385171890259, |
|
"eval_runtime": 265.4226, |
|
"eval_samples_per_second": 31.851, |
|
"eval_steps_per_second": 7.965, |
|
"step": 400 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 500, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.3302341221692211e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|