{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0006826715212198, |
|
"eval_steps": 275, |
|
"global_step": 1099, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0009102286949596086, |
|
"grad_norm": 0.3924100995063782, |
|
"learning_rate": 2e-05, |
|
"loss": 2.8811, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0009102286949596086, |
|
"eval_loss": 2.6547484397888184, |
|
"eval_runtime": 202.3055, |
|
"eval_samples_per_second": 9.15, |
|
"eval_steps_per_second": 4.577, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0018204573899192173, |
|
"grad_norm": 0.4433031976222992, |
|
"learning_rate": 4e-05, |
|
"loss": 2.8541, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0027306860848788257, |
|
"grad_norm": 0.4557134211063385, |
|
"learning_rate": 6e-05, |
|
"loss": 2.6364, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0036409147798384346, |
|
"grad_norm": 0.3975633978843689, |
|
"learning_rate": 8e-05, |
|
"loss": 2.6666, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.004551143474798043, |
|
"grad_norm": 0.36896416544914246, |
|
"learning_rate": 0.0001, |
|
"loss": 2.745, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0054613721697576514, |
|
"grad_norm": 0.30803996324539185, |
|
"learning_rate": 0.00012, |
|
"loss": 2.7417, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.00637160086471726, |
|
"grad_norm": 0.40208426117897034, |
|
"learning_rate": 0.00014, |
|
"loss": 2.7006, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.007281829559676869, |
|
"grad_norm": 0.5293606519699097, |
|
"learning_rate": 0.00016, |
|
"loss": 2.6852, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.008192058254636477, |
|
"grad_norm": 0.4535931646823883, |
|
"learning_rate": 0.00018, |
|
"loss": 2.6472, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.009102286949596085, |
|
"grad_norm": 0.32121461629867554, |
|
"learning_rate": 0.0002, |
|
"loss": 2.6648, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.010012515644555695, |
|
"grad_norm": 0.3374486565589905, |
|
"learning_rate": 0.00019999958388469571, |
|
"loss": 2.5696, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.010922744339515303, |
|
"grad_norm": 0.3663835823535919, |
|
"learning_rate": 0.00019999833554224577, |
|
"loss": 2.4938, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.011832973034474913, |
|
"grad_norm": 0.32625913619995117, |
|
"learning_rate": 0.00019999625498303932, |
|
"loss": 2.5661, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01274320172943452, |
|
"grad_norm": 0.382977157831192, |
|
"learning_rate": 0.00019999334222439147, |
|
"loss": 2.5544, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.013653430424394129, |
|
"grad_norm": 0.3954707384109497, |
|
"learning_rate": 0.00019998959729054295, |
|
"loss": 2.5744, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.014563659119353738, |
|
"grad_norm": 0.30874142050743103, |
|
"learning_rate": 0.0001999850202126604, |
|
"loss": 2.5242, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.015473887814313346, |
|
"grad_norm": 0.30474328994750977, |
|
"learning_rate": 0.00019997961102883552, |
|
"loss": 2.4201, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.016384116509272954, |
|
"grad_norm": 0.32679283618927, |
|
"learning_rate": 0.00019997336978408531, |
|
"loss": 2.5165, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.017294345204232564, |
|
"grad_norm": 0.3267504572868347, |
|
"learning_rate": 0.00019996629653035126, |
|
"loss": 2.2016, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01820457389919217, |
|
"grad_norm": 0.33519116044044495, |
|
"learning_rate": 0.00019995839132649917, |
|
"loss": 2.4004, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01911480259415178, |
|
"grad_norm": 0.36370110511779785, |
|
"learning_rate": 0.00019994965423831854, |
|
"loss": 2.4736, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02002503128911139, |
|
"grad_norm": 0.36940237879753113, |
|
"learning_rate": 0.0001999400853385221, |
|
"loss": 2.4726, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.020935259984071, |
|
"grad_norm": 0.3428037762641907, |
|
"learning_rate": 0.0001999296847067452, |
|
"loss": 2.4291, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.021845488679030606, |
|
"grad_norm": 0.3404378294944763, |
|
"learning_rate": 0.00019991845242954505, |
|
"loss": 2.0424, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.022755717373990215, |
|
"grad_norm": 0.3714047074317932, |
|
"learning_rate": 0.00019990638860040006, |
|
"loss": 2.2809, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.023665946068949825, |
|
"grad_norm": 0.37741491198539734, |
|
"learning_rate": 0.00019989349331970923, |
|
"loss": 2.2841, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02457617476390943, |
|
"grad_norm": 0.38504019379615784, |
|
"learning_rate": 0.00019987976669479088, |
|
"loss": 2.2056, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02548640345886904, |
|
"grad_norm": 0.4182392954826355, |
|
"learning_rate": 0.00019986520883988232, |
|
"loss": 2.3747, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02639663215382865, |
|
"grad_norm": 0.4266968369483948, |
|
"learning_rate": 0.0001998498198761384, |
|
"loss": 2.3608, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.027306860848788257, |
|
"grad_norm": 0.3748364746570587, |
|
"learning_rate": 0.00019983359993163078, |
|
"loss": 2.0789, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.028217089543747867, |
|
"grad_norm": 0.4157109558582306, |
|
"learning_rate": 0.00019981654914134686, |
|
"loss": 2.3174, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.029127318238707477, |
|
"grad_norm": 0.4328743517398834, |
|
"learning_rate": 0.00019979866764718843, |
|
"loss": 2.3304, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.030037546933667083, |
|
"grad_norm": 0.4545007050037384, |
|
"learning_rate": 0.0001997799555979709, |
|
"loss": 2.3578, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.030947775628626693, |
|
"grad_norm": 0.4287387430667877, |
|
"learning_rate": 0.00019976041314942155, |
|
"loss": 2.1072, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.0318580043235863, |
|
"grad_norm": 0.4378172755241394, |
|
"learning_rate": 0.0001997400404641787, |
|
"loss": 2.2507, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03276823301854591, |
|
"grad_norm": 0.42525431513786316, |
|
"learning_rate": 0.00019971883771179003, |
|
"loss": 2.1549, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03367846171350552, |
|
"grad_norm": 0.4390096068382263, |
|
"learning_rate": 0.00019969680506871137, |
|
"loss": 2.3869, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03458869040846513, |
|
"grad_norm": 0.4858846068382263, |
|
"learning_rate": 0.00019967394271830504, |
|
"loss": 2.5711, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03549891910342474, |
|
"grad_norm": 0.46703851222991943, |
|
"learning_rate": 0.00019965025085083858, |
|
"loss": 2.3567, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03640914779838434, |
|
"grad_norm": 0.4408855438232422, |
|
"learning_rate": 0.000199625729663483, |
|
"loss": 2.1646, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03731937649334395, |
|
"grad_norm": 0.49953216314315796, |
|
"learning_rate": 0.00019960037936031104, |
|
"loss": 2.3658, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03822960518830356, |
|
"grad_norm": 0.48640862107276917, |
|
"learning_rate": 0.00019957420015229572, |
|
"loss": 2.4125, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03913983388326317, |
|
"grad_norm": 0.5187159776687622, |
|
"learning_rate": 0.00019954719225730847, |
|
"loss": 2.3655, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.04005006257822278, |
|
"grad_norm": 0.48923739790916443, |
|
"learning_rate": 0.00019951935590011718, |
|
"loss": 2.257, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.04096029127318239, |
|
"grad_norm": 0.4975845515727997, |
|
"learning_rate": 0.0001994906913123846, |
|
"loss": 2.185, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.041870519968142, |
|
"grad_norm": 0.5635886788368225, |
|
"learning_rate": 0.00019946119873266613, |
|
"loss": 2.6952, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0427807486631016, |
|
"grad_norm": 0.5890567898750305, |
|
"learning_rate": 0.00019943087840640814, |
|
"loss": 2.5154, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04369097735806121, |
|
"grad_norm": 0.6770642995834351, |
|
"learning_rate": 0.0001993997305859456, |
|
"loss": 2.7011, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04460120605302082, |
|
"grad_norm": 0.7621054649353027, |
|
"learning_rate": 0.0001993677555305002, |
|
"loss": 2.5097, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04551143474798043, |
|
"grad_norm": 1.2259888648986816, |
|
"learning_rate": 0.00019933495350617813, |
|
"loss": 2.516, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04642166344294004, |
|
"grad_norm": 1.261356234550476, |
|
"learning_rate": 0.00019930132478596796, |
|
"loss": 2.7173, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04733189213789965, |
|
"grad_norm": 0.5333342552185059, |
|
"learning_rate": 0.00019926686964973813, |
|
"loss": 2.7659, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.04824212083285925, |
|
"grad_norm": 0.43384629487991333, |
|
"learning_rate": 0.00019923158838423482, |
|
"loss": 2.4593, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.04915234952781886, |
|
"grad_norm": 0.506197452545166, |
|
"learning_rate": 0.00019919548128307954, |
|
"loss": 2.6135, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.05006257822277847, |
|
"grad_norm": 0.4372877776622772, |
|
"learning_rate": 0.00019915854864676664, |
|
"loss": 2.4952, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05097280691773808, |
|
"grad_norm": 0.4044889509677887, |
|
"learning_rate": 0.00019912079078266085, |
|
"loss": 2.6152, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.05188303561269769, |
|
"grad_norm": 0.4284885823726654, |
|
"learning_rate": 0.0001990822080049946, |
|
"loss": 2.508, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.0527932643076573, |
|
"grad_norm": 0.4030478298664093, |
|
"learning_rate": 0.0001990428006348656, |
|
"loss": 2.4545, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.053703493002616905, |
|
"grad_norm": 0.37658700346946716, |
|
"learning_rate": 0.00019900256900023413, |
|
"loss": 2.3762, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.054613721697576514, |
|
"grad_norm": 0.3478519022464752, |
|
"learning_rate": 0.00019896151343592008, |
|
"loss": 2.2991, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.055523950392536124, |
|
"grad_norm": 0.38357114791870117, |
|
"learning_rate": 0.00019891963428360043, |
|
"loss": 2.4336, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.056434179087495734, |
|
"grad_norm": 0.37183457612991333, |
|
"learning_rate": 0.00019887693189180633, |
|
"loss": 2.3966, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.057344407782455344, |
|
"grad_norm": 0.3586178123950958, |
|
"learning_rate": 0.00019883340661592015, |
|
"loss": 2.3195, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05825463647741495, |
|
"grad_norm": 0.37112224102020264, |
|
"learning_rate": 0.00019878905881817252, |
|
"loss": 2.3924, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.059164865172374556, |
|
"grad_norm": 0.37623754143714905, |
|
"learning_rate": 0.00019874388886763944, |
|
"loss": 2.562, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.060075093867334166, |
|
"grad_norm": 0.37167176604270935, |
|
"learning_rate": 0.00019869789714023906, |
|
"loss": 2.4175, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.060985322562293776, |
|
"grad_norm": 0.35394376516342163, |
|
"learning_rate": 0.00019865108401872857, |
|
"loss": 2.2845, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.061895551257253385, |
|
"grad_norm": 0.3475804030895233, |
|
"learning_rate": 0.00019860344989270113, |
|
"loss": 2.0894, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.062805779952213, |
|
"grad_norm": 0.387608140707016, |
|
"learning_rate": 0.0001985549951585825, |
|
"loss": 2.2795, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.0637160086471726, |
|
"grad_norm": 0.38371261954307556, |
|
"learning_rate": 0.00019850572021962788, |
|
"loss": 2.1714, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06462623734213221, |
|
"grad_norm": 0.397544264793396, |
|
"learning_rate": 0.00019845562548591826, |
|
"loss": 2.167, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06553646603709182, |
|
"grad_norm": 0.394674688577652, |
|
"learning_rate": 0.00019840471137435746, |
|
"loss": 2.266, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06644669473205143, |
|
"grad_norm": 0.3893704116344452, |
|
"learning_rate": 0.00019835297830866826, |
|
"loss": 2.2651, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06735692342701104, |
|
"grad_norm": 0.430054247379303, |
|
"learning_rate": 0.00019830042671938904, |
|
"loss": 2.4341, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06826715212197064, |
|
"grad_norm": 0.4117582440376282, |
|
"learning_rate": 0.00019824705704387028, |
|
"loss": 2.2562, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06917738081693026, |
|
"grad_norm": 0.40804898738861084, |
|
"learning_rate": 0.00019819286972627066, |
|
"loss": 2.1574, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.07008760951188986, |
|
"grad_norm": 0.3954644501209259, |
|
"learning_rate": 0.00019813786521755372, |
|
"loss": 2.0693, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.07099783820684948, |
|
"grad_norm": 0.4126876890659332, |
|
"learning_rate": 0.00019808204397548377, |
|
"loss": 2.2299, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.07190806690180908, |
|
"grad_norm": 0.42160266637802124, |
|
"learning_rate": 0.0001980254064646223, |
|
"loss": 2.1759, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.07281829559676868, |
|
"grad_norm": 0.4152994751930237, |
|
"learning_rate": 0.00019796795315632395, |
|
"loss": 2.1931, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0737285242917283, |
|
"grad_norm": 0.4207664728164673, |
|
"learning_rate": 0.0001979096845287328, |
|
"loss": 2.0727, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.0746387529866879, |
|
"grad_norm": 0.41932612657546997, |
|
"learning_rate": 0.00019785060106677818, |
|
"loss": 2.3067, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07554898168164752, |
|
"grad_norm": 0.44273534417152405, |
|
"learning_rate": 0.00019779070326217074, |
|
"loss": 2.2838, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.07645921037660712, |
|
"grad_norm": 0.4609782099723816, |
|
"learning_rate": 0.00019772999161339833, |
|
"loss": 2.2605, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.07736943907156674, |
|
"grad_norm": 0.4551486074924469, |
|
"learning_rate": 0.00019766846662572191, |
|
"loss": 2.3159, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07827966776652634, |
|
"grad_norm": 0.4343845546245575, |
|
"learning_rate": 0.00019760612881117125, |
|
"loss": 2.1787, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07918989646148594, |
|
"grad_norm": 0.48003000020980835, |
|
"learning_rate": 0.00019754297868854073, |
|
"loss": 2.3255, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.08010012515644556, |
|
"grad_norm": 0.4384368062019348, |
|
"learning_rate": 0.00019747901678338496, |
|
"loss": 2.2881, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.08101035385140516, |
|
"grad_norm": 0.5119354724884033, |
|
"learning_rate": 0.00019741424362801452, |
|
"loss": 2.3529, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.08192058254636478, |
|
"grad_norm": 0.4595087766647339, |
|
"learning_rate": 0.00019734865976149145, |
|
"loss": 2.2495, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08283081124132438, |
|
"grad_norm": 0.4848781228065491, |
|
"learning_rate": 0.00019728226572962473, |
|
"loss": 2.446, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.083741039936284, |
|
"grad_norm": 0.4848371148109436, |
|
"learning_rate": 0.00019721506208496585, |
|
"loss": 2.2737, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.0846512686312436, |
|
"grad_norm": 0.4835449755191803, |
|
"learning_rate": 0.00019714704938680408, |
|
"loss": 2.224, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0855614973262032, |
|
"grad_norm": 0.48262766003608704, |
|
"learning_rate": 0.00019707822820116193, |
|
"loss": 2.2449, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.08647172602116282, |
|
"grad_norm": 0.49700936675071716, |
|
"learning_rate": 0.00019700859910079036, |
|
"loss": 2.1856, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08738195471612242, |
|
"grad_norm": 0.5965728163719177, |
|
"learning_rate": 0.00019693816266516407, |
|
"loss": 2.6448, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08829218341108204, |
|
"grad_norm": 0.6164146661758423, |
|
"learning_rate": 0.00019686691948047664, |
|
"loss": 2.4917, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.08920241210604164, |
|
"grad_norm": 0.6717427372932434, |
|
"learning_rate": 0.00019679487013963564, |
|
"loss": 2.7093, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.09011264080100125, |
|
"grad_norm": 0.808948278427124, |
|
"learning_rate": 0.00019672201524225776, |
|
"loss": 2.766, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.09102286949596086, |
|
"grad_norm": 1.4683064222335815, |
|
"learning_rate": 0.0001966483553946637, |
|
"loss": 2.7288, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.09193309819092046, |
|
"grad_norm": 1.0303999185562134, |
|
"learning_rate": 0.00019657389120987333, |
|
"loss": 2.8246, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.09284332688588008, |
|
"grad_norm": 0.49279651045799255, |
|
"learning_rate": 0.00019649862330760036, |
|
"loss": 2.722, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.09375355558083968, |
|
"grad_norm": 0.43273016810417175, |
|
"learning_rate": 0.00019642255231424729, |
|
"loss": 2.5446, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.0946637842757993, |
|
"grad_norm": 0.4434455335140228, |
|
"learning_rate": 0.00019634567886290025, |
|
"loss": 2.4578, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.0955740129707589, |
|
"grad_norm": 0.4245634973049164, |
|
"learning_rate": 0.00019626800359332362, |
|
"loss": 2.5472, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.0964842416657185, |
|
"grad_norm": 0.42879197001457214, |
|
"learning_rate": 0.00019618952715195475, |
|
"loss": 2.5313, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09739447036067812, |
|
"grad_norm": 0.4644932150840759, |
|
"learning_rate": 0.0001961102501918986, |
|
"loss": 2.3859, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.09830469905563773, |
|
"grad_norm": 0.40692242980003357, |
|
"learning_rate": 0.00019603017337292236, |
|
"loss": 2.433, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09921492775059734, |
|
"grad_norm": 0.36667487025260925, |
|
"learning_rate": 0.00019594929736144976, |
|
"loss": 2.3352, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.10012515644555695, |
|
"grad_norm": 0.41123342514038086, |
|
"learning_rate": 0.00019586762283055573, |
|
"loss": 2.3494, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.10103538514051655, |
|
"grad_norm": 0.40255314111709595, |
|
"learning_rate": 0.00019578515045996073, |
|
"loss": 2.4524, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.10194561383547616, |
|
"grad_norm": 0.38136208057403564, |
|
"learning_rate": 0.0001957018809360251, |
|
"loss": 2.3937, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.10285584253043577, |
|
"grad_norm": 0.4538716673851013, |
|
"learning_rate": 0.00019561781495174328, |
|
"loss": 2.3144, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.10376607122539538, |
|
"grad_norm": 0.3876875042915344, |
|
"learning_rate": 0.00019553295320673807, |
|
"loss": 2.2868, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.10467629992035499, |
|
"grad_norm": 0.40529122948646545, |
|
"learning_rate": 0.00019544729640725498, |
|
"loss": 2.4219, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1055865286153146, |
|
"grad_norm": 0.36852964758872986, |
|
"learning_rate": 0.0001953608452661561, |
|
"loss": 2.3485, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.1064967573102742, |
|
"grad_norm": 0.3608396053314209, |
|
"learning_rate": 0.0001952736005029142, |
|
"loss": 2.2813, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.10740698600523381, |
|
"grad_norm": 0.3647065758705139, |
|
"learning_rate": 0.00019518556284360696, |
|
"loss": 2.2284, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.10831721470019343, |
|
"grad_norm": 0.3692014217376709, |
|
"learning_rate": 0.00019509673302091075, |
|
"loss": 2.3005, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.10922744339515303, |
|
"grad_norm": 0.3861401379108429, |
|
"learning_rate": 0.00019500711177409454, |
|
"loss": 2.3023, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.11013767209011265, |
|
"grad_norm": 0.38329821825027466, |
|
"learning_rate": 0.00019491669984901379, |
|
"loss": 2.0692, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.11104790078507225, |
|
"grad_norm": 0.3725563585758209, |
|
"learning_rate": 0.00019482549799810413, |
|
"loss": 2.2495, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.11195812948003186, |
|
"grad_norm": 0.37789279222488403, |
|
"learning_rate": 0.00019473350698037535, |
|
"loss": 2.1242, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.11286835817499147, |
|
"grad_norm": 0.4199522137641907, |
|
"learning_rate": 0.00019464072756140486, |
|
"loss": 2.2501, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.11377858686995107, |
|
"grad_norm": 0.4012223184108734, |
|
"learning_rate": 0.00019454716051333135, |
|
"loss": 2.2263, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11468881556491069, |
|
"grad_norm": 0.4205365478992462, |
|
"learning_rate": 0.00019445280661484847, |
|
"loss": 2.1445, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.11559904425987029, |
|
"grad_norm": 0.41000694036483765, |
|
"learning_rate": 0.0001943576666511982, |
|
"loss": 2.198, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.1165092729548299, |
|
"grad_norm": 0.43458715081214905, |
|
"learning_rate": 0.00019426174141416448, |
|
"loss": 2.1884, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.11741950164978951, |
|
"grad_norm": 0.4233211874961853, |
|
"learning_rate": 0.00019416503170206645, |
|
"loss": 2.311, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.11832973034474911, |
|
"grad_norm": 0.411350816488266, |
|
"learning_rate": 0.00019406753831975203, |
|
"loss": 2.1085, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11923995903970873, |
|
"grad_norm": 0.42890501022338867, |
|
"learning_rate": 0.00019396926207859084, |
|
"loss": 2.0548, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.12015018773466833, |
|
"grad_norm": 0.45003724098205566, |
|
"learning_rate": 0.00019387020379646797, |
|
"loss": 2.2063, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.12106041642962795, |
|
"grad_norm": 0.4295034408569336, |
|
"learning_rate": 0.00019377036429777672, |
|
"loss": 2.1306, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.12197064512458755, |
|
"grad_norm": 0.4310653805732727, |
|
"learning_rate": 0.0001936697444134119, |
|
"loss": 2.1307, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.12288087381954717, |
|
"grad_norm": 0.4377802014350891, |
|
"learning_rate": 0.0001935683449807631, |
|
"loss": 2.2982, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.12379110251450677, |
|
"grad_norm": 0.45586827397346497, |
|
"learning_rate": 0.0001934661668437073, |
|
"loss": 2.2334, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.12470133120946637, |
|
"grad_norm": 0.4612938463687897, |
|
"learning_rate": 0.00019336321085260236, |
|
"loss": 2.2488, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.125611559904426, |
|
"grad_norm": 0.48157835006713867, |
|
"learning_rate": 0.00019325947786427952, |
|
"loss": 2.2068, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1265217885993856, |
|
"grad_norm": 0.46901848912239075, |
|
"learning_rate": 0.0001931549687420364, |
|
"loss": 2.2526, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.1274320172943452, |
|
"grad_norm": 0.4637078046798706, |
|
"learning_rate": 0.00019304968435562993, |
|
"loss": 2.1521, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12834224598930483, |
|
"grad_norm": 0.5239959955215454, |
|
"learning_rate": 0.00019294362558126905, |
|
"loss": 2.3988, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.12925247468426443, |
|
"grad_norm": 0.462212473154068, |
|
"learning_rate": 0.00019283679330160726, |
|
"loss": 2.3085, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.13016270337922403, |
|
"grad_norm": 0.49486446380615234, |
|
"learning_rate": 0.00019272918840573558, |
|
"loss": 2.2851, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.13107293207418363, |
|
"grad_norm": 0.5234097838401794, |
|
"learning_rate": 0.00019262081178917482, |
|
"loss": 2.3942, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.13198316076914324, |
|
"grad_norm": 0.525196373462677, |
|
"learning_rate": 0.0001925116643538684, |
|
"loss": 2.3057, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.13289338946410287, |
|
"grad_norm": 0.5571113228797913, |
|
"learning_rate": 0.00019240174700817464, |
|
"loss": 2.2406, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.13380361815906247, |
|
"grad_norm": 0.6122002601623535, |
|
"learning_rate": 0.00019229106066685937, |
|
"loss": 2.42, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.13471384685402207, |
|
"grad_norm": 0.6147793531417847, |
|
"learning_rate": 0.0001921796062510882, |
|
"loss": 2.5769, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.13562407554898168, |
|
"grad_norm": 0.7194758653640747, |
|
"learning_rate": 0.0001920673846884189, |
|
"loss": 2.5508, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.13653430424394128, |
|
"grad_norm": 1.1327760219573975, |
|
"learning_rate": 0.00019195439691279363, |
|
"loss": 2.776, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1374445329389009, |
|
"grad_norm": 0.5275815725326538, |
|
"learning_rate": 0.00019184064386453128, |
|
"loss": 2.6188, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.1383547616338605, |
|
"grad_norm": 0.5287826657295227, |
|
"learning_rate": 0.00019172612649031952, |
|
"loss": 2.6061, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.13926499032882012, |
|
"grad_norm": 0.4138477146625519, |
|
"learning_rate": 0.00019161084574320696, |
|
"loss": 2.355, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.14017521902377972, |
|
"grad_norm": 0.4219213128089905, |
|
"learning_rate": 0.00019149480258259533, |
|
"loss": 2.5719, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.14108544771873932, |
|
"grad_norm": 0.39532896876335144, |
|
"learning_rate": 0.00019137799797423126, |
|
"loss": 2.4734, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.14199567641369895, |
|
"grad_norm": 0.4149189889431, |
|
"learning_rate": 0.00019126043289019852, |
|
"loss": 2.5212, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.14290590510865855, |
|
"grad_norm": 0.45758771896362305, |
|
"learning_rate": 0.00019114210830890969, |
|
"loss": 2.5216, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.14381613380361816, |
|
"grad_norm": 0.4397677779197693, |
|
"learning_rate": 0.00019102302521509815, |
|
"loss": 2.6222, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.14472636249857776, |
|
"grad_norm": 0.4018581211566925, |
|
"learning_rate": 0.00019090318459980986, |
|
"loss": 2.546, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.14563659119353736, |
|
"grad_norm": 0.3948480486869812, |
|
"learning_rate": 0.00019078258746039507, |
|
"loss": 2.5202, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.146546819888497, |
|
"grad_norm": 0.37651899456977844, |
|
"learning_rate": 0.00019066123480050015, |
|
"loss": 2.3166, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.1474570485834566, |
|
"grad_norm": 0.3794865310192108, |
|
"learning_rate": 0.00019053912763005907, |
|
"loss": 2.1592, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.1483672772784162, |
|
"grad_norm": 0.39368098974227905, |
|
"learning_rate": 0.00019041626696528503, |
|
"loss": 2.3273, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.1492775059733758, |
|
"grad_norm": 0.39652615785598755, |
|
"learning_rate": 0.00019029265382866214, |
|
"loss": 2.4105, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.15018773466833543, |
|
"grad_norm": 0.4029604196548462, |
|
"learning_rate": 0.0001901682892489367, |
|
"loss": 2.3814, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.15109796336329503, |
|
"grad_norm": 0.40106695890426636, |
|
"learning_rate": 0.0001900431742611089, |
|
"loss": 2.3963, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.15200819205825464, |
|
"grad_norm": 0.3830929696559906, |
|
"learning_rate": 0.00018991730990642388, |
|
"loss": 2.2472, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.15291842075321424, |
|
"grad_norm": 0.36839723587036133, |
|
"learning_rate": 0.00018979069723236333, |
|
"loss": 2.2463, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.15382864944817384, |
|
"grad_norm": 0.36918777227401733, |
|
"learning_rate": 0.00018966333729263674, |
|
"loss": 2.2286, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.15473887814313347, |
|
"grad_norm": 0.37009552121162415, |
|
"learning_rate": 0.00018953523114717245, |
|
"loss": 2.2823, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.15564910683809308, |
|
"grad_norm": 0.38302165269851685, |
|
"learning_rate": 0.00018940637986210906, |
|
"loss": 2.1199, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.15655933553305268, |
|
"grad_norm": 0.40206897258758545, |
|
"learning_rate": 0.0001892767845097864, |
|
"loss": 2.3254, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.15746956422801228, |
|
"grad_norm": 0.3847138285636902, |
|
"learning_rate": 0.00018914644616873657, |
|
"loss": 2.0739, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.15837979292297188, |
|
"grad_norm": 0.37638360261917114, |
|
"learning_rate": 0.0001890153659236753, |
|
"loss": 2.1024, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.15929002161793152, |
|
"grad_norm": 0.38801488280296326, |
|
"learning_rate": 0.00018888354486549237, |
|
"loss": 2.1634, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.16020025031289112, |
|
"grad_norm": 0.4164554178714752, |
|
"learning_rate": 0.00018875098409124302, |
|
"loss": 2.2987, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.16111047900785072, |
|
"grad_norm": 0.38982319831848145, |
|
"learning_rate": 0.0001886176847041386, |
|
"loss": 2.1834, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.16202070770281032, |
|
"grad_norm": 0.42200496792793274, |
|
"learning_rate": 0.00018848364781353744, |
|
"loss": 2.2793, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.16293093639776993, |
|
"grad_norm": 0.4152745306491852, |
|
"learning_rate": 0.0001883488745349355, |
|
"loss": 2.1988, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.16384116509272956, |
|
"grad_norm": 0.41070255637168884, |
|
"learning_rate": 0.0001882133659899573, |
|
"loss": 1.8985, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.16475139378768916, |
|
"grad_norm": 0.4259546399116516, |
|
"learning_rate": 0.00018807712330634642, |
|
"loss": 2.4285, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.16566162248264876, |
|
"grad_norm": 0.44458258152008057, |
|
"learning_rate": 0.0001879401476179562, |
|
"loss": 2.2561, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.16657185117760837, |
|
"grad_norm": 0.4640187621116638, |
|
"learning_rate": 0.0001878024400647402, |
|
"loss": 2.4091, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.167482079872568, |
|
"grad_norm": 0.40832704305648804, |
|
"learning_rate": 0.00018766400179274286, |
|
"loss": 1.9156, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1683923085675276, |
|
"grad_norm": 0.4358328878879547, |
|
"learning_rate": 0.00018752483395408987, |
|
"loss": 2.0592, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1693025372624872, |
|
"grad_norm": 0.45089927315711975, |
|
"learning_rate": 0.00018738493770697852, |
|
"loss": 2.2189, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.1702127659574468, |
|
"grad_norm": 0.45900994539260864, |
|
"learning_rate": 0.00018724431421566823, |
|
"loss": 2.3023, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.1711229946524064, |
|
"grad_norm": 0.4608750641345978, |
|
"learning_rate": 0.00018710296465047075, |
|
"loss": 2.2467, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.17203322334736604, |
|
"grad_norm": 0.44717609882354736, |
|
"learning_rate": 0.0001869608901877404, |
|
"loss": 2.0313, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.17294345204232564, |
|
"grad_norm": 0.40987592935562134, |
|
"learning_rate": 0.0001868180920098644, |
|
"loss": 2.0965, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.17385368073728524, |
|
"grad_norm": 0.502581775188446, |
|
"learning_rate": 0.00018667457130525284, |
|
"loss": 2.1991, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.17476390943224485, |
|
"grad_norm": 0.4770600199699402, |
|
"learning_rate": 0.00018653032926832896, |
|
"loss": 2.2258, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.17567413812720445, |
|
"grad_norm": 0.48825547099113464, |
|
"learning_rate": 0.00018638536709951917, |
|
"loss": 2.2884, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.17658436682216408, |
|
"grad_norm": 0.5004613995552063, |
|
"learning_rate": 0.000186239686005243, |
|
"loss": 2.2927, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.17749459551712368, |
|
"grad_norm": 0.5268414616584778, |
|
"learning_rate": 0.0001860932871979031, |
|
"loss": 2.356, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.17840482421208329, |
|
"grad_norm": 0.5261876583099365, |
|
"learning_rate": 0.00018594617189587512, |
|
"loss": 2.2864, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.1793150529070429, |
|
"grad_norm": 0.6071822643280029, |
|
"learning_rate": 0.00018579834132349772, |
|
"loss": 2.684, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.1802252816020025, |
|
"grad_norm": 0.659428060054779, |
|
"learning_rate": 0.0001856497967110621, |
|
"loss": 2.5811, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.18113551029696212, |
|
"grad_norm": 0.828525960445404, |
|
"learning_rate": 0.00018550053929480202, |
|
"loss": 2.7052, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.18204573899192172, |
|
"grad_norm": 1.9569804668426514, |
|
"learning_rate": 0.00018535057031688335, |
|
"loss": 2.9428, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.18295596768688133, |
|
"grad_norm": 0.6459002494812012, |
|
"learning_rate": 0.0001851998910253939, |
|
"loss": 2.6936, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.18386619638184093, |
|
"grad_norm": 0.6431388854980469, |
|
"learning_rate": 0.0001850485026743328, |
|
"loss": 2.5446, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.18477642507680053, |
|
"grad_norm": 0.5401524305343628, |
|
"learning_rate": 0.00018489640652360022, |
|
"loss": 2.4508, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.18568665377176016, |
|
"grad_norm": 0.4280170500278473, |
|
"learning_rate": 0.00018474360383898694, |
|
"loss": 2.5167, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.18659688246671977, |
|
"grad_norm": 0.4014836847782135, |
|
"learning_rate": 0.00018459009589216364, |
|
"loss": 2.4456, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.18750711116167937, |
|
"grad_norm": 0.41716477274894714, |
|
"learning_rate": 0.0001844358839606705, |
|
"loss": 2.4357, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.18841733985663897, |
|
"grad_norm": 0.40400266647338867, |
|
"learning_rate": 0.00018428096932790632, |
|
"loss": 2.5122, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.1893275685515986, |
|
"grad_norm": 0.4276406764984131, |
|
"learning_rate": 0.00018412535328311814, |
|
"loss": 2.3581, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.1902377972465582, |
|
"grad_norm": 0.42548567056655884, |
|
"learning_rate": 0.0001839690371213903, |
|
"loss": 2.4639, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.1911480259415178, |
|
"grad_norm": 0.3912813365459442, |
|
"learning_rate": 0.0001838120221436338, |
|
"loss": 2.4395, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.1920582546364774, |
|
"grad_norm": 0.40997177362442017, |
|
"learning_rate": 0.00018365430965657526, |
|
"loss": 2.4833, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.192968483331437, |
|
"grad_norm": 0.3765038847923279, |
|
"learning_rate": 0.00018349590097274632, |
|
"loss": 2.2293, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.19387871202639664, |
|
"grad_norm": 0.3960655629634857, |
|
"learning_rate": 0.00018333679741047254, |
|
"loss": 2.2261, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.19478894072135625, |
|
"grad_norm": 0.3758634030818939, |
|
"learning_rate": 0.00018317700029386245, |
|
"loss": 2.0951, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.19569916941631585, |
|
"grad_norm": 0.4159095883369446, |
|
"learning_rate": 0.00018301651095279655, |
|
"loss": 2.3568, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.19660939811127545, |
|
"grad_norm": 0.4104447662830353, |
|
"learning_rate": 0.0001828553307229163, |
|
"loss": 2.2262, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.19751962680623505, |
|
"grad_norm": 0.3676728308200836, |
|
"learning_rate": 0.0001826934609456129, |
|
"loss": 2.2557, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.19842985550119469, |
|
"grad_norm": 0.39037179946899414, |
|
"learning_rate": 0.00018253090296801614, |
|
"loss": 2.2632, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.1993400841961543, |
|
"grad_norm": 0.38477587699890137, |
|
"learning_rate": 0.0001823676581429833, |
|
"loss": 2.1344, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.2002503128911139, |
|
"grad_norm": 0.38563740253448486, |
|
"learning_rate": 0.00018220372782908777, |
|
"loss": 2.2298, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2011605415860735, |
|
"grad_norm": 0.38280826807022095, |
|
"learning_rate": 0.00018203911339060783, |
|
"loss": 2.0098, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.2020707702810331, |
|
"grad_norm": 0.4058510661125183, |
|
"learning_rate": 0.00018187381619751516, |
|
"loss": 2.2295, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.20298099897599273, |
|
"grad_norm": 0.42094549536705017, |
|
"learning_rate": 0.00018170783762546365, |
|
"loss": 2.1706, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.20389122767095233, |
|
"grad_norm": 0.42036664485931396, |
|
"learning_rate": 0.00018154117905577776, |
|
"loss": 2.1802, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.20480145636591193, |
|
"grad_norm": 0.39867401123046875, |
|
"learning_rate": 0.00018137384187544116, |
|
"loss": 2.165, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.20571168506087154, |
|
"grad_norm": 0.3934602737426758, |
|
"learning_rate": 0.00018120582747708502, |
|
"loss": 1.9683, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.20662191375583117, |
|
"grad_norm": 0.3914499282836914, |
|
"learning_rate": 0.0001810371372589766, |
|
"loss": 1.9805, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.20753214245079077, |
|
"grad_norm": 0.4222523868083954, |
|
"learning_rate": 0.0001808677726250076, |
|
"loss": 2.185, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.20844237114575037, |
|
"grad_norm": 0.3909006118774414, |
|
"learning_rate": 0.00018069773498468223, |
|
"loss": 2.0038, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.20935259984070997, |
|
"grad_norm": 0.42738041281700134, |
|
"learning_rate": 0.00018052702575310588, |
|
"loss": 2.2352, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.21026282853566958, |
|
"grad_norm": 0.4195539653301239, |
|
"learning_rate": 0.00018035564635097298, |
|
"loss": 2.0254, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.2111730572306292, |
|
"grad_norm": 0.4464121460914612, |
|
"learning_rate": 0.00018018359820455536, |
|
"loss": 2.2766, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.2120832859255888, |
|
"grad_norm": 0.42468565702438354, |
|
"learning_rate": 0.00018001088274569038, |
|
"loss": 2.1718, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.2129935146205484, |
|
"grad_norm": 0.41730690002441406, |
|
"learning_rate": 0.00017983750141176895, |
|
"loss": 2.0959, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.21390374331550802, |
|
"grad_norm": 0.4425525665283203, |
|
"learning_rate": 0.0001796634556457236, |
|
"loss": 2.1538, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.21481397201046762, |
|
"grad_norm": 0.4559743106365204, |
|
"learning_rate": 0.0001794887468960165, |
|
"loss": 2.1299, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.21572420070542725, |
|
"grad_norm": 0.43843337893486023, |
|
"learning_rate": 0.00017931337661662727, |
|
"loss": 2.1172, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.21663442940038685, |
|
"grad_norm": 0.46661749482154846, |
|
"learning_rate": 0.0001791373462670411, |
|
"loss": 2.1352, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.21754465809534645, |
|
"grad_norm": 0.4997507333755493, |
|
"learning_rate": 0.00017896065731223644, |
|
"loss": 2.1185, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.21845488679030606, |
|
"grad_norm": 0.480398029088974, |
|
"learning_rate": 0.00017878331122267284, |
|
"loss": 2.3208, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21936511548526566, |
|
"grad_norm": 0.4819256365299225, |
|
"learning_rate": 0.00017860530947427875, |
|
"loss": 2.3114, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.2202753441802253, |
|
"grad_norm": 0.5086483359336853, |
|
"learning_rate": 0.00017842665354843922, |
|
"loss": 2.1578, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.2211855728751849, |
|
"grad_norm": 0.5267627239227295, |
|
"learning_rate": 0.0001782473449319835, |
|
"loss": 2.3594, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.2220958015701445, |
|
"grad_norm": 0.5265729427337646, |
|
"learning_rate": 0.0001780673851171728, |
|
"loss": 2.2083, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.2230060302651041, |
|
"grad_norm": 0.5762265920639038, |
|
"learning_rate": 0.00017788677560168784, |
|
"loss": 2.4887, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.22391625896006373, |
|
"grad_norm": 0.5499774217605591, |
|
"learning_rate": 0.0001777055178886162, |
|
"loss": 2.4169, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.22482648765502333, |
|
"grad_norm": 0.613876223564148, |
|
"learning_rate": 0.0001775236134864401, |
|
"loss": 2.5177, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.22573671634998294, |
|
"grad_norm": 0.6267974376678467, |
|
"learning_rate": 0.00017734106390902366, |
|
"loss": 2.5212, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.22664694504494254, |
|
"grad_norm": 0.7940219640731812, |
|
"learning_rate": 0.0001771578706756003, |
|
"loss": 2.5769, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.22755717373990214, |
|
"grad_norm": 1.3050237894058228, |
|
"learning_rate": 0.0001769740353107602, |
|
"loss": 2.3235, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.22846740243486177, |
|
"grad_norm": 0.5289978981018066, |
|
"learning_rate": 0.00017678955934443758, |
|
"loss": 2.5637, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.22937763112982137, |
|
"grad_norm": 0.5574918389320374, |
|
"learning_rate": 0.0001766044443118978, |
|
"loss": 2.5542, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.23028785982478098, |
|
"grad_norm": 0.47572481632232666, |
|
"learning_rate": 0.00017641869175372493, |
|
"loss": 2.4325, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.23119808851974058, |
|
"grad_norm": 0.41480204463005066, |
|
"learning_rate": 0.00017623230321580854, |
|
"loss": 2.5176, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.23210831721470018, |
|
"grad_norm": 0.4072042405605316, |
|
"learning_rate": 0.00017604528024933115, |
|
"loss": 2.4798, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.2330185459096598, |
|
"grad_norm": 0.3996100127696991, |
|
"learning_rate": 0.00017585762441075503, |
|
"loss": 2.35, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.23392877460461942, |
|
"grad_norm": 0.4213111698627472, |
|
"learning_rate": 0.00017566933726180964, |
|
"loss": 2.526, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.23483900329957902, |
|
"grad_norm": 0.42595675587654114, |
|
"learning_rate": 0.0001754804203694782, |
|
"loss": 2.4724, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.23574923199453862, |
|
"grad_norm": 0.407141774892807, |
|
"learning_rate": 0.0001752908753059849, |
|
"loss": 2.3349, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.23665946068949822, |
|
"grad_norm": 0.4092378616333008, |
|
"learning_rate": 0.00017510070364878177, |
|
"loss": 2.4487, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.23756968938445785, |
|
"grad_norm": 0.38301384449005127, |
|
"learning_rate": 0.00017490990698053563, |
|
"loss": 2.3245, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.23847991807941746, |
|
"grad_norm": 0.3946523666381836, |
|
"learning_rate": 0.00017471848688911464, |
|
"loss": 2.3614, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.23939014677437706, |
|
"grad_norm": 0.41228440403938293, |
|
"learning_rate": 0.0001745264449675755, |
|
"loss": 2.3525, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.24030037546933666, |
|
"grad_norm": 0.39352720975875854, |
|
"learning_rate": 0.00017433378281414975, |
|
"loss": 2.3851, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.24121060416429627, |
|
"grad_norm": 0.39706528186798096, |
|
"learning_rate": 0.0001741405020322309, |
|
"loss": 2.417, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.2421208328592559, |
|
"grad_norm": 0.3801213204860687, |
|
"learning_rate": 0.00017394660423036075, |
|
"loss": 2.1036, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.2430310615542155, |
|
"grad_norm": 0.37401556968688965, |
|
"learning_rate": 0.00017375209102221613, |
|
"loss": 2.1668, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.2439412902491751, |
|
"grad_norm": 0.3884643614292145, |
|
"learning_rate": 0.00017355696402659548, |
|
"loss": 2.3057, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.2448515189441347, |
|
"grad_norm": 0.38754650950431824, |
|
"learning_rate": 0.00017336122486740548, |
|
"loss": 2.1363, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.24576174763909434, |
|
"grad_norm": 0.3861556053161621, |
|
"learning_rate": 0.00017316487517364721, |
|
"loss": 2.2378, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.24667197633405394, |
|
"grad_norm": 0.3905907869338989, |
|
"learning_rate": 0.000172967916579403, |
|
"loss": 2.2011, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.24758220502901354, |
|
"grad_norm": 0.40004828572273254, |
|
"learning_rate": 0.00017277035072382253, |
|
"loss": 2.1316, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.24849243372397314, |
|
"grad_norm": 0.39059486985206604, |
|
"learning_rate": 0.00017257217925110933, |
|
"loss": 2.1582, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.24940266241893275, |
|
"grad_norm": 0.3994196355342865, |
|
"learning_rate": 0.00017237340381050703, |
|
"loss": 2.0078, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.2503128911138924, |
|
"grad_norm": 0.41038015484809875, |
|
"learning_rate": 0.00017217402605628572, |
|
"loss": 2.0799, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.2503128911138924, |
|
"eval_loss": 2.2957444190979004, |
|
"eval_runtime": 203.8901, |
|
"eval_samples_per_second": 9.078, |
|
"eval_steps_per_second": 4.542, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.251223119808852, |
|
"grad_norm": 0.39477062225341797, |
|
"learning_rate": 0.00017197404764772805, |
|
"loss": 2.1965, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.2521333485038116, |
|
"grad_norm": 0.4311797320842743, |
|
"learning_rate": 0.00017177347024911562, |
|
"loss": 2.2732, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.2530435771987712, |
|
"grad_norm": 0.3932170867919922, |
|
"learning_rate": 0.00017157229552971487, |
|
"loss": 2.1883, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.2539538058937308, |
|
"grad_norm": 0.40245768427848816, |
|
"learning_rate": 0.00017137052516376345, |
|
"loss": 2.1204, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.2548640345886904, |
|
"grad_norm": 0.44616052508354187, |
|
"learning_rate": 0.00017116816083045602, |
|
"loss": 2.359, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.25577426328365, |
|
"grad_norm": 0.4077576994895935, |
|
"learning_rate": 0.0001709652042139306, |
|
"loss": 2.0829, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.25668449197860965, |
|
"grad_norm": 0.40657973289489746, |
|
"learning_rate": 0.0001707616570032542, |
|
"loss": 2.1668, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.25759472067356926, |
|
"grad_norm": 0.4176836907863617, |
|
"learning_rate": 0.00017055752089240907, |
|
"loss": 2.1375, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.25850494936852886, |
|
"grad_norm": 0.4195888936519623, |
|
"learning_rate": 0.00017035279758027832, |
|
"loss": 2.0588, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.25941517806348846, |
|
"grad_norm": 0.42504483461380005, |
|
"learning_rate": 0.00017014748877063214, |
|
"loss": 2.0153, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.26032540675844806, |
|
"grad_norm": 0.45724233984947205, |
|
"learning_rate": 0.00016994159617211317, |
|
"loss": 2.1907, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.26123563545340767, |
|
"grad_norm": 0.4327910542488098, |
|
"learning_rate": 0.00016973512149822274, |
|
"loss": 2.0613, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.26214586414836727, |
|
"grad_norm": 0.446054607629776, |
|
"learning_rate": 0.0001695280664673062, |
|
"loss": 2.1539, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.26305609284332687, |
|
"grad_norm": 0.4568957984447479, |
|
"learning_rate": 0.0001693204328025389, |
|
"loss": 2.2892, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.2639663215382865, |
|
"grad_norm": 0.4926307201385498, |
|
"learning_rate": 0.00016911222223191182, |
|
"loss": 2.253, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2648765502332461, |
|
"grad_norm": 0.49324727058410645, |
|
"learning_rate": 0.00016890343648821697, |
|
"loss": 2.2756, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.26578677892820574, |
|
"grad_norm": 0.478746235370636, |
|
"learning_rate": 0.0001686940773090333, |
|
"loss": 2.2775, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.26669700762316534, |
|
"grad_norm": 0.508730411529541, |
|
"learning_rate": 0.00016848414643671195, |
|
"loss": 2.3255, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.26760723631812494, |
|
"grad_norm": 0.5452204942703247, |
|
"learning_rate": 0.00016827364561836187, |
|
"loss": 2.4101, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.26851746501308454, |
|
"grad_norm": 0.5368967652320862, |
|
"learning_rate": 0.00016806257660583534, |
|
"loss": 2.3823, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.26942769370804415, |
|
"grad_norm": 0.565154492855072, |
|
"learning_rate": 0.00016785094115571322, |
|
"loss": 2.376, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.27033792240300375, |
|
"grad_norm": 0.623928964138031, |
|
"learning_rate": 0.0001676387410292906, |
|
"loss": 2.4355, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.27124815109796335, |
|
"grad_norm": 0.7393582463264465, |
|
"learning_rate": 0.00016742597799256182, |
|
"loss": 2.699, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.27215837979292296, |
|
"grad_norm": 0.845248818397522, |
|
"learning_rate": 0.000167212653816206, |
|
"loss": 2.6997, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.27306860848788256, |
|
"grad_norm": 1.6121950149536133, |
|
"learning_rate": 0.00016699877027557226, |
|
"loss": 2.7515, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2739788371828422, |
|
"grad_norm": 0.4747787117958069, |
|
"learning_rate": 0.00016678432915066488, |
|
"loss": 2.5881, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.2748890658778018, |
|
"grad_norm": 0.46245065331459045, |
|
"learning_rate": 0.00016656933222612854, |
|
"loss": 2.4625, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.2757992945727614, |
|
"grad_norm": 0.5154780745506287, |
|
"learning_rate": 0.00016635378129123342, |
|
"loss": 2.4195, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.276709523267721, |
|
"grad_norm": 0.46146687865257263, |
|
"learning_rate": 0.00016613767813986044, |
|
"loss": 2.4896, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.2776197519626806, |
|
"grad_norm": 0.42735326290130615, |
|
"learning_rate": 0.0001659210245704861, |
|
"loss": 2.4185, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.27852998065764023, |
|
"grad_norm": 0.40846991539001465, |
|
"learning_rate": 0.00016570382238616777, |
|
"loss": 2.4187, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.27944020935259983, |
|
"grad_norm": 0.3983679711818695, |
|
"learning_rate": 0.00016548607339452853, |
|
"loss": 2.3726, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.28035043804755944, |
|
"grad_norm": 0.4192310869693756, |
|
"learning_rate": 0.00016526777940774204, |
|
"loss": 2.3308, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.28126066674251904, |
|
"grad_norm": 0.4130899906158447, |
|
"learning_rate": 0.00016504894224251778, |
|
"loss": 2.3803, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.28217089543747864, |
|
"grad_norm": 0.3945166766643524, |
|
"learning_rate": 0.0001648295637200856, |
|
"loss": 2.2948, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.2830811241324383, |
|
"grad_norm": 0.4377036392688751, |
|
"learning_rate": 0.0001646096456661807, |
|
"loss": 2.376, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.2839913528273979, |
|
"grad_norm": 0.4076646864414215, |
|
"learning_rate": 0.00016438918991102842, |
|
"loss": 2.202, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.2849015815223575, |
|
"grad_norm": 0.410691499710083, |
|
"learning_rate": 0.000164168198289329, |
|
"loss": 2.3081, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.2858118102173171, |
|
"grad_norm": 0.40779241919517517, |
|
"learning_rate": 0.00016394667264024246, |
|
"loss": 2.3292, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.2867220389122767, |
|
"grad_norm": 0.3915857970714569, |
|
"learning_rate": 0.00016372461480737297, |
|
"loss": 2.313, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.2876322676072363, |
|
"grad_norm": 0.40646892786026, |
|
"learning_rate": 0.00016350202663875386, |
|
"loss": 1.9998, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.2885424963021959, |
|
"grad_norm": 0.3695480525493622, |
|
"learning_rate": 0.00016327890998683192, |
|
"loss": 2.0481, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.2894527249971555, |
|
"grad_norm": 0.37870198488235474, |
|
"learning_rate": 0.00016305526670845226, |
|
"loss": 2.1776, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.2903629536921151, |
|
"grad_norm": 0.3980102837085724, |
|
"learning_rate": 0.0001628310986648427, |
|
"loss": 2.21, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.2912731823870747, |
|
"grad_norm": 0.42144468426704407, |
|
"learning_rate": 0.0001626064077215983, |
|
"loss": 2.3021, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.2921834110820344, |
|
"grad_norm": 0.39952829480171204, |
|
"learning_rate": 0.00016238119574866588, |
|
"loss": 2.1492, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.293093639776994, |
|
"grad_norm": 0.42757153511047363, |
|
"learning_rate": 0.0001621554646203284, |
|
"loss": 1.8572, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.2940038684719536, |
|
"grad_norm": 0.418364018201828, |
|
"learning_rate": 0.00016192921621518944, |
|
"loss": 2.1835, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.2949140971669132, |
|
"grad_norm": 0.41788679361343384, |
|
"learning_rate": 0.0001617024524161574, |
|
"loss": 2.2657, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.2958243258618728, |
|
"grad_norm": 0.44890159368515015, |
|
"learning_rate": 0.0001614751751104301, |
|
"loss": 2.2471, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.2967345545568324, |
|
"grad_norm": 0.4491661787033081, |
|
"learning_rate": 0.0001612473861894788, |
|
"loss": 1.971, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.297644783251792, |
|
"grad_norm": 0.3881804943084717, |
|
"learning_rate": 0.00016101908754903268, |
|
"loss": 2.0647, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.2985550119467516, |
|
"grad_norm": 0.42832571268081665, |
|
"learning_rate": 0.00016079028108906282, |
|
"loss": 2.1406, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.2994652406417112, |
|
"grad_norm": 0.4422348439693451, |
|
"learning_rate": 0.00016056096871376667, |
|
"loss": 2.0549, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.30037546933667086, |
|
"grad_norm": 0.4560178220272064, |
|
"learning_rate": 0.00016033115233155202, |
|
"loss": 2.2055, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.30128569803163047, |
|
"grad_norm": 0.4027805030345917, |
|
"learning_rate": 0.0001601008338550211, |
|
"loss": 2.0151, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.30219592672659007, |
|
"grad_norm": 0.4404117465019226, |
|
"learning_rate": 0.00015987001520095478, |
|
"loss": 2.1838, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.3031061554215497, |
|
"grad_norm": 0.47021403908729553, |
|
"learning_rate": 0.00015963869829029658, |
|
"loss": 2.1402, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.3040163841165093, |
|
"grad_norm": 0.44921791553497314, |
|
"learning_rate": 0.00015940688504813662, |
|
"loss": 2.2947, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.3049266128114689, |
|
"grad_norm": 0.4406517744064331, |
|
"learning_rate": 0.00015917457740369565, |
|
"loss": 2.142, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.3058368415064285, |
|
"grad_norm": 0.43675345182418823, |
|
"learning_rate": 0.000158941777290309, |
|
"loss": 2.0953, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.3067470702013881, |
|
"grad_norm": 0.4730078876018524, |
|
"learning_rate": 0.00015870848664541044, |
|
"loss": 2.2471, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.3076572988963477, |
|
"grad_norm": 0.43952468037605286, |
|
"learning_rate": 0.00015847470741051618, |
|
"loss": 2.1458, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.3085675275913073, |
|
"grad_norm": 0.4473972022533417, |
|
"learning_rate": 0.00015824044153120852, |
|
"loss": 2.1065, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.30947775628626695, |
|
"grad_norm": 0.49501466751098633, |
|
"learning_rate": 0.00015800569095711982, |
|
"loss": 2.1574, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.31038798498122655, |
|
"grad_norm": 0.4756048023700714, |
|
"learning_rate": 0.00015777045764191625, |
|
"loss": 2.019, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.31129821367618615, |
|
"grad_norm": 0.491579532623291, |
|
"learning_rate": 0.00015753474354328142, |
|
"loss": 2.2317, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.31220844237114576, |
|
"grad_norm": 0.5067722797393799, |
|
"learning_rate": 0.00015729855062290022, |
|
"loss": 2.4042, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.31311867106610536, |
|
"grad_norm": 0.5405014157295227, |
|
"learning_rate": 0.00015706188084644242, |
|
"loss": 2.243, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.31402889976106496, |
|
"grad_norm": 0.536719799041748, |
|
"learning_rate": 0.00015682473618354635, |
|
"loss": 2.2665, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.31493912845602456, |
|
"grad_norm": 0.5940792560577393, |
|
"learning_rate": 0.0001565871186078025, |
|
"loss": 2.4295, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.31584935715098417, |
|
"grad_norm": 0.6351253986358643, |
|
"learning_rate": 0.00015634903009673705, |
|
"loss": 2.5128, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.31675958584594377, |
|
"grad_norm": 0.7466199398040771, |
|
"learning_rate": 0.00015611047263179548, |
|
"loss": 2.56, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.31766981454090343, |
|
"grad_norm": 0.84588623046875, |
|
"learning_rate": 0.000155871448198326, |
|
"loss": 2.6547, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.31858004323586303, |
|
"grad_norm": 1.7304902076721191, |
|
"learning_rate": 0.0001556319587855631, |
|
"loss": 2.7543, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.31949027193082263, |
|
"grad_norm": 0.5234479308128357, |
|
"learning_rate": 0.00015539200638661104, |
|
"loss": 2.6642, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.32040050062578224, |
|
"grad_norm": 0.5147595405578613, |
|
"learning_rate": 0.00015515159299842707, |
|
"loss": 2.4943, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.32131072932074184, |
|
"grad_norm": 0.4990614950656891, |
|
"learning_rate": 0.00015491072062180503, |
|
"loss": 2.4958, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.32222095801570144, |
|
"grad_norm": 0.45430535078048706, |
|
"learning_rate": 0.00015466939126135856, |
|
"loss": 2.4471, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.32313118671066104, |
|
"grad_norm": 0.44151967763900757, |
|
"learning_rate": 0.00015442760692550443, |
|
"loss": 2.2696, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.32404141540562065, |
|
"grad_norm": 0.4359540641307831, |
|
"learning_rate": 0.00015418536962644592, |
|
"loss": 2.4827, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.32495164410058025, |
|
"grad_norm": 0.4277969002723694, |
|
"learning_rate": 0.00015394268138015598, |
|
"loss": 2.4197, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.32586187279553985, |
|
"grad_norm": 0.42158129811286926, |
|
"learning_rate": 0.00015369954420636048, |
|
"loss": 2.3935, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.3267721014904995, |
|
"grad_norm": 0.4236292243003845, |
|
"learning_rate": 0.00015345596012852138, |
|
"loss": 2.3498, |
|
"step": 359 |
|
}, |
|
{
"epoch": 0.3276823301854591,
"grad_norm": 0.39974719285964966,
"learning_rate": 0.00015321193117381996,
"loss": 2.2955,
"step": 360
},
{
"epoch": 0.3285925588804187,
"grad_norm": 0.39424553513526917,
"learning_rate": 0.00015296745937313987,
"loss": 2.2767,
"step": 361
},
{
"epoch": 0.3295027875753783,
"grad_norm": 0.4082043170928955,
"learning_rate": 0.00015272254676105025,
"loss": 2.2224,
"step": 362
},
{
"epoch": 0.3304130162703379,
"grad_norm": 0.3964308798313141,
"learning_rate": 0.00015247719537578883,
"loss": 2.2479,
"step": 363
},
{
"epoch": 0.3313232449652975,
"grad_norm": 0.40646740794181824,
"learning_rate": 0.00015223140725924495,
"loss": 2.3327,
"step": 364
},
{
"epoch": 0.33223347366025713,
"grad_norm": 0.4101439118385315,
"learning_rate": 0.00015198518445694255,
"loss": 2.4099,
"step": 365
},
{
"epoch": 0.33314370235521673,
"grad_norm": 0.400878369808197,
"learning_rate": 0.0001517385290180231,
"loss": 2.272,
"step": 366
},
{
"epoch": 0.33405393105017633,
"grad_norm": 0.37038350105285645,
"learning_rate": 0.00015149144299522873,
"loss": 2.1738,
"step": 367
},
{
"epoch": 0.334964159745136,
"grad_norm": 0.37709566950798035,
"learning_rate": 0.0001512439284448849,
"loss": 2.1423,
"step": 368
},
{
"epoch": 0.3358743884400956,
"grad_norm": 0.40142226219177246,
"learning_rate": 0.0001509959874268835,
"loss": 2.2503,
"step": 369
},
{
"epoch": 0.3367846171350552,
"grad_norm": 0.38601839542388916,
"learning_rate": 0.00015074762200466556,
"loss": 2.15,
"step": 370
},
{
"epoch": 0.3376948458300148,
"grad_norm": 0.39862939715385437,
"learning_rate": 0.00015049883424520414,
"loss": 2.2177,
"step": 371
},
{
"epoch": 0.3386050745249744,
"grad_norm": 0.40157076716423035,
"learning_rate": 0.00015024962621898715,
"loss": 2.2062,
"step": 372
},
{
"epoch": 0.339515303219934,
"grad_norm": 0.38866209983825684,
"learning_rate": 0.00015000000000000001,
"loss": 2.1295,
"step": 373
},
{
"epoch": 0.3404255319148936,
"grad_norm": 0.40278226137161255,
"learning_rate": 0.00014974995766570855,
"loss": 2.1402,
"step": 374
},
{
"epoch": 0.3413357606098532,
"grad_norm": 0.41342514753341675,
"learning_rate": 0.00014949950129704162,
"loss": 2.1778,
"step": 375
},
{
"epoch": 0.3422459893048128,
"grad_norm": 0.4095761775970459,
"learning_rate": 0.00014924863297837378,
"loss": 2.0599,
"step": 376
},
{
"epoch": 0.3431562179997724,
"grad_norm": 0.4194605350494385,
"learning_rate": 0.00014899735479750794,
"loss": 2.2467,
"step": 377
},
{
"epoch": 0.3440664466947321,
"grad_norm": 0.41888725757598877,
"learning_rate": 0.00014874566884565807,
"loss": 2.0157,
"step": 378
},
{
"epoch": 0.3449766753896917,
"grad_norm": 0.41246896982192993,
"learning_rate": 0.00014849357721743168,
"loss": 2.1508,
"step": 379
},
{
"epoch": 0.3458869040846513,
"grad_norm": 0.4054362177848816,
"learning_rate": 0.00014824108201081247,
"loss": 2.0896,
"step": 380
},
{
"epoch": 0.3467971327796109,
"grad_norm": 0.4158082902431488,
"learning_rate": 0.00014798818532714279,
"loss": 2.0494,
"step": 381
},
{
"epoch": 0.3477073614745705,
"grad_norm": 0.43001338839530945,
"learning_rate": 0.00014773488927110633,
"loss": 2.1462,
"step": 382
},
{
"epoch": 0.3486175901695301,
"grad_norm": 0.4192085862159729,
"learning_rate": 0.00014748119595071034,
"loss": 1.9395,
"step": 383
},
{
"epoch": 0.3495278188644897,
"grad_norm": 0.45981523394584656,
"learning_rate": 0.0001472271074772683,
"loss": 2.2445,
"step": 384
},
{
"epoch": 0.3504380475594493,
"grad_norm": 0.4254681169986725,
"learning_rate": 0.00014697262596538227,
"loss": 2.2156,
"step": 385
},
{
"epoch": 0.3513482762544089,
"grad_norm": 0.4846554100513458,
"learning_rate": 0.00014671775353292525,
"loss": 2.1879,
"step": 386
},
{
"epoch": 0.35225850494936856,
"grad_norm": 0.4701845645904541,
"learning_rate": 0.00014646249230102366,
"loss": 2.2736,
"step": 387
},
{
"epoch": 0.35316873364432816,
"grad_norm": 0.45750224590301514,
"learning_rate": 0.00014620684439403962,
"loss": 2.2816,
"step": 388
},
{
"epoch": 0.35407896233928776,
"grad_norm": 0.4719528555870056,
"learning_rate": 0.00014595081193955324,
"loss": 2.1235,
"step": 389
},
{
"epoch": 0.35498919103424736,
"grad_norm": 0.49415460228919983,
"learning_rate": 0.000145694397068345,
"loss": 2.1539,
"step": 390
},
{
"epoch": 0.35589941972920697,
"grad_norm": 0.5116055011749268,
"learning_rate": 0.0001454376019143779,
"loss": 2.1482,
"step": 391
},
{
"epoch": 0.35680964842416657,
"grad_norm": 0.4707508087158203,
"learning_rate": 0.00014518042861477986,
"loss": 2.1791,
"step": 392
},
{
"epoch": 0.3577198771191262,
"grad_norm": 0.47449609637260437,
"learning_rate": 0.00014492287930982576,
"loss": 2.1772,
"step": 393
},
{
"epoch": 0.3586301058140858,
"grad_norm": 0.5449949502944946,
"learning_rate": 0.00014466495614291977,
"loss": 2.454,
"step": 394
},
{
"epoch": 0.3595403345090454,
"grad_norm": 0.5154662728309631,
"learning_rate": 0.00014440666126057744,
"loss": 2.372,
"step": 395
},
{
"epoch": 0.360450563204005,
"grad_norm": 0.5283260941505432,
"learning_rate": 0.0001441479968124078,
"loss": 2.3964,
"step": 396
},
{
"epoch": 0.36136079189896464,
"grad_norm": 0.5910937190055847,
"learning_rate": 0.0001438889649510956,
"loss": 2.5046,
"step": 397
},
{
"epoch": 0.36227102059392424,
"grad_norm": 0.6620859503746033,
"learning_rate": 0.00014362956783238324,
"loss": 2.642,
"step": 398
},
{
"epoch": 0.36318124928888385,
"grad_norm": 0.7961871027946472,
"learning_rate": 0.00014336980761505297,
"loss": 2.6575,
"step": 399
},
|
{
"epoch": 0.36409147798384345,
"grad_norm": 1.4348353147506714,
"learning_rate": 0.00014310968646090883,
"loss": 2.7044,
"step": 400
},
{
"epoch": 0.36500170667880305,
"grad_norm": 0.5091971755027771,
"learning_rate": 0.00014284920653475866,
"loss": 2.6281,
"step": 401
},
{
"epoch": 0.36591193537376265,
"grad_norm": 0.4547927975654602,
"learning_rate": 0.00014258837000439618,
"loss": 2.3868,
"step": 402
},
{
"epoch": 0.36682216406872226,
"grad_norm": 0.4767512083053589,
"learning_rate": 0.0001423271790405828,
"loss": 2.4612,
"step": 403
},
{
"epoch": 0.36773239276368186,
"grad_norm": 0.4405704736709595,
"learning_rate": 0.00014206563581702964,
"loss": 2.3659,
"step": 404
},
{
"epoch": 0.36864262145864146,
"grad_norm": 0.46354222297668457,
"learning_rate": 0.0001418037425103795,
"loss": 2.5197,
"step": 405
},
{
"epoch": 0.36955285015360106,
"grad_norm": 0.4474616050720215,
"learning_rate": 0.00014154150130018866,
"loss": 2.5156,
"step": 406
},
{
"epoch": 0.3704630788485607,
"grad_norm": 0.40332525968551636,
"learning_rate": 0.00014127891436890868,
"loss": 2.3852,
"step": 407
},
{
"epoch": 0.3713733075435203,
"grad_norm": 0.3969736695289612,
"learning_rate": 0.0001410159839018684,
"loss": 2.316,
"step": 408
},
{
"epoch": 0.37228353623847993,
"grad_norm": 0.40719449520111084,
"learning_rate": 0.0001407527120872557,
"loss": 2.3618,
"step": 409
},
{
"epoch": 0.37319376493343953,
"grad_norm": 0.4055502116680145,
"learning_rate": 0.00014048910111609915,
"loss": 2.2901,
"step": 410
},
{
"epoch": 0.37410399362839913,
"grad_norm": 0.3881557285785675,
"learning_rate": 0.0001402251531822499,
"loss": 2.3096,
"step": 411
},
{
"epoch": 0.37501422232335874,
"grad_norm": 0.38520845770835876,
"learning_rate": 0.00013996087048236358,
"loss": 2.096,
"step": 412
},
{
"epoch": 0.37592445101831834,
"grad_norm": 0.42187705636024475,
"learning_rate": 0.00013969625521588158,
"loss": 2.3212,
"step": 413
},
{
"epoch": 0.37683467971327794,
"grad_norm": 0.4028996527194977,
"learning_rate": 0.00013943130958501317,
"loss": 2.2625,
"step": 414
},
{
"epoch": 0.37774490840823755,
"grad_norm": 0.4009808599948883,
"learning_rate": 0.00013916603579471705,
"loss": 2.3579,
"step": 415
},
{
"epoch": 0.3786551371031972,
"grad_norm": 0.3909650146961212,
"learning_rate": 0.00013890043605268283,
"loss": 2.219,
"step": 416
},
{
"epoch": 0.3795653657981568,
"grad_norm": 0.38394787907600403,
"learning_rate": 0.00013863451256931287,
"loss": 2.0305,
"step": 417
},
{
"epoch": 0.3804755944931164,
"grad_norm": 0.38588935136795044,
"learning_rate": 0.00013836826755770384,
"loss": 2.1602,
"step": 418
},
{
"epoch": 0.381385823188076,
"grad_norm": 0.40827664732933044,
"learning_rate": 0.00013810170323362816,
"loss": 2.2656,
"step": 419
},
{
"epoch": 0.3822960518830356,
"grad_norm": 0.38694658875465393,
"learning_rate": 0.0001378348218155158,
"loss": 2.0107,
"step": 420
},
{
"epoch": 0.3832062805779952,
"grad_norm": 0.4133118987083435,
"learning_rate": 0.00013756762552443553,
"loss": 2.2531,
"step": 421
},
{
"epoch": 0.3841165092729548,
"grad_norm": 0.45007333159446716,
"learning_rate": 0.00013730011658407676,
"loss": 2.1968,
"step": 422
},
{
"epoch": 0.3850267379679144,
"grad_norm": 0.4120388329029083,
"learning_rate": 0.00013703229722073065,
"loss": 2.149,
"step": 423
},
{
"epoch": 0.385936966662874,
"grad_norm": 0.3964555561542511,
"learning_rate": 0.000136764169663272,
"loss": 1.9833,
"step": 424
},
{
"epoch": 0.38684719535783363,
"grad_norm": 0.4019937813282013,
"learning_rate": 0.00013649573614314044,
"loss": 2.2682,
"step": 425
},
{
"epoch": 0.3877574240527933,
"grad_norm": 0.4096015691757202,
"learning_rate": 0.00013622699889432184,
"loss": 2.2043,
"step": 426
},
{
"epoch": 0.3886676527477529,
"grad_norm": 0.4118809998035431,
"learning_rate": 0.00013595796015332984,
"loss": 2.0993,
"step": 427
},
{
"epoch": 0.3895778814427125,
"grad_norm": 0.40942683815956116,
"learning_rate": 0.00013568862215918717,
"loss": 2.1092,
"step": 428
},
{
"epoch": 0.3904881101376721,
"grad_norm": 0.44069868326187134,
"learning_rate": 0.00013541898715340716,
"loss": 2.1773,
"step": 429
},
{
"epoch": 0.3913983388326317,
"grad_norm": 0.4173736870288849,
"learning_rate": 0.00013514905737997473,
"loss": 2.3106,
"step": 430
},
{
"epoch": 0.3923085675275913,
"grad_norm": 0.4134718179702759,
"learning_rate": 0.00013487883508532815,
"loss": 2.0739,
"step": 431
},
{
"epoch": 0.3932187962225509,
"grad_norm": 0.4332071840763092,
"learning_rate": 0.00013460832251834011,
"loss": 2.1973,
"step": 432
},
{
"epoch": 0.3941290249175105,
"grad_norm": 0.44685009121894836,
"learning_rate": 0.00013433752193029886,
"loss": 2.1495,
"step": 433
},
{
"epoch": 0.3950392536124701,
"grad_norm": 0.443863183259964,
"learning_rate": 0.0001340664355748899,
"loss": 2.098,
"step": 434
},
{
"epoch": 0.39594948230742977,
"grad_norm": 0.4420264661312103,
"learning_rate": 0.0001337950657081768,
"loss": 2.11,
"step": 435
},
{
"epoch": 0.39685971100238937,
"grad_norm": 0.4621724784374237,
"learning_rate": 0.00013352341458858265,
"loss": 2.2452,
"step": 436
},
{
"epoch": 0.397769939697349,
"grad_norm": 0.4760790467262268,
"learning_rate": 0.00013325148447687125,
"loss": 2.2252,
"step": 437
},
{
"epoch": 0.3986801683923086,
"grad_norm": 0.44758686423301697,
"learning_rate": 0.0001329792776361282,
"loss": 2.0233,
"step": 438
},
{
"epoch": 0.3995903970872682,
"grad_norm": 0.4756091237068176,
"learning_rate": 0.00013270679633174218,
"loss": 2.0235,
"step": 439
},
|
{
"epoch": 0.4005006257822278,
"grad_norm": 0.46966737508773804,
"learning_rate": 0.00013243404283138597,
"loss": 2.1165,
"step": 440
},
{
"epoch": 0.4014108544771874,
"grad_norm": 0.49555307626724243,
"learning_rate": 0.00013216101940499768,
"loss": 2.0503,
"step": 441
},
{
"epoch": 0.402321083172147,
"grad_norm": 0.5097188353538513,
"learning_rate": 0.00013188772832476188,
"loss": 2.164,
"step": 442
},
{
"epoch": 0.4032313118671066,
"grad_norm": 0.5129069685935974,
"learning_rate": 0.00013161417186509052,
"loss": 2.2285,
"step": 443
},
{
"epoch": 0.4041415405620662,
"grad_norm": 0.5010905265808105,
"learning_rate": 0.00013134035230260427,
"loss": 2.0997,
"step": 444
},
{
"epoch": 0.40505176925702585,
"grad_norm": 0.5693898797035217,
"learning_rate": 0.00013106627191611332,
"loss": 2.2556,
"step": 445
},
{
"epoch": 0.40596199795198545,
"grad_norm": 0.5993850231170654,
"learning_rate": 0.0001307919329865985,
"loss": 2.4551,
"step": 446
},
{
"epoch": 0.40687222664694506,
"grad_norm": 0.6662326455116272,
"learning_rate": 0.00013051733779719234,
"loss": 2.5501,
"step": 447
},
{
"epoch": 0.40778245534190466,
"grad_norm": 0.7213377356529236,
"learning_rate": 0.00013024248863316012,
"loss": 2.5791,
"step": 448
},
{
"epoch": 0.40869268403686426,
"grad_norm": 0.8552189469337463,
"learning_rate": 0.00012996738778188067,
"loss": 2.5783,
"step": 449
},
{
"epoch": 0.40960291273182386,
"grad_norm": 1.2669048309326172,
"learning_rate": 0.0001296920375328275,
"loss": 2.2038,
"step": 450
},
{
"epoch": 0.41051314142678347,
"grad_norm": 0.4974716901779175,
"learning_rate": 0.00012941644017754964,
"loss": 2.4347,
"step": 451
},
{
"epoch": 0.41142337012174307,
"grad_norm": 0.44350573420524597,
"learning_rate": 0.00012914059800965268,
"loss": 2.5506,
"step": 452
},
{
"epoch": 0.4123335988167027,
"grad_norm": 0.43674564361572266,
"learning_rate": 0.0001288645133247795,
"loss": 2.4322,
"step": 453
},
{
"epoch": 0.41324382751166233,
"grad_norm": 0.46389102935791016,
"learning_rate": 0.00012858818842059145,
"loss": 2.4429,
"step": 454
},
{
"epoch": 0.41415405620662193,
"grad_norm": 0.4361862540245056,
"learning_rate": 0.00012831162559674887,
"loss": 2.4228,
"step": 455
},
{
"epoch": 0.41506428490158154,
"grad_norm": 0.4095577001571655,
"learning_rate": 0.0001280348271548923,
"loss": 2.3177,
"step": 456
},
{
"epoch": 0.41597451359654114,
"grad_norm": 0.41431957483291626,
"learning_rate": 0.00012775779539862304,
"loss": 2.592,
"step": 457
},
{
"epoch": 0.41688474229150074,
"grad_norm": 0.4042568802833557,
"learning_rate": 0.0001274805326334842,
"loss": 2.3442,
"step": 458
},
{
"epoch": 0.41779497098646035,
"grad_norm": 0.4499073922634125,
"learning_rate": 0.00012720304116694138,
"loss": 2.4013,
"step": 459
},
{
"epoch": 0.41870519968141995,
"grad_norm": 0.41216856241226196,
"learning_rate": 0.00012692532330836346,
"loss": 2.3945,
"step": 460
},
{
"epoch": 0.41961542837637955,
"grad_norm": 0.423570841550827,
"learning_rate": 0.00012664738136900348,
"loss": 2.3147,
"step": 461
},
{
"epoch": 0.42052565707133915,
"grad_norm": 0.40256237983703613,
"learning_rate": 0.00012636921766197943,
"loss": 2.3273,
"step": 462
},
{
"epoch": 0.42143588576629876,
"grad_norm": 0.39139389991760254,
"learning_rate": 0.0001260908345022547,
"loss": 2.1793,
"step": 463
},
{
"epoch": 0.4223461144612584,
"grad_norm": 0.4252980351448059,
"learning_rate": 0.00012581223420661913,
"loss": 2.4073,
"step": 464
},
{
"epoch": 0.423256343156218,
"grad_norm": 0.40091437101364136,
"learning_rate": 0.00012553341909366978,
"loss": 2.0757,
"step": 465
},
{
"epoch": 0.4241665718511776,
"grad_norm": 0.3780224323272705,
"learning_rate": 0.00012525439148379128,
"loss": 2.1467,
"step": 466
},
{
"epoch": 0.4250768005461372,
"grad_norm": 0.3823038637638092,
"learning_rate": 0.00012497515369913685,
"loss": 2.0455,
"step": 467
},
{
"epoch": 0.4259870292410968,
"grad_norm": 0.3887813091278076,
"learning_rate": 0.00012469570806360875,
"loss": 2.1608,
"step": 468
},
{
"epoch": 0.42689725793605643,
"grad_norm": 0.3840020000934601,
"learning_rate": 0.00012441605690283915,
"loss": 2.0606,
"step": 469
},
{
"epoch": 0.42780748663101603,
"grad_norm": 0.402353972196579,
"learning_rate": 0.00012413620254417057,
"loss": 2.1491,
"step": 470
},
{
"epoch": 0.42871771532597563,
"grad_norm": 0.3938208818435669,
"learning_rate": 0.00012385614731663666,
"loss": 2.1968,
"step": 471
},
{
"epoch": 0.42962794402093524,
"grad_norm": 0.42270010709762573,
"learning_rate": 0.00012357589355094275,
"loss": 2.0428,
"step": 472
},
{
"epoch": 0.4305381727158949,
"grad_norm": 0.44319435954093933,
"learning_rate": 0.0001232954435794464,
"loss": 2.3332,
"step": 473
},
{
"epoch": 0.4314484014108545,
"grad_norm": 0.39667901396751404,
"learning_rate": 0.00012301479973613822,
"loss": 2.1076,
"step": 474
},
{
"epoch": 0.4323586301058141,
"grad_norm": 0.4128730595111847,
"learning_rate": 0.00012273396435662212,
"loss": 2.0687,
"step": 475
},
{
"epoch": 0.4332688588007737,
"grad_norm": 0.4556971788406372,
"learning_rate": 0.00012245293977809605,
"loss": 2.1699,
"step": 476
},
{
"epoch": 0.4341790874957333,
"grad_norm": 0.43817293643951416,
"learning_rate": 0.0001221717283393326,
"loss": 2.2619,
"step": 477
},
{
"epoch": 0.4350893161906929,
"grad_norm": 0.4148106873035431,
"learning_rate": 0.0001218903323806595,
"loss": 2.0805,
"step": 478
},
{
"epoch": 0.4359995448856525,
"grad_norm": 0.428019255399704,
"learning_rate": 0.00012160875424393996,
"loss": 2.1673,
"step": 479
},
|
{
"epoch": 0.4369097735806121,
"grad_norm": 0.44368696212768555,
"learning_rate": 0.00012132699627255347,
"loss": 2.1894,
"step": 480
},
{
"epoch": 0.4378200022755717,
"grad_norm": 0.44192251563072205,
"learning_rate": 0.00012104506081137608,
"loss": 2.131,
"step": 481
},
{
"epoch": 0.4387302309705313,
"grad_norm": 0.45728325843811035,
"learning_rate": 0.00012076295020676103,
"loss": 2.1606,
"step": 482
},
{
"epoch": 0.439640459665491,
"grad_norm": 0.45007196068763733,
"learning_rate": 0.00012048066680651908,
"loss": 2.1161,
"step": 483
},
{
"epoch": 0.4405506883604506,
"grad_norm": 0.46728429198265076,
"learning_rate": 0.00012019821295989912,
"loss": 2.2119,
"step": 484
},
{
"epoch": 0.4414609170554102,
"grad_norm": 0.44168952107429504,
"learning_rate": 0.00011991559101756852,
"loss": 2.1377,
"step": 485
},
{
"epoch": 0.4423711457503698,
"grad_norm": 0.43559008836746216,
"learning_rate": 0.00011963280333159358,
"loss": 2.054,
"step": 486
},
{
"epoch": 0.4432813744453294,
"grad_norm": 0.45376813411712646,
"learning_rate": 0.00011934985225541998,
"loss": 2.1481,
"step": 487
},
{
"epoch": 0.444191603140289,
"grad_norm": 0.49441829323768616,
"learning_rate": 0.00011906674014385318,
"loss": 2.0623,
"step": 488
},
{
"epoch": 0.4451018318352486,
"grad_norm": 0.4792848229408264,
"learning_rate": 0.00011878346935303883,
"loss": 2.2903,
"step": 489
},
{
"epoch": 0.4460120605302082,
"grad_norm": 0.4943479597568512,
"learning_rate": 0.00011850004224044315,
"loss": 2.3084,
"step": 490
},
{
"epoch": 0.4469222892251678,
"grad_norm": 0.5104116797447205,
"learning_rate": 0.00011821646116483335,
"loss": 2.2815,
"step": 491
},
{
"epoch": 0.44783251792012746,
"grad_norm": 0.4758678674697876,
"learning_rate": 0.00011793272848625797,
"loss": 2.0609,
"step": 492
},
{
"epoch": 0.44874274661508706,
"grad_norm": 0.5053780674934387,
"learning_rate": 0.0001176488465660271,
"loss": 2.1577,
"step": 493
},
{
"epoch": 0.44965297531004667,
"grad_norm": 0.5221775770187378,
"learning_rate": 0.00011736481776669306,
"loss": 2.2924,
"step": 494
},
{
"epoch": 0.45056320400500627,
"grad_norm": 0.5941824913024902,
"learning_rate": 0.00011708064445203042,
"loss": 2.3528,
"step": 495
},
{
"epoch": 0.45147343269996587,
"grad_norm": 0.5934675931930542,
"learning_rate": 0.00011679632898701649,
"loss": 2.4301,
"step": 496
},
{
"epoch": 0.4523836613949255,
"grad_norm": 0.6429802179336548,
"learning_rate": 0.0001165118737378116,
"loss": 2.605,
"step": 497
},
{
"epoch": 0.4532938900898851,
"grad_norm": 0.7099617719650269,
"learning_rate": 0.00011622728107173946,
"loss": 2.4261,
"step": 498
},
{
"epoch": 0.4542041187848447,
"grad_norm": 0.8483153581619263,
"learning_rate": 0.00011594255335726724,
"loss": 2.5175,
"step": 499
},
{
"epoch": 0.4551143474798043,
"grad_norm": 1.6742238998413086,
"learning_rate": 0.00011565769296398618,
"loss": 2.6628,
"step": 500
},
{
"epoch": 0.4560245761747639,
"grad_norm": 0.4585944712162018,
"learning_rate": 0.00011537270226259169,
"loss": 2.5794,
"step": 501
},
{
"epoch": 0.45693480486972354,
"grad_norm": 0.44302108883857727,
"learning_rate": 0.00011508758362486358,
"loss": 2.3893,
"step": 502
},
{
"epoch": 0.45784503356468315,
"grad_norm": 0.44478365778923035,
"learning_rate": 0.00011480233942364645,
"loss": 2.3204,
"step": 503
},
{
"epoch": 0.45875526225964275,
"grad_norm": 0.432339608669281,
"learning_rate": 0.00011451697203282982,
"loss": 2.3721,
"step": 504
},
{
"epoch": 0.45966549095460235,
"grad_norm": 0.4154585301876068,
"learning_rate": 0.00011423148382732853,
"loss": 2.4017,
"step": 505
},
{
"epoch": 0.46057571964956195,
"grad_norm": 0.4535946846008301,
"learning_rate": 0.00011394587718306275,
"loss": 2.5771,
"step": 506
},
{
"epoch": 0.46148594834452156,
"grad_norm": 0.4111529588699341,
"learning_rate": 0.00011366015447693837,
"loss": 2.2596,
"step": 507
},
{
"epoch": 0.46239617703948116,
"grad_norm": 0.431909441947937,
"learning_rate": 0.0001133743180868273,
"loss": 2.3504,
"step": 508
},
{
"epoch": 0.46330640573444076,
"grad_norm": 0.4069095849990845,
"learning_rate": 0.00011308837039154739,
"loss": 2.2642,
"step": 509
},
{
"epoch": 0.46421663442940037,
"grad_norm": 0.4086742401123047,
"learning_rate": 0.0001128023137708429,
"loss": 2.2712,
"step": 510
},
{
"epoch": 0.46512686312435997,
"grad_norm": 0.4243764579296112,
"learning_rate": 0.0001125161506053646,
"loss": 2.3859,
"step": 511
},
{
"epoch": 0.4660370918193196,
"grad_norm": 0.39640411734580994,
"learning_rate": 0.00011222988327664997,
"loss": 2.1983,
"step": 512
},
{
"epoch": 0.46694732051427923,
"grad_norm": 0.4024841785430908,
"learning_rate": 0.00011194351416710324,
"loss": 2.2473,
"step": 513
},
{
"epoch": 0.46785754920923883,
"grad_norm": 0.3982551693916321,
"learning_rate": 0.00011165704565997593,
"loss": 2.1234,
"step": 514
},
{
"epoch": 0.46876777790419843,
"grad_norm": 0.4064629375934601,
"learning_rate": 0.00011137048013934656,
"loss": 2.1587,
"step": 515
},
{
"epoch": 0.46967800659915804,
"grad_norm": 0.3930409550666809,
"learning_rate": 0.00011108381999010111,
"loss": 2.2324,
"step": 516
},
{
"epoch": 0.47058823529411764,
"grad_norm": 0.38076552748680115,
"learning_rate": 0.00011079706759791311,
"loss": 2.1943,
"step": 517
},
{
"epoch": 0.47149846398907724,
"grad_norm": 0.38611575961112976,
"learning_rate": 0.00011051022534922371,
"loss": 2.1572,
"step": 518
},
{
"epoch": 0.47240869268403685,
"grad_norm": 0.3925657868385315,
"learning_rate": 0.00011022329563122191,
"loss": 2.232,
"step": 519
},
|
{
"epoch": 0.47331892137899645,
"grad_norm": 0.401633620262146,
"learning_rate": 0.00010993628083182467,
"loss": 2.1639,
"step": 520
},
{
"epoch": 0.4742291500739561,
"grad_norm": 0.3987519443035126,
"learning_rate": 0.000109649183339657,
"loss": 2.1657,
"step": 521
},
{
"epoch": 0.4751393787689157,
"grad_norm": 0.3978632986545563,
"learning_rate": 0.00010936200554403209,
"loss": 2.1929,
"step": 522
},
{
"epoch": 0.4760496074638753,
"grad_norm": 0.40962356328964233,
"learning_rate": 0.00010907474983493144,
"loss": 2.167,
"step": 523
},
{
"epoch": 0.4769598361588349,
"grad_norm": 0.41112762689590454,
"learning_rate": 0.00010878741860298503,
"loss": 2.178,
"step": 524
},
{
"epoch": 0.4778700648537945,
"grad_norm": 0.4231667220592499,
"learning_rate": 0.00010850001423945126,
"loss": 1.995,
"step": 525
},
{
"epoch": 0.4787802935487541,
"grad_norm": 0.42191213369369507,
"learning_rate": 0.00010821253913619726,
"loss": 2.1639,
"step": 526
},
{
"epoch": 0.4796905222437137,
"grad_norm": 0.41400229930877686,
"learning_rate": 0.00010792499568567884,
"loss": 2.129,
"step": 527
},
{
"epoch": 0.4806007509386733,
"grad_norm": 0.4085232615470886,
"learning_rate": 0.00010763738628092062,
"loss": 2.0848,
"step": 528
},
{
"epoch": 0.48151097963363293,
"grad_norm": 0.4125650227069855,
"learning_rate": 0.00010734971331549603,
"loss": 1.9966,
"step": 529
},
{
"epoch": 0.48242120832859253,
"grad_norm": 0.42445600032806396,
"learning_rate": 0.00010706197918350758,
"loss": 1.9826,
"step": 530
},
{
"epoch": 0.4833314370235522,
"grad_norm": 0.45885640382766724,
"learning_rate": 0.0001067741862795668,
"loss": 2.1072,
"step": 531
},
{
"epoch": 0.4842416657185118,
"grad_norm": 0.45989179611206055,
"learning_rate": 0.0001064863369987743,
"loss": 2.4108,
"step": 532
},
{
"epoch": 0.4851518944134714,
"grad_norm": 0.4582422375679016,
"learning_rate": 0.00010619843373669993,
"loss": 2.1733,
"step": 533
},
{
"epoch": 0.486062123108431,
"grad_norm": 0.4393966495990753,
"learning_rate": 0.00010591047888936274,
"loss": 2.1335,
"step": 534
},
{
"epoch": 0.4869723518033906,
"grad_norm": 0.4389214813709259,
"learning_rate": 0.00010562247485321115,
"loss": 2.0692,
"step": 535
},
{
"epoch": 0.4878825804983502,
"grad_norm": 0.4686262905597687,
"learning_rate": 0.00010533442402510284,
"loss": 2.226,
"step": 536
},
{
"epoch": 0.4887928091933098,
"grad_norm": 0.47556954622268677,
"learning_rate": 0.00010504632880228498,
"loss": 2.2484,
"step": 537
},
{
"epoch": 0.4897030378882694,
"grad_norm": 0.46617594361305237,
"learning_rate": 0.00010475819158237425,
"loss": 2.2602,
"step": 538
},
{
"epoch": 0.490613266583229,
"grad_norm": 0.47310179471969604,
"learning_rate": 0.00010447001476333673,
"loss": 2.089,
"step": 539
},
{
"epoch": 0.49152349527818867,
"grad_norm": 0.507408618927002,
"learning_rate": 0.00010418180074346815,
"loss": 2.2762,
"step": 540
},
{
"epoch": 0.4924337239731483,
"grad_norm": 0.501750111579895,
"learning_rate": 0.00010389355192137377,
"loss": 2.1129,
"step": 541
},
{
"epoch": 0.4933439526681079,
"grad_norm": 0.5230852365493774,
"learning_rate": 0.00010360527069594859,
"loss": 2.4063,
"step": 542
},
{
"epoch": 0.4942541813630675,
"grad_norm": 0.499337762594223,
"learning_rate": 0.00010331695946635708,
"loss": 2.1379,
"step": 543
},
{
"epoch": 0.4951644100580271,
"grad_norm": 0.5291334390640259,
"learning_rate": 0.00010302862063201367,
"loss": 2.2284,
"step": 544
},
{
"epoch": 0.4960746387529867,
"grad_norm": 0.5520262718200684,
"learning_rate": 0.00010274025659256232,
"loss": 2.2331,
"step": 545
},
{
"epoch": 0.4969848674479463,
"grad_norm": 0.5880653262138367,
"learning_rate": 0.00010245186974785685,
"loss": 2.369,
"step": 546
},
{
"epoch": 0.4978950961429059,
"grad_norm": 0.5887028574943542,
"learning_rate": 0.00010216346249794087,
"loss": 2.3323,
"step": 547
},
{
"epoch": 0.4988053248378655,
"grad_norm": 0.664823055267334,
"learning_rate": 0.00010187503724302776,
"loss": 2.4451,
"step": 548
},
{
"epoch": 0.4997155535328251,
"grad_norm": 0.8141515254974365,
"learning_rate": 0.00010158659638348081,
"loss": 2.4308,
"step": 549
},
{
"epoch": 0.5006257822277848,
"grad_norm": 1.4680793285369873,
"learning_rate": 0.0001012981423197931,
"loss": 2.6184,
"step": 550
},
{
"epoch": 0.5006257822277848,
"eval_loss": 2.2486555576324463,
"eval_runtime": 203.9397,
"eval_samples_per_second": 9.076,
"eval_steps_per_second": 4.541,
"step": 550
},
|
{
"epoch": 0.5015360109227444,
"grad_norm": 0.44648221135139465,
"learning_rate": 0.00010100967745256766,
"loss": 2.4608,
"step": 551
},
{
"epoch": 0.502446239617704,
"grad_norm": 0.4537757635116577,
"learning_rate": 0.00010072120418249745,
"loss": 2.3225,
"step": 552
},
{
"epoch": 0.5033564683126636,
"grad_norm": 0.45517390966415405,
"learning_rate": 0.00010043272491034523,
"loss": 2.4937,
"step": 553
},
{
"epoch": 0.5042666970076232,
"grad_norm": 0.4309350848197937,
"learning_rate": 0.00010014424203692388,
"loss": 2.3752,
"step": 554
},
{
"epoch": 0.5051769257025828,
"grad_norm": 0.4162474274635315,
"learning_rate": 9.985575796307615e-05,
"loss": 2.356,
"step": 555
},
{
"epoch": 0.5060871543975424,
"grad_norm": 0.454484224319458,
"learning_rate": 9.956727508965481e-05,
"loss": 2.3114,
"step": 556
},
{
"epoch": 0.506997383092502,
"grad_norm": 0.4174659550189972,
"learning_rate": 9.927879581750259e-05,
"loss": 2.2907,
"step": 557
},
{
"epoch": 0.5079076117874616,
"grad_norm": 0.42208272218704224,
"learning_rate": 9.899032254743235e-05,
"loss": 2.3062,
"step": 558
},
{
"epoch": 0.5088178404824212,
"grad_norm": 0.41622260212898254,
"learning_rate": 9.870185768020693e-05,
"loss": 2.3293,
"step": 559
},
{
"epoch": 0.5097280691773808,
"grad_norm": 0.45536091923713684,
"learning_rate": 9.84134036165192e-05,
"loss": 2.2693,
"step": 560
},
{
"epoch": 0.5106382978723404,
"grad_norm": 0.3866654634475708,
"learning_rate": 9.812496275697226e-05,
"loss": 2.1254,
"step": 561
},
{
"epoch": 0.5115485265673,
"grad_norm": 0.41163724660873413,
"learning_rate": 9.783653750205915e-05,
"loss": 2.2173,
"step": 562
},
{
"epoch": 0.5124587552622596,
"grad_norm": 0.4118368327617645,
"learning_rate": 9.754813025214317e-05,
"loss": 2.2477,
"step": 563
},
{
"epoch": 0.5133689839572193,
"grad_norm": 0.4303027093410492,
"learning_rate": 9.725974340743769e-05,
"loss": 2.3855,
"step": 564
},
{
"epoch": 0.5142792126521789,
"grad_norm": 0.41484424471855164,
"learning_rate": 9.697137936798634e-05,
"loss": 2.289,
"step": 565
},
{
"epoch": 0.5151894413471385,
"grad_norm": 0.397540807723999,
"learning_rate": 9.668304053364294e-05,
"loss": 2.1869,
"step": 566
},
{
"epoch": 0.5160996700420981,
"grad_norm": 0.38194504380226135,
"learning_rate": 9.639472930405143e-05,
"loss": 2.1475,
"step": 567
},
{
"epoch": 0.5170098987370577,
"grad_norm": 0.42346352338790894,
"learning_rate": 9.610644807862625e-05,
"loss": 2.1869,
"step": 568
},
{
"epoch": 0.5179201274320173,
"grad_norm": 0.4076716899871826,
"learning_rate": 9.581819925653188e-05,
"loss": 2.1979,
"step": 569
},
{
"epoch": 0.5188303561269769,
"grad_norm": 0.3888402581214905,
"learning_rate": 9.552998523666326e-05,
"loss": 2.131,
"step": 570
},
{
"epoch": 0.5197405848219365,
"grad_norm": 0.3931845426559448,
"learning_rate": 9.524180841762577e-05,
"loss": 2.07,
"step": 571
},
{
"epoch": 0.5206508135168961,
"grad_norm": 0.38014844059944153,
"learning_rate": 9.495367119771503e-05,
"loss": 1.8905,
"step": 572
},
{
"epoch": 0.5215610422118557,
"grad_norm": 0.4095986783504486,
"learning_rate": 9.46655759748972e-05,
"loss": 2.1217,
"step": 573
},
{
"epoch": 0.5224712709068153,
"grad_norm": 0.3965059518814087,
"learning_rate": 9.437752514678887e-05,
"loss": 2.0446,
"step": 574
},
{
"epoch": 0.5233814996017749,
"grad_norm": 0.42244595289230347,
"learning_rate": 9.408952111063727e-05,
"loss": 2.1184,
"step": 575
},
{
"epoch": 0.5242917282967345,
"grad_norm": 0.4098374843597412,
"learning_rate": 9.380156626330009e-05,
"loss": 2.0365,
"step": 576
},
{
"epoch": 0.5252019569916941,
"grad_norm": 0.424630731344223,
"learning_rate": 9.35136630012257e-05,
"loss": 2.1321,
"step": 577
},
{
"epoch": 0.5261121856866537,
"grad_norm": 0.431257039308548,
"learning_rate": 9.322581372043321e-05,
"loss": 2.1883,
"step": 578
},
{
"epoch": 0.5270224143816133,
"grad_norm": 0.40972310304641724,
"learning_rate": 9.293802081649243e-05,
"loss": 2.0498,
"step": 579
},
{
"epoch": 0.527932643076573,
"grad_norm": 0.4238174855709076,
"learning_rate": 9.265028668450402e-05,
"loss": 2.0831,
"step": 580
},
{
"epoch": 0.5288428717715326,
"grad_norm": 0.43082720041275024,
"learning_rate": 9.23626137190794e-05,
"loss": 2.1704,
"step": 581
},
{
"epoch": 0.5297531004664922,
"grad_norm": 0.4657386541366577,
"learning_rate": 9.207500431432115e-05,
"loss": 2.1338,
"step": 582
},
{
"epoch": 0.5306633291614519,
"grad_norm": 0.45546218752861023,
"learning_rate": 9.178746086380275e-05,
"loss": 2.1469,
"step": 583
},
{
"epoch": 0.5315735578564115,
"grad_norm": 0.4571657180786133,
"learning_rate": 9.149998576054874e-05,
"loss": 2.2036,
"step": 584
},
{
"epoch": 0.5324837865513711,
"grad_norm": 0.4736308753490448,
"learning_rate": 9.121258139701502e-05,
"loss": 2.2127,
"step": 585
},
{
"epoch": 0.5333940152463307,
"grad_norm": 0.46164044737815857,
"learning_rate": 9.092525016506858e-05,
"loss": 2.1177,
"step": 586
},
{
"epoch": 0.5343042439412903,
"grad_norm": 0.48902738094329834,
"learning_rate": 9.063799445596795e-05,
"loss": 2.2188,
"step": 587
},
{
"epoch": 0.5352144726362499,
"grad_norm": 0.4848826825618744,
"learning_rate": 9.035081666034304e-05,
"loss": 2.2372,
"step": 588
},
{
"epoch": 0.5361247013312095,
"grad_norm": 0.49447596073150635,
"learning_rate": 9.006371916817534e-05,
"loss": 2.2786,
"step": 589
},
{
"epoch": 0.5370349300261691,
"grad_norm": 0.45803365111351013,
"learning_rate": 8.977670436877811e-05,
"loss": 2.0856,
"step": 590
},
|
{
"epoch": 0.5379451587211287,
"grad_norm": 0.5051137208938599,
"learning_rate": 8.948977465077632e-05,
"loss": 2.2219,
"step": 591
},
{
"epoch": 0.5388553874160883,
"grad_norm": 0.5000495910644531,
"learning_rate": 8.920293240208694e-05,
"loss": 2.2151,
"step": 592
},
{
"epoch": 0.5397656161110479,
"grad_norm": 0.5208660960197449,
"learning_rate": 8.891618000989891e-05,
"loss": 2.3381,
"step": 593
},
{
"epoch": 0.5406758448060075,
"grad_norm": 0.5478562116622925,
"learning_rate": 8.862951986065345e-05,
"loss": 2.1592,
"step": 594
},
{
"epoch": 0.5415860735009671,
"grad_norm": 0.5808135271072388,
"learning_rate": 8.83429543400241e-05,
"loss": 2.3678,
"step": 595
},
{
"epoch": 0.5424963021959267,
"grad_norm": 0.5837730765342712,
"learning_rate": 8.805648583289674e-05,
"loss": 2.3397,
"step": 596
},
{
"epoch": 0.5434065308908863,
"grad_norm": 0.6297701597213745,
"learning_rate": 8.777011672335008e-05,
"loss": 2.4747,
"step": 597
},
{
"epoch": 0.5443167595858459,
"grad_norm": 0.735091507434845,
"learning_rate": 8.748384939463543e-05,
"loss": 2.5623,
"step": 598
},
{
"epoch": 0.5452269882808055,
"grad_norm": 0.8794751167297363,
"learning_rate": 8.719768622915714e-05,
"loss": 2.557,
"step": 599
},
{
"epoch": 0.5461372169757651,
"grad_norm": 1.7257400751113892,
"learning_rate": 8.691162960845264e-05,
"loss": 2.7309,
"step": 600
},
{
"epoch": 0.5470474456707247,
"grad_norm": 0.42087942361831665,
"learning_rate": 8.662568191317273e-05,
"loss": 2.3698,
"step": 601
},
{
"epoch": 0.5479576743656844,
"grad_norm": 0.45860013365745544,
"learning_rate": 8.633984552306164e-05,
"loss": 2.4207,
"step": 602
},
{
"epoch": 0.548867903060644,
"grad_norm": 0.4520268738269806,
"learning_rate": 8.605412281693727e-05,
"loss": 2.5066,
"step": 603
},
{
"epoch": 0.5497781317556036,
"grad_norm": 0.42296549677848816,
"learning_rate": 8.57685161726715e-05,
"loss": 2.3086,
"step": 604
},
{
"epoch": 0.5506883604505632,
"grad_norm": 0.4488202631473541,
"learning_rate": 8.548302796717019e-05,
"loss": 2.3949,
"step": 605
},
{
"epoch": 0.5515985891455228,
"grad_norm": 0.43144237995147705,
"learning_rate": 8.519766057635355e-05,
"loss": 2.3862,
"step": 606
},
{
"epoch": 0.5525088178404824,
"grad_norm": 0.43359822034835815,
"learning_rate": 8.491241637513644e-05,
"loss": 2.2576,
"step": 607
},
{
"epoch": 0.553419046535442,
"grad_norm": 0.41321370005607605,
"learning_rate": 8.462729773740832e-05,
"loss": 2.294,
"step": 608
},
{
"epoch": 0.5543292752304017,
"grad_norm": 0.4104655981063843,
"learning_rate": 8.434230703601384e-05,
"loss": 2.2005,
"step": 609
},
{
"epoch": 0.5552395039253613,
"grad_norm": 0.4300507605075836,
"learning_rate": 8.405744664273278e-05,
"loss": 2.4242,
"step": 610
},
{
"epoch": 0.5561497326203209,
"grad_norm": 0.42208558320999146,
"learning_rate": 8.37727189282606e-05,
"loss": 2.2788,
"step": 611
},
{
"epoch": 0.5570599613152805,
"grad_norm": 0.4338865280151367,
"learning_rate": 8.34881262621884e-05,
"loss": 2.4823,
"step": 612
},
{
"epoch": 0.5579701900102401,
"grad_norm": 0.399996280670166,
"learning_rate": 8.320367101298351e-05,
"loss": 2.1704,
"step": 613
},
{
"epoch": 0.5588804187051997,
"grad_norm": 0.4285057783126831,
"learning_rate": 8.291935554796962e-05,
"loss": 2.3405,
"step": 614
},
{
"epoch": 0.5597906474001593,
"grad_norm": 0.41062313318252563,
"learning_rate": 8.263518223330697e-05,
"loss": 2.2424,
"step": 615
},
{
"epoch": 0.5607008760951189,
"grad_norm": 0.40377363562583923,
"learning_rate": 8.235115343397295e-05,
"loss": 2.2611,
"step": 616
},
{
"epoch": 0.5616111047900785,
"grad_norm": 0.38683268427848816,
"learning_rate": 8.206727151374207e-05,
"loss": 2.0895,
"step": 617
},
{
"epoch": 0.5625213334850381,
"grad_norm": 0.3813983201980591,
"learning_rate": 8.178353883516664e-05,
"loss": 2.0735,
"step": 618
},
{
"epoch": 0.5634315621799977,
"grad_norm": 0.40514618158340454,
"learning_rate": 8.149995775955686e-05,
"loss": 2.2261,
"step": 619
},
{
"epoch": 0.5643417908749573,
"grad_norm": 0.3858005106449127,
"learning_rate": 8.121653064696118e-05,
"loss": 2.0794,
"step": 620
},
{
"epoch": 0.565252019569917,
"grad_norm": 0.4032715857028961,
"learning_rate": 8.093325985614685e-05,
"loss": 2.2016,
"step": 621
},
{
"epoch": 0.5661622482648766,
"grad_norm": 0.40262308716773987,
"learning_rate": 8.065014774458003e-05,
"loss": 2.1193,
"step": 622
},
{
"epoch": 0.5670724769598362,
"grad_norm": 0.4016035497188568,
"learning_rate": 8.036719666840647e-05,
"loss": 2.0265,
"step": 623
},
{
"epoch": 0.5679827056547958,
"grad_norm": 0.42149612307548523,
"learning_rate": 8.008440898243149e-05,
"loss": 2.1186,
"step": 624
},
{
"epoch": 0.5688929343497554,
"grad_norm": 0.40055879950523376,
"learning_rate": 7.980178704010089e-05,
"loss": 2.0066,
"step": 625
},
{
"epoch": 0.569803163044715,
"grad_norm": 0.40722012519836426,
"learning_rate": 7.951933319348095e-05,
"loss": 2.0262,
"step": 626
},
{
"epoch": 0.5707133917396746,
"grad_norm": 0.4256587624549866,
"learning_rate": 7.923704979323899e-05,
"loss": 2.0765,
"step": 627
},
{
"epoch": 0.5716236204346342,
"grad_norm": 0.452120840549469,
"learning_rate": 7.895493918862396e-05,
"loss": 2.2261,
"step": 628
},
{
"epoch": 0.5725338491295938,
"grad_norm": 0.45264679193496704,
"learning_rate": 7.867300372744657e-05,
"loss": 2.1382,
"step": 629
},
{
"epoch": 0.5734440778245534,
"grad_norm": 0.4367516040802002,
"learning_rate": 7.839124575606004e-05,
"loss": 2.1152,
"step": 630
},
|
{
"epoch": 0.574354306519513,
"grad_norm": 0.41711944341659546,
"learning_rate": 7.810966761934053e-05,
"loss": 2.0499,
"step": 631
},
{
"epoch": 0.5752645352144726,
"grad_norm": 0.4274366497993469,
"learning_rate": 7.782827166066739e-05,
"loss": 2.0843,
"step": 632
},
{
"epoch": 0.5761747639094322,
"grad_norm": 0.4412054717540741,
"learning_rate": 7.754706022190398e-05,
"loss": 2.1291,
"step": 633
},
{
"epoch": 0.5770849926043918,
"grad_norm": 0.43048036098480225,
"learning_rate": 7.726603564337791e-05,
"loss": 2.0492,
"step": 634
},
{
"epoch": 0.5779952212993514,
"grad_norm": 0.4570690393447876,
"learning_rate": 7.69852002638618e-05,
"loss": 2.2868,
"step": 635
},
{
"epoch": 0.578905449994311,
"grad_norm": 0.4373176097869873,
"learning_rate": 7.670455642055361e-05,
"loss": 2.106,
"step": 636
},
{
"epoch": 0.5798156786892706,
"grad_norm": 0.46953141689300537,
"learning_rate": 7.642410644905726e-05,
"loss": 2.2209,
"step": 637
},
{
"epoch": 0.5807259073842302,
"grad_norm": 0.48714450001716614,
"learning_rate": 7.614385268336336e-05,
"loss": 2.2923,
"step": 638
},
{
"epoch": 0.5816361360791898,
"grad_norm": 0.45921769738197327,
"learning_rate": 7.586379745582944e-05,
"loss": 2.1636,
"step": 639
},
{
"epoch": 0.5825463647741494,
"grad_norm": 0.4652685821056366,
"learning_rate": 7.558394309716088e-05,
"loss": 2.205,
"step": 640
},
{
"epoch": 0.5834565934691092,
"grad_norm": 0.48991554975509644,
"learning_rate": 7.530429193639128e-05,
"loss": 2.1805,
"step": 641
},
{
"epoch": 0.5843668221640688,
"grad_norm": 0.515925407409668,
"learning_rate": 7.502484630086318e-05,
"loss": 2.2075,
"step": 642
},
{
"epoch": 0.5852770508590284,
"grad_norm": 0.497593492269516,
"learning_rate": 7.474560851620873e-05,
"loss": 2.0497,
"step": 643
},
{
"epoch": 0.586187279553988,
"grad_norm": 0.5590106248855591,
"learning_rate": 7.446658090633026e-05,
"loss": 2.273,
"step": 644
},
{
"epoch": 0.5870975082489476,
"grad_norm": 0.570801854133606,
"learning_rate": 7.41877657933809e-05,
"loss": 2.4023,
"step": 645
},
{
"epoch": 0.5880077369439072,
"grad_norm": 0.5791385173797607,
"learning_rate": 7.390916549774536e-05,
"loss": 2.2391,
"step": 646
},
{
"epoch": 0.5889179656388668,
"grad_norm": 0.677937924861908,
"learning_rate": 7.363078233802063e-05,
"loss": 2.6502,
"step": 647
},
{
"epoch": 0.5898281943338264,
"grad_norm": 0.6995664238929749,
"learning_rate": 7.335261863099651e-05,
"loss": 2.4716,
"step": 648
},
{
"epoch": 0.590738423028786,
"grad_norm": 0.8106637597084045,
"learning_rate": 7.307467669163655e-05,
"loss": 2.3574,
"step": 649
},
{
"epoch": 0.5916486517237456,
"grad_norm": 1.577392816543579,
"learning_rate": 7.279695883305866e-05,
"loss": 2.1657,
"step": 650
},
{
"epoch": 0.5925588804187052,
"grad_norm": 0.43565502762794495,
"learning_rate": 7.251946736651582e-05,
"loss": 2.4673,
"step": 651
},
{
"epoch": 0.5934691091136648,
"grad_norm": 0.42318448424339294,
"learning_rate": 7.224220460137701e-05,
"loss": 2.409,
"step": 652
},
{
"epoch": 0.5943793378086244,
"grad_norm": 0.41891592741012573,
"learning_rate": 7.196517284510773e-05,
"loss": 2.3842,
"step": 653
},
{
"epoch": 0.595289566503584,
"grad_norm": 0.4418095350265503,
"learning_rate": 7.168837440325114e-05,
"loss": 2.3998,
"step": 654
},
{
"epoch": 0.5961997951985436,
"grad_norm": 0.43118715286254883,
"learning_rate": 7.141181157940859e-05,
"loss": 2.3845,
"step": 655
},
{
"epoch": 0.5971100238935032,
"grad_norm": 0.41435372829437256,
"learning_rate": 7.11354866752205e-05,
"loss": 2.3077,
"step": 656
},
{
"epoch": 0.5980202525884628,
"grad_norm": 0.4133334457874298,
"learning_rate": 7.085940199034735e-05,
"loss": 2.385,
"step": 657
},
{
"epoch": 0.5989304812834224,
"grad_norm": 0.4240586757659912,
"learning_rate": 7.058355982245037e-05,
"loss": 2.3835,
"step": 658
},
{
"epoch": 0.599840709978382,
"grad_norm": 0.4071354568004608,
"learning_rate": 7.030796246717255e-05,
"loss": 2.0752,
"step": 659
},
{
"epoch": 0.6007509386733417,
"grad_norm": 0.44435110688209534,
"learning_rate": 7.003261221811934e-05,
"loss": 2.4811,
"step": 660
},
{
"epoch": 0.6016611673683013,
"grad_norm": 0.4175739884376526,
"learning_rate": 6.97575113668399e-05,
"loss": 2.3717,
"step": 661
},
{
"epoch": 0.6025713960632609,
"grad_norm": 0.41801995038986206,
"learning_rate": 6.948266220280771e-05,
"loss": 2.3406,
"step": 662
},
{
"epoch": 0.6034816247582205,
"grad_norm": 0.42795541882514954,
"learning_rate": 6.920806701340155e-05,
"loss": 2.1457,
"step": 663
},
{
"epoch": 0.6043918534531801,
"grad_norm": 0.39928749203681946,
"learning_rate": 6.893372808388675e-05,
"loss": 2.2435,
"step": 664
},
{
"epoch": 0.6053020821481397,
"grad_norm": 0.39942193031311035,
"learning_rate": 6.865964769739575e-05,
"loss": 2.1791,
"step": 665
},
{
"epoch": 0.6062123108430993,
"grad_norm": 0.3968772888183594,
"learning_rate": 6.838582813490947e-05,
"loss": 2.106,
"step": 666
},
{
"epoch": 0.607122539538059,
"grad_norm": 0.41822007298469543,
"learning_rate": 6.811227167523815e-05,
"loss": 2.2538,
"step": 667
},
{
"epoch": 0.6080327682330186,
"grad_norm": 0.39328843355178833,
"learning_rate": 6.783898059500233e-05,
"loss": 2.1372,
"step": 668
},
{
"epoch": 0.6089429969279782,
"grad_norm": 0.39368972182273865,
"learning_rate": 6.756595716861407e-05,
"loss": 2.1002,
"step": 669
},
{
"epoch": 0.6098532256229378,
"grad_norm": 0.41033080220222473,
"learning_rate": 6.729320366825784e-05,
"loss": 2.0963,
"step": 670
},
|
{
"epoch": 0.6107634543178974,
"grad_norm": 0.40454182028770447,
"learning_rate": 6.702072236387182e-05,
"loss": 2.0902,
"step": 671
},
{
"epoch": 0.611673683012857,
"grad_norm": 0.40117865800857544,
"learning_rate": 6.674851552312878e-05,
"loss": 2.086,
"step": 672
},
{
"epoch": 0.6125839117078166,
"grad_norm": 0.39771145582199097,
"learning_rate": 6.647658541141735e-05,
"loss": 1.977,
"step": 673
},
{
"epoch": 0.6134941404027762,
"grad_norm": 0.4230089485645294,
"learning_rate": 6.620493429182323e-05,
"loss": 2.1687,
"step": 674
},
{
"epoch": 0.6144043690977358,
"grad_norm": 0.40441179275512695,
"learning_rate": 6.593356442511015e-05,
"loss": 2.1608,
"step": 675
},
{
"epoch": 0.6153145977926954,
"grad_norm": 0.40813207626342773,
"learning_rate": 6.566247806970119e-05,
"loss": 2.0103,
"step": 676
},
{
"epoch": 0.616224826487655,
"grad_norm": 0.4276919364929199,
"learning_rate": 6.539167748165994e-05,
"loss": 2.0024,
"step": 677
},
{
"epoch": 0.6171350551826146,
"grad_norm": 0.4275762140750885,
"learning_rate": 6.512116491467185e-05,
"loss": 2.1589,
"step": 678
},
{
"epoch": 0.6180452838775743,
"grad_norm": 0.4258089065551758,
"learning_rate": 6.485094262002529e-05,
"loss": 1.9598,
"step": 679
},
{
"epoch": 0.6189555125725339,
"grad_norm": 0.436363160610199,
"learning_rate": 6.458101284659286e-05,
"loss": 2.2201,
"step": 680
},
{
"epoch": 0.6198657412674935,
"grad_norm": 0.41596871614456177,
"learning_rate": 6.431137784081282e-05,
"loss": 2.0394,
"step": 681
},
{
"epoch": 0.6207759699624531,
"grad_norm": 0.46983107924461365,
"learning_rate": 6.404203984667019e-05,
"loss": 2.0277,
"step": 682
},
{
"epoch": 0.6216861986574127,
"grad_norm": 0.4560098350048065,
"learning_rate": 6.377300110567821e-05,
"loss": 2.2369,
"step": 683
},
{
"epoch": 0.6225964273523723,
"grad_norm": 0.45281484723091125,
"learning_rate": 6.350426385685957e-05,
"loss": 2.2576,
"step": 684
},
{
"epoch": 0.6235066560473319,
"grad_norm": 0.45989713072776794,
"learning_rate": 6.323583033672799e-05,
"loss": 2.1294,
"step": 685
},
{
"epoch": 0.6244168847422915,
"grad_norm": 0.46797510981559753,
"learning_rate": 6.296770277926937e-05,
"loss": 2.0688,
"step": 686
},
{
"epoch": 0.6253271134372511,
"grad_norm": 0.5156514644622803,
"learning_rate": 6.269988341592328e-05,
"loss": 2.1114,
"step": 687
},
{
"epoch": 0.6262373421322107,
"grad_norm": 0.48389503359794617,
"learning_rate": 6.243237447556449e-05,
"loss": 2.0931,
"step": 688
},
{
"epoch": 0.6271475708271703,
"grad_norm": 0.4714515209197998,
"learning_rate": 6.216517818448423e-05,
"loss": 2.0982,
"step": 689
},
{
"epoch": 0.6280577995221299,
"grad_norm": 0.5029696226119995,
"learning_rate": 6.189829676637182e-05,
"loss": 2.2145,
"step": 690
},
{
"epoch": 0.6289680282170895,
"grad_norm": 0.49942919611930847,
"learning_rate": 6.163173244229619e-05,
"loss": 2.1344,
"step": 691
},
{
"epoch": 0.6298782569120491,
"grad_norm": 0.5015403032302856,
"learning_rate": 6.136548743068713e-05,
"loss": 2.0721,
"step": 692
},
{
"epoch": 0.6307884856070087,
"grad_norm": 0.5125733613967896,
"learning_rate": 6.109956394731722e-05,
"loss": 2.0609,
"step": 693
},
{
"epoch": 0.6316987143019683,
"grad_norm": 0.5657601952552795,
"learning_rate": 6.083396420528298e-05,
"loss": 2.4455,
"step": 694
},
{
"epoch": 0.6326089429969279,
"grad_norm": 0.5578758120536804,
"learning_rate": 6.056869041498687e-05,
"loss": 2.205,
"step": 695
},
{
"epoch": 0.6335191716918875,
"grad_norm": 0.6066897511482239,
"learning_rate": 6.030374478411847e-05,
"loss": 2.3107,
"step": 696
},
{
"epoch": 0.6344294003868471,
"grad_norm": 0.6329779028892517,
"learning_rate": 6.0039129517636435e-05,
"loss": 2.3413,
"step": 697
},
{
"epoch": 0.6353396290818069,
"grad_norm": 0.7217928767204285,
"learning_rate": 5.9774846817750105e-05,
"loss": 2.4855,
"step": 698
},
{
"epoch": 0.6362498577767665,
"grad_norm": 0.9335882067680359,
"learning_rate": 5.951089888390087e-05,
"loss": 2.7768,
"step": 699
},
{
"epoch": 0.6371600864717261,
"grad_norm": 1.5800278186798096,
"learning_rate": 5.924728791274432e-05,
"loss": 2.5227,
"step": 700
},
{
"epoch": 0.6380703151666857,
"grad_norm": 0.4945278763771057,
"learning_rate": 5.89840160981316e-05,
"loss": 2.4396,
"step": 701
},
{
"epoch": 0.6389805438616453,
"grad_norm": 0.4547106921672821,
"learning_rate": 5.872108563109131e-05,
"loss": 2.3647,
"step": 702
},
{
"epoch": 0.6398907725566049,
"grad_norm": 0.46132609248161316,
"learning_rate": 5.845849869981137e-05,
"loss": 2.5182,
"step": 703
},
{
"epoch": 0.6408010012515645,
"grad_norm": 0.41916942596435547,
"learning_rate": 5.819625748962049e-05,
"loss": 2.3084,
"step": 704
},
{
"epoch": 0.6417112299465241,
"grad_norm": 0.4376005530357361,
"learning_rate": 5.79343641829704e-05,
"loss": 2.3951,
"step": 705
},
{
"epoch": 0.6426214586414837,
"grad_norm": 0.4435478448867798,
"learning_rate": 5.7672820959417254e-05,
"loss": 2.3179,
"step": 706
},
{
"epoch": 0.6435316873364433,
"grad_norm": 0.43887507915496826,
"learning_rate": 5.741162999560386e-05,
"loss": 2.2442,
"step": 707
},
{
"epoch": 0.6444419160314029,
"grad_norm": 0.45083487033843994,
"learning_rate": 5.7150793465241346e-05,
"loss": 2.3397,
"step": 708
},
{
"epoch": 0.6453521447263625,
"grad_norm": 0.4538663625717163,
"learning_rate": 5.68903135390912e-05,
"loss": 2.1915,
"step": 709
},
{
"epoch": 0.6462623734213221,
"grad_norm": 0.41724029183387756,
"learning_rate": 5.663019238494704e-05,
"loss": 2.314,
"step": 710
},
{
"epoch": 0.6471726021162817,
"grad_norm": 0.4224849343299866,
"learning_rate": 5.637043216761678e-05,
"loss": 2.1712,
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.6480828308112413, |
|
"grad_norm": 0.4206700921058655, |
|
"learning_rate": 5.611103504890444e-05, |
|
"loss": 2.2096, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.6489930595062009, |
|
"grad_norm": 0.41635435819625854, |
|
"learning_rate": 5.5852003187592226e-05, |
|
"loss": 2.3813, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.6499032882011605, |
|
"grad_norm": 0.4309711456298828, |
|
"learning_rate": 5.559333873942259e-05, |
|
"loss": 2.3199, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.6508135168961201, |
|
"grad_norm": 0.4129570424556732, |
|
"learning_rate": 5.533504385708024e-05, |
|
"loss": 2.2384, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6517237455910797, |
|
"grad_norm": 0.40963393449783325, |
|
"learning_rate": 5.5077120690174246e-05, |
|
"loss": 2.1162, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.6526339742860394, |
|
"grad_norm": 0.4129562973976135, |
|
"learning_rate": 5.481957138522018e-05, |
|
"loss": 2.2232, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.653544202980999, |
|
"grad_norm": 0.43267586827278137, |
|
"learning_rate": 5.456239808562209e-05, |
|
"loss": 2.208, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.6544544316759586, |
|
"grad_norm": 0.4190649688243866, |
|
"learning_rate": 5.4305602931655045e-05, |
|
"loss": 2.0585, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.6553646603709182, |
|
"grad_norm": 0.41037124395370483, |
|
"learning_rate": 5.404918806044679e-05, |
|
"loss": 2.1371, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6562748890658778, |
|
"grad_norm": 0.43521812558174133, |
|
"learning_rate": 5.379315560596038e-05, |
|
"loss": 2.2227, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.6571851177608374, |
|
"grad_norm": 0.4168831408023834, |
|
"learning_rate": 5.3537507698976365e-05, |
|
"loss": 1.962, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.658095346455797, |
|
"grad_norm": 0.4054042100906372, |
|
"learning_rate": 5.328224646707479e-05, |
|
"loss": 2.0154, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.6590055751507566, |
|
"grad_norm": 0.4229868948459625, |
|
"learning_rate": 5.3027374034617785e-05, |
|
"loss": 2.1075, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.6599158038457162, |
|
"grad_norm": 0.4285801351070404, |
|
"learning_rate": 5.277289252273174e-05, |
|
"loss": 2.0953, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.6608260325406758, |
|
"grad_norm": 0.43290793895721436, |
|
"learning_rate": 5.251880404928971e-05, |
|
"loss": 2.3198, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.6617362612356354, |
|
"grad_norm": 0.4112420082092285, |
|
"learning_rate": 5.226511072889371e-05, |
|
"loss": 2.1214, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.662646489930595, |
|
"grad_norm": 0.4102923572063446, |
|
"learning_rate": 5.201181467285723e-05, |
|
"loss": 1.8327, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.6635567186255547, |
|
"grad_norm": 0.45263952016830444, |
|
"learning_rate": 5.175891798918757e-05, |
|
"loss": 2.1415, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.6644669473205143, |
|
"grad_norm": 0.4748065769672394, |
|
"learning_rate": 5.1506422782568345e-05, |
|
"loss": 2.0534, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6653771760154739, |
|
"grad_norm": 0.42939677834510803, |
|
"learning_rate": 5.125433115434197e-05, |
|
"loss": 1.8938, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.6662874047104335, |
|
"grad_norm": 0.4550830125808716, |
|
"learning_rate": 5.100264520249205e-05, |
|
"loss": 2.1615, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.6671976334053931, |
|
"grad_norm": 0.47507891058921814, |
|
"learning_rate": 5.0751367021626215e-05, |
|
"loss": 2.1034, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.6681078621003527, |
|
"grad_norm": 0.44998088479042053, |
|
"learning_rate": 5.050049870295841e-05, |
|
"loss": 1.9552, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.6690180907953123, |
|
"grad_norm": 0.4823133647441864, |
|
"learning_rate": 5.025004233429145e-05, |
|
"loss": 2.0965, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.669928319490272, |
|
"grad_norm": 0.4999094605445862, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 2.2037, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.6708385481852316, |
|
"grad_norm": 0.4802190661430359, |
|
"learning_rate": 4.9750373781012885e-05, |
|
"loss": 2.0832, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.6717487768801912, |
|
"grad_norm": 0.4969741702079773, |
|
"learning_rate": 4.950116575479586e-05, |
|
"loss": 2.0196, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.6726590055751508, |
|
"grad_norm": 0.47610992193222046, |
|
"learning_rate": 4.9252377995334444e-05, |
|
"loss": 1.9983, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.6735692342701104, |
|
"grad_norm": 0.5233409404754639, |
|
"learning_rate": 4.90040125731165e-05, |
|
"loss": 2.2088, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.67447946296507, |
|
"grad_norm": 0.5126591920852661, |
|
"learning_rate": 4.87560715551151e-05, |
|
"loss": 2.0659, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.6753896916600296, |
|
"grad_norm": 0.5209243893623352, |
|
"learning_rate": 4.85085570047713e-05, |
|
"loss": 2.2167, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.6762999203549892, |
|
"grad_norm": 0.5382196307182312, |
|
"learning_rate": 4.826147098197691e-05, |
|
"loss": 2.234, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.6772101490499488, |
|
"grad_norm": 0.5741109848022461, |
|
"learning_rate": 4.8014815543057475e-05, |
|
"loss": 2.2118, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.6781203777449084, |
|
"grad_norm": 0.577425479888916, |
|
"learning_rate": 4.776859274075506e-05, |
|
"loss": 2.2446, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.679030606439868, |
|
"grad_norm": 0.614007294178009, |
|
"learning_rate": 4.752280462421117e-05, |
|
"loss": 2.3033, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.6799408351348276, |
|
"grad_norm": 0.654388427734375, |
|
"learning_rate": 4.727745323894976e-05, |
|
"loss": 2.33, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.6808510638297872, |
|
"grad_norm": 0.7294265627861023, |
|
"learning_rate": 4.703254062686017e-05, |
|
"loss": 2.5732, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.6817612925247468, |
|
"grad_norm": 0.8145880699157715, |
|
"learning_rate": 4.678806882618003e-05, |
|
"loss": 2.4708, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.6826715212197064, |
|
"grad_norm": 1.3215488195419312, |
|
"learning_rate": 4.654403987147865e-05, |
|
"loss": 2.566, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.683581749914666, |
|
"grad_norm": 0.4625120759010315, |
|
"learning_rate": 4.630045579363957e-05, |
|
"loss": 2.4207, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.6844919786096256, |
|
"grad_norm": 0.4124356508255005, |
|
"learning_rate": 4.605731861984401e-05, |
|
"loss": 2.2606, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.6854022073045852, |
|
"grad_norm": 0.42320266366004944, |
|
"learning_rate": 4.5814630373554115e-05, |
|
"loss": 2.3107, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.6863124359995448, |
|
"grad_norm": 0.4337674379348755, |
|
"learning_rate": 4.557239307449561e-05, |
|
"loss": 2.5028, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.6872226646945045, |
|
"grad_norm": 0.44273191690444946, |
|
"learning_rate": 4.5330608738641486e-05, |
|
"loss": 2.42, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.6881328933894642, |
|
"grad_norm": 0.40541112422943115, |
|
"learning_rate": 4.508927937819499e-05, |
|
"loss": 2.1912, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.6890431220844238, |
|
"grad_norm": 0.4108361601829529, |
|
"learning_rate": 4.484840700157295e-05, |
|
"loss": 2.2871, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.6899533507793834, |
|
"grad_norm": 0.4105437994003296, |
|
"learning_rate": 4.4607993613388976e-05, |
|
"loss": 2.244, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.690863579474343, |
|
"grad_norm": 0.42231062054634094, |
|
"learning_rate": 4.436804121443689e-05, |
|
"loss": 2.3462, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.6917738081693026, |
|
"grad_norm": 0.42183414101600647, |
|
"learning_rate": 4.412855180167406e-05, |
|
"loss": 2.3269, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.6926840368642622, |
|
"grad_norm": 0.4242246448993683, |
|
"learning_rate": 4.388952736820453e-05, |
|
"loss": 2.2572, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.6935942655592218, |
|
"grad_norm": 0.41937458515167236, |
|
"learning_rate": 4.365096990326297e-05, |
|
"loss": 2.1373, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.6945044942541814, |
|
"grad_norm": 0.4179951250553131, |
|
"learning_rate": 4.3412881392197526e-05, |
|
"loss": 2.2584, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.695414722949141, |
|
"grad_norm": 0.4121822416782379, |
|
"learning_rate": 4.317526381645363e-05, |
|
"loss": 2.2389, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.6963249516441006, |
|
"grad_norm": 0.400831401348114, |
|
"learning_rate": 4.293811915355761e-05, |
|
"loss": 2.1734, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.6972351803390602, |
|
"grad_norm": 0.426580011844635, |
|
"learning_rate": 4.270144937709981e-05, |
|
"loss": 2.1537, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.6981454090340198, |
|
"grad_norm": 0.4221407175064087, |
|
"learning_rate": 4.2465256456718615e-05, |
|
"loss": 2.1184, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.6990556377289794, |
|
"grad_norm": 0.3915541172027588, |
|
"learning_rate": 4.222954235808378e-05, |
|
"loss": 2.0476, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.699965866423939, |
|
"grad_norm": 0.39140835404396057, |
|
"learning_rate": 4.19943090428802e-05, |
|
"loss": 1.9244, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.7008760951188986, |
|
"grad_norm": 0.4446852505207062, |
|
"learning_rate": 4.175955846879151e-05, |
|
"loss": 2.1622, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.7017863238138582, |
|
"grad_norm": 0.4065980911254883, |
|
"learning_rate": 4.1525292589483843e-05, |
|
"loss": 1.9522, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.7026965525088178, |
|
"grad_norm": 0.41551730036735535, |
|
"learning_rate": 4.129151335458957e-05, |
|
"loss": 1.9784, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.7036067812037774, |
|
"grad_norm": 0.4159603416919708, |
|
"learning_rate": 4.105822270969102e-05, |
|
"loss": 2.0386, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.7045170098987371, |
|
"grad_norm": 0.4357737600803375, |
|
"learning_rate": 4.0825422596304396e-05, |
|
"loss": 2.1812, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.7054272385936967, |
|
"grad_norm": 0.43295785784721375, |
|
"learning_rate": 4.059311495186338e-05, |
|
"loss": 2.1118, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.7063374672886563, |
|
"grad_norm": 0.4281920790672302, |
|
"learning_rate": 4.036130170970341e-05, |
|
"loss": 2.1594, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.7072476959836159, |
|
"grad_norm": 0.45081713795661926, |
|
"learning_rate": 4.012998479904525e-05, |
|
"loss": 2.2, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.7081579246785755, |
|
"grad_norm": 0.4478646218776703, |
|
"learning_rate": 3.9899166144978904e-05, |
|
"loss": 2.1344, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.7090681533735351, |
|
"grad_norm": 0.44155117869377136, |
|
"learning_rate": 3.966884766844803e-05, |
|
"loss": 2.1462, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.7099783820684947, |
|
"grad_norm": 0.4430200755596161, |
|
"learning_rate": 3.943903128623335e-05, |
|
"loss": 2.0047, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.7108886107634543, |
|
"grad_norm": 0.43685182929039, |
|
"learning_rate": 3.920971891093718e-05, |
|
"loss": 1.9843, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.7117988394584139, |
|
"grad_norm": 0.44161438941955566, |
|
"learning_rate": 3.8980912450967366e-05, |
|
"loss": 2.02, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.7127090681533735, |
|
"grad_norm": 0.45051446557044983, |
|
"learning_rate": 3.875261381052121e-05, |
|
"loss": 2.0351, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.7136192968483331, |
|
"grad_norm": 0.47110506892204285, |
|
"learning_rate": 3.852482488956992e-05, |
|
"loss": 2.0375, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.7145295255432927, |
|
"grad_norm": 0.49640730023384094, |
|
"learning_rate": 3.829754758384262e-05, |
|
"loss": 2.301, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.7154397542382523, |
|
"grad_norm": 0.5000788569450378, |
|
"learning_rate": 3.807078378481059e-05, |
|
"loss": 2.3439, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.716349982933212, |
|
"grad_norm": 0.4984182119369507, |
|
"learning_rate": 3.784453537967161e-05, |
|
"loss": 2.1625, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.7172602116281716, |
|
"grad_norm": 0.4947352409362793, |
|
"learning_rate": 3.761880425133413e-05, |
|
"loss": 2.1349, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.7181704403231312, |
|
"grad_norm": 0.48531001806259155, |
|
"learning_rate": 3.7393592278401704e-05, |
|
"loss": 2.0905, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.7190806690180908, |
|
"grad_norm": 0.5409421324729919, |
|
"learning_rate": 3.7168901335157315e-05, |
|
"loss": 2.4218, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.7199908977130504, |
|
"grad_norm": 0.5206452012062073, |
|
"learning_rate": 3.694473329154778e-05, |
|
"loss": 1.98, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.72090112640801, |
|
"grad_norm": 0.5477956533432007, |
|
"learning_rate": 3.672109001316809e-05, |
|
"loss": 2.4692, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.7218113551029697, |
|
"grad_norm": 0.5402776002883911, |
|
"learning_rate": 3.649797336124615e-05, |
|
"loss": 2.0121, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.7227215837979293, |
|
"grad_norm": 0.529184877872467, |
|
"learning_rate": 3.6275385192627056e-05, |
|
"loss": 2.1043, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.7236318124928889, |
|
"grad_norm": 0.5657643675804138, |
|
"learning_rate": 3.6053327359757535e-05, |
|
"loss": 2.1002, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.7245420411878485, |
|
"grad_norm": 0.6207425594329834, |
|
"learning_rate": 3.583180171067101e-05, |
|
"loss": 2.3302, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.7254522698828081, |
|
"grad_norm": 0.6927235722541809, |
|
"learning_rate": 3.5610810088971625e-05, |
|
"loss": 2.446, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.7263624985777677, |
|
"grad_norm": 0.7733549475669861, |
|
"learning_rate": 3.5390354333819344e-05, |
|
"loss": 2.6638, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.9281876087188721, |
|
"learning_rate": 3.517043627991441e-05, |
|
"loss": 2.5248, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.7281829559676869, |
|
"grad_norm": 1.9464212656021118, |
|
"learning_rate": 3.4951057757482205e-05, |
|
"loss": 2.603, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.7290931846626465, |
|
"grad_norm": 0.42626360058784485, |
|
"learning_rate": 3.4732220592257946e-05, |
|
"loss": 2.5094, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.7300034133576061, |
|
"grad_norm": 0.42765355110168457, |
|
"learning_rate": 3.45139266054715e-05, |
|
"loss": 2.172, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.7309136420525657, |
|
"grad_norm": 0.42991819977760315, |
|
"learning_rate": 3.429617761383222e-05, |
|
"loss": 2.2523, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.7318238707475253, |
|
"grad_norm": 0.4252484142780304, |
|
"learning_rate": 3.40789754295139e-05, |
|
"loss": 2.4049, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.7327340994424849, |
|
"grad_norm": 0.4209078252315521, |
|
"learning_rate": 3.3862321860139576e-05, |
|
"loss": 2.4249, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.7336443281374445, |
|
"grad_norm": 0.4354207515716553, |
|
"learning_rate": 3.364621870876659e-05, |
|
"loss": 2.4072, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.7345545568324041, |
|
"grad_norm": 0.4264775514602661, |
|
"learning_rate": 3.343066777387148e-05, |
|
"loss": 2.3709, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.7354647855273637, |
|
"grad_norm": 0.41826266050338745, |
|
"learning_rate": 3.3215670849335155e-05, |
|
"loss": 2.2593, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.7363750142223233, |
|
"grad_norm": 0.4268350899219513, |
|
"learning_rate": 3.300122972442773e-05, |
|
"loss": 2.3383, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.7372852429172829, |
|
"grad_norm": 0.40746137499809265, |
|
"learning_rate": 3.278734618379402e-05, |
|
"loss": 2.1898, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.7381954716122425, |
|
"grad_norm": 0.4189973771572113, |
|
"learning_rate": 3.257402200743821e-05, |
|
"loss": 2.3329, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.7391057003072021, |
|
"grad_norm": 0.44023388624191284, |
|
"learning_rate": 3.2361258970709397e-05, |
|
"loss": 2.3947, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.7400159290021618, |
|
"grad_norm": 0.41705650091171265, |
|
"learning_rate": 3.21490588442868e-05, |
|
"loss": 2.261, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.7409261576971214, |
|
"grad_norm": 0.431820273399353, |
|
"learning_rate": 3.19374233941647e-05, |
|
"loss": 2.4248, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.741836386392081, |
|
"grad_norm": 0.4188806414604187, |
|
"learning_rate": 3.172635438163816e-05, |
|
"loss": 2.2794, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.7427466150870407, |
|
"grad_norm": 0.41158682107925415, |
|
"learning_rate": 3.1515853563288076e-05, |
|
"loss": 2.1274, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.7436568437820003, |
|
"grad_norm": 0.4038848876953125, |
|
"learning_rate": 3.130592269096671e-05, |
|
"loss": 2.0359, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.7445670724769599, |
|
"grad_norm": 0.4189314842224121, |
|
"learning_rate": 3.1096563511783014e-05, |
|
"loss": 2.1405, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.7454773011719195, |
|
"grad_norm": 0.39889493584632874, |
|
"learning_rate": 3.08877777680882e-05, |
|
"loss": 2.0187, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.7463875298668791, |
|
"grad_norm": 0.38899531960487366, |
|
"learning_rate": 3.0679567197461134e-05, |
|
"loss": 2.0957, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.7472977585618387, |
|
"grad_norm": 0.41540107131004333, |
|
"learning_rate": 3.047193353269382e-05, |
|
"loss": 2.1757, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.7482079872567983, |
|
"grad_norm": 0.41734689474105835, |
|
"learning_rate": 3.0264878501777306e-05, |
|
"loss": 2.0902, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.7491182159517579, |
|
"grad_norm": 0.44342851638793945, |
|
"learning_rate": 3.005840382788685e-05, |
|
"loss": 2.087, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.7500284446467175, |
|
"grad_norm": 0.430385559797287, |
|
"learning_rate": 2.9852511229367865e-05, |
|
"loss": 2.1539, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.7509386733416771, |
|
"grad_norm": 0.4446250796318054, |
|
"learning_rate": 2.9647202419721687e-05, |
|
"loss": 2.2294, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7509386733416771, |
|
"eval_loss": 2.2184133529663086, |
|
"eval_runtime": 204.0107, |
|
"eval_samples_per_second": 9.073, |
|
"eval_steps_per_second": 4.539, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7518489020366367, |
|
"grad_norm": 0.4406190812587738, |
|
"learning_rate": 2.944247910759097e-05, |
|
"loss": 2.163, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.7527591307315963, |
|
"grad_norm": 0.43820223212242126, |
|
"learning_rate": 2.9238342996745817e-05, |
|
"loss": 2.1481, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.7536693594265559, |
|
"grad_norm": 0.4559546709060669, |
|
"learning_rate": 2.9034795786069436e-05, |
|
"loss": 2.1513, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.7545795881215155, |
|
"grad_norm": 0.41954734921455383, |
|
"learning_rate": 2.8831839169543996e-05, |
|
"loss": 1.9627, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.7554898168164751, |
|
"grad_norm": 0.4652347266674042, |
|
"learning_rate": 2.862947483623659e-05, |
|
"loss": 2.1275, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7564000455114347, |
|
"grad_norm": 0.454685777425766, |
|
"learning_rate": 2.8427704470285144e-05, |
|
"loss": 2.1943, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.7573102742063944, |
|
"grad_norm": 0.45541757345199585, |
|
"learning_rate": 2.8226529750884402e-05, |
|
"loss": 2.0786, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.758220502901354, |
|
"grad_norm": 0.46581897139549255, |
|
"learning_rate": 2.8025952352271958e-05, |
|
"loss": 2.1656, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.7591307315963136, |
|
"grad_norm": 0.47137272357940674, |
|
"learning_rate": 2.7825973943714335e-05, |
|
"loss": 2.1557, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.7600409602912732, |
|
"grad_norm": 0.462990403175354, |
|
"learning_rate": 2.7626596189492983e-05, |
|
"loss": 2.0477, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7609511889862328, |
|
"grad_norm": 0.4760456085205078, |
|
"learning_rate": 2.7427820748890685e-05, |
|
"loss": 2.1504, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.7618614176811924, |
|
"grad_norm": 0.46250203251838684, |
|
"learning_rate": 2.7229649276177503e-05, |
|
"loss": 2.1027, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.762771646376152, |
|
"grad_norm": 0.48775410652160645, |
|
"learning_rate": 2.7032083420597e-05, |
|
"loss": 2.2122, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.7636818750711116, |
|
"grad_norm": 0.4987994432449341, |
|
"learning_rate": 2.683512482635281e-05, |
|
"loss": 2.1812, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.7645921037660712, |
|
"grad_norm": 0.5075133442878723, |
|
"learning_rate": 2.6638775132594553e-05, |
|
"loss": 2.2842, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7655023324610308, |
|
"grad_norm": 0.49660152196884155, |
|
"learning_rate": 2.6443035973404496e-05, |
|
"loss": 2.1107, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.7664125611559904, |
|
"grad_norm": 0.5171281695365906, |
|
"learning_rate": 2.624790897778391e-05, |
|
"loss": 2.1871, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.76732278985095, |
|
"grad_norm": 0.5493224859237671, |
|
"learning_rate": 2.605339576963929e-05, |
|
"loss": 2.3867, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.7682330185459096, |
|
"grad_norm": 0.534160315990448, |
|
"learning_rate": 2.585949796776912e-05, |
|
"loss": 2.255, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.7691432472408692, |
|
"grad_norm": 0.5723463892936707, |
|
"learning_rate": 2.5666217185850262e-05, |
|
"loss": 2.321, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.7700534759358288, |
|
"grad_norm": 0.5741109848022461, |
|
"learning_rate": 2.5473555032424533e-05, |
|
"loss": 2.1497, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.7709637046307884, |
|
"grad_norm": 0.6545650959014893, |
|
"learning_rate": 2.528151311088537e-05, |
|
"loss": 2.3931, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.771873933325748, |
|
"grad_norm": 0.756486713886261, |
|
"learning_rate": 2.50900930194644e-05, |
|
"loss": 2.7378, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.7727841620207077, |
|
"grad_norm": 0.8643143773078918, |
|
"learning_rate": 2.4899296351218227e-05, |
|
"loss": 2.3271, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.7736943907156673, |
|
"grad_norm": 1.6659263372421265, |
|
"learning_rate": 2.4709124694015116e-05, |
|
"loss": 2.4264, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.774604619410627, |
|
"grad_norm": 0.4279872477054596, |
|
"learning_rate": 2.451957963052185e-05, |
|
"loss": 2.43, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.7755148481055866, |
|
"grad_norm": 0.41057080030441284, |
|
"learning_rate": 2.433066273819037e-05, |
|
"loss": 2.3079, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.7764250768005462, |
|
"grad_norm": 0.4251214563846588, |
|
"learning_rate": 2.4142375589244957e-05, |
|
"loss": 2.3776, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.7773353054955058, |
|
"grad_norm": 0.41808971762657166, |
|
"learning_rate": 2.3954719750668907e-05, |
|
"loss": 2.1376, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.7782455341904654, |
|
"grad_norm": 0.408611923456192, |
|
"learning_rate": 2.3767696784191463e-05, |
|
"loss": 2.2524, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.779155762885425, |
|
"grad_norm": 0.42068326473236084, |
|
"learning_rate": 2.3581308246275103e-05, |
|
"loss": 2.3363, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.7800659915803846, |
|
"grad_norm": 0.4118780493736267, |
|
"learning_rate": 2.339555568810221e-05, |
|
"loss": 2.1718, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.7809762202753442, |
|
"grad_norm": 0.42966723442077637, |
|
"learning_rate": 2.321044065556246e-05, |
|
"loss": 2.4884, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.7818864489703038, |
|
"grad_norm": 0.42242035269737244, |
|
"learning_rate": 2.302596468923981e-05, |
|
"loss": 2.3057, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.7827966776652634, |
|
"grad_norm": 0.41452938318252563, |
|
"learning_rate": 2.284212932439972e-05, |
|
"loss": 2.3253, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.783706906360223, |
|
"grad_norm": 0.39578771591186523, |
|
"learning_rate": 2.265893609097637e-05, |
|
"loss": 2.0894, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.7846171350551826, |
|
"grad_norm": 0.42091935873031616, |
|
"learning_rate": 2.247638651355991e-05, |
|
"loss": 2.3383, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.7855273637501422, |
|
"grad_norm": 0.4203489124774933, |
|
"learning_rate": 2.229448211138382e-05, |
|
"loss": 2.3549, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.7864375924451018, |
|
"grad_norm": 0.4158013164997101, |
|
"learning_rate": 2.211322439831218e-05, |
|
"loss": 2.1964, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.7873478211400614, |
|
"grad_norm": 0.4121032655239105, |
|
"learning_rate": 2.1932614882827197e-05, |
|
"loss": 2.2676, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.788258049835021, |
|
"grad_norm": 0.4054934084415436, |
|
"learning_rate": 2.1752655068016515e-05, |
|
"loss": 2.119, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.7891682785299806, |
|
"grad_norm": 0.4000680148601532, |
|
"learning_rate": 2.1573346451560794e-05, |
|
"loss": 2.0798, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.7900785072249402, |
|
"grad_norm": 0.4024758040904999, |
|
"learning_rate": 2.139469052572127e-05, |
|
"loss": 1.9786, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.7909887359198998, |
|
"grad_norm": 0.422947496175766, |
|
"learning_rate": 2.1216688777327154e-05, |
|
"loss": 2.077, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.7918989646148595, |
|
"grad_norm": 0.4274744689464569, |
|
"learning_rate": 2.1039342687763586e-05, |
|
"loss": 2.2014, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7928091933098191, |
|
"grad_norm": 0.4078015387058258, |
|
"learning_rate": 2.0862653732958915e-05, |
|
"loss": 1.9707, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.7937194220047787, |
|
"grad_norm": 0.4366302788257599, |
|
"learning_rate": 2.0686623383372715e-05, |
|
"loss": 2.1645, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.7946296506997383, |
|
"grad_norm": 0.4288860559463501, |
|
"learning_rate": 2.051125310398353e-05, |
|
"loss": 2.0476, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.795539879394698, |
|
"grad_norm": 0.4501003623008728, |
|
"learning_rate": 2.03365443542764e-05, |
|
"loss": 2.1982, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.7964501080896575, |
|
"grad_norm": 0.4465639293193817, |
|
"learning_rate": 2.016249858823106e-05, |
|
"loss": 2.0277, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7973603367846172, |
|
"grad_norm": 0.45942404866218567, |
|
"learning_rate": 1.998911725430963e-05, |
|
"loss": 2.1634, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.7982705654795768, |
|
"grad_norm": 0.4460104703903198, |
|
"learning_rate": 1.981640179544466e-05, |
|
"loss": 2.3188, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.7991807941745364, |
|
"grad_norm": 0.4495980143547058, |
|
"learning_rate": 1.964435364902705e-05, |
|
"loss": 2.1365, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.800091022869496, |
|
"grad_norm": 0.4429667294025421, |
|
"learning_rate": 1.947297424689414e-05, |
|
"loss": 2.1187, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.8010012515644556, |
|
"grad_norm": 0.4700337052345276, |
|
"learning_rate": 1.93022650153178e-05, |
|
"loss": 2.0205, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.8019114802594152, |
|
"grad_norm": 0.45045018196105957, |
|
"learning_rate": 1.913222737499243e-05, |
|
"loss": 2.0114, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.8028217089543748, |
|
"grad_norm": 0.441072016954422, |
|
"learning_rate": 1.8962862741023423e-05, |
|
"loss": 1.9489, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.8037319376493344, |
|
"grad_norm": 0.4684150516986847, |
|
"learning_rate": 1.879417252291502e-05, |
|
"loss": 2.1999, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.804642166344294, |
|
"grad_norm": 0.4659992754459381, |
|
"learning_rate": 1.8626158124558858e-05, |
|
"loss": 2.1043, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.8055523950392536, |
|
"grad_norm": 0.49253416061401367, |
|
"learning_rate": 1.8458820944222255e-05, |
|
"loss": 2.2101, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.8064626237342132, |
|
"grad_norm": 0.4710872769355774, |
|
"learning_rate": 1.829216237453637e-05, |
|
"loss": 2.1022, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.8073728524291728, |
|
"grad_norm": 0.4951106309890747, |
|
"learning_rate": 1.8126183802484865e-05, |
|
"loss": 2.2409, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.8082830811241324, |
|
"grad_norm": 0.48583388328552246, |
|
"learning_rate": 1.7960886609392214e-05, |
|
"loss": 2.0319, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.8091933098190921, |
|
"grad_norm": 0.4637562930583954, |
|
"learning_rate": 1.7796272170912253e-05, |
|
"loss": 1.8175, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.8101035385140517, |
|
"grad_norm": 0.5015937089920044, |
|
"learning_rate": 1.763234185701673e-05, |
|
"loss": 2.2035, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.8110137672090113, |
|
"grad_norm": 0.5073252320289612, |
|
"learning_rate": 1.7469097031983893e-05, |
|
"loss": 2.0869, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.8119239959039709, |
|
"grad_norm": 0.5104179978370667, |
|
"learning_rate": 1.730653905438714e-05, |
|
"loss": 2.0663, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.8128342245989305, |
|
"grad_norm": 0.5239804983139038, |
|
"learning_rate": 1.7144669277083712e-05, |
|
"loss": 2.1529, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.8137444532938901, |
|
"grad_norm": 0.5428870320320129, |
|
"learning_rate": 1.6983489047203483e-05, |
|
"loss": 2.0934, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.8146546819888497, |
|
"grad_norm": 0.5535885691642761, |
|
"learning_rate": 1.6822999706137567e-05, |
|
"loss": 2.0925, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.8155649106838093, |
|
"grad_norm": 0.6293628811836243, |
|
"learning_rate": 1.6663202589527473e-05, |
|
"loss": 2.3585, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.8164751393787689, |
|
"grad_norm": 0.7003004550933838, |
|
"learning_rate": 1.6504099027253706e-05, |
|
"loss": 2.4135, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.8173853680737285, |
|
"grad_norm": 0.7463914155960083, |
|
"learning_rate": 1.634569034342476e-05, |
|
"loss": 2.578, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.8182955967686881, |
|
"grad_norm": 0.8569872975349426, |
|
"learning_rate": 1.6187977856366253e-05, |
|
"loss": 2.5576, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.8192058254636477, |
|
"grad_norm": 1.334454894065857, |
|
"learning_rate": 1.6030962878609725e-05, |
|
"loss": 2.4123, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.8201160541586073, |
|
"grad_norm": 0.4524904489517212, |
|
"learning_rate": 1.587464671688187e-05, |
|
"loss": 2.4774, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.8210262828535669, |
|
"grad_norm": 0.42647942900657654, |
|
"learning_rate": 1.5719030672093717e-05, |
|
"loss": 2.3709, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.8219365115485265, |
|
"grad_norm": 0.41805699467658997, |
|
"learning_rate": 1.5564116039329545e-05, |
|
"loss": 2.2022, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.8228467402434861, |
|
"grad_norm": 0.4297042489051819, |
|
"learning_rate": 1.5409904107836358e-05, |
|
"loss": 2.2815, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.8237569689384457, |
|
"grad_norm": 0.4077490568161011, |
|
"learning_rate": 1.5256396161013075e-05, |
|
"loss": 2.332, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.8246671976334053, |
|
"grad_norm": 0.42278701066970825, |
|
"learning_rate": 1.5103593476399791e-05, |
|
"loss": 2.3218, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.825577426328365, |
|
"grad_norm": 0.4234485626220703, |
|
"learning_rate": 1.495149732566723e-05, |
|
"loss": 2.2363, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.8264876550233247, |
|
"grad_norm": 0.410879909992218, |
|
"learning_rate": 1.4800108974606119e-05, |
|
"loss": 2.2881, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.8273978837182843, |
|
"grad_norm": 0.4263591170310974, |
|
"learning_rate": 1.4649429683116644e-05, |
|
"loss": 2.1476, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.8283081124132439, |
|
"grad_norm": 0.4317147433757782, |
|
"learning_rate": 1.4499460705197998e-05, |
|
"loss": 2.2372, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8292183411082035, |
|
"grad_norm": 0.423459529876709, |
|
"learning_rate": 1.4350203288937936e-05, |
|
"loss": 2.3593, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.8301285698031631, |
|
"grad_norm": 0.40977466106414795, |
|
"learning_rate": 1.4201658676502294e-05, |
|
"loss": 2.1842, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.8310387984981227, |
|
"grad_norm": 0.41937538981437683, |
|
"learning_rate": 1.4053828104124867e-05, |
|
"loss": 2.251, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.8319490271930823, |
|
"grad_norm": 0.407067209482193, |
|
"learning_rate": 1.3906712802096933e-05, |
|
"loss": 2.027, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.8328592558880419, |
|
"grad_norm": 0.42164286971092224, |
|
"learning_rate": 1.3760313994757001e-05, |
|
"loss": 2.2382, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.8337694845830015, |
|
"grad_norm": 0.41477370262145996, |
|
"learning_rate": 1.361463290048085e-05, |
|
"loss": 2.0209, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.8346797132779611, |
|
"grad_norm": 0.39464712142944336, |
|
"learning_rate": 1.3469670731671046e-05, |
|
"loss": 2.063, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.8355899419729207, |
|
"grad_norm": 0.4041547179222107, |
|
"learning_rate": 1.3325428694747177e-05, |
|
"loss": 2.0052, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.8365001706678803, |
|
"grad_norm": 0.40053966641426086, |
|
"learning_rate": 1.3181907990135622e-05, |
|
"loss": 2.0691, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.8374103993628399, |
|
"grad_norm": 0.4019843637943268, |
|
"learning_rate": 1.3039109812259598e-05, |
|
"loss": 2.0359, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.8383206280577995, |
|
"grad_norm": 0.42373621463775635, |
|
"learning_rate": 1.2897035349529263e-05, |
|
"loss": 2.0603, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.8392308567527591, |
|
"grad_norm": 0.42414146661758423, |
|
"learning_rate": 1.2755685784331783e-05, |
|
"loss": 2.0417, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.8401410854477187, |
|
"grad_norm": 0.42916223406791687, |
|
"learning_rate": 1.2615062293021507e-05, |
|
"loss": 2.0517, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.8410513141426783, |
|
"grad_norm": 0.44549453258514404, |
|
"learning_rate": 1.2475166045910159e-05, |
|
"loss": 2.2541, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.8419615428376379, |
|
"grad_norm": 0.4355100393295288, |
|
"learning_rate": 1.2335998207257137e-05, |
|
"loss": 2.1341, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.8428717715325975, |
|
"grad_norm": 0.447841078042984, |
|
"learning_rate": 1.2197559935259795e-05, |
|
"loss": 2.2051, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.8437820002275572, |
|
"grad_norm": 0.4368191659450531, |
|
"learning_rate": 1.20598523820438e-05, |
|
"loss": 1.8763, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.8446922289225168, |
|
"grad_norm": 0.4553552269935608, |
|
"learning_rate": 1.1922876693653585e-05, |
|
"loss": 2.0406, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.8456024576174764, |
|
"grad_norm": 0.4466502070426941, |
|
"learning_rate": 1.1786634010042719e-05, |
|
"loss": 1.9599, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.846512686312436, |
|
"grad_norm": 0.43716827034950256, |
|
"learning_rate": 1.1651125465064516e-05, |
|
"loss": 2.0089, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.8474229150073956, |
|
"grad_norm": 0.4619704782962799, |
|
"learning_rate": 1.1516352186462586e-05, |
|
"loss": 2.0697, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.8483331437023552, |
|
"grad_norm": 0.44224247336387634, |
|
"learning_rate": 1.13823152958614e-05, |
|
"loss": 1.8982, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.8492433723973148, |
|
"grad_norm": 0.45818379521369934, |
|
"learning_rate": 1.1249015908756998e-05, |
|
"loss": 1.9608, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.8501536010922744, |
|
"grad_norm": 0.4756350517272949, |
|
"learning_rate": 1.1116455134507664e-05, |
|
"loss": 2.0776, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.851063829787234, |
|
"grad_norm": 0.46378758549690247, |
|
"learning_rate": 1.098463407632474e-05, |
|
"loss": 2.0733, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.8519740584821937, |
|
"grad_norm": 0.4653812050819397, |
|
"learning_rate": 1.0853553831263418e-05, |
|
"loss": 2.0823, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.8528842871771533, |
|
"grad_norm": 0.4805110991001129, |
|
"learning_rate": 1.0723215490213634e-05, |
|
"loss": 2.1113, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.8537945158721129, |
|
"grad_norm": 0.49554312229156494, |
|
"learning_rate": 1.0593620137890948e-05, |
|
"loss": 2.2246, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.8547047445670725, |
|
"grad_norm": 0.5131683349609375, |
|
"learning_rate": 1.0464768852827545e-05, |
|
"loss": 2.1736, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.8556149732620321, |
|
"grad_norm": 0.5109636187553406, |
|
"learning_rate": 1.0336662707363287e-05, |
|
"loss": 2.1027, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8565252019569917, |
|
"grad_norm": 0.5181056261062622, |
|
"learning_rate": 1.0209302767636664e-05, |
|
"loss": 2.2091, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.8574354306519513, |
|
"grad_norm": 0.542943000793457, |
|
"learning_rate": 1.0082690093576163e-05, |
|
"loss": 2.2451, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.8583456593469109, |
|
"grad_norm": 0.5382577180862427, |
|
"learning_rate": 9.95682573889114e-06, |
|
"loss": 2.239, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.8592558880418705, |
|
"grad_norm": 0.5316376686096191, |
|
"learning_rate": 9.831710751063283e-06, |
|
"loss": 1.978, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.8601661167368301, |
|
"grad_norm": 0.5699295997619629, |
|
"learning_rate": 9.707346171337894e-06, |
|
"loss": 2.293, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8610763454317898, |
|
"grad_norm": 0.6195168495178223, |
|
"learning_rate": 9.583733034714981e-06, |
|
"loss": 2.3735, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.8619865741267494, |
|
"grad_norm": 0.6604196429252625, |
|
"learning_rate": 9.460872369940955e-06, |
|
"loss": 2.4664, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.862896802821709, |
|
"grad_norm": 0.7245504856109619, |
|
"learning_rate": 9.338765199499854e-06, |
|
"loss": 2.4597, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.8638070315166686, |
|
"grad_norm": 0.8482882976531982, |
|
"learning_rate": 9.217412539604942e-06, |
|
"loss": 2.6484, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.8647172602116282, |
|
"grad_norm": 1.452289342880249, |
|
"learning_rate": 9.096815400190172e-06, |
|
"loss": 2.4219, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8656274889065878, |
|
"grad_norm": 0.43866923451423645, |
|
"learning_rate": 8.97697478490188e-06, |
|
"loss": 2.5448, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.8665377176015474, |
|
"grad_norm": 0.4115692377090454, |
|
"learning_rate": 8.857891691090337e-06, |
|
"loss": 2.3454, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.867447946296507, |
|
"grad_norm": 0.4357510507106781, |
|
"learning_rate": 8.739567109801494e-06, |
|
"loss": 2.3642, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.8683581749914666, |
|
"grad_norm": 0.4002648890018463, |
|
"learning_rate": 8.62200202576875e-06, |
|
"loss": 2.2382, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.8692684036864262, |
|
"grad_norm": 0.41515567898750305, |
|
"learning_rate": 8.505197417404687e-06, |
|
"loss": 2.1776, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8701786323813858, |
|
"grad_norm": 0.43014460802078247, |
|
"learning_rate": 8.38915425679304e-06, |
|
"loss": 2.4619, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.8710888610763454, |
|
"grad_norm": 0.42643028497695923, |
|
"learning_rate": 8.273873509680519e-06, |
|
"loss": 2.43, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.871999089771305, |
|
"grad_norm": 0.43487077951431274, |
|
"learning_rate": 8.15935613546872e-06, |
|
"loss": 2.3027, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.8729093184662646, |
|
"grad_norm": 0.42600685358047485, |
|
"learning_rate": 8.045603087206388e-06, |
|
"loss": 2.2503, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.8738195471612242, |
|
"grad_norm": 0.41962459683418274, |
|
"learning_rate": 7.932615311581126e-06, |
|
"loss": 2.2857, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8747297758561838, |
|
"grad_norm": 0.4238678812980652, |
|
"learning_rate": 7.820393748911791e-06, |
|
"loss": 2.2728, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.8756400045511434, |
|
"grad_norm": 0.40477514266967773, |
|
"learning_rate": 7.708939333140642e-06, |
|
"loss": 2.2017, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.876550233246103, |
|
"grad_norm": 0.4259748160839081, |
|
"learning_rate": 7.598252991825372e-06, |
|
"loss": 2.1998, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.8774604619410626, |
|
"grad_norm": 0.4023091793060303, |
|
"learning_rate": 7.488335646131628e-06, |
|
"loss": 2.1225, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.8783706906360224, |
|
"grad_norm": 0.4074653387069702, |
|
"learning_rate": 7.3791882108251945e-06, |
|
"loss": 2.0988, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.879280919330982, |
|
"grad_norm": 0.40571245551109314, |
|
"learning_rate": 7.270811594264437e-06, |
|
"loss": 2.1729, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.8801911480259416, |
|
"grad_norm": 0.3894023895263672, |
|
"learning_rate": 7.163206698392744e-06, |
|
"loss": 2.0464, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.8811013767209012, |
|
"grad_norm": 0.40034767985343933, |
|
"learning_rate": 7.056374418730971e-06, |
|
"loss": 2.1144, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.8820116054158608, |
|
"grad_norm": 0.3878059387207031, |
|
"learning_rate": 6.950315644370075e-06, |
|
"loss": 1.8818, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.8829218341108204, |
|
"grad_norm": 0.38767677545547485, |
|
"learning_rate": 6.845031257963619e-06, |
|
"loss": 2.0166, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.88383206280578, |
|
"grad_norm": 0.40807151794433594, |
|
"learning_rate": 6.740522135720517e-06, |
|
"loss": 2.2045, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.8847422915007396, |
|
"grad_norm": 0.41804906725883484, |
|
"learning_rate": 6.636789147397637e-06, |
|
"loss": 2.1742, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.8856525201956992, |
|
"grad_norm": 0.420526921749115, |
|
"learning_rate": 6.533833156292679e-06, |
|
"loss": 1.9607, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.8865627488906588, |
|
"grad_norm": 0.42398571968078613, |
|
"learning_rate": 6.431655019236948e-06, |
|
"loss": 2.1077, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.8874729775856184, |
|
"grad_norm": 0.43108931183815, |
|
"learning_rate": 6.3302555865880965e-06, |
|
"loss": 2.0987, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.888383206280578, |
|
"grad_norm": 0.4478430151939392, |
|
"learning_rate": 6.229635702223324e-06, |
|
"loss": 2.2323, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.8892934349755376, |
|
"grad_norm": 0.4497049152851105, |
|
"learning_rate": 6.129796203532057e-06, |
|
"loss": 2.2498, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.8902036636704972, |
|
"grad_norm": 0.45716363191604614, |
|
"learning_rate": 6.030737921409169e-06, |
|
"loss": 2.1167, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.8911138923654568, |
|
"grad_norm": 0.4461272060871124, |
|
"learning_rate": 5.932461680248014e-06, |
|
"loss": 2.0598, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.8920241210604164, |
|
"grad_norm": 0.41892901062965393, |
|
"learning_rate": 5.834968297933541e-06, |
|
"loss": 2.0647, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.892934349755376, |
|
"grad_norm": 0.43655654788017273, |
|
"learning_rate": 5.738258585835532e-06, |
|
"loss": 1.9849, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.8938445784503356, |
|
"grad_norm": 0.45647647976875305, |
|
"learning_rate": 5.6423333488018095e-06, |
|
"loss": 2.1166, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.8947548071452952, |
|
"grad_norm": 0.46431490778923035, |
|
"learning_rate": 5.547193385151561e-06, |
|
"loss": 2.0456, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.8956650358402549, |
|
"grad_norm": 0.45279815793037415, |
|
"learning_rate": 5.45283948666866e-06, |
|
"loss": 2.0964, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.8965752645352145, |
|
"grad_norm": 0.47188904881477356, |
|
"learning_rate": 5.359272438595153e-06, |
|
"loss": 2.1394, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.8974854932301741, |
|
"grad_norm": 0.47204941511154175, |
|
"learning_rate": 5.266493019624663e-06, |
|
"loss": 2.0513, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.8983957219251337, |
|
"grad_norm": 0.46656250953674316, |
|
"learning_rate": 5.1745020018958866e-06, |
|
"loss": 1.9721, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.8993059506200933, |
|
"grad_norm": 0.47728633880615234, |
|
"learning_rate": 5.083300150986259e-06, |
|
"loss": 2.0223, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.9002161793150529, |
|
"grad_norm": 0.5116147398948669, |
|
"learning_rate": 4.992888225905468e-06, |
|
"loss": 2.108, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.9011264080100125, |
|
"grad_norm": 0.5168989300727844, |
|
"learning_rate": 4.903266979089249e-06, |
|
"loss": 2.1711, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.9020366367049721, |
|
"grad_norm": 0.5194457769393921, |
|
"learning_rate": 4.8144371563930476e-06, |
|
"loss": 2.2852, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.9029468653999317, |
|
"grad_norm": 0.5500402450561523, |
|
"learning_rate": 4.726399497085832e-06, |
|
"loss": 2.2758, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.9038570940948913, |
|
"grad_norm": 0.5453543066978455, |
|
"learning_rate": 4.6391547338439536e-06, |
|
"loss": 2.1761, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.904767322789851, |
|
"grad_norm": 0.5767526030540466, |
|
"learning_rate": 4.552703592745033e-06, |
|
"loss": 2.2521, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.9056775514848106, |
|
"grad_norm": 0.5596179962158203, |
|
"learning_rate": 4.467046793261931e-06, |
|
"loss": 2.2413, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.9065877801797702, |
|
"grad_norm": 0.610089123249054, |
|
"learning_rate": 4.3821850482567595e-06, |
|
"loss": 2.448, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.9074980088747298, |
|
"grad_norm": 0.6238061189651489, |
|
"learning_rate": 4.298119063974914e-06, |
|
"loss": 2.191, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.9084082375696894, |
|
"grad_norm": 0.7346171736717224, |
|
"learning_rate": 4.214849540039267e-06, |
|
"loss": 2.5612, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.909318466264649, |
|
"grad_norm": 0.8779885768890381, |
|
"learning_rate": 4.132377169444279e-06, |
|
"loss": 2.5268, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.9102286949596086, |
|
"grad_norm": 1.4997063875198364, |
|
"learning_rate": 4.050702638550275e-06, |
|
"loss": 2.3846, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.9111389236545682, |
|
"grad_norm": 0.4338892996311188, |
|
"learning_rate": 3.969826627077655e-06, |
|
"loss": 2.5665, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.9120491523495278, |
|
"grad_norm": 0.4329196810722351, |
|
"learning_rate": 3.889749808101395e-06, |
|
"loss": 2.3979, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.9129593810444874, |
|
"grad_norm": 0.43765294551849365, |
|
"learning_rate": 3.810472848045266e-06, |
|
"loss": 2.6044, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.9138696097394471, |
|
"grad_norm": 0.4189110994338989, |
|
"learning_rate": 3.7319964066763858e-06, |
|
"loss": 2.3877, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.9147798384344067, |
|
"grad_norm": 0.4291215240955353, |
|
"learning_rate": 3.6543211370997587e-06, |
|
"loss": 2.3827, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.9156900671293663, |
|
"grad_norm": 0.4351893365383148, |
|
"learning_rate": 3.5774476857527107e-06, |
|
"loss": 2.3855, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.9166002958243259, |
|
"grad_norm": 0.421740859746933, |
|
"learning_rate": 3.5013766923996604e-06, |
|
"loss": 2.2888, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.9175105245192855, |
|
"grad_norm": 0.41383060812950134, |
|
"learning_rate": 3.426108790126681e-06, |
|
"loss": 2.3313, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.9184207532142451, |
|
"grad_norm": 0.4178398549556732, |
|
"learning_rate": 3.3516446053363015e-06, |
|
"loss": 2.21, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.9193309819092047, |
|
"grad_norm": 0.4307008385658264, |
|
"learning_rate": 3.2779847577422697e-06, |
|
"loss": 2.2395, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.9202412106041643, |
|
"grad_norm": 0.4171479046344757, |
|
"learning_rate": 3.2051298603643753e-06, |
|
"loss": 2.0993, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.9211514392991239, |
|
"grad_norm": 0.4234906733036041, |
|
"learning_rate": 3.133080519523368e-06, |
|
"loss": 2.3472, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.9220616679940835, |
|
"grad_norm": 0.4258922040462494, |
|
"learning_rate": 3.0618373348359264e-06, |
|
"loss": 2.3281, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.9229718966890431, |
|
"grad_norm": 0.41497015953063965, |
|
"learning_rate": 2.991400899209651e-06, |
|
"loss": 2.11, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.9238821253840027, |
      "grad_norm": 0.4151165187358856,
      "learning_rate": 2.921771798838069e-06,
      "loss": 2.0979,
      "step": 1015
    },
    {
      "epoch": 0.9247923540789623,
      "grad_norm": 0.4119180142879486,
      "learning_rate": 2.852950613195915e-06,
      "loss": 2.1067,
      "step": 1016
    },
    {
      "epoch": 0.9257025827739219,
      "grad_norm": 0.3973481357097626,
      "learning_rate": 2.784937915034169e-06,
      "loss": 2.1104,
      "step": 1017
    },
    {
      "epoch": 0.9266128114688815,
      "grad_norm": 0.41312044858932495,
      "learning_rate": 2.717734270375272e-06,
      "loss": 2.217,
      "step": 1018
    },
    {
      "epoch": 0.9275230401638411,
      "grad_norm": 0.3981812298297882,
      "learning_rate": 2.6513402385085704e-06,
      "loss": 2.035,
      "step": 1019
    },
    {
      "epoch": 0.9284332688588007,
      "grad_norm": 0.39964228868484497,
      "learning_rate": 2.585756371985493e-06,
      "loss": 1.9749,
      "step": 1020
    },
    {
      "epoch": 0.9293434975537603,
      "grad_norm": 0.4008193910121918,
      "learning_rate": 2.520983216615047e-06,
      "loss": 2.0877,
      "step": 1021
    },
    {
      "epoch": 0.9302537262487199,
      "grad_norm": 0.4075929820537567,
      "learning_rate": 2.4570213114592954e-06,
      "loss": 2.0702,
      "step": 1022
    },
    {
      "epoch": 0.9311639549436797,
      "grad_norm": 0.4271462857723236,
      "learning_rate": 2.393871188828767e-06,
      "loss": 2.1174,
      "step": 1023
    },
    {
      "epoch": 0.9320741836386393,
      "grad_norm": 0.410427063703537,
      "learning_rate": 2.3315333742780942e-06,
      "loss": 2.0849,
      "step": 1024
    },
    {
      "epoch": 0.9329844123335989,
      "grad_norm": 0.41201114654541016,
      "learning_rate": 2.270008386601685e-06,
      "loss": 2.0695,
      "step": 1025
    },
    {
      "epoch": 0.9338946410285585,
      "grad_norm": 0.41850075125694275,
      "learning_rate": 2.2092967378292915e-06,
      "loss": 2.0697,
      "step": 1026
    },
    {
      "epoch": 0.9348048697235181,
      "grad_norm": 0.429149866104126,
      "learning_rate": 2.1493989332218468e-06,
      "loss": 2.1187,
      "step": 1027
    },
    {
      "epoch": 0.9357150984184777,
      "grad_norm": 0.4181869328022003,
      "learning_rate": 2.0903154712672237e-06,
      "loss": 1.8432,
      "step": 1028
    },
    {
      "epoch": 0.9366253271134373,
      "grad_norm": 0.43963634967803955,
      "learning_rate": 2.032046843676061e-06,
      "loss": 2.2287,
      "step": 1029
    },
    {
      "epoch": 0.9375355558083969,
      "grad_norm": 0.4306661784648895,
      "learning_rate": 1.974593535377722e-06,
      "loss": 1.9793,
      "step": 1030
    },
    {
      "epoch": 0.9384457845033565,
      "grad_norm": 0.4563003480434418,
      "learning_rate": 1.917956024516243e-06,
      "loss": 1.932,
      "step": 1031
    },
    {
      "epoch": 0.9393560131983161,
      "grad_norm": 0.4930243492126465,
      "learning_rate": 1.8621347824462787e-06,
      "loss": 2.2763,
      "step": 1032
    },
    {
      "epoch": 0.9402662418932757,
      "grad_norm": 0.4600037932395935,
      "learning_rate": 1.8071302737293295e-06,
      "loss": 2.1954,
      "step": 1033
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 0.47071364521980286,
      "learning_rate": 1.752942956129744e-06,
      "loss": 2.1367,
      "step": 1034
    },
    {
      "epoch": 0.9420866992831949,
      "grad_norm": 0.4597410559654236,
      "learning_rate": 1.6995732806109554e-06,
      "loss": 2.0846,
      "step": 1035
    },
    {
      "epoch": 0.9429969279781545,
      "grad_norm": 0.4716775715351105,
      "learning_rate": 1.6470216913317626e-06,
      "loss": 2.08,
      "step": 1036
    },
    {
      "epoch": 0.9439071566731141,
      "grad_norm": 0.4849373996257782,
      "learning_rate": 1.5952886256425547e-06,
      "loss": 2.1608,
      "step": 1037
    },
    {
      "epoch": 0.9448173853680737,
      "grad_norm": 0.5041067004203796,
      "learning_rate": 1.5443745140817366e-06,
      "loss": 2.2022,
      "step": 1038
    },
    {
      "epoch": 0.9457276140630333,
      "grad_norm": 0.503990888595581,
      "learning_rate": 1.4942797803721543e-06,
      "loss": 1.9996,
      "step": 1039
    },
    {
      "epoch": 0.9466378427579929,
      "grad_norm": 0.5079309940338135,
      "learning_rate": 1.4450048414174854e-06,
      "loss": 2.1168,
      "step": 1040
    },
    {
      "epoch": 0.9475480714529525,
      "grad_norm": 0.5187703371047974,
      "learning_rate": 1.3965501072988663e-06,
      "loss": 2.0721,
      "step": 1041
    },
    {
      "epoch": 0.9484583001479122,
      "grad_norm": 0.5252265930175781,
      "learning_rate": 1.348915981271437e-06,
      "loss": 2.1585,
      "step": 1042
    },
    {
      "epoch": 0.9493685288428718,
      "grad_norm": 0.5392215847969055,
      "learning_rate": 1.3021028597609675e-06,
      "loss": 2.2417,
      "step": 1043
    },
    {
      "epoch": 0.9502787575378314,
      "grad_norm": 0.5682265162467957,
      "learning_rate": 1.2561111323605712e-06,
      "loss": 2.1904,
      "step": 1044
    },
    {
      "epoch": 0.951188986232791,
      "grad_norm": 0.5829483270645142,
      "learning_rate": 1.2109411818274852e-06,
      "loss": 2.2045,
      "step": 1045
    },
    {
      "epoch": 0.9520992149277506,
      "grad_norm": 0.6208778023719788,
      "learning_rate": 1.1665933840798838e-06,
      "loss": 2.1755,
      "step": 1046
    },
    {
      "epoch": 0.9530094436227102,
      "grad_norm": 0.6663662195205688,
      "learning_rate": 1.1230681081936923e-06,
      "loss": 2.4264,
      "step": 1047
    },
    {
      "epoch": 0.9539196723176698,
      "grad_norm": 0.7317161560058594,
      "learning_rate": 1.0803657163995895e-06,
      "loss": 2.4505,
      "step": 1048
    },
    {
      "epoch": 0.9548299010126294,
      "grad_norm": 0.8815041184425354,
      "learning_rate": 1.0384865640799435e-06,
      "loss": 2.4768,
      "step": 1049
    },
    {
      "epoch": 0.955740129707589,
      "grad_norm": 1.4678566455841064,
      "learning_rate": 9.974309997658915e-07,
      "loss": 2.6858,
      "step": 1050
    },
    {
      "epoch": 0.9566503584025486,
      "grad_norm": 0.4141276180744171,
      "learning_rate": 9.57199365134387e-07,
      "loss": 2.4886,
      "step": 1051
    },
    {
      "epoch": 0.9575605870975082,
      "grad_norm": 0.43364688754081726,
      "learning_rate": 9.177919950054237e-07,
      "loss": 2.4077,
      "step": 1052
    },
    {
      "epoch": 0.9584708157924678,
      "grad_norm": 0.42708560824394226,
      "learning_rate": 8.792092173391831e-07,
      "loss": 2.3588,
      "step": 1053
    },
    {
      "epoch": 0.9593810444874274,
      "grad_norm": 0.4065101146697998,
      "learning_rate": 8.41451353233369e-07,
      "loss": 2.3254,
      "step": 1054
    },
    {
      "epoch": 0.960291273182387,
      "grad_norm": 0.42761117219924927,
      "learning_rate": 8.04518716920466e-07,
      "loss": 2.4209,
      "step": 1055
    },
    {
      "epoch": 0.9612015018773467,
      "grad_norm": 0.4246588349342346,
      "learning_rate": 7.684116157651966e-07,
      "loss": 2.3689,
      "step": 1056
    },
    {
      "epoch": 0.9621117305723063,
      "grad_norm": 0.4166822135448456,
      "learning_rate": 7.331303502618903e-07,
      "loss": 2.3942,
      "step": 1057
    },
    {
      "epoch": 0.9630219592672659,
      "grad_norm": 0.4337478578090668,
      "learning_rate": 6.986752140320518e-07,
      "loss": 2.2818,
      "step": 1058
    },
    {
      "epoch": 0.9639321879622255,
      "grad_norm": 0.4047817885875702,
      "learning_rate": 6.650464938218637e-07,
      "loss": 2.2848,
      "step": 1059
    },
    {
      "epoch": 0.9648424166571851,
      "grad_norm": 0.4132625162601471,
      "learning_rate": 6.322444694998319e-07,
      "loss": 2.3324,
      "step": 1060
    },
    {
      "epoch": 0.9657526453521448,
      "grad_norm": 0.4332369863986969,
      "learning_rate": 6.002694140544329e-07,
      "loss": 2.2694,
      "step": 1061
    },
    {
      "epoch": 0.9666628740471044,
      "grad_norm": 0.4239719808101654,
      "learning_rate": 5.691215935918815e-07,
      "loss": 2.1016,
      "step": 1062
    },
    {
      "epoch": 0.967573102742064,
      "grad_norm": 0.40531545877456665,
      "learning_rate": 5.388012673338661e-07,
      "loss": 2.1497,
      "step": 1063
    },
    {
      "epoch": 0.9684833314370236,
      "grad_norm": 0.4235825538635254,
      "learning_rate": 5.093086876154174e-07,
      "loss": 2.2406,
      "step": 1064
    },
    {
      "epoch": 0.9693935601319832,
      "grad_norm": 0.4083643853664398,
      "learning_rate": 4.80644099882821e-07,
      "loss": 2.188,
      "step": 1065
    },
    {
      "epoch": 0.9703037888269428,
      "grad_norm": 0.41800832748413086,
      "learning_rate": 4.5280774269154115e-07,
      "loss": 2.1485,
      "step": 1066
    },
    {
      "epoch": 0.9712140175219024,
      "grad_norm": 0.4159460663795471,
      "learning_rate": 4.2579984770426686e-07,
      "loss": 2.0976,
      "step": 1067
    },
    {
      "epoch": 0.972124246216862,
      "grad_norm": 0.39471137523651123,
      "learning_rate": 3.99620639688969e-07,
      "loss": 1.9677,
      "step": 1068
    },
    {
      "epoch": 0.9730344749118216,
      "grad_norm": 0.414125919342041,
      "learning_rate": 3.742703365170241e-07,
      "loss": 2.0815,
      "step": 1069
    },
    {
      "epoch": 0.9739447036067812,
      "grad_norm": 0.4122181236743927,
      "learning_rate": 3.497491491614158e-07,
      "loss": 1.935,
      "step": 1070
    },
    {
      "epoch": 0.9748549323017408,
      "grad_norm": 0.4170084595680237,
      "learning_rate": 3.260572816949692e-07,
      "loss": 2.1813,
      "step": 1071
    },
    {
      "epoch": 0.9757651609967004,
      "grad_norm": 0.41518816351890564,
      "learning_rate": 3.0319493128866396e-07,
      "loss": 2.0718,
      "step": 1072
    },
    {
      "epoch": 0.97667538969166,
      "grad_norm": 0.4334098696708679,
      "learning_rate": 2.8116228820997957e-07,
      "loss": 2.263,
      "step": 1073
    },
    {
      "epoch": 0.9775856183866196,
      "grad_norm": 0.4226486086845398,
      "learning_rate": 2.5995953582130804e-07,
      "loss": 2.1012,
      "step": 1074
    },
    {
      "epoch": 0.9784958470815792,
      "grad_norm": 0.43473172187805176,
      "learning_rate": 2.395868505784438e-07,
      "loss": 2.1051,
      "step": 1075
    },
    {
      "epoch": 0.9794060757765388,
      "grad_norm": 0.43599316477775574,
      "learning_rate": 2.2004440202911814e-07,
      "loss": 2.1165,
      "step": 1076
    },
    {
      "epoch": 0.9803163044714984,
      "grad_norm": 0.4303858280181885,
      "learning_rate": 2.0133235281156736e-07,
      "loss": 2.0183,
      "step": 1077
    },
    {
      "epoch": 0.981226533166458,
      "grad_norm": 0.4305777847766876,
      "learning_rate": 1.83450858653178e-07,
      "loss": 2.0601,
      "step": 1078
    },
    {
      "epoch": 0.9821367618614176,
      "grad_norm": 0.43362459540367126,
      "learning_rate": 1.664000683692324e-07,
      "loss": 1.9406,
      "step": 1079
    },
    {
      "epoch": 0.9830469905563773,
      "grad_norm": 0.4709031283855438,
      "learning_rate": 1.5018012386162072e-07,
      "loss": 2.3195,
      "step": 1080
    },
    {
      "epoch": 0.983957219251337,
      "grad_norm": 0.43655630946159363,
      "learning_rate": 1.3479116011769767e-07,
      "loss": 2.0253,
      "step": 1081
    },
    {
      "epoch": 0.9848674479462965,
      "grad_norm": 0.42346787452697754,
      "learning_rate": 1.2023330520911646e-07,
      "loss": 1.8358,
      "step": 1082
    },
    {
      "epoch": 0.9857776766412562,
      "grad_norm": 0.455380916595459,
      "learning_rate": 1.0650668029079658e-07,
      "loss": 2.096,
      "step": 1083
    },
    {
      "epoch": 0.9866879053362158,
      "grad_norm": 0.48610299825668335,
      "learning_rate": 9.361139959993549e-08,
      "loss": 2.1442,
      "step": 1084
    },
    {
      "epoch": 0.9875981340311754,
      "grad_norm": 0.47684475779533386,
      "learning_rate": 8.154757045497619e-08,
      "loss": 2.108,
      "step": 1085
    },
    {
      "epoch": 0.988508362726135,
      "grad_norm": 0.45393744111061096,
      "learning_rate": 7.0315293254819e-08,
      "loss": 1.9724,
      "step": 1086
    },
    {
      "epoch": 0.9894185914210946,
      "grad_norm": 0.4876580238342285,
      "learning_rate": 5.991466147791113e-08,
      "loss": 2.0793,
      "step": 1087
    },
    {
      "epoch": 0.9903288201160542,
      "grad_norm": 0.5055583119392395,
      "learning_rate": 5.0345761681491746e-08,
      "loss": 2.1826,
      "step": 1088
    },
    {
      "epoch": 0.9912390488110138,
      "grad_norm": 0.5002666115760803,
      "learning_rate": 4.1608673500859175e-08,
      "loss": 2.1531,
      "step": 1089
    },
    {
      "epoch": 0.9921492775059734,
      "grad_norm": 0.503621518611908,
      "learning_rate": 3.370346964876036e-08,
      "loss": 2.035,
      "step": 1090
    },
    {
      "epoch": 0.993059506200933,
      "grad_norm": 0.5054223537445068,
      "learning_rate": 2.6630215914702495e-08,
      "loss": 2.0657,
      "step": 1091
    },
    {
      "epoch": 0.9939697348958926,
      "grad_norm": 0.5747066140174866,
      "learning_rate": 2.038897116447558e-08,
      "loss": 2.3719,
      "step": 1092
    },
    {
      "epoch": 0.9948799635908522,
      "grad_norm": 0.5756259560585022,
      "learning_rate": 1.4979787339619578e-08,
      "loss": 2.1818,
      "step": 1093
    },
    {
      "epoch": 0.9957901922858118,
      "grad_norm": 0.6113715767860413,
      "learning_rate": 1.0402709457035808e-08,
      "loss": 2.3374,
      "step": 1094
    },
    {
      "epoch": 0.9967004209807714,
      "grad_norm": 0.6483973264694214,
      "learning_rate": 6.657775608553962e-09,
      "loss": 2.307,
      "step": 1095
    },
    {
      "epoch": 0.997610649675731,
      "grad_norm": 0.7209523320198059,
      "learning_rate": 3.745016960665648e-09,
      "loss": 2.5389,
      "step": 1096
    },
    {
      "epoch": 0.9985208783706906,
      "grad_norm": 0.7734662294387817,
      "learning_rate": 1.6644577542357375e-09,
      "loss": 2.4849,
      "step": 1097
    },
    {
      "epoch": 0.9994311070656502,
      "grad_norm": 1.1054223775863647,
      "learning_rate": 4.1611530431362453e-10,
      "loss": 2.4941,
      "step": 1098
    },
    {
      "epoch": 1.0006826715212198,
      "grad_norm": 3.6098527908325195,
      "learning_rate": 0.0,
      "loss": 4.772,
      "step": 1099
    }
  ],
  "logging_steps": 1,
  "max_steps": 1099,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 275,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.0148357964895355e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}