{ |
|
"best_metric": 1.4881032705307007, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-150", |
|
"epoch": 0.0161033837235049, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 8.051691861752451e-05, |
|
"grad_norm": 15.694011688232422, |
|
"learning_rate": 1e-05, |
|
"loss": 2.23, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 8.051691861752451e-05, |
|
"eval_loss": 3.512342929840088, |
|
"eval_runtime": 1553.9839, |
|
"eval_samples_per_second": 13.461, |
|
"eval_steps_per_second": 3.366, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00016103383723504902, |
|
"grad_norm": 17.33702850341797, |
|
"learning_rate": 2e-05, |
|
"loss": 2.5065, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00024155075585257353, |
|
"grad_norm": 17.64397621154785, |
|
"learning_rate": 3e-05, |
|
"loss": 2.5815, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.00032206767447009804, |
|
"grad_norm": 11.568385124206543, |
|
"learning_rate": 4e-05, |
|
"loss": 2.2006, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0004025845930876225, |
|
"grad_norm": 5.545162677764893, |
|
"learning_rate": 5e-05, |
|
"loss": 2.1846, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.00048310151170514705, |
|
"grad_norm": 4.827724456787109, |
|
"learning_rate": 6e-05, |
|
"loss": 1.9824, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0005636184303226716, |
|
"grad_norm": 2.6311419010162354, |
|
"learning_rate": 7e-05, |
|
"loss": 2.0108, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0006441353489401961, |
|
"grad_norm": 2.659532308578491, |
|
"learning_rate": 8e-05, |
|
"loss": 2.0057, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0007246522675577206, |
|
"grad_norm": 1.7492154836654663, |
|
"learning_rate": 9e-05, |
|
"loss": 2.048, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.000805169186175245, |
|
"grad_norm": 1.2077504396438599, |
|
"learning_rate": 0.0001, |
|
"loss": 1.8373, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0008856861047927696, |
|
"grad_norm": 1.0642375946044922, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 1.8222, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0009662030234102941, |
|
"grad_norm": 1.2671899795532227, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 1.8701, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0010467199420278186, |
|
"grad_norm": 0.8027169108390808, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 1.7772, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0011272368606453432, |
|
"grad_norm": 1.0432685613632202, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 1.72, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0012077537792628676, |
|
"grad_norm": 0.6952840685844421, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 1.6682, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0012882706978803921, |
|
"grad_norm": 0.939834475517273, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 1.7082, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0013687876164979165, |
|
"grad_norm": 0.5965157747268677, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 1.4408, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.001449304535115441, |
|
"grad_norm": 0.8464444875717163, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 1.5831, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0015298214537329657, |
|
"grad_norm": 0.6520291566848755, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 1.598, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.00161033837235049, |
|
"grad_norm": 0.6852399110794067, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 1.6095, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0016908552909680147, |
|
"grad_norm": 1.1227819919586182, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 1.692, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0017713722095855393, |
|
"grad_norm": 0.6330742835998535, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 1.4861, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0018518891282030636, |
|
"grad_norm": 0.6982694268226624, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 1.5463, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0019324060468205882, |
|
"grad_norm": 0.7267745137214661, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 1.5918, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0020129229654381126, |
|
"grad_norm": 0.8092511296272278, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 1.5714, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.002093439884055637, |
|
"grad_norm": 0.6692335605621338, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 1.5636, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.0021739568026731618, |
|
"grad_norm": 0.6988323330879211, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 1.5656, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.0022544737212906864, |
|
"grad_norm": 0.6920161247253418, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 1.5276, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.0023349906399082105, |
|
"grad_norm": 0.7586509585380554, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 1.4832, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.002415507558525735, |
|
"grad_norm": 0.75624680519104, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 1.7347, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0024960244771432597, |
|
"grad_norm": 0.8674934506416321, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 1.469, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0025765413957607843, |
|
"grad_norm": 0.9146162271499634, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 1.3929, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.002657058314378309, |
|
"grad_norm": 0.8667139410972595, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 1.6595, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.002737575232995833, |
|
"grad_norm": 0.9289156198501587, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 1.468, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.0028180921516133576, |
|
"grad_norm": 0.8652037978172302, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 1.6761, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.002898609070230882, |
|
"grad_norm": 0.7662935853004456, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 1.4846, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.002979125988848407, |
|
"grad_norm": 0.829105019569397, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 1.5524, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.0030596429074659314, |
|
"grad_norm": 0.7352724075317383, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 1.3437, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.003140159826083456, |
|
"grad_norm": 1.0383816957473755, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 1.5991, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.00322067674470098, |
|
"grad_norm": 0.7830373644828796, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 1.5001, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0033011936633185047, |
|
"grad_norm": 1.0262047052383423, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 1.6893, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0033817105819360293, |
|
"grad_norm": 0.9097186326980591, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 1.5568, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.003462227500553554, |
|
"grad_norm": 0.940087616443634, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 1.6516, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.0035427444191710785, |
|
"grad_norm": 1.0287165641784668, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 1.8198, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.0036232613377886027, |
|
"grad_norm": 1.0288898944854736, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 1.7021, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.0037037782564061273, |
|
"grad_norm": 0.8909144401550293, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 1.284, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.003784295175023652, |
|
"grad_norm": 1.2093318700790405, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 1.7116, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.0038648120936411764, |
|
"grad_norm": 1.316892385482788, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 1.7716, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.003945329012258701, |
|
"grad_norm": 1.7544459104537964, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 1.8157, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.004025845930876225, |
|
"grad_norm": 1.5375967025756836, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 1.9209, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.004025845930876225, |
|
"eval_loss": 1.635573148727417, |
|
"eval_runtime": 1564.134, |
|
"eval_samples_per_second": 13.374, |
|
"eval_steps_per_second": 3.344, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.00410636284949375, |
|
"grad_norm": 0.8445530533790588, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 1.3096, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.004186879768111274, |
|
"grad_norm": 0.7997322082519531, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 1.6707, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.0042673966867287985, |
|
"grad_norm": 0.706866443157196, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 1.4309, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0043479136053463235, |
|
"grad_norm": 0.4168620705604553, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 1.4875, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.004428430523963848, |
|
"grad_norm": 0.4258555769920349, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 1.4082, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.004508947442581373, |
|
"grad_norm": 0.5967984199523926, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 1.4149, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.004589464361198897, |
|
"grad_norm": 0.5998218655586243, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 1.5081, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.004669981279816421, |
|
"grad_norm": 0.49025171995162964, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 1.4152, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.004750498198433946, |
|
"grad_norm": 0.44819146394729614, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 1.3845, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.00483101511705147, |
|
"grad_norm": 0.5317243933677673, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 1.4941, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.004911532035668995, |
|
"grad_norm": 0.5236647129058838, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 1.3369, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.004992048954286519, |
|
"grad_norm": 0.44179674983024597, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 1.393, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.0050725658729040436, |
|
"grad_norm": 0.5650262236595154, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 1.41, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.005153082791521569, |
|
"grad_norm": 0.5390046834945679, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 1.4469, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.005233599710139093, |
|
"grad_norm": 0.5149980783462524, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 1.4273, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.005314116628756618, |
|
"grad_norm": 0.4648854732513428, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 1.3731, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.005394633547374142, |
|
"grad_norm": 0.4596269130706787, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 1.4532, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.005475150465991666, |
|
"grad_norm": 0.5018298625946045, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 1.4546, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.005555667384609191, |
|
"grad_norm": 0.495149165391922, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 1.355, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.005636184303226715, |
|
"grad_norm": 0.5004109740257263, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 1.266, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.00571670122184424, |
|
"grad_norm": 0.5031141042709351, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 1.5312, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.005797218140461764, |
|
"grad_norm": 0.5997195243835449, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 1.46, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.0058777350590792895, |
|
"grad_norm": 0.5207065343856812, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 1.4882, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.005958251977696814, |
|
"grad_norm": 0.5105201601982117, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 1.3682, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.006038768896314338, |
|
"grad_norm": 0.5663651823997498, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 1.4381, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.006119285814931863, |
|
"grad_norm": 0.5280080437660217, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 1.3723, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.006199802733549387, |
|
"grad_norm": 0.6614448428153992, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 1.4818, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.006280319652166912, |
|
"grad_norm": 0.5965041518211365, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 1.3351, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.006360836570784436, |
|
"grad_norm": 0.6547884941101074, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 1.4995, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.00644135348940196, |
|
"grad_norm": 0.6421893239021301, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 1.5583, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.006521870408019485, |
|
"grad_norm": 0.666612446308136, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 1.5512, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.0066023873266370095, |
|
"grad_norm": 0.7277308702468872, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 1.388, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.0066829042452545345, |
|
"grad_norm": 0.6979571580886841, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 1.5284, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.006763421163872059, |
|
"grad_norm": 0.6597092747688293, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 1.4454, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.006843938082489583, |
|
"grad_norm": 0.6322495341300964, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 1.6041, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.006924455001107108, |
|
"grad_norm": 0.6709463596343994, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 1.4301, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.007004971919724632, |
|
"grad_norm": 0.9026774764060974, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 1.3447, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.007085488838342157, |
|
"grad_norm": 0.8283661007881165, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 1.6074, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.007166005756959681, |
|
"grad_norm": 0.6361680030822754, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 1.4813, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.007246522675577205, |
|
"grad_norm": 0.7173851728439331, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 1.4084, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.00732703959419473, |
|
"grad_norm": 0.7785750031471252, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 1.3313, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.0074075565128122545, |
|
"grad_norm": 0.8564140200614929, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 1.4273, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.0074880734314297795, |
|
"grad_norm": 0.9119535684585571, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 1.5423, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.007568590350047304, |
|
"grad_norm": 0.8943213224411011, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 1.4068, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.007649107268664828, |
|
"grad_norm": 0.9337512254714966, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 1.4746, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.007729624187282353, |
|
"grad_norm": 1.1742053031921387, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 1.8425, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.007810141105899877, |
|
"grad_norm": 1.1696361303329468, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 1.7462, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.007890658024517402, |
|
"grad_norm": 1.1739925146102905, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 1.6647, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.007971174943134927, |
|
"grad_norm": 1.2244256734848022, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 1.7935, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.00805169186175245, |
|
"grad_norm": 1.8664811849594116, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 2.3502, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.00805169186175245, |
|
"eval_loss": 1.5458824634552002, |
|
"eval_runtime": 1563.1648, |
|
"eval_samples_per_second": 13.382, |
|
"eval_steps_per_second": 3.346, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.008132208780369975, |
|
"grad_norm": 0.42842939496040344, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 1.3317, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.0082127256989875, |
|
"grad_norm": 0.5643890500068665, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 1.4563, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.008293242617605024, |
|
"grad_norm": 0.4634335935115814, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 1.3654, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.008373759536222549, |
|
"grad_norm": 0.44454672932624817, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 1.3543, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.008454276454840074, |
|
"grad_norm": 0.44821494817733765, |
|
"learning_rate": 5e-05, |
|
"loss": 1.2992, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.008534793373457597, |
|
"grad_norm": 0.43948835134506226, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 1.2973, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.008615310292075122, |
|
"grad_norm": 0.37838828563690186, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 1.4294, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.008695827210692647, |
|
"grad_norm": 0.4603414237499237, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 1.451, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.008776344129310172, |
|
"grad_norm": 0.4389083981513977, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 1.2915, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.008856861047927695, |
|
"grad_norm": 0.41231870651245117, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 1.4153, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.00893737796654522, |
|
"grad_norm": 0.4334801733493805, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 1.4621, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.009017894885162745, |
|
"grad_norm": 0.44022494554519653, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 1.5572, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.009098411803780269, |
|
"grad_norm": 0.45386338233947754, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 1.465, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.009178928722397794, |
|
"grad_norm": 0.41959309577941895, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 1.4254, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.009259445641015319, |
|
"grad_norm": 0.5270527005195618, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 1.4127, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.009339962559632842, |
|
"grad_norm": 0.4227737784385681, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 1.2715, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.009420479478250367, |
|
"grad_norm": 0.45509597659111023, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 1.5044, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.009500996396867892, |
|
"grad_norm": 0.5056562423706055, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 1.3814, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.009581513315485417, |
|
"grad_norm": 0.46866923570632935, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 1.3934, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.00966203023410294, |
|
"grad_norm": 0.4556780159473419, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 1.388, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.009742547152720465, |
|
"grad_norm": 0.5601311326026917, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 1.3276, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.00982306407133799, |
|
"grad_norm": 0.5342466831207275, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 1.3697, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.009903580989955514, |
|
"grad_norm": 0.49002259969711304, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 1.3699, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.009984097908573039, |
|
"grad_norm": 0.5552515387535095, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 1.5532, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.010064614827190564, |
|
"grad_norm": 0.4900350868701935, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 1.3606, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.010145131745808087, |
|
"grad_norm": 0.5334219932556152, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 1.2242, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.010225648664425612, |
|
"grad_norm": 0.629417359828949, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 1.4833, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.010306165583043137, |
|
"grad_norm": 0.5269851684570312, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 1.3561, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.010386682501660662, |
|
"grad_norm": 0.739433228969574, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 1.426, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.010467199420278185, |
|
"grad_norm": 0.7249503135681152, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 1.4334, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.01054771633889571, |
|
"grad_norm": 0.6687948107719421, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 1.5072, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.010628233257513236, |
|
"grad_norm": 0.6781296133995056, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 1.6441, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.010708750176130759, |
|
"grad_norm": 0.6829041838645935, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 1.408, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.010789267094748284, |
|
"grad_norm": 0.5669664144515991, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 1.4808, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.010869784013365809, |
|
"grad_norm": 0.6487475633621216, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 1.5326, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.010950300931983332, |
|
"grad_norm": 0.6838583946228027, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 1.4965, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.011030817850600857, |
|
"grad_norm": 0.7144851088523865, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 1.4568, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.011111334769218382, |
|
"grad_norm": 0.6923321485519409, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 1.393, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.011191851687835907, |
|
"grad_norm": 0.8496538996696472, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 1.6645, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.01127236860645343, |
|
"grad_norm": 0.5879185199737549, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 1.3587, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.011352885525070956, |
|
"grad_norm": 0.7312722206115723, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 1.3843, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.01143340244368848, |
|
"grad_norm": 0.9298387169837952, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 1.7808, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.011513919362306004, |
|
"grad_norm": 0.9687211513519287, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 1.5339, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.011594436280923529, |
|
"grad_norm": 0.8629782199859619, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 1.4944, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.011674953199541054, |
|
"grad_norm": 0.8845195770263672, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 1.4167, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.011755470118158579, |
|
"grad_norm": 0.8540925979614258, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 1.4933, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.011835987036776102, |
|
"grad_norm": 0.9973525404930115, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 1.5346, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.011916503955393627, |
|
"grad_norm": 1.0560837984085083, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 1.496, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.011997020874011152, |
|
"grad_norm": 0.9571331143379211, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 1.4229, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.012077537792628676, |
|
"grad_norm": 1.4350992441177368, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 1.7281, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.012077537792628676, |
|
"eval_loss": 1.4881032705307007, |
|
"eval_runtime": 1565.186, |
|
"eval_samples_per_second": 13.365, |
|
"eval_steps_per_second": 3.341, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0121580547112462, |
|
"grad_norm": 0.26667049527168274, |
|
"learning_rate": 1.553232954407171e-05, |
|
"loss": 1.2019, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.012238571629863726, |
|
"grad_norm": 0.39429017901420593, |
|
"learning_rate": 1.4938160786375572e-05, |
|
"loss": 1.3906, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.012319088548481249, |
|
"grad_norm": 0.346015989780426, |
|
"learning_rate": 1.435357758543015e-05, |
|
"loss": 1.4222, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.012399605467098774, |
|
"grad_norm": 0.32731351256370544, |
|
"learning_rate": 1.3778739760445552e-05, |
|
"loss": 1.3621, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.012480122385716299, |
|
"grad_norm": 0.33853569626808167, |
|
"learning_rate": 1.3213804466343421e-05, |
|
"loss": 1.4196, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.012560639304333824, |
|
"grad_norm": 0.33642446994781494, |
|
"learning_rate": 1.2658926150792322e-05, |
|
"loss": 1.3536, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.012641156222951347, |
|
"grad_norm": 0.37169885635375977, |
|
"learning_rate": 1.2114256511983274e-05, |
|
"loss": 1.4761, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.012721673141568872, |
|
"grad_norm": 0.451521098613739, |
|
"learning_rate": 1.157994445715706e-05, |
|
"loss": 1.3824, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.012802190060186397, |
|
"grad_norm": 0.37195083498954773, |
|
"learning_rate": 1.1056136061894384e-05, |
|
"loss": 1.2683, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.01288270697880392, |
|
"grad_norm": 0.39328181743621826, |
|
"learning_rate": 1.0542974530180327e-05, |
|
"loss": 1.2856, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.012963223897421446, |
|
"grad_norm": 0.34638622403144836, |
|
"learning_rate": 1.0040600155253765e-05, |
|
"loss": 1.2844, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.01304374081603897, |
|
"grad_norm": 0.41183391213417053, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 1.3941, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.013124257734656494, |
|
"grad_norm": 0.41171473264694214, |
|
"learning_rate": 9.068759265665384e-06, |
|
"loss": 1.3354, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.013204774653274019, |
|
"grad_norm": 0.4064787030220032, |
|
"learning_rate": 8.599558442598998e-06, |
|
"loss": 1.4138, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.013285291571891544, |
|
"grad_norm": 0.4661749601364136, |
|
"learning_rate": 8.141676086873572e-06, |
|
"loss": 1.3705, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.013365808490509069, |
|
"grad_norm": 0.4481525123119354, |
|
"learning_rate": 7.695237378953223e-06, |
|
"loss": 1.4985, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.013446325409126592, |
|
"grad_norm": 0.4615930914878845, |
|
"learning_rate": 7.260364370723044e-06, |
|
"loss": 1.3835, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.013526842327744117, |
|
"grad_norm": 0.4717801809310913, |
|
"learning_rate": 6.837175952121306e-06, |
|
"loss": 1.4163, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.013607359246361642, |
|
"grad_norm": 0.5136454105377197, |
|
"learning_rate": 6.425787818636131e-06, |
|
"loss": 1.4726, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.013687876164979166, |
|
"grad_norm": 0.4442136585712433, |
|
"learning_rate": 6.026312439675552e-06, |
|
"loss": 1.3468, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01376839308359669, |
|
"grad_norm": 0.46402686834335327, |
|
"learning_rate": 5.6388590278194096e-06, |
|
"loss": 1.2927, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.013848910002214216, |
|
"grad_norm": 0.43890780210494995, |
|
"learning_rate": 5.263533508961827e-06, |
|
"loss": 1.4446, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.013929426920831739, |
|
"grad_norm": 0.45133015513420105, |
|
"learning_rate": 4.900438493352055e-06, |
|
"loss": 1.4763, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.014009943839449264, |
|
"grad_norm": 0.5477501749992371, |
|
"learning_rate": 4.549673247541875e-06, |
|
"loss": 1.4132, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.014090460758066789, |
|
"grad_norm": 0.5438304543495178, |
|
"learning_rate": 4.2113336672471245e-06, |
|
"loss": 1.3898, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.014170977676684314, |
|
"grad_norm": 0.4997485280036926, |
|
"learning_rate": 3.885512251130763e-06, |
|
"loss": 1.4163, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.014251494595301837, |
|
"grad_norm": 0.5147976279258728, |
|
"learning_rate": 3.5722980755146517e-06, |
|
"loss": 1.5556, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.014332011513919362, |
|
"grad_norm": 0.5399206876754761, |
|
"learning_rate": 3.271776770026963e-06, |
|
"loss": 1.3201, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.014412528432536887, |
|
"grad_norm": 0.7199476361274719, |
|
"learning_rate": 2.9840304941919415e-06, |
|
"loss": 1.565, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.01449304535115441, |
|
"grad_norm": 0.6314929127693176, |
|
"learning_rate": 2.7091379149682685e-06, |
|
"loss": 1.3938, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.014573562269771936, |
|
"grad_norm": 0.6556135416030884, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 1.5652, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.01465407918838946, |
|
"grad_norm": 0.59678715467453, |
|
"learning_rate": 2.1982109232821178e-06, |
|
"loss": 1.315, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.014734596107006984, |
|
"grad_norm": 0.5509481430053711, |
|
"learning_rate": 1.962316193157593e-06, |
|
"loss": 1.4197, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.014815113025624509, |
|
"grad_norm": 0.6057325601577759, |
|
"learning_rate": 1.7395544861325718e-06, |
|
"loss": 1.4712, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.014895629944242034, |
|
"grad_norm": 0.6615339517593384, |
|
"learning_rate": 1.5299867030334814e-06, |
|
"loss": 1.4587, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.014976146862859559, |
|
"grad_norm": 0.6603967547416687, |
|
"learning_rate": 1.333670137599713e-06, |
|
"loss": 1.1763, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.015056663781477082, |
|
"grad_norm": 0.7309848666191101, |
|
"learning_rate": 1.1506584608200367e-06, |
|
"loss": 1.4143, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.015137180700094607, |
|
"grad_norm": 0.690373420715332, |
|
"learning_rate": 9.810017062595322e-07, |
|
"loss": 1.3348, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.015217697618712132, |
|
"grad_norm": 0.7621862292289734, |
|
"learning_rate": 8.247462563808817e-07, |
|
"loss": 1.5015, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.015298214537329656, |
|
"grad_norm": 0.7496787309646606, |
|
"learning_rate": 6.819348298638839e-07, |
|
"loss": 1.3518, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01537873145594718, |
|
"grad_norm": 0.8436548113822937, |
|
"learning_rate": 5.526064699265753e-07, |
|
"loss": 1.6093, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.015459248374564706, |
|
"grad_norm": 0.8552038073539734, |
|
"learning_rate": 4.367965336512403e-07, |
|
"loss": 1.4763, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.01553976529318223, |
|
"grad_norm": 0.9818535447120667, |
|
"learning_rate": 3.3453668231809286e-07, |
|
"loss": 1.5643, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.015620282211799754, |
|
"grad_norm": 1.0228612422943115, |
|
"learning_rate": 2.458548727494292e-07, |
|
"loss": 1.672, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.01570079913041728, |
|
"grad_norm": 0.9128052592277527, |
|
"learning_rate": 1.7077534966650766e-07, |
|
"loss": 1.4994, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.015781316049034804, |
|
"grad_norm": 0.8675194382667542, |
|
"learning_rate": 1.0931863906127327e-07, |
|
"loss": 1.3002, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.01586183296765233, |
|
"grad_norm": 1.1194286346435547, |
|
"learning_rate": 6.150154258476315e-08, |
|
"loss": 1.6567, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.015942349886269854, |
|
"grad_norm": 1.0962642431259155, |
|
"learning_rate": 2.7337132953697554e-08, |
|
"loss": 1.7238, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.016022866804887376, |
|
"grad_norm": 1.275984287261963, |
|
"learning_rate": 6.834750376549792e-09, |
|
"loss": 1.6813, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.0161033837235049, |
|
"grad_norm": 1.448941707611084, |
|
"learning_rate": 0.0, |
|
"loss": 1.8146, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.0161033837235049, |
|
"eval_loss": 1.4893519878387451, |
|
"eval_runtime": 1563.6447, |
|
"eval_samples_per_second": 13.378, |
|
"eval_steps_per_second": 3.345, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 1 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.7847343411822592e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
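
The state above follows the standard trainer_state.json layout written by the Hugging Face Trainer. A minimal sketch of how one might load it and pull the training and evaluation curves out of log_history (the file path is an assumption; any saved copy of the JSON above works):

import json

# Load the trainer state (path is an assumption; point it at the JSON above).
with open("miner_id_24/checkpoint-200/trainer_state.json") as f:
    state = json.load(f)

# log_history mixes two record shapes: per-step training logs ("loss")
# and periodic evaluation logs ("eval_loss"), both keyed by "step".
train = [(r["step"], r["loss"]) for r in state["log_history"] if "loss" in r]
evals = [(r["step"], r["eval_loss"]) for r in state["log_history"] if "eval_loss" in r]

print(f"best eval_loss {state['best_metric']:.4f} at {state['best_model_checkpoint']}")
for step, loss in evals:
    print(f"step {step:>3}: eval_loss {loss:.4f}")

Run against this file, the loop would print evals at steps 1, 50, 100, 150, and 200, making the early-stopping picture visible: eval_loss bottoms out at 1.4881 on step 150 and ticks up to 1.4894 at step 200, which is why early_stopping_patience_counter is 1 and training stopped at max_steps.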