{
  "best_metric": 0.8629807233810425,
  "best_model_checkpoint": "miner_id_24/checkpoint-50",
  "epoch": 2.663716814159292,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017699115044247787,
      "grad_norm": 2.280963897705078,
      "learning_rate": 1e-05,
      "loss": 1.6006,
      "step": 1
    },
    {
      "epoch": 0.017699115044247787,
      "eval_loss": 2.295236349105835,
      "eval_runtime": 6.8162,
      "eval_samples_per_second": 13.937,
      "eval_steps_per_second": 3.521,
      "step": 1
    },
    {
      "epoch": 0.035398230088495575,
      "grad_norm": 2.509812593460083,
      "learning_rate": 2e-05,
      "loss": 1.9744,
      "step": 2
    },
    {
      "epoch": 0.05309734513274336,
      "grad_norm": 2.4870777130126953,
      "learning_rate": 3e-05,
      "loss": 2.151,
      "step": 3
    },
    {
      "epoch": 0.07079646017699115,
      "grad_norm": 3.0523364543914795,
      "learning_rate": 4e-05,
      "loss": 2.2439,
      "step": 4
    },
    {
      "epoch": 0.08849557522123894,
      "grad_norm": 2.856015205383301,
      "learning_rate": 5e-05,
      "loss": 1.9178,
      "step": 5
    },
    {
      "epoch": 0.10619469026548672,
      "grad_norm": 2.473170757293701,
      "learning_rate": 6e-05,
      "loss": 1.625,
      "step": 6
    },
    {
      "epoch": 0.12389380530973451,
      "grad_norm": 2.4284801483154297,
      "learning_rate": 7e-05,
      "loss": 1.4498,
      "step": 7
    },
    {
      "epoch": 0.1415929203539823,
      "grad_norm": 3.074007987976074,
      "learning_rate": 8e-05,
      "loss": 1.4779,
      "step": 8
    },
    {
      "epoch": 0.1592920353982301,
      "grad_norm": 1.903256893157959,
      "learning_rate": 9e-05,
      "loss": 0.9667,
      "step": 9
    },
    {
      "epoch": 0.17699115044247787,
      "grad_norm": 1.8827953338623047,
      "learning_rate": 0.0001,
      "loss": 1.2711,
      "step": 10
    },
    {
      "epoch": 0.19469026548672566,
      "grad_norm": 1.4443219900131226,
      "learning_rate": 9.999036202410325e-05,
      "loss": 1.1201,
      "step": 11
    },
    {
      "epoch": 0.21238938053097345,
      "grad_norm": 1.6327110528945923,
      "learning_rate": 9.996145181203615e-05,
      "loss": 1.2773,
      "step": 12
    },
    {
      "epoch": 0.23008849557522124,
      "grad_norm": 1.7427234649658203,
      "learning_rate": 9.991328050923581e-05,
      "loss": 1.5374,
      "step": 13
    },
    {
      "epoch": 0.24778761061946902,
      "grad_norm": 4.107868671417236,
      "learning_rate": 9.98458666866564e-05,
      "loss": 1.9677,
      "step": 14
    },
    {
      "epoch": 0.26548672566371684,
      "grad_norm": 1.2882623672485352,
      "learning_rate": 9.975923633360985e-05,
      "loss": 0.6889,
      "step": 15
    },
    {
      "epoch": 0.2831858407079646,
      "grad_norm": 1.568882703781128,
      "learning_rate": 9.965342284774632e-05,
      "loss": 0.8211,
      "step": 16
    },
    {
      "epoch": 0.3008849557522124,
      "grad_norm": 1.8820855617523193,
      "learning_rate": 9.952846702217886e-05,
      "loss": 1.0151,
      "step": 17
    },
    {
      "epoch": 0.3185840707964602,
      "grad_norm": 1.6098060607910156,
      "learning_rate": 9.938441702975689e-05,
      "loss": 1.0105,
      "step": 18
    },
    {
      "epoch": 0.336283185840708,
      "grad_norm": 1.0769816637039185,
      "learning_rate": 9.922132840449459e-05,
      "loss": 0.8687,
      "step": 19
    },
    {
      "epoch": 0.35398230088495575,
      "grad_norm": 0.9652581810951233,
      "learning_rate": 9.903926402016153e-05,
      "loss": 0.9425,
      "step": 20
    },
    {
      "epoch": 0.37168141592920356,
      "grad_norm": 1.15622878074646,
      "learning_rate": 9.883829406604363e-05,
      "loss": 1.1651,
      "step": 21
    },
    {
      "epoch": 0.3893805309734513,
      "grad_norm": 0.9711587429046631,
      "learning_rate": 9.861849601988383e-05,
      "loss": 0.929,
      "step": 22
    },
    {
      "epoch": 0.40707964601769914,
      "grad_norm": 1.1409817934036255,
      "learning_rate": 9.837995461801299e-05,
      "loss": 0.8177,
      "step": 23
    },
    {
      "epoch": 0.4247787610619469,
      "grad_norm": 1.1862760782241821,
      "learning_rate": 9.812276182268236e-05,
      "loss": 0.8939,
      "step": 24
    },
    {
      "epoch": 0.4424778761061947,
      "grad_norm": 1.4299732446670532,
      "learning_rate": 9.784701678661045e-05,
      "loss": 1.211,
      "step": 25
    },
    {
      "epoch": 0.46017699115044247,
      "grad_norm": 1.1160023212432861,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.0373,
      "step": 26
    },
    {
      "epoch": 0.4778761061946903,
      "grad_norm": 1.0652543306350708,
      "learning_rate": 9.724030232334391e-05,
      "loss": 1.0916,
      "step": 27
    },
    {
      "epoch": 0.49557522123893805,
      "grad_norm": 1.3456246852874756,
      "learning_rate": 9.690956679612421e-05,
      "loss": 1.439,
      "step": 28
    },
    {
      "epoch": 0.5132743362831859,
      "grad_norm": 0.8845888376235962,
      "learning_rate": 9.656074673794018e-05,
      "loss": 0.6948,
      "step": 29
    },
    {
      "epoch": 0.5309734513274337,
      "grad_norm": 0.9293695092201233,
      "learning_rate": 9.619397662556435e-05,
      "loss": 0.8672,
      "step": 30
    },
    {
      "epoch": 0.5486725663716814,
      "grad_norm": 0.9480873942375183,
      "learning_rate": 9.580939785585681e-05,
      "loss": 0.914,
      "step": 31
    },
    {
      "epoch": 0.5663716814159292,
      "grad_norm": 0.945064127445221,
      "learning_rate": 9.540715869125407e-05,
      "loss": 0.8962,
      "step": 32
    },
    {
      "epoch": 0.584070796460177,
      "grad_norm": 1.8173238039016724,
      "learning_rate": 9.498741420261108e-05,
      "loss": 0.7806,
      "step": 33
    },
    {
      "epoch": 0.6017699115044248,
      "grad_norm": 1.9008045196533203,
      "learning_rate": 9.45503262094184e-05,
      "loss": 1.183,
      "step": 34
    },
    {
      "epoch": 0.6194690265486725,
      "grad_norm": 0.8381111025810242,
      "learning_rate": 9.409606321741775e-05,
      "loss": 0.8236,
      "step": 35
    },
    {
      "epoch": 0.6371681415929203,
      "grad_norm": 0.8810117840766907,
      "learning_rate": 9.362480035363986e-05,
      "loss": 0.9894,
      "step": 36
    },
    {
      "epoch": 0.6548672566371682,
      "grad_norm": 1.0025238990783691,
      "learning_rate": 9.31367192988896e-05,
      "loss": 1.0978,
      "step": 37
    },
    {
      "epoch": 0.672566371681416,
      "grad_norm": 0.8731153011322021,
      "learning_rate": 9.263200821770461e-05,
      "loss": 0.9889,
      "step": 38
    },
    {
      "epoch": 0.6902654867256637,
      "grad_norm": 0.8915436267852783,
      "learning_rate": 9.211086168581433e-05,
      "loss": 1.0382,
      "step": 39
    },
    {
      "epoch": 0.7079646017699115,
      "grad_norm": 0.9948314428329468,
      "learning_rate": 9.157348061512727e-05,
      "loss": 1.1075,
      "step": 40
    },
    {
      "epoch": 0.7256637168141593,
      "grad_norm": 1.0864626169204712,
      "learning_rate": 9.102007217627568e-05,
      "loss": 1.2494,
      "step": 41
    },
    {
      "epoch": 0.7433628318584071,
      "grad_norm": 1.2462245225906372,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.4589,
      "step": 42
    },
    {
      "epoch": 0.7610619469026548,
      "grad_norm": 0.6787369251251221,
      "learning_rate": 8.986603268863536e-05,
      "loss": 0.8135,
      "step": 43
    },
    {
      "epoch": 0.7787610619469026,
      "grad_norm": 0.6781764030456543,
      "learning_rate": 8.926584654403724e-05,
      "loss": 0.7703,
      "step": 44
    },
    {
      "epoch": 0.7964601769911505,
      "grad_norm": 0.8601434826850891,
      "learning_rate": 8.865052266813685e-05,
      "loss": 0.9327,
      "step": 45
    },
    {
      "epoch": 0.8141592920353983,
      "grad_norm": 0.7944732308387756,
      "learning_rate": 8.802029828000156e-05,
      "loss": 0.8526,
      "step": 46
    },
    {
      "epoch": 0.831858407079646,
      "grad_norm": 0.7232624888420105,
      "learning_rate": 8.737541634312985e-05,
      "loss": 0.6354,
      "step": 47
    },
    {
      "epoch": 0.8495575221238938,
      "grad_norm": 0.8034805059432983,
      "learning_rate": 8.671612547178428e-05,
      "loss": 0.7852,
      "step": 48
    },
    {
      "epoch": 0.8672566371681416,
      "grad_norm": 0.8037271499633789,
      "learning_rate": 8.604267983514594e-05,
      "loss": 0.8371,
      "step": 49
    },
    {
      "epoch": 0.8849557522123894,
      "grad_norm": 0.8748500943183899,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.8886,
      "step": 50
    },
    {
      "epoch": 0.8849557522123894,
      "eval_loss": 0.8629807233810425,
      "eval_runtime": 6.986,
      "eval_samples_per_second": 13.599,
      "eval_steps_per_second": 3.435,
      "step": 50
    },
    {
      "epoch": 0.9026548672566371,
      "grad_norm": 0.8342791199684143,
      "learning_rate": 8.46543681272818e-05,
      "loss": 0.8257,
      "step": 51
    },
    {
      "epoch": 0.9203539823008849,
      "grad_norm": 0.9353609681129456,
      "learning_rate": 8.39400372766471e-05,
      "loss": 1.1307,
      "step": 52
    },
    {
      "epoch": 0.9380530973451328,
      "grad_norm": 1.022132396697998,
      "learning_rate": 8.321262189556409e-05,
      "loss": 1.1414,
      "step": 53
    },
    {
      "epoch": 0.9557522123893806,
      "grad_norm": 1.032721996307373,
      "learning_rate": 8.247240241650918e-05,
      "loss": 1.0249,
      "step": 54
    },
    {
      "epoch": 0.9734513274336283,
      "grad_norm": 1.0308945178985596,
      "learning_rate": 8.171966420818228e-05,
      "loss": 1.0607,
      "step": 55
    },
    {
      "epoch": 0.9911504424778761,
      "grad_norm": 1.2551980018615723,
      "learning_rate": 8.095469746549172e-05,
      "loss": 1.5497,
      "step": 56
    },
    {
      "epoch": 1.0132743362831858,
      "grad_norm": 1.5172427892684937,
      "learning_rate": 8.017779709767858e-05,
      "loss": 1.1828,
      "step": 57
    },
    {
      "epoch": 1.0309734513274336,
      "grad_norm": 0.7486307621002197,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.6634,
      "step": 58
    },
    {
      "epoch": 1.0486725663716814,
      "grad_norm": 0.7508041858673096,
      "learning_rate": 7.858939801138061e-05,
      "loss": 0.8152,
      "step": 59
    },
    {
      "epoch": 1.0663716814159292,
      "grad_norm": 0.7589112520217896,
      "learning_rate": 7.777851165098012e-05,
      "loss": 0.6895,
      "step": 60
    },
    {
      "epoch": 1.084070796460177,
      "grad_norm": 0.7142118215560913,
      "learning_rate": 7.695691614555003e-05,
      "loss": 0.5247,
      "step": 61
    },
    {
      "epoch": 1.1017699115044248,
      "grad_norm": 0.7129442095756531,
      "learning_rate": 7.612492823579745e-05,
      "loss": 0.6682,
      "step": 62
    },
    {
      "epoch": 1.1194690265486726,
      "grad_norm": 0.7287638187408447,
      "learning_rate": 7.528286866889924e-05,
      "loss": 0.7053,
      "step": 63
    },
    {
      "epoch": 1.1371681415929205,
      "grad_norm": 0.9455931186676025,
      "learning_rate": 7.443106207484776e-05,
      "loss": 0.7347,
      "step": 64
    },
    {
      "epoch": 1.154867256637168,
      "grad_norm": 0.760444700717926,
      "learning_rate": 7.35698368412999e-05,
      "loss": 0.7523,
      "step": 65
    },
    {
      "epoch": 1.1725663716814159,
      "grad_norm": 0.7031366229057312,
      "learning_rate": 7.269952498697734e-05,
      "loss": 0.5242,
      "step": 66
    },
    {
      "epoch": 1.1902654867256637,
      "grad_norm": 0.8057155013084412,
      "learning_rate": 7.18204620336671e-05,
      "loss": 0.6423,
      "step": 67
    },
    {
      "epoch": 1.2079646017699115,
      "grad_norm": 0.9145888090133667,
      "learning_rate": 7.09329868768714e-05,
      "loss": 0.8114,
      "step": 68
    },
    {
      "epoch": 1.2256637168141593,
      "grad_norm": 0.957840085029602,
      "learning_rate": 7.003744165515705e-05,
      "loss": 0.7889,
      "step": 69
    },
    {
      "epoch": 1.2433628318584071,
      "grad_norm": 1.1084387302398682,
      "learning_rate": 6.91341716182545e-05,
      "loss": 0.8842,
      "step": 70
    },
    {
      "epoch": 1.261061946902655,
      "grad_norm": 0.8518170118331909,
      "learning_rate": 6.82235249939575e-05,
      "loss": 0.5889,
      "step": 71
    },
    {
      "epoch": 1.2787610619469025,
      "grad_norm": 0.761696994304657,
      "learning_rate": 6.730585285387465e-05,
      "loss": 0.4945,
      "step": 72
    },
    {
      "epoch": 1.2964601769911503,
      "grad_norm": 0.8722968101501465,
      "learning_rate": 6.638150897808468e-05,
      "loss": 0.5406,
      "step": 73
    },
    {
      "epoch": 1.3141592920353982,
      "grad_norm": 0.9867381453514099,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.812,
      "step": 74
    },
    {
      "epoch": 1.331858407079646,
      "grad_norm": 1.058192491531372,
      "learning_rate": 6.451423386272312e-05,
      "loss": 0.597,
      "step": 75
    },
    {
      "epoch": 1.3495575221238938,
      "grad_norm": 1.0891320705413818,
      "learning_rate": 6.357202249325371e-05,
      "loss": 0.6035,
      "step": 76
    },
    {
      "epoch": 1.3672566371681416,
      "grad_norm": 1.0673609972000122,
      "learning_rate": 6.26245788507579e-05,
      "loss": 0.7171,
      "step": 77
    },
    {
      "epoch": 1.3849557522123894,
      "grad_norm": 0.856677770614624,
      "learning_rate": 6.167226819279528e-05,
      "loss": 0.5655,
      "step": 78
    },
    {
      "epoch": 1.4026548672566372,
      "grad_norm": 1.0465450286865234,
      "learning_rate": 6.071545765325254e-05,
      "loss": 0.4403,
      "step": 79
    },
    {
      "epoch": 1.420353982300885,
      "grad_norm": 0.9781056046485901,
      "learning_rate": 5.9754516100806423e-05,
      "loss": 0.7209,
      "step": 80
    },
    {
      "epoch": 1.4380530973451329,
      "grad_norm": 0.9389674067497253,
      "learning_rate": 5.8789813996717736e-05,
      "loss": 0.6367,
      "step": 81
    },
    {
      "epoch": 1.4557522123893805,
      "grad_norm": 0.9528778195381165,
      "learning_rate": 5.782172325201155e-05,
      "loss": 0.7008,
      "step": 82
    },
    {
      "epoch": 1.4734513274336283,
      "grad_norm": 1.0513659715652466,
      "learning_rate": 5.685061708409841e-05,
      "loss": 0.641,
      "step": 83
    },
    {
      "epoch": 1.491150442477876,
      "grad_norm": 1.2438160181045532,
      "learning_rate": 5.587686987289189e-05,
      "loss": 1.0257,
      "step": 84
    },
    {
      "epoch": 1.508849557522124,
      "grad_norm": 0.9585502743721008,
      "learning_rate": 5.490085701647805e-05,
      "loss": 0.6353,
      "step": 85
    },
    {
      "epoch": 1.5265486725663717,
      "grad_norm": 1.3222746849060059,
      "learning_rate": 5.392295478639225e-05,
      "loss": 0.6635,
      "step": 86
    },
    {
      "epoch": 1.5442477876106193,
      "grad_norm": 1.1059401035308838,
      "learning_rate": 5.294354018255945e-05,
      "loss": 0.6694,
      "step": 87
    },
    {
      "epoch": 1.5619469026548671,
      "grad_norm": 0.9538819789886475,
      "learning_rate": 5.196299078795344e-05,
      "loss": 0.536,
      "step": 88
    },
    {
      "epoch": 1.579646017699115,
      "grad_norm": 0.9908929467201233,
      "learning_rate": 5.0981684623031415e-05,
      "loss": 0.6028,
      "step": 89
    },
    {
      "epoch": 1.5973451327433628,
      "grad_norm": 0.9496501684188843,
      "learning_rate": 5e-05,
      "loss": 0.4547,
      "step": 90
    },
    {
      "epoch": 1.6150442477876106,
      "grad_norm": 0.9586553573608398,
      "learning_rate": 4.901831537696859e-05,
      "loss": 0.6183,
      "step": 91
    },
    {
      "epoch": 1.6327433628318584,
      "grad_norm": 0.9949407577514648,
      "learning_rate": 4.8037009212046586e-05,
      "loss": 0.5747,
      "step": 92
    },
    {
      "epoch": 1.6504424778761062,
      "grad_norm": 1.0018373727798462,
      "learning_rate": 4.7056459817440544e-05,
      "loss": 0.5943,
      "step": 93
    },
    {
      "epoch": 1.668141592920354,
      "grad_norm": 1.0088860988616943,
      "learning_rate": 4.607704521360776e-05,
      "loss": 0.6541,
      "step": 94
    },
    {
      "epoch": 1.6858407079646018,
      "grad_norm": 1.0395617485046387,
      "learning_rate": 4.509914298352197e-05,
      "loss": 0.614,
      "step": 95
    },
    {
      "epoch": 1.7035398230088497,
      "grad_norm": 1.2786129713058472,
      "learning_rate": 4.412313012710813e-05,
      "loss": 0.6389,
      "step": 96
    },
    {
      "epoch": 1.7212389380530975,
      "grad_norm": 1.1238988637924194,
      "learning_rate": 4.3149382915901606e-05,
      "loss": 0.8043,
      "step": 97
    },
    {
      "epoch": 1.7389380530973453,
      "grad_norm": 1.5196340084075928,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 0.959,
      "step": 98
    },
    {
      "epoch": 1.7566371681415929,
      "grad_norm": 1.0669888257980347,
      "learning_rate": 4.1210186003282275e-05,
      "loss": 0.6601,
      "step": 99
    },
    {
      "epoch": 1.7743362831858407,
      "grad_norm": 0.8285733461380005,
      "learning_rate": 4.0245483899193595e-05,
      "loss": 0.4862,
      "step": 100
    },
    {
      "epoch": 1.7743362831858407,
      "eval_loss": 0.9206369519233704,
      "eval_runtime": 6.9876,
      "eval_samples_per_second": 13.595,
      "eval_steps_per_second": 3.435,
      "step": 100
    },
    {
      "epoch": 1.7920353982300885,
      "grad_norm": 1.2100951671600342,
      "learning_rate": 3.928454234674747e-05,
      "loss": 0.6735,
      "step": 101
    },
    {
      "epoch": 1.8097345132743363,
      "grad_norm": 1.019980788230896,
      "learning_rate": 3.832773180720475e-05,
      "loss": 0.4795,
      "step": 102
    },
    {
      "epoch": 1.827433628318584,
      "grad_norm": 0.9808886051177979,
      "learning_rate": 3.73754211492421e-05,
      "loss": 0.605,
      "step": 103
    },
    {
      "epoch": 1.8451327433628317,
      "grad_norm": 1.0874282121658325,
      "learning_rate": 3.642797750674629e-05,
      "loss": 0.7412,
      "step": 104
    },
    {
      "epoch": 1.8628318584070795,
      "grad_norm": 1.1271072626113892,
      "learning_rate": 3.5485766137276894e-05,
      "loss": 0.6797,
      "step": 105
    },
    {
      "epoch": 1.8805309734513274,
      "grad_norm": 1.0317531824111938,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.4929,
      "step": 106
    },
    {
      "epoch": 1.8982300884955752,
      "grad_norm": 1.1021236181259155,
      "learning_rate": 3.361849102191533e-05,
      "loss": 0.6134,
      "step": 107
    },
    {
      "epoch": 1.915929203539823,
      "grad_norm": 1.095332145690918,
      "learning_rate": 3.2694147146125345e-05,
      "loss": 0.5781,
      "step": 108
    },
    {
      "epoch": 1.9336283185840708,
      "grad_norm": 1.1151381731033325,
      "learning_rate": 3.177647500604252e-05,
      "loss": 0.6545,
      "step": 109
    },
    {
      "epoch": 1.9513274336283186,
      "grad_norm": 1.2261312007904053,
      "learning_rate": 3.086582838174551e-05,
      "loss": 0.7223,
      "step": 110
    },
    {
      "epoch": 1.9690265486725664,
      "grad_norm": 1.0669209957122803,
      "learning_rate": 2.996255834484296e-05,
      "loss": 0.5042,
      "step": 111
    },
    {
      "epoch": 1.9867256637168142,
      "grad_norm": 1.3711998462677002,
      "learning_rate": 2.9067013123128613e-05,
      "loss": 0.9739,
      "step": 112
    },
    {
      "epoch": 2.0088495575221237,
      "grad_norm": 1.9376322031021118,
      "learning_rate": 2.8179537966332887e-05,
      "loss": 0.7555,
      "step": 113
    },
    {
      "epoch": 2.0265486725663715,
      "grad_norm": 0.781846821308136,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 0.3182,
      "step": 114
    },
    {
      "epoch": 2.0442477876106193,
      "grad_norm": 0.9402307868003845,
      "learning_rate": 2.6430163158700115e-05,
      "loss": 0.4366,
      "step": 115
    },
    {
      "epoch": 2.061946902654867,
      "grad_norm": 0.9695858359336853,
      "learning_rate": 2.556893792515227e-05,
      "loss": 0.2303,
      "step": 116
    },
    {
      "epoch": 2.079646017699115,
      "grad_norm": 1.2189714908599854,
      "learning_rate": 2.471713133110078e-05,
      "loss": 0.4529,
      "step": 117
    },
    {
      "epoch": 2.0973451327433628,
      "grad_norm": 0.7794205546379089,
      "learning_rate": 2.3875071764202563e-05,
      "loss": 0.2843,
      "step": 118
    },
    {
      "epoch": 2.1150442477876106,
      "grad_norm": 0.8974909782409668,
      "learning_rate": 2.3043083854449988e-05,
      "loss": 0.4039,
      "step": 119
    },
    {
      "epoch": 2.1327433628318584,
      "grad_norm": 0.9793106913566589,
      "learning_rate": 2.2221488349019903e-05,
      "loss": 0.3848,
      "step": 120
    },
    {
      "epoch": 2.150442477876106,
      "grad_norm": 0.9427468180656433,
      "learning_rate": 2.1410601988619394e-05,
      "loss": 0.3134,
      "step": 121
    },
    {
      "epoch": 2.168141592920354,
      "grad_norm": 1.0571012496948242,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.3331,
      "step": 122
    },
    {
      "epoch": 2.185840707964602,
      "grad_norm": 1.081581473350525,
      "learning_rate": 1.982220290232143e-05,
      "loss": 0.3525,
      "step": 123
    },
    {
      "epoch": 2.2035398230088497,
      "grad_norm": 1.3777657747268677,
      "learning_rate": 1.9045302534508297e-05,
      "loss": 0.4149,
      "step": 124
    },
    {
      "epoch": 2.2212389380530975,
      "grad_norm": 0.9454907774925232,
      "learning_rate": 1.8280335791817733e-05,
      "loss": 0.2463,
      "step": 125
    },
    {
      "epoch": 2.2389380530973453,
      "grad_norm": 1.1618309020996094,
      "learning_rate": 1.7527597583490822e-05,
      "loss": 0.3783,
      "step": 126
    },
    {
      "epoch": 2.256637168141593,
      "grad_norm": 1.170831561088562,
      "learning_rate": 1.678737810443593e-05,
      "loss": 0.3626,
      "step": 127
    },
    {
      "epoch": 2.274336283185841,
      "grad_norm": 1.1366978883743286,
      "learning_rate": 1.605996272335291e-05,
      "loss": 0.2512,
      "step": 128
    },
    {
      "epoch": 2.2920353982300883,
      "grad_norm": 1.2076972723007202,
      "learning_rate": 1.5345631872718214e-05,
      "loss": 0.3422,
      "step": 129
    },
    {
      "epoch": 2.309734513274336,
      "grad_norm": 1.2742947340011597,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.3533,
      "step": 130
    },
    {
      "epoch": 2.327433628318584,
      "grad_norm": 1.1574723720550537,
      "learning_rate": 1.3957320164854059e-05,
      "loss": 0.3042,
      "step": 131
    },
    {
      "epoch": 2.3451327433628317,
      "grad_norm": 1.438873052597046,
      "learning_rate": 1.3283874528215733e-05,
      "loss": 0.3407,
      "step": 132
    },
    {
      "epoch": 2.3628318584070795,
      "grad_norm": 2.25028133392334,
      "learning_rate": 1.2624583656870154e-05,
      "loss": 0.3244,
      "step": 133
    },
    {
      "epoch": 2.3805309734513274,
      "grad_norm": 1.5336682796478271,
      "learning_rate": 1.1979701719998453e-05,
      "loss": 0.4788,
      "step": 134
    },
    {
      "epoch": 2.398230088495575,
      "grad_norm": 1.2398130893707275,
      "learning_rate": 1.134947733186315e-05,
      "loss": 0.3341,
      "step": 135
    },
    {
      "epoch": 2.415929203539823,
      "grad_norm": 1.1453783512115479,
      "learning_rate": 1.0734153455962765e-05,
      "loss": 0.2069,
      "step": 136
    },
    {
      "epoch": 2.433628318584071,
      "grad_norm": 1.406343936920166,
      "learning_rate": 1.013396731136465e-05,
      "loss": 0.365,
      "step": 137
    },
    {
      "epoch": 2.4513274336283186,
      "grad_norm": 1.3453893661499023,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.3082,
      "step": 138
    },
    {
      "epoch": 2.4690265486725664,
      "grad_norm": 1.331973671913147,
      "learning_rate": 8.97992782372432e-06,
      "loss": 0.3943,
      "step": 139
    },
    {
      "epoch": 2.4867256637168142,
      "grad_norm": 1.3919755220413208,
      "learning_rate": 8.426519384872733e-06,
      "loss": 0.4101,
      "step": 140
    },
    {
      "epoch": 2.504424778761062,
      "grad_norm": 1.3736562728881836,
      "learning_rate": 7.889138314185678e-06,
      "loss": 0.3286,
      "step": 141
    },
    {
      "epoch": 2.52212389380531,
      "grad_norm": 1.3815242052078247,
      "learning_rate": 7.367991782295391e-06,
      "loss": 0.3418,
      "step": 142
    },
    {
      "epoch": 2.5398230088495577,
      "grad_norm": 1.343965768814087,
      "learning_rate": 6.863280701110408e-06,
      "loss": 0.4067,
      "step": 143
    },
    {
      "epoch": 2.557522123893805,
      "grad_norm": 1.2378324270248413,
      "learning_rate": 6.375199646360142e-06,
      "loss": 0.3246,
      "step": 144
    },
    {
      "epoch": 2.5752212389380533,
      "grad_norm": 1.2089776992797852,
      "learning_rate": 5.903936782582253e-06,
      "loss": 0.4119,
      "step": 145
    },
    {
      "epoch": 2.5929203539823007,
      "grad_norm": 0.982158362865448,
      "learning_rate": 5.449673790581611e-06,
      "loss": 0.2026,
      "step": 146
    },
    {
      "epoch": 2.6106194690265485,
      "grad_norm": 1.264430046081543,
      "learning_rate": 5.012585797388936e-06,
      "loss": 0.312,
      "step": 147
    },
    {
      "epoch": 2.6283185840707963,
      "grad_norm": 1.0391494035720825,
      "learning_rate": 4.592841308745932e-06,
      "loss": 0.2517,
      "step": 148
    },
    {
      "epoch": 2.646017699115044,
      "grad_norm": 1.2044720649719238,
      "learning_rate": 4.190602144143207e-06,
      "loss": 0.2401,
      "step": 149
    },
    {
      "epoch": 2.663716814159292,
      "grad_norm": 1.0556868314743042,
      "learning_rate": 3.8060233744356633e-06,
      "loss": 0.2354,
      "step": 150
    },
    {
      "epoch": 2.663716814159292,
      "eval_loss": 1.0171345472335815,
      "eval_runtime": 6.9871,
      "eval_samples_per_second": 13.597,
      "eval_steps_per_second": 3.435,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 170,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 2
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.018651406336e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}