{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.2054442732408834,
  "eval_steps": 10,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001027221366204417,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7873,
      "step": 1
    },
    {
      "epoch": 0.002054442732408834,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7904,
      "step": 2
    },
    {
      "epoch": 0.0030816640986132513,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8348,
      "step": 3
    },
    {
      "epoch": 0.004108885464817668,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8357,
      "step": 4
    },
    {
      "epoch": 0.005136106831022085,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8153,
      "step": 5
    },
    {
      "epoch": 0.0061633281972265025,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.825,
      "step": 6
    },
    {
      "epoch": 0.007190549563430919,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8002,
      "step": 7
    },
    {
      "epoch": 0.008217770929635337,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8413,
      "step": 8
    },
    {
      "epoch": 0.009244992295839754,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8157,
      "step": 9
    },
    {
      "epoch": 0.01027221366204417,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8249,
      "step": 10
    },
    {
      "epoch": 0.01027221366204417,
      "eval_loss": 2.8518412113189697,
      "eval_runtime": 43.212,
      "eval_samples_per_second": 23.142,
      "eval_steps_per_second": 0.972,
      "step": 10
    },
    {
      "epoch": 0.011299435028248588,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7829,
      "step": 11
    },
    {
      "epoch": 0.012326656394453005,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7819,
      "step": 12
    },
    {
      "epoch": 0.01335387776065742,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8355,
      "step": 13
    },
    {
      "epoch": 0.014381099126861838,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8138,
      "step": 14
    },
    {
      "epoch": 0.015408320493066256,
      "grad_norm": 5.162191390991211,
      "learning_rate": 1.7123287671232876e-08,
      "loss": 2.8088,
      "step": 15
    },
    {
      "epoch": 0.016435541859270673,
      "grad_norm": 5.5954084396362305,
      "learning_rate": 3.424657534246575e-08,
      "loss": 2.7883,
      "step": 16
    },
    {
      "epoch": 0.01746276322547509,
      "grad_norm": 5.542337417602539,
      "learning_rate": 5.136986301369863e-08,
      "loss": 2.7625,
      "step": 17
    },
    {
      "epoch": 0.01848998459167951,
      "grad_norm": 5.300518989562988,
      "learning_rate": 6.84931506849315e-08,
      "loss": 2.84,
      "step": 18
    },
    {
      "epoch": 0.019517205957883924,
      "grad_norm": 5.6646881103515625,
      "learning_rate": 8.561643835616439e-08,
      "loss": 2.761,
      "step": 19
    },
    {
      "epoch": 0.02054442732408834,
      "grad_norm": 5.375557899475098,
      "learning_rate": 1.0273972602739726e-07,
      "loss": 2.8215,
      "step": 20
    },
    {
      "epoch": 0.02054442732408834,
      "eval_loss": 2.8507144451141357,
      "eval_runtime": 45.1133,
      "eval_samples_per_second": 22.166,
      "eval_steps_per_second": 0.931,
      "step": 20
    },
    {
      "epoch": 0.02157164869029276,
      "grad_norm": 5.1731977462768555,
      "learning_rate": 1.1986301369863014e-07,
      "loss": 2.8348,
      "step": 21
    },
    {
      "epoch": 0.022598870056497175,
      "grad_norm": 5.370945930480957,
      "learning_rate": 1.36986301369863e-07,
      "loss": 2.822,
      "step": 22
    },
    {
      "epoch": 0.02362609142270159,
      "grad_norm": 5.550389289855957,
      "learning_rate": 1.541095890410959e-07,
      "loss": 2.7904,
      "step": 23
    },
    {
      "epoch": 0.02465331278890601,
      "grad_norm": 5.339347839355469,
      "learning_rate": 1.7123287671232878e-07,
      "loss": 2.8066,
      "step": 24
    },
    {
      "epoch": 0.025680534155110426,
      "grad_norm": 5.2319722175598145,
      "learning_rate": 1.8835616438356165e-07,
      "loss": 2.7976,
      "step": 25
    },
    {
      "epoch": 0.02670775552131484,
      "grad_norm": 5.207149982452393,
      "learning_rate": 2.0547945205479452e-07,
      "loss": 2.8162,
      "step": 26
    },
    {
      "epoch": 0.02773497688751926,
      "grad_norm": 4.400204181671143,
      "learning_rate": 2.226027397260274e-07,
      "loss": 2.7808,
      "step": 27
    },
    {
      "epoch": 0.028762198253723677,
      "grad_norm": 4.21675968170166,
      "learning_rate": 2.397260273972603e-07,
      "loss": 2.8044,
      "step": 28
    },
    {
      "epoch": 0.029789419619928096,
      "grad_norm": 4.406878471374512,
      "learning_rate": 2.568493150684932e-07,
      "loss": 2.8162,
      "step": 29
    },
    {
      "epoch": 0.030816640986132512,
      "grad_norm": 4.024835586547852,
      "learning_rate": 2.73972602739726e-07,
      "loss": 2.7994,
      "step": 30
    },
    {
      "epoch": 0.030816640986132512,
      "eval_loss": 2.8124029636383057,
      "eval_runtime": 43.1772,
      "eval_samples_per_second": 23.16,
      "eval_steps_per_second": 0.973,
      "step": 30
    },
    {
      "epoch": 0.03184386235233693,
      "grad_norm": 3.5378901958465576,
      "learning_rate": 2.910958904109589e-07,
      "loss": 2.7637,
      "step": 31
    },
    {
      "epoch": 0.03287108371854135,
      "grad_norm": 3.6289916038513184,
      "learning_rate": 3.082191780821918e-07,
      "loss": 2.8017,
      "step": 32
    },
    {
      "epoch": 0.03389830508474576,
      "grad_norm": 3.6789698600769043,
      "learning_rate": 3.2534246575342466e-07,
      "loss": 2.7723,
      "step": 33
    },
    {
      "epoch": 0.03492552645095018,
      "grad_norm": 3.491389751434326,
      "learning_rate": 3.4246575342465755e-07,
      "loss": 2.7528,
      "step": 34
    },
    {
      "epoch": 0.035952747817154594,
      "grad_norm": 3.383080005645752,
      "learning_rate": 3.595890410958904e-07,
      "loss": 2.7418,
      "step": 35
    },
    {
      "epoch": 0.03697996918335902,
      "grad_norm": 3.4823594093322754,
      "learning_rate": 3.767123287671233e-07,
      "loss": 2.6662,
      "step": 36
    },
    {
      "epoch": 0.03800719054956343,
      "grad_norm": 3.2146108150482178,
      "learning_rate": 3.938356164383562e-07,
      "loss": 2.7659,
      "step": 37
    },
    {
      "epoch": 0.03903441191576785,
      "grad_norm": 2.9549412727355957,
      "learning_rate": 4.1095890410958903e-07,
      "loss": 2.6838,
      "step": 38
    },
    {
      "epoch": 0.040061633281972264,
      "grad_norm": 3.0744080543518066,
      "learning_rate": 4.2808219178082193e-07,
      "loss": 2.6879,
      "step": 39
    },
    {
      "epoch": 0.04108885464817668,
      "grad_norm": 3.19840669631958,
      "learning_rate": 4.452054794520548e-07,
      "loss": 2.7443,
      "step": 40
    },
    {
      "epoch": 0.04108885464817668,
      "eval_loss": 2.7469444274902344,
      "eval_runtime": 46.4676,
      "eval_samples_per_second": 21.52,
      "eval_steps_per_second": 0.904,
      "step": 40
    },
    {
      "epoch": 0.042116076014381096,
      "grad_norm": 3.178406000137329,
      "learning_rate": 4.6232876712328767e-07,
      "loss": 2.7036,
      "step": 41
    },
    {
      "epoch": 0.04314329738058552,
      "grad_norm": 3.2032673358917236,
      "learning_rate": 4.794520547945206e-07,
      "loss": 2.6465,
      "step": 42
    },
    {
      "epoch": 0.044170518746789934,
      "grad_norm": 3.1525962352752686,
      "learning_rate": 4.965753424657534e-07,
      "loss": 2.6996,
      "step": 43
    },
    {
      "epoch": 0.04519774011299435,
      "grad_norm": 2.907524585723877,
      "learning_rate": 5.136986301369864e-07,
      "loss": 2.6581,
      "step": 44
    },
    {
      "epoch": 0.046224961479198766,
      "grad_norm": 2.5885250568389893,
      "learning_rate": 5.308219178082192e-07,
      "loss": 2.6773,
      "step": 45
    },
    {
      "epoch": 0.04725218284540318,
      "grad_norm": 2.569068431854248,
      "learning_rate": 5.47945205479452e-07,
      "loss": 2.6635,
      "step": 46
    },
    {
      "epoch": 0.048279404211607604,
      "grad_norm": 2.5313518047332764,
      "learning_rate": 5.65068493150685e-07,
      "loss": 2.706,
      "step": 47
    },
    {
      "epoch": 0.04930662557781202,
      "grad_norm": 2.615442991256714,
      "learning_rate": 5.821917808219178e-07,
      "loss": 2.638,
      "step": 48
    },
    {
      "epoch": 0.050333846944016436,
      "grad_norm": 2.7420806884765625,
      "learning_rate": 5.993150684931507e-07,
      "loss": 2.6398,
      "step": 49
    },
    {
      "epoch": 0.05136106831022085,
      "grad_norm": 2.5246975421905518,
      "learning_rate": 6.164383561643836e-07,
      "loss": 2.5916,
      "step": 50
    },
    {
      "epoch": 0.05136106831022085,
      "eval_loss": 2.6723692417144775,
      "eval_runtime": 46.7206,
      "eval_samples_per_second": 21.404,
      "eval_steps_per_second": 0.899,
      "step": 50
    },
    {
      "epoch": 0.05238828967642527,
      "grad_norm": 2.390143632888794,
      "learning_rate": 6.335616438356165e-07,
      "loss": 2.5773,
      "step": 51
    },
    {
      "epoch": 0.05341551104262968,
      "grad_norm": 2.0792484283447266,
      "learning_rate": 6.506849315068493e-07,
      "loss": 2.6249,
      "step": 52
    },
    {
      "epoch": 0.054442732408834106,
      "grad_norm": 1.9995019435882568,
      "learning_rate": 6.678082191780823e-07,
      "loss": 2.5425,
      "step": 53
    },
    {
      "epoch": 0.05546995377503852,
      "grad_norm": 1.985452651977539,
      "learning_rate": 6.849315068493151e-07,
      "loss": 2.5986,
      "step": 54
    },
    {
      "epoch": 0.05649717514124294,
      "grad_norm": 2.10184907913208,
      "learning_rate": 7.020547945205481e-07,
      "loss": 2.5713,
      "step": 55
    },
    {
      "epoch": 0.05752439650744735,
      "grad_norm": 2.0898985862731934,
      "learning_rate": 7.191780821917808e-07,
      "loss": 2.535,
      "step": 56
    },
    {
      "epoch": 0.05855161787365177,
      "grad_norm": 2.0239579677581787,
      "learning_rate": 7.363013698630137e-07,
      "loss": 2.5667,
      "step": 57
    },
    {
      "epoch": 0.05957883923985619,
      "grad_norm": 2.207080125808716,
      "learning_rate": 7.534246575342466e-07,
      "loss": 2.4999,
      "step": 58
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 1.8541918992996216,
      "learning_rate": 7.705479452054795e-07,
      "loss": 2.5582,
      "step": 59
    },
    {
      "epoch": 0.061633281972265024,
      "grad_norm": 1.8530476093292236,
      "learning_rate": 7.876712328767124e-07,
      "loss": 2.5535,
      "step": 60
    },
    {
      "epoch": 0.061633281972265024,
      "eval_loss": 2.607963800430298,
      "eval_runtime": 45.9754,
      "eval_samples_per_second": 21.751,
      "eval_steps_per_second": 0.914,
      "step": 60
    },
    {
      "epoch": 0.06266050333846944,
      "grad_norm": 1.7978090047836304,
      "learning_rate": 8.047945205479453e-07,
      "loss": 2.53,
      "step": 61
    },
    {
      "epoch": 0.06368772470467386,
      "grad_norm": 1.8173762559890747,
      "learning_rate": 8.219178082191781e-07,
      "loss": 2.5144,
      "step": 62
    },
    {
      "epoch": 0.06471494607087827,
      "grad_norm": 2.0847485065460205,
      "learning_rate": 8.39041095890411e-07,
      "loss": 2.5525,
      "step": 63
    },
    {
      "epoch": 0.0657421674370827,
      "grad_norm": 1.8691741228103638,
      "learning_rate": 8.561643835616439e-07,
      "loss": 2.5451,
      "step": 64
    },
    {
      "epoch": 0.0667693888032871,
      "grad_norm": 2.0242726802825928,
      "learning_rate": 8.732876712328768e-07,
      "loss": 2.4145,
      "step": 65
    },
    {
      "epoch": 0.06779661016949153,
      "grad_norm": 1.8268908262252808,
      "learning_rate": 8.904109589041097e-07,
      "loss": 2.5024,
      "step": 66
    },
    {
      "epoch": 0.06882383153569595,
      "grad_norm": 1.8703057765960693,
      "learning_rate": 9.075342465753426e-07,
      "loss": 2.461,
      "step": 67
    },
    {
      "epoch": 0.06985105290190036,
      "grad_norm": 1.8747144937515259,
      "learning_rate": 9.246575342465753e-07,
      "loss": 2.5122,
      "step": 68
    },
    {
      "epoch": 0.07087827426810478,
      "grad_norm": 1.744924783706665,
      "learning_rate": 9.417808219178083e-07,
      "loss": 2.4673,
      "step": 69
    },
    {
      "epoch": 0.07190549563430919,
      "grad_norm": 1.7492460012435913,
      "learning_rate": 9.589041095890411e-07,
      "loss": 2.5598,
      "step": 70
    },
    {
      "epoch": 0.07190549563430919,
      "eval_loss": 2.547417640686035,
      "eval_runtime": 49.7488,
      "eval_samples_per_second": 20.101,
      "eval_steps_per_second": 0.844,
      "step": 70
    },
    {
      "epoch": 0.07293271700051361,
      "grad_norm": 1.9218230247497559,
      "learning_rate": 9.76027397260274e-07,
      "loss": 2.4562,
      "step": 71
    },
    {
      "epoch": 0.07395993836671803,
      "grad_norm": 2.0127363204956055,
      "learning_rate": 9.931506849315068e-07,
      "loss": 2.4141,
      "step": 72
    },
    {
      "epoch": 0.07498715973292244,
      "grad_norm": 1.8700369596481323,
      "learning_rate": 1.0102739726027399e-06,
      "loss": 2.4696,
      "step": 73
    },
    {
      "epoch": 0.07601438109912687,
      "grad_norm": 2.1706016063690186,
      "learning_rate": 1.0273972602739727e-06,
      "loss": 2.4404,
      "step": 74
    },
    {
      "epoch": 0.07704160246533127,
      "grad_norm": 2.470412254333496,
      "learning_rate": 1.0445205479452056e-06,
      "loss": 2.4409,
      "step": 75
    },
    {
      "epoch": 0.0780688238315357,
      "grad_norm": 1.906569242477417,
      "learning_rate": 1.0616438356164384e-06,
      "loss": 2.4313,
      "step": 76
    },
    {
      "epoch": 0.07909604519774012,
      "grad_norm": 2.122936487197876,
      "learning_rate": 1.0787671232876712e-06,
      "loss": 2.4369,
      "step": 77
    },
    {
      "epoch": 0.08012326656394453,
      "grad_norm": 2.079127311706543,
      "learning_rate": 1.095890410958904e-06,
      "loss": 2.449,
      "step": 78
    },
    {
      "epoch": 0.08115048793014895,
      "grad_norm": 1.9173146486282349,
      "learning_rate": 1.1130136986301371e-06,
      "loss": 2.3782,
      "step": 79
    },
    {
      "epoch": 0.08217770929635336,
      "grad_norm": 1.9856903553009033,
      "learning_rate": 1.13013698630137e-06,
      "loss": 2.4215,
      "step": 80
    },
    {
      "epoch": 0.08217770929635336,
      "eval_loss": 2.479673385620117,
      "eval_runtime": 46.2211,
      "eval_samples_per_second": 21.635,
      "eval_steps_per_second": 0.909,
      "step": 80
    },
    {
      "epoch": 0.08320493066255778,
      "grad_norm": 2.3764865398406982,
      "learning_rate": 1.1472602739726028e-06,
      "loss": 2.3744,
      "step": 81
    },
    {
      "epoch": 0.08423215202876219,
      "grad_norm": 1.980668544769287,
      "learning_rate": 1.1643835616438357e-06,
      "loss": 2.3856,
      "step": 82
    },
    {
      "epoch": 0.08525937339496661,
      "grad_norm": 2.2589704990386963,
      "learning_rate": 1.1815068493150685e-06,
      "loss": 2.3853,
      "step": 83
    },
    {
      "epoch": 0.08628659476117104,
      "grad_norm": 2.171780824661255,
      "learning_rate": 1.1986301369863014e-06,
      "loss": 2.3603,
      "step": 84
    },
    {
      "epoch": 0.08731381612737545,
      "grad_norm": 2.25736403465271,
      "learning_rate": 1.2157534246575344e-06,
      "loss": 2.3189,
      "step": 85
    },
    {
      "epoch": 0.08834103749357987,
      "grad_norm": 2.3643763065338135,
      "learning_rate": 1.2328767123287673e-06,
      "loss": 2.3795,
      "step": 86
    },
    {
      "epoch": 0.08936825885978428,
      "grad_norm": 2.384516954421997,
      "learning_rate": 1.25e-06,
      "loss": 2.3257,
      "step": 87
    },
    {
      "epoch": 0.0903954802259887,
      "grad_norm": 2.5072567462921143,
      "learning_rate": 1.267123287671233e-06,
      "loss": 2.3125,
      "step": 88
    },
    {
      "epoch": 0.09142270159219312,
      "grad_norm": 2.2726807594299316,
      "learning_rate": 1.284246575342466e-06,
      "loss": 2.3297,
      "step": 89
    },
    {
      "epoch": 0.09244992295839753,
      "grad_norm": 2.4817421436309814,
      "learning_rate": 1.3013698630136986e-06,
      "loss": 2.2878,
      "step": 90
    },
    {
      "epoch": 0.09244992295839753,
      "eval_loss": 2.4065146446228027,
      "eval_runtime": 47.9372,
      "eval_samples_per_second": 20.861,
      "eval_steps_per_second": 0.876,
      "step": 90
    },
    {
      "epoch": 0.09347714432460195,
      "grad_norm": 2.2219603061676025,
      "learning_rate": 1.3184931506849317e-06,
      "loss": 2.2978,
      "step": 91
    },
    {
      "epoch": 0.09450436569080636,
      "grad_norm": 2.8790745735168457,
      "learning_rate": 1.3356164383561645e-06,
      "loss": 2.313,
      "step": 92
    },
    {
      "epoch": 0.09553158705701079,
      "grad_norm": 2.589200496673584,
      "learning_rate": 1.3527397260273976e-06,
      "loss": 2.2937,
      "step": 93
    },
    {
      "epoch": 0.09655880842321521,
      "grad_norm": 2.760016679763794,
      "learning_rate": 1.3698630136986302e-06,
      "loss": 2.277,
      "step": 94
    },
    {
      "epoch": 0.09758602978941962,
      "grad_norm": 2.2641494274139404,
      "learning_rate": 1.386986301369863e-06,
      "loss": 2.3336,
      "step": 95
    },
    {
      "epoch": 0.09861325115562404,
      "grad_norm": 2.2786386013031006,
      "learning_rate": 1.4041095890410961e-06,
      "loss": 2.233,
      "step": 96
    },
    {
      "epoch": 0.09964047252182845,
      "grad_norm": 2.5590052604675293,
      "learning_rate": 1.421232876712329e-06,
      "loss": 2.2851,
      "step": 97
    },
    {
      "epoch": 0.10066769388803287,
      "grad_norm": 2.5609471797943115,
      "learning_rate": 1.4383561643835616e-06,
      "loss": 2.1848,
      "step": 98
    },
    {
      "epoch": 0.1016949152542373,
      "grad_norm": 2.442521810531616,
      "learning_rate": 1.4554794520547946e-06,
      "loss": 2.2073,
      "step": 99
    },
    {
      "epoch": 0.1027221366204417,
      "grad_norm": 2.3735859394073486,
      "learning_rate": 1.4726027397260275e-06,
      "loss": 2.2353,
      "step": 100
    },
    {
      "epoch": 0.1027221366204417,
      "eval_loss": 2.3457911014556885,
      "eval_runtime": 51.6469,
      "eval_samples_per_second": 19.362,
      "eval_steps_per_second": 0.813,
      "step": 100
    },
    {
      "epoch": 0.10374935798664613,
      "grad_norm": 2.175354242324829,
      "learning_rate": 1.4897260273972605e-06,
      "loss": 2.2259,
      "step": 101
    },
    {
      "epoch": 0.10477657935285054,
      "grad_norm": 2.217677116394043,
      "learning_rate": 1.5068493150684932e-06,
      "loss": 2.2082,
      "step": 102
    },
    {
      "epoch": 0.10580380071905496,
      "grad_norm": 2.4618067741394043,
      "learning_rate": 1.5239726027397262e-06,
      "loss": 2.1649,
      "step": 103
    },
    {
      "epoch": 0.10683102208525937,
      "grad_norm": 2.232060432434082,
      "learning_rate": 1.541095890410959e-06,
      "loss": 2.1962,
      "step": 104
    },
    {
      "epoch": 0.10785824345146379,
      "grad_norm": 2.9140872955322266,
      "learning_rate": 1.5582191780821921e-06,
      "loss": 2.2446,
      "step": 105
    },
    {
      "epoch": 0.10888546481766821,
      "grad_norm": 2.232158660888672,
      "learning_rate": 1.5753424657534248e-06,
      "loss": 2.2179,
      "step": 106
    },
    {
      "epoch": 0.10991268618387262,
      "grad_norm": 2.804938793182373,
      "learning_rate": 1.5924657534246576e-06,
      "loss": 2.2449,
      "step": 107
    },
    {
      "epoch": 0.11093990755007704,
      "grad_norm": 2.3690710067749023,
      "learning_rate": 1.6095890410958907e-06,
      "loss": 2.225,
      "step": 108
    },
    {
      "epoch": 0.11196712891628145,
      "grad_norm": 2.7717416286468506,
      "learning_rate": 1.6267123287671235e-06,
      "loss": 2.0922,
      "step": 109
    },
    {
      "epoch": 0.11299435028248588,
      "grad_norm": 3.053602933883667,
      "learning_rate": 1.6438356164383561e-06,
      "loss": 2.1702,
      "step": 110
    },
    {
      "epoch": 0.11299435028248588,
      "eval_loss": 2.298161029815674,
      "eval_runtime": 44.0776,
      "eval_samples_per_second": 22.687,
      "eval_steps_per_second": 0.953,
      "step": 110
    },
    {
      "epoch": 0.1140215716486903,
      "grad_norm": 2.8673532009124756,
      "learning_rate": 1.6609589041095892e-06,
      "loss": 2.0938,
      "step": 111
    },
    {
      "epoch": 0.1150487930148947,
      "grad_norm": 2.57185959815979,
      "learning_rate": 1.678082191780822e-06,
      "loss": 2.1892,
      "step": 112
    },
    {
      "epoch": 0.11607601438109913,
      "grad_norm": 2.5278828144073486,
      "learning_rate": 1.695205479452055e-06,
      "loss": 2.1251,
      "step": 113
    },
    {
      "epoch": 0.11710323574730354,
      "grad_norm": 2.8581748008728027,
      "learning_rate": 1.7123287671232877e-06,
      "loss": 2.1699,
      "step": 114
    },
    {
      "epoch": 0.11813045711350796,
      "grad_norm": 2.924959897994995,
      "learning_rate": 1.7294520547945206e-06,
      "loss": 2.1005,
      "step": 115
    },
    {
      "epoch": 0.11915767847971238,
      "grad_norm": 2.477034091949463,
      "learning_rate": 1.7465753424657536e-06,
      "loss": 2.2016,
      "step": 116
    },
    {
      "epoch": 0.12018489984591679,
      "grad_norm": 2.8719305992126465,
      "learning_rate": 1.7636986301369865e-06,
      "loss": 2.1588,
      "step": 117
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 2.4403953552246094,
      "learning_rate": 1.7808219178082193e-06,
      "loss": 2.1445,
      "step": 118
    },
    {
      "epoch": 0.12223934257832562,
      "grad_norm": 2.7994632720947266,
      "learning_rate": 1.7979452054794521e-06,
      "loss": 2.1369,
      "step": 119
    },
    {
      "epoch": 0.12326656394453005,
      "grad_norm": 2.163196563720703,
      "learning_rate": 1.8150684931506852e-06,
      "loss": 2.2195,
      "step": 120
    },
    {
      "epoch": 0.12326656394453005,
      "eval_loss": 2.2648775577545166,
      "eval_runtime": 49.47,
      "eval_samples_per_second": 20.214,
      "eval_steps_per_second": 0.849,
      "step": 120
    },
    {
      "epoch": 0.12429378531073447,
      "grad_norm": 2.6743483543395996,
      "learning_rate": 1.832191780821918e-06,
      "loss": 2.1521,
      "step": 121
    },
    {
      "epoch": 0.12532100667693888,
      "grad_norm": 2.54795503616333,
      "learning_rate": 1.8493150684931507e-06,
      "loss": 2.1158,
      "step": 122
    },
    {
      "epoch": 0.1263482280431433,
      "grad_norm": 2.9093267917633057,
      "learning_rate": 1.8664383561643837e-06,
      "loss": 2.0707,
      "step": 123
    },
    {
      "epoch": 0.12737544940934772,
      "grad_norm": 2.416940212249756,
      "learning_rate": 1.8835616438356166e-06,
      "loss": 2.0901,
      "step": 124
    },
    {
      "epoch": 0.12840267077555212,
      "grad_norm": 2.8333663940429688,
      "learning_rate": 1.9006849315068496e-06,
      "loss": 2.0545,
      "step": 125
    },
    {
      "epoch": 0.12942989214175654,
      "grad_norm": 2.483051061630249,
      "learning_rate": 1.9178082191780823e-06,
      "loss": 2.1274,
      "step": 126
    },
    {
      "epoch": 0.13045711350796096,
      "grad_norm": 2.3486859798431396,
      "learning_rate": 1.9349315068493153e-06,
      "loss": 2.0478,
      "step": 127
    },
    {
      "epoch": 0.1314843348741654,
      "grad_norm": 3.1512887477874756,
      "learning_rate": 1.952054794520548e-06,
      "loss": 2.1041,
      "step": 128
    },
    {
      "epoch": 0.1325115562403698,
      "grad_norm": 2.902873992919922,
      "learning_rate": 1.969178082191781e-06,
      "loss": 2.1332,
      "step": 129
    },
    {
      "epoch": 0.1335387776065742,
      "grad_norm": 2.779132127761841,
      "learning_rate": 1.9863013698630136e-06,
      "loss": 2.1297,
      "step": 130
    },
    {
      "epoch": 0.1335387776065742,
      "eval_loss": 2.2391879558563232,
      "eval_runtime": 55.3018,
      "eval_samples_per_second": 18.083,
      "eval_steps_per_second": 0.759,
      "step": 130
    },
    {
      "epoch": 0.13456599897277863,
      "grad_norm": 2.703019618988037,
      "learning_rate": 2.0034246575342467e-06,
      "loss": 2.0506,
      "step": 131
    },
    {
      "epoch": 0.13559322033898305,
      "grad_norm": 3.0187575817108154,
      "learning_rate": 2.0205479452054797e-06,
      "loss": 2.1291,
      "step": 132
    },
    {
      "epoch": 0.13662044170518747,
      "grad_norm": 2.7629971504211426,
      "learning_rate": 2.037671232876713e-06,
      "loss": 2.1003,
      "step": 133
    },
    {
      "epoch": 0.1376476630713919,
      "grad_norm": 3.175776720046997,
      "learning_rate": 2.0547945205479454e-06,
      "loss": 2.0429,
      "step": 134
    },
    {
      "epoch": 0.1386748844375963,
      "grad_norm": 3.0342514514923096,
      "learning_rate": 2.071917808219178e-06,
      "loss": 2.0858,
      "step": 135
    },
    {
      "epoch": 0.1397021058038007,
      "grad_norm": 3.0861928462982178,
      "learning_rate": 2.089041095890411e-06,
      "loss": 2.0964,
      "step": 136
    },
    {
      "epoch": 0.14072932717000514,
      "grad_norm": 3.151945114135742,
      "learning_rate": 2.106164383561644e-06,
      "loss": 2.0194,
      "step": 137
    },
    {
      "epoch": 0.14175654853620956,
      "grad_norm": 2.9069676399230957,
      "learning_rate": 2.123287671232877e-06,
      "loss": 2.1131,
      "step": 138
    },
    {
      "epoch": 0.14278376990241398,
      "grad_norm": 3.2285282611846924,
      "learning_rate": 2.14041095890411e-06,
      "loss": 2.0643,
      "step": 139
    },
    {
      "epoch": 0.14381099126861838,
      "grad_norm": 2.500481367111206,
      "learning_rate": 2.1575342465753425e-06,
      "loss": 2.0625,
      "step": 140
    },
    {
      "epoch": 0.14381099126861838,
      "eval_loss": 2.211498975753784,
      "eval_runtime": 55.1046,
      "eval_samples_per_second": 18.147,
      "eval_steps_per_second": 0.762,
      "step": 140
    },
    {
      "epoch": 0.1448382126348228,
      "grad_norm": 4.004592418670654,
      "learning_rate": 2.1746575342465755e-06,
      "loss": 2.0644,
      "step": 141
    },
    {
      "epoch": 0.14586543400102722,
      "grad_norm": 2.9258391857147217,
      "learning_rate": 2.191780821917808e-06,
      "loss": 2.0174,
      "step": 142
    },
    {
      "epoch": 0.14689265536723164,
      "grad_norm": 3.0073509216308594,
      "learning_rate": 2.2089041095890412e-06,
      "loss": 2.1417,
      "step": 143
    },
    {
      "epoch": 0.14791987673343607,
      "grad_norm": 3.040708065032959,
      "learning_rate": 2.2260273972602743e-06,
      "loss": 2.0357,
      "step": 144
    },
    {
      "epoch": 0.14894709809964046,
      "grad_norm": 2.9062869548797607,
      "learning_rate": 2.243150684931507e-06,
      "loss": 2.0122,
      "step": 145
    },
    {
      "epoch": 0.14997431946584489,
      "grad_norm": 3.28969669342041,
      "learning_rate": 2.26027397260274e-06,
      "loss": 2.0255,
      "step": 146
    },
    {
      "epoch": 0.1510015408320493,
      "grad_norm": 3.608752727508545,
      "learning_rate": 2.2773972602739726e-06,
      "loss": 2.0184,
      "step": 147
    },
    {
      "epoch": 0.15202876219825373,
      "grad_norm": 2.900341033935547,
      "learning_rate": 2.2945205479452057e-06,
      "loss": 2.0532,
      "step": 148
    },
    {
      "epoch": 0.15305598356445815,
      "grad_norm": 2.881843328475952,
      "learning_rate": 2.3116438356164387e-06,
      "loss": 2.1336,
      "step": 149
    },
    {
      "epoch": 0.15408320493066255,
      "grad_norm": 3.4043259620666504,
      "learning_rate": 2.3287671232876713e-06,
      "loss": 2.0536,
      "step": 150
    },
    {
      "epoch": 0.15408320493066255,
      "eval_loss": 2.192880153656006,
      "eval_runtime": 53.7838,
      "eval_samples_per_second": 18.593,
      "eval_steps_per_second": 0.781,
      "step": 150
    },
    {
      "epoch": 0.15511042629686697,
      "grad_norm": 2.693107843399048,
      "learning_rate": 2.3458904109589044e-06,
      "loss": 1.9503,
      "step": 151
    },
    {
      "epoch": 0.1561376476630714,
      "grad_norm": 2.885260581970215,
      "learning_rate": 2.363013698630137e-06,
      "loss": 2.0081,
      "step": 152
    },
    {
      "epoch": 0.15716486902927582,
      "grad_norm": 3.4459924697875977,
      "learning_rate": 2.38013698630137e-06,
      "loss": 2.0649,
      "step": 153
    },
    {
      "epoch": 0.15819209039548024,
      "grad_norm": 2.8447093963623047,
      "learning_rate": 2.3972602739726027e-06,
      "loss": 2.0476,
      "step": 154
    },
    {
      "epoch": 0.15921931176168463,
      "grad_norm": 2.755176305770874,
      "learning_rate": 2.4143835616438358e-06,
      "loss": 2.0006,
      "step": 155
    },
    {
      "epoch": 0.16024653312788906,
      "grad_norm": 2.8677561283111572,
      "learning_rate": 2.431506849315069e-06,
      "loss": 1.9663,
      "step": 156
    },
    {
      "epoch": 0.16127375449409348,
      "grad_norm": 2.783351182937622,
      "learning_rate": 2.4486301369863015e-06,
      "loss": 2.0301,
      "step": 157
    },
    {
      "epoch": 0.1623009758602979,
      "grad_norm": 2.7545998096466064,
      "learning_rate": 2.4657534246575345e-06,
      "loss": 2.0438,
      "step": 158
    },
    {
      "epoch": 0.1633281972265023,
      "grad_norm": 2.8140594959259033,
      "learning_rate": 2.482876712328767e-06,
      "loss": 2.0055,
      "step": 159
    },
    {
      "epoch": 0.16435541859270672,
      "grad_norm": 2.6072335243225098,
      "learning_rate": 2.5e-06,
      "loss": 2.0262,
      "step": 160
    },
    {
      "epoch": 0.16435541859270672,
      "eval_loss": 2.179842710494995,
      "eval_runtime": 52.6159,
      "eval_samples_per_second": 19.006,
      "eval_steps_per_second": 0.798,
      "step": 160
    },
    {
      "epoch": 0.16538263995891114,
      "grad_norm": 2.830212354660034,
      "learning_rate": 2.5171232876712333e-06,
      "loss": 2.0154,
      "step": 161
    },
    {
      "epoch": 0.16640986132511557,
      "grad_norm": 2.677140712738037,
      "learning_rate": 2.534246575342466e-06,
      "loss": 1.9933,
      "step": 162
    },
    {
      "epoch": 0.16743708269132,
      "grad_norm": 2.837759017944336,
      "learning_rate": 2.551369863013699e-06,
      "loss": 1.9602,
      "step": 163
    },
    {
      "epoch": 0.16846430405752438,
      "grad_norm": 3.9556589126586914,
      "learning_rate": 2.568493150684932e-06,
      "loss": 2.0121,
      "step": 164
    },
    {
      "epoch": 0.1694915254237288,
      "grad_norm": 2.8715925216674805,
      "learning_rate": 2.585616438356164e-06,
      "loss": 2.0219,
      "step": 165
    },
    {
      "epoch": 0.17051874678993323,
      "grad_norm": 3.5073623657226562,
      "learning_rate": 2.6027397260273973e-06,
      "loss": 2.0202,
      "step": 166
    },
    {
      "epoch": 0.17154596815613765,
      "grad_norm": 2.751962900161743,
      "learning_rate": 2.6198630136986303e-06,
      "loss": 1.9528,
      "step": 167
    },
    {
      "epoch": 0.17257318952234207,
      "grad_norm": 3.7753474712371826,
      "learning_rate": 2.6369863013698634e-06,
      "loss": 2.0208,
      "step": 168
    },
    {
      "epoch": 0.17360041088854647,
      "grad_norm": 2.642289876937866,
      "learning_rate": 2.654109589041096e-06,
      "loss": 2.0386,
      "step": 169
    },
    {
      "epoch": 0.1746276322547509,
      "grad_norm": 2.82173490524292,
      "learning_rate": 2.671232876712329e-06,
      "loss": 2.0355,
      "step": 170
    },
    {
      "epoch": 0.1746276322547509,
      "eval_loss": 2.1699249744415283,
      "eval_runtime": 50.0341,
      "eval_samples_per_second": 19.986,
      "eval_steps_per_second": 0.839,
      "step": 170
    },
    {
      "epoch": 0.17565485362095531,
      "grad_norm": 3.342280149459839,
      "learning_rate": 2.688356164383562e-06,
      "loss": 1.9672,
      "step": 171
    },
    {
      "epoch": 0.17668207498715974,
      "grad_norm": 2.6548173427581787,
      "learning_rate": 2.705479452054795e-06,
      "loss": 1.9669,
      "step": 172
    },
    {
      "epoch": 0.17770929635336416,
      "grad_norm": 2.6548173427581787,
      "learning_rate": 2.705479452054795e-06,
      "loss": 1.961,
      "step": 173
    },
    {
      "epoch": 0.17873651771956856,
      "grad_norm": 3.4306116104125977,
      "learning_rate": 2.7226027397260274e-06,
      "loss": 1.9173,
      "step": 174
    },
    {
      "epoch": 0.17976373908577298,
      "grad_norm": 2.7991764545440674,
      "learning_rate": 2.7397260273972604e-06,
      "loss": 2.0339,
      "step": 175
    },
    {
      "epoch": 0.1807909604519774,
      "grad_norm": 3.6040217876434326,
      "learning_rate": 2.7568493150684935e-06,
      "loss": 2.0218,
      "step": 176
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 3.0253515243530273,
      "learning_rate": 2.773972602739726e-06,
      "loss": 1.9245,
      "step": 177
    },
    {
      "epoch": 0.18284540318438625,
      "grad_norm": 3.405141830444336,
      "learning_rate": 2.791095890410959e-06,
      "loss": 2.0136,
      "step": 178
    },
    {
      "epoch": 0.18387262455059064,
      "grad_norm": 2.7518203258514404,
      "learning_rate": 2.8082191780821922e-06,
      "loss": 1.8925,
      "step": 179
    },
    {
      "epoch": 0.18489984591679506,
      "grad_norm": 3.1903367042541504,
      "learning_rate": 2.8253424657534253e-06,
      "loss": 2.0523,
      "step": 180
    },
    {
      "epoch": 0.18489984591679506,
      "eval_loss": 2.158029794692993,
      "eval_runtime": 48.6162,
      "eval_samples_per_second": 20.569,
      "eval_steps_per_second": 0.864,
      "step": 180
    },
    {
      "epoch": 0.1859270672829995,
      "grad_norm": 3.0331413745880127,
      "learning_rate": 2.842465753424658e-06,
      "loss": 2.0167,
      "step": 181
    },
    {
      "epoch": 0.1869542886492039,
      "grad_norm": 2.919433116912842,
      "learning_rate": 2.8595890410958905e-06,
      "loss": 2.0197,
      "step": 182
    },
    {
      "epoch": 0.18798151001540833,
      "grad_norm": 2.9013049602508545,
      "learning_rate": 2.876712328767123e-06,
      "loss": 2.0301,
      "step": 183
    },
    {
      "epoch": 0.18900873138161273,
      "grad_norm": 2.738800525665283,
      "learning_rate": 2.8938356164383562e-06,
      "loss": 1.894,
      "step": 184
    },
    {
      "epoch": 0.19003595274781715,
      "grad_norm": 2.670806407928467,
      "learning_rate": 2.9109589041095893e-06,
      "loss": 1.9127,
      "step": 185
    },
    {
      "epoch": 0.19106317411402157,
      "grad_norm": 3.0435004234313965,
      "learning_rate": 2.9280821917808223e-06,
      "loss": 1.8996,
      "step": 186
    },
    {
      "epoch": 0.192090395480226,
      "grad_norm": 3.2139949798583984,
      "learning_rate": 2.945205479452055e-06,
      "loss": 1.9255,
      "step": 187
    },
    {
      "epoch": 0.19311761684643042,
      "grad_norm": 3.252965211868286,
      "learning_rate": 2.962328767123288e-06,
      "loss": 1.9809,
      "step": 188
    },
    {
      "epoch": 0.1941448382126348,
      "grad_norm": 2.9325191974639893,
      "learning_rate": 2.979452054794521e-06,
      "loss": 1.9691,
      "step": 189
    },
    {
      "epoch": 0.19517205957883924,
      "grad_norm": 3.5879862308502197,
      "learning_rate": 2.9965753424657533e-06,
      "loss": 1.9205,
      "step": 190
    },
    {
      "epoch": 0.19517205957883924,
      "eval_loss": 2.13791561126709,
      "eval_runtime": 44.8545,
      "eval_samples_per_second": 22.294,
      "eval_steps_per_second": 0.936,
      "step": 190
    },
    {
      "epoch": 0.19619928094504366,
      "grad_norm": 3.0025253295898438,
      "learning_rate": 3.0136986301369864e-06,
      "loss": 1.9717,
      "step": 191
    },
    {
      "epoch": 0.19722650231124808,
      "grad_norm": 3.1785972118377686,
      "learning_rate": 3.0308219178082194e-06,
      "loss": 1.9535,
      "step": 192
    },
    {
      "epoch": 0.1982537236774525,
      "grad_norm": 3.121065378189087,
      "learning_rate": 3.0479452054794525e-06,
      "loss": 1.9026,
      "step": 193
    },
    {
      "epoch": 0.1992809450436569,
      "grad_norm": 2.838779926300049,
      "learning_rate": 3.065068493150685e-06,
      "loss": 1.941,
      "step": 194
    },
    {
      "epoch": 0.20030816640986132,
      "grad_norm": 2.8916969299316406,
      "learning_rate": 3.082191780821918e-06,
      "loss": 1.8983,
      "step": 195
    },
    {
      "epoch": 0.20133538777606574,
      "grad_norm": 2.9195168018341064,
      "learning_rate": 3.099315068493151e-06,
      "loss": 1.918,
      "step": 196
    },
    {
      "epoch": 0.20236260914227017,
      "grad_norm": 3.235989809036255,
      "learning_rate": 3.1164383561643843e-06,
      "loss": 1.9111,
      "step": 197
    },
    {
      "epoch": 0.2033898305084746,
      "grad_norm": 3.0316455364227295,
      "learning_rate": 3.1335616438356165e-06,
      "loss": 1.8839,
      "step": 198
    },
    {
      "epoch": 0.20441705187467898,
      "grad_norm": 2.963610887527466,
      "learning_rate": 3.1506849315068495e-06,
      "loss": 2.0303,
      "step": 199
    },
    {
      "epoch": 0.2054442732408834,
      "grad_norm": 2.944054365158081,
      "learning_rate": 3.167808219178082e-06,
      "loss": 1.8822,
      "step": 200
    },
    {
      "epoch": 0.2054442732408834,
      "eval_loss": 2.1433050632476807,
      "eval_runtime": 45.8205,
      "eval_samples_per_second": 21.824,
      "eval_steps_per_second": 0.917,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 2919,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.23461881872384e+16,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}