|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.2054442732408834, |
|
"eval_steps": 10, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.001027221366204417, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7522, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002054442732408834, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7541, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0030816640986132513, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8031, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.004108885464817668, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8062, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.005136106831022085, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7733, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0061633281972265025, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7949, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.007190549563430919, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7657, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.008217770929635337, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.803, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.009244992295839754, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7793, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01027221366204417, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7935, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01027221366204417, |
|
"eval_loss": 2.8114004135131836, |
|
"eval_runtime": 37.8463, |
|
"eval_samples_per_second": 26.423, |
|
"eval_steps_per_second": 1.11, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.011299435028248588, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7546, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.012326656394453005, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7423, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01335387776065742, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8057, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.014381099126861838, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7663, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.015408320493066256, |
|
"grad_norm": 4.694424152374268, |
|
"learning_rate": 1.7123287671232876e-08, |
|
"loss": 2.7715, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.016435541859270673, |
|
"grad_norm": 4.905329704284668, |
|
"learning_rate": 3.424657534246575e-08, |
|
"loss": 2.751, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01746276322547509, |
|
"grad_norm": 4.705279350280762, |
|
"learning_rate": 5.136986301369863e-08, |
|
"loss": 2.7186, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01848998459167951, |
|
"grad_norm": 5.154810905456543, |
|
"learning_rate": 6.84931506849315e-08, |
|
"loss": 2.8073, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.019517205957883924, |
|
"grad_norm": 5.070403099060059, |
|
"learning_rate": 8.561643835616439e-08, |
|
"loss": 2.7237, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.02054442732408834, |
|
"grad_norm": 4.901423454284668, |
|
"learning_rate": 1.0273972602739726e-07, |
|
"loss": 2.7882, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02054442732408834, |
|
"eval_loss": 2.810307502746582, |
|
"eval_runtime": 37.65, |
|
"eval_samples_per_second": 26.56, |
|
"eval_steps_per_second": 1.116, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02157164869029276, |
|
"grad_norm": 4.914156436920166, |
|
"learning_rate": 1.1986301369863014e-07, |
|
"loss": 2.8005, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.022598870056497175, |
|
"grad_norm": 4.937129974365234, |
|
"learning_rate": 1.36986301369863e-07, |
|
"loss": 2.7902, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02362609142270159, |
|
"grad_norm": 5.0690202713012695, |
|
"learning_rate": 1.541095890410959e-07, |
|
"loss": 2.7474, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02465331278890601, |
|
"grad_norm": 4.980844974517822, |
|
"learning_rate": 1.7123287671232878e-07, |
|
"loss": 2.7695, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.025680534155110426, |
|
"grad_norm": 4.8092451095581055, |
|
"learning_rate": 1.8835616438356165e-07, |
|
"loss": 2.7611, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02670775552131484, |
|
"grad_norm": 4.765528202056885, |
|
"learning_rate": 2.0547945205479452e-07, |
|
"loss": 2.7823, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02773497688751926, |
|
"grad_norm": 3.9759938716888428, |
|
"learning_rate": 2.226027397260274e-07, |
|
"loss": 2.7445, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.028762198253723677, |
|
"grad_norm": 4.023419380187988, |
|
"learning_rate": 2.397260273972603e-07, |
|
"loss": 2.7679, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.029789419619928096, |
|
"grad_norm": 4.161378383636475, |
|
"learning_rate": 2.568493150684932e-07, |
|
"loss": 2.7704, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.030816640986132512, |
|
"grad_norm": 3.8363842964172363, |
|
"learning_rate": 2.73972602739726e-07, |
|
"loss": 2.7598, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.030816640986132512, |
|
"eval_loss": 2.772540330886841, |
|
"eval_runtime": 37.2624, |
|
"eval_samples_per_second": 26.837, |
|
"eval_steps_per_second": 1.127, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03184386235233693, |
|
"grad_norm": 3.718747615814209, |
|
"learning_rate": 2.910958904109589e-07, |
|
"loss": 2.7241, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.03287108371854135, |
|
"grad_norm": 3.555985927581787, |
|
"learning_rate": 3.082191780821918e-07, |
|
"loss": 2.7751, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.03389830508474576, |
|
"grad_norm": 3.6041972637176514, |
|
"learning_rate": 3.2534246575342466e-07, |
|
"loss": 2.7322, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03492552645095018, |
|
"grad_norm": 3.4408438205718994, |
|
"learning_rate": 3.4246575342465755e-07, |
|
"loss": 2.727, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.035952747817154594, |
|
"grad_norm": 3.4169280529022217, |
|
"learning_rate": 3.595890410958904e-07, |
|
"loss": 2.7062, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03697996918335902, |
|
"grad_norm": 3.3882925510406494, |
|
"learning_rate": 3.767123287671233e-07, |
|
"loss": 2.6271, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03800719054956343, |
|
"grad_norm": 3.0491855144500732, |
|
"learning_rate": 3.938356164383562e-07, |
|
"loss": 2.7309, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03903441191576785, |
|
"grad_norm": 2.8444902896881104, |
|
"learning_rate": 4.1095890410958903e-07, |
|
"loss": 2.647, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.040061633281972264, |
|
"grad_norm": 2.7438602447509766, |
|
"learning_rate": 4.2808219178082193e-07, |
|
"loss": 2.6478, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.04108885464817668, |
|
"grad_norm": 2.8886020183563232, |
|
"learning_rate": 4.452054794520548e-07, |
|
"loss": 2.7065, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04108885464817668, |
|
"eval_loss": 2.7021424770355225, |
|
"eval_runtime": 37.3227, |
|
"eval_samples_per_second": 26.793, |
|
"eval_steps_per_second": 1.125, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.042116076014381096, |
|
"grad_norm": 2.8825268745422363, |
|
"learning_rate": 4.6232876712328767e-07, |
|
"loss": 2.6666, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.04314329738058552, |
|
"grad_norm": 2.8848748207092285, |
|
"learning_rate": 4.794520547945206e-07, |
|
"loss": 2.597, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.044170518746789934, |
|
"grad_norm": 2.9176979064941406, |
|
"learning_rate": 4.965753424657534e-07, |
|
"loss": 2.6638, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.04519774011299435, |
|
"grad_norm": 2.6903059482574463, |
|
"learning_rate": 5.136986301369864e-07, |
|
"loss": 2.6202, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.046224961479198766, |
|
"grad_norm": 2.5349855422973633, |
|
"learning_rate": 5.308219178082192e-07, |
|
"loss": 2.6466, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04725218284540318, |
|
"grad_norm": 2.405383825302124, |
|
"learning_rate": 5.47945205479452e-07, |
|
"loss": 2.6291, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.048279404211607604, |
|
"grad_norm": 2.526106595993042, |
|
"learning_rate": 5.65068493150685e-07, |
|
"loss": 2.6718, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04930662557781202, |
|
"grad_norm": 2.3981597423553467, |
|
"learning_rate": 5.821917808219178e-07, |
|
"loss": 2.592, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.050333846944016436, |
|
"grad_norm": 2.61842679977417, |
|
"learning_rate": 5.993150684931507e-07, |
|
"loss": 2.6039, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.05136106831022085, |
|
"grad_norm": 2.3001420497894287, |
|
"learning_rate": 6.164383561643836e-07, |
|
"loss": 2.5481, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05136106831022085, |
|
"eval_loss": 2.6244444847106934, |
|
"eval_runtime": 37.3248, |
|
"eval_samples_per_second": 26.792, |
|
"eval_steps_per_second": 1.125, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05238828967642527, |
|
"grad_norm": 2.12744140625, |
|
"learning_rate": 6.335616438356165e-07, |
|
"loss": 2.5336, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.05341551104262968, |
|
"grad_norm": 1.974413275718689, |
|
"learning_rate": 6.506849315068493e-07, |
|
"loss": 2.5882, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.054442732408834106, |
|
"grad_norm": 2.098527669906616, |
|
"learning_rate": 6.678082191780823e-07, |
|
"loss": 2.5073, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.05546995377503852, |
|
"grad_norm": 2.056288719177246, |
|
"learning_rate": 6.849315068493151e-07, |
|
"loss": 2.5612, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.05649717514124294, |
|
"grad_norm": 2.2291629314422607, |
|
"learning_rate": 7.020547945205481e-07, |
|
"loss": 2.527, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05752439650744735, |
|
"grad_norm": 2.140164375305176, |
|
"learning_rate": 7.191780821917808e-07, |
|
"loss": 2.4972, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.05855161787365177, |
|
"grad_norm": 1.8682485818862915, |
|
"learning_rate": 7.363013698630137e-07, |
|
"loss": 2.5231, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.05957883923985619, |
|
"grad_norm": 1.9373308420181274, |
|
"learning_rate": 7.534246575342466e-07, |
|
"loss": 2.4509, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.06060606060606061, |
|
"grad_norm": 1.8535712957382202, |
|
"learning_rate": 7.705479452054795e-07, |
|
"loss": 2.5164, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.061633281972265024, |
|
"grad_norm": 1.7497329711914062, |
|
"learning_rate": 7.876712328767124e-07, |
|
"loss": 2.5177, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.061633281972265024, |
|
"eval_loss": 2.5608646869659424, |
|
"eval_runtime": 37.3426, |
|
"eval_samples_per_second": 26.779, |
|
"eval_steps_per_second": 1.125, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06266050333846944, |
|
"grad_norm": 1.8476629257202148, |
|
"learning_rate": 8.047945205479453e-07, |
|
"loss": 2.4838, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.06368772470467386, |
|
"grad_norm": 1.7130696773529053, |
|
"learning_rate": 8.219178082191781e-07, |
|
"loss": 2.473, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.06471494607087827, |
|
"grad_norm": 1.751081109046936, |
|
"learning_rate": 8.39041095890411e-07, |
|
"loss": 2.5152, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.0657421674370827, |
|
"grad_norm": 1.7758578062057495, |
|
"learning_rate": 8.561643835616439e-07, |
|
"loss": 2.5031, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.0667693888032871, |
|
"grad_norm": 1.9316363334655762, |
|
"learning_rate": 8.732876712328768e-07, |
|
"loss": 2.3695, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06779661016949153, |
|
"grad_norm": 1.7765878438949585, |
|
"learning_rate": 8.904109589041097e-07, |
|
"loss": 2.4669, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.06882383153569595, |
|
"grad_norm": 1.7339810132980347, |
|
"learning_rate": 9.075342465753426e-07, |
|
"loss": 2.4112, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06985105290190036, |
|
"grad_norm": 1.717781662940979, |
|
"learning_rate": 9.246575342465753e-07, |
|
"loss": 2.4677, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.07087827426810478, |
|
"grad_norm": 1.7711046934127808, |
|
"learning_rate": 9.417808219178083e-07, |
|
"loss": 2.4131, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.07190549563430919, |
|
"grad_norm": 1.708680510520935, |
|
"learning_rate": 9.589041095890411e-07, |
|
"loss": 2.5191, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07190549563430919, |
|
"eval_loss": 2.4920735359191895, |
|
"eval_runtime": 37.3501, |
|
"eval_samples_per_second": 26.774, |
|
"eval_steps_per_second": 1.124, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07293271700051361, |
|
"grad_norm": 1.9429680109024048, |
|
"learning_rate": 9.76027397260274e-07, |
|
"loss": 2.4099, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.07395993836671803, |
|
"grad_norm": 1.7411247491836548, |
|
"learning_rate": 9.931506849315068e-07, |
|
"loss": 2.3632, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.07498715973292244, |
|
"grad_norm": 1.7577983140945435, |
|
"learning_rate": 1.0102739726027399e-06, |
|
"loss": 2.4214, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.07601438109912687, |
|
"grad_norm": 2.025926351547241, |
|
"learning_rate": 1.0273972602739727e-06, |
|
"loss": 2.394, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.07704160246533127, |
|
"grad_norm": 2.4366860389709473, |
|
"learning_rate": 1.0445205479452056e-06, |
|
"loss": 2.3943, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.0780688238315357, |
|
"grad_norm": 2.437108278274536, |
|
"learning_rate": 1.0616438356164384e-06, |
|
"loss": 2.3844, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.07909604519774012, |
|
"grad_norm": 2.1352434158325195, |
|
"learning_rate": 1.0787671232876712e-06, |
|
"loss": 2.3871, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.08012326656394453, |
|
"grad_norm": 1.8895397186279297, |
|
"learning_rate": 1.095890410958904e-06, |
|
"loss": 2.3927, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.08115048793014895, |
|
"grad_norm": 2.2960093021392822, |
|
"learning_rate": 1.1130136986301371e-06, |
|
"loss": 2.3343, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.08217770929635336, |
|
"grad_norm": 2.0891849994659424, |
|
"learning_rate": 1.13013698630137e-06, |
|
"loss": 2.3732, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08217770929635336, |
|
"eval_loss": 2.4189229011535645, |
|
"eval_runtime": 37.3645, |
|
"eval_samples_per_second": 26.763, |
|
"eval_steps_per_second": 1.124, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08320493066255778, |
|
"grad_norm": 2.450587034225464, |
|
"learning_rate": 1.1472602739726028e-06, |
|
"loss": 2.3163, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.08423215202876219, |
|
"grad_norm": 2.079949378967285, |
|
"learning_rate": 1.1643835616438357e-06, |
|
"loss": 2.3381, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.08525937339496661, |
|
"grad_norm": 2.160737991333008, |
|
"learning_rate": 1.1815068493150685e-06, |
|
"loss": 2.3376, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.08628659476117104, |
|
"grad_norm": 2.463890552520752, |
|
"learning_rate": 1.1986301369863014e-06, |
|
"loss": 2.308, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.08731381612737545, |
|
"grad_norm": 2.3923065662384033, |
|
"learning_rate": 1.2157534246575344e-06, |
|
"loss": 2.2682, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08834103749357987, |
|
"grad_norm": 2.2096004486083984, |
|
"learning_rate": 1.2328767123287673e-06, |
|
"loss": 2.3256, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.08936825885978428, |
|
"grad_norm": 2.1402556896209717, |
|
"learning_rate": 1.25e-06, |
|
"loss": 2.2767, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.0903954802259887, |
|
"grad_norm": 2.500629186630249, |
|
"learning_rate": 1.267123287671233e-06, |
|
"loss": 2.2576, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.09142270159219312, |
|
"grad_norm": 2.0257675647735596, |
|
"learning_rate": 1.284246575342466e-06, |
|
"loss": 2.2722, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.09244992295839753, |
|
"grad_norm": 2.148428440093994, |
|
"learning_rate": 1.3013698630136986e-06, |
|
"loss": 2.2309, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.09244992295839753, |
|
"eval_loss": 2.3439993858337402, |
|
"eval_runtime": 37.3803, |
|
"eval_samples_per_second": 26.752, |
|
"eval_steps_per_second": 1.124, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.09347714432460195, |
|
"grad_norm": 2.3541946411132812, |
|
"learning_rate": 1.3184931506849317e-06, |
|
"loss": 2.2347, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.09450436569080636, |
|
"grad_norm": 2.6239383220672607, |
|
"learning_rate": 1.3356164383561645e-06, |
|
"loss": 2.2589, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.09553158705701079, |
|
"grad_norm": 2.1981804370880127, |
|
"learning_rate": 1.3527397260273976e-06, |
|
"loss": 2.2372, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.09655880842321521, |
|
"grad_norm": 2.3652358055114746, |
|
"learning_rate": 1.3698630136986302e-06, |
|
"loss": 2.2169, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.09758602978941962, |
|
"grad_norm": 2.2078158855438232, |
|
"learning_rate": 1.386986301369863e-06, |
|
"loss": 2.2761, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09861325115562404, |
|
"grad_norm": 2.494685649871826, |
|
"learning_rate": 1.4041095890410961e-06, |
|
"loss": 2.1691, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.09964047252182845, |
|
"grad_norm": 2.3293051719665527, |
|
"learning_rate": 1.421232876712329e-06, |
|
"loss": 2.2246, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.10066769388803287, |
|
"grad_norm": 2.410687208175659, |
|
"learning_rate": 1.4383561643835616e-06, |
|
"loss": 2.1166, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.1016949152542373, |
|
"grad_norm": 2.5168960094451904, |
|
"learning_rate": 1.4554794520547946e-06, |
|
"loss": 2.1417, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.1027221366204417, |
|
"grad_norm": 2.178412914276123, |
|
"learning_rate": 1.4726027397260275e-06, |
|
"loss": 2.1638, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1027221366204417, |
|
"eval_loss": 2.273622751235962, |
|
"eval_runtime": 37.3892, |
|
"eval_samples_per_second": 26.746, |
|
"eval_steps_per_second": 1.123, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.10374935798664613, |
|
"grad_norm": 2.8951706886291504, |
|
"learning_rate": 1.4897260273972605e-06, |
|
"loss": 2.1631, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.10477657935285054, |
|
"grad_norm": 2.103206157684326, |
|
"learning_rate": 1.5068493150684932e-06, |
|
"loss": 2.1419, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.10580380071905496, |
|
"grad_norm": 2.495969772338867, |
|
"learning_rate": 1.5239726027397262e-06, |
|
"loss": 2.1029, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.10683102208525937, |
|
"grad_norm": 2.5686826705932617, |
|
"learning_rate": 1.541095890410959e-06, |
|
"loss": 2.1235, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.10785824345146379, |
|
"grad_norm": 3.4710471630096436, |
|
"learning_rate": 1.5582191780821921e-06, |
|
"loss": 2.1832, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.10888546481766821, |
|
"grad_norm": 2.2563316822052, |
|
"learning_rate": 1.5753424657534248e-06, |
|
"loss": 2.1441, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.10991268618387262, |
|
"grad_norm": 3.860506534576416, |
|
"learning_rate": 1.5924657534246576e-06, |
|
"loss": 2.1764, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.11093990755007704, |
|
"grad_norm": 2.550135850906372, |
|
"learning_rate": 1.6095890410958907e-06, |
|
"loss": 2.1455, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.11196712891628145, |
|
"grad_norm": 3.168283700942993, |
|
"learning_rate": 1.6267123287671235e-06, |
|
"loss": 2.0246, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.11299435028248588, |
|
"grad_norm": 2.577069044113159, |
|
"learning_rate": 1.6438356164383561e-06, |
|
"loss": 2.0921, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.11299435028248588, |
|
"eval_loss": 2.2218997478485107, |
|
"eval_runtime": 37.246, |
|
"eval_samples_per_second": 26.849, |
|
"eval_steps_per_second": 1.128, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.1140215716486903, |
|
"grad_norm": 2.3515374660491943, |
|
"learning_rate": 1.6609589041095892e-06, |
|
"loss": 2.0209, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.1150487930148947, |
|
"grad_norm": 2.7660279273986816, |
|
"learning_rate": 1.678082191780822e-06, |
|
"loss": 2.1165, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.11607601438109913, |
|
"grad_norm": 2.567336082458496, |
|
"learning_rate": 1.695205479452055e-06, |
|
"loss": 2.0529, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.11710323574730354, |
|
"grad_norm": 2.4126152992248535, |
|
"learning_rate": 1.7123287671232877e-06, |
|
"loss": 2.1028, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.11813045711350796, |
|
"grad_norm": 3.577528476715088, |
|
"learning_rate": 1.7294520547945206e-06, |
|
"loss": 2.0317, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.11915767847971238, |
|
"grad_norm": 2.9624195098876953, |
|
"learning_rate": 1.7465753424657536e-06, |
|
"loss": 2.1318, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.12018489984591679, |
|
"grad_norm": 2.789854049682617, |
|
"learning_rate": 1.7636986301369865e-06, |
|
"loss": 2.0822, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 2.8168365955352783, |
|
"learning_rate": 1.7808219178082193e-06, |
|
"loss": 2.08, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.12223934257832562, |
|
"grad_norm": 2.7616257667541504, |
|
"learning_rate": 1.7979452054794521e-06, |
|
"loss": 2.0668, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.12326656394453005, |
|
"grad_norm": 2.5610315799713135, |
|
"learning_rate": 1.8150684931506852e-06, |
|
"loss": 2.1518, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.12326656394453005, |
|
"eval_loss": 2.184033155441284, |
|
"eval_runtime": 37.3784, |
|
"eval_samples_per_second": 26.753, |
|
"eval_steps_per_second": 1.124, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.12429378531073447, |
|
"grad_norm": 2.817063093185425, |
|
"learning_rate": 1.832191780821918e-06, |
|
"loss": 2.0813, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.12532100667693888, |
|
"grad_norm": 2.987205982208252, |
|
"learning_rate": 1.8493150684931507e-06, |
|
"loss": 2.0421, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1263482280431433, |
|
"grad_norm": 2.8480496406555176, |
|
"learning_rate": 1.8664383561643837e-06, |
|
"loss": 2.0018, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.12737544940934772, |
|
"grad_norm": 2.994274616241455, |
|
"learning_rate": 1.8835616438356166e-06, |
|
"loss": 2.0266, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.12840267077555212, |
|
"grad_norm": 2.64136004447937, |
|
"learning_rate": 1.9006849315068496e-06, |
|
"loss": 1.9873, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.12942989214175654, |
|
"grad_norm": 2.9426915645599365, |
|
"learning_rate": 1.9178082191780823e-06, |
|
"loss": 2.0566, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.13045711350796096, |
|
"grad_norm": 2.9018619060516357, |
|
"learning_rate": 1.9349315068493153e-06, |
|
"loss": 1.9674, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.1314843348741654, |
|
"grad_norm": 3.6921045780181885, |
|
"learning_rate": 1.952054794520548e-06, |
|
"loss": 2.0295, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.1325115562403698, |
|
"grad_norm": 2.4194657802581787, |
|
"learning_rate": 1.969178082191781e-06, |
|
"loss": 2.0639, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.1335387776065742, |
|
"grad_norm": 3.252349615097046, |
|
"learning_rate": 1.9863013698630136e-06, |
|
"loss": 2.0613, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1335387776065742, |
|
"eval_loss": 2.153157949447632, |
|
"eval_runtime": 37.394, |
|
"eval_samples_per_second": 26.742, |
|
"eval_steps_per_second": 1.123, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.13456599897277863, |
|
"grad_norm": 2.9361023902893066, |
|
"learning_rate": 2.0034246575342467e-06, |
|
"loss": 1.9817, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.13559322033898305, |
|
"grad_norm": 2.834803581237793, |
|
"learning_rate": 2.0205479452054797e-06, |
|
"loss": 2.0539, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.13662044170518747, |
|
"grad_norm": 2.8814785480499268, |
|
"learning_rate": 2.037671232876713e-06, |
|
"loss": 2.0296, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.1376476630713919, |
|
"grad_norm": 2.647125482559204, |
|
"learning_rate": 2.0547945205479454e-06, |
|
"loss": 1.9739, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.1386748844375963, |
|
"grad_norm": 2.9608750343322754, |
|
"learning_rate": 2.071917808219178e-06, |
|
"loss": 2.0083, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.1397021058038007, |
|
"grad_norm": 2.8570032119750977, |
|
"learning_rate": 2.089041095890411e-06, |
|
"loss": 2.0216, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.14072932717000514, |
|
"grad_norm": 2.806358575820923, |
|
"learning_rate": 2.106164383561644e-06, |
|
"loss": 1.9457, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.14175654853620956, |
|
"grad_norm": 3.1003353595733643, |
|
"learning_rate": 2.123287671232877e-06, |
|
"loss": 2.0324, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.14278376990241398, |
|
"grad_norm": 2.679588556289673, |
|
"learning_rate": 2.14041095890411e-06, |
|
"loss": 1.9699, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.14381099126861838, |
|
"grad_norm": 2.9436194896698, |
|
"learning_rate": 2.1575342465753425e-06, |
|
"loss": 1.9824, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.14381099126861838, |
|
"eval_loss": 2.123953104019165, |
|
"eval_runtime": 37.4214, |
|
"eval_samples_per_second": 26.723, |
|
"eval_steps_per_second": 1.122, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1448382126348228, |
|
"grad_norm": 3.088301420211792, |
|
"learning_rate": 2.1746575342465755e-06, |
|
"loss": 1.9861, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.14586543400102722, |
|
"grad_norm": 3.011859178543091, |
|
"learning_rate": 2.191780821917808e-06, |
|
"loss": 1.9383, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.14689265536723164, |
|
"grad_norm": 2.930264472961426, |
|
"learning_rate": 2.2089041095890412e-06, |
|
"loss": 2.0674, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.14791987673343607, |
|
"grad_norm": 3.361328601837158, |
|
"learning_rate": 2.2260273972602743e-06, |
|
"loss": 1.9495, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.14894709809964046, |
|
"grad_norm": 2.7825112342834473, |
|
"learning_rate": 2.243150684931507e-06, |
|
"loss": 1.9335, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.14997431946584489, |
|
"grad_norm": 3.7215847969055176, |
|
"learning_rate": 2.26027397260274e-06, |
|
"loss": 1.9403, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.1510015408320493, |
|
"grad_norm": 2.5125460624694824, |
|
"learning_rate": 2.2773972602739726e-06, |
|
"loss": 1.9437, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.15202876219825373, |
|
"grad_norm": 3.0963492393493652, |
|
"learning_rate": 2.2945205479452057e-06, |
|
"loss": 1.9651, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.15305598356445815, |
|
"grad_norm": 2.9147121906280518, |
|
"learning_rate": 2.3116438356164387e-06, |
|
"loss": 2.0567, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.15408320493066255, |
|
"grad_norm": 3.5275802612304688, |
|
"learning_rate": 2.3287671232876713e-06, |
|
"loss": 1.9773, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.15408320493066255, |
|
"eval_loss": 2.112344264984131, |
|
"eval_runtime": 37.4142, |
|
"eval_samples_per_second": 26.728, |
|
"eval_steps_per_second": 1.123, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.15511042629686697, |
|
"grad_norm": 3.013786554336548, |
|
"learning_rate": 2.3458904109589044e-06, |
|
"loss": 1.8753, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.1561376476630714, |
|
"grad_norm": 3.433615207672119, |
|
"learning_rate": 2.363013698630137e-06, |
|
"loss": 1.9256, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.15716486902927582, |
|
"grad_norm": 3.1526033878326416, |
|
"learning_rate": 2.38013698630137e-06, |
|
"loss": 1.9815, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.15819209039548024, |
|
"grad_norm": 3.2099974155426025, |
|
"learning_rate": 2.3972602739726027e-06, |
|
"loss": 1.9628, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.15921931176168463, |
|
"grad_norm": 3.2713584899902344, |
|
"learning_rate": 2.4143835616438358e-06, |
|
"loss": 1.9128, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.16024653312788906, |
|
"grad_norm": 3.287649393081665, |
|
"learning_rate": 2.431506849315069e-06, |
|
"loss": 1.8764, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.16127375449409348, |
|
"grad_norm": 3.1439101696014404, |
|
"learning_rate": 2.4486301369863015e-06, |
|
"loss": 1.9428, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.1623009758602979, |
|
"grad_norm": 3.006457805633545, |
|
"learning_rate": 2.4657534246575345e-06, |
|
"loss": 1.9516, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.1633281972265023, |
|
"grad_norm": 2.9567787647247314, |
|
"learning_rate": 2.482876712328767e-06, |
|
"loss": 1.915, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.16435541859270672, |
|
"grad_norm": 3.2475216388702393, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.9451, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.16435541859270672, |
|
"eval_loss": 2.086299419403076, |
|
"eval_runtime": 37.3939, |
|
"eval_samples_per_second": 26.742, |
|
"eval_steps_per_second": 1.123, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.16538263995891114, |
|
"grad_norm": 2.9586498737335205, |
|
"learning_rate": 2.5171232876712333e-06, |
|
"loss": 1.9298, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.16640986132511557, |
|
"grad_norm": 3.066910982131958, |
|
"learning_rate": 2.534246575342466e-06, |
|
"loss": 1.9103, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.16743708269132, |
|
"grad_norm": 2.814741611480713, |
|
"learning_rate": 2.551369863013699e-06, |
|
"loss": 1.8678, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.16846430405752438, |
|
"grad_norm": 3.7239339351654053, |
|
"learning_rate": 2.568493150684932e-06, |
|
"loss": 1.9321, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.1694915254237288, |
|
"grad_norm": 2.829899787902832, |
|
"learning_rate": 2.585616438356164e-06, |
|
"loss": 1.9587, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.17051874678993323, |
|
"grad_norm": 3.826289176940918, |
|
"learning_rate": 2.6027397260273973e-06, |
|
"loss": 1.9288, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.17154596815613765, |
|
"grad_norm": 3.1028499603271484, |
|
"learning_rate": 2.6198630136986303e-06, |
|
"loss": 1.8685, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.17257318952234207, |
|
"grad_norm": 3.6923508644104004, |
|
"learning_rate": 2.6369863013698634e-06, |
|
"loss": 1.9276, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.17360041088854647, |
|
"grad_norm": 3.0577642917633057, |
|
"learning_rate": 2.654109589041096e-06, |
|
"loss": 1.9653, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.1746276322547509, |
|
"grad_norm": 3.363762617111206, |
|
"learning_rate": 2.671232876712329e-06, |
|
"loss": 1.9465, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1746276322547509, |
|
"eval_loss": 2.0753531455993652, |
|
"eval_runtime": 37.3955, |
|
"eval_samples_per_second": 26.741, |
|
"eval_steps_per_second": 1.123, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.17565485362095531, |
|
"grad_norm": 2.9063165187835693, |
|
"learning_rate": 2.688356164383562e-06, |
|
"loss": 1.8827, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.17668207498715974, |
|
"grad_norm": 3.4709181785583496, |
|
"learning_rate": 2.705479452054795e-06, |
|
"loss": 1.882, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.17770929635336416, |
|
"grad_norm": 3.395397663116455, |
|
"learning_rate": 2.7226027397260274e-06, |
|
"loss": 1.8711, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.17873651771956856, |
|
"grad_norm": 3.9973363876342773, |
|
"learning_rate": 2.7397260273972604e-06, |
|
"loss": 1.8223, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.17976373908577298, |
|
"grad_norm": 3.1841156482696533, |
|
"learning_rate": 2.7568493150684935e-06, |
|
"loss": 1.947, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.1807909604519774, |
|
"grad_norm": 3.1841156482696533, |
|
"learning_rate": 2.7568493150684935e-06, |
|
"loss": 1.9192, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 3.84712290763855, |
|
"learning_rate": 2.773972602739726e-06, |
|
"loss": 1.8344, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.18284540318438625, |
|
"grad_norm": 3.487525463104248, |
|
"learning_rate": 2.791095890410959e-06, |
|
"loss": 1.9242, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.18387262455059064, |
|
"grad_norm": 3.1980319023132324, |
|
"learning_rate": 2.8082191780821922e-06, |
|
"loss": 1.7959, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.18489984591679506, |
|
"grad_norm": 3.56462025642395, |
|
"learning_rate": 2.8253424657534253e-06, |
|
"loss": 1.9668, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.18489984591679506, |
|
"eval_loss": 2.0606908798217773, |
|
"eval_runtime": 37.3994, |
|
"eval_samples_per_second": 26.738, |
|
"eval_steps_per_second": 1.123, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1859270672829995, |
|
"grad_norm": 3.613842248916626, |
|
"learning_rate": 2.842465753424658e-06, |
|
"loss": 1.925, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1869542886492039, |
|
"grad_norm": 2.9755642414093018, |
|
"learning_rate": 2.8595890410958905e-06, |
|
"loss": 1.9251, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.18798151001540833, |
|
"grad_norm": 2.9678146839141846, |
|
"learning_rate": 2.876712328767123e-06, |
|
"loss": 1.9406, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.18900873138161273, |
|
"grad_norm": 3.3536393642425537, |
|
"learning_rate": 2.8938356164383562e-06, |
|
"loss": 1.8041, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.19003595274781715, |
|
"grad_norm": 2.714902877807617, |
|
"learning_rate": 2.9109589041095893e-06, |
|
"loss": 1.8213, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.19106317411402157, |
|
"grad_norm": 3.0686237812042236, |
|
"learning_rate": 2.9280821917808223e-06, |
|
"loss": 1.817, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.192090395480226, |
|
"grad_norm": 3.510448932647705, |
|
"learning_rate": 2.945205479452055e-06, |
|
"loss": 1.8317, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.19311761684643042, |
|
"grad_norm": 3.2060787677764893, |
|
"learning_rate": 2.962328767123288e-06, |
|
"loss": 1.894, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.1941448382126348, |
|
"grad_norm": 3.1756906509399414, |
|
"learning_rate": 2.979452054794521e-06, |
|
"loss": 1.8788, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.19517205957883924, |
|
"grad_norm": 3.059863805770874, |
|
"learning_rate": 2.9965753424657533e-06, |
|
"loss": 1.8198, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.19517205957883924, |
|
"eval_loss": 2.0506513118743896, |
|
"eval_runtime": 37.4014, |
|
"eval_samples_per_second": 26.737, |
|
"eval_steps_per_second": 1.123, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.19619928094504366, |
|
"grad_norm": 3.193685531616211, |
|
"learning_rate": 3.0136986301369864e-06, |
|
"loss": 1.8818, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.19722650231124808, |
|
"grad_norm": 2.905402183532715, |
|
"learning_rate": 3.0308219178082194e-06, |
|
"loss": 1.8546, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.1982537236774525, |
|
"grad_norm": 3.423109531402588, |
|
"learning_rate": 3.0479452054794525e-06, |
|
"loss": 1.8142, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.1992809450436569, |
|
"grad_norm": 3.136094331741333, |
|
"learning_rate": 3.065068493150685e-06, |
|
"loss": 1.8553, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.20030816640986132, |
|
"grad_norm": 3.0746538639068604, |
|
"learning_rate": 3.082191780821918e-06, |
|
"loss": 1.8153, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.20133538777606574, |
|
"grad_norm": 2.9354681968688965, |
|
"learning_rate": 3.099315068493151e-06, |
|
"loss": 1.8301, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.20236260914227017, |
|
"grad_norm": 3.00426983833313, |
|
"learning_rate": 3.1164383561643843e-06, |
|
"loss": 1.8233, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.2033898305084746, |
|
"grad_norm": 3.2754054069519043, |
|
"learning_rate": 3.1335616438356165e-06, |
|
"loss": 1.787, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.20441705187467898, |
|
"grad_norm": 2.9854509830474854, |
|
"learning_rate": 3.1506849315068495e-06, |
|
"loss": 1.9341, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.2054442732408834, |
|
"grad_norm": 3.5655088424682617, |
|
"learning_rate": 3.167808219178082e-06, |
|
"loss": 1.7964, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.2054442732408834, |
|
"eval_loss": 2.0440263748168945, |
|
"eval_runtime": 37.4054, |
|
"eval_samples_per_second": 26.734, |
|
"eval_steps_per_second": 1.123, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2919, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.23461881872384e+16, |
|
"train_batch_size": 3, |
|
"trial_name": null, |
|
"trial_params": null |
|
}