{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9956440572495333, |
|
"eval_steps": 10, |
|
"global_step": 800, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0012445550715619166, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8073, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002489110143123833, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8285, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00373366521468575, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7975, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.004978220286247666, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.798, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.006222775357809583, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8443, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0074673304293715, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8074, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.008711885500933417, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7588, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.009956440572495333, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8136, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01120099564405725, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7397, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.012445550715619166, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7745, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.012445550715619166, |
|
"eval_loss": 2.8210322856903076, |
|
"eval_runtime": 43.0476, |
|
"eval_samples_per_second": 23.23, |
|
"eval_steps_per_second": 0.976, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.013690105787181083, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.8275, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.014934660858743, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7762, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.016179215930304917, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7799, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.017423771001866834, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7906, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.018668326073428748, |
|
"grad_norm": 5.01304292678833, |
|
"learning_rate": 2.0746887966804982e-08, |
|
"loss": 2.7712, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.019912881144990666, |
|
"grad_norm": 4.976474761962891, |
|
"learning_rate": 4.1493775933609963e-08, |
|
"loss": 2.829, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.021157436216552583, |
|
"grad_norm": 5.539012908935547, |
|
"learning_rate": 6.224066390041494e-08, |
|
"loss": 2.813, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0224019912881145, |
|
"grad_norm": 5.4900407791137695, |
|
"learning_rate": 8.298755186721993e-08, |
|
"loss": 2.8126, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.023646546359676415, |
|
"grad_norm": 5.457218647003174, |
|
"learning_rate": 1.037344398340249e-07, |
|
"loss": 2.7482, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.024891101431238332, |
|
"grad_norm": 5.231036186218262, |
|
"learning_rate": 1.2448132780082988e-07, |
|
"loss": 2.8323, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.024891101431238332, |
|
"eval_loss": 2.819715738296509, |
|
"eval_runtime": 43.9884, |
|
"eval_samples_per_second": 22.733, |
|
"eval_steps_per_second": 0.955, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02613565650280025, |
|
"grad_norm": 5.337130546569824, |
|
"learning_rate": 1.4522821576763488e-07, |
|
"loss": 2.776, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.027380211574362167, |
|
"grad_norm": 4.973108291625977, |
|
"learning_rate": 1.6597510373443985e-07, |
|
"loss": 2.8239, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02862476664592408, |
|
"grad_norm": 4.8733906745910645, |
|
"learning_rate": 1.8672199170124483e-07, |
|
"loss": 2.8069, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.029869321717486, |
|
"grad_norm": 5.061434745788574, |
|
"learning_rate": 2.074688796680498e-07, |
|
"loss": 2.8063, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.031113876789047916, |
|
"grad_norm": 4.782018661499023, |
|
"learning_rate": 2.2821576763485478e-07, |
|
"loss": 2.7488, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03235843186060983, |
|
"grad_norm": 4.394975662231445, |
|
"learning_rate": 2.4896265560165975e-07, |
|
"loss": 2.7683, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03360298693217175, |
|
"grad_norm": 4.475763320922852, |
|
"learning_rate": 2.6970954356846476e-07, |
|
"loss": 2.8143, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.03484754200373367, |
|
"grad_norm": 4.362309455871582, |
|
"learning_rate": 2.9045643153526976e-07, |
|
"loss": 2.7875, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03609209707529558, |
|
"grad_norm": 3.9444823265075684, |
|
"learning_rate": 3.112033195020747e-07, |
|
"loss": 2.766, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.037336652146857496, |
|
"grad_norm": 3.4614129066467285, |
|
"learning_rate": 3.319502074688797e-07, |
|
"loss": 2.7644, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.037336652146857496, |
|
"eval_loss": 2.772022008895874, |
|
"eval_runtime": 45.8513, |
|
"eval_samples_per_second": 21.81, |
|
"eval_steps_per_second": 0.916, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.038581207218419414, |
|
"grad_norm": 3.449410915374756, |
|
"learning_rate": 3.5269709543568466e-07, |
|
"loss": 2.7653, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.03982576228998133, |
|
"grad_norm": 3.3115482330322266, |
|
"learning_rate": 3.7344398340248966e-07, |
|
"loss": 2.7032, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04107031736154325, |
|
"grad_norm": 3.2057440280914307, |
|
"learning_rate": 3.941908713692946e-07, |
|
"loss": 2.7766, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.042314872433105166, |
|
"grad_norm": 3.4503021240234375, |
|
"learning_rate": 4.149377593360996e-07, |
|
"loss": 2.7214, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.043559427504667084, |
|
"grad_norm": 3.1477363109588623, |
|
"learning_rate": 4.3568464730290456e-07, |
|
"loss": 2.7333, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.044803982576229, |
|
"grad_norm": 2.8025119304656982, |
|
"learning_rate": 4.5643153526970956e-07, |
|
"loss": 2.7535, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.04604853764779091, |
|
"grad_norm": 2.967703342437744, |
|
"learning_rate": 4.771784232365145e-07, |
|
"loss": 2.7615, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.04729309271935283, |
|
"grad_norm": 2.9308114051818848, |
|
"learning_rate": 4.979253112033195e-07, |
|
"loss": 2.7115, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.04853764779091475, |
|
"grad_norm": 2.9203720092773438, |
|
"learning_rate": 5.186721991701245e-07, |
|
"loss": 2.701, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.049782202862476664, |
|
"grad_norm": 2.8226709365844727, |
|
"learning_rate": 5.394190871369295e-07, |
|
"loss": 2.6267, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.049782202862476664, |
|
"eval_loss": 2.6964941024780273, |
|
"eval_runtime": 46.5181, |
|
"eval_samples_per_second": 21.497, |
|
"eval_steps_per_second": 0.903, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05102675793403858, |
|
"grad_norm": 2.7212748527526855, |
|
"learning_rate": 5.601659751037345e-07, |
|
"loss": 2.6505, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0522713130056005, |
|
"grad_norm": 2.658881425857544, |
|
"learning_rate": 5.809128630705395e-07, |
|
"loss": 2.6655, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.053515868077162417, |
|
"grad_norm": 2.4534730911254883, |
|
"learning_rate": 6.016597510373444e-07, |
|
"loss": 2.7098, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.054760423148724334, |
|
"grad_norm": 2.4222075939178467, |
|
"learning_rate": 6.224066390041494e-07, |
|
"loss": 2.6793, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.056004978220286245, |
|
"grad_norm": 2.5824973583221436, |
|
"learning_rate": 6.431535269709543e-07, |
|
"loss": 2.6212, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05724953329184816, |
|
"grad_norm": 2.5982468128204346, |
|
"learning_rate": 6.639004149377594e-07, |
|
"loss": 2.5901, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.05849408836341008, |
|
"grad_norm": 2.213388442993164, |
|
"learning_rate": 6.846473029045644e-07, |
|
"loss": 2.6156, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.059738643434972, |
|
"grad_norm": 2.097372531890869, |
|
"learning_rate": 7.053941908713693e-07, |
|
"loss": 2.6154, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.060983198506533914, |
|
"grad_norm": 1.9788408279418945, |
|
"learning_rate": 7.261410788381744e-07, |
|
"loss": 2.5516, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06222775357809583, |
|
"grad_norm": 2.052147150039673, |
|
"learning_rate": 7.468879668049793e-07, |
|
"loss": 2.6128, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06222775357809583, |
|
"eval_loss": 2.6048905849456787, |
|
"eval_runtime": 51.2137, |
|
"eval_samples_per_second": 19.526, |
|
"eval_steps_per_second": 0.82, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06347230864965775, |
|
"grad_norm": 1.8841335773468018, |
|
"learning_rate": 7.676348547717843e-07, |
|
"loss": 2.5617, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.06471686372121967, |
|
"grad_norm": 2.0841684341430664, |
|
"learning_rate": 7.883817427385892e-07, |
|
"loss": 2.5627, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.06596141879278158, |
|
"grad_norm": 1.8119730949401855, |
|
"learning_rate": 8.091286307053943e-07, |
|
"loss": 2.5572, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0672059738643435, |
|
"grad_norm": 1.7168290615081787, |
|
"learning_rate": 8.298755186721992e-07, |
|
"loss": 2.5465, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.06845052893590542, |
|
"grad_norm": 1.7731754779815674, |
|
"learning_rate": 8.506224066390042e-07, |
|
"loss": 2.543, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.06969508400746734, |
|
"grad_norm": 1.9654561281204224, |
|
"learning_rate": 8.713692946058091e-07, |
|
"loss": 2.5547, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07093963907902924, |
|
"grad_norm": 1.8032375574111938, |
|
"learning_rate": 8.921161825726142e-07, |
|
"loss": 2.5319, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07218419415059116, |
|
"grad_norm": 1.8775644302368164, |
|
"learning_rate": 9.128630705394191e-07, |
|
"loss": 2.5533, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07342874922215308, |
|
"grad_norm": 1.6019998788833618, |
|
"learning_rate": 9.336099585062241e-07, |
|
"loss": 2.5402, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.07467330429371499, |
|
"grad_norm": 1.7785435914993286, |
|
"learning_rate": 9.54356846473029e-07, |
|
"loss": 2.4704, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07467330429371499, |
|
"eval_loss": 2.541548252105713, |
|
"eval_runtime": 51.1601, |
|
"eval_samples_per_second": 19.546, |
|
"eval_steps_per_second": 0.821, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07591785936527691, |
|
"grad_norm": 1.5377200841903687, |
|
"learning_rate": 9.751037344398341e-07, |
|
"loss": 2.5028, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.07716241443683883, |
|
"grad_norm": 1.7212916612625122, |
|
"learning_rate": 9.95850622406639e-07, |
|
"loss": 2.4621, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.07840696950840075, |
|
"grad_norm": 1.632606029510498, |
|
"learning_rate": 1.0165975103734441e-06, |
|
"loss": 2.4868, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.07965152457996266, |
|
"grad_norm": 1.8643872737884521, |
|
"learning_rate": 1.037344398340249e-06, |
|
"loss": 2.4582, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08089607965152458, |
|
"grad_norm": 1.8329377174377441, |
|
"learning_rate": 1.058091286307054e-06, |
|
"loss": 2.464, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0821406347230865, |
|
"grad_norm": 1.9488356113433838, |
|
"learning_rate": 1.078838174273859e-06, |
|
"loss": 2.4275, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08338518979464841, |
|
"grad_norm": 1.7663215398788452, |
|
"learning_rate": 1.099585062240664e-06, |
|
"loss": 2.516, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.08462974486621033, |
|
"grad_norm": 1.7675347328186035, |
|
"learning_rate": 1.120331950207469e-06, |
|
"loss": 2.4535, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08587429993777225, |
|
"grad_norm": 1.7481404542922974, |
|
"learning_rate": 1.141078838174274e-06, |
|
"loss": 2.4089, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.08711885500933417, |
|
"grad_norm": 1.7963330745697021, |
|
"learning_rate": 1.161825726141079e-06, |
|
"loss": 2.4408, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08711885500933417, |
|
"eval_loss": 2.466163396835327, |
|
"eval_runtime": 50.4204, |
|
"eval_samples_per_second": 19.833, |
|
"eval_steps_per_second": 0.833, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08836341008089608, |
|
"grad_norm": 2.053469657897949, |
|
"learning_rate": 1.182572614107884e-06, |
|
"loss": 2.408, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.089607965152458, |
|
"grad_norm": 1.900294303894043, |
|
"learning_rate": 1.2033195020746888e-06, |
|
"loss": 2.4066, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09085252022401992, |
|
"grad_norm": 2.38637113571167, |
|
"learning_rate": 1.224066390041494e-06, |
|
"loss": 2.3548, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09209707529558182, |
|
"grad_norm": 1.8274579048156738, |
|
"learning_rate": 1.2448132780082988e-06, |
|
"loss": 2.3844, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09334163036714374, |
|
"grad_norm": 1.9040184020996094, |
|
"learning_rate": 1.2655601659751037e-06, |
|
"loss": 2.4406, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09458618543870566, |
|
"grad_norm": 2.0351619720458984, |
|
"learning_rate": 1.2863070539419086e-06, |
|
"loss": 2.3624, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.09583074051026758, |
|
"grad_norm": 1.989876627922058, |
|
"learning_rate": 1.307053941908714e-06, |
|
"loss": 2.4097, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.0970752955818295, |
|
"grad_norm": 1.8507741689682007, |
|
"learning_rate": 1.3278008298755188e-06, |
|
"loss": 2.4482, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.09831985065339141, |
|
"grad_norm": 2.6118454933166504, |
|
"learning_rate": 1.3485477178423237e-06, |
|
"loss": 2.3178, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.09956440572495333, |
|
"grad_norm": 1.9467326402664185, |
|
"learning_rate": 1.3692946058091288e-06, |
|
"loss": 2.3175, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.09956440572495333, |
|
"eval_loss": 2.3719911575317383, |
|
"eval_runtime": 52.3293, |
|
"eval_samples_per_second": 19.11, |
|
"eval_steps_per_second": 0.803, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.10080896079651525, |
|
"grad_norm": 2.4913370609283447, |
|
"learning_rate": 1.3900414937759337e-06, |
|
"loss": 2.3063, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.10205351586807716, |
|
"grad_norm": 2.4838919639587402, |
|
"learning_rate": 1.4107883817427386e-06, |
|
"loss": 2.3534, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.10329807093963908, |
|
"grad_norm": 2.798588991165161, |
|
"learning_rate": 1.4315352697095435e-06, |
|
"loss": 2.2732, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.104542626011201, |
|
"grad_norm": 2.688399076461792, |
|
"learning_rate": 1.4522821576763488e-06, |
|
"loss": 2.3112, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.10578718108276292, |
|
"grad_norm": 2.7367143630981445, |
|
"learning_rate": 1.4730290456431537e-06, |
|
"loss": 2.3055, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.10703173615432483, |
|
"grad_norm": 2.6406664848327637, |
|
"learning_rate": 1.4937759336099586e-06, |
|
"loss": 2.312, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.10827629122588675, |
|
"grad_norm": 2.703355312347412, |
|
"learning_rate": 1.5145228215767635e-06, |
|
"loss": 2.3305, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.10952084629744867, |
|
"grad_norm": 2.420084238052368, |
|
"learning_rate": 1.5352697095435686e-06, |
|
"loss": 2.2608, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11076540136901059, |
|
"grad_norm": 2.999835729598999, |
|
"learning_rate": 1.5560165975103735e-06, |
|
"loss": 2.2389, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11200995644057249, |
|
"grad_norm": 2.35611629486084, |
|
"learning_rate": 1.5767634854771784e-06, |
|
"loss": 2.2671, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11200995644057249, |
|
"eval_loss": 2.2900290489196777, |
|
"eval_runtime": 52.1871, |
|
"eval_samples_per_second": 19.162, |
|
"eval_steps_per_second": 0.805, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1132545115121344, |
|
"grad_norm": 2.2686431407928467, |
|
"learning_rate": 1.5975103734439833e-06, |
|
"loss": 2.2405, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.11449906658369632, |
|
"grad_norm": 2.883517265319824, |
|
"learning_rate": 1.6182572614107886e-06, |
|
"loss": 2.2364, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.11574362165525824, |
|
"grad_norm": 2.4562909603118896, |
|
"learning_rate": 1.6390041493775935e-06, |
|
"loss": 2.2308, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.11698817672682016, |
|
"grad_norm": 2.7968456745147705, |
|
"learning_rate": 1.6597510373443984e-06, |
|
"loss": 2.1959, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.11823273179838208, |
|
"grad_norm": 2.8692259788513184, |
|
"learning_rate": 1.6804979253112035e-06, |
|
"loss": 2.215, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.119477286869944, |
|
"grad_norm": 2.8436100482940674, |
|
"learning_rate": 1.7012448132780084e-06, |
|
"loss": 2.1816, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12072184194150591, |
|
"grad_norm": 2.4477386474609375, |
|
"learning_rate": 1.7219917012448133e-06, |
|
"loss": 2.1684, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12196639701306783, |
|
"grad_norm": 2.610046863555908, |
|
"learning_rate": 1.7427385892116182e-06, |
|
"loss": 2.1656, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.12321095208462975, |
|
"grad_norm": 2.698709487915039, |
|
"learning_rate": 1.7634854771784235e-06, |
|
"loss": 2.2115, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.12445550715619166, |
|
"grad_norm": 2.476600408554077, |
|
"learning_rate": 1.7842323651452284e-06, |
|
"loss": 2.1619, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.12445550715619166, |
|
"eval_loss": 2.221313953399658, |
|
"eval_runtime": 51.6214, |
|
"eval_samples_per_second": 19.372, |
|
"eval_steps_per_second": 0.814, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.12570006222775357, |
|
"grad_norm": 3.238584518432617, |
|
"learning_rate": 1.8049792531120333e-06, |
|
"loss": 2.1543, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.1269446172993155, |
|
"grad_norm": 2.3609964847564697, |
|
"learning_rate": 1.8257261410788382e-06, |
|
"loss": 2.1602, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.1281891723708774, |
|
"grad_norm": 3.3314859867095947, |
|
"learning_rate": 1.8464730290456433e-06, |
|
"loss": 2.1843, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.12943372744243933, |
|
"grad_norm": 2.42441725730896, |
|
"learning_rate": 1.8672199170124482e-06, |
|
"loss": 2.135, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.13067828251400124, |
|
"grad_norm": 3.2766177654266357, |
|
"learning_rate": 1.8879668049792531e-06, |
|
"loss": 2.158, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13192283758556317, |
|
"grad_norm": 2.8646531105041504, |
|
"learning_rate": 1.908713692946058e-06, |
|
"loss": 2.1214, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.13316739265712507, |
|
"grad_norm": 2.8570213317871094, |
|
"learning_rate": 1.929460580912863e-06, |
|
"loss": 2.1345, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.134411947728687, |
|
"grad_norm": 2.5014989376068115, |
|
"learning_rate": 1.9502074688796682e-06, |
|
"loss": 2.1147, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.1356565028002489, |
|
"grad_norm": 2.818286895751953, |
|
"learning_rate": 1.970954356846473e-06, |
|
"loss": 2.0914, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.13690105787181084, |
|
"grad_norm": 3.0579800605773926, |
|
"learning_rate": 1.991701244813278e-06, |
|
"loss": 2.0879, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.13690105787181084, |
|
"eval_loss": 2.167081594467163, |
|
"eval_runtime": 42.3907, |
|
"eval_samples_per_second": 23.59, |
|
"eval_steps_per_second": 0.991, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.13814561294337274, |
|
"grad_norm": 2.6039772033691406, |
|
"learning_rate": 2.012448132780083e-06, |
|
"loss": 2.1006, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.13939016801493467, |
|
"grad_norm": 3.1309447288513184, |
|
"learning_rate": 2.0331950207468883e-06, |
|
"loss": 2.0758, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.14063472308649658, |
|
"grad_norm": 2.6772613525390625, |
|
"learning_rate": 2.053941908713693e-06, |
|
"loss": 2.1542, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14187927815805848, |
|
"grad_norm": 3.2689080238342285, |
|
"learning_rate": 2.074688796680498e-06, |
|
"loss": 2.0873, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.1431238332296204, |
|
"grad_norm": 2.7289857864379883, |
|
"learning_rate": 2.095435684647303e-06, |
|
"loss": 2.0697, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.14436838830118232, |
|
"grad_norm": 2.8635787963867188, |
|
"learning_rate": 2.116182572614108e-06, |
|
"loss": 2.0908, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.14561294337274425, |
|
"grad_norm": 2.805933713912964, |
|
"learning_rate": 2.136929460580913e-06, |
|
"loss": 2.0643, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.14685749844430615, |
|
"grad_norm": 2.562567710876465, |
|
"learning_rate": 2.157676348547718e-06, |
|
"loss": 2.0559, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.14810205351586808, |
|
"grad_norm": 3.000135898590088, |
|
"learning_rate": 2.178423236514523e-06, |
|
"loss": 2.0442, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.14934660858742999, |
|
"grad_norm": 2.6913654804229736, |
|
"learning_rate": 2.199170124481328e-06, |
|
"loss": 2.0551, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.14934660858742999, |
|
"eval_loss": 2.130500316619873, |
|
"eval_runtime": 44.3089, |
|
"eval_samples_per_second": 22.569, |
|
"eval_steps_per_second": 0.948, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.15059116365899192, |
|
"grad_norm": 3.38159441947937, |
|
"learning_rate": 2.219917012448133e-06, |
|
"loss": 2.0359, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.15183571873055382, |
|
"grad_norm": 2.4632813930511475, |
|
"learning_rate": 2.240663900414938e-06, |
|
"loss": 2.0805, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.15308027380211575, |
|
"grad_norm": 3.2085115909576416, |
|
"learning_rate": 2.2614107883817427e-06, |
|
"loss": 2.0617, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.15432482887367766, |
|
"grad_norm": 2.7810094356536865, |
|
"learning_rate": 2.282157676348548e-06, |
|
"loss": 2.0467, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.1555693839452396, |
|
"grad_norm": 2.6023035049438477, |
|
"learning_rate": 2.302904564315353e-06, |
|
"loss": 2.0578, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1568139390168015, |
|
"grad_norm": 3.073814630508423, |
|
"learning_rate": 2.323651452282158e-06, |
|
"loss": 2.0742, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.15805849408836342, |
|
"grad_norm": 2.622281312942505, |
|
"learning_rate": 2.3443983402489627e-06, |
|
"loss": 2.1668, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.15930304915992533, |
|
"grad_norm": 3.3766582012176514, |
|
"learning_rate": 2.365145228215768e-06, |
|
"loss": 2.0129, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.16054760423148726, |
|
"grad_norm": 3.1513423919677734, |
|
"learning_rate": 2.385892116182573e-06, |
|
"loss": 2.0215, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.16179215930304916, |
|
"grad_norm": 3.2852959632873535, |
|
"learning_rate": 2.4066390041493776e-06, |
|
"loss": 2.0557, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.16179215930304916, |
|
"eval_loss": 2.096493721008301, |
|
"eval_runtime": 46.9447, |
|
"eval_samples_per_second": 21.302, |
|
"eval_steps_per_second": 0.895, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.16303671437461106, |
|
"grad_norm": 2.526399612426758, |
|
"learning_rate": 2.4273858921161828e-06, |
|
"loss": 2.065, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.164281269446173, |
|
"grad_norm": 3.3448667526245117, |
|
"learning_rate": 2.448132780082988e-06, |
|
"loss": 2.0472, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.1655258245177349, |
|
"grad_norm": 2.6260809898376465, |
|
"learning_rate": 2.468879668049793e-06, |
|
"loss": 1.9804, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.16677037958929683, |
|
"grad_norm": 3.6141812801361084, |
|
"learning_rate": 2.4896265560165977e-06, |
|
"loss": 2.0197, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.16801493466085873, |
|
"grad_norm": 2.4911234378814697, |
|
"learning_rate": 2.5103734439834028e-06, |
|
"loss": 1.9642, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.16925948973242066, |
|
"grad_norm": 2.928642749786377, |
|
"learning_rate": 2.5311203319502074e-06, |
|
"loss": 1.9489, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.17050404480398257, |
|
"grad_norm": 3.090965509414673, |
|
"learning_rate": 2.5518672199170125e-06, |
|
"loss": 1.994, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.1717485998755445, |
|
"grad_norm": 3.2897465229034424, |
|
"learning_rate": 2.5726141078838172e-06, |
|
"loss": 1.9746, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1729931549471064, |
|
"grad_norm": 2.857083320617676, |
|
"learning_rate": 2.5933609958506228e-06, |
|
"loss": 1.9774, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.17423771001866833, |
|
"grad_norm": 3.803220510482788, |
|
"learning_rate": 2.614107883817428e-06, |
|
"loss": 1.9731, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.17423771001866833, |
|
"eval_loss": 2.064887046813965, |
|
"eval_runtime": 44.0231, |
|
"eval_samples_per_second": 22.715, |
|
"eval_steps_per_second": 0.954, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.17548226509023024, |
|
"grad_norm": 2.9923534393310547, |
|
"learning_rate": 2.6348547717842326e-06, |
|
"loss": 1.9854, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.17672682016179217, |
|
"grad_norm": 3.3368566036224365, |
|
"learning_rate": 2.6556016597510377e-06, |
|
"loss": 2.0687, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.17797137523335407, |
|
"grad_norm": 3.3132379055023193, |
|
"learning_rate": 2.6763485477178423e-06, |
|
"loss": 1.9696, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.179215930304916, |
|
"grad_norm": 3.7914819717407227, |
|
"learning_rate": 2.6970954356846475e-06, |
|
"loss": 1.9926, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.1804604853764779, |
|
"grad_norm": 3.20161509513855, |
|
"learning_rate": 2.717842323651452e-06, |
|
"loss": 1.9193, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.18170504044803984, |
|
"grad_norm": 3.440420150756836, |
|
"learning_rate": 2.7385892116182577e-06, |
|
"loss": 1.991, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.18294959551960174, |
|
"grad_norm": 3.152684450149536, |
|
"learning_rate": 2.7593360995850628e-06, |
|
"loss": 1.9695, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.18419415059116365, |
|
"grad_norm": 3.0402464866638184, |
|
"learning_rate": 2.7800829875518675e-06, |
|
"loss": 1.9274, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.18543870566272558, |
|
"grad_norm": 2.778444290161133, |
|
"learning_rate": 2.8008298755186726e-06, |
|
"loss": 1.9198, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.18668326073428748, |
|
"grad_norm": 2.7946548461914062, |
|
"learning_rate": 2.8215767634854773e-06, |
|
"loss": 1.8958, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.18668326073428748, |
|
"eval_loss": 2.043288230895996, |
|
"eval_runtime": 44.2268, |
|
"eval_samples_per_second": 22.611, |
|
"eval_steps_per_second": 0.95, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1879278158058494, |
|
"grad_norm": 3.269259452819824, |
|
"learning_rate": 2.8423236514522824e-06, |
|
"loss": 1.9832, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.18917237087741132, |
|
"grad_norm": 3.2673771381378174, |
|
"learning_rate": 2.863070539419087e-06, |
|
"loss": 1.9345, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.19041692594897325, |
|
"grad_norm": 3.016599655151367, |
|
"learning_rate": 2.883817427385892e-06, |
|
"loss": 1.8969, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.19166148102053515, |
|
"grad_norm": 3.2771544456481934, |
|
"learning_rate": 2.9045643153526977e-06, |
|
"loss": 1.921, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.19290603609209708, |
|
"grad_norm": 3.008080244064331, |
|
"learning_rate": 2.9253112033195024e-06, |
|
"loss": 1.9367, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.194150591163659, |
|
"grad_norm": 3.2312428951263428, |
|
"learning_rate": 2.9460580912863075e-06, |
|
"loss": 1.9021, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.19539514623522092, |
|
"grad_norm": 3.294121503829956, |
|
"learning_rate": 2.966804979253112e-06, |
|
"loss": 1.9216, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.19663970130678282, |
|
"grad_norm": 2.771685838699341, |
|
"learning_rate": 2.9875518672199173e-06, |
|
"loss": 1.9435, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.19788425637834475, |
|
"grad_norm": 2.971971273422241, |
|
"learning_rate": 3.008298755186722e-06, |
|
"loss": 1.8851, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.19912881144990666, |
|
"grad_norm": 3.3144047260284424, |
|
"learning_rate": 3.029045643153527e-06, |
|
"loss": 1.853, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.19912881144990666, |
|
"eval_loss": 2.023491859436035, |
|
"eval_runtime": 44.9425, |
|
"eval_samples_per_second": 22.251, |
|
"eval_steps_per_second": 0.935, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2003733665214686, |
|
"grad_norm": 3.3733646869659424, |
|
"learning_rate": 3.0497925311203326e-06, |
|
"loss": 1.8836, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2016179215930305, |
|
"grad_norm": 3.1801207065582275, |
|
"learning_rate": 3.0705394190871373e-06, |
|
"loss": 1.9438, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2028624766645924, |
|
"grad_norm": 3.1199593544006348, |
|
"learning_rate": 3.0912863070539424e-06, |
|
"loss": 1.9219, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.20410703173615433, |
|
"grad_norm": 3.344089984893799, |
|
"learning_rate": 3.112033195020747e-06, |
|
"loss": 1.9174, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.20535158680771623, |
|
"grad_norm": 3.269702911376953, |
|
"learning_rate": 3.132780082987552e-06, |
|
"loss": 1.894, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.20659614187927816, |
|
"grad_norm": 3.021744966506958, |
|
"learning_rate": 3.153526970954357e-06, |
|
"loss": 1.8799, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.20784069695084006, |
|
"grad_norm": 3.0104167461395264, |
|
"learning_rate": 3.174273858921162e-06, |
|
"loss": 1.9116, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.209085252022402, |
|
"grad_norm": 3.3018341064453125, |
|
"learning_rate": 3.1950207468879666e-06, |
|
"loss": 1.8389, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2103298070939639, |
|
"grad_norm": 3.0919857025146484, |
|
"learning_rate": 3.215767634854772e-06, |
|
"loss": 1.9522, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.21157436216552583, |
|
"grad_norm": 3.4702494144439697, |
|
"learning_rate": 3.2365145228215773e-06, |
|
"loss": 1.9204, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.21157436216552583, |
|
"eval_loss": 1.997478723526001, |
|
"eval_runtime": 43.2143, |
|
"eval_samples_per_second": 23.14, |
|
"eval_steps_per_second": 0.972, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.21281891723708773, |
|
"grad_norm": 3.133046865463257, |
|
"learning_rate": 3.257261410788382e-06, |
|
"loss": 1.891, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.21406347230864967, |
|
"grad_norm": 3.1828863620758057, |
|
"learning_rate": 3.278008298755187e-06, |
|
"loss": 1.8816, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.21530802738021157, |
|
"grad_norm": 3.374898910522461, |
|
"learning_rate": 3.2987551867219918e-06, |
|
"loss": 1.8886, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.2165525824517735, |
|
"grad_norm": 3.6660516262054443, |
|
"learning_rate": 3.319502074688797e-06, |
|
"loss": 1.9442, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.2177971375233354, |
|
"grad_norm": 3.080636978149414, |
|
"learning_rate": 3.3402489626556016e-06, |
|
"loss": 1.866, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.21904169259489734, |
|
"grad_norm": 3.3684473037719727, |
|
"learning_rate": 3.360995850622407e-06, |
|
"loss": 1.8699, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.22028624766645924, |
|
"grad_norm": 3.818382740020752, |
|
"learning_rate": 3.381742738589212e-06, |
|
"loss": 1.8541, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.22153080273802117, |
|
"grad_norm": 3.0995497703552246, |
|
"learning_rate": 3.402489626556017e-06, |
|
"loss": 1.8453, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.22277535780958307, |
|
"grad_norm": 3.5696587562561035, |
|
"learning_rate": 3.423236514522822e-06, |
|
"loss": 1.9289, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.22401991288114498, |
|
"grad_norm": 3.480571985244751, |
|
"learning_rate": 3.4439834024896267e-06, |
|
"loss": 1.8175, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.22401991288114498, |
|
"eval_loss": 1.9842805862426758, |
|
"eval_runtime": 43.7649, |
|
"eval_samples_per_second": 22.849, |
|
"eval_steps_per_second": 0.96, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2252644679527069, |
|
"grad_norm": 3.229316473007202, |
|
"learning_rate": 3.4647302904564318e-06, |
|
"loss": 1.9048, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.2265090230242688, |
|
"grad_norm": 3.1970808506011963, |
|
"learning_rate": 3.4854771784232365e-06, |
|
"loss": 1.907, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.22775357809583074, |
|
"grad_norm": 3.5275299549102783, |
|
"learning_rate": 3.5062240663900416e-06, |
|
"loss": 1.8147, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.22899813316739265, |
|
"grad_norm": 3.367077589035034, |
|
"learning_rate": 3.526970954356847e-06, |
|
"loss": 1.8189, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.23024268823895458, |
|
"grad_norm": 3.3052914142608643, |
|
"learning_rate": 3.5477178423236518e-06, |
|
"loss": 1.8362, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.23148724331051648, |
|
"grad_norm": 3.4737954139709473, |
|
"learning_rate": 3.568464730290457e-06, |
|
"loss": 1.8747, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.23273179838207841, |
|
"grad_norm": 2.851705312728882, |
|
"learning_rate": 3.5892116182572616e-06, |
|
"loss": 1.7835, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.23397635345364032, |
|
"grad_norm": 3.615224599838257, |
|
"learning_rate": 3.6099585062240667e-06, |
|
"loss": 1.7855, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.23522090852520225, |
|
"grad_norm": 2.7468442916870117, |
|
"learning_rate": 3.6307053941908714e-06, |
|
"loss": 1.843, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.23646546359676415, |
|
"grad_norm": 3.3228273391723633, |
|
"learning_rate": 3.6514522821576765e-06, |
|
"loss": 1.8453, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.23646546359676415, |
|
"eval_loss": 1.9668089151382446, |
|
"eval_runtime": 44.9228, |
|
"eval_samples_per_second": 22.26, |
|
"eval_steps_per_second": 0.935, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.23771001866832608, |
|
"grad_norm": 2.7882697582244873, |
|
"learning_rate": 3.672199170124482e-06, |
|
"loss": 1.8625, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.238954573739888, |
|
"grad_norm": 3.244044780731201, |
|
"learning_rate": 3.6929460580912867e-06, |
|
"loss": 1.8903, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.24019912881144992, |
|
"grad_norm": 2.8844549655914307, |
|
"learning_rate": 3.713692946058092e-06, |
|
"loss": 1.8929, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.24144368388301182, |
|
"grad_norm": 2.9207894802093506, |
|
"learning_rate": 3.7344398340248965e-06, |
|
"loss": 1.8458, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.24268823895457373, |
|
"grad_norm": 2.908562183380127, |
|
"learning_rate": 3.7551867219917016e-06, |
|
"loss": 1.828, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.24393279402613566, |
|
"grad_norm": 3.1195480823516846, |
|
"learning_rate": 3.7759336099585063e-06, |
|
"loss": 1.855, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.24517734909769756, |
|
"grad_norm": 3.3910183906555176, |
|
"learning_rate": 3.7966804979253114e-06, |
|
"loss": 1.7868, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.2464219041692595, |
|
"grad_norm": 2.921475887298584, |
|
"learning_rate": 3.817427385892116e-06, |
|
"loss": 1.7505, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.2476664592408214, |
|
"grad_norm": 3.5471713542938232, |
|
"learning_rate": 3.838174273858922e-06, |
|
"loss": 1.8445, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.24891101431238333, |
|
"grad_norm": 3.047698736190796, |
|
"learning_rate": 3.858921161825726e-06, |
|
"loss": 1.8037, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.24891101431238333, |
|
"eval_loss": 1.9400665760040283, |
|
"eval_runtime": 49.1044, |
|
"eval_samples_per_second": 20.365, |
|
"eval_steps_per_second": 0.855, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25015556938394523, |
|
"grad_norm": 3.4988913536071777, |
|
"learning_rate": 3.879668049792531e-06, |
|
"loss": 1.8061, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.25140012445550713, |
|
"grad_norm": 3.0373125076293945, |
|
"learning_rate": 3.9004149377593365e-06, |
|
"loss": 1.8081, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.2526446795270691, |
|
"grad_norm": 2.9764745235443115, |
|
"learning_rate": 3.921161825726142e-06, |
|
"loss": 1.8244, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.253889234598631, |
|
"grad_norm": 3.4413554668426514, |
|
"learning_rate": 3.941908713692946e-06, |
|
"loss": 1.8492, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.2551337896701929, |
|
"grad_norm": 2.9980437755584717, |
|
"learning_rate": 3.962655601659751e-06, |
|
"loss": 1.805, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.2563783447417548, |
|
"grad_norm": 3.228750228881836, |
|
"learning_rate": 3.983402489626556e-06, |
|
"loss": 1.7683, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.25762289981331676, |
|
"grad_norm": 3.008496046066284, |
|
"learning_rate": 4.004149377593361e-06, |
|
"loss": 1.8602, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.25886745488487867, |
|
"grad_norm": 3.338935375213623, |
|
"learning_rate": 4.024896265560166e-06, |
|
"loss": 1.7978, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.26011200995644057, |
|
"grad_norm": 3.1450204849243164, |
|
"learning_rate": 4.045643153526971e-06, |
|
"loss": 1.8243, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.2613565650280025, |
|
"grad_norm": 3.5714142322540283, |
|
"learning_rate": 4.0663900414937765e-06, |
|
"loss": 1.8386, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.2613565650280025, |
|
"eval_loss": 1.9310717582702637, |
|
"eval_runtime": 42.4826, |
|
"eval_samples_per_second": 23.539, |
|
"eval_steps_per_second": 0.989, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.26260112009956443, |
|
"grad_norm": 3.3498189449310303, |
|
"learning_rate": 4.087136929460581e-06, |
|
"loss": 1.8055, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.26384567517112634, |
|
"grad_norm": 3.2423200607299805, |
|
"learning_rate": 4.107883817427386e-06, |
|
"loss": 1.8378, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.26509023024268824, |
|
"grad_norm": 3.0533974170684814, |
|
"learning_rate": 4.128630705394191e-06, |
|
"loss": 1.7699, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.26633478531425014, |
|
"grad_norm": 3.135615587234497, |
|
"learning_rate": 4.149377593360996e-06, |
|
"loss": 1.8019, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.26757934038581205, |
|
"grad_norm": 2.9863040447235107, |
|
"learning_rate": 4.170124481327801e-06, |
|
"loss": 1.7817, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.268823895457374, |
|
"grad_norm": 3.270598888397217, |
|
"learning_rate": 4.190871369294606e-06, |
|
"loss": 1.7542, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.2700684505289359, |
|
"grad_norm": 3.2279393672943115, |
|
"learning_rate": 4.211618257261411e-06, |
|
"loss": 1.7964, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.2713130056004978, |
|
"grad_norm": 3.0626120567321777, |
|
"learning_rate": 4.232365145228216e-06, |
|
"loss": 1.7687, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.2725575606720597, |
|
"grad_norm": 3.070702075958252, |
|
"learning_rate": 4.253112033195021e-06, |
|
"loss": 1.8075, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.2738021157436217, |
|
"grad_norm": 3.3623268604278564, |
|
"learning_rate": 4.273858921161826e-06, |
|
"loss": 1.8517, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2738021157436217, |
|
"eval_loss": 1.9212934970855713, |
|
"eval_runtime": 46.8503, |
|
"eval_samples_per_second": 21.345, |
|
"eval_steps_per_second": 0.896, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2750466708151836, |
|
"grad_norm": 3.005740165710449, |
|
"learning_rate": 4.294605809128631e-06, |
|
"loss": 1.816, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.2762912258867455, |
|
"grad_norm": 3.5257251262664795, |
|
"learning_rate": 4.315352697095436e-06, |
|
"loss": 1.8191, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.2775357809583074, |
|
"grad_norm": 3.361103057861328, |
|
"learning_rate": 4.336099585062241e-06, |
|
"loss": 1.8378, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.27878033602986935, |
|
"grad_norm": 3.787623405456543, |
|
"learning_rate": 4.356846473029046e-06, |
|
"loss": 1.7736, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.28002489110143125, |
|
"grad_norm": 3.45717453956604, |
|
"learning_rate": 4.3775933609958506e-06, |
|
"loss": 1.8195, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.28126944617299315, |
|
"grad_norm": 3.1698601245880127, |
|
"learning_rate": 4.398340248962656e-06, |
|
"loss": 1.7756, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.28251400124455506, |
|
"grad_norm": 3.345258951187134, |
|
"learning_rate": 4.419087136929461e-06, |
|
"loss": 1.7272, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.28375855631611696, |
|
"grad_norm": 2.905773639678955, |
|
"learning_rate": 4.439834024896266e-06, |
|
"loss": 1.84, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.2850031113876789, |
|
"grad_norm": 3.2004425525665283, |
|
"learning_rate": 4.460580912863071e-06, |
|
"loss": 1.8164, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.2862476664592408, |
|
"grad_norm": 2.965275287628174, |
|
"learning_rate": 4.481327800829876e-06, |
|
"loss": 1.7718, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2862476664592408, |
|
"eval_loss": 1.903228521347046, |
|
"eval_runtime": 51.1663, |
|
"eval_samples_per_second": 19.544, |
|
"eval_steps_per_second": 0.821, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2874922215308027, |
|
"grad_norm": 2.8130710124969482, |
|
"learning_rate": 4.502074688796681e-06, |
|
"loss": 1.7696, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.28873677660236463, |
|
"grad_norm": 2.9987664222717285, |
|
"learning_rate": 4.5228215767634855e-06, |
|
"loss": 1.775, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.2899813316739266, |
|
"grad_norm": 3.0936989784240723, |
|
"learning_rate": 4.543568464730291e-06, |
|
"loss": 1.7775, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.2912258867454885, |
|
"grad_norm": 3.0936989784240723, |
|
"learning_rate": 4.543568464730291e-06, |
|
"loss": 1.7154, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.2924704418170504, |
|
"grad_norm": 2.8632843494415283, |
|
"learning_rate": 4.564315352697096e-06, |
|
"loss": 1.7771, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2937149968886123, |
|
"grad_norm": 3.3314013481140137, |
|
"learning_rate": 4.585062240663901e-06, |
|
"loss": 1.7243, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.29495955196017426, |
|
"grad_norm": 2.8522872924804688, |
|
"learning_rate": 4.605809128630706e-06, |
|
"loss": 1.7543, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.29620410703173616, |
|
"grad_norm": 3.319157600402832, |
|
"learning_rate": 4.626556016597511e-06, |
|
"loss": 1.7652, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.29744866210329807, |
|
"grad_norm": 3.123116970062256, |
|
"learning_rate": 4.647302904564316e-06, |
|
"loss": 1.6933, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.29869321717485997, |
|
"grad_norm": 3.1735854148864746, |
|
"learning_rate": 4.66804979253112e-06, |
|
"loss": 1.7437, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.29869321717485997, |
|
"eval_loss": 1.891274333000183, |
|
"eval_runtime": 50.0054, |
|
"eval_samples_per_second": 19.998, |
|
"eval_steps_per_second": 0.84, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.29993777224642193, |
|
"grad_norm": 3.0996642112731934, |
|
"learning_rate": 4.6887966804979255e-06, |
|
"loss": 1.7594, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.30118232731798383, |
|
"grad_norm": 3.255063533782959, |
|
"learning_rate": 4.709543568464731e-06, |
|
"loss": 1.7645, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.30242688238954574, |
|
"grad_norm": 2.722285747528076, |
|
"learning_rate": 4.730290456431536e-06, |
|
"loss": 1.7549, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.30367143746110764, |
|
"grad_norm": 3.1903417110443115, |
|
"learning_rate": 4.751037344398341e-06, |
|
"loss": 1.7382, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.30491599253266954, |
|
"grad_norm": 2.8352959156036377, |
|
"learning_rate": 4.771784232365146e-06, |
|
"loss": 1.7497, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.3061605476042315, |
|
"grad_norm": 3.158536434173584, |
|
"learning_rate": 4.792531120331951e-06, |
|
"loss": 1.7467, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.3074051026757934, |
|
"grad_norm": 2.8456921577453613, |
|
"learning_rate": 4.813278008298755e-06, |
|
"loss": 1.7472, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.3086496577473553, |
|
"grad_norm": 3.1215128898620605, |
|
"learning_rate": 4.83402489626556e-06, |
|
"loss": 1.7577, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.3098942128189172, |
|
"grad_norm": 2.928015947341919, |
|
"learning_rate": 4.8547717842323655e-06, |
|
"loss": 1.6932, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.3111387678904792, |
|
"grad_norm": 3.001044511795044, |
|
"learning_rate": 4.875518672199171e-06, |
|
"loss": 1.7276, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3111387678904792, |
|
"eval_loss": 1.8855507373809814, |
|
"eval_runtime": 46.0143, |
|
"eval_samples_per_second": 21.732, |
|
"eval_steps_per_second": 0.913, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3123833229620411, |
|
"grad_norm": 2.988994598388672, |
|
"learning_rate": 4.896265560165976e-06, |
|
"loss": 1.739, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.313627878033603, |
|
"grad_norm": 3.0091233253479004, |
|
"learning_rate": 4.91701244813278e-06, |
|
"loss": 1.7118, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.3148724331051649, |
|
"grad_norm": 3.2735323905944824, |
|
"learning_rate": 4.937759336099586e-06, |
|
"loss": 1.7832, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.31611698817672684, |
|
"grad_norm": 3.055468797683716, |
|
"learning_rate": 4.95850622406639e-06, |
|
"loss": 1.7151, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.31736154324828875, |
|
"grad_norm": 3.0889883041381836, |
|
"learning_rate": 4.979253112033195e-06, |
|
"loss": 1.7411, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.31860609831985065, |
|
"grad_norm": 2.8418147563934326, |
|
"learning_rate": 5e-06, |
|
"loss": 1.7575, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.31985065339141255, |
|
"grad_norm": 3.09739351272583, |
|
"learning_rate": 4.99769372693727e-06, |
|
"loss": 1.7988, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.3210952084629745, |
|
"grad_norm": 3.245515823364258, |
|
"learning_rate": 4.995387453874539e-06, |
|
"loss": 1.7706, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.3223397635345364, |
|
"grad_norm": 3.250432014465332, |
|
"learning_rate": 4.993081180811809e-06, |
|
"loss": 1.7839, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.3235843186060983, |
|
"grad_norm": 3.1325368881225586, |
|
"learning_rate": 4.990774907749078e-06, |
|
"loss": 1.754, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.3235843186060983, |
|
"eval_loss": 1.869175672531128, |
|
"eval_runtime": 49.0199, |
|
"eval_samples_per_second": 20.4, |
|
"eval_steps_per_second": 0.857, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.3248288736776602, |
|
"grad_norm": 3.1627941131591797, |
|
"learning_rate": 4.988468634686347e-06, |
|
"loss": 1.7123, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.3260734287492221, |
|
"grad_norm": 2.848202705383301, |
|
"learning_rate": 4.986162361623617e-06, |
|
"loss": 1.7231, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.3273179838207841, |
|
"grad_norm": 2.9585494995117188, |
|
"learning_rate": 4.983856088560886e-06, |
|
"loss": 1.7301, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.328562538892346, |
|
"grad_norm": 3.160170316696167, |
|
"learning_rate": 4.981549815498156e-06, |
|
"loss": 1.7283, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.3298070939639079, |
|
"grad_norm": 2.6879520416259766, |
|
"learning_rate": 4.979243542435424e-06, |
|
"loss": 1.7494, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.3310516490354698, |
|
"grad_norm": 3.1313576698303223, |
|
"learning_rate": 4.976937269372694e-06, |
|
"loss": 1.7224, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.33229620410703176, |
|
"grad_norm": 3.0477867126464844, |
|
"learning_rate": 4.974630996309964e-06, |
|
"loss": 1.6951, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.33354075917859366, |
|
"grad_norm": 3.133974313735962, |
|
"learning_rate": 4.972324723247233e-06, |
|
"loss": 1.7362, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.33478531425015556, |
|
"grad_norm": 3.4925222396850586, |
|
"learning_rate": 4.970018450184502e-06, |
|
"loss": 1.7053, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.33602986932171747, |
|
"grad_norm": 2.848466634750366, |
|
"learning_rate": 4.9677121771217715e-06, |
|
"loss": 1.7336, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.33602986932171747, |
|
"eval_loss": 1.8723454475402832, |
|
"eval_runtime": 47.9758, |
|
"eval_samples_per_second": 20.844, |
|
"eval_steps_per_second": 0.875, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.3372744243932794, |
|
"grad_norm": 3.405003309249878, |
|
"learning_rate": 4.965405904059041e-06, |
|
"loss": 1.7178, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.33851897946484133, |
|
"grad_norm": 2.982916831970215, |
|
"learning_rate": 4.96309963099631e-06, |
|
"loss": 1.7255, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.33976353453640323, |
|
"grad_norm": 3.1712088584899902, |
|
"learning_rate": 4.96079335793358e-06, |
|
"loss": 1.7328, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.34100808960796514, |
|
"grad_norm": 3.325481414794922, |
|
"learning_rate": 4.958487084870849e-06, |
|
"loss": 1.7268, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.3422526446795271, |
|
"grad_norm": 3.0751800537109375, |
|
"learning_rate": 4.956180811808119e-06, |
|
"loss": 1.7916, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.343497199751089, |
|
"grad_norm": 2.9451465606689453, |
|
"learning_rate": 4.953874538745388e-06, |
|
"loss": 1.7588, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.3447417548226509, |
|
"grad_norm": 3.1492040157318115, |
|
"learning_rate": 4.9515682656826574e-06, |
|
"loss": 1.7312, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.3459863098942128, |
|
"grad_norm": 3.165748119354248, |
|
"learning_rate": 4.949261992619927e-06, |
|
"loss": 1.7645, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.3472308649657747, |
|
"grad_norm": 3.059307336807251, |
|
"learning_rate": 4.946955719557196e-06, |
|
"loss": 1.6783, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.34847542003733667, |
|
"grad_norm": 2.940147638320923, |
|
"learning_rate": 4.944649446494466e-06, |
|
"loss": 1.7079, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.34847542003733667, |
|
"eval_loss": 1.8577951192855835, |
|
"eval_runtime": 50.1741, |
|
"eval_samples_per_second": 19.931, |
|
"eval_steps_per_second": 0.837, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3497199751088986, |
|
"grad_norm": 2.6892037391662598, |
|
"learning_rate": 4.942343173431734e-06, |
|
"loss": 1.7104, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.3509645301804605, |
|
"grad_norm": 3.136791467666626, |
|
"learning_rate": 4.940036900369004e-06, |
|
"loss": 1.763, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.3522090852520224, |
|
"grad_norm": 2.6873879432678223, |
|
"learning_rate": 4.937730627306274e-06, |
|
"loss": 1.722, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.35345364032358434, |
|
"grad_norm": 3.1375961303710938, |
|
"learning_rate": 4.9354243542435426e-06, |
|
"loss": 1.7048, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.35469819539514624, |
|
"grad_norm": 2.920725107192993, |
|
"learning_rate": 4.933118081180812e-06, |
|
"loss": 1.7014, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.35594275046670815, |
|
"grad_norm": 2.750018835067749, |
|
"learning_rate": 4.930811808118081e-06, |
|
"loss": 1.6283, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.35718730553827005, |
|
"grad_norm": 3.399036169052124, |
|
"learning_rate": 4.928505535055351e-06, |
|
"loss": 1.7265, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.358431860609832, |
|
"grad_norm": 3.1734066009521484, |
|
"learning_rate": 4.92619926199262e-06, |
|
"loss": 1.7007, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.3596764156813939, |
|
"grad_norm": 3.038865089416504, |
|
"learning_rate": 4.92389298892989e-06, |
|
"loss": 1.7357, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.3609209707529558, |
|
"grad_norm": 3.0309486389160156, |
|
"learning_rate": 4.921586715867159e-06, |
|
"loss": 1.7487, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.3609209707529558, |
|
"eval_loss": 1.8484960794448853, |
|
"eval_runtime": 50.374, |
|
"eval_samples_per_second": 19.852, |
|
"eval_steps_per_second": 0.834, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.3621655258245177, |
|
"grad_norm": 2.867432117462158, |
|
"learning_rate": 4.9192804428044285e-06, |
|
"loss": 1.6988, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.3634100808960797, |
|
"grad_norm": 3.31636905670166, |
|
"learning_rate": 4.916974169741698e-06, |
|
"loss": 1.7003, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.3646546359676416, |
|
"grad_norm": 3.1181511878967285, |
|
"learning_rate": 4.914667896678967e-06, |
|
"loss": 1.6762, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.3658991910392035, |
|
"grad_norm": 2.978194236755371, |
|
"learning_rate": 4.912361623616237e-06, |
|
"loss": 1.6435, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.3671437461107654, |
|
"grad_norm": 3.0623116493225098, |
|
"learning_rate": 4.910055350553506e-06, |
|
"loss": 1.7038, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.3683883011823273, |
|
"grad_norm": 2.83353853225708, |
|
"learning_rate": 4.907749077490776e-06, |
|
"loss": 1.7237, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.36963285625388925, |
|
"grad_norm": 2.8587100505828857, |
|
"learning_rate": 4.905442804428044e-06, |
|
"loss": 1.6927, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.37087741132545116, |
|
"grad_norm": 2.7930493354797363, |
|
"learning_rate": 4.903136531365314e-06, |
|
"loss": 1.7381, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.37212196639701306, |
|
"grad_norm": 2.8992788791656494, |
|
"learning_rate": 4.900830258302584e-06, |
|
"loss": 1.6485, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.37336652146857496, |
|
"grad_norm": 2.8896944522857666, |
|
"learning_rate": 4.898523985239853e-06, |
|
"loss": 1.7143, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.37336652146857496, |
|
"eval_loss": 1.8331259489059448, |
|
"eval_runtime": 51.0214, |
|
"eval_samples_per_second": 19.6, |
|
"eval_steps_per_second": 0.823, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.3746110765401369, |
|
"grad_norm": 2.885444164276123, |
|
"learning_rate": 4.896217712177122e-06, |
|
"loss": 1.6914, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.3758556316116988, |
|
"grad_norm": 3.2996826171875, |
|
"learning_rate": 4.893911439114391e-06, |
|
"loss": 1.7353, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.37710018668326073, |
|
"grad_norm": 2.7231531143188477, |
|
"learning_rate": 4.891605166051661e-06, |
|
"loss": 1.6837, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.37834474175482263, |
|
"grad_norm": 2.9413955211639404, |
|
"learning_rate": 4.88929889298893e-06, |
|
"loss": 1.6448, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.3795892968263846, |
|
"grad_norm": 2.6486589908599854, |
|
"learning_rate": 4.8869926199262e-06, |
|
"loss": 1.6826, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.3808338518979465, |
|
"grad_norm": 2.977836847305298, |
|
"learning_rate": 4.884686346863469e-06, |
|
"loss": 1.6737, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.3820784069695084, |
|
"grad_norm": 2.635324716567993, |
|
"learning_rate": 4.8823800738007384e-06, |
|
"loss": 1.7112, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.3833229620410703, |
|
"grad_norm": 3.0650839805603027, |
|
"learning_rate": 4.880073800738008e-06, |
|
"loss": 1.7123, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.3845675171126322, |
|
"grad_norm": 3.059629440307617, |
|
"learning_rate": 4.877767527675277e-06, |
|
"loss": 1.662, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.38581207218419417, |
|
"grad_norm": 2.7559597492218018, |
|
"learning_rate": 4.875461254612546e-06, |
|
"loss": 1.7418, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.38581207218419417, |
|
"eval_loss": 1.8277366161346436, |
|
"eval_runtime": 42.6671, |
|
"eval_samples_per_second": 23.437, |
|
"eval_steps_per_second": 0.984, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.38705662725575607, |
|
"grad_norm": 2.7987024784088135, |
|
"learning_rate": 4.873154981549816e-06, |
|
"loss": 1.7326, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.388301182327318, |
|
"grad_norm": 2.999824047088623, |
|
"learning_rate": 4.8708487084870856e-06, |
|
"loss": 1.7344, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.3895457373988799, |
|
"grad_norm": 2.942995071411133, |
|
"learning_rate": 4.868542435424355e-06, |
|
"loss": 1.7055, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.39079029247044184, |
|
"grad_norm": 2.823906183242798, |
|
"learning_rate": 4.8662361623616235e-06, |
|
"loss": 1.6819, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.39203484754200374, |
|
"grad_norm": 2.7798759937286377, |
|
"learning_rate": 4.863929889298894e-06, |
|
"loss": 1.6694, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.39327940261356564, |
|
"grad_norm": 2.599005699157715, |
|
"learning_rate": 4.861623616236163e-06, |
|
"loss": 1.6924, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.39452395768512755, |
|
"grad_norm": 2.687539577484131, |
|
"learning_rate": 4.859317343173432e-06, |
|
"loss": 1.685, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.3957685127566895, |
|
"grad_norm": 2.9679677486419678, |
|
"learning_rate": 4.857011070110701e-06, |
|
"loss": 1.718, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.3970130678282514, |
|
"grad_norm": 2.8611817359924316, |
|
"learning_rate": 4.854704797047971e-06, |
|
"loss": 1.7247, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.3982576228998133, |
|
"grad_norm": 2.7461249828338623, |
|
"learning_rate": 4.85239852398524e-06, |
|
"loss": 1.6434, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.3982576228998133, |
|
"eval_loss": 1.8234667778015137, |
|
"eval_runtime": 44.3433, |
|
"eval_samples_per_second": 22.551, |
|
"eval_steps_per_second": 0.947, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.3995021779713752, |
|
"grad_norm": 2.7142515182495117, |
|
"learning_rate": 4.8500922509225095e-06, |
|
"loss": 1.6855, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.4007467330429372, |
|
"grad_norm": 2.87727689743042, |
|
"learning_rate": 4.847785977859779e-06, |
|
"loss": 1.705, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.4019912881144991, |
|
"grad_norm": 2.9598982334136963, |
|
"learning_rate": 4.845479704797048e-06, |
|
"loss": 1.6482, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.403235843186061, |
|
"grad_norm": 2.8654463291168213, |
|
"learning_rate": 4.843173431734318e-06, |
|
"loss": 1.5816, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.4044803982576229, |
|
"grad_norm": 2.983837842941284, |
|
"learning_rate": 4.840867158671587e-06, |
|
"loss": 1.6962, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.4057249533291848, |
|
"grad_norm": 2.730914354324341, |
|
"learning_rate": 4.838560885608857e-06, |
|
"loss": 1.6991, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.40696950840074675, |
|
"grad_norm": 2.6901161670684814, |
|
"learning_rate": 4.836254612546126e-06, |
|
"loss": 1.6839, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.40821406347230865, |
|
"grad_norm": 2.6661486625671387, |
|
"learning_rate": 4.8339483394833955e-06, |
|
"loss": 1.6894, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.40945861854387056, |
|
"grad_norm": 2.64243221282959, |
|
"learning_rate": 4.831642066420665e-06, |
|
"loss": 1.6533, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.41070317361543246, |
|
"grad_norm": 2.586725950241089, |
|
"learning_rate": 4.8293357933579335e-06, |
|
"loss": 1.6486, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.41070317361543246, |
|
"eval_loss": 1.82283616065979, |
|
"eval_runtime": 46.5937, |
|
"eval_samples_per_second": 21.462, |
|
"eval_steps_per_second": 0.901, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.4119477286869944, |
|
"grad_norm": 2.899968147277832, |
|
"learning_rate": 4.827029520295204e-06, |
|
"loss": 1.7136, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.4131922837585563, |
|
"grad_norm": 3.067671537399292, |
|
"learning_rate": 4.824723247232473e-06, |
|
"loss": 1.672, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.4144368388301182, |
|
"grad_norm": 2.8144404888153076, |
|
"learning_rate": 4.822416974169742e-06, |
|
"loss": 1.7173, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.41568139390168013, |
|
"grad_norm": 3.0551373958587646, |
|
"learning_rate": 4.820110701107011e-06, |
|
"loss": 1.6843, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.4169259489732421, |
|
"grad_norm": 2.7585153579711914, |
|
"learning_rate": 4.817804428044281e-06, |
|
"loss": 1.6834, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.418170504044804, |
|
"grad_norm": 2.8691210746765137, |
|
"learning_rate": 4.81549815498155e-06, |
|
"loss": 1.6814, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.4194150591163659, |
|
"grad_norm": 2.871523380279541, |
|
"learning_rate": 4.8131918819188194e-06, |
|
"loss": 1.6222, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.4206596141879278, |
|
"grad_norm": 2.8928768634796143, |
|
"learning_rate": 4.810885608856089e-06, |
|
"loss": 1.6984, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.42190416925948976, |
|
"grad_norm": 3.285256862640381, |
|
"learning_rate": 4.808579335793358e-06, |
|
"loss": 1.7029, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.42314872433105166, |
|
"grad_norm": 2.785504102706909, |
|
"learning_rate": 4.806273062730628e-06, |
|
"loss": 1.6857, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.42314872433105166, |
|
"eval_loss": 1.8053412437438965, |
|
"eval_runtime": 48.0656, |
|
"eval_samples_per_second": 20.805, |
|
"eval_steps_per_second": 0.874, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.42439327940261357, |
|
"grad_norm": 3.0227086544036865, |
|
"learning_rate": 4.803966789667897e-06, |
|
"loss": 1.6903, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.42563783447417547, |
|
"grad_norm": 2.616769790649414, |
|
"learning_rate": 4.8016605166051665e-06, |
|
"loss": 1.6144, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.4268823895457374, |
|
"grad_norm": 2.9519753456115723, |
|
"learning_rate": 4.799354243542436e-06, |
|
"loss": 1.6157, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.42812694461729933, |
|
"grad_norm": 2.764512062072754, |
|
"learning_rate": 4.797047970479705e-06, |
|
"loss": 1.6248, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.42937149968886124, |
|
"grad_norm": 2.999178886413574, |
|
"learning_rate": 4.794741697416975e-06, |
|
"loss": 1.6764, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.43061605476042314, |
|
"grad_norm": 3.0472185611724854, |
|
"learning_rate": 4.792435424354243e-06, |
|
"loss": 1.6421, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.43186060983198504, |
|
"grad_norm": 2.6638400554656982, |
|
"learning_rate": 4.790129151291514e-06, |
|
"loss": 1.6304, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.433105164903547, |
|
"grad_norm": 2.945295810699463, |
|
"learning_rate": 4.787822878228783e-06, |
|
"loss": 1.6181, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.4343497199751089, |
|
"grad_norm": 2.915125846862793, |
|
"learning_rate": 4.7855166051660525e-06, |
|
"loss": 1.6789, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.4355942750466708, |
|
"grad_norm": 2.5674026012420654, |
|
"learning_rate": 4.783210332103321e-06, |
|
"loss": 1.6723, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4355942750466708, |
|
"eval_loss": 1.8109745979309082, |
|
"eval_runtime": 43.9943, |
|
"eval_samples_per_second": 22.73, |
|
"eval_steps_per_second": 0.955, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4368388301182327, |
|
"grad_norm": 2.9533393383026123, |
|
"learning_rate": 4.7809040590405905e-06, |
|
"loss": 1.6271, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.43808338518979467, |
|
"grad_norm": 2.7677664756774902, |
|
"learning_rate": 4.778597785977861e-06, |
|
"loss": 1.6633, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.4393279402613566, |
|
"grad_norm": 2.527456521987915, |
|
"learning_rate": 4.776291512915129e-06, |
|
"loss": 1.5644, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.4405724953329185, |
|
"grad_norm": 2.9778528213500977, |
|
"learning_rate": 4.773985239852399e-06, |
|
"loss": 1.6166, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.4418170504044804, |
|
"grad_norm": 2.5554800033569336, |
|
"learning_rate": 4.771678966789668e-06, |
|
"loss": 1.5573, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.44306160547604234, |
|
"grad_norm": 2.7415409088134766, |
|
"learning_rate": 4.769372693726938e-06, |
|
"loss": 1.6564, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.44430616054760425, |
|
"grad_norm": 3.0275440216064453, |
|
"learning_rate": 4.767066420664207e-06, |
|
"loss": 1.67, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.44555071561916615, |
|
"grad_norm": 2.720694065093994, |
|
"learning_rate": 4.7647601476014765e-06, |
|
"loss": 1.6374, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.44679527069072805, |
|
"grad_norm": 2.97589111328125, |
|
"learning_rate": 4.762453874538746e-06, |
|
"loss": 1.6596, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.44803982576228996, |
|
"grad_norm": 2.8005781173706055, |
|
"learning_rate": 4.760147601476015e-06, |
|
"loss": 1.6281, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.44803982576228996, |
|
"eval_loss": 1.8139524459838867, |
|
"eval_runtime": 44.5817, |
|
"eval_samples_per_second": 22.431, |
|
"eval_steps_per_second": 0.942, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.4492843808338519, |
|
"grad_norm": 3.3947463035583496, |
|
"learning_rate": 4.757841328413285e-06, |
|
"loss": 1.7149, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.4505289359054138, |
|
"grad_norm": 2.991457939147949, |
|
"learning_rate": 4.755535055350554e-06, |
|
"loss": 1.5971, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.4517734909769757, |
|
"grad_norm": 2.653313398361206, |
|
"learning_rate": 4.753228782287823e-06, |
|
"loss": 1.6465, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.4530180460485376, |
|
"grad_norm": 2.716360569000244, |
|
"learning_rate": 4.750922509225093e-06, |
|
"loss": 1.6775, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.4542626011200996, |
|
"grad_norm": 2.733598470687866, |
|
"learning_rate": 4.748616236162362e-06, |
|
"loss": 1.6087, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.4555071561916615, |
|
"grad_norm": 2.581547737121582, |
|
"learning_rate": 4.746309963099631e-06, |
|
"loss": 1.6339, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.4567517112632234, |
|
"grad_norm": 2.6841800212860107, |
|
"learning_rate": 4.7440036900369e-06, |
|
"loss": 1.6193, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.4579962663347853, |
|
"grad_norm": 2.658369302749634, |
|
"learning_rate": 4.741697416974171e-06, |
|
"loss": 1.6287, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.45924082140634725, |
|
"grad_norm": 2.7204179763793945, |
|
"learning_rate": 4.739391143911439e-06, |
|
"loss": 1.6755, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.46048537647790916, |
|
"grad_norm": 2.6627941131591797, |
|
"learning_rate": 4.737084870848709e-06, |
|
"loss": 1.6142, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.46048537647790916, |
|
"eval_loss": 1.8019787073135376, |
|
"eval_runtime": 42.7804, |
|
"eval_samples_per_second": 23.375, |
|
"eval_steps_per_second": 0.982, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.46172993154947106, |
|
"grad_norm": 2.650705337524414, |
|
"learning_rate": 4.734778597785978e-06, |
|
"loss": 1.641, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.46297448662103297, |
|
"grad_norm": 2.7031619548797607, |
|
"learning_rate": 4.7324723247232475e-06, |
|
"loss": 1.6596, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.4642190416925949, |
|
"grad_norm": 3.3050339221954346, |
|
"learning_rate": 4.730166051660517e-06, |
|
"loss": 1.6664, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.46546359676415683, |
|
"grad_norm": 2.841404676437378, |
|
"learning_rate": 4.727859778597786e-06, |
|
"loss": 1.6474, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.46670815183571873, |
|
"grad_norm": 3.0990259647369385, |
|
"learning_rate": 4.725553505535056e-06, |
|
"loss": 1.6008, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.46795270690728064, |
|
"grad_norm": 2.987772226333618, |
|
"learning_rate": 4.723247232472325e-06, |
|
"loss": 1.6374, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.46919726197884254, |
|
"grad_norm": 3.3345255851745605, |
|
"learning_rate": 4.720940959409595e-06, |
|
"loss": 1.6405, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.4704418170504045, |
|
"grad_norm": 3.0810067653656006, |
|
"learning_rate": 4.718634686346864e-06, |
|
"loss": 1.6229, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.4716863721219664, |
|
"grad_norm": 3.246685266494751, |
|
"learning_rate": 4.716328413284133e-06, |
|
"loss": 1.6262, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.4729309271935283, |
|
"grad_norm": 2.6052889823913574, |
|
"learning_rate": 4.714022140221403e-06, |
|
"loss": 1.6241, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4729309271935283, |
|
"eval_loss": 1.7957485914230347, |
|
"eval_runtime": 43.6626, |
|
"eval_samples_per_second": 22.903, |
|
"eval_steps_per_second": 0.962, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4741754822650902, |
|
"grad_norm": 3.747142791748047, |
|
"learning_rate": 4.711715867158672e-06, |
|
"loss": 1.6764, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.47542003733665217, |
|
"grad_norm": 2.8508121967315674, |
|
"learning_rate": 4.709409594095941e-06, |
|
"loss": 1.6597, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.47666459240821407, |
|
"grad_norm": 3.5615711212158203, |
|
"learning_rate": 4.70710332103321e-06, |
|
"loss": 1.6089, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.477909147479776, |
|
"grad_norm": 2.5886309146881104, |
|
"learning_rate": 4.704797047970481e-06, |
|
"loss": 1.5996, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.4791537025513379, |
|
"grad_norm": 3.3900864124298096, |
|
"learning_rate": 4.702490774907749e-06, |
|
"loss": 1.7137, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.48039825762289984, |
|
"grad_norm": 2.913641929626465, |
|
"learning_rate": 4.700184501845019e-06, |
|
"loss": 1.6512, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.48164281269446174, |
|
"grad_norm": 2.810722827911377, |
|
"learning_rate": 4.697878228782288e-06, |
|
"loss": 1.6939, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.48288736776602365, |
|
"grad_norm": 3.2701401710510254, |
|
"learning_rate": 4.6955719557195575e-06, |
|
"loss": 1.6455, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.48413192283758555, |
|
"grad_norm": 2.972931385040283, |
|
"learning_rate": 4.693265682656827e-06, |
|
"loss": 1.6272, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.48537647790914745, |
|
"grad_norm": 3.2157890796661377, |
|
"learning_rate": 4.690959409594096e-06, |
|
"loss": 1.6409, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.48537647790914745, |
|
"eval_loss": 1.7859243154525757, |
|
"eval_runtime": 45.2093, |
|
"eval_samples_per_second": 22.119, |
|
"eval_steps_per_second": 0.929, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4866210329807094, |
|
"grad_norm": 2.783360719680786, |
|
"learning_rate": 4.688653136531366e-06, |
|
"loss": 1.6303, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.4878655880522713, |
|
"grad_norm": 2.6783013343811035, |
|
"learning_rate": 4.686346863468635e-06, |
|
"loss": 1.7259, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.4891101431238332, |
|
"grad_norm": 2.899916172027588, |
|
"learning_rate": 4.6840405904059046e-06, |
|
"loss": 1.6434, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.4903546981953951, |
|
"grad_norm": 2.604377269744873, |
|
"learning_rate": 4.681734317343174e-06, |
|
"loss": 1.6838, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.4915992532669571, |
|
"grad_norm": 3.0830864906311035, |
|
"learning_rate": 4.6794280442804426e-06, |
|
"loss": 1.6053, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.492843808338519, |
|
"grad_norm": 3.1381475925445557, |
|
"learning_rate": 4.677121771217713e-06, |
|
"loss": 1.6132, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.4940883634100809, |
|
"grad_norm": 3.097426652908325, |
|
"learning_rate": 4.674815498154982e-06, |
|
"loss": 1.6237, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.4953329184816428, |
|
"grad_norm": 2.9942305088043213, |
|
"learning_rate": 4.672509225092252e-06, |
|
"loss": 1.6554, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.49657747355320475, |
|
"grad_norm": 2.834199905395508, |
|
"learning_rate": 4.67020295202952e-06, |
|
"loss": 1.6389, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.49782202862476665, |
|
"grad_norm": 3.183389902114868, |
|
"learning_rate": 4.66789667896679e-06, |
|
"loss": 1.6518, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.49782202862476665, |
|
"eval_loss": 1.781000018119812, |
|
"eval_runtime": 45.4127, |
|
"eval_samples_per_second": 22.02, |
|
"eval_steps_per_second": 0.925, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.49906658369632856, |
|
"grad_norm": 2.8186683654785156, |
|
"learning_rate": 4.66559040590406e-06, |
|
"loss": 1.669, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.5003111387678905, |
|
"grad_norm": 2.618595600128174, |
|
"learning_rate": 4.6632841328413285e-06, |
|
"loss": 1.6767, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.5015556938394524, |
|
"grad_norm": 3.080432653427124, |
|
"learning_rate": 4.660977859778598e-06, |
|
"loss": 1.7096, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.5028002489110143, |
|
"grad_norm": 2.6780874729156494, |
|
"learning_rate": 4.658671586715867e-06, |
|
"loss": 1.6576, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.5040448039825762, |
|
"grad_norm": 2.8194704055786133, |
|
"learning_rate": 4.656365313653137e-06, |
|
"loss": 1.6405, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.5052893590541382, |
|
"grad_norm": 3.2195825576782227, |
|
"learning_rate": 4.654059040590406e-06, |
|
"loss": 1.6631, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.5065339141257, |
|
"grad_norm": 2.7719528675079346, |
|
"learning_rate": 4.651752767527676e-06, |
|
"loss": 1.6036, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.507778469197262, |
|
"grad_norm": 3.186394453048706, |
|
"learning_rate": 4.649446494464945e-06, |
|
"loss": 1.6508, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.5090230242688238, |
|
"grad_norm": 2.674922227859497, |
|
"learning_rate": 4.6471402214022145e-06, |
|
"loss": 1.6033, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.5102675793403858, |
|
"grad_norm": 3.2772562503814697, |
|
"learning_rate": 4.644833948339484e-06, |
|
"loss": 1.5916, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5102675793403858, |
|
"eval_loss": 1.784346342086792, |
|
"eval_runtime": 42.2178, |
|
"eval_samples_per_second": 23.687, |
|
"eval_steps_per_second": 0.995, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5115121344119478, |
|
"grad_norm": 2.9295618534088135, |
|
"learning_rate": 4.642527675276753e-06, |
|
"loss": 1.6693, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.5127566894835096, |
|
"grad_norm": 2.8492650985717773, |
|
"learning_rate": 4.640221402214023e-06, |
|
"loss": 1.658, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.5140012445550716, |
|
"grad_norm": 2.921211004257202, |
|
"learning_rate": 4.637915129151292e-06, |
|
"loss": 1.6559, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.5152457996266335, |
|
"grad_norm": 2.583395004272461, |
|
"learning_rate": 4.635608856088562e-06, |
|
"loss": 1.6869, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.5164903546981954, |
|
"grad_norm": 2.79984712600708, |
|
"learning_rate": 4.63330258302583e-06, |
|
"loss": 1.6007, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.5177349097697573, |
|
"grad_norm": 2.872856616973877, |
|
"learning_rate": 4.6309963099631e-06, |
|
"loss": 1.6132, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5189794648413192, |
|
"grad_norm": 2.7482125759124756, |
|
"learning_rate": 4.62869003690037e-06, |
|
"loss": 1.5805, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5202240199128811, |
|
"grad_norm": 2.9227206707000732, |
|
"learning_rate": 4.6263837638376384e-06, |
|
"loss": 1.6362, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5214685749844431, |
|
"grad_norm": 2.858302354812622, |
|
"learning_rate": 4.624077490774908e-06, |
|
"loss": 1.5934, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.522713130056005, |
|
"grad_norm": 3.2696762084960938, |
|
"learning_rate": 4.621771217712177e-06, |
|
"loss": 1.6215, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.522713130056005, |
|
"eval_loss": 1.7770200967788696, |
|
"eval_runtime": 46.0922, |
|
"eval_samples_per_second": 21.696, |
|
"eval_steps_per_second": 0.911, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5239576851275669, |
|
"grad_norm": 2.525740623474121, |
|
"learning_rate": 4.619464944649447e-06, |
|
"loss": 1.6193, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5252022401991289, |
|
"grad_norm": 2.9050614833831787, |
|
"learning_rate": 4.617158671586716e-06, |
|
"loss": 1.69, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.5264467952706907, |
|
"grad_norm": 2.959547519683838, |
|
"learning_rate": 4.6148523985239856e-06, |
|
"loss": 1.6874, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5276913503422527, |
|
"grad_norm": 2.6331565380096436, |
|
"learning_rate": 4.612546125461255e-06, |
|
"loss": 1.5795, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5289359054138145, |
|
"grad_norm": 2.7323524951934814, |
|
"learning_rate": 4.610239852398524e-06, |
|
"loss": 1.5882, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5301804604853765, |
|
"grad_norm": 2.8021790981292725, |
|
"learning_rate": 4.607933579335794e-06, |
|
"loss": 1.5852, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.5314250155569384, |
|
"grad_norm": 2.984299659729004, |
|
"learning_rate": 4.605627306273063e-06, |
|
"loss": 1.6168, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.5326695706285003, |
|
"grad_norm": 2.92749285697937, |
|
"learning_rate": 4.603321033210333e-06, |
|
"loss": 1.5768, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.5339141257000622, |
|
"grad_norm": 2.7681751251220703, |
|
"learning_rate": 4.601014760147602e-06, |
|
"loss": 1.5831, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.5351586807716241, |
|
"grad_norm": 2.965458631515503, |
|
"learning_rate": 4.5987084870848715e-06, |
|
"loss": 1.6412, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.5351586807716241, |
|
"eval_loss": 1.766748070716858, |
|
"eval_runtime": 51.1098, |
|
"eval_samples_per_second": 19.566, |
|
"eval_steps_per_second": 0.822, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.536403235843186, |
|
"grad_norm": 2.681853771209717, |
|
"learning_rate": 4.59640221402214e-06, |
|
"loss": 1.6302, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.537647790914748, |
|
"grad_norm": 2.698493480682373, |
|
"learning_rate": 4.5940959409594095e-06, |
|
"loss": 1.6774, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.5388923459863099, |
|
"grad_norm": 3.1715168952941895, |
|
"learning_rate": 4.59178966789668e-06, |
|
"loss": 1.5617, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.5401369010578718, |
|
"grad_norm": 2.98128080368042, |
|
"learning_rate": 4.589483394833948e-06, |
|
"loss": 1.5901, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.5413814561294338, |
|
"grad_norm": 3.3887572288513184, |
|
"learning_rate": 4.587177121771218e-06, |
|
"loss": 1.6448, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.5426260112009956, |
|
"grad_norm": 2.647975206375122, |
|
"learning_rate": 4.584870848708487e-06, |
|
"loss": 1.6209, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.5438705662725576, |
|
"grad_norm": 2.833264112472534, |
|
"learning_rate": 4.5825645756457575e-06, |
|
"loss": 1.6875, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.5451151213441194, |
|
"grad_norm": 3.0966858863830566, |
|
"learning_rate": 4.580258302583026e-06, |
|
"loss": 1.6291, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.5463596764156814, |
|
"grad_norm": 2.768793821334839, |
|
"learning_rate": 4.5779520295202955e-06, |
|
"loss": 1.6183, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.5476042314872434, |
|
"grad_norm": 3.4009342193603516, |
|
"learning_rate": 4.575645756457565e-06, |
|
"loss": 1.6602, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.5476042314872434, |
|
"eval_loss": 1.76938796043396, |
|
"eval_runtime": 46.2093, |
|
"eval_samples_per_second": 21.641, |
|
"eval_steps_per_second": 0.909, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.5488487865588052, |
|
"grad_norm": 2.769113063812256, |
|
"learning_rate": 4.573339483394834e-06, |
|
"loss": 1.6453, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.5500933416303672, |
|
"grad_norm": 3.3592801094055176, |
|
"learning_rate": 4.571033210332104e-06, |
|
"loss": 1.6602, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.551337896701929, |
|
"grad_norm": 2.6817564964294434, |
|
"learning_rate": 4.568726937269373e-06, |
|
"loss": 1.5714, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.552582451773491, |
|
"grad_norm": 3.093724250793457, |
|
"learning_rate": 4.566420664206643e-06, |
|
"loss": 1.6924, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.5538270068450529, |
|
"grad_norm": 2.766580104827881, |
|
"learning_rate": 4.564114391143912e-06, |
|
"loss": 1.5714, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.5550715619166148, |
|
"grad_norm": 2.868870973587036, |
|
"learning_rate": 4.5618081180811814e-06, |
|
"loss": 1.5823, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.5563161169881767, |
|
"grad_norm": 2.8286170959472656, |
|
"learning_rate": 4.559501845018451e-06, |
|
"loss": 1.6123, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.5575606720597387, |
|
"grad_norm": 2.66096568107605, |
|
"learning_rate": 4.5571955719557194e-06, |
|
"loss": 1.6044, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.5588052271313005, |
|
"grad_norm": 2.7411820888519287, |
|
"learning_rate": 4.55488929889299e-06, |
|
"loss": 1.6361, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.5600497822028625, |
|
"grad_norm": 2.7591240406036377, |
|
"learning_rate": 4.552583025830259e-06, |
|
"loss": 1.6257, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.5600497822028625, |
|
"eval_loss": 1.758321762084961, |
|
"eval_runtime": 52.9135, |
|
"eval_samples_per_second": 18.899, |
|
"eval_steps_per_second": 0.794, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.5612943372744243, |
|
"grad_norm": 2.733456611633301, |
|
"learning_rate": 4.550276752767528e-06, |
|
"loss": 1.7159, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.5625388923459863, |
|
"grad_norm": 2.7318639755249023, |
|
"learning_rate": 4.547970479704797e-06, |
|
"loss": 1.6062, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.5637834474175483, |
|
"grad_norm": 2.8235232830047607, |
|
"learning_rate": 4.5456642066420666e-06, |
|
"loss": 1.6616, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.5650280024891101, |
|
"grad_norm": 2.499028444290161, |
|
"learning_rate": 4.543357933579336e-06, |
|
"loss": 1.5795, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.5662725575606721, |
|
"grad_norm": 2.7547316551208496, |
|
"learning_rate": 4.541051660516605e-06, |
|
"loss": 1.6204, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.5675171126322339, |
|
"grad_norm": 2.7086215019226074, |
|
"learning_rate": 4.538745387453875e-06, |
|
"loss": 1.5611, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.5687616677037959, |
|
"grad_norm": 2.911731719970703, |
|
"learning_rate": 4.536439114391144e-06, |
|
"loss": 1.645, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.5700062227753578, |
|
"grad_norm": 2.5553250312805176, |
|
"learning_rate": 4.534132841328414e-06, |
|
"loss": 1.6093, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.5712507778469197, |
|
"grad_norm": 2.795919895172119, |
|
"learning_rate": 4.531826568265683e-06, |
|
"loss": 1.6, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.5724953329184816, |
|
"grad_norm": 2.92104434967041, |
|
"learning_rate": 4.5295202952029525e-06, |
|
"loss": 1.6444, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5724953329184816, |
|
"eval_loss": 1.760049819946289, |
|
"eval_runtime": 48.811, |
|
"eval_samples_per_second": 20.487, |
|
"eval_steps_per_second": 0.86, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5737398879900436, |
|
"grad_norm": 2.6339287757873535, |
|
"learning_rate": 4.527214022140222e-06, |
|
"loss": 1.6133, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.5749844430616055, |
|
"grad_norm": 2.531398057937622, |
|
"learning_rate": 4.524907749077491e-06, |
|
"loss": 1.5931, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.5762289981331674, |
|
"grad_norm": 2.8858656883239746, |
|
"learning_rate": 4.522601476014761e-06, |
|
"loss": 1.6287, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.5774735532047293, |
|
"grad_norm": 2.7309634685516357, |
|
"learning_rate": 4.520295202952029e-06, |
|
"loss": 1.6102, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.5787181082762912, |
|
"grad_norm": 2.7758548259735107, |
|
"learning_rate": 4.5179889298893e-06, |
|
"loss": 1.6292, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5799626633478532, |
|
"grad_norm": 2.6181721687316895, |
|
"learning_rate": 4.515682656826569e-06, |
|
"loss": 1.6113, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.581207218419415, |
|
"grad_norm": 2.6215691566467285, |
|
"learning_rate": 4.513376383763838e-06, |
|
"loss": 1.5158, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.582451773490977, |
|
"grad_norm": 2.727487087249756, |
|
"learning_rate": 4.511070110701107e-06, |
|
"loss": 1.6212, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.583696328562539, |
|
"grad_norm": 2.6248037815093994, |
|
"learning_rate": 4.5087638376383765e-06, |
|
"loss": 1.6117, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.5849408836341008, |
|
"grad_norm": 2.714291572570801, |
|
"learning_rate": 4.506457564575646e-06, |
|
"loss": 1.612, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5849408836341008, |
|
"eval_loss": 1.7569347620010376, |
|
"eval_runtime": 51.8475, |
|
"eval_samples_per_second": 19.287, |
|
"eval_steps_per_second": 0.81, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5861854387056628, |
|
"grad_norm": 2.752577066421509, |
|
"learning_rate": 4.504151291512915e-06, |
|
"loss": 1.5969, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.5874299937772246, |
|
"grad_norm": 2.688563346862793, |
|
"learning_rate": 4.501845018450185e-06, |
|
"loss": 1.6085, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.5886745488487866, |
|
"grad_norm": 2.8581559658050537, |
|
"learning_rate": 4.499538745387454e-06, |
|
"loss": 1.6688, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.5899191039203485, |
|
"grad_norm": 2.8520970344543457, |
|
"learning_rate": 4.497232472324724e-06, |
|
"loss": 1.6034, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.5911636589919104, |
|
"grad_norm": 2.8052237033843994, |
|
"learning_rate": 4.494926199261993e-06, |
|
"loss": 1.5656, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5924082140634723, |
|
"grad_norm": 2.619663953781128, |
|
"learning_rate": 4.4926199261992624e-06, |
|
"loss": 1.5543, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.5936527691350342, |
|
"grad_norm": 2.604964017868042, |
|
"learning_rate": 4.490313653136532e-06, |
|
"loss": 1.5671, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.5948973242065961, |
|
"grad_norm": 2.985004186630249, |
|
"learning_rate": 4.488007380073801e-06, |
|
"loss": 1.5681, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.5961418792781581, |
|
"grad_norm": 2.8348851203918457, |
|
"learning_rate": 4.485701107011071e-06, |
|
"loss": 1.6214, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.5973864343497199, |
|
"grad_norm": 2.580967664718628, |
|
"learning_rate": 4.483394833948339e-06, |
|
"loss": 1.5753, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5973864343497199, |
|
"eval_loss": 1.7542102336883545, |
|
"eval_runtime": 49.5092, |
|
"eval_samples_per_second": 20.198, |
|
"eval_steps_per_second": 0.848, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5986309894212819, |
|
"grad_norm": 3.0104517936706543, |
|
"learning_rate": 4.4810885608856096e-06, |
|
"loss": 1.6282, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.5998755444928439, |
|
"grad_norm": 2.9261820316314697, |
|
"learning_rate": 4.478782287822879e-06, |
|
"loss": 1.5811, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.6011200995644057, |
|
"grad_norm": 2.9920477867126465, |
|
"learning_rate": 4.4764760147601476e-06, |
|
"loss": 1.5905, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.6023646546359677, |
|
"grad_norm": 3.046887159347534, |
|
"learning_rate": 4.474169741697417e-06, |
|
"loss": 1.6587, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.6036092097075295, |
|
"grad_norm": 2.9660050868988037, |
|
"learning_rate": 4.471863468634686e-06, |
|
"loss": 1.5888, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.6048537647790915, |
|
"grad_norm": 2.770362377166748, |
|
"learning_rate": 4.469557195571957e-06, |
|
"loss": 1.5925, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.6060983198506534, |
|
"grad_norm": 2.621699094772339, |
|
"learning_rate": 4.467250922509225e-06, |
|
"loss": 1.5671, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.6073428749222153, |
|
"grad_norm": 2.749340295791626, |
|
"learning_rate": 4.464944649446495e-06, |
|
"loss": 1.5672, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.6085874299937772, |
|
"grad_norm": 3.091601848602295, |
|
"learning_rate": 4.462638376383764e-06, |
|
"loss": 1.5965, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.6098319850653391, |
|
"grad_norm": 2.6989245414733887, |
|
"learning_rate": 4.4603321033210335e-06, |
|
"loss": 1.5845, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.6098319850653391, |
|
"eval_loss": 1.7461379766464233, |
|
"eval_runtime": 52.879, |
|
"eval_samples_per_second": 18.911, |
|
"eval_steps_per_second": 0.794, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.611076540136901, |
|
"grad_norm": 2.8998405933380127, |
|
"learning_rate": 4.458025830258303e-06, |
|
"loss": 1.5764, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.612321095208463, |
|
"grad_norm": 2.6035947799682617, |
|
"learning_rate": 4.455719557195572e-06, |
|
"loss": 1.5988, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.6135656502800249, |
|
"grad_norm": 2.712857484817505, |
|
"learning_rate": 4.453413284132842e-06, |
|
"loss": 1.6276, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.6148102053515868, |
|
"grad_norm": 2.73052978515625, |
|
"learning_rate": 4.451107011070111e-06, |
|
"loss": 1.5907, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.6160547604231488, |
|
"grad_norm": 2.8072566986083984, |
|
"learning_rate": 4.448800738007381e-06, |
|
"loss": 1.5771, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.6172993154947106, |
|
"grad_norm": 2.6016132831573486, |
|
"learning_rate": 4.446494464944649e-06, |
|
"loss": 1.5983, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.6185438705662726, |
|
"grad_norm": 2.9697721004486084, |
|
"learning_rate": 4.4441881918819195e-06, |
|
"loss": 1.5974, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.6197884256378344, |
|
"grad_norm": 2.6966018676757812, |
|
"learning_rate": 4.441881918819189e-06, |
|
"loss": 1.6042, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.6210329807093964, |
|
"grad_norm": 2.871964931488037, |
|
"learning_rate": 4.439575645756458e-06, |
|
"loss": 1.5668, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.6222775357809583, |
|
"grad_norm": 2.791602611541748, |
|
"learning_rate": 4.437269372693727e-06, |
|
"loss": 1.5222, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6222775357809583, |
|
"eval_loss": 1.756259799003601, |
|
"eval_runtime": 53.0959, |
|
"eval_samples_per_second": 18.834, |
|
"eval_steps_per_second": 0.791, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6235220908525202, |
|
"grad_norm": 2.6534979343414307, |
|
"learning_rate": 4.434963099630996e-06, |
|
"loss": 1.5618, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.6247666459240822, |
|
"grad_norm": 3.0166008472442627, |
|
"learning_rate": 4.432656826568267e-06, |
|
"loss": 1.639, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.6260112009956441, |
|
"grad_norm": 2.7879786491394043, |
|
"learning_rate": 4.430350553505535e-06, |
|
"loss": 1.5521, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.627255756067206, |
|
"grad_norm": 2.695650339126587, |
|
"learning_rate": 4.428044280442805e-06, |
|
"loss": 1.6086, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.6285003111387679, |
|
"grad_norm": 2.8340883255004883, |
|
"learning_rate": 4.425738007380074e-06, |
|
"loss": 1.583, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.6297448662103298, |
|
"grad_norm": 2.872870683670044, |
|
"learning_rate": 4.4234317343173434e-06, |
|
"loss": 1.5633, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.6309894212818917, |
|
"grad_norm": 2.840087652206421, |
|
"learning_rate": 4.421125461254613e-06, |
|
"loss": 1.603, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.6322339763534537, |
|
"grad_norm": 2.86578631401062, |
|
"learning_rate": 4.418819188191882e-06, |
|
"loss": 1.6031, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.6334785314250155, |
|
"grad_norm": 2.419501543045044, |
|
"learning_rate": 4.416512915129152e-06, |
|
"loss": 1.5975, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.6347230864965775, |
|
"grad_norm": 2.6204769611358643, |
|
"learning_rate": 4.414206642066421e-06, |
|
"loss": 1.608, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.6347230864965775, |
|
"eval_loss": 1.7449734210968018, |
|
"eval_runtime": 44.3185, |
|
"eval_samples_per_second": 22.564, |
|
"eval_steps_per_second": 0.948, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.6359676415681393, |
|
"grad_norm": 2.837980270385742, |
|
"learning_rate": 4.4119003690036905e-06, |
|
"loss": 1.625, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.6372121966397013, |
|
"grad_norm": 2.589782238006592, |
|
"learning_rate": 4.40959409594096e-06, |
|
"loss": 1.5976, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.6384567517112633, |
|
"grad_norm": 2.596064567565918, |
|
"learning_rate": 4.407287822878229e-06, |
|
"loss": 1.5568, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.6397013067828251, |
|
"grad_norm": 3.031325578689575, |
|
"learning_rate": 4.404981549815499e-06, |
|
"loss": 1.5744, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.6409458618543871, |
|
"grad_norm": 2.581284761428833, |
|
"learning_rate": 4.402675276752768e-06, |
|
"loss": 1.5822, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.642190416925949, |
|
"grad_norm": 2.7712416648864746, |
|
"learning_rate": 4.400369003690037e-06, |
|
"loss": 1.606, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.6434349719975109, |
|
"grad_norm": 2.5830018520355225, |
|
"learning_rate": 4.398062730627306e-06, |
|
"loss": 1.5845, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.6446795270690728, |
|
"grad_norm": 2.6646499633789062, |
|
"learning_rate": 4.3957564575645765e-06, |
|
"loss": 1.6323, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.6459240821406347, |
|
"grad_norm": 2.614750862121582, |
|
"learning_rate": 4.393450184501845e-06, |
|
"loss": 1.5447, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.6471686372121966, |
|
"grad_norm": 2.6826741695404053, |
|
"learning_rate": 4.3911439114391145e-06, |
|
"loss": 1.6081, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.6471686372121966, |
|
"eval_loss": 1.7463606595993042, |
|
"eval_runtime": 45.7383, |
|
"eval_samples_per_second": 21.864, |
|
"eval_steps_per_second": 0.918, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.6484131922837586, |
|
"grad_norm": 2.9165496826171875, |
|
"learning_rate": 4.388837638376384e-06, |
|
"loss": 1.5853, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.6496577473553204, |
|
"grad_norm": 2.789862871170044, |
|
"learning_rate": 4.386531365313653e-06, |
|
"loss": 1.6115, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.6509023024268824, |
|
"grad_norm": 2.690093994140625, |
|
"learning_rate": 4.384225092250923e-06, |
|
"loss": 1.5625, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.6521468574984443, |
|
"grad_norm": 2.660036087036133, |
|
"learning_rate": 4.381918819188192e-06, |
|
"loss": 1.6065, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.6533914125700062, |
|
"grad_norm": 2.5939362049102783, |
|
"learning_rate": 4.379612546125462e-06, |
|
"loss": 1.5593, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.6546359676415682, |
|
"grad_norm": 2.6998841762542725, |
|
"learning_rate": 4.377306273062731e-06, |
|
"loss": 1.6168, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.65588052271313, |
|
"grad_norm": 2.5756912231445312, |
|
"learning_rate": 4.3750000000000005e-06, |
|
"loss": 1.528, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.657125077784692, |
|
"grad_norm": 2.6959640979766846, |
|
"learning_rate": 4.37269372693727e-06, |
|
"loss": 1.5987, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.6583696328562539, |
|
"grad_norm": 2.773494005203247, |
|
"learning_rate": 4.370387453874539e-06, |
|
"loss": 1.6018, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.6596141879278158, |
|
"grad_norm": 2.8194501399993896, |
|
"learning_rate": 4.368081180811809e-06, |
|
"loss": 1.6082, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.6596141879278158, |
|
"eval_loss": 1.7327930927276611, |
|
"eval_runtime": 46.7893, |
|
"eval_samples_per_second": 21.372, |
|
"eval_steps_per_second": 0.898, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.6608587429993777, |
|
"grad_norm": 2.721564292907715, |
|
"learning_rate": 4.365774907749078e-06, |
|
"loss": 1.5549, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.6621032980709396, |
|
"grad_norm": 2.6635236740112305, |
|
"learning_rate": 4.363468634686347e-06, |
|
"loss": 1.6016, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.6633478531425016, |
|
"grad_norm": 2.6541085243225098, |
|
"learning_rate": 4.361162361623616e-06, |
|
"loss": 1.5574, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.6645924082140635, |
|
"grad_norm": 2.9270925521850586, |
|
"learning_rate": 4.3588560885608864e-06, |
|
"loss": 1.5722, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.6658369632856254, |
|
"grad_norm": 2.6041259765625, |
|
"learning_rate": 4.356549815498156e-06, |
|
"loss": 1.6181, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.6670815183571873, |
|
"grad_norm": 2.7029967308044434, |
|
"learning_rate": 4.354243542435424e-06, |
|
"loss": 1.5576, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.6683260734287492, |
|
"grad_norm": 2.805567979812622, |
|
"learning_rate": 4.351937269372694e-06, |
|
"loss": 1.6279, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.6695706285003111, |
|
"grad_norm": 2.6513259410858154, |
|
"learning_rate": 4.349630996309963e-06, |
|
"loss": 1.5626, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.6708151835718731, |
|
"grad_norm": 2.7320876121520996, |
|
"learning_rate": 4.347324723247233e-06, |
|
"loss": 1.6, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.6720597386434349, |
|
"grad_norm": 2.9504754543304443, |
|
"learning_rate": 4.345018450184502e-06, |
|
"loss": 1.6755, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.6720597386434349, |
|
"eval_loss": 1.733923077583313, |
|
"eval_runtime": 45.4809, |
|
"eval_samples_per_second": 21.987, |
|
"eval_steps_per_second": 0.923, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.6733042937149969, |
|
"grad_norm": 2.7561354637145996, |
|
"learning_rate": 4.3427121771217715e-06, |
|
"loss": 1.5624, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.6745488487865589, |
|
"grad_norm": 2.612337827682495, |
|
"learning_rate": 4.340405904059041e-06, |
|
"loss": 1.5806, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.6757934038581207, |
|
"grad_norm": 3.2763278484344482, |
|
"learning_rate": 4.33809963099631e-06, |
|
"loss": 1.5939, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.6770379589296827, |
|
"grad_norm": 2.632627248764038, |
|
"learning_rate": 4.33579335793358e-06, |
|
"loss": 1.564, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.6782825140012445, |
|
"grad_norm": 2.9776413440704346, |
|
"learning_rate": 4.333487084870848e-06, |
|
"loss": 1.5624, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.6795270690728065, |
|
"grad_norm": 3.083935260772705, |
|
"learning_rate": 4.331180811808119e-06, |
|
"loss": 1.5957, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.6807716241443684, |
|
"grad_norm": 2.8425586223602295, |
|
"learning_rate": 4.328874538745388e-06, |
|
"loss": 1.5559, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.6820161792159303, |
|
"grad_norm": 3.0011188983917236, |
|
"learning_rate": 4.3265682656826575e-06, |
|
"loss": 1.5308, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.6832607342874922, |
|
"grad_norm": 2.7645163536071777, |
|
"learning_rate": 4.324261992619926e-06, |
|
"loss": 1.6114, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.6845052893590542, |
|
"grad_norm": 2.8784844875335693, |
|
"learning_rate": 4.321955719557196e-06, |
|
"loss": 1.5591, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.6845052893590542, |
|
"eval_loss": 1.7328442335128784, |
|
"eval_runtime": 45.5776, |
|
"eval_samples_per_second": 21.941, |
|
"eval_steps_per_second": 0.922, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.685749844430616, |
|
"grad_norm": 3.1323604583740234, |
|
"learning_rate": 4.319649446494466e-06, |
|
"loss": 1.5626, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.686994399502178, |
|
"grad_norm": 2.7367660999298096, |
|
"learning_rate": 4.317343173431734e-06, |
|
"loss": 1.5914, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.6882389545737398, |
|
"grad_norm": 3.056155204772949, |
|
"learning_rate": 4.315036900369004e-06, |
|
"loss": 1.6014, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.6894835096453018, |
|
"grad_norm": 2.7824926376342773, |
|
"learning_rate": 4.312730627306273e-06, |
|
"loss": 1.5508, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.6907280647168638, |
|
"grad_norm": 2.629610538482666, |
|
"learning_rate": 4.310424354243543e-06, |
|
"loss": 1.5385, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.6919726197884256, |
|
"grad_norm": 2.753021240234375, |
|
"learning_rate": 4.308118081180812e-06, |
|
"loss": 1.6211, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.6932171748599876, |
|
"grad_norm": 2.8113718032836914, |
|
"learning_rate": 4.3058118081180815e-06, |
|
"loss": 1.5904, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.6944617299315494, |
|
"grad_norm": 2.849471092224121, |
|
"learning_rate": 4.303505535055351e-06, |
|
"loss": 1.6126, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.6957062850031114, |
|
"grad_norm": 2.6744682788848877, |
|
"learning_rate": 4.30119926199262e-06, |
|
"loss": 1.5743, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.6969508400746733, |
|
"grad_norm": 2.7900655269622803, |
|
"learning_rate": 4.29889298892989e-06, |
|
"loss": 1.5865, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.6969508400746733, |
|
"eval_loss": 1.724618673324585, |
|
"eval_runtime": 44.2181, |
|
"eval_samples_per_second": 22.615, |
|
"eval_steps_per_second": 0.95, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.6981953951462352, |
|
"grad_norm": 2.7363393306732178, |
|
"learning_rate": 4.296586715867159e-06, |
|
"loss": 1.5347, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.6994399502177971, |
|
"grad_norm": 2.696748733520508, |
|
"learning_rate": 4.2942804428044286e-06, |
|
"loss": 1.5463, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.7006845052893591, |
|
"grad_norm": 2.5793895721435547, |
|
"learning_rate": 4.291974169741698e-06, |
|
"loss": 1.6231, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.701929060360921, |
|
"grad_norm": 2.576230764389038, |
|
"learning_rate": 4.289667896678967e-06, |
|
"loss": 1.5663, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.7031736154324829, |
|
"grad_norm": 2.718759298324585, |
|
"learning_rate": 4.287361623616236e-06, |
|
"loss": 1.5316, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.7044181705040448, |
|
"grad_norm": 2.651751756668091, |
|
"learning_rate": 4.285055350553506e-06, |
|
"loss": 1.5777, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.7056627255756067, |
|
"grad_norm": 2.931264638900757, |
|
"learning_rate": 4.282749077490776e-06, |
|
"loss": 1.5647, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.7069072806471687, |
|
"grad_norm": 2.8062546253204346, |
|
"learning_rate": 4.280442804428044e-06, |
|
"loss": 1.5685, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.7081518357187305, |
|
"grad_norm": 2.7551019191741943, |
|
"learning_rate": 4.278136531365314e-06, |
|
"loss": 1.5659, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.7093963907902925, |
|
"grad_norm": 2.799724578857422, |
|
"learning_rate": 4.275830258302583e-06, |
|
"loss": 1.5917, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.7093963907902925, |
|
"eval_loss": 1.7217484712600708, |
|
"eval_runtime": 44.5312, |
|
"eval_samples_per_second": 22.456, |
|
"eval_steps_per_second": 0.943, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.7106409458618543, |
|
"grad_norm": 2.8496406078338623, |
|
"learning_rate": 4.273523985239853e-06, |
|
"loss": 1.5607, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.7118855009334163, |
|
"grad_norm": 2.5612425804138184, |
|
"learning_rate": 4.271217712177122e-06, |
|
"loss": 1.5496, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.7131300560049783, |
|
"grad_norm": 2.6605045795440674, |
|
"learning_rate": 4.268911439114391e-06, |
|
"loss": 1.5371, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.7143746110765401, |
|
"grad_norm": 2.823802947998047, |
|
"learning_rate": 4.266605166051661e-06, |
|
"loss": 1.5699, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.7156191661481021, |
|
"grad_norm": 2.8212928771972656, |
|
"learning_rate": 4.26429889298893e-06, |
|
"loss": 1.5529, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.716863721219664, |
|
"grad_norm": 2.7193386554718018, |
|
"learning_rate": 4.2619926199262e-06, |
|
"loss": 1.5723, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.7181082762912259, |
|
"grad_norm": 2.8348941802978516, |
|
"learning_rate": 4.259686346863469e-06, |
|
"loss": 1.5212, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.7193528313627878, |
|
"grad_norm": 2.7177276611328125, |
|
"learning_rate": 4.2573800738007385e-06, |
|
"loss": 1.5688, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.7205973864343497, |
|
"grad_norm": 2.675734043121338, |
|
"learning_rate": 4.255073800738008e-06, |
|
"loss": 1.5345, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.7218419415059116, |
|
"grad_norm": 2.640942335128784, |
|
"learning_rate": 4.252767527675277e-06, |
|
"loss": 1.5874, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.7218419415059116, |
|
"eval_loss": 1.72821044921875, |
|
"eval_runtime": 45.9409, |
|
"eval_samples_per_second": 21.767, |
|
"eval_steps_per_second": 0.914, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.7230864965774736, |
|
"grad_norm": 2.785066604614258, |
|
"learning_rate": 4.250461254612546e-06, |
|
"loss": 1.5685, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.7243310516490354, |
|
"grad_norm": 2.8149311542510986, |
|
"learning_rate": 4.248154981549816e-06, |
|
"loss": 1.5106, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.7255756067205974, |
|
"grad_norm": 2.5781238079071045, |
|
"learning_rate": 4.245848708487086e-06, |
|
"loss": 1.5895, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.7268201617921594, |
|
"grad_norm": 2.6275103092193604, |
|
"learning_rate": 4.243542435424355e-06, |
|
"loss": 1.601, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.7280647168637212, |
|
"grad_norm": 2.5282299518585205, |
|
"learning_rate": 4.241236162361624e-06, |
|
"loss": 1.5376, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.7293092719352832, |
|
"grad_norm": 2.7331771850585938, |
|
"learning_rate": 4.238929889298893e-06, |
|
"loss": 1.5855, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.730553827006845, |
|
"grad_norm": 2.6941936016082764, |
|
"learning_rate": 4.236623616236163e-06, |
|
"loss": 1.5937, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.731798382078407, |
|
"grad_norm": 2.924062728881836, |
|
"learning_rate": 4.234317343173432e-06, |
|
"loss": 1.5914, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.7330429371499689, |
|
"grad_norm": 2.687530279159546, |
|
"learning_rate": 4.232011070110701e-06, |
|
"loss": 1.5598, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.7342874922215308, |
|
"grad_norm": 2.7230913639068604, |
|
"learning_rate": 4.229704797047971e-06, |
|
"loss": 1.6048, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7342874922215308, |
|
"eval_loss": 1.7240839004516602, |
|
"eval_runtime": 46.2617, |
|
"eval_samples_per_second": 21.616, |
|
"eval_steps_per_second": 0.908, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7355320472930927, |
|
"grad_norm": 2.833481788635254, |
|
"learning_rate": 4.22739852398524e-06, |
|
"loss": 1.545, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.7367766023646546, |
|
"grad_norm": 2.7287282943725586, |
|
"learning_rate": 4.2250922509225096e-06, |
|
"loss": 1.5795, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.7380211574362165, |
|
"grad_norm": 2.847174882888794, |
|
"learning_rate": 4.222785977859779e-06, |
|
"loss": 1.5469, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.7392657125077785, |
|
"grad_norm": 2.9214589595794678, |
|
"learning_rate": 4.220479704797048e-06, |
|
"loss": 1.5791, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.7405102675793404, |
|
"grad_norm": 2.7506580352783203, |
|
"learning_rate": 4.218173431734318e-06, |
|
"loss": 1.6578, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.7417548226509023, |
|
"grad_norm": 2.7006874084472656, |
|
"learning_rate": 4.215867158671587e-06, |
|
"loss": 1.5598, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.7429993777224643, |
|
"grad_norm": 2.795417547225952, |
|
"learning_rate": 4.213560885608857e-06, |
|
"loss": 1.5862, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.7442439327940261, |
|
"grad_norm": 2.7418057918548584, |
|
"learning_rate": 4.211254612546125e-06, |
|
"loss": 1.5717, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.7454884878655881, |
|
"grad_norm": 2.884915590286255, |
|
"learning_rate": 4.2089483394833955e-06, |
|
"loss": 1.5828, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.7467330429371499, |
|
"grad_norm": 2.752328395843506, |
|
"learning_rate": 4.206642066420665e-06, |
|
"loss": 1.576, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.7467330429371499, |
|
"eval_loss": 1.7281302213668823, |
|
"eval_runtime": 45.1978, |
|
"eval_samples_per_second": 22.125, |
|
"eval_steps_per_second": 0.929, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.7479775980087119, |
|
"grad_norm": 2.9103147983551025, |
|
"learning_rate": 4.2043357933579335e-06, |
|
"loss": 1.4959, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.7492221530802738, |
|
"grad_norm": 2.5317649841308594, |
|
"learning_rate": 4.202029520295203e-06, |
|
"loss": 1.5092, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.7504667081518357, |
|
"grad_norm": 2.7383415699005127, |
|
"learning_rate": 4.199723247232473e-06, |
|
"loss": 1.4696, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.7517112632233977, |
|
"grad_norm": 2.890305519104004, |
|
"learning_rate": 4.197416974169742e-06, |
|
"loss": 1.511, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.7529558182949595, |
|
"grad_norm": 2.7425734996795654, |
|
"learning_rate": 4.195110701107011e-06, |
|
"loss": 1.5658, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.7542003733665215, |
|
"grad_norm": 2.657726764678955, |
|
"learning_rate": 4.192804428044281e-06, |
|
"loss": 1.5648, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.7554449284380834, |
|
"grad_norm": 3.181201696395874, |
|
"learning_rate": 4.19049815498155e-06, |
|
"loss": 1.583, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.7566894835096453, |
|
"grad_norm": 2.665555715560913, |
|
"learning_rate": 4.1881918819188195e-06, |
|
"loss": 1.5439, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.7579340385812072, |
|
"grad_norm": 2.7328009605407715, |
|
"learning_rate": 4.185885608856089e-06, |
|
"loss": 1.5264, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.7591785936527692, |
|
"grad_norm": 2.7027499675750732, |
|
"learning_rate": 4.183579335793358e-06, |
|
"loss": 1.5327, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.7591785936527692, |
|
"eval_loss": 1.7161823511123657, |
|
"eval_runtime": 42.4811, |
|
"eval_samples_per_second": 23.54, |
|
"eval_steps_per_second": 0.989, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.760423148724331, |
|
"grad_norm": 2.6982595920562744, |
|
"learning_rate": 4.181273062730628e-06, |
|
"loss": 1.585, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.761667703795893, |
|
"grad_norm": 2.780768394470215, |
|
"learning_rate": 4.178966789667897e-06, |
|
"loss": 1.5754, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.7629122588674548, |
|
"grad_norm": 2.7140064239501953, |
|
"learning_rate": 4.176660516605167e-06, |
|
"loss": 1.5649, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.7641568139390168, |
|
"grad_norm": 2.60182523727417, |
|
"learning_rate": 4.174354243542435e-06, |
|
"loss": 1.5879, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.7654013690105788, |
|
"grad_norm": 2.6529550552368164, |
|
"learning_rate": 4.1720479704797054e-06, |
|
"loss": 1.5418, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.7666459240821406, |
|
"grad_norm": 2.7936999797821045, |
|
"learning_rate": 4.169741697416975e-06, |
|
"loss": 1.5993, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.7678904791537026, |
|
"grad_norm": 2.7894914150238037, |
|
"learning_rate": 4.1674354243542434e-06, |
|
"loss": 1.5962, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.7691350342252644, |
|
"grad_norm": 2.8094332218170166, |
|
"learning_rate": 4.165129151291513e-06, |
|
"loss": 1.5898, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.7703795892968264, |
|
"grad_norm": 2.9678053855895996, |
|
"learning_rate": 4.162822878228783e-06, |
|
"loss": 1.5602, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.7716241443683883, |
|
"grad_norm": 2.7238247394561768, |
|
"learning_rate": 4.1605166051660526e-06, |
|
"loss": 1.554, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.7716241443683883, |
|
"eval_loss": 1.720330834388733, |
|
"eval_runtime": 47.0576, |
|
"eval_samples_per_second": 21.251, |
|
"eval_steps_per_second": 0.893, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.7728686994399502, |
|
"grad_norm": 2.6484885215759277, |
|
"learning_rate": 4.158210332103321e-06, |
|
"loss": 1.5464, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.7741132545115121, |
|
"grad_norm": 2.722929000854492, |
|
"learning_rate": 4.1559040590405906e-06, |
|
"loss": 1.5471, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.7753578095830741, |
|
"grad_norm": 2.842886209487915, |
|
"learning_rate": 4.15359778597786e-06, |
|
"loss": 1.5512, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.776602364654636, |
|
"grad_norm": 2.647134780883789, |
|
"learning_rate": 4.151291512915129e-06, |
|
"loss": 1.5264, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.7778469197261979, |
|
"grad_norm": 2.763277053833008, |
|
"learning_rate": 4.148985239852399e-06, |
|
"loss": 1.5102, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.7790914747977598, |
|
"grad_norm": 2.9078922271728516, |
|
"learning_rate": 4.146678966789668e-06, |
|
"loss": 1.5538, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.7803360298693217, |
|
"grad_norm": 2.60658860206604, |
|
"learning_rate": 4.144372693726938e-06, |
|
"loss": 1.486, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.7815805849408837, |
|
"grad_norm": 2.5513789653778076, |
|
"learning_rate": 4.142066420664207e-06, |
|
"loss": 1.5323, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.7828251400124455, |
|
"grad_norm": 3.0412704944610596, |
|
"learning_rate": 4.1397601476014765e-06, |
|
"loss": 1.5835, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.7840696950840075, |
|
"grad_norm": 2.9483704566955566, |
|
"learning_rate": 4.137453874538745e-06, |
|
"loss": 1.5849, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.7840696950840075, |
|
"eval_loss": 1.7138603925704956, |
|
"eval_runtime": 47.7618, |
|
"eval_samples_per_second": 20.937, |
|
"eval_steps_per_second": 0.879, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.7853142501555694, |
|
"grad_norm": 2.784747362136841, |
|
"learning_rate": 4.135147601476015e-06, |
|
"loss": 1.5625, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.7865588052271313, |
|
"grad_norm": 2.775791883468628, |
|
"learning_rate": 4.132841328413285e-06, |
|
"loss": 1.5827, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.7878033602986932, |
|
"grad_norm": 2.867126226425171, |
|
"learning_rate": 4.130535055350554e-06, |
|
"loss": 1.5174, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.7890479153702551, |
|
"grad_norm": 2.7829818725585938, |
|
"learning_rate": 4.128228782287823e-06, |
|
"loss": 1.5853, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.790292470441817, |
|
"grad_norm": 2.630842924118042, |
|
"learning_rate": 4.125922509225092e-06, |
|
"loss": 1.5853, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.791537025513379, |
|
"grad_norm": 2.8100547790527344, |
|
"learning_rate": 4.1236162361623625e-06, |
|
"loss": 1.5162, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.7927815805849409, |
|
"grad_norm": 2.5983335971832275, |
|
"learning_rate": 4.121309963099631e-06, |
|
"loss": 1.5926, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.7940261356565028, |
|
"grad_norm": 2.826624870300293, |
|
"learning_rate": 4.1190036900369005e-06, |
|
"loss": 1.5527, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.7952706907280647, |
|
"grad_norm": 2.863215684890747, |
|
"learning_rate": 4.11669741697417e-06, |
|
"loss": 1.5514, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.7965152457996266, |
|
"grad_norm": 2.7492613792419434, |
|
"learning_rate": 4.114391143911439e-06, |
|
"loss": 1.5101, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.7965152457996266, |
|
"eval_loss": 1.7091425657272339, |
|
"eval_runtime": 46.5279, |
|
"eval_samples_per_second": 21.492, |
|
"eval_steps_per_second": 0.903, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.7977598008711886, |
|
"grad_norm": 2.9577860832214355, |
|
"learning_rate": 4.112084870848709e-06, |
|
"loss": 1.5496, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.7990043559427504, |
|
"grad_norm": 2.612668991088867, |
|
"learning_rate": 4.109778597785978e-06, |
|
"loss": 1.5204, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.8002489110143124, |
|
"grad_norm": 2.5427377223968506, |
|
"learning_rate": 4.107472324723248e-06, |
|
"loss": 1.5038, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.8014934660858744, |
|
"grad_norm": 2.7057406902313232, |
|
"learning_rate": 4.105166051660517e-06, |
|
"loss": 1.5831, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.8027380211574362, |
|
"grad_norm": 2.664353609085083, |
|
"learning_rate": 4.1028597785977864e-06, |
|
"loss": 1.555, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.8039825762289982, |
|
"grad_norm": 2.5452728271484375, |
|
"learning_rate": 4.100553505535056e-06, |
|
"loss": 1.5847, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.80522713130056, |
|
"grad_norm": 2.5370709896087646, |
|
"learning_rate": 4.098247232472325e-06, |
|
"loss": 1.5396, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.806471686372122, |
|
"grad_norm": 2.730109930038452, |
|
"learning_rate": 4.095940959409595e-06, |
|
"loss": 1.5234, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.8077162414436839, |
|
"grad_norm": 2.586108446121216, |
|
"learning_rate": 4.093634686346864e-06, |
|
"loss": 1.5885, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.8089607965152458, |
|
"grad_norm": 2.557053804397583, |
|
"learning_rate": 4.091328413284133e-06, |
|
"loss": 1.483, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.8089607965152458, |
|
"eval_loss": 1.7204149961471558, |
|
"eval_runtime": 48.0041, |
|
"eval_samples_per_second": 20.832, |
|
"eval_steps_per_second": 0.875, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.8102053515868077, |
|
"grad_norm": 2.673672676086426, |
|
"learning_rate": 4.089022140221402e-06, |
|
"loss": 1.4837, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.8114499066583696, |
|
"grad_norm": 2.6465933322906494, |
|
"learning_rate": 4.086715867158672e-06, |
|
"loss": 1.5219, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.8126944617299315, |
|
"grad_norm": 2.6897783279418945, |
|
"learning_rate": 4.084409594095941e-06, |
|
"loss": 1.5116, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.8139390168014935, |
|
"grad_norm": 2.644369602203369, |
|
"learning_rate": 4.08210332103321e-06, |
|
"loss": 1.5141, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.8151835718730553, |
|
"grad_norm": 2.7475595474243164, |
|
"learning_rate": 4.07979704797048e-06, |
|
"loss": 1.5509, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.8164281269446173, |
|
"grad_norm": 2.72043514251709, |
|
"learning_rate": 4.077490774907749e-06, |
|
"loss": 1.6098, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.8176726820161793, |
|
"grad_norm": 2.648069381713867, |
|
"learning_rate": 4.075184501845019e-06, |
|
"loss": 1.5368, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.8189172370877411, |
|
"grad_norm": 2.644893169403076, |
|
"learning_rate": 4.072878228782288e-06, |
|
"loss": 1.492, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.8201617921593031, |
|
"grad_norm": 2.9441068172454834, |
|
"learning_rate": 4.0705719557195575e-06, |
|
"loss": 1.5904, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.8214063472308649, |
|
"grad_norm": 2.752913475036621, |
|
"learning_rate": 4.068265682656827e-06, |
|
"loss": 1.5544, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.8214063472308649, |
|
"eval_loss": 1.709177017211914, |
|
"eval_runtime": 48.0401, |
|
"eval_samples_per_second": 20.816, |
|
"eval_steps_per_second": 0.874, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.8226509023024269, |
|
"grad_norm": 2.6959197521209717, |
|
"learning_rate": 4.065959409594096e-06, |
|
"loss": 1.5553, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.8238954573739888, |
|
"grad_norm": 2.8757998943328857, |
|
"learning_rate": 4.063653136531366e-06, |
|
"loss": 1.554, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.8251400124455507, |
|
"grad_norm": 2.7723867893218994, |
|
"learning_rate": 4.061346863468635e-06, |
|
"loss": 1.5499, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.8263845675171126, |
|
"grad_norm": 2.7620463371276855, |
|
"learning_rate": 4.059040590405905e-06, |
|
"loss": 1.5962, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.8276291225886746, |
|
"grad_norm": 2.8601174354553223, |
|
"learning_rate": 4.056734317343174e-06, |
|
"loss": 1.5218, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.8288736776602365, |
|
"grad_norm": 2.81356143951416, |
|
"learning_rate": 4.054428044280443e-06, |
|
"loss": 1.5382, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.8301182327317984, |
|
"grad_norm": 2.904679298400879, |
|
"learning_rate": 4.052121771217712e-06, |
|
"loss": 1.5314, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.8313627878033603, |
|
"grad_norm": 2.798236846923828, |
|
"learning_rate": 4.049815498154982e-06, |
|
"loss": 1.5989, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.8326073428749222, |
|
"grad_norm": 2.8923728466033936, |
|
"learning_rate": 4.047509225092252e-06, |
|
"loss": 1.5475, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.8338518979464842, |
|
"grad_norm": 2.6134703159332275, |
|
"learning_rate": 4.04520295202952e-06, |
|
"loss": 1.5232, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.8338518979464842, |
|
"eval_loss": 1.7080007791519165, |
|
"eval_runtime": 52.6531, |
|
"eval_samples_per_second": 18.992, |
|
"eval_steps_per_second": 0.798, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.835096453018046, |
|
"grad_norm": 2.6788907051086426, |
|
"learning_rate": 4.04289667896679e-06, |
|
"loss": 1.5437, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.836341008089608, |
|
"grad_norm": 2.872429847717285, |
|
"learning_rate": 4.04059040590406e-06, |
|
"loss": 1.5368, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.8375855631611698, |
|
"grad_norm": 2.714857578277588, |
|
"learning_rate": 4.038284132841329e-06, |
|
"loss": 1.5404, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.8388301182327318, |
|
"grad_norm": 2.6955931186676025, |
|
"learning_rate": 4.035977859778598e-06, |
|
"loss": 1.5501, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.8400746733042938, |
|
"grad_norm": 2.7090582847595215, |
|
"learning_rate": 4.0336715867158674e-06, |
|
"loss": 1.5068, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.8413192283758556, |
|
"grad_norm": 2.8406383991241455, |
|
"learning_rate": 4.031365313653137e-06, |
|
"loss": 1.5529, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.8425637834474176, |
|
"grad_norm": 2.965557098388672, |
|
"learning_rate": 4.029059040590406e-06, |
|
"loss": 1.5847, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.8438083385189795, |
|
"grad_norm": 2.664083242416382, |
|
"learning_rate": 4.026752767527676e-06, |
|
"loss": 1.5701, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.8450528935905414, |
|
"grad_norm": 2.7362401485443115, |
|
"learning_rate": 4.024446494464945e-06, |
|
"loss": 1.5496, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.8462974486621033, |
|
"grad_norm": 2.695342540740967, |
|
"learning_rate": 4.0221402214022145e-06, |
|
"loss": 1.5009, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.8462974486621033, |
|
"eval_loss": 1.7063637971878052, |
|
"eval_runtime": 52.7749, |
|
"eval_samples_per_second": 18.948, |
|
"eval_steps_per_second": 0.796, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.8475420037336652, |
|
"grad_norm": 2.615994453430176, |
|
"learning_rate": 4.019833948339484e-06, |
|
"loss": 1.5281, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.8487865588052271, |
|
"grad_norm": 3.065732002258301, |
|
"learning_rate": 4.017527675276753e-06, |
|
"loss": 1.5616, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.8500311138767891, |
|
"grad_norm": 2.812166690826416, |
|
"learning_rate": 4.015221402214022e-06, |
|
"loss": 1.5573, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.8512756689483509, |
|
"grad_norm": 2.745252847671509, |
|
"learning_rate": 4.012915129151292e-06, |
|
"loss": 1.5426, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.8525202240199129, |
|
"grad_norm": 2.862264633178711, |
|
"learning_rate": 4.010608856088562e-06, |
|
"loss": 1.4973, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.8537647790914747, |
|
"grad_norm": 2.752469062805176, |
|
"learning_rate": 4.00830258302583e-06, |
|
"loss": 1.5416, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.8550093341630367, |
|
"grad_norm": 2.5692927837371826, |
|
"learning_rate": 4.0059963099631e-06, |
|
"loss": 1.5021, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.8562538892345987, |
|
"grad_norm": 2.896444320678711, |
|
"learning_rate": 4.003690036900369e-06, |
|
"loss": 1.5794, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.8574984443061605, |
|
"grad_norm": 2.7448434829711914, |
|
"learning_rate": 4.0013837638376385e-06, |
|
"loss": 1.5138, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.8587429993777225, |
|
"grad_norm": 2.8280975818634033, |
|
"learning_rate": 3.999077490774908e-06, |
|
"loss": 1.5071, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.8587429993777225, |
|
"eval_loss": 1.7048547267913818, |
|
"eval_runtime": 52.8015, |
|
"eval_samples_per_second": 18.939, |
|
"eval_steps_per_second": 0.795, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.8599875544492844, |
|
"grad_norm": 2.701293706893921, |
|
"learning_rate": 3.996771217712177e-06, |
|
"loss": 1.5457, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.8612321095208463, |
|
"grad_norm": 2.673452138900757, |
|
"learning_rate": 3.994464944649447e-06, |
|
"loss": 1.5355, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.8624766645924082, |
|
"grad_norm": 2.7728021144866943, |
|
"learning_rate": 3.992158671586716e-06, |
|
"loss": 1.5486, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.8637212196639701, |
|
"grad_norm": 2.747614860534668, |
|
"learning_rate": 3.989852398523986e-06, |
|
"loss": 1.5771, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.864965774735532, |
|
"grad_norm": 2.6240482330322266, |
|
"learning_rate": 3.987546125461255e-06, |
|
"loss": 1.5338, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.866210329807094, |
|
"grad_norm": 2.8253114223480225, |
|
"learning_rate": 3.9852398523985245e-06, |
|
"loss": 1.5029, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.8674548848786559, |
|
"grad_norm": 2.7980496883392334, |
|
"learning_rate": 3.982933579335794e-06, |
|
"loss": 1.5239, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.8686994399502178, |
|
"grad_norm": 2.937443971633911, |
|
"learning_rate": 3.980627306273063e-06, |
|
"loss": 1.5406, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.8699439950217797, |
|
"grad_norm": 2.714353322982788, |
|
"learning_rate": 3.978321033210332e-06, |
|
"loss": 1.5724, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.8711885500933416, |
|
"grad_norm": 2.7986929416656494, |
|
"learning_rate": 3.976014760147602e-06, |
|
"loss": 1.5687, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.8711885500933416, |
|
"eval_loss": 1.6998120546340942, |
|
"eval_runtime": 46.4448, |
|
"eval_samples_per_second": 21.531, |
|
"eval_steps_per_second": 0.904, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.8724331051649036, |
|
"grad_norm": 2.755740165710449, |
|
"learning_rate": 3.973708487084872e-06, |
|
"loss": 1.5047, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.8736776602364654, |
|
"grad_norm": 2.760399341583252, |
|
"learning_rate": 3.97140221402214e-06, |
|
"loss": 1.5406, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.8749222153080274, |
|
"grad_norm": 2.780214786529541, |
|
"learning_rate": 3.96909594095941e-06, |
|
"loss": 1.551, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.8761667703795893, |
|
"grad_norm": 2.669801950454712, |
|
"learning_rate": 3.966789667896679e-06, |
|
"loss": 1.5578, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.8774113254511512, |
|
"grad_norm": 2.729586362838745, |
|
"learning_rate": 3.964483394833948e-06, |
|
"loss": 1.4808, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.8786558805227132, |
|
"grad_norm": 2.608551263809204, |
|
"learning_rate": 3.962177121771218e-06, |
|
"loss": 1.4649, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.879900435594275, |
|
"grad_norm": 3.0885748863220215, |
|
"learning_rate": 3.959870848708487e-06, |
|
"loss": 1.5602, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.881144990665837, |
|
"grad_norm": 2.659917116165161, |
|
"learning_rate": 3.957564575645757e-06, |
|
"loss": 1.5341, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.8823895457373989, |
|
"grad_norm": 2.718696355819702, |
|
"learning_rate": 3.955258302583026e-06, |
|
"loss": 1.5232, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.8836341008089608, |
|
"grad_norm": 2.902811288833618, |
|
"learning_rate": 3.9529520295202955e-06, |
|
"loss": 1.517, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.8836341008089608, |
|
"eval_loss": 1.7022311687469482, |
|
"eval_runtime": 43.1291, |
|
"eval_samples_per_second": 23.186, |
|
"eval_steps_per_second": 0.974, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.8848786558805227, |
|
"grad_norm": 2.816802978515625, |
|
"learning_rate": 3.950645756457565e-06, |
|
"loss": 1.5097, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.8861232109520847, |
|
"grad_norm": 2.8189518451690674, |
|
"learning_rate": 3.948339483394834e-06, |
|
"loss": 1.5842, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.8873677660236465, |
|
"grad_norm": 2.879767417907715, |
|
"learning_rate": 3.946033210332104e-06, |
|
"loss": 1.5484, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.8886123210952085, |
|
"grad_norm": 2.8263540267944336, |
|
"learning_rate": 3.943726937269373e-06, |
|
"loss": 1.5259, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.8898568761667703, |
|
"grad_norm": 2.655139923095703, |
|
"learning_rate": 3.941420664206642e-06, |
|
"loss": 1.482, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.8911014312383323, |
|
"grad_norm": 2.63429856300354, |
|
"learning_rate": 3.939114391143912e-06, |
|
"loss": 1.512, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.8923459863098943, |
|
"grad_norm": 2.9241650104522705, |
|
"learning_rate": 3.9368081180811815e-06, |
|
"loss": 1.5347, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.8935905413814561, |
|
"grad_norm": 2.8512847423553467, |
|
"learning_rate": 3.934501845018451e-06, |
|
"loss": 1.5791, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.8948350964530181, |
|
"grad_norm": 2.5280821323394775, |
|
"learning_rate": 3.9321955719557195e-06, |
|
"loss": 1.4823, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.8960796515245799, |
|
"grad_norm": 2.6618733406066895, |
|
"learning_rate": 3.929889298892989e-06, |
|
"loss": 1.4708, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.8960796515245799, |
|
"eval_loss": 1.6904946565628052, |
|
"eval_runtime": 49.3149, |
|
"eval_samples_per_second": 20.278, |
|
"eval_steps_per_second": 0.852, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.8973242065961419, |
|
"grad_norm": 2.8406248092651367, |
|
"learning_rate": 3.927583025830259e-06, |
|
"loss": 1.5425, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.8985687616677038, |
|
"grad_norm": 2.807971715927124, |
|
"learning_rate": 3.925276752767528e-06, |
|
"loss": 1.5617, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.8998133167392657, |
|
"grad_norm": 2.6703925132751465, |
|
"learning_rate": 3.922970479704797e-06, |
|
"loss": 1.5544, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.9010578718108276, |
|
"grad_norm": 2.785083770751953, |
|
"learning_rate": 3.920664206642067e-06, |
|
"loss": 1.5696, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.9023024268823896, |
|
"grad_norm": 2.7778568267822266, |
|
"learning_rate": 3.918357933579336e-06, |
|
"loss": 1.5262, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.9035469819539514, |
|
"grad_norm": 2.6343681812286377, |
|
"learning_rate": 3.9160516605166055e-06, |
|
"loss": 1.5509, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.9047915370255134, |
|
"grad_norm": 2.6923940181732178, |
|
"learning_rate": 3.913745387453875e-06, |
|
"loss": 1.5425, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.9060360920970753, |
|
"grad_norm": 2.732797861099243, |
|
"learning_rate": 3.911439114391144e-06, |
|
"loss": 1.4524, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.9072806471686372, |
|
"grad_norm": 2.6365227699279785, |
|
"learning_rate": 3.909132841328414e-06, |
|
"loss": 1.5617, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.9085252022401992, |
|
"grad_norm": 2.646639823913574, |
|
"learning_rate": 3.906826568265683e-06, |
|
"loss": 1.5122, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.9085252022401992, |
|
"eval_loss": 1.6920651197433472, |
|
"eval_runtime": 48.5914, |
|
"eval_samples_per_second": 20.58, |
|
"eval_steps_per_second": 0.864, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.909769757311761, |
|
"grad_norm": 2.5834903717041016, |
|
"learning_rate": 3.9045202952029526e-06, |
|
"loss": 1.5095, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.911014312383323, |
|
"grad_norm": 2.4998092651367188, |
|
"learning_rate": 3.902214022140222e-06, |
|
"loss": 1.4376, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.9122588674548848, |
|
"grad_norm": 2.5143818855285645, |
|
"learning_rate": 3.899907749077491e-06, |
|
"loss": 1.5222, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.9135034225264468, |
|
"grad_norm": 2.7795114517211914, |
|
"learning_rate": 3.897601476014761e-06, |
|
"loss": 1.5498, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.9147479775980087, |
|
"grad_norm": 2.819859027862549, |
|
"learning_rate": 3.895295202952029e-06, |
|
"loss": 1.5288, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.9159925326695706, |
|
"grad_norm": 2.663306951522827, |
|
"learning_rate": 3.892988929889299e-06, |
|
"loss": 1.4517, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.9172370877411326, |
|
"grad_norm": 2.775521993637085, |
|
"learning_rate": 3.890682656826569e-06, |
|
"loss": 1.529, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.9184816428126945, |
|
"grad_norm": 2.761237859725952, |
|
"learning_rate": 3.888376383763838e-06, |
|
"loss": 1.4781, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.9197261978842564, |
|
"grad_norm": 2.5410993099212646, |
|
"learning_rate": 3.886070110701107e-06, |
|
"loss": 1.4205, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.9209707529558183, |
|
"grad_norm": 2.6289987564086914, |
|
"learning_rate": 3.8837638376383765e-06, |
|
"loss": 1.5253, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.9209707529558183, |
|
"eval_loss": 1.700616717338562, |
|
"eval_runtime": 46.1038, |
|
"eval_samples_per_second": 21.69, |
|
"eval_steps_per_second": 0.911, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.9222153080273802, |
|
"grad_norm": 2.694406270980835, |
|
"learning_rate": 3.881457564575646e-06, |
|
"loss": 1.5335, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.9234598630989421, |
|
"grad_norm": 2.597160577774048, |
|
"learning_rate": 3.879151291512915e-06, |
|
"loss": 1.5026, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.9247044181705041, |
|
"grad_norm": 2.625049114227295, |
|
"learning_rate": 3.876845018450185e-06, |
|
"loss": 1.4885, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.9259489732420659, |
|
"grad_norm": 2.5983152389526367, |
|
"learning_rate": 3.874538745387454e-06, |
|
"loss": 1.5015, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.9271935283136279, |
|
"grad_norm": 2.6912894248962402, |
|
"learning_rate": 3.872232472324724e-06, |
|
"loss": 1.5179, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.9284380833851898, |
|
"grad_norm": 2.699328899383545, |
|
"learning_rate": 3.869926199261993e-06, |
|
"loss": 1.5571, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.9296826384567517, |
|
"grad_norm": 2.7117769718170166, |
|
"learning_rate": 3.8676199261992625e-06, |
|
"loss": 1.5225, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.9309271935283137, |
|
"grad_norm": 2.785900354385376, |
|
"learning_rate": 3.865313653136532e-06, |
|
"loss": 1.5571, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.9321717485998755, |
|
"grad_norm": 2.6901021003723145, |
|
"learning_rate": 3.863007380073801e-06, |
|
"loss": 1.5108, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.9334163036714375, |
|
"grad_norm": 2.55047607421875, |
|
"learning_rate": 3.860701107011071e-06, |
|
"loss": 1.4934, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.9334163036714375, |
|
"eval_loss": 1.693824291229248, |
|
"eval_runtime": 45.7403, |
|
"eval_samples_per_second": 21.863, |
|
"eval_steps_per_second": 0.918, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.9346608587429994, |
|
"grad_norm": 2.7620902061462402, |
|
"learning_rate": 3.858394833948339e-06, |
|
"loss": 1.5129, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.9359054138145613, |
|
"grad_norm": 2.798583984375, |
|
"learning_rate": 3.856088560885609e-06, |
|
"loss": 1.4986, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.9371499688861232, |
|
"grad_norm": 2.5885400772094727, |
|
"learning_rate": 3.853782287822879e-06, |
|
"loss": 1.4779, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.9383945239576851, |
|
"grad_norm": 2.8280391693115234, |
|
"learning_rate": 3.851476014760148e-06, |
|
"loss": 1.6156, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.939639079029247, |
|
"grad_norm": 2.7616288661956787, |
|
"learning_rate": 3.849169741697417e-06, |
|
"loss": 1.5051, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.940883634100809, |
|
"grad_norm": 2.598370313644409, |
|
"learning_rate": 3.8468634686346865e-06, |
|
"loss": 1.5569, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.9421281891723708, |
|
"grad_norm": 2.649433135986328, |
|
"learning_rate": 3.844557195571956e-06, |
|
"loss": 1.515, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.9433727442439328, |
|
"grad_norm": 2.568361759185791, |
|
"learning_rate": 3.842250922509225e-06, |
|
"loss": 1.4926, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.9446172993154948, |
|
"grad_norm": 2.6539297103881836, |
|
"learning_rate": 3.839944649446495e-06, |
|
"loss": 1.4813, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.9458618543870566, |
|
"grad_norm": 2.6790902614593506, |
|
"learning_rate": 3.837638376383764e-06, |
|
"loss": 1.5056, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9458618543870566, |
|
"eval_loss": 1.6913493871688843, |
|
"eval_runtime": 44.5061, |
|
"eval_samples_per_second": 22.469, |
|
"eval_steps_per_second": 0.944, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9471064094586186, |
|
"grad_norm": 2.6265010833740234, |
|
"learning_rate": 3.8353321033210336e-06, |
|
"loss": 1.5421, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.9483509645301804, |
|
"grad_norm": 2.5852062702178955, |
|
"learning_rate": 3.833025830258303e-06, |
|
"loss": 1.5145, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.9495955196017424, |
|
"grad_norm": 2.6579225063323975, |
|
"learning_rate": 3.830719557195572e-06, |
|
"loss": 1.4988, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.9508400746733043, |
|
"grad_norm": 2.7944133281707764, |
|
"learning_rate": 3.828413284132842e-06, |
|
"loss": 1.5199, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.9520846297448662, |
|
"grad_norm": 2.5813052654266357, |
|
"learning_rate": 3.826107011070111e-06, |
|
"loss": 1.5072, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.9533291848164281, |
|
"grad_norm": 2.551501512527466, |
|
"learning_rate": 3.823800738007381e-06, |
|
"loss": 1.5348, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.95457373988799, |
|
"grad_norm": 2.755141496658325, |
|
"learning_rate": 3.821494464944649e-06, |
|
"loss": 1.4787, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.955818294959552, |
|
"grad_norm": 2.521984338760376, |
|
"learning_rate": 3.819188191881919e-06, |
|
"loss": 1.4778, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.9570628500311139, |
|
"grad_norm": 2.6301770210266113, |
|
"learning_rate": 3.816881918819189e-06, |
|
"loss": 1.4703, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.9583074051026758, |
|
"grad_norm": 2.972749710083008, |
|
"learning_rate": 3.814575645756458e-06, |
|
"loss": 1.5516, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.9583074051026758, |
|
"eval_loss": 1.689251184463501, |
|
"eval_runtime": 44.3352, |
|
"eval_samples_per_second": 22.555, |
|
"eval_steps_per_second": 0.947, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.9595519601742377, |
|
"grad_norm": 2.7247636318206787, |
|
"learning_rate": 3.812269372693727e-06, |
|
"loss": 1.5659, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.9607965152457997, |
|
"grad_norm": 2.6853489875793457, |
|
"learning_rate": 3.809963099630997e-06, |
|
"loss": 1.5149, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.9620410703173615, |
|
"grad_norm": 2.6767218112945557, |
|
"learning_rate": 3.8076568265682662e-06, |
|
"loss": 1.5573, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.9632856253889235, |
|
"grad_norm": 2.5949296951293945, |
|
"learning_rate": 3.8053505535055352e-06, |
|
"loss": 1.517, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.9645301804604853, |
|
"grad_norm": 2.841171979904175, |
|
"learning_rate": 3.8030442804428046e-06, |
|
"loss": 1.515, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.9657747355320473, |
|
"grad_norm": 2.7451512813568115, |
|
"learning_rate": 3.800738007380074e-06, |
|
"loss": 1.516, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.9670192906036092, |
|
"grad_norm": 2.6852564811706543, |
|
"learning_rate": 3.798431734317343e-06, |
|
"loss": 1.5187, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.9682638456751711, |
|
"grad_norm": 2.72956919670105, |
|
"learning_rate": 3.796125461254613e-06, |
|
"loss": 1.5344, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.9695084007467331, |
|
"grad_norm": 2.672933578491211, |
|
"learning_rate": 3.7938191881918823e-06, |
|
"loss": 1.5143, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.9707529558182949, |
|
"grad_norm": 2.6565003395080566, |
|
"learning_rate": 3.7915129151291518e-06, |
|
"loss": 1.4759, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.9707529558182949, |
|
"eval_loss": 1.6836074590682983, |
|
"eval_runtime": 44.0331, |
|
"eval_samples_per_second": 22.71, |
|
"eval_steps_per_second": 0.954, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.9719975108898569, |
|
"grad_norm": 2.665921449661255, |
|
"learning_rate": 3.7892066420664208e-06, |
|
"loss": 1.4985, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.9732420659614188, |
|
"grad_norm": 2.671757221221924, |
|
"learning_rate": 3.7869003690036906e-06, |
|
"loss": 1.5014, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.9744866210329807, |
|
"grad_norm": 2.7320780754089355, |
|
"learning_rate": 3.78459409594096e-06, |
|
"loss": 1.5052, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.9757311761045426, |
|
"grad_norm": 3.0772581100463867, |
|
"learning_rate": 3.782287822878229e-06, |
|
"loss": 1.5666, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.9769757311761046, |
|
"grad_norm": 2.651129722595215, |
|
"learning_rate": 3.7799815498154984e-06, |
|
"loss": 1.4954, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.9782202862476664, |
|
"grad_norm": 2.6942505836486816, |
|
"learning_rate": 3.777675276752768e-06, |
|
"loss": 1.5186, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.9794648413192284, |
|
"grad_norm": 2.767474412918091, |
|
"learning_rate": 3.775369003690037e-06, |
|
"loss": 1.4699, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.9807093963907902, |
|
"grad_norm": 2.728269100189209, |
|
"learning_rate": 3.7730627306273067e-06, |
|
"loss": 1.5176, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.9819539514623522, |
|
"grad_norm": 2.697984218597412, |
|
"learning_rate": 3.770756457564576e-06, |
|
"loss": 1.5327, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.9831985065339142, |
|
"grad_norm": 2.8172037601470947, |
|
"learning_rate": 3.768450184501845e-06, |
|
"loss": 1.4996, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.9831985065339142, |
|
"eval_loss": 1.7013198137283325, |
|
"eval_runtime": 44.5233, |
|
"eval_samples_per_second": 22.46, |
|
"eval_steps_per_second": 0.943, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.984443061605476, |
|
"grad_norm": 2.8202197551727295, |
|
"learning_rate": 3.7661439114391146e-06, |
|
"loss": 1.5193, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.985687616677038, |
|
"grad_norm": 2.764653444290161, |
|
"learning_rate": 3.763837638376384e-06, |
|
"loss": 1.5544, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.9869321717485999, |
|
"grad_norm": 2.6528818607330322, |
|
"learning_rate": 3.761531365313654e-06, |
|
"loss": 1.5328, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.9881767268201618, |
|
"grad_norm": 2.643666982650757, |
|
"learning_rate": 3.759225092250923e-06, |
|
"loss": 1.5279, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.9894212818917237, |
|
"grad_norm": 2.631007432937622, |
|
"learning_rate": 3.7569188191881922e-06, |
|
"loss": 1.5135, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.9906658369632856, |
|
"grad_norm": 2.6622400283813477, |
|
"learning_rate": 3.7546125461254617e-06, |
|
"loss": 1.5099, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.9919103920348475, |
|
"grad_norm": 2.5758702754974365, |
|
"learning_rate": 3.7523062730627307e-06, |
|
"loss": 1.4691, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.9931549471064095, |
|
"grad_norm": 2.822147846221924, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.4846, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.9943995021779714, |
|
"grad_norm": 3.166783571243286, |
|
"learning_rate": 3.74769372693727e-06, |
|
"loss": 1.5586, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.9956440572495333, |
|
"grad_norm": 2.931654691696167, |
|
"learning_rate": 3.745387453874539e-06, |
|
"loss": 1.4773, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.9956440572495333, |
|
"eval_loss": 1.6904431581497192, |
|
"eval_runtime": 43.927, |
|
"eval_samples_per_second": 22.765, |
|
"eval_steps_per_second": 0.956, |
|
"step": 800 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2409, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.493847527489536e+17, |
|
"train_batch_size": 3, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|