{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.49782202862476665,
  "eval_steps": 10,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0012445550715619166,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8073,
      "step": 1
    },
    {
      "epoch": 0.002489110143123833,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8285,
      "step": 2
    },
    {
      "epoch": 0.00373366521468575,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7975,
      "step": 3
    },
    {
      "epoch": 0.004978220286247666,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.798,
      "step": 4
    },
    {
      "epoch": 0.006222775357809583,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8443,
      "step": 5
    },
    {
      "epoch": 0.0074673304293715,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8074,
      "step": 6
    },
    {
      "epoch": 0.008711885500933417,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7588,
      "step": 7
    },
    {
      "epoch": 0.009956440572495333,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8136,
      "step": 8
    },
    {
      "epoch": 0.01120099564405725,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7397,
      "step": 9
    },
    {
      "epoch": 0.012445550715619166,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7745,
      "step": 10
    },
    {
      "epoch": 0.012445550715619166,
      "eval_loss": 2.8210322856903076,
      "eval_runtime": 43.0476,
      "eval_samples_per_second": 23.23,
      "eval_steps_per_second": 0.976,
      "step": 10
    },
    {
      "epoch": 0.013690105787181083,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.8275,
      "step": 11
    },
    {
      "epoch": 0.014934660858743,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7762,
      "step": 12
    },
    {
      "epoch": 0.016179215930304917,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7799,
      "step": 13
    },
    {
      "epoch": 0.017423771001866834,
      "grad_norm": 0.0,
      "learning_rate": 0.0,
      "loss": 2.7906,
      "step": 14
    },
    {
      "epoch": 0.018668326073428748,
      "grad_norm": 5.01304292678833,
      "learning_rate": 2.0746887966804982e-08,
      "loss": 2.7712,
      "step": 15
    },
    {
      "epoch": 0.019912881144990666,
      "grad_norm": 4.976474761962891,
      "learning_rate": 4.1493775933609963e-08,
      "loss": 2.829,
      "step": 16
    },
    {
      "epoch": 0.021157436216552583,
      "grad_norm": 5.539012908935547,
      "learning_rate": 6.224066390041494e-08,
      "loss": 2.813,
      "step": 17
    },
    {
      "epoch": 0.0224019912881145,
      "grad_norm": 5.4900407791137695,
      "learning_rate": 8.298755186721993e-08,
      "loss": 2.8126,
      "step": 18
    },
    {
      "epoch": 0.023646546359676415,
      "grad_norm": 5.457218647003174,
      "learning_rate": 1.037344398340249e-07,
      "loss": 2.7482,
      "step": 19
    },
    {
      "epoch": 0.024891101431238332,
      "grad_norm": 5.231036186218262,
      "learning_rate": 1.2448132780082988e-07,
      "loss": 2.8323,
      "step": 20
    },
    {
      "epoch": 0.024891101431238332,
      "eval_loss": 2.819715738296509,
      "eval_runtime": 43.9884,
      "eval_samples_per_second": 22.733,
      "eval_steps_per_second": 0.955,
      "step": 20
    },
    {
      "epoch": 0.02613565650280025,
      "grad_norm": 5.337130546569824,
      "learning_rate": 1.4522821576763488e-07,
      "loss": 2.776,
      "step": 21
    },
    {
      "epoch": 0.027380211574362167,
      "grad_norm": 4.973108291625977,
      "learning_rate": 1.6597510373443985e-07,
      "loss": 2.8239,
      "step": 22
    },
    {
      "epoch": 0.02862476664592408,
      "grad_norm": 4.8733906745910645,
      "learning_rate": 1.8672199170124483e-07,
      "loss": 2.8069,
      "step": 23
    },
    {
      "epoch": 0.029869321717486,
      "grad_norm": 5.061434745788574,
      "learning_rate": 2.074688796680498e-07,
      "loss": 2.8063,
      "step": 24
    },
    {
      "epoch": 0.031113876789047916,
      "grad_norm": 4.782018661499023,
      "learning_rate": 2.2821576763485478e-07,
      "loss": 2.7488,
      "step": 25
    },
    {
      "epoch": 0.03235843186060983,
      "grad_norm": 4.394975662231445,
      "learning_rate": 2.4896265560165975e-07,
      "loss": 2.7683,
      "step": 26
    },
    {
      "epoch": 0.03360298693217175,
      "grad_norm": 4.475763320922852,
      "learning_rate": 2.6970954356846476e-07,
      "loss": 2.8143,
      "step": 27
    },
    {
      "epoch": 0.03484754200373367,
      "grad_norm": 4.362309455871582,
      "learning_rate": 2.9045643153526976e-07,
      "loss": 2.7875,
      "step": 28
    },
    {
      "epoch": 0.03609209707529558,
      "grad_norm": 3.9444823265075684,
      "learning_rate": 3.112033195020747e-07,
      "loss": 2.766,
      "step": 29
    },
    {
      "epoch": 0.037336652146857496,
      "grad_norm": 3.4614129066467285,
      "learning_rate": 3.319502074688797e-07,
      "loss": 2.7644,
      "step": 30
    },
    {
      "epoch": 0.037336652146857496,
      "eval_loss": 2.772022008895874,
      "eval_runtime": 45.8513,
      "eval_samples_per_second": 21.81,
      "eval_steps_per_second": 0.916,
      "step": 30
    },
    {
      "epoch": 0.038581207218419414,
      "grad_norm": 3.449410915374756,
      "learning_rate": 3.5269709543568466e-07,
      "loss": 2.7653,
      "step": 31
    },
    {
      "epoch": 0.03982576228998133,
      "grad_norm": 3.3115482330322266,
      "learning_rate": 3.7344398340248966e-07,
      "loss": 2.7032,
      "step": 32
    },
    {
      "epoch": 0.04107031736154325,
      "grad_norm": 3.2057440280914307,
      "learning_rate": 3.941908713692946e-07,
      "loss": 2.7766,
      "step": 33
    },
    {
      "epoch": 0.042314872433105166,
      "grad_norm": 3.4503021240234375,
      "learning_rate": 4.149377593360996e-07,
      "loss": 2.7214,
      "step": 34
    },
    {
      "epoch": 0.043559427504667084,
      "grad_norm": 3.1477363109588623,
      "learning_rate": 4.3568464730290456e-07,
      "loss": 2.7333,
      "step": 35
    },
    {
      "epoch": 0.044803982576229,
      "grad_norm": 2.8025119304656982,
      "learning_rate": 4.5643153526970956e-07,
      "loss": 2.7535,
      "step": 36
    },
    {
      "epoch": 0.04604853764779091,
      "grad_norm": 2.967703342437744,
      "learning_rate": 4.771784232365145e-07,
      "loss": 2.7615,
      "step": 37
    },
    {
      "epoch": 0.04729309271935283,
      "grad_norm": 2.9308114051818848,
      "learning_rate": 4.979253112033195e-07,
      "loss": 2.7115,
      "step": 38
    },
    {
      "epoch": 0.04853764779091475,
      "grad_norm": 2.9203720092773438,
      "learning_rate": 5.186721991701245e-07,
      "loss": 2.701,
      "step": 39
    },
    {
      "epoch": 0.049782202862476664,
      "grad_norm": 2.8226709365844727,
      "learning_rate": 5.394190871369295e-07,
      "loss": 2.6267,
      "step": 40
    },
    {
      "epoch": 0.049782202862476664,
      "eval_loss": 2.6964941024780273,
      "eval_runtime": 46.5181,
      "eval_samples_per_second": 21.497,
      "eval_steps_per_second": 0.903,
      "step": 40
    },
    {
      "epoch": 0.05102675793403858,
      "grad_norm": 2.7212748527526855,
      "learning_rate": 5.601659751037345e-07,
      "loss": 2.6505,
      "step": 41
    },
    {
      "epoch": 0.0522713130056005,
      "grad_norm": 2.658881425857544,
      "learning_rate": 5.809128630705395e-07,
      "loss": 2.6655,
      "step": 42
    },
    {
      "epoch": 0.053515868077162417,
      "grad_norm": 2.4534730911254883,
      "learning_rate": 6.016597510373444e-07,
      "loss": 2.7098,
      "step": 43
    },
    {
      "epoch": 0.054760423148724334,
      "grad_norm": 2.4222075939178467,
      "learning_rate": 6.224066390041494e-07,
      "loss": 2.6793,
      "step": 44
    },
    {
      "epoch": 0.056004978220286245,
      "grad_norm": 2.5824973583221436,
      "learning_rate": 6.431535269709543e-07,
      "loss": 2.6212,
      "step": 45
    },
    {
      "epoch": 0.05724953329184816,
      "grad_norm": 2.5982468128204346,
      "learning_rate": 6.639004149377594e-07,
      "loss": 2.5901,
      "step": 46
    },
    {
      "epoch": 0.05849408836341008,
      "grad_norm": 2.213388442993164,
      "learning_rate": 6.846473029045644e-07,
      "loss": 2.6156,
      "step": 47
    },
    {
      "epoch": 0.059738643434972,
      "grad_norm": 2.097372531890869,
      "learning_rate": 7.053941908713693e-07,
      "loss": 2.6154,
      "step": 48
    },
    {
      "epoch": 0.060983198506533914,
      "grad_norm": 1.9788408279418945,
      "learning_rate": 7.261410788381744e-07,
      "loss": 2.5516,
      "step": 49
    },
    {
      "epoch": 0.06222775357809583,
      "grad_norm": 2.052147150039673,
      "learning_rate": 7.468879668049793e-07,
      "loss": 2.6128,
      "step": 50
    },
    {
      "epoch": 0.06222775357809583,
      "eval_loss": 2.6048905849456787,
      "eval_runtime": 51.2137,
      "eval_samples_per_second": 19.526,
      "eval_steps_per_second": 0.82,
      "step": 50
    },
    {
      "epoch": 0.06347230864965775,
      "grad_norm": 1.8841335773468018,
      "learning_rate": 7.676348547717843e-07,
      "loss": 2.5617,
      "step": 51
    },
    {
      "epoch": 0.06471686372121967,
      "grad_norm": 2.0841684341430664,
      "learning_rate": 7.883817427385892e-07,
      "loss": 2.5627,
      "step": 52
    },
    {
      "epoch": 0.06596141879278158,
      "grad_norm": 1.8119730949401855,
      "learning_rate": 8.091286307053943e-07,
      "loss": 2.5572,
      "step": 53
    },
    {
      "epoch": 0.0672059738643435,
      "grad_norm": 1.7168290615081787,
      "learning_rate": 8.298755186721992e-07,
      "loss": 2.5465,
      "step": 54
    },
    {
      "epoch": 0.06845052893590542,
      "grad_norm": 1.7731754779815674,
      "learning_rate": 8.506224066390042e-07,
      "loss": 2.543,
      "step": 55
    },
    {
      "epoch": 0.06969508400746734,
      "grad_norm": 1.9654561281204224,
      "learning_rate": 8.713692946058091e-07,
      "loss": 2.5547,
      "step": 56
    },
    {
      "epoch": 0.07093963907902924,
      "grad_norm": 1.8032375574111938,
      "learning_rate": 8.921161825726142e-07,
      "loss": 2.5319,
      "step": 57
    },
    {
      "epoch": 0.07218419415059116,
      "grad_norm": 1.8775644302368164,
      "learning_rate": 9.128630705394191e-07,
      "loss": 2.5533,
      "step": 58
    },
    {
      "epoch": 0.07342874922215308,
      "grad_norm": 1.6019998788833618,
      "learning_rate": 9.336099585062241e-07,
      "loss": 2.5402,
      "step": 59
    },
    {
      "epoch": 0.07467330429371499,
      "grad_norm": 1.7785435914993286,
      "learning_rate": 9.54356846473029e-07,
      "loss": 2.4704,
      "step": 60
    },
    {
      "epoch": 0.07467330429371499,
      "eval_loss": 2.541548252105713,
      "eval_runtime": 51.1601,
      "eval_samples_per_second": 19.546,
      "eval_steps_per_second": 0.821,
      "step": 60
    },
    {
      "epoch": 0.07591785936527691,
      "grad_norm": 1.5377200841903687,
      "learning_rate": 9.751037344398341e-07,
      "loss": 2.5028,
      "step": 61
    },
    {
      "epoch": 0.07716241443683883,
      "grad_norm": 1.7212916612625122,
      "learning_rate": 9.95850622406639e-07,
      "loss": 2.4621,
      "step": 62
    },
    {
      "epoch": 0.07840696950840075,
      "grad_norm": 1.632606029510498,
      "learning_rate": 1.0165975103734441e-06,
      "loss": 2.4868,
      "step": 63
    },
    {
      "epoch": 0.07965152457996266,
      "grad_norm": 1.8643872737884521,
      "learning_rate": 1.037344398340249e-06,
      "loss": 2.4582,
      "step": 64
    },
    {
      "epoch": 0.08089607965152458,
      "grad_norm": 1.8329377174377441,
      "learning_rate": 1.058091286307054e-06,
      "loss": 2.464,
      "step": 65
    },
    {
      "epoch": 0.0821406347230865,
      "grad_norm": 1.9488356113433838,
      "learning_rate": 1.078838174273859e-06,
      "loss": 2.4275,
      "step": 66
    },
    {
      "epoch": 0.08338518979464841,
      "grad_norm": 1.7663215398788452,
      "learning_rate": 1.099585062240664e-06,
      "loss": 2.516,
      "step": 67
    },
    {
      "epoch": 0.08462974486621033,
      "grad_norm": 1.7675347328186035,
      "learning_rate": 1.120331950207469e-06,
      "loss": 2.4535,
      "step": 68
    },
    {
      "epoch": 0.08587429993777225,
      "grad_norm": 1.7481404542922974,
      "learning_rate": 1.141078838174274e-06,
      "loss": 2.4089,
      "step": 69
    },
    {
      "epoch": 0.08711885500933417,
      "grad_norm": 1.7963330745697021,
      "learning_rate": 1.161825726141079e-06,
      "loss": 2.4408,
      "step": 70
    },
    {
      "epoch": 0.08711885500933417,
      "eval_loss": 2.466163396835327,
      "eval_runtime": 50.4204,
      "eval_samples_per_second": 19.833,
      "eval_steps_per_second": 0.833,
      "step": 70
    },
    {
      "epoch": 0.08836341008089608,
      "grad_norm": 2.053469657897949,
      "learning_rate": 1.182572614107884e-06,
      "loss": 2.408,
      "step": 71
    },
    {
      "epoch": 0.089607965152458,
      "grad_norm": 1.900294303894043,
      "learning_rate": 1.2033195020746888e-06,
      "loss": 2.4066,
      "step": 72
    },
    {
      "epoch": 0.09085252022401992,
      "grad_norm": 2.38637113571167,
      "learning_rate": 1.224066390041494e-06,
      "loss": 2.3548,
      "step": 73
    },
    {
      "epoch": 0.09209707529558182,
      "grad_norm": 1.8274579048156738,
      "learning_rate": 1.2448132780082988e-06,
      "loss": 2.3844,
      "step": 74
    },
    {
      "epoch": 0.09334163036714374,
      "grad_norm": 1.9040184020996094,
      "learning_rate": 1.2655601659751037e-06,
      "loss": 2.4406,
      "step": 75
    },
    {
      "epoch": 0.09458618543870566,
      "grad_norm": 2.0351619720458984,
      "learning_rate": 1.2863070539419086e-06,
      "loss": 2.3624,
      "step": 76
    },
    {
      "epoch": 0.09583074051026758,
      "grad_norm": 1.989876627922058,
      "learning_rate": 1.307053941908714e-06,
      "loss": 2.4097,
      "step": 77
    },
    {
      "epoch": 0.0970752955818295,
      "grad_norm": 1.8507741689682007,
      "learning_rate": 1.3278008298755188e-06,
      "loss": 2.4482,
      "step": 78
    },
    {
      "epoch": 0.09831985065339141,
      "grad_norm": 2.6118454933166504,
      "learning_rate": 1.3485477178423237e-06,
      "loss": 2.3178,
      "step": 79
    },
    {
      "epoch": 0.09956440572495333,
      "grad_norm": 1.9467326402664185,
      "learning_rate": 1.3692946058091288e-06,
      "loss": 2.3175,
      "step": 80
    },
    {
      "epoch": 0.09956440572495333,
      "eval_loss": 2.3719911575317383,
      "eval_runtime": 52.3293,
      "eval_samples_per_second": 19.11,
      "eval_steps_per_second": 0.803,
      "step": 80
    },
    {
      "epoch": 0.10080896079651525,
      "grad_norm": 2.4913370609283447,
      "learning_rate": 1.3900414937759337e-06,
      "loss": 2.3063,
      "step": 81
    },
    {
      "epoch": 0.10205351586807716,
      "grad_norm": 2.4838919639587402,
      "learning_rate": 1.4107883817427386e-06,
      "loss": 2.3534,
      "step": 82
    },
    {
      "epoch": 0.10329807093963908,
      "grad_norm": 2.798588991165161,
      "learning_rate": 1.4315352697095435e-06,
      "loss": 2.2732,
      "step": 83
    },
    {
      "epoch": 0.104542626011201,
      "grad_norm": 2.688399076461792,
      "learning_rate": 1.4522821576763488e-06,
      "loss": 2.3112,
      "step": 84
    },
    {
      "epoch": 0.10578718108276292,
      "grad_norm": 2.7367143630981445,
      "learning_rate": 1.4730290456431537e-06,
      "loss": 2.3055,
      "step": 85
    },
    {
      "epoch": 0.10703173615432483,
      "grad_norm": 2.6406664848327637,
      "learning_rate": 1.4937759336099586e-06,
      "loss": 2.312,
      "step": 86
    },
    {
      "epoch": 0.10827629122588675,
      "grad_norm": 2.703355312347412,
      "learning_rate": 1.5145228215767635e-06,
      "loss": 2.3305,
      "step": 87
    },
    {
      "epoch": 0.10952084629744867,
      "grad_norm": 2.420084238052368,
      "learning_rate": 1.5352697095435686e-06,
      "loss": 2.2608,
      "step": 88
    },
    {
      "epoch": 0.11076540136901059,
      "grad_norm": 2.999835729598999,
      "learning_rate": 1.5560165975103735e-06,
      "loss": 2.2389,
      "step": 89
    },
    {
      "epoch": 0.11200995644057249,
      "grad_norm": 2.35611629486084,
      "learning_rate": 1.5767634854771784e-06,
      "loss": 2.2671,
      "step": 90
    },
    {
      "epoch": 0.11200995644057249,
      "eval_loss": 2.2900290489196777,
      "eval_runtime": 52.1871,
      "eval_samples_per_second": 19.162,
      "eval_steps_per_second": 0.805,
      "step": 90
    },
    {
      "epoch": 0.1132545115121344,
      "grad_norm": 2.2686431407928467,
      "learning_rate": 1.5975103734439833e-06,
      "loss": 2.2405,
      "step": 91
    },
    {
      "epoch": 0.11449906658369632,
      "grad_norm": 2.883517265319824,
      "learning_rate": 1.6182572614107886e-06,
      "loss": 2.2364,
      "step": 92
    },
    {
      "epoch": 0.11574362165525824,
      "grad_norm": 2.4562909603118896,
      "learning_rate": 1.6390041493775935e-06,
      "loss": 2.2308,
      "step": 93
    },
    {
      "epoch": 0.11698817672682016,
      "grad_norm": 2.7968456745147705,
      "learning_rate": 1.6597510373443984e-06,
      "loss": 2.1959,
      "step": 94
    },
    {
      "epoch": 0.11823273179838208,
      "grad_norm": 2.8692259788513184,
      "learning_rate": 1.6804979253112035e-06,
      "loss": 2.215,
      "step": 95
    },
    {
      "epoch": 0.119477286869944,
      "grad_norm": 2.8436100482940674,
      "learning_rate": 1.7012448132780084e-06,
      "loss": 2.1816,
      "step": 96
    },
    {
      "epoch": 0.12072184194150591,
      "grad_norm": 2.4477386474609375,
      "learning_rate": 1.7219917012448133e-06,
      "loss": 2.1684,
      "step": 97
    },
    {
      "epoch": 0.12196639701306783,
      "grad_norm": 2.610046863555908,
      "learning_rate": 1.7427385892116182e-06,
      "loss": 2.1656,
      "step": 98
    },
    {
      "epoch": 0.12321095208462975,
      "grad_norm": 2.698709487915039,
      "learning_rate": 1.7634854771784235e-06,
      "loss": 2.2115,
      "step": 99
    },
    {
      "epoch": 0.12445550715619166,
      "grad_norm": 2.476600408554077,
      "learning_rate": 1.7842323651452284e-06,
      "loss": 2.1619,
      "step": 100
    },
    {
      "epoch": 0.12445550715619166,
      "eval_loss": 2.221313953399658,
      "eval_runtime": 51.6214,
      "eval_samples_per_second": 19.372,
      "eval_steps_per_second": 0.814,
      "step": 100
    },
    {
      "epoch": 0.12570006222775357,
      "grad_norm": 3.238584518432617,
      "learning_rate": 1.8049792531120333e-06,
      "loss": 2.1543,
      "step": 101
    },
    {
      "epoch": 0.1269446172993155,
      "grad_norm": 2.3609964847564697,
      "learning_rate": 1.8257261410788382e-06,
      "loss": 2.1602,
      "step": 102
    },
    {
      "epoch": 0.1281891723708774,
      "grad_norm": 3.3314859867095947,
      "learning_rate": 1.8464730290456433e-06,
      "loss": 2.1843,
      "step": 103
    },
    {
      "epoch": 0.12943372744243933,
      "grad_norm": 2.42441725730896,
      "learning_rate": 1.8672199170124482e-06,
      "loss": 2.135,
      "step": 104
    },
    {
      "epoch": 0.13067828251400124,
      "grad_norm": 3.2766177654266357,
      "learning_rate": 1.8879668049792531e-06,
      "loss": 2.158,
      "step": 105
    },
    {
      "epoch": 0.13192283758556317,
      "grad_norm": 2.8646531105041504,
      "learning_rate": 1.908713692946058e-06,
      "loss": 2.1214,
      "step": 106
    },
    {
      "epoch": 0.13316739265712507,
      "grad_norm": 2.8570213317871094,
      "learning_rate": 1.929460580912863e-06,
      "loss": 2.1345,
      "step": 107
    },
    {
      "epoch": 0.134411947728687,
      "grad_norm": 2.5014989376068115,
      "learning_rate": 1.9502074688796682e-06,
      "loss": 2.1147,
      "step": 108
    },
    {
      "epoch": 0.1356565028002489,
      "grad_norm": 2.818286895751953,
      "learning_rate": 1.970954356846473e-06,
      "loss": 2.0914,
      "step": 109
    },
    {
      "epoch": 0.13690105787181084,
      "grad_norm": 3.0579800605773926,
      "learning_rate": 1.991701244813278e-06,
      "loss": 2.0879,
      "step": 110
    },
    {
      "epoch": 0.13690105787181084,
      "eval_loss": 2.167081594467163,
      "eval_runtime": 42.3907,
      "eval_samples_per_second": 23.59,
      "eval_steps_per_second": 0.991,
      "step": 110
    },
    {
      "epoch": 0.13814561294337274,
      "grad_norm": 2.6039772033691406,
      "learning_rate": 2.012448132780083e-06,
      "loss": 2.1006,
      "step": 111
    },
    {
      "epoch": 0.13939016801493467,
      "grad_norm": 3.1309447288513184,
      "learning_rate": 2.0331950207468883e-06,
      "loss": 2.0758,
      "step": 112
    },
    {
      "epoch": 0.14063472308649658,
      "grad_norm": 2.6772613525390625,
      "learning_rate": 2.053941908713693e-06,
      "loss": 2.1542,
      "step": 113
    },
    {
      "epoch": 0.14187927815805848,
      "grad_norm": 3.2689080238342285,
      "learning_rate": 2.074688796680498e-06,
      "loss": 2.0873,
      "step": 114
    },
    {
      "epoch": 0.1431238332296204,
      "grad_norm": 2.7289857864379883,
      "learning_rate": 2.095435684647303e-06,
      "loss": 2.0697,
      "step": 115
    },
    {
      "epoch": 0.14436838830118232,
      "grad_norm": 2.8635787963867188,
      "learning_rate": 2.116182572614108e-06,
      "loss": 2.0908,
      "step": 116
    },
    {
      "epoch": 0.14561294337274425,
      "grad_norm": 2.805933713912964,
      "learning_rate": 2.136929460580913e-06,
      "loss": 2.0643,
      "step": 117
    },
    {
      "epoch": 0.14685749844430615,
      "grad_norm": 2.562567710876465,
      "learning_rate": 2.157676348547718e-06,
      "loss": 2.0559,
      "step": 118
    },
    {
      "epoch": 0.14810205351586808,
      "grad_norm": 3.000135898590088,
      "learning_rate": 2.178423236514523e-06,
      "loss": 2.0442,
      "step": 119
    },
    {
      "epoch": 0.14934660858742999,
      "grad_norm": 2.6913654804229736,
      "learning_rate": 2.199170124481328e-06,
      "loss": 2.0551,
      "step": 120
    },
    {
      "epoch": 0.14934660858742999,
      "eval_loss": 2.130500316619873,
      "eval_runtime": 44.3089,
      "eval_samples_per_second": 22.569,
      "eval_steps_per_second": 0.948,
      "step": 120
    },
    {
      "epoch": 0.15059116365899192,
      "grad_norm": 3.38159441947937,
      "learning_rate": 2.219917012448133e-06,
      "loss": 2.0359,
      "step": 121
    },
    {
      "epoch": 0.15183571873055382,
      "grad_norm": 2.4632813930511475,
      "learning_rate": 2.240663900414938e-06,
      "loss": 2.0805,
      "step": 122
    },
    {
      "epoch": 0.15308027380211575,
      "grad_norm": 3.2085115909576416,
      "learning_rate": 2.2614107883817427e-06,
      "loss": 2.0617,
      "step": 123
    },
    {
      "epoch": 0.15432482887367766,
      "grad_norm": 2.7810094356536865,
      "learning_rate": 2.282157676348548e-06,
      "loss": 2.0467,
      "step": 124
    },
    {
      "epoch": 0.1555693839452396,
      "grad_norm": 2.6023035049438477,
      "learning_rate": 2.302904564315353e-06,
      "loss": 2.0578,
      "step": 125
    },
    {
      "epoch": 0.1568139390168015,
      "grad_norm": 3.073814630508423,
      "learning_rate": 2.323651452282158e-06,
      "loss": 2.0742,
      "step": 126
    },
    {
      "epoch": 0.15805849408836342,
      "grad_norm": 2.622281312942505,
      "learning_rate": 2.3443983402489627e-06,
      "loss": 2.1668,
      "step": 127
    },
    {
      "epoch": 0.15930304915992533,
      "grad_norm": 3.3766582012176514,
      "learning_rate": 2.365145228215768e-06,
      "loss": 2.0129,
      "step": 128
    },
    {
      "epoch": 0.16054760423148726,
      "grad_norm": 3.1513423919677734,
      "learning_rate": 2.385892116182573e-06,
      "loss": 2.0215,
      "step": 129
    },
    {
      "epoch": 0.16179215930304916,
      "grad_norm": 3.2852959632873535,
      "learning_rate": 2.4066390041493776e-06,
      "loss": 2.0557,
      "step": 130
    },
    {
      "epoch": 0.16179215930304916,
      "eval_loss": 2.096493721008301,
      "eval_runtime": 46.9447,
      "eval_samples_per_second": 21.302,
      "eval_steps_per_second": 0.895,
      "step": 130
    },
    {
      "epoch": 0.16303671437461106,
      "grad_norm": 2.526399612426758,
      "learning_rate": 2.4273858921161828e-06,
      "loss": 2.065,
      "step": 131
    },
    {
      "epoch": 0.164281269446173,
      "grad_norm": 3.3448667526245117,
      "learning_rate": 2.448132780082988e-06,
      "loss": 2.0472,
      "step": 132
    },
    {
      "epoch": 0.1655258245177349,
      "grad_norm": 2.6260809898376465,
      "learning_rate": 2.468879668049793e-06,
      "loss": 1.9804,
      "step": 133
    },
    {
      "epoch": 0.16677037958929683,
      "grad_norm": 3.6141812801361084,
      "learning_rate": 2.4896265560165977e-06,
      "loss": 2.0197,
      "step": 134
    },
    {
      "epoch": 0.16801493466085873,
      "grad_norm": 2.4911234378814697,
      "learning_rate": 2.5103734439834028e-06,
      "loss": 1.9642,
      "step": 135
    },
    {
      "epoch": 0.16925948973242066,
      "grad_norm": 2.928642749786377,
      "learning_rate": 2.5311203319502074e-06,
      "loss": 1.9489,
      "step": 136
    },
    {
      "epoch": 0.17050404480398257,
      "grad_norm": 3.090965509414673,
      "learning_rate": 2.5518672199170125e-06,
      "loss": 1.994,
      "step": 137
    },
    {
      "epoch": 0.1717485998755445,
      "grad_norm": 3.2897465229034424,
      "learning_rate": 2.5726141078838172e-06,
      "loss": 1.9746,
      "step": 138
    },
    {
      "epoch": 0.1729931549471064,
      "grad_norm": 2.857083320617676,
      "learning_rate": 2.5933609958506228e-06,
      "loss": 1.9774,
      "step": 139
    },
    {
      "epoch": 0.17423771001866833,
      "grad_norm": 3.803220510482788,
      "learning_rate": 2.614107883817428e-06,
      "loss": 1.9731,
      "step": 140
    },
    {
      "epoch": 0.17423771001866833,
      "eval_loss": 2.064887046813965,
      "eval_runtime": 44.0231,
      "eval_samples_per_second": 22.715,
      "eval_steps_per_second": 0.954,
      "step": 140
    },
    {
      "epoch": 0.17548226509023024,
      "grad_norm": 2.9923534393310547,
      "learning_rate": 2.6348547717842326e-06,
      "loss": 1.9854,
      "step": 141
    },
    {
      "epoch": 0.17672682016179217,
      "grad_norm": 3.3368566036224365,
      "learning_rate": 2.6556016597510377e-06,
      "loss": 2.0687,
      "step": 142
    },
    {
      "epoch": 0.17797137523335407,
      "grad_norm": 3.3132379055023193,
      "learning_rate": 2.6763485477178423e-06,
      "loss": 1.9696,
      "step": 143
    },
    {
      "epoch": 0.179215930304916,
      "grad_norm": 3.7914819717407227,
      "learning_rate": 2.6970954356846475e-06,
      "loss": 1.9926,
      "step": 144
    },
    {
      "epoch": 0.1804604853764779,
      "grad_norm": 3.20161509513855,
      "learning_rate": 2.717842323651452e-06,
      "loss": 1.9193,
      "step": 145
    },
    {
      "epoch": 0.18170504044803984,
      "grad_norm": 3.440420150756836,
      "learning_rate": 2.7385892116182577e-06,
      "loss": 1.991,
      "step": 146
    },
    {
      "epoch": 0.18294959551960174,
      "grad_norm": 3.152684450149536,
      "learning_rate": 2.7593360995850628e-06,
      "loss": 1.9695,
      "step": 147
    },
    {
      "epoch": 0.18419415059116365,
      "grad_norm": 3.0402464866638184,
      "learning_rate": 2.7800829875518675e-06,
      "loss": 1.9274,
      "step": 148
    },
    {
      "epoch": 0.18543870566272558,
      "grad_norm": 2.778444290161133,
      "learning_rate": 2.8008298755186726e-06,
      "loss": 1.9198,
      "step": 149
    },
    {
      "epoch": 0.18668326073428748,
      "grad_norm": 2.7946548461914062,
      "learning_rate": 2.8215767634854773e-06,
      "loss": 1.8958,
      "step": 150
    },
    {
      "epoch": 0.18668326073428748,
      "eval_loss": 2.043288230895996,
      "eval_runtime": 44.2268,
      "eval_samples_per_second": 22.611,
      "eval_steps_per_second": 0.95,
      "step": 150
    },
    {
      "epoch": 0.1879278158058494,
      "grad_norm": 3.269259452819824,
      "learning_rate": 2.8423236514522824e-06,
      "loss": 1.9832,
      "step": 151
    },
    {
      "epoch": 0.18917237087741132,
      "grad_norm": 3.2673771381378174,
      "learning_rate": 2.863070539419087e-06,
      "loss": 1.9345,
      "step": 152
    },
    {
      "epoch": 0.19041692594897325,
      "grad_norm": 3.016599655151367,
      "learning_rate": 2.883817427385892e-06,
      "loss": 1.8969,
      "step": 153
    },
    {
      "epoch": 0.19166148102053515,
      "grad_norm": 3.2771544456481934,
      "learning_rate": 2.9045643153526977e-06,
      "loss": 1.921,
      "step": 154
    },
    {
      "epoch": 0.19290603609209708,
      "grad_norm": 3.008080244064331,
      "learning_rate": 2.9253112033195024e-06,
      "loss": 1.9367,
      "step": 155
    },
    {
      "epoch": 0.194150591163659,
      "grad_norm": 3.2312428951263428,
      "learning_rate": 2.9460580912863075e-06,
      "loss": 1.9021,
      "step": 156
    },
    {
      "epoch": 0.19539514623522092,
      "grad_norm": 3.294121503829956,
      "learning_rate": 2.966804979253112e-06,
      "loss": 1.9216,
      "step": 157
    },
    {
      "epoch": 0.19663970130678282,
      "grad_norm": 2.771685838699341,
      "learning_rate": 2.9875518672199173e-06,
      "loss": 1.9435,
      "step": 158
    },
    {
      "epoch": 0.19788425637834475,
      "grad_norm": 2.971971273422241,
      "learning_rate": 3.008298755186722e-06,
      "loss": 1.8851,
      "step": 159
    },
    {
      "epoch": 0.19912881144990666,
      "grad_norm": 3.3144047260284424,
      "learning_rate": 3.029045643153527e-06,
      "loss": 1.853,
      "step": 160
    },
    {
      "epoch": 0.19912881144990666,
      "eval_loss": 2.023491859436035,
      "eval_runtime": 44.9425,
      "eval_samples_per_second": 22.251,
      "eval_steps_per_second": 0.935,
      "step": 160
    },
    {
      "epoch": 0.2003733665214686,
      "grad_norm": 3.3733646869659424,
      "learning_rate": 3.0497925311203326e-06,
      "loss": 1.8836,
      "step": 161
    },
    {
      "epoch": 0.2016179215930305,
      "grad_norm": 3.1801207065582275,
      "learning_rate": 3.0705394190871373e-06,
      "loss": 1.9438,
      "step": 162
    },
    {
      "epoch": 0.2028624766645924,
      "grad_norm": 3.1199593544006348,
      "learning_rate": 3.0912863070539424e-06,
      "loss": 1.9219,
      "step": 163
    },
    {
      "epoch": 0.20410703173615433,
      "grad_norm": 3.344089984893799,
      "learning_rate": 3.112033195020747e-06,
      "loss": 1.9174,
      "step": 164
    },
    {
      "epoch": 0.20535158680771623,
      "grad_norm": 3.269702911376953,
      "learning_rate": 3.132780082987552e-06,
      "loss": 1.894,
      "step": 165
    },
    {
      "epoch": 0.20659614187927816,
      "grad_norm": 3.021744966506958,
      "learning_rate": 3.153526970954357e-06,
      "loss": 1.8799,
      "step": 166
    },
    {
      "epoch": 0.20784069695084006,
      "grad_norm": 3.0104167461395264,
      "learning_rate": 3.174273858921162e-06,
      "loss": 1.9116,
      "step": 167
    },
    {
      "epoch": 0.209085252022402,
      "grad_norm": 3.3018341064453125,
      "learning_rate": 3.1950207468879666e-06,
      "loss": 1.8389,
      "step": 168
    },
    {
      "epoch": 0.2103298070939639,
      "grad_norm": 3.0919857025146484,
      "learning_rate": 3.215767634854772e-06,
      "loss": 1.9522,
      "step": 169
    },
    {
      "epoch": 0.21157436216552583,
      "grad_norm": 3.4702494144439697,
      "learning_rate": 3.2365145228215773e-06,
      "loss": 1.9204,
      "step": 170
    },
    {
      "epoch": 0.21157436216552583,
      "eval_loss": 1.997478723526001,
      "eval_runtime": 43.2143,
      "eval_samples_per_second": 23.14,
      "eval_steps_per_second": 0.972,
      "step": 170
    },
    {
      "epoch": 0.21281891723708773,
      "grad_norm": 3.133046865463257,
      "learning_rate": 3.257261410788382e-06,
      "loss": 1.891,
      "step": 171
    },
    {
      "epoch": 0.21406347230864967,
      "grad_norm": 3.1828863620758057,
      "learning_rate": 3.278008298755187e-06,
      "loss": 1.8816,
      "step": 172
    },
    {
      "epoch": 0.21530802738021157,
      "grad_norm": 3.374898910522461,
      "learning_rate": 3.2987551867219918e-06,
      "loss": 1.8886,
      "step": 173
    },
    {
      "epoch": 0.2165525824517735,
      "grad_norm": 3.6660516262054443,
      "learning_rate": 3.319502074688797e-06,
      "loss": 1.9442,
      "step": 174
    },
    {
      "epoch": 0.2177971375233354,
      "grad_norm": 3.080636978149414,
      "learning_rate": 3.3402489626556016e-06,
      "loss": 1.866,
      "step": 175
    },
    {
      "epoch": 0.21904169259489734,
      "grad_norm": 3.3684473037719727,
      "learning_rate": 3.360995850622407e-06,
      "loss": 1.8699,
      "step": 176
    },
    {
      "epoch": 0.22028624766645924,
      "grad_norm": 3.818382740020752,
      "learning_rate": 3.381742738589212e-06,
      "loss": 1.8541,
      "step": 177
    },
    {
      "epoch": 0.22153080273802117,
      "grad_norm": 3.0995497703552246,
      "learning_rate": 3.402489626556017e-06,
      "loss": 1.8453,
      "step": 178
    },
    {
      "epoch": 0.22277535780958307,
      "grad_norm": 3.5696587562561035,
      "learning_rate": 3.423236514522822e-06,
      "loss": 1.9289,
      "step": 179
    },
    {
      "epoch": 0.22401991288114498,
      "grad_norm": 3.480571985244751,
      "learning_rate": 3.4439834024896267e-06,
      "loss": 1.8175,
      "step": 180
    },
    {
      "epoch": 0.22401991288114498,
      "eval_loss": 1.9842805862426758,
      "eval_runtime": 43.7649,
      "eval_samples_per_second": 22.849,
      "eval_steps_per_second": 0.96,
      "step": 180
    },
    {
      "epoch": 0.2252644679527069,
      "grad_norm": 3.229316473007202,
      "learning_rate": 3.4647302904564318e-06,
      "loss": 1.9048,
      "step": 181
    },
    {
      "epoch": 0.2265090230242688,
      "grad_norm": 3.1970808506011963,
      "learning_rate": 3.4854771784232365e-06,
      "loss": 1.907,
      "step": 182
    },
    {
      "epoch": 0.22775357809583074,
      "grad_norm": 3.5275299549102783,
      "learning_rate": 3.5062240663900416e-06,
      "loss": 1.8147,
      "step": 183
    },
    {
      "epoch": 0.22899813316739265,
      "grad_norm": 3.367077589035034,
      "learning_rate": 3.526970954356847e-06,
      "loss": 1.8189,
      "step": 184
    },
    {
      "epoch": 0.23024268823895458,
      "grad_norm": 3.3052914142608643,
      "learning_rate": 3.5477178423236518e-06,
      "loss": 1.8362,
      "step": 185
    },
    {
      "epoch": 0.23148724331051648,
      "grad_norm": 3.4737954139709473,
      "learning_rate": 3.568464730290457e-06,
      "loss": 1.8747,
      "step": 186
    },
    {
      "epoch": 0.23273179838207841,
      "grad_norm": 2.851705312728882,
      "learning_rate": 3.5892116182572616e-06,
      "loss": 1.7835,
      "step": 187
    },
    {
      "epoch": 0.23397635345364032,
      "grad_norm": 3.615224599838257,
      "learning_rate": 3.6099585062240667e-06,
      "loss": 1.7855,
      "step": 188
    },
    {
      "epoch": 0.23522090852520225,
      "grad_norm": 2.7468442916870117,
      "learning_rate": 3.6307053941908714e-06,
      "loss": 1.843,
      "step": 189
    },
    {
      "epoch": 0.23646546359676415,
      "grad_norm": 3.3228273391723633,
      "learning_rate": 3.6514522821576765e-06,
      "loss": 1.8453,
      "step": 190
    },
    {
      "epoch": 0.23646546359676415,
      "eval_loss": 1.9668089151382446,
      "eval_runtime": 44.9228,
      "eval_samples_per_second": 22.26,
      "eval_steps_per_second": 0.935,
      "step": 190
    },
    {
      "epoch": 0.23771001866832608,
      "grad_norm": 2.7882697582244873,
      "learning_rate": 3.672199170124482e-06,
      "loss": 1.8625,
      "step": 191
    },
    {
      "epoch": 0.238954573739888,
      "grad_norm": 3.244044780731201,
      "learning_rate": 3.6929460580912867e-06,
      "loss": 1.8903,
      "step": 192
    },
    {
      "epoch": 0.24019912881144992,
      "grad_norm": 2.8844549655914307,
      "learning_rate": 3.713692946058092e-06,
      "loss": 1.8929,
      "step": 193
    },
    {
      "epoch": 0.24144368388301182,
      "grad_norm": 2.9207894802093506,
      "learning_rate": 3.7344398340248965e-06,
      "loss": 1.8458,
      "step": 194
    },
    {
      "epoch": 0.24268823895457373,
      "grad_norm": 2.908562183380127,
      "learning_rate": 3.7551867219917016e-06,
      "loss": 1.828,
      "step": 195
    },
    {
      "epoch": 0.24393279402613566,
      "grad_norm": 3.1195480823516846,
      "learning_rate": 3.7759336099585063e-06,
      "loss": 1.855,
      "step": 196
    },
    {
      "epoch": 0.24517734909769756,
      "grad_norm": 3.3910183906555176,
      "learning_rate": 3.7966804979253114e-06,
      "loss": 1.7868,
      "step": 197
    },
    {
      "epoch": 0.2464219041692595,
      "grad_norm": 2.921475887298584,
      "learning_rate": 3.817427385892116e-06,
      "loss": 1.7505,
      "step": 198
    },
    {
      "epoch": 0.2476664592408214,
      "grad_norm": 3.5471713542938232,
      "learning_rate": 3.838174273858922e-06,
      "loss": 1.8445,
      "step": 199
    },
    {
      "epoch": 0.24891101431238333,
      "grad_norm": 3.047698736190796,
      "learning_rate": 3.858921161825726e-06,
      "loss": 1.8037,
      "step": 200
    },
    {
      "epoch": 0.24891101431238333,
      "eval_loss": 1.9400665760040283,
      "eval_runtime": 49.1044,
      "eval_samples_per_second": 20.365,
      "eval_steps_per_second": 0.855,
      "step": 200
    },
    {
      "epoch": 0.25015556938394523,
      "grad_norm": 3.4988913536071777,
      "learning_rate": 3.879668049792531e-06,
      "loss": 1.8061,
      "step": 201
    },
    {
      "epoch": 0.25140012445550713,
      "grad_norm": 3.0373125076293945,
      "learning_rate": 3.9004149377593365e-06,
      "loss": 1.8081,
      "step": 202
    },
    {
      "epoch": 0.2526446795270691,
      "grad_norm": 2.9764745235443115,
      "learning_rate": 3.921161825726142e-06,
      "loss": 1.8244,
      "step": 203
    },
    {
      "epoch": 0.253889234598631,
      "grad_norm": 3.4413554668426514,
      "learning_rate": 3.941908713692946e-06,
      "loss": 1.8492,
      "step": 204
    },
    {
      "epoch": 0.2551337896701929,
      "grad_norm": 2.9980437755584717,
      "learning_rate": 3.962655601659751e-06,
      "loss": 1.805,
      "step": 205
    },
    {
      "epoch": 0.2563783447417548,
      "grad_norm": 3.228750228881836,
      "learning_rate": 3.983402489626556e-06,
      "loss": 1.7683,
      "step": 206
    },
    {
      "epoch": 0.25762289981331676,
      "grad_norm": 3.008496046066284,
      "learning_rate": 4.004149377593361e-06,
      "loss": 1.8602,
      "step": 207
    },
    {
      "epoch": 0.25886745488487867,
      "grad_norm": 3.338935375213623,
      "learning_rate": 4.024896265560166e-06,
      "loss": 1.7978,
      "step": 208
    },
    {
      "epoch": 0.26011200995644057,
      "grad_norm": 3.1450204849243164,
      "learning_rate": 4.045643153526971e-06,
      "loss": 1.8243,
      "step": 209
    },
    {
      "epoch": 0.2613565650280025,
      "grad_norm": 3.5714142322540283,
      "learning_rate": 4.0663900414937765e-06,
      "loss": 1.8386,
      "step": 210
    },
    {
      "epoch": 0.2613565650280025,
      "eval_loss": 1.9310717582702637,
      "eval_runtime": 42.4826,
      "eval_samples_per_second": 23.539,
      "eval_steps_per_second": 0.989,
      "step": 210
    },
    {
      "epoch": 0.26260112009956443,
      "grad_norm": 3.3498189449310303,
      "learning_rate": 4.087136929460581e-06,
      "loss": 1.8055,
      "step": 211
    },
    {
      "epoch": 0.26384567517112634,
      "grad_norm": 3.2423200607299805,
      "learning_rate": 4.107883817427386e-06,
      "loss": 1.8378,
      "step": 212
    },
    {
      "epoch": 0.26509023024268824,
      "grad_norm": 3.0533974170684814,
      "learning_rate": 4.128630705394191e-06,
      "loss": 1.7699,
      "step": 213
    },
    {
      "epoch": 0.26633478531425014,
      "grad_norm": 3.135615587234497,
      "learning_rate": 4.149377593360996e-06,
      "loss": 1.8019,
      "step": 214
    },
    {
      "epoch": 0.26757934038581205,
      "grad_norm": 2.9863040447235107,
      "learning_rate": 4.170124481327801e-06,
      "loss": 1.7817,
      "step": 215
    },
    {
      "epoch": 0.268823895457374,
      "grad_norm": 3.270598888397217,
      "learning_rate": 4.190871369294606e-06,
      "loss": 1.7542,
      "step": 216
    },
    {
      "epoch": 0.2700684505289359,
      "grad_norm": 3.2279393672943115,
      "learning_rate": 4.211618257261411e-06,
      "loss": 1.7964,
      "step": 217
    },
    {
      "epoch": 0.2713130056004978,
      "grad_norm": 3.0626120567321777,
      "learning_rate": 4.232365145228216e-06,
      "loss": 1.7687,
      "step": 218
    },
    {
      "epoch": 0.2725575606720597,
      "grad_norm": 3.070702075958252,
      "learning_rate": 4.253112033195021e-06,
      "loss": 1.8075,
      "step": 219
    },
    {
      "epoch": 0.2738021157436217,
      "grad_norm": 3.3623268604278564,
      "learning_rate": 4.273858921161826e-06,
      "loss": 1.8517,
      "step": 220
    },
    {
      "epoch": 0.2738021157436217,
      "eval_loss": 1.9212934970855713,
      "eval_runtime": 46.8503,
      "eval_samples_per_second": 21.345,
      "eval_steps_per_second": 0.896,
      "step": 220
    },
    {
      "epoch": 0.2750466708151836,
      "grad_norm": 3.005740165710449,
      "learning_rate": 4.294605809128631e-06,
      "loss": 1.816,
      "step": 221
    },
    {
      "epoch": 0.2762912258867455,
      "grad_norm": 3.5257251262664795,
      "learning_rate": 4.315352697095436e-06,
      "loss": 1.8191,
      "step": 222
    },
    {
      "epoch": 0.2775357809583074,
      "grad_norm": 3.361103057861328,
      "learning_rate": 4.336099585062241e-06,
      "loss": 1.8378,
      "step": 223
    },
    {
      "epoch": 0.27878033602986935,
      "grad_norm": 3.787623405456543,
      "learning_rate": 4.356846473029046e-06,
      "loss": 1.7736,
      "step": 224
    },
    {
      "epoch": 0.28002489110143125,
      "grad_norm": 3.45717453956604,
      "learning_rate": 4.3775933609958506e-06,
      "loss": 1.8195,
      "step": 225
    },
    {
      "epoch": 0.28126944617299315,
      "grad_norm": 3.1698601245880127,
      "learning_rate": 4.398340248962656e-06,
      "loss": 1.7756,
      "step": 226
    },
    {
      "epoch": 0.28251400124455506,
      "grad_norm": 3.345258951187134,
      "learning_rate": 4.419087136929461e-06,
      "loss": 1.7272,
      "step": 227
    },
    {
      "epoch": 0.28375855631611696,
      "grad_norm": 2.905773639678955,
      "learning_rate": 4.439834024896266e-06,
      "loss": 1.84,
      "step": 228
    },
    {
      "epoch": 0.2850031113876789,
      "grad_norm": 3.2004425525665283,
      "learning_rate": 4.460580912863071e-06,
      "loss": 1.8164,
      "step": 229
    },
    {
      "epoch": 0.2862476664592408,
      "grad_norm": 2.965275287628174,
      "learning_rate": 4.481327800829876e-06,
      "loss": 1.7718,
      "step": 230
    },
    {
      "epoch": 0.2862476664592408,
      "eval_loss": 1.903228521347046,
      "eval_runtime": 51.1663,
      "eval_samples_per_second": 19.544,
      "eval_steps_per_second": 0.821,
      "step": 230
    },
    {
      "epoch": 0.2874922215308027,
      "grad_norm": 2.8130710124969482,
      "learning_rate": 4.502074688796681e-06,
      "loss": 1.7696,
      "step": 231
    },
    {
      "epoch": 0.28873677660236463,
      "grad_norm": 2.9987664222717285,
      "learning_rate": 4.5228215767634855e-06,
      "loss": 1.775,
      "step": 232
    },
    {
      "epoch": 0.2899813316739266,
      "grad_norm": 3.0936989784240723,
      "learning_rate": 4.543568464730291e-06,
      "loss": 1.7775,
      "step": 233
    },
    {
      "epoch": 0.2912258867454885,
      "grad_norm": 3.0936989784240723,
      "learning_rate": 4.543568464730291e-06,
      "loss": 1.7154,
      "step": 234
    },
    {
      "epoch": 0.2924704418170504,
      "grad_norm": 2.8632843494415283,
      "learning_rate": 4.564315352697096e-06,
      "loss": 1.7771,
      "step": 235
    },
    {
      "epoch": 0.2937149968886123,
      "grad_norm": 3.3314013481140137,
      "learning_rate": 4.585062240663901e-06,
      "loss": 1.7243,
      "step": 236
    },
    {
      "epoch": 0.29495955196017426,
      "grad_norm": 2.8522872924804688,
      "learning_rate": 4.605809128630706e-06,
      "loss": 1.7543,
      "step": 237
    },
    {
      "epoch": 0.29620410703173616,
      "grad_norm": 3.319157600402832,
      "learning_rate": 4.626556016597511e-06,
      "loss": 1.7652,
      "step": 238
    },
    {
      "epoch": 0.29744866210329807,
      "grad_norm": 3.123116970062256,
      "learning_rate": 4.647302904564316e-06,
      "loss": 1.6933,
      "step": 239
    },
    {
      "epoch": 0.29869321717485997,
      "grad_norm": 3.1735854148864746,
      "learning_rate": 4.66804979253112e-06,
      "loss": 1.7437,
      "step": 240
    },
    {
      "epoch": 0.29869321717485997,
      "eval_loss": 1.891274333000183,
      "eval_runtime": 50.0054,
      "eval_samples_per_second": 19.998,
      "eval_steps_per_second": 0.84,
      "step": 240
    },
    {
      "epoch": 0.29993777224642193,
      "grad_norm": 3.0996642112731934,
      "learning_rate": 4.6887966804979255e-06,
      "loss": 1.7594,
      "step": 241
    },
    {
      "epoch": 0.30118232731798383,
      "grad_norm": 3.255063533782959,
      "learning_rate": 4.709543568464731e-06,
      "loss": 1.7645,
      "step": 242
    },
    {
      "epoch": 0.30242688238954574,
      "grad_norm": 2.722285747528076,
      "learning_rate": 4.730290456431536e-06,
      "loss": 1.7549,
      "step": 243
    },
    {
      "epoch": 0.30367143746110764,
      "grad_norm": 3.1903417110443115,
      "learning_rate": 4.751037344398341e-06,
      "loss": 1.7382,
      "step": 244
    },
    {
      "epoch": 0.30491599253266954,
      "grad_norm": 2.8352959156036377,
      "learning_rate": 4.771784232365146e-06,
      "loss": 1.7497,
      "step": 245
    },
    {
      "epoch": 0.3061605476042315,
      "grad_norm": 3.158536434173584,
      "learning_rate": 4.792531120331951e-06,
      "loss": 1.7467,
      "step": 246
    },
    {
      "epoch": 0.3074051026757934,
      "grad_norm": 2.8456921577453613,
      "learning_rate": 4.813278008298755e-06,
      "loss": 1.7472,
      "step": 247
    },
    {
      "epoch": 0.3086496577473553,
      "grad_norm": 3.1215128898620605,
      "learning_rate": 4.83402489626556e-06,
      "loss": 1.7577,
      "step": 248
    },
    {
      "epoch": 0.3098942128189172,
      "grad_norm": 2.928015947341919,
      "learning_rate": 4.8547717842323655e-06,
      "loss": 1.6932,
      "step": 249
    },
    {
      "epoch": 0.3111387678904792,
      "grad_norm": 3.001044511795044,
      "learning_rate": 4.875518672199171e-06,
      "loss": 1.7276,
      "step": 250
    },
    {
      "epoch": 0.3111387678904792,
      "eval_loss": 1.8855507373809814,
      "eval_runtime": 46.0143,
      "eval_samples_per_second": 21.732,
      "eval_steps_per_second": 0.913,
      "step": 250
    },
    {
      "epoch": 0.3123833229620411,
      "grad_norm": 2.988994598388672,
      "learning_rate": 4.896265560165976e-06,
      "loss": 1.739,
      "step": 251
    },
    {
      "epoch": 0.313627878033603,
      "grad_norm": 3.0091233253479004,
      "learning_rate": 4.91701244813278e-06,
      "loss": 1.7118,
      "step": 252
    },
    {
      "epoch": 0.3148724331051649,
      "grad_norm": 3.2735323905944824,
      "learning_rate": 4.937759336099586e-06,
      "loss": 1.7832,
      "step": 253
    },
    {
      "epoch": 0.31611698817672684,
      "grad_norm": 3.055468797683716,
      "learning_rate": 4.95850622406639e-06,
      "loss": 1.7151,
      "step": 254
    },
    {
      "epoch": 0.31736154324828875,
      "grad_norm": 3.0889883041381836,
      "learning_rate": 4.979253112033195e-06,
      "loss": 1.7411,
      "step": 255
    },
    {
      "epoch": 0.31860609831985065,
      "grad_norm": 2.8418147563934326,
      "learning_rate": 5e-06,
      "loss": 1.7575,
      "step": 256
    },
    {
      "epoch": 0.31985065339141255,
      "grad_norm": 3.09739351272583,
      "learning_rate": 4.99769372693727e-06,
      "loss": 1.7988,
      "step": 257
    },
    {
      "epoch": 0.3210952084629745,
      "grad_norm": 3.245515823364258,
      "learning_rate": 4.995387453874539e-06,
      "loss": 1.7706,
      "step": 258
    },
    {
      "epoch": 0.3223397635345364,
      "grad_norm": 3.250432014465332,
      "learning_rate": 4.993081180811809e-06,
      "loss": 1.7839,
      "step": 259
    },
    {
      "epoch": 0.3235843186060983,
      "grad_norm": 3.1325368881225586,
      "learning_rate": 4.990774907749078e-06,
      "loss": 1.754,
      "step": 260
    },
    {
      "epoch": 0.3235843186060983,
      "eval_loss": 1.869175672531128,
      "eval_runtime": 49.0199,
      "eval_samples_per_second": 20.4,
      "eval_steps_per_second": 0.857,
      "step": 260
    },
    {
      "epoch": 0.3248288736776602,
      "grad_norm": 3.1627941131591797,
      "learning_rate": 4.988468634686347e-06,
      "loss": 1.7123,
      "step": 261
    },
    {
      "epoch": 0.3260734287492221,
      "grad_norm": 2.848202705383301,
      "learning_rate": 4.986162361623617e-06,
      "loss": 1.7231,
      "step": 262
    },
    {
      "epoch": 0.3273179838207841,
      "grad_norm": 2.9585494995117188,
      "learning_rate": 4.983856088560886e-06,
      "loss": 1.7301,
      "step": 263
    },
    {
      "epoch": 0.328562538892346,
      "grad_norm": 3.160170316696167,
      "learning_rate": 4.981549815498156e-06,
      "loss": 1.7283,
      "step": 264
    },
    {
      "epoch": 0.3298070939639079,
      "grad_norm": 2.6879520416259766,
      "learning_rate": 4.979243542435424e-06,
      "loss": 1.7494,
      "step": 265
    },
    {
      "epoch": 0.3310516490354698,
      "grad_norm": 3.1313576698303223,
      "learning_rate": 4.976937269372694e-06,
      "loss": 1.7224,
      "step": 266
    },
    {
      "epoch": 0.33229620410703176,
      "grad_norm": 3.0477867126464844,
      "learning_rate": 4.974630996309964e-06,
      "loss": 1.6951,
      "step": 267
    },
    {
      "epoch": 0.33354075917859366,
      "grad_norm": 3.133974313735962,
      "learning_rate": 4.972324723247233e-06,
      "loss": 1.7362,
      "step": 268
    },
    {
      "epoch": 0.33478531425015556,
      "grad_norm": 3.4925222396850586,
      "learning_rate": 4.970018450184502e-06,
      "loss": 1.7053,
      "step": 269
    },
    {
      "epoch": 0.33602986932171747,
      "grad_norm": 2.848466634750366,
      "learning_rate": 4.9677121771217715e-06,
      "loss": 1.7336,
      "step": 270
    },
    {
      "epoch": 0.33602986932171747,
      "eval_loss": 1.8723454475402832,
      "eval_runtime": 47.9758,
      "eval_samples_per_second": 20.844,
      "eval_steps_per_second": 0.875,
      "step": 270
    },
    {
      "epoch": 0.3372744243932794,
      "grad_norm": 3.405003309249878,
      "learning_rate": 4.965405904059041e-06,
      "loss": 1.7178,
      "step": 271
    },
    {
      "epoch": 0.33851897946484133,
      "grad_norm": 2.982916831970215,
      "learning_rate": 4.96309963099631e-06,
      "loss": 1.7255,
      "step": 272
    },
    {
      "epoch": 0.33976353453640323,
      "grad_norm": 3.1712088584899902,
      "learning_rate": 4.96079335793358e-06,
      "loss": 1.7328,
      "step": 273
    },
    {
      "epoch": 0.34100808960796514,
      "grad_norm": 3.325481414794922,
      "learning_rate": 4.958487084870849e-06,
      "loss": 1.7268,
      "step": 274
    },
    {
      "epoch": 0.3422526446795271,
      "grad_norm": 3.0751800537109375,
      "learning_rate": 4.956180811808119e-06,
      "loss": 1.7916,
      "step": 275
    },
    {
      "epoch": 0.343497199751089,
      "grad_norm": 2.9451465606689453,
      "learning_rate": 4.953874538745388e-06,
      "loss": 1.7588,
      "step": 276
    },
    {
      "epoch": 0.3447417548226509,
      "grad_norm": 3.1492040157318115,
      "learning_rate": 4.9515682656826574e-06,
      "loss": 1.7312,
      "step": 277
    },
    {
      "epoch": 0.3459863098942128,
      "grad_norm": 3.165748119354248,
      "learning_rate": 4.949261992619927e-06,
      "loss": 1.7645,
      "step": 278
    },
    {
      "epoch": 0.3472308649657747,
      "grad_norm": 3.059307336807251,
      "learning_rate": 4.946955719557196e-06,
      "loss": 1.6783,
      "step": 279
    },
    {
      "epoch": 0.34847542003733667,
      "grad_norm": 2.940147638320923,
      "learning_rate": 4.944649446494466e-06,
      "loss": 1.7079,
      "step": 280
    },
    {
      "epoch": 0.34847542003733667,
      "eval_loss": 1.8577951192855835,
      "eval_runtime": 50.1741,
      "eval_samples_per_second": 19.931,
      "eval_steps_per_second": 0.837,
      "step": 280
    },
    {
      "epoch": 0.3497199751088986,
      "grad_norm": 2.6892037391662598,
      "learning_rate": 4.942343173431734e-06,
      "loss": 1.7104,
      "step": 281
    },
    {
      "epoch": 0.3509645301804605,
      "grad_norm": 3.136791467666626,
      "learning_rate": 4.940036900369004e-06,
      "loss": 1.763,
      "step": 282
    },
    {
      "epoch": 0.3522090852520224,
      "grad_norm": 2.6873879432678223,
      "learning_rate": 4.937730627306274e-06,
      "loss": 1.722,
      "step": 283
    },
    {
      "epoch": 0.35345364032358434,
      "grad_norm": 3.1375961303710938,
      "learning_rate": 4.9354243542435426e-06,
      "loss": 1.7048,
      "step": 284
    },
    {
      "epoch": 0.35469819539514624,
      "grad_norm": 2.920725107192993,
      "learning_rate": 4.933118081180812e-06,
      "loss": 1.7014,
      "step": 285
    },
    {
      "epoch": 0.35594275046670815,
      "grad_norm": 2.750018835067749,
      "learning_rate": 4.930811808118081e-06,
      "loss": 1.6283,
      "step": 286
    },
    {
      "epoch": 0.35718730553827005,
      "grad_norm": 3.399036169052124,
      "learning_rate": 4.928505535055351e-06,
      "loss": 1.7265,
      "step": 287
    },
    {
      "epoch": 0.358431860609832,
      "grad_norm": 3.1734066009521484,
      "learning_rate": 4.92619926199262e-06,
      "loss": 1.7007,
      "step": 288
    },
    {
      "epoch": 0.3596764156813939,
      "grad_norm": 3.038865089416504,
      "learning_rate": 4.92389298892989e-06,
      "loss": 1.7357,
      "step": 289
    },
    {
      "epoch": 0.3609209707529558,
      "grad_norm": 3.0309486389160156,
      "learning_rate": 4.921586715867159e-06,
      "loss": 1.7487,
      "step": 290
    },
    {
      "epoch": 0.3609209707529558,
      "eval_loss": 1.8484960794448853,
      "eval_runtime": 50.374,
      "eval_samples_per_second": 19.852,
      "eval_steps_per_second": 0.834,
      "step": 290
    },
    {
      "epoch": 0.3621655258245177,
      "grad_norm": 2.867432117462158,
      "learning_rate": 4.9192804428044285e-06,
      "loss": 1.6988,
      "step": 291
    },
    {
      "epoch": 0.3634100808960797,
      "grad_norm": 3.31636905670166,
      "learning_rate": 4.916974169741698e-06,
      "loss": 1.7003,
      "step": 292
    },
    {
      "epoch": 0.3646546359676416,
      "grad_norm": 3.1181511878967285,
      "learning_rate": 4.914667896678967e-06,
      "loss": 1.6762,
      "step": 293
    },
    {
      "epoch": 0.3658991910392035,
      "grad_norm": 2.978194236755371,
      "learning_rate": 4.912361623616237e-06,
      "loss": 1.6435,
      "step": 294
    },
    {
      "epoch": 0.3671437461107654,
      "grad_norm": 3.0623116493225098,
      "learning_rate": 4.910055350553506e-06,
      "loss": 1.7038,
      "step": 295
    },
    {
      "epoch": 0.3683883011823273,
      "grad_norm": 2.83353853225708,
      "learning_rate": 4.907749077490776e-06,
      "loss": 1.7237,
      "step": 296
    },
    {
      "epoch": 0.36963285625388925,
      "grad_norm": 2.8587100505828857,
      "learning_rate": 4.905442804428044e-06,
      "loss": 1.6927,
      "step": 297
    },
    {
      "epoch": 0.37087741132545116,
      "grad_norm": 2.7930493354797363,
      "learning_rate": 4.903136531365314e-06,
      "loss": 1.7381,
      "step": 298
    },
    {
      "epoch": 0.37212196639701306,
      "grad_norm": 2.8992788791656494,
      "learning_rate": 4.900830258302584e-06,
      "loss": 1.6485,
      "step": 299
    },
    {
      "epoch": 0.37336652146857496,
      "grad_norm": 2.8896944522857666,
      "learning_rate": 4.898523985239853e-06,
      "loss": 1.7143,
      "step": 300
    },
    {
      "epoch": 0.37336652146857496,
      "eval_loss": 1.8331259489059448,
      "eval_runtime": 51.0214,
      "eval_samples_per_second": 19.6,
      "eval_steps_per_second": 0.823,
      "step": 300
    },
    {
      "epoch": 0.3746110765401369,
      "grad_norm": 2.885444164276123,
      "learning_rate": 4.896217712177122e-06,
      "loss": 1.6914,
      "step": 301
    },
    {
      "epoch": 0.3758556316116988,
      "grad_norm": 3.2996826171875,
      "learning_rate": 4.893911439114391e-06,
      "loss": 1.7353,
      "step": 302
    },
    {
      "epoch": 0.37710018668326073,
      "grad_norm": 2.7231531143188477,
      "learning_rate": 4.891605166051661e-06,
      "loss": 1.6837,
      "step": 303
    },
    {
      "epoch": 0.37834474175482263,
      "grad_norm": 2.9413955211639404,
      "learning_rate": 4.88929889298893e-06,
      "loss": 1.6448,
      "step": 304
    },
    {
      "epoch": 0.3795892968263846,
      "grad_norm": 2.6486589908599854,
      "learning_rate": 4.8869926199262e-06,
      "loss": 1.6826,
      "step": 305
    },
    {
      "epoch": 0.3808338518979465,
      "grad_norm": 2.977836847305298,
      "learning_rate": 4.884686346863469e-06,
      "loss": 1.6737,
      "step": 306
    },
    {
      "epoch": 0.3820784069695084,
      "grad_norm": 2.635324716567993,
      "learning_rate": 4.8823800738007384e-06,
      "loss": 1.7112,
      "step": 307
    },
    {
      "epoch": 0.3833229620410703,
      "grad_norm": 3.0650839805603027,
      "learning_rate": 4.880073800738008e-06,
      "loss": 1.7123,
      "step": 308
    },
    {
      "epoch": 0.3845675171126322,
      "grad_norm": 3.059629440307617,
      "learning_rate": 4.877767527675277e-06,
      "loss": 1.662,
      "step": 309
    },
    {
      "epoch": 0.38581207218419417,
      "grad_norm": 2.7559597492218018,
      "learning_rate": 4.875461254612546e-06,
      "loss": 1.7418,
      "step": 310
    },
    {
      "epoch": 0.38581207218419417,
      "eval_loss": 1.8277366161346436,
      "eval_runtime": 42.6671,
      "eval_samples_per_second": 23.437,
      "eval_steps_per_second": 0.984,
      "step": 310
    },
    {
      "epoch": 0.38705662725575607,
      "grad_norm": 2.7987024784088135,
      "learning_rate": 4.873154981549816e-06,
      "loss": 1.7326,
      "step": 311
    },
    {
      "epoch": 0.388301182327318,
      "grad_norm": 2.999824047088623,
      "learning_rate": 4.8708487084870856e-06,
      "loss": 1.7344,
      "step": 312
    },
    {
      "epoch": 0.3895457373988799,
      "grad_norm": 2.942995071411133,
      "learning_rate": 4.868542435424355e-06,
      "loss": 1.7055,
      "step": 313
    },
    {
      "epoch": 0.39079029247044184,
      "grad_norm": 2.823906183242798,
      "learning_rate": 4.8662361623616235e-06,
      "loss": 1.6819,
      "step": 314
    },
    {
      "epoch": 0.39203484754200374,
      "grad_norm": 2.7798759937286377,
      "learning_rate": 4.863929889298894e-06,
      "loss": 1.6694,
      "step": 315
    },
    {
      "epoch": 0.39327940261356564,
      "grad_norm": 2.599005699157715,
      "learning_rate": 4.861623616236163e-06,
      "loss": 1.6924,
      "step": 316
    },
    {
      "epoch": 0.39452395768512755,
      "grad_norm": 2.687539577484131,
      "learning_rate": 4.859317343173432e-06,
      "loss": 1.685,
      "step": 317
    },
    {
      "epoch": 0.3957685127566895,
      "grad_norm": 2.9679677486419678,
      "learning_rate": 4.857011070110701e-06,
      "loss": 1.718,
      "step": 318
    },
    {
      "epoch": 0.3970130678282514,
      "grad_norm": 2.8611817359924316,
      "learning_rate": 4.854704797047971e-06,
      "loss": 1.7247,
      "step": 319
    },
    {
      "epoch": 0.3982576228998133,
      "grad_norm": 2.7461249828338623,
      "learning_rate": 4.85239852398524e-06,
      "loss": 1.6434,
      "step": 320
    },
    {
      "epoch": 0.3982576228998133,
      "eval_loss": 1.8234667778015137,
      "eval_runtime": 44.3433,
      "eval_samples_per_second": 22.551,
      "eval_steps_per_second": 0.947,
      "step": 320
    },
    {
      "epoch": 0.3995021779713752,
      "grad_norm": 2.7142515182495117,
      "learning_rate": 4.8500922509225095e-06,
      "loss": 1.6855,
      "step": 321
    },
    {
      "epoch": 0.4007467330429372,
      "grad_norm": 2.87727689743042,
      "learning_rate": 4.847785977859779e-06,
      "loss": 1.705,
      "step": 322
    },
    {
      "epoch": 0.4019912881144991,
      "grad_norm": 2.9598982334136963,
      "learning_rate": 4.845479704797048e-06,
      "loss": 1.6482,
      "step": 323
    },
    {
      "epoch": 0.403235843186061,
      "grad_norm": 2.8654463291168213,
      "learning_rate": 4.843173431734318e-06,
      "loss": 1.5816,
      "step": 324
    },
    {
      "epoch": 0.4044803982576229,
      "grad_norm": 2.983837842941284,
      "learning_rate": 4.840867158671587e-06,
      "loss": 1.6962,
      "step": 325
    },
    {
      "epoch": 0.4057249533291848,
      "grad_norm": 2.730914354324341,
      "learning_rate": 4.838560885608857e-06,
      "loss": 1.6991,
      "step": 326
    },
    {
      "epoch": 0.40696950840074675,
      "grad_norm": 2.6901161670684814,
      "learning_rate": 4.836254612546126e-06,
      "loss": 1.6839,
      "step": 327
    },
    {
      "epoch": 0.40821406347230865,
      "grad_norm": 2.6661486625671387,
      "learning_rate": 4.8339483394833955e-06,
      "loss": 1.6894,
      "step": 328
    },
    {
      "epoch": 0.40945861854387056,
      "grad_norm": 2.64243221282959,
      "learning_rate": 4.831642066420665e-06,
      "loss": 1.6533,
      "step": 329
    },
    {
      "epoch": 0.41070317361543246,
      "grad_norm": 2.586725950241089,
      "learning_rate": 4.8293357933579335e-06,
      "loss": 1.6486,
      "step": 330
    },
    {
      "epoch": 0.41070317361543246,
      "eval_loss": 1.82283616065979,
      "eval_runtime": 46.5937,
      "eval_samples_per_second": 21.462,
      "eval_steps_per_second": 0.901,
      "step": 330
    },
    {
      "epoch": 0.4119477286869944,
      "grad_norm": 2.899968147277832,
      "learning_rate": 4.827029520295204e-06,
      "loss": 1.7136,
      "step": 331
    },
    {
      "epoch": 0.4131922837585563,
|
"grad_norm": 3.067671537399292, |
|
"learning_rate": 4.824723247232473e-06, |
|
"loss": 1.672, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.4144368388301182, |
|
"grad_norm": 2.8144404888153076, |
|
"learning_rate": 4.822416974169742e-06, |
|
"loss": 1.7173, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.41568139390168013, |
|
"grad_norm": 3.0551373958587646, |
|
"learning_rate": 4.820110701107011e-06, |
|
"loss": 1.6843, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.4169259489732421, |
|
"grad_norm": 2.7585153579711914, |
|
"learning_rate": 4.817804428044281e-06, |
|
"loss": 1.6834, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.418170504044804, |
|
"grad_norm": 2.8691210746765137, |
|
"learning_rate": 4.81549815498155e-06, |
|
"loss": 1.6814, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.4194150591163659, |
|
"grad_norm": 2.871523380279541, |
|
"learning_rate": 4.8131918819188194e-06, |
|
"loss": 1.6222, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.4206596141879278, |
|
"grad_norm": 2.8928768634796143, |
|
"learning_rate": 4.810885608856089e-06, |
|
"loss": 1.6984, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.42190416925948976, |
|
"grad_norm": 3.285256862640381, |
|
"learning_rate": 4.808579335793358e-06, |
|
"loss": 1.7029, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.42314872433105166, |
|
"grad_norm": 2.785504102706909, |
|
"learning_rate": 4.806273062730628e-06, |
|
"loss": 1.6857, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.42314872433105166, |
|
"eval_loss": 1.8053412437438965, |
|
"eval_runtime": 48.0656, |
|
"eval_samples_per_second": 20.805, |
|
"eval_steps_per_second": 0.874, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.42439327940261357, |
|
"grad_norm": 3.0227086544036865, |
|
"learning_rate": 4.803966789667897e-06, |
|
"loss": 1.6903, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.42563783447417547, |
|
"grad_norm": 2.616769790649414, |
|
"learning_rate": 4.8016605166051665e-06, |
|
"loss": 1.6144, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.4268823895457374, |
|
"grad_norm": 2.9519753456115723, |
|
"learning_rate": 4.799354243542436e-06, |
|
"loss": 1.6157, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.42812694461729933, |
|
"grad_norm": 2.764512062072754, |
|
"learning_rate": 4.797047970479705e-06, |
|
"loss": 1.6248, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.42937149968886124, |
|
"grad_norm": 2.999178886413574, |
|
"learning_rate": 4.794741697416975e-06, |
|
"loss": 1.6764, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.43061605476042314, |
|
"grad_norm": 3.0472185611724854, |
|
"learning_rate": 4.792435424354243e-06, |
|
"loss": 1.6421, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.43186060983198504, |
|
"grad_norm": 2.6638400554656982, |
|
"learning_rate": 4.790129151291514e-06, |
|
"loss": 1.6304, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.433105164903547, |
|
"grad_norm": 2.945295810699463, |
|
"learning_rate": 4.787822878228783e-06, |
|
"loss": 1.6181, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.4343497199751089, |
|
"grad_norm": 2.915125846862793, |
|
"learning_rate": 4.7855166051660525e-06, |
|
"loss": 1.6789, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.4355942750466708, |
|
"grad_norm": 2.5674026012420654, |
|
"learning_rate": 4.783210332103321e-06, |
|
"loss": 1.6723, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4355942750466708, |
|
"eval_loss": 1.8109745979309082, |
|
"eval_runtime": 43.9943, |
|
"eval_samples_per_second": 22.73, |
|
"eval_steps_per_second": 0.955, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4368388301182327, |
|
"grad_norm": 2.9533393383026123, |
|
"learning_rate": 4.7809040590405905e-06, |
|
"loss": 1.6271, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.43808338518979467, |
|
"grad_norm": 2.7677664756774902, |
|
"learning_rate": 4.778597785977861e-06, |
|
"loss": 1.6633, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.4393279402613566, |
|
"grad_norm": 2.527456521987915, |
|
"learning_rate": 4.776291512915129e-06, |
|
"loss": 1.5644, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.4405724953329185, |
|
"grad_norm": 2.9778528213500977, |
|
"learning_rate": 4.773985239852399e-06, |
|
"loss": 1.6166, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.4418170504044804, |
|
"grad_norm": 2.5554800033569336, |
|
"learning_rate": 4.771678966789668e-06, |
|
"loss": 1.5573, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.44306160547604234, |
|
"grad_norm": 2.7415409088134766, |
|
"learning_rate": 4.769372693726938e-06, |
|
"loss": 1.6564, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.44430616054760425, |
|
"grad_norm": 3.0275440216064453, |
|
"learning_rate": 4.767066420664207e-06, |
|
"loss": 1.67, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.44555071561916615, |
|
"grad_norm": 2.720694065093994, |
|
"learning_rate": 4.7647601476014765e-06, |
|
"loss": 1.6374, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.44679527069072805, |
|
"grad_norm": 2.97589111328125, |
|
"learning_rate": 4.762453874538746e-06, |
|
"loss": 1.6596, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.44803982576228996, |
|
"grad_norm": 2.8005781173706055, |
|
"learning_rate": 4.760147601476015e-06, |
|
"loss": 1.6281, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.44803982576228996, |
|
"eval_loss": 1.8139524459838867, |
|
"eval_runtime": 44.5817, |
|
"eval_samples_per_second": 22.431, |
|
"eval_steps_per_second": 0.942, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.4492843808338519, |
|
"grad_norm": 3.3947463035583496, |
|
"learning_rate": 4.757841328413285e-06, |
|
"loss": 1.7149, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.4505289359054138, |
|
"grad_norm": 2.991457939147949, |
|
"learning_rate": 4.755535055350554e-06, |
|
"loss": 1.5971, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.4517734909769757, |
|
"grad_norm": 2.653313398361206, |
|
"learning_rate": 4.753228782287823e-06, |
|
"loss": 1.6465, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.4530180460485376, |
|
"grad_norm": 2.716360569000244, |
|
"learning_rate": 4.750922509225093e-06, |
|
"loss": 1.6775, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.4542626011200996, |
|
"grad_norm": 2.733598470687866, |
|
"learning_rate": 4.748616236162362e-06, |
|
"loss": 1.6087, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.4555071561916615, |
|
"grad_norm": 2.581547737121582, |
|
"learning_rate": 4.746309963099631e-06, |
|
"loss": 1.6339, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.4567517112632234, |
|
"grad_norm": 2.6841800212860107, |
|
"learning_rate": 4.7440036900369e-06, |
|
"loss": 1.6193, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.4579962663347853, |
|
"grad_norm": 2.658369302749634, |
|
"learning_rate": 4.741697416974171e-06, |
|
"loss": 1.6287, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.45924082140634725, |
|
"grad_norm": 2.7204179763793945, |
|
"learning_rate": 4.739391143911439e-06, |
|
"loss": 1.6755, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.46048537647790916, |
|
"grad_norm": 2.6627941131591797, |
|
"learning_rate": 4.737084870848709e-06, |
|
"loss": 1.6142, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.46048537647790916, |
|
"eval_loss": 1.8019787073135376, |
|
"eval_runtime": 42.7804, |
|
"eval_samples_per_second": 23.375, |
|
"eval_steps_per_second": 0.982, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.46172993154947106, |
|
"grad_norm": 2.650705337524414, |
|
"learning_rate": 4.734778597785978e-06, |
|
"loss": 1.641, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.46297448662103297, |
|
"grad_norm": 2.7031619548797607, |
|
"learning_rate": 4.7324723247232475e-06, |
|
"loss": 1.6596, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.4642190416925949, |
|
"grad_norm": 3.3050339221954346, |
|
"learning_rate": 4.730166051660517e-06, |
|
"loss": 1.6664, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.46546359676415683, |
|
"grad_norm": 2.841404676437378, |
|
"learning_rate": 4.727859778597786e-06, |
|
"loss": 1.6474, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.46670815183571873, |
|
"grad_norm": 3.0990259647369385, |
|
"learning_rate": 4.725553505535056e-06, |
|
"loss": 1.6008, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.46795270690728064, |
|
"grad_norm": 2.987772226333618, |
|
"learning_rate": 4.723247232472325e-06, |
|
"loss": 1.6374, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.46919726197884254, |
|
"grad_norm": 3.3345255851745605, |
|
"learning_rate": 4.720940959409595e-06, |
|
"loss": 1.6405, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.4704418170504045, |
|
"grad_norm": 3.0810067653656006, |
|
"learning_rate": 4.718634686346864e-06, |
|
"loss": 1.6229, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.4716863721219664, |
|
"grad_norm": 3.246685266494751, |
|
"learning_rate": 4.716328413284133e-06, |
|
"loss": 1.6262, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.4729309271935283, |
|
"grad_norm": 2.6052889823913574, |
|
"learning_rate": 4.714022140221403e-06, |
|
"loss": 1.6241, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4729309271935283, |
|
"eval_loss": 1.7957485914230347, |
|
"eval_runtime": 43.6626, |
|
"eval_samples_per_second": 22.903, |
|
"eval_steps_per_second": 0.962, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4741754822650902, |
|
"grad_norm": 3.747142791748047, |
|
"learning_rate": 4.711715867158672e-06, |
|
"loss": 1.6764, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.47542003733665217, |
|
"grad_norm": 2.8508121967315674, |
|
"learning_rate": 4.709409594095941e-06, |
|
"loss": 1.6597, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.47666459240821407, |
|
"grad_norm": 3.5615711212158203, |
|
"learning_rate": 4.70710332103321e-06, |
|
"loss": 1.6089, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.477909147479776, |
|
"grad_norm": 2.5886309146881104, |
|
"learning_rate": 4.704797047970481e-06, |
|
"loss": 1.5996, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.4791537025513379, |
|
"grad_norm": 3.3900864124298096, |
|
"learning_rate": 4.702490774907749e-06, |
|
"loss": 1.7137, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.48039825762289984, |
|
"grad_norm": 2.913641929626465, |
|
"learning_rate": 4.700184501845019e-06, |
|
"loss": 1.6512, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.48164281269446174, |
|
"grad_norm": 2.810722827911377, |
|
"learning_rate": 4.697878228782288e-06, |
|
"loss": 1.6939, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.48288736776602365, |
|
"grad_norm": 3.2701401710510254, |
|
"learning_rate": 4.6955719557195575e-06, |
|
"loss": 1.6455, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.48413192283758555, |
|
"grad_norm": 2.972931385040283, |
|
"learning_rate": 4.693265682656827e-06, |
|
"loss": 1.6272, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.48537647790914745, |
|
"grad_norm": 3.2157890796661377, |
|
"learning_rate": 4.690959409594096e-06, |
|
"loss": 1.6409, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.48537647790914745, |
|
"eval_loss": 1.7859243154525757, |
|
"eval_runtime": 45.2093, |
|
"eval_samples_per_second": 22.119, |
|
"eval_steps_per_second": 0.929, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4866210329807094, |
|
"grad_norm": 2.783360719680786, |
|
"learning_rate": 4.688653136531366e-06, |
|
"loss": 1.6303, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.4878655880522713, |
|
"grad_norm": 2.6783013343811035, |
|
"learning_rate": 4.686346863468635e-06, |
|
"loss": 1.7259, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.4891101431238332, |
|
"grad_norm": 2.899916172027588, |
|
"learning_rate": 4.6840405904059046e-06, |
|
"loss": 1.6434, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.4903546981953951, |
|
"grad_norm": 2.604377269744873, |
|
"learning_rate": 4.681734317343174e-06, |
|
"loss": 1.6838, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.4915992532669571, |
|
"grad_norm": 3.0830864906311035, |
|
"learning_rate": 4.6794280442804426e-06, |
|
"loss": 1.6053, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.492843808338519, |
|
"grad_norm": 3.1381475925445557, |
|
"learning_rate": 4.677121771217713e-06, |
|
"loss": 1.6132, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.4940883634100809, |
|
"grad_norm": 3.097426652908325, |
|
"learning_rate": 4.674815498154982e-06, |
|
"loss": 1.6237, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.4953329184816428, |
|
"grad_norm": 2.9942305088043213, |
|
"learning_rate": 4.672509225092252e-06, |
|
"loss": 1.6554, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.49657747355320475, |
|
"grad_norm": 2.834199905395508, |
|
"learning_rate": 4.67020295202952e-06, |
|
"loss": 1.6389, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.49782202862476665, |
|
"grad_norm": 3.183389902114868, |
|
"learning_rate": 4.66789667896679e-06, |
|
"loss": 1.6518, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.49782202862476665, |
|
"eval_loss": 1.781000018119812, |
|
"eval_runtime": 45.4127, |
|
"eval_samples_per_second": 22.02, |
|
"eval_steps_per_second": 0.925, |
|
"step": 400 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2409, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.246923763744768e+17, |
|
"train_batch_size": 3, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|