{
  "best_metric": 0.7111111111111111,
  "best_model_checkpoint": "CTMAE-P2-V4-S3/checkpoint-4698",
  "epoch": 49.02,
  "eval_steps": 500,
  "global_step": 13050,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0007662835249042146,
      "grad_norm": 5.78713321685791,
      "learning_rate": 7.662835249042146e-08,
      "loss": 0.7127,
      "step": 10
    },
    {
      "epoch": 0.0015325670498084292,
      "grad_norm": 5.718565940856934,
      "learning_rate": 1.5325670498084292e-07,
      "loss": 0.7457,
      "step": 20
    },
    {
      "epoch": 0.0022988505747126436,
      "grad_norm": 5.675544261932373,
      "learning_rate": 2.2988505747126437e-07,
      "loss": 0.7094,
      "step": 30
    },
    {
      "epoch": 0.0030651340996168583,
      "grad_norm": 5.074069976806641,
      "learning_rate": 3.0651340996168583e-07,
      "loss": 0.7117,
      "step": 40
    },
    {
      "epoch": 0.0038314176245210726,
      "grad_norm": 5.342907428741455,
      "learning_rate": 3.831417624521073e-07,
      "loss": 0.7096,
      "step": 50
    },
    {
      "epoch": 0.004597701149425287,
      "grad_norm": 6.194801330566406,
      "learning_rate": 4.5977011494252875e-07,
      "loss": 0.6906,
      "step": 60
    },
    {
      "epoch": 0.0053639846743295016,
      "grad_norm": 6.671578884124756,
      "learning_rate": 5.363984674329502e-07,
      "loss": 0.702,
      "step": 70
    },
    {
      "epoch": 0.006130268199233717,
      "grad_norm": 6.9470367431640625,
      "learning_rate": 6.130268199233717e-07,
      "loss": 0.6875,
      "step": 80
    },
    {
      "epoch": 0.006896551724137931,
      "grad_norm": 5.741835117340088,
      "learning_rate": 6.896551724137931e-07,
      "loss": 0.6539,
      "step": 90
    },
    {
      "epoch": 0.007662835249042145,
      "grad_norm": 8.77403450012207,
      "learning_rate": 7.662835249042146e-07,
      "loss": 0.7427,
      "step": 100
    },
    {
      "epoch": 0.00842911877394636,
      "grad_norm": 6.60261344909668,
      "learning_rate": 8.429118773946361e-07,
      "loss": 0.6371,
      "step": 110
    },
    {
      "epoch": 0.009195402298850575,
      "grad_norm": 10.245949745178223,
      "learning_rate": 9.195402298850575e-07,
      "loss": 0.612,
      "step": 120
    },
    {
      "epoch": 0.00996168582375479,
      "grad_norm": 9.052543640136719,
      "learning_rate": 9.96168582375479e-07,
      "loss": 0.5196,
      "step": 130
    },
    {
      "epoch": 0.010727969348659003,
      "grad_norm": 9.11584186553955,
      "learning_rate": 1.0727969348659004e-06,
      "loss": 0.666,
      "step": 140
    },
    {
      "epoch": 0.011494252873563218,
      "grad_norm": 27.162185668945312,
      "learning_rate": 1.1494252873563219e-06,
      "loss": 0.5036,
      "step": 150
    },
    {
      "epoch": 0.012260536398467433,
      "grad_norm": 26.26243782043457,
      "learning_rate": 1.2260536398467433e-06,
      "loss": 0.5363,
      "step": 160
    },
    {
      "epoch": 0.013026819923371647,
      "grad_norm": 10.345433235168457,
      "learning_rate": 1.3026819923371648e-06,
      "loss": 0.5589,
      "step": 170
    },
    {
      "epoch": 0.013793103448275862,
      "grad_norm": 6.396850109100342,
      "learning_rate": 1.3793103448275862e-06,
      "loss": 0.5457,
      "step": 180
    },
    {
      "epoch": 0.014559386973180077,
      "grad_norm": 5.104989528656006,
      "learning_rate": 1.455938697318008e-06,
      "loss": 0.821,
      "step": 190
    },
    {
      "epoch": 0.01532567049808429,
      "grad_norm": 9.261125564575195,
      "learning_rate": 1.5325670498084292e-06,
      "loss": 0.8162,
      "step": 200
    },
    {
      "epoch": 0.016091954022988506,
      "grad_norm": 2.922889471054077,
      "learning_rate": 1.6091954022988506e-06,
      "loss": 0.8582,
      "step": 210
    },
    {
      "epoch": 0.01685823754789272,
      "grad_norm": 5.474356651306152,
      "learning_rate": 1.6858237547892723e-06,
      "loss": 1.2444,
      "step": 220
    },
    {
      "epoch": 0.017624521072796936,
      "grad_norm": 1.6021324396133423,
      "learning_rate": 1.7624521072796935e-06,
      "loss": 0.0405,
      "step": 230
    },
    {
      "epoch": 0.01839080459770115,
      "grad_norm": 0.5833091735839844,
      "learning_rate": 1.839080459770115e-06,
      "loss": 0.4125,
      "step": 240
    },
    {
      "epoch": 0.019157088122605363,
      "grad_norm": 1.004506230354309,
      "learning_rate": 1.9157088122605367e-06,
      "loss": 1.3072,
      "step": 250
    },
    {
      "epoch": 0.01992337164750958,
      "grad_norm": 0.3814922571182251,
      "learning_rate": 1.992337164750958e-06,
      "loss": 1.3133,
      "step": 260
    },
    {
      "epoch": 0.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 2.0786259174346924,
      "eval_runtime": 28.7603,
      "eval_samples_per_second": 1.565,
      "eval_steps_per_second": 1.565,
      "step": 261
    },
    {
      "epoch": 1.0006896551724138,
      "grad_norm": 0.3009163737297058,
      "learning_rate": 2.0689655172413796e-06,
      "loss": 1.0363,
      "step": 270
    },
    {
      "epoch": 1.001455938697318,
      "grad_norm": 129.52061462402344,
      "learning_rate": 2.145593869731801e-06,
      "loss": 1.4158,
      "step": 280
    },
    {
      "epoch": 1.0022222222222221,
      "grad_norm": 2.0100300312042236,
      "learning_rate": 2.222222222222222e-06,
      "loss": 1.0448,
      "step": 290
    },
    {
      "epoch": 1.0029885057471264,
      "grad_norm": 0.24209345877170563,
      "learning_rate": 2.2988505747126437e-06,
      "loss": 0.79,
      "step": 300
    },
    {
      "epoch": 1.0037547892720307,
      "grad_norm": 0.5508049130439758,
      "learning_rate": 2.3754789272030654e-06,
      "loss": 2.6733,
      "step": 310
    },
    {
      "epoch": 1.004521072796935,
      "grad_norm": 0.7398741245269775,
      "learning_rate": 2.4521072796934867e-06,
      "loss": 1.4254,
      "step": 320
    },
    {
      "epoch": 1.0052873563218392,
      "grad_norm": 0.6040136218070984,
      "learning_rate": 2.5287356321839083e-06,
      "loss": 1.8749,
      "step": 330
    },
    {
      "epoch": 1.0060536398467432,
      "grad_norm": 0.4468666613101959,
      "learning_rate": 2.6053639846743296e-06,
      "loss": 0.9238,
      "step": 340
    },
    {
      "epoch": 1.0068199233716475,
      "grad_norm": 0.574260950088501,
      "learning_rate": 2.6819923371647512e-06,
      "loss": 1.9996,
      "step": 350
    },
    {
      "epoch": 1.0075862068965518,
      "grad_norm": 64.08356475830078,
      "learning_rate": 2.7586206896551725e-06,
      "loss": 1.4815,
      "step": 360
    },
    {
      "epoch": 1.008352490421456,
      "grad_norm": 0.6038119792938232,
      "learning_rate": 2.835249042145594e-06,
      "loss": 1.919,
      "step": 370
    },
    {
      "epoch": 1.00911877394636,
      "grad_norm": 0.3107043504714966,
      "learning_rate": 2.911877394636016e-06,
      "loss": 0.5302,
      "step": 380
    },
    {
      "epoch": 1.0098850574712643,
      "grad_norm": 167.3493194580078,
      "learning_rate": 2.988505747126437e-06,
      "loss": 2.5656,
      "step": 390
    },
    {
      "epoch": 1.0106513409961686,
      "grad_norm": 0.36962807178497314,
      "learning_rate": 3.0651340996168583e-06,
      "loss": 0.4602,
      "step": 400
    },
    {
      "epoch": 1.0114176245210729,
      "grad_norm": 93.20677185058594,
      "learning_rate": 3.14176245210728e-06,
      "loss": 1.1151,
      "step": 410
    },
    {
      "epoch": 1.012183908045977,
      "grad_norm": 0.33222442865371704,
      "learning_rate": 3.2183908045977012e-06,
      "loss": 1.5841,
      "step": 420
    },
    {
      "epoch": 1.0129501915708812,
      "grad_norm": 0.32849252223968506,
      "learning_rate": 3.295019157088123e-06,
      "loss": 1.6505,
      "step": 430
    },
    {
      "epoch": 1.0137164750957854,
      "grad_norm": 0.29110532999038696,
      "learning_rate": 3.3716475095785446e-06,
      "loss": 1.5132,
      "step": 440
    },
    {
      "epoch": 1.0144827586206897,
      "grad_norm": 0.24014906585216522,
      "learning_rate": 3.448275862068966e-06,
      "loss": 0.0049,
      "step": 450
    },
    {
      "epoch": 1.015249042145594,
      "grad_norm": 0.3130265176296234,
      "learning_rate": 3.524904214559387e-06,
      "loss": 1.0703,
      "step": 460
    },
    {
      "epoch": 1.016015325670498,
      "grad_norm": 0.7240421772003174,
      "learning_rate": 3.6015325670498087e-06,
      "loss": 1.5601,
      "step": 470
    },
    {
      "epoch": 1.0167816091954023,
      "grad_norm": 0.7779229283332825,
      "learning_rate": 3.67816091954023e-06,
      "loss": 1.8425,
      "step": 480
    },
    {
      "epoch": 1.0175478927203065,
      "grad_norm": 0.2861528992652893,
      "learning_rate": 3.7547892720306517e-06,
      "loss": 1.7631,
      "step": 490
    },
    {
      "epoch": 1.0183141762452108,
      "grad_norm": 0.22280102968215942,
      "learning_rate": 3.831417624521073e-06,
      "loss": 0.4159,
      "step": 500
    },
    {
      "epoch": 1.0190804597701149,
      "grad_norm": 0.14091116189956665,
      "learning_rate": 3.908045977011495e-06,
      "loss": 0.5537,
      "step": 510
    },
    {
      "epoch": 1.0198467432950191,
      "grad_norm": 0.21295303106307983,
      "learning_rate": 3.984674329501916e-06,
      "loss": 0.6004,
      "step": 520
    },
    {
      "epoch": 1.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 2.5564706325531006,
      "eval_runtime": 27.9692,
      "eval_samples_per_second": 1.609,
      "eval_steps_per_second": 1.609,
      "step": 522
    },
    {
      "epoch": 2.0006130268199236,
      "grad_norm": 56.727325439453125,
      "learning_rate": 4.0613026819923375e-06,
      "loss": 1.7394,
      "step": 530
    },
    {
      "epoch": 2.0013793103448276,
      "grad_norm": 2.81852126121521,
      "learning_rate": 4.137931034482759e-06,
      "loss": 2.2782,
      "step": 540
    },
    {
      "epoch": 2.0021455938697317,
      "grad_norm": 69.70447540283203,
      "learning_rate": 4.214559386973181e-06,
      "loss": 0.8623,
      "step": 550
    },
    {
      "epoch": 2.002911877394636,
      "grad_norm": 61.30961227416992,
      "learning_rate": 4.291187739463602e-06,
      "loss": 2.3654,
      "step": 560
    },
    {
      "epoch": 2.00367816091954,
      "grad_norm": 0.9703733921051025,
      "learning_rate": 4.367816091954023e-06,
      "loss": 1.6526,
      "step": 570
    },
    {
      "epoch": 2.0044444444444443,
      "grad_norm": 0.48899760842323303,
      "learning_rate": 4.444444444444444e-06,
      "loss": 1.3719,
      "step": 580
    },
    {
      "epoch": 2.0052107279693487,
      "grad_norm": 0.4851645827293396,
      "learning_rate": 4.521072796934866e-06,
      "loss": 0.9701,
      "step": 590
    },
    {
      "epoch": 2.005977011494253,
      "grad_norm": 0.40878111124038696,
      "learning_rate": 4.5977011494252875e-06,
      "loss": 2.4902,
      "step": 600
    },
    {
      "epoch": 2.0067432950191573,
      "grad_norm": 0.3135189116001129,
      "learning_rate": 4.674329501915709e-06,
      "loss": 0.4164,
      "step": 610
    },
    {
      "epoch": 2.0075095785440613,
      "grad_norm": 0.1862274408340454,
      "learning_rate": 4.750957854406131e-06,
      "loss": 1.0298,
      "step": 620
    },
    {
      "epoch": 2.0082758620689654,
      "grad_norm": 0.19461576640605927,
      "learning_rate": 4.8275862068965525e-06,
      "loss": 0.0048,
      "step": 630
    },
    {
      "epoch": 2.00904214559387,
      "grad_norm": 61.038124084472656,
      "learning_rate": 4.904214559386973e-06,
      "loss": 2.3317,
      "step": 640
    },
    {
      "epoch": 2.009808429118774,
      "grad_norm": 53.239498138427734,
      "learning_rate": 4.980842911877395e-06,
      "loss": 1.0729,
      "step": 650
    },
    {
      "epoch": 2.0105747126436784,
      "grad_norm": 0.29409465193748474,
      "learning_rate": 5.057471264367817e-06,
      "loss": 0.5395,
      "step": 660
    },
    {
      "epoch": 2.0113409961685824,
      "grad_norm": 45.73566818237305,
      "learning_rate": 5.134099616858238e-06,
      "loss": 1.103,
      "step": 670
    },
    {
      "epoch": 2.0121072796934865,
      "grad_norm": 46.631378173828125,
      "learning_rate": 5.210727969348659e-06,
      "loss": 2.0557,
      "step": 680
    },
    {
      "epoch": 2.012873563218391,
      "grad_norm": 0.39027202129364014,
      "learning_rate": 5.287356321839081e-06,
      "loss": 0.4729,
      "step": 690
    },
    {
      "epoch": 2.013639846743295,
      "grad_norm": 40.25749588012695,
      "learning_rate": 5.3639846743295025e-06,
      "loss": 1.4261,
      "step": 700
    },
    {
      "epoch": 2.014406130268199,
      "grad_norm": 0.3236549198627472,
      "learning_rate": 5.440613026819924e-06,
      "loss": 0.9894,
      "step": 710
    },
    {
      "epoch": 2.0151724137931035,
      "grad_norm": 0.18210084736347198,
      "learning_rate": 5.517241379310345e-06,
      "loss": 2.0841,
      "step": 720
    },
    {
      "epoch": 2.0159386973180076,
      "grad_norm": 46.184043884277344,
      "learning_rate": 5.593869731800766e-06,
      "loss": 1.6895,
      "step": 730
    },
    {
      "epoch": 2.016704980842912,
      "grad_norm": 0.5805896520614624,
      "learning_rate": 5.670498084291188e-06,
      "loss": 0.3677,
      "step": 740
    },
    {
      "epoch": 2.017471264367816,
      "grad_norm": 45.431270599365234,
      "learning_rate": 5.747126436781609e-06,
      "loss": 0.6003,
      "step": 750
    },
    {
      "epoch": 2.01823754789272,
      "grad_norm": 0.07333827018737793,
      "learning_rate": 5.823754789272032e-06,
      "loss": 0.6145,
      "step": 760
    },
    {
      "epoch": 2.0190038314176246,
      "grad_norm": 0.105204276740551,
      "learning_rate": 5.9003831417624525e-06,
      "loss": 1.2102,
      "step": 770
    },
    {
      "epoch": 2.0197701149425287,
      "grad_norm": 0.23360463976860046,
      "learning_rate": 5.977011494252874e-06,
      "loss": 1.6674,
      "step": 780
    },
    {
      "epoch": 2.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 2.1021173000335693,
      "eval_runtime": 25.7407,
      "eval_samples_per_second": 1.748,
      "eval_steps_per_second": 1.748,
      "step": 783
    },
    {
      "epoch": 3.000536398467433,
      "grad_norm": 34.55115509033203,
      "learning_rate": 6.053639846743296e-06,
      "loss": 1.3633,
      "step": 790
    },
    {
      "epoch": 3.001302681992337,
      "grad_norm": 0.9316493272781372,
      "learning_rate": 6.130268199233717e-06,
      "loss": 0.4478,
      "step": 800
    },
    {
      "epoch": 3.0020689655172412,
      "grad_norm": 0.8690051436424255,
      "learning_rate": 6.206896551724138e-06,
      "loss": 0.9098,
      "step": 810
    },
    {
      "epoch": 3.0028352490421457,
      "grad_norm": 0.08970779180526733,
      "learning_rate": 6.28352490421456e-06,
      "loss": 1.0919,
      "step": 820
    },
    {
      "epoch": 3.0036015325670498,
      "grad_norm": 0.40346193313598633,
      "learning_rate": 6.360153256704982e-06,
      "loss": 1.4719,
      "step": 830
    },
    {
      "epoch": 3.004367816091954,
      "grad_norm": 34.25103759765625,
      "learning_rate": 6.4367816091954025e-06,
      "loss": 0.9572,
      "step": 840
    },
    {
      "epoch": 3.0051340996168583,
      "grad_norm": 0.5762563347816467,
      "learning_rate": 6.513409961685824e-06,
      "loss": 1.7809,
      "step": 850
    },
    {
      "epoch": 3.0059003831417623,
      "grad_norm": 0.8154065012931824,
      "learning_rate": 6.590038314176246e-06,
      "loss": 0.7823,
      "step": 860
    },
    {
      "epoch": 3.006666666666667,
      "grad_norm": 0.3043971657752991,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.4039,
      "step": 870
    },
    {
      "epoch": 3.007432950191571,
      "grad_norm": 0.09112297743558884,
      "learning_rate": 6.743295019157089e-06,
      "loss": 0.0051,
      "step": 880
    },
    {
      "epoch": 3.008199233716475,
      "grad_norm": 32.38028335571289,
      "learning_rate": 6.81992337164751e-06,
      "loss": 1.2145,
      "step": 890
    },
    {
      "epoch": 3.0089655172413794,
      "grad_norm": 0.6471147537231445,
      "learning_rate": 6.896551724137932e-06,
      "loss": 1.5575,
      "step": 900
    },
    {
      "epoch": 3.0097318007662834,
      "grad_norm": 0.13889610767364502,
      "learning_rate": 6.973180076628353e-06,
      "loss": 1.018,
      "step": 910
    },
    {
      "epoch": 3.010498084291188,
      "grad_norm": 43.24924850463867,
      "learning_rate": 7.049808429118774e-06,
      "loss": 2.4933,
      "step": 920
    },
    {
      "epoch": 3.011264367816092,
      "grad_norm": 0.2864219546318054,
      "learning_rate": 7.126436781609196e-06,
      "loss": 0.0347,
      "step": 930
    },
    {
      "epoch": 3.012030651340996,
      "grad_norm": 34.56663513183594,
      "learning_rate": 7.2030651340996175e-06,
      "loss": 2.0219,
      "step": 940
    },
    {
      "epoch": 3.0127969348659005,
      "grad_norm": 0.609322726726532,
      "learning_rate": 7.279693486590039e-06,
      "loss": 0.9786,
      "step": 950
    },
    {
      "epoch": 3.0135632183908045,
      "grad_norm": 38.839969635009766,
      "learning_rate": 7.35632183908046e-06,
      "loss": 0.939,
      "step": 960
    },
    {
      "epoch": 3.014329501915709,
      "grad_norm": 32.40059280395508,
      "learning_rate": 7.4329501915708825e-06,
      "loss": 2.4894,
      "step": 970
    },
    {
      "epoch": 3.015095785440613,
      "grad_norm": 0.35538342595100403,
      "learning_rate": 7.509578544061303e-06,
      "loss": 0.4342,
      "step": 980
    },
    {
      "epoch": 3.015862068965517,
      "grad_norm": 0.48621436953544617,
      "learning_rate": 7.586206896551724e-06,
      "loss": 1.2555,
      "step": 990
    },
    {
      "epoch": 3.0166283524904216,
      "grad_norm": 0.1707615852355957,
      "learning_rate": 7.662835249042147e-06,
      "loss": 0.4916,
      "step": 1000
    },
    {
      "epoch": 3.0173946360153256,
      "grad_norm": 33.12370300292969,
      "learning_rate": 7.739463601532567e-06,
      "loss": 2.4789,
      "step": 1010
    },
    {
      "epoch": 3.0181609195402297,
      "grad_norm": 1.8646819591522217,
      "learning_rate": 7.81609195402299e-06,
      "loss": 1.7722,
      "step": 1020
    },
    {
      "epoch": 3.018927203065134,
      "grad_norm": 0.5279535055160522,
      "learning_rate": 7.89272030651341e-06,
      "loss": 0.9427,
      "step": 1030
    },
    {
      "epoch": 3.0196934865900382,
      "grad_norm": 0.36963194608688354,
      "learning_rate": 7.969348659003832e-06,
      "loss": 0.7822,
      "step": 1040
    },
    {
      "epoch": 3.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 2.1913793087005615,
      "eval_runtime": 25.9571,
      "eval_samples_per_second": 1.734,
      "eval_steps_per_second": 1.734,
      "step": 1044
    },
    {
      "epoch": 4.000459770114943,
      "grad_norm": 0.11884228885173798,
      "learning_rate": 8.045977011494253e-06,
      "loss": 0.9994,
      "step": 1050
    },
    {
      "epoch": 4.001226053639847,
      "grad_norm": 27.315988540649414,
      "learning_rate": 8.122605363984675e-06,
      "loss": 0.5647,
      "step": 1060
    },
    {
      "epoch": 4.001992337164751,
      "grad_norm": 29.26296043395996,
      "learning_rate": 8.199233716475097e-06,
      "loss": 1.0415,
      "step": 1070
    },
    {
      "epoch": 4.002758620689655,
      "grad_norm": 0.4243505299091339,
      "learning_rate": 8.275862068965518e-06,
      "loss": 1.8669,
      "step": 1080
    },
    {
      "epoch": 4.00352490421456,
      "grad_norm": 0.5804070234298706,
      "learning_rate": 8.35249042145594e-06,
      "loss": 0.8037,
      "step": 1090
    },
    {
      "epoch": 4.004291187739463,
      "grad_norm": 30.714359283447266,
      "learning_rate": 8.429118773946362e-06,
      "loss": 1.358,
      "step": 1100
    },
    {
      "epoch": 4.005057471264368,
      "grad_norm": 27.16830825805664,
      "learning_rate": 8.505747126436782e-06,
      "loss": 1.799,
      "step": 1110
    },
    {
      "epoch": 4.005823754789272,
      "grad_norm": 1.4379671812057495,
      "learning_rate": 8.582375478927203e-06,
      "loss": 1.203,
      "step": 1120
    },
    {
      "epoch": 4.006590038314176,
      "grad_norm": 0.18475264310836792,
      "learning_rate": 8.659003831417625e-06,
      "loss": 1.0007,
      "step": 1130
    },
    {
      "epoch": 4.00735632183908,
      "grad_norm": 45.419288635253906,
      "learning_rate": 8.735632183908047e-06,
      "loss": 1.6504,
      "step": 1140
    },
    {
      "epoch": 4.008122605363985,
      "grad_norm": 0.49893710017204285,
      "learning_rate": 8.812260536398468e-06,
      "loss": 0.8582,
      "step": 1150
    },
    {
      "epoch": 4.0088888888888885,
      "grad_norm": 0.12608516216278076,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.0214,
      "step": 1160
    },
    {
      "epoch": 4.009655172413793,
      "grad_norm": 0.8027613759040833,
      "learning_rate": 8.965517241379312e-06,
      "loss": 1.2684,
      "step": 1170
    },
    {
      "epoch": 4.0104214559386975,
      "grad_norm": 0.6243689656257629,
      "learning_rate": 9.042145593869732e-06,
      "loss": 1.5253,
      "step": 1180
    },
    {
      "epoch": 4.011187739463602,
      "grad_norm": 0.30972281098365784,
      "learning_rate": 9.118773946360155e-06,
      "loss": 1.3132,
      "step": 1190
    },
    {
      "epoch": 4.011954022988506,
      "grad_norm": 0.15964627265930176,
      "learning_rate": 9.195402298850575e-06,
      "loss": 0.9737,
      "step": 1200
    },
    {
      "epoch": 4.01272030651341,
      "grad_norm": 0.11204283684492111,
      "learning_rate": 9.272030651340997e-06,
      "loss": 0.4998,
      "step": 1210
    },
    {
      "epoch": 4.0134865900383145,
      "grad_norm": 0.08842886984348297,
      "learning_rate": 9.348659003831418e-06,
      "loss": 0.6496,
      "step": 1220
    },
    {
      "epoch": 4.014252873563218,
      "grad_norm": 0.7018284797668457,
      "learning_rate": 9.42528735632184e-06,
      "loss": 2.0747,
      "step": 1230
    },
    {
      "epoch": 4.015019157088123,
      "grad_norm": 0.33458009362220764,
      "learning_rate": 9.501915708812262e-06,
      "loss": 1.6904,
      "step": 1240
    },
    {
      "epoch": 4.015785440613027,
      "grad_norm": 38.35465621948242,
      "learning_rate": 9.578544061302683e-06,
      "loss": 1.1873,
      "step": 1250
    },
    {
      "epoch": 4.016551724137931,
      "grad_norm": 0.09213503450155258,
      "learning_rate": 9.655172413793105e-06,
      "loss": 0.4954,
      "step": 1260
    },
    {
      "epoch": 4.017318007662835,
      "grad_norm": 0.10861529409885406,
      "learning_rate": 9.731800766283525e-06,
      "loss": 0.6038,
      "step": 1270
    },
    {
      "epoch": 4.01808429118774,
      "grad_norm": 0.10436627268791199,
      "learning_rate": 9.808429118773947e-06,
      "loss": 1.1949,
      "step": 1280
    },
    {
      "epoch": 4.018850574712643,
      "grad_norm": 0.16878753900527954,
      "learning_rate": 9.885057471264368e-06,
      "loss": 1.0687,
      "step": 1290
    },
    {
      "epoch": 4.019616858237548,
      "grad_norm": 34.10907745361328,
      "learning_rate": 9.96168582375479e-06,
      "loss": 1.6707,
      "step": 1300
    },
    {
      "epoch": 4.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 1.996254324913025,
      "eval_runtime": 25.7113,
      "eval_samples_per_second": 1.75,
      "eval_steps_per_second": 1.75,
      "step": 1305
    },
    {
      "epoch": 5.000383141762452,
      "grad_norm": 0.31460273265838623,
      "learning_rate": 9.995742869306088e-06,
      "loss": 0.8506,
      "step": 1310
    },
    {
      "epoch": 5.001149425287356,
      "grad_norm": 0.5211421847343445,
      "learning_rate": 9.987228607918263e-06,
      "loss": 1.564,
      "step": 1320
    },
    {
      "epoch": 5.001915708812261,
      "grad_norm": 0.1419966220855713,
      "learning_rate": 9.97871434653044e-06,
      "loss": 0.4915,
      "step": 1330
    },
    {
      "epoch": 5.002681992337164,
      "grad_norm": 0.21831358969211578,
      "learning_rate": 9.970200085142615e-06,
      "loss": 1.2529,
      "step": 1340
    },
    {
      "epoch": 5.003448275862069,
      "grad_norm": 27.290477752685547,
      "learning_rate": 9.96168582375479e-06,
      "loss": 1.8882,
      "step": 1350
    },
    {
      "epoch": 5.004214559386973,
      "grad_norm": 57.21908950805664,
      "learning_rate": 9.953171562366965e-06,
      "loss": 0.9694,
      "step": 1360
    },
    {
      "epoch": 5.004980842911878,
      "grad_norm": 0.14878727495670319,
      "learning_rate": 9.944657300979142e-06,
      "loss": 0.0066,
      "step": 1370
    },
    {
      "epoch": 5.005747126436781,
      "grad_norm": 0.1945212334394455,
      "learning_rate": 9.936143039591317e-06,
      "loss": 1.6235,
      "step": 1380
    },
    {
      "epoch": 5.006513409961686,
      "grad_norm": 58.22991943359375,
      "learning_rate": 9.927628778203492e-06,
      "loss": 1.968,
      "step": 1390
    },
    {
      "epoch": 5.00727969348659,
      "grad_norm": 0.12945781648159027,
      "learning_rate": 9.919114516815667e-06,
      "loss": 0.4166,
      "step": 1400
    },
    {
      "epoch": 5.008045977011494,
      "grad_norm": 40.8744010925293,
      "learning_rate": 9.910600255427842e-06,
      "loss": 2.556,
      "step": 1410
    },
    {
      "epoch": 5.0088122605363985,
      "grad_norm": 0.1400824636220932,
      "learning_rate": 9.902085994040018e-06,
      "loss": 0.5527,
      "step": 1420
    },
    {
      "epoch": 5.009578544061303,
      "grad_norm": 37.295921325683594,
      "learning_rate": 9.893571732652193e-06,
      "loss": 1.5533,
      "step": 1430
    },
    {
      "epoch": 5.010344827586207,
      "grad_norm": 1.6201562881469727,
      "learning_rate": 9.885057471264368e-06,
      "loss": 1.7586,
      "step": 1440
    },
    {
      "epoch": 5.011111111111111,
      "grad_norm": 0.2895391583442688,
      "learning_rate": 9.876543209876543e-06,
      "loss": 0.7412,
      "step": 1450
    },
    {
      "epoch": 5.011877394636016,
      "grad_norm": 0.16323475539684296,
      "learning_rate": 9.86802894848872e-06,
      "loss": 0.9269,
      "step": 1460
    },
    {
      "epoch": 5.012643678160919,
      "grad_norm": 0.3992156386375427,
      "learning_rate": 9.859514687100895e-06,
      "loss": 0.9285,
      "step": 1470
    },
    {
      "epoch": 5.013409961685824,
      "grad_norm": 0.9059655070304871,
      "learning_rate": 9.85100042571307e-06,
      "loss": 1.6443,
      "step": 1480
    },
    {
      "epoch": 5.014176245210728,
      "grad_norm": 0.20975957810878754,
      "learning_rate": 9.842486164325245e-06,
      "loss": 0.7709,
      "step": 1490
    },
    {
      "epoch": 5.014942528735633,
      "grad_norm": 0.12266507744789124,
      "learning_rate": 9.833971902937422e-06,
      "loss": 1.6198,
      "step": 1500
    },
    {
      "epoch": 5.015708812260536,
      "grad_norm": 0.5826163291931152,
      "learning_rate": 9.825457641549597e-06,
      "loss": 1.347,
      "step": 1510
    },
    {
      "epoch": 5.016475095785441,
      "grad_norm": 0.10637885332107544,
      "learning_rate": 9.816943380161772e-06,
      "loss": 0.3859,
      "step": 1520
    },
    {
      "epoch": 5.017241379310345,
      "grad_norm": 30.229930877685547,
      "learning_rate": 9.808429118773947e-06,
      "loss": 1.9272,
      "step": 1530
    },
    {
      "epoch": 5.018007662835249,
      "grad_norm": 30.916406631469727,
      "learning_rate": 9.799914857386122e-06,
      "loss": 2.0833,
      "step": 1540
    },
    {
      "epoch": 5.018773946360153,
      "grad_norm": 0.28442785143852234,
      "learning_rate": 9.791400595998298e-06,
      "loss": 0.3269,
      "step": 1550
    },
    {
      "epoch": 5.019540229885058,
      "grad_norm": 0.15460698306560516,
      "learning_rate": 9.782886334610473e-06,
      "loss": 0.9301,
      "step": 1560
    },
    {
      "epoch": 5.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 2.011039972305298,
      "eval_runtime": 30.153,
      "eval_samples_per_second": 1.492,
      "eval_steps_per_second": 1.492,
      "step": 1566
    },
    {
      "epoch": 6.000306513409962,
      "grad_norm": 36.79002380371094,
      "learning_rate": 9.774372073222648e-06,
      "loss": 0.8878,
      "step": 1570
    },
    {
      "epoch": 6.001072796934866,
      "grad_norm": 29.985212326049805,
      "learning_rate": 9.765857811834825e-06,
      "loss": 1.1629,
      "step": 1580
    },
    {
      "epoch": 6.00183908045977,
      "grad_norm": 1.781684160232544,
      "learning_rate": 9.757343550447e-06,
      "loss": 0.8512,
      "step": 1590
    },
    {
      "epoch": 6.002605363984674,
      "grad_norm": 0.05741284787654877,
      "learning_rate": 9.748829289059175e-06,
      "loss": 0.7249,
      "step": 1600
    },
    {
      "epoch": 6.003371647509579,
      "grad_norm": 0.2985162138938904,
      "learning_rate": 9.74031502767135e-06,
      "loss": 2.3959,
      "step": 1610
    },
    {
      "epoch": 6.0041379310344825,
      "grad_norm": 0.3436413109302521,
      "learning_rate": 9.731800766283525e-06,
      "loss": 0.7211,
      "step": 1620
    },
    {
      "epoch": 6.004904214559387,
      "grad_norm": 25.59644889831543,
      "learning_rate": 9.723286504895702e-06,
      "loss": 1.9829,
      "step": 1630
    },
    {
      "epoch": 6.005670498084291,
      "grad_norm": 0.4846716821193695,
      "learning_rate": 9.714772243507877e-06,
      "loss": 0.0286,
      "step": 1640
    },
    {
      "epoch": 6.006436781609195,
      "grad_norm": 31.87965965270996,
      "learning_rate": 9.706257982120052e-06,
      "loss": 0.9693,
      "step": 1650
    },
    {
      "epoch": 6.0072030651340995,
      "grad_norm": 29.26520347595215,
      "learning_rate": 9.697743720732228e-06,
      "loss": 1.0076,
      "step": 1660
    },
    {
      "epoch": 6.007969348659004,
      "grad_norm": 24.303810119628906,
      "learning_rate": 9.689229459344403e-06,
      "loss": 1.685,
      "step": 1670
    },
    {
      "epoch": 6.008735632183908,
      "grad_norm": 2.623420476913452,
      "learning_rate": 9.680715197956578e-06,
      "loss": 0.7417,
      "step": 1680
    },
    {
      "epoch": 6.009501915708812,
      "grad_norm": 0.5099232792854309,
      "learning_rate": 9.672200936568753e-06,
      "loss": 0.6744,
      "step": 1690
    },
    {
      "epoch": 6.010268199233717,
      "grad_norm": 38.22340393066406,
      "learning_rate": 9.663686675180928e-06,
      "loss": 1.8855,
      "step": 1700
    },
    {
      "epoch": 6.011034482758621,
      "grad_norm": 0.6430128812789917,
      "learning_rate": 9.655172413793105e-06,
      "loss": 1.4972,
      "step": 1710
    },
    {
      "epoch": 6.011800766283525,
      "grad_norm": 2.3135502338409424,
      "learning_rate": 9.64665815240528e-06,
      "loss": 1.1483,
      "step": 1720
    },
    {
      "epoch": 6.012567049808429,
      "grad_norm": 1.014036774635315,
      "learning_rate": 9.638143891017455e-06,
      "loss": 0.0281,
      "step": 1730
    },
    {
      "epoch": 6.013333333333334,
      "grad_norm": 0.09851463884115219,
      "learning_rate": 9.62962962962963e-06,
      "loss": 1.6295,
      "step": 1740
    },
    {
      "epoch": 6.014099616858237,
      "grad_norm": 26.03883934020996,
      "learning_rate": 9.621115368241805e-06,
      "loss": 1.5327,
      "step": 1750
    },
    {
      "epoch": 6.014865900383142,
      "grad_norm": 0.4320446252822876,
      "learning_rate": 9.612601106853982e-06,
      "loss": 0.8857,
      "step": 1760
    },
    {
      "epoch": 6.015632183908046,
      "grad_norm": 0.10283305495977402,
      "learning_rate": 9.604086845466157e-06,
      "loss": 0.4115,
      "step": 1770
    },
    {
      "epoch": 6.01639846743295,
      "grad_norm": 0.08647419512271881,
      "learning_rate": 9.595572584078332e-06,
      "loss": 0.8938,
      "step": 1780
    },
    {
      "epoch": 6.017164750957854,
      "grad_norm": 40.355712890625,
      "learning_rate": 9.587058322690508e-06,
      "loss": 2.3969,
      "step": 1790
    },
    {
      "epoch": 6.017931034482759,
      "grad_norm": 27.914424896240234,
      "learning_rate": 9.578544061302683e-06,
      "loss": 1.4772,
      "step": 1800
    },
    {
      "epoch": 6.018697318007663,
      "grad_norm": 0.27048933506011963,
      "learning_rate": 9.570029799914858e-06,
      "loss": 0.4138,
      "step": 1810
    },
    {
      "epoch": 6.019463601532567,
      "grad_norm": 0.7599508762359619,
      "learning_rate": 9.561515538527033e-06,
      "loss": 0.7333,
      "step": 1820
    },
    {
      "epoch": 6.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 2.0192933082580566,
      "eval_runtime": 26.7858,
      "eval_samples_per_second": 1.68,
      "eval_steps_per_second": 1.68,
      "step": 1827
    },
    {
      "epoch": 7.000229885057471,
      "grad_norm": 27.52840805053711,
      "learning_rate": 9.553001277139208e-06,
      "loss": 0.9155,
      "step": 1830
    },
    {
      "epoch": 7.000996168582375,
      "grad_norm": 36.91724395751953,
      "learning_rate": 9.544487015751385e-06,
      "loss": 1.7714,
      "step": 1840
    },
    {
      "epoch": 7.00176245210728,
      "grad_norm": 0.42006102204322815,
      "learning_rate": 9.53597275436356e-06,
      "loss": 0.694,
      "step": 1850
    },
    {
      "epoch": 7.0025287356321835,
      "grad_norm": 39.159515380859375,
      "learning_rate": 9.527458492975735e-06,
      "loss": 1.2159,
      "step": 1860
    },
    {
      "epoch": 7.003295019157088,
      "grad_norm": 0.17200647294521332,
      "learning_rate": 9.518944231587912e-06,
      "loss": 0.5668,
      "step": 1870
    },
    {
      "epoch": 7.0040613026819925,
      "grad_norm": 0.05252674221992493,
      "learning_rate": 9.510429970200085e-06,
      "loss": 0.5018,
      "step": 1880
    },
    {
      "epoch": 7.004827586206897,
      "grad_norm": 26.796857833862305,
      "learning_rate": 9.501915708812262e-06,
      "loss": 1.2749,
      "step": 1890
    },
    {
      "epoch": 7.0055938697318005,
      "grad_norm": 26.167831420898438,
      "learning_rate": 9.493401447424437e-06,
      "loss": 2.0759,
      "step": 1900
    },
    {
      "epoch": 7.006360153256705,
      "grad_norm": 0.5811284184455872,
      "learning_rate": 9.484887186036612e-06,
      "loss": 1.0943,
      "step": 1910
    },
    {
      "epoch": 7.0071264367816095,
      "grad_norm": 0.28958868980407715,
      "learning_rate": 9.476372924648788e-06,
      "loss": 0.8729,
      "step": 1920
    },
    {
      "epoch": 7.007892720306513,
      "grad_norm": 0.12947584688663483,
      "learning_rate": 9.467858663260963e-06,
      "loss": 0.87,
      "step": 1930
    },
    {
      "epoch": 7.008659003831418,
      "grad_norm": 0.1990060955286026,
      "learning_rate": 9.459344401873138e-06,
      "loss": 1.6972,
      "step": 1940
    },
    {
      "epoch": 7.009425287356322,
      "grad_norm": 25.003578186035156,
      "learning_rate": 9.450830140485315e-06,
      "loss": 0.843,
      "step": 1950
    },
    {
      "epoch": 7.010191570881226,
      "grad_norm": 71.36479187011719,
      "learning_rate": 9.442315879097488e-06,
      "loss": 1.2086,
      "step": 1960
    },
    {
      "epoch": 7.01095785440613,
      "grad_norm": 24.533315658569336,
      "learning_rate": 9.433801617709665e-06,
      "loss": 1.5405,
      "step": 1970
    },
    {
      "epoch": 7.011724137931035,
      "grad_norm": 0.4695298671722412,
      "learning_rate": 9.42528735632184e-06,
      "loss": 0.0115,
      "step": 1980
    },
    {
      "epoch": 7.012490421455938,
      "grad_norm": 33.2484016418457,
      "learning_rate": 9.416773094934015e-06,
      "loss": 0.977,
      "step": 1990
    },
    {
      "epoch": 7.013256704980843,
      "grad_norm": 0.4324823319911957,
      "learning_rate": 9.408258833546192e-06,
      "loss": 1.001,
      "step": 2000
    },
    {
      "epoch": 7.014022988505747,
      "grad_norm": 0.07076765596866608,
      "learning_rate": 9.399744572158365e-06,
      "loss": 0.783,
      "step": 2010
    },
    {
      "epoch": 7.014789272030652,
      "grad_norm": 37.56367111206055,
      "learning_rate": 9.391230310770542e-06,
      "loss": 1.5398,
      "step": 2020
    },
    {
      "epoch": 7.015555555555555,
      "grad_norm": 0.30659720301628113,
      "learning_rate": 9.382716049382717e-06,
      "loss": 1.0745,
      "step": 2030
    },
    {
      "epoch": 7.01632183908046,
      "grad_norm": 0.03149334341287613,
      "learning_rate": 9.374201787994892e-06,
      "loss": 0.4215,
      "step": 2040
    },
    {
      "epoch": 7.017088122605364,
      "grad_norm": 0.07586458325386047,
      "learning_rate": 9.365687526607068e-06,
      "loss": 0.8704,
      "step": 2050
    },
    {
      "epoch": 7.017854406130268,
      "grad_norm": 0.130014106631279,
      "learning_rate": 9.357173265219243e-06,
      "loss": 2.1002,
      "step": 2060
    },
    {
      "epoch": 7.018620689655172,
      "grad_norm": 0.12581387162208557,
      "learning_rate": 9.348659003831418e-06,
      "loss": 1.3058,
      "step": 2070
    },
    {
      "epoch": 7.019386973180077,
      "grad_norm": 28.129209518432617,
      "learning_rate": 9.340144742443595e-06,
      "loss": 1.0221,
      "step": 2080
    },
    {
      "epoch": 7.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 1.7498509883880615,
      "eval_runtime": 26.6897,
      "eval_samples_per_second": 1.686,
      "eval_steps_per_second": 1.686,
      "step": 2088
    },
    {
      "epoch": 8.00015325670498,
      "grad_norm": 0.34716856479644775,
      "learning_rate": 9.331630481055768e-06,
      "loss": 1.2586,
      "step": 2090
    },
    {
      "epoch": 8.000919540229885,
      "grad_norm": 26.041139602661133,
      "learning_rate": 9.323116219667945e-06,
      "loss": 0.9765,
      "step": 2100
    },
    {
      "epoch": 8.001685823754789,
      "grad_norm": 0.11287342756986618,
      "learning_rate": 9.31460195828012e-06,
      "loss": 1.3792,
      "step": 2110
    },
    {
      "epoch": 8.002452107279694,
      "grad_norm": 0.11667508631944656,
      "learning_rate": 9.306087696892295e-06,
      "loss": 0.8092,
      "step": 2120
    },
    {
      "epoch": 8.003218390804598,
      "grad_norm": 0.697624683380127,
      "learning_rate": 9.297573435504472e-06,
      "loss": 2.0203,
      "step": 2130
    },
    {
      "epoch": 8.003984674329502,
      "grad_norm": 0.09068849682807922,
      "learning_rate": 9.289059174116647e-06,
      "loss": 0.0187,
      "step": 2140
    },
    {
      "epoch": 8.004750957854407,
      "grad_norm": 0.06734386086463928,
      "learning_rate": 9.280544912728822e-06,
      "loss": 0.4847,
      "step": 2150
    },
    {
      "epoch": 8.00551724137931,
      "grad_norm": 37.46767044067383,
      "learning_rate": 9.272030651340997e-06,
      "loss": 0.5108,
      "step": 2160
    },
    {
      "epoch": 8.006283524904214,
      "grad_norm": 23.4182186126709,
      "learning_rate": 9.263516389953172e-06,
      "loss": 3.039,
      "step": 2170
    },
    {
      "epoch": 8.00704980842912,
      "grad_norm": 2.3994476795196533,
      "learning_rate": 9.255002128565348e-06,
      "loss": 1.7098,
      "step": 2180
    },
    {
      "epoch": 8.007816091954023,
      "grad_norm": 28.87586784362793,
      "learning_rate": 9.246487867177523e-06,
      "loss": 0.7651,
      "step": 2190
    },
    {
      "epoch": 8.008582375478927,
      "grad_norm": 19.866666793823242,
      "learning_rate": 9.237973605789698e-06,
      "loss": 0.5462,
      "step": 2200
    },
    {
      "epoch": 8.009348659003832,
      "grad_norm": 0.7668360471725464,
      "learning_rate": 9.229459344401875e-06,
      "loss": 1.6841,
      "step": 2210
    },
    {
      "epoch": 8.010114942528736,
      "grad_norm": 0.15364041924476624,
      "learning_rate": 9.220945083014048e-06,
      "loss": 0.718,
      "step": 2220
    },
    {
      "epoch": 8.01088122605364,
      "grad_norm": 0.2549060583114624,
      "learning_rate": 9.212430821626225e-06,
      "loss": 0.3386,
      "step": 2230
    },
    {
      "epoch": 8.011647509578545,
      "grad_norm": 0.07364042848348618,
      "learning_rate": 9.2039165602384e-06,
      "loss": 0.7142,
      "step": 2240
    },
    {
      "epoch": 8.012413793103448,
      "grad_norm": 0.376457154750824,
      "learning_rate": 9.195402298850575e-06,
      "loss": 0.717,
      "step": 2250
    },
    {
      "epoch": 8.013180076628352,
      "grad_norm": 0.5830609202384949,
      "learning_rate": 9.186888037462752e-06,
      "loss": 1.6442,
      "step": 2260
    },
    {
      "epoch": 8.013946360153257,
      "grad_norm": 25.14147186279297,
      "learning_rate": 9.178373776074927e-06,
      "loss": 1.1029,
      "step": 2270
    },
    {
      "epoch": 8.01471264367816,
      "grad_norm": 0.0229099802672863,
      "learning_rate": 9.169859514687102e-06,
      "loss": 0.4147,
      "step": 2280
    },
    {
      "epoch": 8.015478927203064,
      "grad_norm": 100.32247161865234,
      "learning_rate": 9.161345253299277e-06,
      "loss": 0.6985,
      "step": 2290
    },
    {
      "epoch": 8.01624521072797,
      "grad_norm": 0.028025947511196136,
      "learning_rate": 9.152830991911452e-06,
      "loss": 1.7017,
      "step": 2300
    },
    {
      "epoch": 8.017011494252873,
      "grad_norm": 0.06772996485233307,
      "learning_rate": 9.144316730523628e-06,
      "loss": 1.1687,
      "step": 2310
    },
    {
      "epoch": 8.017777777777777,
      "grad_norm": 40.855262756347656,
      "learning_rate": 9.135802469135803e-06,
      "loss": 1.5519,
      "step": 2320
    },
    {
      "epoch": 8.018544061302682,
      "grad_norm": 0.045240502804517746,
      "learning_rate": 9.127288207747978e-06,
      "loss": 0.4594,
      "step": 2330
    },
    {
      "epoch": 8.019310344827586,
      "grad_norm": 0.05894453451037407,
      "learning_rate": 9.118773946360155e-06,
      "loss": 1.0956,
      "step": 2340
    },
    {
      "epoch": 8.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 1.946253776550293,
      "eval_runtime": 26.6414,
      "eval_samples_per_second": 1.689,
      "eval_steps_per_second": 1.689,
      "step": 2349
    },
    {
      "epoch": 9.00007662835249,
      "grad_norm": 0.10985762625932693,
      "learning_rate": 9.110259684972328e-06,
      "loss": 0.9917,
      "step": 2350
    },
    {
      "epoch": 9.000842911877395,
      "grad_norm": 29.338027954101562,
      "learning_rate": 9.101745423584505e-06,
      "loss": 0.4399,
      "step": 2360
    },
    {
      "epoch": 9.001609195402299,
      "grad_norm": 38.87636184692383,
      "learning_rate": 9.09323116219668e-06,
      "loss": 1.1992,
      "step": 2370
    },
    {
      "epoch": 9.002375478927203,
      "grad_norm": 28.34626007080078,
      "learning_rate": 9.084716900808855e-06,
      "loss": 0.3652,
      "step": 2380
    },
    {
      "epoch": 9.003141762452108,
      "grad_norm": 0.39937955141067505,
      "learning_rate": 9.076202639421032e-06,
      "loss": 1.1082,
      "step": 2390
    },
    {
      "epoch": 9.003908045977012,
      "grad_norm": 23.303422927856445,
      "learning_rate": 9.067688378033207e-06,
      "loss": 1.6644,
      "step": 2400
    },
    {
      "epoch": 9.004674329501915,
      "grad_norm": 17.07232666015625,
      "learning_rate": 9.059174116645382e-06,
      "loss": 1.2759,
      "step": 2410
    },
    {
      "epoch": 9.00544061302682,
      "grad_norm": 3.284682273864746,
      "learning_rate": 9.050659855257558e-06,
      "loss": 0.5649,
      "step": 2420
    },
    {
      "epoch": 9.006206896551724,
      "grad_norm": 3.3589494228363037,
      "learning_rate": 9.042145593869732e-06,
      "loss": 1.3064,
      "step": 2430
    },
    {
      "epoch": 9.006973180076628,
      "grad_norm": 54.32512664794922,
      "learning_rate": 9.033631332481908e-06,
      "loss": 1.3149,
      "step": 2440
    },
    {
      "epoch": 9.007739463601533,
      "grad_norm": 0.1900382786989212,
      "learning_rate": 9.025117071094083e-06,
      "loss": 0.7367,
      "step": 2450
    },
    {
      "epoch": 9.008505747126437,
      "grad_norm": 0.1709248274564743,
      "learning_rate": 9.016602809706258e-06,
      "loss": 1.2269,
      "step": 2460
    },
    {
      "epoch": 9.00927203065134,
      "grad_norm": 0.21548721194267273,
      "learning_rate": 9.008088548318435e-06,
      "loss": 0.3541,
      "step": 2470
    },
    {
      "epoch": 9.010038314176246,
      "grad_norm": 0.05619485676288605,
      "learning_rate": 8.999574286930608e-06,
      "loss": 0.0034,
      "step": 2480
    },
    {
      "epoch": 9.01080459770115,
      "grad_norm": 0.014529417268931866,
      "learning_rate": 8.991060025542785e-06,
      "loss": 0.0029,
      "step": 2490
    },
    {
      "epoch": 9.011570881226053,
      "grad_norm": 0.035903241485357285,
      "learning_rate": 8.98254576415496e-06,
      "loss": 1.2632,
      "step": 2500
    },
    {
      "epoch": 9.012337164750958,
      "grad_norm": 0.08663924038410187,
      "learning_rate": 8.974031502767135e-06,
      "loss": 1.1121,
      "step": 2510
    },
    {
      "epoch": 9.013103448275862,
      "grad_norm": 0.43985405564308167,
      "learning_rate": 8.965517241379312e-06,
      "loss": 2.5278,
      "step": 2520
    },
    {
      "epoch": 9.013869731800765,
      "grad_norm": 1.0710844993591309,
      "learning_rate": 8.957002979991487e-06,
      "loss": 0.3805,
      "step": 2530
    },
    {
      "epoch": 9.01463601532567,
      "grad_norm": 1.0170711278915405,
      "learning_rate": 8.948488718603662e-06,
      "loss": 2.0243,
      "step": 2540
    },
    {
      "epoch": 9.015402298850574,
      "grad_norm": 2.849256753921509,
      "learning_rate": 8.939974457215838e-06,
      "loss": 1.0862,
      "step": 2550
    },
    {
      "epoch": 9.01616858237548,
      "grad_norm": 100.30935668945312,
      "learning_rate": 8.931460195828012e-06,
      "loss": 0.6143,
      "step": 2560
    },
    {
      "epoch": 9.016934865900383,
      "grad_norm": 45.941490173339844,
      "learning_rate": 8.922945934440188e-06,
      "loss": 1.3019,
      "step": 2570
    },
    {
      "epoch": 9.017701149425287,
      "grad_norm": 36.43630599975586,
      "learning_rate": 8.914431673052363e-06,
      "loss": 0.3436,
      "step": 2580
    },
    {
      "epoch": 9.018467432950192,
      "grad_norm": 1.0312012434005737,
      "learning_rate": 8.905917411664538e-06,
      "loss": 0.7663,
      "step": 2590
    },
    {
      "epoch": 9.019233716475096,
      "grad_norm": 0.12983280420303345,
      "learning_rate": 8.897403150276715e-06,
      "loss": 2.4394,
      "step": 2600
    },
    {
      "epoch": 9.02,
      "grad_norm": 0.5582833290100098,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.6189,
      "step": 2610
    },
    {
      "epoch": 9.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 1.3986384868621826,
      "eval_runtime": 27.0614,
      "eval_samples_per_second": 1.663,
      "eval_steps_per_second": 1.663,
      "step": 2610
    },
    {
      "epoch": 10.000766283524904,
      "grad_norm": 0.3672633469104767,
      "learning_rate": 8.880374627501065e-06,
      "loss": 0.8524,
      "step": 2620
    },
    {
      "epoch": 10.001532567049809,
      "grad_norm": 0.38283008337020874,
      "learning_rate": 8.87186036611324e-06,
      "loss": 1.5116,
      "step": 2630
    },
    {
      "epoch": 10.002298850574713,
      "grad_norm": 0.15805433690547943,
      "learning_rate": 8.863346104725415e-06,
      "loss": 0.5493,
      "step": 2640
    },
    {
      "epoch": 10.003065134099616,
      "grad_norm": 108.91146087646484,
      "learning_rate": 8.854831843337592e-06,
      "loss": 1.6385,
      "step": 2650
    },
    {
      "epoch": 10.003831417624522,
      "grad_norm": 0.4613369405269623,
      "learning_rate": 8.846317581949767e-06,
      "loss": 0.8386,
      "step": 2660
    },
    {
      "epoch": 10.004597701149425,
      "grad_norm": 0.01575782708823681,
      "learning_rate": 8.837803320561942e-06,
      "loss": 0.0929,
      "step": 2670
    },
    {
      "epoch": 10.005363984674329,
      "grad_norm": 9.924572944641113,
      "learning_rate": 8.829289059174118e-06,
      "loss": 1.5252,
      "step": 2680
    },
    {
      "epoch": 10.006130268199234,
      "grad_norm": 0.6361826658248901,
      "learning_rate": 8.820774797786292e-06,
      "loss": 0.7407,
      "step": 2690
    },
    {
      "epoch": 10.006896551724138,
      "grad_norm": 0.41480955481529236,
      "learning_rate": 8.812260536398468e-06,
      "loss": 0.0319,
      "step": 2700
    },
    {
      "epoch": 10.007662835249041,
      "grad_norm": 0.533891499042511,
      "learning_rate": 8.803746275010643e-06,
      "loss": 2.1185,
      "step": 2710
    },
    {
      "epoch": 10.008429118773947,
      "grad_norm": 0.5630529522895813,
      "learning_rate": 8.795232013622818e-06,
      "loss": 1.2424,
      "step": 2720
    },
    {
      "epoch": 10.00919540229885,
      "grad_norm": 0.18709838390350342,
      "learning_rate": 8.786717752234995e-06,
      "loss": 0.004,
      "step": 2730
    },
    {
      "epoch": 10.009961685823756,
      "grad_norm": 0.06530976295471191,
      "learning_rate": 8.77820349084717e-06,
      "loss": 1.1749,
      "step": 2740
    },
    {
      "epoch": 10.01072796934866,
      "grad_norm": 29.11233901977539,
      "learning_rate": 8.769689229459345e-06,
      "loss": 1.6183,
      "step": 2750
    },
    {
      "epoch": 10.011494252873563,
      "grad_norm": 0.54607093334198,
      "learning_rate": 8.76117496807152e-06,
      "loss": 1.2571,
      "step": 2760
    },
    {
      "epoch": 10.012260536398468,
      "grad_norm": 1.368139624595642,
      "learning_rate": 8.752660706683695e-06,
      "loss": 0.3516,
      "step": 2770
    },
    {
      "epoch": 10.013026819923372,
      "grad_norm": 0.1056910902261734,
      "learning_rate": 8.744146445295872e-06,
      "loss": 1.1487,
      "step": 2780
    },
    {
      "epoch": 10.013793103448275,
      "grad_norm": 3.486266613006592,
      "learning_rate": 8.735632183908047e-06,
      "loss": 0.3582,
      "step": 2790
    },
    {
      "epoch": 10.01455938697318,
      "grad_norm": 60.05018615722656,
      "learning_rate": 8.727117922520222e-06,
      "loss": 1.9951,
      "step": 2800
    },
    {
      "epoch": 10.015325670498084,
      "grad_norm": 1.5321989059448242,
      "learning_rate": 8.718603661132398e-06,
      "loss": 1.2217,
      "step": 2810
    },
    {
      "epoch": 10.016091954022988,
      "grad_norm": 44.85920715332031,
      "learning_rate": 8.710089399744572e-06,
      "loss": 1.9508,
      "step": 2820
    },
    {
      "epoch": 10.016858237547893,
      "grad_norm": 33.874900817871094,
      "learning_rate": 8.701575138356748e-06,
      "loss": 0.952,
      "step": 2830
    },
    {
      "epoch": 10.017624521072797,
      "grad_norm": 0.07882528007030487,
      "learning_rate": 8.693060876968923e-06,
      "loss": 0.0956,
      "step": 2840
    },
    {
      "epoch": 10.0183908045977,
      "grad_norm": 0.09010494500398636,
      "learning_rate": 8.684546615581098e-06,
      "loss": 0.6502,
      "step": 2850
    },
    {
      "epoch": 10.019157088122606,
      "grad_norm": 0.1537899374961853,
      "learning_rate": 8.676032354193275e-06,
      "loss": 1.251,
      "step": 2860
    },
    {
      "epoch": 10.01992337164751,
      "grad_norm": 0.3124453127384186,
      "learning_rate": 8.66751809280545e-06,
      "loss": 1.5383,
      "step": 2870
    },
    {
      "epoch": 10.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 1.7563549280166626,
      "eval_runtime": 26.6638,
      "eval_samples_per_second": 1.688,
      "eval_steps_per_second": 1.688,
      "step": 2871
    },
    {
      "epoch": 11.000689655172414,
      "grad_norm": 0.721426784992218,
      "learning_rate": 8.659003831417625e-06,
      "loss": 0.6554,
      "step": 2880
    },
    {
      "epoch": 11.001455938697317,
      "grad_norm": 1.0374860763549805,
      "learning_rate": 8.650489570029802e-06,
      "loss": 1.697,
      "step": 2890
    },
    {
      "epoch": 11.002222222222223,
      "grad_norm": 2.169524908065796,
      "learning_rate": 8.641975308641975e-06,
      "loss": 0.315,
      "step": 2900
    },
    {
      "epoch": 11.002988505747126,
      "grad_norm": 75.38426208496094,
      "learning_rate": 8.633461047254152e-06,
      "loss": 1.371,
      "step": 2910
    },
    {
      "epoch": 11.00375478927203,
      "grad_norm": 0.36113521456718445,
      "learning_rate": 8.624946785866327e-06,
      "loss": 0.4666,
      "step": 2920
    },
    {
      "epoch": 11.004521072796935,
      "grad_norm": 43.9693489074707,
      "learning_rate": 8.616432524478502e-06,
      "loss": 1.4018,
      "step": 2930
    },
    {
      "epoch": 11.005287356321839,
      "grad_norm": 47.97806930541992,
      "learning_rate": 8.607918263090678e-06,
      "loss": 1.2183,
      "step": 2940
    },
    {
      "epoch": 11.006053639846744,
      "grad_norm": 40.872894287109375,
      "learning_rate": 8.599404001702853e-06,
      "loss": 1.1801,
      "step": 2950
    },
    {
      "epoch": 11.006819923371648,
      "grad_norm": 1.335880160331726,
      "learning_rate": 8.590889740315028e-06,
      "loss": 0.7524,
      "step": 2960
    },
    {
      "epoch": 11.007586206896551,
      "grad_norm": 0.07456907629966736,
      "learning_rate": 8.582375478927203e-06,
      "loss": 0.6325,
      "step": 2970
    },
    {
      "epoch": 11.008352490421457,
      "grad_norm": 9.913630485534668,
      "learning_rate": 8.573861217539378e-06,
      "loss": 0.7153,
      "step": 2980
    },
    {
      "epoch": 11.00911877394636,
      "grad_norm": 45.1269645690918,
      "learning_rate": 8.565346956151555e-06,
      "loss": 0.9315,
      "step": 2990
    },
    {
      "epoch": 11.009885057471264,
      "grad_norm": 0.23690205812454224,
      "learning_rate": 8.55683269476373e-06,
      "loss": 0.6738,
      "step": 3000
    },
    {
      "epoch": 11.01065134099617,
      "grad_norm": 204.60675048828125,
      "learning_rate": 8.548318433375905e-06,
      "loss": 0.3024,
      "step": 3010
    },
    {
      "epoch": 11.011417624521073,
      "grad_norm": 0.018819095566868782,
      "learning_rate": 8.539804171988082e-06,
      "loss": 0.0007,
      "step": 3020
    },
    {
      "epoch": 11.012183908045976,
      "grad_norm": 89.70406341552734,
      "learning_rate": 8.531289910600255e-06,
      "loss": 1.2629,
      "step": 3030
    },
    {
      "epoch": 11.012950191570882,
      "grad_norm": 0.04545266553759575,
      "learning_rate": 8.522775649212432e-06,
      "loss": 1.711,
      "step": 3040
    },
    {
      "epoch": 11.013716475095785,
      "grad_norm": 79.39248657226562,
      "learning_rate": 8.514261387824607e-06,
      "loss": 1.7525,
      "step": 3050
    },
    {
      "epoch": 11.014482758620689,
      "grad_norm": 35.357215881347656,
      "learning_rate": 8.505747126436782e-06,
      "loss": 0.399,
      "step": 3060
    },
    {
      "epoch": 11.015249042145594,
      "grad_norm": 0.34647271037101746,
      "learning_rate": 8.497232865048958e-06,
      "loss": 0.7542,
      "step": 3070
    },
    {
      "epoch": 11.016015325670498,
      "grad_norm": 2.148348093032837,
      "learning_rate": 8.488718603661133e-06,
      "loss": 0.284,
      "step": 3080
    },
    {
      "epoch": 11.016781609195402,
      "grad_norm": 0.015207786113023758,
      "learning_rate": 8.480204342273308e-06,
      "loss": 0.1789,
      "step": 3090
    },
    {
      "epoch": 11.017547892720307,
      "grad_norm": 0.06861928105354309,
      "learning_rate": 8.471690080885483e-06,
      "loss": 1.5162,
      "step": 3100
    },
    {
      "epoch": 11.01831417624521,
      "grad_norm": 40.698604583740234,
      "learning_rate": 8.463175819497658e-06,
      "loss": 1.544,
      "step": 3110
    },
    {
      "epoch": 11.019080459770114,
      "grad_norm": 0.03721854090690613,
      "learning_rate": 8.454661558109835e-06,
      "loss": 0.0701,
      "step": 3120
    },
    {
      "epoch": 11.01984674329502,
      "grad_norm": 47.97694778442383,
      "learning_rate": 8.44614729672201e-06,
      "loss": 1.9417,
      "step": 3130
    },
    {
      "epoch": 11.02,
      "eval_accuracy": 0.5555555555555556,
      "eval_loss": 1.7499446868896484,
      "eval_runtime": 27.817,
      "eval_samples_per_second": 1.618,
      "eval_steps_per_second": 1.618,
      "step": 3132
    },
    {
      "epoch": 12.000613026819924,
      "grad_norm": 0.2790820300579071,
      "learning_rate": 8.437633035334185e-06,
      "loss": 0.4759,
      "step": 3140
    },
    {
      "epoch": 12.001379310344827,
      "grad_norm": 0.016759010031819344,
      "learning_rate": 8.429118773946362e-06,
      "loss": 0.8737,
      "step": 3150
    },
    {
      "epoch": 12.002145593869733,
      "grad_norm": 55.927947998046875,
      "learning_rate": 8.420604512558537e-06,
      "loss": 1.3787,
      "step": 3160
    },
    {
      "epoch": 12.002911877394636,
      "grad_norm": 53.60752487182617,
      "learning_rate": 8.412090251170712e-06,
      "loss": 0.4737,
      "step": 3170
    },
    {
      "epoch": 12.00367816091954,
      "grad_norm": 12.805241584777832,
      "learning_rate": 8.403575989782887e-06,
      "loss": 0.3458,
      "step": 3180
    },
    {
      "epoch": 12.004444444444445,
      "grad_norm": 0.01970883645117283,
      "learning_rate": 8.395061728395062e-06,
      "loss": 1.1822,
      "step": 3190
    },
    {
      "epoch": 12.005210727969349,
      "grad_norm": 0.06797318160533905,
      "learning_rate": 8.386547467007238e-06,
      "loss": 0.3603,
      "step": 3200
    },
    {
      "epoch": 12.005977011494252,
      "grad_norm": 1.851926326751709,
      "learning_rate": 8.378033205619413e-06,
      "loss": 1.8643,
      "step": 3210
    },
    {
      "epoch": 12.006743295019158,
      "grad_norm": 0.03403859958052635,
      "learning_rate": 8.369518944231588e-06,
      "loss": 0.7065,
      "step": 3220
    },
    {
      "epoch": 12.007509578544061,
      "grad_norm": 0.03215191140770912,
      "learning_rate": 8.361004682843763e-06,
      "loss": 0.4997,
      "step": 3230
    },
    {
      "epoch": 12.008275862068965,
      "grad_norm": 78.01289367675781,
      "learning_rate": 8.35249042145594e-06,
      "loss": 1.7423,
      "step": 3240
    },
    {
      "epoch": 12.00904214559387,
      "grad_norm": 44.349769592285156,
      "learning_rate": 8.343976160068115e-06,
      "loss": 1.1782,
      "step": 3250
    },
    {
      "epoch": 12.009808429118774,
      "grad_norm": 1.6300876140594482,
      "learning_rate": 8.33546189868029e-06,
      "loss": 0.9793,
      "step": 3260
    },
    {
      "epoch": 12.010574712643677,
      "grad_norm": 9.07623291015625,
      "learning_rate": 8.326947637292465e-06,
      "loss": 0.6355,
      "step": 3270
    },
    {
      "epoch": 12.011340996168583,
      "grad_norm": 0.07964489609003067,
      "learning_rate": 8.318433375904642e-06,
      "loss": 1.2985,
      "step": 3280
    },
    {
      "epoch": 12.012107279693486,
      "grad_norm": 0.778725266456604,
      "learning_rate": 8.309919114516817e-06,
      "loss": 0.563,
      "step": 3290
    },
    {
      "epoch": 12.01287356321839,
      "grad_norm": 0.24556642770767212,
      "learning_rate": 8.301404853128992e-06,
      "loss": 1.2655,
      "step": 3300
    },
    {
      "epoch": 12.013639846743295,
      "grad_norm": 1.3439182043075562,
      "learning_rate": 8.292890591741167e-06,
      "loss": 1.0096,
      "step": 3310
    },
    {
      "epoch": 12.014406130268199,
      "grad_norm": 38.030879974365234,
      "learning_rate": 8.284376330353342e-06,
      "loss": 1.6456,
      "step": 3320
    },
    {
      "epoch": 12.015172413793103,
      "grad_norm": 0.02573794312775135,
      "learning_rate": 8.275862068965518e-06,
      "loss": 0.7449,
      "step": 3330
    },
    {
      "epoch": 12.015938697318008,
      "grad_norm": 0.04563940688967705,
      "learning_rate": 8.267347807577693e-06,
      "loss": 0.2841,
      "step": 3340
    },
    {
      "epoch": 12.016704980842912,
      "grad_norm": 21.8176326751709,
      "learning_rate": 8.258833546189868e-06,
      "loss": 1.7554,
      "step": 3350
    },
    {
      "epoch": 12.017471264367815,
      "grad_norm": 1.8843274116516113,
      "learning_rate": 8.250319284802043e-06,
      "loss": 0.5507,
      "step": 3360
    },
    {
      "epoch": 12.01823754789272,
      "grad_norm": 0.6266366243362427,
      "learning_rate": 8.24180502341422e-06,
      "loss": 0.5479,
      "step": 3370
    },
    {
      "epoch": 12.019003831417624,
      "grad_norm": 7.612668514251709,
      "learning_rate": 8.233290762026395e-06,
      "loss": 0.1982,
      "step": 3380
    },
    {
      "epoch": 12.01977011494253,
      "grad_norm": 32.22077178955078,
      "learning_rate": 8.22477650063857e-06,
      "loss": 0.4756,
      "step": 3390
    },
    {
      "epoch": 12.02,
      "eval_accuracy": 0.6888888888888889,
      "eval_loss": 1.3449900150299072,
      "eval_runtime": 26.9892,
      "eval_samples_per_second": 1.667,
      "eval_steps_per_second": 1.667,
      "step": 3393
    },
    {
      "epoch": 13.000536398467434,
      "grad_norm": 0.018872743472456932,
      "learning_rate": 8.216262239250745e-06,
      "loss": 0.0474,
      "step": 3400
    },
    {
      "epoch": 13.001302681992337,
      "grad_norm": 0.013307456858456135,
      "learning_rate": 8.207747977862922e-06,
      "loss": 0.6616,
      "step": 3410
    },
    {
      "epoch": 13.00206896551724,
      "grad_norm": 0.060726575553417206,
      "learning_rate": 8.199233716475097e-06,
      "loss": 1.2224,
      "step": 3420
    },
    {
      "epoch": 13.002835249042146,
      "grad_norm": 111.4458236694336,
      "learning_rate": 8.190719455087272e-06,
      "loss": 0.1991,
      "step": 3430
    },
    {
      "epoch": 13.00360153256705,
      "grad_norm": 0.12966758012771606,
      "learning_rate": 8.182205193699447e-06,
      "loss": 0.9914,
      "step": 3440
    },
    {
      "epoch": 13.004367816091953,
      "grad_norm": 0.07849890738725662,
      "learning_rate": 8.173690932311623e-06,
      "loss": 0.7065,
      "step": 3450
    },
    {
      "epoch": 13.005134099616859,
      "grad_norm": 28.96215057373047,
      "learning_rate": 8.165176670923798e-06,
      "loss": 1.3631,
      "step": 3460
    },
    {
      "epoch": 13.005900383141762,
      "grad_norm": 0.02223191224038601,
      "learning_rate": 8.156662409535973e-06,
      "loss": 0.2472,
      "step": 3470
    },
    {
      "epoch": 13.006666666666666,
      "grad_norm": 9.828189849853516,
      "learning_rate": 8.148148148148148e-06,
      "loss": 1.3738,
      "step": 3480
    },
    {
      "epoch": 13.007432950191571,
      "grad_norm": 0.12735942006111145,
      "learning_rate": 8.139633886760325e-06,
      "loss": 0.0051,
      "step": 3490
    },
    {
      "epoch": 13.008199233716475,
      "grad_norm": 1.1451942920684814,
      "learning_rate": 8.1311196253725e-06,
      "loss": 3.1629,
      "step": 3500
    },
    {
      "epoch": 13.008965517241379,
      "grad_norm": 0.37017548084259033,
      "learning_rate": 8.122605363984675e-06,
|
"loss": 1.0499, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 13.009731800766284, |
|
"grad_norm": 113.45726776123047, |
|
"learning_rate": 8.11409110259685e-06, |
|
"loss": 0.2917, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 13.010498084291187, |
|
"grad_norm": 64.98446655273438, |
|
"learning_rate": 8.105576841209027e-06, |
|
"loss": 0.7388, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 13.011264367816091, |
|
"grad_norm": 0.021769991144537926, |
|
"learning_rate": 8.097062579821202e-06, |
|
"loss": 0.613, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 13.012030651340996, |
|
"grad_norm": 0.023841824382543564, |
|
"learning_rate": 8.088548318433377e-06, |
|
"loss": 0.8421, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 13.0127969348659, |
|
"grad_norm": 22.392879486083984, |
|
"learning_rate": 8.080034057045552e-06, |
|
"loss": 0.0696, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 13.013563218390805, |
|
"grad_norm": 0.0371825285255909, |
|
"learning_rate": 8.071519795657727e-06, |
|
"loss": 2.2118, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 13.014329501915709, |
|
"grad_norm": 0.025469820946455002, |
|
"learning_rate": 8.063005534269903e-06, |
|
"loss": 1.1406, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 13.015095785440613, |
|
"grad_norm": 1.266277551651001, |
|
"learning_rate": 8.054491272882078e-06, |
|
"loss": 0.0585, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 13.015862068965518, |
|
"grad_norm": 0.01860865391790867, |
|
"learning_rate": 8.045977011494253e-06, |
|
"loss": 1.0894, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 13.016628352490422, |
|
"grad_norm": 0.022707907482981682, |
|
"learning_rate": 8.037462750106428e-06, |
|
"loss": 0.4101, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 13.017394636015325, |
|
"grad_norm": 0.6282780170440674, |
|
"learning_rate": 8.028948488718605e-06, |
|
"loss": 0.9036, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 13.01816091954023, |
|
"grad_norm": 0.9271881580352783, |
|
"learning_rate": 8.02043422733078e-06, |
|
"loss": 0.1855, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 13.018927203065134, |
|
"grad_norm": 0.6670782566070557, |
|
"learning_rate": 8.011919965942955e-06, |
|
"loss": 1.0005, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 13.019693486590038, |
|
"grad_norm": 0.062439046800136566, |
|
"learning_rate": 8.00340570455513e-06, |
|
"loss": 0.3515, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 1.4140169620513916, |
|
"eval_runtime": 27.5413, |
|
"eval_samples_per_second": 1.634, |
|
"eval_steps_per_second": 1.634, |
|
"step": 3654 |
|
}, |
|
{ |
|
"epoch": 14.000459770114942, |
|
"grad_norm": 115.58851623535156, |
|
"learning_rate": 7.994891443167307e-06, |
|
"loss": 1.3132, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 14.001226053639847, |
|
"grad_norm": 0.29224392771720886, |
|
"learning_rate": 7.986377181779482e-06, |
|
"loss": 0.1474, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 14.00199233716475, |
|
"grad_norm": 0.74747234582901, |
|
"learning_rate": 7.977862920391657e-06, |
|
"loss": 0.6812, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 14.002758620689654, |
|
"grad_norm": 0.01658850722014904, |
|
"learning_rate": 7.969348659003832e-06, |
|
"loss": 0.2522, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 14.00352490421456, |
|
"grad_norm": 142.4066925048828, |
|
"learning_rate": 7.960834397616007e-06, |
|
"loss": 1.5431, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 14.004291187739463, |
|
"grad_norm": 0.0948290303349495, |
|
"learning_rate": 7.952320136228183e-06, |
|
"loss": 0.6671, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 14.005057471264367, |
|
"grad_norm": 5.533627033233643, |
|
"learning_rate": 7.943805874840358e-06, |
|
"loss": 0.0125, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 14.005823754789272, |
|
"grad_norm": 0.027199843898415565, |
|
"learning_rate": 7.935291613452533e-06, |
|
"loss": 1.1109, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 14.006590038314176, |
|
"grad_norm": 2.1814827919006348, |
|
"learning_rate": 7.92677735206471e-06, |
|
"loss": 0.0838, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 14.007356321839081, |
|
"grad_norm": 206.9722442626953, |
|
"learning_rate": 7.918263090676885e-06, |
|
"loss": 0.3985, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 14.008122605363985, |
|
"grad_norm": 0.017775095999240875, |
|
"learning_rate": 7.90974882928906e-06, |
|
"loss": 0.4, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 14.008888888888889, |
|
"grad_norm": 0.039329394698143005, |
|
"learning_rate": 7.901234567901235e-06, |
|
"loss": 0.0127, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 14.009655172413794, |
|
"grad_norm": 16.223806381225586, |
|
"learning_rate": 7.89272030651341e-06, |
|
"loss": 0.6058, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 14.010421455938697, |
|
"grad_norm": 0.0765138790011406, |
|
"learning_rate": 7.884206045125587e-06, |
|
"loss": 0.6501, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 14.011187739463601, |
|
"grad_norm": 4.709262847900391, |
|
"learning_rate": 7.875691783737762e-06, |
|
"loss": 0.0092, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 14.011954022988506, |
|
"grad_norm": 0.19915767014026642, |
|
"learning_rate": 7.867177522349937e-06, |
|
"loss": 0.666, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 14.01272030651341, |
|
"grad_norm": 14.52304744720459, |
|
"learning_rate": 7.858663260962112e-06, |
|
"loss": 1.8816, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 14.013486590038314, |
|
"grad_norm": 0.02702498994767666, |
|
"learning_rate": 7.850148999574287e-06, |
|
"loss": 1.6484, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 14.014252873563219, |
|
"grad_norm": 0.940314531326294, |
|
"learning_rate": 7.841634738186463e-06, |
|
"loss": 0.8623, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 14.015019157088123, |
|
"grad_norm": 0.03782011941075325, |
|
"learning_rate": 7.833120476798638e-06, |
|
"loss": 1.1899, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 14.015785440613026, |
|
"grad_norm": 0.013969366438686848, |
|
"learning_rate": 7.824606215410813e-06, |
|
"loss": 0.0047, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 14.016551724137932, |
|
"grad_norm": 0.02114093117415905, |
|
"learning_rate": 7.81609195402299e-06, |
|
"loss": 0.001, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 14.017318007662835, |
|
"grad_norm": 0.009078224189579487, |
|
"learning_rate": 7.807577692635165e-06, |
|
"loss": 1.5564, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 14.018084291187739, |
|
"grad_norm": 0.11376553773880005, |
|
"learning_rate": 7.79906343124734e-06, |
|
"loss": 2.4157, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 14.018850574712644, |
|
"grad_norm": 138.33175659179688, |
|
"learning_rate": 7.790549169859515e-06, |
|
"loss": 1.0855, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 14.019616858237548, |
|
"grad_norm": 0.2779475748538971, |
|
"learning_rate": 7.78203490847169e-06, |
|
"loss": 0.0096, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 1.8713815212249756, |
|
"eval_runtime": 27.7984, |
|
"eval_samples_per_second": 1.619, |
|
"eval_steps_per_second": 1.619, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 15.000383141762452, |
|
"grad_norm": 0.030268236994743347, |
|
"learning_rate": 7.773520647083867e-06, |
|
"loss": 1.623, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 15.001149425287357, |
|
"grad_norm": 0.029209017753601074, |
|
"learning_rate": 7.765006385696042e-06, |
|
"loss": 0.0096, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 15.00191570881226, |
|
"grad_norm": 0.04086444154381752, |
|
"learning_rate": 7.756492124308217e-06, |
|
"loss": 1.1659, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 15.002681992337164, |
|
"grad_norm": 124.86769104003906, |
|
"learning_rate": 7.747977862920393e-06, |
|
"loss": 0.3305, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 15.00344827586207, |
|
"grad_norm": 0.6207280158996582, |
|
"learning_rate": 7.739463601532567e-06, |
|
"loss": 0.7113, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 15.004214559386973, |
|
"grad_norm": 0.5740492939949036, |
|
"learning_rate": 7.730949340144743e-06, |
|
"loss": 1.1287, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 15.004980842911877, |
|
"grad_norm": 0.050564948469400406, |
|
"learning_rate": 7.722435078756918e-06, |
|
"loss": 0.774, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 15.005747126436782, |
|
"grad_norm": 0.0978650152683258, |
|
"learning_rate": 7.713920817369093e-06, |
|
"loss": 0.8024, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 15.006513409961686, |
|
"grad_norm": 0.07209397852420807, |
|
"learning_rate": 7.70540655598127e-06, |
|
"loss": 0.5673, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 15.00727969348659, |
|
"grad_norm": 9.260441780090332, |
|
"learning_rate": 7.696892294593445e-06, |
|
"loss": 0.0554, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 15.008045977011495, |
|
"grad_norm": 0.03125443309545517, |
|
"learning_rate": 7.68837803320562e-06, |
|
"loss": 0.1426, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 15.008812260536398, |
|
"grad_norm": 0.012001493945717812, |
|
"learning_rate": 7.679863771817797e-06, |
|
"loss": 1.7252, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 15.009578544061302, |
|
"grad_norm": 102.81620025634766, |
|
"learning_rate": 7.67134951042997e-06, |
|
"loss": 1.7485, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 15.010344827586207, |
|
"grad_norm": 5.039968967437744, |
|
"learning_rate": 7.662835249042147e-06, |
|
"loss": 0.9835, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 15.011111111111111, |
|
"grad_norm": 1.7594513893127441, |
|
"learning_rate": 7.654320987654322e-06, |
|
"loss": 0.5558, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 15.011877394636015, |
|
"grad_norm": 0.3397904634475708, |
|
"learning_rate": 7.645806726266497e-06, |
|
"loss": 0.4311, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 15.01264367816092, |
|
"grad_norm": 0.13894565403461456, |
|
"learning_rate": 7.637292464878673e-06, |
|
"loss": 1.8801, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 15.013409961685824, |
|
"grad_norm": 0.025840580463409424, |
|
"learning_rate": 7.6287782034908475e-06, |
|
"loss": 0.4805, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 15.014176245210727, |
|
"grad_norm": 0.012139157392084599, |
|
"learning_rate": 7.620263942103023e-06, |
|
"loss": 1.1086, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 15.014942528735633, |
|
"grad_norm": 0.14552293717861176, |
|
"learning_rate": 7.611749680715198e-06, |
|
"loss": 0.6004, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 15.015708812260536, |
|
"grad_norm": 0.01706760935485363, |
|
"learning_rate": 7.603235419327374e-06, |
|
"loss": 0.2954, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 15.01647509578544, |
|
"grad_norm": 0.4709959924221039, |
|
"learning_rate": 7.59472115793955e-06, |
|
"loss": 1.1213, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 15.017241379310345, |
|
"grad_norm": 81.85173034667969, |
|
"learning_rate": 7.586206896551724e-06, |
|
"loss": 0.4273, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 15.018007662835249, |
|
"grad_norm": 0.030423123389482498, |
|
"learning_rate": 7.5776926351639e-06, |
|
"loss": 0.7009, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 15.018773946360152, |
|
"grad_norm": 0.08157113194465637, |
|
"learning_rate": 7.569178373776076e-06, |
|
"loss": 1.9636, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 15.019540229885058, |
|
"grad_norm": 127.59949493408203, |
|
"learning_rate": 7.560664112388251e-06, |
|
"loss": 0.7724, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 15.02, |
|
"eval_accuracy": 0.6888888888888889, |
|
"eval_loss": 1.6674045324325562, |
|
"eval_runtime": 26.6453, |
|
"eval_samples_per_second": 1.689, |
|
"eval_steps_per_second": 1.689, |
|
"step": 4176 |
|
}, |
|
{ |
|
"epoch": 16.00030651340996, |
|
"grad_norm": 0.5671827793121338, |
|
"learning_rate": 7.552149851000427e-06, |
|
"loss": 0.1968, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 16.001072796934867, |
|
"grad_norm": 25.622739791870117, |
|
"learning_rate": 7.543635589612601e-06, |
|
"loss": 1.1645, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 16.00183908045977, |
|
"grad_norm": 0.044543080031871796, |
|
"learning_rate": 7.535121328224777e-06, |
|
"loss": 2.1519, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 16.002605363984674, |
|
"grad_norm": 0.4847821295261383, |
|
"learning_rate": 7.5266070668369525e-06, |
|
"loss": 0.0086, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 16.003371647509578, |
|
"grad_norm": 0.43561309576034546, |
|
"learning_rate": 7.5180928054491275e-06, |
|
"loss": 0.4111, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 16.00413793103448, |
|
"grad_norm": 0.014339642599225044, |
|
"learning_rate": 7.509578544061303e-06, |
|
"loss": 0.6705, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 16.00490421455939, |
|
"grad_norm": 0.05114021524786949, |
|
"learning_rate": 7.501064282673479e-06, |
|
"loss": 0.0195, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 16.005670498084292, |
|
"grad_norm": 0.026472069323062897, |
|
"learning_rate": 7.492550021285654e-06, |
|
"loss": 0.7473, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 16.006436781609196, |
|
"grad_norm": 0.0083317244425416, |
|
"learning_rate": 7.48403575989783e-06, |
|
"loss": 0.1636, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 16.0072030651341, |
|
"grad_norm": 7.841292381286621, |
|
"learning_rate": 7.475521498510004e-06, |
|
"loss": 1.2166, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 16.007969348659003, |
|
"grad_norm": 0.09563468396663666, |
|
"learning_rate": 7.46700723712218e-06, |
|
"loss": 1.3421, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 16.008735632183907, |
|
"grad_norm": 0.2711217403411865, |
|
"learning_rate": 7.458492975734356e-06, |
|
"loss": 0.381, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 16.009501915708814, |
|
"grad_norm": 93.96241760253906, |
|
"learning_rate": 7.449978714346531e-06, |
|
"loss": 1.5959, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 16.010268199233717, |
|
"grad_norm": 2.4429824352264404, |
|
"learning_rate": 7.441464452958707e-06, |
|
"loss": 2.0461, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 16.01103448275862, |
|
"grad_norm": 0.048106927424669266, |
|
"learning_rate": 7.4329501915708825e-06, |
|
"loss": 0.7554, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 16.011800766283525, |
|
"grad_norm": 0.19793254137039185, |
|
"learning_rate": 7.4244359301830575e-06, |
|
"loss": 0.0062, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 16.01256704980843, |
|
"grad_norm": 0.22945082187652588, |
|
"learning_rate": 7.4159216687952325e-06, |
|
"loss": 1.1866, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 16.013333333333332, |
|
"grad_norm": 177.23330688476562, |
|
"learning_rate": 7.4074074074074075e-06, |
|
"loss": 0.8337, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 16.01409961685824, |
|
"grad_norm": 0.019397037103772163, |
|
"learning_rate": 7.398893146019583e-06, |
|
"loss": 1.3731, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 16.014865900383143, |
|
"grad_norm": 0.008158219046890736, |
|
"learning_rate": 7.390378884631759e-06, |
|
"loss": 0.4793, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 16.015632183908046, |
|
"grad_norm": 0.09013034403324127, |
|
"learning_rate": 7.381864623243934e-06, |
|
"loss": 1.406, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 16.01639846743295, |
|
"grad_norm": 328.1301574707031, |
|
"learning_rate": 7.37335036185611e-06, |
|
"loss": 0.7331, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 16.017164750957853, |
|
"grad_norm": 0.03495466709136963, |
|
"learning_rate": 7.364836100468284e-06, |
|
"loss": 0.4612, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 16.017931034482757, |
|
"grad_norm": 70.02825927734375, |
|
"learning_rate": 7.35632183908046e-06, |
|
"loss": 1.9104, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 16.018697318007664, |
|
"grad_norm": 0.9442333579063416, |
|
"learning_rate": 7.347807577692636e-06, |
|
"loss": 0.3978, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 16.019463601532568, |
|
"grad_norm": 0.15351228415966034, |
|
"learning_rate": 7.339293316304811e-06, |
|
"loss": 0.549, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 16.02, |
|
"eval_accuracy": 0.6, |
|
"eval_loss": 2.0383543968200684, |
|
"eval_runtime": 26.7932, |
|
"eval_samples_per_second": 1.68, |
|
"eval_steps_per_second": 1.68, |
|
"step": 4437 |
|
}, |
|
{ |
|
"epoch": 17.000229885057472, |
|
"grad_norm": 0.03269746154546738, |
|
"learning_rate": 7.330779054916987e-06, |
|
"loss": 0.2738, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 17.000996168582375, |
|
"grad_norm": 126.98841094970703, |
|
"learning_rate": 7.3222647935291625e-06, |
|
"loss": 2.7202, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 17.00176245210728, |
|
"grad_norm": 0.024574726819992065, |
|
"learning_rate": 7.3137505321413375e-06, |
|
"loss": 0.8718, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 17.002528735632183, |
|
"grad_norm": 0.15055644512176514, |
|
"learning_rate": 7.305236270753513e-06, |
|
"loss": 0.5053, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 17.00329501915709, |
|
"grad_norm": 0.02868310734629631, |
|
"learning_rate": 7.2967220093656875e-06, |
|
"loss": 0.0967, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 17.004061302681993, |
|
"grad_norm": 93.8185806274414, |
|
"learning_rate": 7.288207747977863e-06, |
|
"loss": 0.8645, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 17.004827586206897, |
|
"grad_norm": 0.014004296623170376, |
|
"learning_rate": 7.279693486590039e-06, |
|
"loss": 0.9862, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 17.0055938697318, |
|
"grad_norm": 1.3647541999816895, |
|
"learning_rate": 7.271179225202214e-06, |
|
"loss": 0.0045, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 17.006360153256704, |
|
"grad_norm": 0.07483195513486862, |
|
"learning_rate": 7.26266496381439e-06, |
|
"loss": 1.1039, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 17.007126436781608, |
|
"grad_norm": 28.133129119873047, |
|
"learning_rate": 7.254150702426566e-06, |
|
"loss": 0.5409, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 17.007892720306515, |
|
"grad_norm": 1.213812232017517, |
|
"learning_rate": 7.24563644103874e-06, |
|
"loss": 1.9365, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 17.00865900383142, |
|
"grad_norm": 1.4416884183883667, |
|
"learning_rate": 7.237122179650916e-06, |
|
"loss": 0.5428, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 17.009425287356322, |
|
"grad_norm": 57.450740814208984, |
|
"learning_rate": 7.228607918263091e-06, |
|
"loss": 0.8646, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 17.010191570881226, |
|
"grad_norm": 0.01538799051195383, |
|
"learning_rate": 7.220093656875267e-06, |
|
"loss": 0.2005, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 17.01095785440613, |
|
"grad_norm": 0.14328859746456146, |
|
"learning_rate": 7.2115793954874425e-06, |
|
"loss": 0.0598, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 17.011724137931033, |
|
"grad_norm": 0.009004896506667137, |
|
"learning_rate": 7.2030651340996175e-06, |
|
"loss": 0.0007, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 17.01249042145594, |
|
"grad_norm": 0.006117129232734442, |
|
"learning_rate": 7.194550872711793e-06, |
|
"loss": 0.9453, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 17.013256704980844, |
|
"grad_norm": 0.19872944056987762, |
|
"learning_rate": 7.1860366113239675e-06, |
|
"loss": 1.8477, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 17.014022988505747, |
|
"grad_norm": 52.965049743652344, |
|
"learning_rate": 7.177522349936143e-06, |
|
"loss": 1.0744, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 17.01478927203065, |
|
"grad_norm": 0.017271993681788445, |
|
"learning_rate": 7.169008088548319e-06, |
|
"loss": 0.9541, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 17.015555555555554, |
|
"grad_norm": 0.2654757499694824, |
|
"learning_rate": 7.160493827160494e-06, |
|
"loss": 0.6339, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 17.016321839080458, |
|
"grad_norm": 0.010725772939622402, |
|
"learning_rate": 7.15197956577267e-06, |
|
"loss": 0.8491, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 17.017088122605365, |
|
"grad_norm": 0.06713994592428207, |
|
"learning_rate": 7.143465304384846e-06, |
|
"loss": 0.6543, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 17.01785440613027, |
|
"grad_norm": 72.36029815673828, |
|
"learning_rate": 7.13495104299702e-06, |
|
"loss": 0.3014, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 17.018620689655172, |
|
"grad_norm": 37.205528259277344, |
|
"learning_rate": 7.126436781609196e-06, |
|
"loss": 1.6264, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 17.019386973180076, |
|
"grad_norm": 7.975151062011719, |
|
"learning_rate": 7.117922520221371e-06, |
|
"loss": 1.033, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 17.02, |
|
"eval_accuracy": 0.7111111111111111, |
|
"eval_loss": 1.6580545902252197, |
|
"eval_runtime": 26.8024, |
|
"eval_samples_per_second": 1.679, |
|
"eval_steps_per_second": 1.679, |
|
"step": 4698 |
|
}, |
|
{ |
|
"epoch": 18.00015325670498, |
|
"grad_norm": 0.010165479965507984, |
|
"learning_rate": 7.109408258833547e-06, |
|
"loss": 0.3414, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 18.000919540229884, |
|
"grad_norm": 0.018579374998807907, |
|
"learning_rate": 7.1008939974457225e-06, |
|
"loss": 0.515, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 18.00168582375479, |
|
"grad_norm": 0.21927808225154877, |
|
"learning_rate": 7.0923797360578975e-06, |
|
"loss": 0.8353, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 18.002452107279694, |
|
"grad_norm": 0.021109564229846, |
|
"learning_rate": 7.083865474670073e-06, |
|
"loss": 0.8006, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 18.003218390804598, |
|
"grad_norm": 112.68936157226562, |
|
"learning_rate": 7.075351213282249e-06, |
|
"loss": 0.5364, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 18.0039846743295, |
|
"grad_norm": 0.007576945703476667, |
|
"learning_rate": 7.066836951894423e-06, |
|
"loss": 0.0019, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 18.004750957854405, |
|
"grad_norm": 0.009337992407381535, |
|
"learning_rate": 7.058322690506599e-06, |
|
"loss": 1.9355, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 18.00551724137931, |
|
"grad_norm": 0.9535887241363525, |
|
"learning_rate": 7.049808429118774e-06, |
|
"loss": 0.9092, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 18.006283524904216, |
|
"grad_norm": 0.009785390459001064, |
|
"learning_rate": 7.04129416773095e-06, |
|
"loss": 0.459, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 18.00704980842912, |
|
"grad_norm": 0.00706516532227397, |
|
"learning_rate": 7.032779906343126e-06, |
|
"loss": 1.0517, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 18.007816091954023, |
|
"grad_norm": 0.04726971313357353, |
|
"learning_rate": 7.0242656449553e-06, |
|
"loss": 0.4152, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 18.008582375478927, |
|
"grad_norm": 231.0004119873047, |
|
"learning_rate": 7.015751383567476e-06, |
|
"loss": 0.3949, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 18.00934865900383, |
|
"grad_norm": 0.2374602109193802, |
|
"learning_rate": 7.007237122179652e-06, |
|
"loss": 0.3163, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 18.010114942528734, |
|
"grad_norm": 0.7454726696014404, |
|
"learning_rate": 6.998722860791827e-06, |
|
"loss": 0.3471, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 18.01088122605364, |
|
"grad_norm": 0.09716503322124481, |
|
"learning_rate": 6.9902085994040025e-06, |
|
"loss": 0.7012, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 18.011647509578545, |
|
"grad_norm": 3.4391729831695557, |
|
"learning_rate": 6.9816943380161775e-06, |
|
"loss": 0.5641, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 18.01241379310345, |
|
"grad_norm": 0.004662221763283014, |
|
"learning_rate": 6.973180076628353e-06, |
|
"loss": 0.135, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 18.013180076628352, |
|
"grad_norm": 0.02987459860742092, |
|
"learning_rate": 6.964665815240529e-06, |
|
"loss": 0.2554, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 18.013946360153255, |
|
"grad_norm": 0.02062298357486725, |
|
"learning_rate": 6.956151553852703e-06, |
|
"loss": 0.4499, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 18.014712643678163, |
|
"grad_norm": 0.6109151840209961, |
|
"learning_rate": 6.947637292464879e-06, |
|
"loss": 0.8021, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 18.015478927203066, |
|
"grad_norm": 0.012360738590359688, |
|
"learning_rate": 6.939123031077054e-06, |
|
"loss": 0.8778, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 18.01624521072797, |
|
"grad_norm": 0.02551034651696682, |
|
"learning_rate": 6.93060876968923e-06, |
|
"loss": 0.0025, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 18.017011494252873, |
|
"grad_norm": 0.2105444073677063, |
|
"learning_rate": 6.922094508301406e-06, |
|
"loss": 1.155, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 18.017777777777777, |
|
"grad_norm": 323.2512512207031, |
|
"learning_rate": 6.913580246913581e-06, |
|
"loss": 0.1342, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 18.01854406130268, |
|
"grad_norm": 0.03846808522939682, |
|
"learning_rate": 6.905065985525757e-06, |
|
"loss": 0.5797, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 18.019310344827588, |
|
"grad_norm": 0.062138259410858154, |
|
"learning_rate": 6.896551724137932e-06, |
|
"loss": 1.0439, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 1.951107144355774, |
|
"eval_runtime": 27.252, |
|
"eval_samples_per_second": 1.651, |
|
"eval_steps_per_second": 1.651, |
|
"step": 4959 |
|
}, |
|
{ |
|
"epoch": 19.000076628352492, |
|
"grad_norm": 225.21969604492188, |
|
"learning_rate": 6.888037462750107e-06, |
|
"loss": 0.0963, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 19.000842911877395, |
|
"grad_norm": 0.5668207406997681, |
|
"learning_rate": 6.8795232013622825e-06, |
|
"loss": 0.7582, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 19.0016091954023, |
|
"grad_norm": 0.013587575405836105, |
|
"learning_rate": 6.8710089399744575e-06, |
|
"loss": 0.0211, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 19.002375478927203, |
|
"grad_norm": 0.003807453438639641, |
|
"learning_rate": 6.862494678586633e-06, |
|
"loss": 0.4751, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 19.003141762452106, |
|
"grad_norm": 0.004750713240355253, |
|
"learning_rate": 6.853980417198809e-06, |
|
"loss": 0.6077, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 19.00390804597701, |
|
"grad_norm": 0.015361142344772816, |
|
"learning_rate": 6.845466155810983e-06, |
|
"loss": 1.8627, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 19.004674329501917, |
|
"grad_norm": 27.08148193359375, |
|
"learning_rate": 6.836951894423159e-06, |
|
"loss": 0.0743, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 19.00544061302682, |
|
"grad_norm": 0.06813742965459824, |
|
"learning_rate": 6.828437633035335e-06, |
|
"loss": 0.0101, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 19.006206896551724, |
|
"grad_norm": 48.28098678588867, |
|
"learning_rate": 6.81992337164751e-06, |
|
"loss": 1.5878, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 19.006973180076628, |
|
"grad_norm": 32.75178909301758, |
|
"learning_rate": 6.811409110259686e-06, |
|
"loss": 0.5236, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 19.00773946360153, |
|
"grad_norm": 0.013089810498058796, |
|
"learning_rate": 6.802894848871861e-06, |
|
"loss": 1.1647, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 19.00850574712644, |
|
"grad_norm": 0.027366667985916138, |
|
"learning_rate": 6.794380587484037e-06, |
|
"loss": 0.0293, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 19.009272030651342, |
|
"grad_norm": 0.010478328913450241, |
|
"learning_rate": 6.7858663260962125e-06, |
|
"loss": 0.5793, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 19.010038314176246, |
|
"grad_norm": 9.110784530639648, |
|
"learning_rate": 6.777352064708387e-06, |
|
"loss": 0.6513, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 19.01080459770115, |
|
"grad_norm": 0.04082193970680237, |
|
"learning_rate": 6.7688378033205625e-06, |
|
"loss": 0.0057, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 19.011570881226053, |
|
"grad_norm": 0.003504957305267453, |
|
"learning_rate": 6.760323541932738e-06, |
|
"loss": 0.002, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 19.012337164750956, |
|
"grad_norm": 0.004141181707382202, |
|
"learning_rate": 6.751809280544913e-06, |
|
"loss": 0.5782, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 19.013103448275864, |
|
"grad_norm": 339.9183654785156, |
|
"learning_rate": 6.743295019157089e-06, |
|
"loss": 0.3841, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 19.013869731800767, |
|
"grad_norm": 0.04252168908715248, |
|
"learning_rate": 6.734780757769263e-06, |
|
"loss": 0.9655, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 19.01463601532567, |
|
"grad_norm": 279.74212646484375, |
|
"learning_rate": 6.726266496381439e-06, |
|
"loss": 1.0617, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 19.015402298850574, |
|
"grad_norm": 0.1304304003715515, |
|
"learning_rate": 6.717752234993615e-06, |
|
"loss": 1.7934, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 19.016168582375478, |
|
"grad_norm": 0.007185114547610283, |
|
"learning_rate": 6.70923797360579e-06, |
|
"loss": 0.0009, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 19.01693486590038, |
|
"grad_norm": 0.004625165369361639, |
|
"learning_rate": 6.700723712217966e-06, |
|
"loss": 1.2902, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 19.01770114942529, |
|
"grad_norm": 0.00409284234046936, |
|
"learning_rate": 6.692209450830141e-06, |
|
"loss": 0.5628, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 19.018467432950192, |
|
"grad_norm": 0.008344005793333054, |
|
"learning_rate": 6.683695189442317e-06, |
|
"loss": 1.0599, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 19.019233716475096, |
|
"grad_norm": 285.9661560058594, |
|
"learning_rate": 6.6751809280544925e-06, |
|
"loss": 1.464, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 19.02, |
|
"grad_norm": 60.123680114746094, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.7522, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 19.02, |
|
"eval_accuracy": 0.7111111111111111, |
|
"eval_loss": 1.9120391607284546, |
|
"eval_runtime": 27.011, |
|
"eval_samples_per_second": 1.666, |
|
"eval_steps_per_second": 1.666, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 20.000766283524904, |
|
"grad_norm": 0.008071123622357845, |
|
"learning_rate": 6.6581524052788425e-06, |
|
"loss": 0.0007, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 20.001532567049807, |
|
"grad_norm": 1.0390774011611938, |
|
"learning_rate": 6.649638143891018e-06, |
|
"loss": 0.7437, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 20.002298850574714, |
|
"grad_norm": 6.639537334442139, |
|
"learning_rate": 6.641123882503193e-06, |
|
"loss": 0.4184, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 20.003065134099618, |
|
"grad_norm": 0.020966628566384315, |
|
"learning_rate": 6.632609621115369e-06, |
|
"loss": 0.4986, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 20.00383141762452, |
|
"grad_norm": 0.056898221373558044, |
|
"learning_rate": 6.624095359727543e-06, |
|
"loss": 0.2892, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 20.004597701149425, |
|
"grad_norm": 176.62693786621094, |
|
"learning_rate": 6.615581098339719e-06, |
|
"loss": 0.514, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 20.00536398467433, |
|
"grad_norm": 0.044064853340387344, |
|
"learning_rate": 6.607066836951895e-06, |
|
"loss": 0.4032, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 20.006130268199232, |
|
"grad_norm": 1.4815845489501953, |
|
"learning_rate": 6.59855257556407e-06, |
|
"loss": 0.5883, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 20.00689655172414, |
|
"grad_norm": 0.0036416244693100452, |
|
"learning_rate": 6.590038314176246e-06, |
|
"loss": 0.0045, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 20.007662835249043, |
|
"grad_norm": 194.84112548828125, |
|
"learning_rate": 6.581524052788422e-06, |
|
"loss": 0.5794, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 20.008429118773947, |
|
"grad_norm": 0.14704416692256927, |
|
"learning_rate": 6.573009791400597e-06, |
|
"loss": 0.0009, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 20.00919540229885, |
|
"grad_norm": 325.7463684082031, |
|
"learning_rate": 6.5644955300127725e-06, |
|
"loss": 1.2152, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 20.009961685823754, |
|
"grad_norm": 0.3056221902370453, |
|
"learning_rate": 6.555981268624947e-06, |
|
"loss": 0.9163, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 20.010727969348657, |
|
"grad_norm": 0.004691016860306263, |
|
"learning_rate": 6.5474670072371225e-06, |
|
"loss": 0.4172, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 20.011494252873565, |
|
"grad_norm": 0.20932753384113312, |
|
"learning_rate": 6.538952745849298e-06, |
|
"loss": 0.0534, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 20.01226053639847, |
|
"grad_norm": 12.387452125549316, |
|
"learning_rate": 6.530438484461473e-06, |
|
"loss": 0.005, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 20.013026819923372, |
|
"grad_norm": 0.003740863874554634, |
|
"learning_rate": 6.521924223073649e-06, |
|
"loss": 0.5472, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 20.013793103448275, |
|
"grad_norm": 2.9918148517608643, |
|
"learning_rate": 6.513409961685824e-06, |
|
"loss": 0.6894, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 20.01455938697318, |
|
"grad_norm": 0.004370540846139193, |
|
"learning_rate": 6.504895700297999e-06, |
|
"loss": 0.0023, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 20.015325670498083, |
|
"grad_norm": 0.05200687795877457, |
|
"learning_rate": 6.496381438910175e-06, |
|
"loss": 0.8061, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 20.01609195402299, |
|
"grad_norm": 15.071517944335938, |
|
"learning_rate": 6.48786717752235e-06, |
|
"loss": 0.0732, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 20.016858237547893, |
|
"grad_norm": 123.60091400146484, |
|
"learning_rate": 6.479352916134526e-06, |
|
"loss": 0.5485, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 20.017624521072797, |
|
"grad_norm": 56.30704879760742, |
|
"learning_rate": 6.470838654746702e-06, |
|
"loss": 2.5624, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 20.0183908045977, |
|
"grad_norm": 0.00854418147355318, |
|
"learning_rate": 6.462324393358877e-06, |
|
"loss": 1.4496, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 20.019157088122604, |
|
"grad_norm": 0.017714973539114, |
|
"learning_rate": 6.4538101319710525e-06, |
|
"loss": 0.5328, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 20.01992337164751, |
|
"grad_norm": 0.004511045292019844, |
|
"learning_rate": 6.445295870583227e-06, |
|
"loss": 0.0011, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 20.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 2.155228853225708, |
|
"eval_runtime": 25.6895, |
|
"eval_samples_per_second": 1.752, |
|
"eval_steps_per_second": 1.752, |
|
"step": 5481 |
|
}, |
|
{ |
|
"epoch": 21.000689655172415, |
|
"grad_norm": 169.8488311767578, |
|
"learning_rate": 6.4367816091954025e-06, |
|
"loss": 0.4896, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 21.00145593869732, |
|
"grad_norm": 0.01613098755478859, |
|
"learning_rate": 6.428267347807578e-06, |
|
"loss": 0.2155, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 21.002222222222223, |
|
"grad_norm": 0.025164268910884857, |
|
"learning_rate": 6.419753086419753e-06, |
|
"loss": 0.8494, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 21.002988505747126, |
|
"grad_norm": 0.04445045441389084, |
|
"learning_rate": 6.411238825031929e-06, |
|
"loss": 2.8709, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 21.00375478927203, |
|
"grad_norm": 0.00460793124511838, |
|
"learning_rate": 6.402724563644105e-06, |
|
"loss": 0.6915, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 21.004521072796933, |
|
"grad_norm": 196.26080322265625, |
|
"learning_rate": 6.39421030225628e-06, |
|
"loss": 1.0091, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 21.00528735632184, |
|
"grad_norm": 0.8934230804443359, |
|
"learning_rate": 6.385696040868455e-06, |
|
"loss": 0.4876, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 21.006053639846744, |
|
"grad_norm": 223.2466278076172, |
|
"learning_rate": 6.37718177948063e-06, |
|
"loss": 1.7922, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 21.006819923371648, |
|
"grad_norm": 1.639689564704895, |
|
"learning_rate": 6.368667518092806e-06, |
|
"loss": 0.473, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 21.00758620689655, |
|
"grad_norm": 0.09040302783250809, |
|
"learning_rate": 6.360153256704982e-06, |
|
"loss": 0.0509, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 21.008352490421455, |
|
"grad_norm": 0.2614004611968994, |
|
"learning_rate": 6.351638995317157e-06, |
|
"loss": 0.1043, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 21.00911877394636, |
|
"grad_norm": 10.296463966369629, |
|
"learning_rate": 6.3431247339293325e-06, |
|
"loss": 0.0065, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 21.009885057471266, |
|
"grad_norm": 0.03850758075714111, |
|
"learning_rate": 6.334610472541508e-06, |
|
"loss": 1.079, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 21.01065134099617, |
|
"grad_norm": 244.8512420654297, |
|
"learning_rate": 6.3260962111536825e-06, |
|
"loss": 0.9132, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 21.011417624521073, |
|
"grad_norm": 0.008782829158008099, |
|
"learning_rate": 6.317581949765858e-06, |
|
"loss": 0.4563, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 21.012183908045976, |
|
"grad_norm": 0.007988948374986649, |
|
"learning_rate": 6.309067688378033e-06, |
|
"loss": 0.7631, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 21.01295019157088, |
|
"grad_norm": 0.0269088726490736, |
|
"learning_rate": 6.300553426990209e-06, |
|
"loss": 0.0012, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 21.013716475095784, |
|
"grad_norm": 0.008409671485424042, |
|
"learning_rate": 6.292039165602385e-06, |
|
"loss": 0.1667, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 21.01448275862069, |
|
"grad_norm": 0.0059095160104334354, |
|
"learning_rate": 6.28352490421456e-06, |
|
"loss": 0.1982, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 21.015249042145594, |
|
"grad_norm": 0.40540429949760437, |
|
"learning_rate": 6.275010642826736e-06, |
|
"loss": 0.0348, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 21.016015325670498, |
|
"grad_norm": 0.056105632334947586, |
|
"learning_rate": 6.26649638143891e-06, |
|
"loss": 0.2315, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 21.0167816091954, |
|
"grad_norm": 5.564243793487549, |
|
"learning_rate": 6.257982120051086e-06, |
|
"loss": 1.2896, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 21.017547892720305, |
|
"grad_norm": 85.38070678710938, |
|
"learning_rate": 6.249467858663262e-06, |
|
"loss": 1.1236, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 21.018314176245212, |
|
"grad_norm": 103.62547302246094, |
|
"learning_rate": 6.240953597275437e-06, |
|
"loss": 1.6774, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 21.019080459770116, |
|
"grad_norm": 0.009410320781171322, |
|
"learning_rate": 6.2324393358876125e-06, |
|
"loss": 0.0015, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 21.01984674329502, |
|
"grad_norm": 0.005156536120921373, |
|
"learning_rate": 6.223925074499788e-06, |
|
"loss": 0.7904, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 21.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.072270631790161, |
|
"eval_runtime": 25.6829, |
|
"eval_samples_per_second": 1.752, |
|
"eval_steps_per_second": 1.752, |
|
"step": 5742 |
|
}, |
|
{ |
|
"epoch": 22.000613026819924, |
|
"grad_norm": 0.02685803920030594, |
|
"learning_rate": 6.2154108131119625e-06, |
|
"loss": 0.0012, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 22.001379310344827, |
|
"grad_norm": 0.012517916038632393, |
|
"learning_rate": 6.206896551724138e-06, |
|
"loss": 0.0231, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 22.00214559386973, |
|
"grad_norm": 0.07292059063911438, |
|
"learning_rate": 6.198382290336313e-06, |
|
"loss": 0.6619, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 22.002911877394634, |
|
"grad_norm": 0.2761479914188385, |
|
"learning_rate": 6.189868028948489e-06, |
|
"loss": 0.0019, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 22.00367816091954, |
|
"grad_norm": 7.414251327514648, |
|
"learning_rate": 6.181353767560665e-06, |
|
"loss": 0.3693, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 22.004444444444445, |
|
"grad_norm": 0.009334002621471882, |
|
"learning_rate": 6.17283950617284e-06, |
|
"loss": 1.0581, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 22.00521072796935, |
|
"grad_norm": 21.360097885131836, |
|
"learning_rate": 6.164325244785016e-06, |
|
"loss": 0.0174, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 22.005977011494252, |
|
"grad_norm": 0.36625248193740845, |
|
"learning_rate": 6.155810983397192e-06, |
|
"loss": 0.4287, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 22.006743295019156, |
|
"grad_norm": 1.877589464187622, |
|
"learning_rate": 6.147296722009366e-06, |
|
"loss": 0.1948, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 22.00750957854406, |
|
"grad_norm": 0.14674128592014313, |
|
"learning_rate": 6.138782460621542e-06, |
|
"loss": 0.5643, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 22.008275862068967, |
|
"grad_norm": 0.021958723664283752, |
|
"learning_rate": 6.130268199233717e-06, |
|
"loss": 0.3486, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 22.00904214559387, |
|
"grad_norm": 0.008868115022778511, |
|
"learning_rate": 6.1217539378458925e-06, |
|
"loss": 2.4311, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 22.009808429118774, |
|
"grad_norm": 0.04825178161263466, |
|
"learning_rate": 6.113239676458068e-06, |
|
"loss": 0.3446, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 22.010574712643677, |
|
"grad_norm": 0.006025779992341995, |
|
"learning_rate": 6.1047254150702425e-06, |
|
"loss": 1.3454, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 22.01134099616858, |
|
"grad_norm": 0.3849467933177948, |
|
"learning_rate": 6.096211153682418e-06, |
|
"loss": 0.0009, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 22.01210727969349, |
|
"grad_norm": 400.67266845703125, |
|
"learning_rate": 6.087696892294594e-06, |
|
"loss": 0.6205, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 22.012873563218392, |
|
"grad_norm": 0.09580480307340622, |
|
"learning_rate": 6.079182630906769e-06, |
|
"loss": 1.2347, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 22.013639846743295, |
|
"grad_norm": 0.028075139969587326, |
|
"learning_rate": 6.070668369518945e-06, |
|
"loss": 0.6484, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 22.0144061302682, |
|
"grad_norm": 0.20924535393714905, |
|
"learning_rate": 6.06215410813112e-06, |
|
"loss": 0.0011, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 22.015172413793103, |
|
"grad_norm": 0.006337861530482769, |
|
"learning_rate": 6.053639846743296e-06, |
|
"loss": 0.0089, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 22.015938697318006, |
|
"grad_norm": 0.0026473095640540123, |
|
"learning_rate": 6.045125585355472e-06, |
|
"loss": 0.0004, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 22.016704980842913, |
|
"grad_norm": 0.2897782325744629, |
|
"learning_rate": 6.036611323967646e-06, |
|
"loss": 0.6633, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 22.017471264367817, |
|
"grad_norm": 0.007315934170037508, |
|
"learning_rate": 6.028097062579822e-06, |
|
"loss": 0.6561, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 22.01823754789272, |
|
"grad_norm": 586.8655395507812, |
|
"learning_rate": 6.019582801191997e-06, |
|
"loss": 0.9464, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 22.019003831417624, |
|
"grad_norm": 684.4703369140625, |
|
"learning_rate": 6.0110685398041725e-06, |
|
"loss": 0.6768, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 22.019770114942528, |
|
"grad_norm": 0.00842924416065216, |
|
"learning_rate": 6.002554278416348e-06, |
|
"loss": 0.0824, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 22.02, |
|
"eval_accuracy": 0.6, |
|
"eval_loss": 2.322413921356201, |
|
"eval_runtime": 27.8399, |
|
"eval_samples_per_second": 1.616, |
|
"eval_steps_per_second": 1.616, |
|
"step": 6003 |
|
}, |
|
{ |
|
"epoch": 23.000536398467432, |
|
"grad_norm": 0.35203030705451965, |
|
"learning_rate": 5.9940400170285225e-06, |
|
"loss": 0.0008, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 23.001302681992335, |
|
"grad_norm": 248.3006134033203, |
|
"learning_rate": 5.985525755640698e-06, |
|
"loss": 0.78, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 23.002068965517243, |
|
"grad_norm": 0.06187109649181366, |
|
"learning_rate": 5.977011494252874e-06, |
|
"loss": 0.001, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 23.002835249042146, |
|
"grad_norm": 0.017878668382763863, |
|
"learning_rate": 5.968497232865049e-06, |
|
"loss": 0.0086, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 23.00360153256705, |
|
"grad_norm": 20.91975975036621, |
|
"learning_rate": 5.959982971477225e-06, |
|
"loss": 0.0128, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 23.004367816091953, |
|
"grad_norm": 0.14271274209022522, |
|
"learning_rate": 5.9514687100894e-06, |
|
"loss": 0.3949, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 23.005134099616857, |
|
"grad_norm": 0.0022982426453381777, |
|
"learning_rate": 5.942954448701576e-06, |
|
"loss": 1.1908, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 23.005900383141764, |
|
"grad_norm": 0.07136853039264679, |
|
"learning_rate": 5.934440187313752e-06, |
|
"loss": 0.0004, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 23.006666666666668, |
|
"grad_norm": 0.005678771063685417, |
|
"learning_rate": 5.925925925925926e-06, |
|
"loss": 1.2216, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 23.00743295019157, |
|
"grad_norm": 15.724533081054688, |
|
"learning_rate": 5.917411664538102e-06, |
|
"loss": 0.984, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 23.008199233716475, |
|
"grad_norm": 0.015127353370189667, |
|
"learning_rate": 5.9088974031502775e-06, |
|
"loss": 1.0131, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 23.00896551724138, |
|
"grad_norm": 0.3639283776283264, |
|
"learning_rate": 5.9003831417624525e-06, |
|
"loss": 0.0009, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 23.009731800766282, |
|
"grad_norm": 0.09395662695169449, |
|
"learning_rate": 5.891868880374628e-06, |
|
"loss": 0.6155, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 23.01049808429119, |
|
"grad_norm": 0.05603405460715294, |
|
"learning_rate": 5.883354618986803e-06, |
|
"loss": 0.183, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 23.011264367816093, |
|
"grad_norm": 0.7371284365653992, |
|
"learning_rate": 5.874840357598979e-06, |
|
"loss": 0.0842, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 23.012030651340996, |
|
"grad_norm": 0.014493432827293873, |
|
"learning_rate": 5.866326096211154e-06, |
|
"loss": 0.0117, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 23.0127969348659, |
|
"grad_norm": 0.011029372923076153, |
|
"learning_rate": 5.857811834823329e-06, |
|
"loss": 0.2, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 23.013563218390804, |
|
"grad_norm": 0.014823973178863525, |
|
"learning_rate": 5.849297573435505e-06, |
|
"loss": 0.6377, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 23.014329501915707, |
|
"grad_norm": 0.027684876695275307, |
|
"learning_rate": 5.84078331204768e-06, |
|
"loss": 2.2999, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 23.015095785440614, |
|
"grad_norm": 0.03351214528083801, |
|
"learning_rate": 5.832269050659856e-06, |
|
"loss": 1.5203, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 23.015862068965518, |
|
"grad_norm": 0.0054610916413366795, |
|
"learning_rate": 5.823754789272032e-06, |
|
"loss": 0.0471, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 23.01662835249042, |
|
"grad_norm": 0.04454955831170082, |
|
"learning_rate": 5.815240527884206e-06, |
|
"loss": 1.2863, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 23.017394636015325, |
|
"grad_norm": 3.4245729446411133, |
|
"learning_rate": 5.806726266496382e-06, |
|
"loss": 1.5685, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 23.01816091954023, |
|
"grad_norm": 0.011352489702403545, |
|
"learning_rate": 5.7982120051085575e-06, |
|
"loss": 0.5982, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 23.018927203065132, |
|
"grad_norm": 161.6426239013672, |
|
"learning_rate": 5.7896977437207325e-06, |
|
"loss": 1.6938, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 23.01969348659004, |
|
"grad_norm": 199.2720947265625, |
|
"learning_rate": 5.781183482332908e-06, |
|
"loss": 0.4656, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 23.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 1.8394169807434082, |
|
"eval_runtime": 27.7422, |
|
"eval_samples_per_second": 1.622, |
|
"eval_steps_per_second": 1.622, |
|
"step": 6264 |
|
}, |
|
{ |
|
"epoch": 24.000459770114944, |
|
"grad_norm": 106.89867401123047, |
|
"learning_rate": 5.772669220945083e-06, |
|
"loss": 0.5839, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 24.001226053639847, |
|
"grad_norm": 0.12666413187980652, |
|
"learning_rate": 5.764154959557259e-06, |
|
"loss": 0.0157, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 24.00199233716475, |
|
"grad_norm": 0.47772225737571716, |
|
"learning_rate": 5.755640698169435e-06, |
|
"loss": 0.0018, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 24.002758620689654, |
|
"grad_norm": 0.004135240335017443, |
|
"learning_rate": 5.747126436781609e-06, |
|
"loss": 0.6984, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 24.003524904214558, |
|
"grad_norm": 0.004683276172727346, |
|
"learning_rate": 5.738612175393785e-06, |
|
"loss": 0.0003, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 24.004291187739465, |
|
"grad_norm": 0.003153575351461768, |
|
"learning_rate": 5.730097914005961e-06, |
|
"loss": 0.5653, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 24.00505747126437, |
|
"grad_norm": 0.22170044481754303, |
|
"learning_rate": 5.721583652618136e-06, |
|
"loss": 1.7902, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 24.005823754789272, |
|
"grad_norm": 0.05017457529902458, |
|
"learning_rate": 5.713069391230312e-06, |
|
"loss": 0.6723, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 24.006590038314176, |
|
"grad_norm": 0.07758428901433945, |
|
"learning_rate": 5.704555129842486e-06, |
|
"loss": 0.6425, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 24.00735632183908, |
|
"grad_norm": 0.004366467706859112, |
|
"learning_rate": 5.696040868454662e-06, |
|
"loss": 0.6144, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 24.008122605363983, |
|
"grad_norm": 0.08577321469783783, |
|
"learning_rate": 5.6875266070668375e-06, |
|
"loss": 0.0014, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 24.00888888888889, |
|
"grad_norm": 0.02223465032875538, |
|
"learning_rate": 5.6790123456790125e-06, |
|
"loss": 0.5301, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 24.009655172413794, |
|
"grad_norm": 5.776513576507568, |
|
"learning_rate": 5.670498084291188e-06, |
|
"loss": 0.5538, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 24.010421455938697, |
|
"grad_norm": 0.0038269704673439264, |
|
"learning_rate": 5.661983822903364e-06, |
|
"loss": 0.0198, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 24.0111877394636, |
|
"grad_norm": 0.009990855120122433, |
|
"learning_rate": 5.653469561515539e-06, |
|
"loss": 0.0007, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 24.011954022988505, |
|
"grad_norm": 0.024605492129921913, |
|
"learning_rate": 5.644955300127715e-06, |
|
"loss": 0.0006, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 24.01272030651341, |
|
"grad_norm": 0.006209048442542553, |
|
"learning_rate": 5.636441038739889e-06, |
|
"loss": 0.0011, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 24.013486590038315, |
|
"grad_norm": 0.010548588819801807, |
|
"learning_rate": 5.627926777352065e-06, |
|
"loss": 0.2099, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 24.01425287356322, |
|
"grad_norm": 0.03874419257044792, |
|
"learning_rate": 5.619412515964241e-06, |
|
"loss": 0.0393, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 24.015019157088123, |
|
"grad_norm": 0.05194197967648506, |
|
"learning_rate": 5.610898254576416e-06, |
|
"loss": 0.4447, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 24.015785440613026, |
|
"grad_norm": 0.004177581984549761, |
|
"learning_rate": 5.602383993188592e-06, |
|
"loss": 0.0079, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 24.01655172413793, |
|
"grad_norm": 0.0064995852299034595, |
|
"learning_rate": 5.593869731800766e-06, |
|
"loss": 0.0045, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 24.017318007662837, |
|
"grad_norm": 0.0031617269851267338, |
|
"learning_rate": 5.585355470412942e-06, |
|
"loss": 0.0187, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 24.01808429118774, |
|
"grad_norm": 0.01218902412801981, |
|
"learning_rate": 5.5768412090251175e-06, |
|
"loss": 0.0106, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 24.018850574712644, |
|
"grad_norm": 0.16686464846134186, |
|
"learning_rate": 5.5683269476372925e-06, |
|
"loss": 0.3291, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 24.019616858237548, |
|
"grad_norm": 1.0756574869155884, |
|
"learning_rate": 5.559812686249468e-06, |
|
"loss": 1.5142, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 24.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.4023046493530273, |
|
"eval_runtime": 24.7262, |
|
"eval_samples_per_second": 1.82, |
|
"eval_steps_per_second": 1.82, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 25.000383141762452, |
|
"grad_norm": 0.23478935658931732, |
|
"learning_rate": 5.551298424861644e-06, |
|
"loss": 0.0035, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 25.001149425287355, |
|
"grad_norm": 0.008979057893157005, |
|
"learning_rate": 5.542784163473819e-06, |
|
"loss": 0.005, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 25.00191570881226, |
|
"grad_norm": 515.0016479492188, |
|
"learning_rate": 5.534269902085995e-06, |
|
"loss": 0.5506, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 25.002681992337166, |
|
"grad_norm": 0.009823550470173359, |
|
"learning_rate": 5.525755640698169e-06, |
|
"loss": 1.3643, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 25.00344827586207, |
|
"grad_norm": 216.33673095703125, |
|
"learning_rate": 5.517241379310345e-06, |
|
"loss": 1.9839, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 25.004214559386973, |
|
"grad_norm": 0.002864914247766137, |
|
"learning_rate": 5.508727117922521e-06, |
|
"loss": 1.0561, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 25.004980842911877, |
|
"grad_norm": 0.03450574353337288, |
|
"learning_rate": 5.500212856534696e-06, |
|
"loss": 0.6956, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 25.00574712643678, |
|
"grad_norm": 0.006375276483595371, |
|
"learning_rate": 5.491698595146872e-06, |
|
"loss": 1.1123, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 25.006513409961684, |
|
"grad_norm": 0.005265065468847752, |
|
"learning_rate": 5.4831843337590475e-06, |
|
"loss": 0.0186, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 25.00727969348659, |
|
"grad_norm": 0.006362410727888346, |
|
"learning_rate": 5.474670072371222e-06, |
|
"loss": 0.0017, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 25.008045977011495, |
|
"grad_norm": 0.0057250866666436195, |
|
"learning_rate": 5.4661558109833975e-06, |
|
"loss": 0.1718, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 25.0088122605364, |
|
"grad_norm": 0.007489965762943029, |
|
"learning_rate": 5.4576415495955725e-06, |
|
"loss": 1.7385, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 25.009578544061302, |
|
"grad_norm": 0.0037433006800711155, |
|
"learning_rate": 5.449127288207748e-06, |
|
"loss": 0.5765, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 25.010344827586206, |
|
"grad_norm": 0.3312411904335022, |
|
"learning_rate": 5.440613026819924e-06, |
|
"loss": 0.4274, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 25.011111111111113, |
|
"grad_norm": 0.005550125148147345, |
|
"learning_rate": 5.432098765432099e-06, |
|
"loss": 0.6633, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 25.011877394636016, |
|
"grad_norm": 515.3299560546875, |
|
"learning_rate": 5.423584504044275e-06, |
|
"loss": 0.384, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 25.01264367816092, |
|
"grad_norm": 0.0034298666287213564, |
|
"learning_rate": 5.415070242656451e-06, |
|
"loss": 1.0644, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 25.013409961685824, |
|
"grad_norm": 0.07524304836988449, |
|
"learning_rate": 5.406555981268625e-06, |
|
"loss": 0.5647, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 25.014176245210727, |
|
"grad_norm": 0.0026857892517000437, |
|
"learning_rate": 5.398041719880801e-06, |
|
"loss": 0.0772, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 25.01494252873563, |
|
"grad_norm": 0.45005977153778076, |
|
"learning_rate": 5.389527458492976e-06, |
|
"loss": 0.0019, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 25.015708812260538, |
|
"grad_norm": 0.10023090988397598, |
|
"learning_rate": 5.381013197105152e-06, |
|
"loss": 1.0686, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 25.01647509578544, |
|
"grad_norm": 0.12803435325622559, |
|
"learning_rate": 5.3724989357173275e-06, |
|
"loss": 1.1586, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 25.017241379310345, |
|
"grad_norm": 0.03280169516801834, |
|
"learning_rate": 5.3639846743295025e-06, |
|
"loss": 1.111, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 25.01800766283525, |
|
"grad_norm": 0.01774153672158718, |
|
"learning_rate": 5.3554704129416775e-06, |
|
"loss": 0.3865, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 25.018773946360152, |
|
"grad_norm": 0.011220112442970276, |
|
"learning_rate": 5.3469561515538525e-06, |
|
"loss": 1.4325, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 25.019540229885056, |
|
"grad_norm": 0.030729902908205986, |
|
"learning_rate": 5.338441890166028e-06, |
|
"loss": 0.0002, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 25.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.0958974361419678, |
|
"eval_runtime": 24.8595, |
|
"eval_samples_per_second": 1.81, |
|
"eval_steps_per_second": 1.81, |
|
"step": 6786 |
|
}, |
|
{ |
|
"epoch": 26.00030651340996, |
|
"grad_norm": 0.0165704358369112, |
|
"learning_rate": 5.329927628778204e-06, |
|
"loss": 0.8773, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 26.001072796934867, |
|
"grad_norm": 0.019194411113858223, |
|
"learning_rate": 5.321413367390379e-06, |
|
"loss": 0.3882, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 26.00183908045977, |
|
"grad_norm": 0.21865501999855042, |
|
"learning_rate": 5.312899106002555e-06, |
|
"loss": 0.0019, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 26.002605363984674, |
|
"grad_norm": 0.0046053738333284855, |
|
"learning_rate": 5.304384844614731e-06, |
|
"loss": 0.2845, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 26.003371647509578, |
|
"grad_norm": 0.14539921283721924, |
|
"learning_rate": 5.295870583226905e-06, |
|
"loss": 0.0073, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 26.00413793103448, |
|
"grad_norm": 0.0023881872184574604, |
|
"learning_rate": 5.287356321839081e-06, |
|
"loss": 0.851, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 26.00490421455939, |
|
"grad_norm": 0.004179185256361961, |
|
"learning_rate": 5.278842060451256e-06, |
|
"loss": 0.4828, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 26.005670498084292, |
|
"grad_norm": 394.6401062011719, |
|
"learning_rate": 5.270327799063432e-06, |
|
"loss": 1.0666, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 26.006436781609196, |
|
"grad_norm": 0.006806519813835621, |
|
"learning_rate": 5.2618135376756075e-06, |
|
"loss": 0.0017, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 26.0072030651341, |
|
"grad_norm": 0.003203527769073844, |
|
"learning_rate": 5.2532992762877825e-06, |
|
"loss": 0.0896, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 26.007969348659003, |
|
"grad_norm": 0.0578441321849823, |
|
"learning_rate": 5.244785014899958e-06, |
|
"loss": 1.4829, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 26.008735632183907, |
|
"grad_norm": 0.013379551470279694, |
|
"learning_rate": 5.236270753512134e-06, |
|
"loss": 0.9376, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 26.009501915708814, |
|
"grad_norm": 105.02237701416016, |
|
"learning_rate": 5.227756492124308e-06, |
|
"loss": 0.7295, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 26.010268199233717, |
|
"grad_norm": 666.5088500976562, |
|
"learning_rate": 5.219242230736484e-06, |
|
"loss": 1.078, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 26.01103448275862, |
|
"grad_norm": 324.5596618652344, |
|
"learning_rate": 5.210727969348659e-06, |
|
"loss": 0.1195, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 26.011800766283525, |
|
"grad_norm": 0.008104785345494747, |
|
"learning_rate": 5.202213707960835e-06, |
|
"loss": 0.001, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 26.01256704980843, |
|
"grad_norm": 0.004602055996656418, |
|
"learning_rate": 5.193699446573011e-06, |
|
"loss": 0.0004, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 26.013333333333332, |
|
"grad_norm": 3.331594944000244, |
|
"learning_rate": 5.185185185185185e-06, |
|
"loss": 1.5353, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 26.01409961685824, |
|
"grad_norm": 0.00612606480717659, |
|
"learning_rate": 5.176670923797361e-06, |
|
"loss": 0.9532, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 26.014865900383143, |
|
"grad_norm": 0.07543738931417465, |
|
"learning_rate": 5.168156662409536e-06, |
|
"loss": 1.0561, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 26.015632183908046, |
|
"grad_norm": 0.6736128330230713, |
|
"learning_rate": 5.159642401021712e-06, |
|
"loss": 0.2616, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 26.01639846743295, |
|
"grad_norm": 0.09130196273326874, |
|
"learning_rate": 5.1511281396338875e-06, |
|
"loss": 0.0023, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 26.017164750957853, |
|
"grad_norm": 0.0300575140863657, |
|
"learning_rate": 5.1426138782460625e-06, |
|
"loss": 0.002, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 26.017931034482757, |
|
"grad_norm": 349.5252990722656, |
|
"learning_rate": 5.134099616858238e-06, |
|
"loss": 0.8282, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 26.018697318007664, |
|
"grad_norm": 0.035327911376953125, |
|
"learning_rate": 5.125585355470414e-06, |
|
"loss": 0.6367, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 26.019463601532568, |
|
"grad_norm": 0.696599006652832, |
|
"learning_rate": 5.117071094082588e-06, |
|
"loss": 0.8021, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 26.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 1.9081264734268188, |
|
"eval_runtime": 25.4174, |
|
"eval_samples_per_second": 1.77, |
|
"eval_steps_per_second": 1.77, |
|
"step": 7047 |
|
}, |
|
{ |
|
"epoch": 27.000229885057472, |
|
"grad_norm": 0.0030064648017287254, |
|
"learning_rate": 5.108556832694764e-06, |
|
"loss": 1.3934, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 27.000996168582375, |
|
"grad_norm": 0.0029099867679178715, |
|
"learning_rate": 5.100042571306939e-06, |
|
"loss": 1.4468, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 27.00176245210728, |
|
"grad_norm": 0.017775215208530426, |
|
"learning_rate": 5.091528309919115e-06, |
|
"loss": 0.6865, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 27.002528735632183, |
|
"grad_norm": 0.03126843273639679, |
|
"learning_rate": 5.083014048531291e-06, |
|
"loss": 0.4644, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 27.00329501915709, |
|
"grad_norm": 0.009791210293769836, |
|
"learning_rate": 5.074499787143465e-06, |
|
"loss": 0.3409, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 27.004061302681993, |
|
"grad_norm": 0.11714817583560944, |
|
"learning_rate": 5.065985525755641e-06, |
|
"loss": 0.0029, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 27.004827586206897, |
|
"grad_norm": 0.004838420543819666, |
|
"learning_rate": 5.057471264367817e-06, |
|
"loss": 0.0609, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 27.0055938697318, |
|
"grad_norm": 0.013135085813701153, |
|
"learning_rate": 5.048957002979992e-06, |
|
"loss": 0.601, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 27.006360153256704, |
|
"grad_norm": 0.015902109444141388, |
|
"learning_rate": 5.0404427415921675e-06, |
|
"loss": 0.4323, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 27.007126436781608, |
|
"grad_norm": 0.003578481962904334, |
|
"learning_rate": 5.0319284802043425e-06, |
|
"loss": 0.5887, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 27.007892720306515, |
|
"grad_norm": 131.916259765625, |
|
"learning_rate": 5.023414218816518e-06, |
|
"loss": 0.6109, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 27.00865900383142, |
|
"grad_norm": 620.7122192382812, |
|
"learning_rate": 5.014899957428694e-06, |
|
"loss": 1.1338, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 27.009425287356322, |
|
"grad_norm": 0.009120309725403786, |
|
"learning_rate": 5.006385696040868e-06, |
|
"loss": 0.0019, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 27.010191570881226, |
|
"grad_norm": 0.2590232491493225, |
|
"learning_rate": 4.997871434653044e-06, |
|
"loss": 0.5099, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 27.01095785440613, |
|
"grad_norm": 0.3680216073989868, |
|
"learning_rate": 4.98935717326522e-06, |
|
"loss": 0.5077, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 27.011724137931033, |
|
"grad_norm": 0.0044039529748260975, |
|
"learning_rate": 4.980842911877395e-06, |
|
"loss": 0.0068, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 27.01249042145594, |
|
"grad_norm": 0.015745682641863823, |
|
"learning_rate": 4.972328650489571e-06, |
|
"loss": 0.0178, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 27.013256704980844, |
|
"grad_norm": 0.0036849898751825094, |
|
"learning_rate": 4.963814389101746e-06, |
|
"loss": 0.449, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 27.014022988505747, |
|
"grad_norm": 0.011248349212110043, |
|
"learning_rate": 4.955300127713921e-06, |
|
"loss": 0.0006, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 27.01478927203065, |
|
"grad_norm": 0.0033127309288829565, |
|
"learning_rate": 4.946785866326097e-06, |
|
"loss": 0.0012, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 27.015555555555554, |
|
"grad_norm": 0.002992176217958331, |
|
"learning_rate": 4.938271604938272e-06, |
|
"loss": 0.0111, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 27.016321839080458, |
|
"grad_norm": 0.07924830168485641, |
|
"learning_rate": 4.9297573435504475e-06, |
|
"loss": 0.0014, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 27.017088122605365, |
|
"grad_norm": 0.04315262660384178, |
|
"learning_rate": 4.9212430821626225e-06, |
|
"loss": 0.8462, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 27.01785440613027, |
|
"grad_norm": 0.012697023339569569, |
|
"learning_rate": 4.912728820774798e-06, |
|
"loss": 0.0013, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 27.018620689655172, |
|
"grad_norm": 0.0033431092742830515, |
|
"learning_rate": 4.904214559386973e-06, |
|
"loss": 0.0003, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 27.019386973180076, |
|
"grad_norm": 0.4717285931110382, |
|
"learning_rate": 4.895700297999149e-06, |
|
"loss": 0.6738, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 27.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.4258668422698975, |
|
"eval_runtime": 24.7266, |
|
"eval_samples_per_second": 1.82, |
|
"eval_steps_per_second": 1.82, |
|
"step": 7308 |
|
}, |
|
{ |
|
"epoch": 28.00015325670498, |
|
"grad_norm": 0.001604745746590197, |
|
"learning_rate": 4.887186036611324e-06, |
|
"loss": 0.6045, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 28.000919540229884, |
|
"grad_norm": 0.0017908330773934722, |
|
"learning_rate": 4.8786717752235e-06, |
|
"loss": 1.8704, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 28.00168582375479, |
|
"grad_norm": 0.006596135441213846, |
|
"learning_rate": 4.870157513835675e-06, |
|
"loss": 0.0007, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 28.002452107279694, |
|
"grad_norm": 0.021018363535404205, |
|
"learning_rate": 4.861643252447851e-06, |
|
"loss": 0.7268, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 28.003218390804598, |
|
"grad_norm": 0.017117802053689957, |
|
"learning_rate": 4.853128991060026e-06, |
|
"loss": 0.7699, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 28.0039846743295, |
|
"grad_norm": 0.3689514696598053, |
|
"learning_rate": 4.844614729672202e-06, |
|
"loss": 0.0015, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 28.004750957854405, |
|
"grad_norm": 0.0034252069890499115, |
|
"learning_rate": 4.836100468284377e-06, |
|
"loss": 0.1645, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 28.00551724137931, |
|
"grad_norm": 295.11517333984375, |
|
"learning_rate": 4.8275862068965525e-06, |
|
"loss": 0.0274, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 28.006283524904216, |
|
"grad_norm": 0.016669755801558495, |
|
"learning_rate": 4.8190719455087275e-06, |
|
"loss": 0.6342, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 28.00704980842912, |
|
"grad_norm": 0.0072222864255309105, |
|
"learning_rate": 4.8105576841209025e-06, |
|
"loss": 0.011, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 28.007816091954023, |
|
"grad_norm": 402.57147216796875, |
|
"learning_rate": 4.802043422733078e-06, |
|
"loss": 0.8977, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 28.008582375478927, |
|
"grad_norm": 0.01174142211675644, |
|
"learning_rate": 4.793529161345254e-06, |
|
"loss": 0.0707, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 28.00934865900383, |
|
"grad_norm": 0.10693544894456863, |
|
"learning_rate": 4.785014899957429e-06, |
|
"loss": 0.4987, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 28.010114942528734, |
|
"grad_norm": 0.002083864063024521, |
|
"learning_rate": 4.776500638569604e-06, |
|
"loss": 1.2207, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 28.01088122605364, |
|
"grad_norm": 0.00595466373488307, |
|
"learning_rate": 4.76798637718178e-06, |
|
"loss": 0.0268, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 28.011647509578545, |
|
"grad_norm": 0.005388176999986172, |
|
"learning_rate": 4.759472115793956e-06, |
|
"loss": 0.8611, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 28.01241379310345, |
|
"grad_norm": 0.00751941092312336, |
|
"learning_rate": 4.750957854406131e-06, |
|
"loss": 0.5984, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 28.013180076628352, |
|
"grad_norm": 0.0021053869277238846, |
|
"learning_rate": 4.742443593018306e-06, |
|
"loss": 0.0006, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 28.013946360153255, |
|
"grad_norm": 0.031106695532798767, |
|
"learning_rate": 4.733929331630482e-06, |
|
"loss": 0.5842, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 28.014712643678163, |
|
"grad_norm": 0.0178633164614439, |
|
"learning_rate": 4.7254150702426575e-06, |
|
"loss": 0.6476, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 28.015478927203066, |
|
"grad_norm": 35.224586486816406, |
|
"learning_rate": 4.7169008088548325e-06, |
|
"loss": 0.6159, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 28.01624521072797, |
|
"grad_norm": 0.0960080549120903, |
|
"learning_rate": 4.7083865474670075e-06, |
|
"loss": 0.0015, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 28.017011494252873, |
|
"grad_norm": 181.11029052734375, |
|
"learning_rate": 4.6998722860791825e-06, |
|
"loss": 0.6247, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 28.017777777777777, |
|
"grad_norm": 198.4553985595703, |
|
"learning_rate": 4.691358024691358e-06, |
|
"loss": 1.0887, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 28.01854406130268, |
|
"grad_norm": 0.003270102431997657, |
|
"learning_rate": 4.682843763303534e-06, |
|
"loss": 0.5144, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 28.019310344827588, |
|
"grad_norm": 85.21669006347656, |
|
"learning_rate": 4.674329501915709e-06, |
|
"loss": 0.015, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 28.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 2.56001353263855, |
|
"eval_runtime": 24.8696, |
|
"eval_samples_per_second": 1.809, |
|
"eval_steps_per_second": 1.809, |
|
"step": 7569 |
|
}, |
|
{ |
|
"epoch": 29.000076628352492, |
|
"grad_norm": 0.005918196868151426, |
|
"learning_rate": 4.665815240527884e-06, |
|
"loss": 0.0009, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 29.000842911877395, |
|
"grad_norm": 228.7015838623047, |
|
"learning_rate": 4.65730097914006e-06, |
|
"loss": 1.6778, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 29.0016091954023, |
|
"grad_norm": 0.005259399767965078, |
|
"learning_rate": 4.648786717752236e-06, |
|
"loss": 0.5771, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 29.002375478927203, |
|
"grad_norm": 0.003083228599280119, |
|
"learning_rate": 4.640272456364411e-06, |
|
"loss": 0.013, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 29.003141762452106, |
|
"grad_norm": 0.12991248071193695, |
|
"learning_rate": 4.631758194976586e-06, |
|
"loss": 0.0006, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 29.00390804597701, |
|
"grad_norm": 0.004700188059359789, |
|
"learning_rate": 4.623243933588762e-06, |
|
"loss": 0.5919, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 29.004674329501917, |
|
"grad_norm": 0.011378041468560696, |
|
"learning_rate": 4.6147296722009375e-06, |
|
"loss": 0.8737, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 29.00544061302682, |
|
"grad_norm": 0.0016201697289943695, |
|
"learning_rate": 4.6062154108131125e-06, |
|
"loss": 0.0695, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 29.006206896551724, |
|
"grad_norm": 0.0019867869559675455, |
|
"learning_rate": 4.5977011494252875e-06, |
|
"loss": 0.5786, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 29.006973180076628, |
|
"grad_norm": 0.09608129411935806, |
|
"learning_rate": 4.589186888037463e-06, |
|
"loss": 0.3774, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 29.00773946360153, |
|
"grad_norm": 0.007291839923709631, |
|
"learning_rate": 4.580672626649638e-06, |
|
"loss": 0.0016, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 29.00850574712644, |
|
"grad_norm": 0.0065483879297971725, |
|
"learning_rate": 4.572158365261814e-06, |
|
"loss": 0.5658, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 29.009272030651342, |
|
"grad_norm": 0.16096486151218414, |
|
"learning_rate": 4.563644103873989e-06, |
|
"loss": 0.001, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 29.010038314176246, |
|
"grad_norm": 0.10927268862724304, |
|
"learning_rate": 4.555129842486164e-06, |
|
"loss": 0.4678, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 29.01080459770115, |
|
"grad_norm": 0.015788882970809937, |
|
"learning_rate": 4.54661558109834e-06, |
|
"loss": 0.4832, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 29.011570881226053, |
|
"grad_norm": 0.0019136458868160844, |
|
"learning_rate": 4.538101319710516e-06, |
|
"loss": 0.0049, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 29.012337164750956, |
|
"grad_norm": 686.8013305664062, |
|
"learning_rate": 4.529587058322691e-06, |
|
"loss": 1.152, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 29.013103448275864, |
|
"grad_norm": 0.0020894187036901712, |
|
"learning_rate": 4.521072796934866e-06, |
|
"loss": 0.8544, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 29.013869731800767, |
|
"grad_norm": 0.044846970587968826, |
|
"learning_rate": 4.512558535547042e-06, |
|
"loss": 0.5089, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 29.01463601532567, |
|
"grad_norm": 114.77469635009766, |
|
"learning_rate": 4.5040442741592175e-06, |
|
"loss": 0.5906, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 29.015402298850574, |
|
"grad_norm": 9.375887870788574, |
|
"learning_rate": 4.4955300127713925e-06, |
|
"loss": 1.2329, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 29.016168582375478, |
|
"grad_norm": 0.027233008295297623, |
|
"learning_rate": 4.4870157513835675e-06, |
|
"loss": 0.0018, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 29.01693486590038, |
|
"grad_norm": 0.006551343481987715, |
|
"learning_rate": 4.478501489995743e-06, |
|
"loss": 0.3195, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 29.01770114942529, |
|
"grad_norm": 0.01023783814162016, |
|
"learning_rate": 4.469987228607919e-06, |
|
"loss": 0.0011, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 29.018467432950192, |
|
"grad_norm": 0.014980033040046692, |
|
"learning_rate": 4.461472967220094e-06, |
|
"loss": 0.0372, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 29.019233716475096, |
|
"grad_norm": 0.42260363698005676, |
|
"learning_rate": 4.452958705832269e-06, |
|
"loss": 0.0066, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 29.02, |
|
"grad_norm": 0.00476185604929924, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.001, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 29.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 2.409654140472412, |
|
"eval_runtime": 25.7304, |
|
"eval_samples_per_second": 1.749, |
|
"eval_steps_per_second": 1.749, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 30.000766283524904, |
|
"grad_norm": 0.0019038737518712878, |
|
"learning_rate": 4.43593018305662e-06, |
|
"loss": 0.6056, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 30.001532567049807, |
|
"grad_norm": 0.0034009136725217104, |
|
"learning_rate": 4.427415921668796e-06, |
|
"loss": 0.6311, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 30.002298850574714, |
|
"grad_norm": 0.3392273783683777, |
|
"learning_rate": 4.418901660280971e-06, |
|
"loss": 0.9863, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 30.003065134099618, |
|
"grad_norm": 0.009864503517746925, |
|
"learning_rate": 4.410387398893146e-06, |
|
"loss": 0.0012, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 30.00383141762452, |
|
"grad_norm": 0.013031632639467716, |
|
"learning_rate": 4.401873137505322e-06, |
|
"loss": 0.001, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 30.004597701149425, |
|
"grad_norm": 0.019748827442526817, |
|
"learning_rate": 4.3933588761174975e-06, |
|
"loss": 0.4375, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 30.00536398467433, |
|
"grad_norm": 0.011364581994712353, |
|
"learning_rate": 4.3848446147296725e-06, |
|
"loss": 0.6799, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 30.006130268199232, |
|
"grad_norm": 445.3628234863281, |
|
"learning_rate": 4.3763303533418475e-06, |
|
"loss": 0.262, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 30.00689655172414, |
|
"grad_norm": 0.061565157026052475, |
|
"learning_rate": 4.367816091954023e-06, |
|
"loss": 0.0003, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 30.007662835249043, |
|
"grad_norm": 0.0302883367985487, |
|
"learning_rate": 4.359301830566199e-06, |
|
"loss": 0.0005, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 30.008429118773947, |
|
"grad_norm": 0.03159965202212334, |
|
"learning_rate": 4.350787569178374e-06, |
|
"loss": 0.002, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 30.00919540229885, |
|
"grad_norm": 0.0025514259468764067, |
|
"learning_rate": 4.342273307790549e-06, |
|
"loss": 0.5943, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 30.009961685823754, |
|
"grad_norm": 0.07011362165212631, |
|
"learning_rate": 4.333759046402725e-06, |
|
"loss": 1.2049, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 30.010727969348657, |
|
"grad_norm": 0.25096532702445984, |
|
"learning_rate": 4.325244785014901e-06, |
|
"loss": 0.0389, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 30.011494252873565, |
|
"grad_norm": 0.06902328878641129, |
|
"learning_rate": 4.316730523627076e-06, |
|
"loss": 0.5591, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 30.01226053639847, |
|
"grad_norm": 0.0011447424767538905, |
|
"learning_rate": 4.308216262239251e-06, |
|
"loss": 0.0002, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 30.013026819923372, |
|
"grad_norm": 0.10466296225786209, |
|
"learning_rate": 4.299702000851427e-06, |
|
"loss": 0.2699, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 30.013793103448275, |
|
"grad_norm": 0.0667080506682396, |
|
"learning_rate": 4.291187739463602e-06, |
|
"loss": 0.0006, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 30.01455938697318, |
|
"grad_norm": 0.01340166199952364, |
|
"learning_rate": 4.2826734780757775e-06, |
|
"loss": 0.6079, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 30.015325670498083, |
|
"grad_norm": 0.0014369983691722155, |
|
"learning_rate": 4.2741592166879525e-06, |
|
"loss": 0.0007, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 30.01609195402299, |
|
"grad_norm": 0.032362256199121475, |
|
"learning_rate": 4.2656449553001275e-06, |
|
"loss": 0.0003, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 30.016858237547893, |
|
"grad_norm": 0.002232549712061882, |
|
"learning_rate": 4.257130693912303e-06, |
|
"loss": 0.0011, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 30.017624521072797, |
|
"grad_norm": 0.010500124655663967, |
|
"learning_rate": 4.248616432524479e-06, |
|
"loss": 0.8106, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 30.0183908045977, |
|
"grad_norm": 0.09263892471790314, |
|
"learning_rate": 4.240102171136654e-06, |
|
"loss": 0.5826, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 30.019157088122604, |
|
"grad_norm": 0.0010759371798485518, |
|
"learning_rate": 4.231587909748829e-06, |
|
"loss": 0.0045, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 30.01992337164751, |
|
"grad_norm": 0.006209959741681814, |
|
"learning_rate": 4.223073648361005e-06, |
|
"loss": 0.0003, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 30.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 2.333623170852661, |
|
"eval_runtime": 26.6628, |
|
"eval_samples_per_second": 1.688, |
|
"eval_steps_per_second": 1.688, |
|
"step": 8091 |
|
}, |
|
{ |
|
"epoch": 31.000689655172415, |
|
"grad_norm": 834.5934448242188, |
|
"learning_rate": 4.214559386973181e-06, |
|
"loss": 0.3359, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 31.00145593869732, |
|
"grad_norm": 0.000998163130134344, |
|
"learning_rate": 4.206045125585356e-06, |
|
"loss": 0.0009, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 31.002222222222223, |
|
"grad_norm": 0.0022653343621641397, |
|
"learning_rate": 4.197530864197531e-06, |
|
"loss": 0.0002, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 31.002988505747126, |
|
"grad_norm": 701.7037353515625, |
|
"learning_rate": 4.189016602809707e-06, |
|
"loss": 0.4127, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 31.00375478927203, |
|
"grad_norm": 0.005995426792651415, |
|
"learning_rate": 4.180502341421882e-06, |
|
"loss": 0.0003, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 31.004521072796933, |
|
"grad_norm": 0.00687633128836751, |
|
"learning_rate": 4.1719880800340575e-06, |
|
"loss": 0.7177, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 31.00528735632184, |
|
"grad_norm": 0.0011697874870151281, |
|
"learning_rate": 4.1634738186462325e-06, |
|
"loss": 0.2944, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 31.006053639846744, |
|
"grad_norm": 0.0028579551726579666, |
|
"learning_rate": 4.154959557258408e-06, |
|
"loss": 0.7018, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 31.006819923371648, |
|
"grad_norm": 0.013076997362077236, |
|
"learning_rate": 4.146445295870583e-06, |
|
"loss": 0.7371, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 31.00758620689655, |
|
"grad_norm": 0.0025177043862640858, |
|
"learning_rate": 4.137931034482759e-06, |
|
"loss": 0.5434, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 31.008352490421455, |
|
"grad_norm": 0.004951695445924997, |
|
"learning_rate": 4.129416773094934e-06, |
|
"loss": 1.3915, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 31.00911877394636, |
|
"grad_norm": 0.006266167853027582, |
|
"learning_rate": 4.12090251170711e-06, |
|
"loss": 0.6285, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 31.009885057471266, |
|
"grad_norm": 253.12742614746094, |
|
"learning_rate": 4.112388250319285e-06, |
|
"loss": 0.3764, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 31.01065134099617, |
|
"grad_norm": 0.0014777772594243288, |
|
"learning_rate": 4.103873988931461e-06, |
|
"loss": 0.8625, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 31.011417624521073, |
|
"grad_norm": 0.050426263362169266, |
|
"learning_rate": 4.095359727543636e-06, |
|
"loss": 0.8262, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 31.012183908045976, |
|
"grad_norm": 0.006508816033601761, |
|
"learning_rate": 4.086845466155812e-06, |
|
"loss": 0.4031, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 31.01295019157088, |
|
"grad_norm": 0.003250868758186698, |
|
"learning_rate": 4.078331204767987e-06, |
|
"loss": 0.4935, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 31.013716475095784, |
|
"grad_norm": 0.07815868407487869, |
|
"learning_rate": 4.0698169433801625e-06, |
|
"loss": 0.1263, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 31.01448275862069, |
|
"grad_norm": 0.0030389244202524424, |
|
"learning_rate": 4.0613026819923375e-06, |
|
"loss": 0.0005, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 31.015249042145594, |
|
"grad_norm": 0.002248189179226756, |
|
"learning_rate": 4.052788420604513e-06, |
|
"loss": 0.0012, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 31.016015325670498, |
|
"grad_norm": 0.026243863627314568, |
|
"learning_rate": 4.044274159216688e-06, |
|
"loss": 0.0002, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 31.0167816091954, |
|
"grad_norm": 0.0020766369998455048, |
|
"learning_rate": 4.035759897828863e-06, |
|
"loss": 0.0003, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 31.017547892720305, |
|
"grad_norm": 0.0027371151372790337, |
|
"learning_rate": 4.027245636441039e-06, |
|
"loss": 1.12, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 31.018314176245212, |
|
"grad_norm": 0.04507753252983093, |
|
"learning_rate": 4.018731375053214e-06, |
|
"loss": 0.0007, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 31.019080459770116, |
|
"grad_norm": 0.0016026358352974057, |
|
"learning_rate": 4.01021711366539e-06, |
|
"loss": 0.9346, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 31.01984674329502, |
|
"grad_norm": 0.00482403626665473, |
|
"learning_rate": 4.001702852277565e-06, |
|
"loss": 0.353, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 31.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.4537243843078613, |
|
"eval_runtime": 26.7209, |
|
"eval_samples_per_second": 1.684, |
|
"eval_steps_per_second": 1.684, |
|
"step": 8352 |
|
}, |
|
{ |
|
"epoch": 32.00061302681992, |
|
"grad_norm": 0.025300433859229088, |
|
"learning_rate": 3.993188590889741e-06, |
|
"loss": 0.0026, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 32.00137931034483, |
|
"grad_norm": 9.10494613647461, |
|
"learning_rate": 3.984674329501916e-06, |
|
"loss": 0.5676, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 32.002145593869734, |
|
"grad_norm": 0.056493666023015976, |
|
"learning_rate": 3.976160068114092e-06, |
|
"loss": 0.6826, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 32.00291187739464, |
|
"grad_norm": 0.5616773366928101, |
|
"learning_rate": 3.967645806726267e-06, |
|
"loss": 0.3819, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 32.00367816091954, |
|
"grad_norm": 0.002295893616974354, |
|
"learning_rate": 3.9591315453384425e-06, |
|
"loss": 0.0002, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 32.004444444444445, |
|
"grad_norm": 317.98101806640625, |
|
"learning_rate": 3.9506172839506175e-06, |
|
"loss": 0.4593, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 32.00521072796935, |
|
"grad_norm": 1.2093020677566528, |
|
"learning_rate": 3.942103022562793e-06, |
|
"loss": 0.75, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 32.00597701149425, |
|
"grad_norm": 0.3837658166885376, |
|
"learning_rate": 3.933588761174968e-06, |
|
"loss": 0.6746, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 32.006743295019156, |
|
"grad_norm": 0.012436291202902794, |
|
"learning_rate": 3.925074499787143e-06, |
|
"loss": 0.0234, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 32.00750957854406, |
|
"grad_norm": 0.01659245230257511, |
|
"learning_rate": 3.916560238399319e-06, |
|
"loss": 0.0007, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 32.00827586206896, |
|
"grad_norm": 435.0284118652344, |
|
"learning_rate": 3.908045977011495e-06, |
|
"loss": 0.0308, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 32.00904214559387, |
|
"grad_norm": 0.0037364887539297342, |
|
"learning_rate": 3.89953171562367e-06, |
|
"loss": 0.0026, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 32.00980842911878, |
|
"grad_norm": 0.001753072370775044, |
|
"learning_rate": 3.891017454235845e-06, |
|
"loss": 0.0002, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 32.01057471264368, |
|
"grad_norm": 0.027982644736766815, |
|
"learning_rate": 3.882503192848021e-06, |
|
"loss": 0.0001, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 32.011340996168585, |
|
"grad_norm": 4.973803997039795, |
|
"learning_rate": 3.873988931460197e-06, |
|
"loss": 0.6264, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 32.01210727969349, |
|
"grad_norm": 0.08634742349386215, |
|
"learning_rate": 3.865474670072372e-06, |
|
"loss": 0.1824, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 32.01287356321839, |
|
"grad_norm": 0.002766046905890107, |
|
"learning_rate": 3.856960408684547e-06, |
|
"loss": 0.0001, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 32.013639846743295, |
|
"grad_norm": 0.003736493643373251, |
|
"learning_rate": 3.8484461472967225e-06, |
|
"loss": 0.0003, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 32.0144061302682, |
|
"grad_norm": 0.0009198188199661672, |
|
"learning_rate": 3.839931885908898e-06, |
|
"loss": 0.0009, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 32.0151724137931, |
|
"grad_norm": 4.1219258308410645, |
|
"learning_rate": 3.831417624521073e-06, |
|
"loss": 0.0034, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 32.015938697318006, |
|
"grad_norm": 0.0027516514528542757, |
|
"learning_rate": 3.822903363133248e-06, |
|
"loss": 0.0007, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 32.01670498084291, |
|
"grad_norm": 883.1157836914062, |
|
"learning_rate": 3.8143891017454237e-06, |
|
"loss": 0.2421, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 32.01747126436781, |
|
"grad_norm": 0.048466455191373825, |
|
"learning_rate": 3.805874840357599e-06, |
|
"loss": 0.2408, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 32.01823754789272, |
|
"grad_norm": 0.03369063138961792, |
|
"learning_rate": 3.797360578969775e-06, |
|
"loss": 0.0007, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 32.01900383141763, |
|
"grad_norm": 0.0010800773743540049, |
|
"learning_rate": 3.78884631758195e-06, |
|
"loss": 0.0004, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 32.01977011494253, |
|
"grad_norm": 0.12326602637767792, |
|
"learning_rate": 3.7803320561941254e-06, |
|
"loss": 0.0003, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 32.02, |
|
"eval_accuracy": 0.6888888888888889, |
|
"eval_loss": 2.4429101943969727, |
|
"eval_runtime": 25.564, |
|
"eval_samples_per_second": 1.76, |
|
"eval_steps_per_second": 1.76, |
|
"step": 8613 |
|
}, |
|
{ |
|
"epoch": 33.00053639846743, |
|
"grad_norm": 416.8299865722656, |
|
"learning_rate": 3.7718177948063004e-06, |
|
"loss": 0.9207, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 33.001302681992335, |
|
"grad_norm": 2.8819141387939453, |
|
"learning_rate": 3.7633035334184762e-06, |
|
"loss": 0.791, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 33.00206896551724, |
|
"grad_norm": 53.023738861083984, |
|
"learning_rate": 3.7547892720306517e-06, |
|
"loss": 0.0041, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 33.00283524904214, |
|
"grad_norm": 0.0026518318336457014, |
|
"learning_rate": 3.746275010642827e-06, |
|
"loss": 0.0012, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 33.00360153256705, |
|
"grad_norm": 0.0012365657603368163, |
|
"learning_rate": 3.737760749255002e-06, |
|
"loss": 0.003, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 33.00436781609196, |
|
"grad_norm": 0.0010270520579069853, |
|
"learning_rate": 3.729246487867178e-06, |
|
"loss": 1.048, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 33.00513409961686, |
|
"grad_norm": 0.04330509528517723, |
|
"learning_rate": 3.7207322264793533e-06, |
|
"loss": 0.0002, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 33.005900383141764, |
|
"grad_norm": 2.569209098815918, |
|
"learning_rate": 3.7122179650915287e-06, |
|
"loss": 0.0092, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 33.00666666666667, |
|
"grad_norm": 0.014211867935955524, |
|
"learning_rate": 3.7037037037037037e-06, |
|
"loss": 0.655, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 33.00743295019157, |
|
"grad_norm": 0.5646183490753174, |
|
"learning_rate": 3.6951894423158796e-06, |
|
"loss": 0.2261, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 33.008199233716475, |
|
"grad_norm": 0.0009431659709662199, |
|
"learning_rate": 3.686675180928055e-06, |
|
"loss": 0.0057, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 33.00896551724138, |
|
"grad_norm": 0.009504112415015697, |
|
"learning_rate": 3.67816091954023e-06, |
|
"loss": 0.0002, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 33.00973180076628, |
|
"grad_norm": 0.7017917633056641, |
|
"learning_rate": 3.6696466581524054e-06, |
|
"loss": 0.4487, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 33.010498084291186, |
|
"grad_norm": 0.1036515161395073, |
|
"learning_rate": 3.6611323967645812e-06, |
|
"loss": 0.0253, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 33.01126436781609, |
|
"grad_norm": 0.0015443471493199468, |
|
"learning_rate": 3.6526181353767567e-06, |
|
"loss": 0.0002, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 33.01203065134099, |
|
"grad_norm": 585.27783203125, |
|
"learning_rate": 3.6441038739889317e-06, |
|
"loss": 0.5172, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 33.012796934865904, |
|
"grad_norm": 0.002773696556687355, |
|
"learning_rate": 3.635589612601107e-06, |
|
"loss": 0.1162, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 33.01356321839081, |
|
"grad_norm": 0.0008107810281217098, |
|
"learning_rate": 3.627075351213283e-06, |
|
"loss": 1.4347, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 33.01432950191571, |
|
"grad_norm": 0.001161490217782557, |
|
"learning_rate": 3.618561089825458e-06, |
|
"loss": 0.8489, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 33.015095785440614, |
|
"grad_norm": 0.007870987989008427, |
|
"learning_rate": 3.6100468284376333e-06, |
|
"loss": 0.0005, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 33.01586206896552, |
|
"grad_norm": 0.0013368806103244424, |
|
"learning_rate": 3.6015325670498087e-06, |
|
"loss": 0.0005, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 33.01662835249042, |
|
"grad_norm": 0.2093639224767685, |
|
"learning_rate": 3.5930183056619837e-06, |
|
"loss": 0.0003, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 33.017394636015325, |
|
"grad_norm": 0.0007116400520317256, |
|
"learning_rate": 3.5845040442741596e-06, |
|
"loss": 0.0003, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 33.01816091954023, |
|
"grad_norm": 0.020393405109643936, |
|
"learning_rate": 3.575989782886335e-06, |
|
"loss": 0.0005, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 33.01892720306513, |
|
"grad_norm": 0.012554151006042957, |
|
"learning_rate": 3.56747552149851e-06, |
|
"loss": 0.0058, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 33.019693486590036, |
|
"grad_norm": 0.27662163972854614, |
|
"learning_rate": 3.5589612601106854e-06, |
|
"loss": 0.5047, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 33.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 2.5712084770202637, |
|
"eval_runtime": 24.9664, |
|
"eval_samples_per_second": 1.802, |
|
"eval_steps_per_second": 1.802, |
|
"step": 8874 |
|
}, |
|
{ |
|
"epoch": 34.000459770114944, |
|
"grad_norm": 0.007375423796474934, |
|
"learning_rate": 3.5504469987228612e-06, |
|
"loss": 0.0028, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 34.00122605363985, |
|
"grad_norm": 0.03325073421001434, |
|
"learning_rate": 3.5419327373350367e-06, |
|
"loss": 0.5812, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 34.00199233716475, |
|
"grad_norm": 6.460132598876953, |
|
"learning_rate": 3.5334184759472117e-06, |
|
"loss": 0.0007, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 34.002758620689654, |
|
"grad_norm": 0.006866810377687216, |
|
"learning_rate": 3.524904214559387e-06, |
|
"loss": 0.0001, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 34.00352490421456, |
|
"grad_norm": 0.00434919074177742, |
|
"learning_rate": 3.516389953171563e-06, |
|
"loss": 0.0001, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 34.00429118773946, |
|
"grad_norm": 0.001511908252723515, |
|
"learning_rate": 3.507875691783738e-06, |
|
"loss": 1.2773, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 34.005057471264365, |
|
"grad_norm": 0.023352844640612602, |
|
"learning_rate": 3.4993614303959133e-06, |
|
"loss": 0.2119, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 34.00582375478927, |
|
"grad_norm": 0.12104704976081848, |
|
"learning_rate": 3.4908471690080887e-06, |
|
"loss": 0.0016, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 34.00659003831418, |
|
"grad_norm": 0.0007515622419305146, |
|
"learning_rate": 3.4823329076202646e-06, |
|
"loss": 0.0002, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 34.00735632183908, |
|
"grad_norm": 1252.3907470703125, |
|
"learning_rate": 3.4738186462324396e-06, |
|
"loss": 0.5591, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 34.00812260536399, |
|
"grad_norm": 0.0015894509851932526, |
|
"learning_rate": 3.465304384844615e-06, |
|
"loss": 0.0001, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 34.00888888888889, |
|
"grad_norm": 0.0016435530269518495, |
|
"learning_rate": 3.4567901234567904e-06, |
|
"loss": 0.0002, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 34.009655172413794, |
|
"grad_norm": 0.005118685774505138, |
|
"learning_rate": 3.448275862068966e-06, |
|
"loss": 0.6716, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 34.0104214559387, |
|
"grad_norm": 0.0012057090643793344, |
|
"learning_rate": 3.4397616006811412e-06, |
|
"loss": 0.0001, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 34.0111877394636, |
|
"grad_norm": 7.137910842895508, |
|
"learning_rate": 3.4312473392933167e-06, |
|
"loss": 0.0035, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 34.011954022988505, |
|
"grad_norm": 0.0020726255606859922, |
|
"learning_rate": 3.4227330779054917e-06, |
|
"loss": 0.6344, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 34.01272030651341, |
|
"grad_norm": 0.019497565925121307, |
|
"learning_rate": 3.4142188165176675e-06, |
|
"loss": 0.6919, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 34.01348659003831, |
|
"grad_norm": 0.0009703706600703299, |
|
"learning_rate": 3.405704555129843e-06, |
|
"loss": 0.9368, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 34.014252873563215, |
|
"grad_norm": 201.88323974609375, |
|
"learning_rate": 3.3971902937420183e-06, |
|
"loss": 0.0156, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 34.01501915708812, |
|
"grad_norm": 508.5426940917969, |
|
"learning_rate": 3.3886760323541933e-06, |
|
"loss": 1.8173, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 34.01578544061303, |
|
"grad_norm": 0.02900901436805725, |
|
"learning_rate": 3.380161770966369e-06, |
|
"loss": 0.4028, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 34.01655172413793, |
|
"grad_norm": 0.1268599033355713, |
|
"learning_rate": 3.3716475095785446e-06, |
|
"loss": 0.0002, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 34.01731800766284, |
|
"grad_norm": 1.608505129814148, |
|
"learning_rate": 3.3631332481907196e-06, |
|
"loss": 0.0067, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 34.01808429118774, |
|
"grad_norm": 0.00156217475887388, |
|
"learning_rate": 3.354618986802895e-06, |
|
"loss": 0.5875, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 34.018850574712644, |
|
"grad_norm": 0.00174483354203403, |
|
"learning_rate": 3.3461047254150704e-06, |
|
"loss": 0.1095, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 34.01961685823755, |
|
"grad_norm": 0.03174484893679619, |
|
"learning_rate": 3.3375904640272463e-06, |
|
"loss": 0.0002, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 34.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.333618640899658, |
|
"eval_runtime": 25.6089, |
|
"eval_samples_per_second": 1.757, |
|
"eval_steps_per_second": 1.757, |
|
"step": 9135 |
|
}, |
|
{ |
|
"epoch": 35.000383141762455, |
|
"grad_norm": 0.004596439655870199, |
|
"learning_rate": 3.3290762026394212e-06, |
|
"loss": 0.0571, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 35.00114942528736, |
|
"grad_norm": 0.003077858127653599, |
|
"learning_rate": 3.3205619412515967e-06, |
|
"loss": 0.0001, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 35.00191570881226, |
|
"grad_norm": 0.0021875801030546427, |
|
"learning_rate": 3.3120476798637717e-06, |
|
"loss": 0.0003, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 35.002681992337166, |
|
"grad_norm": 0.0009014836978167295, |
|
"learning_rate": 3.3035334184759475e-06, |
|
"loss": 0.0205, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 35.00344827586207, |
|
"grad_norm": 959.107421875, |
|
"learning_rate": 3.295019157088123e-06, |
|
"loss": 1.1494, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 35.00421455938697, |
|
"grad_norm": 0.0009777839295566082, |
|
"learning_rate": 3.2865048957002983e-06, |
|
"loss": 0.0002, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 35.00498084291188, |
|
"grad_norm": 0.0031836393754929304, |
|
"learning_rate": 3.2779906343124733e-06, |
|
"loss": 0.6735, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 35.00574712643678, |
|
"grad_norm": 0.0012983878841623664, |
|
"learning_rate": 3.269476372924649e-06, |
|
"loss": 0.6771, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 35.006513409961684, |
|
"grad_norm": 0.011930296197533607, |
|
"learning_rate": 3.2609621115368246e-06, |
|
"loss": 0.0002, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 35.00727969348659, |
|
"grad_norm": 0.21532519161701202, |
|
"learning_rate": 3.2524478501489996e-06, |
|
"loss": 0.0051, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 35.00804597701149, |
|
"grad_norm": 0.10600554198026657, |
|
"learning_rate": 3.243933588761175e-06, |
|
"loss": 0.5906, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 35.008812260536395, |
|
"grad_norm": 0.009332036599516869, |
|
"learning_rate": 3.235419327373351e-06, |
|
"loss": 0.4678, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 35.009578544061306, |
|
"grad_norm": 0.012161843478679657, |
|
"learning_rate": 3.2269050659855262e-06, |
|
"loss": 0.9194, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 35.01034482758621, |
|
"grad_norm": 0.17826266586780548, |
|
"learning_rate": 3.2183908045977012e-06, |
|
"loss": 0.0713, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 35.01111111111111, |
|
"grad_norm": 0.0954296663403511, |
|
"learning_rate": 3.2098765432098767e-06, |
|
"loss": 0.4367, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 35.01187739463602, |
|
"grad_norm": 0.019350633025169373, |
|
"learning_rate": 3.2013622818220525e-06, |
|
"loss": 0.0002, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 35.01264367816092, |
|
"grad_norm": 0.004323380999267101, |
|
"learning_rate": 3.1928480204342275e-06, |
|
"loss": 0.0193, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 35.013409961685824, |
|
"grad_norm": 0.0006493760738521814, |
|
"learning_rate": 3.184333759046403e-06, |
|
"loss": 0.0001, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 35.01417624521073, |
|
"grad_norm": 0.09640694409608841, |
|
"learning_rate": 3.1758194976585783e-06, |
|
"loss": 0.0006, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 35.01494252873563, |
|
"grad_norm": 0.0017739328322932124, |
|
"learning_rate": 3.167305236270754e-06, |
|
"loss": 0.6993, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 35.015708812260534, |
|
"grad_norm": 0.020408447831869125, |
|
"learning_rate": 3.158790974882929e-06, |
|
"loss": 0.0004, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 35.01647509578544, |
|
"grad_norm": 0.0007868781685829163, |
|
"learning_rate": 3.1502767134951046e-06, |
|
"loss": 0.0001, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 35.01724137931034, |
|
"grad_norm": 0.0018460003193467855, |
|
"learning_rate": 3.14176245210728e-06, |
|
"loss": 0.3248, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 35.01800766283525, |
|
"grad_norm": 0.0017984374426305294, |
|
"learning_rate": 3.133248190719455e-06, |
|
"loss": 0.0001, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 35.018773946360156, |
|
"grad_norm": 0.005243689753115177, |
|
"learning_rate": 3.124733929331631e-06, |
|
"loss": 0.0202, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 35.01954022988506, |
|
"grad_norm": 0.00201997859403491, |
|
"learning_rate": 3.1162196679438062e-06, |
|
"loss": 1.3626, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 35.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 2.5391104221343994, |
|
"eval_runtime": 26.0032, |
|
"eval_samples_per_second": 1.731, |
|
"eval_steps_per_second": 1.731, |
|
"step": 9396 |
|
}, |
|
{ |
|
"epoch": 36.00030651340996, |
|
"grad_norm": 0.5998082756996155, |
|
"learning_rate": 3.1077054065559812e-06, |
|
"loss": 0.0008, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 36.001072796934864, |
|
"grad_norm": 240.1182403564453, |
|
"learning_rate": 3.0991911451681567e-06, |
|
"loss": 0.3689, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 36.00183908045977, |
|
"grad_norm": 0.01313919946551323, |
|
"learning_rate": 3.0906768837803325e-06, |
|
"loss": 0.0002, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 36.00260536398467, |
|
"grad_norm": 0.0010190668981522322, |
|
"learning_rate": 3.082162622392508e-06, |
|
"loss": 2.1998, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 36.00337164750958, |
|
"grad_norm": 0.3388751447200775, |
|
"learning_rate": 3.073648361004683e-06, |
|
"loss": 0.6518, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 36.004137931034485, |
|
"grad_norm": 0.0012775210198014975, |
|
"learning_rate": 3.0651340996168583e-06, |
|
"loss": 0.001, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 36.00490421455939, |
|
"grad_norm": 0.001542083453387022, |
|
"learning_rate": 3.056619838229034e-06, |
|
"loss": 0.2721, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 36.00567049808429, |
|
"grad_norm": 0.0005672808620147407, |
|
"learning_rate": 3.048105576841209e-06, |
|
"loss": 0.0213, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 36.006436781609196, |
|
"grad_norm": 0.0033107404597103596, |
|
"learning_rate": 3.0395913154533846e-06, |
|
"loss": 0.0007, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 36.0072030651341, |
|
"grad_norm": 0.013874908909201622, |
|
"learning_rate": 3.03107705406556e-06, |
|
"loss": 0.0003, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 36.007969348659, |
|
"grad_norm": 213.28749084472656, |
|
"learning_rate": 3.022562792677736e-06, |
|
"loss": 0.677, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 36.00873563218391, |
|
"grad_norm": 0.00127232086379081, |
|
"learning_rate": 3.014048531289911e-06, |
|
"loss": 0.0001, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 36.00950191570881, |
|
"grad_norm": 0.002287566429004073, |
|
"learning_rate": 3.0055342699020862e-06, |
|
"loss": 0.0003, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 36.010268199233714, |
|
"grad_norm": 0.004714479669928551, |
|
"learning_rate": 2.9970200085142612e-06, |
|
"loss": 0.6342, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 36.01103448275862, |
|
"grad_norm": 0.006515284534543753, |
|
"learning_rate": 2.988505747126437e-06, |
|
"loss": 0.5459, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 36.01180076628353, |
|
"grad_norm": 0.001372402417473495, |
|
"learning_rate": 2.9799914857386125e-06, |
|
"loss": 0.0008, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 36.01256704980843, |
|
"grad_norm": 0.0035509751178324223, |
|
"learning_rate": 2.971477224350788e-06, |
|
"loss": 0.0395, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 36.013333333333335, |
|
"grad_norm": 0.000936897296924144, |
|
"learning_rate": 2.962962962962963e-06, |
|
"loss": 0.0002, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 36.01409961685824, |
|
"grad_norm": 0.0029338167514652014, |
|
"learning_rate": 2.9544487015751387e-06, |
|
"loss": 0.0003, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 36.01486590038314, |
|
"grad_norm": 6.021915435791016, |
|
"learning_rate": 2.945934440187314e-06, |
|
"loss": 0.1836, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 36.015632183908046, |
|
"grad_norm": 0.003689864883199334, |
|
"learning_rate": 2.9374201787994896e-06, |
|
"loss": 1.0028, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 36.01639846743295, |
|
"grad_norm": 0.0033734790049493313, |
|
"learning_rate": 2.9289059174116646e-06, |
|
"loss": 0.8264, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 36.01716475095785, |
|
"grad_norm": 0.0006126068765297532, |
|
"learning_rate": 2.92039165602384e-06, |
|
"loss": 0.0941, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 36.01793103448276, |
|
"grad_norm": 0.007638865150511265, |
|
"learning_rate": 2.911877394636016e-06, |
|
"loss": 0.0009, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 36.01869731800766, |
|
"grad_norm": 1832.1776123046875, |
|
"learning_rate": 2.903363133248191e-06, |
|
"loss": 0.1052, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 36.019463601532564, |
|
"grad_norm": 0.0006373985088430345, |
|
"learning_rate": 2.8948488718603662e-06, |
|
"loss": 0.5114, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 36.02, |
|
"eval_accuracy": 0.6222222222222222, |
|
"eval_loss": 2.600322723388672, |
|
"eval_runtime": 26.8551, |
|
"eval_samples_per_second": 1.676, |
|
"eval_steps_per_second": 1.676, |
|
"step": 9657 |
|
}, |
|
{ |
|
"epoch": 37.00022988505747, |
|
"grad_norm": 0.003570280270650983, |
|
"learning_rate": 2.8863346104725417e-06, |
|
"loss": 1.1122, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 37.000996168582375, |
|
"grad_norm": 0.0009315114002674818, |
|
"learning_rate": 2.8778203490847175e-06, |
|
"loss": 0.0002, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 37.00176245210728, |
|
"grad_norm": 0.0007604791317135096, |
|
"learning_rate": 2.8693060876968925e-06, |
|
"loss": 0.0005, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 37.00252873563218, |
|
"grad_norm": 0.0012069162912666798, |
|
"learning_rate": 2.860791826309068e-06, |
|
"loss": 0.0001, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 37.003295019157086, |
|
"grad_norm": 0.002958060009405017, |
|
"learning_rate": 2.852277564921243e-06, |
|
"loss": 0.223, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 37.00406130268199, |
|
"grad_norm": 0.0008952196221798658, |
|
"learning_rate": 2.8437633035334187e-06, |
|
"loss": 0.0003, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 37.00482758620689, |
|
"grad_norm": 0.0006224775570444763, |
|
"learning_rate": 2.835249042145594e-06, |
|
"loss": 0.0165, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 37.005593869731804, |
|
"grad_norm": 0.0007755410624668002, |
|
"learning_rate": 2.8267347807577696e-06, |
|
"loss": 0.0018, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 37.00636015325671, |
|
"grad_norm": 0.031969159841537476, |
|
"learning_rate": 2.8182205193699446e-06, |
|
"loss": 0.0004, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 37.00712643678161, |
|
"grad_norm": 0.0036592516116797924, |
|
"learning_rate": 2.8097062579821204e-06, |
|
"loss": 0.4604, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 37.007892720306515, |
|
"grad_norm": 0.007150766905397177, |
|
"learning_rate": 2.801191996594296e-06, |
|
"loss": 0.0001, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 37.00865900383142, |
|
"grad_norm": 0.0014679997693747282, |
|
"learning_rate": 2.792677735206471e-06, |
|
"loss": 0.0003, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 37.00942528735632, |
|
"grad_norm": 6.632998466491699, |
|
"learning_rate": 2.7841634738186462e-06, |
|
"loss": 0.001, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 37.010191570881226, |
|
"grad_norm": 350.12567138671875, |
|
"learning_rate": 2.775649212430822e-06, |
|
"loss": 0.8235, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 37.01095785440613, |
|
"grad_norm": 17.584909439086914, |
|
"learning_rate": 2.7671349510429975e-06, |
|
"loss": 0.002, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 37.01172413793103, |
|
"grad_norm": 0.0007768812356516719, |
|
"learning_rate": 2.7586206896551725e-06, |
|
"loss": 0.0002, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 37.01249042145594, |
|
"grad_norm": 0.0006663873209618032, |
|
"learning_rate": 2.750106428267348e-06, |
|
"loss": 0.0013, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 37.01325670498084, |
|
"grad_norm": 0.0025282136630266905, |
|
"learning_rate": 2.7415921668795238e-06, |
|
"loss": 0.6833, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 37.014022988505744, |
|
"grad_norm": 0.0022443109191954136, |
|
"learning_rate": 2.7330779054916987e-06, |
|
"loss": 0.0003, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 37.014789272030654, |
|
"grad_norm": 0.0011056849034503102, |
|
"learning_rate": 2.724563644103874e-06, |
|
"loss": 0.0064, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 37.01555555555556, |
|
"grad_norm": 0.09684690088033676, |
|
"learning_rate": 2.7160493827160496e-06, |
|
"loss": 0.0014, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 37.01632183908046, |
|
"grad_norm": 0.0006668069399893284, |
|
"learning_rate": 2.7075351213282254e-06, |
|
"loss": 0.3566, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 37.017088122605365, |
|
"grad_norm": 0.005238790065050125, |
|
"learning_rate": 2.6990208599404004e-06, |
|
"loss": 1.3116, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 37.01785440613027, |
|
"grad_norm": 109.37792205810547, |
|
"learning_rate": 2.690506598552576e-06, |
|
"loss": 1.2386, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 37.01862068965517, |
|
"grad_norm": 0.08802510052919388, |
|
"learning_rate": 2.6819923371647512e-06, |
|
"loss": 1.5404, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 37.019386973180076, |
|
"grad_norm": 145.9061737060547, |
|
"learning_rate": 2.6734780757769262e-06, |
|
"loss": 0.0528, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 37.02, |
|
"eval_accuracy": 0.6, |
|
"eval_loss": 2.4778451919555664, |
|
"eval_runtime": 25.72, |
|
"eval_samples_per_second": 1.75, |
|
"eval_steps_per_second": 1.75, |
|
"step": 9918 |
|
}, |
|
{ |
|
"epoch": 38.000153256704984, |
|
"grad_norm": 459.9485778808594, |
|
"learning_rate": 2.664963814389102e-06, |
|
"loss": 0.6775, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 38.00091954022989, |
|
"grad_norm": 0.10935943573713303, |
|
"learning_rate": 2.6564495530012775e-06, |
|
"loss": 0.0048, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 38.00168582375479, |
|
"grad_norm": 0.003584092017263174, |
|
"learning_rate": 2.6479352916134525e-06, |
|
"loss": 0.5523, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 38.002452107279694, |
|
"grad_norm": 0.007022094447165728, |
|
"learning_rate": 2.639421030225628e-06, |
|
"loss": 0.3777, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 38.0032183908046, |
|
"grad_norm": 0.24366505444049835, |
|
"learning_rate": 2.6309067688378037e-06, |
|
"loss": 0.2943, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 38.0039846743295, |
|
"grad_norm": 284.4385070800781, |
|
"learning_rate": 2.622392507449979e-06, |
|
"loss": 0.0373, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 38.004750957854405, |
|
"grad_norm": 30.966238021850586, |
|
"learning_rate": 2.613878246062154e-06, |
|
"loss": 0.217, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 38.00551724137931, |
|
"grad_norm": 0.0009917523711919785, |
|
"learning_rate": 2.6053639846743296e-06, |
|
"loss": 0.5054, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 38.00628352490421, |
|
"grad_norm": 0.0007773310062475502, |
|
"learning_rate": 2.5968497232865054e-06, |
|
"loss": 0.0022, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 38.007049808429116, |
|
"grad_norm": 0.23083847761154175, |
|
"learning_rate": 2.5883354618986804e-06, |
|
"loss": 0.0104, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 38.00781609195402, |
|
"grad_norm": 0.255422979593277, |
|
"learning_rate": 2.579821200510856e-06, |
|
"loss": 0.0038, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 38.00858237547893, |
|
"grad_norm": 0.0013695292873308063, |
|
"learning_rate": 2.5713069391230312e-06, |
|
"loss": 0.0002, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 38.009348659003834, |
|
"grad_norm": 837.1519165039062, |
|
"learning_rate": 2.562792677735207e-06, |
|
"loss": 1.6548, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 38.01011494252874, |
|
"grad_norm": 0.0014173381496220827, |
|
"learning_rate": 2.554278416347382e-06, |
|
"loss": 0.0001, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 38.01088122605364, |
|
"grad_norm": 0.0024138791486620903, |
|
"learning_rate": 2.5457641549595575e-06, |
|
"loss": 0.0002, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 38.011647509578545, |
|
"grad_norm": 0.01608857698738575, |
|
"learning_rate": 2.5372498935717325e-06, |
|
"loss": 0.4806, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 38.01241379310345, |
|
"grad_norm": 0.32779234647750854, |
|
"learning_rate": 2.5287356321839083e-06, |
|
"loss": 0.0002, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 38.01318007662835, |
|
"grad_norm": 0.023095596581697464, |
|
"learning_rate": 2.5202213707960837e-06, |
|
"loss": 0.0148, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 38.013946360153255, |
|
"grad_norm": 0.002442727331072092, |
|
"learning_rate": 2.511707109408259e-06, |
|
"loss": 0.0001, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 38.01471264367816, |
|
"grad_norm": 42.18749237060547, |
|
"learning_rate": 2.503192848020434e-06, |
|
"loss": 0.0047, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 38.01547892720306, |
|
"grad_norm": 0.003864702768623829, |
|
"learning_rate": 2.49467858663261e-06, |
|
"loss": 0.5635, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 38.016245210727966, |
|
"grad_norm": 0.07847326993942261, |
|
"learning_rate": 2.4861643252447854e-06, |
|
"loss": 0.0003, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 38.01701149425288, |
|
"grad_norm": 0.027744580060243607, |
|
"learning_rate": 2.4776500638569604e-06, |
|
"loss": 0.0003, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 38.01777777777778, |
|
"grad_norm": 0.0008341350476257503, |
|
"learning_rate": 2.469135802469136e-06, |
|
"loss": 0.0001, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 38.018544061302684, |
|
"grad_norm": 0.00043057341827079654, |
|
"learning_rate": 2.4606215410813112e-06, |
|
"loss": 0.0016, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 38.01931034482759, |
|
"grad_norm": 0.038214828819036484, |
|
"learning_rate": 2.4521072796934867e-06, |
|
"loss": 0.0027, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 38.02, |
|
"eval_accuracy": 0.6, |
|
"eval_loss": 2.8719892501831055, |
|
"eval_runtime": 25.0179, |
|
"eval_samples_per_second": 1.799, |
|
"eval_steps_per_second": 1.799, |
|
"step": 10179 |
|
}, |
|
{ |
|
"epoch": 39.00007662835249, |
|
"grad_norm": 0.3668863773345947, |
|
"learning_rate": 2.443593018305662e-06, |
|
"loss": 0.0002, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 39.00084291187739, |
|
"grad_norm": 0.0018092518439516425, |
|
"learning_rate": 2.4350787569178375e-06, |
|
"loss": 0.007, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 39.001609195402295, |
|
"grad_norm": 233.8143310546875, |
|
"learning_rate": 2.426564495530013e-06, |
|
"loss": 1.48, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 39.002375478927206, |
|
"grad_norm": 0.0012799007818102837, |
|
"learning_rate": 2.4180502341421883e-06, |
|
"loss": 0.0001, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 39.00314176245211, |
|
"grad_norm": 0.0009965369245037436, |
|
"learning_rate": 2.4095359727543637e-06, |
|
"loss": 0.0001, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 39.00390804597701, |
|
"grad_norm": 123.97967529296875, |
|
"learning_rate": 2.401021711366539e-06, |
|
"loss": 0.0095, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 39.00467432950192, |
|
"grad_norm": 0.0013832368422299623, |
|
"learning_rate": 2.3925074499787146e-06, |
|
"loss": 0.0001, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 39.00544061302682, |
|
"grad_norm": 0.0021355636417865753, |
|
"learning_rate": 2.38399318859089e-06, |
|
"loss": 0.0418, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 39.006206896551724, |
|
"grad_norm": 0.06866810470819473, |
|
"learning_rate": 2.3754789272030654e-06, |
|
"loss": 0.0003, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 39.00697318007663, |
|
"grad_norm": 0.044091515243053436, |
|
"learning_rate": 2.366964665815241e-06, |
|
"loss": 0.5459, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 39.00773946360153, |
|
"grad_norm": 0.0049442690797150135, |
|
"learning_rate": 2.3584504044274162e-06, |
|
"loss": 0.0002, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 39.008505747126435, |
|
"grad_norm": 0.019521689042448997, |
|
"learning_rate": 2.3499361430395912e-06, |
|
"loss": 0.0001, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 39.00927203065134, |
|
"grad_norm": 0.0023232789244502783, |
|
"learning_rate": 2.341421881651767e-06, |
|
"loss": 0.6611, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 39.01003831417624, |
|
"grad_norm": 0.0004890805575996637, |
|
"learning_rate": 2.332907620263942e-06, |
|
"loss": 0.0001, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 39.01080459770115, |
|
"grad_norm": 0.005810135044157505, |
|
"learning_rate": 2.324393358876118e-06, |
|
"loss": 0.2743, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 39.011570881226056, |
|
"grad_norm": 0.0013695996021851897, |
|
"learning_rate": 2.315879097488293e-06, |
|
"loss": 0.0004, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 39.01233716475096, |
|
"grad_norm": 0.0004354335251264274, |
|
"learning_rate": 2.3073648361004688e-06, |
|
"loss": 0.0002, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 39.013103448275864, |
|
"grad_norm": 0.04072890058159828, |
|
"learning_rate": 2.2988505747126437e-06, |
|
"loss": 0.0002, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 39.01386973180077, |
|
"grad_norm": 0.018684715032577515, |
|
"learning_rate": 2.290336313324819e-06, |
|
"loss": 0.0001, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 39.01463601532567, |
|
"grad_norm": 0.0008990708738565445, |
|
"learning_rate": 2.2818220519369946e-06, |
|
"loss": 0.0004, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 39.015402298850574, |
|
"grad_norm": 0.0012443230953067541, |
|
"learning_rate": 2.27330779054917e-06, |
|
"loss": 0.0002, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 39.01616858237548, |
|
"grad_norm": 0.0007185989525169134, |
|
"learning_rate": 2.2647935291613454e-06, |
|
"loss": 0.0002, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 39.01693486590038, |
|
"grad_norm": 0.3441164791584015, |
|
"learning_rate": 2.256279267773521e-06, |
|
"loss": 0.0079, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 39.017701149425285, |
|
"grad_norm": 0.002471212763339281, |
|
"learning_rate": 2.2477650063856962e-06, |
|
"loss": 0.7587, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 39.01846743295019, |
|
"grad_norm": 0.0006055875564925373, |
|
"learning_rate": 2.2392507449978717e-06, |
|
"loss": 0.1849, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 39.01923371647509, |
|
"grad_norm": 0.0010601780377328396, |
|
"learning_rate": 2.230736483610047e-06, |
|
"loss": 0.5523, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 39.02, |
|
"grad_norm": 214.96469116210938, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 0.703, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 39.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.7653093338012695, |
|
"eval_runtime": 26.5632, |
|
"eval_samples_per_second": 1.694, |
|
"eval_steps_per_second": 1.694, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 40.000766283524904, |
|
"grad_norm": 0.0005617381539195776, |
|
"learning_rate": 2.213707960834398e-06, |
|
"loss": 0.0001, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 40.00153256704981, |
|
"grad_norm": 0.0004308985371608287, |
|
"learning_rate": 2.205193699446573e-06, |
|
"loss": 0.079, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 40.00229885057471, |
|
"grad_norm": 0.00044181098928675056, |
|
"learning_rate": 2.1966794380587487e-06, |
|
"loss": 0.0002, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 40.003065134099614, |
|
"grad_norm": 0.0013837021542713046, |
|
"learning_rate": 2.1881651766709237e-06, |
|
"loss": 0.5168, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 40.00383141762452, |
|
"grad_norm": 0.002742999931797385, |
|
"learning_rate": 2.1796509152830996e-06, |
|
"loss": 0.0001, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 40.00459770114943, |
|
"grad_norm": 0.0005596329574473202, |
|
"learning_rate": 2.1711366538952746e-06, |
|
"loss": 0.4566, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 40.00536398467433, |
|
"grad_norm": 0.009267897345125675, |
|
"learning_rate": 2.1626223925074504e-06, |
|
"loss": 0.0003, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 40.006130268199236, |
|
"grad_norm": 0.14671576023101807, |
|
"learning_rate": 2.1541081311196254e-06, |
|
"loss": 0.659, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 40.00689655172414, |
|
"grad_norm": 0.1988813728094101, |
|
"learning_rate": 2.145593869731801e-06, |
|
"loss": 0.0007, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 40.00766283524904, |
|
"grad_norm": 0.15066532790660858, |
|
"learning_rate": 2.1370796083439762e-06, |
|
"loss": 0.0003, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 40.00842911877395, |
|
"grad_norm": 0.02539963833987713, |
|
"learning_rate": 2.1285653469561517e-06, |
|
"loss": 0.3385, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 40.00919540229885, |
|
"grad_norm": 0.0004459109914023429, |
|
"learning_rate": 2.120051085568327e-06, |
|
"loss": 0.0006, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 40.009961685823754, |
|
"grad_norm": 0.0007194732315838337, |
|
"learning_rate": 2.1115368241805025e-06, |
|
"loss": 0.0022, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 40.01072796934866, |
|
"grad_norm": 0.0005542331491596997, |
|
"learning_rate": 2.103022562792678e-06, |
|
"loss": 0.2811, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 40.01149425287356, |
|
"grad_norm": 0.0027718700002878904, |
|
"learning_rate": 2.0945083014048533e-06, |
|
"loss": 0.6671, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 40.012260536398465, |
|
"grad_norm": 0.0006096771103329957, |
|
"learning_rate": 2.0859940400170287e-06, |
|
"loss": 0.0001, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 40.01302681992337, |
|
"grad_norm": 0.20386645197868347, |
|
"learning_rate": 2.077479778629204e-06, |
|
"loss": 0.3385, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 40.01379310344828, |
|
"grad_norm": 0.002746242331340909, |
|
"learning_rate": 2.0689655172413796e-06, |
|
"loss": 0.0001, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 40.01455938697318, |
|
"grad_norm": 0.07641975581645966, |
|
"learning_rate": 2.060451255853555e-06, |
|
"loss": 0.0007, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 40.015325670498086, |
|
"grad_norm": 0.00033724671811796725, |
|
"learning_rate": 2.0519369944657304e-06, |
|
"loss": 0.0002, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 40.01609195402299, |
|
"grad_norm": 0.0005371053121052682, |
|
"learning_rate": 2.043422733077906e-06, |
|
"loss": 1.1409, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 40.01685823754789, |
|
"grad_norm": 0.0005278227035887539, |
|
"learning_rate": 2.0349084716900813e-06, |
|
"loss": 0.0112, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 40.0176245210728, |
|
"grad_norm": 110.43573760986328, |
|
"learning_rate": 2.0263942103022567e-06, |
|
"loss": 0.5057, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 40.0183908045977, |
|
"grad_norm": 0.0010212173219770193, |
|
"learning_rate": 2.0178799489144317e-06, |
|
"loss": 0.1841, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 40.019157088122604, |
|
"grad_norm": 0.0034464863128960133, |
|
"learning_rate": 2.009365687526607e-06, |
|
"loss": 0.0038, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 40.01992337164751, |
|
"grad_norm": 0.0018594469875097275, |
|
"learning_rate": 2.0008514261387825e-06, |
|
"loss": 0.0001, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 40.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.6458890438079834, |
|
"eval_runtime": 26.7996, |
|
"eval_samples_per_second": 1.679, |
|
"eval_steps_per_second": 1.679, |
|
"step": 10701 |
|
}, |
|
{ |
|
"epoch": 41.000689655172415, |
|
"grad_norm": 0.00769667886197567, |
|
"learning_rate": 1.992337164750958e-06, |
|
"loss": 0.4213, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 41.00145593869732, |
|
"grad_norm": 0.004477243404835463, |
|
"learning_rate": 1.9838229033631333e-06, |
|
"loss": 0.0004, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 41.00222222222222, |
|
"grad_norm": 0.007561236619949341, |
|
"learning_rate": 1.9753086419753087e-06, |
|
"loss": 0.0002, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 41.002988505747126, |
|
"grad_norm": 0.001369941863231361, |
|
"learning_rate": 1.966794380587484e-06, |
|
"loss": 0.6289, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 41.00375478927203, |
|
"grad_norm": 0.0004314217367209494, |
|
"learning_rate": 1.9582801191996596e-06, |
|
"loss": 0.0004, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 41.00452107279693, |
|
"grad_norm": 0.000997088965959847, |
|
"learning_rate": 1.949765857811835e-06, |
|
"loss": 0.0002, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 41.00528735632184, |
|
"grad_norm": 0.006650271359831095, |
|
"learning_rate": 1.9412515964240104e-06, |
|
"loss": 0.0431, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 41.00605363984674, |
|
"grad_norm": 0.0025702593848109245, |
|
"learning_rate": 1.932737335036186e-06, |
|
"loss": 0.0002, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 41.006819923371644, |
|
"grad_norm": 0.0007053497829474509, |
|
"learning_rate": 1.9242230736483612e-06, |
|
"loss": 0.3337, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 41.007586206896555, |
|
"grad_norm": 0.37883439660072327, |
|
"learning_rate": 1.9157088122605367e-06, |
|
"loss": 0.0004, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 41.00835249042146, |
|
"grad_norm": 0.0013751531951129436, |
|
"learning_rate": 1.9071945508727119e-06, |
|
"loss": 0.0006, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 41.00911877394636, |
|
"grad_norm": 0.002156210830435157, |
|
"learning_rate": 1.8986802894848875e-06, |
|
"loss": 0.0004, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 41.009885057471266, |
|
"grad_norm": 0.0028155988547950983, |
|
"learning_rate": 1.8901660280970627e-06, |
|
"loss": 0.0, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 41.01065134099617, |
|
"grad_norm": 0.00035497843055054545, |
|
"learning_rate": 1.8816517667092381e-06, |
|
"loss": 0.0002, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 41.01141762452107, |
|
"grad_norm": 0.0007868811371736228, |
|
"learning_rate": 1.8731375053214135e-06, |
|
"loss": 0.0001, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 41.01218390804598, |
|
"grad_norm": 2.8760898113250732, |
|
"learning_rate": 1.864623243933589e-06, |
|
"loss": 0.001, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 41.01295019157088, |
|
"grad_norm": 0.007423271890729666, |
|
"learning_rate": 1.8561089825457644e-06, |
|
"loss": 0.0032, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 41.013716475095784, |
|
"grad_norm": 0.009884797967970371, |
|
"learning_rate": 1.8475947211579398e-06, |
|
"loss": 0.0001, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 41.01448275862069, |
|
"grad_norm": 0.0009261261438950896, |
|
"learning_rate": 1.839080459770115e-06, |
|
"loss": 0.0001, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 41.01524904214559, |
|
"grad_norm": 0.002712372224777937, |
|
"learning_rate": 1.8305661983822906e-06, |
|
"loss": 0.3398, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 41.0160153256705, |
|
"grad_norm": 0.0022002735640853643, |
|
"learning_rate": 1.8220519369944658e-06, |
|
"loss": 0.4491, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 41.016781609195405, |
|
"grad_norm": 0.004302080255001783, |
|
"learning_rate": 1.8135376756066415e-06, |
|
"loss": 0.0001, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 41.01754789272031, |
|
"grad_norm": 0.0019112623995169997, |
|
"learning_rate": 1.8050234142188167e-06, |
|
"loss": 0.5577, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 41.01831417624521, |
|
"grad_norm": 11.327588081359863, |
|
"learning_rate": 1.7965091528309919e-06, |
|
"loss": 0.0009, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 41.019080459770116, |
|
"grad_norm": 1752.826904296875, |
|
"learning_rate": 1.7879948914431675e-06, |
|
"loss": 0.2276, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 41.01984674329502, |
|
"grad_norm": 0.0005377314519137144, |
|
"learning_rate": 1.7794806300553427e-06, |
|
"loss": 0.0001, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 41.02, |
|
"eval_accuracy": 0.6666666666666666, |
|
"eval_loss": 2.5735113620758057, |
|
"eval_runtime": 26.7388, |
|
"eval_samples_per_second": 1.683, |
|
"eval_steps_per_second": 1.683, |
|
"step": 10962 |
|
}, |
|
{ |
|
"epoch": 42.00061302681992, |
|
"grad_norm": 0.011441824026405811, |
|
"learning_rate": 1.7709663686675183e-06, |
|
"loss": 0.0106, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 42.00137931034483, |
|
"grad_norm": 0.024787303060293198, |
|
"learning_rate": 1.7624521072796935e-06, |
|
"loss": 0.0003, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 42.002145593869734, |
|
"grad_norm": 0.0011965780286118388, |
|
"learning_rate": 1.753937845891869e-06, |
|
"loss": 0.0001, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 42.00291187739464, |
|
"grad_norm": 0.008172291330993176, |
|
"learning_rate": 1.7454235845040444e-06, |
|
"loss": 0.0001, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 42.00367816091954, |
|
"grad_norm": 0.0009244986576959491, |
|
"learning_rate": 1.7369093231162198e-06, |
|
"loss": 0.0, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 42.004444444444445, |
|
"grad_norm": 0.0008702843333594501, |
|
"learning_rate": 1.7283950617283952e-06, |
|
"loss": 0.01, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 42.00521072796935, |
|
"grad_norm": 0.0017342583741992712, |
|
"learning_rate": 1.7198808003405706e-06, |
|
"loss": 0.0, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 42.00597701149425, |
|
"grad_norm": 0.0005900550750084221, |
|
"learning_rate": 1.7113665389527458e-06, |
|
"loss": 0.3035, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 42.006743295019156, |
|
"grad_norm": 0.00040124828228726983, |
|
"learning_rate": 1.7028522775649215e-06, |
|
"loss": 0.0001, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 42.00750957854406, |
|
"grad_norm": 0.0005654390552081168, |
|
"learning_rate": 1.6943380161770967e-06, |
|
"loss": 0.2468, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 42.00827586206896, |
|
"grad_norm": 0.0009583558421581984, |
|
"learning_rate": 1.6858237547892723e-06, |
|
"loss": 0.0004, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 42.00904214559387, |
|
"grad_norm": 0.0008892641053535044, |
|
"learning_rate": 1.6773094934014475e-06, |
|
"loss": 0.0002, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 42.00980842911878, |
|
"grad_norm": 1369.541748046875, |
|
"learning_rate": 1.6687952320136231e-06, |
|
"loss": 0.2126, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 42.01057471264368, |
|
"grad_norm": 0.0018894057720899582, |
|
"learning_rate": 1.6602809706257983e-06, |
|
"loss": 0.0005, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 42.011340996168585, |
|
"grad_norm": 0.0005669199163094163, |
|
"learning_rate": 1.6517667092379737e-06, |
|
"loss": 0.4438, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 42.01210727969349, |
|
"grad_norm": 0.0004346051428001374, |
|
"learning_rate": 1.6432524478501492e-06, |
|
"loss": 0.6518, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 42.01287356321839, |
|
"grad_norm": 0.0005145483301021159, |
|
"learning_rate": 1.6347381864623246e-06, |
|
"loss": 0.0, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 42.013639846743295, |
|
"grad_norm": 0.0006672671879641712, |
|
"learning_rate": 1.6262239250744998e-06, |
|
"loss": 0.0002, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 42.0144061302682, |
|
"grad_norm": 0.0025090023409575224, |
|
"learning_rate": 1.6177096636866754e-06, |
|
"loss": 0.7355, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 42.0151724137931, |
|
"grad_norm": 0.0031055163126438856, |
|
"learning_rate": 1.6091954022988506e-06, |
|
"loss": 0.0001, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 42.015938697318006, |
|
"grad_norm": 0.0011592346709221601, |
|
"learning_rate": 1.6006811409110262e-06, |
|
"loss": 0.0001, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 42.01670498084291, |
|
"grad_norm": 0.004185730125755072, |
|
"learning_rate": 1.5921668795232015e-06, |
|
"loss": 0.0372, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 42.01747126436781, |
|
"grad_norm": 0.001283592195250094, |
|
"learning_rate": 1.583652618135377e-06, |
|
"loss": 0.0, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 42.01823754789272, |
|
"grad_norm": 0.004349360708147287, |
|
"learning_rate": 1.5751383567475523e-06, |
|
"loss": 0.0001, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 42.01900383141763, |
|
"grad_norm": 0.009801444597542286, |
|
"learning_rate": 1.5666240953597275e-06, |
|
"loss": 0.7271, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 42.01977011494253, |
|
"grad_norm": 0.0005156445549800992, |
|
"learning_rate": 1.5581098339719031e-06, |
|
"loss": 0.0, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 42.02, |
|
"eval_accuracy": 0.6888888888888889, |
|
"eval_loss": 2.602997064590454, |
|
"eval_runtime": 27.531, |
|
"eval_samples_per_second": 1.635, |
|
"eval_steps_per_second": 1.635, |
|
"step": 11223 |
|
}, |
|
{ |
|
"epoch": 43.00053639846743, |
|
"grad_norm": 0.0028184240218251944, |
|
"learning_rate": 1.5495955725840783e-06, |
|
"loss": 0.0001, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 43.001302681992335, |
|
"grad_norm": 0.013690020889043808, |
|
"learning_rate": 1.541081311196254e-06, |
|
"loss": 0.0004, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 43.00206896551724, |
|
"grad_norm": 0.000378176016965881, |
|
"learning_rate": 1.5325670498084292e-06, |
|
"loss": 0.0002, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 43.00283524904214, |
|
"grad_norm": 0.0004595549835357815, |
|
"learning_rate": 1.5240527884206046e-06, |
|
"loss": 0.0655, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 43.00360153256705, |
|
"grad_norm": 0.038032129406929016, |
|
"learning_rate": 1.51553852703278e-06, |
|
"loss": 0.0001, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 43.00436781609196, |
|
"grad_norm": 0.0005863924161531031, |
|
"learning_rate": 1.5070242656449554e-06, |
|
"loss": 0.0001, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 43.00513409961686, |
|
"grad_norm": 0.006312482990324497, |
|
"learning_rate": 1.4985100042571306e-06, |
|
"loss": 0.0001, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 43.005900383141764, |
|
"grad_norm": 0.0006896348786540329, |
|
"learning_rate": 1.4899957428693062e-06, |
|
"loss": 0.6347, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 43.00666666666667, |
|
"grad_norm": 0.010312017984688282, |
|
"learning_rate": 1.4814814814814815e-06, |
|
"loss": 0.0002, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 43.00743295019157, |
|
"grad_norm": 0.0030278032645583153, |
|
"learning_rate": 1.472967220093657e-06, |
|
"loss": 0.0001, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 43.008199233716475, |
|
"grad_norm": 0.0003869073698297143, |
|
"learning_rate": 1.4644529587058323e-06, |
|
"loss": 0.0004, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 43.00896551724138, |
|
"grad_norm": 0.0004066386609338224, |
|
"learning_rate": 1.455938697318008e-06, |
|
"loss": 0.0, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 43.00973180076628, |
|
"grad_norm": 0.0022265457082539797, |
|
"learning_rate": 1.4474244359301831e-06, |
|
"loss": 0.0002, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 43.010498084291186, |
|
"grad_norm": 0.0006593198049813509, |
|
"learning_rate": 1.4389101745423588e-06, |
|
"loss": 0.0002, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 43.01126436781609, |
|
"grad_norm": 0.016597378998994827, |
|
"learning_rate": 1.430395913154534e-06, |
|
"loss": 0.0001, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 43.01203065134099, |
|
"grad_norm": 0.013694500550627708, |
|
"learning_rate": 1.4218816517667094e-06, |
|
"loss": 0.0008, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 43.012796934865904, |
|
"grad_norm": 0.049981165677309036, |
|
"learning_rate": 1.4133673903788848e-06, |
|
"loss": 0.0002, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 43.01356321839081, |
|
"grad_norm": 0.00038756002322770655, |
|
"learning_rate": 1.4048531289910602e-06, |
|
"loss": 0.0001, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 43.01432950191571, |
|
"grad_norm": 0.013279089704155922, |
|
"learning_rate": 1.3963388676032354e-06, |
|
"loss": 0.0004, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 43.015095785440614, |
|
"grad_norm": 0.001706786802969873, |
|
"learning_rate": 1.387824606215411e-06, |
|
"loss": 0.8803, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 43.01586206896552, |
|
"grad_norm": 0.0003157041210215539, |
|
"learning_rate": 1.3793103448275862e-06, |
|
"loss": 0.0001, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 43.01662835249042, |
|
"grad_norm": 0.0009101926116272807, |
|
"learning_rate": 1.3707960834397619e-06, |
|
"loss": 0.0002, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 43.017394636015325, |
|
"grad_norm": 0.00048753214650787413, |
|
"learning_rate": 1.362281822051937e-06, |
|
"loss": 0.0001, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 43.01816091954023, |
|
"grad_norm": 0.001277430448681116, |
|
"learning_rate": 1.3537675606641127e-06, |
|
"loss": 0.0001, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 43.01892720306513, |
|
"grad_norm": 4.2616376876831055, |
|
"learning_rate": 1.345253299276288e-06, |
|
"loss": 0.0004, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 43.019693486590036, |
|
"grad_norm": 4.271060943603516, |
|
"learning_rate": 1.3367390378884631e-06, |
|
"loss": 0.0008, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 43.02, |
|
"eval_accuracy": 0.6666666666666666, |
|
"eval_loss": 2.7085378170013428, |
|
"eval_runtime": 27.8609, |
|
"eval_samples_per_second": 1.615, |
|
"eval_steps_per_second": 1.615, |
|
"step": 11484 |
|
}, |
|
{ |
|
"epoch": 44.000459770114944, |
|
"grad_norm": 0.002231120364740491, |
|
"learning_rate": 1.3282247765006387e-06, |
|
"loss": 0.0001, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 44.00122605363985, |
|
"grad_norm": 0.012424326501786709, |
|
"learning_rate": 1.319710515112814e-06, |
|
"loss": 0.0002, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 44.00199233716475, |
|
"grad_norm": 0.0021405848674476147, |
|
"learning_rate": 1.3111962537249896e-06, |
|
"loss": 0.1207, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 44.002758620689654, |
|
"grad_norm": 186.84902954101562, |
|
"learning_rate": 1.3026819923371648e-06, |
|
"loss": 0.8398, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 44.00352490421456, |
|
"grad_norm": 0.0004238563997205347, |
|
"learning_rate": 1.2941677309493402e-06, |
|
"loss": 0.0004, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 44.00429118773946, |
|
"grad_norm": 0.0007826776127330959, |
|
"learning_rate": 1.2856534695615156e-06, |
|
"loss": 0.0123, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 44.005057471264365, |
|
"grad_norm": 0.001074014464393258, |
|
"learning_rate": 1.277139208173691e-06, |
|
"loss": 0.0011, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 44.00582375478927, |
|
"grad_norm": 0.007456593215465546, |
|
"learning_rate": 1.2686249467858662e-06, |
|
"loss": 0.0568, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 44.00659003831418, |
|
"grad_norm": 0.002022550208494067, |
|
"learning_rate": 1.2601106853980419e-06, |
|
"loss": 0.306, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 44.00735632183908, |
|
"grad_norm": 0.0005478428793139756, |
|
"learning_rate": 1.251596424010217e-06, |
|
"loss": 0.0001, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 44.00812260536399, |
|
"grad_norm": 0.0005965171149000525, |
|
"learning_rate": 1.2430821626223927e-06, |
|
"loss": 0.0, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 44.00888888888889, |
|
"grad_norm": 0.00036345416447147727, |
|
"learning_rate": 1.234567901234568e-06, |
|
"loss": 0.0, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 44.009655172413794, |
|
"grad_norm": 0.00039661044138483703, |
|
"learning_rate": 1.2260536398467433e-06, |
|
"loss": 0.0001, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 44.0104214559387, |
|
"grad_norm": 0.0005007166764698923, |
|
"learning_rate": 1.2175393784589187e-06, |
|
"loss": 0.0001, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 44.0111877394636, |
|
"grad_norm": 0.0003881788579747081, |
|
"learning_rate": 1.2090251170710942e-06, |
|
"loss": 0.0002, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 44.011954022988505, |
|
"grad_norm": 0.0006638368940912187, |
|
"learning_rate": 1.2005108556832696e-06, |
|
"loss": 0.0001, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 44.01272030651341, |
|
"grad_norm": 0.01510770432651043, |
|
"learning_rate": 1.191996594295445e-06, |
|
"loss": 0.0002, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 44.01348659003831, |
|
"grad_norm": 0.0009375409572385252, |
|
"learning_rate": 1.1834823329076204e-06, |
|
"loss": 0.0003, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 44.014252873563215, |
|
"grad_norm": 0.0017861068481579423, |
|
"learning_rate": 1.1749680715197956e-06, |
|
"loss": 0.6774, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 44.01501915708812, |
|
"grad_norm": 0.0007072360604070127, |
|
"learning_rate": 1.166453810131971e-06, |
|
"loss": 0.0002, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 44.01578544061303, |
|
"grad_norm": 0.0004052703734487295, |
|
"learning_rate": 1.1579395487441465e-06, |
|
"loss": 0.0006, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 44.01655172413793, |
|
"grad_norm": 0.0007049337727949023, |
|
"learning_rate": 1.1494252873563219e-06, |
|
"loss": 0.0131, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 44.01731800766284, |
|
"grad_norm": 0.0023608545307070017, |
|
"learning_rate": 1.1409110259684973e-06, |
|
"loss": 0.0002, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 44.01808429118774, |
|
"grad_norm": 0.0007592226611450315, |
|
"learning_rate": 1.1323967645806727e-06, |
|
"loss": 0.0001, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 44.018850574712644, |
|
"grad_norm": 0.022096728906035423, |
|
"learning_rate": 1.1238825031928481e-06, |
|
"loss": 1.343, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 44.01961685823755, |
|
"grad_norm": 0.0006767487502656877, |
|
"learning_rate": 1.1153682418050235e-06, |
|
"loss": 0.0001, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 44.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.801389217376709, |
|
"eval_runtime": 25.6643, |
|
"eval_samples_per_second": 1.753, |
|
"eval_steps_per_second": 1.753, |
|
"step": 11745 |
|
}, |
|
{ |
|
"epoch": 45.000383141762455, |
|
"grad_norm": 0.0016754636308178306, |
|
"learning_rate": 1.106853980417199e-06, |
|
"loss": 0.0172, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 45.00114942528736, |
|
"grad_norm": 0.010163234546780586, |
|
"learning_rate": 1.0983397190293744e-06, |
|
"loss": 0.0001, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 45.00191570881226, |
|
"grad_norm": 0.00038828927790746093, |
|
"learning_rate": 1.0898254576415498e-06, |
|
"loss": 0.0017, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 45.002681992337166, |
|
"grad_norm": 179.7286376953125, |
|
"learning_rate": 1.0813111962537252e-06, |
|
"loss": 0.5792, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 45.00344827586207, |
|
"grad_norm": 0.001220043166540563, |
|
"learning_rate": 1.0727969348659004e-06, |
|
"loss": 0.6836, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 45.00421455938697, |
|
"grad_norm": 0.18691496551036835, |
|
"learning_rate": 1.0642826734780758e-06, |
|
"loss": 0.6858, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 45.00498084291188, |
|
"grad_norm": 0.0005287218373268843, |
|
"learning_rate": 1.0557684120902512e-06, |
|
"loss": 0.0001, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 45.00574712643678, |
|
"grad_norm": 0.002112560672685504, |
|
"learning_rate": 1.0472541507024267e-06, |
|
"loss": 0.0033, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 45.006513409961684, |
|
"grad_norm": 0.0004538028733804822, |
|
"learning_rate": 1.038739889314602e-06, |
|
"loss": 0.0013, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 45.00727969348659, |
|
"grad_norm": 0.005300307180732489, |
|
"learning_rate": 1.0302256279267775e-06, |
|
"loss": 0.0001, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 45.00804597701149, |
|
"grad_norm": 0.01059534028172493, |
|
"learning_rate": 1.021711366538953e-06, |
|
"loss": 0.0001, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 45.008812260536395, |
|
"grad_norm": 0.0020577851682901382, |
|
"learning_rate": 1.0131971051511283e-06, |
|
"loss": 0.0002, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 45.009578544061306, |
|
"grad_norm": 0.3919330835342407, |
|
"learning_rate": 1.0046828437633035e-06, |
|
"loss": 0.0003, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 45.01034482758621, |
|
"grad_norm": 0.3530638813972473, |
|
"learning_rate": 9.96168582375479e-07, |
|
"loss": 0.0326, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 45.01111111111111, |
|
"grad_norm": 0.028299113735556602, |
|
"learning_rate": 9.876543209876544e-07, |
|
"loss": 0.003, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 45.01187739463602, |
|
"grad_norm": 0.00040512188570573926, |
|
"learning_rate": 9.791400595998298e-07, |
|
"loss": 0.0008, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 45.01264367816092, |
|
"grad_norm": 0.019978506490588188, |
|
"learning_rate": 9.706257982120052e-07, |
|
"loss": 0.0013, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 45.013409961685824, |
|
"grad_norm": 0.0003510297101456672, |
|
"learning_rate": 9.621115368241806e-07, |
|
"loss": 0.0001, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 45.01417624521073, |
|
"grad_norm": 0.023367272689938545, |
|
"learning_rate": 9.535972754363559e-07, |
|
"loss": 0.8049, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 45.01494252873563, |
|
"grad_norm": 0.0013532958691939712, |
|
"learning_rate": 9.450830140485314e-07, |
|
"loss": 0.0001, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 45.015708812260534, |
|
"grad_norm": 0.014343813993036747, |
|
"learning_rate": 9.365687526607068e-07, |
|
"loss": 0.6631, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 45.01647509578544, |
|
"grad_norm": 0.0005006958381272852, |
|
"learning_rate": 9.280544912728822e-07, |
|
"loss": 0.0001, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 45.01724137931034, |
|
"grad_norm": 0.0004863318463321775, |
|
"learning_rate": 9.195402298850575e-07, |
|
"loss": 0.2354, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 45.01800766283525, |
|
"grad_norm": 0.021632444113492966, |
|
"learning_rate": 9.110259684972329e-07, |
|
"loss": 0.0001, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 45.018773946360156, |
|
"grad_norm": 0.0003966492076870054, |
|
"learning_rate": 9.025117071094083e-07, |
|
"loss": 0.0018, |
|
"step": 11990 |
|
}, |
|
{ |
|
"epoch": 45.01954022988506, |
|
"grad_norm": 0.005706295371055603, |
|
"learning_rate": 8.939974457215837e-07, |
|
"loss": 0.0001, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 45.02, |
|
"eval_accuracy": 0.6666666666666666, |
|
"eval_loss": 2.747692584991455, |
|
"eval_runtime": 25.6514, |
|
"eval_samples_per_second": 1.754, |
|
"eval_steps_per_second": 1.754, |
|
"step": 12006 |
|
}, |
|
{ |
|
"epoch": 46.00030651340996, |
|
"grad_norm": 0.0006745823775418103, |
|
"learning_rate": 8.854831843337592e-07, |
|
"loss": 0.6235, |
|
"step": 12010 |
|
}, |
|
{ |
|
"epoch": 46.001072796934864, |
|
"grad_norm": 0.0005363059462979436, |
|
"learning_rate": 8.769689229459345e-07, |
|
"loss": 0.0131, |
|
"step": 12020 |
|
}, |
|
{ |
|
"epoch": 46.00183908045977, |
|
"grad_norm": 0.0437430664896965, |
|
"learning_rate": 8.684546615581099e-07, |
|
"loss": 0.0004, |
|
"step": 12030 |
|
}, |
|
{ |
|
"epoch": 46.00260536398467, |
|
"grad_norm": 0.0005971883074380457, |
|
"learning_rate": 8.599404001702853e-07, |
|
"loss": 0.0001, |
|
"step": 12040 |
|
}, |
|
{ |
|
"epoch": 46.00337164750958, |
|
"grad_norm": 0.0041750106029212475, |
|
"learning_rate": 8.514261387824607e-07, |
|
"loss": 0.0009, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 46.004137931034485, |
|
"grad_norm": 0.0014191080117598176, |
|
"learning_rate": 8.429118773946361e-07, |
|
"loss": 0.0002, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 46.00490421455939, |
|
"grad_norm": 0.0012048515491187572, |
|
"learning_rate": 8.343976160068116e-07, |
|
"loss": 0.0, |
|
"step": 12070 |
|
}, |
|
{ |
|
"epoch": 46.00567049808429, |
|
"grad_norm": 0.012600412592291832, |
|
"learning_rate": 8.258833546189869e-07, |
|
"loss": 0.0074, |
|
"step": 12080 |
|
}, |
|
{ |
|
"epoch": 46.006436781609196, |
|
"grad_norm": 0.00043088599340990186, |
|
"learning_rate": 8.173690932311623e-07, |
|
"loss": 0.0001, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 46.0072030651341, |
|
"grad_norm": 0.00086090003605932, |
|
"learning_rate": 8.088548318433377e-07, |
|
"loss": 0.0001, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 46.007969348659, |
|
"grad_norm": 0.0043645999394357204, |
|
"learning_rate": 8.003405704555131e-07, |
|
"loss": 0.0001, |
|
"step": 12110 |
|
}, |
|
{ |
|
"epoch": 46.00873563218391, |
|
"grad_norm": 0.010623692534863949, |
|
"learning_rate": 7.918263090676885e-07, |
|
"loss": 0.0006, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 46.00950191570881, |
|
"grad_norm": 0.0006461400189436972, |
|
"learning_rate": 7.833120476798637e-07, |
|
"loss": 0.0002, |
|
"step": 12130 |
|
}, |
|
{ |
|
"epoch": 46.010268199233714, |
|
"grad_norm": 0.0013729810016229749, |
|
"learning_rate": 7.747977862920392e-07, |
|
"loss": 0.0, |
|
"step": 12140 |
|
}, |
|
{ |
|
"epoch": 46.01103448275862, |
|
"grad_norm": 0.0003474123077467084, |
|
"learning_rate": 7.662835249042146e-07, |
|
"loss": 0.0001, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 46.01180076628353, |
|
"grad_norm": 0.0010698989499360323, |
|
"learning_rate": 7.5776926351639e-07, |
|
"loss": 0.0003, |
|
"step": 12160 |
|
}, |
|
{ |
|
"epoch": 46.01256704980843, |
|
"grad_norm": 0.0003078237932641059, |
|
"learning_rate": 7.492550021285653e-07, |
|
"loss": 0.0102, |
|
"step": 12170 |
|
}, |
|
{ |
|
"epoch": 46.013333333333335, |
|
"grad_norm": 114.88867950439453, |
|
"learning_rate": 7.407407407407407e-07, |
|
"loss": 0.4662, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 46.01409961685824, |
|
"grad_norm": 0.0015720983501523733, |
|
"learning_rate": 7.322264793529161e-07, |
|
"loss": 0.0002, |
|
"step": 12190 |
|
}, |
|
{ |
|
"epoch": 46.01486590038314, |
|
"grad_norm": 0.0022138843778520823, |
|
"learning_rate": 7.237122179650916e-07, |
|
"loss": 0.0, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 46.015632183908046, |
|
"grad_norm": 0.010039104148745537, |
|
"learning_rate": 7.15197956577267e-07, |
|
"loss": 0.0002, |
|
"step": 12210 |
|
}, |
|
{ |
|
"epoch": 46.01639846743295, |
|
"grad_norm": 0.004299751482903957, |
|
"learning_rate": 7.066836951894424e-07, |
|
"loss": 0.0001, |
|
"step": 12220 |
|
}, |
|
{ |
|
"epoch": 46.01716475095785, |
|
"grad_norm": 0.0005732214776799083, |
|
"learning_rate": 6.981694338016177e-07, |
|
"loss": 0.0083, |
|
"step": 12230 |
|
}, |
|
{ |
|
"epoch": 46.01793103448276, |
|
"grad_norm": 0.0007530484581366181, |
|
"learning_rate": 6.896551724137931e-07, |
|
"loss": 0.0, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 46.01869731800766, |
|
"grad_norm": 0.0009310397435911, |
|
"learning_rate": 6.811409110259685e-07, |
|
"loss": 0.0001, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 46.019463601532564, |
|
"grad_norm": 0.0005763958324678242, |
|
"learning_rate": 6.72626649638144e-07, |
|
"loss": 0.0028, |
|
"step": 12260 |
|
}, |
|
{ |
|
"epoch": 46.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.8430819511413574, |
|
"eval_runtime": 26.393, |
|
"eval_samples_per_second": 1.705, |
|
"eval_steps_per_second": 1.705, |
|
"step": 12267 |
|
}, |
|
{ |
|
"epoch": 47.00022988505747, |
|
"grad_norm": 878.3082885742188, |
|
"learning_rate": 6.641123882503194e-07, |
|
"loss": 0.1142, |
|
"step": 12270 |
|
}, |
|
{ |
|
"epoch": 47.000996168582375, |
|
"grad_norm": 0.0009827708126977086, |
|
"learning_rate": 6.555981268624948e-07, |
|
"loss": 0.7327, |
|
"step": 12280 |
|
}, |
|
{ |
|
"epoch": 47.00176245210728, |
|
"grad_norm": 0.0004977863281965256, |
|
"learning_rate": 6.470838654746701e-07, |
|
"loss": 0.7853, |
|
"step": 12290 |
|
}, |
|
{ |
|
"epoch": 47.00252873563218, |
|
"grad_norm": 0.0027017523534595966, |
|
"learning_rate": 6.385696040868455e-07, |
|
"loss": 0.0004, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 47.003295019157086, |
|
"grad_norm": 0.003932940308004618, |
|
"learning_rate": 6.300553426990209e-07, |
|
"loss": 0.0001, |
|
"step": 12310 |
|
}, |
|
{ |
|
"epoch": 47.00406130268199, |
|
"grad_norm": 0.0014727659290656447, |
|
"learning_rate": 6.215410813111964e-07, |
|
"loss": 0.0, |
|
"step": 12320 |
|
}, |
|
{ |
|
"epoch": 47.00482758620689, |
|
"grad_norm": 0.0005117836990393698, |
|
"learning_rate": 6.130268199233717e-07, |
|
"loss": 0.7804, |
|
"step": 12330 |
|
}, |
|
{ |
|
"epoch": 47.005593869731804, |
|
"grad_norm": 0.0010762950405478477, |
|
"learning_rate": 6.045125585355471e-07, |
|
"loss": 0.0001, |
|
"step": 12340 |
|
}, |
|
{ |
|
"epoch": 47.00636015325671, |
|
"grad_norm": 0.0002965295861940831, |
|
"learning_rate": 5.959982971477225e-07, |
|
"loss": 0.0002, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 47.00712643678161, |
|
"grad_norm": 0.013439073227345943, |
|
"learning_rate": 5.874840357598978e-07, |
|
"loss": 0.0, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 47.007892720306515, |
|
"grad_norm": 0.00029065439593978226, |
|
"learning_rate": 5.789697743720732e-07, |
|
"loss": 0.0003, |
|
"step": 12370 |
|
}, |
|
{ |
|
"epoch": 47.00865900383142, |
|
"grad_norm": 0.00045318141928873956, |
|
"learning_rate": 5.704555129842486e-07, |
|
"loss": 0.0, |
|
"step": 12380 |
|
}, |
|
{ |
|
"epoch": 47.00942528735632, |
|
"grad_norm": 0.03974248468875885, |
|
"learning_rate": 5.619412515964241e-07, |
|
"loss": 0.0001, |
|
"step": 12390 |
|
}, |
|
{ |
|
"epoch": 47.010191570881226, |
|
"grad_norm": 0.00041028112173080444, |
|
"learning_rate": 5.534269902085995e-07, |
|
"loss": 0.355, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 47.01095785440613, |
|
"grad_norm": 0.00039147085044533014, |
|
"learning_rate": 5.449127288207749e-07, |
|
"loss": 0.0148, |
|
"step": 12410 |
|
}, |
|
{ |
|
"epoch": 47.01172413793103, |
|
"grad_norm": 0.0009370796615257859, |
|
"learning_rate": 5.363984674329502e-07, |
|
"loss": 0.6019, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 47.01249042145594, |
|
"grad_norm": 0.0034563420340418816, |
|
"learning_rate": 5.278842060451256e-07, |
|
"loss": 0.0001, |
|
"step": 12430 |
|
}, |
|
{ |
|
"epoch": 47.01325670498084, |
|
"grad_norm": 0.0003248891734983772, |
|
"learning_rate": 5.19369944657301e-07, |
|
"loss": 0.0, |
|
"step": 12440 |
|
}, |
|
{ |
|
"epoch": 47.014022988505744, |
|
"grad_norm": 0.0011132057989016175, |
|
"learning_rate": 5.108556832694765e-07, |
|
"loss": 0.0037, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 47.014789272030654, |
|
"grad_norm": 0.000656172342132777, |
|
"learning_rate": 5.023414218816518e-07, |
|
"loss": 0.0001, |
|
"step": 12460 |
|
}, |
|
{ |
|
"epoch": 47.01555555555556, |
|
"grad_norm": 0.11941202729940414, |
|
"learning_rate": 4.938271604938272e-07, |
|
"loss": 0.0004, |
|
"step": 12470 |
|
}, |
|
{ |
|
"epoch": 47.01632183908046, |
|
"grad_norm": 0.006858575623482466, |
|
"learning_rate": 4.853128991060026e-07, |
|
"loss": 0.0002, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 47.017088122605365, |
|
"grad_norm": 0.0012859116541221738, |
|
"learning_rate": 4.7679863771817797e-07, |
|
"loss": 0.0002, |
|
"step": 12490 |
|
}, |
|
{ |
|
"epoch": 47.01785440613027, |
|
"grad_norm": 0.001454568700864911, |
|
"learning_rate": 4.682843763303534e-07, |
|
"loss": 0.0002, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 47.01862068965517, |
|
"grad_norm": 0.0005325938109308481, |
|
"learning_rate": 4.5977011494252875e-07, |
|
"loss": 0.0001, |
|
"step": 12510 |
|
}, |
|
{ |
|
"epoch": 47.019386973180076, |
|
"grad_norm": 0.0004166490107309073, |
|
"learning_rate": 4.5125585355470417e-07, |
|
"loss": 0.0002, |
|
"step": 12520 |
|
}, |
|
{ |
|
"epoch": 47.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.8466780185699463, |
|
"eval_runtime": 26.7574, |
|
"eval_samples_per_second": 1.682, |
|
"eval_steps_per_second": 1.682, |
|
"step": 12528 |
|
}, |
|
{ |
|
"epoch": 48.000153256704984, |
|
"grad_norm": 0.0003041029267478734, |
|
"learning_rate": 4.427415921668796e-07, |
|
"loss": 0.0001, |
|
"step": 12530 |
|
}, |
|
{ |
|
"epoch": 48.00091954022989, |
|
"grad_norm": 0.0010256686946377158, |
|
"learning_rate": 4.3422733077905495e-07, |
|
"loss": 0.0004, |
|
"step": 12540 |
|
}, |
|
{ |
|
"epoch": 48.00168582375479, |
|
"grad_norm": 0.009041458368301392, |
|
"learning_rate": 4.2571306939123036e-07, |
|
"loss": 0.0002, |
|
"step": 12550 |
|
}, |
|
{ |
|
"epoch": 48.002452107279694, |
|
"grad_norm": 0.012755746953189373, |
|
"learning_rate": 4.171988080034058e-07, |
|
"loss": 0.0001, |
|
"step": 12560 |
|
}, |
|
{ |
|
"epoch": 48.0032183908046, |
|
"grad_norm": 0.00051366031402722, |
|
"learning_rate": 4.0868454661558115e-07, |
|
"loss": 0.0001, |
|
"step": 12570 |
|
}, |
|
{ |
|
"epoch": 48.0039846743295, |
|
"grad_norm": 0.0010975806508213282, |
|
"learning_rate": 4.0017028522775656e-07, |
|
"loss": 0.0298, |
|
"step": 12580 |
|
}, |
|
{ |
|
"epoch": 48.004750957854405, |
|
"grad_norm": 0.0407705120742321, |
|
"learning_rate": 3.916560238399319e-07, |
|
"loss": 0.0002, |
|
"step": 12590 |
|
}, |
|
{ |
|
"epoch": 48.00551724137931, |
|
"grad_norm": 0.003167961724102497, |
|
"learning_rate": 3.831417624521073e-07, |
|
"loss": 0.1391, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 48.00628352490421, |
|
"grad_norm": 0.014415454119443893, |
|
"learning_rate": 3.7462750106428265e-07, |
|
"loss": 0.4358, |
|
"step": 12610 |
|
}, |
|
{ |
|
"epoch": 48.007049808429116, |
|
"grad_norm": 0.0003218734636902809, |
|
"learning_rate": 3.6611323967645807e-07, |
|
"loss": 0.0002, |
|
"step": 12620 |
|
}, |
|
{ |
|
"epoch": 48.00781609195402, |
|
"grad_norm": 0.001004243502393365, |
|
"learning_rate": 3.575989782886335e-07, |
|
"loss": 0.0, |
|
"step": 12630 |
|
}, |
|
{ |
|
"epoch": 48.00858237547893, |
|
"grad_norm": 0.000971637200564146, |
|
"learning_rate": 3.4908471690080885e-07, |
|
"loss": 0.0001, |
|
"step": 12640 |
|
}, |
|
{ |
|
"epoch": 48.009348659003834, |
|
"grad_norm": 0.007098360452800989, |
|
"learning_rate": 3.4057045551298427e-07, |
|
"loss": 0.1384, |
|
"step": 12650 |
|
}, |
|
{ |
|
"epoch": 48.01011494252874, |
|
"grad_norm": 0.0008312310674227774, |
|
"learning_rate": 3.320561941251597e-07, |
|
"loss": 0.0, |
|
"step": 12660 |
|
}, |
|
{ |
|
"epoch": 48.01088122605364, |
|
"grad_norm": 0.0006329029565677047, |
|
"learning_rate": 3.2354193273733505e-07, |
|
"loss": 0.0001, |
|
"step": 12670 |
|
}, |
|
{ |
|
"epoch": 48.011647509578545, |
|
"grad_norm": 0.008518343791365623, |
|
"learning_rate": 3.1502767134951047e-07, |
|
"loss": 0.0001, |
|
"step": 12680 |
|
}, |
|
{ |
|
"epoch": 48.01241379310345, |
|
"grad_norm": 0.00873009767383337, |
|
"learning_rate": 3.0651340996168583e-07, |
|
"loss": 0.0143, |
|
"step": 12690 |
|
}, |
|
{ |
|
"epoch": 48.01318007662835, |
|
"grad_norm": 0.0022083998192101717, |
|
"learning_rate": 2.9799914857386125e-07, |
|
"loss": 0.0001, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 48.013946360153255, |
|
"grad_norm": 0.0003249324799980968, |
|
"learning_rate": 2.894848871860366e-07, |
|
"loss": 0.0, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 48.01471264367816, |
|
"grad_norm": 0.0005248098750598729, |
|
"learning_rate": 2.8097062579821203e-07, |
|
"loss": 0.0, |
|
"step": 12720 |
|
}, |
|
{ |
|
"epoch": 48.01547892720306, |
|
"grad_norm": 0.028333090245723724, |
|
"learning_rate": 2.7245636441038745e-07, |
|
"loss": 0.0002, |
|
"step": 12730 |
|
}, |
|
{ |
|
"epoch": 48.016245210727966, |
|
"grad_norm": 0.0004154023772571236, |
|
"learning_rate": 2.639421030225628e-07, |
|
"loss": 0.0001, |
|
"step": 12740 |
|
}, |
|
{ |
|
"epoch": 48.01701149425288, |
|
"grad_norm": 0.0005421733949333429, |
|
"learning_rate": 2.5542784163473823e-07, |
|
"loss": 0.0001, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 48.01777777777778, |
|
"grad_norm": 0.000413823698181659, |
|
"learning_rate": 2.469135802469136e-07, |
|
"loss": 0.3994, |
|
"step": 12760 |
|
}, |
|
{ |
|
"epoch": 48.018544061302684, |
|
"grad_norm": 0.0012550768442451954, |
|
"learning_rate": 2.3839931885908898e-07, |
|
"loss": 0.0001, |
|
"step": 12770 |
|
}, |
|
{ |
|
"epoch": 48.01931034482759, |
|
"grad_norm": 0.0007684645242989063, |
|
"learning_rate": 2.2988505747126437e-07, |
|
"loss": 0.003, |
|
"step": 12780 |
|
}, |
|
{ |
|
"epoch": 48.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.9743099212646484, |
|
"eval_runtime": 26.0304, |
|
"eval_samples_per_second": 1.729, |
|
"eval_steps_per_second": 1.729, |
|
"step": 12789 |
|
}, |
|
{ |
|
"epoch": 49.00007662835249, |
|
"grad_norm": 0.03529129549860954, |
|
"learning_rate": 2.213707960834398e-07, |
|
"loss": 0.0001, |
|
"step": 12790 |
|
}, |
|
{ |
|
"epoch": 49.00084291187739, |
|
"grad_norm": 0.0020950573962181807, |
|
"learning_rate": 2.1285653469561518e-07, |
|
"loss": 0.0, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 49.001609195402295, |
|
"grad_norm": 0.04050696641206741, |
|
"learning_rate": 2.0434227330779057e-07, |
|
"loss": 0.0003, |
|
"step": 12810 |
|
}, |
|
{ |
|
"epoch": 49.002375478927206, |
|
"grad_norm": 0.0015282719396054745, |
|
"learning_rate": 1.9582801191996594e-07, |
|
"loss": 0.0001, |
|
"step": 12820 |
|
}, |
|
{ |
|
"epoch": 49.00314176245211, |
|
"grad_norm": 0.0007836720324121416, |
|
"learning_rate": 1.8731375053214133e-07, |
|
"loss": 0.0001, |
|
"step": 12830 |
|
}, |
|
{ |
|
"epoch": 49.00390804597701, |
|
"grad_norm": 0.00043607846600934863, |
|
"learning_rate": 1.7879948914431674e-07, |
|
"loss": 0.6463, |
|
"step": 12840 |
|
}, |
|
{ |
|
"epoch": 49.00467432950192, |
|
"grad_norm": 0.0005458514206111431, |
|
"learning_rate": 1.7028522775649214e-07, |
|
"loss": 0.4763, |
|
"step": 12850 |
|
}, |
|
{ |
|
"epoch": 49.00544061302682, |
|
"grad_norm": 0.000987923122011125, |
|
"learning_rate": 1.6177096636866753e-07, |
|
"loss": 0.0001, |
|
"step": 12860 |
|
}, |
|
{ |
|
"epoch": 49.006206896551724, |
|
"grad_norm": 0.0005801734514534473, |
|
"learning_rate": 1.5325670498084292e-07, |
|
"loss": 0.0001, |
|
"step": 12870 |
|
}, |
|
{ |
|
"epoch": 49.00697318007663, |
|
"grad_norm": 0.001657277811318636, |
|
"learning_rate": 1.447424435930183e-07, |
|
"loss": 0.9275, |
|
"step": 12880 |
|
}, |
|
{ |
|
"epoch": 49.00773946360153, |
|
"grad_norm": 0.0008287379168905318, |
|
"learning_rate": 1.3622818220519372e-07, |
|
"loss": 0.0001, |
|
"step": 12890 |
|
}, |
|
{ |
|
"epoch": 49.008505747126435, |
|
"grad_norm": 0.0003342766431160271, |
|
"learning_rate": 1.2771392081736911e-07, |
|
"loss": 0.0002, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 49.00927203065134, |
|
"grad_norm": 0.0006663193926215172, |
|
"learning_rate": 1.1919965942954449e-07, |
|
"loss": 0.0001, |
|
"step": 12910 |
|
}, |
|
{ |
|
"epoch": 49.01003831417624, |
|
"grad_norm": 0.00036574172554537654, |
|
"learning_rate": 1.106853980417199e-07, |
|
"loss": 0.0004, |
|
"step": 12920 |
|
}, |
|
{ |
|
"epoch": 49.01080459770115, |
|
"grad_norm": 304.3724060058594, |
|
"learning_rate": 1.0217113665389529e-07, |
|
"loss": 0.3712, |
|
"step": 12930 |
|
}, |
|
{ |
|
"epoch": 49.011570881226056, |
|
"grad_norm": 0.0047303359024226665, |
|
"learning_rate": 9.365687526607066e-08, |
|
"loss": 0.0001, |
|
"step": 12940 |
|
}, |
|
{ |
|
"epoch": 49.01233716475096, |
|
"grad_norm": 0.00043497199658304453, |
|
"learning_rate": 8.514261387824607e-08, |
|
"loss": 0.0001, |
|
"step": 12950 |
|
}, |
|
{ |
|
"epoch": 49.013103448275864, |
|
"grad_norm": 0.003613162087276578, |
|
"learning_rate": 7.662835249042146e-08, |
|
"loss": 0.5375, |
|
"step": 12960 |
|
}, |
|
{ |
|
"epoch": 49.01386973180077, |
|
"grad_norm": 0.0004304467875044793, |
|
"learning_rate": 6.811409110259686e-08, |
|
"loss": 0.0, |
|
"step": 12970 |
|
}, |
|
{ |
|
"epoch": 49.01463601532567, |
|
"grad_norm": 0.5795473456382751, |
|
"learning_rate": 5.9599829714772246e-08, |
|
"loss": 0.649, |
|
"step": 12980 |
|
}, |
|
{ |
|
"epoch": 49.015402298850574, |
|
"grad_norm": 0.0009736455394886434, |
|
"learning_rate": 5.108556832694764e-08, |
|
"loss": 0.0001, |
|
"step": 12990 |
|
}, |
|
{ |
|
"epoch": 49.01616858237548, |
|
"grad_norm": 0.0086960569024086, |
|
"learning_rate": 4.2571306939123034e-08, |
|
"loss": 0.0001, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 49.01693486590038, |
|
"grad_norm": 0.05347593501210213, |
|
"learning_rate": 3.405704555129843e-08, |
|
"loss": 0.0001, |
|
"step": 13010 |
|
}, |
|
{ |
|
"epoch": 49.017701149425285, |
|
"grad_norm": 411.26702880859375, |
|
"learning_rate": 2.554278416347382e-08, |
|
"loss": 0.4025, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 49.01846743295019, |
|
"grad_norm": 0.000454594410257414, |
|
"learning_rate": 1.7028522775649215e-08, |
|
"loss": 0.0001, |
|
"step": 13030 |
|
}, |
|
{ |
|
"epoch": 49.01923371647509, |
|
"grad_norm": 0.0010197683004662395, |
|
"learning_rate": 8.514261387824608e-09, |
|
"loss": 0.0196, |
|
"step": 13040 |
|
}, |
|
{ |
|
"epoch": 49.02, |
|
"grad_norm": 0.0016529200365766883, |
|
"learning_rate": 0.0, |
|
"loss": 0.0503, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 49.02, |
|
"eval_accuracy": 0.6444444444444445, |
|
"eval_loss": 2.9219369888305664, |
|
"eval_runtime": 29.0362, |
|
"eval_samples_per_second": 1.55, |
|
"eval_steps_per_second": 1.55, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 49.02, |
|
"step": 13050, |
|
"total_flos": 5.730289341462282e+19, |
|
"train_loss": 0.57228142680396, |
|
"train_runtime": 20584.6844, |
|
"train_samples_per_second": 0.634, |
|
"train_steps_per_second": 0.634 |
|
}, |
|
{ |
|
"epoch": 49.02, |
|
"eval_accuracy": 0.7111111111111111, |
|
"eval_loss": 1.6580544710159302, |
|
"eval_runtime": 26.5052, |
|
"eval_samples_per_second": 1.698, |
|
"eval_steps_per_second": 1.698, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 49.02, |
|
"eval_accuracy": 0.7111111111111111, |
|
"eval_loss": 1.658054232597351, |
|
"eval_runtime": 26.6025, |
|
"eval_samples_per_second": 1.692, |
|
"eval_steps_per_second": 1.692, |
|
"step": 13050 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 13050, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 9223372036854775807, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.730289341462282e+19, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|