{
  "best_metric": 1.3443834781646729,
  "best_model_checkpoint": "./qlora-out/checkpoint-1488",
  "epoch": 0.7503782148260212,
  "eval_steps": 248,
  "global_step": 1488,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 2.0000000000000003e-06, "loss": 1.3547, "step": 1},
    {"epoch": 0.0, "eval_loss": 1.5788122415542603, "eval_runtime": 99.5564, "eval_samples_per_second": 1.165, "eval_steps_per_second": 1.165, "step": 1},
    {"epoch": 0.0, "learning_rate": 4.000000000000001e-06, "loss": 1.5678, "step": 2},
    {"epoch": 0.0, "learning_rate": 6e-06, "loss": 1.8406, "step": 3},
    {"epoch": 0.0, "learning_rate": 8.000000000000001e-06, "loss": 2.2337, "step": 4},
    {"epoch": 0.0, "learning_rate": 1e-05, "loss": 1.6725, "step": 5},
    {"epoch": 0.0, "learning_rate": 1.2e-05, "loss": 1.4016, "step": 6},
    {"epoch": 0.0, "learning_rate": 1.4e-05, "loss": 1.3171, "step": 7},
    {"epoch": 0.0, "learning_rate": 1.6000000000000003e-05, "loss": 1.4367, "step": 8},
    {"epoch": 0.0, "learning_rate": 1.8e-05, "loss": 1.4082, "step": 9},
    {"epoch": 0.01, "learning_rate": 2e-05, "loss": 1.5169, "step": 10},
    {"epoch": 0.01, "learning_rate": 1.9999996846759028e-05, "loss": 1.6735, "step": 11},
    {"epoch": 0.01, "learning_rate": 1.99999873870381e-05, "loss": 1.3341, "step": 12},
    {"epoch": 0.01, "learning_rate": 1.9999971620843182e-05, "loss": 1.7104, "step": 13},
    {"epoch": 0.01, "learning_rate": 1.9999949548184215e-05, "loss": 1.2838, "step": 14},
    {"epoch": 0.01, "learning_rate": 1.9999921169075117e-05, "loss": 1.4659, "step": 15},
    {"epoch": 0.01, "learning_rate": 1.9999886483533792e-05, "loss": 1.504, "step": 16},
    {"epoch": 0.01, "learning_rate": 1.999984549158211e-05, "loss": 1.7891, "step": 17},
    {"epoch": 0.01, "learning_rate": 1.9999798193245924e-05, "loss": 1.7854, "step": 18},
    {"epoch": 0.01, "learning_rate": 1.9999744588555065e-05, "loss": 1.7957, "step": 19},
    {"epoch": 0.01, "learning_rate": 1.9999684677543332e-05, "loss": 1.5033, "step": 20},
    {"epoch": 0.01, "learning_rate": 1.9999618460248515e-05, "loss": 1.5491, "step": 21},
    {"epoch": 0.01, "learning_rate": 1.9999545936712364e-05, "loss": 1.5846, "step": 22},
    {"epoch": 0.01, "learning_rate": 1.9999467106980627e-05, "loss": 1.6081, "step": 23},
    {"epoch": 0.01, "learning_rate": 1.9999381971103015e-05, "loss": 1.0283, "step": 24},
    {"epoch": 0.01, "learning_rate": 1.9999290529133215e-05, "loss": 2.0389, "step": 25},
    {"epoch": 0.01, "learning_rate": 1.9999192781128893e-05, "loss": 1.3088, "step": 26},
    {"epoch": 0.01, "learning_rate": 1.99990887271517e-05, "loss": 1.6174, "step": 27},
    {"epoch": 0.01, "learning_rate": 1.9998978367267258e-05, "loss": 1.4197, "step": 28},
    {"epoch": 0.01, "learning_rate": 1.9998861701545155e-05, "loss": 1.2337, "step": 29},
    {"epoch": 0.02, "learning_rate": 1.9998738730058974e-05, "loss": 1.3482, "step": 30},
    {"epoch": 0.02, "learning_rate": 1.999860945288627e-05, "loss": 1.6648, "step": 31},
    {"epoch": 0.02, "learning_rate": 1.9998473870108565e-05, "loss": 1.4751, "step": 32},
    {"epoch": 0.02, "learning_rate": 1.999833198181137e-05, "loss": 2.0115, "step": 33},
    {"epoch": 0.02, "learning_rate": 1.9998183788084155e-05, "loss": 1.1591, "step": 34},
    {"epoch": 0.02, "learning_rate": 1.9998029289020388e-05, "loss": 1.4557, "step": 35},
    {"epoch": 0.02, "learning_rate": 1.9997868484717504e-05, "loss": 1.8469, "step": 36},
    {"epoch": 0.02, "learning_rate": 1.999770137527691e-05, "loss": 1.7395, "step": 37},
    {"epoch": 0.02, "learning_rate": 1.9997527960803994e-05, "loss": 1.1644, "step": 38},
    {"epoch": 0.02, "learning_rate": 1.999734824140812e-05, "loss": 1.3629, "step": 39},
    {"epoch": 0.02, "learning_rate": 1.999716221720263e-05, "loss": 1.1913, "step": 40},
    {"epoch": 0.02, "learning_rate": 1.9996969888304835e-05, "loss": 1.9801, "step": 41},
    {"epoch": 0.02, "learning_rate": 1.999677125483603e-05, "loss": 1.1722, "step": 42},
    {"epoch": 0.02, "learning_rate": 1.9996566316921485e-05, "loss": 1.4999, "step": 43},
    {"epoch": 0.02, "learning_rate": 1.9996355074690438e-05, "loss": 1.5593, "step": 44},
    {"epoch": 0.02, "learning_rate": 1.999613752827611e-05, "loss": 1.7799, "step": 45},
    {"epoch": 0.02, "learning_rate": 1.9995913677815705e-05, "loss": 1.3714, "step": 46},
    {"epoch": 0.02, "learning_rate": 1.9995683523450382e-05, "loss": 1.428, "step": 47},
    {"epoch": 0.02, "learning_rate": 1.9995447065325292e-05, "loss": 1.4206, "step": 48},
    {"epoch": 0.02, "learning_rate": 1.9995204303589557e-05, "loss": 1.6583, "step": 49},
    {"epoch": 0.03, "learning_rate": 1.9994955238396276e-05, "loss": 1.4349, "step": 50},
    {"epoch": 0.03, "learning_rate": 1.9994699869902516e-05, "loss": 1.1203, "step": 51},
    {"epoch": 0.03, "learning_rate": 1.999443819826933e-05, "loss": 1.2595, "step": 52},
    {"epoch": 0.03, "learning_rate": 1.999417022366174e-05, "loss": 1.7085, "step": 53},
    {"epoch": 0.03, "learning_rate": 1.9993895946248744e-05, "loss": 1.4112, "step": 54},
    {"epoch": 0.03, "learning_rate": 1.9993615366203313e-05, "loss": 1.1461, "step": 55},
    {"epoch": 0.03, "learning_rate": 1.9993328483702393e-05, "loss": 1.3644, "step": 56},
    {"epoch": 0.03, "learning_rate": 1.999303529892691e-05, "loss": 1.2273, "step": 57},
    {"epoch": 0.03, "learning_rate": 1.9992735812061757e-05, "loss": 1.2104, "step": 58},
    {"epoch": 0.03, "learning_rate": 1.999243002329581e-05, "loss": 1.6421, "step": 59},
    {"epoch": 0.03, "learning_rate": 1.9992117932821906e-05, "loss": 1.3875, "step": 60},
    {"epoch": 0.03, "learning_rate": 1.9991799540836867e-05, "loss": 1.4965, "step": 61},
    {"epoch": 0.03, "learning_rate": 1.999147484754149e-05, "loss": 1.304, "step": 62},
    {"epoch": 0.03, "learning_rate": 1.9991143853140543e-05, "loss": 1.5476, "step": 63},
    {"epoch": 0.03, "learning_rate": 1.9990806557842758e-05, "loss": 1.609, "step": 64},
    {"epoch": 0.03, "learning_rate": 1.999046296186086e-05, "loss": 1.5958, "step": 65},
    {"epoch": 0.03, "learning_rate": 1.9990113065411532e-05, "loss": 1.6518, "step": 66},
    {"epoch": 0.03, "learning_rate": 1.9989756868715435e-05, "loss": 1.6508, "step": 67},
    {"epoch": 0.03, "learning_rate": 1.9989394371997205e-05, "loss": 1.6813, "step": 68},
    {"epoch": 0.03, "learning_rate": 1.9989025575485453e-05, "loss": 1.5673, "step": 69},
    {"epoch": 0.04, "learning_rate": 1.9988650479412754e-05, "loss": 1.2511, "step": 70},
    {"epoch": 0.04, "learning_rate": 1.9988269084015668e-05, "loss": 1.6433, "step": 71},
    {"epoch": 0.04, "learning_rate": 1.9987881389534715e-05, "loss": 1.7642, "step": 72},
    {"epoch": 0.04, "learning_rate": 1.99874873962144e-05, "loss": 1.2926, "step": 73},
    {"epoch": 0.04, "learning_rate": 1.9987087104303188e-05, "loss": 1.1941, "step": 74},
    {"epoch": 0.04, "learning_rate": 1.9986680514053526e-05, "loss": 1.6356, "step": 75},
    {"epoch": 0.04, "learning_rate": 1.998626762572183e-05, "loss": 1.6645, "step": 76},
    {"epoch": 0.04, "learning_rate": 1.9985848439568486e-05, "loss": 1.0501, "step": 77},
    {"epoch": 0.04, "learning_rate": 1.998542295585785e-05, "loss": 1.1477, "step": 78},
    {"epoch": 0.04, "learning_rate": 1.998499117485826e-05, "loss": 1.1104, "step": 79},
    {"epoch": 0.04, "learning_rate": 1.998455309684201e-05, "loss": 1.2144, "step": 80},
    {"epoch": 0.04, "learning_rate": 1.9984108722085378e-05, "loss": 1.6791, "step": 81},
    {"epoch": 0.04, "learning_rate": 1.998365805086861e-05, "loss": 1.2276, "step": 82},
    {"epoch": 0.04, "learning_rate": 1.998320108347591e-05, "loss": 1.6222, "step": 83},
    {"epoch": 0.04, "learning_rate": 1.998273782019548e-05, "loss": 1.6216, "step": 84},
    {"epoch": 0.04, "learning_rate": 1.9982268261319462e-05, "loss": 1.332, "step": 85},
    {"epoch": 0.04, "learning_rate": 1.9981792407143988e-05, "loss": 1.3808, "step": 86},
    {"epoch": 0.04, "learning_rate": 1.9981310257969158e-05, "loss": 1.4833, "step": 87},
    {"epoch": 0.04, "learning_rate": 1.9980821814099033e-05, "loss": 1.5134, "step": 88},
    {"epoch": 0.04, "learning_rate": 1.998032707584165e-05, "loss": 1.6795, "step": 89},
    {"epoch": 0.05, "learning_rate": 1.997982604350902e-05, "loss": 1.1978, "step": 90},
    {"epoch": 0.05, "learning_rate": 1.9979318717417112e-05, "loss": 1.6426, "step": 91},
    {"epoch": 0.05, "learning_rate": 1.997880509788587e-05, "loss": 1.5072, "step": 92},
    {"epoch": 0.05, "learning_rate": 1.9978285185239215e-05, "loss": 1.5113, "step": 93},
    {"epoch": 0.05, "learning_rate": 1.997775897980502e-05, "loss": 1.6025, "step": 94},
    {"epoch": 0.05, "learning_rate": 1.997722648191514e-05, "loss": 1.5437, "step": 95},
    {"epoch": 0.05, "learning_rate": 1.9976687691905394e-05, "loss": 0.8807, "step": 96},
    {"epoch": 0.05, "learning_rate": 1.9976142610115567e-05, "loss": 1.2017, "step": 97},
    {"epoch": 0.05, "learning_rate": 1.9975591236889414e-05, "loss": 1.571, "step": 98},
    {"epoch": 0.05, "learning_rate": 1.997503357257466e-05, "loss": 1.2984, "step": 99},
    {"epoch": 0.05, "learning_rate": 1.9974469617522992e-05, "loss": 1.6714, "step": 100},
    {"epoch": 0.05, "learning_rate": 1.997389937209007e-05, "loss": 1.8847, "step": 101},
    {"epoch": 0.05, "learning_rate": 1.9973322836635517e-05, "loss": 1.2481, "step": 102},
    {"epoch": 0.05, "learning_rate": 1.9972740011522927e-05, "loss": 1.3737, "step": 103},
    {"epoch": 0.05, "learning_rate": 1.997215089711985e-05, "loss": 1.1397, "step": 104},
    {"epoch": 0.05, "learning_rate": 1.9971555493797817e-05, "loss": 0.7657, "step": 105},
    {"epoch": 0.05, "learning_rate": 1.9970953801932313e-05, "loss": 1.2579, "step": 106},
    {"epoch": 0.05, "learning_rate": 1.9970345821902795e-05, "loss": 1.588, "step": 107},
    {"epoch": 0.05, "learning_rate": 1.996973155409269e-05, "loss": 1.0317, "step": 108},
    {"epoch": 0.05, "learning_rate": 1.996911099888938e-05, "loss": 1.6896, "step": 109},
    {"epoch": 0.06, "learning_rate": 1.9968484156684215e-05, "loss": 1.7309, "step": 110},
    {"epoch": 0.06, "learning_rate": 1.996785102787252e-05, "loss": 1.0569, "step": 111},
    {"epoch": 0.06, "learning_rate": 1.9967211612853566e-05, "loss": 1.8228, "step": 112},
    {"epoch": 0.06, "learning_rate": 1.9966565912030607e-05, "loss": 1.4042, "step": 113},
    {"epoch": 0.06, "learning_rate": 1.9965913925810847e-05, "loss": 1.2318, "step": 114},
    {"epoch": 0.06, "learning_rate": 1.9965255654605466e-05, "loss": 1.6043, "step": 115},
    {"epoch": 0.06, "learning_rate": 1.99645910988296e-05, "loss": 1.7256, "step": 116},
    {"epoch": 0.06, "learning_rate": 1.9963920258902344e-05, "loss": 1.477, "step": 117},
    {"epoch": 0.06, "learning_rate": 1.996324313524677e-05, "loss": 1.4139, "step": 118},
    {"epoch": 0.06, "learning_rate": 1.99625597282899e-05, "loss": 1.4781, "step": 119},
    {"epoch": 0.06, "learning_rate": 1.9961870038462727e-05, "loss": 1.1951, "step": 120},
    {"epoch": 0.06, "learning_rate": 1.99611740662002e-05, "loss": 1.2997, "step": 121},
    {"epoch": 0.06, "learning_rate": 1.996047181194123e-05, "loss": 1.2891, "step": 122},
    {"epoch": 0.06, "learning_rate": 1.99597632761287e-05, "loss": 2.083, "step": 123},
    {"epoch": 0.06, "learning_rate": 1.995904845920944e-05, "loss": 1.3798, "step": 124},
    {"epoch": 0.06, "learning_rate": 1.9958327361634248e-05, "loss": 1.3713, "step": 125},
    {"epoch": 0.06, "learning_rate": 1.995759998385789e-05, "loss": 1.2818, "step": 126},
    {"epoch": 0.06, "learning_rate": 1.9956866326339076e-05, "loss": 2.1837, "step": 127},
    {"epoch": 0.06, "learning_rate": 1.9956126389540493e-05, "loss": 1.3562, "step": 128},
    {"epoch": 0.07, "learning_rate": 1.9955380173928777e-05, "loss": 1.4653, "step": 129},
    {"epoch": 0.07, "learning_rate": 1.995462767997453e-05, "loss": 1.5638, "step": 130},
    {"epoch": 0.07, "learning_rate": 1.995386890815231e-05, "loss": 1.3003, "step": 131},
    {"epoch": 0.07, "learning_rate": 1.9953103858940633e-05, "loss": 1.676, "step": 132},
    {"epoch": 0.07, "learning_rate": 1.995233253282198e-05, "loss": 1.5618, "step": 133},
    {"epoch": 0.07, "learning_rate": 1.9951554930282782e-05, "loss": 0.9876, "step": 134},
    {"epoch": 0.07, "learning_rate": 1.9950771051813435e-05, "loss": 1.3107, "step": 135},
    {"epoch": 0.07, "learning_rate": 1.994998089790829e-05, "loss": 1.3858, "step": 136},
    {"epoch": 0.07, "learning_rate": 1.994918446906566e-05, "loss": 1.7285, "step": 137},
    {"epoch": 0.07, "learning_rate": 1.9948381765787802e-05, "loss": 1.5864, "step": 138},
    {"epoch": 0.07, "learning_rate": 1.994757278858095e-05, "loss": 1.6605, "step": 139},
    {"epoch": 0.07, "learning_rate": 1.994675753795528e-05, "loss": 1.3543, "step": 140},
    {"epoch": 0.07, "learning_rate": 1.9945936014424924e-05, "loss": 1.481, "step": 141},
    {"epoch": 0.07, "learning_rate": 1.9945108218507976e-05, "loss": 1.5271, "step": 142},
    {"epoch": 0.07, "learning_rate": 1.994427415072649e-05, "loss": 1.4439, "step": 143},
    {"epoch": 0.07, "learning_rate": 1.9943433811606465e-05, "loss": 1.5647, "step": 144},
    {"epoch": 0.07, "learning_rate": 1.994258720167786e-05, "loss": 1.2134, "step": 145},
    {"epoch": 0.07, "learning_rate": 1.9941734321474586e-05, "loss": 1.3431, "step": 146},
    {"epoch": 0.07, "learning_rate": 1.994087517153451e-05, "loss": 1.6574, "step": 147},
    {"epoch": 0.07, "learning_rate": 1.9940009752399462e-05, "loss": 1.3856, "step": 148},
    {"epoch": 0.08, "learning_rate": 1.9939138064615205e-05, "loss": 1.2281, "step": 149},
    {"epoch": 0.08, "learning_rate": 1.9938260108731474e-05, "loss": 1.7524, "step": 150},
    {"epoch": 0.08, "learning_rate": 1.9937375885301948e-05, "loss": 1.4849, "step": 151},
    {"epoch": 0.08, "learning_rate": 1.9936485394884263e-05, "loss": 1.6183, "step": 152},
    {"epoch": 0.08, "learning_rate": 1.9935588638040005e-05, "loss": 1.854, "step": 153},
    {"epoch": 0.08, "learning_rate": 1.993468561533471e-05, "loss": 1.6805, "step": 154},
    {"epoch": 0.08, "learning_rate": 1.993377632733787e-05, "loss": 1.3257, "step": 155},
    {"epoch": 0.08, "learning_rate": 1.993286077462292e-05, "loss": 1.5286, "step": 156},
    {"epoch": 0.08, "learning_rate": 1.993193895776726e-05, "loss": 1.7098, "step": 157},
    {"epoch": 0.08, "learning_rate": 1.993101087735223e-05, "loss": 2.0118, "step": 158},
    {"epoch": 0.08, "learning_rate": 1.9930076533963117e-05, "loss": 1.0722, "step": 159},
    {"epoch": 0.08, "learning_rate": 1.992913592818917e-05, "loss": 1.1482, "step": 160},
    {"epoch": 0.08, "learning_rate": 1.9928189060623574e-05, "loss": 1.5841, "step": 161},
    {"epoch": 0.08, "learning_rate": 1.9927235931863477e-05, "loss": 1.3712, "step": 162},
    {"epoch": 0.08, "learning_rate": 1.992627654250996e-05, "loss": 1.563, "step": 163},
    {"epoch": 0.08, "learning_rate": 1.992531089316806e-05, "loss": 1.1601, "step": 164},
    {"epoch": 0.08, "learning_rate": 1.9924338984446773e-05, "loss": 1.5141, "step": 165},
    {"epoch": 0.08, "learning_rate": 1.9923360816959016e-05, "loss": 1.6872, "step": 166},
    {"epoch": 0.08, "learning_rate": 1.992237639132168e-05, "loss": 1.6658, "step": 167},
    {"epoch": 0.08, "learning_rate": 1.9921385708155588e-05, "loss": 1.5354, "step": 168},
    {"epoch": 0.09, "learning_rate": 1.9920388768085513e-05, "loss": 1.457, "step": 169},
    {"epoch": 0.09, "learning_rate": 1.9919385571740172e-05, "loss": 1.5823, "step": 170},
    {"epoch": 0.09, "learning_rate": 1.991837611975223e-05, "loss": 1.5392, "step": 171},
    {"epoch": 0.09, "learning_rate": 1.9917360412758295e-05, "loss": 1.3985, "step": 172},
    {"epoch": 0.09, "learning_rate": 1.9916338451398923e-05, "loss": 1.1896, "step": 173},
    {"epoch": 0.09, "learning_rate": 1.9915310236318607e-05, "loss": 1.2652, "step": 174},
    {"epoch": 0.09, "learning_rate": 1.9914275768165793e-05, "loss": 1.0745, "step": 175},
    {"epoch": 0.09, "learning_rate": 1.991323504759287e-05, "loss": 1.8471, "step": 176},
    {"epoch": 0.09, "learning_rate": 1.991218807525616e-05, "loss": 1.871, "step": 177},
    {"epoch": 0.09, "learning_rate": 1.9911134851815935e-05, "loss": 1.3217, "step": 178},
    {"epoch": 0.09, "learning_rate": 1.9910075377936414e-05, "loss": 1.4894, "step": 179},
    {"epoch": 0.09, "learning_rate": 1.9909009654285748e-05, "loss": 1.1755, "step": 180},
    {"epoch": 0.09, "learning_rate": 1.9907937681536032e-05, "loss": 1.6328, "step": 181},
    {"epoch": 0.09, "learning_rate": 1.9906859460363307e-05, "loss": 1.4971, "step": 182},
    {"epoch": 0.09, "learning_rate": 1.9905774991447552e-05, "loss": 1.4121, "step": 183},
    {"epoch": 0.09, "learning_rate": 1.9904684275472684e-05, "loss": 1.5965, "step": 184},
    {"epoch": 0.09, "learning_rate": 1.9903587313126557e-05, "loss": 1.21, "step": 185},
    {"epoch": 0.09, "learning_rate": 1.9902484105100974e-05, "loss": 1.8833, "step": 186},
    {"epoch": 0.09, "learning_rate": 1.9901374652091666e-05, "loss": 1.0323, "step": 187},
    {"epoch": 0.09, "learning_rate": 1.9900258954798315e-05, "loss": 1.5577, "step": 188},
    {"epoch": 0.1, "learning_rate": 1.989913701392453e-05, "loss": 1.8889, "step": 189},
    {"epoch": 0.1, "learning_rate": 1.9898008830177856e-05, "loss": 1.6154, "step": 190},
    {"epoch": 0.1, "learning_rate": 1.9896874404269786e-05, "loss": 1.5606, "step": 191},
    {"epoch": 0.1, "learning_rate": 1.989573373691574e-05, "loss": 1.2633, "step": 192},
    {"epoch": 0.1, "learning_rate": 1.989458682883508e-05, "loss": 1.2808, "step": 193},
    {"epoch": 0.1, "learning_rate": 1.9893433680751105e-05, "loss": 1.5281, "step": 194},
    {"epoch": 0.1, "learning_rate": 1.9892274293391035e-05, "loss": 1.1487, "step": 195},
    {"epoch": 0.1, "learning_rate": 1.9891108667486047e-05, "loss": 1.1523, "step": 196},
    {"epoch": 0.1, "learning_rate": 1.9889936803771237e-05, "loss": 1.3093, "step": 197},
    {"epoch": 0.1, "learning_rate": 1.9888758702985637e-05, "loss": 1.249, "step": 198},
    {"epoch": 0.1, "learning_rate": 1.9887574365872214e-05, "loss": 1.1357, "step": 199},
    {"epoch": 0.1, "learning_rate": 1.988638379317787e-05, "loss": 1.5416, "step": 200},
    {"epoch": 0.1, "learning_rate": 1.988518698565344e-05, "loss": 1.63, "step": 201},
    {"epoch": 0.1, "learning_rate": 1.9883983944053678e-05, "loss": 1.194, "step": 202},
    {"epoch": 0.1, "learning_rate": 1.9882774669137293e-05, "loss": 1.281, "step": 203},
    {"epoch": 0.1, "learning_rate": 1.9881559161666905e-05, "loss": 1.1447, "step": 204},
    {"epoch": 0.1, "learning_rate": 1.988033742240907e-05, "loss": 1.7913, "step": 205},
    {"epoch": 0.1, "learning_rate": 1.9879109452134283e-05, "loss": 1.5167, "step": 206},
    {"epoch": 0.1, "learning_rate": 1.9877875251616954e-05, "loss": 1.2428, "step": 207},
    {"epoch": 0.1, "learning_rate": 1.9876634821635432e-05, "loss": 1.3816, "step": 208},
    {"epoch": 0.11, "learning_rate": 1.9875388162971992e-05, "loss": 1.4307, "step": 209},
    {"epoch": 0.11, "learning_rate": 1.9874135276412837e-05, "loss": 1.3444, "step": 210},
    {"epoch": 0.11, "learning_rate": 1.98728761627481e-05, "loss": 1.3973, "step": 211},
    {"epoch": 0.11, "learning_rate": 1.9871610822771835e-05, "loss": 1.4477, "step": 212},
    {"epoch": 0.11, "learning_rate": 1.9870339257282028e-05, "loss": 1.0952, "step": 213},
    {"epoch": 0.11, "learning_rate": 1.9869061467080587e-05, "loss": 1.582, "step": 214},
    {"epoch": 0.11, "learning_rate": 1.9867777452973352e-05, "loss": 1.4796, "step": 215},
    {"epoch": 0.11, "learning_rate": 1.9866487215770084e-05, "loss": 0.996, "step": 216},
    {"epoch": 0.11, "learning_rate": 1.9865190756284467e-05, "loss": 1.2355, "step": 217},
    {"epoch": 0.11, "learning_rate": 1.9863888075334113e-05, "loss": 1.3907, "step": 218},
    {"epoch": 0.11, "learning_rate": 1.986257917374055e-05, "loss": 1.5925, "step": 219},
    {"epoch": 0.11, "learning_rate": 1.986126405232924e-05, "loss": 1.3692, "step": 220},
    {"epoch": 0.11, "learning_rate": 1.9859942711929557e-05, "loss": 1.2802, "step": 221},
    {"epoch": 0.11, "learning_rate": 1.9858615153374808e-05, "loss": 1.3644, "step": 222},
    {"epoch": 0.11, "learning_rate": 1.985728137750221e-05, "loss": 1.1744, "step": 223},
    {"epoch": 0.11, "learning_rate": 1.985594138515291e-05, "loss": 1.4946, "step": 224},
    {"epoch": 0.11, "learning_rate": 1.9854595177171968e-05, "loss": 1.6203, "step": 225},
    {"epoch": 0.11, "learning_rate": 1.9853242754408376e-05, "loss": 1.5148, "step": 226},
    {"epoch": 0.11, "learning_rate": 1.9851884117715027e-05, "loss": 1.7796, "step": 227},
    {"epoch": 0.11, "learning_rate": 1.9850519267948747e-05, "loss": 1.0399, "step": 228},
    {"epoch": 0.12, "learning_rate": 1.9849148205970275e-05, "loss": 1.1837, "step": 229},
    {"epoch": 0.12, "learning_rate": 1.984777093264427e-05, "loss": 1.3893, "step": 230},
    {"epoch": 0.12, "learning_rate": 1.9846387448839308e-05, "loss": 1.2283, "step": 231},
    {"epoch": 0.12, "learning_rate": 1.9844997755427875e-05, "loss": 1.0594, "step": 232},
    {"epoch": 0.12, "learning_rate": 1.984360185328639e-05, "loss": 1.5023, "step": 233},
    {"epoch": 0.12, "learning_rate": 1.9842199743295164e-05, "loss": 1.619, "step": 234},
    {"epoch": 0.12, "learning_rate": 1.984079142633844e-05, "loss": 0.9526, "step": 235},
    {"epoch": 0.12, "learning_rate": 1.983937690330437e-05, "loss": 1.3427, "step": 236},
    {"epoch": 0.12, "learning_rate": 1.983795617508502e-05, "loss": 1.1187, "step": 237},
    {"epoch": 0.12, "learning_rate": 1.9836529242576373e-05, "loss": 1.2346, "step": 238},
    {"epoch": 0.12, "learning_rate": 1.983509610667832e-05, "loss": 1.8011, "step": 239},
    {"epoch": 0.12, "learning_rate": 1.983365676829466e-05, "loss": 1.1902, "step": 240},
    {"epoch": 0.12, "learning_rate": 1.983221122833312e-05, "loss": 1.3564, "step": 241},
    {"epoch": 0.12, "learning_rate": 1.983075948770532e-05, "loss": 1.2038, "step": 242},
    {"epoch": 0.12, "learning_rate": 1.9829301547326794e-05, "loss": 1.3158, "step": 243},
    {"epoch": 0.12, "learning_rate": 1.9827837408116996e-05, "loss": 1.6507, "step": 244},
    {"epoch": 0.12, "learning_rate": 1.9826367070999284e-05, "loss": 1.8778, "step": 245},
    {"epoch": 0.12, "learning_rate": 1.9824890536900917e-05, "loss": 1.3691, "step": 246},
    {"epoch": 0.12, "learning_rate": 1.982340780675307e-05, "loss": 1.3956, "step": 247},
    {"epoch": 0.13, "learning_rate": 1.982191888149083e-05, "loss": 1.275, "step": 248},
    {"epoch": 0.13, "eval_loss": 1.4743634462356567, "eval_runtime": 99.6539, "eval_samples_per_second": 1.164, "eval_steps_per_second": 1.164, "step": 248},
    {"epoch": 0.13, "learning_rate": 1.9820423762053178e-05, "loss": 1.3582, "step": 249},
    {"epoch": 0.13, "learning_rate": 1.981892244938301e-05, "loss": 1.3869, "step": 250},
    {"epoch": 0.13, "learning_rate": 1.9817414944427133e-05, "loss": 1.2653, "step": 251},
    {"epoch": 0.13, "learning_rate": 1.9815901248136242e-05, "loss": 1.3705, "step": 252},
    {"epoch": 0.13, "learning_rate": 1.9814381361464953e-05, "loss": 1.5251, "step": 253},
    {"epoch": 0.13, "learning_rate": 1.9812855285371778e-05, "loss": 1.5613, "step": 254},
    {"epoch": 0.13, "learning_rate": 1.9811323020819136e-05, "loss": 1.3138, "step": 255},
    {"epoch": 0.13, "learning_rate": 1.980978456877334e-05, "loss": 1.0931, "step": 256},
    {"epoch": 0.13, "learning_rate": 1.9808239930204625e-05, "loss": 1.4202, "step": 257},
    {"epoch": 0.13, "learning_rate": 1.98066891060871e-05, "loss": 1.2566, "step": 258},
    {"epoch": 0.13, "learning_rate": 1.98051320973988e-05, "loss": 1.1673, "step": 259},
    {"epoch": 0.13, "learning_rate": 1.9803568905121647e-05, "loss": 1.2889, "step": 260},
    {"epoch": 0.13, "learning_rate": 1.980199953024146e-05, "loss": 1.3371, "step": 261},
    {"epoch": 0.13, "learning_rate": 1.9800423973747972e-05, "loss": 1.4299, "step": 262},
    {"epoch": 0.13, "learning_rate": 1.9798842236634797e-05, "loss": 1.4825, "step": 263},
    {"epoch": 0.13, "learning_rate": 1.9797254319899453e-05, "loss": 1.2745, "step": 264},
    {"epoch": 0.13, "learning_rate": 1.979566022454337e-05, "loss": 1.4379, "step": 265},
    {"epoch": 0.13, "learning_rate": 1.9794059951571848e-05, "loss": 1.386, "step": 266},
    {"epoch": 0.13, "learning_rate": 1.97924535019941e-05, "loss": 1.2546, "step": 267},
    {"epoch": 0.14, "learning_rate": 1.979084087682323e-05, "loss": 1.2028, "step": 268},
    {"epoch": 0.14, "learning_rate": 1.978922207707624e-05, "loss": 1.1725, "step": 269},
    {"epoch": 0.14, "learning_rate": 1.978759710377402e-05, "loss": 1.6772, "step": 270},
    {"epoch": 0.14, "learning_rate": 1.9785965957941362e-05, "loss": 1.555, "step": 271},
    {"epoch": 0.14, "learning_rate": 1.978432864060694e-05, "loss": 1.6195, "step": 272},
    {"epoch": 0.14, "learning_rate": 1.9782685152803326e-05, "loss": 1.4591, "step": 273},
    {"epoch": 0.14, "learning_rate": 1.978103549556698e-05, "loss": 1.2727, "step": 274},
    {"epoch": 0.14, "learning_rate": 1.9779379669938265e-05, "loss": 1.3214, "step": 275},
    {"epoch": 0.14, "learning_rate": 1.9777717676961412e-05, "loss": 1.6267, "step": 276},
    {"epoch": 0.14, "learning_rate": 1.977604951768456e-05, "loss": 1.1487, "step": 277},
    {"epoch": 0.14, "learning_rate": 1.977437519315973e-05, "loss": 1.8019, "step": 278},
    {"epoch": 0.14, "learning_rate": 1.9772694704442836e-05, "loss": 1.3772, "step": 279},
    {"epoch": 0.14, "learning_rate": 1.977100805259367e-05, "loss": 1.1348, "step": 280},
    {"epoch": 0.14, "learning_rate": 1.9769315238675916e-05, "loss": 1.209, "step": 281},
    {"epoch": 0.14, "learning_rate": 1.9767616263757146e-05, "loss": 1.4235, "step": 282},
    {"epoch": 0.14, "learning_rate": 1.9765911128908813e-05, "loss": 1.2202, "step": 283},
    {"epoch": 0.14, "learning_rate": 1.976419983520626e-05, "loss": 1.2418, "step": 284},
    {"epoch": 0.14, "learning_rate": 1.976248238372871e-05, "loss": 1.3901, "step": 285},
    {"epoch": 0.14, "learning_rate": 1.9760758775559275e-05, "loss": 1.1207, "step": 286},
    {"epoch": 0.14, "learning_rate": 1.9759029011784936e-05, "loss": 1.0332, "step": 287},
    {"epoch": 0.15, "learning_rate": 1.9757293093496573e-05, "loss": 1.4162, "step": 288},
    {"epoch": 0.15, "learning_rate": 1.9755551021788934e-05, "loss": 1.5068, "step": 289},
    {"epoch": 0.15, "learning_rate": 1.975380279776066e-05, "loss": 1.5863, "step": 290},
    {"epoch": 0.15, "learning_rate": 1.9752048422514262e-05, "loss": 1.4527, "step": 291},
    {"epoch": 0.15, "learning_rate": 1.9750287897156136e-05, "loss": 1.2066, "step": 292},
    {"epoch": 0.15, "learning_rate": 1.974852122279655e-05, "loss": 1.2543, "step": 293},
    {"epoch": 0.15, "learning_rate": 1.9746748400549653e-05, "loss": 1.455, "step": 294},
    {"epoch": 0.15, "learning_rate": 1.9744969431533474e-05, "loss": 1.4658, "step": 295},
    {"epoch": 0.15, "learning_rate": 1.9743184316869924e-05, "loss": 1.2493, "step": 296},
    {"epoch": 0.15, "learning_rate": 1.974139305768477e-05, "loss": 1.4861, "step": 297},
    {"epoch": 0.15, "learning_rate": 1.9739595655107675e-05, "loss": 1.6473, "step": 298},
    {"epoch": 0.15, "learning_rate": 1.9737792110272167e-05, "loss": 1.4982, "step": 299},
    {"epoch": 0.15, "learning_rate": 1.9735982424315642e-05, "loss": 1.4031, "step": 300},
    {"epoch": 0.15, "learning_rate": 1.973416659837938e-05, "loss": 1.1998, "step": 301},
    {"epoch": 0.15, "learning_rate": 1.973234463360853e-05, "loss": 1.0635, "step": 302},
    {"epoch": 0.15, "learning_rate": 1.973051653115211e-05, "loss": 1.29, "step": 303},
    {"epoch": 0.15, "learning_rate": 1.9728682292163002e-05, "loss": 1.2678, "step": 304},
    {"epoch": 0.15, "learning_rate": 1.9726841917797977e-05, "loss": 1.3882, "step": 305},
    {"epoch": 0.15, "learning_rate": 1.9724995409217658e-05, "loss": 1.0476, "step": 306},
    {"epoch": 0.15, "learning_rate": 1.972314276758654e-05, "loss": 1.2119, "step": 307},
    {"epoch": 0.16, "learning_rate": 1.9721283994072995e-05, "loss": 1.7283, "step": 308},
    {"epoch": 0.16, "learning_rate": 1.971941908984925e-05, "loss": 1.6008, "step": 309},
    {"epoch": 0.16, "learning_rate": 1.97175480560914e-05, "loss": 1.3317, "step": 310},
    {"epoch": 0.16, "learning_rate": 1.9715670893979416e-05, "loss": 1.2612, "step": 311},
    {"epoch": 0.16, "learning_rate": 1.9713787604697125e-05, "loss": 1.4356, "step": 312},
    {"epoch": 0.16, "learning_rate": 1.9711898189432218e-05, "loss": 1.3431, "step": 313},
    {"epoch": 0.16, "learning_rate": 1.9710002649376255e-05, "loss": 0.8521, "step": 314},
    {"epoch": 0.16, "learning_rate": 1.9708100985724654e-05, "loss": 1.9041, "step": 315},
    {"epoch": 0.16, "learning_rate": 1.970619319967669e-05, "loss": 1.6982, "step": 316},
    {"epoch": 0.16, "learning_rate": 1.970427929243551e-05, "loss": 1.6813, "step": 317},
    {"epoch": 0.16, "learning_rate": 1.9702359265208114e-05, "loss": 1.5686, "step": 318},
    {"epoch": 0.16, "learning_rate": 1.9700433119205368e-05, "loss": 1.7669, "step": 319},
    {"epoch": 0.16, "learning_rate": 1.9698500855641988e-05, "loss": 0.9086, "step": 320},
    {"epoch": 0.16, "learning_rate": 1.9696562475736556e-05, "loss": 1.4198, "step": 321},
    {"epoch": 0.16, "learning_rate": 1.9694617980711503e-05, "loss": 1.8693, "step": 322},
    {"epoch": 0.16, "learning_rate": 1.9692667371793127e-05, "loss": 0.7734, "step": 323},
    {"epoch": 0.16, "learning_rate": 1.9690710650211572e-05, "loss": 1.1205, "step": 324},
    {"epoch": 0.16, "learning_rate": 1.968874781720084e-05, "loss": 0.959, "step": 325},
    {"epoch": 0.16, "learning_rate": 1.9686778873998792e-05, "loss": 1.9894, "step": 326},
    {"epoch": 0.16, "learning_rate": 1.9684803821847137e-05, "loss": 1.6784, "step": 327},
    {"epoch": 0.17, "learning_rate": 1.9682822661991435e-05, "loss": 1.4841, "step": 328},
    {"epoch": 0.17, "learning_rate": 1.968083539568111e-05, "loss": 1.7689, "step": 329},
    {"epoch": 0.17, "learning_rate": 1.9678842024169418e-05, "loss": 1.3004, "step": 330},
    {"epoch": 0.17, "learning_rate": 1.9676842548713475e-05, "loss": 1.5111, "step": 331},
    {"epoch": 0.17, "learning_rate": 1.9674836970574253e-05, "loss": 1.0877, "step": 332},
    {"epoch": 0.17, "learning_rate": 1.9672825291016564e-05, "loss": 1.3979, "step": 333},
    {"epoch": 0.17, "learning_rate": 1.967080751130907e-05, "loss": 1.5652, "step": 334},
    {"epoch": 0.17, "learning_rate": 1.9668783632724278e-05, "loss": 1.2373, "step": 335},
    {"epoch": 0.17, "learning_rate": 1.9666753656538545e-05, "loss": 1.5039, "step": 336},
    {"epoch": 0.17, "learning_rate": 1.9664717584032075e-05, "loss": 1.3572, "step": 337},
    {"epoch": 0.17, "learning_rate": 1.9662675416488908e-05, "loss": 1.3156, "step": 338},
    {"epoch": 0.17, "learning_rate": 1.9660627155196934e-05, "loss": 1.4775, "step": 339},
    {"epoch": 0.17, "learning_rate": 1.965857280144789e-05, "loss": 1.5048, "step": 340},
    {"epoch": 0.17, "learning_rate": 1.9656512356537343e-05, "loss": 1.0645, "step": 341},
    {"epoch": 0.17, "learning_rate": 1.9654445821764717e-05, "loss": 1.4329, "step": 342},
    {"epoch": 0.17, "learning_rate": 1.9652373198433265e-05, "loss": 1.3218, "step": 343},
    {"epoch": 0.17, "learning_rate": 1.965029448785008e-05, "loss": 1.6661, "step": 344},
    {"epoch": 0.17, "learning_rate": 1.9648209691326103e-05, "loss": 1.183, "step": 345},
    {"epoch": 0.17, "learning_rate": 1.96461188101761e-05, "loss": 1.2924, "step": 346},
    {"epoch": 0.17, "learning_rate": 1.964402184571869e-05, "loss": 1.0152, "step": 347},
    {"epoch": 0.18, "learning_rate": 1.9641918799276313e-05, "loss": 1.3248, "step": 348},
    {"epoch": 0.18, "learning_rate": 1.9639809672175253e-05, "loss": 1.2242, "step": 349},
    {"epoch": 0.18, "learning_rate": 1.963769446574563e-05, "loss": 1.2416, "step": 350},
    {"epoch": 0.18, "learning_rate": 1.9635573181321394e-05, "loss": 1.4711, "step": 351},
    {"epoch": 0.18, "learning_rate": 1.9633445820240323e-05, "loss": 1.454, "step": 352},
    {"epoch": 0.18, "learning_rate": 1.963131238384404e-05, "loss": 1.355, "step": 353},
    {"epoch": 0.18, "learning_rate": 1.9629172873477995e-05, "loss": 1.3991, "step": 354},
    {"epoch": 0.18, "learning_rate": 1.962702729049146e-05, "loss": 1.3905, "step": 355},
    {"epoch": 0.18, "learning_rate": 1.9624875636237547e-05, "loss": 1.5024, "step": 356},
    {"epoch": 0.18, "learning_rate": 1.9622717912073193e-05, "loss": 1.1932, "step": 357},
    {"epoch": 0.18, "learning_rate": 1.962055411935916e-05, "loss": 1.4808, "step": 358},
    {"epoch": 0.18, "learning_rate": 1.961838425946004e-05, "loss": 1.5079, "step": 359},
    {"epoch": 0.18, "learning_rate": 1.9616208333744255e-05, "loss": 1.1668, "step": 360},
    {"epoch": 0.18, "learning_rate": 1.9614026343584048e-05, "loss": 0.9482, "step": 361},
    {"epoch": 0.18, "learning_rate": 1.9611838290355483e-05, "loss": 1.5103, "step": 362},
    {"epoch": 0.18, "learning_rate": 1.9609644175438457e-05, "loss": 1.4825, "step": 363},
    {"epoch": 0.18, "learning_rate": 1.9607444000216676e-05, "loss": 1.0542, "step": 364},
    {"epoch": 0.18, "learning_rate": 1.9605237766077686e-05, "loss": 1.3711, "step": 365},
    {"epoch": 0.18, "learning_rate": 1.9603025474412844e-05, "loss": 1.8036, "step": 366},
    {"epoch": 0.19, "learning_rate": 1.960080712661732e-05, "loss": 1.2591, "step": 367},
    {"epoch": 0.19, "learning_rate": 1.959858272409012e-05, "loss": 1.5749, "step": 368},
    {"epoch": 0.19, "learning_rate": 1.9596352268234053e-05, "loss": 1.4592, "step": 369},
    {"epoch": 0.19, "learning_rate": 1.9594115760455755e-05, "loss": 0.8912, "step": 370},
    {"epoch": 0.19, "learning_rate": 1.9591873202165678e-05, "loss": 0.9196, "step": 371},
    {"epoch": 0.19, "learning_rate": 1.9589624594778077e-05, "loss": 1.4079, "step": 372},
    {"epoch": 0.19, "learning_rate": 1.9587369939711044e-05, "loss": 1.3132, "step": 373},
    {"epoch": 0.19, "learning_rate": 1.958510923838647e-05, "loss": 0.6786, "step": 374},
    {"epoch": 0.19, "learning_rate": 1.958284249223006e-05, "loss": 1.4426, "step": 375},
    {"epoch": 0.19, "learning_rate": 1.9580569702671332e-05, "loss": 1.1317, "step": 376},
    {"epoch": 0.19, "learning_rate": 1.957829087114362e-05, "loss": 1.4542, "step": 377},
    {"epoch": 0.19, "learning_rate": 1.957600599908406e-05, "loss": 0.807, "step": 378},
    {"epoch": 0.19, "learning_rate": 1.957371508793361e-05, "loss": 1.5726, "step": 379},
    {"epoch": 0.19, "learning_rate": 1.9571418139137023e-05, "loss": 1.4531, "step": 380},
    {"epoch": 0.19, "learning_rate": 1.9569115154142873e-05, "loss": 1.4213, "step": 381},
    {"epoch": 0.19, "learning_rate": 1.9566806134403526e-05, "loss": 1.3222, "step": 382},
    {"epoch": 0.19, "learning_rate": 1.9564491081375157e-05, "loss": 0.9039, "step": 383},
    {"epoch": 0.19, "learning_rate": 1.956216999651776e-05, "loss": 1.507, "step": 384},
    {"epoch": 0.19, "learning_rate": 1.9559842881295122e-05, "loss": 1.4292, "step": 385},
    {"epoch": 0.19, "learning_rate": 1.955750973717483e-05, "loss": 1.2868, "step": 386},
    {"epoch": 0.2, "learning_rate": 1.955517056562828e-05, "loss": 1.5842, "step": 387},
    {"epoch": 0.2, "learning_rate": 1.955282536813066e-05, "loss": 1.615, "step": 388},
    {"epoch": 0.2, "learning_rate": 1.955047414616097e-05, "loss": 1.3707, "step": 389},
    {"epoch": 0.2, "learning_rate": 1.9548116901202006e-05, "loss": 1.4077, "step": 390},
    {"epoch": 0.2, "learning_rate": 1.9545753634740358e-05, "loss": 1.4326, "step": 391},
    {"epoch": 0.2, "learning_rate": 1.9543384348266415e-05, "loss": 1.3821, "step": 392},
    {"epoch": 0.2, "learning_rate": 1.954100904327436e-05, "loss": 1.4803, "step": 393},
    {"epoch": 0.2, "learning_rate": 1.953862772126218e-05, "loss": 1.5412, "step": 394},
    {"epoch": 0.2, "learning_rate": 1.953624038373165e-05, "loss": 1.7189, "step": 395},
    {"epoch": 0.2, "learning_rate": 1.9533847032188337e-05, "loss": 1.1109, "step": 396},
    {"epoch": 0.2, "learning_rate": 1.953144766814161e-05, "loss": 1.237, "step": 397},
    {"epoch": 0.2, "learning_rate": 1.952904229310462e-05, "loss": 1.2492, "step": 398},
    {"epoch": 0.2, "learning_rate": 1.952663090859431e-05, "loss": 1.3605, "step": 399},
    {"epoch": 0.2, "learning_rate": 1.952421351613142e-05, "loss": 1.4423, "step": 400},
    {"epoch": 0.2, "learning_rate": 1.9521790117240472e-05, "loss": 1.3342, "step": 401},
    {"epoch": 0.2, "learning_rate": 1.9519360713449775e-05, "loss": 1.7274, "step": 402},
    {"epoch": 0.2, "learning_rate": 1.9516925306291435e-05, "loss": 1.6019, "step": 403},
    {"epoch": 0.2, "learning_rate": 1.951448389730133e-05, "loss": 1.1267, "step": 404},
    {"epoch": 0.2, "learning_rate": 1.9512036488019138e-05, "loss": 1.1338, "step": 405},
    {"epoch": 0.2, "learning_rate": 1.9509583079988307e-05, "loss": 1.6472, "step": 406},
    {"epoch": 0.21, "learning_rate": 1.9507123674756076e-05, "loss": 1.2389, "step": 407},
    {"epoch": 0.21, "learning_rate": 1.9504658273873465e-05, "loss": 1.5887, "step": 408},
    {"epoch": 0.21, "learning_rate": 1.9502186878895273e-05, "loss": 1.0987, "step": 409},
    {"epoch": 0.21, "learning_rate": 1.9499709491380083e-05, "loss": 1.384, "step": 410},
    {"epoch": 0.21, "learning_rate": 1.9497226112890252e-05, "loss": 1.7325, "step": 411},
    {"epoch": 0.21, "learning_rate": 1.9494736744991925e-05, "loss": 1.4435, "step": 412},
    {"epoch": 0.21, "learning_rate": 1.9492241389255006e-05, "loss": 1.9634, "step": 413},
    {"epoch": 0.21, "learning_rate": 1.9489740047253197e-05, "loss": 1.5567, "step": 414},
    {"epoch": 0.21, "learning_rate": 1.9487232720563962e-05, "loss": 1.3805, "step": 415},
    {"epoch": 0.21, "learning_rate": 1.948471941076854e-05, "loss": 1.325, "step": 416},
    {"epoch": 0.21, "learning_rate": 1.9482200119451945e-05, "loss": 1.3569, "step": 417},
    {"epoch": 0.21, "learning_rate": 1.947967484820297e-05, "loss": 1.1947, "step": 418},
    {"epoch": 0.21, "learning_rate": 1.947714359861416e-05, "loss": 1.231, "step": 419},
    {"epoch": 0.21, "learning_rate": 1.9474606372281854e-05, "loss": 1.1062, "step": 420},
    {"epoch": 0.21, "learning_rate": 1.9472063170806144e-05, "loss": 1.8761, "step": 421},
    {"epoch": 0.21, "learning_rate": 1.94695139957909e-05, "loss": 1.3697, "step": 422},
    {"epoch": 0.21, "learning_rate": 1.9466958848843748e-05, "loss": 1.0142, "step": 423},
    {"epoch": 0.21, "learning_rate": 1.9464397731576093e-05, "loss": 1.0189, "step": 424},
    {"epoch": 0.21, "learning_rate": 1.94618306456031e-05, "loss": 1.7268, "step": 425},
    {"epoch": 0.21, "learning_rate": 1.9459257592543688e-05, "loss": 1.0269, "step": 426},
    {"epoch": 0.22, "learning_rate": 1.9456678574020557e-05, "loss": 1.2833, "step": 427},
    {"epoch": 0.22, "learning_rate": 1.9454093591660155e-05, "loss": 1.1967, "step": 428},
    {"epoch": 0.22, "learning_rate": 1.94515026470927e-05, "loss": 1.5793, "step": 429},
    {"epoch": 0.22, "learning_rate": 1.9448905741952167e-05, "loss": 1.4184, "step": 430},
    {"epoch": 0.22, "learning_rate": 1.944630287787629e-05, "loss": 1.2217, "step": 431},
    {"epoch": 0.22, "learning_rate": 1.9443694056506556e-05, "loss": 1.222, "step": 432},
    {"epoch": 0.22, "learning_rate": 1.9441079279488213e-05, "loss": 1.2332, "step": 433},
    {"epoch": 0.22, "learning_rate": 1.9438458548470268e-05, "loss": 1.7458, "step": 434},
    {"epoch": 0.22, "learning_rate": 1.9435831865105482e-05, "loss": 1.3248, "step": 435},
    {"epoch": 0.22, "learning_rate": 1.9433199231050367e-05, "loss": 1.9621, "step": 436},
    {"epoch": 0.22, "learning_rate": 1.9430560647965192e-05, "loss": 0.8943, "step": 437},
    {"epoch": 0.22, "learning_rate": 1.942791611751397e-05, "loss": 0.6546, "step": 438},
    {"epoch": 0.22, "learning_rate": 1.9425265641364467e-05, "loss": 1.4626, "step": 439},
    {"epoch": 0.22, "learning_rate": 1.9422609221188208e-05, "loss": 1.3289, "step": 440},
    {"epoch": 0.22, "learning_rate": 1.9419946858660452e-05, "loss": 0.8965, "step": 441},
    {"epoch": 0.22, "learning_rate": 1.9417278555460223e-05, "loss": 1.225, "step": 442},
    {"epoch": 0.22, "learning_rate": 1.941460431327027e-05, "loss": 1.713, "step": 443},
    {"epoch": 0.22, "learning_rate": 1.941192413377711e-05, "loss": 1.0535, "step": 444},
    {"epoch": 0.22, "learning_rate": 1.9409238018670986e-05, "loss": 1.4671, "step": 445},
    {"epoch": 0.22, "learning_rate": 1.9406545969645894e-05, "loss": 1.5399, "step": 446},
    {"epoch": 0.23, "learning_rate": 1.940384798839957e-05, "loss": 1.3484, "step": 447},
    {"epoch": 0.23, "learning_rate": 1.940114407663349e-05, "loss": 1.6319, "step": 448},
    {"epoch": 0.23, "learning_rate": 1.9398434236052873e-05, "loss": 1.7901, "step": 449},
    {"epoch": 0.23, "learning_rate": 1.9395718468366672e-05, "loss": 1.4023, "step": 450},
    {"epoch": 0.23, "learning_rate": 1.9392996775287588e-05, "loss": 1.1306, "step": 451},
    {"epoch": 0.23, "learning_rate": 1.9390269158532043e-05, "loss": 1.648, "step": 452},
    {"epoch": 0.23, "learning_rate": 1.9387535619820207e-05, "loss": 1.5907, "step": 453},
    {"epoch": 0.23, "learning_rate": 1.9384796160875982e-05, "loss": 1.2352, "step": 454},
    {"epoch": 0.23, "learning_rate": 1.9382050783427e-05, "loss": 1.6986, "step": 455},
    {"epoch": 0.23, "learning_rate": 1.9379299489204634e-05, "loss": 1.4436, "step": 456},
    {"epoch": 0.23, "learning_rate": 1.937654227994398e-05, "loss": 1.8155, "step": 457},
    {"epoch": 0.23, "learning_rate": 1.937377915738386e-05, "loss": 1.4464, "step": 458},
    {"epoch": 0.23, "learning_rate": 1.937101012326685e-05, "loss": 1.5623, "step": 459},
    {"epoch": 0.23, "learning_rate": 1.9368235179339217e-05, "loss": 1.6515, "step": 460},
    {"epoch": 0.23, "learning_rate": 1.9365454327350984e-05, "loss": 1.3808, "step": 461},
    {"epoch": 0.23, "learning_rate": 1.936266756905589e-05, "loss": 1.708, "step": 462},
    {"epoch": 0.23, "learning_rate": 1.93598749062114e-05, "loss": 1.3696, "step": 463},
    {"epoch": 0.23, "learning_rate": 1.9357076340578696e-05, "loss": 1.4497, "step": 464},
    {"epoch": 0.23, "learning_rate": 1.9354271873922692e-05, "loss": 1.2003, "step": 465},
    {"epoch": 0.23, "learning_rate": 1.935146150801202e-05, "loss": 1.182, "step": 466},
    {"epoch": 0.24, "learning_rate": 1.9348645244619035e-05, "loss": 1.9314, "step": 467},
    {"epoch": 0.24, "learning_rate": 1.9345823085519804e-05, "loss": 1.5496, "step": 468},
    {"epoch": 0.24, "learning_rate": 1.9342995032494116e-05, "loss": 1.0179, "step": 469},
    {"epoch": 0.24, "learning_rate": 1.9340161087325483e-05, "loss": 1.3892, "step": 470},
    {"epoch": 0.24, "learning_rate": 1.9337321251801123e-05, "loss": 1.0485, "step": 471},
    {"epoch": 0.24, "learning_rate": 1.9334475527711973e-05, "loss": 1.786, "step": 472},
    {"epoch": 0.24, "learning_rate": 1.9331623916852683e-05, "loss": 1.3026, "step": 473},
    {"epoch": 0.24, "learning_rate": 1.932876642102162e-05, "loss": 1.2246, "step": 474},
    {"epoch": 0.24, "learning_rate": 1.9325903042020856e-05, "loss": 1.7167, "step": 475},
    {"epoch": 0.24, "learning_rate": 1.9323033781656178e-05, "loss": 1.5324, "step": 476},
    {"epoch": 0.24, "learning_rate": 1.9320158641737077e-05, "loss": 1.51, "step": 477},
    {"epoch": 0.24, "learning_rate": 1.9317277624076758e-05, "loss": 1.3634, "step": 478},
    {"epoch": 0.24, "learning_rate": 1.931439073049213e-05, "loss": 1.2313, "step": 479},
    {"epoch": 0.24, "learning_rate": 1.93114979628038e-05, "loss": 1.624, "step": 480},
    {"epoch": 0.24, "learning_rate": 1.9308599322836092e-05, "loss": 1.152, "step": 481},
    {"epoch": 0.24, "learning_rate": 1.930569481241703e-05, "loss": 1.2635, "step": 482},
    {"epoch": 0.24, "learning_rate": 1.9302784433378333e-05, "loss": 1.921, "step": 483},
    {"epoch": 0.24, "learning_rate": 1.929986818755543e-05, "loss": 1.2884, "step": 484},
    {"epoch": 0.24, "learning_rate": 1.9296946076787447e-05, "loss": 1.2914, "step": 485},
    {"epoch": 0.25, "learning_rate": 1.9294018102917208e-05, "loss": 1.2168, "step": 486},
    {"epoch": 0.25, "learning_rate": 1.929108426779123e-05, "loss": 1.7837, "step": 487},
    {"epoch": 0.25, "learning_rate": 1.9288144573259735e-05, "loss": 1.3335, "step": 488},
    {"epoch": 0.25, "learning_rate": 1.9285199021176634e-05, "loss": 1.2004, "step": 489},
    {"epoch": 0.25, "learning_rate": 1.9282247613399537e-05, "loss": 1.3216, "step": 490},
    {"epoch": 0.25, "learning_rate": 1.9279290351789737e-05, "loss": 1.7597, "step": 491},
    {"epoch": 0.25, "learning_rate": 1.9276327238212232e-05, "loss": 1.3228, "step": 492},
    {"epoch": 0.25, "learning_rate": 1.9273358274535703e-05, "loss": 1.6335, "step": 493},
    {"epoch": 0.25, "learning_rate": 1.9270383462632524e-05, "loss": 1.4952, "step": 494},
    {"epoch": 0.25, "learning_rate": 1.926740280437875e-05, "loss": 1.8452, "step": 495},
    {"epoch": 0.25, "learning_rate": 1.926441630165413e-05, "loss": 1.1917, "step": 496},
    {"epoch": 0.25, "eval_loss": 1.436846375465393, "eval_runtime": 99.6318, "eval_samples_per_second": 1.164, "eval_steps_per_second": 1.164, "step": 496},
    {"epoch": 0.25, "learning_rate": 1.9261423956342096e-05, "loss": 1.3163, "step": 497},
    {"epoch": 0.25, "learning_rate": 1.9258425770329764e-05, "loss": 1.0938, "step": 498},
    {"epoch": 0.25, "learning_rate": 1.9255421745507937e-05, "loss": 1.0377, "step": 499},
    {"epoch": 0.25, "learning_rate": 1.92524118837711e-05, "loss": 1.2684, "step": 500},
    {"epoch": 0.25, "learning_rate": 1.924939618701741e-05, "loss": 1.3812, "step": 501},
    {"epoch": 0.25, "learning_rate": 1.9246374657148714e-05, "loss": 1.4278, "step": 502},
    {"epoch": 0.25, "learning_rate": 1.9243347296070535e-05, "loss": 1.503, "step": 503},
    {"epoch": 0.25, "learning_rate": 1.9240314105692074e-05, "loss": 1.4397, "step": 504},
    {"epoch": 0.25, "learning_rate": 1.9237275087926206e-05, "loss": 1.3929, "step": 505},
    {"epoch": 0.26, "learning_rate": 1.9234230244689484e-05, "loss": 1.4961, "step": 506},
    {"epoch": 0.26, "learning_rate": 1.9231179577902125e-05, "loss": 0.7874, "step": 507},
    {"epoch": 0.26, "learning_rate": 1.9228123089488034e-05, "loss": 1.4337, "step": 508},
    {"epoch": 0.26, "learning_rate": 1.9225060781374782e-05, "loss": 1.2741, "step": 509},
    {"epoch": 0.26, "learning_rate": 1.92219926554936e-05, "loss": 1.049, "step": 510},
    {"epoch": 0.26, "learning_rate": 1.9218918713779404e-05, "loss": 1.5724, "step": 511},
    {"epoch": 0.26, "learning_rate": 1.9215838958170762e-05, "loss": 1.6128, "step": 512},
    {"epoch": 0.26, "learning_rate": 1.921275339060992e-05, "loss": 1.2712, "step": 513},
    {"epoch": 0.26, "learning_rate": 1.9209662013042786e-05, "loss": 1.47, "step": 514},
    {"epoch": 0.26, "learning_rate": 1.9206564827418935e-05, "loss": 1.174, "step": 515},
    {"epoch": 0.26, "learning_rate": 1.9203461835691596e-05, "loss": 1.5194, "step": 516},
    {"epoch": 0.26, "learning_rate": 1.920035303981767e-05, "loss": 1.3734, "step": 517},
    {"epoch": 0.26, "learning_rate": 1.9197238441757705e-05, "loss": 1.559, "step": 518},
    {"epoch": 0.26, "learning_rate": 1.9194118043475923e-05, "loss": 1.7758, "step": 519},
    {"epoch": 0.26, "learning_rate": 1.9190991846940202e-05, "loss": 1.4306, "step": 520},
    {"epoch": 0.26, "learning_rate": 1.9187859854122065e-05, "loss": 0.7942, "step": 521},
    {"epoch": 0.26, "learning_rate": 1.9184722066996697e-05, "loss": 1.4129, "step": 522},
    {"epoch": 0.26, "learning_rate": 1.918157848754294e-05, "loss": 1.4661, "step": 523},
    {"epoch": 0.26, "learning_rate": 1.9178429117743294e-05, "loss": 1.7917, "step": 524},
    {"epoch": 0.26, "learning_rate": 1.9175273959583892e-05, "loss": 1.7892, "step": 525},
    {"epoch": 0.27, "learning_rate": 1.917211301505453e-05, "loss": 1.204, "step": 526},
    {"epoch": 0.27, "learning_rate": 1.9168946286148663e-05, "loss": 1.017, "step": 527},
    {"epoch": 0.27, "learning_rate": 1.9165773774863367e-05, "loss": 0.9312, "step": 528},
    {"epoch": 0.27, "learning_rate": 1.9162595483199396e-05, "loss": 1.2145, "step": 529},
    {"epoch": 0.27, "learning_rate": 1.915941141316112e-05, "loss": 1.6209, "step": 530},
    {"epoch": 0.27, "learning_rate": 1.9156221566756578e-05, "loss": 1.6366, "step": 531},
    {"epoch": 0.27, "learning_rate": 1.9153025945997437e-05, "loss": 1.6848, "step": 532
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9149824552899008e-05, |
|
"loss": 1.2836, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.914661738948024e-05, |
|
"loss": 1.2836, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9143404457763738e-05, |
|
"loss": 1.598, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.914018575977572e-05, |
|
"loss": 1.2498, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9136961297546052e-05, |
|
"loss": 1.4344, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9133731073108238e-05, |
|
"loss": 1.2924, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9130495088499417e-05, |
|
"loss": 1.0017, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9127253345760354e-05, |
|
"loss": 1.048, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9124005846935446e-05, |
|
"loss": 1.1805, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9120752594072724e-05, |
|
"loss": 1.1611, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.911749358922385e-05, |
|
"loss": 1.2517, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.91142288344441e-05, |
|
"loss": 1.4351, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9110958331792396e-05, |
|
"loss": 1.2312, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9107682083331265e-05, |
|
"loss": 1.3427, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9104400091126877e-05, |
|
"loss": 1.9784, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.910111235724901e-05, |
|
"loss": 1.0882, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.909781888377106e-05, |
|
"loss": 1.2761, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.909451967277006e-05, |
|
"loss": 1.1144, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9091214726326652e-05, |
|
"loss": 1.5043, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.908790404652509e-05, |
|
"loss": 1.6154, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9084587635453246e-05, |
|
"loss": 1.3277, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9081265495202618e-05, |
|
"loss": 1.6941, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9077937627868295e-05, |
|
"loss": 1.0345, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9074604035549e-05, |
|
"loss": 1.4388, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9071264720347057e-05, |
|
"loss": 0.8748, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.906791968436839e-05, |
|
"loss": 1.4478, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9064568929722553e-05, |
|
"loss": 1.1109, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.906121245852268e-05, |
|
"loss": 1.4213, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9057850272885538e-05, |
|
"loss": 1.3594, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.905448237493147e-05, |
|
"loss": 1.2485, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.905110876678444e-05, |
|
"loss": 1.5978, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9047729450572007e-05, |
|
"loss": 1.517, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9044344428425335e-05, |
|
"loss": 1.2011, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9040953702479176e-05, |
|
"loss": 1.098, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9037557274871893e-05, |
|
"loss": 1.1603, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9034155147745424e-05, |
|
"loss": 1.5996, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.903074732324533e-05, |
|
"loss": 1.0791, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9027333803520736e-05, |
|
"loss": 1.4224, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9023914590724383e-05, |
|
"loss": 1.3715, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9020489687012582e-05, |
|
"loss": 0.907, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9017059094545244e-05, |
|
"loss": 1.4973, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9013622815485877e-05, |
|
"loss": 1.7539, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9010180852001546e-05, |
|
"loss": 1.5102, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9006733206262933e-05, |
|
"loss": 1.0709, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9003279880444285e-05, |
|
"loss": 1.5776, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.899982087672344e-05, |
|
"loss": 1.5828, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.8996356197281803e-05, |
|
"loss": 0.9347, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.8992885844304374e-05, |
|
"loss": 1.2101, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.8989409819979724e-05, |
|
"loss": 1.3425, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.89859281265e-05, |
|
"loss": 1.2859, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.898244076606093e-05, |
|
"loss": 1.0246, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.897894774086181e-05, |
|
"loss": 1.2258, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8975449053105505e-05, |
|
"loss": 1.8727, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8971944704998462e-05, |
|
"loss": 1.8236, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8968434698750688e-05, |
|
"loss": 1.353, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8964919036575764e-05, |
|
"loss": 1.6112, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8961397720690837e-05, |
|
"loss": 1.0724, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8957870753316617e-05, |
|
"loss": 0.7038, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.895433813667738e-05, |
|
"loss": 1.3323, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8950799873000964e-05, |
|
"loss": 1.5053, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8947255964518766e-05, |
|
"loss": 1.8766, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.894370641346575e-05, |
|
"loss": 1.5618, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8940151222080432e-05, |
|
"loss": 1.4632, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.893659039260489e-05, |
|
"loss": 1.3342, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.893302392728475e-05, |
|
"loss": 1.6476, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.89294518283692e-05, |
|
"loss": 1.6092, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8925874098110976e-05, |
|
"loss": 1.4304, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8922290738766368e-05, |
|
"loss": 1.1196, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8918701752595213e-05, |
|
"loss": 1.3435, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8915107141860902e-05, |
|
"loss": 1.0456, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8911506908830367e-05, |
|
"loss": 1.4373, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.8907901055774092e-05, |
|
"loss": 0.7475, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8904289584966098e-05, |
|
"loss": 1.6336, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8900672498683953e-05, |
|
"loss": 0.7168, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8897049799208765e-05, |
|
"loss": 1.0685, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.889342148882519e-05, |
|
"loss": 1.774, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.888978756982141e-05, |
|
"loss": 1.2926, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8886148044489145e-05, |
|
"loss": 1.387, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8882502915123662e-05, |
|
"loss": 1.3983, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8878852184023754e-05, |
|
"loss": 1.0143, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8875195853491744e-05, |
|
"loss": 1.538, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8871533925833497e-05, |
|
"loss": 1.4635, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8867866403358394e-05, |
|
"loss": 1.255, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.886419328837936e-05, |
|
"loss": 0.8735, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8860514583212825e-05, |
|
"loss": 1.2476, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.885683029017877e-05, |
|
"loss": 1.2701, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8853140411600685e-05, |
|
"loss": 1.107, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8849444949805582e-05, |
|
"loss": 0.975, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8845743907123998e-05, |
|
"loss": 1.4108, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8842037285889993e-05, |
|
"loss": 1.5908, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8838325088441137e-05, |
|
"loss": 1.6427, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.883460731711852e-05, |
|
"loss": 1.4362, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8830883974266752e-05, |
|
"loss": 1.4124, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8827155062233945e-05, |
|
"loss": 0.7645, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8823420583371738e-05, |
|
"loss": 1.5692, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8819680540035272e-05, |
|
"loss": 1.3647, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8815934934583198e-05, |
|
"loss": 1.6264, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8812183769377674e-05, |
|
"loss": 1.45, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8808427046784365e-05, |
|
"loss": 1.2184, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8804664769172443e-05, |
|
"loss": 1.2716, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8800896938914583e-05, |
|
"loss": 1.218, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8797123558386954e-05, |
|
"loss": 1.3116, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.879334462996924e-05, |
|
"loss": 1.3002, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8789560156044612e-05, |
|
"loss": 1.1891, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8785770138999737e-05, |
|
"loss": 1.2831, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8781974581224792e-05, |
|
"loss": 0.9363, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.877817348511343e-05, |
|
"loss": 0.6865, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8774366853062807e-05, |
|
"loss": 1.1503, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8770554687473574e-05, |
|
"loss": 1.2252, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.876673699074986e-05, |
|
"loss": 1.3985, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8762913765299294e-05, |
|
"loss": 1.3642, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.875908501353298e-05, |
|
"loss": 1.0494, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8755250737865518e-05, |
|
"loss": 1.2987, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8751410940714986e-05, |
|
"loss": 1.0606, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8747565624502946e-05, |
|
"loss": 1.7578, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8743714791654438e-05, |
|
"loss": 1.3986, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8739858444597984e-05, |
|
"loss": 1.5013, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8735996585765578e-05, |
|
"loss": 1.5823, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.87321292175927e-05, |
|
"loss": 1.3707, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8728256342518302e-05, |
|
"loss": 1.6083, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8724377962984795e-05, |
|
"loss": 1.6429, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.872049408143808e-05, |
|
"loss": 1.289, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8716604700327516e-05, |
|
"loss": 1.3554, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8712709822105936e-05, |
|
"loss": 1.1375, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8708809449229633e-05, |
|
"loss": 1.4046, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.870490358415838e-05, |
|
"loss": 0.9515, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8700992229355398e-05, |
|
"loss": 1.4147, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8697075387287373e-05, |
|
"loss": 2.114, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.869315306042446e-05, |
|
"loss": 1.3317, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8689225251240268e-05, |
|
"loss": 1.4219, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8685291962211855e-05, |
|
"loss": 1.7639, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.868135319581975e-05, |
|
"loss": 1.3751, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8677408954547927e-05, |
|
"loss": 1.5809, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.867345924088381e-05, |
|
"loss": 1.5636, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8669504057318285e-05, |
|
"loss": 1.2585, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.866554340634568e-05, |
|
"loss": 1.4658, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.866157729046377e-05, |
|
"loss": 1.5769, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8657605712173782e-05, |
|
"loss": 1.6577, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8653628673980383e-05, |
|
"loss": 1.1018, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.864964617839168e-05, |
|
"loss": 1.4336, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8645658227919234e-05, |
|
"loss": 1.7202, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.864166482507804e-05, |
|
"loss": 2.1243, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8637665972386522e-05, |
|
"loss": 1.5908, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.863366167236656e-05, |
|
"loss": 1.2659, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8629651927543447e-05, |
|
"loss": 1.3677, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8625636740445925e-05, |
|
"loss": 1.3144, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.862161611360617e-05, |
|
"loss": 1.3658, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8617590049559778e-05, |
|
"loss": 1.7923, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.861355855084578e-05, |
|
"loss": 1.3498, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8609521620006636e-05, |
|
"loss": 1.2324, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8605479259588225e-05, |
|
"loss": 1.4791, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.860143147213986e-05, |
|
"loss": 1.743, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.859737826021426e-05, |
|
"loss": 2.0427, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.859331962636759e-05, |
|
"loss": 1.2335, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8589255573159408e-05, |
|
"loss": 1.1923, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8585186103152707e-05, |
|
"loss": 1.3751, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.858111121891389e-05, |
|
"loss": 1.4027, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.857703092301278e-05, |
|
"loss": 1.6319, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.85729452180226e-05, |
|
"loss": 1.4707, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.856885410652e-05, |
|
"loss": 1.17, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8564757591085027e-05, |
|
"loss": 1.6464, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8560655674301144e-05, |
|
"loss": 1.4498, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8556548358755217e-05, |
|
"loss": 1.668, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8552435647037512e-05, |
|
"loss": 1.6958, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.854831754174171e-05, |
|
"loss": 1.049, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8544194045464888e-05, |
|
"loss": 1.6657, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.854006516080751e-05, |
|
"loss": 1.2644, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8535930890373467e-05, |
|
"loss": 0.9207, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8531791236770015e-05, |
|
"loss": 0.6102, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8527646202607822e-05, |
|
"loss": 1.5107, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8523495790500948e-05, |
|
"loss": 1.7761, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8519340003066846e-05, |
|
"loss": 1.4449, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.851517884292635e-05, |
|
"loss": 1.2168, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8511012312703692e-05, |
|
"loss": 1.315, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.850684041502649e-05, |
|
"loss": 0.9184, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.850266315252573e-05, |
|
"loss": 1.4521, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8498480527835812e-05, |
|
"loss": 1.3578, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8494292543594488e-05, |
|
"loss": 1.4366, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8490099202442912e-05, |
|
"loss": 1.5684, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8485900507025603e-05, |
|
"loss": 1.5953, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.848169645999046e-05, |
|
"loss": 1.4611, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8477487063988757e-05, |
|
"loss": 1.0892, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8473272321675143e-05, |
|
"loss": 1.2696, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8469052235707637e-05, |
|
"loss": 1.297, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.846482680874763e-05, |
|
"loss": 1.5286, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.846059604345988e-05, |
|
"loss": 1.111, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8456359942512513e-05, |
|
"loss": 1.7865, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.845211850857701e-05, |
|
"loss": 1.3706, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8447871744328238e-05, |
|
"loss": 1.1235, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8443619652444396e-05, |
|
"loss": 1.4602, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8439362235607074e-05, |
|
"loss": 1.1644, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.843509949650119e-05, |
|
"loss": 1.2309, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8430831437815035e-05, |
|
"loss": 1.5331, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8426558062240258e-05, |
|
"loss": 1.4842, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8422279372471853e-05, |
|
"loss": 1.0548, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.841799537120817e-05, |
|
"loss": 1.2995, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8413706061150903e-05, |
|
"loss": 1.2846, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.84094114450051e-05, |
|
"loss": 1.3393, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8405111525479153e-05, |
|
"loss": 0.6297, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8400806305284797e-05, |
|
"loss": 1.6738, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8396495787137115e-05, |
|
"loss": 1.0798, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8392179973754523e-05, |
|
"loss": 1.5205, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.838785886785878e-05, |
|
"loss": 1.2577, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8383532472174988e-05, |
|
"loss": 1.739, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8379200789431578e-05, |
|
"loss": 1.3997, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.837486382236032e-05, |
|
"loss": 1.417, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8370521573696317e-05, |
|
"loss": 1.3401, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8366174046177993e-05, |
|
"loss": 1.2875, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.836182124254711e-05, |
|
"loss": 1.1579, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.835746316554876e-05, |
|
"loss": 2.2142, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8353099817931354e-05, |
|
"loss": 1.1692, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8348731202446628e-05, |
|
"loss": 1.2082, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"eval_loss": 1.4074231386184692, |
|
"eval_runtime": 99.6576, |
|
"eval_samples_per_second": 1.164, |
|
"eval_steps_per_second": 1.164, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8344357321849642e-05, |
|
"loss": 1.6486, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.833997817889878e-05, |
|
"loss": 2.2842, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8335593776355737e-05, |
|
"loss": 1.2669, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.833120411698553e-05, |
|
"loss": 1.1171, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8326809203556485e-05, |
|
"loss": 0.8204, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8322409038840253e-05, |
|
"loss": 1.5321, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8318003625611784e-05, |
|
"loss": 1.271, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8313592966649346e-05, |
|
"loss": 1.5388, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8309177064734515e-05, |
|
"loss": 0.7276, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.830475592265217e-05, |
|
"loss": 1.5962, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8300329543190497e-05, |
|
"loss": 1.4854, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8295897929140988e-05, |
|
"loss": 0.9288, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8291461083298422e-05, |
|
"loss": 1.1828, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.82870190084609e-05, |
|
"loss": 1.5382, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8282571707429796e-05, |
|
"loss": 1.355, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8278119183009802e-05, |
|
"loss": 1.3523, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8273661438008893e-05, |
|
"loss": 1.2927, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8269198475238334e-05, |
|
"loss": 1.6289, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8264730297512686e-05, |
|
"loss": 1.2027, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.82602569076498e-05, |
|
"loss": 1.5092, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.825577830847081e-05, |
|
"loss": 1.3273, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8251294502800136e-05, |
|
"loss": 1.5111, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8246805493465477e-05, |
|
"loss": 1.2229, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8242311283297826e-05, |
|
"loss": 1.2548, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8237811875131446e-05, |
|
"loss": 1.5801, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.823330727180388e-05, |
|
"loss": 0.7902, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.822879747615595e-05, |
|
"loss": 0.941, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8224282491031747e-05, |
|
"loss": 0.8148, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8219762319278638e-05, |
|
"loss": 1.2423, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8215236963747264e-05, |
|
"loss": 1.2113, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8210706427291535e-05, |
|
"loss": 1.3838, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.820617071276862e-05, |
|
"loss": 1.1883, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8201629823038958e-05, |
|
"loss": 1.4372, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8197083760966257e-05, |
|
"loss": 1.1854, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8192532529417486e-05, |
|
"loss": 1.5133, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8187976131262863e-05, |
|
"loss": 1.5026, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.818341456937588e-05, |
|
"loss": 1.6061, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.817884784663327e-05, |
|
"loss": 1.2502, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.817427596591504e-05, |
|
"loss": 1.3701, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.816969893010442e-05, |
|
"loss": 1.3572, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8165116742087927e-05, |
|
"loss": 0.8913, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.81605294047553e-05, |
|
"loss": 1.351, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8155936920999542e-05, |
|
"loss": 1.6656, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8151339293716884e-05, |
|
"loss": 1.1011, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8146736525806824e-05, |
|
"loss": 1.2324, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.814212862017208e-05, |
|
"loss": 1.8689, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.813751557971862e-05, |
|
"loss": 1.0252, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8132897407355657e-05, |
|
"loss": 1.349, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8128274105995626e-05, |
|
"loss": 0.9465, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.81236456785542e-05, |
|
"loss": 1.0772, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8119012127950303e-05, |
|
"loss": 1.7306, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.811437345710606e-05, |
|
"loss": 1.2599, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8109729668946848e-05, |
|
"loss": 1.2942, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8105080766401268e-05, |
|
"loss": 1.565, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.810042675240113e-05, |
|
"loss": 1.5088, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8095767629881486e-05, |
|
"loss": 1.4096, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8091103401780604e-05, |
|
"loss": 1.3494, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8086434071039972e-05, |
|
"loss": 1.4677, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8081759640604294e-05, |
|
"loss": 1.3054, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.807708011342149e-05, |
|
"loss": 0.9348, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8072395492442695e-05, |
|
"loss": 1.8479, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8067705780622254e-05, |
|
"loss": 1.7246, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8063010980917733e-05, |
|
"loss": 1.3406, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8058311096289892e-05, |
|
"loss": 1.1065, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.805360612970271e-05, |
|
"loss": 1.2406, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.804889608412336e-05, |
|
"loss": 1.1292, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8044180962522228e-05, |
|
"loss": 1.3689, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.80394607678729e-05, |
|
"loss": 1.2584, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8034735503152147e-05, |
|
"loss": 1.1992, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.803000517133996e-05, |
|
"loss": 1.4389, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.802526977541951e-05, |
|
"loss": 1.4263, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8020529318377165e-05, |
|
"loss": 1.4402, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8015783803202483e-05, |
|
"loss": 1.3552, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8011033232888224e-05, |
|
"loss": 1.0657, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8006277610430317e-05, |
|
"loss": 1.5182, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.800151693882789e-05, |
|
"loss": 1.2039, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.7996751221083256e-05, |
|
"loss": 1.3726, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.79919804602019e-05, |
|
"loss": 1.1037, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7987204659192495e-05, |
|
"loss": 1.6955, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7982423821066893e-05, |
|
"loss": 1.6935, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7977637948840124e-05, |
|
"loss": 1.4399, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7972847045530383e-05, |
|
"loss": 1.1159, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7968051114159046e-05, |
|
"loss": 1.5111, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7963250157750665e-05, |
|
"loss": 1.0608, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7958444179332946e-05, |
|
"loss": 1.3674, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7953633181936776e-05, |
|
"loss": 1.1912, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.79488171685962e-05, |
|
"loss": 1.3468, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.794399614234843e-05, |
|
"loss": 1.0985, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7939170106233833e-05, |
|
"loss": 1.3871, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7934339063295942e-05, |
|
"loss": 1.3854, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7929503016581446e-05, |
|
"loss": 1.2603, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.792466196914019e-05, |
|
"loss": 1.3454, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7919815924025174e-05, |
|
"loss": 1.3331, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7914964884292543e-05, |
|
"loss": 1.1845, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7910108853001597e-05, |
|
"loss": 1.2957, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7905247833214785e-05, |
|
"loss": 1.2642, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.79003818279977e-05, |
|
"loss": 1.3585, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.7895510840419083e-05, |
|
"loss": 1.3701, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7890634873550805e-05, |
|
"loss": 1.5067, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7885753930467894e-05, |
|
"loss": 1.3115, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.78808680142485e-05, |
|
"loss": 1.3628, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7875977127973925e-05, |
|
"loss": 1.0624, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7871081274728594e-05, |
|
"loss": 1.3664, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.786618045760007e-05, |
|
"loss": 1.3321, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.786127467967904e-05, |
|
"loss": 1.418, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7856363944059327e-05, |
|
"loss": 1.2677, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.785144825383788e-05, |
|
"loss": 1.2668, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7846527612114766e-05, |
|
"loss": 1.1138, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7841602021993177e-05, |
|
"loss": 1.4431, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7836671486579434e-05, |
|
"loss": 1.113, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7831736008982967e-05, |
|
"loss": 1.2348, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7826795592316326e-05, |
|
"loss": 1.1089, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.782185023969518e-05, |
|
"loss": 1.9396, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.78168999542383e-05, |
|
"loss": 1.4311, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.781194473906757e-05, |
|
"loss": 1.3562, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7806984597308004e-05, |
|
"loss": 1.748, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.7802019532087692e-05, |
|
"loss": 1.4277, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.779704954653785e-05, |
|
"loss": 1.6625, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7792074643792793e-05, |
|
"loss": 1.6347, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7787094826989927e-05, |
|
"loss": 1.7164, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.778211009926977e-05, |
|
"loss": 1.1276, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7777120463775927e-05, |
|
"loss": 1.1386, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7772125923655104e-05, |
|
"loss": 0.7429, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7767126482057102e-05, |
|
"loss": 1.7416, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.776212214213481e-05, |
|
"loss": 1.0909, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.77571129070442e-05, |
|
"loss": 1.2001, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.775209877994434e-05, |
|
"loss": 1.1985, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7747079763997383e-05, |
|
"loss": 1.3927, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.774205586236856e-05, |
|
"loss": 1.2791, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7737027078226187e-05, |
|
"loss": 1.329, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7731993414741656e-05, |
|
"loss": 1.845, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.772695487508944e-05, |
|
"loss": 1.4102, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7721911462447074e-05, |
|
"loss": 1.004, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7716863179995194e-05, |
|
"loss": 1.7119, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.771181003091748e-05, |
|
"loss": 1.5352, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7706752018400693e-05, |
|
"loss": 1.1019, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7701689145634664e-05, |
|
"loss": 1.6929, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.7696621415812275e-05, |
|
"loss": 1.3236, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.769154883212949e-05, |
|
"loss": 1.0412, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7686471397785322e-05, |
|
"loss": 1.3958, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7681389115981843e-05, |
|
"loss": 1.8158, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.767630198992419e-05, |
|
"loss": 0.9901, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7671210022820543e-05, |
|
"loss": 1.5822, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7666113217882145e-05, |
|
"loss": 1.0834, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.766101157832329e-05, |
|
"loss": 1.2856, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7655905107361313e-05, |
|
"loss": 1.3248, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7650793808216603e-05, |
|
"loss": 1.2248, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.764567768411259e-05, |
|
"loss": 1.1503, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7640556738275755e-05, |
|
"loss": 1.1089, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7635430973935606e-05, |
|
"loss": 1.0886, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.76303003943247e-05, |
|
"loss": 1.2826, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7625165002678623e-05, |
|
"loss": 1.5564, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7620024802236004e-05, |
|
"loss": 1.4393, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7614879796238505e-05, |
|
"loss": 1.3972, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.760972998793081e-05, |
|
"loss": 1.121, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7604575380560637e-05, |
|
"loss": 1.1306, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.759941597737873e-05, |
|
"loss": 1.4002, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7594251781638857e-05, |
|
"loss": 1.1716, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7589082796597812e-05, |
|
"loss": 1.4862, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.75839090255154e-05, |
|
"loss": 1.1721, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.757873047165446e-05, |
|
"loss": 1.4819, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7573547138280828e-05, |
|
"loss": 0.9485, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7568359028663365e-05, |
|
"loss": 1.1956, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7563166146073947e-05, |
|
"loss": 0.8966, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7557968493787455e-05, |
|
"loss": 1.3303, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7552766075081776e-05, |
|
"loss": 1.485, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7547558893237808e-05, |
|
"loss": 1.3002, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7542346951539454e-05, |
|
"loss": 1.4948, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7537130253273613e-05, |
|
"loss": 1.7421, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7531908801730185e-05, |
|
"loss": 1.2823, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7526682600202068e-05, |
|
"loss": 1.3982, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.752145165198516e-05, |
|
"loss": 1.8014, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7516215960378347e-05, |
|
"loss": 1.5365, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7510975528683512e-05, |
|
"loss": 1.3348, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.750573036020552e-05, |
|
"loss": 1.2711, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7500480458252226e-05, |
|
"loss": 1.1657, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7495225826134476e-05, |
|
"loss": 1.3426, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.7489966467166088e-05, |
|
"loss": 1.9029, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7484702384663873e-05, |
|
"loss": 1.0706, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7479433581947614e-05, |
|
"loss": 1.5385, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7474160062340066e-05, |
|
"loss": 1.3825, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7468881829166972e-05, |
|
"loss": 1.3206, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7463598885757038e-05, |
|
"loss": 0.9489, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7458311235441944e-05, |
|
"loss": 1.2469, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7453018881556333e-05, |
|
"loss": 1.5614, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.744772182743782e-05, |
|
"loss": 1.6497, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7442420076426986e-05, |
|
"loss": 1.1453, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7437113631867366e-05, |
|
"loss": 1.1558, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.743180249710546e-05, |
|
"loss": 1.0424, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7426486675490732e-05, |
|
"loss": 1.4262, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7421166170375587e-05, |
|
"loss": 1.3042, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7415840985115394e-05, |
|
"loss": 1.3541, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.741051112306848e-05, |
|
"loss": 1.2677, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.74051765875961e-05, |
|
"loss": 1.1085, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.7399837382062478e-05, |
|
"loss": 1.4783, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.739449350983477e-05, |
|
"loss": 1.7204, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.738914497428308e-05, |
|
"loss": 1.9833, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7383791778780455e-05, |
|
"loss": 1.4038, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7378433926702877e-05, |
|
"loss": 1.4205, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.737307142142926e-05, |
|
"loss": 1.2557, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7367704266341468e-05, |
|
"loss": 1.4119, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7362332464824284e-05, |
|
"loss": 1.1406, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.735695602026542e-05, |
|
"loss": 1.6188, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7351574936055528e-05, |
|
"loss": 1.2576, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.734618921558817e-05, |
|
"loss": 0.953, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.734079886225985e-05, |
|
"loss": 1.1006, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7335403879469984e-05, |
|
"loss": 0.7349, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7330004270620904e-05, |
|
"loss": 1.2366, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7324600039117862e-05, |
|
"loss": 1.3368, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7319191188369032e-05, |
|
"loss": 1.1208, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7313777721785496e-05, |
|
"loss": 1.1123, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.730835964278124e-05, |
|
"loss": 1.2072, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.730293695477318e-05, |
|
"loss": 1.5776, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.7297509661181104e-05, |
|
"loss": 1.2845, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.729207776542774e-05, |
|
"loss": 1.4605, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.72866412709387e-05, |
|
"loss": 0.9502, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.72812001811425e-05, |
|
"loss": 0.9166, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7275754499470547e-05, |
|
"loss": 1.2824, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.727030422935716e-05, |
|
"loss": 1.1884, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7264849374239538e-05, |
|
"loss": 1.3889, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.725938993755777e-05, |
|
"loss": 1.316, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7253925922754846e-05, |
|
"loss": 1.1767, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7248457333276638e-05, |
|
"loss": 1.2903, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.72429841725719e-05, |
|
"loss": 1.4666, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7237506444092268e-05, |
|
"loss": 1.2527, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7232024151292267e-05, |
|
"loss": 1.4831, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.722653729762929e-05, |
|
"loss": 1.385, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7221045886563615e-05, |
|
"loss": 1.3572, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.721554992155839e-05, |
|
"loss": 1.6375, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7210049406079633e-05, |
|
"loss": 1.4267, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.720454434359624e-05, |
|
"loss": 1.7291, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7199034737579962e-05, |
|
"loss": 1.4631, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.719352059150542e-05, |
|
"loss": 1.1362, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.718800190885011e-05, |
|
"loss": 1.3162, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7182478693094372e-05, |
|
"loss": 1.7466, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.717695094772142e-05, |
|
"loss": 1.314, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.7171418676217304e-05, |
|
"loss": 1.6715, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7165881882070952e-05, |
|
"loss": 1.0099, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.716034056877412e-05, |
|
"loss": 1.0538, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7154794739821447e-05, |
|
"loss": 1.3831, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7149244398710387e-05, |
|
"loss": 0.8075, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7143689548941253e-05, |
|
"loss": 1.3607, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7138130194017203e-05, |
|
"loss": 1.6853, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7132566337444238e-05, |
|
"loss": 1.8848, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7126997982731185e-05, |
|
"loss": 1.3898, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7121425133389725e-05, |
|
"loss": 1.462, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.711584779293436e-05, |
|
"loss": 1.3396, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7110265964882432e-05, |
|
"loss": 0.9142, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 1.3848216533660889, |
|
"eval_runtime": 99.6301, |
|
"eval_samples_per_second": 1.164, |
|
"eval_steps_per_second": 1.164, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.710467965275411e-05, |
|
"loss": 1.3565, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7099088860072397e-05, |
|
"loss": 1.2052, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7093493590363112e-05, |
|
"loss": 1.2579, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7087893847154897e-05, |
|
"loss": 1.5135, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7082289633979225e-05, |
|
"loss": 1.6288, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7076680954370383e-05, |
|
"loss": 1.2431, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7071067811865477e-05, |
|
"loss": 2.0285, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7065450210004422e-05, |
|
"loss": 1.4954, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.7059828152329943e-05, |
|
"loss": 1.3016, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7054201642387595e-05, |
|
"loss": 1.2285, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.704857068372571e-05, |
|
"loss": 1.3029, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7042935279895452e-05, |
|
"loss": 1.5233, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7037295434450774e-05, |
|
"loss": 1.4802, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.703165115094844e-05, |
|
"loss": 1.1672, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7026002432948004e-05, |
|
"loss": 1.2408, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7020349284011815e-05, |
|
"loss": 0.5283, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7014691707705027e-05, |
|
"loss": 1.3596, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7009029707595578e-05, |
|
"loss": 0.9959, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.70033632872542e-05, |
|
"loss": 1.5955, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.6997692450254407e-05, |
|
"loss": 1.0983, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.699201720017251e-05, |
|
"loss": 1.4687, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.6986337540587586e-05, |
|
"loss": 1.2817, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.6980653475081505e-05, |
|
"loss": 1.2472, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.6974965007238917e-05, |
|
"loss": 1.5873, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.6969272140647236e-05, |
|
"loss": 1.0519, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.6963574878896663e-05, |
|
"loss": 0.9721, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.695787322558017e-05, |
|
"loss": 1.6194, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.6952167184293485e-05, |
|
"loss": 1.1158, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.694645675863512e-05, |
|
"loss": 0.9189, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6940741952206342e-05, |
|
"loss": 1.3312, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.693502276861118e-05, |
|
"loss": 1.0799, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6929299211456434e-05, |
|
"loss": 1.6904, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.692357128435165e-05, |
|
"loss": 1.5506, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6917838990909133e-05, |
|
"loss": 1.2288, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.691210233474395e-05, |
|
"loss": 1.3026, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.690636131947391e-05, |
|
"loss": 1.4344, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.690061594871957e-05, |
|
"loss": 1.4413, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6894866226104248e-05, |
|
"loss": 1.989, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.688911215525398e-05, |
|
"loss": 1.6863, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.688335373979758e-05, |
|
"loss": 1.6185, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.687759098336656e-05, |
|
"loss": 1.1258, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.687182388959521e-05, |
|
"loss": 1.1172, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6866052462120532e-05, |
|
"loss": 1.4315, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6860276704582266e-05, |
|
"loss": 1.0994, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.685449662062288e-05, |
|
"loss": 0.7539, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.684871221388758e-05, |
|
"loss": 1.4692, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.684292348802428e-05, |
|
"loss": 1.2738, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6837130446683642e-05, |
|
"loss": 1.0591, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.6831333093519037e-05, |
|
"loss": 1.1646, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6825531432186545e-05, |
|
"loss": 1.0403, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.681972546634498e-05, |
|
"loss": 1.1038, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.681391519965586e-05, |
|
"loss": 0.8883, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.680810063578342e-05, |
|
"loss": 0.8832, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.680228177839461e-05, |
|
"loss": 1.5098, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.679645863115908e-05, |
|
"loss": 1.2526, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6790631197749182e-05, |
|
"loss": 1.4754, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6784799481839984e-05, |
|
"loss": 1.2071, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.677896348710924e-05, |
|
"loss": 1.3081, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6773123217237412e-05, |
|
"loss": 1.1485, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6767278675907654e-05, |
|
"loss": 1.7123, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6761429866805818e-05, |
|
"loss": 2.1722, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6755576793620446e-05, |
|
"loss": 0.5983, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.674971946004277e-05, |
|
"loss": 1.4454, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6743857869766698e-05, |
|
"loss": 1.4261, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6737992026488834e-05, |
|
"loss": 1.8654, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.673212193390847e-05, |
|
"loss": 1.4037, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.672624759572756e-05, |
|
"loss": 1.3131, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.672036901565075e-05, |
|
"loss": 1.1671, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.671448619738535e-05, |
|
"loss": 1.1584, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6708599144641357e-05, |
|
"loss": 1.2702, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6702707861131422e-05, |
|
"loss": 1.3125, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.669681235057088e-05, |
|
"loss": 1.3498, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.669091261667772e-05, |
|
"loss": 1.3509, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6685008663172597e-05, |
|
"loss": 1.1434, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.667910049377883e-05, |
|
"loss": 1.3289, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6673188112222394e-05, |
|
"loss": 1.2959, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6667271522231926e-05, |
|
"loss": 1.2411, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6661350727538712e-05, |
|
"loss": 1.0544, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6655425731876685e-05, |
|
"loss": 1.1301, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6649496538982443e-05, |
|
"loss": 1.1511, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6643563152595204e-05, |
|
"loss": 1.232, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6637625576456868e-05, |
|
"loss": 1.193, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.663168381431194e-05, |
|
"loss": 1.135, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.662573786990759e-05, |
|
"loss": 1.1136, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6619787746993617e-05, |
|
"loss": 0.9565, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.661383344932245e-05, |
|
"loss": 1.489, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6607874980649162e-05, |
|
"loss": 1.3864, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.660191234473145e-05, |
|
"loss": 1.4215, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.659594554532963e-05, |
|
"loss": 1.6097, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.658997458620667e-05, |
|
"loss": 1.2021, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6583999471128128e-05, |
|
"loss": 1.696, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.657802020386221e-05, |
|
"loss": 0.3453, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6572036788179728e-05, |
|
"loss": 1.247, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.656604922785411e-05, |
|
"loss": 1.5292, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.65600575266614e-05, |
|
"loss": 1.2324, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6554061688380256e-05, |
|
"loss": 0.6247, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6548061716791943e-05, |
|
"loss": 1.1605, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.654205761568033e-05, |
|
"loss": 1.2244, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6536049388831897e-05, |
|
"loss": 1.2709, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.653003704003571e-05, |
|
"loss": 0.67, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.652402057308346e-05, |
|
"loss": 1.3283, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.651799999176941e-05, |
|
"loss": 1.1593, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6511975299890438e-05, |
|
"loss": 1.5307, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6505946501246e-05, |
|
"loss": 1.0621, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6499913599638147e-05, |
|
"loss": 1.8225, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6493876598871517e-05, |
|
"loss": 1.1734, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6487835502753333e-05, |
|
"loss": 0.8684, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6481790315093405e-05, |
|
"loss": 1.0447, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6475741039704118e-05, |
|
"loss": 1.2504, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6469687680400435e-05, |
|
"loss": 1.5288, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.64636302409999e-05, |
|
"loss": 1.5258, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6457568725322623e-05, |
|
"loss": 0.825, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6451503137191285e-05, |
|
"loss": 0.9596, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6445433480431145e-05, |
|
"loss": 1.2489, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6439359758870016e-05, |
|
"loss": 1.4619, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6433281976338285e-05, |
|
"loss": 1.7392, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6427200136668886e-05, |
|
"loss": 1.2478, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6421114243697328e-05, |
|
"loss": 1.1571, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6415024301261664e-05, |
|
"loss": 1.4271, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6408930313202508e-05, |
|
"loss": 1.6418, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.640283228336302e-05, |
|
"loss": 0.8528, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6396730215588913e-05, |
|
"loss": 0.4624, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6390624113728448e-05, |
|
"loss": 1.2621, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.638451398163242e-05, |
|
"loss": 1.369, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.637839982315418e-05, |
|
"loss": 1.0964, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6372281642149603e-05, |
|
"loss": 1.2814, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.636615944247712e-05, |
|
"loss": 1.1635, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6360033227997674e-05, |
|
"loss": 1.1724, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6353903002574758e-05, |
|
"loss": 1.484, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6347768770074385e-05, |
|
"loss": 1.2226, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6341630534365095e-05, |
|
"loss": 1.3481, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.633548829931796e-05, |
|
"loss": 1.006, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.632934206880657e-05, |
|
"loss": 1.5531, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.632319184670703e-05, |
|
"loss": 1.0621, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6317037636897972e-05, |
|
"loss": 1.3661, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.631087944326053e-05, |
|
"loss": 1.0466, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6304717269678363e-05, |
|
"loss": 1.3364, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6298551120037633e-05, |
|
"loss": 1.412, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.629238099822701e-05, |
|
"loss": 1.3741, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.628620690813767e-05, |
|
"loss": 1.3023, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6280028853663297e-05, |
|
"loss": 1.4314, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6273846838700063e-05, |
|
"loss": 0.7864, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.626766086714665e-05, |
|
"loss": 1.6701, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6261470942904224e-05, |
|
"loss": 1.5097, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6255277069876454e-05, |
|
"loss": 0.8726, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.62490792519695e-05, |
|
"loss": 1.3155, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.624287749309199e-05, |
|
"loss": 1.0042, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6236671797155064e-05, |
|
"loss": 1.6354, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.623046216807233e-05, |
|
"loss": 1.744, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6224248609759877e-05, |
|
"loss": 0.9126, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.621803112613628e-05, |
|
"loss": 0.7313, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6211809721122575e-05, |
|
"loss": 1.2979, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6205584398642286e-05, |
|
"loss": 1.2585, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.61993551626214e-05, |
|
"loss": 1.6595, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6193122016988377e-05, |
|
"loss": 1.5178, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6186884965674134e-05, |
|
"loss": 1.4424, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6180644012612056e-05, |
|
"loss": 1.3139, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6174399161737993e-05, |
|
"loss": 1.4371, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6168150416990246e-05, |
|
"loss": 1.1936, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6161897782309574e-05, |
|
"loss": 1.4718, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.615564126163919e-05, |
|
"loss": 1.4299, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.614938085892476e-05, |
|
"loss": 1.3404, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6143116578114395e-05, |
|
"loss": 1.8379, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6136848423158652e-05, |
|
"loss": 1.356, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6130576398010528e-05, |
|
"loss": 1.3782, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.612430050662547e-05, |
|
"loss": 1.3236, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.611802075296135e-05, |
|
"loss": 1.5099, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6111737140978495e-05, |
|
"loss": 0.8791, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6105449674639642e-05, |
|
"loss": 1.6677, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.609915835790998e-05, |
|
"loss": 1.4567, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.609286319475711e-05, |
|
"loss": 1.0328, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.608656418915106e-05, |
|
"loss": 1.1495, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6080261345064298e-05, |
|
"loss": 1.438, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.60739546664717e-05, |
|
"loss": 0.9594, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6067644157350555e-05, |
|
"loss": 1.3485, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6061329821680578e-05, |
|
"loss": 1.5222, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6055011663443894e-05, |
|
"loss": 0.5173, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6048689686625033e-05, |
|
"loss": 1.4507, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6042363895210948e-05, |
|
"loss": 1.4224, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6036034293190978e-05, |
|
"loss": 1.6695, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.602970088455688e-05, |
|
"loss": 1.1182, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6023363673302802e-05, |
|
"loss": 1.052, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6017022663425304e-05, |
|
"loss": 0.8028, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6010677858923328e-05, |
|
"loss": 1.2943, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6004329263798208e-05, |
|
"loss": 1.4253, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.599797688205368e-05, |
|
"loss": 1.0901, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.599162071769586e-05, |
|
"loss": 1.5041, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5985260774733252e-05, |
|
"loss": 0.9272, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5978897057176746e-05, |
|
"loss": 0.8361, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5972529569039603e-05, |
|
"loss": 4.7241, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5966158314337472e-05, |
|
"loss": 1.0232, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.595978329708837e-05, |
|
"loss": 0.9797, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5953404521312695e-05, |
|
"loss": 1.0088, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5947021991033206e-05, |
|
"loss": 1.5512, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5940635710275033e-05, |
|
"loss": 1.1678, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5934245683065682e-05, |
|
"loss": 1.3641, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5927851913435e-05, |
|
"loss": 1.7091, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.592145440541521e-05, |
|
"loss": 1.1663, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5915053163040894e-05, |
|
"loss": 1.668, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.590864819034898e-05, |
|
"loss": 1.3709, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5902239491378756e-05, |
|
"loss": 1.4036, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.589582707017185e-05, |
|
"loss": 0.7514, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5889410930772245e-05, |
|
"loss": 0.9069, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.588299107722627e-05, |
|
"loss": 1.0832, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5876567513582598e-05, |
|
"loss": 1.4737, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5870140243892233e-05, |
|
"loss": 1.0943, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.586370927220852e-05, |
|
"loss": 1.3604, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5857274602587137e-05, |
|
"loss": 1.5542, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5850836239086107e-05, |
|
"loss": 1.0561, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5844394185765764e-05, |
|
"loss": 1.4651, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5837948446688782e-05, |
|
"loss": 1.3101, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5831499025920153e-05, |
|
"loss": 1.1301, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.582504592752719e-05, |
|
"loss": 1.8708, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.581858915557953e-05, |
|
"loss": 1.287, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5812128714149125e-05, |
|
"loss": 1.2097, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5805664607310238e-05, |
|
"loss": 0.9074, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5799196839139456e-05, |
|
"loss": 1.4384, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.579272541371565e-05, |
|
"loss": 1.3117, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5786250335120022e-05, |
|
"loss": 0.4972, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5779771607436073e-05, |
|
"loss": 1.0646, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5773289234749588e-05, |
|
"loss": 1.5521, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5766803221148676e-05, |
|
"loss": 1.3221, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5760313570723716e-05, |
|
"loss": 1.2105, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.575382028756741e-05, |
|
"loss": 0.2071, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.574732337577473e-05, |
|
"loss": 1.3853, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5740822839442936e-05, |
|
"loss": 1.0781, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5734318682671586e-05, |
|
"loss": 1.1282, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5727810909562508e-05, |
|
"loss": 1.0346, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5721299524219823e-05, |
|
"loss": 1.2719, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5714784530749927e-05, |
|
"loss": 0.631, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.570826593326148e-05, |
|
"loss": 1.5083, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.570174373586543e-05, |
|
"loss": 1.2729, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.569521794267499e-05, |
|
"loss": 1.5531, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.568868855780563e-05, |
|
"loss": 1.0676, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.568215558537511e-05, |
|
"loss": 1.0867, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5675619029503426e-05, |
|
"loss": 1.2246, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5669078894312848e-05, |
|
"loss": 1.6245, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.56625351839279e-05, |
|
"loss": 1.1104, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5655987902475365e-05, |
|
"loss": 1.5263, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5649437054084267e-05, |
|
"loss": 0.6199, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5642882642885894e-05, |
|
"loss": 1.3261, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.563632467301377e-05, |
|
"loss": 1.3702, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5629763148603666e-05, |
|
"loss": 1.5479, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.56231980737936e-05, |
|
"loss": 1.3279, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5616629452723818e-05, |
|
"loss": 1.7209, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5610057289536814e-05, |
|
"loss": 1.3794, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.560348158837731e-05, |
|
"loss": 1.1775, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5596902353392258e-05, |
|
"loss": 1.3188, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"eval_loss": 1.3631528615951538, |
|
"eval_runtime": 99.6336, |
|
"eval_samples_per_second": 1.164, |
|
"eval_steps_per_second": 1.164, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5590319588730844e-05, |
|
"loss": 1.1441, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5583733298544472e-05, |
|
"loss": 1.1252, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5577143486986778e-05, |
|
"loss": 1.2104, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5570550158213614e-05, |
|
"loss": 1.6675, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5563953316383052e-05, |
|
"loss": 0.8392, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.555735296565537e-05, |
|
"loss": 1.0056, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.555074911019308e-05, |
|
"loss": 1.1701, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5544141754160885e-05, |
|
"loss": 1.0574, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5537530901725702e-05, |
|
"loss": 1.1025, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5530916557056653e-05, |
|
"loss": 1.3033, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5524298724325064e-05, |
|
"loss": 1.4389, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.551767740770446e-05, |
|
"loss": 1.1951, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.551105261137056e-05, |
|
"loss": 1.2612, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5504424339501284e-05, |
|
"loss": 1.195, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.549779259627673e-05, |
|
"loss": 1.3947, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5491157385879206e-05, |
|
"loss": 1.0962, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5484518712493188e-05, |
|
"loss": 1.1659, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.547787658030535e-05, |
|
"loss": 1.6983, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5471230993504535e-05, |
|
"loss": 1.4831, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.546458195628177e-05, |
|
"loss": 1.4953, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5457929472830263e-05, |
|
"loss": 1.0964, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5451273547345387e-05, |
|
"loss": 1.2131, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5444614184024694e-05, |
|
"loss": 0.727, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.543795138706789e-05, |
|
"loss": 1.263, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5431285160676866e-05, |
|
"loss": 0.8979, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5424615509055664e-05, |
|
"loss": 1.1657, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5417942436410482e-05, |
|
"loss": 1.6339, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5411265946949686e-05, |
|
"loss": 0.7919, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5404586044883788e-05, |
|
"loss": 1.6113, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.539790273442546e-05, |
|
"loss": 1.0389, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5391216019789522e-05, |
|
"loss": 1.1089, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.538452590519293e-05, |
|
"loss": 1.1045, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.53778323948548e-05, |
|
"loss": 1.4433, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5371135492996382e-05, |
|
"loss": 1.0384, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5364435203841058e-05, |
|
"loss": 1.2829, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5357731531614363e-05, |
|
"loss": 1.6972, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5351024480543948e-05, |
|
"loss": 1.0734, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5344314054859606e-05, |
|
"loss": 1.5467, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5337600258793255e-05, |
|
"loss": 1.0984, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5330883096578937e-05, |
|
"loss": 1.3236, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.532416257245282e-05, |
|
"loss": 1.1772, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.531743869065319e-05, |
|
"loss": 1.708, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.531071145542045e-05, |
|
"loss": 1.5264, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.530398087099712e-05, |
|
"loss": 1.1115, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5297246941627827e-05, |
|
"loss": 1.1771, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5290509671559315e-05, |
|
"loss": 1.3713, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5283769065040434e-05, |
|
"loss": 1.205, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5277025126322128e-05, |
|
"loss": 1.148, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5270277859657457e-05, |
|
"loss": 1.64, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5263527269301567e-05, |
|
"loss": 1.3575, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.525677335951171e-05, |
|
"loss": 1.4293, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5250016134547223e-05, |
|
"loss": 1.019, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.524325559866954e-05, |
|
"loss": 1.6696, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5236491756142183e-05, |
|
"loss": 1.6222, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5229724611230752e-05, |
|
"loss": 0.9186, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5222954168202938e-05, |
|
"loss": 0.3488, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5216180431328506e-05, |
|
"loss": 1.9509, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5209403404879305e-05, |
|
"loss": 1.1477, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.520262309312925e-05, |
|
"loss": 1.1513, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5195839500354337e-05, |
|
"loss": 1.3353, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5189052630832626e-05, |
|
"loss": 1.2326, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5182262488844237e-05, |
|
"loss": 1.4649, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.517546907867137e-05, |
|
"loss": 1.4153, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5168672404598273e-05, |
|
"loss": 1.2863, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5161872470911252e-05, |
|
"loss": 1.4202, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.515506928189868e-05, |
|
"loss": 1.231, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5148262841850972e-05, |
|
"loss": 0.758, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5141453155060601e-05, |
|
"loss": 1.247, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5134640225822077e-05, |
|
"loss": 0.9016, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5127824058431967e-05, |
|
"loss": 1.6539, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5121004657188871e-05, |
|
"loss": 1.4608, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5114182026393434e-05, |
|
"loss": 1.7002, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.510735617034834e-05, |
|
"loss": 1.2566, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5100527093358295e-05, |
|
"loss": 1.1942, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5093694799730046e-05, |
|
"loss": 0.9594, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5086859293772369e-05, |
|
"loss": 1.2609, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5080020579796066e-05, |
|
"loss": 1.5419, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5073178662113952e-05, |
|
"loss": 1.4504, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5066333545040879e-05, |
|
"loss": 1.3445, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.50594852328937e-05, |
|
"loss": 1.1537, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5052633729991296e-05, |
|
"loss": 1.051, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.504577904065455e-05, |
|
"loss": 0.9607, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5038921169206361e-05, |
|
"loss": 1.2126, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.503206011997164e-05, |
|
"loss": 1.1248, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5025195897277284e-05, |
|
"loss": 1.2748, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5018328505452213e-05, |
|
"loss": 0.9557, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5011457948827328e-05, |
|
"loss": 1.2038, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5004584231735536e-05, |
|
"loss": 1.3273, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4997707358511734e-05, |
|
"loss": 1.3941, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4990827333492812e-05, |
|
"loss": 0.4418, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.498394416101764e-05, |
|
"loss": 0.946, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4977057845427083e-05, |
|
"loss": 1.737, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4970168391063982e-05, |
|
"loss": 0.7765, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4963275802273156e-05, |
|
"loss": 1.2591, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4956380083401412e-05, |
|
"loss": 1.3889, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4949481238797512e-05, |
|
"loss": 1.1744, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4942579272812208e-05, |
|
"loss": 1.1486, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4935674189798209e-05, |
|
"loss": 0.9002, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4928765994110192e-05, |
|
"loss": 0.9724, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.49218546901048e-05, |
|
"loss": 0.9945, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4914940282140635e-05, |
|
"loss": 1.1417, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4908022774578254e-05, |
|
"loss": 1.4544, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4901102171780175e-05, |
|
"loss": 1.2665, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4894178478110856e-05, |
|
"loss": 1.2536, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.488725169793672e-05, |
|
"loss": 1.5239, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4880321835626118e-05, |
|
"loss": 1.0851, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.487338889554937e-05, |
|
"loss": 1.1498, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.486645288207871e-05, |
|
"loss": 1.5201, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4859513799588326e-05, |
|
"loss": 1.4594, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4852571652454343e-05, |
|
"loss": 1.6581, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4845626445054802e-05, |
|
"loss": 0.9147, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4838678181769695e-05, |
|
"loss": 1.1244, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4831726866980929e-05, |
|
"loss": 1.3587, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4824772505072335e-05, |
|
"loss": 1.1802, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4817815100429678e-05, |
|
"loss": 1.4132, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4810854657440622e-05, |
|
"loss": 1.3665, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4803891180494763e-05, |
|
"loss": 1.3553, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4796924673983604e-05, |
|
"loss": 1.2812, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.478995514230056e-05, |
|
"loss": 1.6723, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4782982589840957e-05, |
|
"loss": 1.3414, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4776007021002019e-05, |
|
"loss": 0.8794, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4769028440182874e-05, |
|
"loss": 1.7376, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4762046851784554e-05, |
|
"loss": 1.1508, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4755062260209985e-05, |
|
"loss": 0.9689, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4748074669863986e-05, |
|
"loss": 0.928, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.474108408515327e-05, |
|
"loss": 1.2703, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4734090510486435e-05, |
|
"loss": 1.7092, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4727093950273967e-05, |
|
"loss": 1.4311, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4720094408928239e-05, |
|
"loss": 1.1123, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4713091890863487e-05, |
|
"loss": 1.3895, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4706086400495844e-05, |
|
"loss": 1.2201, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4699077942243315e-05, |
|
"loss": 0.9592, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4692066520525763e-05, |
|
"loss": 0.9824, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.468505213976493e-05, |
|
"loss": 1.277, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4678034804384424e-05, |
|
"loss": 1.4331, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4671014518809717e-05, |
|
"loss": 0.6717, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4663991287468136e-05, |
|
"loss": 1.1993, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.465696511478887e-05, |
|
"loss": 1.2738, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4649936005202965e-05, |
|
"loss": 1.45, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4642903963143309e-05, |
|
"loss": 1.2078, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4635868993044655e-05, |
|
"loss": 0.8517, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.462883109934359e-05, |
|
"loss": 1.329, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4621790286478547e-05, |
|
"loss": 1.3455, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.461474655888981e-05, |
|
"loss": 1.4645, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4607699921019483e-05, |
|
"loss": 1.2868, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4600650377311523e-05, |
|
"loss": 1.3007, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4593597932211709e-05, |
|
"loss": 0.9934, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4586542590167648e-05, |
|
"loss": 1.0202, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4579484355628788e-05, |
|
"loss": 1.344, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4572423233046386e-05, |
|
"loss": 0.8531, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4565359226873529e-05, |
|
"loss": 1.2904, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4558292341565117e-05, |
|
"loss": 1.2244, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.455122258157787e-05, |
|
"loss": 1.0088, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.454414995137032e-05, |
|
"loss": 1.2391, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4537074455402806e-05, |
|
"loss": 0.6694, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4529996098137478e-05, |
|
"loss": 1.2455, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4522914884038293e-05, |
|
"loss": 0.9336, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4515830817570998e-05, |
|
"loss": 1.1965, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4508743903203156e-05, |
|
"loss": 1.4426, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.450165414540411e-05, |
|
"loss": 1.6646, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4494561548645e-05, |
|
"loss": 0.3554, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4487466117398772e-05, |
|
"loss": 1.0771, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4480367856140136e-05, |
|
"loss": 0.8783, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.44732667693456e-05, |
|
"loss": 1.7154, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4466162861493453e-05, |
|
"loss": 1.2587, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4459056137063763e-05, |
|
"loss": 1.2324, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.445194660053837e-05, |
|
"loss": 0.9895, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.444483425640089e-05, |
|
"loss": 1.2503, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4437719109136712e-05, |
|
"loss": 1.3562, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4430601163232991e-05, |
|
"loss": 2.0011, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4423480423178643e-05, |
|
"loss": 1.3415, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4416356893464355e-05, |
|
"loss": 1.2739, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4409230578582566e-05, |
|
"loss": 1.2871, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.440210148302747e-05, |
|
"loss": 1.2031, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4394969611295029e-05, |
|
"loss": 1.3758, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4387834967882934e-05, |
|
"loss": 1.2098, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4380697557290638e-05, |
|
"loss": 1.0952, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4373557384019337e-05, |
|
"loss": 1.3265, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4366414452571969e-05, |
|
"loss": 1.5613, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4359268767453206e-05, |
|
"loss": 0.7024, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4352120333169472e-05, |
|
"loss": 1.3177, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4344969154228903e-05, |
|
"loss": 1.6341, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4337815235141386e-05, |
|
"loss": 1.2655, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4330658580418519e-05, |
|
"loss": 1.843, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4323499194573637e-05, |
|
"loss": 1.1735, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.43163370821218e-05, |
|
"loss": 0.3937, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4309172247579769e-05, |
|
"loss": 1.2832, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4302004695466041e-05, |
|
"loss": 1.5502, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4294834430300822e-05, |
|
"loss": 1.1838, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4287661456606021e-05, |
|
"loss": 1.2188, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4280485778905265e-05, |
|
"loss": 1.1253, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4273307401723881e-05, |
|
"loss": 1.308, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4266126329588899e-05, |
|
"loss": 1.8793, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.425894256702905e-05, |
|
"loss": 1.1015, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4251756118574763e-05, |
|
"loss": 1.2162, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4244566988758152e-05, |
|
"loss": 1.8843, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4237375182113036e-05, |
|
"loss": 1.0567, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.423018070317491e-05, |
|
"loss": 1.7976, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4222983556480964e-05, |
|
"loss": 1.2846, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.421578374657006e-05, |
|
"loss": 1.3551, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4208581277982752e-05, |
|
"loss": 0.8113, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4201376155261256e-05, |
|
"loss": 1.3371, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4194168382949472e-05, |
|
"loss": 1.6013, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4186957965592973e-05, |
|
"loss": 1.7967, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.417974490773899e-05, |
|
"loss": 1.525, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.417252921393643e-05, |
|
"loss": 1.355, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4165310888735854e-05, |
|
"loss": 1.0396, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4158089936689485e-05, |
|
"loss": 1.3804, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4150866362351205e-05, |
|
"loss": 1.6258, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.414364017027655e-05, |
|
"loss": 0.8329, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4136411365022702e-05, |
|
"loss": 1.8108, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4129179951148495e-05, |
|
"loss": 1.3163, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4121945933214407e-05, |
|
"loss": 1.172, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.411470931578256e-05, |
|
"loss": 1.2028, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.410747010341671e-05, |
|
"loss": 1.1196, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4100228300682252e-05, |
|
"loss": 1.1249, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4092983912146226e-05, |
|
"loss": 0.9167, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4085736942377285e-05, |
|
"loss": 1.2677, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4078487395945712e-05, |
|
"loss": 1.2615, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4071235277423432e-05, |
|
"loss": 1.6524, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4063980591383972e-05, |
|
"loss": 1.132, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.405672334240249e-05, |
|
"loss": 0.9321, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4049463535055758e-05, |
|
"loss": 0.7317, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4042201173922158e-05, |
|
"loss": 1.2999, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4034936263581686e-05, |
|
"loss": 1.2965, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4027668808615944e-05, |
|
"loss": 0.7941, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4020398813608141e-05, |
|
"loss": 1.2406, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4013126283143084e-05, |
|
"loss": 1.3433, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.400585122180718e-05, |
|
"loss": 1.0819, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3998573634188441e-05, |
|
"loss": 1.3964, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3991293524876456e-05, |
|
"loss": 1.2857, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3984010898462417e-05, |
|
"loss": 1.2419, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3976725759539095e-05, |
|
"loss": 1.6901, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3969438112700857e-05, |
|
"loss": 1.2265, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3962147962543635e-05, |
|
"loss": 1.5302, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3954855313664958e-05, |
|
"loss": 1.5453, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3947560170663916e-05, |
|
"loss": 0.8828, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3940262538141182e-05, |
|
"loss": 1.4679, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3932962420698993e-05, |
|
"loss": 1.3953, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3925659822941149e-05, |
|
"loss": 0.9066, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.391835474947303e-05, |
|
"loss": 1.576, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.391104720490156e-05, |
|
"loss": 1.3168, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3903737193835236e-05, |
|
"loss": 1.0702, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3896424720884097e-05, |
|
"loss": 1.5452, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3889109790659739e-05, |
|
"loss": 1.9422, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3881792407775318e-05, |
|
"loss": 1.1427, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3874472576845523e-05, |
|
"loss": 1.3092, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.386715030248659e-05, |
|
"loss": 1.1538, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"eval_loss": 1.3443834781646729, |
|
"eval_runtime": 99.6391, |
|
"eval_samples_per_second": 1.164, |
|
"eval_steps_per_second": 1.164, |
|
"step": 1488 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3966, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 496, |
|
"total_flos": 2.630979922302075e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|