{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9991089991089991,
  "global_step": 841,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.988109393579072e-05, |
|
"loss": 5.3447, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.976218787158146e-05, |
|
"loss": 4.0093, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.964328180737218e-05, |
|
"loss": 3.4538, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.95243757431629e-05, |
|
"loss": 3.1487, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.940546967895364e-05, |
|
"loss": 3.1387, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.928656361474435e-05, |
|
"loss": 2.9713, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.916765755053509e-05, |
|
"loss": 2.8585, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.904875148632581e-05, |
|
"loss": 2.7266, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.892984542211652e-05, |
|
"loss": 2.7606, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.881093935790727e-05, |
|
"loss": 2.5991, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.869203329369798e-05, |
|
"loss": 2.674, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.857312722948871e-05, |
|
"loss": 2.6881, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.845422116527944e-05, |
|
"loss": 2.5562, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.833531510107015e-05, |
|
"loss": 2.6063, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.821640903686088e-05, |
|
"loss": 2.5019, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.809750297265161e-05, |
|
"loss": 2.6296, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.797859690844233e-05, |
|
"loss": 2.6206, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.785969084423307e-05, |
|
"loss": 2.6798, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.774078478002379e-05, |
|
"loss": 2.5783, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.762187871581451e-05, |
|
"loss": 2.5709, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.750297265160524e-05, |
|
"loss": 2.5785, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.738406658739596e-05, |
|
"loss": 2.4995, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.726516052318669e-05, |
|
"loss": 2.5272, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.714625445897742e-05, |
|
"loss": 2.5163, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.702734839476813e-05, |
|
"loss": 2.617, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.690844233055887e-05, |
|
"loss": 2.5328, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.678953626634959e-05, |
|
"loss": 2.5843, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.667063020214032e-05, |
|
"loss": 2.4304, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.655172413793105e-05, |
|
"loss": 2.4767, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.643281807372176e-05, |
|
"loss": 2.4652, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.631391200951249e-05, |
|
"loss": 2.5466, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.619500594530322e-05, |
|
"loss": 2.4983, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.607609988109393e-05, |
|
"loss": 2.4045, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.595719381688466e-05, |
|
"loss": 2.5499, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.583828775267539e-05, |
|
"loss": 2.5097, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.571938168846611e-05, |
|
"loss": 2.4802, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.560047562425685e-05, |
|
"loss": 2.4572, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.548156956004757e-05, |
|
"loss": 2.527, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.53626634958383e-05, |
|
"loss": 2.5414, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.524375743162902e-05, |
|
"loss": 2.5576, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.512485136741974e-05, |
|
"loss": 2.438, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.500594530321047e-05, |
|
"loss": 2.369, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.48870392390012e-05, |
|
"loss": 2.5205, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.476813317479191e-05, |
|
"loss": 2.5029, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.464922711058264e-05, |
|
"loss": 2.397, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.453032104637337e-05, |
|
"loss": 2.4967, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.44114149821641e-05, |
|
"loss": 2.4222, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.429250891795483e-05, |
|
"loss": 2.427, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.417360285374554e-05, |
|
"loss": 2.433, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.405469678953627e-05, |
|
"loss": 2.4334, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.3935790725327e-05, |
|
"loss": 2.4152, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.381688466111772e-05, |
|
"loss": 2.4286, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.369797859690844e-05, |
|
"loss": 2.4926, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.357907253269917e-05, |
|
"loss": 2.3161, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.34601664684899e-05, |
|
"loss": 2.4149, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.334126040428063e-05, |
|
"loss": 2.4591, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.322235434007135e-05, |
|
"loss": 2.4776, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.310344827586207e-05, |
|
"loss": 2.4473, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.29845422116528e-05, |
|
"loss": 2.3586, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.286563614744352e-05, |
|
"loss": 2.39, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.274673008323425e-05, |
|
"loss": 2.4279, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.262782401902498e-05, |
|
"loss": 2.4257, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.25089179548157e-05, |
|
"loss": 2.4391, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.239001189060642e-05, |
|
"loss": 2.529, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.227110582639715e-05, |
|
"loss": 2.5275, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.215219976218788e-05, |
|
"loss": 2.4203, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.203329369797861e-05, |
|
"loss": 2.5276, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.191438763376932e-05, |
|
"loss": 2.4122, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.179548156956005e-05, |
|
"loss": 2.3995, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.167657550535078e-05, |
|
"loss": 2.3908, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.155766944114151e-05, |
|
"loss": 2.4562, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.143876337693222e-05, |
|
"loss": 2.429, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.131985731272295e-05, |
|
"loss": 2.3626, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.120095124851368e-05, |
|
"loss": 2.454, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.10820451843044e-05, |
|
"loss": 2.3086, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.096313912009513e-05, |
|
"loss": 2.3726, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.084423305588586e-05, |
|
"loss": 2.4329, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.072532699167658e-05, |
|
"loss": 2.3842, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.06064209274673e-05, |
|
"loss": 2.3961, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.048751486325803e-05, |
|
"loss": 2.3575, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.036860879904876e-05, |
|
"loss": 2.3325, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.024970273483949e-05, |
|
"loss": 2.3595, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.01307966706302e-05, |
|
"loss": 2.4147, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.001189060642093e-05, |
|
"loss": 2.3623, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 8.989298454221166e-05, |
|
"loss": 2.2816, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 8.977407847800237e-05, |
|
"loss": 2.4441, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 8.96551724137931e-05, |
|
"loss": 2.2866, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 8.953626634958383e-05, |
|
"loss": 2.3949, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.941736028537456e-05, |
|
"loss": 2.3233, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.929845422116529e-05, |
|
"loss": 2.4015, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.9179548156956e-05, |
|
"loss": 2.3602, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.906064209274673e-05, |
|
"loss": 2.3944, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.894173602853746e-05, |
|
"loss": 2.1986, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.882282996432818e-05, |
|
"loss": 2.3742, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.87039239001189e-05, |
|
"loss": 2.3592, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 8.858501783590964e-05, |
|
"loss": 2.2872, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.846611177170036e-05, |
|
"loss": 2.3018, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.83472057074911e-05, |
|
"loss": 2.3394, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.822829964328181e-05, |
|
"loss": 2.3526, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.810939357907254e-05, |
|
"loss": 2.2932, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.799048751486327e-05, |
|
"loss": 2.3435, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.787158145065398e-05, |
|
"loss": 2.4042, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.775267538644471e-05, |
|
"loss": 2.3656, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.763376932223544e-05, |
|
"loss": 2.4653, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.751486325802615e-05, |
|
"loss": 2.3387, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.73959571938169e-05, |
|
"loss": 2.4952, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.727705112960761e-05, |
|
"loss": 2.4118, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.715814506539834e-05, |
|
"loss": 2.3231, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.703923900118907e-05, |
|
"loss": 2.219, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.692033293697979e-05, |
|
"loss": 2.3674, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.680142687277051e-05, |
|
"loss": 2.3375, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.668252080856124e-05, |
|
"loss": 2.4082, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.656361474435196e-05, |
|
"loss": 2.3514, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.64447086801427e-05, |
|
"loss": 2.3587, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.632580261593342e-05, |
|
"loss": 2.3861, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.620689655172413e-05, |
|
"loss": 2.4125, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.608799048751487e-05, |
|
"loss": 2.3367, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.596908442330559e-05, |
|
"loss": 2.3436, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.585017835909632e-05, |
|
"loss": 2.288, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.573127229488705e-05, |
|
"loss": 2.4524, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.561236623067776e-05, |
|
"loss": 2.3003, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.549346016646849e-05, |
|
"loss": 2.2782, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.537455410225922e-05, |
|
"loss": 2.3073, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.525564803804994e-05, |
|
"loss": 2.3048, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.513674197384068e-05, |
|
"loss": 2.3759, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.501783590963139e-05, |
|
"loss": 2.3557, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.489892984542212e-05, |
|
"loss": 2.4122, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.478002378121285e-05, |
|
"loss": 2.267, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.466111771700357e-05, |
|
"loss": 2.3591, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.45422116527943e-05, |
|
"loss": 2.3761, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.442330558858502e-05, |
|
"loss": 2.3009, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.430439952437574e-05, |
|
"loss": 2.3331, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.418549346016648e-05, |
|
"loss": 2.4314, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.40665873959572e-05, |
|
"loss": 2.356, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.394768133174791e-05, |
|
"loss": 2.2634, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.382877526753865e-05, |
|
"loss": 2.2519, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.370986920332937e-05, |
|
"loss": 2.2652, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.35909631391201e-05, |
|
"loss": 2.3167, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.347205707491083e-05, |
|
"loss": 2.3001, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.335315101070154e-05, |
|
"loss": 2.3149, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.323424494649228e-05, |
|
"loss": 2.2451, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.3115338882283e-05, |
|
"loss": 2.2441, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.299643281807372e-05, |
|
"loss": 2.3269, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.287752675386446e-05, |
|
"loss": 2.4435, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.275862068965517e-05, |
|
"loss": 2.3918, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.26397146254459e-05, |
|
"loss": 2.2992, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.252080856123663e-05, |
|
"loss": 2.2931, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.240190249702735e-05, |
|
"loss": 2.3183, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.228299643281809e-05, |
|
"loss": 2.3543, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.21640903686088e-05, |
|
"loss": 2.3209, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.204518430439952e-05, |
|
"loss": 2.2987, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.192627824019026e-05, |
|
"loss": 2.4052, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.180737217598098e-05, |
|
"loss": 2.4125, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.16884661117717e-05, |
|
"loss": 2.395, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.156956004756243e-05, |
|
"loss": 2.3669, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.145065398335315e-05, |
|
"loss": 2.3195, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.133174791914388e-05, |
|
"loss": 2.287, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.121284185493461e-05, |
|
"loss": 2.3577, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.109393579072532e-05, |
|
"loss": 2.3937, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.097502972651607e-05, |
|
"loss": 2.2992, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.085612366230678e-05, |
|
"loss": 2.3189, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.073721759809751e-05, |
|
"loss": 2.2906, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.061831153388824e-05, |
|
"loss": 2.4352, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.049940546967895e-05, |
|
"loss": 2.3556, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.038049940546968e-05, |
|
"loss": 2.2791, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.026159334126041e-05, |
|
"loss": 2.3316, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.014268727705113e-05, |
|
"loss": 2.303, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.002378121284187e-05, |
|
"loss": 2.357, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.990487514863258e-05, |
|
"loss": 2.2938, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.978596908442331e-05, |
|
"loss": 2.3928, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.966706302021404e-05, |
|
"loss": 2.3917, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.954815695600476e-05, |
|
"loss": 2.3116, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.942925089179549e-05, |
|
"loss": 2.2779, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.931034482758621e-05, |
|
"loss": 2.2834, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.919143876337693e-05, |
|
"loss": 2.2771, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.907253269916766e-05, |
|
"loss": 2.2942, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.895362663495839e-05, |
|
"loss": 2.347, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.88347205707491e-05, |
|
"loss": 2.3082, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.871581450653985e-05, |
|
"loss": 2.3719, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.859690844233056e-05, |
|
"loss": 2.3116, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.847800237812129e-05, |
|
"loss": 2.3329, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.835909631391202e-05, |
|
"loss": 2.3221, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.824019024970273e-05, |
|
"loss": 2.2503, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.812128418549346e-05, |
|
"loss": 2.3554, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.800237812128419e-05, |
|
"loss": 2.307, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.788347205707491e-05, |
|
"loss": 2.381, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.776456599286564e-05, |
|
"loss": 2.2745, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.764565992865636e-05, |
|
"loss": 2.3479, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.75267538644471e-05, |
|
"loss": 2.2945, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.740784780023782e-05, |
|
"loss": 2.2905, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.728894173602854e-05, |
|
"loss": 2.4561, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.717003567181927e-05, |
|
"loss": 2.3004, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.705112960761e-05, |
|
"loss": 2.2648, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.693222354340071e-05, |
|
"loss": 2.2132, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.681331747919144e-05, |
|
"loss": 2.3311, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.669441141498217e-05, |
|
"loss": 2.3481, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.65755053507729e-05, |
|
"loss": 2.2923, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.645659928656363e-05, |
|
"loss": 2.2442, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.633769322235434e-05, |
|
"loss": 2.3526, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.621878715814507e-05, |
|
"loss": 2.4091, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.60998810939358e-05, |
|
"loss": 2.353, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.598097502972651e-05, |
|
"loss": 2.2054, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.586206896551724e-05, |
|
"loss": 2.3387, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.574316290130797e-05, |
|
"loss": 2.2297, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.56242568370987e-05, |
|
"loss": 2.3688, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 7.550535077288942e-05, |
|
"loss": 2.267, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.538644470868014e-05, |
|
"loss": 2.4415, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.526753864447087e-05, |
|
"loss": 2.3147, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.51486325802616e-05, |
|
"loss": 2.391, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.502972651605232e-05, |
|
"loss": 2.3805, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.491082045184305e-05, |
|
"loss": 2.3899, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.479191438763378e-05, |
|
"loss": 2.1841, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.46730083234245e-05, |
|
"loss": 2.3532, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.455410225921522e-05, |
|
"loss": 2.3409, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.443519619500595e-05, |
|
"loss": 2.234, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.431629013079668e-05, |
|
"loss": 2.2062, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.419738406658739e-05, |
|
"loss": 2.2888, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.407847800237812e-05, |
|
"loss": 2.3873, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.395957193816885e-05, |
|
"loss": 2.2904, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.384066587395958e-05, |
|
"loss": 2.4081, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.372175980975031e-05, |
|
"loss": 2.2885, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.360285374554102e-05, |
|
"loss": 2.276, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 7.348394768133175e-05, |
|
"loss": 2.2864, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.336504161712248e-05, |
|
"loss": 2.2151, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.32461355529132e-05, |
|
"loss": 2.304, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.312722948870393e-05, |
|
"loss": 2.3298, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.300832342449465e-05, |
|
"loss": 2.4339, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.288941736028538e-05, |
|
"loss": 2.2226, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.27705112960761e-05, |
|
"loss": 2.2705, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.265160523186683e-05, |
|
"loss": 2.2733, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 7.253269916765756e-05, |
|
"loss": 2.3243, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.241379310344828e-05, |
|
"loss": 2.3555, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.2294887039239e-05, |
|
"loss": 2.3382, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.217598097502973e-05, |
|
"loss": 2.2313, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.205707491082046e-05, |
|
"loss": 2.2455, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.193816884661117e-05, |
|
"loss": 2.2006, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.18192627824019e-05, |
|
"loss": 2.3497, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.170035671819263e-05, |
|
"loss": 2.2399, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.158145065398336e-05, |
|
"loss": 2.2781, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.146254458977409e-05, |
|
"loss": 2.2743, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.13436385255648e-05, |
|
"loss": 2.249, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.122473246135553e-05, |
|
"loss": 2.3425, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.110582639714626e-05, |
|
"loss": 2.5219, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.098692033293698e-05, |
|
"loss": 2.3501, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.08680142687277e-05, |
|
"loss": 2.2665, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.074910820451843e-05, |
|
"loss": 2.1755, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.063020214030915e-05, |
|
"loss": 2.3712, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.051129607609989e-05, |
|
"loss": 2.2851, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.039239001189061e-05, |
|
"loss": 2.3449, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.027348394768134e-05, |
|
"loss": 2.2827, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.015457788347207e-05, |
|
"loss": 2.1749, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.003567181926278e-05, |
|
"loss": 2.3592, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 6.991676575505351e-05, |
|
"loss": 2.2849, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 6.979785969084424e-05, |
|
"loss": 2.2484, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 6.967895362663495e-05, |
|
"loss": 2.2173, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 6.95600475624257e-05, |
|
"loss": 2.2347, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.944114149821641e-05, |
|
"loss": 2.2383, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.932223543400713e-05, |
|
"loss": 2.312, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.920332936979787e-05, |
|
"loss": 2.2611, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.908442330558858e-05, |
|
"loss": 2.2153, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.896551724137931e-05, |
|
"loss": 2.2891, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.884661117717004e-05, |
|
"loss": 2.2153, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.872770511296076e-05, |
|
"loss": 2.3287, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.86087990487515e-05, |
|
"loss": 2.3064, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 6.848989298454222e-05, |
|
"loss": 2.3049, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.837098692033293e-05, |
|
"loss": 2.3795, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.825208085612367e-05, |
|
"loss": 2.3491, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.813317479191439e-05, |
|
"loss": 2.1713, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.801426872770512e-05, |
|
"loss": 2.2631, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.789536266349585e-05, |
|
"loss": 2.2635, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.777645659928656e-05, |
|
"loss": 2.3542, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.765755053507729e-05, |
|
"loss": 2.3213, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 6.753864447086802e-05, |
|
"loss": 2.3188, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.741973840665873e-05, |
|
"loss": 2.2233, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.730083234244948e-05, |
|
"loss": 2.2859, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.718192627824019e-05, |
|
"loss": 2.1248, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.706302021403092e-05, |
|
"loss": 2.2384, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.694411414982165e-05, |
|
"loss": 2.2221, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.682520808561236e-05, |
|
"loss": 2.432, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.67063020214031e-05, |
|
"loss": 2.3601, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 6.658739595719382e-05, |
|
"loss": 2.2901, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.646848989298454e-05, |
|
"loss": 2.3097, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.634958382877528e-05, |
|
"loss": 2.2873, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.6230677764566e-05, |
|
"loss": 2.2359, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.611177170035671e-05, |
|
"loss": 2.2229, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.599286563614745e-05, |
|
"loss": 2.3036, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.587395957193817e-05, |
|
"loss": 2.2648, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.57550535077289e-05, |
|
"loss": 2.2226, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.563614744351963e-05, |
|
"loss": 2.1788, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.551724137931034e-05, |
|
"loss": 2.2763, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.539833531510108e-05, |
|
"loss": 2.3467, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.52794292508918e-05, |
|
"loss": 2.3056, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.516052318668251e-05, |
|
"loss": 2.3257, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.504161712247326e-05, |
|
"loss": 2.3038, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.492271105826397e-05, |
|
"loss": 2.2515, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.48038049940547e-05, |
|
"loss": 2.418, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.468489892984543e-05, |
|
"loss": 2.3426, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.456599286563615e-05, |
|
"loss": 2.3863, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.444708680142689e-05, |
|
"loss": 2.2289, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.43281807372176e-05, |
|
"loss": 2.3307, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.420927467300832e-05, |
|
"loss": 2.3791, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.409036860879906e-05, |
|
"loss": 2.1555, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.397146254458978e-05, |
|
"loss": 2.2717, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.38525564803805e-05, |
|
"loss": 2.3077, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.373365041617123e-05, |
|
"loss": 2.296, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.361474435196195e-05, |
|
"loss": 2.3474, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.349583828775268e-05, |
|
"loss": 2.1714, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.33769322235434e-05, |
|
"loss": 2.2277, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.325802615933412e-05, |
|
"loss": 2.2941, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.313912009512486e-05, |
|
"loss": 2.2197, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.302021403091558e-05, |
|
"loss": 2.2809, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.290130796670631e-05, |
|
"loss": 2.1887, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.278240190249704e-05, |
|
"loss": 2.2816, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.266349583828775e-05, |
|
"loss": 2.1933, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.254458977407848e-05, |
|
"loss": 2.2013, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.242568370986921e-05, |
|
"loss": 2.2848, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.230677764565993e-05, |
|
"loss": 2.214, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.218787158145065e-05, |
|
"loss": 2.2029, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.206896551724138e-05, |
|
"loss": 2.3902, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.195005945303211e-05, |
|
"loss": 2.2143, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.183115338882284e-05, |
|
"loss": 2.309, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.171224732461356e-05, |
|
"loss": 2.3207, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.159334126040429e-05, |
|
"loss": 2.2282, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.147443519619501e-05, |
|
"loss": 2.2536, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.135552913198573e-05, |
|
"loss": 2.2217, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.123662306777646e-05, |
|
"loss": 2.131, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.111771700356719e-05, |
|
"loss": 2.2531, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.099881093935791e-05, |
|
"loss": 2.2165, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.087990487514863e-05, |
|
"loss": 2.3067, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.076099881093936e-05, |
|
"loss": 2.1329, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.064209274673008e-05, |
|
"loss": 2.1235, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.052318668252082e-05, |
|
"loss": 2.3135, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.040428061831154e-05, |
|
"loss": 2.1634, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.028537455410226e-05, |
|
"loss": 2.2177, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.016646848989299e-05, |
|
"loss": 2.3188, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.004756242568371e-05, |
|
"loss": 2.295, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.9928656361474435e-05, |
|
"loss": 2.3287, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.9809750297265164e-05, |
|
"loss": 2.3284, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.9690844233055886e-05, |
|
"loss": 2.2374, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.957193816884662e-05, |
|
"loss": 2.1413, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.9453032104637343e-05, |
|
"loss": 2.2421, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.933412604042806e-05, |
|
"loss": 2.1714, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.9215219976218794e-05, |
|
"loss": 2.3596, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.9096313912009516e-05, |
|
"loss": 2.2769, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.897740784780024e-05, |
|
"loss": 2.2542, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.885850178359097e-05, |
|
"loss": 2.3413, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.873959571938169e-05, |
|
"loss": 2.2888, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.862068965517241e-05, |
|
"loss": 2.3477, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.850178359096315e-05, |
|
"loss": 2.2458, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.838287752675386e-05, |
|
"loss": 2.2946, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.82639714625446e-05, |
|
"loss": 2.2475, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.814506539833532e-05, |
|
"loss": 2.1557, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.802615933412604e-05, |
|
"loss": 2.2356, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.790725326991677e-05, |
|
"loss": 2.3096, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.778834720570749e-05, |
|
"loss": 2.2057, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.7669441141498215e-05, |
|
"loss": 2.3387, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.7550535077288944e-05, |
|
"loss": 2.2772, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.7431629013079666e-05, |
|
"loss": 2.2169, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.731272294887039e-05, |
|
"loss": 2.2603, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.7193816884661124e-05, |
|
"loss": 2.4154, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.7074910820451846e-05, |
|
"loss": 2.2245, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.6956004756242575e-05, |
|
"loss": 2.2239, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.68370986920333e-05, |
|
"loss": 2.2393, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.671819262782402e-05, |
|
"loss": 2.3081, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.659928656361475e-05, |
|
"loss": 2.1714, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.648038049940547e-05, |
|
"loss": 2.2818, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.636147443519619e-05, |
|
"loss": 2.2281, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.624256837098693e-05, |
|
"loss": 2.2456, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.612366230677765e-05, |
|
"loss": 2.2467, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.600475624256838e-05, |
|
"loss": 2.225, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.58858501783591e-05, |
|
"loss": 2.2764, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.576694411414982e-05, |
|
"loss": 2.2419, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.564803804994055e-05, |
|
"loss": 2.4232, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.5529131985731273e-05, |
|
"loss": 2.1652, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.5410225921521996e-05, |
|
"loss": 2.317, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.529131985731273e-05, |
|
"loss": 2.342, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.517241379310345e-05, |
|
"loss": 2.2358, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.505350772889417e-05, |
|
"loss": 2.0867, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.4934601664684904e-05, |
|
"loss": 2.3673, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.4815695600475626e-05, |
|
"loss": 2.2821, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.4696789536266355e-05, |
|
"loss": 2.2601, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.457788347205708e-05, |
|
"loss": 2.3248, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.44589774078478e-05, |
|
"loss": 2.2778, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.4340071343638535e-05, |
|
"loss": 2.3508, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.422116527942925e-05, |
|
"loss": 2.2594, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.410225921521997e-05, |
|
"loss": 2.3678, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.398335315101071e-05, |
|
"loss": 2.1526, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.386444708680143e-05, |
|
"loss": 2.1964, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.374554102259215e-05, |
|
"loss": 2.2819, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.362663495838288e-05, |
|
"loss": 2.1758, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.35077288941736e-05, |
|
"loss": 2.2343, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.338882282996434e-05, |
|
"loss": 2.1678, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.3269916765755054e-05, |
|
"loss": 2.2366, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.3151010701545776e-05, |
|
"loss": 2.2031, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.303210463733651e-05, |
|
"loss": 2.2163, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.2913198573127234e-05, |
|
"loss": 2.2695, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.2794292508917956e-05, |
|
"loss": 2.2588, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.2675386444708685e-05, |
|
"loss": 2.2567, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.255648038049941e-05, |
|
"loss": 2.1395, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.243757431629013e-05, |
|
"loss": 2.1811, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.231866825208086e-05, |
|
"loss": 2.1719, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.219976218787158e-05, |
|
"loss": 2.2563, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.2080856123662315e-05, |
|
"loss": 2.3277, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.196195005945304e-05, |
|
"loss": 2.2357, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.184304399524376e-05, |
|
"loss": 2.0933, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.172413793103449e-05, |
|
"loss": 2.3188, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.160523186682521e-05, |
|
"loss": 2.1265, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.148632580261593e-05, |
|
"loss": 2.3085, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.136741973840666e-05, |
|
"loss": 2.1882, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.124851367419738e-05, |
|
"loss": 2.1895, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.112960760998812e-05, |
|
"loss": 2.2029, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.101070154577884e-05, |
|
"loss": 2.3377, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.0891795481569556e-05, |
|
"loss": 2.1893, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.077288941736029e-05, |
|
"loss": 2.1874, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.0653983353151014e-05, |
|
"loss": 2.2244, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.0535077288941736e-05, |
|
"loss": 2.1764, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.0416171224732465e-05, |
|
"loss": 2.2646, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.029726516052319e-05, |
|
"loss": 2.1993, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.017835909631391e-05, |
|
"loss": 2.2233, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.0059453032104645e-05, |
|
"loss": 2.2618, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.994054696789536e-05, |
|
"loss": 2.2423, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.982164090368609e-05, |
|
"loss": 2.3157, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.970273483947682e-05, |
|
"loss": 2.2353, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.9583828775267547e-05, |
|
"loss": 2.2558, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.946492271105826e-05, |
|
"loss": 2.2812, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.934601664684899e-05, |
|
"loss": 2.2187, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.922711058263972e-05, |
|
"loss": 2.2293, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.910820451843044e-05, |
|
"loss": 2.3245, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.8989298454221164e-05, |
|
"loss": 2.3281, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.887039239001189e-05, |
|
"loss": 2.3905, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.875148632580262e-05, |
|
"loss": 2.1536, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.8632580261593344e-05, |
|
"loss": 2.3786, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.8513674197384066e-05, |
|
"loss": 2.2038, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.8394768133174794e-05, |
|
"loss": 2.1641, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.827586206896552e-05, |
|
"loss": 2.179, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.8156956004756245e-05, |
|
"loss": 2.3641, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.803804994054697e-05, |
|
"loss": 2.1672, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.7919143876337696e-05, |
|
"loss": 2.2152, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.7800237812128425e-05, |
|
"loss": 2.213, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.768133174791915e-05, |
|
"loss": 2.1171, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.756242568370987e-05, |
|
"loss": 2.2231, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.74435196195006e-05, |
|
"loss": 2.3679, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.732461355529132e-05, |
|
"loss": 2.3396, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.720570749108205e-05, |
|
"loss": 2.234, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.708680142687277e-05, |
|
"loss": 2.2397, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.69678953626635e-05, |
|
"loss": 2.3825, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.684898929845422e-05, |
|
"loss": 2.1288, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.673008323424495e-05, |
|
"loss": 2.3515, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.661117717003567e-05, |
|
"loss": 2.3081, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.64922711058264e-05, |
|
"loss": 2.2713, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.6373365041617124e-05, |
|
"loss": 2.2098, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.625445897740785e-05, |
|
"loss": 2.2789, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.6135552913198575e-05, |
|
"loss": 2.158, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.6016646848989304e-05, |
|
"loss": 2.1363, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.5897740784780026e-05, |
|
"loss": 2.3815, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.5778834720570755e-05, |
|
"loss": 2.2428, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.565992865636148e-05, |
|
"loss": 2.2803, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.55410225921522e-05, |
|
"loss": 2.2665, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.542211652794293e-05, |
|
"loss": 2.237, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.530321046373365e-05, |
|
"loss": 2.0968, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.518430439952438e-05, |
|
"loss": 2.2658, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.50653983353151e-05, |
|
"loss": 2.1024, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.494649227110583e-05, |
|
"loss": 2.2326, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.482758620689655e-05, |
|
"loss": 2.2192, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.470868014268728e-05, |
|
"loss": 2.207, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.4589774078478e-05, |
|
"loss": 2.253, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.447086801426873e-05, |
|
"loss": 2.2174, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.435196195005945e-05, |
|
"loss": 2.1958, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.423305588585018e-05, |
|
"loss": 2.3602, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.4114149821640904e-05, |
|
"loss": 2.2938, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.399524375743163e-05, |
|
"loss": 2.2012, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.3876337693222355e-05, |
|
"loss": 2.2574, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.375743162901308e-05, |
|
"loss": 2.274, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.3638525564803806e-05, |
|
"loss": 2.1153, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.3519619500594535e-05, |
|
"loss": 2.3574, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.340071343638526e-05, |
|
"loss": 2.2793, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.328180737217598e-05, |
|
"loss": 2.1159, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.316290130796671e-05, |
|
"loss": 2.2014, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.304399524375744e-05, |
|
"loss": 2.2351, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.292508917954816e-05, |
|
"loss": 2.224, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.280618311533888e-05, |
|
"loss": 2.2815, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.268727705112961e-05, |
|
"loss": 2.2269, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.256837098692034e-05, |
|
"loss": 2.2478, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.244946492271106e-05, |
|
"loss": 2.1831, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.233055885850178e-05, |
|
"loss": 2.3645, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.221165279429251e-05, |
|
"loss": 2.1427, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.209274673008324e-05, |
|
"loss": 2.1986, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.1973840665873956e-05, |
|
"loss": 2.2482, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.1854934601664685e-05, |
|
"loss": 2.2452, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.1736028537455414e-05, |
|
"loss": 2.3125, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.161712247324614e-05, |
|
"loss": 2.1476, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.149821640903686e-05, |
|
"loss": 2.464, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.1379310344827587e-05, |
|
"loss": 2.1521, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.1260404280618315e-05, |
|
"loss": 2.2442, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.1141498216409044e-05, |
|
"loss": 2.2195, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.102259215219976e-05, |
|
"loss": 2.2759, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.090368608799049e-05, |
|
"loss": 2.1908, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.078478002378122e-05, |
|
"loss": 2.1681, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.066587395957194e-05, |
|
"loss": 2.3413, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.054696789536266e-05, |
|
"loss": 2.3572, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.042806183115339e-05, |
|
"loss": 2.3184, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.030915576694412e-05, |
|
"loss": 2.2968, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.019024970273484e-05, |
|
"loss": 2.1584, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.007134363852556e-05, |
|
"loss": 2.2421, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.995243757431629e-05, |
|
"loss": 2.2078, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.983353151010702e-05, |
|
"loss": 2.201, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.971462544589774e-05, |
|
"loss": 2.278, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.9595719381688465e-05, |
|
"loss": 2.2308, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.9476813317479194e-05, |
|
"loss": 2.2405, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.935790725326992e-05, |
|
"loss": 2.2972, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9239001189060645e-05, |
|
"loss": 2.2174, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.912009512485137e-05, |
|
"loss": 2.3242, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9001189060642096e-05, |
|
"loss": 2.2663, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.888228299643282e-05, |
|
"loss": 2.3909, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.876337693222355e-05, |
|
"loss": 2.2299, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.864447086801427e-05, |
|
"loss": 2.1867, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.8525564803805e-05, |
|
"loss": 2.1073, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.840665873959572e-05, |
|
"loss": 2.2189, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.828775267538645e-05, |
|
"loss": 2.2019, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.816884661117717e-05, |
|
"loss": 2.141, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.80499405469679e-05, |
|
"loss": 2.3097, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.793103448275862e-05, |
|
"loss": 2.2185, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.781212841854935e-05, |
|
"loss": 2.141, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.769322235434007e-05, |
|
"loss": 2.2261, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.75743162901308e-05, |
|
"loss": 2.4785, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.7455410225921523e-05, |
|
"loss": 2.2557, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.733650416171225e-05, |
|
"loss": 2.1774, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.7217598097502974e-05, |
|
"loss": 2.1873, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.7098692033293696e-05, |
|
"loss": 2.2902, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.6979785969084425e-05, |
|
"loss": 2.3026, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.6860879904875154e-05, |
|
"loss": 2.1049, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.6741973840665876e-05, |
|
"loss": 2.2915, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.66230677764566e-05, |
|
"loss": 2.3209, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.650416171224733e-05, |
|
"loss": 2.2021, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.638525564803805e-05, |
|
"loss": 2.3287, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.626634958382878e-05, |
|
"loss": 2.2969, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.61474435196195e-05, |
|
"loss": 2.2725, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.602853745541023e-05, |
|
"loss": 2.1367, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.590963139120095e-05, |
|
"loss": 2.2286, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.579072532699168e-05, |
|
"loss": 2.2187, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.56718192627824e-05, |
|
"loss": 2.3044, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.555291319857313e-05, |
|
"loss": 2.1917, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.543400713436385e-05, |
|
"loss": 2.2167, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.5315101070154575e-05, |
|
"loss": 2.216, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.5196195005945304e-05, |
|
"loss": 2.2266, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.507728894173603e-05, |
|
"loss": 2.2856, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.4958382877526755e-05, |
|
"loss": 2.2331, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.483947681331748e-05, |
|
"loss": 2.2298, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.4720570749108206e-05, |
|
"loss": 2.208, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.4601664684898935e-05, |
|
"loss": 2.2366, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.4482758620689657e-05, |
|
"loss": 2.2715, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.436385255648038e-05, |
|
"loss": 2.1407, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.424494649227111e-05, |
|
"loss": 2.1614, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.4126040428061836e-05, |
|
"loss": 2.2332, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.400713436385256e-05, |
|
"loss": 2.2479, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.388822829964328e-05, |
|
"loss": 2.1933, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.376932223543401e-05, |
|
"loss": 2.0951, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.365041617122474e-05, |
|
"loss": 2.2382, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.353151010701546e-05, |
|
"loss": 2.0846, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.341260404280618e-05, |
|
"loss": 2.2666, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.329369797859691e-05, |
|
"loss": 2.3088, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.317479191438764e-05, |
|
"loss": 2.2048, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.3055885850178355e-05, |
|
"loss": 2.2291, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.2936979785969084e-05, |
|
"loss": 2.2375, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.281807372175981e-05, |
|
"loss": 2.2414, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.269916765755054e-05, |
|
"loss": 2.222, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.258026159334126e-05, |
|
"loss": 2.1755, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.2461355529131986e-05, |
|
"loss": 2.1439, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.2342449464922715e-05, |
|
"loss": 2.2702, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.2223543400713444e-05, |
|
"loss": 2.1515, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.210463733650416e-05, |
|
"loss": 2.2432, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.198573127229489e-05, |
|
"loss": 2.1748, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.186682520808562e-05, |
|
"loss": 2.1715, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.174791914387634e-05, |
|
"loss": 2.2309, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.162901307966706e-05, |
|
"loss": 2.2443, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.151010701545779e-05, |
|
"loss": 2.2152, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.139120095124852e-05, |
|
"loss": 2.2743, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.127229488703924e-05, |
|
"loss": 2.2062, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.115338882282996e-05, |
|
"loss": 2.0563, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.103448275862069e-05, |
|
"loss": 2.1936, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.091557669441142e-05, |
|
"loss": 2.133, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.079667063020214e-05, |
|
"loss": 2.28, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.0677764565992865e-05, |
|
"loss": 2.18, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.0558858501783593e-05, |
|
"loss": 2.229, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.0439952437574316e-05, |
|
"loss": 2.1952, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.032104637336504e-05, |
|
"loss": 2.2677, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.020214030915577e-05, |
|
"loss": 2.154, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.0083234244946495e-05, |
|
"loss": 2.1495, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.9964328180737217e-05, |
|
"loss": 2.1062, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.9845422116527943e-05, |
|
"loss": 2.2469, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.9726516052318672e-05, |
|
"loss": 2.2564, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.9607609988109397e-05, |
|
"loss": 2.1979, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.948870392390012e-05, |
|
"loss": 2.1785, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.9369797859690845e-05, |
|
"loss": 2.3257, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.9250891795481574e-05, |
|
"loss": 2.2859, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.91319857312723e-05, |
|
"loss": 2.3093, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.901307966706302e-05, |
|
"loss": 2.2966, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.8894173602853747e-05, |
|
"loss": 2.1724, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.8775267538644472e-05, |
|
"loss": 2.2575, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.8656361474435194e-05, |
|
"loss": 2.1236, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.8537455410225923e-05, |
|
"loss": 2.3509, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.841854934601665e-05, |
|
"loss": 2.1918, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.8299643281807374e-05, |
|
"loss": 2.1598, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.8180737217598096e-05, |
|
"loss": 2.2406, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.8061831153388825e-05, |
|
"loss": 2.3547, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.794292508917955e-05, |
|
"loss": 2.3098, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.7824019024970276e-05, |
|
"loss": 2.3284, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.7705112960760998e-05, |
|
"loss": 2.1737, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.7586206896551727e-05, |
|
"loss": 2.2454, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.7467300832342452e-05, |
|
"loss": 2.2807, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.7348394768133178e-05, |
|
"loss": 2.1575, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.72294887039239e-05, |
|
"loss": 2.2564, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.7110582639714625e-05, |
|
"loss": 2.2158, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.6991676575505354e-05, |
|
"loss": 2.3102, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.6872770511296076e-05, |
|
"loss": 2.2931, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.67538644470868e-05, |
|
"loss": 2.2509, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.6634958382877527e-05, |
|
"loss": 2.2295, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.6516052318668256e-05, |
|
"loss": 2.317, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.6397146254458978e-05, |
|
"loss": 2.2505, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.6278240190249703e-05, |
|
"loss": 2.1714, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.615933412604043e-05, |
|
"loss": 2.1926, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.6040428061831158e-05, |
|
"loss": 2.2712, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.592152199762188e-05, |
|
"loss": 2.3375, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.5802615933412605e-05, |
|
"loss": 2.288, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.568370986920333e-05, |
|
"loss": 2.4223, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.556480380499406e-05, |
|
"loss": 2.1204, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.5445897740784778e-05, |
|
"loss": 2.2502, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.5326991676575507e-05, |
|
"loss": 2.2847, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.5208085612366232e-05, |
|
"loss": 2.2578, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.5089179548156955e-05, |
|
"loss": 2.1796, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.497027348394768e-05, |
|
"loss": 2.1652, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.485136741973841e-05, |
|
"loss": 2.2063, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.473246135552913e-05, |
|
"loss": 2.251, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.461355529131986e-05, |
|
"loss": 2.201, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.4494649227110582e-05, |
|
"loss": 2.1851, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.437574316290131e-05, |
|
"loss": 2.1646, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.4256837098692033e-05, |
|
"loss": 2.2301, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.413793103448276e-05, |
|
"loss": 2.353, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.4019024970273484e-05, |
|
"loss": 2.1269, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.3900118906064213e-05, |
|
"loss": 2.3744, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.3781212841854935e-05, |
|
"loss": 2.189, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.366230677764566e-05, |
|
"loss": 2.1565, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.3543400713436386e-05, |
|
"loss": 2.2877, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.342449464922711e-05, |
|
"loss": 2.2354, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.3305588585017836e-05, |
|
"loss": 2.2779, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.3186682520808562e-05, |
|
"loss": 2.1766, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.3067776456599287e-05, |
|
"loss": 2.3143, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.2948870392390013e-05, |
|
"loss": 2.2451, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.282996432818074e-05, |
|
"loss": 2.2755, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.2711058263971464e-05, |
|
"loss": 2.0572, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.259215219976219e-05, |
|
"loss": 2.174, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.2473246135552915e-05, |
|
"loss": 2.2179, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.235434007134364e-05, |
|
"loss": 2.1972, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.2235434007134366e-05, |
|
"loss": 2.3448, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.211652794292509e-05, |
|
"loss": 2.232, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.1997621878715817e-05, |
|
"loss": 2.3028, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.187871581450654e-05, |
|
"loss": 2.3468, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.1759809750297268e-05, |
|
"loss": 2.1703, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.164090368608799e-05, |
|
"loss": 2.2652, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.152199762187872e-05, |
|
"loss": 2.1998, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.140309155766944e-05, |
|
"loss": 2.277, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.128418549346017e-05, |
|
"loss": 2.3778, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.116527942925089e-05, |
|
"loss": 2.2846, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.104637336504162e-05, |
|
"loss": 2.2416, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.0927467300832342e-05, |
|
"loss": 2.2836, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.080856123662307e-05, |
|
"loss": 2.3047, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.0689655172413793e-05, |
|
"loss": 2.161, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.0570749108204522e-05, |
|
"loss": 2.0814, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.0451843043995244e-05, |
|
"loss": 2.1639, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.033293697978597e-05, |
|
"loss": 2.3163, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0214030915576695e-05, |
|
"loss": 2.0997, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.009512485136742e-05, |
|
"loss": 2.1806, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9976218787158146e-05, |
|
"loss": 2.1886, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.985731272294887e-05, |
|
"loss": 2.1856, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9738406658739597e-05, |
|
"loss": 2.2507, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9619500594530322e-05, |
|
"loss": 2.2262, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9500594530321048e-05, |
|
"loss": 2.3656, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9381688466111773e-05, |
|
"loss": 2.1696, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.92627824019025e-05, |
|
"loss": 2.2295, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9143876337693224e-05, |
|
"loss": 2.2313, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.902497027348395e-05, |
|
"loss": 2.3442, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8906064209274675e-05, |
|
"loss": 2.2892, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.87871581450654e-05, |
|
"loss": 2.2087, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8668252080856126e-05, |
|
"loss": 2.2255, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8549346016646848e-05, |
|
"loss": 2.3335, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8430439952437577e-05, |
|
"loss": 2.2124, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.83115338882283e-05, |
|
"loss": 2.2033, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.8192627824019025e-05, |
|
"loss": 2.2625, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.807372175980975e-05, |
|
"loss": 2.2056, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7954815695600476e-05, |
|
"loss": 2.4297, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.78359096313912e-05, |
|
"loss": 2.1936, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7717003567181926e-05, |
|
"loss": 2.2837, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7598097502972652e-05, |
|
"loss": 2.1024, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7479191438763377e-05, |
|
"loss": 2.1015, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7360285374554103e-05, |
|
"loss": 2.2641, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7241379310344828e-05, |
|
"loss": 2.2723, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7122473246135554e-05, |
|
"loss": 2.1656, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.700356718192628e-05, |
|
"loss": 2.2522, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6884661117717005e-05, |
|
"loss": 2.188, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.676575505350773e-05, |
|
"loss": 2.1868, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6646848989298456e-05, |
|
"loss": 2.224, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6527942925089178e-05, |
|
"loss": 2.2671, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6409036860879907e-05, |
|
"loss": 2.297, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.629013079667063e-05, |
|
"loss": 2.3026, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6171224732461357e-05, |
|
"loss": 2.2377, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.605231866825208e-05, |
|
"loss": 2.2205, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.593341260404281e-05, |
|
"loss": 2.1496, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.581450653983353e-05, |
|
"loss": 2.2312, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.569560047562426e-05, |
|
"loss": 2.172, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.557669441141498e-05, |
|
"loss": 2.1986, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.545778834720571e-05, |
|
"loss": 2.1183, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.5338882282996432e-05, |
|
"loss": 2.3068, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.5219976218787158e-05, |
|
"loss": 2.3595, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.5101070154577885e-05, |
|
"loss": 2.2684, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.4982164090368609e-05, |
|
"loss": 2.1524, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.4863258026159336e-05, |
|
"loss": 2.2399, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.474435196195006e-05, |
|
"loss": 2.2979, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.4625445897740787e-05, |
|
"loss": 2.1425, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.450653983353151e-05, |
|
"loss": 2.1784, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.4387633769322236e-05, |
|
"loss": 2.2853, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.4268727705112961e-05, |
|
"loss": 2.2289, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.4149821640903687e-05, |
|
"loss": 2.1901, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.4030915576694412e-05, |
|
"loss": 2.2425, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.3912009512485138e-05, |
|
"loss": 2.2349, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.3793103448275863e-05, |
|
"loss": 2.2013, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.3674197384066589e-05, |
|
"loss": 2.2391, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.3555291319857313e-05, |
|
"loss": 2.19, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.3436385255648038e-05, |
|
"loss": 2.1743, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.3317479191438763e-05, |
|
"loss": 2.3051, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.3198573127229489e-05, |
|
"loss": 2.0934, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.3079667063020214e-05, |
|
"loss": 2.1383, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.296076099881094e-05, |
|
"loss": 2.236, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2841854934601665e-05, |
|
"loss": 2.3205, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2722948870392389e-05, |
|
"loss": 2.2399, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2604042806183116e-05, |
|
"loss": 2.198, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.248513674197384e-05, |
|
"loss": 2.2564, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2366230677764565e-05, |
|
"loss": 2.2605, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2247324613555291e-05, |
|
"loss": 2.2386, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2128418549346016e-05, |
|
"loss": 2.0987, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.2009512485136742e-05, |
|
"loss": 2.1949, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1890606420927467e-05, |
|
"loss": 2.1448, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1771700356718193e-05, |
|
"loss": 2.1706, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1652794292508918e-05, |
|
"loss": 2.2455, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1533888228299644e-05, |
|
"loss": 2.2891, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.141498216409037e-05, |
|
"loss": 2.225, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1296076099881095e-05, |
|
"loss": 2.3256, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.117717003567182e-05, |
|
"loss": 2.1633, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1058263971462546e-05, |
|
"loss": 2.2338, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.093935790725327e-05, |
|
"loss": 2.1043, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.0820451843043995e-05, |
|
"loss": 2.2679, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.070154577883472e-05, |
|
"loss": 2.168, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.0582639714625446e-05, |
|
"loss": 2.1355, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.0463733650416171e-05, |
|
"loss": 2.1416, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0344827586206897e-05, |
|
"loss": 2.2013, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0225921521997622e-05, |
|
"loss": 2.2837, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.0107015457788348e-05, |
|
"loss": 2.2035, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.988109393579073e-06, |
|
"loss": 2.2164, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.869203329369798e-06, |
|
"loss": 2.3539, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.750297265160524e-06, |
|
"loss": 2.2583, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.63139120095125e-06, |
|
"loss": 2.4076, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.512485136741975e-06, |
|
"loss": 2.1296, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.3935790725327e-06, |
|
"loss": 2.2343, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.274673008323424e-06, |
|
"loss": 2.2106, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.15576694411415e-06, |
|
"loss": 2.2108, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.036860879904875e-06, |
|
"loss": 2.2726, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.9179548156956e-06, |
|
"loss": 2.233, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.799048751486326e-06, |
|
"loss": 2.156, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.680142687277051e-06, |
|
"loss": 2.1531, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.561236623067777e-06, |
|
"loss": 2.2671, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 8.442330558858502e-06, |
|
"loss": 2.1105, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 8.323424494649228e-06, |
|
"loss": 2.3624, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 8.204518430439953e-06, |
|
"loss": 2.235, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 8.085612366230679e-06, |
|
"loss": 2.2495, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.966706302021404e-06, |
|
"loss": 2.1934, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.84780023781213e-06, |
|
"loss": 2.1693, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.728894173602855e-06, |
|
"loss": 2.2035, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.609988109393579e-06, |
|
"loss": 2.0866, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 7.491082045184304e-06, |
|
"loss": 2.3758, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 7.37217598097503e-06, |
|
"loss": 2.2301, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 7.253269916765755e-06, |
|
"loss": 2.1721, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 7.134363852556481e-06, |
|
"loss": 2.232, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 7.015457788347206e-06, |
|
"loss": 2.0901, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.896551724137932e-06, |
|
"loss": 2.0837, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.777645659928656e-06, |
|
"loss": 2.2091, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.658739595719382e-06, |
|
"loss": 2.2804, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.539833531510107e-06, |
|
"loss": 2.2699, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 6.420927467300833e-06, |
|
"loss": 2.1696, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.302021403091558e-06, |
|
"loss": 2.2217, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.183115338882283e-06, |
|
"loss": 2.3185, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 6.064209274673008e-06, |
|
"loss": 2.2237, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.945303210463734e-06, |
|
"loss": 2.1767, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.826397146254459e-06, |
|
"loss": 2.1886, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.707491082045185e-06, |
|
"loss": 2.2296, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.58858501783591e-06, |
|
"loss": 2.289, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.469678953626635e-06, |
|
"loss": 2.0918, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.35077288941736e-06, |
|
"loss": 2.2153, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.231866825208086e-06, |
|
"loss": 2.1989, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.112960760998811e-06, |
|
"loss": 2.221, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.9940546967895365e-06, |
|
"loss": 2.2363, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.875148632580262e-06, |
|
"loss": 2.2369, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.7562425683709874e-06, |
|
"loss": 2.2818, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.637336504161712e-06, |
|
"loss": 2.1997, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.5184304399524375e-06, |
|
"loss": 2.1886, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.399524375743163e-06, |
|
"loss": 2.1808, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.2806183115338884e-06, |
|
"loss": 2.2151, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.161712247324614e-06, |
|
"loss": 2.36, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.042806183115339e-06, |
|
"loss": 2.188, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.923900118906065e-06, |
|
"loss": 2.2938, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.8049940546967894e-06, |
|
"loss": 2.1663, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.686087990487515e-06, |
|
"loss": 2.2305, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.5671819262782404e-06, |
|
"loss": 2.183, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.448275862068966e-06, |
|
"loss": 2.2173, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.329369797859691e-06, |
|
"loss": 2.2282, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.2104637336504163e-06, |
|
"loss": 2.2354, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.0915576694411414e-06, |
|
"loss": 2.3018, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.972651605231867e-06, |
|
"loss": 2.325, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.8537455410225923e-06, |
|
"loss": 2.3601, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.7348394768133173e-06, |
|
"loss": 2.1459, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.615933412604043e-06, |
|
"loss": 2.0587, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.4970273483947683e-06, |
|
"loss": 2.4179, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.3781212841854937e-06, |
|
"loss": 2.2687, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.2592152199762188e-06, |
|
"loss": 2.1549, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.1403091557669442e-06, |
|
"loss": 2.1375, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.0214030915576697e-06, |
|
"loss": 2.2797, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9024970273483947e-06, |
|
"loss": 2.1761, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7835909631391202e-06, |
|
"loss": 2.1904, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.6646848989298454e-06, |
|
"loss": 2.2223, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5457788347205707e-06, |
|
"loss": 2.3168, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4268727705112961e-06, |
|
"loss": 2.3925, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.3079667063020214e-06, |
|
"loss": 2.1307, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.1890606420927469e-06, |
|
"loss": 2.385, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0701545778834721e-06, |
|
"loss": 2.196, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.512485136741974e-07, |
|
"loss": 2.2124, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.323424494649227e-07, |
|
"loss": 2.2072, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.134363852556481e-07, |
|
"loss": 2.3335, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.945303210463734e-07, |
|
"loss": 2.28, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.756242568370987e-07, |
|
"loss": 2.2315, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.5671819262782404e-07, |
|
"loss": 2.2179, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.3781212841854934e-07, |
|
"loss": 2.1931, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.1890606420927467e-07, |
|
"loss": 2.199, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.2945, |
|
"step": 841 |
|
} |
|
], |
|
"max_steps": 841, |
|
"num_train_epochs": 1, |
|
"total_flos": 9.501901140996915e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |