|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9978570900761102, |
|
"eval_steps": 500, |
|
"global_step": 422, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.921568627450981e-07, |
|
"loss": 2.8563, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 7.843137254901962e-07, |
|
"loss": 3.0208, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.1764705882352942e-06, |
|
"loss": 4.0651, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.5686274509803923e-06, |
|
"loss": 3.3154, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.96078431372549e-06, |
|
"loss": 3.9875, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.3529411764705885e-06, |
|
"loss": 9.4198, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.7450980392156867e-06, |
|
"loss": 3.4956, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.1372549019607846e-06, |
|
"loss": 3.646, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.529411764705883e-06, |
|
"loss": 2.9569, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.92156862745098e-06, |
|
"loss": 3.2712, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.313725490196079e-06, |
|
"loss": 4.4552, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.705882352941177e-06, |
|
"loss": 3.2141, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.098039215686274e-06, |
|
"loss": 2.8645, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.4901960784313735e-06, |
|
"loss": 2.9299, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 5.882352941176471e-06, |
|
"loss": 3.2333, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.274509803921569e-06, |
|
"loss": 3.3361, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 2.8326, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.058823529411766e-06, |
|
"loss": 5.9936, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.450980392156863e-06, |
|
"loss": 2.7551, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 7.84313725490196e-06, |
|
"loss": 2.3296, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.23529411764706e-06, |
|
"loss": 2.1901, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.627450980392157e-06, |
|
"loss": 1.7673, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.019607843137256e-06, |
|
"loss": 1.6721, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.411764705882354e-06, |
|
"loss": 1.7151, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.803921568627451e-06, |
|
"loss": 1.7576, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.0196078431372549e-05, |
|
"loss": 1.4461, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.0588235294117648e-05, |
|
"loss": 3.8357, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.0980392156862747e-05, |
|
"loss": 1.5292, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1372549019607844e-05, |
|
"loss": 1.484, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1764705882352942e-05, |
|
"loss": 1.2995, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.215686274509804e-05, |
|
"loss": 1.1459, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2549019607843138e-05, |
|
"loss": 1.1017, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2941176470588238e-05, |
|
"loss": 1.2681, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 1.7749, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3725490196078432e-05, |
|
"loss": 1.4039, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4117647058823532e-05, |
|
"loss": 1.304, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4509803921568629e-05, |
|
"loss": 1.1641, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4901960784313726e-05, |
|
"loss": 1.0715, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.5294117647058822e-05, |
|
"loss": 1.0567, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.568627450980392e-05, |
|
"loss": 1.3111, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.607843137254902e-05, |
|
"loss": 1.4909, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.647058823529412e-05, |
|
"loss": 0.961, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.686274509803922e-05, |
|
"loss": 1.1519, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.7254901960784314e-05, |
|
"loss": 0.9403, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.7647058823529414e-05, |
|
"loss": 1.1603, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.8039215686274513e-05, |
|
"loss": 0.9909, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.843137254901961e-05, |
|
"loss": 1.0872, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.8823529411764708e-05, |
|
"loss": 1.0935, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9215686274509807e-05, |
|
"loss": 1.2776, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9607843137254903e-05, |
|
"loss": 1.0812, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2e-05, |
|
"loss": 1.0436, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9987782529016497e-05, |
|
"loss": 1.275, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.997556505803299e-05, |
|
"loss": 1.1309, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9963347587049484e-05, |
|
"loss": 1.0838, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9951130116065975e-05, |
|
"loss": 1.0861, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.993891264508247e-05, |
|
"loss": 0.9737, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9926695174098962e-05, |
|
"loss": 0.9726, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9914477703115457e-05, |
|
"loss": 1.1044, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9902260232131952e-05, |
|
"loss": 1.2198, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9890042761148444e-05, |
|
"loss": 1.2454, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.987782529016494e-05, |
|
"loss": 1.0945, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.986560781918143e-05, |
|
"loss": 1.2243, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9853390348197926e-05, |
|
"loss": 1.0895, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9841172877214418e-05, |
|
"loss": 1.3153, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9828955406230913e-05, |
|
"loss": 1.3891, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9816737935247404e-05, |
|
"loss": 1.1219, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.98045204642639e-05, |
|
"loss": 1.0493, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.979230299328039e-05, |
|
"loss": 1.0645, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9780085522296886e-05, |
|
"loss": 1.3425, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9767868051313378e-05, |
|
"loss": 1.8974, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9755650580329873e-05, |
|
"loss": 1.0191, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9743433109346365e-05, |
|
"loss": 1.1784, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.973121563836286e-05, |
|
"loss": 1.0353, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9718998167379355e-05, |
|
"loss": 1.2755, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.970678069639585e-05, |
|
"loss": 0.9851, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9694563225412342e-05, |
|
"loss": 1.2396, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9682345754428837e-05, |
|
"loss": 1.2519, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.967012828344533e-05, |
|
"loss": 0.9313, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9657910812461824e-05, |
|
"loss": 1.1745, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9645693341478315e-05, |
|
"loss": 1.0521, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.963347587049481e-05, |
|
"loss": 0.9781, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9621258399511302e-05, |
|
"loss": 1.0292, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9609040928527797e-05, |
|
"loss": 0.851, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.959682345754429e-05, |
|
"loss": 1.1486, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9584605986560784e-05, |
|
"loss": 1.0732, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9572388515577276e-05, |
|
"loss": 1.0466, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.956017104459377e-05, |
|
"loss": 0.9275, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9547953573610263e-05, |
|
"loss": 1.7509, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9535736102626758e-05, |
|
"loss": 1.3038, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9523518631643253e-05, |
|
"loss": 0.8664, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9511301160659744e-05, |
|
"loss": 1.1555, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.949908368967624e-05, |
|
"loss": 1.1517, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.948686621869273e-05, |
|
"loss": 1.0223, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9474648747709226e-05, |
|
"loss": 1.0649, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9462431276725718e-05, |
|
"loss": 1.0147, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9450213805742213e-05, |
|
"loss": 1.3624, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9437996334758705e-05, |
|
"loss": 1.1294, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.94257788637752e-05, |
|
"loss": 1.1903, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.941356139279169e-05, |
|
"loss": 0.8732, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9401343921808187e-05, |
|
"loss": 1.1793, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.938912645082468e-05, |
|
"loss": 0.9008, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9376908979841174e-05, |
|
"loss": 1.2374, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.936469150885767e-05, |
|
"loss": 0.9163, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9352474037874164e-05, |
|
"loss": 0.9702, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9340256566890655e-05, |
|
"loss": 1.0813, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.932803909590715e-05, |
|
"loss": 1.1164, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9315821624923642e-05, |
|
"loss": 0.9403, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9303604153940137e-05, |
|
"loss": 1.074, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.929138668295663e-05, |
|
"loss": 1.1077, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9279169211973124e-05, |
|
"loss": 1.0153, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9266951740989616e-05, |
|
"loss": 1.0281, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.925473427000611e-05, |
|
"loss": 0.9075, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9242516799022603e-05, |
|
"loss": 1.2191, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9230299328039098e-05, |
|
"loss": 1.0649, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.921808185705559e-05, |
|
"loss": 0.9901, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9205864386072085e-05, |
|
"loss": 1.0329, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9193646915088576e-05, |
|
"loss": 1.4406, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.918142944410507e-05, |
|
"loss": 1.138, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9169211973121567e-05, |
|
"loss": 1.1755, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9156994502138058e-05, |
|
"loss": 0.9568, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9144777031154553e-05, |
|
"loss": 1.0204, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.913255956017105e-05, |
|
"loss": 0.9391, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.912034208918754e-05, |
|
"loss": 1.109, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9108124618204035e-05, |
|
"loss": 1.2183, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9095907147220527e-05, |
|
"loss": 0.9377, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9083689676237022e-05, |
|
"loss": 1.0865, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9071472205253514e-05, |
|
"loss": 1.2078, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.905925473427001e-05, |
|
"loss": 1.0149, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.90470372632865e-05, |
|
"loss": 1.1191, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9034819792302996e-05, |
|
"loss": 1.0933, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9022602321319487e-05, |
|
"loss": 0.9312, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9010384850335982e-05, |
|
"loss": 0.794, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8998167379352474e-05, |
|
"loss": 0.8745, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.898594990836897e-05, |
|
"loss": 0.9092, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8973732437385464e-05, |
|
"loss": 0.7504, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.8961514966401956e-05, |
|
"loss": 1.2165, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.894929749541845e-05, |
|
"loss": 0.9727, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8937080024434943e-05, |
|
"loss": 1.0236, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8924862553451438e-05, |
|
"loss": 0.9628, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.891264508246793e-05, |
|
"loss": 0.8762, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.8900427611484425e-05, |
|
"loss": 1.0712, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8888210140500916e-05, |
|
"loss": 0.7999, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.887599266951741e-05, |
|
"loss": 0.9812, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.8863775198533903e-05, |
|
"loss": 1.8394, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.88515577275504e-05, |
|
"loss": 0.9033, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.883934025656689e-05, |
|
"loss": 0.9813, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8827122785583385e-05, |
|
"loss": 0.7722, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8814905314599877e-05, |
|
"loss": 0.9991, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8802687843616375e-05, |
|
"loss": 0.9531, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.8790470372632867e-05, |
|
"loss": 0.9034, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8778252901649362e-05, |
|
"loss": 0.8741, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.8766035430665854e-05, |
|
"loss": 1.1228, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.875381795968235e-05, |
|
"loss": 0.7849, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.874160048869884e-05, |
|
"loss": 0.9642, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8729383017715336e-05, |
|
"loss": 1.0823, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8717165546731827e-05, |
|
"loss": 1.2156, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8704948075748323e-05, |
|
"loss": 1.4682, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.8692730604764814e-05, |
|
"loss": 1.2043, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.868051313378131e-05, |
|
"loss": 0.9802, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.86682956627978e-05, |
|
"loss": 1.1539, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8656078191814296e-05, |
|
"loss": 1.1245, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.8643860720830788e-05, |
|
"loss": 1.099, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8631643249847283e-05, |
|
"loss": 1.0272, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8619425778863778e-05, |
|
"loss": 1.0072, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.860720830788027e-05, |
|
"loss": 1.0215, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8594990836896765e-05, |
|
"loss": 1.0221, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.8582773365913257e-05, |
|
"loss": 1.0754, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.857055589492975e-05, |
|
"loss": 0.9724, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.8558338423946243e-05, |
|
"loss": 1.031, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.854612095296274e-05, |
|
"loss": 1.157, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.853390348197923e-05, |
|
"loss": 0.9819, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8521686010995725e-05, |
|
"loss": 1.1156, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.850946854001222e-05, |
|
"loss": 1.0699, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8497251069028712e-05, |
|
"loss": 1.1092, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.8485033598045207e-05, |
|
"loss": 1.0092, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.84728161270617e-05, |
|
"loss": 1.0403, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.8460598656078194e-05, |
|
"loss": 0.8809, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.8448381185094686e-05, |
|
"loss": 1.0139, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.843616371411118e-05, |
|
"loss": 1.0701, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8423946243127676e-05, |
|
"loss": 0.9199, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8411728772144168e-05, |
|
"loss": 0.8542, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8399511301160663e-05, |
|
"loss": 1.0369, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.8387293830177154e-05, |
|
"loss": 0.9847, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.837507635919365e-05, |
|
"loss": 0.8046, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.836285888821014e-05, |
|
"loss": 1.2821, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.8350641417226636e-05, |
|
"loss": 1.0127, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.8338423946243128e-05, |
|
"loss": 0.8535, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.8326206475259623e-05, |
|
"loss": 0.984, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.8313989004276115e-05, |
|
"loss": 0.771, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.830177153329261e-05, |
|
"loss": 2.1482, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.82895540623091e-05, |
|
"loss": 0.7799, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.8277336591325597e-05, |
|
"loss": 1.1474, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.826511912034209e-05, |
|
"loss": 1.0967, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.8252901649358587e-05, |
|
"loss": 0.8824, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.824068417837508e-05, |
|
"loss": 0.9202, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.8228466707391574e-05, |
|
"loss": 0.8055, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.8216249236408065e-05, |
|
"loss": 0.7309, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.820403176542456e-05, |
|
"loss": 1.1121, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.8191814294441052e-05, |
|
"loss": 1.5694, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.8179596823457547e-05, |
|
"loss": 1.2677, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.816737935247404e-05, |
|
"loss": 1.1242, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.8155161881490534e-05, |
|
"loss": 0.8419, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.8142944410507026e-05, |
|
"loss": 1.1399, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.813072693952352e-05, |
|
"loss": 0.7964, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.8118509468540013e-05, |
|
"loss": 0.9321, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.8106291997556508e-05, |
|
"loss": 0.8461, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.8094074526573e-05, |
|
"loss": 1.0457, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.8081857055589494e-05, |
|
"loss": 1.085, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.806963958460599e-05, |
|
"loss": 0.9105, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.805742211362248e-05, |
|
"loss": 0.9143, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.8045204642638976e-05, |
|
"loss": 1.0978, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.8032987171655468e-05, |
|
"loss": 0.9884, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.8020769700671963e-05, |
|
"loss": 1.0179, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8008552229688455e-05, |
|
"loss": 1.2135, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.799633475870495e-05, |
|
"loss": 0.9051, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7984117287721442e-05, |
|
"loss": 0.8954, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.7971899816737937e-05, |
|
"loss": 1.1429, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.795968234575443e-05, |
|
"loss": 1.0803, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.7947464874770924e-05, |
|
"loss": 1.0558, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.7935247403787415e-05, |
|
"loss": 1.0904, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.792302993280391e-05, |
|
"loss": 1.0909, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.7910812461820402e-05, |
|
"loss": 1.1515, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7898594990836897e-05, |
|
"loss": 1.0393, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7886377519853392e-05, |
|
"loss": 1.0072, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.7874160048869887e-05, |
|
"loss": 0.8387, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.786194257788638e-05, |
|
"loss": 0.8908, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.7849725106902874e-05, |
|
"loss": 0.875, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.7837507635919366e-05, |
|
"loss": 1.0477, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.782529016493586e-05, |
|
"loss": 1.0117, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.7813072693952353e-05, |
|
"loss": 1.1684, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.7800855222968848e-05, |
|
"loss": 1.1475, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.778863775198534e-05, |
|
"loss": 1.2069, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.7776420281001835e-05, |
|
"loss": 1.1107, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.7764202810018326e-05, |
|
"loss": 0.9738, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.775198533903482e-05, |
|
"loss": 0.8838, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.7739767868051313e-05, |
|
"loss": 1.0103, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.7727550397067808e-05, |
|
"loss": 0.9279, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.77153329260843e-05, |
|
"loss": 0.9682, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7703115455100795e-05, |
|
"loss": 1.2049, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.769089798411729e-05, |
|
"loss": 1.0387, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7678680513133785e-05, |
|
"loss": 0.9754, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7666463042150277e-05, |
|
"loss": 1.0202, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7654245571166772e-05, |
|
"loss": 1.0224, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7642028100183264e-05, |
|
"loss": 0.8577, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.762981062919976e-05, |
|
"loss": 0.9112, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.761759315821625e-05, |
|
"loss": 0.9395, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7605375687232746e-05, |
|
"loss": 1.1198, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7593158216249237e-05, |
|
"loss": 1.4934, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7580940745265732e-05, |
|
"loss": 1.0408, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7568723274282224e-05, |
|
"loss": 0.9475, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.755650580329872e-05, |
|
"loss": 1.1368, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.754428833231521e-05, |
|
"loss": 0.9984, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7532070861331706e-05, |
|
"loss": 1.1552, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.75198533903482e-05, |
|
"loss": 1.0785, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7507635919364693e-05, |
|
"loss": 0.8089, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.7495418448381188e-05, |
|
"loss": 1.0439, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.748320097739768e-05, |
|
"loss": 0.7603, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.7470983506414175e-05, |
|
"loss": 0.8235, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.7458766035430666e-05, |
|
"loss": 1.7352, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.744654856444716e-05, |
|
"loss": 0.9731, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.7434331093463653e-05, |
|
"loss": 0.8848, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.742211362248015e-05, |
|
"loss": 0.8874, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.740989615149664e-05, |
|
"loss": 1.0125, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.7397678680513135e-05, |
|
"loss": 1.0552, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.7385461209529627e-05, |
|
"loss": 0.9488, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.7373243738546122e-05, |
|
"loss": 0.794, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.7361026267562614e-05, |
|
"loss": 1.0277, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.734880879657911e-05, |
|
"loss": 0.835, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.7336591325595604e-05, |
|
"loss": 1.2321, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.73243738546121e-05, |
|
"loss": 0.9804, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.731215638362859e-05, |
|
"loss": 1.1228, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.7299938912645086e-05, |
|
"loss": 1.1392, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.7287721441661577e-05, |
|
"loss": 0.8711, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.7275503970678073e-05, |
|
"loss": 0.9201, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.7263286499694564e-05, |
|
"loss": 1.008, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.725106902871106e-05, |
|
"loss": 1.0344, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.723885155772755e-05, |
|
"loss": 0.9427, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.7226634086744046e-05, |
|
"loss": 0.8963, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.7214416615760538e-05, |
|
"loss": 1.3487, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.7202199144777033e-05, |
|
"loss": 1.0002, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.7189981673793525e-05, |
|
"loss": 0.961, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.717776420281002e-05, |
|
"loss": 0.9015, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.716554673182651e-05, |
|
"loss": 0.9172, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.7153329260843007e-05, |
|
"loss": 0.9849, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.71411117898595e-05, |
|
"loss": 0.9166, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.7128894318875993e-05, |
|
"loss": 1.0685, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.711667684789249e-05, |
|
"loss": 1.0196, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.710445937690898e-05, |
|
"loss": 0.8409, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.7092241905925475e-05, |
|
"loss": 0.9327, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7080024434941967e-05, |
|
"loss": 0.8675, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7067806963958462e-05, |
|
"loss": 1.1109, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7055589492974954e-05, |
|
"loss": 0.8427, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.704337202199145e-05, |
|
"loss": 1.1322, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7031154551007944e-05, |
|
"loss": 0.9094, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7018937080024436e-05, |
|
"loss": 1.1168, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.700671960904093e-05, |
|
"loss": 0.9982, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.6994502138057422e-05, |
|
"loss": 0.7812, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.6982284667073918e-05, |
|
"loss": 0.8858, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.6970067196090413e-05, |
|
"loss": 0.9693, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.6957849725106904e-05, |
|
"loss": 0.9097, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.69456322541234e-05, |
|
"loss": 0.9178, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.693341478313989e-05, |
|
"loss": 1.0581, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.6921197312156386e-05, |
|
"loss": 1.1521, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.6908979841172878e-05, |
|
"loss": 1.065, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.6896762370189373e-05, |
|
"loss": 0.8954, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.6884544899205865e-05, |
|
"loss": 0.7037, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.687232742822236e-05, |
|
"loss": 0.9347, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.686010995723885e-05, |
|
"loss": 0.9681, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.6847892486255347e-05, |
|
"loss": 0.8554, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.683567501527184e-05, |
|
"loss": 0.8793, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6823457544288334e-05, |
|
"loss": 0.8881, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6811240073304825e-05, |
|
"loss": 0.9284, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.679902260232132e-05, |
|
"loss": 0.9632, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.6786805131337815e-05, |
|
"loss": 1.0815, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.677458766035431e-05, |
|
"loss": 0.9314, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.6762370189370802e-05, |
|
"loss": 0.7965, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.6750152718387297e-05, |
|
"loss": 0.8365, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.673793524740379e-05, |
|
"loss": 1.2019, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.6725717776420284e-05, |
|
"loss": 0.9525, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6713500305436776e-05, |
|
"loss": 0.7911, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.670128283445327e-05, |
|
"loss": 0.9614, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6689065363469763e-05, |
|
"loss": 1.0212, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6676847892486258e-05, |
|
"loss": 1.128, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.666463042150275e-05, |
|
"loss": 0.9668, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.6652412950519245e-05, |
|
"loss": 0.9095, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.6640195479535736e-05, |
|
"loss": 0.8985, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.662797800855223e-05, |
|
"loss": 0.8876, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.6615760537568723e-05, |
|
"loss": 0.9104, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.6603543066585218e-05, |
|
"loss": 0.9719, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.6591325595601713e-05, |
|
"loss": 1.2485, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.6579108124618205e-05, |
|
"loss": 0.8247, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.65668906536347e-05, |
|
"loss": 1.1209, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.6554673182651192e-05, |
|
"loss": 1.0325, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.6542455711667687e-05, |
|
"loss": 1.0139, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.653023824068418e-05, |
|
"loss": 1.0233, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.6518020769700674e-05, |
|
"loss": 0.8545, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.6505803298717165e-05, |
|
"loss": 1.2186, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.649358582773366e-05, |
|
"loss": 0.9952, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.6481368356750152e-05, |
|
"loss": 1.2843, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.6469150885766647e-05, |
|
"loss": 1.2914, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.645693341478314e-05, |
|
"loss": 0.7621, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.6444715943799634e-05, |
|
"loss": 1.0924, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.6432498472816126e-05, |
|
"loss": 1.1418, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.6420281001832624e-05, |
|
"loss": 0.9116, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6408063530849116e-05, |
|
"loss": 1.7392, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.639584605986561e-05, |
|
"loss": 0.7273, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6383628588882103e-05, |
|
"loss": 0.9469, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6371411117898598e-05, |
|
"loss": 0.7986, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.635919364691509e-05, |
|
"loss": 0.8359, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6346976175931585e-05, |
|
"loss": 0.7852, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6334758704948076e-05, |
|
"loss": 1.1206, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.632254123396457e-05, |
|
"loss": 0.9291, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6310323762981063e-05, |
|
"loss": 1.0176, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6298106291997558e-05, |
|
"loss": 1.018, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.628588882101405e-05, |
|
"loss": 0.7873, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6273671350030545e-05, |
|
"loss": 0.9801, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6261453879047037e-05, |
|
"loss": 0.752, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6249236408063532e-05, |
|
"loss": 0.8991, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6237018937080027e-05, |
|
"loss": 0.8771, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.622480146609652e-05, |
|
"loss": 0.8734, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6212583995113014e-05, |
|
"loss": 0.9302, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.620036652412951e-05, |
|
"loss": 0.9362, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6188149053146e-05, |
|
"loss": 0.9994, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6175931582162496e-05, |
|
"loss": 0.7499, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6163714111178987e-05, |
|
"loss": 1.7184, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6151496640195482e-05, |
|
"loss": 1.0205, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6139279169211974e-05, |
|
"loss": 0.8763, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.612706169822847e-05, |
|
"loss": 1.0264, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.611484422724496e-05, |
|
"loss": 0.9088, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6102626756261456e-05, |
|
"loss": 0.7762, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6090409285277948e-05, |
|
"loss": 0.876, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6078191814294443e-05, |
|
"loss": 1.0591, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6065974343310935e-05, |
|
"loss": 0.8602, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.605375687232743e-05, |
|
"loss": 0.8489, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6041539401343925e-05, |
|
"loss": 0.9205, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6029321930360416e-05, |
|
"loss": 0.926, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.601710445937691e-05, |
|
"loss": 0.8983, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6004886988393403e-05, |
|
"loss": 1.0779, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.59926695174099e-05, |
|
"loss": 0.9446, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.598045204642639e-05, |
|
"loss": 1.1564, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.5968234575442885e-05, |
|
"loss": 1.0101, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.5956017104459377e-05, |
|
"loss": 1.0597, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5943799633475872e-05, |
|
"loss": 0.9408, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5931582162492364e-05, |
|
"loss": 1.1833, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.591936469150886e-05, |
|
"loss": 1.0106, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.590714722052535e-05, |
|
"loss": 1.6698, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.5894929749541846e-05, |
|
"loss": 1.0123, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.5882712278558337e-05, |
|
"loss": 0.9237, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.5870494807574836e-05, |
|
"loss": 0.9182, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.5858277336591327e-05, |
|
"loss": 0.9832, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.5846059865607823e-05, |
|
"loss": 0.7884, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.5833842394624314e-05, |
|
"loss": 1.0028, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.582162492364081e-05, |
|
"loss": 1.0835, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.58094074526573e-05, |
|
"loss": 0.9394, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.5797189981673796e-05, |
|
"loss": 1.25, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.5784972510690288e-05, |
|
"loss": 1.0046, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.5772755039706783e-05, |
|
"loss": 0.6862, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.5760537568723275e-05, |
|
"loss": 0.9672, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.574832009773977e-05, |
|
"loss": 0.847, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.573610262675626e-05, |
|
"loss": 1.1316, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5723885155772757e-05, |
|
"loss": 1.1577, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5711667684789248e-05, |
|
"loss": 0.7909, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5699450213805743e-05, |
|
"loss": 0.9476, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.568723274282224e-05, |
|
"loss": 0.8651, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.567501527183873e-05, |
|
"loss": 0.8349, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5662797800855225e-05, |
|
"loss": 1.0023, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5650580329871717e-05, |
|
"loss": 0.9267, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5638362858888212e-05, |
|
"loss": 0.7568, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5626145387904704e-05, |
|
"loss": 0.9849, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.56139279169212e-05, |
|
"loss": 0.8452, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.560171044593769e-05, |
|
"loss": 0.9507, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5589492974954186e-05, |
|
"loss": 0.9459, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.557727550397068e-05, |
|
"loss": 0.9053, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5565058032987173e-05, |
|
"loss": 1.0029, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5552840562003668e-05, |
|
"loss": 0.7987, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.554062309102016e-05, |
|
"loss": 0.7423, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5528405620036654e-05, |
|
"loss": 1.0905, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5516188149053146e-05, |
|
"loss": 0.829, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.550397067806964e-05, |
|
"loss": 0.698, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5491753207086136e-05, |
|
"loss": 0.8893, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5479535736102628e-05, |
|
"loss": 0.992, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5467318265119123e-05, |
|
"loss": 1.0658, |
|
"step": 422 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1688, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 500, |
|
"total_flos": 2.030735869261906e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
}