|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 705,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
    {
      "epoch": 0.0,
      "learning_rate": 9.090909090909091e-07,
      "loss": 1.2041,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 1.3097,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 1.2029,
      "step": 3
    },
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 1.1238, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 1.3468, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 1.0357, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 1.1403, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 1.3239, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.181818181818183e-06, |
|
"loss": 1.0573, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 1.268, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1e-05, |
|
"loss": 1.2789, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0909090909090909e-05, |
|
"loss": 1.152, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.181818181818182e-05, |
|
"loss": 1.2186, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2727272727272728e-05, |
|
"loss": 1.1533, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.3636363636363637e-05, |
|
"loss": 1.0094, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4545454545454546e-05, |
|
"loss": 0.9916, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5454545454545454e-05, |
|
"loss": 1.0282, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.6363636363636366e-05, |
|
"loss": 1.0948, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.7272727272727274e-05, |
|
"loss": 1.304, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 1.1626, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9090909090909094e-05, |
|
"loss": 1.3438, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2e-05, |
|
"loss": 1.0418, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.999989421415082e-05, |
|
"loss": 1.2165, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9999576858841395e-05, |
|
"loss": 1.2404, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9999047940786076e-05, |
|
"loss": 1.151, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9998307471175264e-05, |
|
"loss": 0.946, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9997355465675207e-05, |
|
"loss": 1.1605, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.999619194442764e-05, |
|
"loss": 1.0594, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9994816932049384e-05, |
|
"loss": 1.1498, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9993230457631812e-05, |
|
"loss": 1.1866, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9991432554740228e-05, |
|
"loss": 1.0144, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.998942326141317e-05, |
|
"loss": 1.0463, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.99872026201616e-05, |
|
"loss": 1.1274, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9984770677968e-05, |
|
"loss": 0.9874, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9982127486285386e-05, |
|
"loss": 1.2072, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.997927310103621e-05, |
|
"loss": 1.0499, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.997620758261119e-05, |
|
"loss": 1.0916, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9972930995868015e-05, |
|
"loss": 1.1274, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.996944341012999e-05, |
|
"loss": 1.09, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.996574489918456e-05, |
|
"loss": 1.0397, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9961835541281746e-05, |
|
"loss": 1.076, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.99577154191325e-05, |
|
"loss": 1.256, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9953384619906945e-05, |
|
"loss": 1.0442, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9948843235232534e-05, |
|
"loss": 1.0692, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.994409136119212e-05, |
|
"loss": 0.9714, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9939129098321904e-05, |
|
"loss": 1.1995, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.993395655160932e-05, |
|
"loss": 1.0976, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9928573830490828e-05, |
|
"loss": 1.0683, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9922981048849563e-05, |
|
"loss": 1.042, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9917178325012962e-05, |
|
"loss": 0.9205, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9911165781750235e-05, |
|
"loss": 1.0892, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9904943546269787e-05, |
|
"loss": 1.1622, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9898511750216505e-05, |
|
"loss": 1.0148, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.989187052966899e-05, |
|
"loss": 1.0344, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9885020025136677e-05, |
|
"loss": 1.0166, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9877960381556852e-05, |
|
"loss": 1.0739, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9870691748291594e-05, |
|
"loss": 1.0504, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.986321427912461e-05, |
|
"loss": 1.0437, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9855528132257984e-05, |
|
"loss": 1.0806, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9847633470308833e-05, |
|
"loss": 1.0201, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9839530460305863e-05, |
|
"loss": 1.1955, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9831219273685828e-05, |
|
"loss": 1.1367, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9822700086289915e-05, |
|
"loss": 1.2063, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9813973078360024e-05, |
|
"loss": 1.1051, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.980503843453494e-05, |
|
"loss": 1.0723, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.979589634384644e-05, |
|
"loss": 0.9828, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9786546999715285e-05, |
|
"loss": 0.8472, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9776990599947148e-05, |
|
"loss": 1.0304, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9767227346728393e-05, |
|
"loss": 1.0447, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.975725744662183e-05, |
|
"loss": 1.1636, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9747081110562324e-05, |
|
"loss": 1.0379, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.973669855385235e-05, |
|
"loss": 1.1352, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9726109996157423e-05, |
|
"loss": 1.0338, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9715315661501454e-05, |
|
"loss": 1.0593, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.970431577826202e-05, |
|
"loss": 1.2188, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9693110579165514e-05, |
|
"loss": 1.1663, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9681700301282235e-05, |
|
"loss": 1.0257, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9670085186021377e-05, |
|
"loss": 1.1147, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.96582654791259e-05, |
|
"loss": 1.0613, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9646241430667353e-05, |
|
"loss": 1.0762, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.963401329504057e-05, |
|
"loss": 1.122, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9621581330958293e-05, |
|
"loss": 0.9268, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.96089458014457e-05, |
|
"loss": 0.9894, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9596106973834833e-05, |
|
"loss": 1.0181, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.958306511975895e-05, |
|
"loss": 0.9888, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9569820515146768e-05, |
|
"loss": 0.9074, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.955637344021664e-05, |
|
"loss": 0.8809, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9542724179470616e-05, |
|
"loss": 1.009, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9528873021688422e-05, |
|
"loss": 1.2569, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9514820259921353e-05, |
|
"loss": 1.2034, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9500566191486077e-05, |
|
"loss": 1.1256, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9486111117958343e-05, |
|
"loss": 1.075, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9471455345166595e-05, |
|
"loss": 0.8509, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9456599183185508e-05, |
|
"loss": 1.1977, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9441542946329422e-05, |
|
"loss": 1.1116, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9426286953145707e-05, |
|
"loss": 1.0384, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9410831526407984e-05, |
|
"loss": 1.1884, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9395176993109357e-05, |
|
"loss": 0.9846, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.937932368445544e-05, |
|
"loss": 1.008, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9363271935857374e-05, |
|
"loss": 0.9739, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9347022086924733e-05, |
|
"loss": 0.9542, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.933057448145833e-05, |
|
"loss": 1.0802, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9313929467442953e-05, |
|
"loss": 1.0577, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9297087397039985e-05, |
|
"loss": 0.9401, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9280048626579964e-05, |
|
"loss": 1.2293, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.926281351655506e-05, |
|
"loss": 1.0088, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.924538243161142e-05, |
|
"loss": 1.1677, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.922775574054147e-05, |
|
"loss": 1.1176, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9209933816276104e-05, |
|
"loss": 0.9818, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.91919170358768e-05, |
|
"loss": 1.0263, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9173705780527643e-05, |
|
"loss": 1.1362, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9155300435527255e-05, |
|
"loss": 1.0157, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9136701390280644e-05, |
|
"loss": 1.0391, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9117909038290975e-05, |
|
"loss": 1.0368, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9098923777151222e-05, |
|
"loss": 1.1465, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9079746008535784e-05, |
|
"loss": 1.0191, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.906037613819197e-05, |
|
"loss": 0.9189, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9040814575931413e-05, |
|
"loss": 0.947, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9021061735621413e-05, |
|
"loss": 1.1261, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9001118035176163e-05, |
|
"loss": 0.8959, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8980983896547922e-05, |
|
"loss": 1.0372, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.896065974571808e-05, |
|
"loss": 0.982, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8940146012688148e-05, |
|
"loss": 1.0937, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.891944313147066e-05, |
|
"loss": 1.0602, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.889855154007999e-05, |
|
"loss": 0.9134, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8877471680523083e-05, |
|
"loss": 0.9775, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8856203998790113e-05, |
|
"loss": 1.1114, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8834748944845028e-05, |
|
"loss": 0.9003, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8813106972616055e-05, |
|
"loss": 0.9752, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.879127853998607e-05, |
|
"loss": 1.0671, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8769264108782935e-05, |
|
"loss": 0.9768, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8747064144769705e-05, |
|
"loss": 1.0939, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.872467911763479e-05, |
|
"loss": 0.864, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.870210950098201e-05, |
|
"loss": 0.9725, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8679355772320585e-05, |
|
"loss": 0.9786, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8656418413055006e-05, |
|
"loss": 1.0435, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8633297908474883e-05, |
|
"loss": 1.0748, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8609994747744663e-05, |
|
"loss": 0.9074, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8586509423893268e-05, |
|
"loss": 1.079, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8562842433803688e-05, |
|
"loss": 0.9117, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.853899427820245e-05, |
|
"loss": 1.1194, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8514965461649032e-05, |
|
"loss": 1.0018, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8490756492525185e-05, |
|
"loss": 0.9685, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8466367883024187e-05, |
|
"loss": 1.0195, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8441800149139988e-05, |
|
"loss": 0.9216, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8417053810656302e-05, |
|
"loss": 1.0363, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.839212939113562e-05, |
|
"loss": 1.1621, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8367027417908115e-05, |
|
"loss": 1.0108, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8341748422060504e-05, |
|
"loss": 1.0736, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8316292938424788e-05, |
|
"loss": 1.13, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8290661505566964e-05, |
|
"loss": 0.8874, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8264854665775607e-05, |
|
"loss": 0.8437, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.823887296505041e-05, |
|
"loss": 1.0343, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8212716953090627e-05, |
|
"loss": 1.1008, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8186387183283446e-05, |
|
"loss": 0.9401, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8159884212692275e-05, |
|
"loss": 1.0816, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.813320860204497e-05, |
|
"loss": 1.0325, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8106360915721956e-05, |
|
"loss": 1.0924, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.807934172174429e-05, |
|
"loss": 0.9261, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8052151591761644e-05, |
|
"loss": 0.9983, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.802479110104022e-05, |
|
"loss": 1.0472, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.7997260828450568e-05, |
|
"loss": 1.1403, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.796956135645534e-05, |
|
"loss": 1.0501, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.7941693271096966e-05, |
|
"loss": 1.0481, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.7913657161985266e-05, |
|
"loss": 1.0142, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7885453622284957e-05, |
|
"loss": 1.1758, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7857083248703126e-05, |
|
"loss": 1.1482, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7828546641476577e-05, |
|
"loss": 1.1445, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7799844404359162e-05, |
|
"loss": 1.1811, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.777097714460898e-05, |
|
"loss": 0.9937, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.774194547297555e-05, |
|
"loss": 1.151, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7712750003686884e-05, |
|
"loss": 1.0487, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.768339135443648e-05, |
|
"loss": 1.0659, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.765387014637027e-05, |
|
"loss": 0.8877, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7624187004073464e-05, |
|
"loss": 1.1443, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7594342555557343e-05, |
|
"loss": 0.9694, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7564337432245977e-05, |
|
"loss": 1.1203, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7534172268962853e-05, |
|
"loss": 0.9387, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7503847703917456e-05, |
|
"loss": 0.9995, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.747336437869176e-05, |
|
"loss": 0.9974, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7442722938226647e-05, |
|
"loss": 0.8692, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7411924030808287e-05, |
|
"loss": 0.9464, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7380968308054385e-05, |
|
"loss": 0.9365, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.734985642490043e-05, |
|
"loss": 1.0132, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7318589039585818e-05, |
|
"loss": 1.0511, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.728716681363993e-05, |
|
"loss": 1.0418, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7255590411868138e-05, |
|
"loss": 1.0608, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7223860502337735e-05, |
|
"loss": 1.0386, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7191977756363808e-05, |
|
"loss": 0.973, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7159942848495027e-05, |
|
"loss": 1.1204, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7127756456499373e-05, |
|
"loss": 1.0082, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7095419261349818e-05, |
|
"loss": 1.0809, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.706293194720989e-05, |
|
"loss": 0.7784, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.703029520141921e-05, |
|
"loss": 0.9853, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6997509714478944e-05, |
|
"loss": 1.1248, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6964576180037217e-05, |
|
"loss": 1.0447, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.693149529487441e-05, |
|
"loss": 0.9837, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6898267758888422e-05, |
|
"loss": 0.9668, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.686489427507988e-05, |
|
"loss": 1.094, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.683137554953725e-05, |
|
"loss": 1.071, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6797712291421905e-05, |
|
"loss": 1.0331, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6763905212953103e-05, |
|
"loss": 0.9158, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.672995502939295e-05, |
|
"loss": 1.0372, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.669586245903125e-05, |
|
"loss": 0.9151, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6661628223170298e-05, |
|
"loss": 1.006, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.662725304610964e-05, |
|
"loss": 0.9668, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.659273765513073e-05, |
|
"loss": 1.0059, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6558082780481562e-05, |
|
"loss": 0.8644, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6523289155361206e-05, |
|
"loss": 0.9563, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6488357515904297e-05, |
|
"loss": 0.9226, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.645328860116546e-05, |
|
"loss": 1.0188, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6418083153103683e-05, |
|
"loss": 0.9819, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.638274191656661e-05, |
|
"loss": 1.1677, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.634726563927478e-05, |
|
"loss": 1.0698, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6311655071805824e-05, |
|
"loss": 0.9551, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6275910967578558e-05, |
|
"loss": 1.1529, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6240034082837078e-05, |
|
"loss": 1.0159, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6204025176634712e-05, |
|
"loss": 1.2224, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6167885010818017e-05, |
|
"loss": 1.0017, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6131614350010614e-05, |
|
"loss": 0.8603, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6095213961597034e-05, |
|
"loss": 1.0294, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6058684615706476e-05, |
|
"loss": 1.1921, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.6022027085196516e-05, |
|
"loss": 0.983, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5985242145636753e-05, |
|
"loss": 1.089, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.59483305752924e-05, |
|
"loss": 0.8742, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.591129315510782e-05, |
|
"loss": 1.0083, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5874130668690004e-05, |
|
"loss": 0.9337, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5836843902291986e-05, |
|
"loss": 1.0195, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5799433644796215e-05, |
|
"loss": 0.9243, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5761900687697867e-05, |
|
"loss": 0.9224, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5724245825088086e-05, |
|
"loss": 1.0919, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.568646985363719e-05, |
|
"loss": 0.9274, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5648573572577837e-05, |
|
"loss": 0.9783, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.561055778368807e-05, |
|
"loss": 1.2035, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5572423291274393e-05, |
|
"loss": 0.9525, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.553417090215474e-05, |
|
"loss": 1.2295, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.549580142564141e-05, |
|
"loss": 1.0563, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.545731567352392e-05, |
|
"loss": 0.8898, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5418714460051875e-05, |
|
"loss": 1.1597, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5379998601917706e-05, |
|
"loss": 0.9869, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.534116891823939e-05, |
|
"loss": 0.9286, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5302226230543146e-05, |
|
"loss": 0.8408, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5263171362746028e-05, |
|
"loss": 0.8621, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5224005141138509e-05, |
|
"loss": 0.9568, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.518472839436699e-05, |
|
"loss": 1.0426, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5145341953416273e-05, |
|
"loss": 0.9256, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5105846651591981e-05, |
|
"loss": 1.1305, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5066243324502919e-05, |
|
"loss": 1.0253, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5026532810043408e-05, |
|
"loss": 1.0201, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.4986715948375543e-05, |
|
"loss": 0.8552, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.494679358191143e-05, |
|
"loss": 0.971, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4906766555295357e-05, |
|
"loss": 1.1717, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4866635715385926e-05, |
|
"loss": 1.0087, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4826401911238131e-05, |
|
"loss": 1.0643, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4786065994085397e-05, |
|
"loss": 1.0314, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4745628817321577e-05, |
|
"loss": 1.1085, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4705091236482888e-05, |
|
"loss": 0.9997, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4664454109229809e-05, |
|
"loss": 1.1218, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4623718295328946e-05, |
|
"loss": 0.983, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4582884656634828e-05, |
|
"loss": 1.0985, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4541954057071692e-05, |
|
"loss": 1.1434, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4500927362615178e-05, |
|
"loss": 0.9975, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4459805441274028e-05, |
|
"loss": 0.9655, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4418589163071721e-05, |
|
"loss": 1.0507, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4377279400028053e-05, |
|
"loss": 1.0641, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.433587702614069e-05, |
|
"loss": 0.86, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4294382917366698e-05, |
|
"loss": 0.9321, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4252797951603978e-05, |
|
"loss": 0.9124, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4211123008672714e-05, |
|
"loss": 0.9226, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4169358970296751e-05, |
|
"loss": 1.0869, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4127506720084943e-05, |
|
"loss": 0.9645, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4085567143512458e-05, |
|
"loss": 0.94, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4043541127902037e-05, |
|
"loss": 0.9829, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4001429562405227e-05, |
|
"loss": 1.0159, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3959233337983582e-05, |
|
"loss": 1.0578, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3916953347389776e-05, |
|
"loss": 0.9338, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.387459048514876e-05, |
|
"loss": 1.0826, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.38321456475388e-05, |
|
"loss": 0.8876, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.378961973257254e-05, |
|
"loss": 1.0341, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3747013639977972e-05, |
|
"loss": 0.961, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.370432827117945e-05, |
|
"loss": 0.898, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3661564529278562e-05, |
|
"loss": 0.9889, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3618723319035056e-05, |
|
"loss": 1.096, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.35758055468477e-05, |
|
"loss": 1.044, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3532812120735088e-05, |
|
"loss": 0.9632, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3489743950316431e-05, |
|
"loss": 0.8849, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3446601946792334e-05, |
|
"loss": 1.0345, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3403387022925488e-05, |
|
"loss": 0.8672, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3360100093021378e-05, |
|
"loss": 1.2281, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3316742072908927e-05, |
|
"loss": 0.9715, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3273313879921139e-05, |
|
"loss": 1.0987, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3229816432875665e-05, |
|
"loss": 0.9382, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.318625065205538e-05, |
|
"loss": 0.815, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.31426174591889e-05, |
|
"loss": 1.075, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.309891777743111e-05, |
|
"loss": 1.0681, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3055152531343592e-05, |
|
"loss": 0.9761, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3011322646875088e-05, |
|
"loss": 0.9582, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2967429051341913e-05, |
|
"loss": 0.9192, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2923472673408321e-05, |
|
"loss": 1.1883, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2879454443066862e-05, |
|
"loss": 1.0281, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2835375291618716e-05, |
|
"loss": 1.1208, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2791236151653974e-05, |
|
"loss": 0.9879, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2747037957031917e-05, |
|
"loss": 1.0591, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2702781642861252e-05, |
|
"loss": 1.0364, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2658468145480338e-05, |
|
"loss": 1.0091, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2614098402437367e-05, |
|
"loss": 1.0409, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2569673352470523e-05, |
|
"loss": 0.9112, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2525193935488138e-05, |
|
"loss": 1.0983, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2480661092548787e-05, |
|
"loss": 1.2059, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2436075765841396e-05, |
|
"loss": 0.9998, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.239143889866529e-05, |
|
"loss": 0.888, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2346751435410249e-05, |
|
"loss": 0.8673, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.230201432153653e-05, |
|
"loss": 1.0609, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2257228503554834e-05, |
|
"loss": 0.8979, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2212394929006337e-05, |
|
"loss": 0.8436, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2167514546442576e-05, |
|
"loss": 1.0598, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2122588305405434e-05, |
|
"loss": 1.2273, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.207761715640702e-05, |
|
"loss": 1.0679, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2032602050909575e-05, |
|
"loss": 1.037, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1987543941305322e-05, |
|
"loss": 0.9155, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.194244378089635e-05, |
|
"loss": 1.0046, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1897302523874405e-05, |
|
"loss": 0.9412, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.185212112530073e-05, |
|
"loss": 1.0702, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.180690054108585e-05, |
|
"loss": 0.9705, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1761641727969344e-05, |
|
"loss": 1.0439, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1716345643499608e-05, |
|
"loss": 1.1034, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1671013246013595e-05, |
|
"loss": 0.954, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1625645494616535e-05, |
|
"loss": 1.1571, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.158024334916165e-05, |
|
"loss": 0.9643, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1534807770229845e-05, |
|
"loss": 0.8641, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1489339719109378e-05, |
|
"loss": 0.9144, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1443840157775528e-05, |
|
"loss": 1.0252, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1398310048870247e-05, |
|
"loss": 0.8798, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1352750355681771e-05, |
|
"loss": 0.9269, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1307162042124278e-05, |
|
"loss": 0.82, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1261546072717455e-05, |
|
"loss": 0.8315, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1215903412566112e-05, |
|
"loss": 0.9339, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1170235027339766e-05, |
|
"loss": 1.1432, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1124541883252199e-05, |
|
"loss": 0.8138, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1078824947041016e-05, |
|
"loss": 0.9904, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1033085185947208e-05, |
|
"loss": 0.9693, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0987323567694661e-05, |
|
"loss": 1.0318, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0941541060469712e-05, |
|
"loss": 0.9994, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0895738632900637e-05, |
|
"loss": 1.0541, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0849917254037174e-05, |
|
"loss": 0.9852, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0804077893330022e-05, |
|
"loss": 0.8855, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0758221520610321e-05, |
|
"loss": 1.0008, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0712349106069131e-05, |
|
"loss": 0.9588, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0666461620236923e-05, |
|
"loss": 0.9101, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0620560033963026e-05, |
|
"loss": 0.825, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0574645318395096e-05, |
|
"loss": 1.0322, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0528718444958568e-05, |
|
"loss": 1.0302, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0482780385336107e-05, |
|
"loss": 1.1374, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0436832111447034e-05, |
|
"loss": 0.941, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0390874595426794e-05, |
|
"loss": 0.9547, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0344908809606353e-05, |
|
"loss": 1.0572, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0298935726491648e-05, |
|
"loss": 0.9117, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0252956318743006e-05, |
|
"loss": 0.9152, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.020697155915457e-05, |
|
"loss": 0.8955, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0160982420633701e-05, |
|
"loss": 0.9301, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0114989876180424e-05, |
|
"loss": 0.9871, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0068994898866805e-05, |
|
"loss": 0.9394, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.002299846181639e-05, |
|
"loss": 1.0454, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.977001538183615e-06, |
|
"loss": 1.0332, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.9310051011332e-06, |
|
"loss": 1.0111, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.88501012381958e-06, |
|
"loss": 0.9455, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.839017579366299e-06, |
|
"loss": 0.9485, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.793028440845435e-06, |
|
"loss": 0.9813, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.747043681256996e-06, |
|
"loss": 0.9804, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.701064273508356e-06, |
|
"loss": 0.9842, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.65509119039365e-06, |
|
"loss": 1.0643, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.609125404573211e-06, |
|
"loss": 0.8455, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.563167888552969e-06, |
|
"loss": 0.999, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.517219614663896e-06, |
|
"loss": 0.8631, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.471281555041432e-06, |
|
"loss": 1.0162, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.425354681604908e-06, |
|
"loss": 1.0233, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.379439966036977e-06, |
|
"loss": 0.8583, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.33353837976308e-06, |
|
"loss": 1.0218, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.28765089393087e-06, |
|
"loss": 0.9836, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.241778479389684e-06, |
|
"loss": 0.8345, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.195922106669981e-06, |
|
"loss": 0.9187, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.150082745962828e-06, |
|
"loss": 0.9211, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.104261367099365e-06, |
|
"loss": 1.0165, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.058458939530295e-06, |
|
"loss": 0.9564, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.01267643230534e-06, |
|
"loss": 0.9146, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.966914814052797e-06, |
|
"loss": 0.9635, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.921175052958985e-06, |
|
"loss": 1.0199, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.875458116747807e-06, |
|
"loss": 1.2024, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.829764972660237e-06, |
|
"loss": 0.9747, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.78409658743389e-06, |
|
"loss": 0.9822, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.738453927282549e-06, |
|
"loss": 0.9269, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.692837957875727e-06, |
|
"loss": 0.9547, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.647249644318232e-06, |
|
"loss": 0.8801, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.601689951129757e-06, |
|
"loss": 1.006, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.556159842224472e-06, |
|
"loss": 1.1163, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.510660280890625e-06, |
|
"loss": 1.154, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.465192229770157e-06, |
|
"loss": 0.8659, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.419756650838351e-06, |
|
"loss": 1.062, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.374354505383468e-06, |
|
"loss": 0.9176, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.32898675398641e-06, |
|
"loss": 0.8966, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.283654356500395e-06, |
|
"loss": 1.033, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.238358272030658e-06, |
|
"loss": 0.9129, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.193099458914148e-06, |
|
"loss": 1.0298, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.147878874699273e-06, |
|
"loss": 0.8082, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.102697476125596e-06, |
|
"loss": 0.93, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.057556219103653e-06, |
|
"loss": 1.0115, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.012456058694678e-06, |
|
"loss": 1.0265, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.967397949090432e-06, |
|
"loss": 0.9531, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.922382843592983e-06, |
|
"loss": 0.9911, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.87741169459457e-06, |
|
"loss": 0.9252, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.832485453557424e-06, |
|
"loss": 0.8949, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.787605070993668e-06, |
|
"loss": 0.9604, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.742771496445167e-06, |
|
"loss": 0.9617, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.697985678463476e-06, |
|
"loss": 1.2224, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.653248564589751e-06, |
|
"loss": 1.0475, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.608561101334714e-06, |
|
"loss": 1.1681, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.563924234158608e-06, |
|
"loss": 1.0791, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.519338907451214e-06, |
|
"loss": 1.0108, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.474806064511864e-06, |
|
"loss": 0.9222, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.43032664752948e-06, |
|
"loss": 1.1035, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.385901597562637e-06, |
|
"loss": 1.0648, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.341531854519664e-06, |
|
"loss": 0.9768, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.297218357138749e-06, |
|
"loss": 1.0155, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.25296204296809e-06, |
|
"loss": 0.951, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.2087638483460295e-06, |
|
"loss": 1.0709, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.164624708381286e-06, |
|
"loss": 0.9438, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.1205455569331384e-06, |
|
"loss": 0.987, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.076527326591682e-06, |
|
"loss": 0.877, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.03257094865809e-06, |
|
"loss": 1.0672, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.988677353124913e-06, |
|
"loss": 0.9273, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.94484746865641e-06, |
|
"loss": 1.0268, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.901082222568894e-06, |
|
"loss": 0.8797, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.857382540811101e-06, |
|
"loss": 1.013, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.813749347944625e-06, |
|
"loss": 1.0319, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.770183567124336e-06, |
|
"loss": 1.0675, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.726686120078862e-06, |
|
"loss": 0.9384, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.683257927091073e-06, |
|
"loss": 0.9433, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.639899906978626e-06, |
|
"loss": 1.0475, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.596612977074515e-06, |
|
"loss": 0.9955, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.5533980532076716e-06, |
|
"loss": 0.9284, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.510256049683572e-06, |
|
"loss": 1.0718, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.467187879264916e-06, |
|
"loss": 0.7599, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.4241944531523e-06, |
|
"loss": 1.0618, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.381276680964947e-06, |
|
"loss": 0.823, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.338435470721442e-06, |
|
"loss": 1.1501, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.295671728820553e-06, |
|
"loss": 1.0543, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.2529863600220285e-06, |
|
"loss": 0.967, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.210380267427467e-06, |
|
"loss": 0.954, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.167854352461202e-06, |
|
"loss": 0.9743, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.125409514851244e-06, |
|
"loss": 1.1063, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.083046652610224e-06, |
|
"loss": 0.9986, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.040766662016424e-06, |
|
"loss": 0.9286, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.998570437594775e-06, |
|
"loss": 0.9988, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.956458872097966e-06, |
|
"loss": 0.907, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.914432856487544e-06, |
|
"loss": 0.9555, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.872493279915059e-06, |
|
"loss": 0.949, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.830641029703254e-06, |
|
"loss": 0.8961, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.788876991327288e-06, |
|
"loss": 0.9688, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.747202048396023e-06, |
|
"loss": 0.991, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.705617082633305e-06, |
|
"loss": 1.0318, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.664122973859313e-06, |
|
"loss": 0.9646, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.622720599971953e-06, |
|
"loss": 0.9297, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.5814108369282824e-06, |
|
"loss": 0.9207, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.540194558725973e-06, |
|
"loss": 0.8339, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.499072637384825e-06, |
|
"loss": 1.0934, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.458045942928309e-06, |
|
"loss": 0.9421, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.417115343365172e-06, |
|
"loss": 1.1037, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.37628170467106e-06, |
|
"loss": 1.1366, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.335545890770193e-06, |
|
"loss": 0.9912, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.294908763517115e-06, |
|
"loss": 0.9681, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.254371182678424e-06, |
|
"loss": 0.9724, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.213934005914607e-06, |
|
"loss": 0.8659, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.1735980887618745e-06, |
|
"loss": 0.9341, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.133364284614077e-06, |
|
"loss": 0.9018, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.093233444704642e-06, |
|
"loss": 0.99, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.053206418088572e-06, |
|
"loss": 1.1519, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.01328405162446e-06, |
|
"loss": 1.0575, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.973467189956596e-06, |
|
"loss": 1.0781, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.933756675497082e-06, |
|
"loss": 0.9561, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.894153348408022e-06, |
|
"loss": 1.1172, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.854658046583728e-06, |
|
"loss": 1.0779, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.815271605633012e-06, |
|
"loss": 0.8225, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.775994858861492e-06, |
|
"loss": 0.931, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.7368286372539775e-06, |
|
"loss": 1.0002, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.697773769456859e-06, |
|
"loss": 1.0456, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.658831081760614e-06, |
|
"loss": 1.1681, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.620001398082295e-06, |
|
"loss": 1.1472, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.581285539948126e-06, |
|
"loss": 0.9188, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.542684326476082e-06, |
|
"loss": 1.0009, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.504198574358596e-06, |
|
"loss": 0.9792, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.4658290978452615e-06, |
|
"loss": 1.0876, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.4275767087256095e-06, |
|
"loss": 1.076, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.389442216311933e-06, |
|
"loss": 0.9606, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.351426427422165e-06, |
|
"loss": 0.9863, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.313530146362809e-06, |
|
"loss": 0.9644, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.275754174911921e-06, |
|
"loss": 1.0194, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2380993123021385e-06, |
|
"loss": 0.8884, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.200566355203784e-06, |
|
"loss": 1.0964, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.163156097708014e-06, |
|
"loss": 0.8013, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.125869331309999e-06, |
|
"loss": 0.9615, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.088706844892182e-06, |
|
"loss": 0.9682, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.051669424707603e-06, |
|
"loss": 0.8881, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.014757854363249e-06, |
|
"loss": 1.0597, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.977972914803487e-06, |
|
"loss": 1.0211, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.9413153842935255e-06, |
|
"loss": 0.9069, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.904786038402968e-06, |
|
"loss": 0.9595, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.868385649989388e-06, |
|
"loss": 1.0338, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.832114989181988e-06, |
|
"loss": 1.0379, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.795974823365287e-06, |
|
"loss": 0.937, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7599659171629254e-06, |
|
"loss": 0.8212, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.724089032421441e-06, |
|
"loss": 0.7433, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.688344928194181e-06, |
|
"loss": 1.0898, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.652734360725224e-06, |
|
"loss": 1.0453, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.617258083433396e-06, |
|
"loss": 1.1607, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5819168468963183e-06, |
|
"loss": 0.8873, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5467113988345438e-06, |
|
"loss": 0.9415, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5116424840957066e-06, |
|
"loss": 1.067, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.4767108446387955e-06, |
|
"loss": 1.0038, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.441917219518438e-06, |
|
"loss": 0.9977, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.407262344869272e-06, |
|
"loss": 0.946, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3727469538903645e-06, |
|
"loss": 0.9236, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3383717768297054e-06, |
|
"loss": 1.148, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3041375409687525e-06, |
|
"loss": 1.0021, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2700449706070536e-06, |
|
"loss": 0.9826, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2360947870469016e-06, |
|
"loss": 0.9472, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.2022877085780967e-06, |
|
"loss": 1.0949, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.168624450462746e-06, |
|
"loss": 0.8969, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.13510572492012e-06, |
|
"loss": 0.9965, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1017322411115803e-06, |
|
"loss": 1.1017, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0685047051255946e-06, |
|
"loss": 1.1201, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.035423819962785e-06, |
|
"loss": 0.9899, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.002490285521059e-06, |
|
"loss": 0.8676, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.969704798580796e-06, |
|
"loss": 1.0323, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.937068052790112e-06, |
|
"loss": 1.0097, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.904580738650181e-06, |
|
"loss": 1.1074, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.872243543500629e-06, |
|
"loss": 1.0416, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8400571515049793e-06, |
|
"loss": 0.8266, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8080222436361937e-06, |
|
"loss": 1.063, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.776139497662266e-06, |
|
"loss": 1.0304, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7444095881318656e-06, |
|
"loss": 1.0025, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.712833186360072e-06, |
|
"loss": 1.0514, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6814109604141845e-06, |
|
"loss": 1.0438, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.650143575099573e-06, |
|
"loss": 0.9465, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6190316919456184e-06, |
|
"loss": 1.0099, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.588075969191718e-06, |
|
"loss": 1.0305, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5572770617733544e-06, |
|
"loss": 0.9918, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.5266356213082434e-06, |
|
"loss": 0.8579, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.496152296082548e-06, |
|
"loss": 1.2437, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.465827731037147e-06, |
|
"loss": 1.1401, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4356625677540237e-06, |
|
"loss": 1.0769, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.405657444442657e-06, |
|
"loss": 0.9141, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.375812995926541e-06, |
|
"loss": 0.9615, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3461298536297326e-06, |
|
"loss": 0.7358, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.3166086455635216e-06, |
|
"loss": 1.048, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2872499963131155e-06, |
|
"loss": 1.1585, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2580545270244512e-06, |
|
"loss": 1.0178, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.229022855391024e-06, |
|
"loss": 0.9983, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2001555956408428e-06, |
|
"loss": 0.899, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1714533585234243e-06, |
|
"loss": 0.8212, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1429167512968763e-06, |
|
"loss": 0.9148, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.114546377715042e-06, |
|
"loss": 0.9587, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0863428380147346e-06, |
|
"loss": 1.0275, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0583067289030334e-06, |
|
"loss": 0.8267, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.030438643544663e-06, |
|
"loss": 0.9677, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.002739171549435e-06, |
|
"loss": 0.9609, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9752088989597794e-06, |
|
"loss": 1.074, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9478484082383564e-06, |
|
"loss": 0.9023, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.920658278255714e-06, |
|
"loss": 0.9831, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.893639084278046e-06, |
|
"loss": 0.966, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8667913979550302e-06, |
|
"loss": 0.9048, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8401157873077258e-06, |
|
"loss": 0.9463, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.813612816716558e-06, |
|
"loss": 0.9245, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.787283046909376e-06, |
|
"loss": 0.9813, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7611270349495923e-06, |
|
"loss": 0.9172, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.735145334224394e-06, |
|
"loss": 0.9771, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7093384944330394e-06, |
|
"loss": 0.9921, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6837070615752116e-06, |
|
"loss": 1.0297, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.658251577939497e-06, |
|
"loss": 0.9288, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.632972582091884e-06, |
|
"loss": 0.8836, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6078706088643836e-06, |
|
"loss": 0.8562, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5829461893437015e-06, |
|
"loss": 0.8242, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5581998508600161e-06, |
|
"loss": 0.9472, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.533632116975814e-06, |
|
"loss": 1.026, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5092435074748145e-06, |
|
"loss": 0.8626, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4850345383509701e-06, |
|
"loss": 0.869, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4610057217975526e-06, |
|
"loss": 0.9362, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4371575661963145e-06, |
|
"loss": 0.9731, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.413490576106733e-06, |
|
"loss": 1.0407, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3900052522553398e-06, |
|
"loss": 0.9148, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3667020915251173e-06, |
|
"loss": 1.1184, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3435815869449964e-06, |
|
"loss": 0.9784, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3206442276794207e-06, |
|
"loss": 1.0372, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2978904990179919e-06, |
|
"loss": 0.9861, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2753208823652142e-06, |
|
"loss": 1.1687, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2529358552302973e-06, |
|
"loss": 1.1031, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2307358912170687e-06, |
|
"loss": 0.8472, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2087214600139308e-06, |
|
"loss": 1.0433, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1868930273839474e-06, |
|
"loss": 0.9519, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1652510551549722e-06, |
|
"loss": 0.9413, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1437960012098892e-06, |
|
"loss": 1.0189, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1225283194769176e-06, |
|
"loss": 0.9054, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1014484599200125e-06, |
|
"loss": 1.0996, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0805568685293421e-06, |
|
"loss": 0.9392, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0598539873118552e-06, |
|
"loss": 0.9674, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0393402542819231e-06, |
|
"loss": 1.0506, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0190161034520797e-06, |
|
"loss": 0.9507, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.988819648238379e-07, |
|
"loss": 0.7685, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.789382643785894e-07, |
|
"loss": 0.9887, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.591854240685882e-07, |
|
"loss": 1.0764, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.396238618080322e-07, |
|
"loss": 1.0055, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.202539914642184e-07, |
|
"loss": 0.7657, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.010762228487813e-07, |
|
"loss": 1.1411, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.82090961709029e-07, |
|
"loss": 0.8572, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.632986097193574e-07, |
|
"loss": 1.1252, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.446995644727473e-07, |
|
"loss": 0.914, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.2629421947236e-07, |
|
"loss": 0.9381, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.080829641232013e-07, |
|
"loss": 0.9887, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.900661837238977e-07, |
|
"loss": 0.8681, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.722442594585312e-07, |
|
"loss": 0.9718, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.546175683885815e-07, |
|
"loss": 0.9835, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.371864834449405e-07, |
|
"loss": 1.0341, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.19951373420037e-07, |
|
"loss": 1.038, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.029126029600197e-07, |
|
"loss": 0.8693, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.860705325570493e-07, |
|
"loss": 1.09, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.694255185416687e-07, |
|
"loss": 0.9367, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.529779130752678e-07, |
|
"loss": 1.004, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.367280641426277e-07, |
|
"loss": 0.9833, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.206763155445628e-07, |
|
"loss": 0.928, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.048230068906447e-07, |
|
"loss": 0.8501, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.891684735920166e-07, |
|
"loss": 1.0349, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.737130468542973e-07, |
|
"loss": 1.0553, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.58457053670578e-07, |
|
"loss": 0.8809, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.434008168144944e-07, |
|
"loss": 0.778, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.285446548334072e-07, |
|
"loss": 0.9346, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.138888820416587e-07, |
|
"loss": 0.9802, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.994338085139261e-07, |
|
"loss": 1.079, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.851797400786506e-07, |
|
"loss": 1.0603, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.711269783115813e-07, |
|
"loss": 0.9383, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.572758205293848e-07, |
|
"loss": 0.9539, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.4362655978336e-07, |
|
"loss": 0.9954, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.301794848532326e-07, |
|
"loss": 1.0169, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.169348802410522e-07, |
|
"loss": 0.8573, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.0389302616516747e-07, |
|
"loss": 0.9394, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.9105419855430146e-07, |
|
"loss": 0.8933, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.7841866904170797e-07, |
|
"loss": 0.924, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.659867049594312e-07, |
|
"loss": 1.0655, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.5375856933264843e-07, |
|
"loss": 1.0421, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4173452087410186e-07, |
|
"loss": 0.8971, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.2991481397862567e-07, |
|
"loss": 0.987, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.1829969871776556e-07, |
|
"loss": 0.8905, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.068894208344897e-07, |
|
"loss": 0.8928, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.9568422173798293e-07, |
|
"loss": 1.0383, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.846843384985476e-07, |
|
"loss": 1.012, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.738900038425796e-07, |
|
"loss": 0.8459, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.633014461476524e-07, |
|
"loss": 0.9434, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.5291888943767993e-07, |
|
"loss": 0.9878, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.4274255337817465e-07, |
|
"loss": 0.9085, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.3277265327160903e-07, |
|
"loss": 0.8787, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.2300940005285377e-07, |
|
"loss": 1.0057, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.134530002847146e-07, |
|
"loss": 0.8496, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0410365615356364e-07, |
|
"loss": 1.0935, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9496156546506273e-07, |
|
"loss": 1.078, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.860269216399768e-07, |
|
"loss": 0.9329, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7729991371008504e-07, |
|
"loss": 1.0333, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6878072631417386e-07, |
|
"loss": 0.7911, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6046953969413913e-07, |
|
"loss": 1.0209, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5236652969116805e-07, |
|
"loss": 0.9866, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.444718677420176e-07, |
|
"loss": 0.919, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.367857208753931e-07, |
|
"loss": 0.9626, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2930825170840877e-07, |
|
"loss": 1.0431, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2203961844315048e-07, |
|
"loss": 0.8534, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1497997486332512e-07, |
|
"loss": 1.0252, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0812947033101207e-07, |
|
"loss": 1.0337, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0148824978349791e-07, |
|
"loss": 0.9547, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.505645373021455e-08, |
|
"loss": 1.042, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.883421824976479e-08, |
|
"loss": 1.0198, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.282167498703919e-08, |
|
"loss": 0.9271, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.701895115043823e-08, |
|
"loss": 1.0045, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.142616950917447e-08, |
|
"loss": 0.9935, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.604344839068022e-08, |
|
"loss": 1.0396, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.08709016780984e-08, |
|
"loss": 0.9228, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.590863880788111e-08, |
|
"loss": 0.9778, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.115676476746489e-08, |
|
"loss": 1.1956, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.6615380093055774e-08, |
|
"loss": 0.9367, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.228458086750098e-08, |
|
"loss": 1.044, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.8164458718255025e-08, |
|
"loss": 1.0658, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.425510081544237e-08, |
|
"loss": 1.0386, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.05565898700122e-08, |
|
"loss": 0.8835, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7069004131987652e-08, |
|
"loss": 0.7686, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.379241738881377e-08, |
|
"loss": 0.9699, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.0726898963793208e-08, |
|
"loss": 1.0765, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7872513714617403e-08, |
|
"loss": 1.0337, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5229322032002113e-08, |
|
"loss": 1.078, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2797379838402901e-08, |
|
"loss": 0.9252, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0576738586831614e-08, |
|
"loss": 1.0517, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.567445259775042e-09, |
|
"loss": 1.0319, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.7695423681901625e-09, |
|
"loss": 1.1047, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.183067950617071e-09, |
|
"loss": 0.9721, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.808055572362967e-09, |
|
"loss": 0.9197, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.6445343247982758e-09, |
|
"loss": 1.0672, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6925288247393589e-09, |
|
"loss": 0.9606, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.52059213927825e-10, |
|
"loss": 1.0111, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.2314115860642157e-10, |
|
"loss": 0.8451, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0578584918374824e-10, |
|
"loss": 0.9923, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.0306, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 705, |
|
"total_flos": 262405283512320.0, |
|
"train_loss": 1.0077643413070245, |
|
"train_runtime": 9660.1894, |
|
"train_samples_per_second": 4.665, |
|
"train_steps_per_second": 0.073 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 705, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 400, |
|
"total_flos": 262405283512320.0, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|