{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 312,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009615384615384616,
      "grad_norm": 6.949608325958252,
      "learning_rate": 3.125e-07,
      "loss": 1.0512,
      "step": 1
    },
    {
      "epoch": 0.019230769230769232,
      "grad_norm": 7.026703357696533,
      "learning_rate": 6.25e-07,
      "loss": 1.0592,
      "step": 2
    },
    {
      "epoch": 0.028846153846153848,
      "grad_norm": 6.95483922958374,
      "learning_rate": 9.375000000000001e-07,
      "loss": 1.0391,
      "step": 3
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 7.0581231117248535,
      "learning_rate": 1.25e-06,
      "loss": 1.0671,
      "step": 4
    },
    {
      "epoch": 0.04807692307692308,
      "grad_norm": 6.7878737449646,
      "learning_rate": 1.5625e-06,
      "loss": 1.0551,
      "step": 5
    },
    {
      "epoch": 0.057692307692307696,
      "grad_norm": 6.292112350463867,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 1.0392,
      "step": 6
    },
    {
      "epoch": 0.0673076923076923,
      "grad_norm": 4.9926958084106445,
      "learning_rate": 2.1875000000000002e-06,
      "loss": 0.9965,
      "step": 7
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 4.684077739715576,
      "learning_rate": 2.5e-06,
      "loss": 0.9904,
      "step": 8
    },
    {
      "epoch": 0.08653846153846154,
      "grad_norm": 2.9312803745269775,
      "learning_rate": 2.8125e-06,
      "loss": 0.9546,
      "step": 9
    },
    {
      "epoch": 0.09615384615384616,
      "grad_norm": 2.813443183898926,
      "learning_rate": 3.125e-06,
      "loss": 0.9474,
      "step": 10
    },
    {
      "epoch": 0.10576923076923077,
      "grad_norm": 2.5470032691955566,
      "learning_rate": 3.4375e-06,
      "loss": 0.9387,
      "step": 11
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 3.7931878566741943,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.9115,
      "step": 12
    },
    {
      "epoch": 0.125,
      "grad_norm": 4.256772994995117,
      "learning_rate": 4.0625000000000005e-06,
      "loss": 0.9166,
      "step": 13
    },
    {
      "epoch": 0.1346153846153846,
      "grad_norm": 4.049694538116455,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.8974,
      "step": 14
    },
    {
      "epoch": 0.14423076923076922,
      "grad_norm": 3.689324378967285,
      "learning_rate": 4.6875000000000004e-06,
      "loss": 0.8862,
      "step": 15
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 2.5206212997436523,
      "learning_rate": 5e-06,
      "loss": 0.8618,
      "step": 16
    },
    {
      "epoch": 0.16346153846153846,
      "grad_norm": 2.2595858573913574,
      "learning_rate": 5.3125e-06,
      "loss": 0.8466,
      "step": 17
    },
    {
      "epoch": 0.17307692307692307,
      "grad_norm": 1.8435250520706177,
      "learning_rate": 5.625e-06,
      "loss": 0.8225,
      "step": 18
    },
    {
      "epoch": 0.18269230769230768,
      "grad_norm": 1.453895092010498,
      "learning_rate": 5.9375e-06,
      "loss": 0.7979,
      "step": 19
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 1.2612786293029785,
      "learning_rate": 6.25e-06,
      "loss": 0.8085,
      "step": 20
    },
    {
      "epoch": 0.20192307692307693,
      "grad_norm": 1.2307376861572266,
      "learning_rate": 6.5625e-06,
      "loss": 0.7795,
      "step": 21
    },
    {
      "epoch": 0.21153846153846154,
      "grad_norm": 1.2606533765792847,
      "learning_rate": 6.875e-06,
      "loss": 0.7871,
      "step": 22
    },
    {
      "epoch": 0.22115384615384615,
      "grad_norm": 1.2339670658111572,
      "learning_rate": 7.1875e-06,
      "loss": 0.7624,
      "step": 23
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 1.0207619667053223,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.7557,
      "step": 24
    },
    {
      "epoch": 0.2403846153846154,
      "grad_norm": 0.8926437497138977,
      "learning_rate": 7.8125e-06,
      "loss": 0.7342,
      "step": 25
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.9713419079780579,
      "learning_rate": 8.125000000000001e-06,
      "loss": 0.7372,
      "step": 26
    },
    {
      "epoch": 0.25961538461538464,
      "grad_norm": 1.042178988456726,
      "learning_rate": 8.4375e-06,
      "loss": 0.7375,
      "step": 27
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 0.8239858150482178,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.7147,
      "step": 28
    },
    {
      "epoch": 0.27884615384615385,
      "grad_norm": 0.8827706575393677,
      "learning_rate": 9.0625e-06,
      "loss": 0.7287,
      "step": 29
    },
    {
      "epoch": 0.28846153846153844,
      "grad_norm": 0.9991267323493958,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.7046,
      "step": 30
    },
    {
      "epoch": 0.2980769230769231,
      "grad_norm": 0.8923914432525635,
      "learning_rate": 9.6875e-06,
      "loss": 0.701,
      "step": 31
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.7739760875701904,
      "learning_rate": 1e-05,
      "loss": 0.7091,
      "step": 32
    },
    {
      "epoch": 0.3173076923076923,
      "grad_norm": 0.9522834420204163,
      "learning_rate": 9.999685283773504e-06,
      "loss": 0.71,
      "step": 33
    },
    {
      "epoch": 0.3269230769230769,
      "grad_norm": 0.7752206921577454,
      "learning_rate": 9.998741174712534e-06,
      "loss": 0.6876,
      "step": 34
    },
    {
      "epoch": 0.33653846153846156,
      "grad_norm": 1.002880573272705,
      "learning_rate": 9.997167791667668e-06,
      "loss": 0.7019,
      "step": 35
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 0.7485476136207581,
      "learning_rate": 9.994965332706574e-06,
      "loss": 0.6779,
      "step": 36
    },
    {
      "epoch": 0.3557692307692308,
      "grad_norm": 0.8847535252571106,
      "learning_rate": 9.992134075089085e-06,
      "loss": 0.6753,
      "step": 37
    },
    {
      "epoch": 0.36538461538461536,
      "grad_norm": 0.6897234320640564,
      "learning_rate": 9.98867437523228e-06,
      "loss": 0.7009,
      "step": 38
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.9160583019256592,
      "learning_rate": 9.984586668665641e-06,
      "loss": 0.6828,
      "step": 39
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.7275518178939819,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.6833,
      "step": 40
    },
    {
      "epoch": 0.3942307692307692,
      "grad_norm": 0.7628114223480225,
      "learning_rate": 9.974529372743762e-06,
      "loss": 0.6711,
      "step": 41
    },
    {
      "epoch": 0.40384615384615385,
      "grad_norm": 0.7545289993286133,
      "learning_rate": 9.968561049466214e-06,
      "loss": 0.6767,
      "step": 42
    },
    {
      "epoch": 0.41346153846153844,
      "grad_norm": 0.6503568291664124,
      "learning_rate": 9.961967251474823e-06,
      "loss": 0.6519,
      "step": 43
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.7073460817337036,
      "learning_rate": 9.954748808839675e-06,
      "loss": 0.67,
      "step": 44
    },
    {
      "epoch": 0.4326923076923077,
      "grad_norm": 0.6822070479393005,
      "learning_rate": 9.946906630265184e-06,
      "loss": 0.6659,
      "step": 45
    },
    {
      "epoch": 0.4423076923076923,
      "grad_norm": 0.6585090160369873,
      "learning_rate": 9.938441702975689e-06,
      "loss": 0.6758,
      "step": 46
    },
    {
      "epoch": 0.4519230769230769,
      "grad_norm": 0.7007927894592285,
      "learning_rate": 9.92935509259118e-06,
      "loss": 0.6537,
      "step": 47
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.5756403207778931,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.6674,
      "step": 48
    },
    {
      "epoch": 0.47115384615384615,
      "grad_norm": 0.673179566860199,
      "learning_rate": 9.909321476180594e-06,
      "loss": 0.6574,
      "step": 49
    },
    {
      "epoch": 0.4807692307692308,
      "grad_norm": 0.5679298043251038,
      "learning_rate": 9.898376992116179e-06,
      "loss": 0.6641,
      "step": 50
    },
    {
      "epoch": 0.49038461538461536,
      "grad_norm": 0.6427024602890015,
      "learning_rate": 9.886815868562596e-06,
      "loss": 0.6628,
      "step": 51
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.6467263102531433,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.6669,
      "step": 52
    },
    {
      "epoch": 0.5096153846153846,
      "grad_norm": 0.5870878100395203,
      "learning_rate": 9.861849601988384e-06,
      "loss": 0.6642,
      "step": 53
    },
    {
      "epoch": 0.5192307692307693,
      "grad_norm": 0.5774953961372375,
      "learning_rate": 9.848447601883436e-06,
      "loss": 0.63,
      "step": 54
    },
    {
      "epoch": 0.5288461538461539,
      "grad_norm": 0.6247491240501404,
      "learning_rate": 9.834435247725032e-06,
      "loss": 0.6441,
      "step": 55
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.5765902996063232,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.6387,
      "step": 56
    },
    {
      "epoch": 0.5480769230769231,
      "grad_norm": 0.6525144577026367,
      "learning_rate": 9.804586609725499e-06,
      "loss": 0.653,
      "step": 57
    },
    {
      "epoch": 0.5576923076923077,
      "grad_norm": 0.6681458950042725,
      "learning_rate": 9.788754083424654e-06,
      "loss": 0.6394,
      "step": 58
    },
    {
      "epoch": 0.5673076923076923,
      "grad_norm": 0.5383596420288086,
      "learning_rate": 9.772318717677905e-06,
      "loss": 0.6415,
      "step": 59
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.6443718671798706,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.6371,
      "step": 60
    },
    {
      "epoch": 0.5865384615384616,
      "grad_norm": 0.6086981296539307,
      "learning_rate": 9.737647819437645e-06,
      "loss": 0.6474,
      "step": 61
    },
    {
      "epoch": 0.5961538461538461,
      "grad_norm": 0.557014524936676,
      "learning_rate": 9.719416651541839e-06,
      "loss": 0.6524,
      "step": 62
    },
    {
      "epoch": 0.6057692307692307,
      "grad_norm": 0.6446460485458374,
      "learning_rate": 9.700591372846096e-06,
      "loss": 0.6444,
      "step": 63
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.6329739689826965,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.6601,
      "step": 64
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.6192615628242493,
      "learning_rate": 9.661168036940071e-06,
      "loss": 0.6596,
      "step": 65
    },
    {
      "epoch": 0.6346153846153846,
      "grad_norm": 0.5624246001243591,
      "learning_rate": 9.640574942595195e-06,
      "loss": 0.6608,
      "step": 66
    },
    {
      "epoch": 0.6442307692307693,
      "grad_norm": 0.6665242910385132,
      "learning_rate": 9.619397662556434e-06,
      "loss": 0.6434,
      "step": 67
    },
    {
      "epoch": 0.6538461538461539,
      "grad_norm": 0.5866331458091736,
      "learning_rate": 9.597638862757255e-06,
      "loss": 0.6369,
      "step": 68
    },
    {
      "epoch": 0.6634615384615384,
      "grad_norm": 0.5087175369262695,
      "learning_rate": 9.5753012823366e-06,
      "loss": 0.6333,
      "step": 69
    },
    {
      "epoch": 0.6730769230769231,
      "grad_norm": 0.6268306970596313,
      "learning_rate": 9.552387733294081e-06,
      "loss": 0.647,
      "step": 70
    },
    {
      "epoch": 0.6826923076923077,
      "grad_norm": 0.5971682667732239,
      "learning_rate": 9.528901100135971e-06,
      "loss": 0.6293,
      "step": 71
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.6260504722595215,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.6329,
      "step": 72
    },
    {
      "epoch": 0.7019230769230769,
      "grad_norm": 0.5957349538803101,
      "learning_rate": 9.480220479843627e-06,
      "loss": 0.6373,
      "step": 73
    },
    {
      "epoch": 0.7115384615384616,
      "grad_norm": 0.5238078236579895,
      "learning_rate": 9.45503262094184e-06,
      "loss": 0.647,
      "step": 74
    },
    {
      "epoch": 0.7211538461538461,
      "grad_norm": 0.6080106496810913,
      "learning_rate": 9.4292839336179e-06,
      "loss": 0.6469,
      "step": 75
    },
    {
      "epoch": 0.7307692307692307,
      "grad_norm": 0.6035282015800476,
      "learning_rate": 9.40297765928369e-06,
      "loss": 0.6316,
      "step": 76
    },
    {
      "epoch": 0.7403846153846154,
      "grad_norm": 0.5384305715560913,
      "learning_rate": 9.376117109543769e-06,
      "loss": 0.6369,
      "step": 77
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.6150159239768982,
      "learning_rate": 9.348705665778479e-06,
      "loss": 0.6435,
      "step": 78
    },
    {
      "epoch": 0.7596153846153846,
      "grad_norm": 0.5395728945732117,
      "learning_rate": 9.320746778718274e-06,
      "loss": 0.6237,
      "step": 79
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.5888304710388184,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.6413,
      "step": 80
    },
    {
      "epoch": 0.7788461538461539,
      "grad_norm": 0.5334227681159973,
      "learning_rate": 9.263200821770462e-06,
      "loss": 0.6491,
      "step": 81
    },
    {
      "epoch": 0.7884615384615384,
      "grad_norm": 0.5316609144210815,
      "learning_rate": 9.233620996141421e-06,
      "loss": 0.6537,
      "step": 82
    },
    {
      "epoch": 0.7980769230769231,
      "grad_norm": 0.495564341545105,
      "learning_rate": 9.203508214822652e-06,
      "loss": 0.6395,
      "step": 83
    },
    {
      "epoch": 0.8076923076923077,
      "grad_norm": 0.5694448351860046,
      "learning_rate": 9.172866268606514e-06,
      "loss": 0.6428,
      "step": 84
    },
    {
      "epoch": 0.8173076923076923,
      "grad_norm": 0.49600711464881897,
      "learning_rate": 9.141699014900084e-06,
      "loss": 0.6321,
      "step": 85
    },
    {
      "epoch": 0.8269230769230769,
      "grad_norm": 0.63581782579422,
      "learning_rate": 9.110010377239552e-06,
      "loss": 0.6139,
      "step": 86
    },
    {
      "epoch": 0.8365384615384616,
      "grad_norm": 0.5633158683776855,
      "learning_rate": 9.077804344796302e-06,
      "loss": 0.6539,
      "step": 87
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.6227946281433105,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.634,
      "step": 88
    },
    {
      "epoch": 0.8557692307692307,
      "grad_norm": 0.5541588068008423,
      "learning_rate": 9.011856377401891e-06,
      "loss": 0.6336,
      "step": 89
    },
    {
      "epoch": 0.8653846153846154,
      "grad_norm": 0.5803648829460144,
      "learning_rate": 8.978122744408905e-06,
      "loss": 0.6327,
      "step": 90
    },
    {
      "epoch": 0.875,
      "grad_norm": 0.5258088111877441,
      "learning_rate": 8.943888319504456e-06,
      "loss": 0.6248,
      "step": 91
    },
    {
      "epoch": 0.8846153846153846,
      "grad_norm": 0.6071212887763977,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.6259,
      "step": 92
    },
    {
      "epoch": 0.8942307692307693,
      "grad_norm": 0.571426510810852,
      "learning_rate": 8.873934395068006e-06,
      "loss": 0.6248,
      "step": 93
    },
    {
      "epoch": 0.9038461538461539,
      "grad_norm": 0.5370094776153564,
      "learning_rate": 8.838223701790057e-06,
      "loss": 0.6125,
      "step": 94
    },
    {
      "epoch": 0.9134615384615384,
      "grad_norm": 0.5595990419387817,
      "learning_rate": 8.802029828000157e-06,
      "loss": 0.616,
      "step": 95
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.5226612687110901,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.6369,
      "step": 96
    },
    {
      "epoch": 0.9326923076923077,
      "grad_norm": 0.541027307510376,
      "learning_rate": 8.728210824415829e-06,
      "loss": 0.6244,
      "step": 97
    },
    {
      "epoch": 0.9423076923076923,
      "grad_norm": 0.5068326592445374,
      "learning_rate": 8.690594987436705e-06,
      "loss": 0.6323,
      "step": 98
    },
    {
      "epoch": 0.9519230769230769,
      "grad_norm": 0.5558856725692749,
      "learning_rate": 8.652514554406388e-06,
      "loss": 0.6283,
      "step": 99
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.5843573212623596,
      "learning_rate": 8.613974319136959e-06,
      "loss": 0.6339,
      "step": 100
    },
    {
      "epoch": 0.9711538461538461,
      "grad_norm": 0.4959486126899719,
      "learning_rate": 8.574979133323378e-06,
      "loss": 0.6298,
      "step": 101
    },
    {
      "epoch": 0.9807692307692307,
      "grad_norm": 0.5050930380821228,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.6211,
      "step": 102
    },
    {
      "epoch": 0.9903846153846154,
      "grad_norm": 0.5262631773948669,
      "learning_rate": 8.495643602586287e-06,
      "loss": 0.6135,
      "step": 103
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4946816563606262,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.6295,
      "step": 104
    },
    {
      "epoch": 1.0096153846153846,
      "grad_norm": 0.5233300924301147,
      "learning_rate": 8.414547910024035e-06,
      "loss": 0.5924,
      "step": 105
    },
    {
      "epoch": 1.0192307692307692,
      "grad_norm": 0.52397620677948,
      "learning_rate": 8.373352729660373e-06,
      "loss": 0.6012,
      "step": 106
    },
    {
      "epoch": 1.0288461538461537,
      "grad_norm": 0.612706184387207,
      "learning_rate": 8.331732889760021e-06,
      "loss": 0.6092,
      "step": 107
    },
    {
      "epoch": 1.0384615384615385,
      "grad_norm": 0.5767417550086975,
      "learning_rate": 8.289693629698564e-06,
      "loss": 0.5799,
      "step": 108
    },
    {
      "epoch": 1.0480769230769231,
      "grad_norm": 0.6034297943115234,
      "learning_rate": 8.247240241650918e-06,
      "loss": 0.5634,
      "step": 109
    },
    {
      "epoch": 1.0576923076923077,
      "grad_norm": 0.5869642496109009,
      "learning_rate": 8.204378069925121e-06,
      "loss": 0.5949,
      "step": 110
    },
    {
      "epoch": 1.0673076923076923,
      "grad_norm": 0.5452908873558044,
      "learning_rate": 8.16111251028955e-06,
      "loss": 0.5997,
      "step": 111
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.6273619532585144,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.5972,
      "step": 112
    },
    {
      "epoch": 1.0865384615384615,
      "grad_norm": 0.5914397239685059,
      "learning_rate": 8.073393063582386e-06,
      "loss": 0.5684,
      "step": 113
    },
    {
      "epoch": 1.0961538461538463,
      "grad_norm": 0.5578101277351379,
      "learning_rate": 8.0289502192041e-06,
      "loss": 0.5967,
      "step": 114
    },
    {
      "epoch": 1.1057692307692308,
      "grad_norm": 0.5323030352592468,
      "learning_rate": 7.984126070912519e-06,
      "loss": 0.5956,
      "step": 115
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.5913547873497009,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.5787,
      "step": 116
    },
    {
      "epoch": 1.125,
      "grad_norm": 0.5356422662734985,
      "learning_rate": 7.89335648089903e-06,
      "loss": 0.5913,
      "step": 117
    },
    {
      "epoch": 1.1346153846153846,
      "grad_norm": 0.5649756193161011,
      "learning_rate": 7.84742246584226e-06,
      "loss": 0.5776,
      "step": 118
    },
    {
      "epoch": 1.1442307692307692,
      "grad_norm": 0.5341564416885376,
      "learning_rate": 7.801129998764014e-06,
      "loss": 0.6113,
      "step": 119
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.6811894774436951,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.58,
      "step": 120
    },
    {
      "epoch": 1.1634615384615385,
      "grad_norm": 0.6039808392524719,
      "learning_rate": 7.70749306331863e-06,
      "loss": 0.5996,
      "step": 121
    },
    {
      "epoch": 1.1730769230769231,
      "grad_norm": 0.6511228680610657,
      "learning_rate": 7.660160382576683e-06,
      "loss": 0.5876,
      "step": 122
    },
    {
      "epoch": 1.1826923076923077,
      "grad_norm": 0.5735483765602112,
      "learning_rate": 7.612492823579744e-06,
      "loss": 0.5733,
      "step": 123
    },
    {
      "epoch": 1.1923076923076923,
      "grad_norm": 0.48792481422424316,
      "learning_rate": 7.564496387029532e-06,
      "loss": 0.6047,
      "step": 124
    },
    {
      "epoch": 1.2019230769230769,
      "grad_norm": 0.6753615736961365,
      "learning_rate": 7.516177115029002e-06,
      "loss": 0.5975,
      "step": 125
    },
    {
      "epoch": 1.2115384615384615,
      "grad_norm": 0.567205011844635,
      "learning_rate": 7.467541090321735e-06,
      "loss": 0.5921,
      "step": 126
    },
    {
      "epoch": 1.2211538461538463,
      "grad_norm": 0.623609185218811,
      "learning_rate": 7.4185944355261996e-06,
      "loss": 0.5726,
      "step": 127
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.4969348609447479,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.5844,
      "step": 128
    },
    {
      "epoch": 1.2403846153846154,
      "grad_norm": 0.5775588750839233,
      "learning_rate": 7.319793920889171e-06,
      "loss": 0.5758,
      "step": 129
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.5946178436279297,
      "learning_rate": 7.269952498697734e-06,
      "loss": 0.5936,
      "step": 130
    },
    {
      "epoch": 1.2596153846153846,
      "grad_norm": 0.501392662525177,
      "learning_rate": 7.219825320152411e-06,
      "loss": 0.591,
      "step": 131
    },
    {
      "epoch": 1.2692307692307692,
      "grad_norm": 0.6227275133132935,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.6117,
      "step": 132
    },
    {
      "epoch": 1.2788461538461537,
      "grad_norm": 0.49991685152053833,
      "learning_rate": 7.118738970516944e-06,
      "loss": 0.5771,
      "step": 133
    },
    {
      "epoch": 1.2884615384615383,
      "grad_norm": 0.4866224229335785,
      "learning_rate": 7.067792524832604e-06,
      "loss": 0.6068,
      "step": 134
    },
    {
      "epoch": 1.2980769230769231,
      "grad_norm": 0.5264644622802734,
      "learning_rate": 7.016585772004026e-06,
      "loss": 0.5748,
      "step": 135
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.5308865904808044,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.5829,
      "step": 136
    },
    {
      "epoch": 1.3173076923076923,
      "grad_norm": 0.530244767665863,
      "learning_rate": 6.913417161825449e-06,
      "loss": 0.5679,
      "step": 137
    },
    {
      "epoch": 1.3269230769230769,
      "grad_norm": 0.4928182065486908,
      "learning_rate": 6.8614682920097265e-06,
      "loss": 0.5679,
      "step": 138
    },
    {
      "epoch": 1.3365384615384617,
      "grad_norm": 0.523705244064331,
      "learning_rate": 6.809285088483361e-06,
      "loss": 0.5587,
      "step": 139
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.48403921723365784,
      "learning_rate": 6.7568741204067145e-06,
      "loss": 0.5717,
      "step": 140
    },
    {
      "epoch": 1.3557692307692308,
      "grad_norm": 0.551434338092804,
      "learning_rate": 6.704241985612625e-06,
      "loss": 0.6041,
      "step": 141
    },
    {
      "epoch": 1.3653846153846154,
      "grad_norm": 0.501440703868866,
      "learning_rate": 6.651395309775837e-06,
      "loss": 0.5718,
      "step": 142
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.46742498874664307,
      "learning_rate": 6.598340745578908e-06,
      "loss": 0.5718,
      "step": 143
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.5607274770736694,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.608,
      "step": 144
    },
    {
      "epoch": 1.3942307692307692,
      "grad_norm": 0.5115028023719788,
      "learning_rate": 6.491634692845781e-06,
      "loss": 0.5636,
      "step": 145
    },
    {
      "epoch": 1.4038461538461537,
      "grad_norm": 0.5423773527145386,
      "learning_rate": 6.437996637160086e-06,
      "loss": 0.5734,
      "step": 146
    },
    {
      "epoch": 1.4134615384615383,
      "grad_norm": 0.4873090982437134,
      "learning_rate": 6.384177557124247e-06,
      "loss": 0.6,
      "step": 147
    },
    {
      "epoch": 1.4230769230769231,
      "grad_norm": 0.4671023190021515,
      "learning_rate": 6.330184227833376e-06,
      "loss": 0.5843,
      "step": 148
    },
    {
      "epoch": 1.4326923076923077,
      "grad_norm": 0.5089084506034851,
      "learning_rate": 6.276023446318214e-06,
      "loss": 0.5755,
      "step": 149
    },
    {
      "epoch": 1.4423076923076923,
      "grad_norm": 0.4525885283946991,
      "learning_rate": 6.2217020306894705e-06,
      "loss": 0.5776,
      "step": 150
    },
    {
      "epoch": 1.4519230769230769,
      "grad_norm": 0.5407394170761108,
      "learning_rate": 6.1672268192795285e-06,
      "loss": 0.5902,
      "step": 151
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.4763994812965393,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.5825,
      "step": 152
    },
    {
      "epoch": 1.4711538461538463,
      "grad_norm": 0.480131059885025,
      "learning_rate": 6.057842458386315e-06,
      "loss": 0.5854,
      "step": 153
    },
    {
      "epoch": 1.4807692307692308,
      "grad_norm": 0.48260608315467834,
      "learning_rate": 6.002947078916365e-06,
      "loss": 0.5766,
      "step": 154
    },
    {
      "epoch": 1.4903846153846154,
      "grad_norm": 0.44361093640327454,
      "learning_rate": 5.947925441958393e-06,
      "loss": 0.5834,
      "step": 155
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.4906068742275238,
      "learning_rate": 5.892784473993184e-06,
      "loss": 0.5856,
      "step": 156
    },
    {
      "epoch": 1.5096153846153846,
      "grad_norm": 0.48007476329803467,
      "learning_rate": 5.837531116523683e-06,
      "loss": 0.5735,
      "step": 157
    },
    {
      "epoch": 1.5192307692307692,
      "grad_norm": 0.4426267147064209,
      "learning_rate": 5.782172325201155e-06,
      "loss": 0.5799,
      "step": 158
    },
    {
      "epoch": 1.5288461538461537,
      "grad_norm": 0.45236214995384216,
      "learning_rate": 5.726715068949564e-06,
      "loss": 0.6024,
      "step": 159
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.42147666215896606,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.5834,
      "step": 160
    },
    {
      "epoch": 1.5480769230769231,
      "grad_norm": 0.42501798272132874,
      "learning_rate": 5.615533098453215e-06,
      "loss": 0.5828,
      "step": 161
    },
    {
      "epoch": 1.5576923076923077,
      "grad_norm": 0.4713789224624634,
      "learning_rate": 5.559822380516539e-06,
      "loss": 0.5965,
      "step": 162
    },
    {
      "epoch": 1.5673076923076923,
      "grad_norm": 0.438809871673584,
      "learning_rate": 5.504041188505022e-06,
      "loss": 0.572,
      "step": 163
    },
    {
      "epoch": 1.5769230769230769,
      "grad_norm": 0.5145413279533386,
      "learning_rate": 5.448196544517168e-06,
      "loss": 0.5698,
      "step": 164
    },
    {
      "epoch": 1.5865384615384617,
      "grad_norm": 0.5000028610229492,
      "learning_rate": 5.392295478639226e-06,
      "loss": 0.5943,
      "step": 165
    },
    {
      "epoch": 1.5961538461538463,
      "grad_norm": 0.5032768845558167,
      "learning_rate": 5.336345028060199e-06,
      "loss": 0.5881,
      "step": 166
    },
    {
      "epoch": 1.6057692307692308,
      "grad_norm": 0.4803272783756256,
      "learning_rate": 5.2803522361859596e-06,
      "loss": 0.5482,
      "step": 167
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.4768068790435791,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.5694,
      "step": 168
    },
    {
      "epoch": 1.625,
      "grad_norm": 0.5475027561187744,
      "learning_rate": 5.168267827938971e-06,
      "loss": 0.5868,
      "step": 169
    },
    {
      "epoch": 1.6346153846153846,
      "grad_norm": 0.5247706770896912,
      "learning_rate": 5.112190321479026e-06,
      "loss": 0.5903,
      "step": 170
    },
    {
      "epoch": 1.6442307692307692,
      "grad_norm": 0.45129239559173584,
      "learning_rate": 5.05609869177323e-06,
      "loss": 0.5673,
      "step": 171
    },
    {
      "epoch": 1.6538461538461537,
      "grad_norm": 0.4390396773815155,
      "learning_rate": 5e-06,
      "loss": 0.5749,
      "step": 172
    },
    {
      "epoch": 1.6634615384615383,
      "grad_norm": 0.5230726003646851,
      "learning_rate": 4.943901308226771e-06,
      "loss": 0.5734,
      "step": 173
    },
    {
      "epoch": 1.6730769230769231,
      "grad_norm": 0.5058849453926086,
      "learning_rate": 4.887809678520976e-06,
      "loss": 0.5902,
      "step": 174
    },
    {
      "epoch": 1.6826923076923077,
      "grad_norm": 0.5016485452651978,
      "learning_rate": 4.831732172061032e-06,
      "loss": 0.5726,
      "step": 175
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.5082340836524963,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.5868,
      "step": 176
    },
    {
      "epoch": 1.7019230769230769,
      "grad_norm": 0.43624982237815857,
      "learning_rate": 4.719647763814041e-06,
      "loss": 0.6001,
      "step": 177
    },
    {
      "epoch": 1.7115384615384617,
      "grad_norm": 0.4955120086669922,
      "learning_rate": 4.663654971939802e-06,
      "loss": 0.5661,
      "step": 178
    },
    {
      "epoch": 1.7211538461538463,
      "grad_norm": 0.5228602886199951,
      "learning_rate": 4.6077045213607765e-06,
      "loss": 0.5749,
      "step": 179
    },
    {
      "epoch": 1.7307692307692308,
      "grad_norm": 0.4157963693141937,
      "learning_rate": 4.551803455482833e-06,
      "loss": 0.5781,
      "step": 180
    },
    {
      "epoch": 1.7403846153846154,
      "grad_norm": 0.4823377728462219,
      "learning_rate": 4.4959588114949785e-06,
      "loss": 0.5713,
      "step": 181
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.49718138575553894,
      "learning_rate": 4.4401776194834615e-06,
      "loss": 0.5971,
      "step": 182
    },
    {
      "epoch": 1.7596153846153846,
      "grad_norm": 0.4611460566520691,
      "learning_rate": 4.384466901546786e-06,
      "loss": 0.5757,
      "step": 183
    },
    {
      "epoch": 1.7692307692307692,
      "grad_norm": 0.4549019932746887,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.5823,
      "step": 184
    },
    {
      "epoch": 1.7788461538461537,
      "grad_norm": 0.45323625206947327,
      "learning_rate": 4.273284931050438e-06,
      "loss": 0.5728,
      "step": 185
    },
    {
      "epoch": 1.7884615384615383,
      "grad_norm": 0.49955999851226807,
      "learning_rate": 4.217827674798845e-06,
      "loss": 0.5789,
      "step": 186
    },
    {
      "epoch": 1.7980769230769231,
      "grad_norm": 0.5281660556793213,
      "learning_rate": 4.162468883476319e-06,
      "loss": 0.5449,
      "step": 187
    },
    {
      "epoch": 1.8076923076923077,
      "grad_norm": 0.46816229820251465,
      "learning_rate": 4.107215526006818e-06,
      "loss": 0.5789,
      "step": 188
    },
    {
      "epoch": 1.8173076923076923,
      "grad_norm": 0.5172679424285889,
      "learning_rate": 4.052074558041608e-06,
      "loss": 0.5634,
      "step": 189
    },
    {
      "epoch": 1.8269230769230769,
      "grad_norm": 0.5312298536300659,
      "learning_rate": 3.997052921083637e-06,
      "loss": 0.5795,
      "step": 190
    },
    {
      "epoch": 1.8365384615384617,
      "grad_norm": 0.40530455112457275,
      "learning_rate": 3.9421575416136866e-06,
      "loss": 0.5707,
      "step": 191
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.4245462715625763,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.5764,
      "step": 192
    },
    {
      "epoch": 1.8557692307692308,
      "grad_norm": 0.4844348430633545,
      "learning_rate": 3.832773180720475e-06,
      "loss": 0.5847,
      "step": 193
    },
    {
      "epoch": 1.8653846153846154,
      "grad_norm": 0.3969320058822632,
      "learning_rate": 3.778297969310529e-06,
      "loss": 0.5776,
      "step": 194
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.4481745958328247,
      "learning_rate": 3.723976553681787e-06,
      "loss": 0.5834,
      "step": 195
    },
    {
      "epoch": 1.8846153846153846,
      "grad_norm": 0.4347081482410431,
      "learning_rate": 3.669815772166625e-06,
      "loss": 0.5761,
      "step": 196
    },
    {
      "epoch": 1.8942307692307692,
      "grad_norm": 0.4153216779232025,
      "learning_rate": 3.6158224428757538e-06,
      "loss": 0.5648,
      "step": 197
    },
    {
      "epoch": 1.9038461538461537,
      "grad_norm": 0.4166128933429718,
      "learning_rate": 3.562003362839914e-06,
      "loss": 0.5709,
      "step": 198
    },
    {
      "epoch": 1.9134615384615383,
      "grad_norm": 0.45052772760391235,
      "learning_rate": 3.50836530715422e-06,
      "loss": 0.5691,
      "step": 199
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.43254920840263367,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.5632,
      "step": 200
    },
    {
      "epoch": 1.9326923076923077,
      "grad_norm": 0.4649442136287689,
      "learning_rate": 3.4016592544210937e-06,
      "loss": 0.5666,
      "step": 201
    },
    {
      "epoch": 1.9423076923076923,
      "grad_norm": 0.4219006597995758,
      "learning_rate": 3.3486046902241663e-06,
      "loss": 0.588,
      "step": 202
    },
    {
      "epoch": 1.9519230769230769,
      "grad_norm": 0.43250784277915955,
      "learning_rate": 3.295758014387375e-06,
      "loss": 0.5787,
      "step": 203
    },
    {
      "epoch": 1.9615384615384617,
      "grad_norm": 0.4511379897594452,
      "learning_rate": 3.2431258795932863e-06,
      "loss": 0.5766,
      "step": 204
    },
    {
      "epoch": 1.9711538461538463,
      "grad_norm": 0.41626155376434326,
      "learning_rate": 3.1907149115166403e-06,
      "loss": 0.5714,
      "step": 205
    },
    {
      "epoch": 1.9807692307692308,
      "grad_norm": 0.4738382399082184,
      "learning_rate": 3.1385317079902743e-06,
      "loss": 0.5858,
      "step": 206
    },
    {
      "epoch": 1.9903846153846154,
      "grad_norm": 0.4208838939666748,
      "learning_rate": 3.0865828381745515e-06,
      "loss": 0.5735,
      "step": 207
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.4607163369655609,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.5677,
      "step": 208
    },
    {
      "epoch": 2.0096153846153846,
      "grad_norm": 0.42707422375679016,
      "learning_rate": 2.9834142279959754e-06,
      "loss": 0.5413,
      "step": 209
    },
    {
      "epoch": 2.019230769230769,
      "grad_norm": 0.4337131381034851,
      "learning_rate": 2.932207475167398e-06,
      "loss": 0.548,
      "step": 210
    },
    {
      "epoch": 2.0288461538461537,
      "grad_norm": 0.3963165283203125,
      "learning_rate": 2.8812610294830568e-06,
      "loss": 0.5441,
      "step": 211
    },
    {
      "epoch": 2.0384615384615383,
      "grad_norm": 0.3808555006980896,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.5317,
      "step": 212
    },
    {
      "epoch": 2.048076923076923,
      "grad_norm": 0.41117677092552185,
      "learning_rate": 2.7801746798475905e-06,
      "loss": 0.5275,
      "step": 213
    },
    {
      "epoch": 2.0576923076923075,
      "grad_norm": 0.3876531422138214,
      "learning_rate": 2.7300475013022666e-06,
      "loss": 0.5631,
      "step": 214
    },
    {
      "epoch": 2.0673076923076925,
      "grad_norm": 0.39290904998779297,
      "learning_rate": 2.6802060791108304e-06,
      "loss": 0.5447,
      "step": 215
    },
    {
      "epoch": 2.076923076923077,
      "grad_norm": 0.38423505425453186,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.535,
      "step": 216
    },
    {
      "epoch": 2.0865384615384617,
      "grad_norm": 0.3680586814880371,
      "learning_rate": 2.5814055644738013e-06,
      "loss": 0.5534,
      "step": 217
    },
    {
      "epoch": 2.0961538461538463,
      "grad_norm": 0.4062810242176056,
      "learning_rate": 2.532458909678266e-06,
      "loss": 0.5499,
      "step": 218
    },
    {
      "epoch": 2.105769230769231,
      "grad_norm": 0.39526402950286865,
      "learning_rate": 2.483822884971e-06,
      "loss": 0.5393,
      "step": 219
    },
    {
      "epoch": 2.1153846153846154,
      "grad_norm": 0.4416036903858185,
      "learning_rate": 2.43550361297047e-06,
      "loss": 0.5432,
      "step": 220
    },
    {
      "epoch": 2.125,
      "grad_norm": 0.40109318494796753,
      "learning_rate": 2.387507176420256e-06,
      "loss": 0.5167,
      "step": 221
    },
    {
      "epoch": 2.1346153846153846,
      "grad_norm": 0.37532350420951843,
      "learning_rate": 2.339839617423318e-06,
      "loss": 0.5517,
      "step": 222
    },
    {
      "epoch": 2.144230769230769,
      "grad_norm": 0.36060187220573425,
      "learning_rate": 2.2925069366813718e-06,
      "loss": 0.5388,
      "step": 223
    },
    {
      "epoch": 2.1538461538461537,
      "grad_norm": 0.41168034076690674,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.5351,
      "step": 224
    },
    {
      "epoch": 2.1634615384615383,
      "grad_norm": 0.3630206286907196,
      "learning_rate": 2.1988700012359865e-06,
      "loss": 0.5398,
      "step": 225
    },
    {
      "epoch": 2.173076923076923,
      "grad_norm": 0.36833643913269043,
      "learning_rate": 2.1525775341577404e-06,
      "loss": 0.5399,
      "step": 226
    },
    {
      "epoch": 2.1826923076923075,
      "grad_norm": 0.3559540808200836,
      "learning_rate": 2.1066435191009717e-06,
      "loss": 0.535,
      "step": 227
    },
    {
      "epoch": 2.1923076923076925,
      "grad_norm": 0.3911932706832886,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.5203,
      "step": 228
    },
    {
      "epoch": 2.201923076923077,
      "grad_norm": 0.3908790647983551,
      "learning_rate": 2.0158739290874822e-06,
      "loss": 0.5592,
      "step": 229
    },
    {
      "epoch": 2.2115384615384617,
      "grad_norm": 0.38927578926086426,
      "learning_rate": 1.971049780795901e-06,
      "loss": 0.5399,
      "step": 230
    },
    {
      "epoch": 2.2211538461538463,
      "grad_norm": 0.3874114751815796,
      "learning_rate": 1.9266069364176144e-06,
      "loss": 0.522,
      "step": 231
    },
    {
      "epoch": 2.230769230769231,
      "grad_norm": 0.37980931997299194,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.5227,
      "step": 232
    },
    {
      "epoch": 2.2403846153846154,
      "grad_norm": 0.3756473958492279,
      "learning_rate": 1.838887489710452e-06,
      "loss": 0.537,
      "step": 233
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.36786749958992004,
      "learning_rate": 1.7956219300748796e-06,
      "loss": 0.5505,
      "step": 234
    },
    {
      "epoch": 2.2596153846153846,
      "grad_norm": 0.361908495426178,
      "learning_rate": 1.7527597583490825e-06,
      "loss": 0.5678,
      "step": 235
    },
    {
      "epoch": 2.269230769230769,
      "grad_norm": 0.339290052652359,
      "learning_rate": 1.7103063703014372e-06,
      "loss": 0.5369,
      "step": 236
    },
    {
      "epoch": 2.2788461538461537,
      "grad_norm": 0.412014365196228,
      "learning_rate": 1.6682671102399806e-06,
      "loss": 0.5319,
      "step": 237
    },
    {
      "epoch": 2.2884615384615383,
      "grad_norm": 0.3827563524246216,
      "learning_rate": 1.6266472703396286e-06,
      "loss": 0.5492,
      "step": 238
    },
    {
      "epoch": 2.298076923076923,
      "grad_norm": 0.35775306820869446,
      "learning_rate": 1.5854520899759656e-06,
      "loss": 0.5322,
      "step": 239
    },
    {
      "epoch": 2.3076923076923075,
      "grad_norm": 0.3594055771827698,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.553,
      "step": 240
    },
    {
      "epoch": 2.3173076923076925,
      "grad_norm": 0.3411286473274231,
      "learning_rate": 1.5043563974137132e-06,
      "loss": 0.546,
      "step": 241
    },
    {
      "epoch": 2.326923076923077,
      "grad_norm": 0.42784562706947327,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.5357,
      "step": 242
    },
    {
      "epoch": 2.3365384615384617,
      "grad_norm": 0.387997567653656,
      "learning_rate": 1.4250208666766235e-06,
      "loss": 0.5192,
      "step": 243
    },
    {
      "epoch": 2.3461538461538463,
      "grad_norm": 0.36996373534202576,
      "learning_rate": 1.3860256808630429e-06,
      "loss": 0.5457,
      "step": 244
    },
    {
      "epoch": 2.355769230769231,
      "grad_norm": 0.3782255947589874,
      "learning_rate": 1.3474854455936126e-06,
      "loss": 0.5543,
      "step": 245
    },
    {
      "epoch": 2.3653846153846154,
      "grad_norm": 0.38931819796562195,
      "learning_rate": 1.3094050125632973e-06,
      "loss": 0.5661,
      "step": 246
    },
    {
      "epoch": 2.375,
      "grad_norm": 0.36775466799736023,
      "learning_rate": 1.2717891755841722e-06,
      "loss": 0.5461,
      "step": 247
    },
    {
      "epoch": 2.3846153846153846,
      "grad_norm": 0.3692035675048828,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.5263,
      "step": 248
    },
    {
      "epoch": 2.394230769230769,
      "grad_norm": 0.3765394389629364,
      "learning_rate": 1.1979701719998454e-06,
      "loss": 0.5478,
      "step": 249
    },
    {
      "epoch": 2.4038461538461537,
      "grad_norm": 0.36372944712638855,
      "learning_rate": 1.1617762982099446e-06,
      "loss": 0.5534,
      "step": 250
    },
    {
      "epoch": 2.4134615384615383,
      "grad_norm": 0.3634006083011627,
      "learning_rate": 1.1260656049319957e-06,
      "loss": 0.5366,
      "step": 251
    },
    {
      "epoch": 2.423076923076923,
      "grad_norm": 0.3760945796966553,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.5351,
      "step": 252
    },
    {
      "epoch": 2.4326923076923075,
      "grad_norm": 0.3575971722602844,
      "learning_rate": 1.0561116804955451e-06,
      "loss": 0.5445,
      "step": 253
    },
    {
      "epoch": 2.4423076923076925,
      "grad_norm": 0.3454532325267792,
      "learning_rate": 1.0218772555910955e-06,
      "loss": 0.5545,
      "step": 254
    },
    {
      "epoch": 2.451923076923077,
      "grad_norm": 0.3429204225540161,
      "learning_rate": 9.881436225981107e-07,
      "loss": 0.5489,
      "step": 255
    },
    {
      "epoch": 2.4615384615384617,
      "grad_norm": 0.37573421001434326,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.524,
      "step": 256
    },
    {
      "epoch": 2.4711538461538463,
      "grad_norm": 0.3568671941757202,
      "learning_rate": 9.221956552036992e-07,
      "loss": 0.5328,
      "step": 257
    },
    {
      "epoch": 2.480769230769231,
      "grad_norm": 0.3483606278896332,
      "learning_rate": 8.899896227604509e-07,
      "loss": 0.561,
      "step": 258
    },
    {
      "epoch": 2.4903846153846154,
      "grad_norm": 0.34958329796791077,
      "learning_rate": 8.58300985099918e-07,
      "loss": 0.5525,
      "step": 259
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.35783374309539795,
      "learning_rate": 8.271337313934869e-07,
      "loss": 0.5313,
      "step": 260
    },
    {
      "epoch": 2.5096153846153846,
      "grad_norm": 0.34240663051605225,
      "learning_rate": 7.964917851773496e-07,
      "loss": 0.5327,
      "step": 261
    },
    {
      "epoch": 2.519230769230769,
      "grad_norm": 0.3467879891395569,
      "learning_rate": 7.663790038585794e-07,
      "loss": 0.5375,
      "step": 262
    },
    {
      "epoch": 2.5288461538461537,
      "grad_norm": 0.34128859639167786,
      "learning_rate": 7.367991782295392e-07,
      "loss": 0.5497,
      "step": 263
    },
    {
      "epoch": 2.5384615384615383,
      "grad_norm": 0.35784712433815,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.5425,
      "step": 264
    },
    {
      "epoch": 2.5480769230769234,
      "grad_norm": 0.359834223985672,
      "learning_rate": 6.792532212817271e-07,
      "loss": 0.548,
      "step": 265
    },
    {
      "epoch": 2.5576923076923075,
      "grad_norm": 0.36030974984169006,
      "learning_rate": 6.512943342215234e-07,
      "loss": 0.5527,
      "step": 266
    },
    {
      "epoch": 2.5673076923076925,
      "grad_norm": 0.3154827058315277,
      "learning_rate": 6.238828904562316e-07,
      "loss": 0.5562,
      "step": 267
    },
    {
      "epoch": 2.5769230769230766,
      "grad_norm": 0.35393068194389343,
      "learning_rate": 5.9702234071631e-07,
      "loss": 0.5426,
      "step": 268
    },
    {
      "epoch": 2.5865384615384617,
      "grad_norm": 0.3374582529067993,
      "learning_rate": 5.707160663821009e-07,
      "loss": 0.5482,
      "step": 269
    },
    {
      "epoch": 2.5961538461538463,
      "grad_norm": 0.3574032485485077,
      "learning_rate": 5.449673790581611e-07,
      "loss": 0.5282,
      "step": 270
    },
    {
      "epoch": 2.605769230769231,
      "grad_norm": 0.36037665605545044,
      "learning_rate": 5.197795201563744e-07,
      "loss": 0.5496,
      "step": 271
    },
    {
      "epoch": 2.6153846153846154,
      "grad_norm": 0.3468829095363617,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.5476,
      "step": 272
    },
    {
      "epoch": 2.625,
      "grad_norm": 0.3324459493160248,
      "learning_rate": 4.710988998640298e-07,
      "loss": 0.5586,
      "step": 273
    },
    {
      "epoch": 2.6346153846153846,
      "grad_norm": 0.33732524514198303,
      "learning_rate": 4.4761226670592074e-07,
      "loss": 0.5531,
      "step": 274
    },
    {
      "epoch": 2.644230769230769,
      "grad_norm": 0.35106703639030457,
      "learning_rate": 4.2469871766340096e-07,
      "loss": 0.5454,
      "step": 275
    },
    {
      "epoch": 2.6538461538461537,
      "grad_norm": 0.36037111282348633,
      "learning_rate": 4.0236113724274716e-07,
      "loss": 0.5415,
      "step": 276
    },
    {
      "epoch": 2.6634615384615383,
      "grad_norm": 0.3727104961872101,
      "learning_rate": 3.8060233744356634e-07,
      "loss": 0.5286,
      "step": 277
    },
    {
      "epoch": 2.6730769230769234,
      "grad_norm": 0.3624821901321411,
      "learning_rate": 3.5942505740480583e-07,
      "loss": 0.5483,
      "step": 278
    },
    {
      "epoch": 2.6826923076923075,
      "grad_norm": 0.34953874349594116,
      "learning_rate": 3.3883196305992906e-07,
      "loss": 0.5397,
      "step": 279
    },
    {
      "epoch": 2.6923076923076925,
      "grad_norm": 0.31536760926246643,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.5813,
      "step": 280
    },
    {
      "epoch": 2.7019230769230766,
      "grad_norm": 0.34410393238067627,
      "learning_rate": 2.9940862715390483e-07,
      "loss": 0.5477,
      "step": 281
    },
    {
      "epoch": 2.7115384615384617,
      "grad_norm": 0.33748528361320496,
      "learning_rate": 2.8058334845816214e-07,
      "loss": 0.5699,
      "step": 282
    },
    {
      "epoch": 2.7211538461538463,
      "grad_norm": 0.3506697416305542,
      "learning_rate": 2.6235218056235633e-07,
      "loss": 0.5141,
      "step": 283
    },
    {
      "epoch": 2.730769230769231,
      "grad_norm": 0.3356882333755493,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.5593,
      "step": 284
    },
    {
      "epoch": 2.7403846153846154,
      "grad_norm": 0.34487199783325195,
      "learning_rate": 2.276812823220964e-07,
      "loss": 0.5575,
      "step": 285
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.333774596452713,
      "learning_rate": 2.1124591657534776e-07,
      "loss": 0.5249,
      "step": 286
    },
    {
      "epoch": 2.7596153846153846,
      "grad_norm": 0.3721064031124115,
      "learning_rate": 1.9541339027450256e-07,
      "loss": 0.5234,
      "step": 287
    },
    {
      "epoch": 2.769230769230769,
      "grad_norm": 0.35415223240852356,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.5215,
      "step": 288
    },
    {
      "epoch": 2.7788461538461537,
      "grad_norm": 0.3103128969669342,
      "learning_rate": 1.6556475227496816e-07,
      "loss": 0.5574,
      "step": 289
    },
    {
      "epoch": 2.7884615384615383,
      "grad_norm": 0.35190919041633606,
      "learning_rate": 1.5155239811656562e-07,
      "loss": 0.5465,
      "step": 290
    },
    {
      "epoch": 2.7980769230769234,
      "grad_norm": 0.3369409143924713,
      "learning_rate": 1.3815039801161723e-07,
      "loss": 0.5469,
      "step": 291
    },
    {
      "epoch": 2.8076923076923075,
      "grad_norm": 0.3374597430229187,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.5399,
      "step": 292
    },
    {
      "epoch": 2.8173076923076925,
      "grad_norm": 0.3631032407283783,
      "learning_rate": 1.1318413143740436e-07,
      "loss": 0.5451,
      "step": 293
    },
    {
      "epoch": 2.8269230769230766,
      "grad_norm": 0.30834081768989563,
      "learning_rate": 1.0162300788382263e-07,
      "loss": 0.5584,
      "step": 294
    },
    {
      "epoch": 2.8365384615384617,
      "grad_norm": 0.3387204706668854,
      "learning_rate": 9.0678523819408e-08,
      "loss": 0.5332,
      "step": 295
    },
    {
      "epoch": 2.8461538461538463,
      "grad_norm": 0.35595011711120605,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.5622,
      "step": 296
    },
    {
      "epoch": 2.855769230769231,
      "grad_norm": 0.33064529299736023,
      "learning_rate": 7.064490740882057e-08,
      "loss": 0.5351,
      "step": 297
    },
    {
      "epoch": 2.8653846153846154,
      "grad_norm": 0.3262805938720703,
      "learning_rate": 6.15582970243117e-08,
      "loss": 0.5532,
      "step": 298
    },
    {
      "epoch": 2.875,
      "grad_norm": 0.3150298297405243,
      "learning_rate": 5.3093369734816824e-08,
      "loss": 0.5441,
      "step": 299
    },
    {
      "epoch": 2.8846153846153846,
      "grad_norm": 0.32384297251701355,
      "learning_rate": 4.52511911603265e-08,
      "loss": 0.5482,
      "step": 300
    },
    {
      "epoch": 2.894230769230769,
      "grad_norm": 0.4312896728515625,
      "learning_rate": 3.8032748525179684e-08,
      "loss": 0.5473,
      "step": 301
    },
    {
      "epoch": 2.9038461538461537,
      "grad_norm": 0.3357475697994232,
      "learning_rate": 3.143895053378698e-08,
      "loss": 0.5371,
      "step": 302
    },
    {
      "epoch": 2.9134615384615383,
      "grad_norm": 0.34379279613494873,
      "learning_rate": 2.547062725623828e-08,
      "loss": 0.5424,
      "step": 303
    },
    {
      "epoch": 2.9230769230769234,
      "grad_norm": 0.3341004252433777,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.5553,
      "step": 304
    },
    {
      "epoch": 2.9326923076923075,
      "grad_norm": 0.3333733081817627,
      "learning_rate": 1.541333133436018e-08,
      "loss": 0.5281,
      "step": 305
    },
    {
      "epoch": 2.9423076923076925,
      "grad_norm": 0.3348696529865265,
      "learning_rate": 1.132562476771959e-08,
      "loss": 0.5465,
      "step": 306
    },
    {
      "epoch": 2.9519230769230766,
      "grad_norm": 0.35991808772087097,
      "learning_rate": 7.865924910916977e-09,
      "loss": 0.5219,
      "step": 307
    },
    {
      "epoch": 2.9615384615384617,
      "grad_norm": 0.3342251479625702,
      "learning_rate": 5.034667293427053e-09,
      "loss": 0.5476,
      "step": 308
    },
    {
      "epoch": 2.9711538461538463,
      "grad_norm": 0.32228055596351624,
      "learning_rate": 2.8322083323334417e-09,
      "loss": 0.5605,
      "step": 309
    },
    {
      "epoch": 2.980769230769231,
      "grad_norm": 0.31481924653053284,
      "learning_rate": 1.2588252874673469e-09,
      "loss": 0.5469,
      "step": 310
    },
    {
      "epoch": 2.9903846153846154,
      "grad_norm": 0.31989800930023193,
      "learning_rate": 3.147162264971471e-10,
      "loss": 0.548,
      "step": 311
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.3253592550754547,
      "learning_rate": 0.0,
      "loss": 0.5313,
      "step": 312
    },
    {
      "epoch": 3.0,
      "step": 312,
      "total_flos": 582499086630912.0,
      "train_loss": 0.6133231129019688,
      "train_runtime": 4436.2424,
      "train_samples_per_second": 6.73,
      "train_steps_per_second": 0.07
    }
  ],
  "logging_steps": 1,
  "max_steps": 312,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 582499086630912.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}