{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 234,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008547008547008548,
      "grad_norm": 0.25262296199798584,
      "learning_rate": 2e-05,
      "loss": 1.288,
      "step": 1
    },
    {
      "epoch": 0.017094017094017096,
      "grad_norm": 0.2176843136548996,
      "learning_rate": 4e-05,
      "loss": 0.9905,
      "step": 2
    },
    {
      "epoch": 0.02564102564102564,
      "grad_norm": 0.2575605809688568,
      "learning_rate": 6e-05,
      "loss": 1.3014,
      "step": 3
    },
    {
      "epoch": 0.03418803418803419,
      "grad_norm": 0.1782544106245041,
      "learning_rate": 8e-05,
      "loss": 1.5081,
      "step": 4
    },
    {
      "epoch": 0.042735042735042736,
      "grad_norm": 0.33221080899238586,
      "learning_rate": 0.0001,
      "loss": 1.1328,
      "step": 5
    },
    {
      "epoch": 0.05128205128205128,
      "grad_norm": 0.28970077633857727,
      "learning_rate": 0.00012,
      "loss": 1.4862,
      "step": 6
    },
    {
      "epoch": 0.05982905982905983,
      "grad_norm": 0.32833603024482727,
      "learning_rate": 0.00014,
      "loss": 1.0591,
      "step": 7
    },
    {
      "epoch": 0.06837606837606838,
      "grad_norm": 0.35267820954322815,
      "learning_rate": 0.00016,
      "loss": 1.0766,
      "step": 8
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 0.402508020401001,
      "learning_rate": 0.00018,
      "loss": 0.9063,
      "step": 9
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 0.316371351480484,
      "learning_rate": 0.0002,
      "loss": 1.1015,
      "step": 10
    },
    {
      "epoch": 0.09401709401709402,
      "grad_norm": 0.32572099566459656,
      "learning_rate": 0.0001999976474595967,
      "loss": 1.08,
      "step": 11
    },
    {
      "epoch": 0.10256410256410256,
      "grad_norm": 0.18087100982666016,
      "learning_rate": 0.00019999058994907564,
      "loss": 0.8118,
      "step": 12
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 0.30534857511520386,
      "learning_rate": 0.00019997882780049847,
      "loss": 0.9287,
      "step": 13
    },
    {
      "epoch": 0.11965811965811966,
      "grad_norm": 0.332878053188324,
      "learning_rate": 0.0001999623615672837,
      "loss": 1.0165,
      "step": 14
    },
    {
      "epoch": 0.1282051282051282,
      "grad_norm": 0.17985212802886963,
      "learning_rate": 0.00019994119202418098,
      "loss": 1.1294,
      "step": 15
    },
    {
      "epoch": 0.13675213675213677,
      "grad_norm": 0.17866399884223938,
      "learning_rate": 0.00019991532016723439,
      "loss": 0.8047,
      "step": 16
    },
    {
      "epoch": 0.1452991452991453,
      "grad_norm": 0.10377021133899689,
      "learning_rate": 0.00019988474721373568,
      "loss": 1.193,
      "step": 17
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.11353971809148788,
      "learning_rate": 0.00019984947460216707,
      "loss": 0.6695,
      "step": 18
    },
    {
      "epoch": 0.1623931623931624,
      "grad_norm": 0.12540249526500702,
      "learning_rate": 0.00019980950399213344,
      "loss": 0.7988,
      "step": 19
    },
    {
      "epoch": 0.17094017094017094,
      "grad_norm": 0.11810794472694397,
      "learning_rate": 0.00019976483726428422,
      "loss": 0.6776,
      "step": 20
    },
    {
      "epoch": 0.1794871794871795,
      "grad_norm": 0.15847349166870117,
      "learning_rate": 0.0001997154765202251,
      "loss": 0.6852,
      "step": 21
    },
    {
      "epoch": 0.18803418803418803,
      "grad_norm": 0.13485313951969147,
      "learning_rate": 0.00019966142408241901,
      "loss": 0.7981,
      "step": 22
    },
    {
      "epoch": 0.19658119658119658,
      "grad_norm": 0.1381629854440689,
      "learning_rate": 0.00019960268249407675,
      "loss": 0.8672,
      "step": 23
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 0.18560636043548584,
      "learning_rate": 0.00019953925451903756,
      "loss": 0.8382,
      "step": 24
    },
    {
      "epoch": 0.21367521367521367,
      "grad_norm": 0.11415428668260574,
      "learning_rate": 0.0001994711431416389,
      "loss": 1.4257,
      "step": 25
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.1419740468263626,
      "learning_rate": 0.00019939835156657616,
      "loss": 1.2219,
      "step": 26
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 0.1541571021080017,
      "learning_rate": 0.00019932088321875172,
      "loss": 0.7459,
      "step": 27
    },
    {
      "epoch": 0.23931623931623933,
      "grad_norm": 0.16184499859809875,
      "learning_rate": 0.00019923874174311394,
      "loss": 0.66,
      "step": 28
    },
    {
      "epoch": 0.24786324786324787,
      "grad_norm": 0.13992361724376678,
      "learning_rate": 0.0001991519310044857,
      "loss": 1.0709,
      "step": 29
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 0.1397615224123001,
      "learning_rate": 0.00019906045508738228,
      "loss": 0.9601,
      "step": 30
    },
    {
      "epoch": 0.26495726495726496,
      "grad_norm": 0.15078146755695343,
      "learning_rate": 0.0001989643182958196,
      "loss": 0.678,
      "step": 31
    },
    {
      "epoch": 0.27350427350427353,
      "grad_norm": 0.18909209966659546,
      "learning_rate": 0.00019886352515311134,
      "loss": 0.7399,
      "step": 32
    },
    {
      "epoch": 0.28205128205128205,
      "grad_norm": 0.149637833237648,
      "learning_rate": 0.0001987580804016563,
      "loss": 0.9793,
      "step": 33
    },
    {
      "epoch": 0.2905982905982906,
      "grad_norm": 0.14903782308101654,
      "learning_rate": 0.00019864798900271532,
      "loss": 0.8615,
      "step": 34
    },
    {
      "epoch": 0.29914529914529914,
      "grad_norm": 0.13387615978717804,
      "learning_rate": 0.0001985332561361776,
      "loss": 0.6926,
      "step": 35
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.11794736236333847,
      "learning_rate": 0.00019841388720031727,
      "loss": 0.6114,
      "step": 36
    },
    {
      "epoch": 0.3162393162393162,
      "grad_norm": 0.14885829389095306,
      "learning_rate": 0.00019828988781153917,
      "loss": 0.7201,
      "step": 37
    },
    {
      "epoch": 0.3247863247863248,
      "grad_norm": 0.15518176555633545,
      "learning_rate": 0.00019816126380411476,
      "loss": 0.7263,
      "step": 38
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.13227546215057373,
      "learning_rate": 0.00019802802122990758,
      "loss": 0.7479,
      "step": 39
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 0.15872053802013397,
      "learning_rate": 0.00019789016635808837,
      "loss": 0.7847,
      "step": 40
    },
    {
      "epoch": 0.3504273504273504,
      "grad_norm": 0.13838137686252594,
      "learning_rate": 0.00019774770567484022,
      "loss": 0.9159,
      "step": 41
    },
    {
      "epoch": 0.358974358974359,
      "grad_norm": 0.11419806629419327,
      "learning_rate": 0.00019760064588305345,
      "loss": 0.6802,
      "step": 42
    },
    {
      "epoch": 0.36752136752136755,
      "grad_norm": 0.12754102051258087,
      "learning_rate": 0.00019744899390201006,
      "loss": 0.7116,
      "step": 43
    },
    {
      "epoch": 0.37606837606837606,
      "grad_norm": 0.12221560627222061,
      "learning_rate": 0.0001972927568670583,
      "loss": 1.0765,
      "step": 44
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.1402164101600647,
      "learning_rate": 0.00019713194212927696,
      "loss": 0.83,
      "step": 45
    },
    {
      "epoch": 0.39316239316239315,
      "grad_norm": 0.14776213467121124,
      "learning_rate": 0.00019696655725512933,
      "loss": 0.7333,
      "step": 46
    },
    {
      "epoch": 0.4017094017094017,
      "grad_norm": 0.14819088578224182,
      "learning_rate": 0.00019679661002610743,
      "loss": 0.8153,
      "step": 47
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 0.12451574206352234,
      "learning_rate": 0.00019662210843836574,
      "loss": 0.7028,
      "step": 48
    },
    {
      "epoch": 0.4188034188034188,
      "grad_norm": 0.14047390222549438,
      "learning_rate": 0.0001964430607023449,
      "loss": 0.6932,
      "step": 49
    },
    {
      "epoch": 0.42735042735042733,
      "grad_norm": 0.1826234757900238,
      "learning_rate": 0.00019625947524238563,
      "loss": 0.9923,
      "step": 50
    },
    {
      "epoch": 0.4358974358974359,
      "grad_norm": 0.14018255472183228,
      "learning_rate": 0.00019607136069633212,
      "loss": 0.6738,
      "step": 51
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.13014380633831024,
      "learning_rate": 0.0001958787259151258,
      "loss": 0.6896,
      "step": 52
    },
    {
      "epoch": 0.452991452991453,
      "grad_norm": 0.1482684463262558,
      "learning_rate": 0.00019568157996238884,
      "loss": 0.6597,
      "step": 53
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.12220227718353271,
      "learning_rate": 0.0001954799321139975,
      "loss": 0.9904,
      "step": 54
    },
    {
      "epoch": 0.4700854700854701,
      "grad_norm": 0.1338455229997635,
      "learning_rate": 0.00019527379185764612,
      "loss": 0.6457,
      "step": 55
    },
    {
      "epoch": 0.47863247863247865,
      "grad_norm": 0.17472369968891144,
      "learning_rate": 0.00019506316889240027,
      "loss": 1.134,
      "step": 56
    },
    {
      "epoch": 0.48717948717948717,
      "grad_norm": 0.14439380168914795,
      "learning_rate": 0.00019484807312824067,
      "loss": 0.6166,
      "step": 57
    },
    {
      "epoch": 0.49572649572649574,
      "grad_norm": 0.18377861380577087,
      "learning_rate": 0.0001946285146855968,
      "loss": 0.7602,
      "step": 58
    },
    {
      "epoch": 0.5042735042735043,
      "grad_norm": 0.159800723195076,
      "learning_rate": 0.0001944045038948709,
      "loss": 0.7342,
      "step": 59
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.17464005947113037,
      "learning_rate": 0.00019417605129595157,
      "loss": 0.6698,
      "step": 60
    },
    {
      "epoch": 0.5213675213675214,
      "grad_norm": 0.16266022622585297,
      "learning_rate": 0.0001939431676377183,
      "loss": 0.6718,
      "step": 61
    },
    {
      "epoch": 0.5299145299145299,
      "grad_norm": 0.14515793323516846,
      "learning_rate": 0.0001937058638775353,
      "loss": 0.6268,
      "step": 62
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.12234693765640259,
      "learning_rate": 0.00019346415118073632,
      "loss": 1.2523,
      "step": 63
    },
    {
      "epoch": 0.5470085470085471,
      "grad_norm": 0.17767716944217682,
      "learning_rate": 0.00019321804092009906,
      "loss": 0.7257,
      "step": 64
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.16069312393665314,
      "learning_rate": 0.00019296754467531014,
      "loss": 0.6947,
      "step": 65
    },
    {
      "epoch": 0.5641025641025641,
      "grad_norm": 0.18852359056472778,
      "learning_rate": 0.00019271267423242024,
      "loss": 0.6933,
      "step": 66
    },
    {
      "epoch": 0.5726495726495726,
      "grad_norm": 0.1703113317489624,
      "learning_rate": 0.00019245344158328972,
      "loss": 0.7734,
      "step": 67
    },
    {
      "epoch": 0.5811965811965812,
      "grad_norm": 0.1587096005678177,
      "learning_rate": 0.0001921898589250242,
      "loss": 0.6607,
      "step": 68
    },
    {
      "epoch": 0.5897435897435898,
      "grad_norm": 0.15161314606666565,
      "learning_rate": 0.0001919219386594007,
      "loss": 0.7139,
      "step": 69
    },
    {
      "epoch": 0.5982905982905983,
      "grad_norm": 0.15223422646522522,
      "learning_rate": 0.00019164969339228422,
      "loss": 0.7178,
      "step": 70
    },
    {
      "epoch": 0.6068376068376068,
      "grad_norm": 0.18094822764396667,
      "learning_rate": 0.00019137313593303463,
      "loss": 0.7735,
      "step": 71
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.13845407962799072,
      "learning_rate": 0.00019109227929390378,
      "loss": 1.3756,
      "step": 72
    },
    {
      "epoch": 0.6239316239316239,
      "grad_norm": 0.15550608932971954,
      "learning_rate": 0.00019080713668942356,
      "loss": 0.6475,
      "step": 73
    },
    {
      "epoch": 0.6324786324786325,
      "grad_norm": 0.16042666137218475,
      "learning_rate": 0.00019051772153578389,
      "loss": 0.6748,
      "step": 74
    },
    {
      "epoch": 0.6410256410256411,
      "grad_norm": 0.17203615605831146,
      "learning_rate": 0.00019022404745020163,
      "loss": 0.6711,
      "step": 75
    },
    {
      "epoch": 0.6495726495726496,
      "grad_norm": 0.14476130902767181,
      "learning_rate": 0.00018992612825027976,
      "loss": 0.7195,
      "step": 76
    },
    {
      "epoch": 0.6581196581196581,
      "grad_norm": 0.18853308260440826,
      "learning_rate": 0.0001896239779533575,
      "loss": 0.8027,
      "step": 77
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.1497141271829605,
      "learning_rate": 0.00018931761077585035,
      "loss": 0.6621,
      "step": 78
    },
    {
      "epoch": 0.6752136752136753,
      "grad_norm": 0.16902165114879608,
      "learning_rate": 0.00018900704113258165,
      "loss": 0.6437,
      "step": 79
    },
    {
      "epoch": 0.6837606837606838,
      "grad_norm": 0.1600257009267807,
      "learning_rate": 0.00018869228363610404,
      "loss": 0.6308,
      "step": 80
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.18659566342830658,
      "learning_rate": 0.00018837335309601213,
      "loss": 0.7028,
      "step": 81
    },
    {
      "epoch": 0.7008547008547008,
      "grad_norm": 0.14221739768981934,
      "learning_rate": 0.00018805026451824546,
      "loss": 1.2147,
      "step": 82
    },
    {
      "epoch": 0.7094017094017094,
      "grad_norm": 0.13898412883281708,
      "learning_rate": 0.00018772303310438275,
      "loss": 1.1227,
      "step": 83
    },
    {
      "epoch": 0.717948717948718,
      "grad_norm": 0.16075965762138367,
      "learning_rate": 0.00018739167425092644,
      "loss": 1.1104,
      "step": 84
    },
    {
      "epoch": 0.7264957264957265,
      "grad_norm": 0.1688220500946045,
      "learning_rate": 0.00018705620354857833,
      "loss": 0.6213,
      "step": 85
    },
    {
      "epoch": 0.7350427350427351,
      "grad_norm": 0.15251010656356812,
      "learning_rate": 0.00018671663678150607,
      "loss": 0.6059,
      "step": 86
    },
    {
      "epoch": 0.7435897435897436,
      "grad_norm": 0.14779676496982574,
      "learning_rate": 0.0001863729899266004,
      "loss": 0.6402,
      "step": 87
    },
    {
      "epoch": 0.7521367521367521,
      "grad_norm": 0.16805744171142578,
      "learning_rate": 0.0001860252791527236,
      "loss": 1.0025,
      "step": 88
    },
    {
      "epoch": 0.7606837606837606,
      "grad_norm": 0.13870711624622345,
      "learning_rate": 0.00018567352081994852,
      "loss": 1.1969,
      "step": 89
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.1410149782896042,
      "learning_rate": 0.00018531773147878895,
      "loss": 1.0952,
      "step": 90
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 0.16514992713928223,
      "learning_rate": 0.0001849579278694209,
      "loss": 0.6968,
      "step": 91
    },
    {
      "epoch": 0.7863247863247863,
      "grad_norm": 0.16152970492839813,
      "learning_rate": 0.00018459412692089494,
      "loss": 0.6271,
      "step": 92
    },
    {
      "epoch": 0.7948717948717948,
      "grad_norm": 0.1401905119419098,
      "learning_rate": 0.0001842263457503397,
      "loss": 0.5867,
      "step": 93
    },
    {
      "epoch": 0.8034188034188035,
      "grad_norm": 0.2006424516439438,
      "learning_rate": 0.00018385460166215638,
      "loss": 0.7979,
      "step": 94
    },
    {
      "epoch": 0.811965811965812,
      "grad_norm": 0.17356745898723602,
      "learning_rate": 0.00018347891214720477,
      "loss": 0.6557,
      "step": 95
    },
    {
      "epoch": 0.8205128205128205,
      "grad_norm": 0.13943414390087128,
      "learning_rate": 0.00018309929488198012,
      "loss": 1.1329,
      "step": 96
    },
    {
      "epoch": 0.8290598290598291,
      "grad_norm": 0.16562946140766144,
      "learning_rate": 0.00018271576772778154,
      "loss": 0.6571,
      "step": 97
    },
    {
      "epoch": 0.8376068376068376,
      "grad_norm": 0.1551978886127472,
      "learning_rate": 0.00018232834872987147,
      "loss": 1.1503,
      "step": 98
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.1753336638212204,
      "learning_rate": 0.00018193705611662696,
      "loss": 0.7613,
      "step": 99
    },
    {
      "epoch": 0.8547008547008547,
      "grad_norm": 0.21526718139648438,
      "learning_rate": 0.0001815419082986815,
      "loss": 0.7481,
      "step": 100
    },
    {
      "epoch": 0.8632478632478633,
      "grad_norm": 0.15033215284347534,
      "learning_rate": 0.00018114292386805936,
      "loss": 1.0287,
      "step": 101
    },
    {
      "epoch": 0.8717948717948718,
      "grad_norm": 0.15260834991931915,
      "learning_rate": 0.00018074012159730032,
      "loss": 1.1275,
      "step": 102
    },
    {
      "epoch": 0.8803418803418803,
      "grad_norm": 0.14884799718856812,
      "learning_rate": 0.00018033352043857675,
      "loss": 0.9348,
      "step": 103
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.1598692387342453,
      "learning_rate": 0.00017992313952280172,
      "loss": 1.0837,
      "step": 104
    },
    {
      "epoch": 0.8974358974358975,
      "grad_norm": 0.17874813079833984,
      "learning_rate": 0.00017950899815872892,
      "loss": 1.1863,
      "step": 105
    },
    {
      "epoch": 0.905982905982906,
      "grad_norm": 0.2233838587999344,
      "learning_rate": 0.00017909111583204422,
      "loss": 1.0691,
      "step": 106
    },
    {
      "epoch": 0.9145299145299145,
      "grad_norm": 0.2679513990879059,
      "learning_rate": 0.0001786695122044487,
      "loss": 0.8508,
      "step": 107
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.16150496900081635,
      "learning_rate": 0.0001782442071127338,
      "loss": 1.0845,
      "step": 108
    },
    {
      "epoch": 0.9316239316239316,
      "grad_norm": 0.23054973781108856,
      "learning_rate": 0.0001778152205678477,
      "loss": 1.0911,
      "step": 109
    },
    {
      "epoch": 0.9401709401709402,
      "grad_norm": 0.2072819173336029,
      "learning_rate": 0.00017738257275395404,
      "loss": 0.7793,
      "step": 110
    },
    {
      "epoch": 0.9487179487179487,
      "grad_norm": 0.18355989456176758,
      "learning_rate": 0.00017694628402748202,
      "loss": 0.6947,
      "step": 111
    },
    {
      "epoch": 0.9572649572649573,
      "grad_norm": 0.17697495222091675,
      "learning_rate": 0.0001765063749161688,
      "loss": 0.7191,
      "step": 112
    },
    {
      "epoch": 0.9658119658119658,
      "grad_norm": 0.1893756091594696,
      "learning_rate": 0.00017606286611809353,
      "loss": 0.7089,
      "step": 113
    },
    {
      "epoch": 0.9743589743589743,
      "grad_norm": 0.175858274102211,
      "learning_rate": 0.00017561577850070355,
      "loss": 0.8156,
      "step": 114
    },
    {
      "epoch": 0.9829059829059829,
      "grad_norm": 0.1497766226530075,
      "learning_rate": 0.00017516513309983253,
      "loss": 0.6113,
      "step": 115
    },
    {
      "epoch": 0.9914529914529915,
      "grad_norm": 0.2035011351108551,
      "learning_rate": 0.00017471095111871074,
      "loss": 0.7514,
      "step": 116
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.19679343700408936,
      "learning_rate": 0.0001742532539269674,
      "loss": 0.6778,
      "step": 117
    },
    {
      "epoch": 1.0085470085470085,
      "grad_norm": 0.19897602498531342,
      "learning_rate": 0.00017379206305962526,
      "loss": 0.518,
      "step": 118
    },
    {
      "epoch": 1.017094017094017,
      "grad_norm": 0.17100335657596588,
      "learning_rate": 0.00017332740021608722,
      "loss": 0.5464,
      "step": 119
    },
    {
      "epoch": 1.0256410256410255,
      "grad_norm": 0.1799200475215912,
      "learning_rate": 0.00017285928725911562,
      "loss": 0.5751,
      "step": 120
    },
    {
      "epoch": 1.0341880341880343,
      "grad_norm": 0.2159220576286316,
      "learning_rate": 0.00017238774621380337,
      "loss": 0.5944,
      "step": 121
    },
    {
      "epoch": 1.0427350427350428,
      "grad_norm": 0.20010395348072052,
      "learning_rate": 0.00017191279926653761,
      "loss": 1.2068,
      "step": 122
    },
    {
      "epoch": 1.0512820512820513,
      "grad_norm": 0.20249801874160767,
      "learning_rate": 0.00017143446876395602,
      "loss": 1.0354,
      "step": 123
    },
    {
      "epoch": 1.0598290598290598,
      "grad_norm": 0.16663746535778046,
      "learning_rate": 0.00017095277721189528,
      "loss": 0.9905,
      "step": 124
    },
    {
      "epoch": 1.0683760683760684,
      "grad_norm": 0.22365769743919373,
      "learning_rate": 0.00017046774727433222,
      "loss": 0.6772,
      "step": 125
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.19689880311489105,
      "learning_rate": 0.00016997940177231722,
      "loss": 0.544,
      "step": 126
    },
    {
      "epoch": 1.0854700854700854,
      "grad_norm": 0.1540079563856125,
      "learning_rate": 0.00016948776368290084,
      "loss": 1.1138,
      "step": 127
    },
    {
      "epoch": 1.0940170940170941,
      "grad_norm": 0.21169312298297882,
      "learning_rate": 0.00016899285613805246,
      "loss": 0.4954,
      "step": 128
    },
    {
      "epoch": 1.1025641025641026,
      "grad_norm": 0.227870911359787,
      "learning_rate": 0.00016849470242357196,
      "loss": 0.5515,
      "step": 129
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 0.2119448482990265,
      "learning_rate": 0.00016799332597799413,
      "loss": 0.5498,
      "step": 130
    },
    {
      "epoch": 1.1196581196581197,
      "grad_norm": 0.1958005130290985,
      "learning_rate": 0.00016748875039148593,
      "loss": 0.9122,
      "step": 131
    },
    {
      "epoch": 1.1282051282051282,
      "grad_norm": 0.18614064157009125,
      "learning_rate": 0.0001669809994047364,
      "loss": 0.9878,
      "step": 132
    },
    {
      "epoch": 1.1367521367521367,
      "grad_norm": 0.22994214296340942,
      "learning_rate": 0.0001664700969078398,
      "loss": 0.6173,
      "step": 133
    },
    {
      "epoch": 1.1452991452991452,
      "grad_norm": 0.17942824959754944,
      "learning_rate": 0.00016595606693917142,
      "loss": 0.9871,
      "step": 134
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.19774889945983887,
      "learning_rate": 0.00016543893368425666,
      "loss": 0.531,
      "step": 135
    },
    {
      "epoch": 1.1623931623931625,
      "grad_norm": 0.2616710662841797,
      "learning_rate": 0.00016491872147463306,
      "loss": 0.5396,
      "step": 136
    },
    {
      "epoch": 1.170940170940171,
      "grad_norm": 0.19081617891788483,
      "learning_rate": 0.00016439545478670543,
      "loss": 1.4579,
      "step": 137
    },
    {
      "epoch": 1.1794871794871795,
      "grad_norm": 0.22909559309482574,
      "learning_rate": 0.00016386915824059427,
      "loss": 0.5076,
      "step": 138
    },
    {
      "epoch": 1.188034188034188,
      "grad_norm": 0.19601647555828094,
      "learning_rate": 0.00016333985659897735,
      "loss": 0.477,
      "step": 139
    },
    {
      "epoch": 1.1965811965811965,
      "grad_norm": 0.2791956067085266,
      "learning_rate": 0.00016280757476592466,
      "loss": 0.5587,
      "step": 140
    },
    {
      "epoch": 1.205128205128205,
      "grad_norm": 0.23856423795223236,
      "learning_rate": 0.0001622723377857265,
      "loss": 0.5495,
      "step": 141
    },
    {
      "epoch": 1.2136752136752136,
      "grad_norm": 0.2004079818725586,
      "learning_rate": 0.00016173417084171536,
      "loss": 1.0806,
      "step": 142
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 0.24053840339183807,
      "learning_rate": 0.00016119309925508078,
      "loss": 0.4846,
      "step": 143
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.2852567732334137,
      "learning_rate": 0.0001606491484836782,
      "loss": 0.5292,
      "step": 144
    },
    {
      "epoch": 1.2393162393162394,
      "grad_norm": 0.2828088402748108,
      "learning_rate": 0.00016010234412083086,
      "loss": 0.6061,
      "step": 145
    },
    {
      "epoch": 1.2478632478632479,
      "grad_norm": 0.2880561351776123,
      "learning_rate": 0.00015955271189412598,
      "loss": 0.6294,
      "step": 146
    },
    {
      "epoch": 1.2564102564102564,
      "grad_norm": 0.2703532576560974,
      "learning_rate": 0.00015900027766420393,
      "loss": 0.4802,
      "step": 147
    },
    {
      "epoch": 1.264957264957265,
      "grad_norm": 0.26987820863723755,
      "learning_rate": 0.00015844506742354164,
      "loss": 0.58,
      "step": 148
    },
    {
      "epoch": 1.2735042735042734,
      "grad_norm": 0.20799943804740906,
      "learning_rate": 0.00015788710729522953,
      "loss": 0.8506,
      "step": 149
    },
    {
      "epoch": 1.282051282051282,
      "grad_norm": 0.284532368183136,
      "learning_rate": 0.00015732642353174259,
      "loss": 0.9502,
      "step": 150
    },
    {
      "epoch": 1.2905982905982907,
      "grad_norm": 0.2279794067144394,
      "learning_rate": 0.0001567630425137049,
      "loss": 0.4345,
      "step": 151
    },
    {
      "epoch": 1.2991452991452992,
      "grad_norm": 0.27440500259399414,
      "learning_rate": 0.00015619699074864864,
      "loss": 0.5389,
      "step": 152
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.3192152976989746,
      "learning_rate": 0.00015562829486976673,
      "loss": 0.601,
      "step": 153
    },
    {
      "epoch": 1.3162393162393162,
      "grad_norm": 0.2619931995868683,
      "learning_rate": 0.00015505698163465986,
      "loss": 0.6321,
      "step": 154
    },
    {
      "epoch": 1.3247863247863247,
      "grad_norm": 0.3034244477748871,
      "learning_rate": 0.00015448307792407734,
      "loss": 0.5392,
      "step": 155
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.24447086453437805,
      "learning_rate": 0.00015390661074065256,
      "loss": 0.5294,
      "step": 156
    },
    {
      "epoch": 1.341880341880342,
      "grad_norm": 0.2406824827194214,
      "learning_rate": 0.00015332760720763232,
      "loss": 1.0088,
      "step": 157
    },
    {
      "epoch": 1.3504273504273505,
      "grad_norm": 0.33081722259521484,
      "learning_rate": 0.00015274609456760073,
      "loss": 0.6686,
      "step": 158
    },
    {
      "epoch": 1.358974358974359,
      "grad_norm": 0.2927612066268921,
      "learning_rate": 0.00015216210018119733,
      "loss": 0.6711,
      "step": 159
    },
    {
      "epoch": 1.3675213675213675,
      "grad_norm": 0.27662229537963867,
      "learning_rate": 0.00015157565152583002,
      "loss": 0.4599,
      "step": 160
    },
    {
      "epoch": 1.376068376068376,
      "grad_norm": 0.27406662702560425,
      "learning_rate": 0.0001509867761943818,
      "loss": 0.7595,
      "step": 161
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.2830904424190521,
      "learning_rate": 0.00015039550189391298,
      "loss": 0.5543,
      "step": 162
    },
    {
      "epoch": 1.393162393162393,
      "grad_norm": 0.2570502460002899,
      "learning_rate": 0.0001498018564443571,
      "loss": 0.796,
      "step": 163
    },
    {
      "epoch": 1.4017094017094016,
      "grad_norm": 0.3457013964653015,
      "learning_rate": 0.0001492058677772123,
      "loss": 0.6932,
      "step": 164
    },
    {
      "epoch": 1.4102564102564101,
      "grad_norm": 0.28781554102897644,
      "learning_rate": 0.000148607563934227,
      "loss": 0.5926,
      "step": 165
    },
    {
      "epoch": 1.4188034188034189,
      "grad_norm": 0.22006003558635712,
      "learning_rate": 0.00014800697306608044,
      "loss": 0.4337,
      "step": 166
    },
    {
      "epoch": 1.4273504273504274,
      "grad_norm": 0.26621371507644653,
      "learning_rate": 0.00014740412343105828,
      "loss": 0.7999,
      "step": 167
    },
    {
      "epoch": 1.435897435897436,
      "grad_norm": 0.25635233521461487,
      "learning_rate": 0.00014679904339372302,
      "loss": 0.4834,
      "step": 168
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 0.28802382946014404,
      "learning_rate": 0.00014619176142357935,
      "loss": 0.4865,
      "step": 169
    },
    {
      "epoch": 1.452991452991453,
      "grad_norm": 0.28858450055122375,
      "learning_rate": 0.0001455823060937347,
      "loss": 0.5757,
      "step": 170
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.3039717674255371,
      "learning_rate": 0.00014497070607955476,
      "loss": 0.5206,
      "step": 171
    },
    {
      "epoch": 1.4700854700854702,
      "grad_norm": 0.29578229784965515,
      "learning_rate": 0.00014435699015731448,
      "loss": 0.5103,
      "step": 172
    },
    {
      "epoch": 1.4786324786324787,
      "grad_norm": 0.2743285596370697,
      "learning_rate": 0.00014374118720284388,
      "loss": 0.5932,
      "step": 173
    },
    {
      "epoch": 1.4871794871794872,
      "grad_norm": 0.23295287787914276,
      "learning_rate": 0.00014312332619016965,
      "loss": 0.734,
      "step": 174
    },
    {
      "epoch": 1.4957264957264957,
      "grad_norm": 0.3224605917930603,
      "learning_rate": 0.0001425034361901516,
      "loss": 0.5668,
      "step": 175
    },
    {
      "epoch": 1.5042735042735043,
      "grad_norm": 0.28584739565849304,
      "learning_rate": 0.00014188154636911524,
      "loss": 1.1414,
      "step": 176
    },
    {
      "epoch": 1.5128205128205128,
      "grad_norm": 0.3341439664363861,
      "learning_rate": 0.0001412576859874791,
      "loss": 0.527,
      "step": 177
    },
    {
      "epoch": 1.5213675213675213,
      "grad_norm": 0.2781898081302643,
      "learning_rate": 0.00014063188439837832,
      "loss": 0.4599,
      "step": 178
    },
    {
      "epoch": 1.5299145299145298,
      "grad_norm": 0.2845589518547058,
      "learning_rate": 0.0001400041710462833,
      "loss": 0.4662,
      "step": 179
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.2917931079864502,
      "learning_rate": 0.0001393745754656146,
      "loss": 0.5176,
      "step": 180
    },
    {
      "epoch": 1.547008547008547,
      "grad_norm": 0.27486878633499146,
      "learning_rate": 0.00013874312727935292,
      "loss": 0.4756,
      "step": 181
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 0.29670944809913635,
      "learning_rate": 0.00013810985619764572,
      "loss": 0.9803,
      "step": 182
    },
    {
      "epoch": 1.564102564102564,
      "grad_norm": 0.282777339220047,
      "learning_rate": 0.00013747479201640914,
      "loss": 0.494,
      "step": 183
    },
    {
      "epoch": 1.5726495726495726,
      "grad_norm": 0.32058680057525635,
      "learning_rate": 0.00013683796461592604,
      "loss": 0.6009,
      "step": 184
    },
    {
      "epoch": 1.5811965811965814,
      "grad_norm": 0.2858709394931793,
      "learning_rate": 0.00013619940395944027,
      "loss": 0.5382,
      "step": 185
    },
    {
      "epoch": 1.5897435897435899,
      "grad_norm": 0.2902598977088928,
      "learning_rate": 0.00013555914009174663,
      "loss": 0.5271,
      "step": 186
    },
    {
      "epoch": 1.5982905982905984,
      "grad_norm": 0.30693796277046204,
      "learning_rate": 0.00013491720313777756,
      "loss": 0.8996,
      "step": 187
    },
    {
      "epoch": 1.606837606837607,
      "grad_norm": 0.30923569202423096,
      "learning_rate": 0.00013427362330118543,
      "loss": 0.5298,
      "step": 188
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.30768024921417236,
      "learning_rate": 0.0001336284308629216,
      "loss": 0.6628,
      "step": 189
    },
    {
      "epoch": 1.623931623931624,
      "grad_norm": 0.2818881571292877,
      "learning_rate": 0.00013298165617981172,
      "loss": 0.721,
      "step": 190
    },
    {
      "epoch": 1.6324786324786325,
      "grad_norm": 0.32291677594184875,
      "learning_rate": 0.00013233332968312715,
      "loss": 0.7519,
      "step": 191
    },
    {
      "epoch": 1.641025641025641,
      "grad_norm": 0.3007102310657501,
      "learning_rate": 0.0001316834818771535,
      "loss": 0.5748,
      "step": 192
    },
    {
      "epoch": 1.6495726495726495,
      "grad_norm": 0.3087317645549774,
      "learning_rate": 0.00013103214333775521,
      "loss": 0.5906,
      "step": 193
    },
    {
      "epoch": 1.658119658119658,
      "grad_norm": 0.3102208375930786,
      "learning_rate": 0.00013037934471093682,
      "loss": 0.5124,
      "step": 194
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.3031424283981323,
      "learning_rate": 0.00012972511671140125,
      "loss": 0.4928,
      "step": 195
    },
    {
      "epoch": 1.6752136752136753,
      "grad_norm": 0.28559157252311707,
      "learning_rate": 0.00012906949012110456,
      "loss": 0.7699,
      "step": 196
    },
    {
      "epoch": 1.6837606837606838,
      "grad_norm": 0.3253765106201172,
      "learning_rate": 0.00012841249578780757,
      "loss": 0.6912,
      "step": 197
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.25747209787368774,
      "learning_rate": 0.00012775416462362457,
      "loss": 0.5606,
      "step": 198
    },
    {
      "epoch": 1.7008547008547008,
      "grad_norm": 0.26116716861724854,
      "learning_rate": 0.00012709452760356884,
      "loss": 1.1407,
      "step": 199
    },
    {
      "epoch": 1.7094017094017095,
      "grad_norm": 0.2786200940608978,
      "learning_rate": 0.00012643361576409516,
      "loss": 0.5478,
      "step": 200
    },
    {
      "epoch": 1.717948717948718,
      "grad_norm": 0.3031173646450043,
      "learning_rate": 0.00012577146020163968,
      "loss": 0.6042,
      "step": 201
    },
    {
      "epoch": 1.7264957264957266,
      "grad_norm": 0.3398924469947815,
      "learning_rate": 0.00012510809207115666,
      "loss": 0.5367,
      "step": 202
    },
    {
      "epoch": 1.735042735042735,
      "grad_norm": 0.2823917865753174,
      "learning_rate": 0.00012444354258465268,
      "loss": 0.4997,
      "step": 203
    },
    {
      "epoch": 1.7435897435897436,
      "grad_norm": 0.3168320953845978,
      "learning_rate": 0.00012377784300971807,
      "loss": 0.8277,
      "step": 204
    },
    {
      "epoch": 1.7521367521367521,
      "grad_norm": 0.29730290174484253,
      "learning_rate": 0.0001231110246680558,
      "loss": 1.0703,
      "step": 205
    },
    {
      "epoch": 1.7606837606837606,
      "grad_norm": 0.3612962067127228,
      "learning_rate": 0.00012244311893400763,
      "loss": 0.622,
      "step": 206
    },
    {
      "epoch": 1.7692307692307692,
      "grad_norm": 0.35250765085220337,
      "learning_rate": 0.00012177415723307808,
      "loss": 0.5804,
      "step": 207
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.281643807888031,
      "learning_rate": 0.00012110417104045575,
      "loss": 0.4677,
      "step": 208
    },
    {
      "epoch": 1.7863247863247862,
      "grad_norm": 0.2842894196510315,
      "learning_rate": 0.00012043319187953241,
      "loss": 0.5971,
      "step": 209
    },
    {
      "epoch": 1.7948717948717947,
      "grad_norm": 0.30655983090400696,
      "learning_rate": 0.00011976125132041974,
      "loss": 0.5816,
      "step": 210
    },
    {
      "epoch": 1.8034188034188035,
      "grad_norm": 0.343220055103302,
      "learning_rate": 0.00011908838097846404,
      "loss": 0.6953,
      "step": 211
    },
    {
      "epoch": 1.811965811965812,
      "grad_norm": 0.3058364987373352,
      "learning_rate": 0.00011841461251275867,
      "loss": 0.7328,
      "step": 212
    },
    {
      "epoch": 1.8205128205128205,
      "grad_norm": 0.3523794710636139,
      "learning_rate": 0.00011773997762465429,
      "loss": 0.5407,
      "step": 213
    },
    {
      "epoch": 1.8290598290598292,
      "grad_norm": 0.28265875577926636,
      "learning_rate": 0.0001170645080562676,
      "loss": 0.6113,
      "step": 214
    },
    {
      "epoch": 1.8376068376068377,
      "grad_norm": 0.2768702805042267,
      "learning_rate": 0.00011638823558898762,
      "loss": 0.4853,
      "step": 215
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.30153489112854004,
      "learning_rate": 0.00011571119204198037,
      "loss": 0.5403,
      "step": 216
    },
    {
      "epoch": 1.8547008547008548,
      "grad_norm": 0.27942952513694763,
      "learning_rate": 0.00011503340927069189,
      "loss": 0.6213,
      "step": 217
    },
    {
      "epoch": 1.8632478632478633,
      "grad_norm": 0.2634161114692688,
      "learning_rate": 0.00011435491916534919,
      "loss": 0.5089,
      "step": 218
    },
    {
      "epoch": 1.8717948717948718,
      "grad_norm": 0.2846587598323822,
      "learning_rate": 0.00011367575364946006,
      "loss": 0.5329,
      "step": 219
    },
    {
      "epoch": 1.8803418803418803,
      "grad_norm": 0.3283989727497101,
      "learning_rate": 0.00011299594467831078,
      "loss": 0.516,
      "step": 220
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 0.3399990200996399,
      "learning_rate": 0.00011231552423746283,
      "loss": 0.5947,
      "step": 221
    },
    {
      "epoch": 1.8974358974358974,
      "grad_norm": 0.2741105258464813,
      "learning_rate": 0.00011163452434124773,
      "loss": 0.4982,
      "step": 222
    },
    {
      "epoch": 1.9059829059829059,
      "grad_norm": 0.3004041314125061,
      "learning_rate": 0.00011095297703126093,
      "loss": 0.4908,
      "step": 223
    },
    {
      "epoch": 1.9145299145299144,
      "grad_norm": 0.3036716878414154,
      "learning_rate": 0.00011027091437485404,
      "loss": 0.5979,
      "step": 224
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.30735576152801514,
      "learning_rate": 0.00010958836846362621,
      "loss": 0.6864,
      "step": 225
    },
    {
      "epoch": 1.9316239316239316,
      "grad_norm": 0.2979448437690735,
      "learning_rate": 0.00010890537141191417,
      "loss": 0.4901,
      "step": 226
    },
    {
      "epoch": 1.9401709401709402,
      "grad_norm": 0.557965874671936,
      "learning_rate": 0.00010822195535528106,
      "loss": 0.8011,
      "step": 227
    },
    {
      "epoch": 1.9487179487179487,
      "grad_norm": 0.28031420707702637,
      "learning_rate": 0.00010753815244900458,
      "loss": 0.4857,
      "step": 228
    },
    {
      "epoch": 1.9572649572649574,
      "grad_norm": 0.33071720600128174,
      "learning_rate": 0.00010685399486656406,
      "loss": 0.5614,
      "step": 229
    },
    {
      "epoch": 1.965811965811966,
      "grad_norm": 0.3054099678993225,
      "learning_rate": 0.00010616951479812658,
      "loss": 0.5198,
      "step": 230
    },
    {
      "epoch": 1.9743589743589745,
      "grad_norm": 0.33297890424728394,
      "learning_rate": 0.00010548474444903247,
      "loss": 0.4813,
      "step": 231
    },
    {
      "epoch": 1.982905982905983,
      "grad_norm": 0.29195529222488403,
      "learning_rate": 0.00010479971603828,
      "loss": 0.5025,
      "step": 232
    },
    {
      "epoch": 1.9914529914529915,
      "grad_norm": 0.27123546600341797,
      "learning_rate": 0.00010411446179700943,
      "loss": 0.5084,
      "step": 233
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.44304001331329346,
      "learning_rate": 0.00010342901396698659,
      "loss": 0.4979,
      "step": 234
    }
  ],
  "logging_steps": 1,
  "max_steps": 468,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 117,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.071037992589066e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}