{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9992414664981036,
  "eval_steps": 500,
  "global_step": 988,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010113780025284451,
      "grad_norm": 0.25880733132362366,
      "learning_rate": 5.050505050505051e-06,
      "loss": 2.1545,
      "step": 10
    },
    {
      "epoch": 0.020227560050568902,
      "grad_norm": 0.22247332334518433,
      "learning_rate": 1.0101010101010101e-05,
      "loss": 2.0716,
      "step": 20
    },
    {
      "epoch": 0.03034134007585335,
      "grad_norm": 0.18701794743537903,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 2.0514,
      "step": 30
    },
    {
      "epoch": 0.040455120101137804,
      "grad_norm": 0.2414943277835846,
      "learning_rate": 2.0202020202020203e-05,
      "loss": 2.0152,
      "step": 40
    },
    {
      "epoch": 0.05056890012642225,
      "grad_norm": 0.35905972123146057,
      "learning_rate": 2.5252525252525256e-05,
      "loss": 1.9956,
      "step": 50
    },
    {
      "epoch": 0.0606826801517067,
      "grad_norm": 0.25367870926856995,
      "learning_rate": 3.0303030303030306e-05,
      "loss": 1.9278,
      "step": 60
    },
    {
      "epoch": 0.07079646017699115,
      "grad_norm": 0.3600216805934906,
      "learning_rate": 3.535353535353535e-05,
      "loss": 1.9378,
      "step": 70
    },
    {
      "epoch": 0.08091024020227561,
      "grad_norm": 0.2232195883989334,
      "learning_rate": 4.0404040404040405e-05,
      "loss": 1.9108,
      "step": 80
    },
    {
      "epoch": 0.09102402022756005,
      "grad_norm": 0.21375969052314758,
      "learning_rate": 4.545454545454546e-05,
      "loss": 1.9112,
      "step": 90
    },
    {
      "epoch": 0.1011378002528445,
      "grad_norm": 0.2019467055797577,
      "learning_rate": 4.999984389896431e-05,
      "loss": 1.9012,
      "step": 100
    },
    {
      "epoch": 0.11125158027812895,
      "grad_norm": 0.23392678797245026,
      "learning_rate": 4.998111413334416e-05,
      "loss": 1.9264,
      "step": 110
    },
    {
      "epoch": 0.1213653603034134,
      "grad_norm": 0.25306347012519836,
      "learning_rate": 4.993119095936937e-05,
      "loss": 1.8857,
      "step": 120
    },
    {
      "epoch": 0.13147914032869784,
      "grad_norm": 0.20288558304309845,
      "learning_rate": 4.985013671509034e-05,
      "loss": 1.9397,
      "step": 130
    },
    {
      "epoch": 0.1415929203539823,
      "grad_norm": 0.23422028124332428,
      "learning_rate": 4.9738052611290836e-05,
      "loss": 1.8809,
      "step": 140
    },
    {
      "epoch": 0.15170670037926676,
      "grad_norm": 0.21707236766815186,
      "learning_rate": 4.959507860510813e-05,
      "loss": 1.8604,
      "step": 150
    },
    {
      "epoch": 0.16182048040455121,
      "grad_norm": 0.2721821367740631,
      "learning_rate": 4.9421393225271375e-05,
      "loss": 1.8316,
      "step": 160
    },
    {
      "epoch": 0.17193426042983564,
      "grad_norm": 0.2519826591014862,
      "learning_rate": 4.921721334917639e-05,
      "loss": 1.8368,
      "step": 170
    },
    {
      "epoch": 0.1820480404551201,
      "grad_norm": 0.296394020318985,
      "learning_rate": 4.8982793932075356e-05,
      "loss": 1.851,
      "step": 180
    },
    {
      "epoch": 0.19216182048040456,
      "grad_norm": 0.36436939239501953,
      "learning_rate": 4.871842768871928e-05,
      "loss": 1.8381,
      "step": 190
    },
    {
      "epoch": 0.202275600505689,
      "grad_norm": 0.3738105595111847,
      "learning_rate": 4.8424444727851126e-05,
      "loss": 1.8526,
      "step": 200
    },
    {
      "epoch": 0.21238938053097345,
      "grad_norm": 0.2505233883857727,
      "learning_rate": 4.81012121400057e-05,
      "loss": 1.8241,
      "step": 210
    },
    {
      "epoch": 0.2225031605562579,
      "grad_norm": 0.22470301389694214,
      "learning_rate": 4.774913353913124e-05,
      "loss": 1.8848,
      "step": 220
    },
    {
      "epoch": 0.23261694058154236,
      "grad_norm": 0.22071461379528046,
      "learning_rate": 4.7368648558604836e-05,
      "loss": 1.8596,
      "step": 230
    },
    {
      "epoch": 0.2427307206068268,
      "grad_norm": 0.2559085190296173,
      "learning_rate": 4.69602323022712e-05,
      "loss": 1.8337,
      "step": 240
    },
    {
      "epoch": 0.2528445006321112,
      "grad_norm": 0.42372068762779236,
      "learning_rate": 4.6524394751190215e-05,
      "loss": 1.8369,
      "step": 250
    },
    {
      "epoch": 0.2629582806573957,
      "grad_norm": 0.2989506423473358,
      "learning_rate": 4.606168012683394e-05,
      "loss": 1.8556,
      "step": 260
    },
    {
      "epoch": 0.27307206068268014,
      "grad_norm": 0.30372118949890137,
      "learning_rate": 4.5572666211528324e-05,
      "loss": 1.8343,
      "step": 270
    },
    {
      "epoch": 0.2831858407079646,
      "grad_norm": 0.23717807233333588,
      "learning_rate": 4.5057963626988255e-05,
      "loss": 1.7969,
      "step": 280
    },
    {
      "epoch": 0.29329962073324906,
      "grad_norm": 0.3980520963668823,
      "learning_rate": 4.451821507184656e-05,
      "loss": 1.8078,
      "step": 290
    },
    {
      "epoch": 0.3034134007585335,
      "grad_norm": 0.2746827006340027,
      "learning_rate": 4.395409451912942e-05,
      "loss": 1.8017,
      "step": 300
    },
    {
      "epoch": 0.31352718078381797,
      "grad_norm": 0.24784184992313385,
      "learning_rate": 4.336630637467991e-05,
      "loss": 1.8024,
      "step": 310
    },
    {
      "epoch": 0.32364096080910243,
      "grad_norm": 0.23716619610786438,
      "learning_rate": 4.275558459758079e-05,
      "loss": 1.828,
      "step": 320
    },
    {
      "epoch": 0.33375474083438683,
      "grad_norm": 0.32575511932373047,
      "learning_rate": 4.2122691783674786e-05,
      "loss": 1.8078,
      "step": 330
    },
    {
      "epoch": 0.3438685208596713,
      "grad_norm": 0.2531313896179199,
      "learning_rate": 4.14684182133266e-05,
      "loss": 1.7991,
      "step": 340
    },
    {
      "epoch": 0.35398230088495575,
      "grad_norm": 0.2725628614425659,
      "learning_rate": 4.079358086461605e-05,
      "loss": 1.812,
      "step": 350
    },
    {
      "epoch": 0.3640960809102402,
      "grad_norm": 0.2779051661491394,
      "learning_rate": 4.009902239319405e-05,
      "loss": 1.7885,
      "step": 360
    },
    {
      "epoch": 0.37420986093552466,
      "grad_norm": 0.4107356071472168,
      "learning_rate": 3.938561008007578e-05,
      "loss": 1.8163,
      "step": 370
    },
    {
      "epoch": 0.3843236409608091,
      "grad_norm": 0.31746843457221985,
      "learning_rate": 3.8654234748684446e-05,
      "loss": 1.8151,
      "step": 380
    },
    {
      "epoch": 0.3944374209860936,
      "grad_norm": 0.31331196427345276,
      "learning_rate": 3.7905809652498316e-05,
      "loss": 1.7881,
      "step": 390
    },
    {
      "epoch": 0.404551201011378,
      "grad_norm": 0.2541310787200928,
      "learning_rate": 3.714126933468959e-05,
      "loss": 1.8035,
      "step": 400
    },
    {
      "epoch": 0.41466498103666244,
      "grad_norm": 0.3339349925518036,
      "learning_rate": 3.6361568461179516e-05,
      "loss": 1.8021,
      "step": 410
    },
    {
      "epoch": 0.4247787610619469,
      "grad_norm": 0.30160820484161377,
      "learning_rate": 3.5567680628566366e-05,
      "loss": 1.7554,
      "step": 420
    },
    {
      "epoch": 0.43489254108723135,
      "grad_norm": 0.2795872986316681,
      "learning_rate": 3.476059714841529e-05,
      "loss": 1.7935,
      "step": 430
    },
    {
      "epoch": 0.4450063211125158,
      "grad_norm": 0.2831222712993622,
      "learning_rate": 3.3941325809427715e-05,
      "loss": 1.7723,
      "step": 440
    },
    {
      "epoch": 0.45512010113780027,
      "grad_norm": 0.31489044427871704,
      "learning_rate": 3.311088961903613e-05,
      "loss": 1.7395,
      "step": 450
    },
    {
      "epoch": 0.46523388116308473,
      "grad_norm": 0.26574623584747314,
      "learning_rate": 3.227032552599555e-05,
      "loss": 1.78,
      "step": 460
    },
    {
      "epoch": 0.47534766118836913,
      "grad_norm": 0.24156448245048523,
      "learning_rate": 3.14206831255667e-05,
      "loss": 1.7964,
      "step": 470
    },
    {
      "epoch": 0.4854614412136536,
      "grad_norm": 0.32304009795188904,
      "learning_rate": 3.056302334890786e-05,
      "loss": 1.7529,
      "step": 480
    },
    {
      "epoch": 0.49557522123893805,
      "grad_norm": 0.27490848302841187,
      "learning_rate": 2.9698417138311658e-05,
      "loss": 1.7542,
      "step": 490
    },
    {
      "epoch": 0.5056890012642224,
      "grad_norm": 0.2414240837097168,
      "learning_rate": 2.88279441099413e-05,
      "loss": 1.7265,
      "step": 500
    },
    {
      "epoch": 0.515802781289507,
      "grad_norm": 0.24709856510162354,
      "learning_rate": 2.7952691205735832e-05,
      "loss": 1.7551,
      "step": 510
    },
    {
      "epoch": 0.5259165613147914,
      "grad_norm": 0.2721826732158661,
      "learning_rate": 2.7073751336167886e-05,
      "loss": 1.7354,
      "step": 520
    },
    {
      "epoch": 0.5360303413400759,
      "grad_norm": 0.26764270663261414,
      "learning_rate": 2.6192222015548645e-05,
      "loss": 1.7715,
      "step": 530
    },
    {
      "epoch": 0.5461441213653603,
      "grad_norm": 0.25439366698265076,
      "learning_rate": 2.5309203991584073e-05,
      "loss": 1.7809,
      "step": 540
    },
    {
      "epoch": 0.5562579013906448,
      "grad_norm": 0.30701911449432373,
      "learning_rate": 2.4425799870893682e-05,
      "loss": 1.7644,
      "step": 550
    },
    {
      "epoch": 0.5663716814159292,
      "grad_norm": 0.24427258968353271,
      "learning_rate": 2.3543112742208078e-05,
      "loss": 1.7809,
      "step": 560
    },
    {
      "epoch": 0.5764854614412137,
      "grad_norm": 0.31044137477874756,
      "learning_rate": 2.266224479896458e-05,
      "loss": 1.756,
      "step": 570
    },
    {
      "epoch": 0.5865992414664981,
      "grad_norm": 0.27852073311805725,
      "learning_rate": 2.17842959630207e-05,
      "loss": 1.7666,
      "step": 580
    },
    {
      "epoch": 0.5967130214917825,
      "grad_norm": 0.2860328257083893,
      "learning_rate": 2.091036251120411e-05,
      "loss": 1.7541,
      "step": 590
    },
    {
      "epoch": 0.606826801517067,
      "grad_norm": 0.2761785089969635,
      "learning_rate": 2.0041535706414138e-05,
      "loss": 1.7656,
      "step": 600
    },
    {
      "epoch": 0.6169405815423514,
      "grad_norm": 0.25353536009788513,
      "learning_rate": 1.917890043498397e-05,
      "loss": 1.7586,
      "step": 610
    },
    {
      "epoch": 0.6270543615676359,
      "grad_norm": 0.2852698564529419,
      "learning_rate": 1.832353385200522e-05,
      "loss": 1.7354,
      "step": 620
    },
    {
      "epoch": 0.6371681415929203,
      "grad_norm": 0.27082163095474243,
      "learning_rate": 1.747650403630629e-05,
      "loss": 1.762,
      "step": 630
    },
    {
      "epoch": 0.6472819216182049,
      "grad_norm": 0.24518464505672455,
      "learning_rate": 1.66388686567641e-05,
      "loss": 1.7514,
      "step": 640
    },
    {
      "epoch": 0.6573957016434893,
      "grad_norm": 0.3101823031902313,
      "learning_rate": 1.5811673651614514e-05,
      "loss": 1.7323,
      "step": 650
    },
    {
      "epoch": 0.6675094816687737,
      "grad_norm": 0.27251914143562317,
      "learning_rate": 1.4995951922410551e-05,
      "loss": 1.7726,
      "step": 660
    },
    {
      "epoch": 0.6776232616940582,
      "grad_norm": 0.2582049071788788,
      "learning_rate": 1.4192722044259275e-05,
      "loss": 1.7459,
      "step": 670
    },
    {
      "epoch": 0.6877370417193426,
      "grad_norm": 0.2840460240840912,
      "learning_rate": 1.340298699394777e-05,
      "loss": 1.729,
      "step": 680
    },
    {
      "epoch": 0.6978508217446271,
      "grad_norm": 0.2834312915802002,
      "learning_rate": 1.2627732897546535e-05,
      "loss": 1.7216,
      "step": 690
    },
    {
      "epoch": 0.7079646017699115,
      "grad_norm": 0.27712661027908325,
      "learning_rate": 1.186792779905386e-05,
      "loss": 1.7572,
      "step": 700
    },
    {
      "epoch": 0.718078381795196,
      "grad_norm": 0.27337151765823364,
      "learning_rate": 1.1124520451619048e-05,
      "loss": 1.741,
      "step": 710
    },
    {
      "epoch": 0.7281921618204804,
      "grad_norm": 0.2533912658691406,
      "learning_rate": 1.0398439132853696e-05,
      "loss": 1.771,
      "step": 720
    },
    {
      "epoch": 0.7383059418457648,
      "grad_norm": 0.24308809638023376,
      "learning_rate": 9.690590485710311e-06,
      "loss": 1.7094,
      "step": 730
    },
    {
      "epoch": 0.7484197218710493,
      "grad_norm": 0.2828107476234436,
      "learning_rate": 9.001858386375744e-06,
      "loss": 1.7145,
      "step": 740
    },
    {
      "epoch": 0.7585335018963337,
      "grad_norm": 0.26573511958122253,
      "learning_rate": 8.333102840593015e-06,
      "loss": 1.7757,
      "step": 750
    },
    {
      "epoch": 0.7686472819216182,
      "grad_norm": 0.24994178116321564,
      "learning_rate": 7.68515890978963e-06,
      "loss": 1.7445,
      "step": 760
    },
    {
      "epoch": 0.7787610619469026,
      "grad_norm": 0.2822398841381073,
      "learning_rate": 7.058835668353409e-06,
      "loss": 1.7122,
      "step": 770
    },
    {
      "epoch": 0.7888748419721872,
      "grad_norm": 0.3666843771934509,
      "learning_rate": 6.454915193357772e-06,
      "loss": 1.7412,
      "step": 780
    },
    {
      "epoch": 0.7989886219974716,
      "grad_norm": 0.27069199085235596,
      "learning_rate": 5.874151587998023e-06,
      "loss": 1.7443,
      "step": 790
    },
    {
      "epoch": 0.809102402022756,
      "grad_norm": 0.2951764464378357,
      "learning_rate": 5.317270039958058e-06,
      "loss": 1.7275,
      "step": 800
    },
    {
      "epoch": 0.8192161820480405,
      "grad_norm": 0.27236202359199524,
      "learning_rate": 4.784965915883274e-06,
      "loss": 1.7312,
      "step": 810
    },
    {
      "epoch": 0.8293299620733249,
      "grad_norm": 0.2640001177787781,
      "learning_rate": 4.277903893090407e-06,
      "loss": 1.7096,
      "step": 820
    },
    {
      "epoch": 0.8394437420986094,
      "grad_norm": 0.30069518089294434,
      "learning_rate": 3.7967171295984925e-06,
      "loss": 1.7635,
      "step": 830
    },
    {
      "epoch": 0.8495575221238938,
      "grad_norm": 0.2520219683647156,
      "learning_rate": 3.342006473517362e-06,
      "loss": 1.7529,
      "step": 840
    },
    {
      "epoch": 0.8596713021491783,
      "grad_norm": 0.26766538619995117,
      "learning_rate": 2.9143397127808393e-06,
      "loss": 1.7302,
      "step": 850
    },
    {
      "epoch": 0.8697850821744627,
      "grad_norm": 0.2785772383213043,
      "learning_rate": 2.5142508661615077e-06,
      "loss": 1.7374,
      "step": 860
    },
    {
      "epoch": 0.8798988621997471,
      "grad_norm": 0.3111814558506012,
      "learning_rate": 2.1422395164523573e-06,
      "loss": 1.7211,
      "step": 870
    },
    {
      "epoch": 0.8900126422250316,
      "grad_norm": 0.2579687535762787,
      "learning_rate": 1.7987701866479329e-06,
      "loss": 1.7113,
      "step": 880
    },
    {
      "epoch": 0.900126422250316,
      "grad_norm": 0.38696742057800293,
      "learning_rate": 1.4842717599039047e-06,
      "loss": 1.736,
      "step": 890
    },
    {
      "epoch": 0.9102402022756005,
      "grad_norm": 0.2710600197315216,
      "learning_rate": 1.1991369439994349e-06,
      "loss": 1.7054,
      "step": 900
    },
    {
      "epoch": 0.9203539823008849,
      "grad_norm": 0.27956292033195496,
      "learning_rate": 9.437217809709714e-07,
      "loss": 1.7388,
      "step": 910
    },
    {
      "epoch": 0.9304677623261695,
      "grad_norm": 0.2631407082080841,
      "learning_rate": 7.183452025297937e-07,
      "loss": 1.7099,
      "step": 920
    },
    {
      "epoch": 0.9405815423514539,
      "grad_norm": 0.2510451376438141,
      "learning_rate": 5.232886318184971e-07,
      "loss": 1.7122,
      "step": 930
    },
    {
      "epoch": 0.9506953223767383,
      "grad_norm": 0.2797139883041382,
      "learning_rate": 3.587956320036362e-07,
      "loss": 1.7152,
      "step": 940
    },
    {
      "epoch": 0.9608091024020228,
      "grad_norm": 0.2599884271621704,
      "learning_rate": 2.250716021433702e-07,
      "loss": 1.7371,
      "step": 950
    },
    {
      "epoch": 0.9709228824273072,
      "grad_norm": 0.31965357065200806,
      "learning_rate": 1.2228352070983719e-07,
      "loss": 1.7437,
      "step": 960
    },
    {
      "epoch": 0.9810366624525917,
      "grad_norm": 0.33041146397590637,
      "learning_rate": 5.05597370865335e-08,
      "loss": 1.7488,
      "step": 970
    },
    {
      "epoch": 0.9911504424778761,
      "grad_norm": 0.2697661519050598,
      "learning_rate": 9.98981130106158e-09,
      "loss": 1.718,
      "step": 980
    },
    {
      "epoch": 0.9992414664981036,
      "step": 988,
      "total_flos": 1.4618955817345352e+18,
      "train_loss": 1.7979252251536257,
      "train_runtime": 11520.2382,
      "train_samples_per_second": 2.746,
      "train_steps_per_second": 0.086
    }
  ],
  "logging_steps": 10,
  "max_steps": 988,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4618955817345352e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}