{
  "best_metric": 0.240097314119339,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.021830486274081753,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00021830486274081756,
      "grad_norm": 190.99996948242188,
      "learning_rate": 1e-05,
      "loss": 7.3753,
      "step": 1
    },
    {
      "epoch": 0.00021830486274081756,
      "eval_loss": 2.728501796722412,
      "eval_runtime": 730.2555,
      "eval_samples_per_second": 10.565,
      "eval_steps_per_second": 2.642,
      "step": 1
    },
    {
      "epoch": 0.00043660972548163513,
      "grad_norm": 188.52349853515625,
      "learning_rate": 2e-05,
      "loss": 7.5658,
      "step": 2
    },
    {
      "epoch": 0.0006549145882224527,
      "grad_norm": 147.88009643554688,
      "learning_rate": 3e-05,
      "loss": 6.325,
      "step": 3
    },
    {
      "epoch": 0.0008732194509632703,
      "grad_norm": 56.96106719970703,
      "learning_rate": 4e-05,
      "loss": 4.1736,
      "step": 4
    },
    {
      "epoch": 0.0010915243137040878,
      "grad_norm": 51.80082702636719,
      "learning_rate": 5e-05,
      "loss": 3.8419,
      "step": 5
    },
    {
      "epoch": 0.0013098291764449053,
      "grad_norm": 41.39720916748047,
      "learning_rate": 6e-05,
      "loss": 2.9984,
      "step": 6
    },
    {
      "epoch": 0.0015281340391857228,
      "grad_norm": 32.141746520996094,
      "learning_rate": 7e-05,
      "loss": 2.4535,
      "step": 7
    },
    {
      "epoch": 0.0017464389019265405,
      "grad_norm": 34.04977798461914,
      "learning_rate": 8e-05,
      "loss": 2.2783,
      "step": 8
    },
    {
      "epoch": 0.0019647437646673578,
      "grad_norm": 23.786035537719727,
      "learning_rate": 9e-05,
      "loss": 2.4103,
      "step": 9
    },
    {
      "epoch": 0.0021830486274081757,
      "grad_norm": 23.26265525817871,
      "learning_rate": 0.0001,
      "loss": 1.9067,
      "step": 10
    },
    {
      "epoch": 0.002401353490148993,
      "grad_norm": 26.522775650024414,
      "learning_rate": 9.999316524962345e-05,
      "loss": 1.9491,
      "step": 11
    },
    {
      "epoch": 0.0026196583528898107,
      "grad_norm": 23.938302993774414,
      "learning_rate": 9.997266286704631e-05,
      "loss": 1.9849,
      "step": 12
    },
    {
      "epoch": 0.002837963215630628,
      "grad_norm": 20.957582473754883,
      "learning_rate": 9.993849845741524e-05,
      "loss": 1.7137,
      "step": 13
    },
    {
      "epoch": 0.0030562680783714456,
      "grad_norm": 22.476621627807617,
      "learning_rate": 9.989068136093873e-05,
      "loss": 1.8734,
      "step": 14
    },
    {
      "epoch": 0.003274572941112263,
      "grad_norm": 21.35898780822754,
      "learning_rate": 9.98292246503335e-05,
      "loss": 1.3322,
      "step": 15
    },
    {
      "epoch": 0.003492877803853081,
      "grad_norm": 27.492908477783203,
      "learning_rate": 9.975414512725057e-05,
      "loss": 2.1707,
      "step": 16
    },
    {
      "epoch": 0.0037111826665938985,
      "grad_norm": 19.75967025756836,
      "learning_rate": 9.966546331768191e-05,
      "loss": 1.3851,
      "step": 17
    },
    {
      "epoch": 0.0039294875293347156,
      "grad_norm": 16.00577735900879,
      "learning_rate": 9.956320346634876e-05,
      "loss": 1.5938,
      "step": 18
    },
    {
      "epoch": 0.0041477923920755335,
      "grad_norm": 20.51640510559082,
      "learning_rate": 9.944739353007344e-05,
      "loss": 1.4207,
      "step": 19
    },
    {
      "epoch": 0.004366097254816351,
      "grad_norm": 16.361709594726562,
      "learning_rate": 9.931806517013612e-05,
      "loss": 1.5419,
      "step": 20
    },
    {
      "epoch": 0.0045844021175571684,
      "grad_norm": 18.289527893066406,
      "learning_rate": 9.917525374361912e-05,
      "loss": 1.0772,
      "step": 21
    },
    {
      "epoch": 0.004802706980297986,
      "grad_norm": 22.397504806518555,
      "learning_rate": 9.901899829374047e-05,
      "loss": 1.783,
      "step": 22
    },
    {
      "epoch": 0.005021011843038803,
      "grad_norm": 18.129150390625,
      "learning_rate": 9.884934153917997e-05,
      "loss": 1.3098,
      "step": 23
    },
    {
      "epoch": 0.005239316705779621,
      "grad_norm": 19.402833938598633,
      "learning_rate": 9.86663298624003e-05,
      "loss": 1.2821,
      "step": 24
    },
    {
      "epoch": 0.005457621568520438,
      "grad_norm": 30.574670791625977,
      "learning_rate": 9.847001329696653e-05,
      "loss": 1.3678,
      "step": 25
    },
    {
      "epoch": 0.005675926431261256,
      "grad_norm": 26.911649703979492,
      "learning_rate": 9.826044551386744e-05,
      "loss": 1.4161,
      "step": 26
    },
    {
      "epoch": 0.005894231294002074,
      "grad_norm": 14.460214614868164,
      "learning_rate": 9.803768380684242e-05,
      "loss": 1.4996,
      "step": 27
    },
    {
      "epoch": 0.006112536156742891,
      "grad_norm": 23.522342681884766,
      "learning_rate": 9.780178907671789e-05,
      "loss": 1.0764,
      "step": 28
    },
    {
      "epoch": 0.006330841019483709,
      "grad_norm": 24.953136444091797,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.5446,
      "step": 29
    },
    {
      "epoch": 0.006549145882224526,
      "grad_norm": 23.39252471923828,
      "learning_rate": 9.729086208503174e-05,
      "loss": 1.725,
      "step": 30
    },
    {
      "epoch": 0.006767450744965344,
      "grad_norm": 35.23050308227539,
      "learning_rate": 9.701596950580806e-05,
      "loss": 1.2337,
      "step": 31
    },
    {
      "epoch": 0.006985755607706162,
      "grad_norm": 23.742778778076172,
      "learning_rate": 9.672822322997305e-05,
      "loss": 1.161,
      "step": 32
    },
    {
      "epoch": 0.007204060470446979,
      "grad_norm": 18.19499397277832,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.7297,
      "step": 33
    },
    {
      "epoch": 0.007422365333187797,
      "grad_norm": 24.505098342895508,
      "learning_rate": 9.611448774886924e-05,
      "loss": 1.2868,
      "step": 34
    },
    {
      "epoch": 0.007640670195928614,
      "grad_norm": 24.310970306396484,
      "learning_rate": 9.578866633275288e-05,
      "loss": 1.2301,
      "step": 35
    },
    {
      "epoch": 0.007858975058669431,
      "grad_norm": 17.73895263671875,
      "learning_rate": 9.545032675245813e-05,
      "loss": 1.3757,
      "step": 36
    },
    {
      "epoch": 0.00807727992141025,
      "grad_norm": 18.120927810668945,
      "learning_rate": 9.509956150664796e-05,
      "loss": 1.1695,
      "step": 37
    },
    {
      "epoch": 0.008295584784151067,
      "grad_norm": 19.693620681762695,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.835,
      "step": 38
    },
    {
      "epoch": 0.008513889646891884,
      "grad_norm": 16.011274337768555,
      "learning_rate": 9.43611409721806e-05,
      "loss": 1.9548,
      "step": 39
    },
    {
      "epoch": 0.008732194509632703,
      "grad_norm": 18.945898056030273,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.9979,
      "step": 40
    },
    {
      "epoch": 0.00895049937237352,
      "grad_norm": 14.817151069641113,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.5393,
      "step": 41
    },
    {
      "epoch": 0.009168804235114337,
      "grad_norm": 17.74724578857422,
      "learning_rate": 9.316282404787871e-05,
      "loss": 1.2671,
      "step": 42
    },
    {
      "epoch": 0.009387109097855154,
      "grad_norm": 23.253944396972656,
      "learning_rate": 9.273963562927695e-05,
      "loss": 1.5293,
      "step": 43
    },
    {
      "epoch": 0.009605413960595973,
      "grad_norm": 17.558008193969727,
      "learning_rate": 9.230476262104677e-05,
      "loss": 1.7826,
      "step": 44
    },
    {
      "epoch": 0.00982371882333679,
      "grad_norm": 17.273805618286133,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.8188,
      "step": 45
    },
    {
      "epoch": 0.010042023686077607,
      "grad_norm": 29.831523895263672,
      "learning_rate": 9.140044155740101e-05,
      "loss": 2.2406,
      "step": 46
    },
    {
      "epoch": 0.010260328548818426,
      "grad_norm": 37.47415542602539,
      "learning_rate": 9.093124073433463e-05,
      "loss": 1.6603,
      "step": 47
    },
    {
      "epoch": 0.010478633411559243,
      "grad_norm": 21.16242218017578,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.3838,
      "step": 48
    },
    {
      "epoch": 0.01069693827430006,
      "grad_norm": 32.22922134399414,
      "learning_rate": 8.995939984474624e-05,
      "loss": 2.6782,
      "step": 49
    },
    {
      "epoch": 0.010915243137040877,
      "grad_norm": 39.75159454345703,
      "learning_rate": 8.945702546981969e-05,
      "loss": 1.8772,
      "step": 50
    },
    {
      "epoch": 0.010915243137040877,
      "eval_loss": 0.39272409677505493,
      "eval_runtime": 733.727,
      "eval_samples_per_second": 10.515,
      "eval_steps_per_second": 2.629,
      "step": 50
    },
    {
      "epoch": 0.011133547999781696,
      "grad_norm": 45.654075622558594,
      "learning_rate": 8.894386393810563e-05,
      "loss": 2.172,
      "step": 51
    },
    {
      "epoch": 0.011351852862522513,
      "grad_norm": 17.691335678100586,
      "learning_rate": 8.842005554284296e-05,
      "loss": 1.8694,
      "step": 52
    },
    {
      "epoch": 0.01157015772526333,
      "grad_norm": 11.43874454498291,
      "learning_rate": 8.788574348801675e-05,
      "loss": 1.0365,
      "step": 53
    },
    {
      "epoch": 0.011788462588004148,
      "grad_norm": 9.334946632385254,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.8827,
      "step": 54
    },
    {
      "epoch": 0.012006767450744965,
      "grad_norm": 8.754205703735352,
      "learning_rate": 8.678619553365659e-05,
      "loss": 1.0497,
      "step": 55
    },
    {
      "epoch": 0.012225072313485782,
      "grad_norm": 10.076279640197754,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.9138,
      "step": 56
    },
    {
      "epoch": 0.012443377176226601,
      "grad_norm": 8.010160446166992,
      "learning_rate": 8.564642241456986e-05,
      "loss": 1.0704,
      "step": 57
    },
    {
      "epoch": 0.012661682038967418,
      "grad_norm": 11.856115341186523,
      "learning_rate": 8.506183921362443e-05,
      "loss": 1.3049,
      "step": 58
    },
    {
      "epoch": 0.012879986901708235,
      "grad_norm": 12.080299377441406,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.6587,
      "step": 59
    },
    {
      "epoch": 0.013098291764449052,
      "grad_norm": 10.92935562133789,
      "learning_rate": 8.386407858128706e-05,
      "loss": 1.1809,
      "step": 60
    },
    {
      "epoch": 0.013316596627189871,
      "grad_norm": 9.630249977111816,
      "learning_rate": 8.32512286056924e-05,
      "loss": 1.1388,
      "step": 61
    },
    {
      "epoch": 0.013534901489930688,
      "grad_norm": 9.779946327209473,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.9071,
      "step": 62
    },
    {
      "epoch": 0.013753206352671505,
      "grad_norm": 9.861835479736328,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.8605,
      "step": 63
    },
    {
      "epoch": 0.013971511215412324,
      "grad_norm": 10.891703605651855,
      "learning_rate": 8.135881792367686e-05,
      "loss": 1.3454,
      "step": 64
    },
    {
      "epoch": 0.014189816078153141,
      "grad_norm": 6.902316570281982,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.6042,
      "step": 65
    },
    {
      "epoch": 0.014408120940893958,
      "grad_norm": 13.346320152282715,
      "learning_rate": 8.005405736415126e-05,
      "loss": 1.1173,
      "step": 66
    },
    {
      "epoch": 0.014626425803634775,
      "grad_norm": 14.308423042297363,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.0399,
      "step": 67
    },
    {
      "epoch": 0.014844730666375594,
      "grad_norm": 15.66178035736084,
      "learning_rate": 7.871643313414718e-05,
      "loss": 1.2423,
      "step": 68
    },
    {
      "epoch": 0.015063035529116411,
      "grad_norm": 9.643138885498047,
      "learning_rate": 7.803575286758364e-05,
      "loss": 1.0394,
      "step": 69
    },
    {
      "epoch": 0.015281340391857228,
      "grad_norm": 11.629717826843262,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.8529,
      "step": 70
    },
    {
      "epoch": 0.015499645254598047,
      "grad_norm": 9.830907821655273,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.9022,
      "step": 71
    },
    {
      "epoch": 0.015717950117338862,
      "grad_norm": 11.573919296264648,
      "learning_rate": 7.594847868906076e-05,
      "loss": 1.0336,
      "step": 72
    },
    {
      "epoch": 0.01593625498007968,
      "grad_norm": 10.771193504333496,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.9029,
      "step": 73
    },
    {
      "epoch": 0.0161545598428205,
      "grad_norm": 11.649493217468262,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.9517,
      "step": 74
    },
    {
      "epoch": 0.016372864705561315,
      "grad_norm": 10.555853843688965,
      "learning_rate": 7.379736965185368e-05,
      "loss": 1.0837,
      "step": 75
    },
    {
      "epoch": 0.016591169568302134,
      "grad_norm": 12.330526351928711,
      "learning_rate": 7.30670581489344e-05,
      "loss": 1.3982,
      "step": 76
    },
    {
      "epoch": 0.016809474431042953,
      "grad_norm": 8.561495780944824,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.5557,
      "step": 77
    },
    {
      "epoch": 0.017027779293783768,
      "grad_norm": 10.27099609375,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.9408,
      "step": 78
    },
    {
      "epoch": 0.017246084156524587,
      "grad_norm": 19.418272018432617,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.6873,
      "step": 79
    },
    {
      "epoch": 0.017464389019265406,
      "grad_norm": 15.824254989624023,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.8229,
      "step": 80
    },
    {
      "epoch": 0.01768269388200622,
      "grad_norm": 16.62073516845703,
      "learning_rate": 6.932495846462261e-05,
      "loss": 1.2136,
      "step": 81
    },
    {
      "epoch": 0.01790099874474704,
      "grad_norm": 11.906272888183594,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.5912,
      "step": 82
    },
    {
      "epoch": 0.01811930360748786,
      "grad_norm": 9.35510540008545,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.6718,
      "step": 83
    },
    {
      "epoch": 0.018337608470228674,
      "grad_norm": 18.38622283935547,
      "learning_rate": 6.701465872208216e-05,
      "loss": 1.6497,
      "step": 84
    },
    {
      "epoch": 0.018555913332969493,
      "grad_norm": 19.09699058532715,
      "learning_rate": 6.623497346023418e-05,
      "loss": 1.2194,
      "step": 85
    },
    {
      "epoch": 0.018774218195710308,
      "grad_norm": 13.23594856262207,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.7641,
      "step": 86
    },
    {
      "epoch": 0.018992523058451127,
      "grad_norm": 13.044976234436035,
      "learning_rate": 6.466250186922325e-05,
      "loss": 1.1724,
      "step": 87
    },
    {
      "epoch": 0.019210827921191945,
      "grad_norm": 17.994647979736328,
      "learning_rate": 6.387014543809223e-05,
      "loss": 1.4368,
      "step": 88
    },
    {
      "epoch": 0.01942913278393276,
      "grad_norm": 11.946012496948242,
      "learning_rate": 6.307399704769099e-05,
      "loss": 1.17,
      "step": 89
    },
    {
      "epoch": 0.01964743764667358,
      "grad_norm": 13.894006729125977,
      "learning_rate": 6.227427435703997e-05,
      "loss": 1.0433,
      "step": 90
    },
    {
      "epoch": 0.0198657425094144,
      "grad_norm": 10.097606658935547,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.7098,
      "step": 91
    },
    {
      "epoch": 0.020084047372155214,
      "grad_norm": 15.475691795349121,
      "learning_rate": 6.066498153718735e-05,
      "loss": 1.2925,
      "step": 92
    },
    {
      "epoch": 0.020302352234896032,
      "grad_norm": 15.94665241241455,
      "learning_rate": 5.985585137257401e-05,
      "loss": 1.0256,
      "step": 93
    },
    {
      "epoch": 0.02052065709763685,
      "grad_norm": 8.952512741088867,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.5552,
      "step": 94
    },
    {
      "epoch": 0.020738961960377666,
      "grad_norm": 14.608858108520508,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.9805,
      "step": 95
    },
    {
      "epoch": 0.020957266823118485,
      "grad_norm": 9.33961296081543,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.32,
      "step": 96
    },
    {
      "epoch": 0.021175571685859304,
      "grad_norm": 16.70509147644043,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.931,
      "step": 97
    },
    {
      "epoch": 0.02139387654860012,
      "grad_norm": 17.932279586791992,
      "learning_rate": 5.577423184847932e-05,
      "loss": 1.3505,
      "step": 98
    },
    {
      "epoch": 0.021612181411340938,
      "grad_norm": 25.598703384399414,
      "learning_rate": 5.495227651252315e-05,
      "loss": 1.713,
      "step": 99
    },
    {
      "epoch": 0.021830486274081753,
      "grad_norm": 32.65713119506836,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 2.8362,
      "step": 100
    },
    {
      "epoch": 0.021830486274081753,
      "eval_loss": 0.240097314119339,
      "eval_runtime": 734.0134,
      "eval_samples_per_second": 10.511,
      "eval_steps_per_second": 2.628,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.430998008987648e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}