|
{
  "best_metric": 0.771354079246521,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.10055304172951232,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001005530417295123,
      "grad_norm": 2.4352548122406006,
      "learning_rate": 1e-05,
      "loss": 0.6833,
      "step": 1
    },
    {
      "epoch": 0.001005530417295123,
      "eval_loss": 0.859174907207489,
      "eval_runtime": 144.4981,
      "eval_samples_per_second": 11.592,
      "eval_steps_per_second": 2.9,
      "step": 1
    },
    {
      "epoch": 0.002011060834590246,
      "grad_norm": 2.3664867877960205,
      "learning_rate": 2e-05,
      "loss": 0.6703,
      "step": 2
    },
    {
      "epoch": 0.0030165912518853697,
      "grad_norm": 2.0348565578460693,
      "learning_rate": 3e-05,
      "loss": 0.6261,
      "step": 3
    },
    {
      "epoch": 0.004022121669180492,
      "grad_norm": 0.7516242265701294,
      "learning_rate": 4e-05,
      "loss": 0.5305,
      "step": 4
    },
    {
      "epoch": 0.005027652086475616,
      "grad_norm": 1.6016530990600586,
      "learning_rate": 5e-05,
      "loss": 0.5401,
      "step": 5
    },
    {
      "epoch": 0.006033182503770739,
      "grad_norm": 0.9149666428565979,
      "learning_rate": 6e-05,
      "loss": 0.503,
      "step": 6
    },
    {
      "epoch": 0.007038712921065862,
      "grad_norm": 0.5540153980255127,
      "learning_rate": 7e-05,
      "loss": 0.4683,
      "step": 7
    },
    {
      "epoch": 0.008044243338360985,
      "grad_norm": 0.4465714693069458,
      "learning_rate": 8e-05,
      "loss": 0.4362,
      "step": 8
    },
    {
      "epoch": 0.00904977375565611,
      "grad_norm": 0.37307798862457275,
      "learning_rate": 9e-05,
      "loss": 0.4216,
      "step": 9
    },
    {
      "epoch": 0.010055304172951232,
      "grad_norm": 0.3346532881259918,
      "learning_rate": 0.0001,
      "loss": 0.426,
      "step": 10
    },
    {
      "epoch": 0.011060834590246356,
      "grad_norm": 0.2881012558937073,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.4022,
      "step": 11
    },
    {
      "epoch": 0.012066365007541479,
      "grad_norm": 0.252236008644104,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.3923,
      "step": 12
    },
    {
      "epoch": 0.013071895424836602,
      "grad_norm": 0.23015816509723663,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.3805,
      "step": 13
    },
    {
      "epoch": 0.014077425842131725,
      "grad_norm": 0.34804385900497437,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.4176,
      "step": 14
    },
    {
      "epoch": 0.015082956259426848,
      "grad_norm": 0.23283986747264862,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.4384,
      "step": 15
    },
    {
      "epoch": 0.01608848667672197,
      "grad_norm": 3.591134786605835,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.6849,
      "step": 16
    },
    {
      "epoch": 0.017094017094017096,
      "grad_norm": 0.9220762252807617,
      "learning_rate": 9.966546331768191e-05,
      "loss": 1.016,
      "step": 17
    },
    {
      "epoch": 0.01809954751131222,
      "grad_norm": 1.198287010192871,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.9521,
      "step": 18
    },
    {
      "epoch": 0.019105077928607342,
      "grad_norm": 0.5396149158477783,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.8114,
      "step": 19
    },
    {
      "epoch": 0.020110608345902465,
      "grad_norm": 0.3927422761917114,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.8306,
      "step": 20
    },
    {
      "epoch": 0.021116138763197588,
      "grad_norm": 0.35858792066574097,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.7893,
      "step": 21
    },
    {
      "epoch": 0.02212166918049271,
      "grad_norm": 0.3317579925060272,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.792,
      "step": 22
    },
    {
      "epoch": 0.023127199597787834,
      "grad_norm": 0.3556971549987793,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.7937,
      "step": 23
    },
    {
      "epoch": 0.024132730015082957,
      "grad_norm": 0.3509909212589264,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.8118,
      "step": 24
    },
    {
      "epoch": 0.02513826043237808,
      "grad_norm": 0.21706604957580566,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.7938,
      "step": 25
    },
    {
      "epoch": 0.026143790849673203,
      "grad_norm": 0.3561152219772339,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.8021,
      "step": 26
    },
    {
      "epoch": 0.027149321266968326,
      "grad_norm": 0.23132124543190002,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.783,
      "step": 27
    },
    {
      "epoch": 0.02815485168426345,
      "grad_norm": 0.22269226610660553,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.7959,
      "step": 28
    },
    {
      "epoch": 0.029160382101558573,
      "grad_norm": 0.24683009088039398,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.7602,
      "step": 29
    },
    {
      "epoch": 0.030165912518853696,
      "grad_norm": 0.25816789269447327,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.797,
      "step": 30
    },
    {
      "epoch": 0.03117144293614882,
      "grad_norm": 0.2194022387266159,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.7661,
      "step": 31
    },
    {
      "epoch": 0.03217697335344394,
      "grad_norm": 0.20569732785224915,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.7757,
      "step": 32
    },
    {
      "epoch": 0.033182503770739065,
      "grad_norm": 0.26061707735061646,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.8045,
      "step": 33
    },
    {
      "epoch": 0.03418803418803419,
      "grad_norm": 0.2521941065788269,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.771,
      "step": 34
    },
    {
      "epoch": 0.03519356460532931,
      "grad_norm": 0.7167894244194031,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.9258,
      "step": 35
    },
    {
      "epoch": 0.03619909502262444,
      "grad_norm": 0.7023014426231384,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.9272,
      "step": 36
    },
    {
      "epoch": 0.03720462543991956,
      "grad_norm": 0.7939850091934204,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.8852,
      "step": 37
    },
    {
      "epoch": 0.038210155857214684,
      "grad_norm": 0.37145453691482544,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.8617,
      "step": 38
    },
    {
      "epoch": 0.0392156862745098,
      "grad_norm": 0.36598095297813416,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.8387,
      "step": 39
    },
    {
      "epoch": 0.04022121669180493,
      "grad_norm": 0.3641310930252075,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.8293,
      "step": 40
    },
    {
      "epoch": 0.04122674710910005,
      "grad_norm": 0.40900740027427673,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.8166,
      "step": 41
    },
    {
      "epoch": 0.042232277526395176,
      "grad_norm": 0.40440893173217773,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.7699,
      "step": 42
    },
    {
      "epoch": 0.043237807943690296,
      "grad_norm": 0.4703547954559326,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.789,
      "step": 43
    },
    {
      "epoch": 0.04424333836098542,
      "grad_norm": 0.3685894012451172,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.7647,
      "step": 44
    },
    {
      "epoch": 0.04524886877828054,
      "grad_norm": 0.36992567777633667,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.7527,
      "step": 45
    },
    {
      "epoch": 0.04625439919557567,
      "grad_norm": 0.8148142099380493,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.7516,
      "step": 46
    },
    {
      "epoch": 0.04725992961287079,
      "grad_norm": 2.1370339393615723,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.7484,
      "step": 47
    },
    {
      "epoch": 0.048265460030165915,
      "grad_norm": 0.40228700637817383,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.7357,
      "step": 48
    },
    {
      "epoch": 0.049270990447461034,
      "grad_norm": 0.6028892397880554,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.7278,
      "step": 49
    },
    {
      "epoch": 0.05027652086475616,
      "grad_norm": 0.35520774126052856,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.7097,
      "step": 50
    },
    {
      "epoch": 0.05027652086475616,
      "eval_loss": 0.7820449471473694,
      "eval_runtime": 145.6031,
      "eval_samples_per_second": 11.504,
      "eval_steps_per_second": 2.878,
      "step": 50
    },
    {
      "epoch": 0.05128205128205128,
      "grad_norm": 3.0911669731140137,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.7859,
      "step": 51
    },
    {
      "epoch": 0.05228758169934641,
      "grad_norm": 1.030360221862793,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.6146,
      "step": 52
    },
    {
      "epoch": 0.053293112116641526,
      "grad_norm": 0.717653751373291,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.5158,
      "step": 53
    },
    {
      "epoch": 0.05429864253393665,
      "grad_norm": 0.537287175655365,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.4709,
      "step": 54
    },
    {
      "epoch": 0.05530417295123177,
      "grad_norm": 0.3627510368824005,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.4339,
      "step": 55
    },
    {
      "epoch": 0.0563097033685269,
      "grad_norm": 0.2506343424320221,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.4205,
      "step": 56
    },
    {
      "epoch": 0.05731523378582202,
      "grad_norm": 0.24423518776893616,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.4194,
      "step": 57
    },
    {
      "epoch": 0.058320764203117145,
      "grad_norm": 0.29906725883483887,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.4156,
      "step": 58
    },
    {
      "epoch": 0.059326294620412265,
      "grad_norm": 0.22299951314926147,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.4193,
      "step": 59
    },
    {
      "epoch": 0.06033182503770739,
      "grad_norm": 0.20469671487808228,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.3833,
      "step": 60
    },
    {
      "epoch": 0.06133735545500251,
      "grad_norm": 0.25047266483306885,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.3803,
      "step": 61
    },
    {
      "epoch": 0.06234288587229764,
      "grad_norm": 0.17370571196079254,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.3761,
      "step": 62
    },
    {
      "epoch": 0.06334841628959276,
      "grad_norm": 0.27698203921318054,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.3905,
      "step": 63
    },
    {
      "epoch": 0.06435394670688788,
      "grad_norm": 0.14930705726146698,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.3794,
      "step": 64
    },
    {
      "epoch": 0.06535947712418301,
      "grad_norm": 0.2859864830970764,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.4343,
      "step": 65
    },
    {
      "epoch": 0.06636500754147813,
      "grad_norm": 0.3993275761604309,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.2331,
      "step": 66
    },
    {
      "epoch": 0.06737053795877325,
      "grad_norm": 2.1870105266571045,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.9635,
      "step": 67
    },
    {
      "epoch": 0.06837606837606838,
      "grad_norm": 0.7155138254165649,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.8389,
      "step": 68
    },
    {
      "epoch": 0.0693815987933635,
      "grad_norm": 0.3495980501174927,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.7636,
      "step": 69
    },
    {
      "epoch": 0.07038712921065862,
      "grad_norm": 0.28488680720329285,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.779,
      "step": 70
    },
    {
      "epoch": 0.07139265962795374,
      "grad_norm": 0.321956068277359,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.7593,
      "step": 71
    },
    {
      "epoch": 0.07239819004524888,
      "grad_norm": 0.30106544494628906,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.7582,
      "step": 72
    },
    {
      "epoch": 0.073403720462544,
      "grad_norm": 0.2426968514919281,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.7843,
      "step": 73
    },
    {
      "epoch": 0.07440925087983911,
      "grad_norm": 0.23462745547294617,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.7782,
      "step": 74
    },
    {
      "epoch": 0.07541478129713423,
      "grad_norm": 0.19406434893608093,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.7624,
      "step": 75
    },
    {
      "epoch": 0.07642031171442937,
      "grad_norm": 0.2184755802154541,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.7927,
      "step": 76
    },
    {
      "epoch": 0.07742584213172449,
      "grad_norm": 0.21943315863609314,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.7767,
      "step": 77
    },
    {
      "epoch": 0.0784313725490196,
      "grad_norm": 0.16851526498794556,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.7379,
      "step": 78
    },
    {
      "epoch": 0.07943690296631473,
      "grad_norm": 0.19421535730361938,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.7925,
      "step": 79
    },
    {
      "epoch": 0.08044243338360986,
      "grad_norm": 0.17354494333267212,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.7493,
      "step": 80
    },
    {
      "epoch": 0.08144796380090498,
      "grad_norm": 0.1771775633096695,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.7674,
      "step": 81
    },
    {
      "epoch": 0.0824534942182001,
      "grad_norm": 0.23186761140823364,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.7458,
      "step": 82
    },
    {
      "epoch": 0.08345902463549522,
      "grad_norm": 0.19266724586486816,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.7253,
      "step": 83
    },
    {
      "epoch": 0.08446455505279035,
      "grad_norm": 0.22116349637508392,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.7195,
      "step": 84
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 0.3103989064693451,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.8048,
      "step": 85
    },
    {
      "epoch": 0.08647561588738059,
      "grad_norm": 0.443766713142395,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.7976,
      "step": 86
    },
    {
      "epoch": 0.08748114630467571,
      "grad_norm": 0.7230583429336548,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.8926,
      "step": 87
    },
    {
      "epoch": 0.08848667672197084,
      "grad_norm": 0.30479729175567627,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.8041,
      "step": 88
    },
    {
      "epoch": 0.08949220713926596,
      "grad_norm": 0.2751973867416382,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.8375,
      "step": 89
    },
    {
      "epoch": 0.09049773755656108,
      "grad_norm": 0.5401359796524048,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.7718,
      "step": 90
    },
    {
      "epoch": 0.0915032679738562,
      "grad_norm": 0.3586420714855194,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.7999,
      "step": 91
    },
    {
      "epoch": 0.09250879839115134,
      "grad_norm": 0.26797112822532654,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.7566,
      "step": 92
    },
    {
      "epoch": 0.09351432880844646,
      "grad_norm": 0.2583998739719391,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.7565,
      "step": 93
    },
    {
      "epoch": 0.09451985922574158,
      "grad_norm": 0.258475661277771,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.7338,
      "step": 94
    },
    {
      "epoch": 0.0955253896430367,
      "grad_norm": 0.22778931260108948,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.7501,
      "step": 95
    },
    {
      "epoch": 0.09653092006033183,
      "grad_norm": 0.27645596861839294,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.7565,
      "step": 96
    },
    {
      "epoch": 0.09753645047762695,
      "grad_norm": 0.28205233812332153,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.7319,
      "step": 97
    },
    {
      "epoch": 0.09854198089492207,
      "grad_norm": 0.23500730097293854,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.7307,
      "step": 98
    },
    {
      "epoch": 0.09954751131221719,
      "grad_norm": 0.17771759629249573,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.7032,
      "step": 99
    },
    {
      "epoch": 0.10055304172951232,
      "grad_norm": 0.36362266540527344,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.6929,
      "step": 100
    },
    {
      "epoch": 0.10055304172951232,
      "eval_loss": 0.771354079246521,
      "eval_runtime": 145.706,
      "eval_samples_per_second": 11.496,
      "eval_steps_per_second": 2.876,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 8.28736052133888e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|