|
{
  "best_metric": 2.7514498233795166,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.9422850412249706,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004711425206124852,
      "grad_norm": 1.3946541547775269,
      "learning_rate": 1e-05,
      "loss": 4.1193,
      "step": 1
    },
    {
      "epoch": 0.004711425206124852,
      "eval_loss": 4.429855823516846,
      "eval_runtime": 5.7087,
      "eval_samples_per_second": 62.712,
      "eval_steps_per_second": 15.766,
      "step": 1
    },
    {
      "epoch": 0.009422850412249705,
      "grad_norm": 1.7446237802505493,
      "learning_rate": 2e-05,
      "loss": 3.5333,
      "step": 2
    },
    {
      "epoch": 0.014134275618374558,
      "grad_norm": 1.2714354991912842,
      "learning_rate": 3e-05,
      "loss": 3.8054,
      "step": 3
    },
    {
      "epoch": 0.01884570082449941,
      "grad_norm": 1.037392020225525,
      "learning_rate": 4e-05,
      "loss": 4.2302,
      "step": 4
    },
    {
      "epoch": 0.023557126030624265,
      "grad_norm": 1.5844902992248535,
      "learning_rate": 5e-05,
      "loss": 4.0444,
      "step": 5
    },
    {
      "epoch": 0.028268551236749116,
      "grad_norm": 1.3372470140457153,
      "learning_rate": 6e-05,
      "loss": 4.1766,
      "step": 6
    },
    {
      "epoch": 0.03297997644287397,
      "grad_norm": 1.3058269023895264,
      "learning_rate": 7e-05,
      "loss": 4.1232,
      "step": 7
    },
    {
      "epoch": 0.03769140164899882,
      "grad_norm": 1.0855653285980225,
      "learning_rate": 8e-05,
      "loss": 4.4008,
      "step": 8
    },
    {
      "epoch": 0.04240282685512368,
      "grad_norm": 1.2069677114486694,
      "learning_rate": 9e-05,
      "loss": 4.0673,
      "step": 9
    },
    {
      "epoch": 0.04711425206124853,
      "grad_norm": 1.1353275775909424,
      "learning_rate": 0.0001,
      "loss": 4.2198,
      "step": 10
    },
    {
      "epoch": 0.05182567726737338,
      "grad_norm": 0.9457094669342041,
      "learning_rate": 9.999316524962345e-05,
      "loss": 4.1115,
      "step": 11
    },
    {
      "epoch": 0.05653710247349823,
      "grad_norm": 1.022911787033081,
      "learning_rate": 9.997266286704631e-05,
      "loss": 4.0602,
      "step": 12
    },
    {
      "epoch": 0.061248527679623084,
      "grad_norm": 1.184678077697754,
      "learning_rate": 9.993849845741524e-05,
      "loss": 4.0178,
      "step": 13
    },
    {
      "epoch": 0.06595995288574794,
      "grad_norm": 1.3648606538772583,
      "learning_rate": 9.989068136093873e-05,
      "loss": 3.8917,
      "step": 14
    },
    {
      "epoch": 0.0706713780918728,
      "grad_norm": 1.3144981861114502,
      "learning_rate": 9.98292246503335e-05,
      "loss": 4.1258,
      "step": 15
    },
    {
      "epoch": 0.07538280329799764,
      "grad_norm": 1.241207480430603,
      "learning_rate": 9.975414512725057e-05,
      "loss": 3.8097,
      "step": 16
    },
    {
      "epoch": 0.0800942285041225,
      "grad_norm": 1.1597161293029785,
      "learning_rate": 9.966546331768191e-05,
      "loss": 3.9649,
      "step": 17
    },
    {
      "epoch": 0.08480565371024736,
      "grad_norm": 1.310815453529358,
      "learning_rate": 9.956320346634876e-05,
      "loss": 3.9916,
      "step": 18
    },
    {
      "epoch": 0.0895170789163722,
      "grad_norm": 1.0816928148269653,
      "learning_rate": 9.944739353007344e-05,
      "loss": 3.6277,
      "step": 19
    },
    {
      "epoch": 0.09422850412249706,
      "grad_norm": 1.219735026359558,
      "learning_rate": 9.931806517013612e-05,
      "loss": 3.6366,
      "step": 20
    },
    {
      "epoch": 0.0989399293286219,
      "grad_norm": 1.4704792499542236,
      "learning_rate": 9.917525374361912e-05,
      "loss": 3.4232,
      "step": 21
    },
    {
      "epoch": 0.10365135453474676,
      "grad_norm": 1.2693730592727661,
      "learning_rate": 9.901899829374047e-05,
      "loss": 3.9704,
      "step": 22
    },
    {
      "epoch": 0.10836277974087162,
      "grad_norm": 1.498766541481018,
      "learning_rate": 9.884934153917997e-05,
      "loss": 3.7581,
      "step": 23
    },
    {
      "epoch": 0.11307420494699646,
      "grad_norm": 1.684964656829834,
      "learning_rate": 9.86663298624003e-05,
      "loss": 3.9932,
      "step": 24
    },
    {
      "epoch": 0.11778563015312132,
      "grad_norm": 2.037986993789673,
      "learning_rate": 9.847001329696653e-05,
      "loss": 3.8869,
      "step": 25
    },
    {
      "epoch": 0.12249705535924617,
      "grad_norm": 2.0941431522369385,
      "learning_rate": 9.826044551386744e-05,
      "loss": 3.1425,
      "step": 26
    },
    {
      "epoch": 0.127208480565371,
      "grad_norm": 1.6686711311340332,
      "learning_rate": 9.803768380684242e-05,
      "loss": 2.9945,
      "step": 27
    },
    {
      "epoch": 0.13191990577149587,
      "grad_norm": 1.7616199254989624,
      "learning_rate": 9.780178907671789e-05,
      "loss": 3.4982,
      "step": 28
    },
    {
      "epoch": 0.13663133097762073,
      "grad_norm": 1.3488978147506714,
      "learning_rate": 9.755282581475769e-05,
      "loss": 3.2957,
      "step": 29
    },
    {
      "epoch": 0.1413427561837456,
      "grad_norm": 1.2035346031188965,
      "learning_rate": 9.729086208503174e-05,
      "loss": 3.0778,
      "step": 30
    },
    {
      "epoch": 0.14605418138987045,
      "grad_norm": 1.160496711730957,
      "learning_rate": 9.701596950580806e-05,
      "loss": 2.7507,
      "step": 31
    },
    {
      "epoch": 0.15076560659599528,
      "grad_norm": 1.103440284729004,
      "learning_rate": 9.672822322997305e-05,
      "loss": 3.2921,
      "step": 32
    },
    {
      "epoch": 0.15547703180212014,
      "grad_norm": 1.251758098602295,
      "learning_rate": 9.642770192448536e-05,
      "loss": 3.1819,
      "step": 33
    },
    {
      "epoch": 0.160188457008245,
      "grad_norm": 1.1060675382614136,
      "learning_rate": 9.611448774886924e-05,
      "loss": 3.3223,
      "step": 34
    },
    {
      "epoch": 0.16489988221436985,
      "grad_norm": 1.0669234991073608,
      "learning_rate": 9.578866633275288e-05,
      "loss": 2.9139,
      "step": 35
    },
    {
      "epoch": 0.1696113074204947,
      "grad_norm": 1.0646271705627441,
      "learning_rate": 9.545032675245813e-05,
      "loss": 2.9312,
      "step": 36
    },
    {
      "epoch": 0.17432273262661954,
      "grad_norm": 0.9158160090446472,
      "learning_rate": 9.509956150664796e-05,
      "loss": 2.7344,
      "step": 37
    },
    {
      "epoch": 0.1790341578327444,
      "grad_norm": 0.9490165114402771,
      "learning_rate": 9.473646649103818e-05,
      "loss": 3.3129,
      "step": 38
    },
    {
      "epoch": 0.18374558303886926,
      "grad_norm": 0.9325077533721924,
      "learning_rate": 9.43611409721806e-05,
      "loss": 3.0363,
      "step": 39
    },
    {
      "epoch": 0.18845700824499412,
      "grad_norm": 1.0368661880493164,
      "learning_rate": 9.397368756032445e-05,
      "loss": 2.7854,
      "step": 40
    },
    {
      "epoch": 0.19316843345111898,
      "grad_norm": 0.8712729215621948,
      "learning_rate": 9.357421218136386e-05,
      "loss": 2.7577,
      "step": 41
    },
    {
      "epoch": 0.1978798586572438,
      "grad_norm": 0.9019601345062256,
      "learning_rate": 9.316282404787871e-05,
      "loss": 2.995,
      "step": 42
    },
    {
      "epoch": 0.20259128386336867,
      "grad_norm": 0.9480728507041931,
      "learning_rate": 9.273963562927695e-05,
      "loss": 3.0736,
      "step": 43
    },
    {
      "epoch": 0.20730270906949352,
      "grad_norm": 0.8850538730621338,
      "learning_rate": 9.230476262104677e-05,
      "loss": 2.9836,
      "step": 44
    },
    {
      "epoch": 0.21201413427561838,
      "grad_norm": 0.8516348600387573,
      "learning_rate": 9.185832391312644e-05,
      "loss": 3.1142,
      "step": 45
    },
    {
      "epoch": 0.21672555948174324,
      "grad_norm": 0.9424264430999756,
      "learning_rate": 9.140044155740101e-05,
      "loss": 3.3857,
      "step": 46
    },
    {
      "epoch": 0.22143698468786807,
      "grad_norm": 0.9524626135826111,
      "learning_rate": 9.093124073433463e-05,
      "loss": 2.8493,
      "step": 47
    },
    {
      "epoch": 0.22614840989399293,
      "grad_norm": 0.9207716584205627,
      "learning_rate": 9.045084971874738e-05,
      "loss": 2.8681,
      "step": 48
    },
    {
      "epoch": 0.2308598351001178,
      "grad_norm": 0.8092623353004456,
      "learning_rate": 8.995939984474624e-05,
      "loss": 2.9606,
      "step": 49
    },
    {
      "epoch": 0.23557126030624265,
      "grad_norm": 0.915729284286499,
      "learning_rate": 8.945702546981969e-05,
      "loss": 3.1628,
      "step": 50
    },
    {
      "epoch": 0.23557126030624265,
      "eval_loss": 3.093031167984009,
      "eval_runtime": 5.6918,
      "eval_samples_per_second": 62.897,
      "eval_steps_per_second": 15.812,
      "step": 50
    },
    {
      "epoch": 0.24028268551236748,
      "grad_norm": 1.1849467754364014,
      "learning_rate": 8.894386393810563e-05,
      "loss": 3.2656,
      "step": 51
    },
    {
      "epoch": 0.24499411071849234,
      "grad_norm": 1.3945523500442505,
      "learning_rate": 8.842005554284296e-05,
      "loss": 3.2252,
      "step": 52
    },
    {
      "epoch": 0.2497055359246172,
      "grad_norm": 1.0486680269241333,
      "learning_rate": 8.788574348801675e-05,
      "loss": 3.4915,
      "step": 53
    },
    {
      "epoch": 0.254416961130742,
      "grad_norm": 0.9627955555915833,
      "learning_rate": 8.73410738492077e-05,
      "loss": 2.9888,
      "step": 54
    },
    {
      "epoch": 0.2591283863368669,
      "grad_norm": 1.5820244550704956,
      "learning_rate": 8.678619553365659e-05,
      "loss": 2.9965,
      "step": 55
    },
    {
      "epoch": 0.26383981154299174,
      "grad_norm": 0.8985310792922974,
      "learning_rate": 8.622126023955446e-05,
      "loss": 3.0081,
      "step": 56
    },
    {
      "epoch": 0.26855123674911663,
      "grad_norm": 0.9468170404434204,
      "learning_rate": 8.564642241456986e-05,
      "loss": 3.2599,
      "step": 57
    },
    {
      "epoch": 0.27326266195524146,
      "grad_norm": 0.8421167731285095,
      "learning_rate": 8.506183921362443e-05,
      "loss": 2.9656,
      "step": 58
    },
    {
      "epoch": 0.2779740871613663,
      "grad_norm": 0.9423841834068298,
      "learning_rate": 8.44676704559283e-05,
      "loss": 2.8243,
      "step": 59
    },
    {
      "epoch": 0.2826855123674912,
      "grad_norm": 0.9182611107826233,
      "learning_rate": 8.386407858128706e-05,
      "loss": 2.8801,
      "step": 60
    },
    {
      "epoch": 0.287396937573616,
      "grad_norm": 0.9046218991279602,
      "learning_rate": 8.32512286056924e-05,
      "loss": 3.0764,
      "step": 61
    },
    {
      "epoch": 0.2921083627797409,
      "grad_norm": 0.8965874314308167,
      "learning_rate": 8.262928807620843e-05,
      "loss": 3.1394,
      "step": 62
    },
    {
      "epoch": 0.2968197879858657,
      "grad_norm": 1.0667086839675903,
      "learning_rate": 8.199842702516583e-05,
      "loss": 3.688,
      "step": 63
    },
    {
      "epoch": 0.30153121319199055,
      "grad_norm": 0.8867380619049072,
      "learning_rate": 8.135881792367686e-05,
      "loss": 3.3729,
      "step": 64
    },
    {
      "epoch": 0.30624263839811544,
      "grad_norm": 0.8846852779388428,
      "learning_rate": 8.07106356344834e-05,
      "loss": 3.2973,
      "step": 65
    },
    {
      "epoch": 0.31095406360424027,
      "grad_norm": 0.8917059302330017,
      "learning_rate": 8.005405736415126e-05,
      "loss": 3.1064,
      "step": 66
    },
    {
      "epoch": 0.31566548881036516,
      "grad_norm": 0.9517539739608765,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.8648,
      "step": 67
    },
    {
      "epoch": 0.32037691401649,
      "grad_norm": 0.9675039649009705,
      "learning_rate": 7.871643313414718e-05,
      "loss": 3.2757,
      "step": 68
    },
    {
      "epoch": 0.3250883392226148,
      "grad_norm": 0.900853157043457,
      "learning_rate": 7.803575286758364e-05,
      "loss": 3.1051,
      "step": 69
    },
    {
      "epoch": 0.3297997644287397,
      "grad_norm": 1.0221682786941528,
      "learning_rate": 7.734740790612136e-05,
      "loss": 3.4485,
      "step": 70
    },
    {
      "epoch": 0.33451118963486454,
      "grad_norm": 0.9935013651847839,
      "learning_rate": 7.66515864363997e-05,
      "loss": 3.0571,
      "step": 71
    },
    {
      "epoch": 0.3392226148409894,
      "grad_norm": 1.0712761878967285,
      "learning_rate": 7.594847868906076e-05,
      "loss": 3.0314,
      "step": 72
    },
    {
      "epoch": 0.34393404004711425,
      "grad_norm": 1.1006646156311035,
      "learning_rate": 7.52382768867422e-05,
      "loss": 2.9187,
      "step": 73
    },
    {
      "epoch": 0.3486454652532391,
      "grad_norm": 1.3611395359039307,
      "learning_rate": 7.452117519152542e-05,
      "loss": 3.1346,
      "step": 74
    },
    {
      "epoch": 0.35335689045936397,
      "grad_norm": 1.2989857196807861,
      "learning_rate": 7.379736965185368e-05,
      "loss": 3.2491,
      "step": 75
    },
    {
      "epoch": 0.3580683156654888,
      "grad_norm": 1.1900298595428467,
      "learning_rate": 7.30670581489344e-05,
      "loss": 2.7104,
      "step": 76
    },
    {
      "epoch": 0.3627797408716137,
      "grad_norm": 1.2231336832046509,
      "learning_rate": 7.233044034264034e-05,
      "loss": 2.5591,
      "step": 77
    },
    {
      "epoch": 0.3674911660777385,
      "grad_norm": 1.140899658203125,
      "learning_rate": 7.158771761692464e-05,
      "loss": 2.8716,
      "step": 78
    },
    {
      "epoch": 0.37220259128386335,
      "grad_norm": 1.2476599216461182,
      "learning_rate": 7.083909302476453e-05,
      "loss": 2.6398,
      "step": 79
    },
    {
      "epoch": 0.37691401648998824,
      "grad_norm": 1.1672461032867432,
      "learning_rate": 7.008477123264848e-05,
      "loss": 2.7379,
      "step": 80
    },
    {
      "epoch": 0.38162544169611307,
      "grad_norm": 1.080532193183899,
      "learning_rate": 6.932495846462261e-05,
      "loss": 2.528,
      "step": 81
    },
    {
      "epoch": 0.38633686690223795,
      "grad_norm": 1.124290943145752,
      "learning_rate": 6.855986244591104e-05,
      "loss": 2.3585,
      "step": 82
    },
    {
      "epoch": 0.3910482921083628,
      "grad_norm": 0.9934729933738708,
      "learning_rate": 6.778969234612584e-05,
      "loss": 2.5504,
      "step": 83
    },
    {
      "epoch": 0.3957597173144876,
      "grad_norm": 1.2130358219146729,
      "learning_rate": 6.701465872208216e-05,
      "loss": 2.9468,
      "step": 84
    },
    {
      "epoch": 0.4004711425206125,
      "grad_norm": 1.0236927270889282,
      "learning_rate": 6.623497346023418e-05,
      "loss": 2.9596,
      "step": 85
    },
    {
      "epoch": 0.40518256772673733,
      "grad_norm": 0.9387683272361755,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.8189,
      "step": 86
    },
    {
      "epoch": 0.4098939929328622,
      "grad_norm": 0.9069825410842896,
      "learning_rate": 6.466250186922325e-05,
      "loss": 2.1935,
      "step": 87
    },
    {
      "epoch": 0.41460541813898705,
      "grad_norm": 0.9427089095115662,
      "learning_rate": 6.387014543809223e-05,
      "loss": 2.3151,
      "step": 88
    },
    {
      "epoch": 0.4193168433451119,
      "grad_norm": 0.8664374947547913,
      "learning_rate": 6.307399704769099e-05,
      "loss": 2.7139,
      "step": 89
    },
    {
      "epoch": 0.42402826855123676,
      "grad_norm": 0.9272505044937134,
      "learning_rate": 6.227427435703997e-05,
      "loss": 2.7058,
      "step": 90
    },
    {
      "epoch": 0.4287396937573616,
      "grad_norm": 0.7902560234069824,
      "learning_rate": 6.147119600233758e-05,
      "loss": 2.5351,
      "step": 91
    },
    {
      "epoch": 0.4334511189634865,
      "grad_norm": 0.7875714302062988,
      "learning_rate": 6.066498153718735e-05,
      "loss": 2.6184,
      "step": 92
    },
    {
      "epoch": 0.4381625441696113,
      "grad_norm": 0.9437817335128784,
      "learning_rate": 5.985585137257401e-05,
      "loss": 2.8083,
      "step": 93
    },
    {
      "epoch": 0.44287396937573614,
      "grad_norm": 0.8163449764251709,
      "learning_rate": 5.90440267166055e-05,
      "loss": 2.7748,
      "step": 94
    },
    {
      "epoch": 0.44758539458186103,
      "grad_norm": 0.9238842725753784,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 2.6347,
      "step": 95
    },
    {
      "epoch": 0.45229681978798586,
      "grad_norm": 0.8988198041915894,
      "learning_rate": 5.74131823855921e-05,
      "loss": 2.9642,
      "step": 96
    },
    {
      "epoch": 0.45700824499411075,
      "grad_norm": 0.8565858006477356,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 3.0546,
      "step": 97
    },
    {
      "epoch": 0.4617196702002356,
      "grad_norm": 0.9136887192726135,
      "learning_rate": 5.577423184847932e-05,
      "loss": 2.1682,
      "step": 98
    },
    {
      "epoch": 0.4664310954063604,
      "grad_norm": 0.8954570293426514,
      "learning_rate": 5.495227651252315e-05,
      "loss": 3.2578,
      "step": 99
    },
    {
      "epoch": 0.4711425206124853,
      "grad_norm": 0.8498767018318176,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 2.7291,
      "step": 100
    },
    {
      "epoch": 0.4711425206124853,
      "eval_loss": 2.874276876449585,
      "eval_runtime": 5.6805,
      "eval_samples_per_second": 63.023,
      "eval_steps_per_second": 15.844,
      "step": 100
    },
    {
      "epoch": 0.4758539458186101,
      "grad_norm": 1.2473000288009644,
      "learning_rate": 5.330452921628497e-05,
      "loss": 2.873,
      "step": 101
    },
    {
      "epoch": 0.48056537102473496,
      "grad_norm": 1.1726858615875244,
      "learning_rate": 5.247918773366112e-05,
      "loss": 3.0055,
      "step": 102
    },
    {
      "epoch": 0.48527679623085984,
      "grad_norm": 1.086951732635498,
      "learning_rate": 5.165316846586541e-05,
      "loss": 2.937,
      "step": 103
    },
    {
      "epoch": 0.48998822143698467,
      "grad_norm": 0.9103227853775024,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 3.101,
      "step": 104
    },
    {
      "epoch": 0.49469964664310956,
      "grad_norm": 0.8695147633552551,
      "learning_rate": 5e-05,
      "loss": 2.8832,
      "step": 105
    },
    {
      "epoch": 0.4994110718492344,
      "grad_norm": 0.8533949851989746,
      "learning_rate": 4.917330276168208e-05,
      "loss": 2.5325,
      "step": 106
    },
    {
      "epoch": 0.5041224970553593,
      "grad_norm": 0.9563857913017273,
      "learning_rate": 4.834683153413459e-05,
      "loss": 2.7812,
      "step": 107
    },
    {
      "epoch": 0.508833922261484,
      "grad_norm": 1.043393611907959,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 2.564,
      "step": 108
    },
    {
      "epoch": 0.5135453474676089,
      "grad_norm": 0.8984698057174683,
      "learning_rate": 4.669547078371504e-05,
      "loss": 2.928,
      "step": 109
    },
    {
      "epoch": 0.5182567726737338,
      "grad_norm": 0.9062252044677734,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 2.8584,
      "step": 110
    },
    {
      "epoch": 0.5229681978798587,
      "grad_norm": 0.8537540435791016,
      "learning_rate": 4.504772348747687e-05,
      "loss": 2.7673,
      "step": 111
    },
    {
      "epoch": 0.5276796230859835,
      "grad_norm": 0.9530758261680603,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 2.957,
      "step": 112
    },
    {
      "epoch": 0.5323910482921084,
      "grad_norm": 1.0364296436309814,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 3.2921,
      "step": 113
    },
    {
      "epoch": 0.5371024734982333,
      "grad_norm": 1.1131585836410522,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 3.0307,
      "step": 114
    },
    {
      "epoch": 0.541813898704358,
      "grad_norm": 1.1007980108261108,
      "learning_rate": 4.17702704859633e-05,
      "loss": 2.8602,
      "step": 115
    },
    {
      "epoch": 0.5465253239104829,
      "grad_norm": 0.9027460813522339,
      "learning_rate": 4.095597328339452e-05,
      "loss": 2.8229,
      "step": 116
    },
    {
      "epoch": 0.5512367491166078,
      "grad_norm": 1.0880588293075562,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 2.7676,
      "step": 117
    },
    {
      "epoch": 0.5559481743227326,
      "grad_norm": 1.158648133277893,
      "learning_rate": 3.933501846281267e-05,
      "loss": 3.0379,
      "step": 118
    },
    {
      "epoch": 0.5606595995288575,
      "grad_norm": 1.1515697240829468,
      "learning_rate": 3.852880399766243e-05,
      "loss": 3.2184,
      "step": 119
    },
    {
      "epoch": 0.5653710247349824,
      "grad_norm": 1.039744257926941,
      "learning_rate": 3.772572564296005e-05,
      "loss": 3.0673,
      "step": 120
    },
    {
      "epoch": 0.5700824499411072,
      "grad_norm": 1.1633607149124146,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 3.0795,
      "step": 121
    },
    {
      "epoch": 0.574793875147232,
      "grad_norm": 1.2394342422485352,
      "learning_rate": 3.612985456190778e-05,
      "loss": 3.1834,
      "step": 122
    },
    {
      "epoch": 0.5795053003533569,
      "grad_norm": 1.2689552307128906,
      "learning_rate": 3.533749813077677e-05,
      "loss": 3.3557,
      "step": 123
    },
    {
      "epoch": 0.5842167255594818,
      "grad_norm": 1.3590177297592163,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 3.0499,
      "step": 124
    },
    {
      "epoch": 0.5889281507656066,
      "grad_norm": 1.0699342489242554,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 3.1957,
      "step": 125
    },
    {
      "epoch": 0.5936395759717314,
      "grad_norm": 1.2349456548690796,
      "learning_rate": 3.298534127791785e-05,
      "loss": 2.2495,
      "step": 126
    },
    {
      "epoch": 0.5983510011778563,
      "grad_norm": 1.2268959283828735,
      "learning_rate": 3.221030765387417e-05,
      "loss": 2.3801,
      "step": 127
    },
    {
      "epoch": 0.6030624263839811,
      "grad_norm": 1.3209049701690674,
      "learning_rate": 3.144013755408895e-05,
      "loss": 2.8863,
      "step": 128
    },
    {
      "epoch": 0.607773851590106,
      "grad_norm": 1.1282166242599487,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 2.5438,
      "step": 129
    },
    {
      "epoch": 0.6124852767962309,
      "grad_norm": 1.3303471803665161,
      "learning_rate": 2.991522876735154e-05,
      "loss": 2.922,
      "step": 130
    },
    {
      "epoch": 0.6171967020023557,
      "grad_norm": 1.158387541770935,
      "learning_rate": 2.916090697523549e-05,
      "loss": 2.4659,
      "step": 131
    },
    {
      "epoch": 0.6219081272084805,
      "grad_norm": 1.2458516359329224,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 2.7004,
      "step": 132
    },
    {
      "epoch": 0.6266195524146054,
      "grad_norm": 1.0022237300872803,
      "learning_rate": 2.766955965735968e-05,
      "loss": 2.4997,
      "step": 133
    },
    {
      "epoch": 0.6313309776207303,
      "grad_norm": 1.0322611331939697,
      "learning_rate": 2.693294185106562e-05,
      "loss": 2.5472,
      "step": 134
    },
    {
      "epoch": 0.6360424028268551,
      "grad_norm": 1.1272794008255005,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 2.6108,
      "step": 135
    },
    {
      "epoch": 0.64075382803298,
      "grad_norm": 1.0378586053848267,
      "learning_rate": 2.547882480847461e-05,
      "loss": 2.1245,
      "step": 136
    },
    {
      "epoch": 0.6454652532391049,
      "grad_norm": 1.0337663888931274,
      "learning_rate": 2.476172311325783e-05,
      "loss": 2.5266,
      "step": 137
    },
    {
      "epoch": 0.6501766784452296,
      "grad_norm": 1.0917435884475708,
      "learning_rate": 2.405152131093926e-05,
      "loss": 3.0042,
      "step": 138
    },
    {
      "epoch": 0.6548881036513545,
      "grad_norm": 0.9059085249900818,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 2.3542,
      "step": 139
    },
    {
      "epoch": 0.6595995288574794,
      "grad_norm": 1.0263761281967163,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 2.9872,
      "step": 140
    },
    {
      "epoch": 0.6643109540636042,
      "grad_norm": 0.8678799271583557,
      "learning_rate": 2.196424713241637e-05,
      "loss": 2.7748,
      "step": 141
    },
    {
      "epoch": 0.6690223792697291,
      "grad_norm": 1.0620346069335938,
      "learning_rate": 2.128356686585282e-05,
      "loss": 2.5043,
      "step": 142
    },
    {
      "epoch": 0.673733804475854,
      "grad_norm": 0.9341846108436584,
      "learning_rate": 2.061073738537635e-05,
      "loss": 2.5321,
      "step": 143
    },
    {
      "epoch": 0.6784452296819788,
      "grad_norm": 0.9391326308250427,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 2.1601,
      "step": 144
    },
    {
      "epoch": 0.6831566548881036,
      "grad_norm": 0.9408818483352661,
      "learning_rate": 1.928936436551661e-05,
      "loss": 2.7244,
      "step": 145
    },
    {
      "epoch": 0.6878680800942285,
      "grad_norm": 0.9140599370002747,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 2.3333,
      "step": 146
    },
    {
      "epoch": 0.6925795053003534,
      "grad_norm": 0.9881057143211365,
      "learning_rate": 1.800157297483417e-05,
      "loss": 2.7188,
      "step": 147
    },
    {
      "epoch": 0.6972909305064782,
      "grad_norm": 0.8617043495178223,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 2.6069,
      "step": 148
    },
    {
      "epoch": 0.702002355712603,
      "grad_norm": 0.9651095867156982,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 3.0078,
      "step": 149
    },
    {
      "epoch": 0.7067137809187279,
      "grad_norm": 1.0257482528686523,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 2.7801,
      "step": 150
    },
    {
      "epoch": 0.7067137809187279,
      "eval_loss": 2.769987106323242,
      "eval_runtime": 5.6966,
      "eval_samples_per_second": 62.844,
      "eval_steps_per_second": 15.799,
      "step": 150
    },
    {
      "epoch": 0.7114252061248527,
      "grad_norm": 0.9879981279373169,
      "learning_rate": 1.553232954407171e-05,
      "loss": 2.7686,
      "step": 151
    },
    {
      "epoch": 0.7161366313309776,
      "grad_norm": 0.9102957844734192,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 2.4153,
      "step": 152
    },
    {
      "epoch": 0.7208480565371025,
      "grad_norm": 0.9467206001281738,
      "learning_rate": 1.435357758543015e-05,
      "loss": 2.9807,
      "step": 153
    },
    {
      "epoch": 0.7255594817432274,
      "grad_norm": 0.8312134742736816,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 2.4495,
      "step": 154
    },
    {
      "epoch": 0.7302709069493521,
      "grad_norm": 1.0083261728286743,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 2.9834,
      "step": 155
    },
    {
      "epoch": 0.734982332155477,
      "grad_norm": 0.926459014415741,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 2.8587,
      "step": 156
    },
    {
      "epoch": 0.7396937573616019,
      "grad_norm": 0.9118709564208984,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 2.8708,
      "step": 157
    },
    {
      "epoch": 0.7444051825677267,
      "grad_norm": 0.938479483127594,
      "learning_rate": 1.157994445715706e-05,
      "loss": 2.6886,
      "step": 158
    },
    {
      "epoch": 0.7491166077738516,
      "grad_norm": 1.0707321166992188,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 3.1004,
      "step": 159
    },
    {
      "epoch": 0.7538280329799765,
      "grad_norm": 0.9356095790863037,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 2.882,
      "step": 160
    },
    {
      "epoch": 0.7585394581861012,
      "grad_norm": 0.9403328895568848,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 2.8144,
      "step": 161
    },
    {
      "epoch": 0.7632508833922261,
      "grad_norm": 0.9856503009796143,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.6973,
      "step": 162
    },
    {
      "epoch": 0.767962308598351,
      "grad_norm": 0.9402594566345215,
      "learning_rate": 9.068759265665384e-06,
      "loss": 3.1375,
      "step": 163
    },
    {
      "epoch": 0.7726737338044759,
      "grad_norm": 1.0533093214035034,
      "learning_rate": 8.599558442598998e-06,
      "loss": 2.9059,
      "step": 164
    },
    {
      "epoch": 0.7773851590106007,
      "grad_norm": 1.0802216529846191,
      "learning_rate": 8.141676086873572e-06,
      "loss": 2.8909,
      "step": 165
    },
    {
      "epoch": 0.7820965842167256,
      "grad_norm": 1.1166588068008423,
      "learning_rate": 7.695237378953223e-06,
      "loss": 3.1018,
      "step": 166
    },
    {
      "epoch": 0.7868080094228505,
      "grad_norm": 1.0489593744277954,
      "learning_rate": 7.260364370723044e-06,
      "loss": 3.0847,
      "step": 167
    },
    {
      "epoch": 0.7915194346289752,
      "grad_norm": 1.2919156551361084,
      "learning_rate": 6.837175952121306e-06,
      "loss": 3.243,
      "step": 168
    },
    {
      "epoch": 0.7962308598351001,
      "grad_norm": 1.2311078310012817,
      "learning_rate": 6.425787818636131e-06,
      "loss": 3.344,
      "step": 169
    },
    {
      "epoch": 0.800942285041225,
      "grad_norm": 1.2963378429412842,
      "learning_rate": 6.026312439675552e-06,
      "loss": 2.9426,
      "step": 170
    },
    {
      "epoch": 0.8056537102473498,
      "grad_norm": 1.1835005283355713,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 3.004,
      "step": 171
    },
    {
      "epoch": 0.8103651354534747,
      "grad_norm": 1.3847728967666626,
      "learning_rate": 5.263533508961827e-06,
      "loss": 3.4973,
      "step": 172
    },
    {
      "epoch": 0.8150765606595995,
      "grad_norm": 1.259530782699585,
      "learning_rate": 4.900438493352055e-06,
      "loss": 2.9557,
      "step": 173
    },
    {
      "epoch": 0.8197879858657244,
      "grad_norm": 1.645922303199768,
      "learning_rate": 4.549673247541875e-06,
      "loss": 3.4497,
      "step": 174
    },
    {
      "epoch": 0.8244994110718492,
      "grad_norm": 1.6107027530670166,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 2.9002,
      "step": 175
    },
    {
      "epoch": 0.8292108362779741,
      "grad_norm": 0.8580393195152283,
      "learning_rate": 3.885512251130763e-06,
      "loss": 2.2651,
      "step": 176
    },
    {
      "epoch": 0.833922261484099,
      "grad_norm": 0.8875122666358948,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 2.1955,
      "step": 177
    },
    {
      "epoch": 0.8386336866902238,
      "grad_norm": 0.8691319227218628,
      "learning_rate": 3.271776770026963e-06,
      "loss": 2.5762,
      "step": 178
    },
    {
      "epoch": 0.8433451118963486,
      "grad_norm": 1.0163774490356445,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 2.6119,
      "step": 179
    },
    {
      "epoch": 0.8480565371024735,
      "grad_norm": 0.8920414447784424,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 2.5154,
      "step": 180
    },
    {
      "epoch": 0.8527679623085983,
      "grad_norm": 0.9631790518760681,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 2.5506,
      "step": 181
    },
    {
      "epoch": 0.8574793875147232,
      "grad_norm": 0.8805952072143555,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 2.679,
      "step": 182
    },
    {
      "epoch": 0.8621908127208481,
      "grad_norm": 0.9266687631607056,
      "learning_rate": 1.962316193157593e-06,
      "loss": 2.3374,
      "step": 183
    },
    {
      "epoch": 0.866902237926973,
      "grad_norm": 0.854870617389679,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 2.4187,
      "step": 184
    },
    {
      "epoch": 0.8716136631330977,
      "grad_norm": 0.9341062903404236,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 2.6638,
      "step": 185
    },
    {
      "epoch": 0.8763250883392226,
      "grad_norm": 0.9016166925430298,
      "learning_rate": 1.333670137599713e-06,
      "loss": 2.2794,
      "step": 186
    },
    {
      "epoch": 0.8810365135453475,
      "grad_norm": 1.0805704593658447,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 2.4947,
      "step": 187
    },
    {
      "epoch": 0.8857479387514723,
      "grad_norm": 0.8769288659095764,
      "learning_rate": 9.810017062595322e-07,
      "loss": 2.2467,
      "step": 188
    },
    {
      "epoch": 0.8904593639575972,
      "grad_norm": 0.9262085556983948,
      "learning_rate": 8.247462563808817e-07,
      "loss": 2.4642,
      "step": 189
    },
    {
      "epoch": 0.8951707891637221,
      "grad_norm": 0.8909001350402832,
      "learning_rate": 6.819348298638839e-07,
      "loss": 2.6498,
      "step": 190
    },
    {
      "epoch": 0.8998822143698468,
      "grad_norm": 0.9195206165313721,
      "learning_rate": 5.526064699265753e-07,
      "loss": 2.4863,
      "step": 191
    },
    {
      "epoch": 0.9045936395759717,
      "grad_norm": 0.8525665998458862,
      "learning_rate": 4.367965336512403e-07,
      "loss": 2.5905,
      "step": 192
    },
    {
      "epoch": 0.9093050647820966,
      "grad_norm": 0.8745928406715393,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 2.4172,
      "step": 193
    },
    {
      "epoch": 0.9140164899882215,
      "grad_norm": 0.9491853713989258,
      "learning_rate": 2.458548727494292e-07,
      "loss": 2.5457,
      "step": 194
    },
    {
      "epoch": 0.9187279151943463,
      "grad_norm": 0.9138966202735901,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 2.2656,
      "step": 195
    },
    {
      "epoch": 0.9234393404004712,
      "grad_norm": 0.8036891222000122,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 2.4428,
      "step": 196
    },
    {
      "epoch": 0.928150765606596,
      "grad_norm": 0.9975553154945374,
      "learning_rate": 6.150154258476315e-08,
      "loss": 2.3633,
      "step": 197
    },
    {
      "epoch": 0.9328621908127208,
      "grad_norm": 0.8577960133552551,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 2.2966,
      "step": 198
    },
    {
      "epoch": 0.9375736160188457,
      "grad_norm": 0.888018012046814,
      "learning_rate": 6.834750376549792e-09,
      "loss": 2.7802,
      "step": 199
    },
    {
      "epoch": 0.9422850412249706,
      "grad_norm": 0.9421773552894592,
      "learning_rate": 0.0,
      "loss": 2.8693,
      "step": 200
    },
    {
      "epoch": 0.9422850412249706,
      "eval_loss": 2.7514498233795166,
      "eval_runtime": 5.6843,
      "eval_samples_per_second": 62.98,
      "eval_steps_per_second": 15.833,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7441249896235008.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|