{
  "best_metric": 0.7420778274536133,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 2.26628895184136,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0113314447592068,
      "grad_norm": 1.4940721988677979,
      "learning_rate": 1e-05,
      "loss": 1.0261,
      "step": 1
    },
    {
      "epoch": 0.0113314447592068,
      "eval_loss": 1.1746464967727661,
      "eval_runtime": 11.1835,
      "eval_samples_per_second": 13.323,
      "eval_steps_per_second": 3.398,
      "step": 1
    },
    {
      "epoch": 0.0226628895184136,
      "grad_norm": 1.7041244506835938,
      "learning_rate": 2e-05,
      "loss": 1.1102,
      "step": 2
    },
    {
      "epoch": 0.0339943342776204,
      "grad_norm": 1.8132150173187256,
      "learning_rate": 3e-05,
      "loss": 1.1672,
      "step": 3
    },
    {
      "epoch": 0.0453257790368272,
      "grad_norm": 1.4703266620635986,
      "learning_rate": 4e-05,
      "loss": 1.0816,
      "step": 4
    },
    {
      "epoch": 0.056657223796033995,
      "grad_norm": 0.9214251637458801,
      "learning_rate": 5e-05,
      "loss": 1.0017,
      "step": 5
    },
    {
      "epoch": 0.0679886685552408,
      "grad_norm": 0.5252280235290527,
      "learning_rate": 6e-05,
      "loss": 0.98,
      "step": 6
    },
    {
      "epoch": 0.07932011331444759,
      "grad_norm": 0.7931201457977295,
      "learning_rate": 7e-05,
      "loss": 1.0041,
      "step": 7
    },
    {
      "epoch": 0.0906515580736544,
      "grad_norm": 0.9411036372184753,
      "learning_rate": 8e-05,
      "loss": 1.0307,
      "step": 8
    },
    {
      "epoch": 0.10198300283286119,
      "grad_norm": 0.6796821355819702,
      "learning_rate": 9e-05,
      "loss": 1.0361,
      "step": 9
    },
    {
      "epoch": 0.11331444759206799,
      "grad_norm": 0.48324570059776306,
      "learning_rate": 0.0001,
      "loss": 0.9964,
      "step": 10
    },
    {
      "epoch": 0.12464589235127478,
      "grad_norm": 0.37594151496887207,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.9495,
      "step": 11
    },
    {
      "epoch": 0.1359773371104816,
      "grad_norm": 0.47099214792251587,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.9703,
      "step": 12
    },
    {
      "epoch": 0.14730878186968838,
      "grad_norm": 0.4177071452140808,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.9866,
      "step": 13
    },
    {
      "epoch": 0.15864022662889518,
      "grad_norm": 0.4068276584148407,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.9656,
      "step": 14
    },
    {
      "epoch": 0.16997167138810199,
      "grad_norm": 0.37006956338882446,
      "learning_rate": 9.98292246503335e-05,
      "loss": 1.0134,
      "step": 15
    },
    {
      "epoch": 0.1813031161473088,
      "grad_norm": 0.3679398000240326,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.9677,
      "step": 16
    },
    {
      "epoch": 0.19263456090651557,
      "grad_norm": 0.352753609418869,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.9623,
      "step": 17
    },
    {
      "epoch": 0.20396600566572237,
      "grad_norm": 0.32562100887298584,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.9627,
      "step": 18
    },
    {
      "epoch": 0.21529745042492918,
      "grad_norm": 0.3547748327255249,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.9764,
      "step": 19
    },
    {
      "epoch": 0.22662889518413598,
      "grad_norm": 0.4081428050994873,
      "learning_rate": 9.931806517013612e-05,
      "loss": 1.022,
      "step": 20
    },
    {
      "epoch": 0.23796033994334279,
      "grad_norm": 0.4699523150920868,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.879,
      "step": 21
    },
    {
      "epoch": 0.24929178470254956,
      "grad_norm": 1.1098036766052246,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.91,
      "step": 22
    },
    {
      "epoch": 0.26062322946175637,
      "grad_norm": 0.3437006175518036,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.8645,
      "step": 23
    },
    {
      "epoch": 0.2719546742209632,
      "grad_norm": 0.32103949785232544,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.8644,
      "step": 24
    },
    {
      "epoch": 0.28328611898017,
      "grad_norm": 0.339089572429657,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.9377,
      "step": 25
    },
    {
      "epoch": 0.29461756373937675,
      "grad_norm": 0.3423368036746979,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.9106,
      "step": 26
    },
    {
      "epoch": 0.3059490084985836,
      "grad_norm": 0.2850959599018097,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.8449,
      "step": 27
    },
    {
      "epoch": 0.31728045325779036,
      "grad_norm": 0.29601606726646423,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.919,
      "step": 28
    },
    {
      "epoch": 0.3286118980169972,
      "grad_norm": 0.30124685168266296,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.9002,
      "step": 29
    },
    {
      "epoch": 0.33994334277620397,
      "grad_norm": 0.2979277968406677,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.8651,
      "step": 30
    },
    {
      "epoch": 0.35127478753541075,
      "grad_norm": 0.2953733503818512,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.9512,
      "step": 31
    },
    {
      "epoch": 0.3626062322946176,
      "grad_norm": 0.2760586440563202,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.9007,
      "step": 32
    },
    {
      "epoch": 0.37393767705382436,
      "grad_norm": 0.2794976532459259,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.9375,
      "step": 33
    },
    {
      "epoch": 0.38526912181303113,
      "grad_norm": 0.3017430305480957,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.936,
      "step": 34
    },
    {
      "epoch": 0.39660056657223797,
      "grad_norm": 0.316226601600647,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.9133,
      "step": 35
    },
    {
      "epoch": 0.40793201133144474,
      "grad_norm": 0.2935938835144043,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.8939,
      "step": 36
    },
    {
      "epoch": 0.4192634560906516,
      "grad_norm": 0.296953409910202,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.9582,
      "step": 37
    },
    {
      "epoch": 0.43059490084985835,
      "grad_norm": 0.3016681373119354,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.9365,
      "step": 38
    },
    {
      "epoch": 0.44192634560906513,
      "grad_norm": 0.2930508852005005,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.8891,
      "step": 39
    },
    {
      "epoch": 0.45325779036827196,
      "grad_norm": 0.32635068893432617,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.8874,
      "step": 40
    },
    {
      "epoch": 0.46458923512747874,
      "grad_norm": 0.3173358738422394,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.8823,
      "step": 41
    },
    {
      "epoch": 0.47592067988668557,
      "grad_norm": 0.31149113178253174,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.8917,
      "step": 42
    },
    {
      "epoch": 0.48725212464589235,
      "grad_norm": 0.38314029574394226,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.9208,
      "step": 43
    },
    {
      "epoch": 0.4985835694050991,
      "grad_norm": 1.4880560636520386,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.5576,
      "step": 44
    },
    {
      "epoch": 0.509915014164306,
      "grad_norm": 0.25400444865226746,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.7591,
      "step": 45
    },
    {
      "epoch": 0.5212464589235127,
      "grad_norm": 0.26966241002082825,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.8677,
      "step": 46
    },
    {
      "epoch": 0.5325779036827195,
      "grad_norm": 0.27806952595710754,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.8674,
      "step": 47
    },
    {
      "epoch": 0.5439093484419264,
      "grad_norm": 0.26129311323165894,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.8704,
      "step": 48
    },
    {
      "epoch": 0.5552407932011332,
      "grad_norm": 0.28313538432121277,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.8913,
      "step": 49
    },
    {
      "epoch": 0.56657223796034,
      "grad_norm": 0.2700784504413605,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.8516,
      "step": 50
    },
    {
      "epoch": 0.56657223796034,
      "eval_loss": 0.8685831427574158,
      "eval_runtime": 11.3624,
      "eval_samples_per_second": 13.113,
      "eval_steps_per_second": 3.344,
      "step": 50
    },
    {
      "epoch": 0.5779036827195467,
      "grad_norm": 0.2857685983181,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.8437,
      "step": 51
    },
    {
      "epoch": 0.5892351274787535,
      "grad_norm": 0.28413069248199463,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.9037,
      "step": 52
    },
    {
      "epoch": 0.6005665722379604,
      "grad_norm": 0.27404722571372986,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.9212,
      "step": 53
    },
    {
      "epoch": 0.6118980169971672,
      "grad_norm": 0.2649526000022888,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.8784,
      "step": 54
    },
    {
      "epoch": 0.623229461756374,
      "grad_norm": 0.2947290539741516,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.8644,
      "step": 55
    },
    {
      "epoch": 0.6345609065155807,
      "grad_norm": 0.27699536085128784,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.8749,
      "step": 56
    },
    {
      "epoch": 0.6458923512747875,
      "grad_norm": 0.2775574326515198,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.8695,
      "step": 57
    },
    {
      "epoch": 0.6572237960339944,
      "grad_norm": 0.2814454436302185,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.8573,
      "step": 58
    },
    {
      "epoch": 0.6685552407932012,
      "grad_norm": 0.2803303301334381,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.891,
      "step": 59
    },
    {
      "epoch": 0.6798866855524079,
      "grad_norm": 0.28773200511932373,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.8753,
      "step": 60
    },
    {
      "epoch": 0.6912181303116147,
      "grad_norm": 0.29724711179733276,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.9011,
      "step": 61
    },
    {
      "epoch": 0.7025495750708215,
      "grad_norm": 0.32904359698295593,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.9286,
      "step": 62
    },
    {
      "epoch": 0.7138810198300283,
      "grad_norm": 0.3503016531467438,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.8743,
      "step": 63
    },
    {
      "epoch": 0.7252124645892352,
      "grad_norm": 0.3214200437068939,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.8733,
      "step": 64
    },
    {
      "epoch": 0.7365439093484419,
      "grad_norm": 0.3665679693222046,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.8802,
      "step": 65
    },
    {
      "epoch": 0.7478753541076487,
      "grad_norm": 0.737060010433197,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.9375,
      "step": 66
    },
    {
      "epoch": 0.7592067988668555,
      "grad_norm": 0.3130401372909546,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.841,
      "step": 67
    },
    {
      "epoch": 0.7705382436260623,
      "grad_norm": 0.2984524369239807,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.8193,
      "step": 68
    },
    {
      "epoch": 0.7818696883852692,
      "grad_norm": 0.29503563046455383,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.8554,
      "step": 69
    },
    {
      "epoch": 0.7932011331444759,
      "grad_norm": 0.28448018431663513,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.8187,
      "step": 70
    },
    {
      "epoch": 0.8045325779036827,
      "grad_norm": 0.295426607131958,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.8892,
      "step": 71
    },
    {
      "epoch": 0.8158640226628895,
      "grad_norm": 0.29137861728668213,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.8098,
      "step": 72
    },
    {
      "epoch": 0.8271954674220963,
      "grad_norm": 0.29083821177482605,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.8292,
      "step": 73
    },
    {
      "epoch": 0.8385269121813032,
      "grad_norm": 0.2817254066467285,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.8449,
      "step": 74
    },
    {
      "epoch": 0.8498583569405099,
      "grad_norm": 0.2846548557281494,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.8838,
      "step": 75
    },
    {
      "epoch": 0.8611898016997167,
      "grad_norm": 0.2862998843193054,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.838,
      "step": 76
    },
    {
      "epoch": 0.8725212464589235,
      "grad_norm": 0.2817937433719635,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.8634,
      "step": 77
    },
    {
      "epoch": 0.8838526912181303,
      "grad_norm": 0.29679593443870544,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.8735,
      "step": 78
    },
    {
      "epoch": 0.8951841359773371,
      "grad_norm": 0.2932203710079193,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.8345,
      "step": 79
    },
    {
      "epoch": 0.9065155807365439,
      "grad_norm": 0.29441186785697937,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.8419,
      "step": 80
    },
    {
      "epoch": 0.9178470254957507,
      "grad_norm": 0.29725492000579834,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.8608,
      "step": 81
    },
    {
      "epoch": 0.9291784702549575,
      "grad_norm": 0.3129102289676666,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.8679,
      "step": 82
    },
    {
      "epoch": 0.9405099150141643,
      "grad_norm": 0.32254207134246826,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.8656,
      "step": 83
    },
    {
      "epoch": 0.9518413597733711,
      "grad_norm": 0.31700399518013,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.8419,
      "step": 84
    },
    {
      "epoch": 0.9631728045325779,
      "grad_norm": 0.36216357350349426,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.8015,
      "step": 85
    },
    {
      "epoch": 0.9745042492917847,
      "grad_norm": 0.3494912087917328,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.8515,
      "step": 86
    },
    {
      "epoch": 0.9858356940509915,
      "grad_norm": 0.373202383518219,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.7379,
      "step": 87
    },
    {
      "epoch": 0.9971671388101983,
      "grad_norm": 0.7268677353858948,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.6286,
      "step": 88
    },
    {
      "epoch": 1.0084985835694051,
      "grad_norm": 0.654803991317749,
      "learning_rate": 6.307399704769099e-05,
      "loss": 1.3468,
      "step": 89
    },
    {
      "epoch": 1.019830028328612,
      "grad_norm": 0.26078087091445923,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.7179,
      "step": 90
    },
    {
      "epoch": 1.0311614730878187,
      "grad_norm": 0.26947835087776184,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.7735,
      "step": 91
    },
    {
      "epoch": 1.0424929178470255,
      "grad_norm": 0.27178534865379333,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.7403,
      "step": 92
    },
    {
      "epoch": 1.0538243626062322,
      "grad_norm": 0.27182912826538086,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.7573,
      "step": 93
    },
    {
      "epoch": 1.065155807365439,
      "grad_norm": 0.29053595662117004,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.7674,
      "step": 94
    },
    {
      "epoch": 1.0764872521246458,
      "grad_norm": 0.3091391324996948,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.7174,
      "step": 95
    },
    {
      "epoch": 1.0878186968838528,
      "grad_norm": 0.2862534821033478,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.719,
      "step": 96
    },
    {
      "epoch": 1.0991501416430596,
      "grad_norm": 0.2900041937828064,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.7355,
      "step": 97
    },
    {
      "epoch": 1.1104815864022664,
      "grad_norm": 0.2984274625778198,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.6907,
      "step": 98
    },
    {
      "epoch": 1.1218130311614731,
      "grad_norm": 0.31093156337738037,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.7234,
      "step": 99
    },
    {
      "epoch": 1.13314447592068,
      "grad_norm": 0.32328957319259644,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.7473,
      "step": 100
    },
    {
      "epoch": 1.13314447592068,
      "eval_loss": 0.8013013601303101,
      "eval_runtime": 11.3593,
      "eval_samples_per_second": 13.117,
      "eval_steps_per_second": 3.345,
      "step": 100
    },
    {
      "epoch": 1.1444759206798867,
      "grad_norm": 0.31163015961647034,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.6997,
      "step": 101
    },
    {
      "epoch": 1.1558073654390935,
      "grad_norm": 0.3146006762981415,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.7283,
      "step": 102
    },
    {
      "epoch": 1.1671388101983002,
      "grad_norm": 0.3301478326320648,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.7262,
      "step": 103
    },
    {
      "epoch": 1.178470254957507,
      "grad_norm": 0.3398320972919464,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.7361,
      "step": 104
    },
    {
      "epoch": 1.1898016997167138,
      "grad_norm": 0.36190029978752136,
      "learning_rate": 5e-05,
      "loss": 0.7502,
      "step": 105
    },
    {
      "epoch": 1.2011331444759206,
      "grad_norm": 0.3504740595817566,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.6816,
      "step": 106
    },
    {
      "epoch": 1.2124645892351276,
      "grad_norm": 0.40286341309547424,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.7556,
      "step": 107
    },
    {
      "epoch": 1.2237960339943343,
      "grad_norm": 0.4191229045391083,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.7204,
      "step": 108
    },
    {
      "epoch": 1.2351274787535411,
      "grad_norm": 0.42308875918388367,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.7062,
      "step": 109
    },
    {
      "epoch": 1.246458923512748,
      "grad_norm": 0.6090673208236694,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.622,
      "step": 110
    },
    {
      "epoch": 1.2577903682719547,
      "grad_norm": 0.3646659851074219,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.5627,
      "step": 111
    },
    {
      "epoch": 1.2691218130311614,
      "grad_norm": 0.4557804465293884,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.7121,
      "step": 112
    },
    {
      "epoch": 1.2804532577903682,
      "grad_norm": 0.3935948610305786,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.7158,
      "step": 113
    },
    {
      "epoch": 1.291784702549575,
      "grad_norm": 0.3726375102996826,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.774,
      "step": 114
    },
    {
      "epoch": 1.3031161473087818,
      "grad_norm": 0.3537135422229767,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.7672,
      "step": 115
    },
    {
      "epoch": 1.3144475920679888,
      "grad_norm": 0.35993891954421997,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.7034,
      "step": 116
    },
    {
      "epoch": 1.3257790368271953,
      "grad_norm": 0.3584566116333008,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.7001,
      "step": 117
    },
    {
      "epoch": 1.3371104815864023,
      "grad_norm": 0.37049055099487305,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.7125,
      "step": 118
    },
    {
      "epoch": 1.348441926345609,
      "grad_norm": 0.35479503870010376,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.7248,
      "step": 119
    },
    {
      "epoch": 1.3597733711048159,
      "grad_norm": 0.3559660315513611,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.7629,
      "step": 120
    },
    {
      "epoch": 1.3711048158640227,
      "grad_norm": 0.34223610162734985,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.6785,
      "step": 121
    },
    {
      "epoch": 1.3824362606232294,
      "grad_norm": 0.3551385998725891,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.7203,
      "step": 122
    },
    {
      "epoch": 1.3937677053824362,
      "grad_norm": 0.3625800907611847,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.7097,
      "step": 123
    },
    {
      "epoch": 1.405099150141643,
      "grad_norm": 0.3674543797969818,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.7275,
      "step": 124
    },
    {
      "epoch": 1.41643059490085,
      "grad_norm": 0.35478031635284424,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.6774,
      "step": 125
    },
    {
      "epoch": 1.4277620396600565,
      "grad_norm": 0.39804625511169434,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.7227,
      "step": 126
    },
    {
      "epoch": 1.4390934844192635,
      "grad_norm": 0.402412474155426,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.6975,
      "step": 127
    },
    {
      "epoch": 1.4504249291784703,
      "grad_norm": 0.3918543756008148,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.6646,
      "step": 128
    },
    {
      "epoch": 1.461756373937677,
      "grad_norm": 0.4310785233974457,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.6273,
      "step": 129
    },
    {
      "epoch": 1.4730878186968839,
      "grad_norm": 0.47671714425086975,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.5883,
      "step": 130
    },
    {
      "epoch": 1.4844192634560907,
      "grad_norm": 0.4919561445713043,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.5803,
      "step": 131
    },
    {
      "epoch": 1.4957507082152974,
      "grad_norm": 0.7754276990890503,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.4437,
      "step": 132
    },
    {
      "epoch": 1.5070821529745042,
      "grad_norm": 0.36054930090904236,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.5229,
      "step": 133
    },
    {
      "epoch": 1.5184135977337112,
      "grad_norm": 0.4418894946575165,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.6609,
      "step": 134
    },
    {
      "epoch": 1.5297450424929178,
      "grad_norm": 0.5525121092796326,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.7335,
      "step": 135
    },
    {
      "epoch": 1.5410764872521248,
      "grad_norm": 0.4579167068004608,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.6938,
      "step": 136
    },
    {
      "epoch": 1.5524079320113313,
      "grad_norm": 0.39674249291419983,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.6535,
      "step": 137
    },
    {
      "epoch": 1.5637393767705383,
      "grad_norm": 0.4052182734012604,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.6701,
      "step": 138
    },
    {
      "epoch": 1.575070821529745,
      "grad_norm": 0.4056691527366638,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.7056,
      "step": 139
    },
    {
      "epoch": 1.5864022662889519,
      "grad_norm": 0.37060877680778503,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.6789,
      "step": 140
    },
    {
      "epoch": 1.5977337110481586,
      "grad_norm": 0.3834247291088104,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.7551,
      "step": 141
    },
    {
      "epoch": 1.6090651558073654,
      "grad_norm": 0.37477755546569824,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.685,
      "step": 142
    },
    {
      "epoch": 1.6203966005665722,
      "grad_norm": 0.3884474039077759,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.7001,
      "step": 143
    },
    {
      "epoch": 1.631728045325779,
      "grad_norm": 0.39254558086395264,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.6391,
      "step": 144
    },
    {
      "epoch": 1.643059490084986,
      "grad_norm": 0.3775780498981476,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.6829,
      "step": 145
    },
    {
      "epoch": 1.6543909348441925,
      "grad_norm": 0.3820933699607849,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.6711,
      "step": 146
    },
    {
      "epoch": 1.6657223796033995,
      "grad_norm": 0.38209760189056396,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.6857,
      "step": 147
    },
    {
      "epoch": 1.677053824362606,
      "grad_norm": 0.3991166055202484,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.6806,
      "step": 148
    },
    {
      "epoch": 1.688385269121813,
      "grad_norm": 0.4100518226623535,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.6901,
      "step": 149
    },
    {
      "epoch": 1.6997167138810199,
      "grad_norm": 0.4116714596748352,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.6445,
      "step": 150
    },
    {
      "epoch": 1.6997167138810199,
      "eval_loss": 0.7521026134490967,
      "eval_runtime": 11.3497,
      "eval_samples_per_second": 13.128,
      "eval_steps_per_second": 3.348,
      "step": 150
    },
    {
      "epoch": 1.7110481586402266,
      "grad_norm": 0.44452881813049316,
      "learning_rate": 1.553232954407171e-05,
      "loss": 0.6401,
      "step": 151
    },
    {
      "epoch": 1.7223796033994334,
      "grad_norm": 0.447711706161499,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 0.6424,
      "step": 152
    },
    {
      "epoch": 1.7337110481586402,
      "grad_norm": 0.48492729663848877,
      "learning_rate": 1.435357758543015e-05,
      "loss": 0.6073,
      "step": 153
    },
    {
      "epoch": 1.7450424929178472,
      "grad_norm": 0.8506810069084167,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 0.5074,
      "step": 154
    },
    {
      "epoch": 1.7563739376770537,
      "grad_norm": 0.3347216844558716,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 0.5402,
      "step": 155
    },
    {
      "epoch": 1.7677053824362607,
      "grad_norm": 0.4024171531200409,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 0.7443,
      "step": 156
    },
    {
      "epoch": 1.7790368271954673,
      "grad_norm": 0.41074129939079285,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 0.7573,
      "step": 157
    },
    {
      "epoch": 1.7903682719546743,
      "grad_norm": 0.3707183003425598,
      "learning_rate": 1.157994445715706e-05,
      "loss": 0.6854,
      "step": 158
    },
    {
      "epoch": 1.801699716713881,
      "grad_norm": 0.39143168926239014,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 0.698,
      "step": 159
    },
    {
      "epoch": 1.8130311614730878,
      "grad_norm": 0.3817121982574463,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 0.6419,
      "step": 160
    },
    {
      "epoch": 1.8243626062322946,
      "grad_norm": 0.38814979791641235,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 0.6402,
      "step": 161
    },
    {
      "epoch": 1.8356940509915014,
      "grad_norm": 0.3999271094799042,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.6806,
      "step": 162
    },
    {
      "epoch": 1.8470254957507082,
      "grad_norm": 0.3903258442878723,
      "learning_rate": 9.068759265665384e-06,
      "loss": 0.7103,
      "step": 163
    },
    {
      "epoch": 1.858356940509915,
      "grad_norm": 0.39158394932746887,
      "learning_rate": 8.599558442598998e-06,
      "loss": 0.7021,
      "step": 164
    },
    {
      "epoch": 1.869688385269122,
      "grad_norm": 0.3828256130218506,
      "learning_rate": 8.141676086873572e-06,
      "loss": 0.6773,
      "step": 165
    },
    {
      "epoch": 1.8810198300283285,
      "grad_norm": 0.40929779410362244,
      "learning_rate": 7.695237378953223e-06,
      "loss": 0.6684,
      "step": 166
    },
    {
      "epoch": 1.8923512747875355,
      "grad_norm": 0.37998712062835693,
      "learning_rate": 7.260364370723044e-06,
      "loss": 0.6873,
      "step": 167
    },
    {
      "epoch": 1.903682719546742,
      "grad_norm": 0.37896665930747986,
      "learning_rate": 6.837175952121306e-06,
      "loss": 0.6508,
      "step": 168
    },
    {
      "epoch": 1.915014164305949,
      "grad_norm": 0.40087971091270447,
      "learning_rate": 6.425787818636131e-06,
      "loss": 0.6933,
      "step": 169
    },
    {
      "epoch": 1.9263456090651558,
      "grad_norm": 0.4156991243362427,
      "learning_rate": 6.026312439675552e-06,
      "loss": 0.6888,
      "step": 170
    },
    {
      "epoch": 1.9376770538243626,
      "grad_norm": 0.4046257734298706,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 0.6119,
      "step": 171
    },
    {
      "epoch": 1.9490084985835694,
      "grad_norm": 0.3981984555721283,
      "learning_rate": 5.263533508961827e-06,
      "loss": 0.5937,
      "step": 172
    },
    {
      "epoch": 1.9603399433427762,
      "grad_norm": 0.4312017261981964,
      "learning_rate": 4.900438493352055e-06,
      "loss": 0.6388,
      "step": 173
    },
    {
      "epoch": 1.9716713881019832,
      "grad_norm": 0.44606897234916687,
      "learning_rate": 4.549673247541875e-06,
      "loss": 0.6094,
      "step": 174
    },
    {
      "epoch": 1.9830028328611897,
      "grad_norm": 0.4601079523563385,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 0.6116,
      "step": 175
    },
    {
      "epoch": 1.9943342776203967,
      "grad_norm": 0.7336193919181824,
      "learning_rate": 3.885512251130763e-06,
      "loss": 0.4964,
      "step": 176
    },
    {
      "epoch": 2.0056657223796033,
      "grad_norm": 0.8930611610412598,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 0.9594,
      "step": 177
    },
    {
      "epoch": 2.0169971671388103,
      "grad_norm": 0.30886948108673096,
      "learning_rate": 3.271776770026963e-06,
      "loss": 0.6154,
      "step": 178
    },
    {
      "epoch": 2.028328611898017,
      "grad_norm": 0.3591644763946533,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 0.621,
      "step": 179
    },
    {
      "epoch": 2.039660056657224,
      "grad_norm": 0.3551977574825287,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 0.6448,
      "step": 180
    },
    {
      "epoch": 2.0509915014164304,
      "grad_norm": 0.3618852198123932,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.6173,
      "step": 181
    },
    {
      "epoch": 2.0623229461756374,
      "grad_norm": 0.3655000329017639,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 0.6326,
      "step": 182
    },
    {
      "epoch": 2.0736543909348444,
      "grad_norm": 0.3681482672691345,
      "learning_rate": 1.962316193157593e-06,
      "loss": 0.6006,
      "step": 183
    },
    {
      "epoch": 2.084985835694051,
      "grad_norm": 0.3835912346839905,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 0.6109,
      "step": 184
    },
    {
      "epoch": 2.096317280453258,
      "grad_norm": 0.3807328939437866,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 0.6465,
      "step": 185
    },
    {
      "epoch": 2.1076487252124645,
      "grad_norm": 0.35800546407699585,
      "learning_rate": 1.333670137599713e-06,
      "loss": 0.6601,
      "step": 186
    },
    {
      "epoch": 2.1189801699716715,
      "grad_norm": 0.36241623759269714,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 0.6047,
      "step": 187
    },
    {
      "epoch": 2.130311614730878,
      "grad_norm": 0.3821946084499359,
      "learning_rate": 9.810017062595322e-07,
      "loss": 0.5959,
      "step": 188
    },
    {
      "epoch": 2.141643059490085,
      "grad_norm": 0.38033100962638855,
      "learning_rate": 8.247462563808817e-07,
      "loss": 0.6331,
      "step": 189
    },
    {
      "epoch": 2.1529745042492916,
      "grad_norm": 0.39716804027557373,
      "learning_rate": 6.819348298638839e-07,
      "loss": 0.5891,
      "step": 190
    },
    {
      "epoch": 2.1643059490084986,
      "grad_norm": 0.3756254017353058,
      "learning_rate": 5.526064699265753e-07,
      "loss": 0.6068,
      "step": 191
    },
    {
      "epoch": 2.1756373937677056,
      "grad_norm": 0.395888090133667,
      "learning_rate": 4.367965336512403e-07,
      "loss": 0.6375,
      "step": 192
    },
    {
      "epoch": 2.186968838526912,
      "grad_norm": 0.3884637653827667,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 0.6068,
      "step": 193
    },
    {
      "epoch": 2.198300283286119,
      "grad_norm": 0.4123586416244507,
      "learning_rate": 2.458548727494292e-07,
      "loss": 0.5634,
      "step": 194
    },
    {
      "epoch": 2.2096317280453257,
      "grad_norm": 0.4333895742893219,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 0.5411,
      "step": 195
    },
    {
      "epoch": 2.2209631728045327,
      "grad_norm": 0.5002503991127014,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 0.5667,
      "step": 196
    },
    {
      "epoch": 2.2322946175637393,
      "grad_norm": 0.5092443823814392,
      "learning_rate": 6.150154258476315e-08,
      "loss": 0.5003,
      "step": 197
    },
    {
      "epoch": 2.2436260623229463,
      "grad_norm": 0.7444424629211426,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 0.5318,
      "step": 198
    },
    {
      "epoch": 2.254957507082153,
      "grad_norm": 0.2637498676776886,
      "learning_rate": 6.834750376549792e-09,
      "loss": 0.3755,
      "step": 199
    },
    {
      "epoch": 2.26628895184136,
      "grad_norm": 0.3529297411441803,
      "learning_rate": 0.0,
      "loss": 0.6149,
      "step": 200
    },
    {
      "epoch": 2.26628895184136,
      "eval_loss": 0.7420778274536133,
      "eval_runtime": 11.3419,
      "eval_samples_per_second": 13.137,
      "eval_steps_per_second": 3.35,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.038712040186184e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}