{
  "best_metric": 0.4755863547325134,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.005905744320642545,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 3.937162880428363e-05,
      "grad_norm": 1.02256441116333,
      "learning_rate": 1e-05,
      "loss": 0.6653,
      "step": 1
    },
    {
      "epoch": 3.937162880428363e-05,
      "eval_loss": 2.0401175022125244,
      "eval_runtime": 1963.3936,
      "eval_samples_per_second": 21.788,
      "eval_steps_per_second": 5.447,
      "step": 1
    },
    {
      "epoch": 7.874325760856726e-05,
      "grad_norm": 1.1648401021957397,
      "learning_rate": 2e-05,
      "loss": 0.6528,
      "step": 2
    },
    {
      "epoch": 0.0001181148864128509,
      "grad_norm": 1.2922242879867554,
      "learning_rate": 3e-05,
      "loss": 0.6958,
      "step": 3
    },
    {
      "epoch": 0.00015748651521713453,
      "grad_norm": 1.711512565612793,
      "learning_rate": 4e-05,
      "loss": 0.9842,
      "step": 4
    },
    {
      "epoch": 0.00019685814402141817,
      "grad_norm": 2.0927672386169434,
      "learning_rate": 5e-05,
      "loss": 1.165,
      "step": 5
    },
    {
      "epoch": 0.0002362297728257018,
      "grad_norm": 2.2099623680114746,
      "learning_rate": 6e-05,
      "loss": 1.0422,
      "step": 6
    },
    {
      "epoch": 0.00027560140162998545,
      "grad_norm": 2.201043128967285,
      "learning_rate": 7e-05,
      "loss": 1.0076,
      "step": 7
    },
    {
      "epoch": 0.00031497303043426906,
      "grad_norm": 1.8493530750274658,
      "learning_rate": 8e-05,
      "loss": 0.8828,
      "step": 8
    },
    {
      "epoch": 0.0003543446592385527,
      "grad_norm": 1.8940231800079346,
      "learning_rate": 9e-05,
      "loss": 0.8789,
      "step": 9
    },
    {
      "epoch": 0.00039371628804283634,
      "grad_norm": 1.6951254606246948,
      "learning_rate": 0.0001,
      "loss": 0.7942,
      "step": 10
    },
    {
      "epoch": 0.00043308791684711995,
      "grad_norm": 1.5717829465866089,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.7426,
      "step": 11
    },
    {
      "epoch": 0.0004724595456514036,
      "grad_norm": 1.6309691667556763,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.6065,
      "step": 12
    },
    {
      "epoch": 0.0005118311744556872,
      "grad_norm": 1.7245391607284546,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.6492,
      "step": 13
    },
    {
      "epoch": 0.0005512028032599709,
      "grad_norm": 1.3676433563232422,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.6407,
      "step": 14
    },
    {
      "epoch": 0.0005905744320642545,
      "grad_norm": 1.4679405689239502,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.5274,
      "step": 15
    },
    {
      "epoch": 0.0006299460608685381,
      "grad_norm": 1.4089184999465942,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.5273,
      "step": 16
    },
    {
      "epoch": 0.0006693176896728218,
      "grad_norm": 1.5161076784133911,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.6176,
      "step": 17
    },
    {
      "epoch": 0.0007086893184771055,
      "grad_norm": 1.3749370574951172,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.5369,
      "step": 18
    },
    {
      "epoch": 0.000748060947281389,
      "grad_norm": 1.707553505897522,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.6345,
      "step": 19
    },
    {
      "epoch": 0.0007874325760856727,
      "grad_norm": 1.5158611536026,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.601,
      "step": 20
    },
    {
      "epoch": 0.0008268042048899563,
      "grad_norm": 1.3462746143341064,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.5207,
      "step": 21
    },
    {
      "epoch": 0.0008661758336942399,
      "grad_norm": 1.358368158340454,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.5192,
      "step": 22
    },
    {
      "epoch": 0.0009055474624985236,
      "grad_norm": 1.4276319742202759,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.5678,
      "step": 23
    },
    {
      "epoch": 0.0009449190913028072,
      "grad_norm": 1.4815235137939453,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.5764,
      "step": 24
    },
    {
      "epoch": 0.0009842907201070908,
      "grad_norm": 1.3998855352401733,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.5395,
      "step": 25
    },
    {
      "epoch": 0.0010236623489113745,
      "grad_norm": 1.4529114961624146,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.5424,
      "step": 26
    },
    {
      "epoch": 0.0010630339777156581,
      "grad_norm": 1.7836538553237915,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.5755,
      "step": 27
    },
    {
      "epoch": 0.0011024056065199418,
      "grad_norm": 1.4714139699935913,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.4163,
      "step": 28
    },
    {
      "epoch": 0.0011417772353242255,
      "grad_norm": 1.8692817687988281,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.5678,
      "step": 29
    },
    {
      "epoch": 0.001181148864128509,
      "grad_norm": 1.8492319583892822,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.5329,
      "step": 30
    },
    {
      "epoch": 0.0012205204929327926,
      "grad_norm": 1.5785956382751465,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.4777,
      "step": 31
    },
    {
      "epoch": 0.0012598921217370762,
      "grad_norm": 1.6650912761688232,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.4997,
      "step": 32
    },
    {
      "epoch": 0.00129926375054136,
      "grad_norm": 1.9529163837432861,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.577,
      "step": 33
    },
    {
      "epoch": 0.0013386353793456436,
      "grad_norm": 1.7996156215667725,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.5334,
      "step": 34
    },
    {
      "epoch": 0.0013780070081499272,
      "grad_norm": 1.7110034227371216,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.5201,
      "step": 35
    },
    {
      "epoch": 0.001417378636954211,
      "grad_norm": 1.8501421213150024,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.5089,
      "step": 36
    },
    {
      "epoch": 0.0014567502657584943,
      "grad_norm": 2.0073397159576416,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.6491,
      "step": 37
    },
    {
      "epoch": 0.001496121894562778,
      "grad_norm": 1.9730331897735596,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.5395,
      "step": 38
    },
    {
      "epoch": 0.0015354935233670617,
      "grad_norm": 2.027768135070801,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.5505,
      "step": 39
    },
    {
      "epoch": 0.0015748651521713453,
      "grad_norm": 2.7377169132232666,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.7136,
      "step": 40
    },
    {
      "epoch": 0.001614236780975629,
      "grad_norm": 2.628161907196045,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.6519,
      "step": 41
    },
    {
      "epoch": 0.0016536084097799127,
      "grad_norm": 3.1375608444213867,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.8701,
      "step": 42
    },
    {
      "epoch": 0.0016929800385841961,
      "grad_norm": 2.631460189819336,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.7225,
      "step": 43
    },
    {
      "epoch": 0.0017323516673884798,
      "grad_norm": 2.65053391456604,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.7696,
      "step": 44
    },
    {
      "epoch": 0.0017717232961927635,
      "grad_norm": 3.0114426612854004,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.7981,
      "step": 45
    },
    {
      "epoch": 0.0018110949249970471,
      "grad_norm": 3.490697145462036,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.7679,
      "step": 46
    },
    {
      "epoch": 0.0018504665538013308,
      "grad_norm": 3.9303817749023438,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.9967,
      "step": 47
    },
    {
      "epoch": 0.0018898381826056145,
      "grad_norm": 4.211299419403076,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.8935,
      "step": 48
    },
    {
      "epoch": 0.0019292098114098981,
      "grad_norm": 4.635986804962158,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.9208,
      "step": 49
    },
    {
      "epoch": 0.0019685814402141816,
      "grad_norm": 6.054840087890625,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.7544,
      "step": 50
    },
    {
      "epoch": 0.0019685814402141816,
      "eval_loss": 0.62474524974823,
      "eval_runtime": 1962.3949,
      "eval_samples_per_second": 21.799,
      "eval_steps_per_second": 5.45,
      "step": 50
    },
    {
      "epoch": 0.0020079530690184652,
      "grad_norm": 1.4681363105773926,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.6131,
      "step": 51
    },
    {
      "epoch": 0.002047324697822749,
      "grad_norm": 0.8419048190116882,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.4649,
      "step": 52
    },
    {
      "epoch": 0.0020866963266270326,
      "grad_norm": 0.7242401242256165,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.4772,
      "step": 53
    },
    {
      "epoch": 0.0021260679554313162,
      "grad_norm": 1.0355290174484253,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.5924,
      "step": 54
    },
    {
      "epoch": 0.0021654395842356,
      "grad_norm": 1.2674163579940796,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.7106,
      "step": 55
    },
    {
      "epoch": 0.0022048112130398836,
      "grad_norm": 1.0684322118759155,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.5397,
      "step": 56
    },
    {
      "epoch": 0.0022441828418441672,
      "grad_norm": 1.023458480834961,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.4722,
      "step": 57
    },
    {
      "epoch": 0.002283554470648451,
      "grad_norm": 1.130386233329773,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.5505,
      "step": 58
    },
    {
      "epoch": 0.0023229260994527346,
      "grad_norm": 1.0275880098342896,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.4878,
      "step": 59
    },
    {
      "epoch": 0.002362297728257018,
      "grad_norm": 1.2688953876495361,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.5625,
      "step": 60
    },
    {
      "epoch": 0.0024016693570613015,
      "grad_norm": 1.0798358917236328,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.455,
      "step": 61
    },
    {
      "epoch": 0.002441040985865585,
      "grad_norm": 1.1489145755767822,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.4756,
      "step": 62
    },
    {
      "epoch": 0.002480412614669869,
      "grad_norm": 1.0758352279663086,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.4719,
      "step": 63
    },
    {
      "epoch": 0.0025197842434741525,
      "grad_norm": 1.1072825193405151,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.4388,
      "step": 64
    },
    {
      "epoch": 0.002559155872278436,
      "grad_norm": 1.059680461883545,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.42,
      "step": 65
    },
    {
      "epoch": 0.00259852750108272,
      "grad_norm": 1.0395857095718384,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.4328,
      "step": 66
    },
    {
      "epoch": 0.0026378991298870035,
      "grad_norm": 1.2434015274047852,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.464,
      "step": 67
    },
    {
      "epoch": 0.002677270758691287,
      "grad_norm": 1.3831180334091187,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.6026,
      "step": 68
    },
    {
      "epoch": 0.002716642387495571,
      "grad_norm": 1.1558599472045898,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.4435,
      "step": 69
    },
    {
      "epoch": 0.0027560140162998545,
      "grad_norm": 1.0922515392303467,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.4497,
      "step": 70
    },
    {
      "epoch": 0.002795385645104138,
      "grad_norm": 1.132048487663269,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.4135,
      "step": 71
    },
    {
      "epoch": 0.002834757273908422,
      "grad_norm": 1.2695519924163818,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.4738,
      "step": 72
    },
    {
      "epoch": 0.002874128902712705,
      "grad_norm": 1.169937252998352,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.4338,
      "step": 73
    },
    {
      "epoch": 0.0029135005315169887,
      "grad_norm": 1.3878920078277588,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.5076,
      "step": 74
    },
    {
      "epoch": 0.0029528721603212724,
      "grad_norm": 1.1474246978759766,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.4431,
      "step": 75
    },
    {
      "epoch": 0.002992243789125556,
      "grad_norm": 1.1987924575805664,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.4218,
      "step": 76
    },
    {
      "epoch": 0.0030316154179298397,
      "grad_norm": 1.1889253854751587,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.454,
      "step": 77
    },
    {
      "epoch": 0.0030709870467341234,
      "grad_norm": 1.2669168710708618,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.4485,
      "step": 78
    },
    {
      "epoch": 0.003110358675538407,
      "grad_norm": 1.1950857639312744,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.4234,
      "step": 79
    },
    {
      "epoch": 0.0031497303043426907,
      "grad_norm": 1.3883540630340576,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.4564,
      "step": 80
    },
    {
      "epoch": 0.0031891019331469744,
      "grad_norm": 1.6918472051620483,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.5781,
      "step": 81
    },
    {
      "epoch": 0.003228473561951258,
      "grad_norm": 1.3038713932037354,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.4174,
      "step": 82
    },
    {
      "epoch": 0.0032678451907555417,
      "grad_norm": 1.435478925704956,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.4292,
      "step": 83
    },
    {
      "epoch": 0.0033072168195598254,
      "grad_norm": 1.9486966133117676,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.561,
      "step": 84
    },
    {
      "epoch": 0.003346588448364109,
      "grad_norm": 1.6429029703140259,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.4658,
      "step": 85
    },
    {
      "epoch": 0.0033859600771683923,
      "grad_norm": 1.6745431423187256,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.4302,
      "step": 86
    },
    {
      "epoch": 0.003425331705972676,
      "grad_norm": 1.627824068069458,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.53,
      "step": 87
    },
    {
      "epoch": 0.0034647033347769596,
      "grad_norm": 1.6169121265411377,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.4768,
      "step": 88
    },
    {
      "epoch": 0.0035040749635812433,
      "grad_norm": 1.7247333526611328,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.5279,
      "step": 89
    },
    {
      "epoch": 0.003543446592385527,
      "grad_norm": 2.0350518226623535,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.5958,
      "step": 90
    },
    {
      "epoch": 0.0035828182211898106,
      "grad_norm": 1.9901599884033203,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.5022,
      "step": 91
    },
    {
      "epoch": 0.0036221898499940943,
      "grad_norm": 2.2938389778137207,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.606,
      "step": 92
    },
    {
      "epoch": 0.003661561478798378,
      "grad_norm": 2.3475186824798584,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.6537,
      "step": 93
    },
    {
      "epoch": 0.0037009331076026616,
      "grad_norm": 2.820521831512451,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.6219,
      "step": 94
    },
    {
      "epoch": 0.0037403047364069453,
      "grad_norm": 3.1900765895843506,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.8085,
      "step": 95
    },
    {
      "epoch": 0.003779676365211229,
      "grad_norm": 3.7130322456359863,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.9369,
      "step": 96
    },
    {
      "epoch": 0.0038190479940155126,
      "grad_norm": 3.98307728767395,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.8162,
      "step": 97
    },
    {
      "epoch": 0.0038584196228197963,
      "grad_norm": 3.4590582847595215,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.7228,
      "step": 98
    },
    {
      "epoch": 0.0038977912516240795,
      "grad_norm": 4.0625,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.9072,
      "step": 99
    },
    {
      "epoch": 0.003937162880428363,
      "grad_norm": 7.270034313201904,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 1.0212,
      "step": 100
    },
    {
      "epoch": 0.003937162880428363,
      "eval_loss": 0.5313294529914856,
      "eval_runtime": 1964.4559,
      "eval_samples_per_second": 21.776,
      "eval_steps_per_second": 5.444,
      "step": 100
    },
    {
      "epoch": 0.003976534509232647,
      "grad_norm": 0.6821577548980713,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.4472,
      "step": 101
    },
    {
      "epoch": 0.0040159061380369305,
      "grad_norm": 0.6033506989479065,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.4505,
      "step": 102
    },
    {
      "epoch": 0.004055277766841214,
      "grad_norm": 0.6802194118499756,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.4243,
      "step": 103
    },
    {
      "epoch": 0.004094649395645498,
      "grad_norm": 1.0940698385238647,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.5355,
      "step": 104
    },
    {
      "epoch": 0.0041340210244497815,
      "grad_norm": 1.0095839500427246,
      "learning_rate": 5e-05,
      "loss": 0.4859,
      "step": 105
    },
    {
      "epoch": 0.004173392653254065,
      "grad_norm": 0.9586552381515503,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.4559,
      "step": 106
    },
    {
      "epoch": 0.004212764282058349,
      "grad_norm": 1.0491364002227783,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.5536,
      "step": 107
    },
    {
      "epoch": 0.0042521359108626325,
      "grad_norm": 1.0424336194992065,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.531,
      "step": 108
    },
    {
      "epoch": 0.004291507539666916,
      "grad_norm": 1.1021971702575684,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.4656,
      "step": 109
    },
    {
      "epoch": 0.0043308791684712,
      "grad_norm": 1.0408269166946411,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.4715,
      "step": 110
    },
    {
      "epoch": 0.0043702507972754835,
      "grad_norm": 1.21599543094635,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.527,
      "step": 111
    },
    {
      "epoch": 0.004409622426079767,
      "grad_norm": 1.0973793268203735,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.4781,
      "step": 112
    },
    {
      "epoch": 0.004448994054884051,
      "grad_norm": 1.0023589134216309,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.3882,
      "step": 113
    },
    {
      "epoch": 0.0044883656836883345,
      "grad_norm": 1.0988465547561646,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.4012,
      "step": 114
    },
    {
      "epoch": 0.004527737312492618,
      "grad_norm": 1.2344098091125488,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.4895,
      "step": 115
    },
    {
      "epoch": 0.004567108941296902,
      "grad_norm": 1.0267789363861084,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.4079,
      "step": 116
    },
    {
      "epoch": 0.0046064805701011855,
      "grad_norm": 1.1937060356140137,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.4989,
      "step": 117
    },
    {
      "epoch": 0.004645852198905469,
      "grad_norm": 1.0652450323104858,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.4234,
      "step": 118
    },
    {
      "epoch": 0.004685223827709752,
      "grad_norm": 1.1903423070907593,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.3993,
      "step": 119
    },
    {
      "epoch": 0.004724595456514036,
      "grad_norm": 1.204135537147522,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.3806,
      "step": 120
    },
    {
      "epoch": 0.004763967085318319,
      "grad_norm": 1.1541450023651123,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.4245,
      "step": 121
    },
    {
      "epoch": 0.004803338714122603,
      "grad_norm": 1.1386463642120361,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.4182,
      "step": 122
    },
    {
      "epoch": 0.004842710342926887,
      "grad_norm": 1.179220199584961,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.4101,
      "step": 123
    },
    {
      "epoch": 0.00488208197173117,
      "grad_norm": 1.2795960903167725,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.4203,
      "step": 124
    },
    {
      "epoch": 0.004921453600535454,
      "grad_norm": 1.6313341856002808,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.4771,
      "step": 125
    },
    {
      "epoch": 0.004960825229339738,
      "grad_norm": 1.3297889232635498,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.4054,
      "step": 126
    },
    {
      "epoch": 0.005000196858144021,
      "grad_norm": 1.3898698091506958,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.4196,
      "step": 127
    },
    {
      "epoch": 0.005039568486948305,
      "grad_norm": 1.2108392715454102,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.433,
      "step": 128
    },
    {
      "epoch": 0.005078940115752589,
      "grad_norm": 1.320252776145935,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.4027,
      "step": 129
    },
    {
      "epoch": 0.005118311744556872,
      "grad_norm": 1.3441872596740723,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.3871,
      "step": 130
    },
    {
      "epoch": 0.005157683373361156,
      "grad_norm": 1.3732396364212036,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.4692,
      "step": 131
    },
    {
      "epoch": 0.00519705500216544,
      "grad_norm": 1.471716284751892,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.3845,
      "step": 132
    },
    {
      "epoch": 0.005236426630969723,
      "grad_norm": 1.654649019241333,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.4476,
      "step": 133
    },
    {
      "epoch": 0.005275798259774007,
      "grad_norm": 1.314582109451294,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.3975,
      "step": 134
    },
    {
      "epoch": 0.005315169888578291,
      "grad_norm": 1.2855511903762817,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.3836,
      "step": 135
    },
    {
      "epoch": 0.005354541517382574,
      "grad_norm": 1.5129963159561157,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.4548,
      "step": 136
    },
    {
      "epoch": 0.005393913146186858,
      "grad_norm": 1.40498685836792,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.4161,
      "step": 137
    },
    {
      "epoch": 0.005433284774991142,
      "grad_norm": 1.7466362714767456,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.4721,
      "step": 138
    },
    {
      "epoch": 0.005472656403795425,
      "grad_norm": 1.5604318380355835,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.4564,
      "step": 139
    },
    {
      "epoch": 0.005512028032599709,
      "grad_norm": 1.6837966442108154,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.4667,
      "step": 140
    },
    {
      "epoch": 0.005551399661403993,
      "grad_norm": 1.867175579071045,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.4654,
      "step": 141
    },
    {
      "epoch": 0.005590771290208276,
      "grad_norm": 2.220432758331299,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.4996,
      "step": 142
    },
    {
      "epoch": 0.00563014291901256,
      "grad_norm": 2.555483341217041,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.6215,
      "step": 143
    },
    {
      "epoch": 0.005669514547816844,
      "grad_norm": 3.0226290225982666,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.5752,
      "step": 144
    },
    {
      "epoch": 0.005708886176621126,
      "grad_norm": 2.8674111366271973,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.5822,
      "step": 145
    },
    {
      "epoch": 0.00574825780542541,
      "grad_norm": 2.833491563796997,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.6672,
      "step": 146
    },
    {
      "epoch": 0.005787629434229694,
      "grad_norm": 3.3857784271240234,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.7152,
      "step": 147
    },
    {
      "epoch": 0.005827001063033977,
      "grad_norm": 3.082288980484009,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.622,
      "step": 148
    },
    {
      "epoch": 0.005866372691838261,
      "grad_norm": 3.570404291152954,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.6337,
      "step": 149
    },
    {
      "epoch": 0.005905744320642545,
      "grad_norm": 4.106326580047607,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.7418,
      "step": 150
    },
    {
      "epoch": 0.005905744320642545,
      "eval_loss": 0.4755863547325134,
      "eval_runtime": 1963.7539,
      "eval_samples_per_second": 21.784,
      "eval_steps_per_second": 5.446,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.391626802082611e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}