{
  "best_metric": 0.7140275239944458,
  "best_model_checkpoint": "miner_id_24/checkpoint-1500",
  "epoch": 2.6917900403768504,
  "eval_steps": 150,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001794526693584567,
      "eval_loss": 0.9548982977867126,
      "eval_runtime": 93.9669,
      "eval_samples_per_second": 9.993,
      "eval_steps_per_second": 1.256,
      "step": 1
    },
    {
      "epoch": 0.01794526693584567,
      "grad_norm": 3.1223835945129395,
      "learning_rate": 6e-06,
      "loss": 3.2878,
      "step": 10
    },
    {
      "epoch": 0.03589053387169134,
      "grad_norm": 2.606353998184204,
      "learning_rate": 1.2e-05,
      "loss": 3.3004,
      "step": 20
    },
    {
      "epoch": 0.05383580080753701,
      "grad_norm": 2.548677921295166,
      "learning_rate": 1.8e-05,
      "loss": 3.6273,
      "step": 30
    },
    {
      "epoch": 0.07178106774338268,
      "grad_norm": 2.486678123474121,
      "learning_rate": 2.4e-05,
      "loss": 3.7912,
      "step": 40
    },
    {
      "epoch": 0.08972633467922836,
      "grad_norm": 10.336651802062988,
      "learning_rate": 3e-05,
      "loss": 3.7769,
      "step": 50
    },
    {
      "epoch": 0.10767160161507403,
      "grad_norm": 1.5906541347503662,
      "learning_rate": 2.9996479470277262e-05,
      "loss": 2.9001,
      "step": 60
    },
    {
      "epoch": 0.1256168685509197,
      "grad_norm": 1.8941510915756226,
      "learning_rate": 2.9985919533659653e-05,
      "loss": 2.911,
      "step": 70
    },
    {
      "epoch": 0.14356213548676536,
      "grad_norm": 2.1188039779663086,
      "learning_rate": 2.9968325147023267e-05,
      "loss": 3.3648,
      "step": 80
    },
    {
      "epoch": 0.16150740242261102,
      "grad_norm": 2.7596967220306396,
      "learning_rate": 2.994370456924292e-05,
      "loss": 3.2929,
      "step": 90
    },
    {
      "epoch": 0.17945266935845672,
      "grad_norm": 9.052000999450684,
      "learning_rate": 2.9912069357315394e-05,
      "loss": 3.4641,
      "step": 100
    },
    {
      "epoch": 0.19739793629430238,
      "grad_norm": 1.8610212802886963,
      "learning_rate": 2.9873434360934543e-05,
      "loss": 2.7594,
      "step": 110
    },
    {
      "epoch": 0.21534320323014805,
      "grad_norm": 2.275176763534546,
      "learning_rate": 2.9827817715520775e-05,
      "loss": 2.8485,
      "step": 120
    },
    {
      "epoch": 0.23328847016599372,
      "grad_norm": 2.363050699234009,
      "learning_rate": 2.977524083370823e-05,
      "loss": 3.1626,
      "step": 130
    },
    {
      "epoch": 0.2512337371018394,
      "grad_norm": 3.0472464561462402,
      "learning_rate": 2.9715728395293587e-05,
      "loss": 3.2823,
      "step": 140
    },
    {
      "epoch": 0.2691790040376851,
      "grad_norm": 9.995095252990723,
      "learning_rate": 2.96493083356513e-05,
      "loss": 3.2776,
      "step": 150
    },
    {
      "epoch": 0.2691790040376851,
      "eval_loss": 0.7650104761123657,
      "eval_runtime": 95.2502,
      "eval_samples_per_second": 9.858,
      "eval_steps_per_second": 1.239,
      "step": 150
    },
    {
      "epoch": 0.2871242709735307,
      "grad_norm": 2.056764841079712,
      "learning_rate": 2.9576011832620583e-05,
      "loss": 2.7666,
      "step": 160
    },
    {
      "epoch": 0.3050695379093764,
      "grad_norm": 2.3999085426330566,
      "learning_rate": 2.9495873291870436e-05,
      "loss": 2.8254,
      "step": 170
    },
    {
      "epoch": 0.32301480484522205,
      "grad_norm": 2.938490390777588,
      "learning_rate": 2.940893033074948e-05,
      "loss": 3.0781,
      "step": 180
    },
    {
      "epoch": 0.34096007178106774,
      "grad_norm": 3.2307329177856445,
      "learning_rate": 2.9315223760628224e-05,
      "loss": 3.2347,
      "step": 190
    },
    {
      "epoch": 0.35890533871691344,
      "grad_norm": 10.07883358001709,
      "learning_rate": 2.9214797567742036e-05,
      "loss": 3.3451,
      "step": 200
    },
    {
      "epoch": 0.3768506056527591,
      "grad_norm": 2.220188617706299,
      "learning_rate": 2.9107698892543862e-05,
      "loss": 2.5911,
      "step": 210
    },
    {
      "epoch": 0.39479587258860477,
      "grad_norm": 2.3011841773986816,
      "learning_rate": 2.8993978007576263e-05,
      "loss": 2.6557,
      "step": 220
    },
    {
      "epoch": 0.4127411395244504,
      "grad_norm": 2.9171500205993652,
      "learning_rate": 2.8873688293873336e-05,
      "loss": 2.9965,
      "step": 230
    },
    {
      "epoch": 0.4306864064602961,
      "grad_norm": 3.5936508178710938,
      "learning_rate": 2.874688621590339e-05,
      "loss": 3.2309,
      "step": 240
    },
    {
      "epoch": 0.4486316733961418,
      "grad_norm": 15.465261459350586,
      "learning_rate": 2.861363129506436e-05,
      "loss": 3.3962,
      "step": 250
    },
    {
      "epoch": 0.46657694033198743,
      "grad_norm": 2.3160524368286133,
      "learning_rate": 2.847398608174417e-05,
      "loss": 2.676,
      "step": 260
    },
    {
      "epoch": 0.4845222072678331,
      "grad_norm": 2.586505651473999,
      "learning_rate": 2.832801612595937e-05,
      "loss": 2.7848,
      "step": 270
    },
    {
      "epoch": 0.5024674742036788,
      "grad_norm": 3.1157450675964355,
      "learning_rate": 2.8175789946585697e-05,
      "loss": 2.9969,
      "step": 280
    },
    {
      "epoch": 0.5204127411395244,
      "grad_norm": 3.554219961166382,
      "learning_rate": 2.801737899919502e-05,
      "loss": 3.2118,
      "step": 290
    },
    {
      "epoch": 0.5383580080753702,
      "grad_norm": 11.838674545288086,
      "learning_rate": 2.7852857642513838e-05,
      "loss": 3.19,
      "step": 300
    },
    {
      "epoch": 0.5383580080753702,
      "eval_loss": 0.7424979209899902,
      "eval_runtime": 95.2404,
      "eval_samples_per_second": 9.859,
      "eval_steps_per_second": 1.239,
      "step": 300
    },
    {
      "epoch": 0.5563032750112158,
      "grad_norm": 2.18560791015625,
      "learning_rate": 2.768230310351898e-05,
      "loss": 2.5618,
      "step": 310
    },
    {
      "epoch": 0.5742485419470614,
      "grad_norm": 2.7546699047088623,
      "learning_rate": 2.7505795441186953e-05,
      "loss": 2.7391,
      "step": 320
    },
    {
      "epoch": 0.5921938088829072,
      "grad_norm": 3.0346529483795166,
      "learning_rate": 2.7323417508913973e-05,
      "loss": 2.9975,
      "step": 330
    },
    {
      "epoch": 0.6101390758187528,
      "grad_norm": 3.5320701599121094,
      "learning_rate": 2.7135254915624213e-05,
      "loss": 3.2406,
      "step": 340
    },
    {
      "epoch": 0.6280843427545985,
      "grad_norm": 12.75845718383789,
      "learning_rate": 2.6941395985584656e-05,
      "loss": 3.3339,
      "step": 350
    },
    {
      "epoch": 0.6460296096904441,
      "grad_norm": 2.2395637035369873,
      "learning_rate": 2.6741931716945336e-05,
      "loss": 2.5051,
      "step": 360
    },
    {
      "epoch": 0.6639748766262898,
      "grad_norm": 2.6890869140625,
      "learning_rate": 2.6536955739024436e-05,
      "loss": 2.7496,
      "step": 370
    },
    {
      "epoch": 0.6819201435621355,
      "grad_norm": 3.151639461517334,
      "learning_rate": 2.632656426835831e-05,
      "loss": 2.9491,
      "step": 380
    },
    {
      "epoch": 0.6998654104979811,
      "grad_norm": 3.9626593589782715,
      "learning_rate": 2.6110856063537087e-05,
      "loss": 3.2246,
      "step": 390
    },
    {
      "epoch": 0.7178106774338269,
      "grad_norm": 10.024589538574219,
      "learning_rate": 2.5889932378846963e-05,
      "loss": 3.2718,
      "step": 400
    },
    {
      "epoch": 0.7357559443696725,
      "grad_norm": 2.5113890171051025,
      "learning_rate": 2.5663896916741064e-05,
      "loss": 2.6503,
      "step": 410
    },
    {
      "epoch": 0.7537012113055181,
      "grad_norm": 2.9303054809570312,
      "learning_rate": 2.543285577916108e-05,
      "loss": 2.61,
      "step": 420
    },
    {
      "epoch": 0.7716464782413638,
      "grad_norm": 3.2211666107177734,
      "learning_rate": 2.519691741773262e-05,
      "loss": 2.9236,
      "step": 430
    },
    {
      "epoch": 0.7895917451772095,
      "grad_norm": 3.8643081188201904,
      "learning_rate": 2.495619258285757e-05,
      "loss": 3.2966,
      "step": 440
    },
    {
      "epoch": 0.8075370121130552,
      "grad_norm": 14.004131317138672,
      "learning_rate": 2.4710794271727415e-05,
      "loss": 3.2105,
      "step": 450
    },
    {
      "epoch": 0.8075370121130552,
      "eval_loss": 0.7331081628799438,
      "eval_runtime": 95.7979,
      "eval_samples_per_second": 9.802,
      "eval_steps_per_second": 1.232,
      "step": 450
    },
    {
      "epoch": 0.8254822790489008,
      "grad_norm": 2.352147340774536,
      "learning_rate": 2.446083767528193e-05,
      "loss": 2.6354,
      "step": 460
    },
    {
      "epoch": 0.8434275459847466,
      "grad_norm": 2.8068552017211914,
      "learning_rate": 2.4206440124138064e-05,
      "loss": 2.6626,
      "step": 470
    },
    {
      "epoch": 0.8613728129205922,
      "grad_norm": 3.1152584552764893,
      "learning_rate": 2.3947721033514517e-05,
      "loss": 3.0455,
      "step": 480
    },
    {
      "epoch": 0.8793180798564378,
      "grad_norm": 4.005079746246338,
      "learning_rate": 2.3684801847177732e-05,
      "loss": 3.0879,
      "step": 490
    },
    {
      "epoch": 0.8972633467922836,
      "grad_norm": 10.259051322937012,
      "learning_rate": 2.341780598043574e-05,
      "loss": 3.2745,
      "step": 500
    },
    {
      "epoch": 0.9152086137281292,
      "grad_norm": 2.6010003089904785,
      "learning_rate": 2.3146858762206493e-05,
      "loss": 2.5606,
      "step": 510
    },
    {
      "epoch": 0.9331538806639749,
      "grad_norm": 2.807999610900879,
      "learning_rate": 2.287208737618801e-05,
      "loss": 2.7688,
      "step": 520
    },
    {
      "epoch": 0.9510991475998205,
      "grad_norm": 3.121795415878296,
      "learning_rate": 2.259362080115781e-05,
      "loss": 2.8815,
      "step": 530
    },
    {
      "epoch": 0.9690444145356663,
      "grad_norm": 3.921652317047119,
      "learning_rate": 2.231158975042979e-05,
      "loss": 3.1705,
      "step": 540
    },
    {
      "epoch": 0.9869896814715119,
      "grad_norm": 14.782620429992676,
      "learning_rate": 2.2026126610496852e-05,
      "loss": 3.1717,
      "step": 550
    },
    {
      "epoch": 1.0049349484073575,
      "grad_norm": 2.647948741912842,
      "learning_rate": 2.173736537888819e-05,
      "loss": 2.823,
      "step": 560
    },
    {
      "epoch": 1.0228802153432033,
      "grad_norm": 2.5824477672576904,
      "learning_rate": 2.1445441601270276e-05,
      "loss": 2.5629,
      "step": 570
    },
    {
      "epoch": 1.0408254822790488,
      "grad_norm": 3.400602340698242,
      "learning_rate": 2.115049230782124e-05,
      "loss": 2.6957,
      "step": 580
    },
    {
      "epoch": 1.0587707492148946,
      "grad_norm": 3.649951219558716,
      "learning_rate": 2.085265594890832e-05,
      "loss": 2.8003,
      "step": 590
    },
    {
      "epoch": 1.0767160161507403,
      "grad_norm": 4.453049182891846,
      "learning_rate": 2.055207233009872e-05,
      "loss": 3.1671,
      "step": 600
    },
    {
      "epoch": 1.0767160161507403,
      "eval_loss": 0.7251887917518616,
      "eval_runtime": 95.2302,
      "eval_samples_per_second": 9.86,
      "eval_steps_per_second": 1.239,
      "step": 600
    },
    {
      "epoch": 1.0946612830865858,
      "grad_norm": 2.900311231613159,
      "learning_rate": 2.0248882546534327e-05,
      "loss": 2.769,
      "step": 610
    },
    {
      "epoch": 1.1126065500224316,
      "grad_norm": 2.572707176208496,
      "learning_rate": 1.9943228916701108e-05,
      "loss": 2.4179,
      "step": 620
    },
    {
      "epoch": 1.1305518169582773,
      "grad_norm": 3.324117660522461,
      "learning_rate": 1.963525491562421e-05,
      "loss": 2.7605,
      "step": 630
    },
    {
      "epoch": 1.1484970838941229,
      "grad_norm": 3.975733757019043,
      "learning_rate": 1.9325105107520264e-05,
      "loss": 2.938,
      "step": 640
    },
    {
      "epoch": 1.1664423508299686,
      "grad_norm": 4.596153259277344,
      "learning_rate": 1.9012925077938318e-05,
      "loss": 3.0549,
      "step": 650
    },
    {
      "epoch": 1.1843876177658144,
      "grad_norm": 3.047126531600952,
      "learning_rate": 1.8698861365421433e-05,
      "loss": 2.809,
      "step": 660
    },
    {
      "epoch": 1.2023328847016599,
      "grad_norm": 2.942840337753296,
      "learning_rate": 1.8383061392720914e-05,
      "loss": 2.4638,
      "step": 670
    },
    {
      "epoch": 1.2202781516375056,
      "grad_norm": 3.421780824661255,
      "learning_rate": 1.8065673397595475e-05,
      "loss": 2.698,
      "step": 680
    },
    {
      "epoch": 1.2382234185733512,
      "grad_norm": 4.008915424346924,
      "learning_rate": 1.7746846363227843e-05,
      "loss": 2.8342,
      "step": 690
    },
    {
      "epoch": 1.256168685509197,
      "grad_norm": 4.5783371925354,
      "learning_rate": 1.7426729948291474e-05,
      "loss": 2.9376,
      "step": 700
    },
    {
      "epoch": 1.2741139524450427,
      "grad_norm": 3.4261183738708496,
      "learning_rate": 1.7105474416700165e-05,
      "loss": 2.6337,
      "step": 710
    },
    {
      "epoch": 1.2920592193808882,
      "grad_norm": 2.6931464672088623,
      "learning_rate": 1.6783230567073597e-05,
      "loss": 2.4057,
      "step": 720
    },
    {
      "epoch": 1.310004486316734,
      "grad_norm": 3.372232675552368,
      "learning_rate": 1.646014966195185e-05,
      "loss": 2.6486,
      "step": 730
    },
    {
      "epoch": 1.3279497532525797,
      "grad_norm": 3.8511993885040283,
      "learning_rate": 1.613638335679216e-05,
      "loss": 2.8654,
      "step": 740
    },
    {
      "epoch": 1.3458950201884252,
      "grad_norm": 4.8227152824401855,
      "learning_rate": 1.5812083628781265e-05,
      "loss": 2.9859,
      "step": 750
    },
    {
      "epoch": 1.3458950201884252,
      "eval_loss": 0.7222854495048523,
      "eval_runtime": 95.2008,
      "eval_samples_per_second": 9.863,
      "eval_steps_per_second": 1.239,
      "step": 750
    },
    {
      "epoch": 1.363840287124271,
      "grad_norm": 3.064589500427246,
      "learning_rate": 1.548740270549671e-05,
      "loss": 2.6571,
      "step": 760
    },
    {
      "epoch": 1.3817855540601167,
      "grad_norm": 3.1526994705200195,
      "learning_rate": 1.5162492993450599e-05,
      "loss": 2.4803,
      "step": 770
    },
    {
      "epoch": 1.3997308209959622,
      "grad_norm": 3.5985524654388428,
      "learning_rate": 1.4837507006549403e-05,
      "loss": 2.6762,
      "step": 780
    },
    {
      "epoch": 1.417676087931808,
      "grad_norm": 3.952852725982666,
      "learning_rate": 1.4512597294503295e-05,
      "loss": 2.8978,
      "step": 790
    },
    {
      "epoch": 1.4356213548676537,
      "grad_norm": 4.69930362701416,
      "learning_rate": 1.4187916371218739e-05,
      "loss": 3.0415,
      "step": 800
    },
    {
      "epoch": 1.4535666218034993,
      "grad_norm": 3.1091654300689697,
      "learning_rate": 1.3863616643207844e-05,
      "loss": 2.9257,
      "step": 810
    },
    {
      "epoch": 1.471511888739345,
      "grad_norm": 2.9218077659606934,
      "learning_rate": 1.3539850338048156e-05,
      "loss": 2.5462,
      "step": 820
    },
    {
      "epoch": 1.4894571556751908,
      "grad_norm": 3.5961787700653076,
      "learning_rate": 1.3216769432926405e-05,
      "loss": 2.6275,
      "step": 830
    },
    {
      "epoch": 1.5074024226110363,
      "grad_norm": 4.310726165771484,
      "learning_rate": 1.2894525583299835e-05,
      "loss": 2.8565,
      "step": 840
    },
    {
      "epoch": 1.525347689546882,
      "grad_norm": 4.69102668762207,
      "learning_rate": 1.2573270051708529e-05,
      "loss": 2.9611,
      "step": 850
    },
    {
      "epoch": 1.5432929564827278,
      "grad_norm": 3.2854912281036377,
      "learning_rate": 1.2253153636772158e-05,
      "loss": 2.7114,
      "step": 860
    },
    {
      "epoch": 1.5612382234185733,
      "grad_norm": 3.1976680755615234,
      "learning_rate": 1.193432660240453e-05,
      "loss": 2.4224,
      "step": 870
    },
    {
      "epoch": 1.579183490354419,
      "grad_norm": 3.6152374744415283,
      "learning_rate": 1.1616938607279089e-05,
      "loss": 2.6699,
      "step": 880
    },
    {
      "epoch": 1.5971287572902648,
      "grad_norm": 4.225895404815674,
      "learning_rate": 1.1301138634578571e-05,
      "loss": 2.93,
      "step": 890
    },
    {
      "epoch": 1.6150740242261103,
      "grad_norm": 5.0000128746032715,
      "learning_rate": 1.098707492206169e-05,
      "loss": 2.9738,
      "step": 900
    },
    {
      "epoch": 1.6150740242261103,
      "eval_loss": 0.7168869376182556,
      "eval_runtime": 95.1917,
      "eval_samples_per_second": 9.864,
      "eval_steps_per_second": 1.24,
      "step": 900
    },
    {
      "epoch": 1.6330192911619559,
      "grad_norm": 2.9839413166046143,
      "learning_rate": 1.067489489247974e-05,
      "loss": 2.7097,
      "step": 910
    },
    {
      "epoch": 1.6509645580978018,
      "grad_norm": 2.9765803813934326,
      "learning_rate": 1.036474508437579e-05,
      "loss": 2.4352,
      "step": 920
    },
    {
      "epoch": 1.6689098250336474,
      "grad_norm": 3.5830776691436768,
      "learning_rate": 1.0056771083298894e-05,
      "loss": 2.6683,
      "step": 930
    },
    {
      "epoch": 1.686855091969493,
      "grad_norm": 4.138453960418701,
      "learning_rate": 9.751117453465674e-06,
      "loss": 2.8063,
      "step": 940
    },
    {
      "epoch": 1.7048003589053389,
      "grad_norm": 5.089868545532227,
      "learning_rate": 9.447927669901284e-06,
      "loss": 3.0384,
      "step": 950
    },
    {
      "epoch": 1.7227456258411844,
      "grad_norm": 3.140326499938965,
      "learning_rate": 9.147344051091682e-06,
      "loss": 2.7848,
      "step": 960
    },
    {
      "epoch": 1.74069089277703,
      "grad_norm": 2.8979885578155518,
      "learning_rate": 8.849507692178758e-06,
      "loss": 2.5351,
      "step": 970
    },
    {
      "epoch": 1.7586361597128757,
      "grad_norm": 3.619845390319824,
      "learning_rate": 8.554558398729726e-06,
      "loss": 2.644,
      "step": 980
    },
    {
      "epoch": 1.7765814266487214,
      "grad_norm": 4.086766242980957,
      "learning_rate": 8.262634621111819e-06,
      "loss": 2.7442,
      "step": 990
    },
    {
      "epoch": 1.794526693584567,
      "grad_norm": 5.267161846160889,
      "learning_rate": 7.97387338950315e-06,
      "loss": 3.0389,
      "step": 1000
    },
    {
      "epoch": 1.8124719605204127,
      "grad_norm": 3.162679672241211,
      "learning_rate": 7.688410249570214e-06,
      "loss": 2.7156,
      "step": 1010
    },
    {
      "epoch": 1.8304172274562585,
      "grad_norm": 3.187302827835083,
      "learning_rate": 7.4063791988421905e-06,
      "loss": 2.4397,
      "step": 1020
    },
    {
      "epoch": 1.848362494392104,
      "grad_norm": 3.6598949432373047,
      "learning_rate": 7.127912623811993e-06,
      "loss": 2.5768,
      "step": 1030
    },
    {
      "epoch": 1.8663077613279497,
      "grad_norm": 4.343390464782715,
      "learning_rate": 6.853141237793507e-06,
      "loss": 2.85,
      "step": 1040
    },
    {
      "epoch": 1.8842530282637955,
      "grad_norm": 5.103370666503906,
      "learning_rate": 6.582194019564266e-06,
      "loss": 3.0224,
      "step": 1050
    },
    {
      "epoch": 1.8842530282637955,
      "eval_loss": 0.714248538017273,
      "eval_runtime": 95.2398,
      "eval_samples_per_second": 9.859,
      "eval_steps_per_second": 1.239,
      "step": 1050
    },
    {
      "epoch": 1.902198295199641,
      "grad_norm": 3.154374122619629,
      "learning_rate": 6.315198152822273e-06,
      "loss": 2.6001,
      "step": 1060
    },
    {
      "epoch": 1.9201435621354868,
      "grad_norm": 3.071227788925171,
      "learning_rate": 6.052278966485492e-06,
      "loss": 2.4724,
      "step": 1070
    },
    {
      "epoch": 1.9380888290713325,
      "grad_norm": 3.503994941711426,
      "learning_rate": 5.793559875861938e-06,
      "loss": 2.6831,
      "step": 1080
    },
    {
      "epoch": 1.956034096007178,
      "grad_norm": 4.184395790100098,
      "learning_rate": 5.539162324718075e-06,
      "loss": 2.7453,
      "step": 1090
    },
    {
      "epoch": 1.9739793629430238,
      "grad_norm": 5.539226531982422,
      "learning_rate": 5.289205728272587e-06,
      "loss": 3.1028,
      "step": 1100
    },
    {
      "epoch": 1.9919246298788695,
      "grad_norm": 3.3464314937591553,
      "learning_rate": 5.043807417142436e-06,
      "loss": 2.684,
      "step": 1110
    },
    {
      "epoch": 2.009869896814715,
      "grad_norm": 2.914656400680542,
      "learning_rate": 4.8030825822673816e-06,
      "loss": 2.5496,
      "step": 1120
    },
    {
      "epoch": 2.0278151637505606,
      "grad_norm": 3.4661035537719727,
      "learning_rate": 4.567144220838923e-06,
      "loss": 2.4933,
      "step": 1130
    },
    {
      "epoch": 2.0457604306864066,
      "grad_norm": 3.822174549102783,
      "learning_rate": 4.336103083258942e-06,
      "loss": 2.6796,
      "step": 1140
    },
    {
      "epoch": 2.063705697622252,
      "grad_norm": 4.563441276550293,
      "learning_rate": 4.110067621153041e-06,
      "loss": 2.8069,
      "step": 1150
    },
    {
      "epoch": 2.0816509645580976,
      "grad_norm": 5.503087520599365,
      "learning_rate": 3.889143936462915e-06,
      "loss": 2.9136,
      "step": 1160
    },
    {
      "epoch": 2.0995962314939436,
      "grad_norm": 3.0303938388824463,
      "learning_rate": 3.673435731641692e-06,
      "loss": 2.4711,
      "step": 1170
    },
    {
      "epoch": 2.117541498429789,
      "grad_norm": 3.6022088527679443,
      "learning_rate": 3.4630442609755666e-06,
      "loss": 2.2998,
      "step": 1180
    },
    {
      "epoch": 2.1354867653656346,
      "grad_norm": 4.142797946929932,
      "learning_rate": 3.2580682830546667e-06,
      "loss": 2.6211,
      "step": 1190
    },
    {
      "epoch": 2.1534320323014806,
      "grad_norm": 4.368759632110596,
      "learning_rate": 3.0586040144153436e-06,
      "loss": 2.7689,
      "step": 1200
    },
    {
      "epoch": 2.1534320323014806,
      "eval_loss": 0.7151356935501099,
      "eval_runtime": 95.185,
      "eval_samples_per_second": 9.865,
      "eval_steps_per_second": 1.24,
      "step": 1200
    },
    {
      "epoch": 2.171377299237326,
      "grad_norm": 5.972046375274658,
      "learning_rate": 2.86474508437579e-06,
      "loss": 2.8707,
      "step": 1210
    },
    {
      "epoch": 2.1893225661731717,
      "grad_norm": 3.227733850479126,
      "learning_rate": 2.67658249108603e-06,
      "loss": 2.4486,
      "step": 1220
    },
    {
      "epoch": 2.2072678331090176,
      "grad_norm": 3.5549120903015137,
      "learning_rate": 2.4942045588130504e-06,
      "loss": 2.3472,
      "step": 1230
    },
    {
      "epoch": 2.225213100044863,
      "grad_norm": 4.020270347595215,
      "learning_rate": 2.317696896481024e-06,
      "loss": 2.6453,
      "step": 1240
    },
    {
      "epoch": 2.2431583669807087,
      "grad_norm": 4.551384925842285,
      "learning_rate": 2.1471423574861643e-06,
      "loss": 2.777,
      "step": 1250
    },
    {
      "epoch": 2.2611036339165547,
      "grad_norm": 5.8825812339782715,
      "learning_rate": 1.982621000804979e-06,
      "loss": 2.8939,
      "step": 1260
    },
    {
      "epoch": 2.2790489008524,
      "grad_norm": 3.2557361125946045,
      "learning_rate": 1.8242100534143065e-06,
      "loss": 2.5914,
      "step": 1270
    },
    {
      "epoch": 2.2969941677882457,
      "grad_norm": 3.4699723720550537,
      "learning_rate": 1.6719838740406313e-06,
      "loss": 2.3236,
      "step": 1280
    },
    {
      "epoch": 2.3149394347240917,
      "grad_norm": 4.2601752281188965,
      "learning_rate": 1.5260139182558363e-06,
      "loss": 2.6321,
      "step": 1290
    },
    {
      "epoch": 2.332884701659937,
      "grad_norm": 4.615097522735596,
      "learning_rate": 1.3863687049356465e-06,
      "loss": 2.8052,
      "step": 1300
    },
    {
      "epoch": 2.3508299685957827,
      "grad_norm": 5.796968936920166,
      "learning_rate": 1.25311378409661e-06,
      "loss": 2.9489,
      "step": 1310
    },
    {
      "epoch": 2.3687752355316287,
      "grad_norm": 3.3097987174987793,
      "learning_rate": 1.1263117061266677e-06,
      "loss": 2.4068,
      "step": 1320
    },
    {
      "epoch": 2.3867205024674742,
      "grad_norm": 3.570329427719116,
      "learning_rate": 1.006021992423738e-06,
      "loss": 2.3704,
      "step": 1330
    },
    {
      "epoch": 2.4046657694033198,
      "grad_norm": 3.7566256523132324,
      "learning_rate": 8.923011074561405e-07,
      "loss": 2.5744,
      "step": 1340
    },
    {
      "epoch": 2.4226110363391653,
      "grad_norm": 4.76233434677124,
      "learning_rate": 7.852024322579649e-07,
      "loss": 2.684,
      "step": 1350
    },
    {
      "epoch": 2.4226110363391653,
      "eval_loss": 0.714054524898529,
      "eval_runtime": 95.2194,
      "eval_samples_per_second": 9.861,
      "eval_steps_per_second": 1.239,
      "step": 1350
    },
    {
      "epoch": 2.4405563032750113,
      "grad_norm": 5.729117393493652,
      "learning_rate": 6.847762393717782e-07,
      "loss": 2.9529,
      "step": 1360
    },
    {
      "epoch": 2.458501570210857,
      "grad_norm": 3.2370126247406006,
      "learning_rate": 5.910696692505202e-07,
      "loss": 2.3974,
      "step": 1370
    },
    {
      "epoch": 2.4764468371467023,
      "grad_norm": 3.672769546508789,
      "learning_rate": 5.041267081295648e-07,
      "loss": 2.4916,
      "step": 1380
    },
    {
      "epoch": 2.4943921040825483,
      "grad_norm": 3.912321090698242,
      "learning_rate": 4.239881673794166e-07,
      "loss": 2.6173,
      "step": 1390
    },
    {
      "epoch": 2.512337371018394,
      "grad_norm": 4.756332874298096,
      "learning_rate": 3.5069166434870014e-07,
      "loss": 2.8032,
      "step": 1400
    },
    {
      "epoch": 2.5302826379542394,
      "grad_norm": 5.58614444732666,
      "learning_rate": 2.8427160470641255e-07,
      "loss": 2.931,
      "step": 1410
    },
    {
      "epoch": 2.5482279048900853,
      "grad_norm": 3.3309953212738037,
      "learning_rate": 2.2475916629177418e-07,
      "loss": 2.5066,
      "step": 1420
    },
    {
      "epoch": 2.566173171825931,
      "grad_norm": 3.625159502029419,
      "learning_rate": 1.7218228447922869e-07,
      "loss": 2.3303,
      "step": 1430
    },
    {
      "epoch": 2.5841184387617764,
      "grad_norm": 4.057403087615967,
      "learning_rate": 1.2656563906545903e-07,
      "loss": 2.6747,
      "step": 1440
    },
    {
      "epoch": 2.6020637056976224,
      "grad_norm": 4.8578877449035645,
      "learning_rate": 8.793064268460605e-08,
      "loss": 2.7823,
      "step": 1450
    },
    {
      "epoch": 2.620008972633468,
      "grad_norm": 5.646084308624268,
      "learning_rate": 5.629543075708177e-08,
      "loss": 2.9719,
      "step": 1460
    },
    {
      "epoch": 2.6379542395693134,
      "grad_norm": 3.3096609115600586,
      "learning_rate": 3.1674852976734116e-08,
      "loss": 2.5002,
      "step": 1470
    },
    {
      "epoch": 2.6558995065051594,
      "grad_norm": 3.604172706604004,
      "learning_rate": 1.4080466340349318e-08,
      "loss": 2.3697,
      "step": 1480
    },
    {
      "epoch": 2.673844773441005,
      "grad_norm": 4.091582298278809,
      "learning_rate": 3.520529722738086e-09,
      "loss": 2.7008,
      "step": 1490
    },
    {
      "epoch": 2.6917900403768504,
      "grad_norm": 4.5670952796936035,
      "learning_rate": 0.0,
      "loss": 2.6918,
      "step": 1500
    },
    {
      "epoch": 2.6917900403768504,
      "eval_loss": 0.7140275239944458,
      "eval_runtime": 95.2168,
      "eval_samples_per_second": 9.862,
      "eval_steps_per_second": 1.239,
      "step": 1500
    }
  ],
  "logging_steps": 10,
  "max_steps": 1500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 150,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 3,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.78587342489387e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}