{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999775734469612,
  "eval_steps": 500,
  "global_step": 2229,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008970621215519175,
      "grad_norm": 1.2712721824645996,
      "learning_rate": 5.970149253731343e-06,
      "loss": 3.8614,
      "step": 20
    },
    {
      "epoch": 0.01794124243103835,
      "grad_norm": 1.3492835760116577,
      "learning_rate": 1.1940298507462686e-05,
      "loss": 2.8143,
      "step": 40
    },
    {
      "epoch": 0.026911863646557524,
      "grad_norm": 0.6147993206977844,
      "learning_rate": 1.791044776119403e-05,
      "loss": 2.7456,
      "step": 60
    },
    {
      "epoch": 0.0358824848620767,
      "grad_norm": 0.7868013978004456,
      "learning_rate": 1.999821584672887e-05,
      "loss": 2.6969,
      "step": 80
    },
    {
      "epoch": 0.04485310607759587,
      "grad_norm": 0.8422935009002686,
      "learning_rate": 1.998850515736159e-05,
      "loss": 2.7144,
      "step": 100
    },
    {
      "epoch": 0.05382372729311505,
      "grad_norm": 0.5046640038490295,
      "learning_rate": 1.9970358823117534e-05,
      "loss": 2.6939,
      "step": 120
    },
    {
      "epoch": 0.06279434850863422,
      "grad_norm": 0.6028425097465515,
      "learning_rate": 1.994379216921594e-05,
      "loss": 2.5943,
      "step": 140
    },
    {
      "epoch": 0.0717649697241534,
      "grad_norm": 0.6477038264274597,
      "learning_rate": 1.990882763213298e-05,
      "loss": 2.6143,
      "step": 160
    },
    {
      "epoch": 0.08073559093967257,
      "grad_norm": 0.3538740873336792,
      "learning_rate": 1.986549474065333e-05,
      "loss": 2.6754,
      "step": 180
    },
    {
      "epoch": 0.08970621215519174,
      "grad_norm": 0.3921215534210205,
      "learning_rate": 1.98138300909321e-05,
      "loss": 2.7099,
      "step": 200
    },
    {
      "epoch": 0.09867683337071093,
      "grad_norm": 0.39746612310409546,
      "learning_rate": 1.9753877315588072e-05,
      "loss": 2.4453,
      "step": 220
    },
    {
      "epoch": 0.1076474545862301,
      "grad_norm": 0.5270581245422363,
      "learning_rate": 1.9685687046854415e-05,
      "loss": 2.5436,
      "step": 240
    },
    {
      "epoch": 0.11661807580174927,
      "grad_norm": 0.4105972349643707,
      "learning_rate": 1.9609316873817992e-05,
      "loss": 2.6322,
      "step": 260
    },
    {
      "epoch": 0.12558869701726844,
      "grad_norm": 0.33983883261680603,
      "learning_rate": 1.952483129378333e-05,
      "loss": 2.5725,
      "step": 280
    },
    {
      "epoch": 0.13455931823278763,
      "grad_norm": 0.4139712154865265,
      "learning_rate": 1.9432301657802378e-05,
      "loss": 2.6455,
      "step": 300
    },
    {
      "epoch": 0.1435299394483068,
      "grad_norm": 0.3298761546611786,
      "learning_rate": 1.9331806110416027e-05,
      "loss": 2.61,
      "step": 320
    },
    {
      "epoch": 0.15250056066382597,
      "grad_norm": 0.3583829998970032,
      "learning_rate": 1.922342952365829e-05,
      "loss": 2.5551,
      "step": 340
    },
    {
      "epoch": 0.16147118187934514,
      "grad_norm": 0.3369527757167816,
      "learning_rate": 1.9107263425378873e-05,
      "loss": 2.6248,
      "step": 360
    },
    {
      "epoch": 0.1704418030948643,
      "grad_norm": 0.4316694736480713,
      "learning_rate": 1.8983405921944686e-05,
      "loss": 2.5037,
      "step": 380
    },
    {
      "epoch": 0.17941242431038348,
      "grad_norm": 0.3557095527648926,
      "learning_rate": 1.8851961615385542e-05,
      "loss": 2.6168,
      "step": 400
    },
    {
      "epoch": 0.18838304552590268,
      "grad_norm": 0.3330799341201782,
      "learning_rate": 1.8713041515054065e-05,
      "loss": 2.5813,
      "step": 420
    },
    {
      "epoch": 0.19735366674142185,
      "grad_norm": 0.5107260346412659,
      "learning_rate": 1.8566762943874376e-05,
      "loss": 2.5269,
      "step": 440
    },
    {
      "epoch": 0.20632428795694102,
      "grad_norm": 0.30552321672439575,
      "learning_rate": 1.8413249439258743e-05,
      "loss": 2.5806,
      "step": 460
    },
    {
      "epoch": 0.2152949091724602,
      "grad_norm": 0.33228904008865356,
      "learning_rate": 1.8252630648775874e-05,
      "loss": 2.5864,
      "step": 480
    },
    {
      "epoch": 0.22426553038797936,
      "grad_norm": 0.3951259255409241,
      "learning_rate": 1.8085042220658993e-05,
      "loss": 2.5511,
      "step": 500
    },
    {
      "epoch": 0.23323615160349853,
      "grad_norm": 0.3492446839809418,
      "learning_rate": 1.791062568924609e-05,
      "loss": 2.5582,
      "step": 520
    },
    {
      "epoch": 0.24220677281901773,
      "grad_norm": 0.3901676833629608,
      "learning_rate": 1.7729528355449214e-05,
      "loss": 2.5093,
      "step": 540
    },
    {
      "epoch": 0.25117739403453687,
      "grad_norm": 0.41344988346099854,
      "learning_rate": 1.7541903162353638e-05,
      "loss": 2.5634,
      "step": 560
    },
    {
      "epoch": 0.26014801525005604,
      "grad_norm": 0.3539174497127533,
      "learning_rate": 1.734790856605204e-05,
      "loss": 2.5236,
      "step": 580
    },
    {
      "epoch": 0.26911863646557527,
      "grad_norm": 0.30354949831962585,
      "learning_rate": 1.714770840182273e-05,
      "loss": 2.4823,
      "step": 600
    },
    {
      "epoch": 0.27808925768109444,
      "grad_norm": 0.3280078172683716,
      "learning_rate": 1.6941471745764996e-05,
      "loss": 2.5034,
      "step": 620
    },
    {
      "epoch": 0.2870598788966136,
      "grad_norm": 0.33530184626579285,
      "learning_rate": 1.672937277200837e-05,
      "loss": 2.5895,
      "step": 640
    },
    {
      "epoch": 0.2960305001121328,
      "grad_norm": 0.6118967533111572,
      "learning_rate": 1.6511590605616423e-05,
      "loss": 2.5025,
      "step": 660
    },
    {
      "epoch": 0.30500112132765195,
      "grad_norm": 0.45741620659828186,
      "learning_rate": 1.628830917130935e-05,
      "loss": 2.5659,
      "step": 680
    },
    {
      "epoch": 0.3139717425431711,
      "grad_norm": 0.3485761284828186,
      "learning_rate": 1.6059717038133038e-05,
      "loss": 2.6027,
      "step": 700
    },
    {
      "epoch": 0.3229423637586903,
      "grad_norm": 0.42178016901016235,
      "learning_rate": 1.5826007260205868e-05,
      "loss": 2.4769,
      "step": 720
    },
    {
      "epoch": 0.33191298497420946,
      "grad_norm": 0.4537988305091858,
      "learning_rate": 1.5587377213677705e-05,
      "loss": 2.4729,
      "step": 740
    },
    {
      "epoch": 0.3408836061897286,
      "grad_norm": 0.355874627828598,
      "learning_rate": 1.5344028430038764e-05,
      "loss": 2.4891,
      "step": 760
    },
    {
      "epoch": 0.3498542274052478,
      "grad_norm": 0.32584452629089355,
      "learning_rate": 1.5096166425919176e-05,
      "loss": 2.4743,
      "step": 780
    },
    {
      "epoch": 0.35882484862076697,
      "grad_norm": 0.32193851470947266,
      "learning_rate": 1.4844000529522942e-05,
      "loss": 2.4749,
      "step": 800
    },
    {
      "epoch": 0.36779546983628614,
      "grad_norm": 0.31153932213783264,
      "learning_rate": 1.458774370384287e-05,
      "loss": 2.5422,
      "step": 820
    },
    {
      "epoch": 0.37676609105180536,
      "grad_norm": 0.3169076144695282,
      "learning_rate": 1.4327612366805832e-05,
      "loss": 2.4914,
      "step": 840
    },
    {
      "epoch": 0.38573671226732453,
      "grad_norm": 0.2868211269378662,
      "learning_rate": 1.4063826208500182e-05,
      "loss": 2.5825,
      "step": 860
    },
    {
      "epoch": 0.3947073334828437,
      "grad_norm": 0.3413689136505127,
      "learning_rate": 1.3796608005639738e-05,
      "loss": 2.5594,
      "step": 880
    },
    {
      "epoch": 0.4036779546983629,
      "grad_norm": 0.3216472566127777,
      "learning_rate": 1.352618343342098e-05,
      "loss": 2.4943,
      "step": 900
    },
    {
      "epoch": 0.41264857591388204,
      "grad_norm": 0.3140867054462433,
      "learning_rate": 1.3252780874932395e-05,
      "loss": 2.4842,
      "step": 920
    },
    {
      "epoch": 0.4216191971294012,
      "grad_norm": 0.3276834487915039,
      "learning_rate": 1.2976631228276894e-05,
      "loss": 2.5065,
      "step": 940
    },
    {
      "epoch": 0.4305898183449204,
      "grad_norm": 0.3846268951892853,
      "learning_rate": 1.2697967711570243e-05,
      "loss": 2.4219,
      "step": 960
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 0.3238185942173004,
      "learning_rate": 1.2417025665980114e-05,
      "loss": 2.4617,
      "step": 980
    },
    {
      "epoch": 0.4485310607759587,
      "grad_norm": 0.34462079405784607,
      "learning_rate": 1.2134042356972175e-05,
      "loss": 2.5461,
      "step": 1000
    },
    {
      "epoch": 0.4575016819914779,
      "grad_norm": 0.31439366936683655,
      "learning_rate": 1.1849256773931058e-05,
      "loss": 2.3627,
      "step": 1020
    },
    {
      "epoch": 0.46647230320699706,
      "grad_norm": 0.2591687738895416,
      "learning_rate": 1.156290942832536e-05,
      "loss": 2.4446,
      "step": 1040
    },
    {
      "epoch": 0.47544292442251623,
      "grad_norm": 0.5144124031066895,
      "learning_rate": 1.1275242150587254e-05,
      "loss": 2.4891,
      "step": 1060
    },
    {
      "epoch": 0.48441354563803546,
      "grad_norm": 0.39585819840431213,
      "learning_rate": 1.0986497885878145e-05,
      "loss": 2.4465,
      "step": 1080
    },
    {
      "epoch": 0.49338416685355463,
      "grad_norm": 0.4279848039150238,
      "learning_rate": 1.0696920488912923e-05,
      "loss": 2.4889,
      "step": 1100
    },
    {
      "epoch": 0.5023547880690737,
      "grad_norm": 0.35390934348106384,
      "learning_rate": 1.0406754518016047e-05,
      "loss": 2.5671,
      "step": 1120
    },
    {
      "epoch": 0.5113254092845929,
      "grad_norm": 0.32814425230026245,
      "learning_rate": 1.0116245028583418e-05,
      "loss": 2.5396,
      "step": 1140
    },
    {
      "epoch": 0.5202960305001121,
      "grad_norm": 0.2794135808944702,
      "learning_rate": 9.825637366124458e-06,
      "loss": 2.4186,
      "step": 1160
    },
    {
      "epoch": 0.5292666517156313,
      "grad_norm": 0.3125162124633789,
      "learning_rate": 9.535176959059171e-06,
      "loss": 2.4878,
      "step": 1180
    },
    {
      "epoch": 0.5382372729311505,
      "grad_norm": 0.3027559816837311,
      "learning_rate": 9.245109111445189e-06,
      "loss": 2.4423,
      "step": 1200
    },
    {
      "epoch": 0.5472078941466697,
      "grad_norm": 0.3461575508117676,
      "learning_rate": 8.95567879580984e-06,
      "loss": 2.4254,
      "step": 1220
    },
    {
      "epoch": 0.5561785153621889,
      "grad_norm": 0.4075671136379242,
      "learning_rate": 8.667130446262214e-06,
      "loss": 2.4678,
      "step": 1240
    },
    {
      "epoch": 0.565149136577708,
      "grad_norm": 0.36932793259620667,
      "learning_rate": 8.379707752059932e-06,
      "loss": 2.443,
      "step": 1260
    },
    {
      "epoch": 0.5741197577932272,
      "grad_norm": 0.3690980076789856,
      "learning_rate": 8.093653451804987e-06,
      "loss": 2.5446,
      "step": 1280
    },
    {
      "epoch": 0.5830903790087464,
      "grad_norm": 0.33403080701828003,
      "learning_rate": 7.809209128442408e-06,
      "loss": 2.4948,
      "step": 1300
    },
    {
      "epoch": 0.5920610002242656,
      "grad_norm": 0.3500635325908661,
      "learning_rate": 7.52661500523497e-06,
      "loss": 2.4717,
      "step": 1320
    },
    {
      "epoch": 0.6010316214397847,
      "grad_norm": 0.3530026376247406,
      "learning_rate": 7.246109742886156e-06,
      "loss": 2.5245,
      "step": 1340
    },
    {
      "epoch": 0.6100022426553039,
      "grad_norm": 0.35534512996673584,
      "learning_rate": 6.967930237982793e-06,
      "loss": 2.407,
      "step": 1360
    },
    {
      "epoch": 0.6189728638708231,
      "grad_norm": 0.3208062946796417,
      "learning_rate": 6.692311422927515e-06,
      "loss": 2.4504,
      "step": 1380
    },
    {
      "epoch": 0.6279434850863422,
      "grad_norm": 0.32474184036254883,
      "learning_rate": 6.4194860675300695e-06,
      "loss": 2.5117,
      "step": 1400
    },
    {
      "epoch": 0.6369141063018614,
      "grad_norm": 0.3804394006729126,
      "learning_rate": 6.149684582425013e-06,
      "loss": 2.4671,
      "step": 1420
    },
    {
      "epoch": 0.6458847275173806,
      "grad_norm": 0.35071736574172974,
      "learning_rate": 5.883134824481786e-06,
      "loss": 2.4593,
      "step": 1440
    },
    {
      "epoch": 0.6548553487328997,
      "grad_norm": 0.348230242729187,
      "learning_rate": 5.620061904371565e-06,
      "loss": 2.436,
      "step": 1460
    },
    {
      "epoch": 0.6638259699484189,
      "grad_norm": 0.42061659693717957,
      "learning_rate": 5.360687996453348e-06,
      "loss": 2.4705,
      "step": 1480
    },
    {
      "epoch": 0.6727965911639381,
      "grad_norm": 0.3868522644042969,
      "learning_rate": 5.105232151139895e-06,
      "loss": 2.4973,
      "step": 1500
    },
    {
      "epoch": 0.6817672123794573,
      "grad_norm": 0.37379515171051025,
      "learning_rate": 4.853910109901901e-06,
      "loss": 2.4178,
      "step": 1520
    },
    {
      "epoch": 0.6907378335949764,
      "grad_norm": 0.38078969717025757,
      "learning_rate": 4.606934123066739e-06,
      "loss": 2.3759,
      "step": 1540
    },
    {
      "epoch": 0.6997084548104956,
      "grad_norm": 0.44406720995903015,
      "learning_rate": 4.3645127705655654e-06,
      "loss": 2.507,
      "step": 1560
    },
    {
      "epoch": 0.7086790760260148,
      "grad_norm": 0.3889126777648926,
      "learning_rate": 4.126850785780199e-06,
      "loss": 2.5821,
      "step": 1580
    },
    {
      "epoch": 0.7176496972415339,
      "grad_norm": 0.362594336271286,
      "learning_rate": 3.8941488826385855e-06,
      "loss": 2.4599,
      "step": 1600
    },
    {
      "epoch": 0.7266203184570531,
      "grad_norm": 0.3674083948135376,
      "learning_rate": 3.6666035861047744e-06,
      "loss": 2.4943,
      "step": 1620
    },
    {
      "epoch": 0.7355909396725723,
      "grad_norm": 0.40499699115753174,
      "learning_rate": 3.444407066206692e-06,
      "loss": 2.4119,
      "step": 1640
    },
    {
      "epoch": 0.7445615608880914,
      "grad_norm": 0.3226064443588257,
      "learning_rate": 3.2277469757417403e-06,
      "loss": 2.4375,
      "step": 1660
    },
    {
      "epoch": 0.7535321821036107,
      "grad_norm": 0.36566492915153503,
      "learning_rate": 3.0168062917974173e-06,
      "loss": 2.4084,
      "step": 1680
    },
    {
      "epoch": 0.7625028033191299,
      "grad_norm": 0.33492156863212585,
      "learning_rate": 2.8117631612207084e-06,
      "loss": 2.4378,
      "step": 1700
    },
    {
      "epoch": 0.7714734245346491,
      "grad_norm": 0.3422945439815521,
      "learning_rate": 2.6127907501667726e-06,
      "loss": 2.5039,
      "step": 1720
    },
    {
      "epoch": 0.7804440457501682,
      "grad_norm": 0.39878708124160767,
      "learning_rate": 2.420057097854046e-06,
      "loss": 2.4394,
      "step": 1740
    },
    {
      "epoch": 0.7894146669656874,
      "grad_norm": 0.3516499698162079,
      "learning_rate": 2.2337249746491695e-06,
      "loss": 2.4093,
      "step": 1760
    },
    {
      "epoch": 0.7983852881812066,
      "grad_norm": 0.3679056167602539,
      "learning_rate": 2.0539517446016975e-06,
      "loss": 2.3983,
      "step": 1780
    },
    {
      "epoch": 0.8073559093967257,
      "grad_norm": 0.3900187313556671,
      "learning_rate": 1.880889232544585e-06,
      "loss": 2.4602,
      "step": 1800
    },
    {
      "epoch": 0.8163265306122449,
      "grad_norm": 0.34092533588409424,
      "learning_rate": 1.714683595872777e-06,
      "loss": 2.4214,
      "step": 1820
    },
    {
      "epoch": 0.8252971518277641,
      "grad_norm": 0.3562910854816437,
      "learning_rate": 1.5554752011081332e-06,
      "loss": 2.4335,
      "step": 1840
    },
    {
      "epoch": 0.8342677730432833,
      "grad_norm": 0.39239928126335144,
      "learning_rate": 1.4033985053549425e-06,
      "loss": 2.3766,
      "step": 1860
    },
    {
      "epoch": 0.8432383942588024,
      "grad_norm": 0.3768567144870758,
      "learning_rate": 1.2585819427461564e-06,
      "loss": 2.4227,
      "step": 1880
    },
    {
      "epoch": 0.8522090154743216,
      "grad_norm": 0.378839910030365,
      "learning_rate": 1.121147815976248e-06,
      "loss": 2.3695,
      "step": 1900
    },
    {
      "epoch": 0.8611796366898408,
      "grad_norm": 0.32945871353149414,
      "learning_rate": 9.912121930122542e-07,
      "loss": 2.3736,
      "step": 1920
    },
    {
      "epoch": 0.8701502579053599,
      "grad_norm": 0.43066656589508057,
      "learning_rate": 8.688848090702928e-07,
      "loss": 2.3896,
      "step": 1940
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 0.3874249756336212,
      "learning_rate": 7.542689739403097e-07,
      "loss": 2.4423,
      "step": 1960
    },
    {
      "epoch": 0.8880915003363983,
      "grad_norm": 0.3411208391189575,
      "learning_rate": 6.474614847373051e-07,
      "loss": 2.4222,
      "step": 1980
    },
    {
      "epoch": 0.8970621215519174,
      "grad_norm": 0.3745633661746979,
      "learning_rate": 5.485525441527651e-07,
      "loss": 2.3641,
      "step": 2000
    },
    {
      "epoch": 0.9060327427674366,
      "grad_norm": 0.3834417462348938,
      "learning_rate": 4.5762568427529795e-07,
      "loss": 2.4481,
      "step": 2020
    },
    {
      "epoch": 0.9150033639829558,
      "grad_norm": 0.3854876160621643,
      "learning_rate": 3.747576960448551e-07,
      "loss": 2.4281,
      "step": 2040
    },
    {
      "epoch": 0.923973985198475,
      "grad_norm": 1.0627397298812866,
      "learning_rate": 3.0001856440005307e-07,
      "loss": 2.3635,
      "step": 2060
    },
    {
      "epoch": 0.9329446064139941,
      "grad_norm": 0.38673698902130127,
      "learning_rate": 2.3347140917344579e-07,
      "loss": 2.5118,
      "step": 2080
    },
    {
      "epoch": 0.9419152276295133,
      "grad_norm": 0.36718830466270447,
      "learning_rate": 1.7517243178458486e-07,
      "loss": 2.453,
      "step": 2100
    },
    {
      "epoch": 0.9508858488450325,
      "grad_norm": 0.5423038601875305,
      "learning_rate": 1.2517086777594112e-07,
      "loss": 2.4404,
      "step": 2120
    },
    {
      "epoch": 0.9598564700605517,
      "grad_norm": 0.33170536160469055,
      "learning_rate": 8.35089452317639e-08,
      "loss": 2.3524,
      "step": 2140
    },
    {
      "epoch": 0.9688270912760709,
      "grad_norm": 0.44688114523887634,
      "learning_rate": 5.022184911495864e-08,
      "loss": 2.3764,
      "step": 2160
    },
    {
      "epoch": 0.9777977124915901,
      "grad_norm": 0.43967434763908386,
      "learning_rate": 2.5337691552156372e-08,
      "loss": 2.427,
      "step": 2180
    },
    {
      "epoch": 0.9867683337071093,
      "grad_norm": 0.44690561294555664,
      "learning_rate": 8.877488092022823e-09,
      "loss": 2.423,
      "step": 2200
    },
    {
      "epoch": 0.9957389549226284,
      "grad_norm": 0.34528931975364685,
      "learning_rate": 8.551399568945684e-10,
      "loss": 2.4153,
      "step": 2220
    },
    {
      "epoch": 0.999775734469612,
      "step": 2229,
      "total_flos": 1.4099141814984376e+18,
      "train_loss": 2.5103028606758784,
      "train_runtime": 5031.0289,
      "train_samples_per_second": 28.356,
      "train_steps_per_second": 0.443
    }
  ],
  "logging_steps": 20,
  "max_steps": 2229,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 5000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4099141814984376e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}