|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.15186490098408456,
  "eval_steps": 500,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006074596039363382,
      "grad_norm": 2.236969232559204,
      "learning_rate": 0.0002,
      "loss": 2.2628,
      "step": 10
    },
    {
      "epoch": 0.0012149192078726764,
      "grad_norm": 1.873207688331604,
      "learning_rate": 0.0002,
      "loss": 1.6589,
      "step": 20
    },
    {
      "epoch": 0.0018223788118090147,
      "grad_norm": 1.7144545316696167,
      "learning_rate": 0.0002,
      "loss": 1.6129,
      "step": 30
    },
    {
      "epoch": 0.0024298384157453528,
      "grad_norm": 1.6338379383087158,
      "learning_rate": 0.0002,
      "loss": 1.565,
      "step": 40
    },
    {
      "epoch": 0.003037298019681691,
      "grad_norm": 1.733494758605957,
      "learning_rate": 0.0002,
      "loss": 1.5043,
      "step": 50
    },
    {
      "epoch": 0.0036447576236180294,
      "grad_norm": 1.6118093729019165,
      "learning_rate": 0.0002,
      "loss": 1.4686,
      "step": 60
    },
    {
      "epoch": 0.004252217227554368,
      "grad_norm": 1.5905429124832153,
      "learning_rate": 0.0002,
      "loss": 1.4902,
      "step": 70
    },
    {
      "epoch": 0.0048596768314907056,
      "grad_norm": 1.667108416557312,
      "learning_rate": 0.0002,
      "loss": 1.4311,
      "step": 80
    },
    {
      "epoch": 0.005467136435427044,
      "grad_norm": 1.813370943069458,
      "learning_rate": 0.0002,
      "loss": 1.3872,
      "step": 90
    },
    {
      "epoch": 0.006074596039363382,
      "grad_norm": 1.6819970607757568,
      "learning_rate": 0.0002,
      "loss": 1.4375,
      "step": 100
    },
    {
      "epoch": 0.006682055643299721,
      "grad_norm": 1.6235817670822144,
      "learning_rate": 0.0002,
      "loss": 1.4136,
      "step": 110
    },
    {
      "epoch": 0.007289515247236059,
      "grad_norm": 1.6433429718017578,
      "learning_rate": 0.0002,
      "loss": 1.3771,
      "step": 120
    },
    {
      "epoch": 0.007896974851172398,
      "grad_norm": 1.6093605756759644,
      "learning_rate": 0.0002,
      "loss": 1.4332,
      "step": 130
    },
    {
      "epoch": 0.008504434455108735,
      "grad_norm": 1.63667893409729,
      "learning_rate": 0.0002,
      "loss": 1.4148,
      "step": 140
    },
    {
      "epoch": 0.009111894059045073,
      "grad_norm": 1.5507344007492065,
      "learning_rate": 0.0002,
      "loss": 1.3764,
      "step": 150
    },
    {
      "epoch": 0.009719353662981411,
      "grad_norm": 1.6159253120422363,
      "learning_rate": 0.0002,
      "loss": 1.3141,
      "step": 160
    },
    {
      "epoch": 0.01032681326691775,
      "grad_norm": 1.6511636972427368,
      "learning_rate": 0.0002,
      "loss": 1.4633,
      "step": 170
    },
    {
      "epoch": 0.010934272870854089,
      "grad_norm": 1.8297101259231567,
      "learning_rate": 0.0002,
      "loss": 1.3747,
      "step": 180
    },
    {
      "epoch": 0.011541732474790426,
      "grad_norm": 1.8446108102798462,
      "learning_rate": 0.0002,
      "loss": 1.3742,
      "step": 190
    },
    {
      "epoch": 0.012149192078726764,
      "grad_norm": 1.6419591903686523,
      "learning_rate": 0.0002,
      "loss": 1.329,
      "step": 200
    },
    {
      "epoch": 0.012756651682663102,
      "grad_norm": 1.595116376876831,
      "learning_rate": 0.0002,
      "loss": 1.3658,
      "step": 210
    },
    {
      "epoch": 0.013364111286599442,
      "grad_norm": 1.6085125207901,
      "learning_rate": 0.0002,
      "loss": 1.3062,
      "step": 220
    },
    {
      "epoch": 0.01397157089053578,
      "grad_norm": 1.6018210649490356,
      "learning_rate": 0.0002,
      "loss": 1.3563,
      "step": 230
    },
    {
      "epoch": 0.014579030494472118,
      "grad_norm": 1.5757384300231934,
      "learning_rate": 0.0002,
      "loss": 1.4252,
      "step": 240
    },
    {
      "epoch": 0.015186490098408455,
      "grad_norm": 1.6396926641464233,
      "learning_rate": 0.0002,
      "loss": 1.3044,
      "step": 250
    },
    {
      "epoch": 0.015793949702344795,
      "grad_norm": 1.7595162391662598,
      "learning_rate": 0.0002,
      "loss": 1.3885,
      "step": 260
    },
    {
      "epoch": 0.01640140930628113,
      "grad_norm": 1.5528141260147095,
      "learning_rate": 0.0002,
      "loss": 1.2728,
      "step": 270
    },
    {
      "epoch": 0.01700886891021747,
      "grad_norm": 1.536747932434082,
      "learning_rate": 0.0002,
      "loss": 1.3512,
      "step": 280
    },
    {
      "epoch": 0.01761632851415381,
      "grad_norm": 1.6435034275054932,
      "learning_rate": 0.0002,
      "loss": 1.338,
      "step": 290
    },
    {
      "epoch": 0.018223788118090146,
      "grad_norm": 1.5331226587295532,
      "learning_rate": 0.0002,
      "loss": 1.3519,
      "step": 300
    },
    {
      "epoch": 0.018831247722026486,
      "grad_norm": 1.6326663494110107,
      "learning_rate": 0.0002,
      "loss": 1.3089,
      "step": 310
    },
    {
      "epoch": 0.019438707325962822,
      "grad_norm": 1.567794919013977,
      "learning_rate": 0.0002,
      "loss": 1.253,
      "step": 320
    },
    {
      "epoch": 0.020046166929899162,
      "grad_norm": 1.5589838027954102,
      "learning_rate": 0.0002,
      "loss": 1.33,
      "step": 330
    },
    {
      "epoch": 0.0206536265338355,
      "grad_norm": 1.5770776271820068,
      "learning_rate": 0.0002,
      "loss": 1.3316,
      "step": 340
    },
    {
      "epoch": 0.021261086137771838,
      "grad_norm": 1.4471594095230103,
      "learning_rate": 0.0002,
      "loss": 1.2864,
      "step": 350
    },
    {
      "epoch": 0.021868545741708177,
      "grad_norm": 1.5740830898284912,
      "learning_rate": 0.0002,
      "loss": 1.2921,
      "step": 360
    },
    {
      "epoch": 0.022476005345644513,
      "grad_norm": 1.7635418176651,
      "learning_rate": 0.0002,
      "loss": 1.2212,
      "step": 370
    },
    {
      "epoch": 0.023083464949580853,
      "grad_norm": 1.5588957071304321,
      "learning_rate": 0.0002,
      "loss": 1.226,
      "step": 380
    },
    {
      "epoch": 0.023690924553517193,
      "grad_norm": 1.6329195499420166,
      "learning_rate": 0.0002,
      "loss": 1.3694,
      "step": 390
    },
    {
      "epoch": 0.02429838415745353,
      "grad_norm": 1.5749831199645996,
      "learning_rate": 0.0002,
      "loss": 1.3151,
      "step": 400
    },
    {
      "epoch": 0.024905843761389868,
      "grad_norm": 1.776865839958191,
      "learning_rate": 0.0002,
      "loss": 1.2971,
      "step": 410
    },
    {
      "epoch": 0.025513303365326204,
      "grad_norm": 1.4414085149765015,
      "learning_rate": 0.0002,
      "loss": 1.3434,
      "step": 420
    },
    {
      "epoch": 0.026120762969262544,
      "grad_norm": 1.672532081604004,
      "learning_rate": 0.0002,
      "loss": 1.2647,
      "step": 430
    },
    {
      "epoch": 0.026728222573198884,
      "grad_norm": 1.7707642316818237,
      "learning_rate": 0.0002,
      "loss": 1.2875,
      "step": 440
    },
    {
      "epoch": 0.02733568217713522,
      "grad_norm": 1.510501503944397,
      "learning_rate": 0.0002,
      "loss": 1.2962,
      "step": 450
    },
    {
      "epoch": 0.02794314178107156,
      "grad_norm": 1.5336624383926392,
      "learning_rate": 0.0002,
      "loss": 1.3686,
      "step": 460
    },
    {
      "epoch": 0.028550601385007895,
      "grad_norm": 1.785957932472229,
      "learning_rate": 0.0002,
      "loss": 1.2887,
      "step": 470
    },
    {
      "epoch": 0.029158060988944235,
      "grad_norm": 1.5779095888137817,
      "learning_rate": 0.0002,
      "loss": 1.3048,
      "step": 480
    },
    {
      "epoch": 0.029765520592880575,
      "grad_norm": 1.9468932151794434,
      "learning_rate": 0.0002,
      "loss": 1.2892,
      "step": 490
    },
    {
      "epoch": 0.03037298019681691,
      "grad_norm": 1.5167356729507446,
      "learning_rate": 0.0002,
      "loss": 1.3858,
      "step": 500
    },
    {
      "epoch": 0.03098043980075325,
      "grad_norm": 1.3795329332351685,
      "learning_rate": 0.0002,
      "loss": 1.3023,
      "step": 510
    },
    {
      "epoch": 0.03158789940468959,
      "grad_norm": 1.5920099020004272,
      "learning_rate": 0.0002,
      "loss": 1.3765,
      "step": 520
    },
    {
      "epoch": 0.032195359008625926,
      "grad_norm": 1.462296724319458,
      "learning_rate": 0.0002,
      "loss": 1.2663,
      "step": 530
    },
    {
      "epoch": 0.03280281861256226,
      "grad_norm": 1.3863338232040405,
      "learning_rate": 0.0002,
      "loss": 1.2386,
      "step": 540
    },
    {
      "epoch": 0.033410278216498605,
      "grad_norm": 1.8175960779190063,
      "learning_rate": 0.0002,
      "loss": 1.3238,
      "step": 550
    },
    {
      "epoch": 0.03401773782043494,
      "grad_norm": 1.7048077583312988,
      "learning_rate": 0.0002,
      "loss": 1.257,
      "step": 560
    },
    {
      "epoch": 0.03462519742437128,
      "grad_norm": 1.5479084253311157,
      "learning_rate": 0.0002,
      "loss": 1.23,
      "step": 570
    },
    {
      "epoch": 0.03523265702830762,
      "grad_norm": 1.515726089477539,
      "learning_rate": 0.0002,
      "loss": 1.2893,
      "step": 580
    },
    {
      "epoch": 0.03584011663224396,
      "grad_norm": 1.7023965120315552,
      "learning_rate": 0.0002,
      "loss": 1.2783,
      "step": 590
    },
    {
      "epoch": 0.03644757623618029,
      "grad_norm": 1.7180956602096558,
      "learning_rate": 0.0002,
      "loss": 1.2578,
      "step": 600
    },
    {
      "epoch": 0.03705503584011663,
      "grad_norm": 1.3848283290863037,
      "learning_rate": 0.0002,
      "loss": 1.349,
      "step": 610
    },
    {
      "epoch": 0.03766249544405297,
      "grad_norm": 1.4944247007369995,
      "learning_rate": 0.0002,
      "loss": 1.2718,
      "step": 620
    },
    {
      "epoch": 0.03826995504798931,
      "grad_norm": 1.581346869468689,
      "learning_rate": 0.0002,
      "loss": 1.2798,
      "step": 630
    },
    {
      "epoch": 0.038877414651925644,
      "grad_norm": 1.490548849105835,
      "learning_rate": 0.0002,
      "loss": 1.2878,
      "step": 640
    },
    {
      "epoch": 0.03948487425586199,
      "grad_norm": 1.4963841438293457,
      "learning_rate": 0.0002,
      "loss": 1.2777,
      "step": 650
    },
    {
      "epoch": 0.040092333859798324,
      "grad_norm": 1.729665994644165,
      "learning_rate": 0.0002,
      "loss": 1.2781,
      "step": 660
    },
    {
      "epoch": 0.04069979346373466,
      "grad_norm": 1.5069007873535156,
      "learning_rate": 0.0002,
      "loss": 1.2978,
      "step": 670
    },
    {
      "epoch": 0.041307253067671,
      "grad_norm": 4.421722412109375,
      "learning_rate": 0.0002,
      "loss": 1.2765,
      "step": 680
    },
    {
      "epoch": 0.04191471267160734,
      "grad_norm": 1.6383577585220337,
      "learning_rate": 0.0002,
      "loss": 1.3364,
      "step": 690
    },
    {
      "epoch": 0.042522172275543675,
      "grad_norm": 1.9025250673294067,
      "learning_rate": 0.0002,
      "loss": 1.3184,
      "step": 700
    },
    {
      "epoch": 0.04312963187948001,
      "grad_norm": 1.4485440254211426,
      "learning_rate": 0.0002,
      "loss": 1.2749,
      "step": 710
    },
    {
      "epoch": 0.043737091483416354,
      "grad_norm": 1.3731114864349365,
      "learning_rate": 0.0002,
      "loss": 1.2254,
      "step": 720
    },
    {
      "epoch": 0.04434455108735269,
      "grad_norm": 2.1447882652282715,
      "learning_rate": 0.0002,
      "loss": 1.227,
      "step": 730
    },
    {
      "epoch": 0.04495201069128903,
      "grad_norm": 1.4844560623168945,
      "learning_rate": 0.0002,
      "loss": 1.2656,
      "step": 740
    },
    {
      "epoch": 0.04555947029522537,
      "grad_norm": 1.6620945930480957,
      "learning_rate": 0.0002,
      "loss": 1.299,
      "step": 750
    },
    {
      "epoch": 0.046166929899161706,
      "grad_norm": 1.4194166660308838,
      "learning_rate": 0.0002,
      "loss": 1.2961,
      "step": 760
    },
    {
      "epoch": 0.04677438950309804,
      "grad_norm": 1.4977781772613525,
      "learning_rate": 0.0002,
      "loss": 1.324,
      "step": 770
    },
    {
      "epoch": 0.047381849107034385,
      "grad_norm": 1.5294965505599976,
      "learning_rate": 0.0002,
      "loss": 1.2344,
      "step": 780
    },
    {
      "epoch": 0.04798930871097072,
      "grad_norm": 1.6185721158981323,
      "learning_rate": 0.0002,
      "loss": 1.3811,
      "step": 790
    },
    {
      "epoch": 0.04859676831490706,
      "grad_norm": 1.8349323272705078,
      "learning_rate": 0.0002,
      "loss": 1.2636,
      "step": 800
    },
    {
      "epoch": 0.0492042279188434,
      "grad_norm": 1.4457494020462036,
      "learning_rate": 0.0002,
      "loss": 1.278,
      "step": 810
    },
    {
      "epoch": 0.049811687522779736,
      "grad_norm": 1.3572221994400024,
      "learning_rate": 0.0002,
      "loss": 1.2306,
      "step": 820
    },
    {
      "epoch": 0.05041914712671607,
      "grad_norm": 1.3937549591064453,
      "learning_rate": 0.0002,
      "loss": 1.3023,
      "step": 830
    },
    {
      "epoch": 0.05102660673065241,
      "grad_norm": 1.450095295906067,
      "learning_rate": 0.0002,
      "loss": 1.2393,
      "step": 840
    },
    {
      "epoch": 0.05163406633458875,
      "grad_norm": 1.4043591022491455,
      "learning_rate": 0.0002,
      "loss": 1.2574,
      "step": 850
    },
    {
      "epoch": 0.05224152593852509,
      "grad_norm": 1.4549307823181152,
      "learning_rate": 0.0002,
      "loss": 1.2938,
      "step": 860
    },
    {
      "epoch": 0.052848985542461424,
      "grad_norm": 1.4602893590927124,
      "learning_rate": 0.0002,
      "loss": 1.1861,
      "step": 870
    },
    {
      "epoch": 0.05345644514639777,
      "grad_norm": 1.867261290550232,
      "learning_rate": 0.0002,
      "loss": 1.3015,
      "step": 880
    },
    {
      "epoch": 0.0540639047503341,
      "grad_norm": 1.6120606660842896,
      "learning_rate": 0.0002,
      "loss": 1.268,
      "step": 890
    },
    {
      "epoch": 0.05467136435427044,
      "grad_norm": 1.6458678245544434,
      "learning_rate": 0.0002,
      "loss": 1.2661,
      "step": 900
    },
    {
      "epoch": 0.05527882395820678,
      "grad_norm": 1.729551076889038,
      "learning_rate": 0.0002,
      "loss": 1.28,
      "step": 910
    },
    {
      "epoch": 0.05588628356214312,
      "grad_norm": 1.5503125190734863,
      "learning_rate": 0.0002,
      "loss": 1.2612,
      "step": 920
    },
    {
      "epoch": 0.056493743166079455,
      "grad_norm": 1.5638302564620972,
      "learning_rate": 0.0002,
      "loss": 1.3259,
      "step": 930
    },
    {
      "epoch": 0.05710120277001579,
      "grad_norm": 1.4959323406219482,
      "learning_rate": 0.0002,
      "loss": 1.2489,
      "step": 940
    },
    {
      "epoch": 0.057708662373952134,
      "grad_norm": 1.5417566299438477,
      "learning_rate": 0.0002,
      "loss": 1.2675,
      "step": 950
    },
    {
      "epoch": 0.05831612197788847,
      "grad_norm": 1.5263869762420654,
      "learning_rate": 0.0002,
      "loss": 1.3438,
      "step": 960
    },
    {
      "epoch": 0.058923581581824806,
      "grad_norm": 1.4479107856750488,
      "learning_rate": 0.0002,
      "loss": 1.2868,
      "step": 970
    },
    {
      "epoch": 0.05953104118576115,
      "grad_norm": 1.678945541381836,
      "learning_rate": 0.0002,
      "loss": 1.2204,
      "step": 980
    },
    {
      "epoch": 0.060138500789697485,
      "grad_norm": 1.431054949760437,
      "learning_rate": 0.0002,
      "loss": 1.2624,
      "step": 990
    },
    {
      "epoch": 0.06074596039363382,
      "grad_norm": 1.6529697179794312,
      "learning_rate": 0.0002,
      "loss": 1.2867,
      "step": 1000
    },
    {
      "epoch": 0.061353419997570165,
      "grad_norm": 1.4838560819625854,
      "learning_rate": 0.0002,
      "loss": 1.2556,
      "step": 1010
    },
    {
      "epoch": 0.0619608796015065,
      "grad_norm": 1.303206443786621,
      "learning_rate": 0.0002,
      "loss": 1.1895,
      "step": 1020
    },
    {
      "epoch": 0.06256833920544284,
      "grad_norm": 1.6381781101226807,
      "learning_rate": 0.0002,
      "loss": 1.2975,
      "step": 1030
    },
    {
      "epoch": 0.06317579880937918,
      "grad_norm": 1.589048981666565,
      "learning_rate": 0.0002,
      "loss": 1.2776,
      "step": 1040
    },
    {
      "epoch": 0.06378325841331552,
      "grad_norm": 1.71570885181427,
      "learning_rate": 0.0002,
      "loss": 1.2703,
      "step": 1050
    },
    {
      "epoch": 0.06439071801725185,
      "grad_norm": 1.416674017906189,
      "learning_rate": 0.0002,
      "loss": 1.2465,
      "step": 1060
    },
    {
      "epoch": 0.06499817762118819,
      "grad_norm": 1.773748517036438,
      "learning_rate": 0.0002,
      "loss": 1.339,
      "step": 1070
    },
    {
      "epoch": 0.06560563722512452,
      "grad_norm": 1.5272996425628662,
      "learning_rate": 0.0002,
      "loss": 1.2973,
      "step": 1080
    },
    {
      "epoch": 0.06621309682906086,
      "grad_norm": 1.3926866054534912,
      "learning_rate": 0.0002,
      "loss": 1.3862,
      "step": 1090
    },
    {
      "epoch": 0.06682055643299721,
      "grad_norm": 1.576254963874817,
      "learning_rate": 0.0002,
      "loss": 1.2522,
      "step": 1100
    },
    {
      "epoch": 0.06742801603693355,
      "grad_norm": 1.6619774103164673,
      "learning_rate": 0.0002,
      "loss": 1.3027,
      "step": 1110
    },
    {
      "epoch": 0.06803547564086988,
      "grad_norm": 1.4636040925979614,
      "learning_rate": 0.0002,
      "loss": 1.3649,
      "step": 1120
    },
    {
      "epoch": 0.06864293524480622,
      "grad_norm": 1.6307445764541626,
      "learning_rate": 0.0002,
      "loss": 1.2553,
      "step": 1130
    },
    {
      "epoch": 0.06925039484874256,
      "grad_norm": 1.531379222869873,
      "learning_rate": 0.0002,
      "loss": 1.2578,
      "step": 1140
    },
    {
      "epoch": 0.06985785445267889,
      "grad_norm": 1.5244723558425903,
      "learning_rate": 0.0002,
      "loss": 1.2745,
      "step": 1150
    },
    {
      "epoch": 0.07046531405661524,
      "grad_norm": 1.7863515615463257,
      "learning_rate": 0.0002,
      "loss": 1.2835,
      "step": 1160
    },
    {
      "epoch": 0.07107277366055158,
      "grad_norm": 1.5718461275100708,
      "learning_rate": 0.0002,
      "loss": 1.2352,
      "step": 1170
    },
    {
      "epoch": 0.07168023326448791,
      "grad_norm": 1.5710781812667847,
      "learning_rate": 0.0002,
      "loss": 1.2357,
      "step": 1180
    },
    {
      "epoch": 0.07228769286842425,
      "grad_norm": 1.4836217164993286,
      "learning_rate": 0.0002,
      "loss": 1.2811,
      "step": 1190
    },
    {
      "epoch": 0.07289515247236059,
      "grad_norm": 1.5769175291061401,
      "learning_rate": 0.0002,
      "loss": 1.3826,
      "step": 1200
    },
    {
      "epoch": 0.07350261207629692,
      "grad_norm": 1.552350640296936,
      "learning_rate": 0.0002,
      "loss": 1.2512,
      "step": 1210
    },
    {
      "epoch": 0.07411007168023326,
      "grad_norm": 1.4635241031646729,
      "learning_rate": 0.0002,
      "loss": 1.3651,
      "step": 1220
    },
    {
      "epoch": 0.07471753128416961,
      "grad_norm": 1.6063421964645386,
      "learning_rate": 0.0002,
      "loss": 1.3135,
      "step": 1230
    },
    {
      "epoch": 0.07532499088810594,
      "grad_norm": 1.3370627164840698,
      "learning_rate": 0.0002,
      "loss": 1.3172,
      "step": 1240
    },
    {
      "epoch": 0.07593245049204228,
      "grad_norm": 1.691219449043274,
      "learning_rate": 0.0002,
      "loss": 1.114,
      "step": 1250
    },
    {
      "epoch": 0.07653991009597862,
      "grad_norm": 1.4442918300628662,
      "learning_rate": 0.0002,
      "loss": 1.2884,
      "step": 1260
    },
    {
      "epoch": 0.07714736969991495,
      "grad_norm": 1.3976494073867798,
      "learning_rate": 0.0002,
      "loss": 1.2585,
      "step": 1270
    },
    {
      "epoch": 0.07775482930385129,
      "grad_norm": 1.357978105545044,
      "learning_rate": 0.0002,
      "loss": 1.2449,
      "step": 1280
    },
    {
      "epoch": 0.07836228890778764,
      "grad_norm": 1.4768654108047485,
      "learning_rate": 0.0002,
      "loss": 1.1673,
      "step": 1290
    },
    {
      "epoch": 0.07896974851172398,
      "grad_norm": 1.3699668645858765,
      "learning_rate": 0.0002,
      "loss": 1.3029,
      "step": 1300
    },
    {
      "epoch": 0.07957720811566031,
      "grad_norm": 1.494268774986267,
      "learning_rate": 0.0002,
      "loss": 1.2156,
      "step": 1310
    },
    {
      "epoch": 0.08018466771959665,
      "grad_norm": 1.4966034889221191,
      "learning_rate": 0.0002,
      "loss": 1.2387,
      "step": 1320
    },
    {
      "epoch": 0.08079212732353298,
      "grad_norm": 1.5984331369400024,
      "learning_rate": 0.0002,
      "loss": 1.2559,
      "step": 1330
    },
    {
      "epoch": 0.08139958692746932,
      "grad_norm": 1.6565812826156616,
      "learning_rate": 0.0002,
      "loss": 1.2035,
      "step": 1340
    },
    {
      "epoch": 0.08200704653140566,
      "grad_norm": 1.3882875442504883,
      "learning_rate": 0.0002,
      "loss": 1.2266,
      "step": 1350
    },
    {
      "epoch": 0.082614506135342,
      "grad_norm": 1.4133611917495728,
      "learning_rate": 0.0002,
      "loss": 1.2908,
      "step": 1360
    },
    {
      "epoch": 0.08322196573927834,
      "grad_norm": 1.5866031646728516,
      "learning_rate": 0.0002,
      "loss": 1.2541,
      "step": 1370
    },
    {
      "epoch": 0.08382942534321468,
      "grad_norm": 1.7129892110824585,
      "learning_rate": 0.0002,
      "loss": 1.2653,
      "step": 1380
    },
    {
      "epoch": 0.08443688494715101,
      "grad_norm": 1.821727991104126,
      "learning_rate": 0.0002,
      "loss": 1.2413,
      "step": 1390
    },
    {
      "epoch": 0.08504434455108735,
      "grad_norm": 1.5216853618621826,
      "learning_rate": 0.0002,
      "loss": 1.2449,
      "step": 1400
    },
    {
      "epoch": 0.08565180415502369,
      "grad_norm": 1.3088600635528564,
      "learning_rate": 0.0002,
      "loss": 1.1995,
      "step": 1410
    },
    {
      "epoch": 0.08625926375896002,
      "grad_norm": 1.467633843421936,
      "learning_rate": 0.0002,
      "loss": 1.2901,
      "step": 1420
    },
    {
      "epoch": 0.08686672336289637,
      "grad_norm": 1.467429518699646,
      "learning_rate": 0.0002,
      "loss": 1.2593,
      "step": 1430
    },
    {
      "epoch": 0.08747418296683271,
      "grad_norm": 1.5163699388504028,
      "learning_rate": 0.0002,
      "loss": 1.3415,
      "step": 1440
    },
    {
      "epoch": 0.08808164257076904,
      "grad_norm": 1.4762097597122192,
      "learning_rate": 0.0002,
      "loss": 1.2328,
      "step": 1450
    },
    {
      "epoch": 0.08868910217470538,
      "grad_norm": 1.3106896877288818,
      "learning_rate": 0.0002,
      "loss": 1.2625,
      "step": 1460
    },
    {
      "epoch": 0.08929656177864172,
      "grad_norm": 1.7591036558151245,
      "learning_rate": 0.0002,
      "loss": 1.2654,
      "step": 1470
    },
    {
      "epoch": 0.08990402138257805,
      "grad_norm": 1.425759196281433,
      "learning_rate": 0.0002,
      "loss": 1.2492,
      "step": 1480
    },
    {
      "epoch": 0.0905114809865144,
      "grad_norm": 1.4507300853729248,
      "learning_rate": 0.0002,
      "loss": 1.3173,
      "step": 1490
    },
    {
      "epoch": 0.09111894059045074,
      "grad_norm": 1.564005970954895,
      "learning_rate": 0.0002,
      "loss": 1.2037,
      "step": 1500
    },
    {
      "epoch": 0.09172640019438708,
      "grad_norm": 1.4253226518630981,
      "learning_rate": 0.0002,
      "loss": 1.2143,
      "step": 1510
    },
    {
      "epoch": 0.09233385979832341,
      "grad_norm": 1.4880207777023315,
      "learning_rate": 0.0002,
      "loss": 1.2818,
      "step": 1520
    },
    {
      "epoch": 0.09294131940225975,
      "grad_norm": 1.439846396446228,
      "learning_rate": 0.0002,
      "loss": 1.2082,
      "step": 1530
    },
    {
      "epoch": 0.09354877900619608,
      "grad_norm": 1.5116918087005615,
      "learning_rate": 0.0002,
      "loss": 1.279,
      "step": 1540
    },
    {
      "epoch": 0.09415623861013242,
      "grad_norm": 1.2679647207260132,
      "learning_rate": 0.0002,
      "loss": 1.2541,
      "step": 1550
    },
    {
      "epoch": 0.09476369821406877,
      "grad_norm": 1.4966439008712769,
      "learning_rate": 0.0002,
      "loss": 1.2426,
      "step": 1560
    },
    {
      "epoch": 0.0953711578180051,
      "grad_norm": 1.795647144317627,
      "learning_rate": 0.0002,
      "loss": 1.2582,
      "step": 1570
    },
    {
      "epoch": 0.09597861742194144,
      "grad_norm": 1.485668659210205,
      "learning_rate": 0.0002,
      "loss": 1.2277,
      "step": 1580
    },
    {
      "epoch": 0.09658607702587778,
      "grad_norm": 1.4666286706924438,
      "learning_rate": 0.0002,
      "loss": 1.1659,
      "step": 1590
    },
    {
      "epoch": 0.09719353662981411,
      "grad_norm": 1.4714016914367676,
      "learning_rate": 0.0002,
      "loss": 1.2541,
      "step": 1600
    },
    {
      "epoch": 0.09780099623375045,
      "grad_norm": 1.351139783859253,
      "learning_rate": 0.0002,
      "loss": 1.2047,
      "step": 1610
    },
    {
      "epoch": 0.0984084558376868,
      "grad_norm": 1.4304152727127075,
      "learning_rate": 0.0002,
      "loss": 1.2028,
      "step": 1620
    },
    {
      "epoch": 0.09901591544162314,
      "grad_norm": 1.3749319314956665,
      "learning_rate": 0.0002,
      "loss": 1.2509,
      "step": 1630
    },
    {
      "epoch": 0.09962337504555947,
      "grad_norm": 1.5823308229446411,
      "learning_rate": 0.0002,
      "loss": 1.2278,
      "step": 1640
    },
    {
      "epoch": 0.10023083464949581,
      "grad_norm": 1.3146113157272339,
      "learning_rate": 0.0002,
      "loss": 1.2092,
      "step": 1650
    },
    {
      "epoch": 0.10083829425343215,
      "grad_norm": 1.5478814840316772,
      "learning_rate": 0.0002,
      "loss": 1.284,
      "step": 1660
    },
    {
      "epoch": 0.10144575385736848,
      "grad_norm": 1.6172051429748535,
      "learning_rate": 0.0002,
      "loss": 1.2521,
      "step": 1670
    },
    {
      "epoch": 0.10205321346130482,
      "grad_norm": 1.52104914188385,
      "learning_rate": 0.0002,
      "loss": 1.2477,
      "step": 1680
    },
    {
      "epoch": 0.10266067306524117,
      "grad_norm": 1.5709279775619507,
      "learning_rate": 0.0002,
      "loss": 1.2424,
      "step": 1690
    },
    {
      "epoch": 0.1032681326691775,
      "grad_norm": 1.4448177814483643,
      "learning_rate": 0.0002,
      "loss": 1.2296,
      "step": 1700
    },
    {
      "epoch": 0.10387559227311384,
      "grad_norm": 1.3035105466842651,
      "learning_rate": 0.0002,
      "loss": 1.2177,
      "step": 1710
    },
    {
      "epoch": 0.10448305187705018,
      "grad_norm": 1.6021480560302734,
      "learning_rate": 0.0002,
      "loss": 1.2391,
      "step": 1720
    },
    {
      "epoch": 0.10509051148098651,
      "grad_norm": 1.5413893461227417,
      "learning_rate": 0.0002,
      "loss": 1.2627,
      "step": 1730
    },
    {
      "epoch": 0.10569797108492285,
      "grad_norm": 1.582407832145691,
      "learning_rate": 0.0002,
      "loss": 1.2344,
      "step": 1740
    },
    {
      "epoch": 0.10630543068885918,
      "grad_norm": 1.4765949249267578,
      "learning_rate": 0.0002,
      "loss": 1.2268,
      "step": 1750
    },
    {
      "epoch": 0.10691289029279553,
      "grad_norm": 1.6818124055862427,
      "learning_rate": 0.0002,
      "loss": 1.229,
      "step": 1760
    },
    {
      "epoch": 0.10752034989673187,
      "grad_norm": 1.570087194442749,
      "learning_rate": 0.0002,
      "loss": 1.322,
      "step": 1770
    },
    {
      "epoch": 0.1081278095006682,
      "grad_norm": 1.5548206567764282,
      "learning_rate": 0.0002,
      "loss": 1.1993,
      "step": 1780
    },
    {
      "epoch": 0.10873526910460454,
      "grad_norm": 1.4159990549087524,
      "learning_rate": 0.0002,
      "loss": 1.2136,
      "step": 1790
    },
    {
      "epoch": 0.10934272870854088,
      "grad_norm": 1.4259227514266968,
      "learning_rate": 0.0002,
      "loss": 1.2826,
      "step": 1800
    },
    {
      "epoch": 0.10995018831247722,
      "grad_norm": 1.721754550933838,
      "learning_rate": 0.0002,
      "loss": 1.2958,
      "step": 1810
    },
    {
      "epoch": 0.11055764791641357,
      "grad_norm": 1.5920616388320923,
      "learning_rate": 0.0002,
      "loss": 1.2152,
      "step": 1820
    },
    {
      "epoch": 0.1111651075203499,
      "grad_norm": 1.4243106842041016,
      "learning_rate": 0.0002,
      "loss": 1.2273,
      "step": 1830
    },
    {
      "epoch": 0.11177256712428624,
      "grad_norm": 1.5096100568771362,
      "learning_rate": 0.0002,
      "loss": 1.2232,
      "step": 1840
    },
    {
      "epoch": 0.11238002672822257,
      "grad_norm": 1.573041558265686,
      "learning_rate": 0.0002,
      "loss": 1.2303,
      "step": 1850
    },
    {
      "epoch": 0.11298748633215891,
      "grad_norm": 1.546109676361084,
      "learning_rate": 0.0002,
      "loss": 1.2366,
      "step": 1860
    },
    {
      "epoch": 0.11359494593609525,
      "grad_norm": 1.5966582298278809,
      "learning_rate": 0.0002,
      "loss": 1.174,
      "step": 1870
    },
    {
      "epoch": 0.11420240554003158,
      "grad_norm": 1.5600007772445679,
      "learning_rate": 0.0002,
      "loss": 1.1639,
      "step": 1880
    },
    {
      "epoch": 0.11480986514396793,
      "grad_norm": 1.7233026027679443,
      "learning_rate": 0.0002,
      "loss": 1.2161,
      "step": 1890
    },
    {
      "epoch": 0.11541732474790427,
      "grad_norm": 1.5901647806167603,
      "learning_rate": 0.0002,
      "loss": 1.2153,
      "step": 1900
    },
    {
      "epoch": 0.1160247843518406,
      "grad_norm": 1.376562237739563,
      "learning_rate": 0.0002,
      "loss": 1.266,
      "step": 1910
    },
    {
      "epoch": 0.11663224395577694,
      "grad_norm": 1.656231164932251,
      "learning_rate": 0.0002,
      "loss": 1.2664,
      "step": 1920
    },
    {
      "epoch": 0.11723970355971328,
      "grad_norm": 1.356429100036621,
      "learning_rate": 0.0002,
      "loss": 1.2244,
      "step": 1930
    },
    {
      "epoch": 0.11784716316364961,
      "grad_norm": 1.5584162473678589,
      "learning_rate": 0.0002,
      "loss": 1.2545,
      "step": 1940
    },
    {
      "epoch": 0.11845462276758595,
      "grad_norm": 1.5809051990509033,
      "learning_rate": 0.0002,
      "loss": 1.2847,
      "step": 1950
    },
    {
      "epoch": 0.1190620823715223,
      "grad_norm": 1.6206623315811157,
      "learning_rate": 0.0002,
      "loss": 1.1984,
      "step": 1960
    },
    {
      "epoch": 0.11966954197545863,
      "grad_norm": 1.5814268589019775,
      "learning_rate": 0.0002,
      "loss": 1.2822,
      "step": 1970
    },
    {
      "epoch": 0.12027700157939497,
      "grad_norm": 1.6698721647262573,
      "learning_rate": 0.0002,
      "loss": 1.2771,
      "step": 1980
    },
    {
      "epoch": 0.12088446118333131,
      "grad_norm": 1.7568098306655884,
      "learning_rate": 0.0002,
      "loss": 1.2153,
      "step": 1990
    },
    {
      "epoch": 0.12149192078726764,
      "grad_norm": 1.5525120496749878,
      "learning_rate": 0.0002,
      "loss": 1.1899,
      "step": 2000
    },
    {
      "epoch": 0.12209938039120398,
      "grad_norm": 1.451067328453064,
      "learning_rate": 0.0002,
      "loss": 1.2241,
      "step": 2010
    },
    {
      "epoch": 0.12270683999514033,
      "grad_norm": 1.6643682718276978,
      "learning_rate": 0.0002,
      "loss": 1.2726,
      "step": 2020
    },
    {
      "epoch": 0.12331429959907667,
      "grad_norm": 1.7143374681472778,
      "learning_rate": 0.0002,
      "loss": 1.277,
      "step": 2030
    },
    {
      "epoch": 0.123921759203013,
      "grad_norm": 1.7562227249145508,
      "learning_rate": 0.0002,
      "loss": 1.2066,
      "step": 2040
    },
    {
      "epoch": 0.12452921880694934,
      "grad_norm": 1.5043004751205444,
      "learning_rate": 0.0002,
      "loss": 1.1832,
      "step": 2050
    },
    {
      "epoch": 0.1251366784108857,
      "grad_norm": 1.7817741632461548,
      "learning_rate": 0.0002,
      "loss": 1.1517,
      "step": 2060
    },
    {
      "epoch": 0.125744138014822,
      "grad_norm": 1.7085005044937134,
      "learning_rate": 0.0002,
      "loss": 1.2548,
      "step": 2070
    },
    {
      "epoch": 0.12635159761875836,
      "grad_norm": 1.3320608139038086,
      "learning_rate": 0.0002,
      "loss": 1.1689,
      "step": 2080
    },
    {
      "epoch": 0.12695905722269468,
      "grad_norm": 1.2270746231079102,
      "learning_rate": 0.0002,
      "loss": 1.3156,
      "step": 2090
    },
    {
      "epoch": 0.12756651682663103,
      "grad_norm": 1.4981920719146729,
      "learning_rate": 0.0002,
      "loss": 1.2243,
      "step": 2100
    },
    {
      "epoch": 0.12817397643056735,
      "grad_norm": 1.4143863916397095,
      "learning_rate": 0.0002,
      "loss": 1.2765,
      "step": 2110
    },
    {
      "epoch": 0.1287814360345037,
      "grad_norm": 1.4701218605041504,
      "learning_rate": 0.0002,
      "loss": 1.1679,
      "step": 2120
    },
    {
      "epoch": 0.12938889563844005,
      "grad_norm": 1.3249510526657104,
      "learning_rate": 0.0002,
      "loss": 1.2313,
      "step": 2130
    },
    {
      "epoch": 0.12999635524237638,
      "grad_norm": 1.3991800546646118,
      "learning_rate": 0.0002,
      "loss": 1.2078,
      "step": 2140
    },
    {
      "epoch": 0.13060381484631273,
      "grad_norm": 1.5967034101486206,
      "learning_rate": 0.0002,
      "loss": 1.1447,
      "step": 2150
    },
    {
      "epoch": 0.13121127445024905,
      "grad_norm": 1.5604697465896606,
      "learning_rate": 0.0002,
      "loss": 1.247,
      "step": 2160
    },
    {
      "epoch": 0.1318187340541854,
      "grad_norm": 1.3492066860198975,
      "learning_rate": 0.0002,
      "loss": 1.2734,
      "step": 2170
    },
    {
      "epoch": 0.13242619365812172,
      "grad_norm": 1.6497987508773804,
      "learning_rate": 0.0002,
      "loss": 1.2067,
      "step": 2180
    },
    {
      "epoch": 0.13303365326205807,
      "grad_norm": 1.4936901330947876,
      "learning_rate": 0.0002,
      "loss": 1.2062,
      "step": 2190
    },
    {
      "epoch": 0.13364111286599442,
      "grad_norm": 1.4576996564865112,
      "learning_rate": 0.0002,
      "loss": 1.2225,
      "step": 2200
    },
    {
      "epoch": 0.13424857246993074,
      "grad_norm": 1.3753255605697632,
      "learning_rate": 0.0002,
      "loss": 1.2431,
      "step": 2210
    },
    {
      "epoch": 0.1348560320738671,
      "grad_norm": 1.563539743423462,
      "learning_rate": 0.0002,
      "loss": 1.2793,
      "step": 2220
    },
    {
      "epoch": 0.13546349167780342,
      "grad_norm": 1.4935153722763062,
      "learning_rate": 0.0002,
      "loss": 1.2361,
      "step": 2230
    },
    {
      "epoch": 0.13607095128173977,
      "grad_norm": 1.307816505432129,
      "learning_rate": 0.0002,
      "loss": 1.2314,
      "step": 2240
    },
    {
      "epoch": 0.13667841088567612,
      "grad_norm": 1.535267949104309,
      "learning_rate": 0.0002,
      "loss": 1.224,
      "step": 2250
    },
    {
      "epoch": 0.13728587048961244,
      "grad_norm": 1.3963550329208374,
      "learning_rate": 0.0002,
      "loss": 1.2803,
      "step": 2260
    },
    {
      "epoch": 0.1378933300935488,
      "grad_norm": 1.527891755104065,
      "learning_rate": 0.0002,
      "loss": 1.1724,
      "step": 2270
    },
    {
      "epoch": 0.1385007896974851,
      "grad_norm": 1.4339513778686523,
      "learning_rate": 0.0002,
      "loss": 1.2773,
      "step": 2280
    },
    {
      "epoch": 0.13910824930142146,
      "grad_norm": 1.9729173183441162,
      "learning_rate": 0.0002,
      "loss": 1.2676,
      "step": 2290
    },
    {
      "epoch": 0.13971570890535778,
      "grad_norm": 1.4557331800460815,
      "learning_rate": 0.0002,
      "loss": 1.1851,
      "step": 2300
    },
    {
      "epoch": 0.14032316850929413,
      "grad_norm": 1.4285987615585327,
      "learning_rate": 0.0002,
      "loss": 1.2804,
      "step": 2310
    },
    {
      "epoch": 0.14093062811323048,
      "grad_norm": 1.2934836149215698,
      "learning_rate": 0.0002,
      "loss": 1.2107,
      "step": 2320
    },
    {
      "epoch": 0.1415380877171668,
      "grad_norm": 1.3619877099990845,
      "learning_rate": 0.0002,
      "loss": 1.1618,
      "step": 2330
    },
    {
      "epoch": 0.14214554732110316,
      "grad_norm": 1.4161388874053955,
      "learning_rate": 0.0002,
      "loss": 1.228,
      "step": 2340
    },
    {
      "epoch": 0.14275300692503948,
      "grad_norm": 1.3923609256744385,
      "learning_rate": 0.0002,
      "loss": 1.2526,
      "step": 2350
    },
    {
      "epoch": 0.14336046652897583,
      "grad_norm": 1.3667365312576294,
      "learning_rate": 0.0002,
      "loss": 1.2691,
      "step": 2360
    },
    {
      "epoch": 0.14396792613291215,
      "grad_norm": 1.420930027961731,
      "learning_rate": 0.0002,
      "loss": 1.2337,
      "step": 2370
    },
    {
      "epoch": 0.1445753857368485,
      "grad_norm": 1.5273494720458984,
      "learning_rate": 0.0002,
      "loss": 1.2606,
      "step": 2380
    },
    {
      "epoch": 0.14518284534078485,
      "grad_norm": 1.5331289768218994,
      "learning_rate": 0.0002,
      "loss": 1.1693,
      "step": 2390
    },
    {
      "epoch": 0.14579030494472117,
      "grad_norm": 1.8542994260787964,
      "learning_rate": 0.0002,
      "loss": 1.2367,
      "step": 2400
    },
    {
      "epoch": 0.14639776454865752,
      "grad_norm": 1.3973726034164429,
      "learning_rate": 0.0002,
      "loss": 1.27,
      "step": 2410
    },
    {
      "epoch": 0.14700522415259384,
      "grad_norm": 1.5454223155975342,
      "learning_rate": 0.0002,
      "loss": 1.1725,
      "step": 2420
    },
    {
      "epoch": 0.1476126837565302,
      "grad_norm": 1.3396209478378296,
      "learning_rate": 0.0002,
      "loss": 1.243,
      "step": 2430
    },
    {
      "epoch": 0.14822014336046652,
      "grad_norm": 1.4007655382156372,
      "learning_rate": 0.0002,
      "loss": 1.2339,
      "step": 2440
    },
    {
      "epoch": 0.14882760296440287,
      "grad_norm": 1.459521770477295,
      "learning_rate": 0.0002,
      "loss": 1.2409,
      "step": 2450
    },
    {
      "epoch": 0.14943506256833922,
      "grad_norm": 1.4330353736877441,
      "learning_rate": 0.0002,
      "loss": 1.265,
      "step": 2460
    },
    {
      "epoch": 0.15004252217227554,
      "grad_norm": 1.3534977436065674,
      "learning_rate": 0.0002,
      "loss": 1.1958,
      "step": 2470
    },
    {
      "epoch": 0.1506499817762119,
      "grad_norm": 1.6496937274932861,
      "learning_rate": 0.0002,
      "loss": 1.2301,
      "step": 2480
    },
    {
      "epoch": 0.1512574413801482,
      "grad_norm": 1.6388399600982666,
      "learning_rate": 0.0002,
      "loss": 1.2952,
      "step": 2490
    },
    {
      "epoch": 0.15186490098408456,
      "grad_norm": 1.3567882776260376,
      "learning_rate": 0.0002,
      "loss": 1.202,
      "step": 2500
    }
  ],
  "logging_steps": 10,
  "max_steps": 49386,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3843779431581532e+18,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}
|
|