{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8808164257076905,
  "eval_steps": 500,
  "global_step": 14500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006074596039363382,
      "grad_norm": 2.236969232559204,
      "learning_rate": 0.0002,
      "loss": 2.2628,
      "step": 10
    },
    {
      "epoch": 0.0012149192078726764,
      "grad_norm": 1.873207688331604,
      "learning_rate": 0.0002,
      "loss": 1.6589,
      "step": 20
    },
    {
      "epoch": 0.0018223788118090147,
      "grad_norm": 1.7144545316696167,
      "learning_rate": 0.0002,
      "loss": 1.6129,
      "step": 30
    },
    {
      "epoch": 0.0024298384157453528,
      "grad_norm": 1.6338379383087158,
      "learning_rate": 0.0002,
      "loss": 1.565,
      "step": 40
    },
    {
      "epoch": 0.003037298019681691,
      "grad_norm": 1.733494758605957,
      "learning_rate": 0.0002,
      "loss": 1.5043,
      "step": 50
    },
    {
      "epoch": 0.0036447576236180294,
      "grad_norm": 1.6118093729019165,
      "learning_rate": 0.0002,
      "loss": 1.4686,
      "step": 60
    },
    {
      "epoch": 0.004252217227554368,
      "grad_norm": 1.5905429124832153,
      "learning_rate": 0.0002,
      "loss": 1.4902,
      "step": 70
    },
    {
      "epoch": 0.0048596768314907056,
      "grad_norm": 1.667108416557312,
      "learning_rate": 0.0002,
      "loss": 1.4311,
      "step": 80
    },
    {
      "epoch": 0.005467136435427044,
      "grad_norm": 1.813370943069458,
      "learning_rate": 0.0002,
      "loss": 1.3872,
      "step": 90
    },
    {
      "epoch": 0.006074596039363382,
      "grad_norm": 1.6819970607757568,
      "learning_rate": 0.0002,
      "loss": 1.4375,
      "step": 100
    },
    {
      "epoch": 0.006682055643299721,
      "grad_norm": 1.6235817670822144,
      "learning_rate": 0.0002,
      "loss": 1.4136,
      "step": 110
    },
    {
      "epoch": 0.007289515247236059,
      "grad_norm": 1.6433429718017578,
      "learning_rate": 0.0002,
      "loss": 1.3771,
      "step": 120
    },
    {
      "epoch": 0.007896974851172398,
      "grad_norm": 1.6093605756759644,
      "learning_rate": 0.0002,
      "loss": 1.4332,
      "step": 130
    },
    {
      "epoch": 0.008504434455108735,
      "grad_norm": 1.63667893409729,
      "learning_rate": 0.0002,
      "loss": 1.4148,
      "step": 140
    },
    {
      "epoch": 0.009111894059045073,
      "grad_norm": 1.5507344007492065,
      "learning_rate": 0.0002,
      "loss": 1.3764,
      "step": 150
    },
    {
      "epoch": 0.009719353662981411,
      "grad_norm": 1.6159253120422363,
      "learning_rate": 0.0002,
      "loss": 1.3141,
      "step": 160
    },
    {
      "epoch": 0.01032681326691775,
      "grad_norm": 1.6511636972427368,
      "learning_rate": 0.0002,
      "loss": 1.4633,
      "step": 170
    },
    {
      "epoch": 0.010934272870854089,
      "grad_norm": 1.8297101259231567,
      "learning_rate": 0.0002,
      "loss": 1.3747,
      "step": 180
    },
    {
      "epoch": 0.011541732474790426,
      "grad_norm": 1.8446108102798462,
      "learning_rate": 0.0002,
      "loss": 1.3742,
      "step": 190
    },
    {
      "epoch": 0.012149192078726764,
      "grad_norm": 1.6419591903686523,
      "learning_rate": 0.0002,
      "loss": 1.329,
      "step": 200
    },
    {
      "epoch": 0.012756651682663102,
      "grad_norm": 1.595116376876831,
      "learning_rate": 0.0002,
      "loss": 1.3658,
      "step": 210
    },
    {
      "epoch": 0.013364111286599442,
      "grad_norm": 1.6085125207901,
      "learning_rate": 0.0002,
      "loss": 1.3062,
      "step": 220
    },
    {
      "epoch": 0.01397157089053578,
      "grad_norm": 1.6018210649490356,
      "learning_rate": 0.0002,
      "loss": 1.3563,
      "step": 230
    },
    {
      "epoch": 0.014579030494472118,
      "grad_norm": 1.5757384300231934,
      "learning_rate": 0.0002,
      "loss": 1.4252,
      "step": 240
    },
    {
      "epoch": 0.015186490098408455,
      "grad_norm": 1.6396926641464233,
      "learning_rate": 0.0002,
      "loss": 1.3044,
      "step": 250
    },
    {
      "epoch": 0.015793949702344795,
      "grad_norm": 1.7595162391662598,
      "learning_rate": 0.0002,
      "loss": 1.3885,
      "step": 260
    },
    {
      "epoch": 0.01640140930628113,
      "grad_norm": 1.5528141260147095,
      "learning_rate": 0.0002,
      "loss": 1.2728,
      "step": 270
    },
    {
      "epoch": 0.01700886891021747,
      "grad_norm": 1.536747932434082,
      "learning_rate": 0.0002,
      "loss": 1.3512,
      "step": 280
    },
    {
      "epoch": 0.01761632851415381,
      "grad_norm": 1.6435034275054932,
      "learning_rate": 0.0002,
      "loss": 1.338,
      "step": 290
    },
    {
      "epoch": 0.018223788118090146,
      "grad_norm": 1.5331226587295532,
      "learning_rate": 0.0002,
      "loss": 1.3519,
      "step": 300
    },
    {
      "epoch": 0.018831247722026486,
      "grad_norm": 1.6326663494110107,
      "learning_rate": 0.0002,
      "loss": 1.3089,
      "step": 310
    },
    {
      "epoch": 0.019438707325962822,
      "grad_norm": 1.567794919013977,
      "learning_rate": 0.0002,
      "loss": 1.253,
      "step": 320
    },
    {
      "epoch": 0.020046166929899162,
      "grad_norm": 1.5589838027954102,
      "learning_rate": 0.0002,
      "loss": 1.33,
      "step": 330
    },
    {
      "epoch": 0.0206536265338355,
      "grad_norm": 1.5770776271820068,
      "learning_rate": 0.0002,
      "loss": 1.3316,
      "step": 340
    },
    {
      "epoch": 0.021261086137771838,
      "grad_norm": 1.4471594095230103,
      "learning_rate": 0.0002,
      "loss": 1.2864,
      "step": 350
    },
    {
      "epoch": 0.021868545741708177,
      "grad_norm": 1.5740830898284912,
      "learning_rate": 0.0002,
      "loss": 1.2921,
      "step": 360
    },
    {
      "epoch": 0.022476005345644513,
      "grad_norm": 1.7635418176651,
      "learning_rate": 0.0002,
      "loss": 1.2212,
      "step": 370
    },
    {
      "epoch": 0.023083464949580853,
      "grad_norm": 1.5588957071304321,
      "learning_rate": 0.0002,
      "loss": 1.226,
      "step": 380
    },
    {
      "epoch": 0.023690924553517193,
      "grad_norm": 1.6329195499420166,
      "learning_rate": 0.0002,
      "loss": 1.3694,
      "step": 390
    },
    {
      "epoch": 0.02429838415745353,
      "grad_norm": 1.5749831199645996,
      "learning_rate": 0.0002,
      "loss": 1.3151,
      "step": 400
    },
    {
      "epoch": 0.024905843761389868,
      "grad_norm": 1.776865839958191,
      "learning_rate": 0.0002,
      "loss": 1.2971,
      "step": 410
    },
    {
      "epoch": 0.025513303365326204,
      "grad_norm": 1.4414085149765015,
      "learning_rate": 0.0002,
      "loss": 1.3434,
      "step": 420
    },
    {
      "epoch": 0.026120762969262544,
      "grad_norm": 1.672532081604004,
      "learning_rate": 0.0002,
      "loss": 1.2647,
      "step": 430
    },
    {
      "epoch": 0.026728222573198884,
      "grad_norm": 1.7707642316818237,
      "learning_rate": 0.0002,
      "loss": 1.2875,
      "step": 440
    },
    {
      "epoch": 0.02733568217713522,
      "grad_norm": 1.510501503944397,
      "learning_rate": 0.0002,
      "loss": 1.2962,
      "step": 450
    },
    {
      "epoch": 0.02794314178107156,
      "grad_norm": 1.5336624383926392,
      "learning_rate": 0.0002,
      "loss": 1.3686,
      "step": 460
    },
    {
      "epoch": 0.028550601385007895,
      "grad_norm": 1.785957932472229,
      "learning_rate": 0.0002,
      "loss": 1.2887,
      "step": 470
    },
    {
      "epoch": 0.029158060988944235,
      "grad_norm": 1.5779095888137817,
      "learning_rate": 0.0002,
      "loss": 1.3048,
      "step": 480
    },
    {
      "epoch": 0.029765520592880575,
      "grad_norm": 1.9468932151794434,
      "learning_rate": 0.0002,
      "loss": 1.2892,
      "step": 490
    },
    {
      "epoch": 0.03037298019681691,
      "grad_norm": 1.5167356729507446,
      "learning_rate": 0.0002,
      "loss": 1.3858,
      "step": 500
    },
    {
      "epoch": 0.03098043980075325,
      "grad_norm": 1.3795329332351685,
      "learning_rate": 0.0002,
      "loss": 1.3023,
      "step": 510
    },
    {
      "epoch": 0.03158789940468959,
      "grad_norm": 1.5920099020004272,
      "learning_rate": 0.0002,
      "loss": 1.3765,
      "step": 520
    },
    {
      "epoch": 0.032195359008625926,
      "grad_norm": 1.462296724319458,
      "learning_rate": 0.0002,
      "loss": 1.2663,
      "step": 530
    },
    {
      "epoch": 0.03280281861256226,
      "grad_norm": 1.3863338232040405,
      "learning_rate": 0.0002,
      "loss": 1.2386,
      "step": 540
    },
    {
      "epoch": 0.033410278216498605,
      "grad_norm": 1.8175960779190063,
      "learning_rate": 0.0002,
      "loss": 1.3238,
      "step": 550
    },
    {
      "epoch": 0.03401773782043494,
      "grad_norm": 1.7048077583312988,
      "learning_rate": 0.0002,
      "loss": 1.257,
      "step": 560
    },
    {
      "epoch": 0.03462519742437128,
      "grad_norm": 1.5479084253311157,
      "learning_rate": 0.0002,
      "loss": 1.23,
      "step": 570
    },
    {
      "epoch": 0.03523265702830762,
      "grad_norm": 1.515726089477539,
      "learning_rate": 0.0002,
      "loss": 1.2893,
      "step": 580
    },
    {
      "epoch": 0.03584011663224396,
      "grad_norm": 1.7023965120315552,
      "learning_rate": 0.0002,
      "loss": 1.2783,
      "step": 590
    },
    {
      "epoch": 0.03644757623618029,
      "grad_norm": 1.7180956602096558,
      "learning_rate": 0.0002,
      "loss": 1.2578,
      "step": 600
    },
    {
      "epoch": 0.03705503584011663,
      "grad_norm": 1.3848283290863037,
      "learning_rate": 0.0002,
      "loss": 1.349,
      "step": 610
    },
    {
      "epoch": 0.03766249544405297,
      "grad_norm": 1.4944247007369995,
      "learning_rate": 0.0002,
      "loss": 1.2718,
      "step": 620
    },
    {
      "epoch": 0.03826995504798931,
      "grad_norm": 1.581346869468689,
      "learning_rate": 0.0002,
      "loss": 1.2798,
      "step": 630
    },
    {
      "epoch": 0.038877414651925644,
      "grad_norm": 1.490548849105835,
      "learning_rate": 0.0002,
      "loss": 1.2878,
      "step": 640
    },
    {
      "epoch": 0.03948487425586199,
      "grad_norm": 1.4963841438293457,
      "learning_rate": 0.0002,
      "loss": 1.2777,
      "step": 650
    },
    {
      "epoch": 0.040092333859798324,
      "grad_norm": 1.729665994644165,
      "learning_rate": 0.0002,
      "loss": 1.2781,
      "step": 660
    },
    {
      "epoch": 0.04069979346373466,
      "grad_norm": 1.5069007873535156,
      "learning_rate": 0.0002,
      "loss": 1.2978,
      "step": 670
    },
    {
      "epoch": 0.041307253067671,
      "grad_norm": 4.421722412109375,
      "learning_rate": 0.0002,
      "loss": 1.2765,
      "step": 680
    },
    {
      "epoch": 0.04191471267160734,
      "grad_norm": 1.6383577585220337,
      "learning_rate": 0.0002,
      "loss": 1.3364,
      "step": 690
    },
    {
      "epoch": 0.042522172275543675,
      "grad_norm": 1.9025250673294067,
      "learning_rate": 0.0002,
      "loss": 1.3184,
      "step": 700
    },
    {
      "epoch": 0.04312963187948001,
      "grad_norm": 1.4485440254211426,
      "learning_rate": 0.0002,
      "loss": 1.2749,
      "step": 710
    },
    {
      "epoch": 0.043737091483416354,
      "grad_norm": 1.3731114864349365,
      "learning_rate": 0.0002,
      "loss": 1.2254,
      "step": 720
    },
    {
      "epoch": 0.04434455108735269,
      "grad_norm": 2.1447882652282715,
      "learning_rate": 0.0002,
      "loss": 1.227,
      "step": 730
    },
    {
      "epoch": 0.04495201069128903,
      "grad_norm": 1.4844560623168945,
      "learning_rate": 0.0002,
      "loss": 1.2656,
      "step": 740
    },
    {
      "epoch": 0.04555947029522537,
      "grad_norm": 1.6620945930480957,
      "learning_rate": 0.0002,
      "loss": 1.299,
      "step": 750
    },
    {
      "epoch": 0.046166929899161706,
      "grad_norm": 1.4194166660308838,
      "learning_rate": 0.0002,
      "loss": 1.2961,
      "step": 760
    },
    {
      "epoch": 0.04677438950309804,
      "grad_norm": 1.4977781772613525,
      "learning_rate": 0.0002,
      "loss": 1.324,
      "step": 770
    },
    {
      "epoch": 0.047381849107034385,
      "grad_norm": 1.5294965505599976,
      "learning_rate": 0.0002,
      "loss": 1.2344,
      "step": 780
    },
    {
      "epoch": 0.04798930871097072,
      "grad_norm": 1.6185721158981323,
      "learning_rate": 0.0002,
      "loss": 1.3811,
      "step": 790
    },
    {
      "epoch": 0.04859676831490706,
      "grad_norm": 1.8349323272705078,
      "learning_rate": 0.0002,
      "loss": 1.2636,
      "step": 800
    },
    {
      "epoch": 0.0492042279188434,
      "grad_norm": 1.4457494020462036,
      "learning_rate": 0.0002,
      "loss": 1.278,
      "step": 810
    },
    {
      "epoch": 0.049811687522779736,
      "grad_norm": 1.3572221994400024,
      "learning_rate": 0.0002,
      "loss": 1.2306,
      "step": 820
    },
    {
      "epoch": 0.05041914712671607,
      "grad_norm": 1.3937549591064453,
      "learning_rate": 0.0002,
      "loss": 1.3023,
      "step": 830
    },
    {
      "epoch": 0.05102660673065241,
      "grad_norm": 1.450095295906067,
      "learning_rate": 0.0002,
      "loss": 1.2393,
      "step": 840
    },
    {
      "epoch": 0.05163406633458875,
      "grad_norm": 1.4043591022491455,
      "learning_rate": 0.0002,
      "loss": 1.2574,
      "step": 850
    },
    {
      "epoch": 0.05224152593852509,
      "grad_norm": 1.4549307823181152,
      "learning_rate": 0.0002,
      "loss": 1.2938,
      "step": 860
    },
    {
      "epoch": 0.052848985542461424,
      "grad_norm": 1.4602893590927124,
      "learning_rate": 0.0002,
      "loss": 1.1861,
      "step": 870
    },
    {
      "epoch": 0.05345644514639777,
      "grad_norm": 1.867261290550232,
      "learning_rate": 0.0002,
      "loss": 1.3015,
      "step": 880
    },
    {
      "epoch": 0.0540639047503341,
      "grad_norm": 1.6120606660842896,
      "learning_rate": 0.0002,
      "loss": 1.268,
      "step": 890
    },
    {
      "epoch": 0.05467136435427044,
      "grad_norm": 1.6458678245544434,
      "learning_rate": 0.0002,
      "loss": 1.2661,
      "step": 900
    },
    {
      "epoch": 0.05527882395820678,
      "grad_norm": 1.729551076889038,
      "learning_rate": 0.0002,
      "loss": 1.28,
      "step": 910
    },
    {
      "epoch": 0.05588628356214312,
      "grad_norm": 1.5503125190734863,
      "learning_rate": 0.0002,
      "loss": 1.2612,
      "step": 920
    },
    {
      "epoch": 0.056493743166079455,
      "grad_norm": 1.5638302564620972,
      "learning_rate": 0.0002,
      "loss": 1.3259,
      "step": 930
    },
    {
      "epoch": 0.05710120277001579,
      "grad_norm": 1.4959323406219482,
      "learning_rate": 0.0002,
      "loss": 1.2489,
      "step": 940
    },
    {
      "epoch": 0.057708662373952134,
      "grad_norm": 1.5417566299438477,
      "learning_rate": 0.0002,
      "loss": 1.2675,
      "step": 950
    },
    {
      "epoch": 0.05831612197788847,
      "grad_norm": 1.5263869762420654,
      "learning_rate": 0.0002,
      "loss": 1.3438,
      "step": 960
    },
    {
      "epoch": 0.058923581581824806,
      "grad_norm": 1.4479107856750488,
      "learning_rate": 0.0002,
      "loss": 1.2868,
      "step": 970
    },
    {
      "epoch": 0.05953104118576115,
      "grad_norm": 1.678945541381836,
      "learning_rate": 0.0002,
      "loss": 1.2204,
      "step": 980
    },
    {
      "epoch": 0.060138500789697485,
      "grad_norm": 1.431054949760437,
      "learning_rate": 0.0002,
      "loss": 1.2624,
      "step": 990
    },
    {
      "epoch": 0.06074596039363382,
      "grad_norm": 1.6529697179794312,
      "learning_rate": 0.0002,
      "loss": 1.2867,
      "step": 1000
    },
    {
      "epoch": 0.061353419997570165,
      "grad_norm": 1.4838560819625854,
      "learning_rate": 0.0002,
      "loss": 1.2556,
      "step": 1010
    },
    {
      "epoch": 0.0619608796015065,
      "grad_norm": 1.303206443786621,
      "learning_rate": 0.0002,
      "loss": 1.1895,
      "step": 1020
    },
    {
      "epoch": 0.06256833920544284,
      "grad_norm": 1.6381781101226807,
      "learning_rate": 0.0002,
      "loss": 1.2975,
      "step": 1030
    },
    {
      "epoch": 0.06317579880937918,
      "grad_norm": 1.589048981666565,
      "learning_rate": 0.0002,
      "loss": 1.2776,
      "step": 1040
    },
    {
      "epoch": 0.06378325841331552,
      "grad_norm": 1.71570885181427,
      "learning_rate": 0.0002,
      "loss": 1.2703,
      "step": 1050
    },
    {
      "epoch": 0.06439071801725185,
      "grad_norm": 1.416674017906189,
      "learning_rate": 0.0002,
      "loss": 1.2465,
      "step": 1060
    },
    {
      "epoch": 0.06499817762118819,
      "grad_norm": 1.773748517036438,
      "learning_rate": 0.0002,
      "loss": 1.339,
      "step": 1070
    },
    {
      "epoch": 0.06560563722512452,
      "grad_norm": 1.5272996425628662,
      "learning_rate": 0.0002,
      "loss": 1.2973,
      "step": 1080
    },
    {
      "epoch": 0.06621309682906086,
      "grad_norm": 1.3926866054534912,
      "learning_rate": 0.0002,
      "loss": 1.3862,
      "step": 1090
    },
    {
      "epoch": 0.06682055643299721,
      "grad_norm": 1.576254963874817,
      "learning_rate": 0.0002,
      "loss": 1.2522,
      "step": 1100
    },
    {
      "epoch": 0.06742801603693355,
      "grad_norm": 1.6619774103164673,
      "learning_rate": 0.0002,
      "loss": 1.3027,
      "step": 1110
    },
    {
      "epoch": 0.06803547564086988,
      "grad_norm": 1.4636040925979614,
      "learning_rate": 0.0002,
      "loss": 1.3649,
      "step": 1120
    },
    {
      "epoch": 0.06864293524480622,
      "grad_norm": 1.6307445764541626,
      "learning_rate": 0.0002,
      "loss": 1.2553,
      "step": 1130
    },
    {
      "epoch": 0.06925039484874256,
      "grad_norm": 1.531379222869873,
      "learning_rate": 0.0002,
      "loss": 1.2578,
      "step": 1140
    },
    {
      "epoch": 0.06985785445267889,
      "grad_norm": 1.5244723558425903,
      "learning_rate": 0.0002,
      "loss": 1.2745,
      "step": 1150
    },
    {
      "epoch": 0.07046531405661524,
      "grad_norm": 1.7863515615463257,
      "learning_rate": 0.0002,
      "loss": 1.2835,
      "step": 1160
    },
    {
      "epoch": 0.07107277366055158,
      "grad_norm": 1.5718461275100708,
      "learning_rate": 0.0002,
      "loss": 1.2352,
      "step": 1170
    },
    {
      "epoch": 0.07168023326448791,
      "grad_norm": 1.5710781812667847,
      "learning_rate": 0.0002,
      "loss": 1.2357,
      "step": 1180
    },
    {
      "epoch": 0.07228769286842425,
      "grad_norm": 1.4836217164993286,
      "learning_rate": 0.0002,
      "loss": 1.2811,
      "step": 1190
    },
    {
      "epoch": 0.07289515247236059,
      "grad_norm": 1.5769175291061401,
      "learning_rate": 0.0002,
      "loss": 1.3826,
      "step": 1200
    },
    {
      "epoch": 0.07350261207629692,
      "grad_norm": 1.552350640296936,
      "learning_rate": 0.0002,
      "loss": 1.2512,
      "step": 1210
    },
    {
      "epoch": 0.07411007168023326,
      "grad_norm": 1.4635241031646729,
      "learning_rate": 0.0002,
      "loss": 1.3651,
      "step": 1220
    },
    {
      "epoch": 0.07471753128416961,
      "grad_norm": 1.6063421964645386,
      "learning_rate": 0.0002,
      "loss": 1.3135,
      "step": 1230
    },
    {
      "epoch": 0.07532499088810594,
      "grad_norm": 1.3370627164840698,
      "learning_rate": 0.0002,
      "loss": 1.3172,
      "step": 1240
    },
    {
      "epoch": 0.07593245049204228,
      "grad_norm": 1.691219449043274,
      "learning_rate": 0.0002,
      "loss": 1.114,
      "step": 1250
    },
    {
      "epoch": 0.07653991009597862,
      "grad_norm": 1.4442918300628662,
      "learning_rate": 0.0002,
      "loss": 1.2884,
      "step": 1260
    },
    {
      "epoch": 0.07714736969991495,
      "grad_norm": 1.3976494073867798,
      "learning_rate": 0.0002,
      "loss": 1.2585,
      "step": 1270
    },
    {
      "epoch": 0.07775482930385129,
      "grad_norm": 1.357978105545044,
      "learning_rate": 0.0002,
      "loss": 1.2449,
      "step": 1280
    },
    {
      "epoch": 0.07836228890778764,
      "grad_norm": 1.4768654108047485,
      "learning_rate": 0.0002,
      "loss": 1.1673,
      "step": 1290
    },
    {
      "epoch": 0.07896974851172398,
      "grad_norm": 1.3699668645858765,
      "learning_rate": 0.0002,
      "loss": 1.3029,
      "step": 1300
    },
    {
      "epoch": 0.07957720811566031,
      "grad_norm": 1.494268774986267,
      "learning_rate": 0.0002,
      "loss": 1.2156,
      "step": 1310
    },
    {
      "epoch": 0.08018466771959665,
      "grad_norm": 1.4966034889221191,
      "learning_rate": 0.0002,
      "loss": 1.2387,
      "step": 1320
    },
    {
      "epoch": 0.08079212732353298,
      "grad_norm": 1.5984331369400024,
      "learning_rate": 0.0002,
      "loss": 1.2559,
      "step": 1330
    },
    {
      "epoch": 0.08139958692746932,
      "grad_norm": 1.6565812826156616,
      "learning_rate": 0.0002,
      "loss": 1.2035,
      "step": 1340
    },
    {
      "epoch": 0.08200704653140566,
      "grad_norm": 1.3882875442504883,
      "learning_rate": 0.0002,
      "loss": 1.2266,
      "step": 1350
    },
    {
      "epoch": 0.082614506135342,
      "grad_norm": 1.4133611917495728,
      "learning_rate": 0.0002,
      "loss": 1.2908,
      "step": 1360
    },
    {
      "epoch": 0.08322196573927834,
      "grad_norm": 1.5866031646728516,
      "learning_rate": 0.0002,
      "loss": 1.2541,
      "step": 1370
    },
    {
      "epoch": 0.08382942534321468,
      "grad_norm": 1.7129892110824585,
      "learning_rate": 0.0002,
      "loss": 1.2653,
      "step": 1380
    },
    {
      "epoch": 0.08443688494715101,
      "grad_norm": 1.821727991104126,
      "learning_rate": 0.0002,
      "loss": 1.2413,
      "step": 1390
    },
    {
      "epoch": 0.08504434455108735,
      "grad_norm": 1.5216853618621826,
      "learning_rate": 0.0002,
      "loss": 1.2449,
      "step": 1400
    },
    {
      "epoch": 0.08565180415502369,
      "grad_norm": 1.3088600635528564,
      "learning_rate": 0.0002,
      "loss": 1.1995,
      "step": 1410
    },
    {
      "epoch": 0.08625926375896002,
      "grad_norm": 1.467633843421936,
      "learning_rate": 0.0002,
      "loss": 1.2901,
      "step": 1420
    },
    {
      "epoch": 0.08686672336289637,
      "grad_norm": 1.467429518699646,
      "learning_rate": 0.0002,
      "loss": 1.2593,
      "step": 1430
    },
    {
      "epoch": 0.08747418296683271,
      "grad_norm": 1.5163699388504028,
      "learning_rate": 0.0002,
      "loss": 1.3415,
      "step": 1440
    },
    {
      "epoch": 0.08808164257076904,
      "grad_norm": 1.4762097597122192,
      "learning_rate": 0.0002,
      "loss": 1.2328,
      "step": 1450
    },
    {
      "epoch": 0.08868910217470538,
      "grad_norm": 1.3106896877288818,
      "learning_rate": 0.0002,
      "loss": 1.2625,
      "step": 1460
    },
    {
      "epoch": 0.08929656177864172,
      "grad_norm": 1.7591036558151245,
      "learning_rate": 0.0002,
      "loss": 1.2654,
      "step": 1470
    },
    {
      "epoch": 0.08990402138257805,
      "grad_norm": 1.425759196281433,
      "learning_rate": 0.0002,
      "loss": 1.2492,
      "step": 1480
    },
    {
      "epoch": 0.0905114809865144,
      "grad_norm": 1.4507300853729248,
      "learning_rate": 0.0002,
      "loss": 1.3173,
      "step": 1490
    },
    {
      "epoch": 0.09111894059045074,
      "grad_norm": 1.564005970954895,
      "learning_rate": 0.0002,
      "loss": 1.2037,
      "step": 1500
    },
    {
      "epoch": 0.09172640019438708,
      "grad_norm": 1.4253226518630981,
      "learning_rate": 0.0002,
      "loss": 1.2143,
      "step": 1510
    },
    {
      "epoch": 0.09233385979832341,
      "grad_norm": 1.4880207777023315,
      "learning_rate": 0.0002,
      "loss": 1.2818,
      "step": 1520
    },
    {
      "epoch": 0.09294131940225975,
      "grad_norm": 1.439846396446228,
      "learning_rate": 0.0002,
      "loss": 1.2082,
      "step": 1530
    },
    {
      "epoch": 0.09354877900619608,
      "grad_norm": 1.5116918087005615,
      "learning_rate": 0.0002,
      "loss": 1.279,
      "step": 1540
    },
    {
      "epoch": 0.09415623861013242,
      "grad_norm": 1.2679647207260132,
      "learning_rate": 0.0002,
      "loss": 1.2541,
      "step": 1550
    },
    {
      "epoch": 0.09476369821406877,
      "grad_norm": 1.4966439008712769,
      "learning_rate": 0.0002,
      "loss": 1.2426,
      "step": 1560
    },
    {
      "epoch": 0.0953711578180051,
      "grad_norm": 1.795647144317627,
      "learning_rate": 0.0002,
      "loss": 1.2582,
      "step": 1570
    },
    {
      "epoch": 0.09597861742194144,
      "grad_norm": 1.485668659210205,
      "learning_rate": 0.0002,
      "loss": 1.2277,
      "step": 1580
    },
    {
      "epoch": 0.09658607702587778,
      "grad_norm": 1.4666286706924438,
      "learning_rate": 0.0002,
      "loss": 1.1659,
      "step": 1590
    },
    {
      "epoch": 0.09719353662981411,
      "grad_norm": 1.4714016914367676,
      "learning_rate": 0.0002,
      "loss": 1.2541,
      "step": 1600
    },
    {
      "epoch": 0.09780099623375045,
      "grad_norm": 1.351139783859253,
      "learning_rate": 0.0002,
      "loss": 1.2047,
      "step": 1610
    },
    {
      "epoch": 0.0984084558376868,
      "grad_norm": 1.4304152727127075,
      "learning_rate": 0.0002,
      "loss": 1.2028,
      "step": 1620
    },
    {
      "epoch": 0.09901591544162314,
      "grad_norm": 1.3749319314956665,
      "learning_rate": 0.0002,
      "loss": 1.2509,
      "step": 1630
    },
    {
      "epoch": 0.09962337504555947,
      "grad_norm": 1.5823308229446411,
      "learning_rate": 0.0002,
      "loss": 1.2278,
      "step": 1640
    },
    {
      "epoch": 0.10023083464949581,
      "grad_norm": 1.3146113157272339,
      "learning_rate": 0.0002,
      "loss": 1.2092,
      "step": 1650
    },
    {
      "epoch": 0.10083829425343215,
      "grad_norm": 1.5478814840316772,
      "learning_rate": 0.0002,
      "loss": 1.284,
      "step": 1660
    },
    {
      "epoch": 0.10144575385736848,
      "grad_norm": 1.6172051429748535,
      "learning_rate": 0.0002,
      "loss": 1.2521,
      "step": 1670
    },
    {
      "epoch": 0.10205321346130482,
      "grad_norm": 1.52104914188385,
      "learning_rate": 0.0002,
      "loss": 1.2477,
      "step": 1680
    },
    {
      "epoch": 0.10266067306524117,
      "grad_norm": 1.5709279775619507,
      "learning_rate": 0.0002,
      "loss": 1.2424,
      "step": 1690
    },
    {
      "epoch": 0.1032681326691775,
      "grad_norm": 1.4448177814483643,
      "learning_rate": 0.0002,
      "loss": 1.2296,
      "step": 1700
    },
    {
      "epoch": 0.10387559227311384,
      "grad_norm": 1.3035105466842651,
      "learning_rate": 0.0002,
      "loss": 1.2177,
      "step": 1710
    },
    {
      "epoch": 0.10448305187705018,
      "grad_norm": 1.6021480560302734,
      "learning_rate": 0.0002,
      "loss": 1.2391,
      "step": 1720
    },
    {
      "epoch": 0.10509051148098651,
      "grad_norm": 1.5413893461227417,
      "learning_rate": 0.0002,
      "loss": 1.2627,
      "step": 1730
    },
    {
      "epoch": 0.10569797108492285,
      "grad_norm": 1.582407832145691,
      "learning_rate": 0.0002,
      "loss": 1.2344,
      "step": 1740
    },
    {
      "epoch": 0.10630543068885918,
      "grad_norm": 1.4765949249267578,
      "learning_rate": 0.0002,
      "loss": 1.2268,
      "step": 1750
    },
    {
      "epoch": 0.10691289029279553,
      "grad_norm": 1.6818124055862427,
      "learning_rate": 0.0002,
      "loss": 1.229,
      "step": 1760
    },
    {
      "epoch": 0.10752034989673187,
      "grad_norm": 1.570087194442749,
      "learning_rate": 0.0002,
      "loss": 1.322,
      "step": 1770
    },
    {
      "epoch": 0.1081278095006682,
      "grad_norm": 1.5548206567764282,
      "learning_rate": 0.0002,
      "loss": 1.1993,
      "step": 1780
    },
    {
      "epoch": 0.10873526910460454,
      "grad_norm": 1.4159990549087524,
      "learning_rate": 0.0002,
      "loss": 1.2136,
      "step": 1790
    },
    {
      "epoch": 0.10934272870854088,
      "grad_norm": 1.4259227514266968,
      "learning_rate": 0.0002,
      "loss": 1.2826,
      "step": 1800
    },
    {
      "epoch": 0.10995018831247722,
      "grad_norm": 1.721754550933838,
      "learning_rate": 0.0002,
      "loss": 1.2958,
      "step": 1810
    },
    {
      "epoch": 0.11055764791641357,
      "grad_norm": 1.5920616388320923,
      "learning_rate": 0.0002,
      "loss": 1.2152,
      "step": 1820
    },
    {
      "epoch": 0.1111651075203499,
      "grad_norm": 1.4243106842041016,
      "learning_rate": 0.0002,
      "loss": 1.2273,
      "step": 1830
    },
    {
      "epoch": 0.11177256712428624,
      "grad_norm": 1.5096100568771362,
      "learning_rate": 0.0002,
      "loss": 1.2232,
      "step": 1840
    },
    {
      "epoch": 0.11238002672822257,
      "grad_norm": 1.573041558265686,
      "learning_rate": 0.0002,
      "loss": 1.2303,
      "step": 1850
    },
    {
      "epoch": 0.11298748633215891,
      "grad_norm": 1.546109676361084,
      "learning_rate": 0.0002,
      "loss": 1.2366,
      "step": 1860
    },
    {
      "epoch": 0.11359494593609525,
      "grad_norm": 1.5966582298278809,
      "learning_rate": 0.0002,
      "loss": 1.174,
      "step": 1870
    },
    {
      "epoch": 0.11420240554003158,
      "grad_norm": 1.5600007772445679,
      "learning_rate": 0.0002,
      "loss": 1.1639,
      "step": 1880
    },
    {
      "epoch": 0.11480986514396793,
      "grad_norm": 1.7233026027679443,
      "learning_rate": 0.0002,
      "loss": 1.2161,
      "step": 1890
    },
    {
      "epoch": 0.11541732474790427,
      "grad_norm": 1.5901647806167603,
      "learning_rate": 0.0002,
      "loss": 1.2153,
      "step": 1900
    },
    {
      "epoch": 0.1160247843518406,
      "grad_norm": 1.376562237739563,
      "learning_rate": 0.0002,
      "loss": 1.266,
      "step": 1910
    },
    {
      "epoch": 0.11663224395577694,
      "grad_norm": 1.656231164932251,
      "learning_rate": 0.0002,
      "loss": 1.2664,
      "step": 1920
    },
    {
      "epoch": 0.11723970355971328,
      "grad_norm": 1.356429100036621,
      "learning_rate": 0.0002,
      "loss": 1.2244,
      "step": 1930
    },
    {
      "epoch": 0.11784716316364961,
      "grad_norm": 1.5584162473678589,
      "learning_rate": 0.0002,
      "loss": 1.2545,
      "step": 1940
    },
    {
      "epoch": 0.11845462276758595,
      "grad_norm": 1.5809051990509033,
      "learning_rate": 0.0002,
      "loss": 1.2847,
      "step": 1950
    },
    {
      "epoch": 0.1190620823715223,
      "grad_norm": 1.6206623315811157,
      "learning_rate": 0.0002,
      "loss": 1.1984,
      "step": 1960
    },
    {
      "epoch": 0.11966954197545863,
      "grad_norm": 1.5814268589019775,
      "learning_rate": 0.0002,
      "loss": 1.2822,
      "step": 1970
    },
    {
      "epoch": 0.12027700157939497,
      "grad_norm": 1.6698721647262573,
      "learning_rate": 0.0002,
      "loss": 1.2771,
      "step": 1980
    },
    {
      "epoch": 0.12088446118333131,
      "grad_norm": 1.7568098306655884,
      "learning_rate": 0.0002,
      "loss": 1.2153,
      "step": 1990
    },
    {
      "epoch": 0.12149192078726764,
      "grad_norm": 1.5525120496749878,
      "learning_rate": 0.0002,
      "loss": 1.1899,
      "step": 2000
    },
    {
      "epoch": 0.12209938039120398,
      "grad_norm": 1.451067328453064,
      "learning_rate": 0.0002,
      "loss": 1.2241,
      "step": 2010
    },
    {
      "epoch": 0.12270683999514033,
      "grad_norm": 1.6643682718276978,
      "learning_rate": 0.0002,
      "loss": 1.2726,
      "step": 2020
    },
    {
      "epoch": 0.12331429959907667,
      "grad_norm": 1.7143374681472778,
      "learning_rate": 0.0002,
      "loss": 1.277,
      "step": 2030
    },
    {
      "epoch": 0.123921759203013,
      "grad_norm": 1.7562227249145508,
      "learning_rate": 0.0002,
      "loss": 1.2066,
      "step": 2040
    },
    {
      "epoch": 0.12452921880694934,
      "grad_norm": 1.5043004751205444,
      "learning_rate": 0.0002,
      "loss": 1.1832,
      "step": 2050
    },
    {
      "epoch": 0.1251366784108857,
      "grad_norm": 1.7817741632461548,
      "learning_rate": 0.0002,
      "loss": 1.1517,
      "step": 2060
    },
    {
      "epoch": 0.125744138014822,
      "grad_norm": 1.7085005044937134,
      "learning_rate": 0.0002,
      "loss": 1.2548,
      "step": 2070
    },
    {
      "epoch": 0.12635159761875836,
      "grad_norm": 1.3320608139038086,
      "learning_rate": 0.0002,
      "loss": 1.1689,
      "step": 2080
    },
    {
      "epoch": 0.12695905722269468,
      "grad_norm": 1.2270746231079102,
      "learning_rate": 0.0002,
      "loss": 1.3156,
      "step": 2090
    },
    {
      "epoch": 0.12756651682663103,
      "grad_norm": 1.4981920719146729,
      "learning_rate": 0.0002,
      "loss": 1.2243,
      "step": 2100
    },
    {
      "epoch": 0.12817397643056735,
      "grad_norm": 1.4143863916397095,
      "learning_rate": 0.0002,
      "loss": 1.2765,
      "step": 2110
    },
    {
      "epoch": 0.1287814360345037,
      "grad_norm": 1.4701218605041504,
      "learning_rate": 0.0002,
      "loss": 1.1679,
      "step": 2120
    },
    {
      "epoch": 0.12938889563844005,
      "grad_norm": 1.3249510526657104,
      "learning_rate": 0.0002,
      "loss": 1.2313,
      "step": 2130
    },
    {
      "epoch": 0.12999635524237638,
      "grad_norm": 1.3991800546646118,
      "learning_rate": 0.0002,
      "loss": 1.2078,
      "step": 2140
    },
    {
      "epoch": 0.13060381484631273,
      "grad_norm": 1.5967034101486206,
      "learning_rate": 0.0002,
      "loss": 1.1447,
      "step": 2150
    },
    {
      "epoch": 0.13121127445024905,
      "grad_norm": 1.5604697465896606,
      "learning_rate": 0.0002,
      "loss": 1.247,
      "step": 2160
    },
    {
      "epoch": 0.1318187340541854,
      "grad_norm": 1.3492066860198975,
      "learning_rate": 0.0002,
      "loss": 1.2734,
      "step": 2170
    },
    {
      "epoch": 0.13242619365812172,
      "grad_norm": 1.6497987508773804,
      "learning_rate": 0.0002,
      "loss": 1.2067,
      "step": 2180
    },
    {
      "epoch": 0.13303365326205807,
      "grad_norm": 1.4936901330947876,
      "learning_rate": 0.0002,
      "loss": 1.2062,
      "step": 2190
    },
    {
      "epoch": 0.13364111286599442,
      "grad_norm": 1.4576996564865112,
      "learning_rate": 0.0002,
      "loss": 1.2225,
      "step": 2200
    },
    {
      "epoch": 0.13424857246993074,
      "grad_norm": 1.3753255605697632,
      "learning_rate": 0.0002,
      "loss": 1.2431,
      "step": 2210
    },
    {
      "epoch": 0.1348560320738671,
      "grad_norm": 1.563539743423462,
      "learning_rate": 0.0002,
      "loss": 1.2793,
      "step": 2220
    },
    {
      "epoch": 0.13546349167780342,
      "grad_norm": 1.4935153722763062,
      "learning_rate": 0.0002,
      "loss": 1.2361,
      "step": 2230
    },
    {
      "epoch": 0.13607095128173977,
      "grad_norm": 1.307816505432129,
      "learning_rate": 0.0002,
      "loss": 1.2314,
      "step": 2240
    },
    {
      "epoch": 0.13667841088567612,
      "grad_norm": 1.535267949104309,
      "learning_rate": 0.0002,
      "loss": 1.224,
      "step": 2250
    },
    {
      "epoch": 0.13728587048961244,
      "grad_norm": 1.3963550329208374,
      "learning_rate": 0.0002,
      "loss": 1.2803,
      "step": 2260
    },
    {
      "epoch": 0.1378933300935488,
      "grad_norm": 1.527891755104065,
      "learning_rate": 0.0002,
      "loss": 1.1724,
      "step": 2270
    },
    {
      "epoch": 0.1385007896974851,
      "grad_norm": 1.4339513778686523,
      "learning_rate": 0.0002,
      "loss": 1.2773,
      "step": 2280
    },
    {
      "epoch": 0.13910824930142146,
      "grad_norm": 1.9729173183441162,
      "learning_rate": 0.0002,
      "loss": 1.2676,
      "step": 2290
    },
    {
      "epoch": 0.13971570890535778,
      "grad_norm": 1.4557331800460815,
      "learning_rate": 0.0002,
      "loss": 1.1851,
      "step": 2300
    },
    {
      "epoch": 0.14032316850929413,
      "grad_norm": 1.4285987615585327,
      "learning_rate": 0.0002,
      "loss": 1.2804,
      "step": 2310
    },
    {
      "epoch": 0.14093062811323048,
      "grad_norm": 1.2934836149215698,
      "learning_rate": 0.0002,
      "loss": 1.2107,
      "step": 2320
    },
    {
      "epoch": 0.1415380877171668,
      "grad_norm": 1.3619877099990845,
      "learning_rate": 0.0002,
      "loss": 1.1618,
      "step": 2330
    },
    {
      "epoch": 0.14214554732110316,
      "grad_norm": 1.4161388874053955,
      "learning_rate": 0.0002,
      "loss": 1.228,
      "step": 2340
    },
    {
      "epoch": 0.14275300692503948,
      "grad_norm": 1.3923609256744385,
      "learning_rate": 0.0002,
      "loss": 1.2526,
      "step": 2350
    },
    {
      "epoch": 0.14336046652897583,
      "grad_norm": 1.3667365312576294,
      "learning_rate": 0.0002,
      "loss": 1.2691,
      "step": 2360
    },
    {
      "epoch": 0.14396792613291215,
      "grad_norm": 1.420930027961731,
      "learning_rate": 0.0002,
      "loss": 1.2337,
      "step": 2370
    },
    {
      "epoch": 0.1445753857368485,
      "grad_norm": 1.5273494720458984,
      "learning_rate": 0.0002,
      "loss": 1.2606,
      "step": 2380
    },
    {
      "epoch": 0.14518284534078485,
      "grad_norm": 1.5331289768218994,
      "learning_rate": 0.0002,
      "loss": 1.1693,
      "step": 2390
    },
    {
      "epoch": 0.14579030494472117,
      "grad_norm": 1.8542994260787964,
      "learning_rate": 0.0002,
      "loss": 1.2367,
      "step": 2400
    },
    {
      "epoch": 0.14639776454865752,
      "grad_norm": 1.3973726034164429,
      "learning_rate": 0.0002,
      "loss": 1.27,
      "step": 2410
    },
    {
      "epoch": 0.14700522415259384,
      "grad_norm": 1.5454223155975342,
      "learning_rate": 0.0002,
      "loss": 1.1725,
      "step": 2420
    },
    {
      "epoch": 0.1476126837565302,
      "grad_norm": 1.3396209478378296,
      "learning_rate": 0.0002,
      "loss": 1.243,
      "step": 2430
    },
    {
      "epoch": 0.14822014336046652,
      "grad_norm": 1.4007655382156372,
      "learning_rate": 0.0002,
      "loss": 1.2339,
      "step": 2440
    },
    {
      "epoch": 0.14882760296440287,
      "grad_norm": 1.459521770477295,
      "learning_rate": 0.0002,
      "loss": 1.2409,
      "step": 2450
    },
    {
      "epoch": 0.14943506256833922,
      "grad_norm": 1.4330353736877441,
      "learning_rate": 0.0002,
      "loss": 1.265,
      "step": 2460
    },
    {
      "epoch": 0.15004252217227554,
      "grad_norm": 1.3534977436065674,
      "learning_rate": 0.0002,
      "loss": 1.1958,
      "step": 2470
    },
    {
      "epoch": 0.1506499817762119,
      "grad_norm": 1.6496937274932861,
      "learning_rate": 0.0002,
      "loss": 1.2301,
      "step": 2480
    },
    {
      "epoch": 0.1512574413801482,
      "grad_norm": 1.6388399600982666,
      "learning_rate": 0.0002,
      "loss": 1.2952,
      "step": 2490
    },
    {
      "epoch": 0.15186490098408456,
      "grad_norm": 1.3567882776260376,
      "learning_rate": 0.0002,
      "loss": 1.202,
      "step": 2500
    },
    {
      "epoch": 0.15247236058802088,
      "grad_norm": 1.297202706336975,
      "learning_rate": 0.0002,
      "loss": 1.1876,
      "step": 2510
    },
    {
      "epoch": 0.15307982019195723,
      "grad_norm": 1.912382960319519,
      "learning_rate": 0.0002,
      "loss": 1.2745,
      "step": 2520
    },
    {
      "epoch": 0.15368727979589358,
      "grad_norm": 1.6245383024215698,
      "learning_rate": 0.0002,
      "loss": 1.2009,
      "step": 2530
    },
    {
      "epoch": 0.1542947393998299,
      "grad_norm": 1.4549682140350342,
      "learning_rate": 0.0002,
      "loss": 1.1817,
      "step": 2540
    },
    {
      "epoch": 0.15490219900376626,
      "grad_norm": 1.6212128400802612,
      "learning_rate": 0.0002,
      "loss": 1.2409,
      "step": 2550
    },
    {
      "epoch": 0.15550965860770258,
      "grad_norm": 1.49772310256958,
      "learning_rate": 0.0002,
      "loss": 1.1789,
      "step": 2560
    },
    {
      "epoch": 0.15611711821163893,
      "grad_norm": 1.5688447952270508,
      "learning_rate": 0.0002,
      "loss": 1.2388,
      "step": 2570
    },
    {
      "epoch": 0.15672457781557528,
      "grad_norm": 1.5437800884246826,
      "learning_rate": 0.0002,
      "loss": 1.2927,
      "step": 2580
    },
    {
      "epoch": 0.1573320374195116,
      "grad_norm": 1.2287709712982178,
      "learning_rate": 0.0002,
      "loss": 1.2733,
      "step": 2590
    },
    {
      "epoch": 0.15793949702344795,
      "grad_norm": 1.7542141675949097,
      "learning_rate": 0.0002,
      "loss": 1.2561,
      "step": 2600
    },
    {
      "epoch": 0.15854695662738427,
      "grad_norm": 1.4796082973480225,
      "learning_rate": 0.0002,
      "loss": 1.224,
      "step": 2610
    },
    {
      "epoch": 0.15915441623132062,
      "grad_norm": 1.5932079553604126,
      "learning_rate": 0.0002,
      "loss": 1.2596,
      "step": 2620
    },
    {
      "epoch": 0.15976187583525694,
      "grad_norm": 1.3710395097732544,
      "learning_rate": 0.0002,
      "loss": 1.3019,
      "step": 2630
    },
    {
      "epoch": 0.1603693354391933,
      "grad_norm": 1.393410325050354,
      "learning_rate": 0.0002,
      "loss": 1.1923,
      "step": 2640
    },
    {
      "epoch": 0.16097679504312964,
      "grad_norm": 1.4769561290740967,
      "learning_rate": 0.0002,
      "loss": 1.2147,
      "step": 2650
    },
    {
      "epoch": 0.16158425464706597,
      "grad_norm": 1.402292251586914,
      "learning_rate": 0.0002,
      "loss": 1.325,
      "step": 2660
    },
    {
      "epoch": 0.16219171425100232,
      "grad_norm": 1.546512246131897,
      "learning_rate": 0.0002,
      "loss": 1.2197,
      "step": 2670
    },
    {
      "epoch": 0.16279917385493864,
      "grad_norm": 1.724507451057434,
      "learning_rate": 0.0002,
      "loss": 1.2098,
      "step": 2680
    },
    {
      "epoch": 0.163406633458875,
      "grad_norm": 1.4906692504882812,
      "learning_rate": 0.0002,
      "loss": 1.2176,
      "step": 2690
    },
    {
      "epoch": 0.1640140930628113,
      "grad_norm": 1.510833501815796,
      "learning_rate": 0.0002,
      "loss": 1.2124,
      "step": 2700
    },
    {
      "epoch": 0.16462155266674766,
      "grad_norm": 1.6878187656402588,
      "learning_rate": 0.0002,
      "loss": 1.1886,
      "step": 2710
    },
    {
      "epoch": 0.165229012270684,
      "grad_norm": 1.2423148155212402,
      "learning_rate": 0.0002,
      "loss": 1.2933,
      "step": 2720
    },
    {
      "epoch": 0.16583647187462033,
      "grad_norm": 1.377028465270996,
      "learning_rate": 0.0002,
      "loss": 1.196,
      "step": 2730
    },
    {
      "epoch": 0.16644393147855668,
      "grad_norm": 1.6662096977233887,
      "learning_rate": 0.0002,
      "loss": 1.2769,
      "step": 2740
    },
    {
      "epoch": 0.167051391082493,
      "grad_norm": 1.7372788190841675,
      "learning_rate": 0.0002,
      "loss": 1.1744,
      "step": 2750
    },
    {
      "epoch": 0.16765885068642936,
      "grad_norm": 1.7153924703598022,
      "learning_rate": 0.0002,
      "loss": 1.2828,
      "step": 2760
    },
    {
      "epoch": 0.16826631029036568,
      "grad_norm": 1.474407434463501,
      "learning_rate": 0.0002,
      "loss": 1.118,
      "step": 2770
    },
    {
      "epoch": 0.16887376989430203,
      "grad_norm": 1.4870332479476929,
      "learning_rate": 0.0002,
      "loss": 1.1382,
      "step": 2780
    },
    {
      "epoch": 0.16948122949823838,
      "grad_norm": 1.3868566751480103,
      "learning_rate": 0.0002,
      "loss": 1.1986,
      "step": 2790
    },
    {
      "epoch": 0.1700886891021747,
      "grad_norm": 1.3732783794403076,
      "learning_rate": 0.0002,
      "loss": 1.2488,
      "step": 2800
    },
    {
      "epoch": 0.17069614870611105,
      "grad_norm": 1.3792394399642944,
      "learning_rate": 0.0002,
      "loss": 1.2477,
      "step": 2810
    },
    {
      "epoch": 0.17130360831004737,
      "grad_norm": 1.7015180587768555,
      "learning_rate": 0.0002,
      "loss": 1.2269,
      "step": 2820
    },
    {
      "epoch": 0.17191106791398372,
      "grad_norm": 1.3385919332504272,
      "learning_rate": 0.0002,
      "loss": 1.2585,
      "step": 2830
    },
    {
      "epoch": 0.17251852751792005,
      "grad_norm": 1.7190513610839844,
      "learning_rate": 0.0002,
      "loss": 1.1882,
      "step": 2840
    },
    {
      "epoch": 0.1731259871218564,
      "grad_norm": 1.4032402038574219,
      "learning_rate": 0.0002,
      "loss": 1.0767,
      "step": 2850
    },
    {
      "epoch": 0.17373344672579275,
      "grad_norm": 1.4926598072052002,
      "learning_rate": 0.0002,
      "loss": 1.2304,
      "step": 2860
    },
    {
      "epoch": 0.17434090632972907,
      "grad_norm": 1.3792085647583008,
      "learning_rate": 0.0002,
      "loss": 1.209,
      "step": 2870
    },
    {
      "epoch": 0.17494836593366542,
      "grad_norm": 1.5118519067764282,
      "learning_rate": 0.0002,
      "loss": 1.2282,
      "step": 2880
    },
    {
      "epoch": 0.17555582553760174,
      "grad_norm": 1.3079215288162231,
      "learning_rate": 0.0002,
      "loss": 1.2303,
      "step": 2890
    },
    {
      "epoch": 0.1761632851415381,
      "grad_norm": 1.4393481016159058,
      "learning_rate": 0.0002,
      "loss": 1.2233,
      "step": 2900
    },
    {
      "epoch": 0.17677074474547444,
      "grad_norm": 1.6815558671951294,
      "learning_rate": 0.0002,
      "loss": 1.247,
      "step": 2910
    },
    {
      "epoch": 0.17737820434941076,
      "grad_norm": 1.487924337387085,
      "learning_rate": 0.0002,
      "loss": 1.2744,
      "step": 2920
    },
    {
      "epoch": 0.1779856639533471,
      "grad_norm": 1.4145201444625854,
      "learning_rate": 0.0002,
      "loss": 1.1545,
      "step": 2930
    },
    {
      "epoch": 0.17859312355728343,
      "grad_norm": 1.705073595046997,
      "learning_rate": 0.0002,
      "loss": 1.2912,
      "step": 2940
    },
    {
      "epoch": 0.17920058316121978,
      "grad_norm": 1.6041674613952637,
      "learning_rate": 0.0002,
      "loss": 1.2128,
      "step": 2950
    },
    {
      "epoch": 0.1798080427651561,
      "grad_norm": 1.3653987646102905,
      "learning_rate": 0.0002,
      "loss": 1.259,
      "step": 2960
    },
    {
      "epoch": 0.18041550236909246,
      "grad_norm": 1.7545150518417358,
      "learning_rate": 0.0002,
      "loss": 1.2867,
      "step": 2970
    },
    {
      "epoch": 0.1810229619730288,
      "grad_norm": 1.3169398307800293,
      "learning_rate": 0.0002,
      "loss": 1.2008,
      "step": 2980
    },
    {
      "epoch": 0.18163042157696513,
      "grad_norm": 1.5852036476135254,
      "learning_rate": 0.0002,
      "loss": 1.1975,
      "step": 2990
    },
    {
      "epoch": 0.18223788118090148,
      "grad_norm": 1.4632734060287476,
      "learning_rate": 0.0002,
      "loss": 1.3247,
      "step": 3000
    },
    {
      "epoch": 0.1828453407848378,
      "grad_norm": 1.486884593963623,
      "learning_rate": 0.0002,
      "loss": 1.2443,
      "step": 3010
    },
    {
      "epoch": 0.18345280038877415,
      "grad_norm": 1.5297883749008179,
      "learning_rate": 0.0002,
      "loss": 1.2453,
      "step": 3020
    },
    {
      "epoch": 0.18406025999271047,
      "grad_norm": 1.488099455833435,
      "learning_rate": 0.0002,
      "loss": 1.2177,
      "step": 3030
    },
    {
      "epoch": 0.18466771959664682,
      "grad_norm": 1.335958480834961,
      "learning_rate": 0.0002,
      "loss": 1.2499,
      "step": 3040
    },
    {
      "epoch": 0.18527517920058317,
      "grad_norm": 1.5462721586227417,
      "learning_rate": 0.0002,
      "loss": 1.2287,
      "step": 3050
    },
    {
      "epoch": 0.1858826388045195,
      "grad_norm": 1.4703840017318726,
      "learning_rate": 0.0002,
      "loss": 1.2306,
      "step": 3060
    },
    {
      "epoch": 0.18649009840845585,
      "grad_norm": 1.4151811599731445,
      "learning_rate": 0.0002,
      "loss": 1.2268,
      "step": 3070
    },
    {
      "epoch": 0.18709755801239217,
      "grad_norm": 1.5566822290420532,
      "learning_rate": 0.0002,
      "loss": 1.2039,
      "step": 3080
    },
    {
      "epoch": 0.18770501761632852,
      "grad_norm": 1.4005810022354126,
      "learning_rate": 0.0002,
      "loss": 1.1963,
      "step": 3090
    },
    {
      "epoch": 0.18831247722026484,
      "grad_norm": 1.4320882558822632,
      "learning_rate": 0.0002,
      "loss": 1.2648,
      "step": 3100
    },
    {
      "epoch": 0.1889199368242012,
      "grad_norm": 1.3715740442276,
      "learning_rate": 0.0002,
      "loss": 1.2178,
      "step": 3110
    },
    {
      "epoch": 0.18952739642813754,
      "grad_norm": 1.670812964439392,
      "learning_rate": 0.0002,
      "loss": 1.1718,
      "step": 3120
    },
    {
      "epoch": 0.19013485603207386,
      "grad_norm": 1.5320864915847778,
      "learning_rate": 0.0002,
      "loss": 1.1748,
      "step": 3130
    },
    {
      "epoch": 0.1907423156360102,
      "grad_norm": 1.5839077234268188,
      "learning_rate": 0.0002,
      "loss": 1.2678,
      "step": 3140
    },
    {
      "epoch": 0.19134977523994653,
      "grad_norm": 1.5966272354125977,
      "learning_rate": 0.0002,
      "loss": 1.24,
      "step": 3150
    },
    {
      "epoch": 0.19195723484388288,
      "grad_norm": 1.4464362859725952,
      "learning_rate": 0.0002,
      "loss": 1.2149,
      "step": 3160
    },
    {
      "epoch": 0.1925646944478192,
      "grad_norm": 1.392284631729126,
      "learning_rate": 0.0002,
      "loss": 1.2857,
      "step": 3170
    },
    {
      "epoch": 0.19317215405175556,
      "grad_norm": 1.4531803131103516,
      "learning_rate": 0.0002,
      "loss": 1.1909,
      "step": 3180
    },
    {
      "epoch": 0.1937796136556919,
      "grad_norm": 1.431835412979126,
      "learning_rate": 0.0002,
      "loss": 1.2081,
      "step": 3190
    },
    {
      "epoch": 0.19438707325962823,
      "grad_norm": 1.4906381368637085,
      "learning_rate": 0.0002,
      "loss": 1.2071,
      "step": 3200
    },
    {
      "epoch": 0.19499453286356458,
      "grad_norm": 1.5458217859268188,
      "learning_rate": 0.0002,
      "loss": 1.2037,
      "step": 3210
    },
    {
      "epoch": 0.1956019924675009,
      "grad_norm": 1.4542298316955566,
      "learning_rate": 0.0002,
      "loss": 1.238,
      "step": 3220
    },
    {
      "epoch": 0.19620945207143725,
      "grad_norm": 1.9179754257202148,
      "learning_rate": 0.0002,
      "loss": 1.2077,
      "step": 3230
    },
    {
      "epoch": 0.1968169116753736,
      "grad_norm": 1.3963453769683838,
      "learning_rate": 0.0002,
      "loss": 1.2329,
      "step": 3240
    },
    {
      "epoch": 0.19742437127930992,
      "grad_norm": 1.565443754196167,
      "learning_rate": 0.0002,
      "loss": 1.2155,
      "step": 3250
    },
    {
      "epoch": 0.19803183088324627,
      "grad_norm": 1.4760220050811768,
      "learning_rate": 0.0002,
      "loss": 1.2112,
      "step": 3260
    },
    {
      "epoch": 0.1986392904871826,
      "grad_norm": 1.324994444847107,
      "learning_rate": 0.0002,
      "loss": 1.2491,
      "step": 3270
    },
    {
      "epoch": 0.19924675009111895,
      "grad_norm": 1.2665252685546875,
      "learning_rate": 0.0002,
      "loss": 1.2572,
      "step": 3280
    },
    {
      "epoch": 0.19985420969505527,
      "grad_norm": 1.7519789934158325,
      "learning_rate": 0.0002,
      "loss": 1.2012,
      "step": 3290
    },
    {
      "epoch": 0.20046166929899162,
      "grad_norm": 1.5129839181900024,
      "learning_rate": 0.0002,
      "loss": 1.2935,
      "step": 3300
    },
    {
      "epoch": 0.20106912890292797,
      "grad_norm": 1.7140570878982544,
      "learning_rate": 0.0002,
      "loss": 1.1867,
      "step": 3310
    },
    {
      "epoch": 0.2016765885068643,
      "grad_norm": 1.5850943326950073,
      "learning_rate": 0.0002,
      "loss": 1.214,
      "step": 3320
    },
    {
      "epoch": 0.20228404811080064,
      "grad_norm": 1.4112968444824219,
      "learning_rate": 0.0002,
      "loss": 1.1865,
      "step": 3330
    },
    {
      "epoch": 0.20289150771473696,
      "grad_norm": 1.5837844610214233,
      "learning_rate": 0.0002,
      "loss": 1.2948,
      "step": 3340
    },
    {
      "epoch": 0.2034989673186733,
      "grad_norm": 1.4747986793518066,
      "learning_rate": 0.0002,
      "loss": 1.2132,
      "step": 3350
    },
    {
      "epoch": 0.20410642692260964,
      "grad_norm": 1.5337995290756226,
      "learning_rate": 0.0002,
      "loss": 1.2326,
      "step": 3360
    },
    {
      "epoch": 0.20471388652654599,
      "grad_norm": 1.5909236669540405,
      "learning_rate": 0.0002,
      "loss": 1.1964,
      "step": 3370
    },
    {
      "epoch": 0.20532134613048234,
      "grad_norm": 1.4380537271499634,
      "learning_rate": 0.0002,
      "loss": 1.1912,
      "step": 3380
    },
    {
      "epoch": 0.20592880573441866,
      "grad_norm": 1.317665934562683,
      "learning_rate": 0.0002,
      "loss": 1.2559,
      "step": 3390
    },
    {
      "epoch": 0.206536265338355,
      "grad_norm": 1.4082776308059692,
      "learning_rate": 0.0002,
      "loss": 1.2575,
      "step": 3400
    },
    {
      "epoch": 0.20714372494229133,
      "grad_norm": 1.5903863906860352,
      "learning_rate": 0.0002,
      "loss": 1.2513,
      "step": 3410
    },
    {
      "epoch": 0.20775118454622768,
      "grad_norm": 1.2638167142868042,
      "learning_rate": 0.0002,
      "loss": 1.1693,
      "step": 3420
    },
    {
      "epoch": 0.208358644150164,
      "grad_norm": 1.507632851600647,
      "learning_rate": 0.0002,
      "loss": 1.2523,
      "step": 3430
    },
    {
      "epoch": 0.20896610375410035,
      "grad_norm": 1.4946129322052002,
      "learning_rate": 0.0002,
      "loss": 1.1957,
      "step": 3440
    },
    {
      "epoch": 0.2095735633580367,
      "grad_norm": 1.4943633079528809,
      "learning_rate": 0.0002,
      "loss": 1.2256,
      "step": 3450
    },
    {
      "epoch": 0.21018102296197302,
      "grad_norm": 1.8334934711456299,
      "learning_rate": 0.0002,
      "loss": 1.2509,
      "step": 3460
    },
    {
      "epoch": 0.21078848256590937,
      "grad_norm": 1.2962932586669922,
      "learning_rate": 0.0002,
      "loss": 1.1625,
      "step": 3470
    },
    {
      "epoch": 0.2113959421698457,
      "grad_norm": 1.3973207473754883,
      "learning_rate": 0.0002,
      "loss": 1.2567,
      "step": 3480
    },
    {
      "epoch": 0.21200340177378205,
      "grad_norm": 1.420222520828247,
      "learning_rate": 0.0002,
      "loss": 1.2536,
      "step": 3490
    },
    {
      "epoch": 0.21261086137771837,
      "grad_norm": 1.6628416776657104,
      "learning_rate": 0.0002,
      "loss": 1.3032,
      "step": 3500
    },
    {
      "epoch": 0.21321832098165472,
      "grad_norm": 1.5589535236358643,
      "learning_rate": 0.0002,
      "loss": 1.2311,
      "step": 3510
    },
    {
      "epoch": 0.21382578058559107,
      "grad_norm": 1.5037486553192139,
      "learning_rate": 0.0002,
      "loss": 1.1963,
      "step": 3520
    },
    {
      "epoch": 0.2144332401895274,
      "grad_norm": 1.2449471950531006,
      "learning_rate": 0.0002,
      "loss": 1.1782,
      "step": 3530
    },
    {
      "epoch": 0.21504069979346374,
      "grad_norm": 1.5314222574234009,
      "learning_rate": 0.0002,
      "loss": 1.1905,
      "step": 3540
    },
    {
      "epoch": 0.21564815939740006,
      "grad_norm": 1.4641014337539673,
      "learning_rate": 0.0002,
      "loss": 1.2713,
      "step": 3550
    },
    {
      "epoch": 0.2162556190013364,
      "grad_norm": 1.4204001426696777,
      "learning_rate": 0.0002,
      "loss": 1.1789,
      "step": 3560
    },
    {
      "epoch": 0.21686307860527274,
      "grad_norm": 1.7577894926071167,
      "learning_rate": 0.0002,
      "loss": 1.1719,
      "step": 3570
    },
    {
      "epoch": 0.21747053820920909,
      "grad_norm": 1.3610202074050903,
      "learning_rate": 0.0002,
      "loss": 1.2508,
      "step": 3580
    },
    {
      "epoch": 0.21807799781314544,
      "grad_norm": 1.416024923324585,
      "learning_rate": 0.0002,
      "loss": 1.2606,
      "step": 3590
    },
    {
      "epoch": 0.21868545741708176,
      "grad_norm": 1.3644566535949707,
      "learning_rate": 0.0002,
      "loss": 1.2205,
      "step": 3600
    },
    {
      "epoch": 0.2192929170210181,
      "grad_norm": 1.3592606782913208,
      "learning_rate": 0.0002,
      "loss": 1.2563,
      "step": 3610
    },
    {
      "epoch": 0.21990037662495443,
      "grad_norm": 1.3086717128753662,
      "learning_rate": 0.0002,
      "loss": 1.2059,
      "step": 3620
    },
    {
      "epoch": 0.22050783622889078,
      "grad_norm": 1.4019612073898315,
      "learning_rate": 0.0002,
      "loss": 1.1962,
      "step": 3630
    },
    {
      "epoch": 0.22111529583282713,
      "grad_norm": 1.3697004318237305,
      "learning_rate": 0.0002,
      "loss": 1.2538,
      "step": 3640
    },
    {
      "epoch": 0.22172275543676345,
      "grad_norm": 1.4234853982925415,
      "learning_rate": 0.0002,
      "loss": 1.1667,
      "step": 3650
    },
    {
      "epoch": 0.2223302150406998,
      "grad_norm": 1.3091281652450562,
      "learning_rate": 0.0002,
      "loss": 1.2034,
      "step": 3660
    },
    {
      "epoch": 0.22293767464463612,
      "grad_norm": 1.3341374397277832,
      "learning_rate": 0.0002,
      "loss": 1.2284,
      "step": 3670
    },
    {
      "epoch": 0.22354513424857247,
      "grad_norm": 1.5380843877792358,
      "learning_rate": 0.0002,
      "loss": 1.2105,
      "step": 3680
    },
    {
      "epoch": 0.2241525938525088,
      "grad_norm": 1.5012211799621582,
      "learning_rate": 0.0002,
      "loss": 1.213,
      "step": 3690
    },
    {
      "epoch": 0.22476005345644515,
      "grad_norm": 1.344851016998291,
      "learning_rate": 0.0002,
      "loss": 1.1632,
      "step": 3700
    },
    {
      "epoch": 0.2253675130603815,
      "grad_norm": 1.4722944498062134,
      "learning_rate": 0.0002,
      "loss": 1.2099,
      "step": 3710
    },
    {
      "epoch": 0.22597497266431782,
      "grad_norm": 1.44810950756073,
      "learning_rate": 0.0002,
      "loss": 1.234,
      "step": 3720
    },
    {
      "epoch": 0.22658243226825417,
      "grad_norm": 1.4535653591156006,
      "learning_rate": 0.0002,
      "loss": 1.2002,
      "step": 3730
    },
    {
      "epoch": 0.2271898918721905,
      "grad_norm": 1.623395562171936,
      "learning_rate": 0.0002,
      "loss": 1.2354,
      "step": 3740
    },
    {
      "epoch": 0.22779735147612684,
      "grad_norm": 1.8397951126098633,
      "learning_rate": 0.0002,
      "loss": 1.2425,
      "step": 3750
    },
    {
      "epoch": 0.22840481108006316,
      "grad_norm": 1.3599379062652588,
      "learning_rate": 0.0002,
      "loss": 1.2344,
      "step": 3760
    },
    {
      "epoch": 0.2290122706839995,
      "grad_norm": 1.6713752746582031,
      "learning_rate": 0.0002,
      "loss": 1.2333,
      "step": 3770
    },
    {
      "epoch": 0.22961973028793586,
      "grad_norm": 1.913011074066162,
      "learning_rate": 0.0002,
      "loss": 1.2359,
      "step": 3780
    },
    {
      "epoch": 0.23022718989187219,
      "grad_norm": 1.4186650514602661,
      "learning_rate": 0.0002,
      "loss": 1.2487,
      "step": 3790
    },
    {
      "epoch": 0.23083464949580854,
      "grad_norm": 1.673385500907898,
      "learning_rate": 0.0002,
      "loss": 1.2817,
      "step": 3800
    },
    {
      "epoch": 0.23144210909974486,
      "grad_norm": 1.3863779306411743,
      "learning_rate": 0.0002,
      "loss": 1.1805,
      "step": 3810
    },
    {
      "epoch": 0.2320495687036812,
      "grad_norm": 1.4821914434432983,
      "learning_rate": 0.0002,
      "loss": 1.2159,
      "step": 3820
    },
    {
      "epoch": 0.23265702830761753,
      "grad_norm": 1.27168869972229,
      "learning_rate": 0.0002,
      "loss": 1.236,
      "step": 3830
    },
    {
      "epoch": 0.23326448791155388,
      "grad_norm": 1.464900255203247,
      "learning_rate": 0.0002,
      "loss": 1.2573,
      "step": 3840
    },
    {
      "epoch": 0.23387194751549023,
      "grad_norm": 1.5601277351379395,
      "learning_rate": 0.0002,
      "loss": 1.1867,
      "step": 3850
    },
    {
      "epoch": 0.23447940711942655,
      "grad_norm": 1.4300495386123657,
      "learning_rate": 0.0002,
      "loss": 1.2781,
      "step": 3860
    },
    {
      "epoch": 0.2350868667233629,
      "grad_norm": 1.6343556642532349,
      "learning_rate": 0.0002,
      "loss": 1.1652,
      "step": 3870
    },
    {
      "epoch": 0.23569432632729923,
      "grad_norm": 1.391050934791565,
      "learning_rate": 0.0002,
      "loss": 1.2798,
      "step": 3880
    },
    {
      "epoch": 0.23630178593123558,
      "grad_norm": 1.9890904426574707,
      "learning_rate": 0.0002,
      "loss": 1.2878,
      "step": 3890
    },
    {
      "epoch": 0.2369092455351719,
      "grad_norm": 1.3645399808883667,
      "learning_rate": 0.0002,
      "loss": 1.2392,
      "step": 3900
    },
    {
      "epoch": 0.23751670513910825,
      "grad_norm": 1.3967126607894897,
      "learning_rate": 0.0002,
      "loss": 1.2884,
      "step": 3910
    },
    {
      "epoch": 0.2381241647430446,
      "grad_norm": 1.3073115348815918,
      "learning_rate": 0.0002,
      "loss": 1.2548,
      "step": 3920
    },
    {
      "epoch": 0.23873162434698092,
      "grad_norm": 1.488996148109436,
      "learning_rate": 0.0002,
      "loss": 1.1664,
      "step": 3930
    },
    {
      "epoch": 0.23933908395091727,
      "grad_norm": 1.3627840280532837,
      "learning_rate": 0.0002,
      "loss": 1.1944,
      "step": 3940
    },
    {
      "epoch": 0.2399465435548536,
      "grad_norm": 1.4558093547821045,
      "learning_rate": 0.0002,
      "loss": 1.1846,
      "step": 3950
    },
    {
      "epoch": 0.24055400315878994,
      "grad_norm": 1.4579048156738281,
      "learning_rate": 0.0002,
      "loss": 1.2228,
      "step": 3960
    },
    {
      "epoch": 0.2411614627627263,
      "grad_norm": 1.248146891593933,
      "learning_rate": 0.0002,
      "loss": 1.2166,
      "step": 3970
    },
    {
      "epoch": 0.24176892236666261,
      "grad_norm": 1.358766794204712,
      "learning_rate": 0.0002,
      "loss": 1.1992,
      "step": 3980
    },
    {
      "epoch": 0.24237638197059896,
      "grad_norm": 1.4376319646835327,
      "learning_rate": 0.0002,
      "loss": 1.2631,
      "step": 3990
    },
    {
      "epoch": 0.2429838415745353,
      "grad_norm": 1.5409590005874634,
      "learning_rate": 0.0002,
      "loss": 1.2061,
      "step": 4000
    },
    {
      "epoch": 0.24359130117847164,
      "grad_norm": 1.5955286026000977,
      "learning_rate": 0.0002,
      "loss": 1.263,
      "step": 4010
    },
    {
      "epoch": 0.24419876078240796,
      "grad_norm": 1.618519902229309,
      "learning_rate": 0.0002,
      "loss": 1.2471,
      "step": 4020
    },
    {
      "epoch": 0.2448062203863443,
      "grad_norm": 1.4988939762115479,
      "learning_rate": 0.0002,
      "loss": 1.2367,
      "step": 4030
    },
    {
      "epoch": 0.24541367999028066,
|
"grad_norm": 1.4525686502456665, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2131, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.24602113959421698, |
|
"grad_norm": 1.5270054340362549, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2156, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.24662859919815333, |
|
"grad_norm": 1.3893275260925293, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1854, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.24723605880208965, |
|
"grad_norm": 1.5147534608840942, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1289, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.247843518406026, |
|
"grad_norm": 1.242431879043579, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2152, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.24845097800996233, |
|
"grad_norm": 1.593482255935669, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2299, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.24905843761389868, |
|
"grad_norm": 1.4139094352722168, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2517, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.24966589721783503, |
|
"grad_norm": 1.5035349130630493, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1643, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.2502733568217714, |
|
"grad_norm": 1.4908943176269531, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2632, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.2508808164257077, |
|
"grad_norm": 1.524407982826233, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2487, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.251488276029644, |
|
"grad_norm": 1.6802865266799927, |
|
"learning_rate": 0.0002, |
|
"loss": 1.324, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.25209573563358034, |
|
"grad_norm": 1.4685097932815552, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2453, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.2527031952375167, |
|
"grad_norm": 1.4716459512710571, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2216, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.25331065484145304, |
|
"grad_norm": 1.6466096639633179, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2248, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.25391811444538936, |
|
"grad_norm": 1.4192179441452026, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1572, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.25452557404932574, |
|
"grad_norm": 1.2112548351287842, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1588, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.25513303365326206, |
|
"grad_norm": 1.4236078262329102, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2202, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.2557404932571984, |
|
"grad_norm": 1.4484710693359375, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2102, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.2563479528611347, |
|
"grad_norm": 1.541420340538025, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2311, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.2569554124650711, |
|
"grad_norm": 1.3980011940002441, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2202, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.2575628720690074, |
|
"grad_norm": 1.7746334075927734, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2233, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.25817033167294373, |
|
"grad_norm": 1.5459847450256348, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1839, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.2587777912768801, |
|
"grad_norm": 1.3682315349578857, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1987, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.25938525088081643, |
|
"grad_norm": 1.296205759048462, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1579, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.25999271048475275, |
|
"grad_norm": 1.3495019674301147, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2146, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.2606001700886891, |
|
"grad_norm": 1.4230709075927734, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2008, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.26120762969262545, |
|
"grad_norm": 1.2394111156463623, |
|
"learning_rate": 0.0002, |
|
"loss": 1.119, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.2618150892965618, |
|
"grad_norm": 1.381503939628601, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2142, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.2624225489004981, |
|
"grad_norm": 1.391729712486267, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1233, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.2630300085044345, |
|
"grad_norm": 1.4309793710708618, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2424, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.2636374681083708, |
|
"grad_norm": 1.658195972442627, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2035, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.2642449277123071, |
|
"grad_norm": 1.5374689102172852, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2021, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.26485238731624344, |
|
"grad_norm": 1.6735734939575195, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1996, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.2654598469201798, |
|
"grad_norm": 1.449633002281189, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1782, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.26606730652411614, |
|
"grad_norm": 1.3424811363220215, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1324, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.26667476612805247, |
|
"grad_norm": 1.6032854318618774, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2865, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.26728222573198884, |
|
"grad_norm": 1.5517349243164062, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1295, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.26788968533592517, |
|
"grad_norm": 1.4976743459701538, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2562, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.2684971449398615, |
|
"grad_norm": 1.352860450744629, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2158, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.26910460454379787, |
|
"grad_norm": 1.5025726556777954, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1519, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.2697120641477342, |
|
"grad_norm": 1.517626404762268, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1947, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.2703195237516705, |
|
"grad_norm": 1.6162688732147217, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2132, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.27092698335560683, |
|
"grad_norm": 1.6288328170776367, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2375, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.2715344429595432, |
|
"grad_norm": 1.4691635370254517, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2204, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.27214190256347953, |
|
"grad_norm": 1.4654793739318848, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2788, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.27274936216741585, |
|
"grad_norm": 1.381338119506836, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1982, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.27335682177135223, |
|
"grad_norm": 1.3719398975372314, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1435, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.27396428137528855, |
|
"grad_norm": 1.4959717988967896, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1868, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.2745717409792249, |
|
"grad_norm": 1.6549965143203735, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2443, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.2751792005831612, |
|
"grad_norm": 1.391357183456421, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2828, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.2757866601870976, |
|
"grad_norm": 1.360917329788208, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1248, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.2763941197910339, |
|
"grad_norm": 1.5073282718658447, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2416, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.2770015793949702, |
|
"grad_norm": 1.4248418807983398, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2142, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.2776090389989066, |
|
"grad_norm": 1.4884262084960938, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2224, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.2782164986028429, |
|
"grad_norm": 1.4659768342971802, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1661, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.27882395820677924, |
|
"grad_norm": 1.578492283821106, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2731, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.27943141781071557, |
|
"grad_norm": 1.4244043827056885, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1922, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.28003887741465194, |
|
"grad_norm": 1.5399266481399536, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2627, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.28064633701858827, |
|
"grad_norm": 1.497768759727478, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2225, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.2812537966225246, |
|
"grad_norm": 1.5408015251159668, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2791, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.28186125622646097, |
|
"grad_norm": 1.5482003688812256, |
|
"learning_rate": 0.0002, |
|
"loss": 1.199, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.2824687158303973, |
|
"grad_norm": 1.5323197841644287, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3071, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.2830761754343336, |
|
"grad_norm": 1.851555585861206, |
|
"learning_rate": 0.0002, |
|
"loss": 1.191, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.28368363503826993, |
|
"grad_norm": 1.4027585983276367, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1856, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.2842910946422063, |
|
"grad_norm": 1.3661413192749023, |
|
"learning_rate": 0.0002, |
|
"loss": 1.193, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.28489855424614263, |
|
"grad_norm": 1.6890923976898193, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2026, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.28550601385007895, |
|
"grad_norm": 1.867311954498291, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2241, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.28611347345401533, |
|
"grad_norm": 1.5613819360733032, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1743, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.28672093305795165, |
|
"grad_norm": 1.5607540607452393, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2392, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.287328392661888, |
|
"grad_norm": 1.5710781812667847, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2192, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.2879358522658243, |
|
"grad_norm": 1.3640292882919312, |
|
"learning_rate": 0.0002, |
|
"loss": 1.145, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.2885433118697607, |
|
"grad_norm": 1.4605162143707275, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1938, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.289150771473697, |
|
"grad_norm": 1.3732244968414307, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1221, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.2897582310776333, |
|
"grad_norm": 1.642148494720459, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2009, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.2903656906815697, |
|
"grad_norm": 1.5201040506362915, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1517, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.290973150285506, |
|
"grad_norm": 1.6133527755737305, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2239, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.29158060988944234, |
|
"grad_norm": 1.2963751554489136, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2808, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.29218806949337867, |
|
"grad_norm": 1.3992732763290405, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1852, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.29279552909731504, |
|
"grad_norm": 1.5011121034622192, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2235, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.29340298870125137, |
|
"grad_norm": 1.5714153051376343, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2199, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.2940104483051877, |
|
"grad_norm": 1.4812499284744263, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1979, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.29461790790912407, |
|
"grad_norm": 1.7770074605941772, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2795, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.2952253675130604, |
|
"grad_norm": 1.3813797235488892, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2694, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.2958328271169967, |
|
"grad_norm": 1.5236833095550537, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2333, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.29644028672093303, |
|
"grad_norm": 1.365063190460205, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2719, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.2970477463248694, |
|
"grad_norm": 1.417883276939392, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1927, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.29765520592880573, |
|
"grad_norm": 1.625367283821106, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1459, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.29826266553274206, |
|
"grad_norm": 1.5433696508407593, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1908, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.29887012513667843, |
|
"grad_norm": 1.6701841354370117, |
|
"learning_rate": 0.0002, |
|
"loss": 1.31, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.29947758474061476, |
|
"grad_norm": 1.650138258934021, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2311, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.3000850443445511, |
|
"grad_norm": 1.2897210121154785, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2156, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.3006925039484874, |
|
"grad_norm": 1.449493169784546, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2398, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.3012999635524238, |
|
"grad_norm": 1.3318798542022705, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2725, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.3019074231563601, |
|
"grad_norm": 1.4735842943191528, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1351, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.3025148827602964, |
|
"grad_norm": 1.459076166152954, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2191, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.3031223423642328, |
|
"grad_norm": 1.2924869060516357, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2068, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.3037298019681691, |
|
"grad_norm": 1.3615398406982422, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2255, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.30433726157210544, |
|
"grad_norm": 1.3613234758377075, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1881, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.30494472117604177, |
|
"grad_norm": 1.3898018598556519, |
|
"learning_rate": 0.0002, |
|
"loss": 1.19, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.30555218077997814, |
|
"grad_norm": 1.5199147462844849, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3389, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.30615964038391447, |
|
"grad_norm": 1.2513540983200073, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2697, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.3067670999878508, |
|
"grad_norm": 1.4729671478271484, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2243, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.30737455959178717, |
|
"grad_norm": 1.490967869758606, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2126, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.3079820191957235, |
|
"grad_norm": 1.77041757106781, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2285, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.3085894787996598, |
|
"grad_norm": 1.528355360031128, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2168, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.30919693840359613, |
|
"grad_norm": 1.2505336999893188, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2107, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.3098043980075325, |
|
"grad_norm": 1.5367778539657593, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3089, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.31041185761146883, |
|
"grad_norm": 1.4673290252685547, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2421, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.31101931721540516, |
|
"grad_norm": 1.20176100730896, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2087, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.31162677681934153, |
|
"grad_norm": 1.36702299118042, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2473, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.31223423642327786, |
|
"grad_norm": 1.311002254486084, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2086, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.3128416960272142, |
|
"grad_norm": 1.5590543746948242, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2918, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.31344915563115056, |
|
"grad_norm": 1.7573869228363037, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2847, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.3140566152350869, |
|
"grad_norm": 1.5680657625198364, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2183, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.3146640748390232, |
|
"grad_norm": 1.5301003456115723, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2745, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.3152715344429595, |
|
"grad_norm": 1.5374345779418945, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2165, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.3158789940468959, |
|
"grad_norm": 1.933231234550476, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2294, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.3164864536508322, |
|
"grad_norm": 1.652951717376709, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1566, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.31709391325476854, |
|
"grad_norm": 1.4390983581542969, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1653, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.3177013728587049, |
|
"grad_norm": 1.4972299337387085, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3016, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.31830883246264124, |
|
"grad_norm": 1.9257149696350098, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2749, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.31891629206657757, |
|
"grad_norm": 1.342387318611145, |
|
"learning_rate": 0.0002, |
|
"loss": 1.21, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.3195237516705139, |
|
"grad_norm": 1.5092623233795166, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1745, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.32013121127445027, |
|
"grad_norm": 1.6232455968856812, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2411, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.3207386708783866, |
|
"grad_norm": 1.6391104459762573, |
|
"learning_rate": 0.0002, |
|
"loss": 1.25, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.3213461304823229, |
|
"grad_norm": 1.4240717887878418, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1992, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.3219535900862593, |
|
"grad_norm": 1.3717237710952759, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1977, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.3225610496901956, |
|
"grad_norm": 1.588884711265564, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2742, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.32316850929413193, |
|
"grad_norm": 1.9746007919311523, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2226, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.32377596889806826, |
|
"grad_norm": 1.44120192527771, |
|
"learning_rate": 0.0002, |
|
"loss": 1.306, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.32438342850200463, |
|
"grad_norm": 1.399114727973938, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1719, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.32499088810594096, |
|
"grad_norm": 1.334587574005127, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2752, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.3255983477098773, |
|
"grad_norm": 1.7185956239700317, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2632, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.32620580731381366, |
|
"grad_norm": 1.2144469022750854, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2716, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.32681326691775, |
|
"grad_norm": 2.094770908355713, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2631, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.3274207265216863, |
|
"grad_norm": 1.2292505502700806, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2156, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.3280281861256226, |
|
"grad_norm": 1.3732002973556519, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2069, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.328635645729559, |
|
"grad_norm": 1.239058494567871, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2366, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.3292431053334953, |
|
"grad_norm": 1.3541849851608276, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2094, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.32985056493743165, |
|
"grad_norm": 1.4627240896224976, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1718, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.330458024541368, |
|
"grad_norm": 1.4230258464813232, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2226, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.33106548414530435, |
|
"grad_norm": 1.561559796333313, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1937, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.33167294374924067, |
|
"grad_norm": 1.7308835983276367, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2314, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.332280403353177, |
|
"grad_norm": 1.6552231311798096, |
|
"learning_rate": 0.0002, |
|
"loss": 1.174, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.33288786295711337, |
|
"grad_norm": 1.50615656375885, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2601, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.3334953225610497, |
|
"grad_norm": 1.470361590385437, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2416, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.334102782164986, |
|
"grad_norm": 1.4663327932357788, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1818, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.3347102417689224, |
|
"grad_norm": 1.7949563264846802, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2176, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.3353177013728587, |
|
"grad_norm": 1.6819008588790894, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2298, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.33592516097679503, |
|
"grad_norm": 1.433971881866455, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2566, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.33653262058073136, |
|
"grad_norm": 1.287117838859558, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1138, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.33714008018466773, |
|
"grad_norm": 1.4071489572525024, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2651, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.33774753978860406, |
|
"grad_norm": 1.5105435848236084, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2409, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.3383549993925404, |
|
"grad_norm": 1.683268427848816, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1694, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.33896245899647676, |
|
"grad_norm": 1.4265189170837402, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2229, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.3395699186004131, |
|
"grad_norm": 1.526524543762207, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2259, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.3401773782043494, |
|
"grad_norm": 1.2876828908920288, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2253, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.3407848378082857, |
|
"grad_norm": 1.3172236680984497, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2494, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.3413922974122221, |
|
"grad_norm": 1.8559517860412598, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1937, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.3419997570161584, |
|
"grad_norm": 1.3766885995864868, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1814, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.34260721662009475, |
|
"grad_norm": 1.3915786743164062, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2039, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.3432146762240311, |
|
"grad_norm": 1.6250295639038086, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1728, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.34382213582796745, |
|
"grad_norm": 1.2863385677337646, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1466, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.34442959543190377, |
|
"grad_norm": 1.5022464990615845, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1795, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.3450370550358401, |
|
"grad_norm": 1.4029043912887573, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3014, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.34564451463977647, |
|
"grad_norm": 1.629833698272705, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2463, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.3462519742437128, |
|
"grad_norm": 1.4233486652374268, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2448, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.3468594338476491, |
|
"grad_norm": 1.4671995639801025, |
|
"learning_rate": 0.0002, |
|
"loss": 1.255, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.3474668934515855, |
|
"grad_norm": 1.5535781383514404, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1663, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.3480743530555218, |
|
"grad_norm": 1.6772747039794922, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1794, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.34868181265945813, |
|
"grad_norm": 1.595218539237976, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1627, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.34928927226339446, |
|
"grad_norm": 1.3816550970077515, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2398, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.34989673186733083, |
|
"grad_norm": 1.3850511312484741, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3081, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.35050419147126716, |
|
"grad_norm": 1.6395469903945923, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2571, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.3511116510752035, |
|
"grad_norm": 1.5687739849090576, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2466, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.35171911067913986, |
|
"grad_norm": 1.371972918510437, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2079, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.3523265702830762, |
|
"grad_norm": 1.355375051498413, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1537, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.3529340298870125, |
|
"grad_norm": 1.5896039009094238, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1209, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.3535414894909489, |
|
"grad_norm": 1.5453897714614868, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2603, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.3541489490948852, |
|
"grad_norm": 1.5207583904266357, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1796, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.3547564086988215, |
|
"grad_norm": 2.068467855453491, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3053, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.35536386830275785, |
|
"grad_norm": 1.3215420246124268, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1777, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.3559713279066942, |
|
"grad_norm": 1.4459247589111328, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1743, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.35657878751063055, |
|
"grad_norm": 1.3044105768203735, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1718, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.35718624711456687, |
|
"grad_norm": 1.6035641431808472, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1955, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.35779370671850325, |
|
"grad_norm": 1.5534878969192505, |
|
"learning_rate": 0.0002, |
|
"loss": 1.22, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.35840116632243957, |
|
"grad_norm": 1.379349708557129, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2128, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.3590086259263759, |
|
"grad_norm": 1.5389310121536255, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1994, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.3596160855303122, |
|
"grad_norm": 1.3085976839065552, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2147, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.3602235451342486, |
|
"grad_norm": 1.714221477508545, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2167, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.3608310047381849, |
|
"grad_norm": 1.374284267425537, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2322, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.36143846434212124, |
|
"grad_norm": 1.2360552549362183, |
|
"learning_rate": 0.0002, |
|
"loss": 1.17, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.3620459239460576, |
|
"grad_norm": 1.454513669013977, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1626, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.36265338354999394, |
|
"grad_norm": 1.5351463556289673, |
|
"learning_rate": 0.0002, |
|
"loss": 1.261, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.36326084315393026, |
|
"grad_norm": 1.5355708599090576, |
|
"learning_rate": 0.0002, |
|
"loss": 1.205, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.3638683027578666, |
|
"grad_norm": 1.7666850090026855, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1729, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.36447576236180296, |
|
"grad_norm": 1.2283000946044922, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2067, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.3650832219657393, |
|
"grad_norm": 1.4924534559249878, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2551, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.3656906815696756, |
|
"grad_norm": 1.5576777458190918, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2186, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.366298141173612, |
|
"grad_norm": 1.5818428993225098, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1631, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.3669056007775483, |
|
"grad_norm": 1.3564088344573975, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2092, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.3675130603814846, |
|
"grad_norm": 1.619210958480835, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2246, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.36812051998542095, |
|
"grad_norm": 1.304306149482727, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1744, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.3687279795893573, |
|
"grad_norm": 1.3461626768112183, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2008, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.36933543919329365, |
|
"grad_norm": 1.7437419891357422, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2251, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.36994289879722997, |
|
"grad_norm": 1.3706979751586914, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1056, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.37055035840116635, |
|
"grad_norm": 1.5688164234161377, |
|
"learning_rate": 0.0002, |
|
"loss": 1.195, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.37115781800510267, |
|
"grad_norm": 1.3877938985824585, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1887, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.371765277609039, |
|
"grad_norm": 1.4119068384170532, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1873, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.3723727372129753, |
|
"grad_norm": 1.506616234779358, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1423, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.3729801968169117, |
|
"grad_norm": 1.3528050184249878, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1749, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.373587656420848, |
|
"grad_norm": 1.3234665393829346, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1619, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.37419511602478434, |
|
"grad_norm": 1.3826556205749512, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2515, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.3748025756287207, |
|
"grad_norm": 1.4598699808120728, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2635, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.37541003523265704, |
|
"grad_norm": 1.496317982673645, |
|
"learning_rate": 0.0002, |
|
"loss": 1.22, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.37601749483659336, |
|
"grad_norm": 1.560288667678833, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2147, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.3766249544405297, |
|
"grad_norm": 1.411517858505249, |
|
"learning_rate": 0.0002, |
|
"loss": 1.127, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.37723241404446606, |
|
"grad_norm": 1.7069580554962158, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1665, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.3778398736484024, |
|
"grad_norm": 1.5629972219467163, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1582, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.3784473332523387, |
|
"grad_norm": 1.464280128479004, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2632, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.3790547928562751, |
|
"grad_norm": 1.2921830415725708, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0899, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.3796622524602114, |
|
"grad_norm": 1.8416919708251953, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2606, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.3802697120641477, |
|
"grad_norm": 1.4949452877044678, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2811, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.38087717166808405, |
|
"grad_norm": 1.291973352432251, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1879, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.3814846312720204, |
|
"grad_norm": 1.2897945642471313, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1769, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.38209209087595675, |
|
"grad_norm": 1.3945434093475342, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1723, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.38269955047989307, |
|
"grad_norm": 1.7047033309936523, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2142, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.38330701008382945, |
|
"grad_norm": 1.5957000255584717, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2007, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.38391446968776577, |
|
"grad_norm": 1.7057619094848633, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2522, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.3845219292917021, |
|
"grad_norm": 1.510622501373291, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2798, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.3851293888956384, |
|
"grad_norm": 1.5083156824111938, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1573, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.3857368484995748, |
|
"grad_norm": 1.5803555250167847, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1618, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.3863443081035111, |
|
"grad_norm": 1.6500353813171387, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2375, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.38695176770744744, |
|
"grad_norm": 1.4603452682495117, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2284, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.3875592273113838, |
|
"grad_norm": 1.45824134349823, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2223, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.38816668691532014, |
|
"grad_norm": 1.4440996646881104, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2065, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.38877414651925646, |
|
"grad_norm": 1.4885029792785645, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2125, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.3893816061231928, |
|
"grad_norm": 1.4131464958190918, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2895, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.38998906572712916, |
|
"grad_norm": 1.508143663406372, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2626, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.3905965253310655, |
|
"grad_norm": 1.6027228832244873, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2433, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.3912039849350018, |
|
"grad_norm": 1.2766356468200684, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1961, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.3918114445389382, |
|
"grad_norm": 1.2684210538864136, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1421, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.3924189041428745, |
|
"grad_norm": 1.5941156148910522, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1218, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.3930263637468108, |
|
"grad_norm": 1.452562689781189, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1033, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.3936338233507472, |
|
"grad_norm": 1.5934187173843384, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1658, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.3942412829546835, |
|
"grad_norm": 1.5304055213928223, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2962, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.39484874255861985, |
|
"grad_norm": 1.5306092500686646, |
|
"learning_rate": 0.0002, |
|
"loss": 1.277, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.39545620216255617, |
|
"grad_norm": 1.4074877500534058, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2062, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.39606366176649255, |
|
"grad_norm": 1.5896267890930176, |
|
"learning_rate": 0.0002, |
|
"loss": 1.151, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.39667112137042887, |
|
"grad_norm": 1.3772437572479248, |
|
"learning_rate": 0.0002, |
|
"loss": 1.152, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.3972785809743652, |
|
"grad_norm": 1.3611092567443848, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2902, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.39788604057830157, |
|
"grad_norm": 1.278731107711792, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2716, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.3984935001822379, |
|
"grad_norm": 1.9447767734527588, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2449, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.3991009597861742, |
|
"grad_norm": 1.4802900552749634, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1776, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.39970841939011054, |
|
"grad_norm": 1.6410369873046875, |
|
"learning_rate": 0.0002, |
|
"loss": 1.255, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.4003158789940469, |
|
"grad_norm": 1.5125056505203247, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1849, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.40092333859798324, |
|
"grad_norm": 1.67632257938385, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2153, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.40153079820191956, |
|
"grad_norm": 1.3930867910385132, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1671, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.40213825780585594, |
|
"grad_norm": 1.489687204360962, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1823, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.40274571740979226, |
|
"grad_norm": 1.4691733121871948, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1653, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.4033531770137286, |
|
"grad_norm": 1.4824140071868896, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1729, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.4039606366176649, |
|
"grad_norm": 1.3477215766906738, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2202, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.4045680962216013, |
|
"grad_norm": 1.489844560623169, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1715, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.4051755558255376, |
|
"grad_norm": 1.4528323411941528, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2474, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.4057830154294739, |
|
"grad_norm": 1.4013316631317139, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1461, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.4063904750334103, |
|
"grad_norm": 1.2306127548217773, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3095, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.4069979346373466, |
|
"grad_norm": 1.4853254556655884, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2001, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.40760539424128295, |
|
"grad_norm": 1.5329333543777466, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2045, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.40821285384521927, |
|
"grad_norm": 1.3074980974197388, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1843, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.40882031344915565, |
|
"grad_norm": 1.3135238885879517, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1981, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.40942777305309197, |
|
"grad_norm": 1.5675169229507446, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2781, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.4100352326570283, |
|
"grad_norm": 1.4353513717651367, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1986, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.41064269226096467, |
|
"grad_norm": 1.6439242362976074, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1873, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.411250151864901, |
|
"grad_norm": 1.4382151365280151, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2623, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.4118576114688373, |
|
"grad_norm": 1.8752695322036743, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2171, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.41246507107277364, |
|
"grad_norm": 1.2991536855697632, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1994, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.41307253067671, |
|
"grad_norm": 1.4035308361053467, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1887, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.41367999028064634, |
|
"grad_norm": 1.479036569595337, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2312, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.41428744988458266, |
|
"grad_norm": 1.43748140335083, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1587, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.41489490948851904, |
|
"grad_norm": 1.4034409523010254, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1746, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.41550236909245536, |
|
"grad_norm": 1.6533730030059814, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1636, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.4161098286963917, |
|
"grad_norm": 1.5264071226119995, |
|
"learning_rate": 0.0002, |
|
"loss": 1.213, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.416717288300328, |
|
"grad_norm": 1.3889992237091064, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1773, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.4173247479042644, |
|
"grad_norm": 1.534353494644165, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1476, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.4179322075082007, |
|
"grad_norm": 1.8379203081130981, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2032, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.418539667112137, |
|
"grad_norm": 1.385796070098877, |
|
"learning_rate": 0.0002, |
|
"loss": 1.162, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.4191471267160734, |
|
"grad_norm": 1.5238981246948242, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2319, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.4197545863200097, |
|
"grad_norm": 1.5223041772842407, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1873, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.42036204592394605, |
|
"grad_norm": 1.7702144384384155, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2035, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.42096950552788237, |
|
"grad_norm": 1.6968584060668945, |
|
"learning_rate": 0.0002, |
|
"loss": 1.175, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.42157696513181875, |
|
"grad_norm": 1.3365684747695923, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2121, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.42218442473575507, |
|
"grad_norm": 1.2800488471984863, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2039, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.4227918843396914, |
|
"grad_norm": 1.4685325622558594, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2429, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.42339934394362777, |
|
"grad_norm": 1.3646459579467773, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1854, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.4240068035475641, |
|
"grad_norm": 1.4553136825561523, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2194, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.4246142631515004, |
|
"grad_norm": 1.6624222993850708, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2701, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.42522172275543674, |
|
"grad_norm": 1.4376602172851562, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1946, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.4258291823593731, |
|
"grad_norm": 1.4615241289138794, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2371, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.42643664196330944, |
|
"grad_norm": 1.3993204832077026, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2215, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.42704410156724576, |
|
"grad_norm": 1.3323475122451782, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1923, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.42765156117118214, |
|
"grad_norm": 1.5600262880325317, |
|
"learning_rate": 0.0002, |
|
"loss": 1.211, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.42825902077511846, |
|
"grad_norm": 1.491089105606079, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2394, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.4288664803790548, |
|
"grad_norm": 1.4473880529403687, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2907, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.4294739399829911, |
|
"grad_norm": 1.6947537660598755, |
|
"learning_rate": 0.0002, |
|
"loss": 1.269, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.4300813995869275, |
|
"grad_norm": 1.3262157440185547, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2718, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.4306888591908638, |
|
"grad_norm": 1.3940998315811157, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2493, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.4312963187948001, |
|
"grad_norm": 1.6804521083831787, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1106, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.4319037783987365, |
|
"grad_norm": 1.8600741624832153, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2041, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.4325112380026728, |
|
"grad_norm": 1.4588673114776611, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1889, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.43311869760660915, |
|
"grad_norm": 1.4329715967178345, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2104, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.43372615721054547, |
|
"grad_norm": 1.4520699977874756, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1788, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.43433361681448185, |
|
"grad_norm": 1.4037648439407349, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2121, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.43494107641841817, |
|
"grad_norm": 1.6112241744995117, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1776, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.4355485360223545, |
|
"grad_norm": 1.3666143417358398, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1307, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.43615599562629087, |
|
"grad_norm": 1.8091779947280884, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2301, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.4367634552302272, |
|
"grad_norm": 1.394896149635315, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2655, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.4373709148341635, |
|
"grad_norm": 1.6497581005096436, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2711, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.4379783744380999, |
|
"grad_norm": 1.5435038805007935, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2164, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.4385858340420362, |
|
"grad_norm": 1.4208731651306152, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2301, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.43919329364597254, |
|
"grad_norm": 1.5722650289535522, |
|
"learning_rate": 0.0002, |
|
"loss": 1.183, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.43980075324990886, |
|
"grad_norm": 1.6592830419540405, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1618, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.44040821285384524, |
|
"grad_norm": 1.481824517250061, |
|
"learning_rate": 0.0002, |
|
"loss": 1.165, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.44101567245778156, |
|
"grad_norm": 1.4777569770812988, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2317, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.4416231320617179, |
|
"grad_norm": 1.5149354934692383, |
|
"learning_rate": 0.0002, |
|
"loss": 1.175, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.44223059166565426, |
|
"grad_norm": 1.3621138334274292, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0801, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.4428380512695906, |
|
"grad_norm": 1.7920101881027222, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2371, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.4434455108735269, |
|
"grad_norm": 1.488234043121338, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1294, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.4440529704774632, |
|
"grad_norm": 1.4184207916259766, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1769, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.4446604300813996, |
|
"grad_norm": 1.6217948198318481, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2418, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.4452678896853359, |
|
"grad_norm": 1.5252745151519775, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2096, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.44587534928927225, |
|
"grad_norm": 1.5417364835739136, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1547, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.4464828088932086, |
|
"grad_norm": 1.6695308685302734, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2689, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.44709026849714495, |
|
"grad_norm": 1.2754809856414795, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1707, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.44769772810108127, |
|
"grad_norm": 1.3510041236877441, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1905, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.4483051877050176, |
|
"grad_norm": 1.702237606048584, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2166, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.44891264730895397, |
|
"grad_norm": 1.3573112487792969, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2114, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.4495201069128903, |
|
"grad_norm": 1.4572347402572632, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2635, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.4501275665168266, |
|
"grad_norm": 1.5683256387710571, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1404, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.450735026120763, |
|
"grad_norm": 1.671855092048645, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1997, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.4513424857246993, |
|
"grad_norm": 1.6509655714035034, |
|
"learning_rate": 0.0002, |
|
"loss": 1.234, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.45194994532863564, |
|
"grad_norm": 1.8359466791152954, |
|
"learning_rate": 0.0002, |
|
"loss": 1.259, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.45255740493257196, |
|
"grad_norm": 1.7122342586517334, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1588, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.45316486453650834, |
|
"grad_norm": 1.8128470182418823, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2071, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.45377232414044466, |
|
"grad_norm": 1.2913196086883545, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2202, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.454379783744381, |
|
"grad_norm": 1.450271487236023, |
|
"learning_rate": 0.0002, |
|
"loss": 1.079, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.45498724334831736, |
|
"grad_norm": 1.1549795866012573, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2805, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.4555947029522537, |
|
"grad_norm": 1.5973620414733887, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2319, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.45620216255619, |
|
"grad_norm": 1.6101380586624146, |
|
"learning_rate": 0.0002, |
|
"loss": 1.25, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.4568096221601263, |
|
"grad_norm": 1.511681318283081, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1875, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.4574170817640627, |
|
"grad_norm": 1.3775086402893066, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1924, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.458024541367999, |
|
"grad_norm": 1.4383291006088257, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1744, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.45863200097193535, |
|
"grad_norm": 1.8540087938308716, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2605, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.4592394605758717, |
|
"grad_norm": 1.7484431266784668, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1746, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.45984692017980805, |
|
"grad_norm": 1.532613754272461, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1702, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.46045437978374437, |
|
"grad_norm": 1.348934292793274, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2006, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.4610618393876807, |
|
"grad_norm": 1.3784677982330322, |
|
"learning_rate": 0.0002, |
|
"loss": 1.18, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.46166929899161707, |
|
"grad_norm": 1.3525645732879639, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0906, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.4622767585955534, |
|
"grad_norm": 1.6208430528640747, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1865, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.4628842181994897, |
|
"grad_norm": 1.4005343914031982, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2291, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.4634916778034261, |
|
"grad_norm": 1.6267120838165283, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2027, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.4640991374073624, |
|
"grad_norm": 1.5837438106536865, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2381, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.46470659701129874, |
|
"grad_norm": 1.5585278272628784, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1726, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.46531405661523506, |
|
"grad_norm": 1.585730791091919, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1708, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.46592151621917144, |
|
"grad_norm": 1.5836211442947388, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1671, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.46652897582310776, |
|
"grad_norm": 1.6574169397354126, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2585, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.4671364354270441, |
|
"grad_norm": 1.5859861373901367, |
|
"learning_rate": 0.0002, |
|
"loss": 1.228, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.46774389503098046, |
|
"grad_norm": 1.4105859994888306, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2245, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.4683513546349168, |
|
"grad_norm": 1.553499460220337, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2034, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.4689588142388531, |
|
"grad_norm": 1.5208845138549805, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1334, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.46956627384278943, |
|
"grad_norm": 1.4192719459533691, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2398, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.4701737334467258, |
|
"grad_norm": 1.2706866264343262, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1827, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.47078119305066213, |
|
"grad_norm": 1.5176726579666138, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1615, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.47138865265459845, |
|
"grad_norm": 1.45756196975708, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1666, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.47199611225853483, |
|
"grad_norm": 1.2748565673828125, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2276, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.47260357186247115, |
|
"grad_norm": 1.5021861791610718, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1292, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.4732110314664075, |
|
"grad_norm": 1.4229892492294312, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1646, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.4738184910703438, |
|
"grad_norm": 1.4374492168426514, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3199, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.4744259506742802, |
|
"grad_norm": 1.5280470848083496, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2042, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.4750334102782165, |
|
"grad_norm": 1.6115370988845825, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2026, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.4756408698821528, |
|
"grad_norm": 1.3650151491165161, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1743, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.4762483294860892, |
|
"grad_norm": 1.3961408138275146, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1814, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.4768557890900255, |
|
"grad_norm": 1.5285776853561401, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1876, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.47746324869396184, |
|
"grad_norm": 1.5390294790267944, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2132, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.4780707082978982, |
|
"grad_norm": 1.9355554580688477, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2507, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.47867816790183454, |
|
"grad_norm": 1.5551636219024658, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1708, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.47928562750577086, |
|
"grad_norm": 1.2486780881881714, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2163, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.4798930871097072, |
|
"grad_norm": 1.60591459274292, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1202, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.48050054671364356, |
|
"grad_norm": 1.3844667673110962, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2513, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.4811080063175799, |
|
"grad_norm": 1.56134831905365, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1703, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.4817154659215162, |
|
"grad_norm": 1.6113072633743286, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2161, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.4823229255254526, |
|
"grad_norm": 1.5860167741775513, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1448, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.4829303851293889, |
|
"grad_norm": 1.349814772605896, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1548, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.48353784473332523, |
|
"grad_norm": 1.5404295921325684, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2584, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.48414530433726155, |
|
"grad_norm": 1.6452714204788208, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2712, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.48475276394119793, |
|
"grad_norm": 1.7067540884017944, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1459, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.48536022354513425, |
|
"grad_norm": 1.4783881902694702, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2514, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.4859676831490706, |
|
"grad_norm": 1.422806739807129, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2685, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.48657514275300695, |
|
"grad_norm": 1.329826831817627, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1713, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.4871826023569433, |
|
"grad_norm": 1.4270880222320557, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2034, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.4877900619608796, |
|
"grad_norm": 1.260317325592041, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1879, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.4883975215648159, |
|
"grad_norm": 1.5208438634872437, |
|
"learning_rate": 0.0002, |
|
"loss": 1.289, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.4890049811687523, |
|
"grad_norm": 1.396694302558899, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1453, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.4896124407726886, |
|
"grad_norm": 1.7723726034164429, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2369, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.49021990037662494, |
|
"grad_norm": 1.485266923904419, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1721, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.4908273599805613, |
|
"grad_norm": 1.3336842060089111, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1916, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.49143481958449764, |
|
"grad_norm": 1.372721552848816, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2192, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.49204227918843396, |
|
"grad_norm": 1.5978894233703613, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1239, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.4926497387923703, |
|
"grad_norm": 1.6521567106246948, |
|
"learning_rate": 0.0002, |
|
"loss": 1.189, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.49325719839630666, |
|
"grad_norm": 1.4695961475372314, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1799, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.493864658000243, |
|
"grad_norm": 1.6711095571517944, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2145, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.4944721176041793, |
|
"grad_norm": 1.410640001296997, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2466, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.4950795772081157, |
|
"grad_norm": 1.5452176332473755, |
|
"learning_rate": 0.0002, |
|
"loss": 1.257, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.495687036812052, |
|
"grad_norm": 1.5476586818695068, |
|
"learning_rate": 0.0002, |
|
"loss": 1.247, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.49629449641598833, |
|
"grad_norm": 1.428282380104065, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2456, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.49690195601992465, |
|
"grad_norm": 1.4470287561416626, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1985, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.49750941562386103, |
|
"grad_norm": 1.3442034721374512, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1742, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.49811687522779735, |
|
"grad_norm": 1.4205583333969116, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1945, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.4987243348317337, |
|
"grad_norm": 1.732121467590332, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1702, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.49933179443567005, |
|
"grad_norm": 2.0065855979919434, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2247, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.4999392540396064, |
|
"grad_norm": 1.3520867824554443, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2432, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.5005467136435428, |
|
"grad_norm": 1.6998947858810425, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1556, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.501154173247479, |
|
"grad_norm": 1.4172024726867676, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1321, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.5017616328514154, |
|
"grad_norm": 1.5547268390655518, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2796, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.5023690924553518, |
|
"grad_norm": 2.0130136013031006, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1534, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.502976552059288, |
|
"grad_norm": 1.6734105348587036, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1958, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.5035840116632244, |
|
"grad_norm": 1.3117741346359253, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2077, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.5041914712671607, |
|
"grad_norm": 1.6821929216384888, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2818, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.5047989308710971, |
|
"grad_norm": 1.4614527225494385, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1808, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.5054063904750334, |
|
"grad_norm": 1.437494158744812, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2735, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.5060138500789697, |
|
"grad_norm": 1.562290906906128, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2962, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.5066213096829061, |
|
"grad_norm": 1.5021467208862305, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1741, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.5072287692868425, |
|
"grad_norm": 1.4091651439666748, |
|
"learning_rate": 0.0002, |
|
"loss": 1.181, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.5078362288907787, |
|
"grad_norm": 1.486215353012085, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2157, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.5084436884947151, |
|
"grad_norm": 1.655120849609375, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2012, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.5090511480986515, |
|
"grad_norm": 1.5554999113082886, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1437, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.5096586077025878, |
|
"grad_norm": 1.3103996515274048, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2077, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.5102660673065241, |
|
"grad_norm": 1.3018100261688232, |
|
"learning_rate": 0.0002, |
|
"loss": 1.199, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.5108735269104605, |
|
"grad_norm": 1.3071420192718506, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2443, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.5114809865143968, |
|
"grad_norm": 1.6154550313949585, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1804, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.5120884461183332, |
|
"grad_norm": 1.4893248081207275, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1531, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.5126959057222694, |
|
"grad_norm": 1.3449759483337402, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2168, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.5133033653262058, |
|
"grad_norm": 1.3129184246063232, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1838, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.5139108249301422, |
|
"grad_norm": 1.427881121635437, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1849, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.5145182845340784, |
|
"grad_norm": 1.4069163799285889, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1692, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.5151257441380148, |
|
"grad_norm": 1.299394965171814, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1636, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.5157332037419512, |
|
"grad_norm": 1.3414911031723022, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1956, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.5163406633458875, |
|
"grad_norm": 1.4938147068023682, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1744, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.5169481229498238, |
|
"grad_norm": 1.7157621383666992, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1168, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.5175555825537602, |
|
"grad_norm": 1.4140446186065674, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1594, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.5181630421576965, |
|
"grad_norm": 1.7376991510391235, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1591, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.5187705017616329, |
|
"grad_norm": 1.7699772119522095, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0878, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.5193779613655692, |
|
"grad_norm": 1.2922927141189575, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1929, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.5199854209695055, |
|
"grad_norm": 1.6389902830123901, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2001, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.5205928805734419, |
|
"grad_norm": 1.4543099403381348, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2965, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.5212003401773782, |
|
"grad_norm": 1.6899205446243286, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2332, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.5218077997813145, |
|
"grad_norm": 1.3092981576919556, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2341, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.5224152593852509, |
|
"grad_norm": 1.3344212770462036, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1844, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.5230227189891872, |
|
"grad_norm": 1.2653930187225342, |
|
"learning_rate": 0.0002, |
|
"loss": 1.102, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.5236301785931236, |
|
"grad_norm": 1.3919548988342285, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1462, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.5242376381970599, |
|
"grad_norm": 1.671441674232483, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1795, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.5248450978009962, |
|
"grad_norm": 1.48072350025177, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2004, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.5254525574049326, |
|
"grad_norm": 1.1946160793304443, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2139, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.526060017008869, |
|
"grad_norm": 1.396958827972412, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2258, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.5266674766128052, |
|
"grad_norm": 1.610670566558838, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1471, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.5272749362167416, |
|
"grad_norm": 1.920641541481018, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2322, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.527882395820678, |
|
"grad_norm": 1.5889919996261597, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1512, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.5284898554246142, |
|
"grad_norm": 1.353811264038086, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1774, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.5290973150285506, |
|
"grad_norm": 1.4460562467575073, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1651, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.5297047746324869, |
|
"grad_norm": 1.5538369417190552, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1919, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.5303122342364233, |
|
"grad_norm": 1.3139142990112305, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1355, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.5309196938403596, |
|
"grad_norm": 1.3838707208633423, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1766, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.5315271534442959, |
|
"grad_norm": 1.8593472242355347, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3266, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.5321346130482323, |
|
"grad_norm": 1.32051420211792, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2102, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.5327420726521687, |
|
"grad_norm": 1.7253473997116089, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2025, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.5333495322561049, |
|
"grad_norm": 1.6720499992370605, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1767, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.5339569918600413, |
|
"grad_norm": 1.5546365976333618, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1597, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.5345644514639777, |
|
"grad_norm": 1.476245403289795, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1658, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.535171911067914, |
|
"grad_norm": 1.9548064470291138, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1858, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.5357793706718503, |
|
"grad_norm": 1.7661584615707397, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2401, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.5363868302757867, |
|
"grad_norm": 1.3066457509994507, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2359, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.536994289879723, |
|
"grad_norm": 1.6530494689941406, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1856, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.5376017494836594, |
|
"grad_norm": 1.752524733543396, |
|
"learning_rate": 0.0002, |
|
"loss": 1.194, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.5382092090875957, |
|
"grad_norm": 1.2837939262390137, |
|
"learning_rate": 0.0002, |
|
"loss": 1.171, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.538816668691532, |
|
"grad_norm": 1.5185567140579224, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1658, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.5394241282954684, |
|
"grad_norm": 1.5040532350540161, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2165, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.5400315878994046, |
|
"grad_norm": 1.4181594848632812, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2066, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.540639047503341, |
|
"grad_norm": 1.5156562328338623, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1431, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.5412465071072774, |
|
"grad_norm": 1.576412558555603, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2022, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.5418539667112137, |
|
"grad_norm": 1.4076834917068481, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1449, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.54246142631515, |
|
"grad_norm": 1.4449139833450317, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1804, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.5430688859190864, |
|
"grad_norm": 1.3994479179382324, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2118, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.5436763455230227, |
|
"grad_norm": 1.4858759641647339, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2036, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.5442838051269591, |
|
"grad_norm": 1.493045687675476, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2006, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.5448912647308954, |
|
"grad_norm": 1.4153680801391602, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1361, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.5454987243348317, |
|
"grad_norm": 1.421522617340088, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2134, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.5461061839387681, |
|
"grad_norm": 1.525323748588562, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2022, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.5467136435427045, |
|
"grad_norm": 1.6441048383712769, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2558, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.5473211031466407, |
|
"grad_norm": 1.4588571786880493, |
|
"learning_rate": 0.0002, |
|
"loss": 1.232, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 0.5479285627505771, |
|
"grad_norm": 1.3689075708389282, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1705, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 0.5485360223545134, |
|
"grad_norm": 1.4682143926620483, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1993, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.5491434819584498, |
|
"grad_norm": 1.4524134397506714, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2297, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 0.5497509415623861, |
|
"grad_norm": 1.4454007148742676, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2216, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 0.5503584011663224, |
|
"grad_norm": 1.9346940517425537, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2194, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.5509658607702588, |
|
"grad_norm": 2.2555973529815674, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1502, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 0.5515733203741952, |
|
"grad_norm": 1.398390531539917, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1898, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 0.5521807799781314, |
|
"grad_norm": 1.600623607635498, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3627, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.5527882395820678, |
|
"grad_norm": 1.5447672605514526, |
|
"learning_rate": 0.0002, |
|
"loss": 1.13, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 0.5533956991860042, |
|
"grad_norm": 1.5209197998046875, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1547, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 0.5540031587899404, |
|
"grad_norm": 1.3905166387557983, |
|
"learning_rate": 0.0002, |
|
"loss": 1.163, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.5546106183938768, |
|
"grad_norm": 1.5151902437210083, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1953, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 0.5552180779978132, |
|
"grad_norm": 1.2856061458587646, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1521, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 0.5558255376017495, |
|
"grad_norm": 1.4850786924362183, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2062, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.5564329972056858, |
|
"grad_norm": 1.5212926864624023, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2404, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 0.5570404568096221, |
|
"grad_norm": 1.323756456375122, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1079, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 0.5576479164135585, |
|
"grad_norm": 1.645654559135437, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1951, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.5582553760174949, |
|
"grad_norm": 1.4693900346755981, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2511, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 0.5588628356214311, |
|
"grad_norm": 1.5780327320098877, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2349, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 0.5594702952253675, |
|
"grad_norm": 1.5587654113769531, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2189, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.5600777548293039, |
|
"grad_norm": 1.4344000816345215, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2613, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 0.5606852144332402, |
|
"grad_norm": 1.2892564535140991, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1674, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 0.5612926740371765, |
|
"grad_norm": 1.510473608970642, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1735, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.5619001336411129, |
|
"grad_norm": 1.5466214418411255, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1557, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 0.5625075932450492, |
|
"grad_norm": 1.3267711400985718, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1495, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 0.5631150528489856, |
|
"grad_norm": 1.500229835510254, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1704, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.5637225124529219, |
|
"grad_norm": 1.4810044765472412, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2374, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 0.5643299720568582, |
|
"grad_norm": 1.819751501083374, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1665, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 0.5649374316607946, |
|
"grad_norm": 1.721070647239685, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2074, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.5655448912647308, |
|
"grad_norm": 1.28892183303833, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2075, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 0.5661523508686672, |
|
"grad_norm": 1.4271416664123535, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1722, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 0.5667598104726036, |
|
"grad_norm": 1.4250295162200928, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2099, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.5673672700765399, |
|
"grad_norm": 1.3933831453323364, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2214, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 0.5679747296804762, |
|
"grad_norm": 1.4927361011505127, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1964, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 0.5685821892844126, |
|
"grad_norm": 1.4718621969223022, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2009, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.5691896488883489, |
|
"grad_norm": 1.4115561246871948, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1992, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 0.5697971084922853, |
|
"grad_norm": 1.333713173866272, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1461, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 0.5704045680962216, |
|
"grad_norm": 1.387205719947815, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1278, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.5710120277001579, |
|
"grad_norm": 1.4636300802230835, |
|
"learning_rate": 0.0002, |
|
"loss": 1.122, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 0.5716194873040943, |
|
"grad_norm": 1.4931930303573608, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1957, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 0.5722269469080307, |
|
"grad_norm": 1.539544939994812, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1724, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.5728344065119669, |
|
"grad_norm": 1.4466699361801147, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1914, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 0.5734418661159033, |
|
"grad_norm": 1.485856056213379, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1715, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 0.5740493257198396, |
|
"grad_norm": 1.6009708642959595, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2944, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.574656785323776, |
|
"grad_norm": 1.5014270544052124, |
|
"learning_rate": 0.0002, |
|
"loss": 1.183, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 0.5752642449277123, |
|
"grad_norm": 1.5157901048660278, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1828, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 0.5758717045316486, |
|
"grad_norm": 1.2542632818222046, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1653, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 0.576479164135585, |
|
"grad_norm": 1.5954726934432983, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3099, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 0.5770866237395214, |
|
"grad_norm": 1.5456738471984863, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2264, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.5776940833434576, |
|
"grad_norm": 1.610556721687317, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1486, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 0.578301542947394, |
|
"grad_norm": 1.5603747367858887, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2336, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 0.5789090025513304, |
|
"grad_norm": 1.6448475122451782, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2163, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 0.5795164621552666, |
|
"grad_norm": 1.440741777420044, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3541, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 0.580123921759203, |
|
"grad_norm": 1.4616281986236572, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2192, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 0.5807313813631394, |
|
"grad_norm": 1.7298589944839478, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2013, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 0.5813388409670757, |
|
"grad_norm": 1.6787705421447754, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1879, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 0.581946300571012, |
|
"grad_norm": 1.4008698463439941, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2064, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 0.5825537601749484, |
|
"grad_norm": 1.3439879417419434, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1908, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 0.5831612197788847, |
|
"grad_norm": 1.436219573020935, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2103, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 0.5837686793828211, |
|
"grad_norm": 1.481085181236267, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2041, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.5843761389867573, |
|
"grad_norm": 1.3902438879013062, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1871, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 0.5849835985906937, |
|
"grad_norm": 1.6079788208007812, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1283, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 0.5855910581946301, |
|
"grad_norm": 1.473951816558838, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2437, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 0.5861985177985664, |
|
"grad_norm": 1.6076713800430298, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2463, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 0.5868059774025027, |
|
"grad_norm": 1.4721814393997192, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2204, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 0.5874134370064391, |
|
"grad_norm": 1.4392870664596558, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1332, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 0.5880208966103754, |
|
"grad_norm": 1.3198387622833252, |
|
"learning_rate": 0.0002, |
|
"loss": 1.273, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 0.5886283562143118, |
|
"grad_norm": 1.32366144657135, |
|
"learning_rate": 0.0002, |
|
"loss": 1.273, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 0.5892358158182481, |
|
"grad_norm": 1.3553622961044312, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2246, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 0.5898432754221844, |
|
"grad_norm": 1.2340115308761597, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1045, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 0.5904507350261208, |
|
"grad_norm": 1.4639991521835327, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2169, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 0.5910581946300572, |
|
"grad_norm": 1.7941631078720093, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1877, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 0.5916656542339934, |
|
"grad_norm": 1.494030475616455, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1785, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 0.5922731138379298, |
|
"grad_norm": 1.371238112449646, |
|
"learning_rate": 0.0002, |
|
"loss": 1.212, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 0.5928805734418661, |
|
"grad_norm": 1.6964439153671265, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2128, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 0.5934880330458024, |
|
"grad_norm": 1.3934699296951294, |
|
"learning_rate": 0.0002, |
|
"loss": 1.183, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 0.5940954926497388, |
|
"grad_norm": 1.505387306213379, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1992, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 0.5947029522536751, |
|
"grad_norm": 1.6573508977890015, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2132, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 0.5953104118576115, |
|
"grad_norm": 1.3351209163665771, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2428, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 0.5959178714615478, |
|
"grad_norm": 1.2429594993591309, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1371, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 0.5965253310654841, |
|
"grad_norm": 1.5691545009613037, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1295, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 0.5971327906694205, |
|
"grad_norm": 1.4945405721664429, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2581, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 0.5977402502733569, |
|
"grad_norm": 1.513573408126831, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2453, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 0.5983477098772931, |
|
"grad_norm": 1.426198959350586, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0992, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 0.5989551694812295, |
|
"grad_norm": 1.6695457696914673, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2234, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 0.5995626290851659, |
|
"grad_norm": 1.9191772937774658, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1916, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 0.6001700886891022, |
|
"grad_norm": 1.4344862699508667, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1948, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 0.6007775482930385, |
|
"grad_norm": 1.4640361070632935, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2454, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 0.6013850078969748, |
|
"grad_norm": 1.7511848211288452, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2485, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 0.6019924675009112, |
|
"grad_norm": 1.3894282579421997, |
|
"learning_rate": 0.0002, |
|
"loss": 1.171, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 0.6025999271048476, |
|
"grad_norm": 1.3920185565948486, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2435, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.6032073867087838, |
|
"grad_norm": 1.3208262920379639, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2918, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 0.6038148463127202, |
|
"grad_norm": 1.1784992218017578, |
|
"learning_rate": 0.0002, |
|
"loss": 1.151, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 0.6044223059166566, |
|
"grad_norm": 1.4552900791168213, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2019, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 0.6050297655205928, |
|
"grad_norm": 1.574312686920166, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2686, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 0.6056372251245292, |
|
"grad_norm": 1.6619045734405518, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1878, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 0.6062446847284656, |
|
"grad_norm": 1.2976871728897095, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2308, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 0.6068521443324019, |
|
"grad_norm": 2.1094729900360107, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1648, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 0.6074596039363382, |
|
"grad_norm": 1.9893029928207397, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2477, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.6080670635402746, |
|
"grad_norm": 1.579158902168274, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1757, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 0.6086745231442109, |
|
"grad_norm": 1.3016012907028198, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2097, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 0.6092819827481473, |
|
"grad_norm": 1.4439659118652344, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1698, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 0.6098894423520835, |
|
"grad_norm": 1.490735411643982, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2366, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 0.6104969019560199, |
|
"grad_norm": 1.5587278604507446, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1882, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 0.6111043615599563, |
|
"grad_norm": 1.5142701864242554, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1452, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 0.6117118211638926, |
|
"grad_norm": 1.562000036239624, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2219, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 0.6123192807678289, |
|
"grad_norm": 1.5308712720870972, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1221, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 0.6129267403717653, |
|
"grad_norm": 1.441798448562622, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1266, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 0.6135341999757016, |
|
"grad_norm": 1.3092381954193115, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1661, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 0.614141659579638, |
|
"grad_norm": 1.592490315437317, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1633, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 0.6147491191835743, |
|
"grad_norm": 1.5515542030334473, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2181, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 0.6153565787875106, |
|
"grad_norm": 1.8464525938034058, |
|
"learning_rate": 0.0002, |
|
"loss": 1.194, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 0.615964038391447, |
|
"grad_norm": 1.4995911121368408, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1926, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 0.6165714979953834, |
|
"grad_norm": 2.0670576095581055, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2425, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 0.6171789575993196, |
|
"grad_norm": 1.2869359254837036, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2304, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 0.617786417203256, |
|
"grad_norm": 1.6126221418380737, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1576, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 0.6183938768071923, |
|
"grad_norm": 1.5873949527740479, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2378, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 0.6190013364111286, |
|
"grad_norm": 1.394704818725586, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1733, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 0.619608796015065, |
|
"grad_norm": 1.4479098320007324, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1198, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 0.6202162556190013, |
|
"grad_norm": 1.4679584503173828, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1245, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 0.6208237152229377, |
|
"grad_norm": 1.4689127206802368, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1647, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 0.621431174826874, |
|
"grad_norm": 1.3493456840515137, |
|
"learning_rate": 0.0002, |
|
"loss": 1.149, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 0.6220386344308103, |
|
"grad_norm": 1.480111837387085, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0947, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.6226460940347467, |
|
"grad_norm": 1.4981671571731567, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2457, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 0.6232535536386831, |
|
"grad_norm": 1.4562405347824097, |
|
"learning_rate": 0.0002, |
|
"loss": 1.161, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 0.6238610132426193, |
|
"grad_norm": 1.4748475551605225, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1582, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 0.6244684728465557, |
|
"grad_norm": 1.4762338399887085, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1543, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 0.6250759324504921, |
|
"grad_norm": 1.5699721574783325, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1238, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 0.6256833920544284, |
|
"grad_norm": 1.5905998945236206, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2323, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 0.6262908516583647, |
|
"grad_norm": 1.2949004173278809, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1863, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 0.6268983112623011, |
|
"grad_norm": 1.8533037900924683, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1373, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 0.6275057708662374, |
|
"grad_norm": 1.3952187299728394, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2169, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 0.6281132304701738, |
|
"grad_norm": 1.4759795665740967, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1877, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 0.62872069007411, |
|
"grad_norm": 1.8833881616592407, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2601, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 0.6293281496780464, |
|
"grad_norm": 1.4174413681030273, |
|
"learning_rate": 0.0002, |
|
"loss": 1.232, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 0.6299356092819828, |
|
"grad_norm": 1.5129276514053345, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1903, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 0.630543068885919, |
|
"grad_norm": 1.4858694076538086, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1323, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 0.6311505284898554, |
|
"grad_norm": 1.458954930305481, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1408, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 0.6317579880937918, |
|
"grad_norm": 1.442545771598816, |
|
"learning_rate": 0.0002, |
|
"loss": 1.201, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 0.6323654476977281, |
|
"grad_norm": 1.5205395221710205, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1685, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 0.6329729073016644, |
|
"grad_norm": 1.3119162321090698, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1423, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 0.6335803669056008, |
|
"grad_norm": 1.345941185951233, |
|
"learning_rate": 0.0002, |
|
"loss": 1.192, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 0.6341878265095371, |
|
"grad_norm": 1.6821863651275635, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1688, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 0.6347952861134735, |
|
"grad_norm": 1.6074057817459106, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2167, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 0.6354027457174098, |
|
"grad_norm": 1.5278266668319702, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1838, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 0.6360102053213461, |
|
"grad_norm": 1.3733577728271484, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1441, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 0.6366176649252825, |
|
"grad_norm": 1.5218098163604736, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1277, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 0.6372251245292188, |
|
"grad_norm": 1.6241083145141602, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2118, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 0.6378325841331551, |
|
"grad_norm": 1.836913824081421, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1521, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.6384400437370915, |
|
"grad_norm": 1.609478235244751, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1986, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 0.6390475033410278, |
|
"grad_norm": 1.6075109243392944, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2768, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 0.6396549629449642, |
|
"grad_norm": 1.3840945959091187, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1719, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 0.6402624225489005, |
|
"grad_norm": 1.3323301076889038, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1737, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 0.6408698821528368, |
|
"grad_norm": 1.513311505317688, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1444, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 0.6414773417567732, |
|
"grad_norm": 1.5211206674575806, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2536, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 0.6420848013607096, |
|
"grad_norm": 1.331162452697754, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2607, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 0.6426922609646458, |
|
"grad_norm": 1.284327745437622, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2564, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 0.6432997205685822, |
|
"grad_norm": 1.2587096691131592, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1948, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 0.6439071801725186, |
|
"grad_norm": 1.4804191589355469, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2251, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 0.6445146397764548, |
|
"grad_norm": 1.3306211233139038, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1484, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 0.6451220993803912, |
|
"grad_norm": 1.4872162342071533, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2206, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 0.6457295589843275, |
|
"grad_norm": 1.479332685470581, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1723, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 0.6463370185882639, |
|
"grad_norm": 1.6562544107437134, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1718, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 0.6469444781922002, |
|
"grad_norm": 1.4814534187316895, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1975, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 0.6475519377961365, |
|
"grad_norm": 1.4615775346755981, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1592, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 0.6481593974000729, |
|
"grad_norm": 1.4853074550628662, |
|
"learning_rate": 0.0002, |
|
"loss": 1.193, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 0.6487668570040093, |
|
"grad_norm": 1.4148366451263428, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1757, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 0.6493743166079455, |
|
"grad_norm": 1.644765853881836, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1298, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 0.6499817762118819, |
|
"grad_norm": 1.6349629163742065, |
|
"learning_rate": 0.0002, |
|
"loss": 1.157, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 0.6505892358158183, |
|
"grad_norm": 1.4590332508087158, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2426, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 0.6511966954197546, |
|
"grad_norm": 1.3630939722061157, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1686, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 0.6518041550236909, |
|
"grad_norm": 1.5206563472747803, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1991, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 0.6524116146276273, |
|
"grad_norm": 1.1928949356079102, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1915, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 0.6530190742315636, |
|
"grad_norm": 1.7482240200042725, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2403, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 0.6536265338355, |
|
"grad_norm": 1.4580903053283691, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2571, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 0.6542339934394362, |
|
"grad_norm": 1.582047939300537, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2438, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 0.6548414530433726, |
|
"grad_norm": 1.415243148803711, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1761, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 0.655448912647309, |
|
"grad_norm": 1.3117393255233765, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1339, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 0.6560563722512452, |
|
"grad_norm": 1.4229295253753662, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1884, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 0.6566638318551816, |
|
"grad_norm": 1.449526309967041, |
|
"learning_rate": 0.0002, |
|
"loss": 1.287, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 0.657271291459118, |
|
"grad_norm": 1.509210228919983, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1541, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 0.6578787510630543, |
|
"grad_norm": 1.6160600185394287, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2228, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 0.6584862106669906, |
|
"grad_norm": 1.3628963232040405, |
|
"learning_rate": 0.0002, |
|
"loss": 1.268, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 0.659093670270927, |
|
"grad_norm": 1.3497792482376099, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1868, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 0.6597011298748633, |
|
"grad_norm": 1.3454173803329468, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1741, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 0.6603085894787997, |
|
"grad_norm": 1.4797899723052979, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1899, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 0.660916049082736, |
|
"grad_norm": 1.2999018430709839, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2017, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 0.6615235086866723, |
|
"grad_norm": 1.4134020805358887, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2336, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 0.6621309682906087, |
|
"grad_norm": 1.706198811531067, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2633, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 0.6627384278945451, |
|
"grad_norm": 1.3994085788726807, |
|
"learning_rate": 0.0002, |
|
"loss": 1.199, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 0.6633458874984813, |
|
"grad_norm": 1.4403148889541626, |
|
"learning_rate": 0.0002, |
|
"loss": 1.236, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 0.6639533471024177, |
|
"grad_norm": 1.5367127656936646, |
|
"learning_rate": 0.0002, |
|
"loss": 1.228, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 0.664560806706354, |
|
"grad_norm": 1.7260019779205322, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2759, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 0.6651682663102904, |
|
"grad_norm": 1.456173300743103, |
|
"learning_rate": 0.0002, |
|
"loss": 1.138, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 0.6657757259142267, |
|
"grad_norm": 1.9463717937469482, |
|
"learning_rate": 0.0002, |
|
"loss": 1.094, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 0.666383185518163, |
|
"grad_norm": 1.5047192573547363, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2151, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 0.6669906451220994, |
|
"grad_norm": 1.4958069324493408, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1373, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 0.6675981047260358, |
|
"grad_norm": 1.6270114183425903, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1656, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 0.668205564329972, |
|
"grad_norm": 1.494084119796753, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1473, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.6688130239339084, |
|
"grad_norm": 1.6952012777328491, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1886, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 0.6694204835378448, |
|
"grad_norm": 1.2693300247192383, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1854, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 0.670027943141781, |
|
"grad_norm": 1.3508623838424683, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1476, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 0.6706354027457174, |
|
"grad_norm": 1.5516679286956787, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2994, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 0.6712428623496538, |
|
"grad_norm": 1.5078186988830566, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1406, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 0.6718503219535901, |
|
"grad_norm": 1.5944573879241943, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1062, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 0.6724577815575264, |
|
"grad_norm": 1.866337537765503, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2106, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 0.6730652411614627, |
|
"grad_norm": 1.346386432647705, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1681, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 0.6736727007653991, |
|
"grad_norm": 1.4467651844024658, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2039, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 0.6742801603693355, |
|
"grad_norm": 1.404458999633789, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2296, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 0.6748876199732717, |
|
"grad_norm": 1.2593837976455688, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2039, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 0.6754950795772081, |
|
"grad_norm": 1.4862419366836548, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1957, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 0.6761025391811445, |
|
"grad_norm": 1.4536348581314087, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2568, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 0.6767099987850808, |
|
"grad_norm": 1.6232870817184448, |
|
"learning_rate": 0.0002, |
|
"loss": 1.184, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 0.6773174583890171, |
|
"grad_norm": 1.621951937675476, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1848, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 0.6779249179929535, |
|
"grad_norm": 1.462931513786316, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1735, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 0.6785323775968898, |
|
"grad_norm": 1.4450300931930542, |
|
"learning_rate": 0.0002, |
|
"loss": 1.186, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 0.6791398372008262, |
|
"grad_norm": 1.7563248872756958, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1988, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 0.6797472968047625, |
|
"grad_norm": 1.3302048444747925, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1443, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 0.6803547564086988, |
|
"grad_norm": 1.4724364280700684, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1359, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 0.6809622160126352, |
|
"grad_norm": 1.4847537279129028, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1733, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 0.6815696756165714, |
|
"grad_norm": 1.5312765836715698, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1182, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 0.6821771352205078, |
|
"grad_norm": 1.563795804977417, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1132, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 0.6827845948244442, |
|
"grad_norm": 1.5239176750183105, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2627, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 0.6833920544283805, |
|
"grad_norm": 1.7546555995941162, |
|
"learning_rate": 0.0002, |
|
"loss": 1.3069, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 0.6839995140323168, |
|
"grad_norm": 1.4419306516647339, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2376, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 0.6846069736362532, |
|
"grad_norm": 1.425281286239624, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1672, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 0.6852144332401895, |
|
"grad_norm": 1.3561053276062012, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2377, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 0.6858218928441259, |
|
"grad_norm": 1.4803534746170044, |
|
"learning_rate": 0.0002, |
|
"loss": 1.192, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 0.6864293524480622, |
|
"grad_norm": 1.431349277496338, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2415, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 0.6870368120519985, |
|
"grad_norm": 1.2821253538131714, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1721, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 0.6876442716559349, |
|
"grad_norm": 1.4956986904144287, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2056, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 0.6882517312598713, |
|
"grad_norm": 1.3334455490112305, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1747, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 0.6888591908638075, |
|
"grad_norm": 1.6449331045150757, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2525, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 0.6894666504677439, |
|
"grad_norm": 1.402047872543335, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2468, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 0.6900741100716802, |
|
"grad_norm": 1.4714034795761108, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1789, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 0.6906815696756166, |
|
"grad_norm": 1.4426461458206177, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2036, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 0.6912890292795529, |
|
"grad_norm": 1.3404841423034668, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2156, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 0.6918964888834892, |
|
"grad_norm": 1.3607988357543945, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2025, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 0.6925039484874256, |
|
"grad_norm": 1.4359990358352661, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2282, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 0.693111408091362, |
|
"grad_norm": 1.619635820388794, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1713, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 0.6937188676952982, |
|
"grad_norm": 1.6724573373794556, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2081, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 0.6943263272992346, |
|
"grad_norm": 2.061098098754883, |
|
"learning_rate": 0.0002, |
|
"loss": 1.185, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 0.694933786903171, |
|
"grad_norm": 1.4103002548217773, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1879, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 0.6955412465071072, |
|
"grad_norm": 1.2703981399536133, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2088, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 0.6961487061110436, |
|
"grad_norm": 1.6062376499176025, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2214, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 0.69675616571498, |
|
"grad_norm": 1.5932917594909668, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1671, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.6973636253189163, |
|
"grad_norm": 1.4609780311584473, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1285, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 0.6979710849228526, |
|
"grad_norm": 1.3255772590637207, |
|
"learning_rate": 0.0002, |
|
"loss": 1.0967, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 0.6985785445267889, |
|
"grad_norm": 1.6930729150772095, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1953, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.6991860041307253, |
|
"grad_norm": 1.3310422897338867, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1798, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 0.6997934637346617, |
|
"grad_norm": 1.3762205839157104, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1868, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 0.7004009233385979, |
|
"grad_norm": 1.344161868095398, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2483, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 0.7010083829425343, |
|
"grad_norm": 1.4122248888015747, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2095, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 0.7016158425464707, |
|
"grad_norm": 1.4848181009292603, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2064, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 0.702223302150407, |
|
"grad_norm": 1.3725167512893677, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2049, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 0.7028307617543433, |
|
"grad_norm": 1.5955760478973389, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1277, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 0.7034382213582797, |
|
"grad_norm": 1.461313009262085, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 0.704045680962216, |
|
"grad_norm": 1.5912322998046875, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2243, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 0.7046531405661524, |
|
"grad_norm": 1.641867756843567, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2052, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 0.7052606001700887, |
|
"grad_norm": 1.3238072395324707, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1505, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 0.705868059774025, |
|
"grad_norm": 1.4066652059555054, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2051, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 0.7064755193779614, |
|
"grad_norm": 1.5860298871994019, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1738, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 0.7070829789818978, |
|
"grad_norm": 1.5154073238372803, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2652, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 0.707690438585834, |
|
"grad_norm": 1.6234484910964966, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2645, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 0.7082978981897704, |
|
"grad_norm": 1.5033893585205078, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1698, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 0.7089053577937067, |
|
"grad_norm": 1.4364176988601685, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2689, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 0.709512817397643, |
|
"grad_norm": 1.389700174331665, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2512, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 0.7101202770015794, |
|
"grad_norm": 1.3205703496932983, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1309, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 0.7107277366055157, |
|
"grad_norm": 1.568920612335205, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2776, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 0.7113351962094521, |
|
"grad_norm": 1.4814732074737549, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2819, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 0.7119426558133884, |
|
"grad_norm": 1.6823959350585938, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2476, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 0.7125501154173247, |
|
"grad_norm": 1.338059902191162, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2376, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 0.7131575750212611, |
|
"grad_norm": 1.734095573425293, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2301, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 0.7137650346251975, |
|
"grad_norm": 1.3866161108016968, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1602, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 0.7143724942291337, |
|
"grad_norm": 1.465135931968689, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2681, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 0.7149799538330701, |
|
"grad_norm": 1.3397235870361328, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2742, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 0.7155874134370065, |
|
"grad_norm": 1.38888680934906, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1629, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.7161948730409428, |
|
"grad_norm": 1.4504741430282593, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1503, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 0.7168023326448791, |
|
"grad_norm": 1.4954638481140137, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2249, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 0.7174097922488154, |
|
"grad_norm": 1.841427206993103, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2107, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 0.7180172518527518, |
|
"grad_norm": 1.4142829179763794, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1921, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 0.7186247114566882, |
|
"grad_norm": 1.5487010478973389, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1975, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 0.7192321710606244, |
|
"grad_norm": 1.4076977968215942, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2191, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 0.7198396306645608, |
|
"grad_norm": 1.1866190433502197, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2053, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 0.7204470902684972, |
|
"grad_norm": 1.5468428134918213, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1661, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 0.7210545498724334, |
|
"grad_norm": 1.2567400932312012, |
|
"learning_rate": 0.0002, |
|
"loss": 1.179, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 0.7216620094763698, |
|
"grad_norm": 1.5383141040802002, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2302, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 0.7222694690803062, |
|
"grad_norm": 1.494950532913208, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2232, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 0.7228769286842425, |
|
"grad_norm": 1.8329541683197021, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1461, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 0.7234843882881788, |
|
"grad_norm": 1.46611750125885, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2242, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 0.7240918478921152, |
|
"grad_norm": 1.4088685512542725, |
|
"learning_rate": 0.0002, |
|
"loss": 1.165, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 0.7246993074960515, |
|
"grad_norm": 1.4150999784469604, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1637, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 0.7253067670999879, |
|
"grad_norm": 1.3681480884552002, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1833, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 0.7259142267039241, |
|
"grad_norm": 1.4092365503311157, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1468, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 0.7265216863078605, |
|
"grad_norm": 1.4637953042984009, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2164, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 0.7271291459117969, |
|
"grad_norm": 1.340900182723999, |
|
"learning_rate": 0.0002, |
|
"loss": 1.1661, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 0.7277366055157332, |
|
"grad_norm": 1.4122298955917358, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2512, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 0.7283440651196695, |
|
"grad_norm": 1.5455623865127563, |
|
"learning_rate": 0.0002, |
|
"loss": 1.2112, |
|
"step": 11990 |
|
}, |
|
{
"epoch": 0.7289515247236059,
"grad_norm": 1.3420119285583496,
"learning_rate": 0.0002,
"loss": 1.1923,
"step": 12000
},
{
"epoch": 0.7295589843275422,
"grad_norm": 1.5570626258850098,
"learning_rate": 0.0002,
"loss": 1.1912,
"step": 12010
},
{
"epoch": 0.7301664439314786,
"grad_norm": 1.5055865049362183,
"learning_rate": 0.0002,
"loss": 1.185,
"step": 12020
},
{
"epoch": 0.7307739035354149,
"grad_norm": 1.5202549695968628,
"learning_rate": 0.0002,
"loss": 1.2013,
"step": 12030
},
{
"epoch": 0.7313813631393512,
"grad_norm": 1.3424779176712036,
"learning_rate": 0.0002,
"loss": 1.1318,
"step": 12040
},
{
"epoch": 0.7319888227432876,
"grad_norm": 1.838219165802002,
"learning_rate": 0.0002,
"loss": 1.2258,
"step": 12050
},
{
"epoch": 0.732596282347224,
"grad_norm": 1.2670247554779053,
"learning_rate": 0.0002,
"loss": 1.1377,
"step": 12060
},
{
"epoch": 0.7332037419511602,
"grad_norm": 1.7354373931884766,
"learning_rate": 0.0002,
"loss": 1.1729,
"step": 12070
},
{
"epoch": 0.7338112015550966,
"grad_norm": 1.6558679342269897,
"learning_rate": 0.0002,
"loss": 1.2186,
"step": 12080
},
{
"epoch": 0.7344186611590329,
"grad_norm": 1.5855995416641235,
"learning_rate": 0.0002,
"loss": 1.2495,
"step": 12090
},
{
"epoch": 0.7350261207629692,
"grad_norm": 1.6140644550323486,
"learning_rate": 0.0002,
"loss": 1.2288,
"step": 12100
},
{
"epoch": 0.7356335803669056,
"grad_norm": 1.4002978801727295,
"learning_rate": 0.0002,
"loss": 1.2297,
"step": 12110
},
{
"epoch": 0.7362410399708419,
"grad_norm": 1.445778489112854,
"learning_rate": 0.0002,
"loss": 1.1928,
"step": 12120
},
{
"epoch": 0.7368484995747783,
"grad_norm": 1.6145892143249512,
"learning_rate": 0.0002,
"loss": 1.1717,
"step": 12130
},
{
"epoch": 0.7374559591787146,
"grad_norm": 1.5301674604415894,
"learning_rate": 0.0002,
"loss": 1.122,
"step": 12140
},
{
"epoch": 0.7380634187826509,
"grad_norm": 1.5016682147979736,
"learning_rate": 0.0002,
"loss": 1.2023,
"step": 12150
},
{
"epoch": 0.7386708783865873,
"grad_norm": 1.5861783027648926,
"learning_rate": 0.0002,
"loss": 1.1168,
"step": 12160
},
{
"epoch": 0.7392783379905237,
"grad_norm": 1.6081180572509766,
"learning_rate": 0.0002,
"loss": 1.1411,
"step": 12170
},
{
"epoch": 0.7398857975944599,
"grad_norm": 1.5475598573684692,
"learning_rate": 0.0002,
"loss": 1.151,
"step": 12180
},
{
"epoch": 0.7404932571983963,
"grad_norm": 1.668095588684082,
"learning_rate": 0.0002,
"loss": 1.1793,
"step": 12190
},
{
"epoch": 0.7411007168023327,
"grad_norm": 1.5576446056365967,
"learning_rate": 0.0002,
"loss": 1.2414,
"step": 12200
},
{
"epoch": 0.741708176406269,
"grad_norm": 1.7492656707763672,
"learning_rate": 0.0002,
"loss": 1.1558,
"step": 12210
},
{
"epoch": 0.7423156360102053,
"grad_norm": 1.527300238609314,
"learning_rate": 0.0002,
"loss": 1.168,
"step": 12220
},
{
"epoch": 0.7429230956141416,
"grad_norm": 1.4713521003723145,
"learning_rate": 0.0002,
"loss": 1.1747,
"step": 12230
},
{
"epoch": 0.743530555218078,
"grad_norm": 1.3588942289352417,
"learning_rate": 0.0002,
"loss": 1.1781,
"step": 12240
},
{
"epoch": 0.7441380148220144,
"grad_norm": 1.6379607915878296,
"learning_rate": 0.0002,
"loss": 1.1654,
"step": 12250
},
{
"epoch": 0.7447454744259506,
"grad_norm": 1.337945580482483,
"learning_rate": 0.0002,
"loss": 1.1837,
"step": 12260
},
{
"epoch": 0.745352934029887,
"grad_norm": 1.6633474826812744,
"learning_rate": 0.0002,
"loss": 1.21,
"step": 12270
},
{
"epoch": 0.7459603936338234,
"grad_norm": 1.4022483825683594,
"learning_rate": 0.0002,
"loss": 1.1702,
"step": 12280
},
{
"epoch": 0.7465678532377596,
"grad_norm": 1.5646429061889648,
"learning_rate": 0.0002,
"loss": 1.1522,
"step": 12290
},
{
"epoch": 0.747175312841696,
"grad_norm": 1.559167742729187,
"learning_rate": 0.0002,
"loss": 1.1502,
"step": 12300
},
{
"epoch": 0.7477827724456324,
"grad_norm": 1.3548595905303955,
"learning_rate": 0.0002,
"loss": 1.1954,
"step": 12310
},
{
"epoch": 0.7483902320495687,
"grad_norm": 1.4975273609161377,
"learning_rate": 0.0002,
"loss": 1.216,
"step": 12320
},
{
"epoch": 0.748997691653505,
"grad_norm": 1.408597469329834,
"learning_rate": 0.0002,
"loss": 1.2319,
"step": 12330
},
{
"epoch": 0.7496051512574414,
"grad_norm": 1.4837566614151,
"learning_rate": 0.0002,
"loss": 1.2854,
"step": 12340
},
{
"epoch": 0.7502126108613777,
"grad_norm": 1.2895525693893433,
"learning_rate": 0.0002,
"loss": 1.2207,
"step": 12350
},
{
"epoch": 0.7508200704653141,
"grad_norm": 1.3992550373077393,
"learning_rate": 0.0002,
"loss": 1.1627,
"step": 12360
},
{
"epoch": 0.7514275300692504,
"grad_norm": 1.6421400308609009,
"learning_rate": 0.0002,
"loss": 1.1692,
"step": 12370
},
{
"epoch": 0.7520349896731867,
"grad_norm": 1.799697756767273,
"learning_rate": 0.0002,
"loss": 1.2416,
"step": 12380
},
{
"epoch": 0.7526424492771231,
"grad_norm": 1.6245464086532593,
"learning_rate": 0.0002,
"loss": 1.2382,
"step": 12390
},
{
"epoch": 0.7532499088810594,
"grad_norm": 1.462110161781311,
"learning_rate": 0.0002,
"loss": 1.1325,
"step": 12400
},
{
"epoch": 0.7538573684849957,
"grad_norm": 1.7330759763717651,
"learning_rate": 0.0002,
"loss": 1.1275,
"step": 12410
},
{
"epoch": 0.7544648280889321,
"grad_norm": 1.3573112487792969,
"learning_rate": 0.0002,
"loss": 1.1673,
"step": 12420
},
{
"epoch": 0.7550722876928684,
"grad_norm": 1.528638482093811,
"learning_rate": 0.0002,
"loss": 1.1091,
"step": 12430
},
{
"epoch": 0.7556797472968048,
"grad_norm": 1.4841316938400269,
"learning_rate": 0.0002,
"loss": 1.1973,
"step": 12440
},
{
"epoch": 0.7562872069007411,
"grad_norm": 1.8295537233352661,
"learning_rate": 0.0002,
"loss": 1.0993,
"step": 12450
},
{
"epoch": 0.7568946665046774,
"grad_norm": 1.580533504486084,
"learning_rate": 0.0002,
"loss": 1.2444,
"step": 12460
},
{
"epoch": 0.7575021261086138,
"grad_norm": 1.4425346851348877,
"learning_rate": 0.0002,
"loss": 1.1931,
"step": 12470
},
{
"epoch": 0.7581095857125502,
"grad_norm": 1.4134783744812012,
"learning_rate": 0.0002,
"loss": 1.144,
"step": 12480
},
{
"epoch": 0.7587170453164864,
"grad_norm": 1.2354546785354614,
"learning_rate": 0.0002,
"loss": 1.1764,
"step": 12490
},
{
"epoch": 0.7593245049204228,
"grad_norm": 1.2936530113220215,
"learning_rate": 0.0002,
"loss": 1.1902,
"step": 12500
},
{
"epoch": 0.7599319645243592,
"grad_norm": 1.5252578258514404,
"learning_rate": 0.0002,
"loss": 1.17,
"step": 12510
},
{
"epoch": 0.7605394241282954,
"grad_norm": 1.538248062133789,
"learning_rate": 0.0002,
"loss": 1.2061,
"step": 12520
},
{
"epoch": 0.7611468837322318,
"grad_norm": 1.61935555934906,
"learning_rate": 0.0002,
"loss": 1.1884,
"step": 12530
},
{
"epoch": 0.7617543433361681,
"grad_norm": 1.254185676574707,
"learning_rate": 0.0002,
"loss": 1.2032,
"step": 12540
},
{
"epoch": 0.7623618029401045,
"grad_norm": 1.367621898651123,
"learning_rate": 0.0002,
"loss": 1.2207,
"step": 12550
},
{
"epoch": 0.7629692625440408,
"grad_norm": 1.512383222579956,
"learning_rate": 0.0002,
"loss": 1.1971,
"step": 12560
},
{
"epoch": 0.7635767221479771,
"grad_norm": 1.4338618516921997,
"learning_rate": 0.0002,
"loss": 1.2558,
"step": 12570
},
{
"epoch": 0.7641841817519135,
"grad_norm": 1.322144865989685,
"learning_rate": 0.0002,
"loss": 1.0924,
"step": 12580
},
{
"epoch": 0.7647916413558499,
"grad_norm": 1.518500804901123,
"learning_rate": 0.0002,
"loss": 1.1616,
"step": 12590
},
{
"epoch": 0.7653991009597861,
"grad_norm": 1.4812904596328735,
"learning_rate": 0.0002,
"loss": 1.1519,
"step": 12600
},
{
"epoch": 0.7660065605637225,
"grad_norm": 1.480425477027893,
"learning_rate": 0.0002,
"loss": 1.2715,
"step": 12610
},
{
"epoch": 0.7666140201676589,
"grad_norm": 1.554589867591858,
"learning_rate": 0.0002,
"loss": 1.2196,
"step": 12620
},
{
"epoch": 0.7672214797715952,
"grad_norm": 1.6555403470993042,
"learning_rate": 0.0002,
"loss": 1.1961,
"step": 12630
},
{
"epoch": 0.7678289393755315,
"grad_norm": 1.9677565097808838,
"learning_rate": 0.0002,
"loss": 1.1848,
"step": 12640
},
{
"epoch": 0.7684363989794679,
"grad_norm": 1.4238587617874146,
"learning_rate": 0.0002,
"loss": 1.1731,
"step": 12650
},
{
"epoch": 0.7690438585834042,
"grad_norm": 1.2964900732040405,
"learning_rate": 0.0002,
"loss": 1.1768,
"step": 12660
},
{
"epoch": 0.7696513181873406,
"grad_norm": 1.549517273902893,
"learning_rate": 0.0002,
"loss": 1.1728,
"step": 12670
},
{
"epoch": 0.7702587777912768,
"grad_norm": 1.5808876752853394,
"learning_rate": 0.0002,
"loss": 1.1796,
"step": 12680
},
{
"epoch": 0.7708662373952132,
"grad_norm": 1.7364044189453125,
"learning_rate": 0.0002,
"loss": 1.2,
"step": 12690
},
{
"epoch": 0.7714736969991496,
"grad_norm": 1.4321144819259644,
"learning_rate": 0.0002,
"loss": 1.2613,
"step": 12700
},
{
"epoch": 0.7720811566030858,
"grad_norm": 1.4324842691421509,
"learning_rate": 0.0002,
"loss": 1.1764,
"step": 12710
},
{
"epoch": 0.7726886162070222,
"grad_norm": 1.518303632736206,
"learning_rate": 0.0002,
"loss": 1.2267,
"step": 12720
},
{
"epoch": 0.7732960758109586,
"grad_norm": 1.494956135749817,
"learning_rate": 0.0002,
"loss": 1.18,
"step": 12730
},
{
"epoch": 0.7739035354148949,
"grad_norm": 1.5330840349197388,
"learning_rate": 0.0002,
"loss": 1.2204,
"step": 12740
},
{
"epoch": 0.7745109950188312,
"grad_norm": 1.964697003364563,
"learning_rate": 0.0002,
"loss": 1.1896,
"step": 12750
},
{
"epoch": 0.7751184546227676,
"grad_norm": 1.4546568393707275,
"learning_rate": 0.0002,
"loss": 1.1851,
"step": 12760
},
{
"epoch": 0.7757259142267039,
"grad_norm": 1.5761953592300415,
"learning_rate": 0.0002,
"loss": 1.1721,
"step": 12770
},
{
"epoch": 0.7763333738306403,
"grad_norm": 1.676142692565918,
"learning_rate": 0.0002,
"loss": 1.1536,
"step": 12780
},
{
"epoch": 0.7769408334345766,
"grad_norm": 1.4308006763458252,
"learning_rate": 0.0002,
"loss": 1.2011,
"step": 12790
},
{
"epoch": 0.7775482930385129,
"grad_norm": 1.3389242887496948,
"learning_rate": 0.0002,
"loss": 1.2487,
"step": 12800
},
{
"epoch": 0.7781557526424493,
"grad_norm": 1.6429331302642822,
"learning_rate": 0.0002,
"loss": 1.1282,
"step": 12810
},
{
"epoch": 0.7787632122463856,
"grad_norm": 1.4515995979309082,
"learning_rate": 0.0002,
"loss": 1.2761,
"step": 12820
},
{
"epoch": 0.7793706718503219,
"grad_norm": 1.4249382019042969,
"learning_rate": 0.0002,
"loss": 1.1918,
"step": 12830
},
{
"epoch": 0.7799781314542583,
"grad_norm": 1.55287766456604,
"learning_rate": 0.0002,
"loss": 1.1739,
"step": 12840
},
{
"epoch": 0.7805855910581946,
"grad_norm": 1.5151697397232056,
"learning_rate": 0.0002,
"loss": 1.1971,
"step": 12850
},
{
"epoch": 0.781193050662131,
"grad_norm": 1.4955744743347168,
"learning_rate": 0.0002,
"loss": 1.1891,
"step": 12860
},
{
"epoch": 0.7818005102660673,
"grad_norm": 1.3934895992279053,
"learning_rate": 0.0002,
"loss": 1.2318,
"step": 12870
},
{
"epoch": 0.7824079698700036,
"grad_norm": 1.6154847145080566,
"learning_rate": 0.0002,
"loss": 1.175,
"step": 12880
},
{
"epoch": 0.78301542947394,
"grad_norm": 1.4619389772415161,
"learning_rate": 0.0002,
"loss": 1.2368,
"step": 12890
},
{
"epoch": 0.7836228890778764,
"grad_norm": 1.4026260375976562,
"learning_rate": 0.0002,
"loss": 1.1869,
"step": 12900
},
{
"epoch": 0.7842303486818126,
"grad_norm": 1.633529782295227,
"learning_rate": 0.0002,
"loss": 1.1961,
"step": 12910
},
{
"epoch": 0.784837808285749,
"grad_norm": 1.270814061164856,
"learning_rate": 0.0002,
"loss": 1.2143,
"step": 12920
},
{
"epoch": 0.7854452678896854,
"grad_norm": 1.2957777976989746,
"learning_rate": 0.0002,
"loss": 1.1464,
"step": 12930
},
{
"epoch": 0.7860527274936217,
"grad_norm": 1.5368854999542236,
"learning_rate": 0.0002,
"loss": 1.1597,
"step": 12940
},
{
"epoch": 0.786660187097558,
"grad_norm": 1.471379280090332,
"learning_rate": 0.0002,
"loss": 1.2498,
"step": 12950
},
{
"epoch": 0.7872676467014944,
"grad_norm": 1.4442158937454224,
"learning_rate": 0.0002,
"loss": 1.1931,
"step": 12960
},
{
"epoch": 0.7878751063054307,
"grad_norm": 1.6200677156448364,
"learning_rate": 0.0002,
"loss": 1.1395,
"step": 12970
},
{
"epoch": 0.788482565909367,
"grad_norm": 1.4099328517913818,
"learning_rate": 0.0002,
"loss": 1.3028,
"step": 12980
},
{
"epoch": 0.7890900255133033,
"grad_norm": 1.5445653200149536,
"learning_rate": 0.0002,
"loss": 1.2275,
"step": 12990
},
{
"epoch": 0.7896974851172397,
"grad_norm": 1.5414783954620361,
"learning_rate": 0.0002,
"loss": 1.1612,
"step": 13000
},
{
"epoch": 0.7903049447211761,
"grad_norm": 1.3546172380447388,
"learning_rate": 0.0002,
"loss": 1.1443,
"step": 13010
},
{
"epoch": 0.7909124043251123,
"grad_norm": 1.457933783531189,
"learning_rate": 0.0002,
"loss": 1.1875,
"step": 13020
},
{
"epoch": 0.7915198639290487,
"grad_norm": 1.7691285610198975,
"learning_rate": 0.0002,
"loss": 1.2228,
"step": 13030
},
{
"epoch": 0.7921273235329851,
"grad_norm": 1.4855856895446777,
"learning_rate": 0.0002,
"loss": 1.2502,
"step": 13040
},
{
"epoch": 0.7927347831369214,
"grad_norm": 1.5997259616851807,
"learning_rate": 0.0002,
"loss": 1.1122,
"step": 13050
},
{
"epoch": 0.7933422427408577,
"grad_norm": 1.385907769203186,
"learning_rate": 0.0002,
"loss": 1.2139,
"step": 13060
},
{
"epoch": 0.7939497023447941,
"grad_norm": 1.3917052745819092,
"learning_rate": 0.0002,
"loss": 1.1403,
"step": 13070
},
{
"epoch": 0.7945571619487304,
"grad_norm": 1.5885778665542603,
"learning_rate": 0.0002,
"loss": 1.1718,
"step": 13080
},
{
"epoch": 0.7951646215526668,
"grad_norm": 1.5090978145599365,
"learning_rate": 0.0002,
"loss": 1.1756,
"step": 13090
},
{
"epoch": 0.7957720811566031,
"grad_norm": 1.5283892154693604,
"learning_rate": 0.0002,
"loss": 1.218,
"step": 13100
},
{
"epoch": 0.7963795407605394,
"grad_norm": 1.446351170539856,
"learning_rate": 0.0002,
"loss": 1.1796,
"step": 13110
},
{
"epoch": 0.7969870003644758,
"grad_norm": 1.3841274976730347,
"learning_rate": 0.0002,
"loss": 1.2957,
"step": 13120
},
{
"epoch": 0.797594459968412,
"grad_norm": 1.398147463798523,
"learning_rate": 0.0002,
"loss": 1.1361,
"step": 13130
},
{
"epoch": 0.7982019195723484,
"grad_norm": 1.4801783561706543,
"learning_rate": 0.0002,
"loss": 1.1428,
"step": 13140
},
{
"epoch": 0.7988093791762848,
"grad_norm": 1.4249933958053589,
"learning_rate": 0.0002,
"loss": 1.1931,
"step": 13150
},
{
"epoch": 0.7994168387802211,
"grad_norm": 1.6051521301269531,
"learning_rate": 0.0002,
"loss": 1.1448,
"step": 13160
},
{
"epoch": 0.8000242983841575,
"grad_norm": 1.6075396537780762,
"learning_rate": 0.0002,
"loss": 1.2308,
"step": 13170
},
{
"epoch": 0.8006317579880938,
"grad_norm": 1.581541895866394,
"learning_rate": 0.0002,
"loss": 1.2696,
"step": 13180
},
{
"epoch": 0.8012392175920301,
"grad_norm": 1.4118540287017822,
"learning_rate": 0.0002,
"loss": 1.2502,
"step": 13190
},
{
"epoch": 0.8018466771959665,
"grad_norm": 1.604483962059021,
"learning_rate": 0.0002,
"loss": 1.2867,
"step": 13200
},
{
"epoch": 0.8024541367999029,
"grad_norm": 1.623291015625,
"learning_rate": 0.0002,
"loss": 1.221,
"step": 13210
},
{
"epoch": 0.8030615964038391,
"grad_norm": 1.4024327993392944,
"learning_rate": 0.0002,
"loss": 1.2307,
"step": 13220
},
{
"epoch": 0.8036690560077755,
"grad_norm": 1.4459460973739624,
"learning_rate": 0.0002,
"loss": 1.2514,
"step": 13230
},
{
"epoch": 0.8042765156117119,
"grad_norm": 1.6799654960632324,
"learning_rate": 0.0002,
"loss": 1.163,
"step": 13240
},
{
"epoch": 0.8048839752156481,
"grad_norm": 1.5066721439361572,
"learning_rate": 0.0002,
"loss": 1.1511,
"step": 13250
},
{
"epoch": 0.8054914348195845,
"grad_norm": 1.372342824935913,
"learning_rate": 0.0002,
"loss": 1.1287,
"step": 13260
},
{
"epoch": 0.8060988944235208,
"grad_norm": 1.3962156772613525,
"learning_rate": 0.0002,
"loss": 1.2028,
"step": 13270
},
{
"epoch": 0.8067063540274572,
"grad_norm": 1.9565892219543457,
"learning_rate": 0.0002,
"loss": 1.1776,
"step": 13280
},
{
"epoch": 0.8073138136313935,
"grad_norm": 1.2959867715835571,
"learning_rate": 0.0002,
"loss": 1.251,
"step": 13290
},
{
"epoch": 0.8079212732353298,
"grad_norm": 1.5922397375106812,
"learning_rate": 0.0002,
"loss": 1.1784,
"step": 13300
},
{
"epoch": 0.8085287328392662,
"grad_norm": 1.3177214860916138,
"learning_rate": 0.0002,
"loss": 1.1632,
"step": 13310
},
{
"epoch": 0.8091361924432026,
"grad_norm": 1.3495534658432007,
"learning_rate": 0.0002,
"loss": 1.2253,
"step": 13320
},
{
"epoch": 0.8097436520471388,
"grad_norm": 1.482490062713623,
"learning_rate": 0.0002,
"loss": 1.2171,
"step": 13330
},
{
"epoch": 0.8103511116510752,
"grad_norm": 1.4726133346557617,
"learning_rate": 0.0002,
"loss": 1.1493,
"step": 13340
},
{
"epoch": 0.8109585712550116,
"grad_norm": 1.4320366382598877,
"learning_rate": 0.0002,
"loss": 1.2101,
"step": 13350
},
{
"epoch": 0.8115660308589479,
"grad_norm": 1.3902326822280884,
"learning_rate": 0.0002,
"loss": 1.2068,
"step": 13360
},
{
"epoch": 0.8121734904628842,
"grad_norm": 1.3399302959442139,
"learning_rate": 0.0002,
"loss": 1.0984,
"step": 13370
},
{
"epoch": 0.8127809500668206,
"grad_norm": 1.4275768995285034,
"learning_rate": 0.0002,
"loss": 1.1792,
"step": 13380
},
{
"epoch": 0.8133884096707569,
"grad_norm": 1.271705985069275,
"learning_rate": 0.0002,
"loss": 1.0776,
"step": 13390
},
{
"epoch": 0.8139958692746933,
"grad_norm": 1.4107437133789062,
"learning_rate": 0.0002,
"loss": 1.1963,
"step": 13400
},
{
"epoch": 0.8146033288786295,
"grad_norm": 1.2505677938461304,
"learning_rate": 0.0002,
"loss": 1.1869,
"step": 13410
},
{
"epoch": 0.8152107884825659,
"grad_norm": 1.4574933052062988,
"learning_rate": 0.0002,
"loss": 1.1719,
"step": 13420
},
{
"epoch": 0.8158182480865023,
"grad_norm": 1.4642574787139893,
"learning_rate": 0.0002,
"loss": 1.1641,
"step": 13430
},
{
"epoch": 0.8164257076904385,
"grad_norm": 1.4101893901824951,
"learning_rate": 0.0002,
"loss": 1.2693,
"step": 13440
},
{
"epoch": 0.8170331672943749,
"grad_norm": 1.8907462358474731,
"learning_rate": 0.0002,
"loss": 1.1996,
"step": 13450
},
{
"epoch": 0.8176406268983113,
"grad_norm": 1.5065134763717651,
"learning_rate": 0.0002,
"loss": 1.1823,
"step": 13460
},
{
"epoch": 0.8182480865022476,
"grad_norm": 1.4328252077102661,
"learning_rate": 0.0002,
"loss": 1.1492,
"step": 13470
},
{
"epoch": 0.8188555461061839,
"grad_norm": 1.3925843238830566,
"learning_rate": 0.0002,
"loss": 1.1919,
"step": 13480
},
{
"epoch": 0.8194630057101203,
"grad_norm": 1.5043162107467651,
"learning_rate": 0.0002,
"loss": 1.194,
"step": 13490
},
{
"epoch": 0.8200704653140566,
"grad_norm": 1.2992193698883057,
"learning_rate": 0.0002,
"loss": 1.194,
"step": 13500
},
{
"epoch": 0.820677924917993,
"grad_norm": 1.3942667245864868,
"learning_rate": 0.0002,
"loss": 1.1834,
"step": 13510
},
{
"epoch": 0.8212853845219293,
"grad_norm": 1.2020211219787598,
"learning_rate": 0.0002,
"loss": 1.1504,
"step": 13520
},
{
"epoch": 0.8218928441258656,
"grad_norm": 1.2666652202606201,
"learning_rate": 0.0002,
"loss": 1.1739,
"step": 13530
},
{
"epoch": 0.822500303729802,
"grad_norm": 1.597417950630188,
"learning_rate": 0.0002,
"loss": 1.2378,
"step": 13540
},
{
"epoch": 0.8231077633337383,
"grad_norm": 1.4688104391098022,
"learning_rate": 0.0002,
"loss": 1.1803,
"step": 13550
},
{
"epoch": 0.8237152229376746,
"grad_norm": 1.684959053993225,
"learning_rate": 0.0002,
"loss": 1.1498,
"step": 13560
},
{
"epoch": 0.824322682541611,
"grad_norm": 1.3847455978393555,
"learning_rate": 0.0002,
"loss": 1.1951,
"step": 13570
},
{
"epoch": 0.8249301421455473,
"grad_norm": 1.4115383625030518,
"learning_rate": 0.0002,
"loss": 1.1232,
"step": 13580
},
{
"epoch": 0.8255376017494837,
"grad_norm": 1.601866602897644,
"learning_rate": 0.0002,
"loss": 1.1711,
"step": 13590
},
{
"epoch": 0.82614506135342,
"grad_norm": 1.3733577728271484,
"learning_rate": 0.0002,
"loss": 1.2374,
"step": 13600
},
{
"epoch": 0.8267525209573563,
"grad_norm": 1.3552721738815308,
"learning_rate": 0.0002,
"loss": 1.0967,
"step": 13610
},
{
"epoch": 0.8273599805612927,
"grad_norm": 1.4282503128051758,
"learning_rate": 0.0002,
"loss": 1.243,
"step": 13620
},
{
"epoch": 0.827967440165229,
"grad_norm": 1.5548524856567383,
"learning_rate": 0.0002,
"loss": 1.1529,
"step": 13630
},
{
"epoch": 0.8285748997691653,
"grad_norm": 1.4593840837478638,
"learning_rate": 0.0002,
"loss": 1.223,
"step": 13640
},
{
"epoch": 0.8291823593731017,
"grad_norm": 1.7803534269332886,
"learning_rate": 0.0002,
"loss": 1.1564,
"step": 13650
},
{
"epoch": 0.8297898189770381,
"grad_norm": 1.487880825996399,
"learning_rate": 0.0002,
"loss": 1.2527,
"step": 13660
},
{
"epoch": 0.8303972785809743,
"grad_norm": 1.5033938884735107,
"learning_rate": 0.0002,
"loss": 1.1874,
"step": 13670
},
{
"epoch": 0.8310047381849107,
"grad_norm": 1.6714093685150146,
"learning_rate": 0.0002,
"loss": 1.2239,
"step": 13680
},
{
"epoch": 0.8316121977888471,
"grad_norm": 1.5925745964050293,
"learning_rate": 0.0002,
"loss": 1.1186,
"step": 13690
},
{
"epoch": 0.8322196573927834,
"grad_norm": 1.3945070505142212,
"learning_rate": 0.0002,
"loss": 1.2433,
"step": 13700
},
{
"epoch": 0.8328271169967197,
"grad_norm": 1.1743574142456055,
"learning_rate": 0.0002,
"loss": 1.187,
"step": 13710
},
{
"epoch": 0.833434576600656,
"grad_norm": 1.4794617891311646,
"learning_rate": 0.0002,
"loss": 1.2547,
"step": 13720
},
{
"epoch": 0.8340420362045924,
"grad_norm": 1.416395902633667,
"learning_rate": 0.0002,
"loss": 1.2056,
"step": 13730
},
{
"epoch": 0.8346494958085288,
"grad_norm": 1.6419976949691772,
"learning_rate": 0.0002,
"loss": 1.1964,
"step": 13740
},
{
"epoch": 0.835256955412465,
"grad_norm": 1.660413384437561,
"learning_rate": 0.0002,
"loss": 1.2177,
"step": 13750
},
{
"epoch": 0.8358644150164014,
"grad_norm": 1.663995623588562,
"learning_rate": 0.0002,
"loss": 1.2605,
"step": 13760
},
{
"epoch": 0.8364718746203378,
"grad_norm": 1.3747984170913696,
"learning_rate": 0.0002,
"loss": 1.2038,
"step": 13770
},
{
"epoch": 0.837079334224274,
"grad_norm": 1.2505345344543457,
"learning_rate": 0.0002,
"loss": 1.2531,
"step": 13780
},
{
"epoch": 0.8376867938282104,
"grad_norm": 1.347054123878479,
"learning_rate": 0.0002,
"loss": 1.2502,
"step": 13790
},
{
"epoch": 0.8382942534321468,
"grad_norm": 1.6971262693405151,
"learning_rate": 0.0002,
"loss": 1.0737,
"step": 13800
},
{
"epoch": 0.8389017130360831,
"grad_norm": 1.3297561407089233,
"learning_rate": 0.0002,
"loss": 1.1958,
"step": 13810
},
{
"epoch": 0.8395091726400195,
"grad_norm": 1.3878462314605713,
"learning_rate": 0.0002,
"loss": 1.1701,
"step": 13820
},
{
"epoch": 0.8401166322439558,
"grad_norm": 1.5290884971618652,
"learning_rate": 0.0002,
"loss": 1.2212,
"step": 13830
},
{
"epoch": 0.8407240918478921,
"grad_norm": 1.4563590288162231,
"learning_rate": 0.0002,
"loss": 1.1743,
"step": 13840
},
{
"epoch": 0.8413315514518285,
"grad_norm": 1.5538057088851929,
"learning_rate": 0.0002,
"loss": 1.1825,
"step": 13850
},
{
"epoch": 0.8419390110557647,
"grad_norm": 1.4252513647079468,
"learning_rate": 0.0002,
"loss": 1.1903,
"step": 13860
},
{
"epoch": 0.8425464706597011,
"grad_norm": 1.990596055984497,
"learning_rate": 0.0002,
"loss": 1.2232,
"step": 13870
},
{
"epoch": 0.8431539302636375,
"grad_norm": 1.522567868232727,
"learning_rate": 0.0002,
"loss": 1.2286,
"step": 13880
},
{
"epoch": 0.8437613898675738,
"grad_norm": 1.4994953870773315,
"learning_rate": 0.0002,
"loss": 1.249,
"step": 13890
},
{
"epoch": 0.8443688494715101,
"grad_norm": 1.4408576488494873,
"learning_rate": 0.0002,
"loss": 1.2164,
"step": 13900
},
{
"epoch": 0.8449763090754465,
"grad_norm": 1.6632212400436401,
"learning_rate": 0.0002,
"loss": 1.1432,
"step": 13910
},
{
"epoch": 0.8455837686793828,
"grad_norm": 1.4321329593658447,
"learning_rate": 0.0002,
"loss": 1.222,
"step": 13920
},
{
"epoch": 0.8461912282833192,
"grad_norm": 1.3293004035949707,
"learning_rate": 0.0002,
"loss": 1.1502,
"step": 13930
},
{
"epoch": 0.8467986878872555,
"grad_norm": 1.6593652963638306,
"learning_rate": 0.0002,
"loss": 1.2236,
"step": 13940
},
{
"epoch": 0.8474061474911918,
"grad_norm": 1.6421161890029907,
"learning_rate": 0.0002,
"loss": 1.2136,
"step": 13950
},
{
"epoch": 0.8480136070951282,
"grad_norm": 1.9198898077011108,
"learning_rate": 0.0002,
"loss": 1.2587,
"step": 13960
},
{
"epoch": 0.8486210666990646,
"grad_norm": 1.4113205671310425,
"learning_rate": 0.0002,
"loss": 1.2418,
"step": 13970
},
{
"epoch": 0.8492285263030008,
"grad_norm": 1.6185822486877441,
"learning_rate": 0.0002,
"loss": 1.2009,
"step": 13980
},
{
"epoch": 0.8498359859069372,
"grad_norm": 1.3802341222763062,
"learning_rate": 0.0002,
"loss": 1.1549,
"step": 13990
},
{
"epoch": 0.8504434455108735,
"grad_norm": 1.6448559761047363,
"learning_rate": 0.0002,
"loss": 1.1319,
"step": 14000
},
{
"epoch": 0.8510509051148099,
"grad_norm": 1.285009503364563,
"learning_rate": 0.0002,
"loss": 1.1371,
"step": 14010
},
{
"epoch": 0.8516583647187462,
"grad_norm": 1.503759503364563,
"learning_rate": 0.0002,
"loss": 1.2582,
"step": 14020
},
{
"epoch": 0.8522658243226825,
"grad_norm": 1.342805027961731,
"learning_rate": 0.0002,
"loss": 1.1629,
"step": 14030
},
{
"epoch": 0.8528732839266189,
"grad_norm": 1.5184295177459717,
"learning_rate": 0.0002,
"loss": 1.2234,
"step": 14040
},
{
"epoch": 0.8534807435305553,
"grad_norm": 1.5007649660110474,
"learning_rate": 0.0002,
"loss": 1.2142,
"step": 14050
},
{
"epoch": 0.8540882031344915,
"grad_norm": 1.497629165649414,
"learning_rate": 0.0002,
"loss": 1.1582,
"step": 14060
},
{
"epoch": 0.8546956627384279,
"grad_norm": 1.5224816799163818,
"learning_rate": 0.0002,
"loss": 1.211,
"step": 14070
},
{
"epoch": 0.8553031223423643,
"grad_norm": 1.434489369392395,
"learning_rate": 0.0002,
"loss": 1.177,
"step": 14080
},
{
"epoch": 0.8559105819463005,
"grad_norm": 1.7039152383804321,
"learning_rate": 0.0002,
"loss": 1.2798,
"step": 14090
},
{
"epoch": 0.8565180415502369,
"grad_norm": 1.3931708335876465,
"learning_rate": 0.0002,
"loss": 1.2218,
"step": 14100
},
{
"epoch": 0.8571255011541733,
"grad_norm": 1.5291060209274292,
"learning_rate": 0.0002,
"loss": 1.0807,
"step": 14110
},
{
"epoch": 0.8577329607581096,
"grad_norm": 1.3249680995941162,
"learning_rate": 0.0002,
"loss": 1.1788,
"step": 14120
},
{
"epoch": 0.8583404203620459,
"grad_norm": 1.4922311305999756,
"learning_rate": 0.0002,
"loss": 1.1544,
"step": 14130
},
{
"epoch": 0.8589478799659822,
"grad_norm": 1.4898104667663574,
"learning_rate": 0.0002,
"loss": 1.2412,
"step": 14140
},
{
"epoch": 0.8595553395699186,
"grad_norm": 1.8011696338653564,
"learning_rate": 0.0002,
"loss": 1.1594,
"step": 14150
},
{
"epoch": 0.860162799173855,
"grad_norm": 1.5143295526504517,
"learning_rate": 0.0002,
"loss": 1.2162,
"step": 14160
},
{
"epoch": 0.8607702587777912,
"grad_norm": 1.3194308280944824,
"learning_rate": 0.0002,
"loss": 1.2198,
"step": 14170
},
{
"epoch": 0.8613777183817276,
"grad_norm": 1.537085771560669,
"learning_rate": 0.0002,
"loss": 1.1495,
"step": 14180
},
{
"epoch": 0.861985177985664,
"grad_norm": 1.345809817314148,
"learning_rate": 0.0002,
"loss": 1.2513,
"step": 14190
},
{
"epoch": 0.8625926375896003,
"grad_norm": 1.3840277194976807,
"learning_rate": 0.0002,
"loss": 1.2186,
"step": 14200
},
{
"epoch": 0.8632000971935366,
"grad_norm": 1.876142144203186,
"learning_rate": 0.0002,
"loss": 1.213,
"step": 14210
},
{
"epoch": 0.863807556797473,
"grad_norm": 2.500981330871582,
"learning_rate": 0.0002,
"loss": 1.2617,
"step": 14220
},
{
"epoch": 0.8644150164014093,
"grad_norm": 1.5020242929458618,
"learning_rate": 0.0002,
"loss": 1.2177,
"step": 14230
},
{
"epoch": 0.8650224760053457,
"grad_norm": 1.2716339826583862,
"learning_rate": 0.0002,
"loss": 1.1125,
"step": 14240
},
{
"epoch": 0.865629935609282,
"grad_norm": 1.7657488584518433,
"learning_rate": 0.0002,
"loss": 1.2233,
"step": 14250
},
{
"epoch": 0.8662373952132183,
"grad_norm": 1.4088470935821533,
"learning_rate": 0.0002,
"loss": 1.2144,
"step": 14260
},
{
"epoch": 0.8668448548171547,
"grad_norm": 1.5708025693893433,
"learning_rate": 0.0002,
"loss": 1.1763,
"step": 14270
},
{
"epoch": 0.8674523144210909,
"grad_norm": 1.4343926906585693,
"learning_rate": 0.0002,
"loss": 1.2017,
"step": 14280
},
{
"epoch": 0.8680597740250273,
"grad_norm": 1.3481239080429077,
"learning_rate": 0.0002,
"loss": 1.1584,
"step": 14290
},
{
"epoch": 0.8686672336289637,
"grad_norm": 1.3848117589950562,
"learning_rate": 0.0002,
"loss": 1.2323,
"step": 14300
},
{
"epoch": 0.8692746932329,
"grad_norm": 1.6503533124923706,
"learning_rate": 0.0002,
"loss": 1.2011,
"step": 14310
},
{
"epoch": 0.8698821528368363,
"grad_norm": 1.3893461227416992,
"learning_rate": 0.0002,
"loss": 1.1885,
"step": 14320
},
{
"epoch": 0.8704896124407727,
"grad_norm": 1.670220136642456,
"learning_rate": 0.0002,
"loss": 1.2753,
"step": 14330
},
{
"epoch": 0.871097072044709,
"grad_norm": 1.4308701753616333,
"learning_rate": 0.0002,
"loss": 1.1954,
"step": 14340
},
{
"epoch": 0.8717045316486454,
"grad_norm": 1.6356762647628784,
"learning_rate": 0.0002,
"loss": 1.1151,
"step": 14350
},
{
"epoch": 0.8723119912525817,
"grad_norm": 1.4090251922607422,
"learning_rate": 0.0002,
"loss": 1.1629,
"step": 14360
},
{
"epoch": 0.872919450856518,
"grad_norm": 1.4654072523117065,
"learning_rate": 0.0002,
"loss": 1.1324,
"step": 14370
},
{
"epoch": 0.8735269104604544,
"grad_norm": 1.4703407287597656,
"learning_rate": 0.0002,
"loss": 1.1466,
"step": 14380
},
{
"epoch": 0.8741343700643908,
"grad_norm": 1.6721851825714111,
"learning_rate": 0.0002,
"loss": 1.1348,
"step": 14390
},
{
"epoch": 0.874741829668327,
"grad_norm": 1.5856989622116089,
"learning_rate": 0.0002,
"loss": 1.2488,
"step": 14400
},
{
"epoch": 0.8753492892722634,
"grad_norm": 1.6506541967391968,
"learning_rate": 0.0002,
"loss": 1.2272,
"step": 14410
},
{
"epoch": 0.8759567488761998,
"grad_norm": 1.6315511465072632,
"learning_rate": 0.0002,
"loss": 1.2255,
"step": 14420
},
{
"epoch": 0.876564208480136,
"grad_norm": 1.3775477409362793,
"learning_rate": 0.0002,
"loss": 1.2079,
"step": 14430
},
{
"epoch": 0.8771716680840724,
"grad_norm": 1.3681683540344238,
"learning_rate": 0.0002,
"loss": 1.2056,
"step": 14440
},
{
"epoch": 0.8777791276880087,
"grad_norm": 1.6075377464294434,
"learning_rate": 0.0002,
"loss": 1.105,
"step": 14450
},
{
"epoch": 0.8783865872919451,
"grad_norm": 1.5112967491149902,
"learning_rate": 0.0002,
"loss": 1.211,
"step": 14460
},
{
"epoch": 0.8789940468958815,
"grad_norm": 1.3677444458007812,
"learning_rate": 0.0002,
"loss": 1.1737,
"step": 14470
},
{
"epoch": 0.8796015064998177,
"grad_norm": 1.5045770406723022,
"learning_rate": 0.0002,
"loss": 1.1354,
"step": 14480
},
{
"epoch": 0.8802089661037541,
"grad_norm": 1.5426734685897827,
"learning_rate": 0.0002,
"loss": 1.2089,
"step": 14490
},
{
"epoch": 0.8808164257076905,
"grad_norm": 1.314050316810608,
"learning_rate": 0.0002,
"loss": 1.218,
"step": 14500
}
],
"logging_steps": 10,
"max_steps": 49386,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 8.029197351833076e+18,
"train_batch_size": 3,
"trial_name": null,
"trial_params": null
}