{
  "best_metric": 0.3638736605644226,
  "best_model_checkpoint": "miner_id_24/checkpoint-650",
  "epoch": 1.3579461065138978,
  "eval_steps": 50,
  "global_step": 800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001697432633142372,
      "grad_norm": 0.22029195725917816,
      "learning_rate": 2.9999999999999997e-05,
      "loss": 1.2479,
      "step": 1
    },
    {
      "epoch": 0.001697432633142372,
      "eval_loss": 0.6322442293167114,
      "eval_runtime": 65.6226,
      "eval_samples_per_second": 2.941,
      "eval_steps_per_second": 2.941,
      "step": 1
    },
    {
      "epoch": 0.003394865266284744,
      "grad_norm": 0.24590638279914856,
      "learning_rate": 5.9999999999999995e-05,
      "loss": 1.2438,
      "step": 2
    },
    {
      "epoch": 0.005092297899427116,
      "grad_norm": 0.23055589199066162,
      "learning_rate": 8.999999999999999e-05,
      "loss": 1.0916,
      "step": 3
    },
    {
      "epoch": 0.006789730532569488,
      "grad_norm": 0.21309782564640045,
      "learning_rate": 0.00011999999999999999,
      "loss": 1.0918,
      "step": 4
    },
    {
      "epoch": 0.008487163165711862,
      "grad_norm": 0.21661481261253357,
      "learning_rate": 0.00015,
      "loss": 0.8365,
      "step": 5
    },
    {
      "epoch": 0.010184595798854232,
      "grad_norm": 0.2395806461572647,
      "learning_rate": 0.00017999999999999998,
      "loss": 1.0087,
      "step": 6
    },
    {
      "epoch": 0.011882028431996604,
      "grad_norm": 0.25481849908828735,
      "learning_rate": 0.00020999999999999998,
      "loss": 0.9591,
      "step": 7
    },
    {
      "epoch": 0.013579461065138977,
      "grad_norm": 0.22890186309814453,
      "learning_rate": 0.00023999999999999998,
      "loss": 0.6067,
      "step": 8
    },
    {
      "epoch": 0.015276893698281349,
      "grad_norm": 0.2807515263557434,
      "learning_rate": 0.00027,
      "loss": 0.6593,
      "step": 9
    },
    {
      "epoch": 0.016974326331423723,
      "grad_norm": 0.3522188067436218,
      "learning_rate": 0.0003,
      "loss": 0.847,
      "step": 10
    },
    {
      "epoch": 0.018671758964566094,
      "grad_norm": 0.31385794281959534,
      "learning_rate": 0.0002999992447508326,
      "loss": 0.7329,
      "step": 11
    },
    {
      "epoch": 0.020369191597708464,
      "grad_norm": 0.5269914269447327,
      "learning_rate": 0.0002999969790109359,
      "loss": 0.8751,
      "step": 12
    },
    {
      "epoch": 0.02206662423085084,
      "grad_norm": 0.2571907341480255,
      "learning_rate": 0.0002999932028031259,
      "loss": 0.6175,
      "step": 13
    },
    {
      "epoch": 0.02376405686399321,
      "grad_norm": 0.2777341306209564,
      "learning_rate": 0.0002999879161654289,
      "loss": 0.7442,
      "step": 14
    },
    {
      "epoch": 0.025461489497135583,
      "grad_norm": 0.19830763339996338,
      "learning_rate": 0.00029998111915108125,
      "loss": 0.4802,
      "step": 15
    },
    {
      "epoch": 0.027158922130277954,
      "grad_norm": 0.21772292256355286,
      "learning_rate": 0.00029997281182852885,
      "loss": 0.527,
      "step": 16
    },
    {
      "epoch": 0.028856354763420328,
      "grad_norm": 0.18149510025978088,
      "learning_rate": 0.00029996299428142637,
      "loss": 0.4808,
      "step": 17
    },
    {
      "epoch": 0.030553787396562698,
      "grad_norm": 0.22295671701431274,
      "learning_rate": 0.00029995166660863636,
      "loss": 0.5326,
      "step": 18
    },
    {
      "epoch": 0.03225122002970507,
      "grad_norm": 0.24240700900554657,
      "learning_rate": 0.0002999388289242284,
      "loss": 0.5986,
      "step": 19
    },
    {
      "epoch": 0.033948652662847446,
      "grad_norm": 0.21365882456302643,
      "learning_rate": 0.00029992448135747777,
      "loss": 0.4533,
      "step": 20
    },
    {
      "epoch": 0.03564608529598982,
      "grad_norm": 0.13466337323188782,
      "learning_rate": 0.00029990862405286433,
      "loss": 0.2177,
      "step": 21
    },
    {
      "epoch": 0.03734351792913219,
      "grad_norm": 0.2027832567691803,
      "learning_rate": 0.00029989125717007104,
      "loss": 0.4166,
      "step": 22
    },
    {
      "epoch": 0.03904095056227456,
      "grad_norm": 0.14215879142284393,
      "learning_rate": 0.0002998723808839821,
      "loss": 0.2648,
      "step": 23
    },
    {
      "epoch": 0.04073838319541693,
      "grad_norm": 0.10996250063180923,
      "learning_rate": 0.00029985199538468154,
      "loss": 0.1554,
      "step": 24
    },
    {
      "epoch": 0.042435815828559306,
      "grad_norm": 0.10510318726301193,
      "learning_rate": 0.0002998301008774512,
      "loss": 0.0231,
      "step": 25
    },
    {
      "epoch": 0.04413324846170168,
      "grad_norm": 0.10925798863172531,
      "learning_rate": 0.00029980669758276836,
      "loss": 0.1159,
      "step": 26
    },
    {
      "epoch": 0.04583068109484405,
      "grad_norm": 0.06849532574415207,
      "learning_rate": 0.0002997817857363041,
      "loss": 0.039,
      "step": 27
    },
    {
      "epoch": 0.04752811372798642,
      "grad_norm": 0.0729198008775711,
      "learning_rate": 0.0002997553655889203,
      "loss": 0.0084,
      "step": 28
    },
    {
      "epoch": 0.049225546361128796,
      "grad_norm": 0.10013315826654434,
      "learning_rate": 0.00029972743740666765,
      "loss": 0.0985,
      "step": 29
    },
    {
      "epoch": 0.050922978994271166,
      "grad_norm": 0.047209128737449646,
      "learning_rate": 0.00029969800147078263,
      "loss": 0.0234,
      "step": 30
    },
    {
      "epoch": 0.05262041162741354,
      "grad_norm": 0.015719635412096977,
      "learning_rate": 0.00029966705807768474,
      "loss": 0.0014,
      "step": 31
    },
    {
      "epoch": 0.05431784426055591,
      "grad_norm": 0.032827939838171005,
      "learning_rate": 0.0002996346075389736,
      "loss": 0.005,
      "step": 32
    },
    {
      "epoch": 0.056015276893698285,
      "grad_norm": 0.021124735474586487,
      "learning_rate": 0.0002996006501814258,
      "loss": 0.0012,
      "step": 33
    },
    {
      "epoch": 0.057712709526840655,
      "grad_norm": 0.014757171273231506,
      "learning_rate": 0.0002995651863469916,
      "loss": 0.0019,
      "step": 34
    },
    {
      "epoch": 0.059410142159983026,
      "grad_norm": 0.013099136762320995,
      "learning_rate": 0.00029952821639279135,
      "loss": 0.0012,
      "step": 35
    },
    {
      "epoch": 0.061107574793125397,
      "grad_norm": 0.02350614406168461,
      "learning_rate": 0.0002994897406911121,
      "loss": 0.0032,
      "step": 36
    },
    {
      "epoch": 0.06280500742626777,
      "grad_norm": 0.01101439818739891,
      "learning_rate": 0.0002994497596294037,
      "loss": 0.0006,
      "step": 37
    },
    {
      "epoch": 0.06450244005941014,
      "grad_norm": 0.08192350715398788,
      "learning_rate": 0.00029940827361027504,
      "loss": 0.0046,
      "step": 38
    },
    {
      "epoch": 0.06619987269255251,
      "grad_norm": 0.03598169609904289,
      "learning_rate": 0.0002993652830514899,
      "loss": 0.0027,
      "step": 39
    },
    {
      "epoch": 0.06789730532569489,
      "grad_norm": 0.045441944152116776,
      "learning_rate": 0.0002993207883859627,
      "loss": 0.0076,
      "step": 40
    },
    {
      "epoch": 0.06959473795883726,
      "grad_norm": 0.048115745186805725,
      "learning_rate": 0.00029927479006175417,
      "loss": 0.0028,
      "step": 41
    },
    {
      "epoch": 0.07129217059197963,
      "grad_norm": 0.027597462758421898,
      "learning_rate": 0.00029922728854206703,
      "loss": 0.002,
      "step": 42
    },
    {
      "epoch": 0.072989603225122,
      "grad_norm": 0.0355045348405838,
      "learning_rate": 0.00029917828430524096,
      "loss": 0.0013,
      "step": 43
    },
    {
      "epoch": 0.07468703585826438,
      "grad_norm": 0.0412307046353817,
      "learning_rate": 0.00029912777784474814,
      "loss": 0.003,
      "step": 44
    },
    {
      "epoch": 0.07638446849140675,
      "grad_norm": 0.051867593079805374,
      "learning_rate": 0.0002990757696691881,
      "loss": 0.0039,
      "step": 45
    },
    {
      "epoch": 0.07808190112454912,
      "grad_norm": 0.06054271385073662,
      "learning_rate": 0.00029902226030228247,
      "loss": 0.0019,
      "step": 46
    },
    {
      "epoch": 0.07977933375769149,
      "grad_norm": 0.05313190817832947,
      "learning_rate": 0.00029896725028287014,
      "loss": 0.0041,
      "step": 47
    },
    {
      "epoch": 0.08147676639083386,
      "grad_norm": 0.18755203485488892,
      "learning_rate": 0.00029891074016490126,
      "loss": 0.0036,
      "step": 48
    },
    {
      "epoch": 0.08317419902397624,
      "grad_norm": 0.05305561423301697,
      "learning_rate": 0.00029885273051743214,
      "loss": 0.0051,
      "step": 49
    },
    {
      "epoch": 0.08487163165711861,
      "grad_norm": 0.08674199134111404,
      "learning_rate": 0.00029879322192461925,
      "loss": 0.012,
      "step": 50
    },
    {
      "epoch": 0.08487163165711861,
      "eval_loss": 0.4556533694267273,
      "eval_runtime": 65.6522,
      "eval_samples_per_second": 2.94,
      "eval_steps_per_second": 2.94,
      "step": 50
    },
    {
      "epoch": 0.08656906429026098,
      "grad_norm": 0.5487300753593445,
      "learning_rate": 0.00029873221498571354,
      "loss": 1.0487,
      "step": 51
    },
    {
      "epoch": 0.08826649692340335,
      "grad_norm": 0.27697888016700745,
      "learning_rate": 0.00029866971031505417,
      "loss": 0.7504,
      "step": 52
    },
    {
      "epoch": 0.08996392955654572,
      "grad_norm": 0.2856276035308838,
      "learning_rate": 0.00029860570854206244,
      "loss": 1.0179,
      "step": 53
    },
    {
      "epoch": 0.0916613621896881,
      "grad_norm": 0.43166583776474,
      "learning_rate": 0.0002985402103112355,
      "loss": 0.7947,
      "step": 54
    },
    {
      "epoch": 0.09335879482283047,
      "grad_norm": 0.26107504963874817,
      "learning_rate": 0.0002984732162821399,
      "loss": 0.9617,
      "step": 55
    },
    {
      "epoch": 0.09505622745597284,
      "grad_norm": 0.20953023433685303,
      "learning_rate": 0.00029840472712940465,
      "loss": 0.7355,
      "step": 56
    },
    {
      "epoch": 0.0967536600891152,
      "grad_norm": 0.22740969061851501,
      "learning_rate": 0.0002983347435427148,
      "loss": 0.839,
      "step": 57
    },
    {
      "epoch": 0.09845109272225759,
      "grad_norm": 0.24932198226451874,
      "learning_rate": 0.00029826326622680433,
      "loss": 0.775,
      "step": 58
    },
    {
      "epoch": 0.10014852535539996,
      "grad_norm": 0.260080486536026,
      "learning_rate": 0.000298190295901449,
      "loss": 0.9275,
      "step": 59
    },
    {
      "epoch": 0.10184595798854233,
      "grad_norm": 0.22443322837352753,
      "learning_rate": 0.00029811583330145914,
      "loss": 0.796,
      "step": 60
    },
    {
      "epoch": 0.1035433906216847,
      "grad_norm": 0.2286413460969925,
      "learning_rate": 0.0002980398791766723,
      "loss": 0.6944,
      "step": 61
    },
    {
      "epoch": 0.10524082325482707,
      "grad_norm": 0.18511152267456055,
      "learning_rate": 0.00029796243429194575,
      "loss": 0.4977,
      "step": 62
    },
    {
      "epoch": 0.10693825588796944,
      "grad_norm": 0.745172917842865,
      "learning_rate": 0.00029788349942714854,
      "loss": 0.4391,
      "step": 63
    },
    {
      "epoch": 0.10863568852111181,
      "grad_norm": 0.27929672598838806,
      "learning_rate": 0.0002978030753771539,
      "loss": 0.8052,
      "step": 64
    },
    {
      "epoch": 0.11033312115425419,
      "grad_norm": 0.3147849440574646,
      "learning_rate": 0.0002977211629518312,
      "loss": 1.0817,
      "step": 65
    },
    {
      "epoch": 0.11203055378739657,
      "grad_norm": 0.22278442978858948,
      "learning_rate": 0.00029763776297603756,
      "loss": 0.5924,
      "step": 66
    },
    {
      "epoch": 0.11372798642053894,
      "grad_norm": 0.1911861002445221,
      "learning_rate": 0.0002975528762896098,
      "loss": 0.3729,
      "step": 67
    },
    {
      "epoch": 0.11542541905368131,
      "grad_norm": 0.21224386990070343,
      "learning_rate": 0.000297466503747356,
      "loss": 0.569,
      "step": 68
    },
    {
      "epoch": 0.11712285168682368,
      "grad_norm": 0.25374075770378113,
      "learning_rate": 0.0002973786462190466,
      "loss": 0.8077,
      "step": 69
    },
    {
      "epoch": 0.11882028431996605,
      "grad_norm": 0.16218560934066772,
      "learning_rate": 0.00029728930458940595,
      "loss": 0.3303,
      "step": 70
    },
    {
      "epoch": 0.12051771695310842,
      "grad_norm": 0.18142607808113098,
      "learning_rate": 0.0002971984797581034,
      "loss": 0.4428,
      "step": 71
    },
    {
      "epoch": 0.12221514958625079,
      "grad_norm": 0.22838933765888214,
      "learning_rate": 0.0002971061726397438,
      "loss": 0.6141,
      "step": 72
    },
    {
      "epoch": 0.12391258221939316,
      "grad_norm": 0.16390271484851837,
      "learning_rate": 0.00029701238416385896,
      "loss": 0.3882,
      "step": 73
    },
    {
      "epoch": 0.12561001485253553,
      "grad_norm": 0.07758953422307968,
      "learning_rate": 0.00029691711527489776,
      "loss": 0.0949,
      "step": 74
    },
    {
      "epoch": 0.1273074474856779,
      "grad_norm": 0.10925949364900589,
      "learning_rate": 0.0002968203669322168,
      "loss": 0.1791,
      "step": 75
    },
    {
      "epoch": 0.12900488011882028,
      "grad_norm": 0.021430950611829758,
      "learning_rate": 0.0002967221401100708,
      "loss": 0.003,
      "step": 76
    },
    {
      "epoch": 0.13070231275196265,
      "grad_norm": 0.02433953993022442,
      "learning_rate": 0.0002966224357976029,
      "loss": 0.0021,
      "step": 77
    },
    {
      "epoch": 0.13239974538510502,
      "grad_norm": 0.11294607818126678,
      "learning_rate": 0.0002965212549988342,
      "loss": 0.0765,
      "step": 78
    },
    {
      "epoch": 0.1340971780182474,
      "grad_norm": 0.026264643296599388,
      "learning_rate": 0.0002964185987326545,
      "loss": 0.0026,
      "step": 79
    },
    {
      "epoch": 0.13579461065138979,
      "grad_norm": 0.0762176588177681,
      "learning_rate": 0.00029631446803281107,
      "loss": 0.0394,
      "step": 80
    },
    {
      "epoch": 0.13749204328453216,
      "grad_norm": 0.03476507589221001,
      "learning_rate": 0.00029620886394789885,
      "loss": 0.005,
      "step": 81
    },
    {
      "epoch": 0.13918947591767453,
      "grad_norm": 0.03176519274711609,
      "learning_rate": 0.00029610178754135,
      "loss": 0.0038,
      "step": 82
    },
    {
      "epoch": 0.1408869085508169,
      "grad_norm": 0.006855768151581287,
      "learning_rate": 0.00029599323989142263,
      "loss": 0.0004,
      "step": 83
    },
    {
      "epoch": 0.14258434118395927,
      "grad_norm": 0.013548546470701694,
      "learning_rate": 0.00029588322209119037,
      "loss": 0.0007,
      "step": 84
    },
    {
      "epoch": 0.14428177381710164,
      "grad_norm": 0.032073307782411575,
      "learning_rate": 0.00029577173524853123,
      "loss": 0.0023,
      "step": 85
    },
    {
      "epoch": 0.145979206450244,
      "grad_norm": 0.06568052619695663,
      "learning_rate": 0.0002956587804861165,
      "loss": 0.003,
      "step": 86
    },
    {
      "epoch": 0.14767663908338638,
      "grad_norm": 0.017053432762622833,
      "learning_rate": 0.0002955443589413994,
      "loss": 0.0011,
      "step": 87
    },
    {
      "epoch": 0.14937407171652875,
      "grad_norm": 0.027703823521733284,
      "learning_rate": 0.0002954284717666036,
      "loss": 0.0011,
      "step": 88
    },
    {
      "epoch": 0.15107150434967112,
      "grad_norm": 0.028447195887565613,
      "learning_rate": 0.00029531112012871175,
      "loss": 0.002,
      "step": 89
    },
    {
      "epoch": 0.1527689369828135,
      "grad_norm": 0.051351480185985565,
      "learning_rate": 0.0002951923052094534,
      "loss": 0.0023,
      "step": 90
    },
    {
      "epoch": 0.15446636961595586,
      "grad_norm": 0.04188869521021843,
      "learning_rate": 0.0002950720282052936,
      "loss": 0.0022,
      "step": 91
    },
    {
      "epoch": 0.15616380224909823,
      "grad_norm": 0.03857725113630295,
      "learning_rate": 0.00029495029032742024,
      "loss": 0.0013,
      "step": 92
    },
    {
      "epoch": 0.1578612348822406,
      "grad_norm": 0.030291402712464333,
      "learning_rate": 0.0002948270928017326,
      "loss": 0.0015,
      "step": 93
    },
    {
      "epoch": 0.15955866751538297,
      "grad_norm": 0.05515044555068016,
      "learning_rate": 0.00029470243686882837,
      "loss": 0.0008,
      "step": 94
    },
    {
      "epoch": 0.16125610014852534,
      "grad_norm": 0.0681704506278038,
      "learning_rate": 0.00029457632378399127,
      "loss": 0.0016,
      "step": 95
    },
    {
      "epoch": 0.16295353278166771,
      "grad_norm": 0.17659763991832733,
      "learning_rate": 0.0002944487548171788,
      "loss": 0.0015,
      "step": 96
    },
    {
      "epoch": 0.1646509654148101,
      "grad_norm": 0.01778286136686802,
      "learning_rate": 0.00029431973125300907,
      "loss": 0.0003,
      "step": 97
    },
    {
      "epoch": 0.16634839804795248,
      "grad_norm": 0.052063290029764175,
      "learning_rate": 0.0002941892543907478,
      "loss": 0.0042,
      "step": 98
    },
    {
      "epoch": 0.16804583068109485,
      "grad_norm": 0.08560975641012192,
      "learning_rate": 0.00029405732554429564,
      "loss": 0.003,
      "step": 99
    },
    {
      "epoch": 0.16974326331423722,
      "grad_norm": 0.07182486355304718,
      "learning_rate": 0.0002939239460421746,
      "loss": 0.0091,
      "step": 100
    },
    {
      "epoch": 0.16974326331423722,
      "eval_loss": 0.4275517165660858,
      "eval_runtime": 65.6527,
      "eval_samples_per_second": 2.94,
      "eval_steps_per_second": 2.94,
      "step": 100
    },
    {
      "epoch": 0.1714406959473796,
      "grad_norm": 0.33816632628440857,
      "learning_rate": 0.0002937891172275147,
      "loss": 0.6861,
      "step": 101
    },
    {
      "epoch": 0.17313812858052197,
      "grad_norm": 0.24974839389324188,
      "learning_rate": 0.00029365284045804077,
      "loss": 0.7581,
      "step": 102
    },
    {
      "epoch": 0.17483556121366434,
      "grad_norm": 0.389892041683197,
      "learning_rate": 0.00029351511710605825,
      "loss": 0.8517,
      "step": 103
    },
    {
      "epoch": 0.1765329938468067,
      "grad_norm": 0.21865390241146088,
      "learning_rate": 0.0002933759485584397,
      "loss": 0.7927,
      "step": 104
    },
    {
      "epoch": 0.17823042647994908,
      "grad_norm": 0.2867435812950134,
      "learning_rate": 0.00029323533621661106,
      "loss": 0.8931,
      "step": 105
    },
    {
      "epoch": 0.17992785911309145,
      "grad_norm": 0.19504129886627197,
      "learning_rate": 0.0002930932814965369,
      "loss": 0.5056,
      "step": 106
    },
    {
      "epoch": 0.18162529174623382,
      "grad_norm": 0.2704707980155945,
      "learning_rate": 0.00029294978582870666,
      "loss": 0.7927,
      "step": 107
    },
    {
      "epoch": 0.1833227243793762,
      "grad_norm": 0.20052890479564667,
      "learning_rate": 0.0002928048506581202,
      "loss": 0.6507,
      "step": 108
    },
    {
      "epoch": 0.18502015701251856,
      "grad_norm": 0.20652854442596436,
      "learning_rate": 0.00029265847744427303,
      "loss": 0.6928,
      "step": 109
    },
    {
      "epoch": 0.18671758964566093,
      "grad_norm": 0.2864634096622467,
      "learning_rate": 0.00029251066766114176,
      "loss": 1.0405,
      "step": 110
    },
    {
      "epoch": 0.1884150222788033,
      "grad_norm": 0.2000068873167038,
      "learning_rate": 0.0002923614227971694,
      "loss": 0.5787,
      "step": 111
    },
    {
      "epoch": 0.19011245491194567,
      "grad_norm": 0.16141685843467712,
      "learning_rate": 0.0002922107443552499,
      "loss": 0.4154,
      "step": 112
    },
    {
      "epoch": 0.19180988754508804,
      "grad_norm": 0.2669724225997925,
      "learning_rate": 0.00029205863385271363,
      "loss": 0.9767,
      "step": 113
    },
    {
      "epoch": 0.1935073201782304,
      "grad_norm": 0.7105275988578796,
      "learning_rate": 0.00029190509282131153,
      "loss": 0.6763,
      "step": 114
    },
    {
      "epoch": 0.1952047528113728,
      "grad_norm": 0.23595799505710602,
      "learning_rate": 0.00029175012280720024,
      "loss": 0.6856,
      "step": 115
    },
    {
      "epoch": 0.19690218544451518,
      "grad_norm": 0.23921440541744232,
      "learning_rate": 0.00029159372537092596,
      "loss": 0.7075,
      "step": 116
    },
    {
      "epoch": 0.19859961807765755,
      "grad_norm": 0.21551960706710815,
      "learning_rate": 0.0002914359020874092,
      "loss": 0.6004,
      "step": 117
    },
    {
      "epoch": 0.20029705071079992,
      "grad_norm": 0.20390333235263824,
      "learning_rate": 0.0002912766545459287,
      "loss": 0.57,
      "step": 118
    },
    {
      "epoch": 0.2019944833439423,
      "grad_norm": 0.15612931549549103,
      "learning_rate": 0.0002911159843501053,
      "loss": 0.2055,
      "step": 119
    },
    {
      "epoch": 0.20369191597708466,
      "grad_norm": 0.17460142076015472,
      "learning_rate": 0.0002909538931178862,
      "loss": 0.371,
      "step": 120
    },
    {
      "epoch": 0.20538934861022703,
      "grad_norm": 0.18853937089443207,
      "learning_rate": 0.00029079038248152835,
      "loss": 0.4252,
      "step": 121
    },
    {
      "epoch": 0.2070867812433694,
      "grad_norm": 0.14426594972610474,
      "learning_rate": 0.0002906254540875819,
      "loss": 0.2658,
      "step": 122
    },
    {
      "epoch": 0.20878421387651178,
      "grad_norm": 0.16134832799434662,
      "learning_rate": 0.0002904591095968741,
      "loss": 0.297,
      "step": 123
    },
    {
      "epoch": 0.21048164650965415,
      "grad_norm": 0.3797933757305145,
      "learning_rate": 0.00029029135068449195,
      "loss": 0.2837,
      "step": 124
    },
    {
      "epoch": 0.21217907914279652,
      "grad_norm": 0.038109440356492996,
      "learning_rate": 0.000290122179039766,
      "loss": 0.0149,
      "step": 125
    },
    {
      "epoch": 0.2138765117759389,
      "grad_norm": 0.11762002855539322,
      "learning_rate": 0.00028995159636625276,
      "loss": 0.0684,
      "step": 126
    },
    {
      "epoch": 0.21557394440908126,
      "grad_norm": 0.02665620669722557,
      "learning_rate": 0.00028977960438171784,
      "loss": 0.0044,
      "step": 127
    },
    {
      "epoch": 0.21727137704222363,
      "grad_norm": 0.03071051463484764,
      "learning_rate": 0.0002896062048181186,
      "loss": 0.0018,
      "step": 128
    },
    {
      "epoch": 0.218968809675366,
      "grad_norm": 0.009578248485922813,
      "learning_rate": 0.00028943139942158683,
      "loss": 0.0004,
      "step": 129
    },
    {
      "epoch": 0.22066624230850837,
      "grad_norm": 0.0018459237180650234,
      "learning_rate": 0.0002892551899524109,
      "loss": 0.0001,
      "step": 130
    },
    {
      "epoch": 0.22236367494165074,
      "grad_norm": 0.09641125798225403,
      "learning_rate": 0.0002890775781850181,
      "loss": 0.0588,
      "step": 131
    },
    {
      "epoch": 0.22406110757479314,
      "grad_norm": 0.008955306373536587,
      "learning_rate": 0.000288898565907957,
      "loss": 0.0002,
      "step": 132
    },
    {
      "epoch": 0.2257585402079355,
      "grad_norm": 0.4112979471683502,
      "learning_rate": 0.0002887181549238793,
      "loss": 0.0037,
      "step": 133
    },
    {
      "epoch": 0.22745597284107788,
      "grad_norm": 0.0008860170491971076,
      "learning_rate": 0.00028853634704952165,
      "loss": 0.0001,
      "step": 134
    },
    {
      "epoch": 0.22915340547422025,
      "grad_norm": 0.03983623534440994,
      "learning_rate": 0.0002883531441156872,
      "loss": 0.0014,
      "step": 135
    },
    {
      "epoch": 0.23085083810736262,
      "grad_norm": 0.13147315382957458,
      "learning_rate": 0.00028816854796722747,
      "loss": 0.0039,
      "step": 136
    },
    {
      "epoch": 0.232548270740505,
      "grad_norm": 0.027041589841246605,
      "learning_rate": 0.00028798256046302375,
      "loss": 0.003,
      "step": 137
    },
    {
      "epoch": 0.23424570337364736,
      "grad_norm": 0.012492002919316292,
      "learning_rate": 0.000287795183475968,
      "loss": 0.0006,
      "step": 138
    },
    {
      "epoch": 0.23594313600678973,
      "grad_norm": 0.004202733281999826,
      "learning_rate": 0.00028760641889294446,
      "loss": 0.0002,
      "step": 139
    },
    {
      "epoch": 0.2376405686399321,
      "grad_norm": 0.004685190040618181,
      "learning_rate": 0.0002874162686148104,
      "loss": 0.0003,
      "step": 140
    },
    {
      "epoch": 0.23933800127307447,
      "grad_norm": 0.009362437762320042,
      "learning_rate": 0.000287224734556377,
      "loss": 0.0007,
      "step": 141
    },
    {
      "epoch": 0.24103543390621684,
      "grad_norm": 0.0981183648109436,
      "learning_rate": 0.0002870318186463901,
      "loss": 0.0042,
      "step": 142
    },
    {
      "epoch": 0.24273286653935922,
      "grad_norm": 0.015274147503077984,
      "learning_rate": 0.00028683752282751074,
      "loss": 0.0007,
      "step": 143
    },
    {
      "epoch": 0.24443029917250159,
      "grad_norm": 0.00900458823889494,
      "learning_rate": 0.0002866418490562957,
      "loss": 0.0007,
      "step": 144
    },
    {
      "epoch": 0.24612773180564396,
      "grad_norm": 0.01067203190177679,
      "learning_rate": 0.00028644479930317775,
      "loss": 0.0006,
      "step": 145
    },
    {
      "epoch": 0.24782516443878633,
      "grad_norm": 0.022083261981606483,
      "learning_rate": 0.0002862463755524455,
      "loss": 0.0006,
      "step": 146
    },
    {
      "epoch": 0.2495225970719287,
      "grad_norm": 0.003914229571819305,
      "learning_rate": 0.00028604657980222417,
      "loss": 0.0003,
      "step": 147
    },
    {
      "epoch": 0.25122002970507107,
      "grad_norm": 0.005544353276491165,
      "learning_rate": 0.0002858454140644546,
      "loss": 0.0004,
      "step": 148
    },
    {
      "epoch": 0.25291746233821344,
      "grad_norm": 0.006152280140668154,
      "learning_rate": 0.00028564288036487357,
      "loss": 0.0005,
      "step": 149
    },
    {
      "epoch": 0.2546148949713558,
      "grad_norm": 0.04355669021606445,
      "learning_rate": 0.00028543898074299317,
      "loss": 0.0053,
      "step": 150
    },
    {
      "epoch": 0.2546148949713558,
      "eval_loss": 0.42699772119522095,
      "eval_runtime": 65.6214,
      "eval_samples_per_second": 2.941,
      "eval_steps_per_second": 2.941,
      "step": 150
    },
    {
      "epoch": 0.2563123276044982,
      "grad_norm": 0.33021700382232666,
      "learning_rate": 0.00028523371725208035,
      "loss": 0.7046,
      "step": 151
    },
    {
      "epoch": 0.25800976023764055,
      "grad_norm": 0.29715627431869507,
      "learning_rate": 0.00028502709195913614,
      "loss": 0.8173,
      "step": 152
    },
    {
      "epoch": 0.2597071928707829,
      "grad_norm": 0.23798537254333496,
      "learning_rate": 0.000284819106944875,
      "loss": 0.8376,
      "step": 153
    },
    {
      "epoch": 0.2614046255039253,
      "grad_norm": 0.24740736186504364,
      "learning_rate": 0.0002846097643037037,
      "loss": 0.8829,
      "step": 154
    },
    {
      "epoch": 0.26310205813706766,
      "grad_norm": 0.8396446704864502,
      "learning_rate": 0.00028439906614370034,
      "loss": 0.8286,
      "step": 155
    },
    {
      "epoch": 0.26479949077021003,
      "grad_norm": 0.271604061126709,
      "learning_rate": 0.00028418701458659304,
      "loss": 0.8996,
      "step": 156
    },
    {
      "epoch": 0.2664969234033524,
      "grad_norm": 0.2267700582742691,
      "learning_rate": 0.00028397361176773855,
      "loss": 0.6639,
      "step": 157
    },
    {
      "epoch": 0.2681943560364948,
      "grad_norm": 0.19427096843719482,
      "learning_rate": 0.000283758859836101,
      "loss": 0.4771,
      "step": 158
    },
    {
      "epoch": 0.2698917886696372,
      "grad_norm": 0.1682572066783905,
      "learning_rate": 0.0002835427609542298,
      "loss": 0.407,
      "step": 159
    },
    {
      "epoch": 0.27158922130277957,
      "grad_norm": 0.22453951835632324,
      "learning_rate": 0.0002833253172982385,
      "loss": 0.6087,
      "step": 160
    },
    {
      "epoch": 0.27328665393592194,
      "grad_norm": 0.2826938331127167,
      "learning_rate": 0.00028310653105778215,
      "loss": 0.8161,
      "step": 161
    },
    {
      "epoch": 0.2749840865690643,
      "grad_norm": 0.26969850063323975,
      "learning_rate": 0.0002828864044360358,
      "loss": 0.9043,
      "step": 162
    },
    {
      "epoch": 0.2766815192022067,
      "grad_norm": 0.2145734280347824,
      "learning_rate": 0.0002826649396496721,
      "loss": 0.5051,
      "step": 163
    },
    {
      "epoch": 0.27837895183534905,
      "grad_norm": 0.22222040593624115,
      "learning_rate": 0.000282442138928839,
      "loss": 0.6356,
      "step": 164
    },
    {
      "epoch": 0.2800763844684914,
      "grad_norm": 0.1878889799118042,
      "learning_rate": 0.0002822180045171373,
      "loss": 0.4778,
      "step": 165
    },
    {
      "epoch": 0.2817738171016338,
      "grad_norm": 0.1969398856163025,
      "learning_rate": 0.00028199253867159795,
      "loss": 0.4855,
      "step": 166
    },
    {
      "epoch": 0.28347124973477617,
      "grad_norm": 0.11540260910987854,
      "learning_rate": 0.0002817657436626596,
      "loss": 0.1777,
      "step": 167
    },
    {
      "epoch": 0.28516868236791854,
      "grad_norm": 0.14375105500221252,
      "learning_rate": 0.0002815376217741454,
      "loss": 0.3379,
      "step": 168
    },
    {
      "epoch": 0.2868661150010609,
      "grad_norm": 0.4472449719905853,
      "learning_rate": 0.0002813081753032403,
      "loss": 0.5727,
      "step": 169
    },
    {
      "epoch": 0.2885635476342033,
      "grad_norm": 0.24242308735847473,
      "learning_rate": 0.0002810774065604677,
      "loss": 0.3351,
      "step": 170
    },
    {
      "epoch": 0.29026098026734565,
      "grad_norm": 0.13647812604904175,
      "learning_rate": 0.0002808453178696663,
      "loss": 0.2388,
      "step": 171
    },
    {
      "epoch": 0.291958412900488,
      "grad_norm": 0.13304531574249268,
      "learning_rate": 0.00028061191156796656,
      "loss": 0.2208,
      "step": 172
    },
    {
      "epoch": 0.2936558455336304,
      "grad_norm": 0.13337798416614532,
      "learning_rate": 0.0002803771900057674,
      "loss": 0.1881,
      "step": 173
    },
    {
      "epoch": 0.29535327816677276,
      "grad_norm": 0.12482510507106781,
      "learning_rate": 0.00028014115554671236,
      "loss": 0.1659,
      "step": 174
    },
    {
      "epoch": 0.29705071079991513,
      "grad_norm": 0.05092178285121918,
      "learning_rate": 0.0002799038105676658,
      "loss": 0.0022,
      "step": 175
    },
    {
      "epoch": 0.2987481434330575,
      "grad_norm": 0.03115926869213581,
      "learning_rate": 0.000279665157458689,
      "loss": 0.007,
      "step": 176
    },
    {
      "epoch": 0.30044557606619987,
      "grad_norm": 0.048752959817647934,
      "learning_rate": 0.0002794251986230161,
      "loss": 0.0231,
      "step": 177
    },
    {
      "epoch": 0.30214300869934224,
      "grad_norm": 0.049951329827308655,
      "learning_rate": 0.0002791839364770301,
      "loss": 0.024,
      "step": 178
    },
    {
      "epoch": 0.3038404413324846,
      "grad_norm": 0.019635546952486038,
      "learning_rate": 0.00027894137345023785,
      "loss": 0.0009,
      "step": 179
    },
    {
      "epoch": 0.305537873965627,
      "grad_norm": 0.008390502072870731,
      "learning_rate": 0.0002786975119852465,
      "loss": 0.0002,
      "step": 180
    },
    {
      "epoch": 0.30723530659876935,
      "grad_norm": 0.030752401798963547,
      "learning_rate": 0.00027845235453773836,
      "loss": 0.0012,
      "step": 181
    },
    {
      "epoch": 0.3089327392319117,
      "grad_norm": 0.004207144025713205,
      "learning_rate": 0.000278205903576446,
      "loss": 0.0001,
      "step": 182
    },
    {
      "epoch": 0.3106301718650541,
      "grad_norm": 0.012170841917395592,
      "learning_rate": 0.00027795816158312803,
      "loss": 0.0004,
      "step": 183
    },
    {
      "epoch": 0.31232760449819646,
      "grad_norm": 0.004860470537096262,
      "learning_rate": 0.0002777091310525435,
      "loss": 0.0002,
      "step": 184
    },
    {
      "epoch": 0.31402503713133884,
      "grad_norm": 0.004869968164712191,
      "learning_rate": 0.00027745881449242713,
      "loss": 0.0002,
      "step": 185
    },
    {
      "epoch": 0.3157224697644812,
      "grad_norm": 0.011595932766795158,
      "learning_rate": 0.00027720721442346387,
      "loss": 0.0003,
      "step": 186
    },
    {
      "epoch": 0.3174199023976236,
      "grad_norm": 0.02118592895567417,
      "learning_rate": 0.0002769543333792636,
      "loss": 0.0012,
      "step": 187
    },
    {
      "epoch": 0.31911733503076595,
      "grad_norm": 0.03935430571436882,
      "learning_rate": 0.00027670017390633573,
      "loss": 0.0003,
      "step": 188
    },
    {
      "epoch": 0.3208147676639083,
      "grad_norm": 0.0356944277882576,
      "learning_rate": 0.0002764447385640632,
      "loss": 0.0008,
      "step": 189
    },
    {
      "epoch": 0.3225122002970507,
      "grad_norm": 0.05277875065803528,
      "learning_rate": 0.0002761880299246772,
      "loss": 0.0007,
      "step": 190
    },
    {
      "epoch": 0.32420963293019306,
      "grad_norm": 0.0025087774265557528,
      "learning_rate": 0.0002759300505732307,
      "loss": 0.0001,
      "step": 191
    },
    {
      "epoch": 0.32590706556333543,
      "grad_norm": 0.002132557798177004,
      "learning_rate": 0.00027567080310757306,
      "loss": 0.0001,
      "step": 192
    },
    {
      "epoch": 0.3276044981964778,
      "grad_norm": 0.0010882457718253136,
      "learning_rate": 0.0002754102901383233,
      "loss": 0.0001,
      "step": 193
    },
    {
      "epoch": 0.3293019308296202,
      "grad_norm": 0.02085525542497635,
      "learning_rate": 0.0002751485142888443,
      "loss": 0.0004,
      "step": 194
    },
    {
      "epoch": 0.3309993634627626,
      "grad_norm": 0.003921423573046923,
      "learning_rate": 0.0002748854781952157,
      "loss": 0.0001,
      "step": 195
    },
    {
      "epoch": 0.33269679609590497,
      "grad_norm": 0.05576641112565994,
      "learning_rate": 0.0002746211845062082,
      "loss": 0.0027,
      "step": 196
    },
    {
      "epoch": 0.33439422872904734,
      "grad_norm": 0.025059282779693604,
      "learning_rate": 0.0002743556358832562,
      "loss": 0.0009,
      "step": 197
    },
    {
      "epoch": 0.3360916613621897,
      "grad_norm": 0.14736546576023102,
      "learning_rate": 0.00027408883500043156,
      "loss": 0.0117,
      "step": 198
    },
    {
      "epoch": 0.3377890939953321,
      "grad_norm": 0.022192303091287613,
      "learning_rate": 0.00027382078454441606,
      "loss": 0.0005,
      "step": 199
    },
    {
      "epoch": 0.33948652662847445,
      "grad_norm": 0.005171961151063442,
      "learning_rate": 0.0002735514872144749,
      "loss": 0.0002,
      "step": 200
    },
    {
      "epoch": 0.33948652662847445,
      "eval_loss": 0.42559075355529785,
      "eval_runtime": 65.6151,
      "eval_samples_per_second": 2.941,
      "eval_steps_per_second": 2.941,
      "step": 200
    },
    {
      "epoch": 0.3411839592616168,
      "grad_norm": 0.5887985825538635,
      "learning_rate": 0.0002732809457224292,
      "loss": 0.9434,
      "step": 201
    },
    {
      "epoch": 0.3428813918947592,
      "grad_norm": 0.38302966952323914,
      "learning_rate": 0.00027300916279262866,
      "loss": 1.0988,
      "step": 202
    },
    {
      "epoch": 0.34457882452790156,
      "grad_norm": 0.2614414095878601,
      "learning_rate": 0.0002727361411619245,
      "loss": 0.6772,
      "step": 203
    },
    {
      "epoch": 0.34627625716104393,
      "grad_norm": 0.3063081204891205,
      "learning_rate": 0.0002724618835796414,
      "loss": 0.7314,
      "step": 204
    },
    {
      "epoch": 0.3479736897941863,
      "grad_norm": 0.18658895790576935,
      "learning_rate": 0.0002721863928075503,
      "loss": 0.514,
      "step": 205
    },
    {
      "epoch": 0.3496711224273287,
      "grad_norm": 0.282010018825531,
      "learning_rate": 0.0002719096716198402,
      "loss": 0.892,
      "step": 206
    },
    {
      "epoch": 0.35136855506047104,
      "grad_norm": 0.17541489005088806,
      "learning_rate": 0.00027163172280309026,
      "loss": 0.5047,
      "step": 207
    },
    {
      "epoch": 0.3530659876936134,
      "grad_norm": 0.21916832029819489,
      "learning_rate": 0.0002713525491562421,
      "loss": 0.7146,
      "step": 208
    },
    {
      "epoch": 0.3547634203267558,
      "grad_norm": 0.20561501383781433,
      "learning_rate": 0.0002710721534905712,
      "loss": 0.6104,
      "step": 209
    },
    {
      "epoch": 0.35646085295989816,
      "grad_norm": 0.1947142332792282,
      "learning_rate": 0.00027079053862965875,
      "loss": 0.4924,
      "step": 210
    },
    {
      "epoch": 0.3581582855930405,
      "grad_norm": 0.23798146843910217,
      "learning_rate": 0.00027050770740936336,
      "loss": 0.6153,
      "step": 211
    },
    {
      "epoch": 0.3598557182261829,
      "grad_norm": 0.21775560081005096,
      "learning_rate": 0.00027022366267779224,
      "loss": 0.4658,
      "step": 212
    },
    {
      "epoch": 0.36155315085932527,
      "grad_norm": 0.24994409084320068,
      "learning_rate": 0.0002699384072952727,
      "loss": 0.5979,
      "step": 213
    },
    {
      "epoch": 0.36325058349246764,
      "grad_norm": 0.28469640016555786,
      "learning_rate": 0.0002696519441343233,
      "loss": 0.8796,
      "step": 214
    },
    {
      "epoch": 0.36494801612561,
      "grad_norm": 0.2747570276260376,
      "learning_rate": 0.0002693642760796248,
      "loss": 0.8625,
      "step": 215
    },
    {
      "epoch": 0.3666454487587524,
      "grad_norm": 0.2469591647386551,
      "learning_rate": 0.00026907540602799136,
      "loss": 0.6991,
      "step": 216
    },
    {
      "epoch": 0.36834288139189475,
      "grad_norm": 0.20425763726234436,
      "learning_rate": 0.00026878533688834123,
      "loss": 0.5774,
      "step": 217
    },
    {
      "epoch": 0.3700403140250371,
      "grad_norm": 0.2737872898578644,
      "learning_rate": 0.0002684940715816674,
      "loss": 0.9062,
      "step": 218
    },
    {
      "epoch": 0.3717377466581795,
      "grad_norm": 0.2064121514558792,
      "learning_rate": 0.00026820161304100823,
      "loss": 0.5054,
      "step": 219
    },
    {
      "epoch": 0.37343517929132186,
      "grad_norm": 0.14863868057727814,
      "learning_rate": 0.00026790796421141813,
      "loss": 0.285,
      "step": 220
    },
    {
      "epoch": 0.37513261192446423,
      "grad_norm": 0.12498918920755386,
      "learning_rate": 0.00026761312804993734,
      "loss": 0.1999,
      "step": 221
    },
    {
      "epoch": 0.3768300445576066,
      "grad_norm": 0.1726280152797699,
      "learning_rate": 0.0002673171075255629,
      "loss": 0.2852,
      "step": 222
    },
    {
      "epoch": 0.378527477190749,
      "grad_norm": 0.1533537358045578,
      "learning_rate": 0.0002670199056192181,
      "loss": 0.3106,
      "step": 223
    },
    {
      "epoch": 0.38022490982389134,
      "grad_norm": 0.125217467546463,
      "learning_rate": 0.00026672152532372287,
      "loss": 0.1804,
      "step": 224
    },
    {
      "epoch": 0.3819223424570337,
      "grad_norm": 0.05522383376955986,
      "learning_rate": 0.0002664219696437635,
      "loss": 0.0442,
      "step": 225
    },
    {
      "epoch": 0.3836197750901761,
      "grad_norm": 0.04138198867440224,
      "learning_rate": 0.00026612124159586237,
      "loss": 0.023,
      "step": 226
    },
    {
      "epoch": 0.38531720772331846,
      "grad_norm": 0.05575822666287422,
      "learning_rate": 0.0002658193442083475,
      "loss": 0.0024,
      "step": 227
    },
    {
      "epoch": 0.3870146403564608,
      "grad_norm": 0.12629126012325287,
      "learning_rate": 0.0002655162805213223,
      "loss": 0.1524,
      "step": 228
    },
    {
      "epoch": 0.38871207298960325,
      "grad_norm": 0.02942221239209175,
      "learning_rate": 0.00026521205358663477,
      "loss": 0.0096,
      "step": 229
    },
    {
      "epoch": 0.3904095056227456,
      "grad_norm": 0.0953650251030922,
      "learning_rate": 0.00026490666646784665,
      "loss": 0.0043,
      "step": 230
    },
    {
      "epoch": 0.392106938255888,
      "grad_norm": 0.005734459031373262,
      "learning_rate": 0.00026460012224020297,
      "loss": 0.0003,
      "step": 231
    },
    {
      "epoch": 0.39380437088903036,
      "grad_norm": 0.010758363641798496,
      "learning_rate": 0.0002642924239906006,
      "loss": 0.0003,
      "step": 232
    },
    {
      "epoch": 0.39550180352217273,
      "grad_norm": 0.01772010512650013,
      "learning_rate": 0.0002639835748175575,
      "loss": 0.0007,
      "step": 233
    },
    {
      "epoch": 0.3971992361553151,
      "grad_norm": 0.005056055262684822,
      "learning_rate": 0.0002636735778311815,
      "loss": 0.0002,
      "step": 234
    },
    {
      "epoch": 0.3988966687884575,
      "grad_norm": 0.24263891577720642,
      "learning_rate": 0.00026336243615313873,
      "loss": 0.0008,
      "step": 235
    },
    {
      "epoch": 0.40059410142159985,
      "grad_norm": 0.0014849180588498712,
      "learning_rate": 0.0002630501529166224,
      "loss": 0.0001,
      "step": 236
    },
    {
      "epoch": 0.4022915340547422,
      "grad_norm": 0.0037826071493327618,
      "learning_rate": 0.00026273673126632133,
      "loss": 0.0002,
      "step": 237
    },
    {
      "epoch": 0.4039889666878846,
      "grad_norm": 0.08331254124641418,
      "learning_rate": 0.0002624221743583881,
      "loss": 0.0016,
      "step": 238
    },
    {
      "epoch": 0.40568639932102696,
      "grad_norm": 0.002364553976804018,
      "learning_rate": 0.0002621064853604071,
      "loss": 0.0001,
      "step": 239
    },
    {
      "epoch": 0.40738383195416933,
      "grad_norm": 0.014542756602168083,
      "learning_rate": 0.0002617896674513632,
      "loss": 0.0002,
      "step": 240
    },
    {
      "epoch": 0.4090812645873117,
      "grad_norm": 0.0031418628059327602,
      "learning_rate": 0.00026147172382160914,
      "loss": 0.0001,
      "step": 241
    },
    {
      "epoch": 0.41077869722045407,
      "grad_norm": 0.11094752699136734,
      "learning_rate": 0.00026115265767283374,
      "loss": 0.0031,
      "step": 242
    },
    {
      "epoch": 0.41247612985359644,
      "grad_norm": 0.012769564054906368,
      "learning_rate": 0.0002608324722180296,
      "loss": 0.0005,
      "step": 243
    },
    {
      "epoch": 0.4141735624867388,
      "grad_norm": 0.055052801966667175,
      "learning_rate": 0.0002605111706814607,
      "loss": 0.0023,
      "step": 244
    },
    {
      "epoch": 0.4158709951198812,
      "grad_norm": 0.003668338293209672,
      "learning_rate": 0.00026018875629862996,
      "loss": 0.0002,
      "step": 245
    },
    {
      "epoch": 0.41756842775302355,
      "grad_norm": 0.009973675012588501,
      "learning_rate": 0.0002598652323162466,
      "loss": 0.0003,
      "step": 246
    },
    {
      "epoch": 0.4192658603861659,
      "grad_norm": 0.02005830593407154,
      "learning_rate": 0.0002595406019921936,
      "loss": 0.0008,
      "step": 247
    },
    {
      "epoch": 0.4209632930193083,
      "grad_norm": 0.02860446274280548,
      "learning_rate": 0.0002592148685954946,
      "loss": 0.0024,
      "step": 248
    },
    {
      "epoch": 0.42266072565245066,
      "grad_norm": 0.03582284599542618,
      "learning_rate": 0.0002588880354062814,
      "loss": 0.0014,
      "step": 249
    },
    {
      "epoch": 0.42435815828559303,
      "grad_norm": 0.03657930716872215,
      "learning_rate": 0.0002585601057157605,
      "loss": 0.0023,
      "step": 250
    },
    {
      "epoch": 0.42435815828559303,
      "eval_loss": 0.39342138171195984,
      "eval_runtime": 65.6277,
      "eval_samples_per_second": 2.941,
      "eval_steps_per_second": 2.941,
      "step": 250
    },
    {
      "epoch": 0.4260555909187354,
      "grad_norm": 0.3402771055698395,
      "learning_rate": 0.0002582310828261803,
      "loss": 0.955,
      "step": 251
    },
    {
      "epoch": 0.4277530235518778,
      "grad_norm": 0.2694092392921448,
      "learning_rate": 0.00025790097005079764,
      "loss": 0.7843,
      "step": 252
    },
    {
      "epoch": 0.42945045618502015,
      "grad_norm": 0.22484031319618225,
      "learning_rate": 0.00025756977071384455,
      "loss": 0.6626,
      "step": 253
    },
    {
      "epoch": 0.4311478888181625,
      "grad_norm": 0.25034627318382263,
      "learning_rate": 0.0002572374881504945,
      "loss": 0.8865,
      "step": 254
    },
    {
      "epoch": 0.4328453214513049,
      "grad_norm": 0.25369909405708313,
      "learning_rate": 0.00025690412570682946,
      "loss": 0.7099,
      "step": 255
    },
    {
      "epoch": 0.43454275408444726,
      "grad_norm": 0.22795934975147247,
      "learning_rate": 0.0002565696867398053,
      "loss": 0.7818,
      "step": 256
    },
    {
      "epoch": 0.43624018671758963,
      "grad_norm": 0.2158069759607315,
      "learning_rate": 0.00025623417461721884,
      "loss": 0.6434,
      "step": 257
    },
    {
      "epoch": 0.437937619350732,
      "grad_norm": 0.2332068681716919,
      "learning_rate": 0.00025589759271767344,
      "loss": 0.8126,
      "step": 258
    },
    {
      "epoch": 0.43963505198387437,
      "grad_norm": 0.21993213891983032,
      "learning_rate": 0.00025555994443054504,
      "loss": 0.6689,
      "step": 259
    },
    {
      "epoch": 0.44133248461701674,
      "grad_norm": 0.26037323474884033,
      "learning_rate": 0.0002552212331559482,
      "loss": 0.992,
      "step": 260
    },
    {
      "epoch": 0.4430299172501591,
      "grad_norm": 0.2357717603445053,
      "learning_rate": 0.00025488146230470156,
      "loss": 0.7212,
      "step": 261
    },
    {
      "epoch": 0.4447273498833015,
      "grad_norm": 0.22752051055431366,
      "learning_rate": 0.00025454063529829405,
      "loss": 0.7759,
      "step": 262
    },
    {
      "epoch": 0.44642478251644385,
      "grad_norm": 0.20641978085041046,
      "learning_rate": 0.0002541987555688496,
      "loss": 0.6029,
      "step": 263
    },
    {
      "epoch": 0.4481222151495863,
      "grad_norm": 1.728589415550232,
      "learning_rate": 0.0002538558265590934,
      "loss": 0.8527,
      "step": 264
    },
    {
      "epoch": 0.44981964778272865,
      "grad_norm": 0.3176920711994171,
      "learning_rate": 0.0002535118517223168,
      "loss": 1.0045,
      "step": 265
    },
    {
      "epoch": 0.451517080415871,
      "grad_norm": 0.22813205420970917,
      "learning_rate": 0.00025316683452234254,
      "loss": 0.5755,
      "step": 266
    },
    {
      "epoch": 0.4532145130490134,
      "grad_norm": 0.27417638897895813,
      "learning_rate": 0.00025282077843349,
      "loss": 0.6442,
      "step": 267
    },
    {
      "epoch": 0.45491194568215576,
      "grad_norm": 0.23180553317070007,
      "learning_rate": 0.00025247368694054017,
      "loss": 0.3961,
      "step": 268
    },
    {
      "epoch": 0.45660937831529813,
      "grad_norm": 0.21716707944869995,
      "learning_rate": 0.0002521255635387005,
      "loss": 0.5498,
      "step": 269
    },
    {
      "epoch": 0.4583068109484405,
      "grad_norm": 0.1608789563179016,
      "learning_rate": 0.0002517764117335698,
      "loss": 0.3229,
      "step": 270
    },
    {
      "epoch": 0.4600042435815829,
      "grad_norm": 0.06968680769205093,
      "learning_rate": 0.00025142623504110286,
      "loss": 0.0545,
      "step": 271
    },
    {
      "epoch": 0.46170167621472524,
      "grad_norm": 0.17753787338733673,
      "learning_rate": 0.0002510750369875752,
      "loss": 0.3944,
      "step": 272
    },
    {
      "epoch": 0.4633991088478676,
      "grad_norm": 0.1846492886543274,
      "learning_rate": 0.0002507228211095471,
      "loss": 0.4219,
      "step": 273
    },
    {
      "epoch": 0.46509654148101,
      "grad_norm": 0.12200163304805756,
      "learning_rate": 0.0002503695909538287,
      "loss": 0.1832,
      "step": 274
    },
    {
      "epoch": 0.46679397411415235,
      "grad_norm": 0.08617426455020905,
      "learning_rate": 0.00025001535007744373,
      "loss": 0.0833,
      "step": 275
    },
    {
      "epoch": 0.4684914067472947,
      "grad_norm": 0.10826346278190613,
      "learning_rate": 0.0002496601020475938,
      "loss": 0.1379,
      "step": 276
    },
    {
      "epoch": 0.4701888393804371,
      "grad_norm": 0.09130895137786865,
      "learning_rate": 0.00024930385044162276,
      "loss": 0.0909,
      "step": 277
    },
    {
      "epoch": 0.47188627201357947,
      "grad_norm": 0.01284122746437788,
      "learning_rate": 0.0002489465988469802,
      "loss": 0.0011,
      "step": 278
    },
    {
      "epoch": 0.47358370464672184,
      "grad_norm": 0.03364328294992447,
      "learning_rate": 0.0002485883508611858,
      "loss": 0.0151,
      "step": 279
    },
    {
      "epoch": 0.4752811372798642,
      "grad_norm": 0.005700011737644672,
      "learning_rate": 0.00024822911009179276,
      "loss": 0.0004,
      "step": 280
    },
    {
      "epoch": 0.4769785699130066,
      "grad_norm": 0.026785628870129585,
      "learning_rate": 0.0002478688801563516,
      "loss": 0.0022,
      "step": 281
    },
    {
      "epoch": 0.47867600254614895,
      "grad_norm": 0.014533424749970436,
      "learning_rate": 0.00024750766468237387,
      "loss": 0.0009,
      "step": 282
    },
    {
      "epoch": 0.4803734351792913,
      "grad_norm": 0.02565724588930607,
      "learning_rate": 0.0002471454673072953,
      "loss": 0.0015,
      "step": 283
    },
    {
      "epoch": 0.4820708678124337,
      "grad_norm": 0.01476586889475584,
      "learning_rate": 0.0002467822916784394,
      "loss": 0.0007,
      "step": 284
    },
    {
      "epoch": 0.48376830044557606,
      "grad_norm": 0.0030139784794300795,
      "learning_rate": 0.0002464181414529809,
      "loss": 0.0001,
      "step": 285
    },
    {
      "epoch": 0.48546573307871843,
      "grad_norm": 0.013711950741708279,
      "learning_rate": 0.00024605302029790836,
      "loss": 0.0002,
      "step": 286
    },
    {
      "epoch": 0.4871631657118608,
      "grad_norm": 0.0045742918737232685,
      "learning_rate": 0.00024568693188998776,
      "loss": 0.0002,
      "step": 287
    },
    {
      "epoch": 0.48886059834500317,
      "grad_norm": 0.01617550477385521,
      "learning_rate": 0.00024531987991572543,
      "loss": 0.0008,
      "step": 288
    },
    {
      "epoch": 0.49055803097814554,
      "grad_norm": 0.0015017741825431585,
      "learning_rate": 0.00024495186807133056,
      "loss": 0.0001,
      "step": 289
    },
    {
      "epoch": 0.4922554636112879,
      "grad_norm": 0.04141293093562126,
      "learning_rate": 0.00024458290006267833,
      "loss": 0.0021,
      "step": 290
    },
    {
      "epoch": 0.4939528962444303,
      "grad_norm": 0.03005625680088997,
      "learning_rate": 0.0002442129796052726,
      "loss": 0.0015,
      "step": 291
    },
    {
      "epoch": 0.49565032887757265,
      "grad_norm": 0.0004941977094858885,
      "learning_rate": 0.00024384211042420822,
      "loss": 0.0,
      "step": 292
    },
    {
      "epoch": 0.497347761510715,
      "grad_norm": 0.0018400073749944568,
      "learning_rate": 0.00024347029625413364,
      "loss": 0.0001,
      "step": 293
    },
    {
      "epoch": 0.4990451941438574,
      "grad_norm": 0.023964567109942436,
      "learning_rate": 0.00024309754083921354,
      "loss": 0.0008,
      "step": 294
    },
    {
      "epoch": 0.5007426267769998,
      "grad_norm": 0.00808011181652546,
      "learning_rate": 0.00024272384793309077,
      "loss": 0.0003,
      "step": 295
    },
    {
      "epoch": 0.5024400594101421,
      "grad_norm": 0.026506319642066956,
      "learning_rate": 0.0002423492212988487,
      "loss": 0.0011,
      "step": 296
    },
    {
      "epoch": 0.5041374920432845,
      "grad_norm": 0.0033183887135237455,
      "learning_rate": 0.0002419736647089735,
      "loss": 0.0001,
      "step": 297
    },
    {
      "epoch": 0.5058349246764269,
      "grad_norm": 0.0012288711732253432,
      "learning_rate": 0.00024159718194531572,
      "loss": 0.0001,
      "step": 298
    },
    {
      "epoch": 0.5075323573095692,
      "grad_norm": 0.001825229381211102,
      "learning_rate": 0.00024121977679905266,
      "loss": 0.0001,
      "step": 299
    },
    {
      "epoch": 0.5092297899427116,
      "grad_norm": 0.0013848728267475963,
      "learning_rate": 0.00024084145307064997,
      "loss": 0.0001,
      "step": 300
    },
    {
      "epoch": 0.5092297899427116,
      "eval_loss": 0.408105731010437,
      "eval_runtime": 65.5854,
      "eval_samples_per_second": 2.943,
      "eval_steps_per_second": 2.943,
      "step": 300
    },
    {
      "epoch": 0.510927222575854,
      "grad_norm": 0.6500065922737122,
      "learning_rate": 0.0002404622145698234,
      "loss": 1.2109,
      "step": 301
    },
    {
      "epoch": 0.5126246552089964,
      "grad_norm": 0.2953287661075592,
      "learning_rate": 0.00024008206511550044,
      "loss": 0.8961,
      "step": 302
    },
    {
      "epoch": 0.5143220878421387,
      "grad_norm": 0.2599642276763916,
      "learning_rate": 0.00023970100853578185,
      "loss": 0.822,
      "step": 303
    },
    {
      "epoch": 0.5160195204752811,
      "grad_norm": 0.22075462341308594,
      "learning_rate": 0.00023931904866790317,
      "loss": 0.7042,
      "step": 304
    },
    {
      "epoch": 0.5177169531084235,
      "grad_norm": 0.1947249174118042,
      "learning_rate": 0.00023893618935819607,
      "loss": 0.6038,
      "step": 305
    },
    {
      "epoch": 0.5194143857415658,
      "grad_norm": 0.1981605589389801,
      "learning_rate": 0.00023855243446204946,
      "loss": 0.5842,
      "step": 306
    },
    {
      "epoch": 0.5211118183747082,
      "grad_norm": 0.25752243399620056,
      "learning_rate": 0.00023816778784387094,
      "loss": 0.7636,
      "step": 307
    },
    {
      "epoch": 0.5228092510078506,
      "grad_norm": 0.20701764523983002,
      "learning_rate": 0.00023778225337704772,
      "loss": 0.5711,
      "step": 308
    },
    {
      "epoch": 0.524506683640993,
      "grad_norm": 0.1890690177679062,
      "learning_rate": 0.00023739583494390752,
      "loss": 0.5323,
      "step": 309
    },
    {
      "epoch": 0.5262041162741353,
      "grad_norm": 0.2634682059288025,
      "learning_rate": 0.0002370085364356797,
      "loss": 0.8929,
      "step": 310
    },
    {
      "epoch": 0.5279015489072777,
      "grad_norm": 0.21140585839748383,
      "learning_rate": 0.00023662036175245595,
      "loss": 0.6722,
      "step": 311
    },
    {
      "epoch": 0.5295989815404201,
      "grad_norm": 0.1461462825536728,
      "learning_rate": 0.00023623131480315107,
      "loss": 0.3355,
      "step": 312
    },
    {
      "epoch": 0.5312964141735624,
      "grad_norm": 0.25913017988204956,
      "learning_rate": 0.00023584139950546344,
      "loss": 0.809,
      "step": 313
    },
    {
      "epoch": 0.5329938468067048,
      "grad_norm": 0.20216749608516693,
      "learning_rate": 0.0002354506197858358,
      "loss": 0.522,
      "step": 314
    },
    {
      "epoch": 0.5346912794398472,
      "grad_norm": 0.19447529315948486,
      "learning_rate": 0.00023505897957941556,
      "loss": 0.4756,
      "step": 315
    },
    {
      "epoch": 0.5363887120729895,
      "grad_norm": 0.20887967944145203,
      "learning_rate": 0.00023466648283001538,
      "loss": 0.5348,
      "step": 316
    },
    {
      "epoch": 0.538086144706132,
      "grad_norm": 0.2380082756280899,
      "learning_rate": 0.000234273133490073,
      "loss": 0.6786,
      "step": 317
    },
    {
      "epoch": 0.5397835773392744,
      "grad_norm": 0.13057811558246613,
      "learning_rate": 0.00023387893552061199,
      "loss": 0.2262,
      "step": 318
    },
    {
      "epoch": 0.5414810099724168,
      "grad_norm": 0.19371569156646729,
      "learning_rate": 0.00023348389289120158,
      "loss": 0.4282,
      "step": 319
    },
    {
      "epoch": 0.5431784426055591,
      "grad_norm": 0.20126739144325256,
      "learning_rate": 0.00023308800957991653,
      "loss": 0.4524,
      "step": 320
    },
    {
      "epoch": 0.5448758752387015,
      "grad_norm": 0.1577330082654953,
      "learning_rate": 0.00023269128957329748,
      "loss": 0.2979,
      "step": 321
    },
    {
      "epoch": 0.5465733078718439,
      "grad_norm": 0.1581580936908722,
      "learning_rate": 0.0002322937368663105,
      "loss": 0.2393,
      "step": 322
    },
    {
      "epoch": 0.5482707405049863,
      "grad_norm": 0.0836775079369545,
      "learning_rate": 0.00023189535546230683,
      "loss": 0.0756,
      "step": 323
    },
    {
      "epoch": 0.5499681731381286,
      "grad_norm": 0.13577239215373993,
      "learning_rate": 0.00023149614937298296,
      "loss": 0.204,
      "step": 324
    },
    {
      "epoch": 0.551665605771271,
      "grad_norm": 0.003654525149613619,
      "learning_rate": 0.00023109612261833963,
      "loss": 0.0001,
      "step": 325
    },
    {
      "epoch": 0.5533630384044134,
      "grad_norm": 0.0016976917395368218,
      "learning_rate": 0.00023069527922664186,
      "loss": 0.0001,
      "step": 326
    },
    {
      "epoch": 0.5550604710375557,
      "grad_norm": 0.006387701723724604,
      "learning_rate": 0.00023029362323437818,
      "loss": 0.0002,
      "step": 327
    },
    {
      "epoch": 0.5567579036706981,
      "grad_norm": 0.0010255499510094523,
      "learning_rate": 0.00022989115868621995,
      "loss": 0.0001,
      "step": 328
    },
    {
      "epoch": 0.5584553363038405,
      "grad_norm": 0.002511462429538369,
      "learning_rate": 0.0002294878896349807,
      "loss": 0.0001,
      "step": 329
    },
    {
      "epoch": 0.5601527689369828,
      "grad_norm": 0.002243687631562352,
      "learning_rate": 0.00022908382014157533,
      "loss": 0.0001,
      "step": 330
    },
    {
      "epoch": 0.5618502015701252,
      "grad_norm": 0.0010394121054559946,
      "learning_rate": 0.00022867895427497914,
      "loss": 0.0,
      "step": 331
    },
    {
      "epoch": 0.5635476342032676,
      "grad_norm": 0.004921845626085997,
      "learning_rate": 0.00022827329611218688,
      "loss": 0.0002,
      "step": 332
    },
    {
      "epoch": 0.56524506683641,
      "grad_norm": 0.006571977864950895,
      "learning_rate": 0.0002278668497381718,
      "loss": 0.0003,
      "step": 333
    },
    {
      "epoch": 0.5669424994695523,
      "grad_norm": 0.06410548835992813,
      "learning_rate": 0.00022745961924584428,
      "loss": 0.0393,
      "step": 334
    },
    {
      "epoch": 0.5686399321026947,
      "grad_norm": 0.0016587848076596856,
      "learning_rate": 0.00022705160873601096,
      "loss": 0.0001,
      "step": 335
    },
    {
      "epoch": 0.5703373647358371,
      "grad_norm": 0.002642609179019928,
      "learning_rate": 0.00022664282231733309,
      "loss": 0.0001,
      "step": 336
    },
    {
      "epoch": 0.5720347973689794,
      "grad_norm": 0.09959813952445984,
      "learning_rate": 0.00022623326410628534,
      "loss": 0.0009,
      "step": 337
    },
    {
      "epoch": 0.5737322300021218,
      "grad_norm": 0.0017017674399539828,
      "learning_rate": 0.00022582293822711444,
      "loss": 0.0001,
      "step": 338
    },
    {
      "epoch": 0.5754296626352642,
      "grad_norm": 0.0015052666421979666,
      "learning_rate": 0.00022541184881179737,
      "loss": 0.0001,
      "step": 339
    },
    {
      "epoch": 0.5771270952684066,
      "grad_norm": 0.0034250568132847548,
      "learning_rate": 0.000225,
      "loss": 0.0001,
      "step": 340
    },
    {
      "epoch": 0.5788245279015489,
      "grad_norm": 0.004533613566309214,
      "learning_rate": 0.0002245873959390353,
      "loss": 0.0001,
      "step": 341
    },
    {
      "epoch": 0.5805219605346913,
      "grad_norm": 0.0014232480898499489,
      "learning_rate": 0.00022417404078382152,
      "loss": 0.0001,
      "step": 342
    },
    {
      "epoch": 0.5822193931678337,
      "grad_norm": 0.0018840961856767535,
      "learning_rate": 0.00022375993869684058,
      "loss": 0.0,
      "step": 343
    },
    {
      "epoch": 0.583916825800976,
      "grad_norm": 0.0005610976368188858,
      "learning_rate": 0.00022334509384809584,
      "loss": 0.0,
      "step": 344
    },
    {
      "epoch": 0.5856142584341184,
      "grad_norm": 0.011308716610074043,
      "learning_rate": 0.00022292951041507028,
      "loss": 0.0003,
      "step": 345
    },
    {
      "epoch": 0.5873116910672608,
      "grad_norm": 0.008396640419960022,
      "learning_rate": 0.00022251319258268453,
      "loss": 0.0001,
      "step": 346
    },
    {
      "epoch": 0.5890091237004031,
      "grad_norm": 0.016598394140601158,
      "learning_rate": 0.00022209614454325459,
      "loss": 0.0002,
      "step": 347
    },
    {
      "epoch": 0.5907065563335455,
      "grad_norm": 0.0011634851107373834,
      "learning_rate": 0.00022167837049644947,
      "loss": 0.0,
      "step": 348
    },
    {
      "epoch": 0.5924039889666879,
      "grad_norm": 0.05063653737306595,
      "learning_rate": 0.00022125987464924926,
      "loss": 0.0006,
      "step": 349
    },
    {
      "epoch": 0.5941014215998303,
      "grad_norm": 0.030548613518476486,
      "learning_rate": 0.0002208406612159024,
      "loss": 0.0005,
      "step": 350
    },
    {
      "epoch": 0.5941014215998303,
      "eval_loss": 0.403373122215271,
      "eval_runtime": 65.5747,
      "eval_samples_per_second": 2.943,
      "eval_steps_per_second": 2.943,
      "step": 350
    },
{ |
|
"epoch": 0.5957988542329726, |
|
"grad_norm": 0.4255245625972748, |
|
"learning_rate": 0.00022042073441788358, |
|
"loss": 1.1602, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.597496286866115, |
|
"grad_norm": 1.3056901693344116, |
|
"learning_rate": 0.00022000009848385105, |
|
"loss": 0.9796, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.5991937194992574, |
|
"grad_norm": 0.21184608340263367, |
|
"learning_rate": 0.0002195787576496039, |
|
"loss": 0.6262, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6008911521323997, |
|
"grad_norm": 0.24157075583934784, |
|
"learning_rate": 0.00021915671615803966, |
|
"loss": 0.8544, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6025885847655421, |
|
"grad_norm": 0.20036348700523376, |
|
"learning_rate": 0.00021873397825911153, |
|
"loss": 0.6267, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6042860173986845, |
|
"grad_norm": 0.20190277695655823, |
|
"learning_rate": 0.00021831054820978544, |
|
"loss": 0.6421, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6059834500318269, |
|
"grad_norm": 0.2218364179134369, |
|
"learning_rate": 0.00021788643027399724, |
|
"loss": 0.7318, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6076808826649692, |
|
"grad_norm": 0.2376060038805008, |
|
"learning_rate": 0.00021746162872260985, |
|
"loss": 0.8077, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6093783152981116, |
|
"grad_norm": 0.19997930526733398, |
|
"learning_rate": 0.0002170361478333702, |
|
"loss": 0.6109, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.611075747931254, |
|
"grad_norm": 0.25473812222480774, |
|
"learning_rate": 0.0002166099918908661, |
|
"loss": 0.7628, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6127731805643963, |
|
"grad_norm": 0.16967284679412842, |
|
"learning_rate": 0.00021618316518648317, |
|
"loss": 0.3639, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6144706131975387, |
|
"grad_norm": 0.2045927196741104, |
|
"learning_rate": 0.0002157556720183616, |
|
"loss": 0.5688, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6161680458306811, |
|
"grad_norm": 0.30063769221305847, |
|
"learning_rate": 0.00021532751669135284, |
|
"loss": 0.8787, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6178654784638234, |
|
"grad_norm": 0.22912342846393585, |
|
"learning_rate": 0.00021489870351697622, |
|
"loss": 0.5724, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6195629110969658, |
|
"grad_norm": 0.1712283343076706, |
|
"learning_rate": 0.00021446923681337575, |
|
"loss": 0.3771, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6212603437301082, |
|
"grad_norm": 0.22983159124851227, |
|
"learning_rate": 0.00021403912090527623, |
|
"loss": 0.6274, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6229577763632506, |
|
"grad_norm": 0.2075144350528717, |
|
"learning_rate": 0.00021360836012394025, |
|
"loss": 0.5276, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6246552089963929, |
|
"grad_norm": 0.1731417328119278, |
|
"learning_rate": 0.00021317695880712398, |
|
"loss": 0.314, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6263526416295353, |
|
"grad_norm": 0.1949385702610016, |
|
"learning_rate": 0.0002127449212990339, |
|
"loss": 0.4633, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6280500742626777, |
|
"grad_norm": 0.20679379999637604, |
|
"learning_rate": 0.00021231225195028297, |
|
"loss": 0.4547, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.62974750689582, |
|
"grad_norm": 0.1853644698858261, |
|
"learning_rate": 0.00021187895511784666, |
|
"loss": 0.3758, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6314449395289624, |
|
"grad_norm": 0.2339005470275879, |
|
"learning_rate": 0.00021144503516501927, |
|
"loss": 0.4251, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6331423721621048, |
|
"grad_norm": 0.12531976401805878, |
|
"learning_rate": 0.00021101049646137003, |
|
"loss": 0.1716, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6348398047952472, |
|
"grad_norm": 0.12999360263347626, |
|
"learning_rate": 0.00021057534338269872, |
|
"loss": 0.2032, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6365372374283895, |
|
"grad_norm": 0.1318761557340622, |
|
"learning_rate": 0.00021013958031099205, |
|
"loss": 0.1968, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6382346700615319, |
|
"grad_norm": 0.06346186250448227, |
|
"learning_rate": 0.00020970321163437934, |
|
"loss": 0.05, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6399321026946743, |
|
"grad_norm": 0.12222940474748611, |
|
"learning_rate": 0.00020926624174708827, |
|
"loss": 0.1675, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6416295353278166, |
|
"grad_norm": 0.12370602786540985, |
|
"learning_rate": 0.0002088286750494008, |
|
"loss": 0.0724, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.643326967960959, |
|
"grad_norm": 0.0037796611431986094, |
|
"learning_rate": 0.00020839051594760872, |
|
"loss": 0.0002, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6450244005941014, |
|
"grad_norm": 0.08905645459890366, |
|
"learning_rate": 0.00020795176885396926, |
|
"loss": 0.0305, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6467218332272437, |
|
"grad_norm": 0.11877533793449402, |
|
"learning_rate": 0.00020751243818666087, |
|
"loss": 0.1818, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6484192658603861, |
|
"grad_norm": 0.0044077117927372456, |
|
"learning_rate": 0.00020707252836973844, |
|
"loss": 0.0001, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6501166984935285, |
|
"grad_norm": 0.0014336027670651674, |
|
"learning_rate": 0.00020663204383308898, |
|
"loss": 0.0001, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6518141311266709, |
|
"grad_norm": 0.025872627273201942, |
|
"learning_rate": 0.0002061909890123868, |
|
"loss": 0.0031, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6535115637598132, |
|
"grad_norm": 0.007007645908743143, |
|
"learning_rate": 0.0002057493683490491, |
|
"loss": 0.0002, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6552089963929556, |
|
"grad_norm": 0.01107161957770586, |
|
"learning_rate": 0.0002053071862901911, |
|
"loss": 0.0003, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6569064290260981, |
|
"grad_norm": 0.014977892860770226, |
|
"learning_rate": 0.00020486444728858117, |
|
"loss": 0.0003, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6586038616592405, |
|
"grad_norm": 0.011495725251734257, |
|
"learning_rate": 0.00020442115580259613, |
|
"loss": 0.0002, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6603012942923828, |
|
"grad_norm": 0.0011856303317472339, |
|
"learning_rate": 0.00020397731629617636, |
|
"loss": 0.0001, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.6619987269255252, |
|
"grad_norm": 0.000914178614038974, |
|
"learning_rate": 0.00020353293323878074, |
|
"loss": 0.0, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6636961595586676, |
|
"grad_norm": 0.01322512049227953, |
|
"learning_rate": 0.00020308801110534178, |
|
"loss": 0.0003, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.6653935921918099, |
|
"grad_norm": 0.07302884012460709, |
|
"learning_rate": 0.00020264255437622036, |
|
"loss": 0.0008, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.6670910248249523, |
|
"grad_norm": 0.002658440498635173, |
|
"learning_rate": 0.00020219656753716074, |
|
"loss": 0.0001, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.6687884574580947, |
|
"grad_norm": 0.0038816186133772135, |
|
"learning_rate": 0.00020175005507924558, |
|
"loss": 0.0001, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.670485890091237, |
|
"grad_norm": 0.0019201135728508234, |
|
"learning_rate": 0.00020130302149885031, |
|
"loss": 0.0, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.6721833227243794, |
|
"grad_norm": 0.005636914633214474, |
|
"learning_rate": 0.00020085547129759806, |
|
"loss": 0.0002, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.6738807553575218, |
|
"grad_norm": 0.003746110713109374, |
|
"learning_rate": 0.00020040740898231448, |
|
"loss": 0.0001, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.6755781879906642, |
|
"grad_norm": 0.004497275687754154, |
|
"learning_rate": 0.0001999588390649821, |
|
"loss": 0.0001, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.6772756206238065, |
|
"grad_norm": 0.004322202410548925, |
|
"learning_rate": 0.00019950976606269497, |
|
"loss": 0.0001, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.6789730532569489, |
|
"grad_norm": 0.025630857795476913, |
|
"learning_rate": 0.00019906019449761325, |
|
"loss": 0.0003, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6789730532569489, |
|
"eval_loss": 0.3837679922580719, |
|
"eval_runtime": 65.6798, |
|
"eval_samples_per_second": 2.938, |
|
"eval_steps_per_second": 2.938, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6806704858900913, |
|
"grad_norm": 0.23363079130649567, |
|
"learning_rate": 0.00019861012889691755, |
|
"loss": 0.6918, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6823679185232336, |
|
"grad_norm": 0.2805746793746948, |
|
"learning_rate": 0.0001981595737927636, |
|
"loss": 0.8612, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.684065351156376, |
|
"grad_norm": 0.2095121443271637, |
|
"learning_rate": 0.00019770853372223625, |
|
"loss": 0.5805, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6857627837895184, |
|
"grad_norm": 0.23234990239143372, |
|
"learning_rate": 0.0001972570132273039, |
|
"loss": 0.7755, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6874602164226608, |
|
"grad_norm": 0.23639503121376038, |
|
"learning_rate": 0.00019680501685477304, |
|
"loss": 0.8386, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.6891576490558031, |
|
"grad_norm": 0.21290989220142365, |
|
"learning_rate": 0.0001963525491562421, |
|
"loss": 0.6506, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6908550816889455, |
|
"grad_norm": 0.19381728768348694, |
|
"learning_rate": 0.00019589961468805578, |
|
"loss": 0.5448, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6925525143220879, |
|
"grad_norm": 0.3582805395126343, |
|
"learning_rate": 0.00019544621801125908, |
|
"loss": 1.0269, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.6942499469552302, |
|
"grad_norm": 0.24776633083820343, |
|
"learning_rate": 0.00019499236369155157, |
|
"loss": 0.8699, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6959473795883726, |
|
"grad_norm": 0.24224227666854858, |
|
"learning_rate": 0.00019453805629924124, |
|
"loss": 0.8062, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.697644812221515, |
|
"grad_norm": 0.20559021830558777, |
|
"learning_rate": 0.0001940833004091984, |
|
"loss": 0.5474, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6993422448546573, |
|
"grad_norm": 0.27351394295692444, |
|
"learning_rate": 0.00019362810060080985, |
|
"loss": 0.7381, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7010396774877997, |
|
"grad_norm": 0.22222575545310974, |
|
"learning_rate": 0.00019317246145793263, |
|
"loss": 0.6845, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7027371101209421, |
|
"grad_norm": 0.24615706503391266, |
|
"learning_rate": 0.00019271638756884784, |
|
"loss": 0.8067, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7044345427540845, |
|
"grad_norm": 0.18379613757133484, |
|
"learning_rate": 0.00019225988352621445, |
|
"loss": 0.4555, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7061319753872268, |
|
"grad_norm": 0.22946029901504517, |
|
"learning_rate": 0.00019180295392702315, |
|
"loss": 0.6708, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7078294080203692, |
|
"grad_norm": 0.2477787286043167, |
|
"learning_rate": 0.00019134560337254986, |
|
"loss": 0.749, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7095268406535116, |
|
"grad_norm": 0.15241138637065887, |
|
"learning_rate": 0.00019088783646830967, |
|
"loss": 0.2924, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7112242732866539, |
|
"grad_norm": 0.12446551769971848, |
|
"learning_rate": 0.00019042965782401018, |
|
"loss": 0.1693, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7129217059197963, |
|
"grad_norm": 0.18220585584640503, |
|
"learning_rate": 0.0001899710720535052, |
|
"loss": 0.4431, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7146191385529387, |
|
"grad_norm": 0.17593543231487274, |
|
"learning_rate": 0.00018951208377474847, |
|
"loss": 0.3987, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.716316571186081, |
|
"grad_norm": 0.1010337546467781, |
|
"learning_rate": 0.0001890526976097468, |
|
"loss": 0.1318, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7180140038192234, |
|
"grad_norm": 0.13775108754634857, |
|
"learning_rate": 0.00018859291818451373, |
|
"loss": 0.2565, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7197114364523658, |
|
"grad_norm": 0.23579080402851105, |
|
"learning_rate": 0.00018813275012902306, |
|
"loss": 0.5919, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7214088690855082, |
|
"grad_norm": 0.13805098831653595, |
|
"learning_rate": 0.00018767219807716185, |
|
"loss": 0.2562, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7231063017186505, |
|
"grad_norm": 0.11531147360801697, |
|
"learning_rate": 0.00018721126666668432, |
|
"loss": 0.1649, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7248037343517929, |
|
"grad_norm": 0.09808983653783798, |
|
"learning_rate": 0.00018674996053916456, |
|
"loss": 0.1384, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7265011669849353, |
|
"grad_norm": 0.004477047827094793, |
|
"learning_rate": 0.00018628828433995013, |
|
"loss": 0.0002, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7281985996180776, |
|
"grad_norm": 0.11150182038545609, |
|
"learning_rate": 0.00018582624271811532, |
|
"loss": 0.0681, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.72989603225122, |
|
"grad_norm": 0.005313314031809568, |
|
"learning_rate": 0.0001853638403264141, |
|
"loss": 0.0002, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7315934648843624, |
|
"grad_norm": 0.05417265370488167, |
|
"learning_rate": 0.00018490108182123334, |
|
"loss": 0.0018, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7332908975175048, |
|
"grad_norm": 0.03447933495044708, |
|
"learning_rate": 0.00018443797186254614, |
|
"loss": 0.0021, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7349883301506471, |
|
"grad_norm": 0.0018511873204261065, |
|
"learning_rate": 0.00018397451511386467, |
|
"loss": 0.0001, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7366857627837895, |
|
"grad_norm": 0.0019058893667533994, |
|
"learning_rate": 0.00018351071624219311, |
|
"loss": 0.0001, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7383831954169319, |
|
"grad_norm": 0.006601972505450249, |
|
"learning_rate": 0.0001830465799179811, |
|
"loss": 0.0003, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7400806280500742, |
|
"grad_norm": 0.011643564328551292, |
|
"learning_rate": 0.00018258211081507614, |
|
"loss": 0.0004, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7417780606832166, |
|
"grad_norm": 0.004530356731265783, |
|
"learning_rate": 0.00018211731361067706, |
|
"loss": 0.0002, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.743475493316359, |
|
"grad_norm": 0.0015783226117491722, |
|
"learning_rate": 0.00018165219298528647, |
|
"loss": 0.0001, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7451729259495014, |
|
"grad_norm": 0.03880147635936737, |
|
"learning_rate": 0.00018118675362266385, |
|
"loss": 0.0011, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7468703585826437, |
|
"grad_norm": 0.04943560063838959, |
|
"learning_rate": 0.0001807210002097786, |
|
"loss": 0.0045, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7485677912157861, |
|
"grad_norm": 0.009896587580442429, |
|
"learning_rate": 0.0001802549374367623, |
|
"loss": 0.0005, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7502652238489285, |
|
"grad_norm": 0.009237539954483509, |
|
"learning_rate": 0.0001797885699968618, |
|
"loss": 0.0004, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7519626564820708, |
|
"grad_norm": 0.005970139987766743, |
|
"learning_rate": 0.0001793219025863922, |
|
"loss": 0.0001, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7536600891152132, |
|
"grad_norm": 0.0007102653034962714, |
|
"learning_rate": 0.00017885493990468915, |
|
"loss": 0.0, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.7553575217483556, |
|
"grad_norm": 0.0005071276100352407, |
|
"learning_rate": 0.0001783876866540615, |
|
"loss": 0.0, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.757054954381498, |
|
"grad_norm": 0.0011907740263268352, |
|
"learning_rate": 0.0001779201475397445, |
|
"loss": 0.0001, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7587523870146403, |
|
"grad_norm": 0.0038478299975395203, |
|
"learning_rate": 0.00017745232726985166, |
|
"loss": 0.0002, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.7604498196477827, |
|
"grad_norm": 0.008054184727370739, |
|
"learning_rate": 0.00017698423055532807, |
|
"loss": 0.0002, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.7621472522809251, |
|
"grad_norm": 0.0007232764619402587, |
|
"learning_rate": 0.00017651586210990232, |
|
"loss": 0.0, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.7638446849140674, |
|
"grad_norm": 0.0009626049431972206, |
|
"learning_rate": 0.00017604722665003956, |
|
"loss": 0.0001, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.7638446849140674, |
|
"eval_loss": 0.38282719254493713, |
|
"eval_runtime": 65.9216, |
|
"eval_samples_per_second": 2.928, |
|
"eval_steps_per_second": 2.928, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.7655421175472098, |
|
"grad_norm": 0.35101303458213806, |
|
"learning_rate": 0.00017557832889489357, |
|
"loss": 1.093, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.7672395501803522, |
|
"grad_norm": 0.23341582715511322, |
|
"learning_rate": 0.0001751091735662596, |
|
"loss": 0.7267, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.7689369828134945, |
|
"grad_norm": 0.25353920459747314, |
|
"learning_rate": 0.00017463976538852654, |
|
"loss": 0.8581, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.7706344154466369, |
|
"grad_norm": 0.21722197532653809, |
|
"learning_rate": 0.00017417010908862962, |
|
"loss": 0.7378, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.7723318480797793, |
|
"grad_norm": 0.22062261402606964, |
|
"learning_rate": 0.00017370020939600248, |
|
"loss": 0.7099, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.7740292807129217, |
|
"grad_norm": 0.1780662089586258, |
|
"learning_rate": 0.00017323007104252984, |
|
"loss": 0.4919, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.775726713346064, |
|
"grad_norm": 0.20236726105213165, |
|
"learning_rate": 0.00017275969876249974, |
|
"loss": 0.5919, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.7774241459792065, |
|
"grad_norm": 0.22523203492164612, |
|
"learning_rate": 0.00017228909729255574, |
|
"loss": 0.7686, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.7791215786123489, |
|
"grad_norm": 0.23676562309265137, |
|
"learning_rate": 0.00017181827137164953, |
|
"loss": 0.8026, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.7808190112454912, |
|
"grad_norm": 0.21583965420722961, |
|
"learning_rate": 0.00017134722574099276, |
|
"loss": 0.7097, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.7825164438786336, |
|
"grad_norm": 0.2776244282722473, |
|
"learning_rate": 0.0001708759651440098, |
|
"loss": 0.9476, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.784213876511776, |
|
"grad_norm": 0.2028312236070633, |
|
"learning_rate": 0.00017040449432628962, |
|
"loss": 0.6013, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.7859113091449184, |
|
"grad_norm": 0.2275046855211258, |
|
"learning_rate": 0.0001699328180355381, |
|
"loss": 0.7551, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.7876087417780607, |
|
"grad_norm": 0.20202623307704926, |
|
"learning_rate": 0.00016946094102153025, |
|
"loss": 0.4759, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.7893061744112031, |
|
"grad_norm": 0.17477299273014069, |
|
"learning_rate": 0.00016898886803606237, |
|
"loss": 0.4537, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.7910036070443455, |
|
"grad_norm": 0.08333531022071838, |
|
"learning_rate": 0.0001685166038329042, |
|
"loss": 0.1224, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.7927010396774878, |
|
"grad_norm": 0.21820639073848724, |
|
"learning_rate": 0.000168044153167751, |
|
"loss": 0.5658, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.7943984723106302, |
|
"grad_norm": 0.14012931287288666, |
|
"learning_rate": 0.00016757152079817573, |
|
"loss": 0.2818, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.7960959049437726, |
|
"grad_norm": 0.1741451919078827, |
|
"learning_rate": 0.00016709871148358108, |
|
"loss": 0.3492, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.797793337576915, |
|
"grad_norm": 0.1527792066335678, |
|
"learning_rate": 0.00016662572998515164, |
|
"loss": 0.2187, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7994907702100573, |
|
"grad_norm": 0.1383330523967743, |
|
"learning_rate": 0.00016615258106580585, |
|
"loss": 0.2405, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8011882028431997, |
|
"grad_norm": 0.13245010375976562, |
|
"learning_rate": 0.000165679269490148, |
|
"loss": 0.2295, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8028856354763421, |
|
"grad_norm": 0.13676372170448303, |
|
"learning_rate": 0.0001652058000244205, |
|
"loss": 0.2516, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8045830681094844, |
|
"grad_norm": 0.07976588606834412, |
|
"learning_rate": 0.00016473217743645556, |
|
"loss": 0.0916, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8062805007426268, |
|
"grad_norm": 0.11341172456741333, |
|
"learning_rate": 0.00016425840649562736, |
|
"loss": 0.152, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8079779333757692, |
|
"grad_norm": 0.12954847514629364, |
|
"learning_rate": 0.00016378449197280412, |
|
"loss": 0.1525, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8096753660089115, |
|
"grad_norm": 0.08243954181671143, |
|
"learning_rate": 0.0001633104386402997, |
|
"loss": 0.0708, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8113727986420539, |
|
"grad_norm": 0.0030563257168978453, |
|
"learning_rate": 0.00016283625127182596, |
|
"loss": 0.0001, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8130702312751963, |
|
"grad_norm": 0.0008315025479532778, |
|
"learning_rate": 0.00016236193464244444, |
|
"loss": 0.0, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8147676639083387, |
|
"grad_norm": 0.006789859849959612, |
|
"learning_rate": 0.00016188749352851825, |
|
"loss": 0.0002, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.816465096541481, |
|
"grad_norm": 0.009863720275461674, |
|
"learning_rate": 0.00016141293270766424, |
|
"loss": 0.0002, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8181625291746234, |
|
"grad_norm": 0.059226732701063156, |
|
"learning_rate": 0.00016093825695870462, |
|
"loss": 0.0008, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8198599618077658, |
|
"grad_norm": 0.0056890202686190605, |
|
"learning_rate": 0.00016046347106161876, |
|
"loss": 0.0003, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8215573944409081, |
|
"grad_norm": 0.0004136976203881204, |
|
"learning_rate": 0.0001599885797974956, |
|
"loss": 0.0, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8232548270740505, |
|
"grad_norm": 0.016998767852783203, |
|
"learning_rate": 0.00015951358794848465, |
|
"loss": 0.0004, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8249522597071929, |
|
"grad_norm": 0.0017255417769774795, |
|
"learning_rate": 0.00015903850029774878, |
|
"loss": 0.0001, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8266496923403353, |
|
"grad_norm": 0.0012270875740796328, |
|
"learning_rate": 0.0001585633216294152, |
|
"loss": 0.0001, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8283471249734776, |
|
"grad_norm": 0.0009051132365129888, |
|
"learning_rate": 0.0001580880567285279, |
|
"loss": 0.0, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.83004455760662, |
|
"grad_norm": 0.0009166182717308402, |
|
"learning_rate": 0.00015761271038099912, |
|
"loss": 0.0001, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8317419902397624, |
|
"grad_norm": 0.002149962354451418, |
|
"learning_rate": 0.00015713728737356137, |
|
"loss": 0.0001, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8334394228729047, |
|
"grad_norm": 0.008419407531619072, |
|
"learning_rate": 0.00015666179249371892, |
|
"loss": 0.0004, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8351368555060471, |
|
"grad_norm": 0.0006536226137541234, |
|
"learning_rate": 0.00015618623052970006, |
|
"loss": 0.0, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8368342881391895, |
|
"grad_norm": 0.035275768488645554, |
|
"learning_rate": 0.0001557106062704085, |
|
"loss": 0.0064, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8385317207723318, |
|
"grad_norm": 0.002518226159736514, |
|
"learning_rate": 0.00015523492450537517, |
|
"loss": 0.0001, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8402291534054742, |
|
"grad_norm": 0.00048825182602740824, |
|
"learning_rate": 0.00015475919002471016, |
|
"loss": 0.0, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8419265860386166, |
|
"grad_norm": 0.007141390815377235, |
|
"learning_rate": 0.0001542834076190544, |
|
"loss": 0.0001, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.843624018671759, |
|
"grad_norm": 0.048018842935562134, |
|
"learning_rate": 0.00015380758207953155, |
|
"loss": 0.0004, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8453214513049013, |
|
"grad_norm": 0.0009412519866600633, |
|
"learning_rate": 0.0001533317181976994, |
|
"loss": 0.0, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8470188839380437, |
|
"grad_norm": 0.031883303076028824, |
|
"learning_rate": 0.00015285582076550198, |
|
"loss": 0.0037, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.8487163165711861, |
|
"grad_norm": 0.016174526885151863, |
|
"learning_rate": 0.00015237989457522118, |
|
"loss": 0.0007, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8487163165711861, |
|
"eval_loss": 0.372147798538208, |
|
"eval_runtime": 65.9897, |
|
"eval_samples_per_second": 2.925, |
|
"eval_steps_per_second": 2.925, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8504137492043284, |
|
"grad_norm": 0.23917846381664276, |
|
"learning_rate": 0.00015190394441942843, |
|
"loss": 0.8684, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.8521111818374708, |
|
"grad_norm": 0.27708154916763306, |
|
"learning_rate": 0.0001514279750909365, |
|
"loss": 1.0004, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.8538086144706132, |
|
"grad_norm": 0.25657930970191956, |
|
"learning_rate": 0.00015095199138275128, |
|
"loss": 0.7568, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.8555060471037556, |
|
"grad_norm": 0.2313452512025833, |
|
"learning_rate": 0.00015047599808802332, |
|
"loss": 0.8288, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.8572034797368979, |
|
"grad_norm": 0.2528156042098999, |
|
"learning_rate": 0.00015, |
|
"loss": 1.011, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.8589009123700403, |
|
"grad_norm": 0.1938907355070114, |
|
"learning_rate": 0.00014952400191197665, |
|
"loss": 0.5598, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.8605983450031827, |
|
"grad_norm": 0.23120371997356415, |
|
"learning_rate": 0.00014904800861724872, |
|
"loss": 0.6959, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.862295777636325, |
|
"grad_norm": 0.23072639107704163, |
|
"learning_rate": 0.00014857202490906347, |
|
"loss": 0.7868, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.8639932102694674, |
|
"grad_norm": 0.21651454269886017, |
|
"learning_rate": 0.00014809605558057157, |
|
"loss": 0.6725, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.8656906429026098, |
|
"grad_norm": 0.1905306875705719, |
|
"learning_rate": 0.0001476201054247788, |
|
"loss": 0.5756, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.8673880755357521, |
|
"grad_norm": 0.35889434814453125, |
|
"learning_rate": 0.00014714417923449797, |
|
"loss": 0.6115, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.8690855081688945, |
|
"grad_norm": 0.19174250960350037, |
|
"learning_rate": 0.00014666828180230057, |
|
"loss": 0.4859, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.8707829408020369, |
|
"grad_norm": 0.2138870805501938, |
|
"learning_rate": 0.0001461924179204684, |
|
"loss": 0.6636, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.8724803734351793, |
|
"grad_norm": 0.17262116074562073, |
|
"learning_rate": 0.00014571659238094556, |
|
"loss": 0.42, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.8741778060683216, |
|
"grad_norm": 0.22339358925819397, |
|
"learning_rate": 0.00014524080997528987, |
|
"loss": 0.6612, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.875875238701464, |
|
"grad_norm": 0.1979471892118454, |
|
"learning_rate": 0.0001447650754946249, |
|
"loss": 0.5441, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.8775726713346064, |
|
"grad_norm": 0.20259279012680054, |
|
"learning_rate": 0.00014428939372959152, |
|
"loss": 0.5254, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.8792701039677487, |
|
"grad_norm": 0.12251409888267517, |
|
"learning_rate": 0.0001438137694702999, |
|
"loss": 0.2171, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.8809675366008911, |
|
"grad_norm": 0.16714578866958618, |
|
"learning_rate": 0.00014333820750628105, |
|
"loss": 0.311, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.8826649692340335, |
|
"grad_norm": 0.24203087389469147, |
|
"learning_rate": 0.00014286271262643866, |
|
"loss": 0.6175, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.8843624018671759, |
|
"grad_norm": 0.1858789026737213, |
|
"learning_rate": 0.00014238728961900088, |
|
"loss": 0.3565, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.8860598345003182, |
|
"grad_norm": 0.09111540019512177, |
|
"learning_rate": 0.00014191194327147212, |
|
"loss": 0.1199, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.8877572671334606, |
|
"grad_norm": 0.13533198833465576, |
|
"learning_rate": 0.00014143667837058477, |
|
"loss": 0.2471, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.889454699766603, |
|
"grad_norm": 0.17338241636753082, |
|
"learning_rate": 0.00014096149970225122, |
|
"loss": 0.3255, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.8911521323997453, |
|
"grad_norm": 0.05573137849569321, |
|
"learning_rate": 0.00014048641205151533, |
|
"loss": 0.0455, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.8928495650328877, |
|
"grad_norm": 0.007357200141996145, |
|
"learning_rate": 0.0001400114202025044, |
|
"loss": 0.0004, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.8945469976660301, |
|
"grad_norm": 0.00043303659185767174, |
|
"learning_rate": 0.00013953652893838119, |
|
"loss": 0.0, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.8962444302991726, |
|
"grad_norm": 0.02541971206665039, |
|
"learning_rate": 0.0001390617430412954, |
|
"loss": 0.0028, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.8979418629323149, |
|
"grad_norm": 0.010525004006922245, |
|
"learning_rate": 0.0001385870672923357, |
|
"loss": 0.0005, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.8996392955654573, |
|
"grad_norm": 0.03903070092201233, |
|
"learning_rate": 0.0001381125064714817, |
|
"loss": 0.0003, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9013367281985997, |
|
"grad_norm": 0.010076366364955902, |
|
"learning_rate": 0.00013763806535755562, |
|
"loss": 0.0002, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.903034160831742, |
|
"grad_norm": 0.0008758578333072364, |
|
"learning_rate": 0.00013716374872817407, |
|
"loss": 0.0, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9047315934648844, |
|
"grad_norm": 0.0009034467511810362, |
|
"learning_rate": 0.0001366895613597003, |
|
"loss": 0.0, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9064290260980268, |
|
"grad_norm": 0.0004988125874660909, |
|
"learning_rate": 0.00013621550802719588, |
|
"loss": 0.0, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.9081264587311692, |
|
"grad_norm": 0.012061301618814468, |
|
"learning_rate": 0.00013574159350437261, |
|
"loss": 0.0006, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9098238913643115, |
|
"grad_norm": 0.0005069606122560799, |
|
"learning_rate": 0.0001352678225635444, |
|
"loss": 0.0, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9115213239974539, |
|
"grad_norm": 0.003097748151049018, |
|
"learning_rate": 0.00013479419997557948, |
|
"loss": 0.0001, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9132187566305963, |
|
"grad_norm": 0.010489325039088726, |
|
"learning_rate": 0.000134320730509852, |
|
"loss": 0.0002, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.9149161892637386, |
|
"grad_norm": 0.00030282657826319337, |
|
"learning_rate": 0.00013384741893419415, |
|
"loss": 0.0, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.916613621896881, |
|
"grad_norm": 0.0403389073908329, |
|
"learning_rate": 0.00013337427001484836, |
|
"loss": 0.0005, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9183110545300234, |
|
"grad_norm": 0.003200069535523653, |
|
"learning_rate": 0.0001329012885164189, |
|
"loss": 0.0001, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.9200084871631657, |
|
"grad_norm": 0.007805091328918934, |
|
"learning_rate": 0.00013242847920182424, |
|
"loss": 0.0002, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.9217059197963081, |
|
"grad_norm": 0.004255454055964947, |
|
"learning_rate": 0.000131955846832249, |
|
"loss": 0.0001, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.9234033524294505, |
|
"grad_norm": 0.0008626742055639625, |
|
"learning_rate": 0.00013148339616709577, |
|
"loss": 0.0, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9251007850625929, |
|
"grad_norm": 0.005825830157846212, |
|
"learning_rate": 0.00013101113196393758, |
|
"loss": 0.0002, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.9267982176957352, |
|
"grad_norm": 0.00038926751585677266, |
|
"learning_rate": 0.00013053905897846972, |
|
"loss": 0.0, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.9284956503288776, |
|
"grad_norm": 0.025748664513230324, |
|
"learning_rate": 0.00013006718196446188, |
|
"loss": 0.0007, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.93019308296202, |
|
"grad_norm": 0.000722411903552711, |
|
"learning_rate": 0.0001295955056737104, |
|
"loss": 0.0, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.9318905155951623, |
|
"grad_norm": 0.013827555812895298, |
|
"learning_rate": 0.0001291240348559902, |
|
"loss": 0.0003, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.9335879482283047, |
|
"grad_norm": 0.0006142717902548611, |
|
"learning_rate": 0.00012865277425900724, |
|
"loss": 0.0, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.9335879482283047, |
|
"eval_loss": 0.37203362584114075, |
|
"eval_runtime": 65.9103, |
|
"eval_samples_per_second": 2.928, |
|
"eval_steps_per_second": 2.928, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.9352853808614471, |
|
"grad_norm": 0.28680503368377686, |
|
"learning_rate": 0.0001281817286283505, |
|
"loss": 0.9974, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.9369828134945895, |
|
"grad_norm": 0.21847711503505707, |
|
"learning_rate": 0.00012771090270744426, |
|
"loss": 0.5843, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.9386802461277318, |
|
"grad_norm": 0.21124739944934845, |
|
"learning_rate": 0.00012724030123750026, |
|
"loss": 0.7508, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.9403776787608742, |
|
"grad_norm": 0.26063379645347595, |
|
"learning_rate": 0.00012676992895747016, |
|
"loss": 1.0164, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.9420751113940166, |
|
"grad_norm": 0.22532792389392853, |
|
"learning_rate": 0.0001262997906039975, |
|
"loss": 0.7664, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.9437725440271589, |
|
"grad_norm": 0.233880877494812, |
|
"learning_rate": 0.00012582989091137038, |
|
"loss": 0.7483, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.9454699766603013, |
|
"grad_norm": 0.20552365481853485, |
|
"learning_rate": 0.00012536023461147346, |
|
"loss": 0.6613, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.9471674092934437, |
|
"grad_norm": 0.20067091286182404, |
|
"learning_rate": 0.0001248908264337404, |
|
"loss": 0.5999, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.948864841926586, |
|
"grad_norm": 0.19979502260684967, |
|
"learning_rate": 0.00012442167110510643, |
|
"loss": 0.5804, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.9505622745597284, |
|
"grad_norm": 1.8899680376052856, |
|
"learning_rate": 0.00012395277334996044, |
|
"loss": 1.2513, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.9522597071928708, |
|
"grad_norm": 1.1816651821136475, |
|
"learning_rate": 0.0001234841378900976, |
|
"loss": 0.5431, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.9539571398260132, |
|
"grad_norm": 0.7981509566307068, |
|
"learning_rate": 0.0001230157694446719, |
|
"loss": 0.4656, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.9556545724591555, |
|
"grad_norm": 0.15804308652877808, |
|
"learning_rate": 0.00012254767273014828, |
|
"loss": 0.3057, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.9573520050922979, |
|
"grad_norm": 0.25787147879600525, |
|
"learning_rate": 0.00012207985246025547, |
|
"loss": 0.5838, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.9590494377254403, |
|
"grad_norm": 0.2439812868833542, |
|
"learning_rate": 0.00012161231334593851, |
|
"loss": 0.4171, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.9607468703585826, |
|
"grad_norm": 0.2657896876335144, |
|
"learning_rate": 0.0001211450600953109, |
|
"loss": 0.4894, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.962444302991725, |
|
"grad_norm": 0.19345270097255707, |
|
"learning_rate": 0.00012067809741360779, |
|
"loss": 0.4258, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.9641417356248674, |
|
"grad_norm": 0.2370881736278534, |
|
"learning_rate": 0.0001202114300031382, |
|
"loss": 0.5128, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.9658391682580098, |
|
"grad_norm": 0.15387995541095734, |
|
"learning_rate": 0.00011974506256323775, |
|
"loss": 0.3073, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.9675366008911521, |
|
"grad_norm": 0.12014123052358627, |
|
"learning_rate": 0.0001192789997902214, |
|
"loss": 0.1645, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.9692340335242945, |
|
"grad_norm": 0.07062285393476486, |
|
"learning_rate": 0.00011881324637733611, |
|
"loss": 0.057, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.9709314661574369, |
|
"grad_norm": 0.017441993579268456, |
|
"learning_rate": 0.00011834780701471354, |
|
"loss": 0.0017, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.9726288987905792, |
|
"grad_norm": 0.09837821871042252, |
|
"learning_rate": 0.00011788268638932295, |
|
"loss": 0.0769, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.9743263314237216, |
|
"grad_norm": 0.006846334785223007, |
|
"learning_rate": 0.00011741788918492386, |
|
"loss": 0.0002, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.976023764056864, |
|
"grad_norm": 0.07346488535404205, |
|
"learning_rate": 0.00011695342008201888, |
|
"loss": 0.0527, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.9777211966900063, |
|
"grad_norm": 0.022366201505064964, |
|
"learning_rate": 0.00011648928375780686, |
|
"loss": 0.0002, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.9794186293231487, |
|
"grad_norm": 0.02840852551162243, |
|
"learning_rate": 0.00011602548488613531, |
|
"loss": 0.0024, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.9811160619562911, |
|
"grad_norm": 0.004534956067800522, |
|
"learning_rate": 0.00011556202813745382, |
|
"loss": 0.0002, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.9828134945894335, |
|
"grad_norm": 0.023318735882639885, |
|
"learning_rate": 0.00011509891817876662, |
|
"loss": 0.0048, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.9845109272225758, |
|
"grad_norm": 0.003532181493937969, |
|
"learning_rate": 0.00011463615967358588, |
|
"loss": 0.0002, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.9862083598557182, |
|
"grad_norm": 0.004218058194965124, |
|
"learning_rate": 0.00011417375728188464, |
|
"loss": 0.0002, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.9879057924888606, |
|
"grad_norm": 0.036903224885463715, |
|
"learning_rate": 0.00011371171566004985, |
|
"loss": 0.0045, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.9896032251220029, |
|
"grad_norm": 0.0010676698293536901, |
|
"learning_rate": 0.00011325003946083548, |
|
"loss": 0.0001, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.9913006577551453, |
|
"grad_norm": 0.0011252000695094466, |
|
"learning_rate": 0.00011278873333331572, |
|
"loss": 0.0001, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.9929980903882877, |
|
"grad_norm": 0.015459141694009304, |
|
"learning_rate": 0.00011232780192283812, |
|
"loss": 0.0005, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.99469552302143, |
|
"grad_norm": 0.0027658988256007433, |
|
"learning_rate": 0.00011186724987097698, |
|
"loss": 0.0002, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.9963929556545724, |
|
"grad_norm": 0.0068977042101323605, |
|
"learning_rate": 0.00011140708181548628, |
|
"loss": 0.0003, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.9980903882877148, |
|
"grad_norm": 0.024217281490564346, |
|
"learning_rate": 0.00011094730239025318, |
|
"loss": 0.0007, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.9997878209208572, |
|
"grad_norm": 0.1039547249674797, |
|
"learning_rate": 0.0001104879162252515, |
|
"loss": 0.0096, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.0014852535539995, |
|
"grad_norm": 0.5956621170043945, |
|
"learning_rate": 0.00011002892794649476, |
|
"loss": 0.918, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.003182686187142, |
|
"grad_norm": 0.28132206201553345, |
|
"learning_rate": 0.00010957034217598982, |
|
"loss": 0.5421, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.0048801188202843, |
|
"grad_norm": 0.29155442118644714, |
|
"learning_rate": 0.00010911216353169033, |
|
"loss": 0.7092, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.0065775514534268, |
|
"grad_norm": 0.20578022301197052, |
|
"learning_rate": 0.00010865439662745013, |
|
"loss": 0.4683, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.008274984086569, |
|
"grad_norm": 0.21167628467082977, |
|
"learning_rate": 0.00010819704607297685, |
|
"loss": 0.4827, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.0099724167197115, |
|
"grad_norm": 0.23780500888824463, |
|
"learning_rate": 0.00010774011647378553, |
|
"loss": 0.506, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.0116698493528538, |
|
"grad_norm": 0.34926292300224304, |
|
"learning_rate": 0.00010728361243115215, |
|
"loss": 0.654, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.0133672819859962, |
|
"grad_norm": 0.29102039337158203, |
|
"learning_rate": 0.00010682753854206733, |
|
"loss": 0.5844, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.0150647146191385, |
|
"grad_norm": 0.23482352495193481, |
|
"learning_rate": 0.00010637189939919014, |
|
"loss": 0.4841, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.016762147252281, |
|
"grad_norm": 0.25120899081230164, |
|
"learning_rate": 0.00010591669959080162, |
|
"loss": 0.5328, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.0184595798854232, |
|
"grad_norm": 0.26982659101486206, |
|
"learning_rate": 0.00010546194370075881, |
|
"loss": 0.6439, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0184595798854232, |
|
"eval_loss": 0.3681555986404419, |
|
"eval_runtime": 65.873, |
|
"eval_samples_per_second": 2.93, |
|
"eval_steps_per_second": 2.93, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0201570125185657, |
|
"grad_norm": 0.24521693587303162, |
|
"learning_rate": 0.00010500763630844842, |
|
"loss": 0.471, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.021854445151708, |
|
"grad_norm": 0.24358759820461273, |
|
"learning_rate": 0.00010455378198874092, |
|
"loss": 0.5164, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.0235518777848505, |
|
"grad_norm": 0.25223568081855774, |
|
"learning_rate": 0.00010410038531194427, |
|
"loss": 0.5172, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.0252493104179927, |
|
"grad_norm": 0.2088579535484314, |
|
"learning_rate": 0.0001036474508437579, |
|
"loss": 0.3021, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.0269467430511352, |
|
"grad_norm": 0.24515843391418457, |
|
"learning_rate": 0.00010319498314522693, |
|
"loss": 0.5318, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.0286441756842775, |
|
"grad_norm": 0.21473954617977142, |
|
"learning_rate": 0.0001027429867726961, |
|
"loss": 0.3774, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.03034160831742, |
|
"grad_norm": 0.15975739061832428, |
|
"learning_rate": 0.00010229146627776376, |
|
"loss": 0.2362, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.0320390409505622, |
|
"grad_norm": 0.11923979967832565, |
|
"learning_rate": 0.00010184042620723637, |
|
"loss": 0.1048, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.0337364735837047, |
|
"grad_norm": 0.28334590792655945, |
|
"learning_rate": 0.00010138987110308241, |
|
"loss": 0.5413, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.035433906216847, |
|
"grad_norm": 0.15508587658405304, |
|
"learning_rate": 0.00010093980550238675, |
|
"loss": 0.1701, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.0371313388499894, |
|
"grad_norm": 0.16336044669151306, |
|
"learning_rate": 0.00010049023393730502, |
|
"loss": 0.2, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.0388287714831317, |
|
"grad_norm": 0.15937276184558868, |
|
"learning_rate": 0.00010004116093501789, |
|
"loss": 0.2001, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.0405262041162742, |
|
"grad_norm": 0.16733552515506744, |
|
"learning_rate": 9.959259101768545e-05, |
|
"loss": 0.2056, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.0422236367494164, |
|
"grad_norm": 0.16601236164569855, |
|
"learning_rate": 9.914452870240188e-05, |
|
"loss": 0.1972, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.043921069382559, |
|
"grad_norm": 0.16554437577724457, |
|
"learning_rate": 9.869697850114969e-05, |
|
"loss": 0.1963, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.0456185020157012, |
|
"grad_norm": 0.08596237003803253, |
|
"learning_rate": 9.824994492075444e-05, |
|
"loss": 0.0567, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.0473159346488436, |
|
"grad_norm": 0.0006779459654353559, |
|
"learning_rate": 9.780343246283923e-05, |
|
"loss": 0.0, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.049013367281986, |
|
"grad_norm": 0.00339099601842463, |
|
"learning_rate": 9.735744562377968e-05, |
|
"loss": 0.0001, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.0507107999151284, |
|
"grad_norm": 0.09683417528867722, |
|
"learning_rate": 9.691198889465824e-05, |
|
"loss": 0.0579, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.0524082325482707, |
|
"grad_norm": 0.003139798529446125, |
|
"learning_rate": 9.646706676121923e-05, |
|
"loss": 0.0001, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.0541056651814131, |
|
"grad_norm": 0.01017076801508665, |
|
"learning_rate": 9.602268370382363e-05, |
|
"loss": 0.0004, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.0558030978145554, |
|
"grad_norm": 0.0703444853425026, |
|
"learning_rate": 9.557884419740386e-05, |
|
"loss": 0.0398, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.0575005304476979, |
|
"grad_norm": 0.002169876592233777, |
|
"learning_rate": 9.513555271141882e-05, |
|
"loss": 0.0001, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.0591979630808401, |
|
"grad_norm": 0.00480187963694334, |
|
"learning_rate": 9.46928137098089e-05, |
|
"loss": 0.0001, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.0608953957139826, |
|
"grad_norm": 0.0012345153372734785, |
|
"learning_rate": 9.425063165095088e-05, |
|
"loss": 0.0, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.0625928283471249, |
|
"grad_norm": 0.0015107739018276334, |
|
"learning_rate": 9.380901098761319e-05, |
|
"loss": 0.0001, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.0642902609802674, |
|
"grad_norm": 0.00046349604963324964, |
|
"learning_rate": 9.336795616691103e-05, |
|
"loss": 0.0, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.0659876936134096, |
|
"grad_norm": 0.023465273901820183, |
|
"learning_rate": 9.292747163026154e-05, |
|
"loss": 0.0007, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.067685126246552, |
|
"grad_norm": 0.0014659131411463022, |
|
"learning_rate": 9.24875618133391e-05, |
|
"loss": 0.0, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.0693825588796946, |
|
"grad_norm": 0.0005259321187622845, |
|
"learning_rate": 9.204823114603068e-05, |
|
"loss": 0.0, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.0710799915128368, |
|
"grad_norm": 0.018575303256511688, |
|
"learning_rate": 9.160948405239128e-05, |
|
"loss": 0.0007, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.072777424145979, |
|
"grad_norm": 0.026870638132095337, |
|
"learning_rate": 9.117132495059916e-05, |
|
"loss": 0.0008, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.0744748567791216, |
|
"grad_norm": 0.0007909996784292161, |
|
"learning_rate": 9.07337582529117e-05, |
|
"loss": 0.0, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.076172289412264, |
|
"grad_norm": 0.000697901239618659, |
|
"learning_rate": 9.02967883656207e-05, |
|
"loss": 0.0, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.0778697220454063, |
|
"grad_norm": 0.0005197379505261779, |
|
"learning_rate": 8.986041968900796e-05, |
|
"loss": 0.0, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.0795671546785486, |
|
"grad_norm": 0.0005938683752901852, |
|
"learning_rate": 8.942465661730129e-05, |
|
"loss": 0.0, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.081264587311691, |
|
"grad_norm": 0.006787140388041735, |
|
"learning_rate": 8.898950353862998e-05, |
|
"loss": 0.0002, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.0829620199448335, |
|
"grad_norm": 0.008357354439795017, |
|
"learning_rate": 8.85549648349807e-05, |
|
"loss": 0.0002, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.0846594525779758, |
|
"grad_norm": 0.018981877714395523, |
|
"learning_rate": 8.812104488215332e-05, |
|
"loss": 0.0005, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.0863568852111183, |
|
"grad_norm": 0.2934776842594147, |
|
"learning_rate": 8.768774804971705e-05, |
|
"loss": 0.614, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.0880543178442605, |
|
"grad_norm": 0.3190712034702301, |
|
"learning_rate": 8.725507870096609e-05, |
|
"loss": 0.7782, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.089751750477403, |
|
"grad_norm": 0.27697721123695374, |
|
"learning_rate": 8.6823041192876e-05, |
|
"loss": 0.6048, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.0914491831105453, |
|
"grad_norm": 0.28390857577323914, |
|
"learning_rate": 8.639163987605976e-05, |
|
"loss": 0.7607, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.0931466157436878, |
|
"grad_norm": 0.2731177806854248, |
|
"learning_rate": 8.596087909472373e-05, |
|
"loss": 0.6359, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.09484404837683, |
|
"grad_norm": 0.2403832972049713, |
|
"learning_rate": 8.553076318662425e-05, |
|
"loss": 0.549, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.0965414810099725, |
|
"grad_norm": 0.2551855742931366, |
|
"learning_rate": 8.510129648302372e-05, |
|
"loss": 0.6247, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.0982389136431148, |
|
"grad_norm": 0.25632765889167786, |
|
"learning_rate": 8.467248330864718e-05, |
|
"loss": 0.5901, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.0999363462762572, |
|
"grad_norm": 0.22331362962722778, |
|
"learning_rate": 8.424432798163836e-05, |
|
"loss": 0.5206, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.1016337789093995, |
|
"grad_norm": 0.20115035772323608, |
|
"learning_rate": 8.381683481351676e-05, |
|
"loss": 0.3558, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.103331211542542, |
|
"grad_norm": 0.21585069596767426, |
|
"learning_rate": 8.339000810913386e-05, |
|
"loss": 0.479, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.103331211542542, |
|
"eval_loss": 0.3638736605644226, |
|
"eval_runtime": 65.9097, |
|
"eval_samples_per_second": 2.928, |
|
"eval_steps_per_second": 2.928, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.1050286441756842, |
|
"grad_norm": 1.5040993690490723, |
|
"learning_rate": 8.296385216662983e-05, |
|
"loss": 0.8622, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.1067260768088267, |
|
"grad_norm": 0.20278804004192352, |
|
"learning_rate": 8.253837127739014e-05, |
|
"loss": 0.3313, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.108423509441969, |
|
"grad_norm": 0.22194603085517883, |
|
"learning_rate": 8.21135697260028e-05, |
|
"loss": 0.436, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.1101209420751115, |
|
"grad_norm": 0.26204535365104675, |
|
"learning_rate": 8.168945179021458e-05, |
|
"loss": 0.5942, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.1118183747082537, |
|
"grad_norm": 0.2720772922039032, |
|
"learning_rate": 8.126602174088843e-05, |
|
"loss": 0.5629, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.1135158073413962, |
|
"grad_norm": 0.3167116641998291, |
|
"learning_rate": 8.084328384196034e-05, |
|
"loss": 0.7216, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.1152132399745385, |
|
"grad_norm": 0.1929994821548462, |
|
"learning_rate": 8.042124235039612e-05, |
|
"loss": 0.3211, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.116910672607681, |
|
"grad_norm": 0.256496787071228, |
|
"learning_rate": 7.999990151614894e-05, |
|
"loss": 0.4641, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.1186081052408232, |
|
"grad_norm": 0.18538035452365875, |
|
"learning_rate": 7.957926558211642e-05, |
|
"loss": 0.2509, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.1203055378739657, |
|
"grad_norm": 0.16746576130390167, |
|
"learning_rate": 7.915933878409761e-05, |
|
"loss": 0.2268, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.122002970507108, |
|
"grad_norm": 0.14101870357990265, |
|
"learning_rate": 7.874012535075076e-05, |
|
"loss": 0.1466, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.1237004031402504, |
|
"grad_norm": 0.1386355608701706, |
|
"learning_rate": 7.832162950355055e-05, |
|
"loss": 0.1181, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.1253978357733927, |
|
"grad_norm": 0.17996515333652496, |
|
"learning_rate": 7.79038554567454e-05, |
|
"loss": 0.2161, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.1270952684065352, |
|
"grad_norm": 0.10335368663072586, |
|
"learning_rate": 7.74868074173154e-05, |
|
"loss": 0.0803, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.1287927010396774, |
|
"grad_norm": 0.08208036422729492, |
|
"learning_rate": 7.707048958492972e-05, |
|
"loss": 0.0472, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.13049013367282, |
|
"grad_norm": 0.0008246850338764489, |
|
"learning_rate": 7.665490615190418e-05, |
|
"loss": 0.0, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.1321875663059622, |
|
"grad_norm": 0.10074999183416367, |
|
"learning_rate": 7.624006130315941e-05, |
|
"loss": 0.0596, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.1338849989391047, |
|
"grad_norm": 0.005631667096167803, |
|
"learning_rate": 7.582595921617849e-05, |
|
"loss": 0.0001, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.135582431572247, |
|
"grad_norm": 0.0009247282869182527, |
|
"learning_rate": 7.541260406096477e-05, |
|
"loss": 0.0, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.1372798642053894, |
|
"grad_norm": 0.0009505698108114302, |
|
"learning_rate": 7.500000000000002e-05, |
|
"loss": 0.0, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.1389772968385317, |
|
"grad_norm": 0.0032684446778148413, |
|
"learning_rate": 7.458815118820262e-05, |
|
"loss": 0.0001, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.1406747294716741, |
|
"grad_norm": 0.0013583129039034247, |
|
"learning_rate": 7.417706177288559e-05, |
|
"loss": 0.0001, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.1423721621048164, |
|
"grad_norm": 0.01342520397156477, |
|
"learning_rate": 7.376673589371466e-05, |
|
"loss": 0.0001, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.1440695947379589, |
|
"grad_norm": 0.0014044721610844135, |
|
"learning_rate": 7.335717768266692e-05, |
|
"loss": 0.0001, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.1457670273711011, |
|
"grad_norm": 0.00327441468834877, |
|
"learning_rate": 7.294839126398908e-05, |
|
"loss": 0.0001, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.1474644600042436, |
|
"grad_norm": 0.0010030365083366632, |
|
"learning_rate": 7.254038075415572e-05, |
|
"loss": 0.0001, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.1491618926373859, |
|
"grad_norm": 0.0014624049654230475, |
|
"learning_rate": 7.21331502618282e-05, |
|
"loss": 0.0001, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.1508593252705284, |
|
"grad_norm": 0.0015749256126582623, |
|
"learning_rate": 7.172670388781313e-05, |
|
"loss": 0.0001, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.1525567579036706, |
|
"grad_norm": 0.0019347285851836205, |
|
"learning_rate": 7.132104572502085e-05, |
|
"loss": 0.0001, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.154254190536813, |
|
"grad_norm": 0.0021744261030107737, |
|
"learning_rate": 7.091617985842462e-05, |
|
"loss": 0.0001, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.1559516231699554, |
|
"grad_norm": 0.001666248426772654, |
|
"learning_rate": 7.051211036501928e-05, |
|
"loss": 0.0001, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.1576490558030978, |
|
"grad_norm": 0.0027833145577460527, |
|
"learning_rate": 7.010884131378001e-05, |
|
"loss": 0.0001, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.15934648843624, |
|
"grad_norm": 0.0027128716465085745, |
|
"learning_rate": 6.970637676562177e-05, |
|
"loss": 0.0001, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.1610439210693826, |
|
"grad_norm": 0.0008519128314219415, |
|
"learning_rate": 6.930472077335813e-05, |
|
"loss": 0.0, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.1627413537025248, |
|
"grad_norm": 0.0009717077482491732, |
|
"learning_rate": 6.890387738166041e-05, |
|
"loss": 0.0, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.1644387863356673, |
|
"grad_norm": 0.0008186842896975577, |
|
"learning_rate": 6.850385062701707e-05, |
|
"loss": 0.0, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.1661362189688096, |
|
"grad_norm": 0.00198109308257699, |
|
"learning_rate": 6.810464453769313e-05, |
|
"loss": 0.0001, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.167833651601952, |
|
"grad_norm": 0.0016451094998046756, |
|
"learning_rate": 6.770626313368954e-05, |
|
"loss": 0.0001, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.1695310842350943, |
|
"grad_norm": 0.012841903604567051, |
|
"learning_rate": 6.730871042670253e-05, |
|
"loss": 0.0001, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.1712285168682368, |
|
"grad_norm": 0.24701042473316193, |
|
"learning_rate": 6.691199042008345e-05, |
|
"loss": 0.5718, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.172925949501379, |
|
"grad_norm": 0.2914482057094574, |
|
"learning_rate": 6.651610710879847e-05, |
|
"loss": 0.6846, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.1746233821345216, |
|
"grad_norm": 0.2963961362838745, |
|
"learning_rate": 6.612106447938799e-05, |
|
"loss": 0.6591, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.1763208147676638, |
|
"grad_norm": 0.29871684312820435, |
|
"learning_rate": 6.572686650992696e-05, |
|
"loss": 0.7258, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.1780182474008063, |
|
"grad_norm": 0.29003167152404785, |
|
"learning_rate": 6.533351716998465e-05, |
|
"loss": 0.7024, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.1797156800339486, |
|
"grad_norm": 0.21721364557743073, |
|
"learning_rate": 6.49410204205844e-05, |
|
"loss": 0.4072, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.181413112667091, |
|
"grad_norm": 0.26777032017707825, |
|
"learning_rate": 6.454938021416417e-05, |
|
"loss": 0.6083, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.1831105453002335, |
|
"grad_norm": 0.25797197222709656, |
|
"learning_rate": 6.415860049453657e-05, |
|
"loss": 0.5343, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.1848079779333758, |
|
"grad_norm": 0.27494683861732483, |
|
"learning_rate": 6.376868519684892e-05, |
|
"loss": 0.6913, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.186505410566518, |
|
"grad_norm": 0.22641535103321075, |
|
"learning_rate": 6.337963824754399e-05, |
|
"loss": 0.4186, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.1882028431996605, |
|
"grad_norm": 0.21553833782672882, |
|
"learning_rate": 6.299146356432029e-05, |
|
"loss": 0.4194, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.1882028431996605, |
|
"eval_loss": 0.36409613490104675, |
|
"eval_runtime": 65.9367, |
|
"eval_samples_per_second": 2.927, |
|
"eval_steps_per_second": 2.927, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.189900275832803, |
|
"grad_norm": 0.23178090155124664, |
|
"learning_rate": 6.260416505609246e-05, |
|
"loss": 0.4426, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.1915977084659453, |
|
"grad_norm": 0.3357522785663605, |
|
"learning_rate": 6.221774662295231e-05, |
|
"loss": 0.6644, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.1932951410990875, |
|
"grad_norm": 0.22388817369937897, |
|
"learning_rate": 6.183221215612904e-05, |
|
"loss": 0.4101, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.19499257373223, |
|
"grad_norm": 0.2008681744337082, |
|
"learning_rate": 6.144756553795056e-05, |
|
"loss": 0.3087, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.1966900063653725, |
|
"grad_norm": 0.22424735128879547, |
|
"learning_rate": 6.106381064180395e-05, |
|
"loss": 0.4092, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.1983874389985147, |
|
"grad_norm": 0.26054248213768005, |
|
"learning_rate": 6.068095133209681e-05, |
|
"loss": 0.498, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.200084871631657, |
|
"grad_norm": 0.1590735763311386, |
|
"learning_rate": 6.0298991464218164e-05, |
|
"loss": 0.1884, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.2017823042647995, |
|
"grad_norm": 0.20946505665779114, |
|
"learning_rate": 5.991793488449955e-05, |
|
"loss": 0.3051, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.203479736897942, |
|
"grad_norm": 0.1654549539089203, |
|
"learning_rate": 5.953778543017656e-05, |
|
"loss": 0.2029, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.2051771695310842, |
|
"grad_norm": 0.11620619148015976, |
|
"learning_rate": 5.915854692935002e-05, |
|
"loss": 0.0943, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.2068746021642265, |
|
"grad_norm": 0.19273479282855988, |
|
"learning_rate": 5.8780223200947325e-05, |
|
"loss": 0.2541, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.208572034797369, |
|
"grad_norm": 0.22115856409072876, |
|
"learning_rate": 5.840281805468426e-05, |
|
"loss": 0.335, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.2102694674305114, |
|
"grad_norm": 0.17392244935035706, |
|
"learning_rate": 5.8026335291026534e-05, |
|
"loss": 0.1842, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.2119669000636537, |
|
"grad_norm": 0.04149521142244339, |
|
"learning_rate": 5.765077870115125e-05, |
|
"loss": 0.0099, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.2136643326967962, |
|
"grad_norm": 0.14882031083106995, |
|
"learning_rate": 5.72761520669092e-05, |
|
"loss": 0.1405, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.2153617653299384, |
|
"grad_norm": 0.04529095068573952, |
|
"learning_rate": 5.6902459160786444e-05, |
|
"loss": 0.0137, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.217059197963081, |
|
"grad_norm": 0.031042039394378662, |
|
"learning_rate": 5.6529703745866324e-05, |
|
"loss": 0.0031, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.2187566305962232, |
|
"grad_norm": 0.00039801717502996325, |
|
"learning_rate": 5.6157889575791764e-05, |
|
"loss": 0.0, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.2204540632293657, |
|
"grad_norm": 0.0020159813575446606, |
|
"learning_rate": 5.578702039472738e-05, |
|
"loss": 0.0001, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.222151495862508, |
|
"grad_norm": 0.0011594186071306467, |
|
"learning_rate": 5.541709993732167e-05, |
|
"loss": 0.0, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.2238489284956504, |
|
"grad_norm": 0.003208533627912402, |
|
"learning_rate": 5.504813192866945e-05, |
|
"loss": 0.0, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.2255463611287927, |
|
"grad_norm": 0.001415073755197227, |
|
"learning_rate": 5.4680120084274574e-05, |
|
"loss": 0.0, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.2272437937619352, |
|
"grad_norm": 0.0004124438273720443, |
|
"learning_rate": 5.4313068110012245e-05, |
|
"loss": 0.0, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.2289412263950774, |
|
"grad_norm": 0.00023227222845889628, |
|
"learning_rate": 5.394697970209166e-05, |
|
"loss": 0.0, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.23063865902822, |
|
"grad_norm": 0.0005153273814357817, |
|
"learning_rate": 5.358185854701909e-05, |
|
"loss": 0.0, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.2323360916613622, |
|
"grad_norm": 0.03805829957127571, |
|
"learning_rate": 5.321770832156057e-05, |
|
"loss": 0.0003, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.2340335242945046, |
|
"grad_norm": 0.00048098803381435573, |
|
"learning_rate": 5.285453269270471e-05, |
|
"loss": 0.0, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.235730956927647, |
|
"grad_norm": 0.03290550410747528, |
|
"learning_rate": 5.249233531762612e-05, |
|
"loss": 0.0008, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.2374283895607894, |
|
"grad_norm": 0.00626510102301836, |
|
"learning_rate": 5.213111984364839e-05, |
|
"loss": 0.0002, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.2391258221939316, |
|
"grad_norm": 0.006389813032001257, |
|
"learning_rate": 5.1770889908207245e-05, |
|
"loss": 0.0001, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.2408232548270741, |
|
"grad_norm": 0.0006512326071970165, |
|
"learning_rate": 5.141164913881417e-05, |
|
"loss": 0.0, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.2425206874602164, |
|
"grad_norm": 0.0029449777211993933, |
|
"learning_rate": 5.105340115301977e-05, |
|
"loss": 0.0001, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.2442181200933589, |
|
"grad_norm": 0.0047165704891085625, |
|
"learning_rate": 5.0696149558377215e-05, |
|
"loss": 0.0001, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.2459155527265011, |
|
"grad_norm": 0.0005647541838698089, |
|
"learning_rate": 5.0339897952406123e-05, |
|
"loss": 0.0, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.2476129853596436, |
|
"grad_norm": 0.001746086752973497, |
|
"learning_rate": 4.998464992255627e-05, |
|
"loss": 0.0001, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.2493104179927859, |
|
"grad_norm": 0.00047052078298293054, |
|
"learning_rate": 4.963040904617131e-05, |
|
"loss": 0.0, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.2510078506259283, |
|
"grad_norm": 0.0064684562385082245, |
|
"learning_rate": 4.9277178890452896e-05, |
|
"loss": 0.0002, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.2527052832590706, |
|
"grad_norm": 0.0021551090758293867, |
|
"learning_rate": 4.892496301242482e-05, |
|
"loss": 0.0, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.254402715892213, |
|
"grad_norm": 0.0014455855125561357, |
|
"learning_rate": 4.857376495889712e-05, |
|
"loss": 0.0001, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.2561001485253553, |
|
"grad_norm": 0.23304064571857452, |
|
"learning_rate": 4.8223588266430186e-05, |
|
"loss": 0.4913, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.2577975811584978, |
|
"grad_norm": 0.28668463230133057, |
|
"learning_rate": 4.787443646129948e-05, |
|
"loss": 0.6474, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.25949501379164, |
|
"grad_norm": 0.334522545337677, |
|
"learning_rate": 4.752631305945985e-05, |
|
"loss": 0.7787, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.2611924464247826, |
|
"grad_norm": 0.28749802708625793, |
|
"learning_rate": 4.717922156651001e-05, |
|
"loss": 0.6218, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.2628898790579248, |
|
"grad_norm": 0.2978324890136719, |
|
"learning_rate": 4.6833165477657446e-05, |
|
"loss": 0.7438, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.2645873116910673, |
|
"grad_norm": 0.2923458218574524, |
|
"learning_rate": 4.648814827768322e-05, |
|
"loss": 0.6148, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.2662847443242096, |
|
"grad_norm": 0.26087573170661926, |
|
"learning_rate": 4.614417344090659e-05, |
|
"loss": 0.5136, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.267982176957352, |
|
"grad_norm": 0.19124242663383484, |
|
"learning_rate": 4.5801244431150394e-05, |
|
"loss": 0.3201, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.2696796095904943, |
|
"grad_norm": 0.28308284282684326, |
|
"learning_rate": 4.5459364701705995e-05, |
|
"loss": 0.684, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.2713770422236368, |
|
"grad_norm": 0.2998988628387451, |
|
"learning_rate": 4.51185376952984e-05, |
|
"loss": 0.6558, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.273074474856779, |
|
"grad_norm": 0.2379872053861618, |
|
"learning_rate": 4.477876684405179e-05, |
|
"loss": 0.4853, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.273074474856779, |
|
"eval_loss": 0.3654167652130127, |
|
"eval_runtime": 65.9099, |
|
"eval_samples_per_second": 2.928, |
|
"eval_steps_per_second": 2.928, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.2747719074899215, |
|
"grad_norm": 0.2629989981651306, |
|
"learning_rate": 4.4440055569454936e-05, |
|
"loss": 0.5687, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.2764693401230638, |
|
"grad_norm": 0.29386061429977417, |
|
"learning_rate": 4.410240728232653e-05, |
|
"loss": 0.4559, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.2781667727562063, |
|
"grad_norm": 0.32051095366477966, |
|
"learning_rate": 4.376582538278114e-05, |
|
"loss": 0.7155, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.2798642053893485, |
|
"grad_norm": 0.2592981159687042, |
|
"learning_rate": 4.3430313260194697e-05, |
|
"loss": 0.4972, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.281561638022491, |
|
"grad_norm": 0.23039335012435913, |
|
"learning_rate": 4.309587429317061e-05, |
|
"loss": 0.4049, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.2832590706556333, |
|
"grad_norm": 0.2293672114610672, |
|
"learning_rate": 4.2762511849505476e-05, |
|
"loss": 0.4085, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.2849565032887758, |
|
"grad_norm": 0.2180144041776657, |
|
"learning_rate": 4.2430229286155484e-05, |
|
"loss": 0.3829, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.286653935921918, |
|
"grad_norm": 0.22328434884548187, |
|
"learning_rate": 4.209902994920235e-05, |
|
"loss": 0.381, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.2883513685550605, |
|
"grad_norm": 0.19186857342720032, |
|
"learning_rate": 4.176891717381967e-05, |
|
"loss": 0.2676, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.290048801188203, |
|
"grad_norm": 0.19644580781459808, |
|
"learning_rate": 4.143989428423947e-05, |
|
"loss": 0.2604, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.2917462338213452, |
|
"grad_norm": 0.1470167487859726, |
|
"learning_rate": 4.111196459371862e-05, |
|
"loss": 0.1697, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.2934436664544875, |
|
"grad_norm": 0.10344719886779785, |
|
"learning_rate": 4.0785131404505376e-05, |
|
"loss": 0.0742, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.29514109908763, |
|
"grad_norm": 0.08508791774511337, |
|
"learning_rate": 4.045939800780639e-05, |
|
"loss": 0.0492, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.2968385317207725, |
|
"grad_norm": 0.08410584181547165, |
|
"learning_rate": 4.0134767683753385e-05, |
|
"loss": 0.049, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.2985359643539147, |
|
"grad_norm": 0.001906770863570273, |
|
"learning_rate": 3.981124370137001e-05, |
|
"loss": 0.0001, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.300233396987057, |
|
"grad_norm": 0.14595600962638855, |
|
"learning_rate": 3.948882931853924e-05, |
|
"loss": 0.119, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.3019308296201995, |
|
"grad_norm": 0.00104467140045017, |
|
"learning_rate": 3.916752778197039e-05, |
|
"loss": 0.0001, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.303628262253342, |
|
"grad_norm": 0.00029723646002821624, |
|
"learning_rate": 3.8847342327166244e-05, |
|
"loss": 0.0, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.3053256948864842, |
|
"grad_norm": 0.07592643052339554, |
|
"learning_rate": 3.852827617839084e-05, |
|
"loss": 0.026, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.3070231275196265, |
|
"grad_norm": 0.00041246655746363103, |
|
"learning_rate": 3.8210332548636796e-05, |
|
"loss": 0.0, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.308720560152769, |
|
"grad_norm": 0.002659448655322194, |
|
"learning_rate": 3.7893514639592895e-05, |
|
"loss": 0.0001, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.3104179927859114, |
|
"grad_norm": 0.00021023498266004026, |
|
"learning_rate": 3.757782564161191e-05, |
|
"loss": 0.0, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.3121154254190537, |
|
"grad_norm": 0.0010800276650115848, |
|
"learning_rate": 3.7263268733678606e-05, |
|
"loss": 0.0, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.313812858052196, |
|
"grad_norm": 0.0006105787470005453, |
|
"learning_rate": 3.694984708337756e-05, |
|
"loss": 0.0, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.3155102906853384, |
|
"grad_norm": 0.0007423445931635797, |
|
"learning_rate": 3.663756384686127e-05, |
|
"loss": 0.0, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.317207723318481, |
|
"grad_norm": 0.0007892303401604295, |
|
"learning_rate": 3.632642216881847e-05, |
|
"loss": 0.0, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.3189051559516232, |
|
"grad_norm": 0.0015005484456196427, |
|
"learning_rate": 3.601642518244247e-05, |
|
"loss": 0.0, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.3206025885847654, |
|
"grad_norm": 0.0008817919879220426, |
|
"learning_rate": 3.570757600939939e-05, |
|
"loss": 0.0, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.322300021217908, |
|
"grad_norm": 0.0003715140337590128, |
|
"learning_rate": 3.5399877759797e-05, |
|
"loss": 0.0, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.3239974538510504, |
|
"grad_norm": 0.001832145731896162, |
|
"learning_rate": 3.509333353215331e-05, |
|
"loss": 0.0001, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.3256948864841926, |
|
"grad_norm": 0.0006237781490199268, |
|
"learning_rate": 3.47879464133652e-05, |
|
"loss": 0.0, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.327392319117335, |
|
"grad_norm": 0.0015418545808643103, |
|
"learning_rate": 3.448371947867763e-05, |
|
"loss": 0.0001, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.3290897517504774, |
|
"grad_norm": 0.0035702355671674013, |
|
"learning_rate": 3.4180655791652476e-05, |
|
"loss": 0.0001, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.3307871843836199, |
|
"grad_norm": 0.0013561249943450093, |
|
"learning_rate": 3.3878758404137624e-05, |
|
"loss": 0.0001, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.3324846170167621, |
|
"grad_norm": 0.001489428337663412, |
|
"learning_rate": 3.3578030356236455e-05, |
|
"loss": 0.0001, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.3341820496499044, |
|
"grad_norm": 0.0011818065540865064, |
|
"learning_rate": 3.3278474676277114e-05, |
|
"loss": 0.0, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.3358794822830469, |
|
"grad_norm": 0.0076867276802659035, |
|
"learning_rate": 3.298009438078194e-05, |
|
"loss": 0.0003, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.3375769149161894, |
|
"grad_norm": 0.002071813913062215, |
|
"learning_rate": 3.268289247443713e-05, |
|
"loss": 0.0001, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.3392743475493316, |
|
"grad_norm": 0.002192431129515171, |
|
"learning_rate": 3.238687195006264e-05, |
|
"loss": 0.0001, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.3409717801824739, |
|
"grad_norm": 0.24951210618019104, |
|
"learning_rate": 3.209203578858191e-05, |
|
"loss": 0.6115, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.3426692128156164, |
|
"grad_norm": 0.29264548420906067, |
|
"learning_rate": 3.1798386958991714e-05, |
|
"loss": 0.71, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.3443666454487588, |
|
"grad_norm": 0.2815876603126526, |
|
"learning_rate": 3.1505928418332574e-05, |
|
"loss": 0.6586, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.346064078081901, |
|
"grad_norm": 0.25446027517318726, |
|
"learning_rate": 3.121466311165875e-05, |
|
"loss": 0.5623, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.3477615107150436, |
|
"grad_norm": 0.2643551230430603, |
|
"learning_rate": 3.092459397200861e-05, |
|
"loss": 0.6298, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.3494589433481858, |
|
"grad_norm": 0.24555166065692902, |
|
"learning_rate": 3.0635723920375164e-05, |
|
"loss": 0.5237, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.3511563759813283, |
|
"grad_norm": 0.2268795222043991, |
|
"learning_rate": 3.0348055865676707e-05, |
|
"loss": 0.3814, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.3528538086144706, |
|
"grad_norm": 0.25621020793914795, |
|
"learning_rate": 3.0061592704727257e-05, |
|
"loss": 0.5183, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.354551241247613, |
|
"grad_norm": 0.2662016451358795, |
|
"learning_rate": 2.9776337322207687e-05, |
|
"loss": 0.5679, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.3562486738807553, |
|
"grad_norm": 0.20577247440814972, |
|
"learning_rate": 2.9492292590636613e-05, |
|
"loss": 0.3231, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.3579461065138978, |
|
"grad_norm": 0.23013556003570557, |
|
"learning_rate": 2.9209461370341204e-05, |
|
"loss": 0.4158, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.3579461065138978, |
|
"eval_loss": 0.36441436409950256, |
|
"eval_runtime": 65.9028, |
|
"eval_samples_per_second": 2.929, |
|
"eval_steps_per_second": 2.929, |
|
"step": 800 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 3, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 3 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.185996556504924e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |