{ "best_metric": null, "best_model_checkpoint": null, "epoch": 0.99609375, "eval_steps": 500, "global_step": 10200, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 9.765625e-05, "grad_norm": 13.125093460083008, "learning_rate": 1.6666666666666669e-06, "loss": 6.0473, "step": 1 }, { "epoch": 0.0001953125, "grad_norm": 13.109691619873047, "learning_rate": 3.3333333333333337e-06, "loss": 6.0151, "step": 2 }, { "epoch": 0.00029296875, "grad_norm": 12.975897789001465, "learning_rate": 5e-06, "loss": 6.0374, "step": 3 }, { "epoch": 0.000390625, "grad_norm": 12.861470222473145, "learning_rate": 6.6666666666666675e-06, "loss": 5.9534, "step": 4 }, { "epoch": 0.00048828125, "grad_norm": 11.551366806030273, "learning_rate": 8.333333333333334e-06, "loss": 5.7805, "step": 5 }, { "epoch": 0.0005859375, "grad_norm": 7.968364238739014, "learning_rate": 1e-05, "loss": 5.5664, "step": 6 }, { "epoch": 0.00068359375, "grad_norm": 6.854646682739258, "learning_rate": 1.1666666666666668e-05, "loss": 5.4982, "step": 7 }, { "epoch": 0.00078125, "grad_norm": 3.759798049926758, "learning_rate": 1.3333333333333335e-05, "loss": 5.1824, "step": 8 }, { "epoch": 0.00087890625, "grad_norm": 3.48822283744812, "learning_rate": 1.5e-05, "loss": 5.0667, "step": 9 }, { "epoch": 0.0009765625, "grad_norm": 3.1011886596679688, "learning_rate": 1.6666666666666667e-05, "loss": 5.0028, "step": 10 }, { "epoch": 0.00107421875, "grad_norm": 2.5868849754333496, "learning_rate": 1.8333333333333333e-05, "loss": 4.7787, "step": 11 }, { "epoch": 0.001171875, "grad_norm": 2.378497838973999, "learning_rate": 2e-05, "loss": 4.6855, "step": 12 }, { "epoch": 0.00126953125, "grad_norm": 2.2101738452911377, "learning_rate": 2.1666666666666667e-05, "loss": 4.6042, "step": 13 }, { "epoch": 0.0013671875, "grad_norm": 1.9938961267471313, "learning_rate": 2.3333333333333336e-05, "loss": 4.3843, "step": 14 }, { "epoch": 0.00146484375, "grad_norm": 1.9345463514328003, "learning_rate": 2.5e-05, "loss": 4.2548, "step": 15 }, { "epoch": 0.0015625, "grad_norm": 1.7730776071548462, "learning_rate": 2.666666666666667e-05, "loss": 4.1363, "step": 16 }, { "epoch": 0.00166015625, "grad_norm": 1.58334481716156, "learning_rate": 2.8333333333333332e-05, "loss": 3.9704, "step": 17 }, { "epoch": 0.0017578125, "grad_norm": 1.4827001094818115, "learning_rate": 3e-05, "loss": 3.81, "step": 18 }, { "epoch": 0.00185546875, "grad_norm": 1.4140430688858032, "learning_rate": 3.166666666666667e-05, "loss": 3.7305, "step": 19 }, { "epoch": 0.001953125, "grad_norm": 1.3934496641159058, "learning_rate": 3.3333333333333335e-05, "loss": 3.5375, "step": 20 }, { "epoch": 0.00205078125, "grad_norm": 1.3384771347045898, "learning_rate": 3.5000000000000004e-05, "loss": 3.5217, "step": 21 }, { "epoch": 0.0021484375, "grad_norm": 1.2228978872299194, "learning_rate": 3.6666666666666666e-05, "loss": 3.3742, "step": 22 }, { "epoch": 0.00224609375, "grad_norm": 1.1775028705596924, "learning_rate": 3.8333333333333334e-05, "loss": 3.2626, "step": 23 }, { "epoch": 0.00234375, "grad_norm": 1.0886842012405396, "learning_rate": 4e-05, "loss": 3.1864, "step": 24 }, { "epoch": 0.00244140625, "grad_norm": 1.0981535911560059, "learning_rate": 4.1666666666666665e-05, "loss": 3.0962, "step": 25 }, { "epoch": 0.0025390625, "grad_norm": 1.009294867515564, "learning_rate": 4.3333333333333334e-05, "loss": 3.0507, "step": 26 }, { "epoch": 0.00263671875, "grad_norm": 0.9390103816986084, "learning_rate": 4.4999999999999996e-05, "loss": 
2.9579, "step": 27 }, { "epoch": 0.002734375, "grad_norm": 0.8647847175598145, "learning_rate": 4.666666666666667e-05, "loss": 2.8166, "step": 28 }, { "epoch": 0.00283203125, "grad_norm": 0.8606237769126892, "learning_rate": 4.8333333333333334e-05, "loss": 2.7919, "step": 29 }, { "epoch": 0.0029296875, "grad_norm": 0.8069576025009155, "learning_rate": 5e-05, "loss": 2.7426, "step": 30 }, { "epoch": 0.00302734375, "grad_norm": 0.7007808685302734, "learning_rate": 5.1666666666666664e-05, "loss": 2.6926, "step": 31 }, { "epoch": 0.003125, "grad_norm": 0.6825646162033081, "learning_rate": 5.333333333333334e-05, "loss": 2.6783, "step": 32 }, { "epoch": 0.00322265625, "grad_norm": 0.7054751515388489, "learning_rate": 5.5e-05, "loss": 2.6239, "step": 33 }, { "epoch": 0.0033203125, "grad_norm": 0.7101700305938721, "learning_rate": 5.6666666666666664e-05, "loss": 2.5993, "step": 34 }, { "epoch": 0.00341796875, "grad_norm": 0.56828773021698, "learning_rate": 5.833333333333333e-05, "loss": 2.5258, "step": 35 }, { "epoch": 0.003515625, "grad_norm": 0.5513983368873596, "learning_rate": 6e-05, "loss": 2.5018, "step": 36 }, { "epoch": 0.00361328125, "grad_norm": 0.573391318321228, "learning_rate": 6.166666666666667e-05, "loss": 2.4881, "step": 37 }, { "epoch": 0.0037109375, "grad_norm": 0.4520920515060425, "learning_rate": 6.333333333333335e-05, "loss": 2.4489, "step": 38 }, { "epoch": 0.00380859375, "grad_norm": 0.43038809299468994, "learning_rate": 6.500000000000001e-05, "loss": 2.4387, "step": 39 }, { "epoch": 0.00390625, "grad_norm": 0.4071808457374573, "learning_rate": 6.666666666666667e-05, "loss": 2.4261, "step": 40 }, { "epoch": 0.00400390625, "grad_norm": 0.3631410300731659, "learning_rate": 6.833333333333333e-05, "loss": 2.3656, "step": 41 }, { "epoch": 0.0041015625, "grad_norm": 0.33457818627357483, "learning_rate": 7.000000000000001e-05, "loss": 2.4011, "step": 42 }, { "epoch": 0.00419921875, "grad_norm": 0.3582305610179901, "learning_rate": 7.166666666666667e-05, "loss": 2.3662, "step": 43 }, { "epoch": 0.004296875, "grad_norm": 0.4090467691421509, "learning_rate": 7.333333333333333e-05, "loss": 2.3282, "step": 44 }, { "epoch": 0.00439453125, "grad_norm": 0.34438556432724, "learning_rate": 7.5e-05, "loss": 2.2991, "step": 45 }, { "epoch": 0.0044921875, "grad_norm": 0.29381102323532104, "learning_rate": 7.666666666666667e-05, "loss": 2.2506, "step": 46 }, { "epoch": 0.00458984375, "grad_norm": 0.4019562304019928, "learning_rate": 7.833333333333334e-05, "loss": 2.2494, "step": 47 }, { "epoch": 0.0046875, "grad_norm": 0.3183911144733429, "learning_rate": 8e-05, "loss": 2.2853, "step": 48 }, { "epoch": 0.00478515625, "grad_norm": 0.33041393756866455, "learning_rate": 8.166666666666667e-05, "loss": 2.2251, "step": 49 }, { "epoch": 0.0048828125, "grad_norm": 0.28936123847961426, "learning_rate": 8.333333333333333e-05, "loss": 2.2308, "step": 50 }, { "epoch": 0.00498046875, "grad_norm": 0.36125779151916504, "learning_rate": 8.5e-05, "loss": 2.2473, "step": 51 }, { "epoch": 0.005078125, "grad_norm": 0.2832348942756653, "learning_rate": 8.666666666666667e-05, "loss": 2.201, "step": 52 }, { "epoch": 0.00517578125, "grad_norm": 0.3250133693218231, "learning_rate": 8.833333333333333e-05, "loss": 2.1596, "step": 53 }, { "epoch": 0.0052734375, "grad_norm": 0.31502828001976013, "learning_rate": 8.999999999999999e-05, "loss": 2.2183, "step": 54 }, { "epoch": 0.00537109375, "grad_norm": 0.33068645000457764, "learning_rate": 9.166666666666667e-05, "loss": 2.2254, "step": 55 }, { "epoch": 0.00546875, 
"grad_norm": 0.30684661865234375, "learning_rate": 9.333333333333334e-05, "loss": 2.2572, "step": 56 }, { "epoch": 0.00556640625, "grad_norm": 0.31540846824645996, "learning_rate": 9.5e-05, "loss": 2.1594, "step": 57 }, { "epoch": 0.0056640625, "grad_norm": 0.23259752988815308, "learning_rate": 9.666666666666667e-05, "loss": 2.172, "step": 58 }, { "epoch": 0.00576171875, "grad_norm": 0.26666638255119324, "learning_rate": 9.833333333333333e-05, "loss": 2.1961, "step": 59 }, { "epoch": 0.005859375, "grad_norm": 0.26295527815818787, "learning_rate": 0.0001, "loss": 2.1811, "step": 60 }, { "epoch": 0.00595703125, "grad_norm": 0.2302207201719284, "learning_rate": 0.00010166666666666667, "loss": 2.1431, "step": 61 }, { "epoch": 0.0060546875, "grad_norm": 0.468537837266922, "learning_rate": 0.00010333333333333333, "loss": 2.133, "step": 62 }, { "epoch": 0.00615234375, "grad_norm": 0.27946797013282776, "learning_rate": 0.000105, "loss": 2.1917, "step": 63 }, { "epoch": 0.00625, "grad_norm": 0.26789286732673645, "learning_rate": 0.00010666666666666668, "loss": 2.1417, "step": 64 }, { "epoch": 0.00634765625, "grad_norm": 0.22977805137634277, "learning_rate": 0.00010833333333333334, "loss": 2.1371, "step": 65 }, { "epoch": 0.0064453125, "grad_norm": 0.2590682804584503, "learning_rate": 0.00011, "loss": 2.113, "step": 66 }, { "epoch": 0.00654296875, "grad_norm": 0.2660805583000183, "learning_rate": 0.00011166666666666667, "loss": 2.1346, "step": 67 }, { "epoch": 0.006640625, "grad_norm": 0.24538874626159668, "learning_rate": 0.00011333333333333333, "loss": 2.1078, "step": 68 }, { "epoch": 0.00673828125, "grad_norm": 0.2986001968383789, "learning_rate": 0.000115, "loss": 2.1048, "step": 69 }, { "epoch": 0.0068359375, "grad_norm": 0.30193910002708435, "learning_rate": 0.00011666666666666667, "loss": 2.135, "step": 70 }, { "epoch": 0.00693359375, "grad_norm": 0.25153177976608276, "learning_rate": 0.00011833333333333334, "loss": 2.1101, "step": 71 }, { "epoch": 0.00703125, "grad_norm": 0.7685809135437012, "learning_rate": 0.00012, "loss": 2.0833, "step": 72 }, { "epoch": 0.00712890625, "grad_norm": 0.24468126893043518, "learning_rate": 0.00012166666666666668, "loss": 2.1409, "step": 73 }, { "epoch": 0.0072265625, "grad_norm": 0.3070752024650574, "learning_rate": 0.00012333333333333334, "loss": 2.1402, "step": 74 }, { "epoch": 0.00732421875, "grad_norm": 0.352273553609848, "learning_rate": 0.000125, "loss": 2.171, "step": 75 }, { "epoch": 0.007421875, "grad_norm": 0.34806111454963684, "learning_rate": 0.0001266666666666667, "loss": 2.1155, "step": 76 }, { "epoch": 0.00751953125, "grad_norm": 0.317414253950119, "learning_rate": 0.00012833333333333333, "loss": 2.0812, "step": 77 }, { "epoch": 0.0076171875, "grad_norm": 0.3320539891719818, "learning_rate": 0.00013000000000000002, "loss": 2.0987, "step": 78 }, { "epoch": 0.00771484375, "grad_norm": 0.3315901756286621, "learning_rate": 0.00013166666666666665, "loss": 2.1025, "step": 79 }, { "epoch": 0.0078125, "grad_norm": 0.29021286964416504, "learning_rate": 0.00013333333333333334, "loss": 2.1474, "step": 80 }, { "epoch": 0.00791015625, "grad_norm": 0.2593044936656952, "learning_rate": 0.000135, "loss": 2.0667, "step": 81 }, { "epoch": 0.0080078125, "grad_norm": 0.3192054033279419, "learning_rate": 0.00013666666666666666, "loss": 2.0764, "step": 82 }, { "epoch": 0.00810546875, "grad_norm": 0.26053664088249207, "learning_rate": 0.00013833333333333333, "loss": 2.0698, "step": 83 }, { "epoch": 0.008203125, "grad_norm": 0.24790963530540466, "learning_rate": 
0.00014000000000000001, "loss": 2.124, "step": 84 }, { "epoch": 0.00830078125, "grad_norm": 0.2546316087245941, "learning_rate": 0.00014166666666666668, "loss": 2.1025, "step": 85 }, { "epoch": 0.0083984375, "grad_norm": 0.22275258600711823, "learning_rate": 0.00014333333333333334, "loss": 2.0778, "step": 86 }, { "epoch": 0.00849609375, "grad_norm": 0.22855599224567413, "learning_rate": 0.000145, "loss": 2.1113, "step": 87 }, { "epoch": 0.00859375, "grad_norm": 0.2456902116537094, "learning_rate": 0.00014666666666666666, "loss": 2.1399, "step": 88 }, { "epoch": 0.00869140625, "grad_norm": 0.22144420444965363, "learning_rate": 0.00014833333333333335, "loss": 2.0211, "step": 89 }, { "epoch": 0.0087890625, "grad_norm": 0.254894495010376, "learning_rate": 0.00015, "loss": 2.1382, "step": 90 }, { "epoch": 0.00888671875, "grad_norm": 0.2729082703590393, "learning_rate": 0.00015166666666666668, "loss": 2.1271, "step": 91 }, { "epoch": 0.008984375, "grad_norm": 0.2564642131328583, "learning_rate": 0.00015333333333333334, "loss": 2.0423, "step": 92 }, { "epoch": 0.00908203125, "grad_norm": 0.24927419424057007, "learning_rate": 0.000155, "loss": 2.0603, "step": 93 }, { "epoch": 0.0091796875, "grad_norm": 0.2591923475265503, "learning_rate": 0.0001566666666666667, "loss": 2.0522, "step": 94 }, { "epoch": 0.00927734375, "grad_norm": 0.3350547254085541, "learning_rate": 0.00015833333333333332, "loss": 2.1095, "step": 95 }, { "epoch": 0.009375, "grad_norm": 0.30227839946746826, "learning_rate": 0.00016, "loss": 2.121, "step": 96 }, { "epoch": 0.00947265625, "grad_norm": 0.3027198314666748, "learning_rate": 0.00016166666666666665, "loss": 2.0288, "step": 97 }, { "epoch": 0.0095703125, "grad_norm": 0.29791951179504395, "learning_rate": 0.00016333333333333334, "loss": 2.0471, "step": 98 }, { "epoch": 0.00966796875, "grad_norm": 0.30918803811073303, "learning_rate": 0.000165, "loss": 2.1036, "step": 99 }, { "epoch": 0.009765625, "grad_norm": 0.2510216534137726, "learning_rate": 0.00016666666666666666, "loss": 2.0217, "step": 100 }, { "epoch": 0.00986328125, "grad_norm": 0.288231760263443, "learning_rate": 0.00016833333333333335, "loss": 2.0178, "step": 101 }, { "epoch": 0.0099609375, "grad_norm": 0.3326691687107086, "learning_rate": 0.00017, "loss": 2.0947, "step": 102 }, { "epoch": 0.01005859375, "grad_norm": 0.3552946150302887, "learning_rate": 0.00017166666666666667, "loss": 1.9953, "step": 103 }, { "epoch": 0.01015625, "grad_norm": 0.34882935881614685, "learning_rate": 0.00017333333333333334, "loss": 2.1223, "step": 104 }, { "epoch": 0.01025390625, "grad_norm": 0.35487979650497437, "learning_rate": 0.000175, "loss": 2.0599, "step": 105 }, { "epoch": 0.0103515625, "grad_norm": 0.33561578392982483, "learning_rate": 0.00017666666666666666, "loss": 2.0398, "step": 106 }, { "epoch": 0.01044921875, "grad_norm": 0.27415409684181213, "learning_rate": 0.00017833333333333335, "loss": 2.0982, "step": 107 }, { "epoch": 0.010546875, "grad_norm": 0.28592920303344727, "learning_rate": 0.00017999999999999998, "loss": 2.0571, "step": 108 }, { "epoch": 0.01064453125, "grad_norm": 0.3228552043437958, "learning_rate": 0.00018166666666666667, "loss": 1.9923, "step": 109 }, { "epoch": 0.0107421875, "grad_norm": 0.29005661606788635, "learning_rate": 0.00018333333333333334, "loss": 2.0394, "step": 110 }, { "epoch": 0.01083984375, "grad_norm": 0.36677825450897217, "learning_rate": 0.000185, "loss": 2.0153, "step": 111 }, { "epoch": 0.0109375, "grad_norm": 0.2562806308269501, "learning_rate": 0.0001866666666666667, "loss": 2.05, 
"step": 112 }, { "epoch": 0.01103515625, "grad_norm": 0.2748093008995056, "learning_rate": 0.00018833333333333332, "loss": 2.0449, "step": 113 }, { "epoch": 0.0111328125, "grad_norm": 0.3924459218978882, "learning_rate": 0.00019, "loss": 2.0409, "step": 114 }, { "epoch": 0.01123046875, "grad_norm": 0.4368191063404083, "learning_rate": 0.00019166666666666667, "loss": 2.0178, "step": 115 }, { "epoch": 0.011328125, "grad_norm": 0.4236035943031311, "learning_rate": 0.00019333333333333333, "loss": 2.0518, "step": 116 }, { "epoch": 0.01142578125, "grad_norm": 0.28912851214408875, "learning_rate": 0.00019500000000000002, "loss": 2.0058, "step": 117 }, { "epoch": 0.0115234375, "grad_norm": 0.38847798109054565, "learning_rate": 0.00019666666666666666, "loss": 2.0818, "step": 118 }, { "epoch": 0.01162109375, "grad_norm": 0.29429811239242554, "learning_rate": 0.00019833333333333335, "loss": 2.004, "step": 119 }, { "epoch": 0.01171875, "grad_norm": 0.2969271242618561, "learning_rate": 0.0002, "loss": 2.046, "step": 120 }, { "epoch": 0.01181640625, "grad_norm": 0.297894150018692, "learning_rate": 0.00020166666666666667, "loss": 2.0204, "step": 121 }, { "epoch": 0.0119140625, "grad_norm": 0.28794237971305847, "learning_rate": 0.00020333333333333333, "loss": 2.0663, "step": 122 }, { "epoch": 0.01201171875, "grad_norm": 0.2766033411026001, "learning_rate": 0.000205, "loss": 2.0399, "step": 123 }, { "epoch": 0.012109375, "grad_norm": 0.3516612648963928, "learning_rate": 0.00020666666666666666, "loss": 2.0389, "step": 124 }, { "epoch": 0.01220703125, "grad_norm": 0.28204381465911865, "learning_rate": 0.00020833333333333335, "loss": 2.0084, "step": 125 }, { "epoch": 0.0123046875, "grad_norm": 0.3306240737438202, "learning_rate": 0.00021, "loss": 2.013, "step": 126 }, { "epoch": 0.01240234375, "grad_norm": 0.3221111595630646, "learning_rate": 0.00021166666666666667, "loss": 2.0312, "step": 127 }, { "epoch": 0.0125, "grad_norm": 0.2742249071598053, "learning_rate": 0.00021333333333333336, "loss": 2.0375, "step": 128 }, { "epoch": 0.01259765625, "grad_norm": 0.2785228490829468, "learning_rate": 0.000215, "loss": 2.0449, "step": 129 }, { "epoch": 0.0126953125, "grad_norm": 0.2666397988796234, "learning_rate": 0.00021666666666666668, "loss": 2.0355, "step": 130 }, { "epoch": 0.01279296875, "grad_norm": 0.22522135078907013, "learning_rate": 0.00021833333333333332, "loss": 1.9693, "step": 131 }, { "epoch": 0.012890625, "grad_norm": 0.2724483013153076, "learning_rate": 0.00022, "loss": 2.0671, "step": 132 }, { "epoch": 0.01298828125, "grad_norm": 0.2655040919780731, "learning_rate": 0.00022166666666666667, "loss": 1.9805, "step": 133 }, { "epoch": 0.0130859375, "grad_norm": 0.3194504678249359, "learning_rate": 0.00022333333333333333, "loss": 2.067, "step": 134 }, { "epoch": 0.01318359375, "grad_norm": 0.25511813163757324, "learning_rate": 0.00022500000000000002, "loss": 2.0141, "step": 135 }, { "epoch": 0.01328125, "grad_norm": 0.3995087444782257, "learning_rate": 0.00022666666666666666, "loss": 2.0278, "step": 136 }, { "epoch": 0.01337890625, "grad_norm": 0.2623380422592163, "learning_rate": 0.00022833333333333334, "loss": 1.9751, "step": 137 }, { "epoch": 0.0134765625, "grad_norm": 0.23814889788627625, "learning_rate": 0.00023, "loss": 1.969, "step": 138 }, { "epoch": 0.01357421875, "grad_norm": 0.23566491901874542, "learning_rate": 0.00023166666666666667, "loss": 2.0135, "step": 139 }, { "epoch": 0.013671875, "grad_norm": 0.2437373697757721, "learning_rate": 0.00023333333333333333, "loss": 2.0044, "step": 140 }, 
{ "epoch": 0.01376953125, "grad_norm": 0.2861543595790863, "learning_rate": 0.000235, "loss": 2.0378, "step": 141 }, { "epoch": 0.0138671875, "grad_norm": 0.318050354719162, "learning_rate": 0.00023666666666666668, "loss": 2.0857, "step": 142 }, { "epoch": 0.01396484375, "grad_norm": 0.39669227600097656, "learning_rate": 0.00023833333333333334, "loss": 2.0535, "step": 143 }, { "epoch": 0.0140625, "grad_norm": 0.4359401762485504, "learning_rate": 0.00024, "loss": 2.0432, "step": 144 }, { "epoch": 0.01416015625, "grad_norm": 0.4532039165496826, "learning_rate": 0.00024166666666666667, "loss": 1.9909, "step": 145 }, { "epoch": 0.0142578125, "grad_norm": 0.4570695161819458, "learning_rate": 0.00024333333333333336, "loss": 2.0123, "step": 146 }, { "epoch": 0.01435546875, "grad_norm": 0.36623403429985046, "learning_rate": 0.000245, "loss": 2.0179, "step": 147 }, { "epoch": 0.014453125, "grad_norm": 0.3069714307785034, "learning_rate": 0.0002466666666666667, "loss": 2.0014, "step": 148 }, { "epoch": 0.01455078125, "grad_norm": 0.3980304002761841, "learning_rate": 0.0002483333333333333, "loss": 2.0489, "step": 149 }, { "epoch": 0.0146484375, "grad_norm": 0.31907564401626587, "learning_rate": 0.00025, "loss": 2.013, "step": 150 }, { "epoch": 0.01474609375, "grad_norm": 0.2952549159526825, "learning_rate": 0.00025166666666666664, "loss": 2.0709, "step": 151 }, { "epoch": 0.01484375, "grad_norm": 0.29451197385787964, "learning_rate": 0.0002533333333333334, "loss": 1.9613, "step": 152 }, { "epoch": 0.01494140625, "grad_norm": 0.2893507778644562, "learning_rate": 0.000255, "loss": 2.0048, "step": 153 }, { "epoch": 0.0150390625, "grad_norm": 0.24850639700889587, "learning_rate": 0.00025666666666666665, "loss": 2.0198, "step": 154 }, { "epoch": 0.01513671875, "grad_norm": 0.24297639727592468, "learning_rate": 0.00025833333333333334, "loss": 2.0561, "step": 155 }, { "epoch": 0.015234375, "grad_norm": 0.2777438461780548, "learning_rate": 0.00026000000000000003, "loss": 2.0206, "step": 156 }, { "epoch": 0.01533203125, "grad_norm": 0.28714093565940857, "learning_rate": 0.00026166666666666667, "loss": 1.9891, "step": 157 }, { "epoch": 0.0154296875, "grad_norm": 0.25796255469322205, "learning_rate": 0.0002633333333333333, "loss": 2.0369, "step": 158 }, { "epoch": 0.01552734375, "grad_norm": 0.2391008883714676, "learning_rate": 0.00026500000000000004, "loss": 2.0015, "step": 159 }, { "epoch": 0.015625, "grad_norm": 0.3203892409801483, "learning_rate": 0.0002666666666666667, "loss": 2.0213, "step": 160 }, { "epoch": 0.01572265625, "grad_norm": 0.3396870791912079, "learning_rate": 0.0002683333333333333, "loss": 2.0333, "step": 161 }, { "epoch": 0.0158203125, "grad_norm": 0.315060555934906, "learning_rate": 0.00027, "loss": 2.0195, "step": 162 }, { "epoch": 0.01591796875, "grad_norm": 0.2672436535358429, "learning_rate": 0.0002716666666666667, "loss": 1.9946, "step": 163 }, { "epoch": 0.016015625, "grad_norm": 0.2996402382850647, "learning_rate": 0.00027333333333333333, "loss": 2.0112, "step": 164 }, { "epoch": 0.01611328125, "grad_norm": 0.2894189953804016, "learning_rate": 0.000275, "loss": 2.0157, "step": 165 }, { "epoch": 0.0162109375, "grad_norm": 0.26241254806518555, "learning_rate": 0.00027666666666666665, "loss": 2.0177, "step": 166 }, { "epoch": 0.01630859375, "grad_norm": 0.22900305688381195, "learning_rate": 0.00027833333333333334, "loss": 1.9834, "step": 167 }, { "epoch": 0.01640625, "grad_norm": 0.2373427301645279, "learning_rate": 0.00028000000000000003, "loss": 1.9792, "step": 168 }, { "epoch": 
0.01650390625, "grad_norm": 0.2663004696369171, "learning_rate": 0.00028166666666666666, "loss": 2.0203, "step": 169 }, { "epoch": 0.0166015625, "grad_norm": 0.31653544306755066, "learning_rate": 0.00028333333333333335, "loss": 2.0216, "step": 170 }, { "epoch": 0.01669921875, "grad_norm": 0.3077234923839569, "learning_rate": 0.000285, "loss": 2.0361, "step": 171 }, { "epoch": 0.016796875, "grad_norm": 0.25555703043937683, "learning_rate": 0.0002866666666666667, "loss": 2.0102, "step": 172 }, { "epoch": 0.01689453125, "grad_norm": 0.29817435145378113, "learning_rate": 0.0002883333333333333, "loss": 1.972, "step": 173 }, { "epoch": 0.0169921875, "grad_norm": 0.3075692355632782, "learning_rate": 0.00029, "loss": 2.0195, "step": 174 }, { "epoch": 0.01708984375, "grad_norm": 0.29917964339256287, "learning_rate": 0.0002916666666666667, "loss": 1.9972, "step": 175 }, { "epoch": 0.0171875, "grad_norm": 0.32018229365348816, "learning_rate": 0.0002933333333333333, "loss": 1.9895, "step": 176 }, { "epoch": 0.01728515625, "grad_norm": 0.2907097339630127, "learning_rate": 0.000295, "loss": 1.9777, "step": 177 }, { "epoch": 0.0173828125, "grad_norm": 0.22390642762184143, "learning_rate": 0.0002966666666666667, "loss": 1.9961, "step": 178 }, { "epoch": 0.01748046875, "grad_norm": 0.25350186228752136, "learning_rate": 0.00029833333333333334, "loss": 1.9875, "step": 179 }, { "epoch": 0.017578125, "grad_norm": 0.22856706380844116, "learning_rate": 0.0003, "loss": 2.0169, "step": 180 }, { "epoch": 0.01767578125, "grad_norm": 0.2288493812084198, "learning_rate": 0.0003016666666666667, "loss": 2.0238, "step": 181 }, { "epoch": 0.0177734375, "grad_norm": 0.27326855063438416, "learning_rate": 0.00030333333333333335, "loss": 2.0134, "step": 182 }, { "epoch": 0.01787109375, "grad_norm": 0.2447524517774582, "learning_rate": 0.000305, "loss": 1.9852, "step": 183 }, { "epoch": 0.01796875, "grad_norm": 0.4363366663455963, "learning_rate": 0.0003066666666666667, "loss": 1.9921, "step": 184 }, { "epoch": 0.01806640625, "grad_norm": 0.4569666385650635, "learning_rate": 0.00030833333333333337, "loss": 1.9997, "step": 185 }, { "epoch": 0.0181640625, "grad_norm": 0.43348655104637146, "learning_rate": 0.00031, "loss": 2.0584, "step": 186 }, { "epoch": 0.01826171875, "grad_norm": 0.3844921588897705, "learning_rate": 0.00031166666666666663, "loss": 2.0035, "step": 187 }, { "epoch": 0.018359375, "grad_norm": 0.3427641987800598, "learning_rate": 0.0003133333333333334, "loss": 1.9536, "step": 188 }, { "epoch": 0.01845703125, "grad_norm": 0.33557865023612976, "learning_rate": 0.000315, "loss": 1.972, "step": 189 }, { "epoch": 0.0185546875, "grad_norm": 0.4006612300872803, "learning_rate": 0.00031666666666666665, "loss": 2.0652, "step": 190 }, { "epoch": 0.01865234375, "grad_norm": 0.3158099055290222, "learning_rate": 0.00031833333333333334, "loss": 2.0516, "step": 191 }, { "epoch": 0.01875, "grad_norm": 0.3799190819263458, "learning_rate": 0.00032, "loss": 2.011, "step": 192 }, { "epoch": 0.01884765625, "grad_norm": 0.2948876619338989, "learning_rate": 0.00032166666666666666, "loss": 2.0109, "step": 193 }, { "epoch": 0.0189453125, "grad_norm": 0.24561335146427155, "learning_rate": 0.0003233333333333333, "loss": 2.0264, "step": 194 }, { "epoch": 0.01904296875, "grad_norm": 0.24896866083145142, "learning_rate": 0.00032500000000000004, "loss": 1.9726, "step": 195 }, { "epoch": 0.019140625, "grad_norm": 0.26887547969818115, "learning_rate": 0.0003266666666666667, "loss": 2.0036, "step": 196 }, { "epoch": 0.01923828125, "grad_norm": 
0.3186735212802887, "learning_rate": 0.0003283333333333333, "loss": 2.0174, "step": 197 }, { "epoch": 0.0193359375, "grad_norm": 0.3317165672779083, "learning_rate": 0.00033, "loss": 2.0047, "step": 198 }, { "epoch": 0.01943359375, "grad_norm": 0.3068574070930481, "learning_rate": 0.0003316666666666667, "loss": 2.0032, "step": 199 }, { "epoch": 0.01953125, "grad_norm": 0.29292526841163635, "learning_rate": 0.0003333333333333333, "loss": 2.0533, "step": 200 }, { "epoch": 0.01962890625, "grad_norm": 0.2519834041595459, "learning_rate": 0.000335, "loss": 2.0113, "step": 201 }, { "epoch": 0.0197265625, "grad_norm": 0.25766584277153015, "learning_rate": 0.0003366666666666667, "loss": 2.0278, "step": 202 }, { "epoch": 0.01982421875, "grad_norm": 0.2704983055591583, "learning_rate": 0.00033833333333333334, "loss": 1.9725, "step": 203 }, { "epoch": 0.019921875, "grad_norm": 0.2882053256034851, "learning_rate": 0.00034, "loss": 2.0706, "step": 204 }, { "epoch": 0.02001953125, "grad_norm": 0.34524375200271606, "learning_rate": 0.00034166666666666666, "loss": 1.9868, "step": 205 }, { "epoch": 0.0201171875, "grad_norm": 0.3718552887439728, "learning_rate": 0.00034333333333333335, "loss": 1.9596, "step": 206 }, { "epoch": 0.02021484375, "grad_norm": 0.2747247815132141, "learning_rate": 0.000345, "loss": 2.0125, "step": 207 }, { "epoch": 0.0203125, "grad_norm": 0.3062858283519745, "learning_rate": 0.00034666666666666667, "loss": 2.0224, "step": 208 }, { "epoch": 0.02041015625, "grad_norm": 0.32505863904953003, "learning_rate": 0.00034833333333333336, "loss": 2.0376, "step": 209 }, { "epoch": 0.0205078125, "grad_norm": 0.35048386454582214, "learning_rate": 0.00035, "loss": 2.0268, "step": 210 }, { "epoch": 0.02060546875, "grad_norm": 0.31204426288604736, "learning_rate": 0.0003516666666666667, "loss": 2.0198, "step": 211 }, { "epoch": 0.020703125, "grad_norm": 0.24253524839878082, "learning_rate": 0.0003533333333333333, "loss": 2.028, "step": 212 }, { "epoch": 0.02080078125, "grad_norm": 0.286915123462677, "learning_rate": 0.000355, "loss": 1.9358, "step": 213 }, { "epoch": 0.0208984375, "grad_norm": 0.2800680994987488, "learning_rate": 0.0003566666666666667, "loss": 1.9905, "step": 214 }, { "epoch": 0.02099609375, "grad_norm": 0.2718358635902405, "learning_rate": 0.00035833333333333333, "loss": 1.9902, "step": 215 }, { "epoch": 0.02109375, "grad_norm": 0.28583604097366333, "learning_rate": 0.00035999999999999997, "loss": 1.991, "step": 216 }, { "epoch": 0.02119140625, "grad_norm": 0.2911478281021118, "learning_rate": 0.0003616666666666667, "loss": 1.973, "step": 217 }, { "epoch": 0.0212890625, "grad_norm": 0.3601188361644745, "learning_rate": 0.00036333333333333335, "loss": 1.9727, "step": 218 }, { "epoch": 0.02138671875, "grad_norm": 0.2888337969779968, "learning_rate": 0.000365, "loss": 1.988, "step": 219 }, { "epoch": 0.021484375, "grad_norm": 0.25628700852394104, "learning_rate": 0.00036666666666666667, "loss": 1.9984, "step": 220 }, { "epoch": 0.02158203125, "grad_norm": 0.2637641429901123, "learning_rate": 0.00036833333333333336, "loss": 2.0229, "step": 221 }, { "epoch": 0.0216796875, "grad_norm": 0.23845899105072021, "learning_rate": 0.00037, "loss": 1.9985, "step": 222 }, { "epoch": 0.02177734375, "grad_norm": 0.28519535064697266, "learning_rate": 0.00037166666666666663, "loss": 2.0061, "step": 223 }, { "epoch": 0.021875, "grad_norm": 0.31845173239707947, "learning_rate": 0.0003733333333333334, "loss": 2.0081, "step": 224 }, { "epoch": 0.02197265625, "grad_norm": 0.3725838363170624, 
"learning_rate": 0.000375, "loss": 2.0032, "step": 225 }, { "epoch": 0.0220703125, "grad_norm": 0.49783870577812195, "learning_rate": 0.00037666666666666664, "loss": 2.0404, "step": 226 }, { "epoch": 0.02216796875, "grad_norm": 0.5059479475021362, "learning_rate": 0.0003783333333333334, "loss": 2.0498, "step": 227 }, { "epoch": 0.022265625, "grad_norm": 0.461291640996933, "learning_rate": 0.00038, "loss": 2.0078, "step": 228 }, { "epoch": 0.02236328125, "grad_norm": 0.3970203399658203, "learning_rate": 0.00038166666666666666, "loss": 1.9966, "step": 229 }, { "epoch": 0.0224609375, "grad_norm": 0.3155679404735565, "learning_rate": 0.00038333333333333334, "loss": 2.0263, "step": 230 }, { "epoch": 0.02255859375, "grad_norm": 0.32979920506477356, "learning_rate": 0.00038500000000000003, "loss": 1.9967, "step": 231 }, { "epoch": 0.02265625, "grad_norm": 0.27470117807388306, "learning_rate": 0.00038666666666666667, "loss": 2.0461, "step": 232 }, { "epoch": 0.02275390625, "grad_norm": 0.2981088161468506, "learning_rate": 0.0003883333333333333, "loss": 1.9944, "step": 233 }, { "epoch": 0.0228515625, "grad_norm": 0.3496599495410919, "learning_rate": 0.00039000000000000005, "loss": 1.9907, "step": 234 }, { "epoch": 0.02294921875, "grad_norm": 0.3318106532096863, "learning_rate": 0.0003916666666666667, "loss": 2.0576, "step": 235 }, { "epoch": 0.023046875, "grad_norm": 0.29498377442359924, "learning_rate": 0.0003933333333333333, "loss": 2.0242, "step": 236 }, { "epoch": 0.02314453125, "grad_norm": 0.2970214784145355, "learning_rate": 0.000395, "loss": 2.0087, "step": 237 }, { "epoch": 0.0232421875, "grad_norm": 0.37431418895721436, "learning_rate": 0.0003966666666666667, "loss": 2.0657, "step": 238 }, { "epoch": 0.02333984375, "grad_norm": 0.30095174908638, "learning_rate": 0.00039833333333333333, "loss": 2.0217, "step": 239 }, { "epoch": 0.0234375, "grad_norm": 0.24695053696632385, "learning_rate": 0.0004, "loss": 1.9833, "step": 240 }, { "epoch": 0.02353515625, "grad_norm": 0.2923540771007538, "learning_rate": 0.00040166666666666665, "loss": 2.0272, "step": 241 }, { "epoch": 0.0236328125, "grad_norm": 0.2788209915161133, "learning_rate": 0.00040333333333333334, "loss": 2.0104, "step": 242 }, { "epoch": 0.02373046875, "grad_norm": 0.2529614567756653, "learning_rate": 0.00040500000000000003, "loss": 2.003, "step": 243 }, { "epoch": 0.023828125, "grad_norm": 0.2551966905593872, "learning_rate": 0.00040666666666666667, "loss": 2.001, "step": 244 }, { "epoch": 0.02392578125, "grad_norm": 0.2613292634487152, "learning_rate": 0.00040833333333333336, "loss": 1.9822, "step": 245 }, { "epoch": 0.0240234375, "grad_norm": 0.3060430884361267, "learning_rate": 0.00041, "loss": 2.0024, "step": 246 }, { "epoch": 0.02412109375, "grad_norm": 0.33755916357040405, "learning_rate": 0.0004116666666666667, "loss": 2.0023, "step": 247 }, { "epoch": 0.02421875, "grad_norm": 0.33021774888038635, "learning_rate": 0.0004133333333333333, "loss": 1.9086, "step": 248 }, { "epoch": 0.02431640625, "grad_norm": 0.26662060618400574, "learning_rate": 0.000415, "loss": 2.009, "step": 249 }, { "epoch": 0.0244140625, "grad_norm": 0.27698251605033875, "learning_rate": 0.0004166666666666667, "loss": 2.0183, "step": 250 }, { "epoch": 0.02451171875, "grad_norm": 0.2582184970378876, "learning_rate": 0.00041833333333333333, "loss": 1.9932, "step": 251 }, { "epoch": 0.024609375, "grad_norm": 0.28684699535369873, "learning_rate": 0.00042, "loss": 2.0021, "step": 252 }, { "epoch": 0.02470703125, "grad_norm": 0.33535540103912354, "learning_rate": 
0.0004216666666666667, "loss": 2.0387, "step": 253 }, { "epoch": 0.0248046875, "grad_norm": 0.3330588638782501, "learning_rate": 0.00042333333333333334, "loss": 1.9776, "step": 254 }, { "epoch": 0.02490234375, "grad_norm": 0.27919256687164307, "learning_rate": 0.000425, "loss": 2.0111, "step": 255 }, { "epoch": 0.025, "grad_norm": 0.25296416878700256, "learning_rate": 0.0004266666666666667, "loss": 1.9755, "step": 256 }, { "epoch": 0.02509765625, "grad_norm": 0.31288138031959534, "learning_rate": 0.00042833333333333335, "loss": 2.032, "step": 257 }, { "epoch": 0.0251953125, "grad_norm": 0.3360923230648041, "learning_rate": 0.00043, "loss": 2.0837, "step": 258 }, { "epoch": 0.02529296875, "grad_norm": 0.36317816376686096, "learning_rate": 0.0004316666666666667, "loss": 1.9696, "step": 259 }, { "epoch": 0.025390625, "grad_norm": 0.366953581571579, "learning_rate": 0.00043333333333333337, "loss": 2.031, "step": 260 }, { "epoch": 0.02548828125, "grad_norm": 0.34289368987083435, "learning_rate": 0.000435, "loss": 1.9968, "step": 261 }, { "epoch": 0.0255859375, "grad_norm": 0.35170793533325195, "learning_rate": 0.00043666666666666664, "loss": 1.9963, "step": 262 }, { "epoch": 0.02568359375, "grad_norm": 0.28625521063804626, "learning_rate": 0.0004383333333333334, "loss": 1.9932, "step": 263 }, { "epoch": 0.02578125, "grad_norm": 0.2861610949039459, "learning_rate": 0.00044, "loss": 2.0297, "step": 264 }, { "epoch": 0.02587890625, "grad_norm": 0.30467647314071655, "learning_rate": 0.00044166666666666665, "loss": 1.992, "step": 265 }, { "epoch": 0.0259765625, "grad_norm": 0.31711357831954956, "learning_rate": 0.00044333333333333334, "loss": 2.0135, "step": 266 }, { "epoch": 0.02607421875, "grad_norm": 0.341530978679657, "learning_rate": 0.00044500000000000003, "loss": 1.983, "step": 267 }, { "epoch": 0.026171875, "grad_norm": 0.373901903629303, "learning_rate": 0.00044666666666666666, "loss": 2.0048, "step": 268 }, { "epoch": 0.02626953125, "grad_norm": 0.3105134665966034, "learning_rate": 0.0004483333333333333, "loss": 2.0368, "step": 269 }, { "epoch": 0.0263671875, "grad_norm": 0.29363134503364563, "learning_rate": 0.00045000000000000004, "loss": 1.978, "step": 270 }, { "epoch": 0.02646484375, "grad_norm": 0.3060167133808136, "learning_rate": 0.0004516666666666667, "loss": 1.9479, "step": 271 }, { "epoch": 0.0265625, "grad_norm": 0.30803290009498596, "learning_rate": 0.0004533333333333333, "loss": 1.9662, "step": 272 }, { "epoch": 0.02666015625, "grad_norm": 0.3324045240879059, "learning_rate": 0.000455, "loss": 2.0299, "step": 273 }, { "epoch": 0.0267578125, "grad_norm": 0.39051148295402527, "learning_rate": 0.0004566666666666667, "loss": 1.9856, "step": 274 }, { "epoch": 0.02685546875, "grad_norm": 0.4288715124130249, "learning_rate": 0.0004583333333333333, "loss": 2.0264, "step": 275 }, { "epoch": 0.026953125, "grad_norm": 0.34478962421417236, "learning_rate": 0.00046, "loss": 1.9824, "step": 276 }, { "epoch": 0.02705078125, "grad_norm": 0.2766290009021759, "learning_rate": 0.0004616666666666667, "loss": 2.0066, "step": 277 }, { "epoch": 0.0271484375, "grad_norm": 0.2508682608604431, "learning_rate": 0.00046333333333333334, "loss": 1.9663, "step": 278 }, { "epoch": 0.02724609375, "grad_norm": 0.26924827694892883, "learning_rate": 0.000465, "loss": 1.9903, "step": 279 }, { "epoch": 0.02734375, "grad_norm": 0.27668496966362, "learning_rate": 0.00046666666666666666, "loss": 2.0097, "step": 280 }, { "epoch": 0.02744140625, "grad_norm": 0.25026220083236694, "learning_rate": 0.00046833333333333335, 
"loss": 2.0583, "step": 281 }, { "epoch": 0.0275390625, "grad_norm": 0.2158055454492569, "learning_rate": 0.00047, "loss": 2.0137, "step": 282 }, { "epoch": 0.02763671875, "grad_norm": 0.22540244460105896, "learning_rate": 0.0004716666666666667, "loss": 1.994, "step": 283 }, { "epoch": 0.027734375, "grad_norm": 0.26405519247055054, "learning_rate": 0.00047333333333333336, "loss": 2.0221, "step": 284 }, { "epoch": 0.02783203125, "grad_norm": 0.2979099452495575, "learning_rate": 0.000475, "loss": 2.0047, "step": 285 }, { "epoch": 0.0279296875, "grad_norm": 0.34131935238838196, "learning_rate": 0.0004766666666666667, "loss": 1.9907, "step": 286 }, { "epoch": 0.02802734375, "grad_norm": 0.37178686261177063, "learning_rate": 0.0004783333333333333, "loss": 1.9806, "step": 287 }, { "epoch": 0.028125, "grad_norm": 0.36835598945617676, "learning_rate": 0.00048, "loss": 2.0134, "step": 288 }, { "epoch": 0.02822265625, "grad_norm": 0.29690125584602356, "learning_rate": 0.0004816666666666667, "loss": 2.0261, "step": 289 }, { "epoch": 0.0283203125, "grad_norm": 0.2690771818161011, "learning_rate": 0.00048333333333333334, "loss": 1.9718, "step": 290 }, { "epoch": 0.02841796875, "grad_norm": 0.3377201557159424, "learning_rate": 0.00048499999999999997, "loss": 1.99, "step": 291 }, { "epoch": 0.028515625, "grad_norm": 0.34973010420799255, "learning_rate": 0.0004866666666666667, "loss": 1.9721, "step": 292 }, { "epoch": 0.02861328125, "grad_norm": 0.3172457218170166, "learning_rate": 0.0004883333333333333, "loss": 1.9928, "step": 293 }, { "epoch": 0.0287109375, "grad_norm": 0.34357598423957825, "learning_rate": 0.00049, "loss": 1.9995, "step": 294 }, { "epoch": 0.02880859375, "grad_norm": 0.3824540376663208, "learning_rate": 0.0004916666666666666, "loss": 1.9772, "step": 295 }, { "epoch": 0.02890625, "grad_norm": 0.3704535663127899, "learning_rate": 0.0004933333333333334, "loss": 1.9829, "step": 296 }, { "epoch": 0.02900390625, "grad_norm": 0.2571757733821869, "learning_rate": 0.000495, "loss": 1.9966, "step": 297 }, { "epoch": 0.0291015625, "grad_norm": 0.3970927894115448, "learning_rate": 0.0004966666666666666, "loss": 1.9841, "step": 298 }, { "epoch": 0.02919921875, "grad_norm": 0.3420144319534302, "learning_rate": 0.0004983333333333334, "loss": 1.9747, "step": 299 }, { "epoch": 0.029296875, "grad_norm": 0.30147823691368103, "learning_rate": 0.0005, "loss": 1.9999, "step": 300 }, { "epoch": 0.02939453125, "grad_norm": 0.33727970719337463, "learning_rate": 0.0004999999887622467, "loss": 2.0084, "step": 301 }, { "epoch": 0.0294921875, "grad_norm": 0.29407384991645813, "learning_rate": 0.0004999999550489878, "loss": 2.0144, "step": 302 }, { "epoch": 0.02958984375, "grad_norm": 0.3489755094051361, "learning_rate": 0.0004999998988602267, "loss": 2.0058, "step": 303 }, { "epoch": 0.0296875, "grad_norm": 0.3327770233154297, "learning_rate": 0.0004999998201959691, "loss": 2.0166, "step": 304 }, { "epoch": 0.02978515625, "grad_norm": 0.2923370599746704, "learning_rate": 0.0004999997190562227, "loss": 2.0148, "step": 305 }, { "epoch": 0.0298828125, "grad_norm": 0.31616437435150146, "learning_rate": 0.0004999995954409976, "loss": 1.9772, "step": 306 }, { "epoch": 0.02998046875, "grad_norm": 0.22982288897037506, "learning_rate": 0.0004999994493503064, "loss": 2.0584, "step": 307 }, { "epoch": 0.030078125, "grad_norm": 0.2886744737625122, "learning_rate": 0.0004999992807841634, "loss": 2.0114, "step": 308 }, { "epoch": 0.03017578125, "grad_norm": 0.3027271032333374, "learning_rate": 0.0004999990897425856, "loss": 
2.011, "step": 309 }, { "epoch": 0.0302734375, "grad_norm": 0.3191162645816803, "learning_rate": 0.0004999988762255922, "loss": 1.9962, "step": 310 }, { "epoch": 0.03037109375, "grad_norm": 0.34986981749534607, "learning_rate": 0.0004999986402332042, "loss": 1.9612, "step": 311 }, { "epoch": 0.03046875, "grad_norm": 0.36431390047073364, "learning_rate": 0.0004999983817654454, "loss": 2.045, "step": 312 }, { "epoch": 0.03056640625, "grad_norm": 0.4198042154312134, "learning_rate": 0.0004999981008223416, "loss": 2.0132, "step": 313 }, { "epoch": 0.0306640625, "grad_norm": 0.43374890089035034, "learning_rate": 0.0004999977974039207, "loss": 2.0578, "step": 314 }, { "epoch": 0.03076171875, "grad_norm": 0.3654812276363373, "learning_rate": 0.0004999974715102132, "loss": 1.9721, "step": 315 }, { "epoch": 0.030859375, "grad_norm": 0.29420921206474304, "learning_rate": 0.0004999971231412517, "loss": 2.0296, "step": 316 }, { "epoch": 0.03095703125, "grad_norm": 0.31979072093963623, "learning_rate": 0.0004999967522970708, "loss": 1.9623, "step": 317 }, { "epoch": 0.0310546875, "grad_norm": 0.3570129871368408, "learning_rate": 0.0004999963589777076, "loss": 1.9966, "step": 318 }, { "epoch": 0.03115234375, "grad_norm": 0.29743143916130066, "learning_rate": 0.0004999959431832016, "loss": 1.9535, "step": 319 }, { "epoch": 0.03125, "grad_norm": 0.23641493916511536, "learning_rate": 0.000499995504913594, "loss": 2.0304, "step": 320 }, { "epoch": 0.03134765625, "grad_norm": 0.3556622564792633, "learning_rate": 0.0004999950441689288, "loss": 1.971, "step": 321 }, { "epoch": 0.0314453125, "grad_norm": 0.323939710855484, "learning_rate": 0.0004999945609492519, "loss": 2.0153, "step": 322 }, { "epoch": 0.03154296875, "grad_norm": 0.22797244787216187, "learning_rate": 0.0004999940552546118, "loss": 1.9807, "step": 323 }, { "epoch": 0.031640625, "grad_norm": 0.2641647458076477, "learning_rate": 0.0004999935270850587, "loss": 1.9988, "step": 324 }, { "epoch": 0.03173828125, "grad_norm": 0.25289344787597656, "learning_rate": 0.0004999929764406455, "loss": 1.9748, "step": 325 }, { "epoch": 0.0318359375, "grad_norm": 0.2377796769142151, "learning_rate": 0.0004999924033214274, "loss": 1.9983, "step": 326 }, { "epoch": 0.03193359375, "grad_norm": 0.2711915671825409, "learning_rate": 0.0004999918077274612, "loss": 1.9643, "step": 327 }, { "epoch": 0.03203125, "grad_norm": 0.2866462767124176, "learning_rate": 0.0004999911896588068, "loss": 2.0196, "step": 328 }, { "epoch": 0.03212890625, "grad_norm": 0.25075578689575195, "learning_rate": 0.0004999905491155257, "loss": 2.0426, "step": 329 }, { "epoch": 0.0322265625, "grad_norm": 0.266648530960083, "learning_rate": 0.000499989886097682, "loss": 2.0318, "step": 330 }, { "epoch": 0.03232421875, "grad_norm": 0.22494247555732727, "learning_rate": 0.0004999892006053421, "loss": 1.9839, "step": 331 }, { "epoch": 0.032421875, "grad_norm": 0.3179854452610016, "learning_rate": 0.0004999884926385741, "loss": 1.9981, "step": 332 }, { "epoch": 0.03251953125, "grad_norm": 0.2754990756511688, "learning_rate": 0.000499987762197449, "loss": 1.9879, "step": 333 }, { "epoch": 0.0326171875, "grad_norm": 0.2669137418270111, "learning_rate": 0.0004999870092820395, "loss": 2.0083, "step": 334 }, { "epoch": 0.03271484375, "grad_norm": 0.31376007199287415, "learning_rate": 0.0004999862338924212, "loss": 2.0166, "step": 335 }, { "epoch": 0.0328125, "grad_norm": 0.32645899057388306, "learning_rate": 0.0004999854360286712, "loss": 2.0019, "step": 336 }, { "epoch": 0.03291015625, "grad_norm": 
0.3550071120262146, "learning_rate": 0.0004999846156908692, "loss": 1.9744, "step": 337 }, { "epoch": 0.0330078125, "grad_norm": 0.30171769857406616, "learning_rate": 0.0004999837728790975, "loss": 2.0231, "step": 338 }, { "epoch": 0.03310546875, "grad_norm": 0.23128142952919006, "learning_rate": 0.00049998290759344, "loss": 2.001, "step": 339 }, { "epoch": 0.033203125, "grad_norm": 0.2433364987373352, "learning_rate": 0.0004999820198339832, "loss": 1.9945, "step": 340 }, { "epoch": 0.03330078125, "grad_norm": 0.30562201142311096, "learning_rate": 0.0004999811096008159, "loss": 2.0228, "step": 341 }, { "epoch": 0.0333984375, "grad_norm": 0.33020487427711487, "learning_rate": 0.0004999801768940287, "loss": 1.9691, "step": 342 }, { "epoch": 0.03349609375, "grad_norm": 0.3125375807285309, "learning_rate": 0.0004999792217137151, "loss": 1.9728, "step": 343 }, { "epoch": 0.03359375, "grad_norm": 0.26769348978996277, "learning_rate": 0.0004999782440599702, "loss": 2.0066, "step": 344 }, { "epoch": 0.03369140625, "grad_norm": 0.3596431314945221, "learning_rate": 0.0004999772439328921, "loss": 2.0426, "step": 345 }, { "epoch": 0.0337890625, "grad_norm": 0.34117281436920166, "learning_rate": 0.0004999762213325803, "loss": 1.9985, "step": 346 }, { "epoch": 0.03388671875, "grad_norm": 0.2922564148902893, "learning_rate": 0.0004999751762591371, "loss": 2.0346, "step": 347 }, { "epoch": 0.033984375, "grad_norm": 0.2813419997692108, "learning_rate": 0.0004999741087126669, "loss": 2.0573, "step": 348 }, { "epoch": 0.03408203125, "grad_norm": 0.27346089482307434, "learning_rate": 0.0004999730186932764, "loss": 1.9655, "step": 349 }, { "epoch": 0.0341796875, "grad_norm": 0.24704065918922424, "learning_rate": 0.0004999719062010745, "loss": 1.9542, "step": 350 }, { "epoch": 0.03427734375, "grad_norm": 0.24761976301670074, "learning_rate": 0.000499970771236172, "loss": 1.9526, "step": 351 }, { "epoch": 0.034375, "grad_norm": 0.2508860230445862, "learning_rate": 0.0004999696137986826, "loss": 2.0119, "step": 352 }, { "epoch": 0.03447265625, "grad_norm": 0.24160990118980408, "learning_rate": 0.0004999684338887219, "loss": 2.0148, "step": 353 }, { "epoch": 0.0345703125, "grad_norm": 0.2879098355770111, "learning_rate": 0.0004999672315064076, "loss": 2.0206, "step": 354 }, { "epoch": 0.03466796875, "grad_norm": 0.28366580605506897, "learning_rate": 0.0004999660066518601, "loss": 2.0737, "step": 355 }, { "epoch": 0.034765625, "grad_norm": 0.3401612937450409, "learning_rate": 0.0004999647593252013, "loss": 1.9868, "step": 356 }, { "epoch": 0.03486328125, "grad_norm": 0.4696269631385803, "learning_rate": 0.0004999634895265562, "loss": 2.0079, "step": 357 }, { "epoch": 0.0349609375, "grad_norm": 0.596168041229248, "learning_rate": 0.0004999621972560515, "loss": 2.0084, "step": 358 }, { "epoch": 0.03505859375, "grad_norm": 0.5319205522537231, "learning_rate": 0.0004999608825138162, "loss": 2.0084, "step": 359 }, { "epoch": 0.03515625, "grad_norm": 0.24298223853111267, "learning_rate": 0.0004999595452999818, "loss": 1.9183, "step": 360 }, { "epoch": 0.03525390625, "grad_norm": 0.39650923013687134, "learning_rate": 0.0004999581856146817, "loss": 1.9886, "step": 361 }, { "epoch": 0.0353515625, "grad_norm": 0.3819667100906372, "learning_rate": 0.0004999568034580518, "loss": 2.0047, "step": 362 }, { "epoch": 0.03544921875, "grad_norm": 0.3298279345035553, "learning_rate": 0.0004999553988302303, "loss": 1.9466, "step": 363 }, { "epoch": 0.035546875, "grad_norm": 0.2343115359544754, "learning_rate": 0.0004999539717313573, 
"loss": 1.9828, "step": 364 }, { "epoch": 0.03564453125, "grad_norm": 0.31238943338394165, "learning_rate": 0.0004999525221615755, "loss": 2.0721, "step": 365 }, { "epoch": 0.0357421875, "grad_norm": 0.27481910586357117, "learning_rate": 0.0004999510501210295, "loss": 2.0106, "step": 366 }, { "epoch": 0.03583984375, "grad_norm": 0.24870915710926056, "learning_rate": 0.0004999495556098666, "loss": 1.9846, "step": 367 }, { "epoch": 0.0359375, "grad_norm": 0.2666539251804352, "learning_rate": 0.0004999480386282359, "loss": 1.9988, "step": 368 }, { "epoch": 0.03603515625, "grad_norm": 0.23231451213359833, "learning_rate": 0.000499946499176289, "loss": 2.0235, "step": 369 }, { "epoch": 0.0361328125, "grad_norm": 0.19276577234268188, "learning_rate": 0.0004999449372541798, "loss": 1.9832, "step": 370 }, { "epoch": 0.03623046875, "grad_norm": 0.23545822501182556, "learning_rate": 0.000499943352862064, "loss": 1.9879, "step": 371 }, { "epoch": 0.036328125, "grad_norm": 0.2608807384967804, "learning_rate": 0.0004999417460001002, "loss": 2.047, "step": 372 }, { "epoch": 0.03642578125, "grad_norm": 0.27395492792129517, "learning_rate": 0.0004999401166684487, "loss": 1.9496, "step": 373 }, { "epoch": 0.0365234375, "grad_norm": 0.24661333858966827, "learning_rate": 0.0004999384648672724, "loss": 1.9924, "step": 374 }, { "epoch": 0.03662109375, "grad_norm": 0.23559828102588654, "learning_rate": 0.0004999367905967362, "loss": 1.979, "step": 375 }, { "epoch": 0.03671875, "grad_norm": 0.2556357681751251, "learning_rate": 0.0004999350938570074, "loss": 1.9793, "step": 376 }, { "epoch": 0.03681640625, "grad_norm": 0.29081088304519653, "learning_rate": 0.0004999333746482555, "loss": 1.962, "step": 377 }, { "epoch": 0.0369140625, "grad_norm": 0.2583800256252289, "learning_rate": 0.0004999316329706521, "loss": 1.9765, "step": 378 }, { "epoch": 0.03701171875, "grad_norm": 0.2506580054759979, "learning_rate": 0.0004999298688243714, "loss": 2.0302, "step": 379 }, { "epoch": 0.037109375, "grad_norm": 0.289530485868454, "learning_rate": 0.0004999280822095895, "loss": 1.9514, "step": 380 }, { "epoch": 0.03720703125, "grad_norm": 0.24479152262210846, "learning_rate": 0.0004999262731264848, "loss": 1.9742, "step": 381 }, { "epoch": 0.0373046875, "grad_norm": 0.23676550388336182, "learning_rate": 0.0004999244415752381, "loss": 2.0161, "step": 382 }, { "epoch": 0.03740234375, "grad_norm": 0.2447502315044403, "learning_rate": 0.0004999225875560323, "loss": 2.0114, "step": 383 }, { "epoch": 0.0375, "grad_norm": 0.20993874967098236, "learning_rate": 0.0004999207110690528, "loss": 1.9786, "step": 384 }, { "epoch": 0.03759765625, "grad_norm": 0.23405557870864868, "learning_rate": 0.0004999188121144867, "loss": 2.0004, "step": 385 }, { "epoch": 0.0376953125, "grad_norm": 0.2310025990009308, "learning_rate": 0.0004999168906925238, "loss": 1.924, "step": 386 }, { "epoch": 0.03779296875, "grad_norm": 0.2507460117340088, "learning_rate": 0.0004999149468033564, "loss": 2.0301, "step": 387 }, { "epoch": 0.037890625, "grad_norm": 0.30916762351989746, "learning_rate": 0.0004999129804471782, "loss": 1.9948, "step": 388 }, { "epoch": 0.03798828125, "grad_norm": 0.42094507813453674, "learning_rate": 0.0004999109916241858, "loss": 2.0128, "step": 389 }, { "epoch": 0.0380859375, "grad_norm": 0.5398088693618774, "learning_rate": 0.0004999089803345779, "loss": 1.9866, "step": 390 }, { "epoch": 0.03818359375, "grad_norm": 0.4601620137691498, "learning_rate": 0.0004999069465785554, "loss": 1.9807, "step": 391 }, { "epoch": 0.03828125, 
"grad_norm": 0.24873095750808716, "learning_rate": 0.0004999048903563213, "loss": 1.9225, "step": 392 }, { "epoch": 0.03837890625, "grad_norm": 0.37363573908805847, "learning_rate": 0.0004999028116680814, "loss": 2.0212, "step": 393 }, { "epoch": 0.0384765625, "grad_norm": 0.27399107813835144, "learning_rate": 0.0004999007105140428, "loss": 1.9797, "step": 394 }, { "epoch": 0.03857421875, "grad_norm": 0.2717953622341156, "learning_rate": 0.0004998985868944158, "loss": 2.0119, "step": 395 }, { "epoch": 0.038671875, "grad_norm": 0.2766648530960083, "learning_rate": 0.0004998964408094124, "loss": 1.9936, "step": 396 }, { "epoch": 0.03876953125, "grad_norm": 0.2989843487739563, "learning_rate": 0.0004998942722592469, "loss": 2.0261, "step": 397 }, { "epoch": 0.0388671875, "grad_norm": 0.31954097747802734, "learning_rate": 0.000499892081244136, "loss": 1.9713, "step": 398 }, { "epoch": 0.03896484375, "grad_norm": 0.31199777126312256, "learning_rate": 0.0004998898677642987, "loss": 1.9548, "step": 399 }, { "epoch": 0.0390625, "grad_norm": 0.2475418746471405, "learning_rate": 0.0004998876318199557, "loss": 1.9878, "step": 400 }, { "epoch": 0.03916015625, "grad_norm": 0.25001809000968933, "learning_rate": 0.0004998853734113308, "loss": 1.991, "step": 401 }, { "epoch": 0.0392578125, "grad_norm": 0.2468329817056656, "learning_rate": 0.0004998830925386492, "loss": 1.9988, "step": 402 }, { "epoch": 0.03935546875, "grad_norm": 0.23273305594921112, "learning_rate": 0.000499880789202139, "loss": 2.0138, "step": 403 }, { "epoch": 0.039453125, "grad_norm": 0.21512626111507416, "learning_rate": 0.0004998784634020303, "loss": 1.9326, "step": 404 }, { "epoch": 0.03955078125, "grad_norm": 0.2123369723558426, "learning_rate": 0.0004998761151385554, "loss": 1.9954, "step": 405 }, { "epoch": 0.0396484375, "grad_norm": 0.20985403656959534, "learning_rate": 0.0004998737444119488, "loss": 1.996, "step": 406 }, { "epoch": 0.03974609375, "grad_norm": 0.19709332287311554, "learning_rate": 0.0004998713512224473, "loss": 1.9642, "step": 407 }, { "epoch": 0.03984375, "grad_norm": 0.21997478604316711, "learning_rate": 0.00049986893557029, "loss": 1.9925, "step": 408 }, { "epoch": 0.03994140625, "grad_norm": 0.2419801503419876, "learning_rate": 0.0004998664974557182, "loss": 2.0262, "step": 409 }, { "epoch": 0.0400390625, "grad_norm": 0.2527197003364563, "learning_rate": 0.0004998640368789754, "loss": 2.0011, "step": 410 }, { "epoch": 0.04013671875, "grad_norm": 0.2539777159690857, "learning_rate": 0.0004998615538403074, "loss": 1.9799, "step": 411 }, { "epoch": 0.040234375, "grad_norm": 0.2419251948595047, "learning_rate": 0.0004998590483399623, "loss": 2.0035, "step": 412 }, { "epoch": 0.04033203125, "grad_norm": 0.26659199595451355, "learning_rate": 0.0004998565203781904, "loss": 1.9743, "step": 413 }, { "epoch": 0.0404296875, "grad_norm": 0.3216726779937744, "learning_rate": 0.0004998539699552441, "loss": 1.979, "step": 414 }, { "epoch": 0.04052734375, "grad_norm": 0.2780512571334839, "learning_rate": 0.0004998513970713783, "loss": 1.9892, "step": 415 }, { "epoch": 0.040625, "grad_norm": 0.2947809100151062, "learning_rate": 0.00049984880172685, "loss": 1.9836, "step": 416 }, { "epoch": 0.04072265625, "grad_norm": 0.33937209844589233, "learning_rate": 0.0004998461839219182, "loss": 2.0007, "step": 417 }, { "epoch": 0.0408203125, "grad_norm": 0.3530365228652954, "learning_rate": 0.0004998435436568446, "loss": 1.9462, "step": 418 }, { "epoch": 0.04091796875, "grad_norm": 0.34433260560035706, "learning_rate": 
0.000499840880931893, "loss": 1.9733, "step": 419 }, { "epoch": 0.041015625, "grad_norm": 0.323081910610199, "learning_rate": 0.0004998381957473293, "loss": 1.9522, "step": 420 }, { "epoch": 0.04111328125, "grad_norm": 0.22895868122577667, "learning_rate": 0.0004998354881034217, "loss": 1.9672, "step": 421 }, { "epoch": 0.0412109375, "grad_norm": 0.2543105185031891, "learning_rate": 0.0004998327580004408, "loss": 1.9429, "step": 422 }, { "epoch": 0.04130859375, "grad_norm": 0.2795581519603729, "learning_rate": 0.0004998300054386591, "loss": 1.9902, "step": 423 }, { "epoch": 0.04140625, "grad_norm": 0.2201048880815506, "learning_rate": 0.0004998272304183517, "loss": 1.9365, "step": 424 }, { "epoch": 0.04150390625, "grad_norm": 0.26653197407722473, "learning_rate": 0.0004998244329397958, "loss": 2.0178, "step": 425 }, { "epoch": 0.0416015625, "grad_norm": 0.26959821581840515, "learning_rate": 0.0004998216130032708, "loss": 1.9625, "step": 426 }, { "epoch": 0.04169921875, "grad_norm": 0.2673629820346832, "learning_rate": 0.0004998187706090584, "loss": 1.995, "step": 427 }, { "epoch": 0.041796875, "grad_norm": 0.3190925121307373, "learning_rate": 0.0004998159057574426, "loss": 2.0094, "step": 428 }, { "epoch": 0.04189453125, "grad_norm": 0.3376927673816681, "learning_rate": 0.0004998130184487094, "loss": 2.0535, "step": 429 }, { "epoch": 0.0419921875, "grad_norm": 0.35333067178726196, "learning_rate": 0.0004998101086831474, "loss": 2.0256, "step": 430 }, { "epoch": 0.04208984375, "grad_norm": 0.3051585853099823, "learning_rate": 0.0004998071764610471, "loss": 1.9798, "step": 431 }, { "epoch": 0.0421875, "grad_norm": 0.21881523728370667, "learning_rate": 0.0004998042217827015, "loss": 1.9726, "step": 432 }, { "epoch": 0.04228515625, "grad_norm": 0.2296576052904129, "learning_rate": 0.0004998012446484057, "loss": 1.9675, "step": 433 }, { "epoch": 0.0423828125, "grad_norm": 0.24055050313472748, "learning_rate": 0.0004997982450584572, "loss": 1.9488, "step": 434 }, { "epoch": 0.04248046875, "grad_norm": 0.22267191112041473, "learning_rate": 0.0004997952230131555, "loss": 1.9617, "step": 435 }, { "epoch": 0.042578125, "grad_norm": 0.2590855658054352, "learning_rate": 0.0004997921785128026, "loss": 1.9745, "step": 436 }, { "epoch": 0.04267578125, "grad_norm": 0.2995806336402893, "learning_rate": 0.0004997891115577025, "loss": 1.9667, "step": 437 }, { "epoch": 0.0427734375, "grad_norm": 0.3083277642726898, "learning_rate": 0.0004997860221481616, "loss": 1.9661, "step": 438 }, { "epoch": 0.04287109375, "grad_norm": 0.31743094325065613, "learning_rate": 0.0004997829102844885, "loss": 2.0172, "step": 439 }, { "epoch": 0.04296875, "grad_norm": 0.37507855892181396, "learning_rate": 0.0004997797759669941, "loss": 2.0211, "step": 440 }, { "epoch": 0.04306640625, "grad_norm": 0.35785287618637085, "learning_rate": 0.0004997766191959914, "loss": 2.038, "step": 441 }, { "epoch": 0.0431640625, "grad_norm": 0.3099903166294098, "learning_rate": 0.0004997734399717958, "loss": 1.9879, "step": 442 }, { "epoch": 0.04326171875, "grad_norm": 0.32211577892303467, "learning_rate": 0.0004997702382947248, "loss": 1.987, "step": 443 }, { "epoch": 0.043359375, "grad_norm": 0.24999700486660004, "learning_rate": 0.0004997670141650984, "loss": 1.9765, "step": 444 }, { "epoch": 0.04345703125, "grad_norm": 0.23301391303539276, "learning_rate": 0.0004997637675832386, "loss": 1.9592, "step": 445 }, { "epoch": 0.0435546875, "grad_norm": 0.22780932486057281, "learning_rate": 0.0004997604985494694, "loss": 1.9762, "step": 446 }, { 
"epoch": 0.04365234375, "grad_norm": 0.2347191423177719, "learning_rate": 0.0004997572070641178, "loss": 1.9791, "step": 447 }, { "epoch": 0.04375, "grad_norm": 0.3288608193397522, "learning_rate": 0.0004997538931275123, "loss": 1.9664, "step": 448 }, { "epoch": 0.04384765625, "grad_norm": 0.29897212982177734, "learning_rate": 0.000499750556739984, "loss": 1.9876, "step": 449 }, { "epoch": 0.0439453125, "grad_norm": 0.24389693140983582, "learning_rate": 0.0004997471979018663, "loss": 1.9742, "step": 450 }, { "epoch": 0.04404296875, "grad_norm": 0.3172384798526764, "learning_rate": 0.0004997438166134945, "loss": 1.969, "step": 451 }, { "epoch": 0.044140625, "grad_norm": 0.22951611876487732, "learning_rate": 0.0004997404128752065, "loss": 1.9832, "step": 452 }, { "epoch": 0.04423828125, "grad_norm": 0.2356499582529068, "learning_rate": 0.0004997369866873423, "loss": 2.0236, "step": 453 }, { "epoch": 0.0443359375, "grad_norm": 0.2687929570674896, "learning_rate": 0.000499733538050244, "loss": 1.9521, "step": 454 }, { "epoch": 0.04443359375, "grad_norm": 0.2512679398059845, "learning_rate": 0.0004997300669642564, "loss": 1.9754, "step": 455 }, { "epoch": 0.04453125, "grad_norm": 0.22110989689826965, "learning_rate": 0.0004997265734297259, "loss": 1.9931, "step": 456 }, { "epoch": 0.04462890625, "grad_norm": 0.2548464834690094, "learning_rate": 0.0004997230574470017, "loss": 2.001, "step": 457 }, { "epoch": 0.0447265625, "grad_norm": 0.3107970356941223, "learning_rate": 0.0004997195190164349, "loss": 1.9855, "step": 458 }, { "epoch": 0.04482421875, "grad_norm": 0.2989678680896759, "learning_rate": 0.0004997159581383789, "loss": 1.9646, "step": 459 }, { "epoch": 0.044921875, "grad_norm": 0.3651764690876007, "learning_rate": 0.0004997123748131896, "loss": 1.9865, "step": 460 }, { "epoch": 0.04501953125, "grad_norm": 0.41693389415740967, "learning_rate": 0.0004997087690412248, "loss": 1.9756, "step": 461 }, { "epoch": 0.0451171875, "grad_norm": 0.425959974527359, "learning_rate": 0.0004997051408228447, "loss": 1.9887, "step": 462 }, { "epoch": 0.04521484375, "grad_norm": 0.38966843485832214, "learning_rate": 0.0004997014901584118, "loss": 1.9532, "step": 463 }, { "epoch": 0.0453125, "grad_norm": 0.29931432008743286, "learning_rate": 0.0004996978170482906, "loss": 1.9806, "step": 464 }, { "epoch": 0.04541015625, "grad_norm": 0.23077397048473358, "learning_rate": 0.0004996941214928482, "loss": 2.0258, "step": 465 }, { "epoch": 0.0455078125, "grad_norm": 0.32101985812187195, "learning_rate": 0.0004996904034924536, "loss": 2.0023, "step": 466 }, { "epoch": 0.04560546875, "grad_norm": 0.3225870430469513, "learning_rate": 0.0004996866630474783, "loss": 1.9385, "step": 467 }, { "epoch": 0.045703125, "grad_norm": 0.2592551112174988, "learning_rate": 0.0004996829001582959, "loss": 2.0176, "step": 468 }, { "epoch": 0.04580078125, "grad_norm": 0.24367393553256989, "learning_rate": 0.0004996791148252824, "loss": 1.9844, "step": 469 }, { "epoch": 0.0458984375, "grad_norm": 0.26423218846321106, "learning_rate": 0.0004996753070488156, "loss": 1.9675, "step": 470 }, { "epoch": 0.04599609375, "grad_norm": 0.27801692485809326, "learning_rate": 0.0004996714768292762, "loss": 1.9265, "step": 471 }, { "epoch": 0.04609375, "grad_norm": 0.3210003077983856, "learning_rate": 0.0004996676241670467, "loss": 1.9937, "step": 472 }, { "epoch": 0.04619140625, "grad_norm": 0.3082759380340576, "learning_rate": 0.0004996637490625118, "loss": 1.9997, "step": 473 }, { "epoch": 0.0462890625, "grad_norm": 0.2486785650253296, 
"learning_rate": 0.0004996598515160589, "loss": 2.0687, "step": 474 }, { "epoch": 0.04638671875, "grad_norm": 0.31927499175071716, "learning_rate": 0.000499655931528077, "loss": 1.9853, "step": 475 }, { "epoch": 0.046484375, "grad_norm": 0.2736678123474121, "learning_rate": 0.0004996519890989578, "loss": 1.9916, "step": 476 }, { "epoch": 0.04658203125, "grad_norm": 0.29522353410720825, "learning_rate": 0.0004996480242290952, "loss": 1.9554, "step": 477 }, { "epoch": 0.0466796875, "grad_norm": 0.26841938495635986, "learning_rate": 0.0004996440369188851, "loss": 1.9275, "step": 478 }, { "epoch": 0.04677734375, "grad_norm": 0.2695488929748535, "learning_rate": 0.0004996400271687259, "loss": 1.9898, "step": 479 }, { "epoch": 0.046875, "grad_norm": 0.36634454131126404, "learning_rate": 0.0004996359949790181, "loss": 1.9799, "step": 480 }, { "epoch": 0.04697265625, "grad_norm": 0.37612974643707275, "learning_rate": 0.0004996319403501647, "loss": 1.9645, "step": 481 }, { "epoch": 0.0470703125, "grad_norm": 0.29007261991500854, "learning_rate": 0.0004996278632825703, "loss": 1.9486, "step": 482 }, { "epoch": 0.04716796875, "grad_norm": 0.30373314023017883, "learning_rate": 0.0004996237637766424, "loss": 1.9913, "step": 483 }, { "epoch": 0.047265625, "grad_norm": 0.2949075400829315, "learning_rate": 0.0004996196418327906, "loss": 1.9894, "step": 484 }, { "epoch": 0.04736328125, "grad_norm": 0.30601680278778076, "learning_rate": 0.0004996154974514264, "loss": 1.9835, "step": 485 }, { "epoch": 0.0474609375, "grad_norm": 0.2896345853805542, "learning_rate": 0.000499611330632964, "loss": 2.0043, "step": 486 }, { "epoch": 0.04755859375, "grad_norm": 0.3836851418018341, "learning_rate": 0.0004996071413778195, "loss": 1.9782, "step": 487 }, { "epoch": 0.04765625, "grad_norm": 0.4167245626449585, "learning_rate": 0.0004996029296864114, "loss": 1.9792, "step": 488 }, { "epoch": 0.04775390625, "grad_norm": 0.3247157335281372, "learning_rate": 0.0004995986955591606, "loss": 1.9548, "step": 489 }, { "epoch": 0.0478515625, "grad_norm": 0.36726462841033936, "learning_rate": 0.0004995944389964897, "loss": 1.9844, "step": 490 }, { "epoch": 0.04794921875, "grad_norm": 0.27440258860588074, "learning_rate": 0.0004995901599988241, "loss": 1.9608, "step": 491 }, { "epoch": 0.048046875, "grad_norm": 0.271771103143692, "learning_rate": 0.0004995858585665912, "loss": 2.022, "step": 492 }, { "epoch": 0.04814453125, "grad_norm": 0.3079121708869934, "learning_rate": 0.0004995815347002208, "loss": 1.9804, "step": 493 }, { "epoch": 0.0482421875, "grad_norm": 0.2976233661174774, "learning_rate": 0.0004995771884001445, "loss": 1.9679, "step": 494 }, { "epoch": 0.04833984375, "grad_norm": 0.31318438053131104, "learning_rate": 0.0004995728196667969, "loss": 2.0342, "step": 495 }, { "epoch": 0.0484375, "grad_norm": 0.2397848516702652, "learning_rate": 0.0004995684285006139, "loss": 1.9801, "step": 496 }, { "epoch": 0.04853515625, "grad_norm": 0.2205589860677719, "learning_rate": 0.0004995640149020346, "loss": 2.0088, "step": 497 }, { "epoch": 0.0486328125, "grad_norm": 0.23892101645469666, "learning_rate": 0.0004995595788714995, "loss": 1.9888, "step": 498 }, { "epoch": 0.04873046875, "grad_norm": 0.2999497354030609, "learning_rate": 0.000499555120409452, "loss": 1.9896, "step": 499 }, { "epoch": 0.048828125, "grad_norm": 0.3309103548526764, "learning_rate": 0.0004995506395163372, "loss": 1.9394, "step": 500 }, { "epoch": 0.04892578125, "grad_norm": 0.36243122816085815, "learning_rate": 0.000499546136192603, "loss": 1.9604, "step": 
501 }, { "epoch": 0.0490234375, "grad_norm": 0.3052852153778076, "learning_rate": 0.0004995416104386991, "loss": 1.9887, "step": 502 }, { "epoch": 0.04912109375, "grad_norm": 0.2957259714603424, "learning_rate": 0.0004995370622550775, "loss": 1.9876, "step": 503 }, { "epoch": 0.04921875, "grad_norm": 0.3991003632545471, "learning_rate": 0.0004995324916421926, "loss": 1.9932, "step": 504 }, { "epoch": 0.04931640625, "grad_norm": 0.27617356181144714, "learning_rate": 0.000499527898600501, "loss": 1.9387, "step": 505 }, { "epoch": 0.0494140625, "grad_norm": 0.2820669412612915, "learning_rate": 0.0004995232831304614, "loss": 2.012, "step": 506 }, { "epoch": 0.04951171875, "grad_norm": 0.3026665449142456, "learning_rate": 0.0004995186452325351, "loss": 1.9709, "step": 507 }, { "epoch": 0.049609375, "grad_norm": 0.2331731766462326, "learning_rate": 0.000499513984907185, "loss": 1.9617, "step": 508 }, { "epoch": 0.04970703125, "grad_norm": 0.2791639268398285, "learning_rate": 0.0004995093021548768, "loss": 1.9635, "step": 509 }, { "epoch": 0.0498046875, "grad_norm": 0.257519006729126, "learning_rate": 0.0004995045969760785, "loss": 1.8908, "step": 510 }, { "epoch": 0.04990234375, "grad_norm": 0.26278433203697205, "learning_rate": 0.0004994998693712598, "loss": 1.9418, "step": 511 }, { "epoch": 0.05, "grad_norm": 0.2534024715423584, "learning_rate": 0.0004994951193408929, "loss": 1.9821, "step": 512 }, { "epoch": 0.05009765625, "grad_norm": 0.272348552942276, "learning_rate": 0.0004994903468854527, "loss": 1.982, "step": 513 }, { "epoch": 0.0501953125, "grad_norm": 0.23883765935897827, "learning_rate": 0.0004994855520054154, "loss": 2.006, "step": 514 }, { "epoch": 0.05029296875, "grad_norm": 0.25627830624580383, "learning_rate": 0.0004994807347012603, "loss": 2.0008, "step": 515 }, { "epoch": 0.050390625, "grad_norm": 0.33997049927711487, "learning_rate": 0.0004994758949734686, "loss": 2.0339, "step": 516 }, { "epoch": 0.05048828125, "grad_norm": 0.4115971028804779, "learning_rate": 0.0004994710328225236, "loss": 1.9982, "step": 517 }, { "epoch": 0.0505859375, "grad_norm": 0.4417625069618225, "learning_rate": 0.000499466148248911, "loss": 1.9742, "step": 518 }, { "epoch": 0.05068359375, "grad_norm": 0.4123833477497101, "learning_rate": 0.0004994612412531189, "loss": 2.0148, "step": 519 }, { "epoch": 0.05078125, "grad_norm": 0.2234133630990982, "learning_rate": 0.0004994563118356373, "loss": 1.9679, "step": 520 }, { "epoch": 0.05087890625, "grad_norm": 0.35439473390579224, "learning_rate": 0.0004994513599969586, "loss": 1.9602, "step": 521 }, { "epoch": 0.0509765625, "grad_norm": 0.32346996665000916, "learning_rate": 0.0004994463857375776, "loss": 2.0106, "step": 522 }, { "epoch": 0.05107421875, "grad_norm": 0.24506594240665436, "learning_rate": 0.000499441389057991, "loss": 1.993, "step": 523 }, { "epoch": 0.051171875, "grad_norm": 0.26808494329452515, "learning_rate": 0.000499436369958698, "loss": 1.9764, "step": 524 }, { "epoch": 0.05126953125, "grad_norm": 0.21936193108558655, "learning_rate": 0.0004994313284401999, "loss": 1.9619, "step": 525 }, { "epoch": 0.0513671875, "grad_norm": 0.2545083165168762, "learning_rate": 0.0004994262645030005, "loss": 1.9825, "step": 526 }, { "epoch": 0.05146484375, "grad_norm": 0.26330727338790894, "learning_rate": 0.0004994211781476055, "loss": 1.9512, "step": 527 }, { "epoch": 0.0515625, "grad_norm": 0.2708059549331665, "learning_rate": 0.0004994160693745229, "loss": 1.9761, "step": 528 }, { "epoch": 0.05166015625, "grad_norm": 0.2882954478263855, 
"learning_rate": 0.0004994109381842632, "loss": 2.0223, "step": 529 }, { "epoch": 0.0517578125, "grad_norm": 0.25243133306503296, "learning_rate": 0.0004994057845773389, "loss": 1.976, "step": 530 }, { "epoch": 0.05185546875, "grad_norm": 0.23666390776634216, "learning_rate": 0.0004994006085542648, "loss": 1.9729, "step": 531 }, { "epoch": 0.051953125, "grad_norm": 0.24242763221263885, "learning_rate": 0.0004993954101155578, "loss": 1.9452, "step": 532 }, { "epoch": 0.05205078125, "grad_norm": 0.259750634431839, "learning_rate": 0.0004993901892617373, "loss": 1.9582, "step": 533 }, { "epoch": 0.0521484375, "grad_norm": 0.28516751527786255, "learning_rate": 0.0004993849459933249, "loss": 1.9367, "step": 534 }, { "epoch": 0.05224609375, "grad_norm": 0.26534441113471985, "learning_rate": 0.0004993796803108442, "loss": 2.0137, "step": 535 }, { "epoch": 0.05234375, "grad_norm": 0.28771716356277466, "learning_rate": 0.0004993743922148213, "loss": 1.927, "step": 536 }, { "epoch": 0.05244140625, "grad_norm": 0.2747785449028015, "learning_rate": 0.0004993690817057844, "loss": 1.9729, "step": 537 }, { "epoch": 0.0525390625, "grad_norm": 0.29758942127227783, "learning_rate": 0.0004993637487842639, "loss": 1.9713, "step": 538 }, { "epoch": 0.05263671875, "grad_norm": 0.3059535026550293, "learning_rate": 0.0004993583934507927, "loss": 2.0313, "step": 539 }, { "epoch": 0.052734375, "grad_norm": 0.36820656061172485, "learning_rate": 0.0004993530157059056, "loss": 1.9476, "step": 540 }, { "epoch": 0.05283203125, "grad_norm": 0.39971432089805603, "learning_rate": 0.0004993476155501396, "loss": 2.0238, "step": 541 }, { "epoch": 0.0529296875, "grad_norm": 0.3369678854942322, "learning_rate": 0.0004993421929840346, "loss": 1.9502, "step": 542 }, { "epoch": 0.05302734375, "grad_norm": 0.3733840882778168, "learning_rate": 0.000499336748008132, "loss": 2.0114, "step": 543 }, { "epoch": 0.053125, "grad_norm": 0.3251579999923706, "learning_rate": 0.0004993312806229757, "loss": 1.964, "step": 544 }, { "epoch": 0.05322265625, "grad_norm": 0.4093327522277832, "learning_rate": 0.0004993257908291117, "loss": 1.9853, "step": 545 }, { "epoch": 0.0533203125, "grad_norm": 0.26201343536376953, "learning_rate": 0.0004993202786270888, "loss": 2.0297, "step": 546 }, { "epoch": 0.05341796875, "grad_norm": 0.27842357754707336, "learning_rate": 0.0004993147440174572, "loss": 2.003, "step": 547 }, { "epoch": 0.053515625, "grad_norm": 0.2930687367916107, "learning_rate": 0.00049930918700077, "loss": 1.9634, "step": 548 }, { "epoch": 0.05361328125, "grad_norm": 0.2799973785877228, "learning_rate": 0.0004993036075775821, "loss": 2.0023, "step": 549 }, { "epoch": 0.0537109375, "grad_norm": 0.35084354877471924, "learning_rate": 0.000499298005748451, "loss": 2.0508, "step": 550 }, { "epoch": 0.05380859375, "grad_norm": 0.2895161211490631, "learning_rate": 0.0004992923815139362, "loss": 2.0074, "step": 551 }, { "epoch": 0.05390625, "grad_norm": 0.2637045979499817, "learning_rate": 0.0004992867348745997, "loss": 1.9559, "step": 552 }, { "epoch": 0.05400390625, "grad_norm": 0.2837629020214081, "learning_rate": 0.0004992810658310052, "loss": 1.9603, "step": 553 }, { "epoch": 0.0541015625, "grad_norm": 0.2644243836402893, "learning_rate": 0.0004992753743837193, "loss": 1.9767, "step": 554 }, { "epoch": 0.05419921875, "grad_norm": 0.21815134584903717, "learning_rate": 0.0004992696605333103, "loss": 1.9684, "step": 555 }, { "epoch": 0.054296875, "grad_norm": 0.28167852759361267, "learning_rate": 0.0004992639242803492, "loss": 1.97, "step": 556 
}, { "epoch": 0.05439453125, "grad_norm": 0.26942357420921326, "learning_rate": 0.0004992581656254087, "loss": 1.9825, "step": 557 }, { "epoch": 0.0544921875, "grad_norm": 0.25316697359085083, "learning_rate": 0.0004992523845690644, "loss": 1.9698, "step": 558 }, { "epoch": 0.05458984375, "grad_norm": 0.29587239027023315, "learning_rate": 0.0004992465811118934, "loss": 2.0063, "step": 559 }, { "epoch": 0.0546875, "grad_norm": 0.3209396004676819, "learning_rate": 0.0004992407552544757, "loss": 2.0002, "step": 560 }, { "epoch": 0.05478515625, "grad_norm": 0.2541654109954834, "learning_rate": 0.0004992349069973931, "loss": 2.0039, "step": 561 }, { "epoch": 0.0548828125, "grad_norm": 0.34102505445480347, "learning_rate": 0.0004992290363412298, "loss": 1.9181, "step": 562 }, { "epoch": 0.05498046875, "grad_norm": 0.36123788356781006, "learning_rate": 0.0004992231432865723, "loss": 2.042, "step": 563 }, { "epoch": 0.055078125, "grad_norm": 0.27344051003456116, "learning_rate": 0.0004992172278340093, "loss": 2.0236, "step": 564 }, { "epoch": 0.05517578125, "grad_norm": 0.34781453013420105, "learning_rate": 0.0004992112899841315, "loss": 2.0017, "step": 565 }, { "epoch": 0.0552734375, "grad_norm": 0.2822319567203522, "learning_rate": 0.0004992053297375322, "loss": 1.9607, "step": 566 }, { "epoch": 0.05537109375, "grad_norm": 0.27945882081985474, "learning_rate": 0.0004991993470948066, "loss": 1.9707, "step": 567 }, { "epoch": 0.05546875, "grad_norm": 0.31535595655441284, "learning_rate": 0.0004991933420565527, "loss": 1.9708, "step": 568 }, { "epoch": 0.05556640625, "grad_norm": 0.2658466398715973, "learning_rate": 0.00049918731462337, "loss": 1.9763, "step": 569 }, { "epoch": 0.0556640625, "grad_norm": 0.3618657886981964, "learning_rate": 0.0004991812647958607, "loss": 2.0154, "step": 570 }, { "epoch": 0.05576171875, "grad_norm": 0.34552058577537537, "learning_rate": 0.000499175192574629, "loss": 1.978, "step": 571 }, { "epoch": 0.055859375, "grad_norm": 0.32903313636779785, "learning_rate": 0.0004991690979602817, "loss": 1.9762, "step": 572 }, { "epoch": 0.05595703125, "grad_norm": 0.30814382433891296, "learning_rate": 0.0004991629809534275, "loss": 1.9944, "step": 573 }, { "epoch": 0.0560546875, "grad_norm": 0.21156492829322815, "learning_rate": 0.0004991568415546775, "loss": 1.9919, "step": 574 }, { "epoch": 0.05615234375, "grad_norm": 0.32283446192741394, "learning_rate": 0.0004991506797646446, "loss": 1.9697, "step": 575 }, { "epoch": 0.05625, "grad_norm": 0.2915102541446686, "learning_rate": 0.0004991444955839447, "loss": 1.9736, "step": 576 }, { "epoch": 0.05634765625, "grad_norm": 0.28996148705482483, "learning_rate": 0.0004991382890131955, "loss": 2.002, "step": 577 }, { "epoch": 0.0564453125, "grad_norm": 0.3859401345252991, "learning_rate": 0.0004991320600530168, "loss": 1.9753, "step": 578 }, { "epoch": 0.05654296875, "grad_norm": 0.28169432282447815, "learning_rate": 0.0004991258087040312, "loss": 1.9798, "step": 579 }, { "epoch": 0.056640625, "grad_norm": 0.30669984221458435, "learning_rate": 0.0004991195349668626, "loss": 2.0007, "step": 580 }, { "epoch": 0.05673828125, "grad_norm": 0.26832297444343567, "learning_rate": 0.000499113238842138, "loss": 2.0146, "step": 581 }, { "epoch": 0.0568359375, "grad_norm": 0.25695785880088806, "learning_rate": 0.0004991069203304865, "loss": 1.9814, "step": 582 }, { "epoch": 0.05693359375, "grad_norm": 0.25700533390045166, "learning_rate": 0.0004991005794325389, "loss": 1.9623, "step": 583 }, { "epoch": 0.05703125, "grad_norm": 
0.20786869525909424, "learning_rate": 0.0004990942161489288, "loss": 1.9728, "step": 584 }, { "epoch": 0.05712890625, "grad_norm": 0.23988646268844604, "learning_rate": 0.0004990878304802918, "loss": 1.9749, "step": 585 }, { "epoch": 0.0572265625, "grad_norm": 0.2418268769979477, "learning_rate": 0.0004990814224272658, "loss": 1.928, "step": 586 }, { "epoch": 0.05732421875, "grad_norm": 0.27739882469177246, "learning_rate": 0.0004990749919904909, "loss": 1.964, "step": 587 }, { "epoch": 0.057421875, "grad_norm": 0.2773842513561249, "learning_rate": 0.0004990685391706094, "loss": 1.9589, "step": 588 }, { "epoch": 0.05751953125, "grad_norm": 0.24483682215213776, "learning_rate": 0.0004990620639682659, "loss": 1.9965, "step": 589 }, { "epoch": 0.0576171875, "grad_norm": 0.27345889806747437, "learning_rate": 0.0004990555663841071, "loss": 1.9444, "step": 590 }, { "epoch": 0.05771484375, "grad_norm": 0.26006460189819336, "learning_rate": 0.0004990490464187824, "loss": 1.9391, "step": 591 }, { "epoch": 0.0578125, "grad_norm": 0.1935306191444397, "learning_rate": 0.0004990425040729427, "loss": 1.9679, "step": 592 }, { "epoch": 0.05791015625, "grad_norm": 0.26823100447654724, "learning_rate": 0.0004990359393472418, "loss": 2.0055, "step": 593 }, { "epoch": 0.0580078125, "grad_norm": 0.2706959545612335, "learning_rate": 0.0004990293522423352, "loss": 1.9853, "step": 594 }, { "epoch": 0.05810546875, "grad_norm": 0.2099694162607193, "learning_rate": 0.0004990227427588811, "loss": 1.959, "step": 595 }, { "epoch": 0.058203125, "grad_norm": 0.22345881164073944, "learning_rate": 0.0004990161108975398, "loss": 1.9867, "step": 596 }, { "epoch": 0.05830078125, "grad_norm": 0.34050315618515015, "learning_rate": 0.0004990094566589734, "loss": 1.9555, "step": 597 }, { "epoch": 0.0583984375, "grad_norm": 0.5298358798027039, "learning_rate": 0.0004990027800438468, "loss": 1.9824, "step": 598 }, { "epoch": 0.05849609375, "grad_norm": 0.6408056616783142, "learning_rate": 0.0004989960810528271, "loss": 1.9647, "step": 599 }, { "epoch": 0.05859375, "grad_norm": 0.4058885872364044, "learning_rate": 0.0004989893596865833, "loss": 1.9564, "step": 600 }, { "epoch": 0.05869140625, "grad_norm": 0.2911423146724701, "learning_rate": 0.0004989826159457869, "loss": 1.9485, "step": 601 }, { "epoch": 0.0587890625, "grad_norm": 0.40600210428237915, "learning_rate": 0.0004989758498311114, "loss": 1.993, "step": 602 }, { "epoch": 0.05888671875, "grad_norm": 0.3920167088508606, "learning_rate": 0.0004989690613432327, "loss": 2.0188, "step": 603 }, { "epoch": 0.058984375, "grad_norm": 0.2924116551876068, "learning_rate": 0.0004989622504828291, "loss": 1.9749, "step": 604 }, { "epoch": 0.05908203125, "grad_norm": 0.33497628569602966, "learning_rate": 0.0004989554172505807, "loss": 1.9358, "step": 605 }, { "epoch": 0.0591796875, "grad_norm": 0.31751033663749695, "learning_rate": 0.0004989485616471702, "loss": 1.9886, "step": 606 }, { "epoch": 0.05927734375, "grad_norm": 0.2204602211713791, "learning_rate": 0.0004989416836732825, "loss": 1.9715, "step": 607 }, { "epoch": 0.059375, "grad_norm": 0.2980511784553528, "learning_rate": 0.0004989347833296044, "loss": 1.9479, "step": 608 }, { "epoch": 0.05947265625, "grad_norm": 0.2874164879322052, "learning_rate": 0.0004989278606168253, "loss": 1.9717, "step": 609 }, { "epoch": 0.0595703125, "grad_norm": 0.2526591718196869, "learning_rate": 0.0004989209155356368, "loss": 2.0009, "step": 610 }, { "epoch": 0.05966796875, "grad_norm": 0.2890442907810211, "learning_rate": 0.0004989139480867327, 
"loss": 1.9862, "step": 611 }, { "epoch": 0.059765625, "grad_norm": 0.26463401317596436, "learning_rate": 0.0004989069582708088, "loss": 2.0143, "step": 612 }, { "epoch": 0.05986328125, "grad_norm": 0.19906644523143768, "learning_rate": 0.0004988999460885634, "loss": 1.9845, "step": 613 }, { "epoch": 0.0599609375, "grad_norm": 0.28752684593200684, "learning_rate": 0.000498892911540697, "loss": 1.9421, "step": 614 }, { "epoch": 0.06005859375, "grad_norm": 0.2739415466785431, "learning_rate": 0.0004988858546279123, "loss": 1.967, "step": 615 }, { "epoch": 0.06015625, "grad_norm": 0.27040478587150574, "learning_rate": 0.000498878775350914, "loss": 2.0001, "step": 616 }, { "epoch": 0.06025390625, "grad_norm": 0.28345200419425964, "learning_rate": 0.0004988716737104096, "loss": 1.9637, "step": 617 }, { "epoch": 0.0603515625, "grad_norm": 0.2515316903591156, "learning_rate": 0.0004988645497071082, "loss": 1.9511, "step": 618 }, { "epoch": 0.06044921875, "grad_norm": 0.22784312069416046, "learning_rate": 0.0004988574033417216, "loss": 1.9752, "step": 619 }, { "epoch": 0.060546875, "grad_norm": 0.23128612339496613, "learning_rate": 0.0004988502346149636, "loss": 1.9803, "step": 620 }, { "epoch": 0.06064453125, "grad_norm": 0.22022667527198792, "learning_rate": 0.0004988430435275503, "loss": 1.9592, "step": 621 }, { "epoch": 0.0607421875, "grad_norm": 0.2470719963312149, "learning_rate": 0.0004988358300802, "loss": 2.0063, "step": 622 }, { "epoch": 0.06083984375, "grad_norm": 0.22507692873477936, "learning_rate": 0.0004988285942736332, "loss": 1.9873, "step": 623 }, { "epoch": 0.0609375, "grad_norm": 0.2285120040178299, "learning_rate": 0.0004988213361085729, "loss": 1.973, "step": 624 }, { "epoch": 0.06103515625, "grad_norm": 0.2436297982931137, "learning_rate": 0.000498814055585744, "loss": 1.9786, "step": 625 }, { "epoch": 0.0611328125, "grad_norm": 0.2736089825630188, "learning_rate": 0.0004988067527058737, "loss": 1.9552, "step": 626 }, { "epoch": 0.06123046875, "grad_norm": 0.2813490033149719, "learning_rate": 0.0004987994274696917, "loss": 2.0173, "step": 627 }, { "epoch": 0.061328125, "grad_norm": 0.3024250864982605, "learning_rate": 0.0004987920798779294, "loss": 1.9232, "step": 628 }, { "epoch": 0.06142578125, "grad_norm": 0.22795617580413818, "learning_rate": 0.000498784709931321, "loss": 1.9981, "step": 629 }, { "epoch": 0.0615234375, "grad_norm": 0.2585217356681824, "learning_rate": 0.0004987773176306026, "loss": 2.0197, "step": 630 }, { "epoch": 0.06162109375, "grad_norm": 0.27120688557624817, "learning_rate": 0.0004987699029765127, "loss": 1.961, "step": 631 }, { "epoch": 0.06171875, "grad_norm": 0.23527094721794128, "learning_rate": 0.000498762465969792, "loss": 1.9996, "step": 632 }, { "epoch": 0.06181640625, "grad_norm": 0.2519530653953552, "learning_rate": 0.0004987550066111832, "loss": 1.9926, "step": 633 }, { "epoch": 0.0619140625, "grad_norm": 0.3199564218521118, "learning_rate": 0.0004987475249014315, "loss": 2.0066, "step": 634 }, { "epoch": 0.06201171875, "grad_norm": 0.3830641806125641, "learning_rate": 0.0004987400208412843, "loss": 1.9368, "step": 635 }, { "epoch": 0.062109375, "grad_norm": 0.3777483403682709, "learning_rate": 0.0004987324944314913, "loss": 1.9792, "step": 636 }, { "epoch": 0.06220703125, "grad_norm": 0.420107901096344, "learning_rate": 0.0004987249456728041, "loss": 1.9765, "step": 637 }, { "epoch": 0.0623046875, "grad_norm": 0.3748738467693329, "learning_rate": 0.0004987173745659768, "loss": 1.9826, "step": 638 }, { "epoch": 0.06240234375, "grad_norm": 
0.3142974078655243, "learning_rate": 0.0004987097811117658, "loss": 1.9393, "step": 639 }, { "epoch": 0.0625, "grad_norm": 0.24331647157669067, "learning_rate": 0.0004987021653109296, "loss": 1.9305, "step": 640 }, { "epoch": 0.06259765625, "grad_norm": 0.2792535424232483, "learning_rate": 0.0004986945271642289, "loss": 1.9437, "step": 641 }, { "epoch": 0.0626953125, "grad_norm": 0.24984711408615112, "learning_rate": 0.0004986868666724267, "loss": 1.9838, "step": 642 }, { "epoch": 0.06279296875, "grad_norm": 0.2477482557296753, "learning_rate": 0.0004986791838362881, "loss": 1.9981, "step": 643 }, { "epoch": 0.062890625, "grad_norm": 0.3012976348400116, "learning_rate": 0.0004986714786565807, "loss": 1.9963, "step": 644 }, { "epoch": 0.06298828125, "grad_norm": 0.2480735331773758, "learning_rate": 0.0004986637511340742, "loss": 1.9567, "step": 645 }, { "epoch": 0.0630859375, "grad_norm": 0.20119979977607727, "learning_rate": 0.0004986560012695403, "loss": 2.0012, "step": 646 }, { "epoch": 0.06318359375, "grad_norm": 0.27725833654403687, "learning_rate": 0.0004986482290637535, "loss": 1.9751, "step": 647 }, { "epoch": 0.06328125, "grad_norm": 0.27348190546035767, "learning_rate": 0.0004986404345174898, "loss": 1.976, "step": 648 }, { "epoch": 0.06337890625, "grad_norm": 0.29639676213264465, "learning_rate": 0.000498632617631528, "loss": 1.9835, "step": 649 }, { "epoch": 0.0634765625, "grad_norm": 0.2911996841430664, "learning_rate": 0.000498624778406649, "loss": 2.0107, "step": 650 }, { "epoch": 0.06357421875, "grad_norm": 0.30200451612472534, "learning_rate": 0.0004986169168436355, "loss": 1.9433, "step": 651 }, { "epoch": 0.063671875, "grad_norm": 0.288396954536438, "learning_rate": 0.0004986090329432734, "loss": 1.9759, "step": 652 }, { "epoch": 0.06376953125, "grad_norm": 0.26006969809532166, "learning_rate": 0.0004986011267063497, "loss": 1.977, "step": 653 }, { "epoch": 0.0638671875, "grad_norm": 0.2467355877161026, "learning_rate": 0.0004985931981336544, "loss": 1.9892, "step": 654 }, { "epoch": 0.06396484375, "grad_norm": 0.2180376946926117, "learning_rate": 0.0004985852472259797, "loss": 2.0012, "step": 655 }, { "epoch": 0.0640625, "grad_norm": 0.23811106383800507, "learning_rate": 0.0004985772739841193, "loss": 1.969, "step": 656 }, { "epoch": 0.06416015625, "grad_norm": 0.2517051696777344, "learning_rate": 0.00049856927840887, "loss": 1.9607, "step": 657 }, { "epoch": 0.0642578125, "grad_norm": 0.2900746464729309, "learning_rate": 0.0004985612605010305, "loss": 1.9865, "step": 658 }, { "epoch": 0.06435546875, "grad_norm": 0.2779560983181, "learning_rate": 0.0004985532202614017, "loss": 1.9787, "step": 659 }, { "epoch": 0.064453125, "grad_norm": 0.2686167061328888, "learning_rate": 0.0004985451576907865, "loss": 1.9279, "step": 660 }, { "epoch": 0.06455078125, "grad_norm": 0.25136813521385193, "learning_rate": 0.0004985370727899907, "loss": 1.9502, "step": 661 }, { "epoch": 0.0646484375, "grad_norm": 0.26508840918540955, "learning_rate": 0.0004985289655598216, "loss": 1.9588, "step": 662 }, { "epoch": 0.06474609375, "grad_norm": 0.2178860753774643, "learning_rate": 0.000498520836001089, "loss": 1.9445, "step": 663 }, { "epoch": 0.06484375, "grad_norm": 0.21833141148090363, "learning_rate": 0.0004985126841146052, "loss": 1.9963, "step": 664 }, { "epoch": 0.06494140625, "grad_norm": 0.21572785079479218, "learning_rate": 0.0004985045099011844, "loss": 1.9789, "step": 665 }, { "epoch": 0.0650390625, "grad_norm": 0.23386414349079132, "learning_rate": 0.0004984963133616432, "loss": 
1.9829, "step": 666 }, { "epoch": 0.06513671875, "grad_norm": 0.25145047903060913, "learning_rate": 0.0004984880944968003, "loss": 2.0106, "step": 667 }, { "epoch": 0.065234375, "grad_norm": 0.23240795731544495, "learning_rate": 0.0004984798533074767, "loss": 1.999, "step": 668 }, { "epoch": 0.06533203125, "grad_norm": 0.22223520278930664, "learning_rate": 0.0004984715897944954, "loss": 1.9872, "step": 669 }, { "epoch": 0.0654296875, "grad_norm": 0.2768714725971222, "learning_rate": 0.0004984633039586823, "loss": 1.9656, "step": 670 }, { "epoch": 0.06552734375, "grad_norm": 0.35761523246765137, "learning_rate": 0.0004984549958008646, "loss": 1.9984, "step": 671 }, { "epoch": 0.065625, "grad_norm": 0.4054514169692993, "learning_rate": 0.0004984466653218726, "loss": 1.9406, "step": 672 }, { "epoch": 0.06572265625, "grad_norm": 0.40501922369003296, "learning_rate": 0.0004984383125225383, "loss": 1.9682, "step": 673 }, { "epoch": 0.0658203125, "grad_norm": 0.31225842237472534, "learning_rate": 0.0004984299374036961, "loss": 1.9833, "step": 674 }, { "epoch": 0.06591796875, "grad_norm": 0.24159114062786102, "learning_rate": 0.0004984215399661825, "loss": 1.9634, "step": 675 }, { "epoch": 0.066015625, "grad_norm": 0.2537291944026947, "learning_rate": 0.0004984131202108364, "loss": 1.9563, "step": 676 }, { "epoch": 0.06611328125, "grad_norm": 0.29919305443763733, "learning_rate": 0.0004984046781384988, "loss": 1.9594, "step": 677 }, { "epoch": 0.0662109375, "grad_norm": 0.28458645939826965, "learning_rate": 0.0004983962137500133, "loss": 1.9313, "step": 678 }, { "epoch": 0.06630859375, "grad_norm": 0.2523960471153259, "learning_rate": 0.0004983877270462249, "loss": 1.9616, "step": 679 }, { "epoch": 0.06640625, "grad_norm": 0.2605716288089752, "learning_rate": 0.0004983792180279816, "loss": 1.9631, "step": 680 }, { "epoch": 0.06650390625, "grad_norm": 0.280000239610672, "learning_rate": 0.0004983706866961335, "loss": 1.9945, "step": 681 }, { "epoch": 0.0666015625, "grad_norm": 0.2881770730018616, "learning_rate": 0.0004983621330515326, "loss": 1.9639, "step": 682 }, { "epoch": 0.06669921875, "grad_norm": 0.27240094542503357, "learning_rate": 0.0004983535570950334, "loss": 1.9226, "step": 683 }, { "epoch": 0.066796875, "grad_norm": 0.2580428123474121, "learning_rate": 0.0004983449588274926, "loss": 1.9625, "step": 684 }, { "epoch": 0.06689453125, "grad_norm": 0.2719865143299103, "learning_rate": 0.0004983363382497692, "loss": 1.9941, "step": 685 }, { "epoch": 0.0669921875, "grad_norm": 0.2593652904033661, "learning_rate": 0.000498327695362724, "loss": 1.9558, "step": 686 }, { "epoch": 0.06708984375, "grad_norm": 0.2575826346874237, "learning_rate": 0.0004983190301672207, "loss": 2.0188, "step": 687 }, { "epoch": 0.0671875, "grad_norm": 0.23726949095726013, "learning_rate": 0.0004983103426641246, "loss": 1.9568, "step": 688 }, { "epoch": 0.06728515625, "grad_norm": 0.3027707040309906, "learning_rate": 0.0004983016328543037, "loss": 1.9708, "step": 689 }, { "epoch": 0.0673828125, "grad_norm": 0.29431256651878357, "learning_rate": 0.0004982929007386279, "loss": 1.9668, "step": 690 }, { "epoch": 0.06748046875, "grad_norm": 0.24073566496372223, "learning_rate": 0.0004982841463179694, "loss": 1.9746, "step": 691 }, { "epoch": 0.067578125, "grad_norm": 0.27540677785873413, "learning_rate": 0.000498275369593203, "loss": 1.868, "step": 692 }, { "epoch": 0.06767578125, "grad_norm": 0.2665644884109497, "learning_rate": 0.0004982665705652052, "loss": 1.9528, "step": 693 }, { "epoch": 0.0677734375, "grad_norm": 
0.2117265909910202, "learning_rate": 0.0004982577492348549, "loss": 1.9977, "step": 694 }, { "epoch": 0.06787109375, "grad_norm": 0.30092543363571167, "learning_rate": 0.0004982489056030334, "loss": 1.9636, "step": 695 }, { "epoch": 0.06796875, "grad_norm": 0.2956363558769226, "learning_rate": 0.0004982400396706239, "loss": 1.9519, "step": 696 }, { "epoch": 0.06806640625, "grad_norm": 0.3209246098995209, "learning_rate": 0.0004982311514385124, "loss": 1.9761, "step": 697 }, { "epoch": 0.0681640625, "grad_norm": 0.4527253806591034, "learning_rate": 0.0004982222409075863, "loss": 1.9405, "step": 698 }, { "epoch": 0.06826171875, "grad_norm": 0.4838014245033264, "learning_rate": 0.000498213308078736, "loss": 1.9434, "step": 699 }, { "epoch": 0.068359375, "grad_norm": 0.4865509569644928, "learning_rate": 0.0004982043529528536, "loss": 1.9706, "step": 700 }, { "epoch": 0.06845703125, "grad_norm": 0.38092029094696045, "learning_rate": 0.0004981953755308339, "loss": 1.9168, "step": 701 }, { "epoch": 0.0685546875, "grad_norm": 0.24092066287994385, "learning_rate": 0.0004981863758135735, "loss": 1.9693, "step": 702 }, { "epoch": 0.06865234375, "grad_norm": 0.3893338143825531, "learning_rate": 0.0004981773538019714, "loss": 1.9812, "step": 703 }, { "epoch": 0.06875, "grad_norm": 0.3019930422306061, "learning_rate": 0.0004981683094969287, "loss": 1.9207, "step": 704 }, { "epoch": 0.06884765625, "grad_norm": 0.2746503949165344, "learning_rate": 0.0004981592428993492, "loss": 1.9416, "step": 705 }, { "epoch": 0.0689453125, "grad_norm": 0.2998049557209015, "learning_rate": 0.0004981501540101382, "loss": 1.9685, "step": 706 }, { "epoch": 0.06904296875, "grad_norm": 0.2986246645450592, "learning_rate": 0.0004981410428302037, "loss": 1.9827, "step": 707 }, { "epoch": 0.069140625, "grad_norm": 0.28714433312416077, "learning_rate": 0.000498131909360456, "loss": 1.904, "step": 708 }, { "epoch": 0.06923828125, "grad_norm": 0.21930216252803802, "learning_rate": 0.0004981227536018073, "loss": 1.966, "step": 709 }, { "epoch": 0.0693359375, "grad_norm": 0.2309812754392624, "learning_rate": 0.0004981135755551722, "loss": 1.9983, "step": 710 }, { "epoch": 0.06943359375, "grad_norm": 0.2016872614622116, "learning_rate": 0.0004981043752214674, "loss": 1.9382, "step": 711 }, { "epoch": 0.06953125, "grad_norm": 0.235287606716156, "learning_rate": 0.0004980951526016122, "loss": 1.9914, "step": 712 }, { "epoch": 0.06962890625, "grad_norm": 0.23780131340026855, "learning_rate": 0.0004980859076965276, "loss": 1.9404, "step": 713 }, { "epoch": 0.0697265625, "grad_norm": 0.2195412665605545, "learning_rate": 0.0004980766405071372, "loss": 1.9526, "step": 714 }, { "epoch": 0.06982421875, "grad_norm": 0.2391882836818695, "learning_rate": 0.0004980673510343668, "loss": 1.9179, "step": 715 }, { "epoch": 0.069921875, "grad_norm": 0.27623385190963745, "learning_rate": 0.000498058039279144, "loss": 1.9519, "step": 716 }, { "epoch": 0.07001953125, "grad_norm": 0.27679604291915894, "learning_rate": 0.0004980487052423994, "loss": 2.0292, "step": 717 }, { "epoch": 0.0701171875, "grad_norm": 0.2968669533729553, "learning_rate": 0.0004980393489250651, "loss": 1.9162, "step": 718 }, { "epoch": 0.07021484375, "grad_norm": 0.2479199320077896, "learning_rate": 0.0004980299703280758, "loss": 1.9544, "step": 719 }, { "epoch": 0.0703125, "grad_norm": 0.2597293555736542, "learning_rate": 0.0004980205694523682, "loss": 1.9273, "step": 720 }, { "epoch": 0.07041015625, "grad_norm": 0.23868051171302795, "learning_rate": 0.0004980111462988817, "loss": 
1.9527, "step": 721 }, { "epoch": 0.0705078125, "grad_norm": 0.28150075674057007, "learning_rate": 0.0004980017008685573, "loss": 1.9523, "step": 722 }, { "epoch": 0.07060546875, "grad_norm": 0.3689873516559601, "learning_rate": 0.0004979922331623387, "loss": 1.9167, "step": 723 }, { "epoch": 0.070703125, "grad_norm": 0.380206823348999, "learning_rate": 0.0004979827431811713, "loss": 1.9025, "step": 724 }, { "epoch": 0.07080078125, "grad_norm": 0.2919938266277313, "learning_rate": 0.0004979732309260036, "loss": 1.9996, "step": 725 }, { "epoch": 0.0708984375, "grad_norm": 0.302680104970932, "learning_rate": 0.0004979636963977853, "loss": 1.95, "step": 726 }, { "epoch": 0.07099609375, "grad_norm": 0.25307831168174744, "learning_rate": 0.0004979541395974691, "loss": 1.9723, "step": 727 }, { "epoch": 0.07109375, "grad_norm": 0.304147869348526, "learning_rate": 0.0004979445605260096, "loss": 2.0359, "step": 728 }, { "epoch": 0.07119140625, "grad_norm": 0.35432276129722595, "learning_rate": 0.0004979349591843635, "loss": 1.9384, "step": 729 }, { "epoch": 0.0712890625, "grad_norm": 0.23866906762123108, "learning_rate": 0.00049792533557349, "loss": 1.9341, "step": 730 }, { "epoch": 0.07138671875, "grad_norm": 0.30183711647987366, "learning_rate": 0.0004979156896943504, "loss": 1.9603, "step": 731 }, { "epoch": 0.071484375, "grad_norm": 0.30881768465042114, "learning_rate": 0.0004979060215479083, "loss": 1.982, "step": 732 }, { "epoch": 0.07158203125, "grad_norm": 0.2996484041213989, "learning_rate": 0.0004978963311351295, "loss": 1.9536, "step": 733 }, { "epoch": 0.0716796875, "grad_norm": 0.33594587445259094, "learning_rate": 0.0004978866184569818, "loss": 1.9813, "step": 734 }, { "epoch": 0.07177734375, "grad_norm": 0.30377864837646484, "learning_rate": 0.0004978768835144355, "loss": 1.9417, "step": 735 }, { "epoch": 0.071875, "grad_norm": 0.34858009219169617, "learning_rate": 0.0004978671263084631, "loss": 1.9368, "step": 736 }, { "epoch": 0.07197265625, "grad_norm": 0.28119683265686035, "learning_rate": 0.0004978573468400392, "loss": 1.9223, "step": 737 }, { "epoch": 0.0720703125, "grad_norm": 0.2478085160255432, "learning_rate": 0.0004978475451101408, "loss": 1.9744, "step": 738 }, { "epoch": 0.07216796875, "grad_norm": 0.2461906224489212, "learning_rate": 0.0004978377211197466, "loss": 1.961, "step": 739 }, { "epoch": 0.072265625, "grad_norm": 0.2495601922273636, "learning_rate": 0.0004978278748698385, "loss": 1.9602, "step": 740 }, { "epoch": 0.07236328125, "grad_norm": 0.3028644323348999, "learning_rate": 0.0004978180063613997, "loss": 1.95, "step": 741 }, { "epoch": 0.0724609375, "grad_norm": 0.3690338730812073, "learning_rate": 0.000497808115595416, "loss": 1.9625, "step": 742 }, { "epoch": 0.07255859375, "grad_norm": 0.3791324198246002, "learning_rate": 0.0004977982025728755, "loss": 1.9747, "step": 743 }, { "epoch": 0.07265625, "grad_norm": 0.29515188932418823, "learning_rate": 0.0004977882672947683, "loss": 1.9774, "step": 744 }, { "epoch": 0.07275390625, "grad_norm": 0.29207196831703186, "learning_rate": 0.000497778309762087, "loss": 1.9704, "step": 745 }, { "epoch": 0.0728515625, "grad_norm": 0.23698067665100098, "learning_rate": 0.0004977683299758262, "loss": 1.978, "step": 746 }, { "epoch": 0.07294921875, "grad_norm": 0.2712165415287018, "learning_rate": 0.0004977583279369827, "loss": 1.9524, "step": 747 }, { "epoch": 0.073046875, "grad_norm": 0.32161638140678406, "learning_rate": 0.0004977483036465558, "loss": 1.9532, "step": 748 }, { "epoch": 0.07314453125, "grad_norm": 
0.25917574763298035, "learning_rate": 0.0004977382571055467, "loss": 1.9307, "step": 749 }, { "epoch": 0.0732421875, "grad_norm": 0.27562615275382996, "learning_rate": 0.000497728188314959, "loss": 1.9842, "step": 750 }, { "epoch": 0.07333984375, "grad_norm": 0.23242716491222382, "learning_rate": 0.0004977180972757986, "loss": 1.9097, "step": 751 }, { "epoch": 0.0734375, "grad_norm": 0.3111313283443451, "learning_rate": 0.0004977079839890732, "loss": 1.9812, "step": 752 }, { "epoch": 0.07353515625, "grad_norm": 0.3295767605304718, "learning_rate": 0.0004976978484557933, "loss": 1.9206, "step": 753 }, { "epoch": 0.0736328125, "grad_norm": 0.3592395782470703, "learning_rate": 0.0004976876906769713, "loss": 1.9817, "step": 754 }, { "epoch": 0.07373046875, "grad_norm": 0.3530953824520111, "learning_rate": 0.0004976775106536218, "loss": 1.9831, "step": 755 }, { "epoch": 0.073828125, "grad_norm": 0.3064524829387665, "learning_rate": 0.0004976673083867617, "loss": 1.9207, "step": 756 }, { "epoch": 0.07392578125, "grad_norm": 0.281597763299942, "learning_rate": 0.0004976570838774102, "loss": 1.9781, "step": 757 }, { "epoch": 0.0740234375, "grad_norm": 0.296041876077652, "learning_rate": 0.0004976468371265886, "loss": 1.9708, "step": 758 }, { "epoch": 0.07412109375, "grad_norm": 0.3518696427345276, "learning_rate": 0.0004976365681353203, "loss": 1.9333, "step": 759 }, { "epoch": 0.07421875, "grad_norm": 0.21943148970603943, "learning_rate": 0.0004976262769046313, "loss": 1.9416, "step": 760 }, { "epoch": 0.07431640625, "grad_norm": 0.2540123760700226, "learning_rate": 0.0004976159634355495, "loss": 1.989, "step": 761 }, { "epoch": 0.0744140625, "grad_norm": 0.3120715320110321, "learning_rate": 0.0004976056277291053, "loss": 1.95, "step": 762 }, { "epoch": 0.07451171875, "grad_norm": 0.23797550797462463, "learning_rate": 0.0004975952697863308, "loss": 1.9534, "step": 763 }, { "epoch": 0.074609375, "grad_norm": 0.30815449357032776, "learning_rate": 0.000497584889608261, "loss": 2.0283, "step": 764 }, { "epoch": 0.07470703125, "grad_norm": 0.37625932693481445, "learning_rate": 0.0004975744871959327, "loss": 1.9658, "step": 765 }, { "epoch": 0.0748046875, "grad_norm": 0.27407678961753845, "learning_rate": 0.0004975640625503848, "loss": 1.9458, "step": 766 }, { "epoch": 0.07490234375, "grad_norm": 0.26577049493789673, "learning_rate": 0.000497553615672659, "loss": 1.9667, "step": 767 }, { "epoch": 0.075, "grad_norm": 0.29821062088012695, "learning_rate": 0.0004975431465637983, "loss": 1.9805, "step": 768 }, { "epoch": 0.07509765625, "grad_norm": 0.2582527995109558, "learning_rate": 0.0004975326552248491, "loss": 1.9904, "step": 769 }, { "epoch": 0.0751953125, "grad_norm": 0.2533649802207947, "learning_rate": 0.0004975221416568591, "loss": 1.9383, "step": 770 }, { "epoch": 0.07529296875, "grad_norm": 0.253592312335968, "learning_rate": 0.0004975116058608784, "loss": 1.9855, "step": 771 }, { "epoch": 0.075390625, "grad_norm": 0.2658921182155609, "learning_rate": 0.0004975010478379596, "loss": 1.9845, "step": 772 }, { "epoch": 0.07548828125, "grad_norm": 0.25861868262290955, "learning_rate": 0.0004974904675891573, "loss": 1.9776, "step": 773 }, { "epoch": 0.0755859375, "grad_norm": 0.272682249546051, "learning_rate": 0.0004974798651155284, "loss": 1.9357, "step": 774 }, { "epoch": 0.07568359375, "grad_norm": 0.2574002742767334, "learning_rate": 0.0004974692404181319, "loss": 1.9677, "step": 775 }, { "epoch": 0.07578125, "grad_norm": 0.3008319139480591, "learning_rate": 0.0004974585934980293, "loss": 1.983, 
"step": 776 }, { "epoch": 0.07587890625, "grad_norm": 0.3242790699005127, "learning_rate": 0.0004974479243562839, "loss": 1.9846, "step": 777 }, { "epoch": 0.0759765625, "grad_norm": 0.29044070839881897, "learning_rate": 0.0004974372329939617, "loss": 2.0012, "step": 778 }, { "epoch": 0.07607421875, "grad_norm": 0.32275035977363586, "learning_rate": 0.0004974265194121303, "loss": 1.9842, "step": 779 }, { "epoch": 0.076171875, "grad_norm": 0.31752893328666687, "learning_rate": 0.0004974157836118604, "loss": 1.9368, "step": 780 }, { "epoch": 0.07626953125, "grad_norm": 0.27947962284088135, "learning_rate": 0.0004974050255942239, "loss": 1.9794, "step": 781 }, { "epoch": 0.0763671875, "grad_norm": 0.29937905073165894, "learning_rate": 0.000497394245360296, "loss": 1.9597, "step": 782 }, { "epoch": 0.07646484375, "grad_norm": 0.2969702482223511, "learning_rate": 0.0004973834429111529, "loss": 1.945, "step": 783 }, { "epoch": 0.0765625, "grad_norm": 0.3729354441165924, "learning_rate": 0.0004973726182478741, "loss": 1.9032, "step": 784 }, { "epoch": 0.07666015625, "grad_norm": 0.32856106758117676, "learning_rate": 0.0004973617713715408, "loss": 1.9551, "step": 785 }, { "epoch": 0.0767578125, "grad_norm": 0.23239392042160034, "learning_rate": 0.0004973509022832364, "loss": 1.9632, "step": 786 }, { "epoch": 0.07685546875, "grad_norm": 0.23353087902069092, "learning_rate": 0.000497340010984047, "loss": 1.9514, "step": 787 }, { "epoch": 0.076953125, "grad_norm": 0.23280592262744904, "learning_rate": 0.00049732909747506, "loss": 1.9696, "step": 788 }, { "epoch": 0.07705078125, "grad_norm": 0.2193138748407364, "learning_rate": 0.0004973181617573658, "loss": 1.9734, "step": 789 }, { "epoch": 0.0771484375, "grad_norm": 0.3234734833240509, "learning_rate": 0.0004973072038320569, "loss": 1.9874, "step": 790 }, { "epoch": 0.07724609375, "grad_norm": 0.35144004225730896, "learning_rate": 0.0004972962237002278, "loss": 1.9331, "step": 791 }, { "epoch": 0.07734375, "grad_norm": 0.37716230750083923, "learning_rate": 0.0004972852213629753, "loss": 1.9731, "step": 792 }, { "epoch": 0.07744140625, "grad_norm": 0.3221200108528137, "learning_rate": 0.0004972741968213985, "loss": 2.0043, "step": 793 }, { "epoch": 0.0775390625, "grad_norm": 0.23167745769023895, "learning_rate": 0.0004972631500765985, "loss": 1.9634, "step": 794 }, { "epoch": 0.07763671875, "grad_norm": 0.4177049398422241, "learning_rate": 0.0004972520811296789, "loss": 1.9533, "step": 795 }, { "epoch": 0.077734375, "grad_norm": 0.39038336277008057, "learning_rate": 0.0004972409899817455, "loss": 1.9508, "step": 796 }, { "epoch": 0.07783203125, "grad_norm": 0.4033074676990509, "learning_rate": 0.0004972298766339061, "loss": 1.965, "step": 797 }, { "epoch": 0.0779296875, "grad_norm": 0.3271097242832184, "learning_rate": 0.0004972187410872707, "loss": 1.9893, "step": 798 }, { "epoch": 0.07802734375, "grad_norm": 0.30553460121154785, "learning_rate": 0.0004972075833429516, "loss": 1.9735, "step": 799 }, { "epoch": 0.078125, "grad_norm": 0.28286099433898926, "learning_rate": 0.0004971964034020637, "loss": 1.9704, "step": 800 }, { "epoch": 0.07822265625, "grad_norm": 0.28064650297164917, "learning_rate": 0.0004971852012657235, "loss": 1.9452, "step": 801 }, { "epoch": 0.0783203125, "grad_norm": 0.2370154857635498, "learning_rate": 0.0004971739769350502, "loss": 2.0068, "step": 802 }, { "epoch": 0.07841796875, "grad_norm": 0.26157453656196594, "learning_rate": 0.0004971627304111648, "loss": 1.9209, "step": 803 }, { "epoch": 0.078515625, "grad_norm": 
0.2244575023651123, "learning_rate": 0.0004971514616951908, "loss": 1.9952, "step": 804 }, { "epoch": 0.07861328125, "grad_norm": 0.241064190864563, "learning_rate": 0.0004971401707882538, "loss": 1.9616, "step": 805 }, { "epoch": 0.0787109375, "grad_norm": 0.23037207126617432, "learning_rate": 0.0004971288576914819, "loss": 1.9792, "step": 806 }, { "epoch": 0.07880859375, "grad_norm": 0.2162441909313202, "learning_rate": 0.0004971175224060049, "loss": 1.9556, "step": 807 }, { "epoch": 0.07890625, "grad_norm": 0.2614160180091858, "learning_rate": 0.0004971061649329551, "loss": 1.9247, "step": 808 }, { "epoch": 0.07900390625, "grad_norm": 0.22268271446228027, "learning_rate": 0.0004970947852734672, "loss": 1.9596, "step": 809 }, { "epoch": 0.0791015625, "grad_norm": 0.26265689730644226, "learning_rate": 0.0004970833834286779, "loss": 1.9722, "step": 810 }, { "epoch": 0.07919921875, "grad_norm": 0.29413190484046936, "learning_rate": 0.000497071959399726, "loss": 1.9808, "step": 811 }, { "epoch": 0.079296875, "grad_norm": 0.3389902114868164, "learning_rate": 0.0004970605131877527, "loss": 1.9986, "step": 812 }, { "epoch": 0.07939453125, "grad_norm": 0.3456060290336609, "learning_rate": 0.0004970490447939016, "loss": 1.9556, "step": 813 }, { "epoch": 0.0794921875, "grad_norm": 0.3039393126964569, "learning_rate": 0.0004970375542193179, "loss": 1.9359, "step": 814 }, { "epoch": 0.07958984375, "grad_norm": 0.46128201484680176, "learning_rate": 0.0004970260414651497, "loss": 1.935, "step": 815 }, { "epoch": 0.0796875, "grad_norm": 0.5321182608604431, "learning_rate": 0.000497014506532547, "loss": 1.9767, "step": 816 }, { "epoch": 0.07978515625, "grad_norm": 0.39562714099884033, "learning_rate": 0.0004970029494226618, "loss": 1.9803, "step": 817 }, { "epoch": 0.0798828125, "grad_norm": 0.2717466950416565, "learning_rate": 0.0004969913701366488, "loss": 1.9675, "step": 818 }, { "epoch": 0.07998046875, "grad_norm": 0.32809481024742126, "learning_rate": 0.0004969797686756646, "loss": 1.9869, "step": 819 }, { "epoch": 0.080078125, "grad_norm": 0.3525324761867523, "learning_rate": 0.0004969681450408681, "loss": 1.9858, "step": 820 }, { "epoch": 0.08017578125, "grad_norm": 0.30448198318481445, "learning_rate": 0.0004969564992334202, "loss": 2.0067, "step": 821 }, { "epoch": 0.0802734375, "grad_norm": 0.27088144421577454, "learning_rate": 0.0004969448312544845, "loss": 2.0068, "step": 822 }, { "epoch": 0.08037109375, "grad_norm": 0.26881691813468933, "learning_rate": 0.0004969331411052265, "loss": 1.9418, "step": 823 }, { "epoch": 0.08046875, "grad_norm": 0.31323063373565674, "learning_rate": 0.0004969214287868138, "loss": 1.9662, "step": 824 }, { "epoch": 0.08056640625, "grad_norm": 0.25669077038764954, "learning_rate": 0.0004969096943004163, "loss": 1.9733, "step": 825 }, { "epoch": 0.0806640625, "grad_norm": 0.27999046444892883, "learning_rate": 0.0004968979376472065, "loss": 1.9611, "step": 826 }, { "epoch": 0.08076171875, "grad_norm": 0.35361799597740173, "learning_rate": 0.0004968861588283584, "loss": 1.9035, "step": 827 }, { "epoch": 0.080859375, "grad_norm": 0.290341317653656, "learning_rate": 0.0004968743578450489, "loss": 2.0007, "step": 828 }, { "epoch": 0.08095703125, "grad_norm": 0.29945844411849976, "learning_rate": 0.0004968625346984565, "loss": 1.9724, "step": 829 }, { "epoch": 0.0810546875, "grad_norm": 0.2630455493927002, "learning_rate": 0.0004968506893897626, "loss": 1.9765, "step": 830 }, { "epoch": 0.08115234375, "grad_norm": 0.2183736264705658, "learning_rate": 
0.0004968388219201501, "loss": 1.9414, "step": 831 }, { "epoch": 0.08125, "grad_norm": 0.24703919887542725, "learning_rate": 0.0004968269322908046, "loss": 1.9725, "step": 832 }, { "epoch": 0.08134765625, "grad_norm": 0.22808632254600525, "learning_rate": 0.0004968150205029139, "loss": 1.9121, "step": 833 }, { "epoch": 0.0814453125, "grad_norm": 0.18958304822444916, "learning_rate": 0.0004968030865576677, "loss": 1.971, "step": 834 }, { "epoch": 0.08154296875, "grad_norm": 0.22013933956623077, "learning_rate": 0.0004967911304562581, "loss": 1.9703, "step": 835 }, { "epoch": 0.081640625, "grad_norm": 0.21636781096458435, "learning_rate": 0.0004967791521998795, "loss": 1.992, "step": 836 }, { "epoch": 0.08173828125, "grad_norm": 0.20808343589305878, "learning_rate": 0.0004967671517897284, "loss": 1.9881, "step": 837 }, { "epoch": 0.0818359375, "grad_norm": 0.23455387353897095, "learning_rate": 0.0004967551292270034, "loss": 1.9635, "step": 838 }, { "epoch": 0.08193359375, "grad_norm": 0.2605065703392029, "learning_rate": 0.0004967430845129057, "loss": 1.8877, "step": 839 }, { "epoch": 0.08203125, "grad_norm": 0.30832260847091675, "learning_rate": 0.0004967310176486382, "loss": 1.9557, "step": 840 }, { "epoch": 0.08212890625, "grad_norm": 0.32088643312454224, "learning_rate": 0.0004967189286354066, "loss": 1.9352, "step": 841 }, { "epoch": 0.0822265625, "grad_norm": 0.3073631823062897, "learning_rate": 0.0004967068174744181, "loss": 1.9487, "step": 842 }, { "epoch": 0.08232421875, "grad_norm": 0.23467223346233368, "learning_rate": 0.0004966946841668828, "loss": 1.9514, "step": 843 }, { "epoch": 0.082421875, "grad_norm": 0.2447347342967987, "learning_rate": 0.0004966825287140125, "loss": 1.9435, "step": 844 }, { "epoch": 0.08251953125, "grad_norm": 0.2715449631214142, "learning_rate": 0.0004966703511170215, "loss": 1.9591, "step": 845 }, { "epoch": 0.0826171875, "grad_norm": 0.28944098949432373, "learning_rate": 0.0004966581513771263, "loss": 1.952, "step": 846 }, { "epoch": 0.08271484375, "grad_norm": 0.25582170486450195, "learning_rate": 0.0004966459294955455, "loss": 1.9486, "step": 847 }, { "epoch": 0.0828125, "grad_norm": 0.305513858795166, "learning_rate": 0.0004966336854734999, "loss": 1.9416, "step": 848 }, { "epoch": 0.08291015625, "grad_norm": 0.28799137473106384, "learning_rate": 0.0004966214193122127, "loss": 1.9417, "step": 849 }, { "epoch": 0.0830078125, "grad_norm": 0.22209681570529938, "learning_rate": 0.000496609131012909, "loss": 1.9641, "step": 850 }, { "epoch": 0.08310546875, "grad_norm": 0.35411134362220764, "learning_rate": 0.0004965968205768165, "loss": 1.9497, "step": 851 }, { "epoch": 0.083203125, "grad_norm": 0.2821413576602936, "learning_rate": 0.0004965844880051647, "loss": 1.9398, "step": 852 }, { "epoch": 0.08330078125, "grad_norm": 0.31592175364494324, "learning_rate": 0.0004965721332991856, "loss": 1.9407, "step": 853 }, { "epoch": 0.0833984375, "grad_norm": 0.3579123318195343, "learning_rate": 0.0004965597564601134, "loss": 1.9767, "step": 854 }, { "epoch": 0.08349609375, "grad_norm": 0.3371833860874176, "learning_rate": 0.0004965473574891845, "loss": 2.0025, "step": 855 }, { "epoch": 0.08359375, "grad_norm": 0.39282771944999695, "learning_rate": 0.0004965349363876371, "loss": 1.9303, "step": 856 }, { "epoch": 0.08369140625, "grad_norm": 0.34869179129600525, "learning_rate": 0.0004965224931567122, "loss": 1.9613, "step": 857 }, { "epoch": 0.0837890625, "grad_norm": 0.2918208837509155, "learning_rate": 0.0004965100277976528, "loss": 1.9242, "step": 858 }, { 
"epoch": 0.08388671875, "grad_norm": 0.25440847873687744, "learning_rate": 0.0004964975403117041, "loss": 1.9223, "step": 859 }, { "epoch": 0.083984375, "grad_norm": 0.27069318294525146, "learning_rate": 0.0004964850307001132, "loss": 1.9711, "step": 860 }, { "epoch": 0.08408203125, "grad_norm": 0.27405187487602234, "learning_rate": 0.0004964724989641301, "loss": 1.9646, "step": 861 }, { "epoch": 0.0841796875, "grad_norm": 0.3116970956325531, "learning_rate": 0.0004964599451050064, "loss": 1.9231, "step": 862 }, { "epoch": 0.08427734375, "grad_norm": 0.3815697729587555, "learning_rate": 0.000496447369123996, "loss": 1.9773, "step": 863 }, { "epoch": 0.084375, "grad_norm": 0.32103797793388367, "learning_rate": 0.0004964347710223555, "loss": 1.936, "step": 864 }, { "epoch": 0.08447265625, "grad_norm": 0.42826011776924133, "learning_rate": 0.0004964221508013429, "loss": 1.9315, "step": 865 }, { "epoch": 0.0845703125, "grad_norm": 0.3091580271720886, "learning_rate": 0.0004964095084622192, "loss": 1.9362, "step": 866 }, { "epoch": 0.08466796875, "grad_norm": 0.2816343605518341, "learning_rate": 0.0004963968440062471, "loss": 2.0076, "step": 867 }, { "epoch": 0.084765625, "grad_norm": 0.35303476452827454, "learning_rate": 0.0004963841574346917, "loss": 1.9399, "step": 868 }, { "epoch": 0.08486328125, "grad_norm": 0.27021846175193787, "learning_rate": 0.0004963714487488202, "loss": 1.9556, "step": 869 }, { "epoch": 0.0849609375, "grad_norm": 0.28174805641174316, "learning_rate": 0.0004963587179499021, "loss": 1.9749, "step": 870 }, { "epoch": 0.08505859375, "grad_norm": 0.2774275243282318, "learning_rate": 0.0004963459650392093, "loss": 1.9457, "step": 871 }, { "epoch": 0.08515625, "grad_norm": 0.2178190052509308, "learning_rate": 0.0004963331900180154, "loss": 2.0192, "step": 872 }, { "epoch": 0.08525390625, "grad_norm": 0.2983119487762451, "learning_rate": 0.0004963203928875967, "loss": 1.9659, "step": 873 }, { "epoch": 0.0853515625, "grad_norm": 0.2878938913345337, "learning_rate": 0.0004963075736492315, "loss": 1.9581, "step": 874 }, { "epoch": 0.08544921875, "grad_norm": 0.19974899291992188, "learning_rate": 0.0004962947323042002, "loss": 1.9898, "step": 875 }, { "epoch": 0.085546875, "grad_norm": 0.31860581040382385, "learning_rate": 0.0004962818688537857, "loss": 1.9737, "step": 876 }, { "epoch": 0.08564453125, "grad_norm": 0.2651256322860718, "learning_rate": 0.0004962689832992729, "loss": 1.9977, "step": 877 }, { "epoch": 0.0857421875, "grad_norm": 0.286821573972702, "learning_rate": 0.0004962560756419489, "loss": 1.9317, "step": 878 }, { "epoch": 0.08583984375, "grad_norm": 0.3837340772151947, "learning_rate": 0.0004962431458831032, "loss": 1.9419, "step": 879 }, { "epoch": 0.0859375, "grad_norm": 0.32222527265548706, "learning_rate": 0.0004962301940240272, "loss": 1.9603, "step": 880 }, { "epoch": 0.08603515625, "grad_norm": 0.33181512355804443, "learning_rate": 0.0004962172200660146, "loss": 1.9646, "step": 881 }, { "epoch": 0.0861328125, "grad_norm": 0.2813611626625061, "learning_rate": 0.0004962042240103617, "loss": 1.9706, "step": 882 }, { "epoch": 0.08623046875, "grad_norm": 0.2933270335197449, "learning_rate": 0.0004961912058583664, "loss": 1.9529, "step": 883 }, { "epoch": 0.086328125, "grad_norm": 0.31641921401023865, "learning_rate": 0.0004961781656113292, "loss": 1.9437, "step": 884 }, { "epoch": 0.08642578125, "grad_norm": 0.39271247386932373, "learning_rate": 0.0004961651032705528, "loss": 1.957, "step": 885 }, { "epoch": 0.0865234375, "grad_norm": 0.3720405697822571, 
"learning_rate": 0.000496152018837342, "loss": 1.9393, "step": 886 }, { "epoch": 0.08662109375, "grad_norm": 0.27104172110557556, "learning_rate": 0.0004961389123130037, "loss": 1.9547, "step": 887 }, { "epoch": 0.08671875, "grad_norm": 0.24182896316051483, "learning_rate": 0.0004961257836988472, "loss": 1.9456, "step": 888 }, { "epoch": 0.08681640625, "grad_norm": 0.22501450777053833, "learning_rate": 0.0004961126329961838, "loss": 1.9336, "step": 889 }, { "epoch": 0.0869140625, "grad_norm": 0.2969515323638916, "learning_rate": 0.0004960994602063273, "loss": 1.9415, "step": 890 }, { "epoch": 0.08701171875, "grad_norm": 0.3117569386959076, "learning_rate": 0.0004960862653305934, "loss": 1.8993, "step": 891 }, { "epoch": 0.087109375, "grad_norm": 0.32299551367759705, "learning_rate": 0.0004960730483703004, "loss": 1.9879, "step": 892 }, { "epoch": 0.08720703125, "grad_norm": 0.335592657327652, "learning_rate": 0.0004960598093267682, "loss": 1.9665, "step": 893 }, { "epoch": 0.0873046875, "grad_norm": 0.21160855889320374, "learning_rate": 0.0004960465482013197, "loss": 1.8927, "step": 894 }, { "epoch": 0.08740234375, "grad_norm": 0.30621418356895447, "learning_rate": 0.0004960332649952792, "loss": 1.9828, "step": 895 }, { "epoch": 0.0875, "grad_norm": 0.3541114330291748, "learning_rate": 0.0004960199597099737, "loss": 1.9478, "step": 896 }, { "epoch": 0.08759765625, "grad_norm": 0.31596067547798157, "learning_rate": 0.0004960066323467323, "loss": 1.9355, "step": 897 }, { "epoch": 0.0876953125, "grad_norm": 0.3170725107192993, "learning_rate": 0.0004959932829068863, "loss": 1.9501, "step": 898 }, { "epoch": 0.08779296875, "grad_norm": 0.26953962445259094, "learning_rate": 0.0004959799113917691, "loss": 1.9488, "step": 899 }, { "epoch": 0.087890625, "grad_norm": 0.23792020976543427, "learning_rate": 0.0004959665178027165, "loss": 1.941, "step": 900 }, { "epoch": 0.08798828125, "grad_norm": 0.2375611513853073, "learning_rate": 0.0004959531021410664, "loss": 1.9133, "step": 901 }, { "epoch": 0.0880859375, "grad_norm": 0.20919479429721832, "learning_rate": 0.0004959396644081588, "loss": 1.9624, "step": 902 }, { "epoch": 0.08818359375, "grad_norm": 0.22488316893577576, "learning_rate": 0.0004959262046053361, "loss": 1.9408, "step": 903 }, { "epoch": 0.08828125, "grad_norm": 0.2249651849269867, "learning_rate": 0.0004959127227339427, "loss": 2.0022, "step": 904 }, { "epoch": 0.08837890625, "grad_norm": 0.19234181940555573, "learning_rate": 0.0004958992187953256, "loss": 1.9221, "step": 905 }, { "epoch": 0.0884765625, "grad_norm": 0.21266022324562073, "learning_rate": 0.0004958856927908334, "loss": 1.9796, "step": 906 }, { "epoch": 0.08857421875, "grad_norm": 0.22636987268924713, "learning_rate": 0.0004958721447218174, "loss": 1.9799, "step": 907 }, { "epoch": 0.088671875, "grad_norm": 0.21743637323379517, "learning_rate": 0.0004958585745896308, "loss": 1.9156, "step": 908 }, { "epoch": 0.08876953125, "grad_norm": 0.316840797662735, "learning_rate": 0.0004958449823956294, "loss": 1.9856, "step": 909 }, { "epoch": 0.0888671875, "grad_norm": 0.2583603262901306, "learning_rate": 0.0004958313681411706, "loss": 1.9601, "step": 910 }, { "epoch": 0.08896484375, "grad_norm": 0.2872125208377838, "learning_rate": 0.0004958177318276146, "loss": 1.9526, "step": 911 }, { "epoch": 0.0890625, "grad_norm": 0.37896475195884705, "learning_rate": 0.0004958040734563235, "loss": 1.9465, "step": 912 }, { "epoch": 0.08916015625, "grad_norm": 0.3056401014328003, "learning_rate": 0.0004957903930286616, "loss": 1.9344, 
"step": 913 }, { "epoch": 0.0892578125, "grad_norm": 0.35092082619667053, "learning_rate": 0.0004957766905459953, "loss": 1.9294, "step": 914 }, { "epoch": 0.08935546875, "grad_norm": 0.2681847810745239, "learning_rate": 0.0004957629660096937, "loss": 1.9436, "step": 915 }, { "epoch": 0.089453125, "grad_norm": 0.25762611627578735, "learning_rate": 0.0004957492194211275, "loss": 1.931, "step": 916 }, { "epoch": 0.08955078125, "grad_norm": 0.24634388089179993, "learning_rate": 0.00049573545078167, "loss": 1.9373, "step": 917 }, { "epoch": 0.0896484375, "grad_norm": 0.27374333143234253, "learning_rate": 0.0004957216600926965, "loss": 2.046, "step": 918 }, { "epoch": 0.08974609375, "grad_norm": 0.338375061750412, "learning_rate": 0.0004957078473555845, "loss": 2.0259, "step": 919 }, { "epoch": 0.08984375, "grad_norm": 0.27678367495536804, "learning_rate": 0.0004956940125717139, "loss": 1.9947, "step": 920 }, { "epoch": 0.08994140625, "grad_norm": 0.266079306602478, "learning_rate": 0.0004956801557424666, "loss": 2.0097, "step": 921 }, { "epoch": 0.0900390625, "grad_norm": 0.28650638461112976, "learning_rate": 0.0004956662768692268, "loss": 1.9893, "step": 922 }, { "epoch": 0.09013671875, "grad_norm": 0.2717742323875427, "learning_rate": 0.0004956523759533809, "loss": 1.976, "step": 923 }, { "epoch": 0.090234375, "grad_norm": 0.3267741799354553, "learning_rate": 0.0004956384529963174, "loss": 1.9803, "step": 924 }, { "epoch": 0.09033203125, "grad_norm": 0.32973724603652954, "learning_rate": 0.0004956245079994272, "loss": 1.942, "step": 925 }, { "epoch": 0.0904296875, "grad_norm": 0.3257177770137787, "learning_rate": 0.0004956105409641031, "loss": 1.9584, "step": 926 }, { "epoch": 0.09052734375, "grad_norm": 0.31881803274154663, "learning_rate": 0.0004955965518917405, "loss": 1.9386, "step": 927 }, { "epoch": 0.090625, "grad_norm": 0.2814694046974182, "learning_rate": 0.0004955825407837365, "loss": 1.9114, "step": 928 }, { "epoch": 0.09072265625, "grad_norm": 0.24232172966003418, "learning_rate": 0.000495568507641491, "loss": 1.954, "step": 929 }, { "epoch": 0.0908203125, "grad_norm": 0.2062203288078308, "learning_rate": 0.0004955544524664056, "loss": 1.9682, "step": 930 }, { "epoch": 0.09091796875, "grad_norm": 0.2777712047100067, "learning_rate": 0.0004955403752598844, "loss": 1.983, "step": 931 }, { "epoch": 0.091015625, "grad_norm": 0.31253954768180847, "learning_rate": 0.0004955262760233335, "loss": 1.9211, "step": 932 }, { "epoch": 0.09111328125, "grad_norm": 0.292059987783432, "learning_rate": 0.0004955121547581613, "loss": 1.9745, "step": 933 }, { "epoch": 0.0912109375, "grad_norm": 0.309123694896698, "learning_rate": 0.0004954980114657783, "loss": 1.966, "step": 934 }, { "epoch": 0.09130859375, "grad_norm": 0.2765352129936218, "learning_rate": 0.0004954838461475975, "loss": 1.9535, "step": 935 }, { "epoch": 0.09140625, "grad_norm": 0.2576250731945038, "learning_rate": 0.0004954696588050338, "loss": 1.9002, "step": 936 }, { "epoch": 0.09150390625, "grad_norm": 0.3167097866535187, "learning_rate": 0.0004954554494395044, "loss": 1.9249, "step": 937 }, { "epoch": 0.0916015625, "grad_norm": 0.32505497336387634, "learning_rate": 0.0004954412180524285, "loss": 1.9669, "step": 938 }, { "epoch": 0.09169921875, "grad_norm": 0.3114219307899475, "learning_rate": 0.000495426964645228, "loss": 1.9276, "step": 939 }, { "epoch": 0.091796875, "grad_norm": 0.2786170542240143, "learning_rate": 0.0004954126892193264, "loss": 1.965, "step": 940 }, { "epoch": 0.09189453125, "grad_norm": 0.2753596901893616, 
"learning_rate": 0.00049539839177615, "loss": 1.9925, "step": 941 }, { "epoch": 0.0919921875, "grad_norm": 0.29401320219039917, "learning_rate": 0.0004953840723171268, "loss": 1.9772, "step": 942 }, { "epoch": 0.09208984375, "grad_norm": 0.3157658278942108, "learning_rate": 0.0004953697308436871, "loss": 2.0019, "step": 943 }, { "epoch": 0.0921875, "grad_norm": 0.29067566990852356, "learning_rate": 0.0004953553673572636, "loss": 1.9753, "step": 944 }, { "epoch": 0.09228515625, "grad_norm": 0.28693830966949463, "learning_rate": 0.0004953409818592911, "loss": 1.9436, "step": 945 }, { "epoch": 0.0923828125, "grad_norm": 0.2536534070968628, "learning_rate": 0.0004953265743512065, "loss": 1.9801, "step": 946 }, { "epoch": 0.09248046875, "grad_norm": 0.3041675090789795, "learning_rate": 0.000495312144834449, "loss": 1.9378, "step": 947 }, { "epoch": 0.092578125, "grad_norm": 0.3237321376800537, "learning_rate": 0.0004952976933104602, "loss": 1.9781, "step": 948 }, { "epoch": 0.09267578125, "grad_norm": 0.25767290592193604, "learning_rate": 0.0004952832197806835, "loss": 1.9031, "step": 949 }, { "epoch": 0.0927734375, "grad_norm": 0.25587791204452515, "learning_rate": 0.0004952687242465645, "loss": 1.9405, "step": 950 }, { "epoch": 0.09287109375, "grad_norm": 0.2901047468185425, "learning_rate": 0.0004952542067095515, "loss": 1.9567, "step": 951 }, { "epoch": 0.09296875, "grad_norm": 0.31517183780670166, "learning_rate": 0.0004952396671710945, "loss": 1.9464, "step": 952 }, { "epoch": 0.09306640625, "grad_norm": 0.30896711349487305, "learning_rate": 0.0004952251056326458, "loss": 1.9096, "step": 953 }, { "epoch": 0.0931640625, "grad_norm": 0.26281988620758057, "learning_rate": 0.0004952105220956602, "loss": 1.9616, "step": 954 }, { "epoch": 0.09326171875, "grad_norm": 0.23971204459667206, "learning_rate": 0.0004951959165615942, "loss": 1.9928, "step": 955 }, { "epoch": 0.093359375, "grad_norm": 0.22277623414993286, "learning_rate": 0.000495181289031907, "loss": 1.9652, "step": 956 }, { "epoch": 0.09345703125, "grad_norm": 0.26211845874786377, "learning_rate": 0.0004951666395080596, "loss": 1.9617, "step": 957 }, { "epoch": 0.0935546875, "grad_norm": 0.24874535202980042, "learning_rate": 0.0004951519679915155, "loss": 1.9509, "step": 958 }, { "epoch": 0.09365234375, "grad_norm": 0.21750757098197937, "learning_rate": 0.0004951372744837401, "loss": 1.966, "step": 959 }, { "epoch": 0.09375, "grad_norm": 0.2798830270767212, "learning_rate": 0.0004951225589862012, "loss": 1.9574, "step": 960 }, { "epoch": 0.09384765625, "grad_norm": 0.33166754245758057, "learning_rate": 0.0004951078215003688, "loss": 1.9672, "step": 961 }, { "epoch": 0.0939453125, "grad_norm": 0.3122573792934418, "learning_rate": 0.0004950930620277149, "loss": 1.9205, "step": 962 }, { "epoch": 0.09404296875, "grad_norm": 0.233395054936409, "learning_rate": 0.0004950782805697141, "loss": 1.9389, "step": 963 }, { "epoch": 0.094140625, "grad_norm": 0.2523380219936371, "learning_rate": 0.0004950634771278427, "loss": 1.9643, "step": 964 }, { "epoch": 0.09423828125, "grad_norm": 0.2774633765220642, "learning_rate": 0.0004950486517035796, "loss": 1.9467, "step": 965 }, { "epoch": 0.0943359375, "grad_norm": 0.2782750129699707, "learning_rate": 0.0004950338042984056, "loss": 1.9296, "step": 966 }, { "epoch": 0.09443359375, "grad_norm": 0.3408234417438507, "learning_rate": 0.0004950189349138039, "loss": 1.9705, "step": 967 }, { "epoch": 0.09453125, "grad_norm": 0.27266553044319153, "learning_rate": 0.0004950040435512598, "loss": 1.9292, "step": 
968 }, { "epoch": 0.09462890625, "grad_norm": 0.2276991307735443, "learning_rate": 0.0004949891302122607, "loss": 1.9793, "step": 969 }, { "epoch": 0.0947265625, "grad_norm": 0.23914872109889984, "learning_rate": 0.0004949741948982965, "loss": 1.9632, "step": 970 }, { "epoch": 0.09482421875, "grad_norm": 0.24836085736751556, "learning_rate": 0.000494959237610859, "loss": 1.9304, "step": 971 }, { "epoch": 0.094921875, "grad_norm": 0.1932704746723175, "learning_rate": 0.0004949442583514425, "loss": 1.9483, "step": 972 }, { "epoch": 0.09501953125, "grad_norm": 0.27378395199775696, "learning_rate": 0.0004949292571215429, "loss": 1.9687, "step": 973 }, { "epoch": 0.0951171875, "grad_norm": 0.3175489008426666, "learning_rate": 0.0004949142339226591, "loss": 2.0036, "step": 974 }, { "epoch": 0.09521484375, "grad_norm": 0.34563618898391724, "learning_rate": 0.0004948991887562914, "loss": 1.9429, "step": 975 }, { "epoch": 0.0953125, "grad_norm": 0.2931719720363617, "learning_rate": 0.000494884121623943, "loss": 1.9424, "step": 976 }, { "epoch": 0.09541015625, "grad_norm": 0.29774370789527893, "learning_rate": 0.0004948690325271188, "loss": 1.9297, "step": 977 }, { "epoch": 0.0955078125, "grad_norm": 0.31513288617134094, "learning_rate": 0.0004948539214673261, "loss": 1.9917, "step": 978 }, { "epoch": 0.09560546875, "grad_norm": 0.2150924950838089, "learning_rate": 0.0004948387884460744, "loss": 1.9529, "step": 979 }, { "epoch": 0.095703125, "grad_norm": 0.27547362446784973, "learning_rate": 0.0004948236334648753, "loss": 1.9885, "step": 980 }, { "epoch": 0.09580078125, "grad_norm": 0.3271898925304413, "learning_rate": 0.0004948084565252427, "loss": 1.9232, "step": 981 }, { "epoch": 0.0958984375, "grad_norm": 0.2923772931098938, "learning_rate": 0.0004947932576286928, "loss": 1.9429, "step": 982 }, { "epoch": 0.09599609375, "grad_norm": 0.2936458885669708, "learning_rate": 0.0004947780367767434, "loss": 1.9773, "step": 983 }, { "epoch": 0.09609375, "grad_norm": 0.29706791043281555, "learning_rate": 0.0004947627939709153, "loss": 1.9284, "step": 984 }, { "epoch": 0.09619140625, "grad_norm": 0.22721995413303375, "learning_rate": 0.000494747529212731, "loss": 1.9281, "step": 985 }, { "epoch": 0.0962890625, "grad_norm": 0.27041006088256836, "learning_rate": 0.0004947322425037152, "loss": 1.9735, "step": 986 }, { "epoch": 0.09638671875, "grad_norm": 0.26037418842315674, "learning_rate": 0.0004947169338453952, "loss": 1.9426, "step": 987 }, { "epoch": 0.096484375, "grad_norm": 0.23383264243602753, "learning_rate": 0.0004947016032392999, "loss": 1.923, "step": 988 }, { "epoch": 0.09658203125, "grad_norm": 0.29238423705101013, "learning_rate": 0.0004946862506869609, "loss": 1.9803, "step": 989 }, { "epoch": 0.0966796875, "grad_norm": 0.223761647939682, "learning_rate": 0.0004946708761899116, "loss": 1.9377, "step": 990 }, { "epoch": 0.09677734375, "grad_norm": 0.2669995427131653, "learning_rate": 0.0004946554797496881, "loss": 1.9694, "step": 991 }, { "epoch": 0.096875, "grad_norm": 0.2847992181777954, "learning_rate": 0.0004946400613678279, "loss": 1.9501, "step": 992 }, { "epoch": 0.09697265625, "grad_norm": 0.25099360942840576, "learning_rate": 0.0004946246210458715, "loss": 1.9707, "step": 993 }, { "epoch": 0.0970703125, "grad_norm": 0.23306165635585785, "learning_rate": 0.0004946091587853611, "loss": 1.9174, "step": 994 }, { "epoch": 0.09716796875, "grad_norm": 0.2907635569572449, "learning_rate": 0.0004945936745878414, "loss": 1.9521, "step": 995 }, { "epoch": 0.097265625, "grad_norm": 
0.3234463334083557, "learning_rate": 0.0004945781684548591, "loss": 1.9667, "step": 996 }, { "epoch": 0.09736328125, "grad_norm": 0.36099013686180115, "learning_rate": 0.0004945626403879629, "loss": 1.9257, "step": 997 }, { "epoch": 0.0974609375, "grad_norm": 0.3735397160053253, "learning_rate": 0.0004945470903887041, "loss": 1.9512, "step": 998 }, { "epoch": 0.09755859375, "grad_norm": 0.3653644025325775, "learning_rate": 0.0004945315184586359, "loss": 1.9664, "step": 999 }, { "epoch": 0.09765625, "grad_norm": 0.2931279242038727, "learning_rate": 0.000494515924599314, "loss": 1.9209, "step": 1000 }, { "epoch": 0.09775390625, "grad_norm": 0.31677475571632385, "learning_rate": 0.000494500308812296, "loss": 1.9989, "step": 1001 }, { "epoch": 0.0978515625, "grad_norm": 0.2640211582183838, "learning_rate": 0.0004944846710991416, "loss": 1.8792, "step": 1002 }, { "epoch": 0.09794921875, "grad_norm": 0.3207566738128662, "learning_rate": 0.0004944690114614132, "loss": 1.955, "step": 1003 }, { "epoch": 0.098046875, "grad_norm": 0.29306915402412415, "learning_rate": 0.0004944533299006749, "loss": 1.9264, "step": 1004 }, { "epoch": 0.09814453125, "grad_norm": 0.2783614993095398, "learning_rate": 0.000494437626418493, "loss": 1.9341, "step": 1005 }, { "epoch": 0.0982421875, "grad_norm": 0.346975713968277, "learning_rate": 0.0004944219010164361, "loss": 1.9599, "step": 1006 }, { "epoch": 0.09833984375, "grad_norm": 0.2799683213233948, "learning_rate": 0.0004944061536960755, "loss": 1.9201, "step": 1007 }, { "epoch": 0.0984375, "grad_norm": 0.27122223377227783, "learning_rate": 0.0004943903844589837, "loss": 1.9685, "step": 1008 }, { "epoch": 0.09853515625, "grad_norm": 0.34742453694343567, "learning_rate": 0.0004943745933067362, "loss": 1.9533, "step": 1009 }, { "epoch": 0.0986328125, "grad_norm": 0.2686299681663513, "learning_rate": 0.0004943587802409103, "loss": 1.9149, "step": 1010 }, { "epoch": 0.09873046875, "grad_norm": 0.3223266303539276, "learning_rate": 0.0004943429452630856, "loss": 1.8887, "step": 1011 }, { "epoch": 0.098828125, "grad_norm": 0.2767395079135895, "learning_rate": 0.0004943270883748439, "loss": 1.9756, "step": 1012 }, { "epoch": 0.09892578125, "grad_norm": 0.2761443257331848, "learning_rate": 0.0004943112095777691, "loss": 1.981, "step": 1013 }, { "epoch": 0.0990234375, "grad_norm": 0.22587864100933075, "learning_rate": 0.0004942953088734474, "loss": 1.939, "step": 1014 }, { "epoch": 0.09912109375, "grad_norm": 0.2596238851547241, "learning_rate": 0.0004942793862634671, "loss": 1.9546, "step": 1015 }, { "epoch": 0.09921875, "grad_norm": 0.3035464584827423, "learning_rate": 0.0004942634417494188, "loss": 2.0001, "step": 1016 }, { "epoch": 0.09931640625, "grad_norm": 0.32868078351020813, "learning_rate": 0.0004942474753328952, "loss": 1.9248, "step": 1017 }, { "epoch": 0.0994140625, "grad_norm": 0.34833693504333496, "learning_rate": 0.000494231487015491, "loss": 1.9148, "step": 1018 }, { "epoch": 0.09951171875, "grad_norm": 0.2557753920555115, "learning_rate": 0.0004942154767988037, "loss": 1.9585, "step": 1019 }, { "epoch": 0.099609375, "grad_norm": 0.25750526785850525, "learning_rate": 0.0004941994446844321, "loss": 2.0257, "step": 1020 }, { "epoch": 0.09970703125, "grad_norm": 0.22088824212551117, "learning_rate": 0.000494183390673978, "loss": 1.9373, "step": 1021 }, { "epoch": 0.0998046875, "grad_norm": 0.23292425274848938, "learning_rate": 0.000494167314769045, "loss": 1.9462, "step": 1022 }, { "epoch": 0.09990234375, "grad_norm": 0.24202801287174225, "learning_rate": 
0.0004941512169712389, "loss": 2.0237, "step": 1023 }, { "epoch": 0.1, "grad_norm": 0.28507405519485474, "learning_rate": 0.0004941350972821678, "loss": 1.9373, "step": 1024 }, { "epoch": 0.10009765625, "grad_norm": 0.22370004653930664, "learning_rate": 0.0004941189557034417, "loss": 1.9332, "step": 1025 }, { "epoch": 0.1001953125, "grad_norm": 0.2707929313182831, "learning_rate": 0.0004941027922366732, "loss": 1.9908, "step": 1026 }, { "epoch": 0.10029296875, "grad_norm": 0.29618075489997864, "learning_rate": 0.0004940866068834769, "loss": 1.9317, "step": 1027 }, { "epoch": 0.100390625, "grad_norm": 0.27780136466026306, "learning_rate": 0.0004940703996454695, "loss": 1.9225, "step": 1028 }, { "epoch": 0.10048828125, "grad_norm": 0.33955708146095276, "learning_rate": 0.0004940541705242698, "loss": 1.9232, "step": 1029 }, { "epoch": 0.1005859375, "grad_norm": 0.3132137954235077, "learning_rate": 0.0004940379195214992, "loss": 1.976, "step": 1030 }, { "epoch": 0.10068359375, "grad_norm": 0.2742595076560974, "learning_rate": 0.000494021646638781, "loss": 1.9138, "step": 1031 }, { "epoch": 0.10078125, "grad_norm": 0.2921305298805237, "learning_rate": 0.0004940053518777406, "loss": 1.9178, "step": 1032 }, { "epoch": 0.10087890625, "grad_norm": 0.2586468458175659, "learning_rate": 0.0004939890352400058, "loss": 1.9385, "step": 1033 }, { "epoch": 0.1009765625, "grad_norm": 0.30906325578689575, "learning_rate": 0.0004939726967272064, "loss": 1.9368, "step": 1034 }, { "epoch": 0.10107421875, "grad_norm": 0.3483060300350189, "learning_rate": 0.0004939563363409745, "loss": 1.9922, "step": 1035 }, { "epoch": 0.101171875, "grad_norm": 0.3559792637825012, "learning_rate": 0.0004939399540829445, "loss": 1.9564, "step": 1036 }, { "epoch": 0.10126953125, "grad_norm": 0.33506283164024353, "learning_rate": 0.0004939235499547526, "loss": 1.9784, "step": 1037 }, { "epoch": 0.1013671875, "grad_norm": 0.23956939578056335, "learning_rate": 0.0004939071239580377, "loss": 1.9755, "step": 1038 }, { "epoch": 0.10146484375, "grad_norm": 0.33348920941352844, "learning_rate": 0.0004938906760944403, "loss": 1.9128, "step": 1039 }, { "epoch": 0.1015625, "grad_norm": 0.2843862771987915, "learning_rate": 0.0004938742063656036, "loss": 1.9297, "step": 1040 }, { "epoch": 0.10166015625, "grad_norm": 0.2671546936035156, "learning_rate": 0.0004938577147731727, "loss": 1.9391, "step": 1041 }, { "epoch": 0.1017578125, "grad_norm": 0.2899993360042572, "learning_rate": 0.000493841201318795, "loss": 1.9198, "step": 1042 }, { "epoch": 0.10185546875, "grad_norm": 0.20722518861293793, "learning_rate": 0.0004938246660041201, "loss": 1.9295, "step": 1043 }, { "epoch": 0.101953125, "grad_norm": 0.22667360305786133, "learning_rate": 0.0004938081088307997, "loss": 1.9371, "step": 1044 }, { "epoch": 0.10205078125, "grad_norm": 0.275579035282135, "learning_rate": 0.0004937915298004877, "loss": 1.9383, "step": 1045 }, { "epoch": 0.1021484375, "grad_norm": 0.2900732159614563, "learning_rate": 0.0004937749289148401, "loss": 1.9805, "step": 1046 }, { "epoch": 0.10224609375, "grad_norm": 0.26479572057724, "learning_rate": 0.0004937583061755153, "loss": 1.911, "step": 1047 }, { "epoch": 0.10234375, "grad_norm": 0.2967575490474701, "learning_rate": 0.0004937416615841739, "loss": 1.9654, "step": 1048 }, { "epoch": 0.10244140625, "grad_norm": 0.2840502858161926, "learning_rate": 0.0004937249951424782, "loss": 1.9332, "step": 1049 }, { "epoch": 0.1025390625, "grad_norm": 0.2664666175842285, "learning_rate": 0.0004937083068520931, "loss": 1.973, "step": 
1050 }, { "epoch": 0.10263671875, "grad_norm": 0.2535867989063263, "learning_rate": 0.000493691596714686, "loss": 1.9421, "step": 1051 }, { "epoch": 0.102734375, "grad_norm": 0.3005189895629883, "learning_rate": 0.0004936748647319256, "loss": 1.9434, "step": 1052 }, { "epoch": 0.10283203125, "grad_norm": 0.2663765549659729, "learning_rate": 0.0004936581109054837, "loss": 1.9947, "step": 1053 }, { "epoch": 0.1029296875, "grad_norm": 0.29048940539360046, "learning_rate": 0.0004936413352370335, "loss": 1.9737, "step": 1054 }, { "epoch": 0.10302734375, "grad_norm": 0.2594558596611023, "learning_rate": 0.000493624537728251, "loss": 1.9601, "step": 1055 }, { "epoch": 0.103125, "grad_norm": 0.27011555433273315, "learning_rate": 0.000493607718380814, "loss": 1.9459, "step": 1056 }, { "epoch": 0.10322265625, "grad_norm": 0.3043757677078247, "learning_rate": 0.0004935908771964026, "loss": 1.9494, "step": 1057 }, { "epoch": 0.1033203125, "grad_norm": 0.26345208287239075, "learning_rate": 0.0004935740141766991, "loss": 1.896, "step": 1058 }, { "epoch": 0.10341796875, "grad_norm": 0.20400577783584595, "learning_rate": 0.000493557129323388, "loss": 1.9276, "step": 1059 }, { "epoch": 0.103515625, "grad_norm": 0.2836177349090576, "learning_rate": 0.0004935402226381558, "loss": 1.921, "step": 1060 }, { "epoch": 0.10361328125, "grad_norm": 0.2904714047908783, "learning_rate": 0.0004935232941226916, "loss": 1.9631, "step": 1061 }, { "epoch": 0.1037109375, "grad_norm": 0.25980454683303833, "learning_rate": 0.0004935063437786863, "loss": 1.9757, "step": 1062 }, { "epoch": 0.10380859375, "grad_norm": 0.27394455671310425, "learning_rate": 0.0004934893716078329, "loss": 1.9647, "step": 1063 }, { "epoch": 0.10390625, "grad_norm": 0.23419839143753052, "learning_rate": 0.000493472377611827, "loss": 1.962, "step": 1064 }, { "epoch": 0.10400390625, "grad_norm": 0.2323913723230362, "learning_rate": 0.0004934553617923661, "loss": 1.9759, "step": 1065 }, { "epoch": 0.1041015625, "grad_norm": 0.2700064778327942, "learning_rate": 0.0004934383241511498, "loss": 1.9206, "step": 1066 }, { "epoch": 0.10419921875, "grad_norm": 0.3530958294868469, "learning_rate": 0.0004934212646898802, "loss": 1.979, "step": 1067 }, { "epoch": 0.104296875, "grad_norm": 0.30405712127685547, "learning_rate": 0.0004934041834102612, "loss": 1.951, "step": 1068 }, { "epoch": 0.10439453125, "grad_norm": 0.2923583984375, "learning_rate": 0.0004933870803139992, "loss": 2.0158, "step": 1069 }, { "epoch": 0.1044921875, "grad_norm": 0.23782993853092194, "learning_rate": 0.0004933699554028027, "loss": 1.896, "step": 1070 }, { "epoch": 0.10458984375, "grad_norm": 0.3045034110546112, "learning_rate": 0.0004933528086783821, "loss": 1.9555, "step": 1071 }, { "epoch": 0.1046875, "grad_norm": 0.40851131081581116, "learning_rate": 0.0004933356401424505, "loss": 1.9108, "step": 1072 }, { "epoch": 0.10478515625, "grad_norm": 0.39787429571151733, "learning_rate": 0.0004933184497967226, "loss": 1.9472, "step": 1073 }, { "epoch": 0.1048828125, "grad_norm": 0.24665354192256927, "learning_rate": 0.0004933012376429158, "loss": 1.9617, "step": 1074 }, { "epoch": 0.10498046875, "grad_norm": 0.28025394678115845, "learning_rate": 0.0004932840036827492, "loss": 1.898, "step": 1075 }, { "epoch": 0.105078125, "grad_norm": 0.3374732434749603, "learning_rate": 0.0004932667479179445, "loss": 1.9323, "step": 1076 }, { "epoch": 0.10517578125, "grad_norm": 0.2941461503505707, "learning_rate": 0.0004932494703502254, "loss": 1.964, "step": 1077 }, { "epoch": 0.1052734375, "grad_norm": 
0.2827446758747101, "learning_rate": 0.0004932321709813177, "loss": 1.9554, "step": 1078 }, { "epoch": 0.10537109375, "grad_norm": 0.32132452726364136, "learning_rate": 0.0004932148498129495, "loss": 1.9333, "step": 1079 }, { "epoch": 0.10546875, "grad_norm": 0.2732277810573578, "learning_rate": 0.000493197506846851, "loss": 1.932, "step": 1080 }, { "epoch": 0.10556640625, "grad_norm": 0.2594139575958252, "learning_rate": 0.0004931801420847545, "loss": 1.9447, "step": 1081 }, { "epoch": 0.1056640625, "grad_norm": 0.24399201571941376, "learning_rate": 0.0004931627555283948, "loss": 1.9163, "step": 1082 }, { "epoch": 0.10576171875, "grad_norm": 0.22696708142757416, "learning_rate": 0.0004931453471795087, "loss": 1.9734, "step": 1083 }, { "epoch": 0.105859375, "grad_norm": 0.25717827677726746, "learning_rate": 0.0004931279170398349, "loss": 1.9767, "step": 1084 }, { "epoch": 0.10595703125, "grad_norm": 0.23899690806865692, "learning_rate": 0.0004931104651111146, "loss": 1.9519, "step": 1085 }, { "epoch": 0.1060546875, "grad_norm": 0.27689501643180847, "learning_rate": 0.0004930929913950911, "loss": 1.9582, "step": 1086 }, { "epoch": 0.10615234375, "grad_norm": 0.23959669470787048, "learning_rate": 0.00049307549589351, "loss": 1.9369, "step": 1087 }, { "epoch": 0.10625, "grad_norm": 0.25673651695251465, "learning_rate": 0.0004930579786081188, "loss": 1.9711, "step": 1088 }, { "epoch": 0.10634765625, "grad_norm": 0.30868396162986755, "learning_rate": 0.0004930404395406674, "loss": 1.9539, "step": 1089 }, { "epoch": 0.1064453125, "grad_norm": 0.2462042272090912, "learning_rate": 0.0004930228786929076, "loss": 1.9061, "step": 1090 }, { "epoch": 0.10654296875, "grad_norm": 0.24275164306163788, "learning_rate": 0.0004930052960665938, "loss": 1.8992, "step": 1091 }, { "epoch": 0.106640625, "grad_norm": 0.27242550253868103, "learning_rate": 0.0004929876916634823, "loss": 1.8997, "step": 1092 }, { "epoch": 0.10673828125, "grad_norm": 0.2655372619628906, "learning_rate": 0.0004929700654853316, "loss": 1.8905, "step": 1093 }, { "epoch": 0.1068359375, "grad_norm": 0.27779421210289, "learning_rate": 0.0004929524175339024, "loss": 1.9485, "step": 1094 }, { "epoch": 0.10693359375, "grad_norm": 0.3308311402797699, "learning_rate": 0.0004929347478109576, "loss": 1.896, "step": 1095 }, { "epoch": 0.10703125, "grad_norm": 0.2988120913505554, "learning_rate": 0.0004929170563182621, "loss": 1.9508, "step": 1096 }, { "epoch": 0.10712890625, "grad_norm": 0.24604608118534088, "learning_rate": 0.0004928993430575834, "loss": 1.9912, "step": 1097 }, { "epoch": 0.1072265625, "grad_norm": 0.31434887647628784, "learning_rate": 0.0004928816080306906, "loss": 1.9592, "step": 1098 }, { "epoch": 0.10732421875, "grad_norm": 0.21227046847343445, "learning_rate": 0.0004928638512393555, "loss": 1.9523, "step": 1099 }, { "epoch": 0.107421875, "grad_norm": 0.26643091440200806, "learning_rate": 0.0004928460726853518, "loss": 1.9506, "step": 1100 }, { "epoch": 0.10751953125, "grad_norm": 0.2661186754703522, "learning_rate": 0.0004928282723704554, "loss": 1.9425, "step": 1101 }, { "epoch": 0.1076171875, "grad_norm": 0.25391289591789246, "learning_rate": 0.0004928104502964442, "loss": 1.9551, "step": 1102 }, { "epoch": 0.10771484375, "grad_norm": 0.276909202337265, "learning_rate": 0.0004927926064650988, "loss": 1.9416, "step": 1103 }, { "epoch": 0.1078125, "grad_norm": 0.25135815143585205, "learning_rate": 0.0004927747408782014, "loss": 1.9197, "step": 1104 }, { "epoch": 0.10791015625, "grad_norm": 0.2529551684856415, "learning_rate": 
0.0004927568535375368, "loss": 1.9561, "step": 1105 }, { "epoch": 0.1080078125, "grad_norm": 0.2697699964046478, "learning_rate": 0.0004927389444448915, "loss": 1.9369, "step": 1106 }, { "epoch": 0.10810546875, "grad_norm": 0.2805427014827728, "learning_rate": 0.0004927210136020548, "loss": 1.9535, "step": 1107 }, { "epoch": 0.108203125, "grad_norm": 0.33747565746307373, "learning_rate": 0.0004927030610108176, "loss": 1.9582, "step": 1108 }, { "epoch": 0.10830078125, "grad_norm": 0.30215132236480713, "learning_rate": 0.0004926850866729734, "loss": 1.9245, "step": 1109 }, { "epoch": 0.1083984375, "grad_norm": 0.3483729064464569, "learning_rate": 0.0004926670905903174, "loss": 1.9959, "step": 1110 }, { "epoch": 0.10849609375, "grad_norm": 0.23263247311115265, "learning_rate": 0.0004926490727646476, "loss": 1.9224, "step": 1111 }, { "epoch": 0.10859375, "grad_norm": 0.3023853898048401, "learning_rate": 0.0004926310331977633, "loss": 1.9493, "step": 1112 }, { "epoch": 0.10869140625, "grad_norm": 0.3592170774936676, "learning_rate": 0.0004926129718914672, "loss": 1.9082, "step": 1113 }, { "epoch": 0.1087890625, "grad_norm": 0.29466864466667175, "learning_rate": 0.0004925948888475628, "loss": 2.016, "step": 1114 }, { "epoch": 0.10888671875, "grad_norm": 0.3136419355869293, "learning_rate": 0.0004925767840678569, "loss": 1.9592, "step": 1115 }, { "epoch": 0.108984375, "grad_norm": 0.21686695516109467, "learning_rate": 0.0004925586575541578, "loss": 1.9081, "step": 1116 }, { "epoch": 0.10908203125, "grad_norm": 0.2520851790904999, "learning_rate": 0.0004925405093082761, "loss": 1.9418, "step": 1117 }, { "epoch": 0.1091796875, "grad_norm": 0.28278669714927673, "learning_rate": 0.0004925223393320247, "loss": 1.9815, "step": 1118 }, { "epoch": 0.10927734375, "grad_norm": 0.2773968577384949, "learning_rate": 0.0004925041476272189, "loss": 1.8888, "step": 1119 }, { "epoch": 0.109375, "grad_norm": 0.24399970471858978, "learning_rate": 0.0004924859341956755, "loss": 1.9548, "step": 1120 }, { "epoch": 0.10947265625, "grad_norm": 0.26073598861694336, "learning_rate": 0.0004924676990392141, "loss": 1.9112, "step": 1121 }, { "epoch": 0.1095703125, "grad_norm": 0.24541547894477844, "learning_rate": 0.0004924494421596562, "loss": 1.914, "step": 1122 }, { "epoch": 0.10966796875, "grad_norm": 0.21719145774841309, "learning_rate": 0.0004924311635588254, "loss": 1.9204, "step": 1123 }, { "epoch": 0.109765625, "grad_norm": 0.295460045337677, "learning_rate": 0.0004924128632385476, "loss": 1.9545, "step": 1124 }, { "epoch": 0.10986328125, "grad_norm": 0.3058072626590729, "learning_rate": 0.0004923945412006509, "loss": 1.9048, "step": 1125 }, { "epoch": 0.1099609375, "grad_norm": 0.19512003660202026, "learning_rate": 0.0004923761974469655, "loss": 1.9581, "step": 1126 }, { "epoch": 0.11005859375, "grad_norm": 0.21684713661670685, "learning_rate": 0.0004923578319793237, "loss": 1.8765, "step": 1127 }, { "epoch": 0.11015625, "grad_norm": 0.2229132503271103, "learning_rate": 0.0004923394447995601, "loss": 1.8599, "step": 1128 }, { "epoch": 0.11025390625, "grad_norm": 0.20479273796081543, "learning_rate": 0.0004923210359095115, "loss": 1.938, "step": 1129 }, { "epoch": 0.1103515625, "grad_norm": 0.2477833479642868, "learning_rate": 0.0004923026053110167, "loss": 1.9655, "step": 1130 }, { "epoch": 0.11044921875, "grad_norm": 0.23927846550941467, "learning_rate": 0.0004922841530059167, "loss": 1.9461, "step": 1131 }, { "epoch": 0.110546875, "grad_norm": 0.24189342558383942, "learning_rate": 0.0004922656789960547, "loss": 
1.9338, "step": 1132 }, { "epoch": 0.11064453125, "grad_norm": 0.20208942890167236, "learning_rate": 0.0004922471832832763, "loss": 1.973, "step": 1133 }, { "epoch": 0.1107421875, "grad_norm": 0.20967181026935577, "learning_rate": 0.0004922286658694289, "loss": 1.9391, "step": 1134 }, { "epoch": 0.11083984375, "grad_norm": 0.2538667917251587, "learning_rate": 0.0004922101267563622, "loss": 1.9159, "step": 1135 }, { "epoch": 0.1109375, "grad_norm": 0.2945930063724518, "learning_rate": 0.0004921915659459281, "loss": 1.915, "step": 1136 }, { "epoch": 0.11103515625, "grad_norm": 0.3631344437599182, "learning_rate": 0.0004921729834399808, "loss": 1.9926, "step": 1137 }, { "epoch": 0.1111328125, "grad_norm": 0.32187792658805847, "learning_rate": 0.0004921543792403765, "loss": 1.9137, "step": 1138 }, { "epoch": 0.11123046875, "grad_norm": 0.276929646730423, "learning_rate": 0.0004921357533489734, "loss": 1.9427, "step": 1139 }, { "epoch": 0.111328125, "grad_norm": 0.20455695688724518, "learning_rate": 0.0004921171057676323, "loss": 1.9118, "step": 1140 }, { "epoch": 0.11142578125, "grad_norm": 0.2824450731277466, "learning_rate": 0.0004920984364982157, "loss": 1.9471, "step": 1141 }, { "epoch": 0.1115234375, "grad_norm": 0.2931687831878662, "learning_rate": 0.0004920797455425887, "loss": 1.9546, "step": 1142 }, { "epoch": 0.11162109375, "grad_norm": 0.31234028935432434, "learning_rate": 0.0004920610329026181, "loss": 1.9466, "step": 1143 }, { "epoch": 0.11171875, "grad_norm": 0.39190223813056946, "learning_rate": 0.0004920422985801735, "loss": 1.9394, "step": 1144 }, { "epoch": 0.11181640625, "grad_norm": 0.26808232069015503, "learning_rate": 0.0004920235425771261, "loss": 1.9278, "step": 1145 }, { "epoch": 0.1119140625, "grad_norm": 0.22590310871601105, "learning_rate": 0.0004920047648953493, "loss": 1.9467, "step": 1146 }, { "epoch": 0.11201171875, "grad_norm": 0.2531168758869171, "learning_rate": 0.0004919859655367192, "loss": 1.958, "step": 1147 }, { "epoch": 0.112109375, "grad_norm": 0.207502081990242, "learning_rate": 0.0004919671445031132, "loss": 2.0148, "step": 1148 }, { "epoch": 0.11220703125, "grad_norm": 0.23596623539924622, "learning_rate": 0.0004919483017964118, "loss": 1.9617, "step": 1149 }, { "epoch": 0.1123046875, "grad_norm": 0.25810694694519043, "learning_rate": 0.000491929437418497, "loss": 2.0262, "step": 1150 }, { "epoch": 0.11240234375, "grad_norm": 0.27257680892944336, "learning_rate": 0.0004919105513712533, "loss": 1.9285, "step": 1151 }, { "epoch": 0.1125, "grad_norm": 0.2861190736293793, "learning_rate": 0.000491891643656567, "loss": 1.965, "step": 1152 }, { "epoch": 0.11259765625, "grad_norm": 0.2790696620941162, "learning_rate": 0.0004918727142763271, "loss": 1.9572, "step": 1153 }, { "epoch": 0.1126953125, "grad_norm": 0.35226336121559143, "learning_rate": 0.0004918537632324243, "loss": 1.9338, "step": 1154 }, { "epoch": 0.11279296875, "grad_norm": 0.2735171616077423, "learning_rate": 0.0004918347905267517, "loss": 1.9304, "step": 1155 }, { "epoch": 0.112890625, "grad_norm": 0.2629276514053345, "learning_rate": 0.0004918157961612046, "loss": 1.9383, "step": 1156 }, { "epoch": 0.11298828125, "grad_norm": 0.3583827614784241, "learning_rate": 0.0004917967801376802, "loss": 1.9448, "step": 1157 }, { "epoch": 0.1130859375, "grad_norm": 0.2546387016773224, "learning_rate": 0.0004917777424580781, "loss": 1.9506, "step": 1158 }, { "epoch": 0.11318359375, "grad_norm": 0.35346466302871704, "learning_rate": 0.0004917586831243001, "loss": 1.9207, "step": 1159 }, { "epoch": 
0.11328125, "grad_norm": 0.2943922281265259, "learning_rate": 0.0004917396021382499, "loss": 1.961, "step": 1160 }, { "epoch": 0.11337890625, "grad_norm": 0.24972833693027496, "learning_rate": 0.0004917204995018336, "loss": 1.9271, "step": 1161 }, { "epoch": 0.1134765625, "grad_norm": 0.31200850009918213, "learning_rate": 0.0004917013752169594, "loss": 1.9444, "step": 1162 }, { "epoch": 0.11357421875, "grad_norm": 0.3580279052257538, "learning_rate": 0.0004916822292855375, "loss": 1.9132, "step": 1163 }, { "epoch": 0.113671875, "grad_norm": 0.31269049644470215, "learning_rate": 0.0004916630617094807, "loss": 1.9283, "step": 1164 }, { "epoch": 0.11376953125, "grad_norm": 0.25159773230552673, "learning_rate": 0.0004916438724907033, "loss": 1.9018, "step": 1165 }, { "epoch": 0.1138671875, "grad_norm": 0.22316916286945343, "learning_rate": 0.0004916246616311225, "loss": 1.8821, "step": 1166 }, { "epoch": 0.11396484375, "grad_norm": 0.31150946021080017, "learning_rate": 0.0004916054291326571, "loss": 1.9565, "step": 1167 }, { "epoch": 0.1140625, "grad_norm": 0.29112792015075684, "learning_rate": 0.0004915861749972281, "loss": 1.9582, "step": 1168 }, { "epoch": 0.11416015625, "grad_norm": 0.26860764622688293, "learning_rate": 0.0004915668992267593, "loss": 1.9299, "step": 1169 }, { "epoch": 0.1142578125, "grad_norm": 0.24470628798007965, "learning_rate": 0.0004915476018231757, "loss": 1.9131, "step": 1170 }, { "epoch": 0.11435546875, "grad_norm": 0.26279789209365845, "learning_rate": 0.0004915282827884051, "loss": 1.9055, "step": 1171 }, { "epoch": 0.114453125, "grad_norm": 0.21272587776184082, "learning_rate": 0.0004915089421243773, "loss": 1.9694, "step": 1172 }, { "epoch": 0.11455078125, "grad_norm": 0.21711157262325287, "learning_rate": 0.0004914895798330244, "loss": 1.9644, "step": 1173 }, { "epoch": 0.1146484375, "grad_norm": 0.21599583327770233, "learning_rate": 0.0004914701959162803, "loss": 1.9528, "step": 1174 }, { "epoch": 0.11474609375, "grad_norm": 0.22919407486915588, "learning_rate": 0.0004914507903760814, "loss": 1.9353, "step": 1175 }, { "epoch": 0.11484375, "grad_norm": 0.22974969446659088, "learning_rate": 0.0004914313632143661, "loss": 1.9148, "step": 1176 }, { "epoch": 0.11494140625, "grad_norm": 0.2370641529560089, "learning_rate": 0.000491411914433075, "loss": 1.9056, "step": 1177 }, { "epoch": 0.1150390625, "grad_norm": 0.27402329444885254, "learning_rate": 0.000491392444034151, "loss": 1.9483, "step": 1178 }, { "epoch": 0.11513671875, "grad_norm": 0.2953847050666809, "learning_rate": 0.0004913729520195388, "loss": 1.9553, "step": 1179 }, { "epoch": 0.115234375, "grad_norm": 0.26085543632507324, "learning_rate": 0.0004913534383911857, "loss": 1.9307, "step": 1180 }, { "epoch": 0.11533203125, "grad_norm": 0.2782340943813324, "learning_rate": 0.0004913339031510408, "loss": 1.9733, "step": 1181 }, { "epoch": 0.1154296875, "grad_norm": 0.2568228244781494, "learning_rate": 0.0004913143463010554, "loss": 1.9402, "step": 1182 }, { "epoch": 0.11552734375, "grad_norm": 0.2584892809391022, "learning_rate": 0.0004912947678431833, "loss": 1.9254, "step": 1183 }, { "epoch": 0.115625, "grad_norm": 0.2621140778064728, "learning_rate": 0.00049127516777938, "loss": 1.9323, "step": 1184 }, { "epoch": 0.11572265625, "grad_norm": 0.28239843249320984, "learning_rate": 0.0004912555461116035, "loss": 1.9447, "step": 1185 }, { "epoch": 0.1158203125, "grad_norm": 0.3147210478782654, "learning_rate": 0.0004912359028418138, "loss": 1.8867, "step": 1186 }, { "epoch": 0.11591796875, "grad_norm": 
0.25541990995407104, "learning_rate": 0.0004912162379719731, "loss": 1.871, "step": 1187 }, { "epoch": 0.116015625, "grad_norm": 0.24328139424324036, "learning_rate": 0.0004911965515040457, "loss": 1.9316, "step": 1188 }, { "epoch": 0.11611328125, "grad_norm": 0.20230042934417725, "learning_rate": 0.0004911768434399981, "loss": 1.9298, "step": 1189 }, { "epoch": 0.1162109375, "grad_norm": 0.32389941811561584, "learning_rate": 0.0004911571137817991, "loss": 1.8941, "step": 1190 }, { "epoch": 0.11630859375, "grad_norm": 0.3173327147960663, "learning_rate": 0.0004911373625314194, "loss": 1.9222, "step": 1191 }, { "epoch": 0.11640625, "grad_norm": 0.32637453079223633, "learning_rate": 0.0004911175896908319, "loss": 1.9273, "step": 1192 }, { "epoch": 0.11650390625, "grad_norm": 0.307746022939682, "learning_rate": 0.0004910977952620119, "loss": 1.9352, "step": 1193 }, { "epoch": 0.1166015625, "grad_norm": 0.21165402233600616, "learning_rate": 0.0004910779792469365, "loss": 1.8867, "step": 1194 }, { "epoch": 0.11669921875, "grad_norm": 0.3059009909629822, "learning_rate": 0.0004910581416475854, "loss": 1.9187, "step": 1195 }, { "epoch": 0.116796875, "grad_norm": 0.2690452039241791, "learning_rate": 0.00049103828246594, "loss": 1.9723, "step": 1196 }, { "epoch": 0.11689453125, "grad_norm": 0.25487321615219116, "learning_rate": 0.0004910184017039841, "loss": 1.9525, "step": 1197 }, { "epoch": 0.1169921875, "grad_norm": 0.33227404952049255, "learning_rate": 0.0004909984993637036, "loss": 1.9062, "step": 1198 }, { "epoch": 0.11708984375, "grad_norm": 0.32993388175964355, "learning_rate": 0.0004909785754470866, "loss": 1.9327, "step": 1199 }, { "epoch": 0.1171875, "grad_norm": 0.28574901819229126, "learning_rate": 0.0004909586299561233, "loss": 1.9481, "step": 1200 }, { "epoch": 0.11728515625, "grad_norm": 0.27981141209602356, "learning_rate": 0.0004909386628928061, "loss": 1.9284, "step": 1201 }, { "epoch": 0.1173828125, "grad_norm": 0.26131895184516907, "learning_rate": 0.0004909186742591296, "loss": 1.9276, "step": 1202 }, { "epoch": 0.11748046875, "grad_norm": 0.3690519630908966, "learning_rate": 0.0004908986640570903, "loss": 1.8733, "step": 1203 }, { "epoch": 0.117578125, "grad_norm": 0.38982152938842773, "learning_rate": 0.000490878632288687, "loss": 1.9204, "step": 1204 }, { "epoch": 0.11767578125, "grad_norm": 0.2588847875595093, "learning_rate": 0.000490858578955921, "loss": 1.8941, "step": 1205 }, { "epoch": 0.1177734375, "grad_norm": 0.3161073625087738, "learning_rate": 0.0004908385040607954, "loss": 1.9077, "step": 1206 }, { "epoch": 0.11787109375, "grad_norm": 0.26444011926651, "learning_rate": 0.0004908184076053153, "loss": 1.9471, "step": 1207 }, { "epoch": 0.11796875, "grad_norm": 0.28895479440689087, "learning_rate": 0.0004907982895914883, "loss": 1.9159, "step": 1208 }, { "epoch": 0.11806640625, "grad_norm": 0.264992356300354, "learning_rate": 0.000490778150021324, "loss": 1.9383, "step": 1209 }, { "epoch": 0.1181640625, "grad_norm": 0.25046393275260925, "learning_rate": 0.0004907579888968341, "loss": 1.9756, "step": 1210 }, { "epoch": 0.11826171875, "grad_norm": 0.2875134348869324, "learning_rate": 0.0004907378062200325, "loss": 1.9368, "step": 1211 }, { "epoch": 0.118359375, "grad_norm": 0.24127423763275146, "learning_rate": 0.0004907176019929353, "loss": 1.9399, "step": 1212 }, { "epoch": 0.11845703125, "grad_norm": 0.25520575046539307, "learning_rate": 0.0004906973762175608, "loss": 1.9662, "step": 1213 }, { "epoch": 0.1185546875, "grad_norm": 0.23595774173736572, 
"learning_rate": 0.0004906771288959293, "loss": 1.9302, "step": 1214 }, { "epoch": 0.11865234375, "grad_norm": 0.2919490337371826, "learning_rate": 0.0004906568600300635, "loss": 1.9421, "step": 1215 }, { "epoch": 0.11875, "grad_norm": 0.23202334344387054, "learning_rate": 0.0004906365696219877, "loss": 1.8934, "step": 1216 }, { "epoch": 0.11884765625, "grad_norm": 0.2639678716659546, "learning_rate": 0.0004906162576737292, "loss": 1.9272, "step": 1217 }, { "epoch": 0.1189453125, "grad_norm": 0.24962055683135986, "learning_rate": 0.0004905959241873165, "loss": 1.9605, "step": 1218 }, { "epoch": 0.11904296875, "grad_norm": 0.2519899308681488, "learning_rate": 0.0004905755691647812, "loss": 1.9757, "step": 1219 }, { "epoch": 0.119140625, "grad_norm": 0.2044687420129776, "learning_rate": 0.0004905551926081563, "loss": 1.9817, "step": 1220 }, { "epoch": 0.11923828125, "grad_norm": 0.21942391991615295, "learning_rate": 0.0004905347945194772, "loss": 1.9705, "step": 1221 }, { "epoch": 0.1193359375, "grad_norm": 0.26614969968795776, "learning_rate": 0.0004905143749007817, "loss": 1.9375, "step": 1222 }, { "epoch": 0.11943359375, "grad_norm": 0.23212748765945435, "learning_rate": 0.0004904939337541093, "loss": 1.9728, "step": 1223 }, { "epoch": 0.11953125, "grad_norm": 0.28004127740859985, "learning_rate": 0.0004904734710815022, "loss": 1.9082, "step": 1224 }, { "epoch": 0.11962890625, "grad_norm": 0.3012614846229553, "learning_rate": 0.0004904529868850042, "loss": 1.9347, "step": 1225 }, { "epoch": 0.1197265625, "grad_norm": 0.2914253771305084, "learning_rate": 0.0004904324811666615, "loss": 1.9435, "step": 1226 }, { "epoch": 0.11982421875, "grad_norm": 0.2315436154603958, "learning_rate": 0.0004904119539285225, "loss": 1.9503, "step": 1227 }, { "epoch": 0.119921875, "grad_norm": 0.2043173611164093, "learning_rate": 0.0004903914051726377, "loss": 1.937, "step": 1228 }, { "epoch": 0.12001953125, "grad_norm": 0.26184800267219543, "learning_rate": 0.0004903708349010598, "loss": 1.9459, "step": 1229 }, { "epoch": 0.1201171875, "grad_norm": 0.24898020923137665, "learning_rate": 0.0004903502431158434, "loss": 1.935, "step": 1230 }, { "epoch": 0.12021484375, "grad_norm": 0.29706010222435, "learning_rate": 0.0004903296298190456, "loss": 1.9741, "step": 1231 }, { "epoch": 0.1203125, "grad_norm": 0.3235405385494232, "learning_rate": 0.0004903089950127254, "loss": 1.9319, "step": 1232 }, { "epoch": 0.12041015625, "grad_norm": 0.25629666447639465, "learning_rate": 0.0004902883386989441, "loss": 1.9533, "step": 1233 }, { "epoch": 0.1205078125, "grad_norm": 0.23315580189228058, "learning_rate": 0.0004902676608797649, "loss": 1.9033, "step": 1234 }, { "epoch": 0.12060546875, "grad_norm": 0.2336481362581253, "learning_rate": 0.0004902469615572537, "loss": 1.9123, "step": 1235 }, { "epoch": 0.120703125, "grad_norm": 0.27162161469459534, "learning_rate": 0.0004902262407334779, "loss": 1.9253, "step": 1236 }, { "epoch": 0.12080078125, "grad_norm": 0.2913827896118164, "learning_rate": 0.0004902054984105073, "loss": 1.9526, "step": 1237 }, { "epoch": 0.1208984375, "grad_norm": 0.30284538865089417, "learning_rate": 0.0004901847345904141, "loss": 1.9588, "step": 1238 }, { "epoch": 0.12099609375, "grad_norm": 0.2579716742038727, "learning_rate": 0.000490163949275272, "loss": 1.892, "step": 1239 }, { "epoch": 0.12109375, "grad_norm": 0.21740810573101044, "learning_rate": 0.0004901431424671577, "loss": 1.9584, "step": 1240 }, { "epoch": 0.12119140625, "grad_norm": 0.3371858298778534, "learning_rate": 
0.0004901223141681496, "loss": 1.9415, "step": 1241 }, { "epoch": 0.1212890625, "grad_norm": 0.34596753120422363, "learning_rate": 0.000490101464380328, "loss": 1.9384, "step": 1242 }, { "epoch": 0.12138671875, "grad_norm": 0.2591091990470886, "learning_rate": 0.0004900805931057757, "loss": 1.9433, "step": 1243 }, { "epoch": 0.121484375, "grad_norm": 0.27759140729904175, "learning_rate": 0.0004900597003465777, "loss": 1.9124, "step": 1244 }, { "epoch": 0.12158203125, "grad_norm": 0.2503783106803894, "learning_rate": 0.0004900387861048208, "loss": 1.9426, "step": 1245 }, { "epoch": 0.1216796875, "grad_norm": 0.36085277795791626, "learning_rate": 0.0004900178503825942, "loss": 1.9141, "step": 1246 }, { "epoch": 0.12177734375, "grad_norm": 0.2831665873527527, "learning_rate": 0.0004899968931819893, "loss": 1.9515, "step": 1247 }, { "epoch": 0.121875, "grad_norm": 0.26685988903045654, "learning_rate": 0.0004899759145050996, "loss": 1.9752, "step": 1248 }, { "epoch": 0.12197265625, "grad_norm": 0.3139730393886566, "learning_rate": 0.0004899549143540205, "loss": 1.9298, "step": 1249 }, { "epoch": 0.1220703125, "grad_norm": 0.20647141337394714, "learning_rate": 0.0004899338927308497, "loss": 1.9233, "step": 1250 }, { "epoch": 0.12216796875, "grad_norm": 0.293544203042984, "learning_rate": 0.0004899128496376871, "loss": 1.9534, "step": 1251 }, { "epoch": 0.122265625, "grad_norm": 0.28824836015701294, "learning_rate": 0.0004898917850766349, "loss": 1.96, "step": 1252 }, { "epoch": 0.12236328125, "grad_norm": 0.23198992013931274, "learning_rate": 0.0004898706990497971, "loss": 1.9275, "step": 1253 }, { "epoch": 0.1224609375, "grad_norm": 0.27970659732818604, "learning_rate": 0.00048984959155928, "loss": 1.8998, "step": 1254 }, { "epoch": 0.12255859375, "grad_norm": 0.2355661541223526, "learning_rate": 0.0004898284626071921, "loss": 1.9313, "step": 1255 }, { "epoch": 0.12265625, "grad_norm": 0.3456537127494812, "learning_rate": 0.0004898073121956441, "loss": 1.9164, "step": 1256 }, { "epoch": 0.12275390625, "grad_norm": 0.32490289211273193, "learning_rate": 0.0004897861403267486, "loss": 1.9568, "step": 1257 }, { "epoch": 0.1228515625, "grad_norm": 0.37740960717201233, "learning_rate": 0.0004897649470026205, "loss": 1.9618, "step": 1258 }, { "epoch": 0.12294921875, "grad_norm": 0.34424281120300293, "learning_rate": 0.0004897437322253767, "loss": 1.9192, "step": 1259 }, { "epoch": 0.123046875, "grad_norm": 0.1956419050693512, "learning_rate": 0.0004897224959971368, "loss": 1.9128, "step": 1260 }, { "epoch": 0.12314453125, "grad_norm": 0.30533185601234436, "learning_rate": 0.0004897012383200216, "loss": 1.9378, "step": 1261 }, { "epoch": 0.1232421875, "grad_norm": 0.28745076060295105, "learning_rate": 0.0004896799591961548, "loss": 1.9734, "step": 1262 }, { "epoch": 0.12333984375, "grad_norm": 0.3063562512397766, "learning_rate": 0.000489658658627662, "loss": 1.9713, "step": 1263 }, { "epoch": 0.1234375, "grad_norm": 0.26775872707366943, "learning_rate": 0.000489637336616671, "loss": 1.9305, "step": 1264 }, { "epoch": 0.12353515625, "grad_norm": 0.2045804262161255, "learning_rate": 0.0004896159931653114, "loss": 1.9018, "step": 1265 }, { "epoch": 0.1236328125, "grad_norm": 0.26752471923828125, "learning_rate": 0.0004895946282757155, "loss": 1.9334, "step": 1266 }, { "epoch": 0.12373046875, "grad_norm": 0.3504626452922821, "learning_rate": 0.0004895732419500174, "loss": 1.9678, "step": 1267 }, { "epoch": 0.123828125, "grad_norm": 0.2588043808937073, "learning_rate": 0.0004895518341903534, "loss": 
1.9336, "step": 1268 }, { "epoch": 0.12392578125, "grad_norm": 0.23393654823303223, "learning_rate": 0.0004895304049988619, "loss": 1.9386, "step": 1269 }, { "epoch": 0.1240234375, "grad_norm": 0.3162434697151184, "learning_rate": 0.0004895089543776834, "loss": 1.9752, "step": 1270 }, { "epoch": 0.12412109375, "grad_norm": 0.28987085819244385, "learning_rate": 0.0004894874823289609, "loss": 1.9346, "step": 1271 }, { "epoch": 0.12421875, "grad_norm": 0.2837126553058624, "learning_rate": 0.000489465988854839, "loss": 1.9375, "step": 1272 }, { "epoch": 0.12431640625, "grad_norm": 0.26402387022972107, "learning_rate": 0.0004894444739574649, "loss": 1.9235, "step": 1273 }, { "epoch": 0.1244140625, "grad_norm": 0.21077674627304077, "learning_rate": 0.0004894229376389875, "loss": 1.9524, "step": 1274 }, { "epoch": 0.12451171875, "grad_norm": 0.18328504264354706, "learning_rate": 0.0004894013799015583, "loss": 1.9496, "step": 1275 }, { "epoch": 0.124609375, "grad_norm": 0.22258712351322174, "learning_rate": 0.0004893798007473307, "loss": 1.9661, "step": 1276 }, { "epoch": 0.12470703125, "grad_norm": 0.20786426961421967, "learning_rate": 0.0004893582001784604, "loss": 1.9263, "step": 1277 }, { "epoch": 0.1248046875, "grad_norm": 0.1959957331418991, "learning_rate": 0.0004893365781971047, "loss": 1.9525, "step": 1278 }, { "epoch": 0.12490234375, "grad_norm": 0.2622356712818146, "learning_rate": 0.0004893149348054238, "loss": 1.9433, "step": 1279 }, { "epoch": 0.125, "grad_norm": 0.2329346388578415, "learning_rate": 0.0004892932700055797, "loss": 1.9293, "step": 1280 }, { "epoch": 0.12509765625, "grad_norm": 0.26422178745269775, "learning_rate": 0.0004892715837997361, "loss": 1.9252, "step": 1281 }, { "epoch": 0.1251953125, "grad_norm": 0.30737435817718506, "learning_rate": 0.0004892498761900597, "loss": 1.9171, "step": 1282 }, { "epoch": 0.12529296875, "grad_norm": 0.30651819705963135, "learning_rate": 0.0004892281471787187, "loss": 1.937, "step": 1283 }, { "epoch": 0.125390625, "grad_norm": 0.23027795553207397, "learning_rate": 0.0004892063967678838, "loss": 1.9491, "step": 1284 }, { "epoch": 0.12548828125, "grad_norm": 0.30681830644607544, "learning_rate": 0.0004891846249597276, "loss": 1.905, "step": 1285 }, { "epoch": 0.1255859375, "grad_norm": 0.34682074189186096, "learning_rate": 0.0004891628317564247, "loss": 1.9157, "step": 1286 }, { "epoch": 0.12568359375, "grad_norm": 0.26123619079589844, "learning_rate": 0.0004891410171601523, "loss": 1.9382, "step": 1287 }, { "epoch": 0.12578125, "grad_norm": 0.32031431794166565, "learning_rate": 0.0004891191811730894, "loss": 1.9234, "step": 1288 }, { "epoch": 0.12587890625, "grad_norm": 0.24917498230934143, "learning_rate": 0.0004890973237974172, "loss": 1.919, "step": 1289 }, { "epoch": 0.1259765625, "grad_norm": 0.29161348938941956, "learning_rate": 0.0004890754450353192, "loss": 1.9405, "step": 1290 }, { "epoch": 0.12607421875, "grad_norm": 0.31046807765960693, "learning_rate": 0.0004890535448889807, "loss": 1.9298, "step": 1291 }, { "epoch": 0.126171875, "grad_norm": 0.30187463760375977, "learning_rate": 0.0004890316233605893, "loss": 1.9329, "step": 1292 }, { "epoch": 0.12626953125, "grad_norm": 0.3013293445110321, "learning_rate": 0.000489009680452335, "loss": 1.9228, "step": 1293 }, { "epoch": 0.1263671875, "grad_norm": 0.2492775321006775, "learning_rate": 0.0004889877161664096, "loss": 1.9565, "step": 1294 }, { "epoch": 0.12646484375, "grad_norm": 0.29035332798957825, "learning_rate": 0.000488965730505007, "loss": 1.9184, "step": 1295 }, { 
"epoch": 0.1265625, "grad_norm": 0.1983688622713089, "learning_rate": 0.0004889437234703236, "loss": 1.97, "step": 1296 }, { "epoch": 0.12666015625, "grad_norm": 0.24074336886405945, "learning_rate": 0.0004889216950645576, "loss": 1.9289, "step": 1297 }, { "epoch": 0.1267578125, "grad_norm": 0.2601425051689148, "learning_rate": 0.0004888996452899093, "loss": 1.9374, "step": 1298 }, { "epoch": 0.12685546875, "grad_norm": 0.21484848856925964, "learning_rate": 0.0004888775741485816, "loss": 1.9075, "step": 1299 }, { "epoch": 0.126953125, "grad_norm": 0.2370765060186386, "learning_rate": 0.0004888554816427789, "loss": 1.9131, "step": 1300 }, { "epoch": 0.12705078125, "grad_norm": 0.22826816141605377, "learning_rate": 0.0004888333677747082, "loss": 1.9514, "step": 1301 }, { "epoch": 0.1271484375, "grad_norm": 0.2141917198896408, "learning_rate": 0.0004888112325465785, "loss": 1.8406, "step": 1302 }, { "epoch": 0.12724609375, "grad_norm": 0.22987738251686096, "learning_rate": 0.0004887890759606008, "loss": 1.9498, "step": 1303 }, { "epoch": 0.12734375, "grad_norm": 0.2592550814151764, "learning_rate": 0.0004887668980189885, "loss": 1.9427, "step": 1304 }, { "epoch": 0.12744140625, "grad_norm": 0.24788032472133636, "learning_rate": 0.0004887446987239567, "loss": 1.9718, "step": 1305 }, { "epoch": 0.1275390625, "grad_norm": 0.3112640380859375, "learning_rate": 0.0004887224780777233, "loss": 1.972, "step": 1306 }, { "epoch": 0.12763671875, "grad_norm": 0.2711445987224579, "learning_rate": 0.0004887002360825077, "loss": 1.9636, "step": 1307 }, { "epoch": 0.127734375, "grad_norm": 0.19959181547164917, "learning_rate": 0.0004886779727405318, "loss": 1.9142, "step": 1308 }, { "epoch": 0.12783203125, "grad_norm": 0.24054476618766785, "learning_rate": 0.0004886556880540192, "loss": 1.8724, "step": 1309 }, { "epoch": 0.1279296875, "grad_norm": 0.21965253353118896, "learning_rate": 0.0004886333820251963, "loss": 1.9295, "step": 1310 }, { "epoch": 0.12802734375, "grad_norm": 0.2383204847574234, "learning_rate": 0.0004886110546562912, "loss": 1.8945, "step": 1311 }, { "epoch": 0.128125, "grad_norm": 0.20343053340911865, "learning_rate": 0.0004885887059495342, "loss": 1.9364, "step": 1312 }, { "epoch": 0.12822265625, "grad_norm": 0.24133317172527313, "learning_rate": 0.0004885663359071576, "loss": 1.9023, "step": 1313 }, { "epoch": 0.1283203125, "grad_norm": 0.27090492844581604, "learning_rate": 0.0004885439445313962, "loss": 1.9007, "step": 1314 }, { "epoch": 0.12841796875, "grad_norm": 0.2153930515050888, "learning_rate": 0.0004885215318244865, "loss": 1.9475, "step": 1315 }, { "epoch": 0.128515625, "grad_norm": 0.3661288022994995, "learning_rate": 0.0004884990977886673, "loss": 1.9858, "step": 1316 }, { "epoch": 0.12861328125, "grad_norm": 0.4454726576805115, "learning_rate": 0.0004884766424261798, "loss": 1.9207, "step": 1317 }, { "epoch": 0.1287109375, "grad_norm": 0.4249671995639801, "learning_rate": 0.000488454165739267, "loss": 1.9889, "step": 1318 }, { "epoch": 0.12880859375, "grad_norm": 0.3520338237285614, "learning_rate": 0.0004884316677301739, "loss": 1.9564, "step": 1319 }, { "epoch": 0.12890625, "grad_norm": 0.28052136301994324, "learning_rate": 0.0004884091484011482, "loss": 1.9677, "step": 1320 }, { "epoch": 0.12900390625, "grad_norm": 0.31618887186050415, "learning_rate": 0.0004883866077544391, "loss": 1.9306, "step": 1321 }, { "epoch": 0.1291015625, "grad_norm": 0.29120492935180664, "learning_rate": 0.0004883640457922984, "loss": 1.9205, "step": 1322 }, { "epoch": 0.12919921875, 
"grad_norm": 0.23774267733097076, "learning_rate": 0.0004883414625169797, "loss": 1.9078, "step": 1323 }, { "epoch": 0.129296875, "grad_norm": 0.3183814287185669, "learning_rate": 0.0004883188579307389, "loss": 1.7927, "step": 1324 }, { "epoch": 0.12939453125, "grad_norm": 0.2845805585384369, "learning_rate": 0.0004882962320358341, "loss": 1.888, "step": 1325 }, { "epoch": 0.1294921875, "grad_norm": 0.2877943515777588, "learning_rate": 0.00048827358483452527, "loss": 1.9307, "step": 1326 }, { "epoch": 0.12958984375, "grad_norm": 0.2753991186618805, "learning_rate": 0.0004882509163290748, "loss": 1.9306, "step": 1327 }, { "epoch": 0.1296875, "grad_norm": 0.2753439247608185, "learning_rate": 0.00048822822652174705, "loss": 1.9573, "step": 1328 }, { "epoch": 0.12978515625, "grad_norm": 0.2690608501434326, "learning_rate": 0.00048820551541480845, "loss": 1.9229, "step": 1329 }, { "epoch": 0.1298828125, "grad_norm": 0.2276460975408554, "learning_rate": 0.00048818278301052776, "loss": 1.9871, "step": 1330 }, { "epoch": 0.12998046875, "grad_norm": 0.23562243580818176, "learning_rate": 0.00048816002931117557, "loss": 1.9489, "step": 1331 }, { "epoch": 0.130078125, "grad_norm": 0.27421703934669495, "learning_rate": 0.00048813725431902496, "loss": 1.922, "step": 1332 }, { "epoch": 0.13017578125, "grad_norm": 0.1969674974679947, "learning_rate": 0.00048811445803635087, "loss": 1.929, "step": 1333 }, { "epoch": 0.1302734375, "grad_norm": 0.22562848031520844, "learning_rate": 0.00048809164046543043, "loss": 1.9183, "step": 1334 }, { "epoch": 0.13037109375, "grad_norm": 0.2235979586839676, "learning_rate": 0.0004880688016085429, "loss": 1.9155, "step": 1335 }, { "epoch": 0.13046875, "grad_norm": 0.23072493076324463, "learning_rate": 0.00048804594146796977, "loss": 1.9647, "step": 1336 }, { "epoch": 0.13056640625, "grad_norm": 0.2528524398803711, "learning_rate": 0.00048802306004599446, "loss": 1.979, "step": 1337 }, { "epoch": 0.1306640625, "grad_norm": 0.2586860954761505, "learning_rate": 0.00048800015734490267, "loss": 1.9371, "step": 1338 }, { "epoch": 0.13076171875, "grad_norm": 0.21609428524971008, "learning_rate": 0.0004879772333669822, "loss": 1.9319, "step": 1339 }, { "epoch": 0.130859375, "grad_norm": 0.28810006380081177, "learning_rate": 0.0004879542881145229, "loss": 1.9218, "step": 1340 }, { "epoch": 0.13095703125, "grad_norm": 0.288849413394928, "learning_rate": 0.0004879313215898168, "loss": 1.929, "step": 1341 }, { "epoch": 0.1310546875, "grad_norm": 0.3168569803237915, "learning_rate": 0.00048790833379515813, "loss": 1.9276, "step": 1342 }, { "epoch": 0.13115234375, "grad_norm": 0.3000844120979309, "learning_rate": 0.00048788532473284307, "loss": 1.9337, "step": 1343 }, { "epoch": 0.13125, "grad_norm": 0.2524905204772949, "learning_rate": 0.00048786229440517004, "loss": 1.9105, "step": 1344 }, { "epoch": 0.13134765625, "grad_norm": 0.25065815448760986, "learning_rate": 0.0004878392428144396, "loss": 1.9161, "step": 1345 }, { "epoch": 0.1314453125, "grad_norm": 0.3235273063182831, "learning_rate": 0.00048781616996295443, "loss": 1.9159, "step": 1346 }, { "epoch": 0.13154296875, "grad_norm": 0.23726877570152283, "learning_rate": 0.0004877930758530192, "loss": 1.9063, "step": 1347 }, { "epoch": 0.131640625, "grad_norm": 0.2991824150085449, "learning_rate": 0.000487769960486941, "loss": 1.9885, "step": 1348 }, { "epoch": 0.13173828125, "grad_norm": 0.24819223582744598, "learning_rate": 0.0004877468238670287, "loss": 1.9311, "step": 1349 }, { "epoch": 0.1318359375, "grad_norm": 
0.23198595643043518, "learning_rate": 0.00048772366599559333, "loss": 1.9278, "step": 1350 }, { "epoch": 0.13193359375, "grad_norm": 0.3012297451496124, "learning_rate": 0.0004877004868749483, "loss": 1.9258, "step": 1351 }, { "epoch": 0.13203125, "grad_norm": 0.21619653701782227, "learning_rate": 0.0004876772865074091, "loss": 1.9121, "step": 1352 }, { "epoch": 0.13212890625, "grad_norm": 0.24853789806365967, "learning_rate": 0.00048765406489529315, "loss": 1.9533, "step": 1353 }, { "epoch": 0.1322265625, "grad_norm": 0.33258283138275146, "learning_rate": 0.00048763082204091995, "loss": 1.9424, "step": 1354 }, { "epoch": 0.13232421875, "grad_norm": 0.3056190609931946, "learning_rate": 0.00048760755794661137, "loss": 1.9605, "step": 1355 }, { "epoch": 0.132421875, "grad_norm": 0.20714829862117767, "learning_rate": 0.00048758427261469135, "loss": 1.9416, "step": 1356 }, { "epoch": 0.13251953125, "grad_norm": 0.2573312520980835, "learning_rate": 0.0004875609660474857, "loss": 1.9198, "step": 1357 }, { "epoch": 0.1326171875, "grad_norm": 0.24284306168556213, "learning_rate": 0.0004875376382473228, "loss": 1.9308, "step": 1358 }, { "epoch": 0.13271484375, "grad_norm": 0.20341083407402039, "learning_rate": 0.0004875142892165327, "loss": 1.8933, "step": 1359 }, { "epoch": 0.1328125, "grad_norm": 0.3155312240123749, "learning_rate": 0.0004874909189574478, "loss": 1.8825, "step": 1360 }, { "epoch": 0.13291015625, "grad_norm": 0.2829397916793823, "learning_rate": 0.0004874675274724026, "loss": 1.9229, "step": 1361 }, { "epoch": 0.1330078125, "grad_norm": 0.34132909774780273, "learning_rate": 0.00048744411476373374, "loss": 1.8935, "step": 1362 }, { "epoch": 0.13310546875, "grad_norm": 0.41804420948028564, "learning_rate": 0.0004874206808337799, "loss": 1.9241, "step": 1363 }, { "epoch": 0.133203125, "grad_norm": 0.3885767161846161, "learning_rate": 0.0004873972256848819, "loss": 1.9391, "step": 1364 }, { "epoch": 0.13330078125, "grad_norm": 0.3422619104385376, "learning_rate": 0.00048737374931938273, "loss": 1.9113, "step": 1365 }, { "epoch": 0.1333984375, "grad_norm": 0.33838555216789246, "learning_rate": 0.00048735025173962746, "loss": 1.8967, "step": 1366 }, { "epoch": 0.13349609375, "grad_norm": 0.3656925857067108, "learning_rate": 0.0004873267329479633, "loss": 1.9405, "step": 1367 }, { "epoch": 0.13359375, "grad_norm": 0.3086741268634796, "learning_rate": 0.00048730319294673955, "loss": 1.9518, "step": 1368 }, { "epoch": 0.13369140625, "grad_norm": 0.24022363126277924, "learning_rate": 0.0004872796317383077, "loss": 1.9368, "step": 1369 }, { "epoch": 0.1337890625, "grad_norm": 0.2861846089363098, "learning_rate": 0.00048725604932502116, "loss": 1.9186, "step": 1370 }, { "epoch": 0.13388671875, "grad_norm": 0.29110145568847656, "learning_rate": 0.0004872324457092358, "loss": 1.8963, "step": 1371 }, { "epoch": 0.133984375, "grad_norm": 0.2232580929994583, "learning_rate": 0.0004872088208933093, "loss": 1.929, "step": 1372 }, { "epoch": 0.13408203125, "grad_norm": 0.3331020772457123, "learning_rate": 0.0004871851748796016, "loss": 1.936, "step": 1373 }, { "epoch": 0.1341796875, "grad_norm": 0.2353833019733429, "learning_rate": 0.00048716150767047473, "loss": 1.9047, "step": 1374 }, { "epoch": 0.13427734375, "grad_norm": 0.25953710079193115, "learning_rate": 0.0004871378192682928, "loss": 1.9812, "step": 1375 }, { "epoch": 0.134375, "grad_norm": 0.24382802844047546, "learning_rate": 0.0004871141096754222, "loss": 1.9599, "step": 1376 }, { "epoch": 0.13447265625, "grad_norm": 0.2496114820241928, 
"learning_rate": 0.00048709037889423107, "loss": 1.9094, "step": 1377 }, { "epoch": 0.1345703125, "grad_norm": 0.26899224519729614, "learning_rate": 0.0004870666269270901, "loss": 1.9467, "step": 1378 }, { "epoch": 0.13466796875, "grad_norm": 0.23490363359451294, "learning_rate": 0.00048704285377637184, "loss": 1.9682, "step": 1379 }, { "epoch": 0.134765625, "grad_norm": 0.23190240561962128, "learning_rate": 0.000487019059444451, "loss": 1.9324, "step": 1380 }, { "epoch": 0.13486328125, "grad_norm": 0.2239055186510086, "learning_rate": 0.0004869952439337045, "loss": 1.995, "step": 1381 }, { "epoch": 0.1349609375, "grad_norm": 0.23679253458976746, "learning_rate": 0.00048697140724651114, "loss": 1.8685, "step": 1382 }, { "epoch": 0.13505859375, "grad_norm": 0.2584695816040039, "learning_rate": 0.0004869475493852521, "loss": 1.9421, "step": 1383 }, { "epoch": 0.13515625, "grad_norm": 0.219711571931839, "learning_rate": 0.0004869236703523106, "loss": 1.9326, "step": 1384 }, { "epoch": 0.13525390625, "grad_norm": 0.2176481932401657, "learning_rate": 0.00048689977015007197, "loss": 1.8959, "step": 1385 }, { "epoch": 0.1353515625, "grad_norm": 0.22419661283493042, "learning_rate": 0.0004868758487809235, "loss": 1.9095, "step": 1386 }, { "epoch": 0.13544921875, "grad_norm": 0.255984365940094, "learning_rate": 0.0004868519062472548, "loss": 1.9756, "step": 1387 }, { "epoch": 0.135546875, "grad_norm": 0.217428520321846, "learning_rate": 0.0004868279425514575, "loss": 1.9481, "step": 1388 }, { "epoch": 0.13564453125, "grad_norm": 0.19990497827529907, "learning_rate": 0.00048680395769592534, "loss": 1.9279, "step": 1389 }, { "epoch": 0.1357421875, "grad_norm": 0.2209380716085434, "learning_rate": 0.00048677995168305426, "loss": 1.9148, "step": 1390 }, { "epoch": 0.13583984375, "grad_norm": 0.25395241379737854, "learning_rate": 0.00048675592451524226, "loss": 1.9688, "step": 1391 }, { "epoch": 0.1359375, "grad_norm": 0.1944066882133484, "learning_rate": 0.0004867318761948892, "loss": 1.9005, "step": 1392 }, { "epoch": 0.13603515625, "grad_norm": 0.22825607657432556, "learning_rate": 0.0004867078067243977, "loss": 1.9538, "step": 1393 }, { "epoch": 0.1361328125, "grad_norm": 0.23758959770202637, "learning_rate": 0.0004866837161061717, "loss": 1.9887, "step": 1394 }, { "epoch": 0.13623046875, "grad_norm": 0.26341712474823, "learning_rate": 0.00048665960434261796, "loss": 1.9217, "step": 1395 }, { "epoch": 0.136328125, "grad_norm": 0.283316045999527, "learning_rate": 0.0004866354714361448, "loss": 1.9576, "step": 1396 }, { "epoch": 0.13642578125, "grad_norm": 0.32178181409835815, "learning_rate": 0.000486611317389163, "loss": 1.9651, "step": 1397 }, { "epoch": 0.1365234375, "grad_norm": 0.4065782129764557, "learning_rate": 0.0004865871422040853, "loss": 1.897, "step": 1398 }, { "epoch": 0.13662109375, "grad_norm": 0.3661766052246094, "learning_rate": 0.00048656294588332653, "loss": 1.9472, "step": 1399 }, { "epoch": 0.13671875, "grad_norm": 0.24434155225753784, "learning_rate": 0.0004865387284293038, "loss": 1.9179, "step": 1400 }, { "epoch": 0.13681640625, "grad_norm": 0.4203675091266632, "learning_rate": 0.0004865144898444361, "loss": 1.9583, "step": 1401 }, { "epoch": 0.1369140625, "grad_norm": 0.4013305902481079, "learning_rate": 0.0004864902301311448, "loss": 1.9322, "step": 1402 }, { "epoch": 0.13701171875, "grad_norm": 0.30299681425094604, "learning_rate": 0.00048646594929185313, "loss": 1.9152, "step": 1403 }, { "epoch": 0.137109375, "grad_norm": 0.30768755078315735, "learning_rate": 
0.0004864416473289865, "loss": 1.966, "step": 1404 }, { "epoch": 0.13720703125, "grad_norm": 0.2759581506252289, "learning_rate": 0.00048641732424497256, "loss": 1.9211, "step": 1405 }, { "epoch": 0.1373046875, "grad_norm": 0.2976212799549103, "learning_rate": 0.00048639298004224087, "loss": 1.9376, "step": 1406 }, { "epoch": 0.13740234375, "grad_norm": 0.21789424121379852, "learning_rate": 0.0004863686147232232, "loss": 1.994, "step": 1407 }, { "epoch": 0.1375, "grad_norm": 0.2543160617351532, "learning_rate": 0.0004863442282903535, "loss": 1.9153, "step": 1408 }, { "epoch": 0.13759765625, "grad_norm": 0.2789456248283386, "learning_rate": 0.0004863198207460678, "loss": 1.9238, "step": 1409 }, { "epoch": 0.1376953125, "grad_norm": 0.21548853814601898, "learning_rate": 0.0004862953920928041, "loss": 1.9003, "step": 1410 }, { "epoch": 0.13779296875, "grad_norm": 0.2607603967189789, "learning_rate": 0.00048627094233300266, "loss": 1.951, "step": 1411 }, { "epoch": 0.137890625, "grad_norm": 0.21658223867416382, "learning_rate": 0.00048624647146910575, "loss": 1.9818, "step": 1412 }, { "epoch": 0.13798828125, "grad_norm": 0.24333196878433228, "learning_rate": 0.00048622197950355786, "loss": 1.8933, "step": 1413 }, { "epoch": 0.1380859375, "grad_norm": 0.22348099946975708, "learning_rate": 0.0004861974664388054, "loss": 1.9302, "step": 1414 }, { "epoch": 0.13818359375, "grad_norm": 0.1976805180311203, "learning_rate": 0.0004861729322772971, "loss": 1.9044, "step": 1415 }, { "epoch": 0.13828125, "grad_norm": 0.22617708146572113, "learning_rate": 0.00048614837702148374, "loss": 1.9445, "step": 1416 }, { "epoch": 0.13837890625, "grad_norm": 0.2243851125240326, "learning_rate": 0.0004861238006738181, "loss": 1.9215, "step": 1417 }, { "epoch": 0.1384765625, "grad_norm": 0.21161694824695587, "learning_rate": 0.0004860992032367551, "loss": 1.9443, "step": 1418 }, { "epoch": 0.13857421875, "grad_norm": 0.22872799634933472, "learning_rate": 0.0004860745847127519, "loss": 1.8174, "step": 1419 }, { "epoch": 0.138671875, "grad_norm": 0.27401795983314514, "learning_rate": 0.0004860499451042677, "loss": 1.919, "step": 1420 }, { "epoch": 0.13876953125, "grad_norm": 0.2309410721063614, "learning_rate": 0.00048602528441376365, "loss": 1.9025, "step": 1421 }, { "epoch": 0.1388671875, "grad_norm": 0.21750116348266602, "learning_rate": 0.0004860006026437033, "loss": 1.9242, "step": 1422 }, { "epoch": 0.13896484375, "grad_norm": 0.24858328700065613, "learning_rate": 0.0004859758997965519, "loss": 1.9562, "step": 1423 }, { "epoch": 0.1390625, "grad_norm": 0.23306035995483398, "learning_rate": 0.00048595117587477724, "loss": 1.9504, "step": 1424 }, { "epoch": 0.13916015625, "grad_norm": 0.24605897068977356, "learning_rate": 0.00048592643088084904, "loss": 1.9502, "step": 1425 }, { "epoch": 0.1392578125, "grad_norm": 0.23687416315078735, "learning_rate": 0.0004859016648172389, "loss": 1.9035, "step": 1426 }, { "epoch": 0.13935546875, "grad_norm": 0.33239808678627014, "learning_rate": 0.00048587687768642104, "loss": 1.9469, "step": 1427 }, { "epoch": 0.139453125, "grad_norm": 0.3443644344806671, "learning_rate": 0.00048585206949087117, "loss": 1.9126, "step": 1428 }, { "epoch": 0.13955078125, "grad_norm": 0.4044901430606842, "learning_rate": 0.0004858272402330676, "loss": 1.928, "step": 1429 }, { "epoch": 0.1396484375, "grad_norm": 0.3553960919380188, "learning_rate": 0.0004858023899154905, "loss": 1.9782, "step": 1430 }, { "epoch": 0.13974609375, "grad_norm": 0.24459035694599152, "learning_rate": 
0.00048577751854062207, "loss": 1.9228, "step": 1431 }, { "epoch": 0.13984375, "grad_norm": 0.26586851477622986, "learning_rate": 0.000485752626110947, "loss": 1.9607, "step": 1432 }, { "epoch": 0.13994140625, "grad_norm": 0.2949119210243225, "learning_rate": 0.0004857277126289516, "loss": 1.9418, "step": 1433 }, { "epoch": 0.1400390625, "grad_norm": 0.23309734463691711, "learning_rate": 0.0004857027780971246, "loss": 1.9036, "step": 1434 }, { "epoch": 0.14013671875, "grad_norm": 0.27574148774147034, "learning_rate": 0.00048567782251795674, "loss": 1.883, "step": 1435 }, { "epoch": 0.140234375, "grad_norm": 0.2791823446750641, "learning_rate": 0.0004856528458939409, "loss": 1.9021, "step": 1436 }, { "epoch": 0.14033203125, "grad_norm": 0.3588283061981201, "learning_rate": 0.00048562784822757187, "loss": 1.9417, "step": 1437 }, { "epoch": 0.1404296875, "grad_norm": 0.3069411814212799, "learning_rate": 0.00048560282952134687, "loss": 1.9055, "step": 1438 }, { "epoch": 0.14052734375, "grad_norm": 0.3290352523326874, "learning_rate": 0.00048557778977776495, "loss": 1.9674, "step": 1439 }, { "epoch": 0.140625, "grad_norm": 0.21290095150470734, "learning_rate": 0.0004855527289993274, "loss": 1.9099, "step": 1440 }, { "epoch": 0.14072265625, "grad_norm": 0.28244757652282715, "learning_rate": 0.00048552764718853755, "loss": 1.9821, "step": 1441 }, { "epoch": 0.1408203125, "grad_norm": 0.2707923948764801, "learning_rate": 0.0004855025443479008, "loss": 1.92, "step": 1442 }, { "epoch": 0.14091796875, "grad_norm": 0.2617279589176178, "learning_rate": 0.0004854774204799248, "loss": 1.9425, "step": 1443 }, { "epoch": 0.141015625, "grad_norm": 0.2797471880912781, "learning_rate": 0.00048545227558711914, "loss": 1.9239, "step": 1444 }, { "epoch": 0.14111328125, "grad_norm": 0.21120323240756989, "learning_rate": 0.00048542710967199563, "loss": 1.9323, "step": 1445 }, { "epoch": 0.1412109375, "grad_norm": 0.23532937467098236, "learning_rate": 0.00048540192273706807, "loss": 1.9033, "step": 1446 }, { "epoch": 0.14130859375, "grad_norm": 0.26372474431991577, "learning_rate": 0.00048537671478485233, "loss": 1.9109, "step": 1447 }, { "epoch": 0.14140625, "grad_norm": 0.2063819169998169, "learning_rate": 0.0004853514858178667, "loss": 1.8868, "step": 1448 }, { "epoch": 0.14150390625, "grad_norm": 0.2495461255311966, "learning_rate": 0.0004853262358386311, "loss": 1.9261, "step": 1449 }, { "epoch": 0.1416015625, "grad_norm": 0.24477644264698029, "learning_rate": 0.00048530096484966786, "loss": 1.8921, "step": 1450 }, { "epoch": 0.14169921875, "grad_norm": 0.20758818089962006, "learning_rate": 0.0004852756728535013, "loss": 1.9274, "step": 1451 }, { "epoch": 0.141796875, "grad_norm": 0.24920731782913208, "learning_rate": 0.000485250359852658, "loss": 1.9203, "step": 1452 }, { "epoch": 0.14189453125, "grad_norm": 0.27062875032424927, "learning_rate": 0.00048522502584966626, "loss": 1.9225, "step": 1453 }, { "epoch": 0.1419921875, "grad_norm": 0.26489493250846863, "learning_rate": 0.00048519967084705696, "loss": 1.9797, "step": 1454 }, { "epoch": 0.14208984375, "grad_norm": 0.2739894688129425, "learning_rate": 0.0004851742948473627, "loss": 1.8824, "step": 1455 }, { "epoch": 0.1421875, "grad_norm": 0.24199414253234863, "learning_rate": 0.0004851488978531184, "loss": 1.9439, "step": 1456 }, { "epoch": 0.14228515625, "grad_norm": 0.21669942140579224, "learning_rate": 0.00048512347986686094, "loss": 1.8901, "step": 1457 }, { "epoch": 0.1423828125, "grad_norm": 0.27475011348724365, "learning_rate": 
0.0004850980408911294, "loss": 1.877, "step": 1458 }, { "epoch": 0.14248046875, "grad_norm": 0.24100442230701447, "learning_rate": 0.0004850725809284648, "loss": 1.9286, "step": 1459 }, { "epoch": 0.142578125, "grad_norm": 0.24343852698802948, "learning_rate": 0.0004850470999814105, "loss": 1.9503, "step": 1460 }, { "epoch": 0.14267578125, "grad_norm": 0.22666993737220764, "learning_rate": 0.0004850215980525118, "loss": 1.8722, "step": 1461 }, { "epoch": 0.1427734375, "grad_norm": 0.22877848148345947, "learning_rate": 0.00048499607514431597, "loss": 1.8726, "step": 1462 }, { "epoch": 0.14287109375, "grad_norm": 0.21896860003471375, "learning_rate": 0.0004849705312593727, "loss": 1.9109, "step": 1463 }, { "epoch": 0.14296875, "grad_norm": 0.2896305024623871, "learning_rate": 0.00048494496640023356, "loss": 1.9293, "step": 1464 }, { "epoch": 0.14306640625, "grad_norm": 0.2690875232219696, "learning_rate": 0.00048491938056945214, "loss": 1.9191, "step": 1465 }, { "epoch": 0.1431640625, "grad_norm": 0.27440106868743896, "learning_rate": 0.0004848937737695844, "loss": 1.9729, "step": 1466 }, { "epoch": 0.14326171875, "grad_norm": 0.26410380005836487, "learning_rate": 0.000484868146003188, "loss": 1.8782, "step": 1467 }, { "epoch": 0.143359375, "grad_norm": 0.2948254644870758, "learning_rate": 0.00048484249727282326, "loss": 1.9726, "step": 1468 }, { "epoch": 0.14345703125, "grad_norm": 0.34213653206825256, "learning_rate": 0.000484816827581052, "loss": 1.8893, "step": 1469 }, { "epoch": 0.1435546875, "grad_norm": 0.2864801287651062, "learning_rate": 0.0004847911369304385, "loss": 1.9717, "step": 1470 }, { "epoch": 0.14365234375, "grad_norm": 0.23252104222774506, "learning_rate": 0.00048476542532354896, "loss": 1.9256, "step": 1471 }, { "epoch": 0.14375, "grad_norm": 0.24983392655849457, "learning_rate": 0.00048473969276295175, "loss": 1.9275, "step": 1472 }, { "epoch": 0.14384765625, "grad_norm": 0.24153786897659302, "learning_rate": 0.0004847139392512175, "loss": 1.8821, "step": 1473 }, { "epoch": 0.1439453125, "grad_norm": 0.2148951143026352, "learning_rate": 0.00048468816479091853, "loss": 1.9125, "step": 1474 }, { "epoch": 0.14404296875, "grad_norm": 0.20793981850147247, "learning_rate": 0.00048466236938462956, "loss": 1.9336, "step": 1475 }, { "epoch": 0.144140625, "grad_norm": 0.22007182240486145, "learning_rate": 0.0004846365530349273, "loss": 1.9383, "step": 1476 }, { "epoch": 0.14423828125, "grad_norm": 0.21164564788341522, "learning_rate": 0.00048461071574439063, "loss": 1.9188, "step": 1477 }, { "epoch": 0.1443359375, "grad_norm": 0.26241186261177063, "learning_rate": 0.00048458485751560045, "loss": 1.9348, "step": 1478 }, { "epoch": 0.14443359375, "grad_norm": 0.19936083257198334, "learning_rate": 0.00048455897835113974, "loss": 1.8605, "step": 1479 }, { "epoch": 0.14453125, "grad_norm": 0.2293226420879364, "learning_rate": 0.0004845330782535937, "loss": 1.8666, "step": 1480 }, { "epoch": 0.14462890625, "grad_norm": 0.27344003319740295, "learning_rate": 0.0004845071572255494, "loss": 1.9281, "step": 1481 }, { "epoch": 0.1447265625, "grad_norm": 0.2886092960834503, "learning_rate": 0.0004844812152695961, "loss": 1.9106, "step": 1482 }, { "epoch": 0.14482421875, "grad_norm": 0.345197856426239, "learning_rate": 0.00048445525238832526, "loss": 1.8825, "step": 1483 }, { "epoch": 0.144921875, "grad_norm": 0.32034623622894287, "learning_rate": 0.0004844292685843303, "loss": 1.9103, "step": 1484 }, { "epoch": 0.14501953125, "grad_norm": 0.30518102645874023, "learning_rate": 
0.00048440326386020685, "loss": 1.9614, "step": 1485 }, { "epoch": 0.1451171875, "grad_norm": 0.28948330879211426, "learning_rate": 0.00048437723821855244, "loss": 1.8966, "step": 1486 }, { "epoch": 0.14521484375, "grad_norm": 0.28401121497154236, "learning_rate": 0.00048435119166196684, "loss": 1.9455, "step": 1487 }, { "epoch": 0.1453125, "grad_norm": 0.2352132648229599, "learning_rate": 0.0004843251241930519, "loss": 1.9285, "step": 1488 }, { "epoch": 0.14541015625, "grad_norm": 0.231564462184906, "learning_rate": 0.0004842990358144115, "loss": 1.9204, "step": 1489 }, { "epoch": 0.1455078125, "grad_norm": 0.2393004596233368, "learning_rate": 0.00048427292652865165, "loss": 1.9449, "step": 1490 }, { "epoch": 0.14560546875, "grad_norm": 0.2513711154460907, "learning_rate": 0.00048424679633838046, "loss": 1.9092, "step": 1491 }, { "epoch": 0.145703125, "grad_norm": 0.22026249766349792, "learning_rate": 0.000484220645246208, "loss": 1.9439, "step": 1492 }, { "epoch": 0.14580078125, "grad_norm": 0.29993200302124023, "learning_rate": 0.0004841944732547467, "loss": 1.8934, "step": 1493 }, { "epoch": 0.1458984375, "grad_norm": 0.27871406078338623, "learning_rate": 0.0004841682803666108, "loss": 1.8947, "step": 1494 }, { "epoch": 0.14599609375, "grad_norm": 0.21927416324615479, "learning_rate": 0.0004841420665844166, "loss": 1.9472, "step": 1495 }, { "epoch": 0.14609375, "grad_norm": 0.24884743988513947, "learning_rate": 0.0004841158319107829, "loss": 1.9099, "step": 1496 }, { "epoch": 0.14619140625, "grad_norm": 0.256428986787796, "learning_rate": 0.0004840895763483302, "loss": 1.9266, "step": 1497 }, { "epoch": 0.1462890625, "grad_norm": 0.2777554392814636, "learning_rate": 0.0004840632998996812, "loss": 1.9764, "step": 1498 }, { "epoch": 0.14638671875, "grad_norm": 0.3199704587459564, "learning_rate": 0.00048403700256746066, "loss": 1.9391, "step": 1499 }, { "epoch": 0.146484375, "grad_norm": 0.33837810158729553, "learning_rate": 0.00048401068435429545, "loss": 1.9131, "step": 1500 }, { "epoch": 0.14658203125, "grad_norm": 0.25376418232917786, "learning_rate": 0.00048398434526281456, "loss": 1.9391, "step": 1501 }, { "epoch": 0.1466796875, "grad_norm": 0.2878507673740387, "learning_rate": 0.00048395798529564897, "loss": 1.9226, "step": 1502 }, { "epoch": 0.14677734375, "grad_norm": 0.3166121244430542, "learning_rate": 0.0004839316044554319, "loss": 1.9024, "step": 1503 }, { "epoch": 0.146875, "grad_norm": 0.30799803137779236, "learning_rate": 0.00048390520274479857, "loss": 1.9076, "step": 1504 }, { "epoch": 0.14697265625, "grad_norm": 0.3012866675853729, "learning_rate": 0.0004838787801663861, "loss": 1.9282, "step": 1505 }, { "epoch": 0.1470703125, "grad_norm": 0.29956120252609253, "learning_rate": 0.000483852336722834, "loss": 1.8934, "step": 1506 }, { "epoch": 0.14716796875, "grad_norm": 0.2174024134874344, "learning_rate": 0.0004838258724167838, "loss": 1.9434, "step": 1507 }, { "epoch": 0.147265625, "grad_norm": 0.31407755613327026, "learning_rate": 0.0004837993872508789, "loss": 1.8654, "step": 1508 }, { "epoch": 0.14736328125, "grad_norm": 0.22638960182666779, "learning_rate": 0.00048377288122776506, "loss": 1.937, "step": 1509 }, { "epoch": 0.1474609375, "grad_norm": 0.26045146584510803, "learning_rate": 0.0004837463543500899, "loss": 1.8986, "step": 1510 }, { "epoch": 0.14755859375, "grad_norm": 0.2935525178909302, "learning_rate": 0.0004837198066205034, "loss": 1.9554, "step": 1511 }, { "epoch": 0.14765625, "grad_norm": 0.20349502563476562, "learning_rate": 0.0004836932380416571, 
"loss": 1.893, "step": 1512 }, { "epoch": 0.14775390625, "grad_norm": 0.28056201338768005, "learning_rate": 0.0004836666486162053, "loss": 1.983, "step": 1513 }, { "epoch": 0.1478515625, "grad_norm": 0.27571672201156616, "learning_rate": 0.00048364003834680395, "loss": 1.9399, "step": 1514 }, { "epoch": 0.14794921875, "grad_norm": 0.28008097410202026, "learning_rate": 0.0004836134072361111, "loss": 1.9549, "step": 1515 }, { "epoch": 0.148046875, "grad_norm": 0.2211456298828125, "learning_rate": 0.00048358675528678704, "loss": 1.9245, "step": 1516 }, { "epoch": 0.14814453125, "grad_norm": 0.259030818939209, "learning_rate": 0.00048356008250149404, "loss": 1.8919, "step": 1517 }, { "epoch": 0.1482421875, "grad_norm": 0.2772839665412903, "learning_rate": 0.00048353338888289645, "loss": 1.9386, "step": 1518 }, { "epoch": 0.14833984375, "grad_norm": 0.3579663336277008, "learning_rate": 0.00048350667443366075, "loss": 1.9375, "step": 1519 }, { "epoch": 0.1484375, "grad_norm": 0.37625831365585327, "learning_rate": 0.0004834799391564555, "loss": 1.9508, "step": 1520 }, { "epoch": 0.14853515625, "grad_norm": 0.23583106696605682, "learning_rate": 0.0004834531830539512, "loss": 1.8934, "step": 1521 }, { "epoch": 0.1486328125, "grad_norm": 0.2464006394147873, "learning_rate": 0.00048342640612882075, "loss": 1.8932, "step": 1522 }, { "epoch": 0.14873046875, "grad_norm": 0.30233830213546753, "learning_rate": 0.0004833996083837388, "loss": 1.9117, "step": 1523 }, { "epoch": 0.148828125, "grad_norm": 0.23098327219486237, "learning_rate": 0.0004833727898213822, "loss": 1.9229, "step": 1524 }, { "epoch": 0.14892578125, "grad_norm": 0.2418309450149536, "learning_rate": 0.00048334595044442996, "loss": 1.9369, "step": 1525 }, { "epoch": 0.1490234375, "grad_norm": 0.2360529899597168, "learning_rate": 0.000483319090255563, "loss": 1.8828, "step": 1526 }, { "epoch": 0.14912109375, "grad_norm": 0.24173954129219055, "learning_rate": 0.0004832922092574645, "loss": 1.9349, "step": 1527 }, { "epoch": 0.14921875, "grad_norm": 0.21543891727924347, "learning_rate": 0.0004832653074528196, "loss": 1.9317, "step": 1528 }, { "epoch": 0.14931640625, "grad_norm": 0.21203261613845825, "learning_rate": 0.0004832383848443155, "loss": 1.9261, "step": 1529 }, { "epoch": 0.1494140625, "grad_norm": 0.19579768180847168, "learning_rate": 0.0004832114414346417, "loss": 1.878, "step": 1530 }, { "epoch": 0.14951171875, "grad_norm": 0.23105336725711823, "learning_rate": 0.0004831844772264894, "loss": 1.8957, "step": 1531 }, { "epoch": 0.149609375, "grad_norm": 0.1797371655702591, "learning_rate": 0.0004831574922225521, "loss": 1.9036, "step": 1532 }, { "epoch": 0.14970703125, "grad_norm": 0.22427886724472046, "learning_rate": 0.00048313048642552553, "loss": 1.9143, "step": 1533 }, { "epoch": 0.1498046875, "grad_norm": 0.2837050259113312, "learning_rate": 0.00048310345983810723, "loss": 1.9411, "step": 1534 }, { "epoch": 0.14990234375, "grad_norm": 0.25001662969589233, "learning_rate": 0.0004830764124629969, "loss": 1.9157, "step": 1535 }, { "epoch": 0.15, "grad_norm": 0.21143823862075806, "learning_rate": 0.00048304934430289635, "loss": 1.9654, "step": 1536 }, { "epoch": 0.15009765625, "grad_norm": 0.27027738094329834, "learning_rate": 0.00048302225536050954, "loss": 1.8961, "step": 1537 }, { "epoch": 0.1501953125, "grad_norm": 0.2757401168346405, "learning_rate": 0.00048299514563854225, "loss": 1.9537, "step": 1538 }, { "epoch": 0.15029296875, "grad_norm": 0.30425819754600525, "learning_rate": 0.0004829680151397025, "loss": 1.8713, 
"step": 1539 }, { "epoch": 0.150390625, "grad_norm": 0.3185950815677643, "learning_rate": 0.0004829408638667006, "loss": 1.987, "step": 1540 }, { "epoch": 0.15048828125, "grad_norm": 0.29616236686706543, "learning_rate": 0.0004829136918222486, "loss": 1.9329, "step": 1541 }, { "epoch": 0.1505859375, "grad_norm": 0.2886502146720886, "learning_rate": 0.00048288649900906066, "loss": 1.9178, "step": 1542 }, { "epoch": 0.15068359375, "grad_norm": 0.27268344163894653, "learning_rate": 0.0004828592854298532, "loss": 1.941, "step": 1543 }, { "epoch": 0.15078125, "grad_norm": 0.27281153202056885, "learning_rate": 0.00048283205108734463, "loss": 1.9668, "step": 1544 }, { "epoch": 0.15087890625, "grad_norm": 0.34445685148239136, "learning_rate": 0.0004828047959842554, "loss": 1.9153, "step": 1545 }, { "epoch": 0.1509765625, "grad_norm": 0.3492426872253418, "learning_rate": 0.0004827775201233079, "loss": 1.9123, "step": 1546 }, { "epoch": 0.15107421875, "grad_norm": 0.23972700536251068, "learning_rate": 0.000482750223507227, "loss": 1.9189, "step": 1547 }, { "epoch": 0.151171875, "grad_norm": 0.28324854373931885, "learning_rate": 0.0004827229061387393, "loss": 1.9113, "step": 1548 }, { "epoch": 0.15126953125, "grad_norm": 0.26321306824684143, "learning_rate": 0.0004826955680205734, "loss": 1.9568, "step": 1549 }, { "epoch": 0.1513671875, "grad_norm": 0.26853737235069275, "learning_rate": 0.0004826682091554604, "loss": 1.8798, "step": 1550 }, { "epoch": 0.15146484375, "grad_norm": 0.32173535227775574, "learning_rate": 0.00048264082954613296, "loss": 1.9564, "step": 1551 }, { "epoch": 0.1515625, "grad_norm": 0.2380555272102356, "learning_rate": 0.0004826134291953263, "loss": 1.883, "step": 1552 }, { "epoch": 0.15166015625, "grad_norm": 0.251919150352478, "learning_rate": 0.0004825860081057773, "loss": 1.8876, "step": 1553 }, { "epoch": 0.1517578125, "grad_norm": 0.26018527150154114, "learning_rate": 0.00048255856628022514, "loss": 1.9115, "step": 1554 }, { "epoch": 0.15185546875, "grad_norm": 0.21712715923786163, "learning_rate": 0.00048253110372141104, "loss": 1.9097, "step": 1555 }, { "epoch": 0.151953125, "grad_norm": 0.2382095605134964, "learning_rate": 0.0004825036204320782, "loss": 1.937, "step": 1556 }, { "epoch": 0.15205078125, "grad_norm": 0.231217160820961, "learning_rate": 0.000482476116414972, "loss": 1.9344, "step": 1557 }, { "epoch": 0.1521484375, "grad_norm": 0.20458881556987762, "learning_rate": 0.0004824485916728398, "loss": 1.9036, "step": 1558 }, { "epoch": 0.15224609375, "grad_norm": 0.205721914768219, "learning_rate": 0.00048242104620843125, "loss": 1.8998, "step": 1559 }, { "epoch": 0.15234375, "grad_norm": 0.23993369936943054, "learning_rate": 0.0004823934800244976, "loss": 1.8706, "step": 1560 }, { "epoch": 0.15244140625, "grad_norm": 0.23396044969558716, "learning_rate": 0.00048236589312379276, "loss": 1.8834, "step": 1561 }, { "epoch": 0.1525390625, "grad_norm": 0.24282585084438324, "learning_rate": 0.0004823382855090723, "loss": 1.8873, "step": 1562 }, { "epoch": 0.15263671875, "grad_norm": 0.25903937220573425, "learning_rate": 0.00048231065718309387, "loss": 1.9357, "step": 1563 }, { "epoch": 0.152734375, "grad_norm": 0.24471600353717804, "learning_rate": 0.0004822830081486175, "loss": 1.8975, "step": 1564 }, { "epoch": 0.15283203125, "grad_norm": 0.21125410497188568, "learning_rate": 0.00048225533840840493, "loss": 1.9513, "step": 1565 }, { "epoch": 0.1529296875, "grad_norm": 0.22737543284893036, "learning_rate": 0.00048222764796522017, "loss": 1.8962, "step": 1566 }, { 
"epoch": 0.15302734375, "grad_norm": 0.1857408881187439, "learning_rate": 0.0004821999368218292, "loss": 1.9054, "step": 1567 }, { "epoch": 0.153125, "grad_norm": 0.23223114013671875, "learning_rate": 0.0004821722049810002, "loss": 1.9118, "step": 1568 }, { "epoch": 0.15322265625, "grad_norm": 0.25117412209510803, "learning_rate": 0.0004821444524455034, "loss": 1.8889, "step": 1569 }, { "epoch": 0.1533203125, "grad_norm": 0.23292681574821472, "learning_rate": 0.00048211667921811084, "loss": 1.9495, "step": 1570 }, { "epoch": 0.15341796875, "grad_norm": 0.24508163332939148, "learning_rate": 0.0004820888853015969, "loss": 1.922, "step": 1571 }, { "epoch": 0.153515625, "grad_norm": 0.26448142528533936, "learning_rate": 0.000482061070698738, "loss": 1.9811, "step": 1572 }, { "epoch": 0.15361328125, "grad_norm": 0.24274107813835144, "learning_rate": 0.0004820332354123125, "loss": 1.9134, "step": 1573 }, { "epoch": 0.1537109375, "grad_norm": 0.2016661912202835, "learning_rate": 0.00048200537944510097, "loss": 1.9151, "step": 1574 }, { "epoch": 0.15380859375, "grad_norm": 0.21885213255882263, "learning_rate": 0.00048197750279988595, "loss": 1.9613, "step": 1575 }, { "epoch": 0.15390625, "grad_norm": 0.2705588936805725, "learning_rate": 0.000481949605479452, "loss": 1.9535, "step": 1576 }, { "epoch": 0.15400390625, "grad_norm": 0.31828904151916504, "learning_rate": 0.00048192168748658595, "loss": 1.9592, "step": 1577 }, { "epoch": 0.1541015625, "grad_norm": 0.38848698139190674, "learning_rate": 0.0004818937488240764, "loss": 1.9027, "step": 1578 }, { "epoch": 0.15419921875, "grad_norm": 0.2857527732849121, "learning_rate": 0.00048186578949471434, "loss": 1.9888, "step": 1579 }, { "epoch": 0.154296875, "grad_norm": 0.30096855759620667, "learning_rate": 0.0004818378095012925, "loss": 1.9199, "step": 1580 }, { "epoch": 0.15439453125, "grad_norm": 0.3158320486545563, "learning_rate": 0.00048180980884660594, "loss": 1.9081, "step": 1581 }, { "epoch": 0.1544921875, "grad_norm": 0.22765566408634186, "learning_rate": 0.00048178178753345157, "loss": 1.9181, "step": 1582 }, { "epoch": 0.15458984375, "grad_norm": 0.28936147689819336, "learning_rate": 0.0004817537455646286, "loss": 1.9424, "step": 1583 }, { "epoch": 0.1546875, "grad_norm": 0.3088884651660919, "learning_rate": 0.00048172568294293816, "loss": 1.9088, "step": 1584 }, { "epoch": 0.15478515625, "grad_norm": 0.2758021950721741, "learning_rate": 0.00048169759967118343, "loss": 1.9298, "step": 1585 }, { "epoch": 0.1548828125, "grad_norm": 0.2999780774116516, "learning_rate": 0.0004816694957521696, "loss": 1.9335, "step": 1586 }, { "epoch": 0.15498046875, "grad_norm": 0.22048184275627136, "learning_rate": 0.0004816413711887041, "loss": 1.8843, "step": 1587 }, { "epoch": 0.155078125, "grad_norm": 0.26630088686943054, "learning_rate": 0.0004816132259835963, "loss": 1.9073, "step": 1588 }, { "epoch": 0.15517578125, "grad_norm": 0.2615686058998108, "learning_rate": 0.0004815850601396577, "loss": 1.9313, "step": 1589 }, { "epoch": 0.1552734375, "grad_norm": 0.2473047822713852, "learning_rate": 0.00048155687365970167, "loss": 1.8878, "step": 1590 }, { "epoch": 0.15537109375, "grad_norm": 0.3328106999397278, "learning_rate": 0.000481528666546544, "loss": 1.9424, "step": 1591 }, { "epoch": 0.15546875, "grad_norm": 0.26094701886177063, "learning_rate": 0.0004815004388030022, "loss": 1.9623, "step": 1592 }, { "epoch": 0.15556640625, "grad_norm": 0.3564736247062683, "learning_rate": 0.0004814721904318959, "loss": 1.8966, "step": 1593 }, { "epoch": 0.1556640625, 
"grad_norm": 0.3765439987182617, "learning_rate": 0.000481443921436047, "loss": 1.9189, "step": 1594 }, { "epoch": 0.15576171875, "grad_norm": 0.36775973439216614, "learning_rate": 0.0004814156318182793, "loss": 1.9427, "step": 1595 }, { "epoch": 0.155859375, "grad_norm": 0.3822444975376129, "learning_rate": 0.00048138732158141867, "loss": 1.9329, "step": 1596 }, { "epoch": 0.15595703125, "grad_norm": 0.26008525490760803, "learning_rate": 0.00048135899072829295, "loss": 1.928, "step": 1597 }, { "epoch": 0.1560546875, "grad_norm": 0.32837969064712524, "learning_rate": 0.0004813306392617324, "loss": 1.9409, "step": 1598 }, { "epoch": 0.15615234375, "grad_norm": 0.30874279141426086, "learning_rate": 0.0004813022671845687, "loss": 1.9378, "step": 1599 }, { "epoch": 0.15625, "grad_norm": 0.2988525629043579, "learning_rate": 0.00048127387449963633, "loss": 1.9091, "step": 1600 }, { "epoch": 0.15634765625, "grad_norm": 0.4251779615879059, "learning_rate": 0.0004812454612097713, "loss": 1.9693, "step": 1601 }, { "epoch": 0.1564453125, "grad_norm": 0.2827276885509491, "learning_rate": 0.00048121702731781184, "loss": 1.9177, "step": 1602 }, { "epoch": 0.15654296875, "grad_norm": 0.24983742833137512, "learning_rate": 0.00048118857282659834, "loss": 1.9235, "step": 1603 }, { "epoch": 0.156640625, "grad_norm": 0.25737494230270386, "learning_rate": 0.000481160097738973, "loss": 1.9678, "step": 1604 }, { "epoch": 0.15673828125, "grad_norm": 0.38471952080726624, "learning_rate": 0.00048113160205778035, "loss": 1.9403, "step": 1605 }, { "epoch": 0.1568359375, "grad_norm": 0.30553486943244934, "learning_rate": 0.0004811030857858667, "loss": 1.939, "step": 1606 }, { "epoch": 0.15693359375, "grad_norm": 0.3170030117034912, "learning_rate": 0.0004810745489260808, "loss": 1.9331, "step": 1607 }, { "epoch": 0.15703125, "grad_norm": 0.3244478404521942, "learning_rate": 0.0004810459914812731, "loss": 1.8986, "step": 1608 }, { "epoch": 0.15712890625, "grad_norm": 0.24035608768463135, "learning_rate": 0.0004810174134542963, "loss": 1.9542, "step": 1609 }, { "epoch": 0.1572265625, "grad_norm": 0.29910752177238464, "learning_rate": 0.00048098881484800505, "loss": 1.8521, "step": 1610 }, { "epoch": 0.15732421875, "grad_norm": 0.2832294702529907, "learning_rate": 0.000480960195665256, "loss": 1.9567, "step": 1611 }, { "epoch": 0.157421875, "grad_norm": 0.3093940317630768, "learning_rate": 0.0004809315559089081, "loss": 1.9332, "step": 1612 }, { "epoch": 0.15751953125, "grad_norm": 0.2190847098827362, "learning_rate": 0.00048090289558182215, "loss": 1.8979, "step": 1613 }, { "epoch": 0.1576171875, "grad_norm": 0.2461078017950058, "learning_rate": 0.00048087421468686107, "loss": 1.9032, "step": 1614 }, { "epoch": 0.15771484375, "grad_norm": 0.2852519750595093, "learning_rate": 0.0004808455132268898, "loss": 1.9457, "step": 1615 }, { "epoch": 0.1578125, "grad_norm": 0.23334845900535583, "learning_rate": 0.00048081679120477543, "loss": 1.8718, "step": 1616 }, { "epoch": 0.15791015625, "grad_norm": 0.2714453637599945, "learning_rate": 0.000480788048623387, "loss": 1.9212, "step": 1617 }, { "epoch": 0.1580078125, "grad_norm": 0.2445923238992691, "learning_rate": 0.0004807592854855955, "loss": 1.9322, "step": 1618 }, { "epoch": 0.15810546875, "grad_norm": 0.2522878050804138, "learning_rate": 0.0004807305017942744, "loss": 1.939, "step": 1619 }, { "epoch": 0.158203125, "grad_norm": 0.28525444865226746, "learning_rate": 0.0004807016975522987, "loss": 1.929, "step": 1620 }, { "epoch": 0.15830078125, "grad_norm": 
0.22004222869873047, "learning_rate": 0.0004806728727625458, "loss": 1.9431, "step": 1621 }, { "epoch": 0.1583984375, "grad_norm": 0.20370328426361084, "learning_rate": 0.000480644027427895, "loss": 1.9063, "step": 1622 }, { "epoch": 0.15849609375, "grad_norm": 0.21263106167316437, "learning_rate": 0.00048061516155122763, "loss": 1.9098, "step": 1623 }, { "epoch": 0.15859375, "grad_norm": 0.25402888655662537, "learning_rate": 0.0004805862751354273, "loss": 1.8932, "step": 1624 }, { "epoch": 0.15869140625, "grad_norm": 0.21452878415584564, "learning_rate": 0.0004805573681833794, "loss": 1.9674, "step": 1625 }, { "epoch": 0.1587890625, "grad_norm": 0.2378520965576172, "learning_rate": 0.0004805284406979715, "loss": 1.8841, "step": 1626 }, { "epoch": 0.15888671875, "grad_norm": 0.20481383800506592, "learning_rate": 0.0004804994926820932, "loss": 1.8836, "step": 1627 }, { "epoch": 0.158984375, "grad_norm": 0.19195064902305603, "learning_rate": 0.00048047052413863607, "loss": 1.9694, "step": 1628 }, { "epoch": 0.15908203125, "grad_norm": 0.228655144572258, "learning_rate": 0.0004804415350704939, "loss": 1.9323, "step": 1629 }, { "epoch": 0.1591796875, "grad_norm": 0.2156544327735901, "learning_rate": 0.0004804125254805625, "loss": 1.9236, "step": 1630 }, { "epoch": 0.15927734375, "grad_norm": 0.24159860610961914, "learning_rate": 0.00048038349537173953, "loss": 1.9611, "step": 1631 }, { "epoch": 0.159375, "grad_norm": 0.21641957759857178, "learning_rate": 0.0004803544447469249, "loss": 1.9237, "step": 1632 }, { "epoch": 0.15947265625, "grad_norm": 0.21140074729919434, "learning_rate": 0.0004803253736090205, "loss": 1.9297, "step": 1633 }, { "epoch": 0.1595703125, "grad_norm": 0.23588809370994568, "learning_rate": 0.00048029628196093047, "loss": 1.8995, "step": 1634 }, { "epoch": 0.15966796875, "grad_norm": 0.2083786576986313, "learning_rate": 0.00048026716980556044, "loss": 1.9154, "step": 1635 }, { "epoch": 0.159765625, "grad_norm": 0.21067486703395844, "learning_rate": 0.00048023803714581867, "loss": 1.9032, "step": 1636 }, { "epoch": 0.15986328125, "grad_norm": 0.19560249149799347, "learning_rate": 0.0004802088839846153, "loss": 1.9352, "step": 1637 }, { "epoch": 0.1599609375, "grad_norm": 0.21477989852428436, "learning_rate": 0.0004801797103248624, "loss": 1.9192, "step": 1638 }, { "epoch": 0.16005859375, "grad_norm": 0.27302122116088867, "learning_rate": 0.0004801505161694741, "loss": 1.9177, "step": 1639 }, { "epoch": 0.16015625, "grad_norm": 0.251449316740036, "learning_rate": 0.00048012130152136676, "loss": 1.9383, "step": 1640 }, { "epoch": 0.16025390625, "grad_norm": 0.29725855588912964, "learning_rate": 0.00048009206638345857, "loss": 1.9027, "step": 1641 }, { "epoch": 0.1603515625, "grad_norm": 0.3364620804786682, "learning_rate": 0.00048006281075866996, "loss": 1.8999, "step": 1642 }, { "epoch": 0.16044921875, "grad_norm": 0.33240416646003723, "learning_rate": 0.00048003353464992323, "loss": 1.9023, "step": 1643 }, { "epoch": 0.160546875, "grad_norm": 0.29456380009651184, "learning_rate": 0.0004800042380601428, "loss": 1.8874, "step": 1644 }, { "epoch": 0.16064453125, "grad_norm": 0.20352846384048462, "learning_rate": 0.00047997492099225515, "loss": 1.8813, "step": 1645 }, { "epoch": 0.1607421875, "grad_norm": 0.2666397988796234, "learning_rate": 0.0004799455834491889, "loss": 1.9582, "step": 1646 }, { "epoch": 0.16083984375, "grad_norm": 0.34426215291023254, "learning_rate": 0.0004799162254338744, "loss": 1.9372, "step": 1647 }, { "epoch": 0.1609375, "grad_norm": 
0.3887472450733185, "learning_rate": 0.00047988684694924447, "loss": 1.9381, "step": 1648 }, { "epoch": 0.16103515625, "grad_norm": 0.3413529694080353, "learning_rate": 0.0004798574479982336, "loss": 1.8966, "step": 1649 }, { "epoch": 0.1611328125, "grad_norm": 0.2743453085422516, "learning_rate": 0.00047982802858377853, "loss": 1.9228, "step": 1650 }, { "epoch": 0.16123046875, "grad_norm": 0.29950737953186035, "learning_rate": 0.00047979858870881805, "loss": 1.9204, "step": 1651 }, { "epoch": 0.161328125, "grad_norm": 0.273075133562088, "learning_rate": 0.00047976912837629295, "loss": 1.8683, "step": 1652 }, { "epoch": 0.16142578125, "grad_norm": 0.2547522485256195, "learning_rate": 0.00047973964758914596, "loss": 1.9275, "step": 1653 }, { "epoch": 0.1615234375, "grad_norm": 0.2716279923915863, "learning_rate": 0.00047971014635032205, "loss": 1.8917, "step": 1654 }, { "epoch": 0.16162109375, "grad_norm": 0.28852027654647827, "learning_rate": 0.00047968062466276803, "loss": 1.9418, "step": 1655 }, { "epoch": 0.16171875, "grad_norm": 0.19788789749145508, "learning_rate": 0.00047965108252943293, "loss": 1.922, "step": 1656 }, { "epoch": 0.16181640625, "grad_norm": 0.33596405386924744, "learning_rate": 0.0004796215199532678, "loss": 1.9291, "step": 1657 }, { "epoch": 0.1619140625, "grad_norm": 0.32864540815353394, "learning_rate": 0.00047959193693722553, "loss": 1.9793, "step": 1658 }, { "epoch": 0.16201171875, "grad_norm": 0.25629329681396484, "learning_rate": 0.0004795623334842613, "loss": 1.9453, "step": 1659 }, { "epoch": 0.162109375, "grad_norm": 0.2877620458602905, "learning_rate": 0.0004795327095973322, "loss": 1.9225, "step": 1660 }, { "epoch": 0.16220703125, "grad_norm": 0.24507132172584534, "learning_rate": 0.00047950306527939744, "loss": 1.8917, "step": 1661 }, { "epoch": 0.1623046875, "grad_norm": 0.2875724732875824, "learning_rate": 0.0004794734005334182, "loss": 1.8819, "step": 1662 }, { "epoch": 0.16240234375, "grad_norm": 0.2515362501144409, "learning_rate": 0.0004794437153623577, "loss": 1.9232, "step": 1663 }, { "epoch": 0.1625, "grad_norm": 0.27037444710731506, "learning_rate": 0.00047941400976918123, "loss": 1.9567, "step": 1664 }, { "epoch": 0.16259765625, "grad_norm": 0.29190242290496826, "learning_rate": 0.0004793842837568562, "loss": 1.9266, "step": 1665 }, { "epoch": 0.1626953125, "grad_norm": 0.26770535111427307, "learning_rate": 0.0004793545373283518, "loss": 1.9163, "step": 1666 }, { "epoch": 0.16279296875, "grad_norm": 0.25330448150634766, "learning_rate": 0.0004793247704866396, "loss": 1.9182, "step": 1667 }, { "epoch": 0.162890625, "grad_norm": 0.27300313115119934, "learning_rate": 0.00047929498323469295, "loss": 1.9592, "step": 1668 }, { "epoch": 0.16298828125, "grad_norm": 0.21413999795913696, "learning_rate": 0.00047926517557548746, "loss": 1.919, "step": 1669 }, { "epoch": 0.1630859375, "grad_norm": 0.27660825848579407, "learning_rate": 0.0004792353475120004, "loss": 1.9176, "step": 1670 }, { "epoch": 0.16318359375, "grad_norm": 0.2682628631591797, "learning_rate": 0.00047920549904721156, "loss": 1.9381, "step": 1671 }, { "epoch": 0.16328125, "grad_norm": 0.2719988226890564, "learning_rate": 0.00047917563018410247, "loss": 1.9137, "step": 1672 }, { "epoch": 0.16337890625, "grad_norm": 0.23097728192806244, "learning_rate": 0.0004791457409256568, "loss": 1.9128, "step": 1673 }, { "epoch": 0.1634765625, "grad_norm": 0.27505284547805786, "learning_rate": 0.00047911583127486015, "loss": 1.9026, "step": 1674 }, { "epoch": 0.16357421875, "grad_norm": 
0.31115567684173584, "learning_rate": 0.0004790859012347002, "loss": 1.9438, "step": 1675 }, { "epoch": 0.163671875, "grad_norm": 0.2766979932785034, "learning_rate": 0.0004790559508081668, "loss": 1.9389, "step": 1676 }, { "epoch": 0.16376953125, "grad_norm": 0.25364476442337036, "learning_rate": 0.0004790259799982517, "loss": 1.9621, "step": 1677 }, { "epoch": 0.1638671875, "grad_norm": 0.2380513697862625, "learning_rate": 0.0004789959888079487, "loss": 1.8891, "step": 1678 }, { "epoch": 0.16396484375, "grad_norm": 0.21375243365764618, "learning_rate": 0.0004789659772402536, "loss": 1.8864, "step": 1679 }, { "epoch": 0.1640625, "grad_norm": 0.2307070791721344, "learning_rate": 0.00047893594529816445, "loss": 1.8975, "step": 1680 }, { "epoch": 0.16416015625, "grad_norm": 0.20362551510334015, "learning_rate": 0.000478905892984681, "loss": 1.9489, "step": 1681 }, { "epoch": 0.1642578125, "grad_norm": 0.23438306152820587, "learning_rate": 0.00047887582030280536, "loss": 1.9874, "step": 1682 }, { "epoch": 0.16435546875, "grad_norm": 0.19733543694019318, "learning_rate": 0.00047884572725554137, "loss": 1.9147, "step": 1683 }, { "epoch": 0.164453125, "grad_norm": 0.2748172879219055, "learning_rate": 0.0004788156138458952, "loss": 1.863, "step": 1684 }, { "epoch": 0.16455078125, "grad_norm": 0.26911303400993347, "learning_rate": 0.00047878548007687476, "loss": 1.8852, "step": 1685 }, { "epoch": 0.1646484375, "grad_norm": 0.20826198160648346, "learning_rate": 0.0004787553259514903, "loss": 1.933, "step": 1686 }, { "epoch": 0.16474609375, "grad_norm": 0.3136061429977417, "learning_rate": 0.00047872515147275393, "loss": 1.9264, "step": 1687 }, { "epoch": 0.16484375, "grad_norm": 0.22931191325187683, "learning_rate": 0.0004786949566436797, "loss": 1.9175, "step": 1688 }, { "epoch": 0.16494140625, "grad_norm": 0.24129685759544373, "learning_rate": 0.0004786647414672839, "loss": 1.9114, "step": 1689 }, { "epoch": 0.1650390625, "grad_norm": 0.2148384302854538, "learning_rate": 0.00047863450594658473, "loss": 1.9189, "step": 1690 }, { "epoch": 0.16513671875, "grad_norm": 0.24560751020908356, "learning_rate": 0.0004786042500846025, "loss": 1.9459, "step": 1691 }, { "epoch": 0.165234375, "grad_norm": 0.2328975796699524, "learning_rate": 0.0004785739738843594, "loss": 1.9037, "step": 1692 }, { "epoch": 0.16533203125, "grad_norm": 0.19054313004016876, "learning_rate": 0.00047854367734887984, "loss": 1.9035, "step": 1693 }, { "epoch": 0.1654296875, "grad_norm": 0.19610174000263214, "learning_rate": 0.00047851336048119016, "loss": 1.8709, "step": 1694 }, { "epoch": 0.16552734375, "grad_norm": 0.22446346282958984, "learning_rate": 0.0004784830232843186, "loss": 1.8866, "step": 1695 }, { "epoch": 0.165625, "grad_norm": 0.22055386006832123, "learning_rate": 0.0004784526657612959, "loss": 1.9104, "step": 1696 }, { "epoch": 0.16572265625, "grad_norm": 0.19776830077171326, "learning_rate": 0.0004784222879151542, "loss": 1.9302, "step": 1697 }, { "epoch": 0.1658203125, "grad_norm": 0.19348515570163727, "learning_rate": 0.0004783918897489282, "loss": 1.944, "step": 1698 }, { "epoch": 0.16591796875, "grad_norm": 0.21035566926002502, "learning_rate": 0.0004783614712656542, "loss": 1.9697, "step": 1699 }, { "epoch": 0.166015625, "grad_norm": 0.20167845487594604, "learning_rate": 0.0004783310324683709, "loss": 1.9087, "step": 1700 }, { "epoch": 0.16611328125, "grad_norm": 0.24482861161231995, "learning_rate": 0.0004783005733601188, "loss": 1.9316, "step": 1701 }, { "epoch": 0.1662109375, "grad_norm": 0.2270205020904541, 
"learning_rate": 0.00047827009394394045, "loss": 1.885, "step": 1702 }, { "epoch": 0.16630859375, "grad_norm": 0.2608849108219147, "learning_rate": 0.0004782395942228806, "loss": 1.9639, "step": 1703 }, { "epoch": 0.16640625, "grad_norm": 0.29610663652420044, "learning_rate": 0.0004782090741999858, "loss": 1.9248, "step": 1704 }, { "epoch": 0.16650390625, "grad_norm": 0.44481170177459717, "learning_rate": 0.0004781785338783047, "loss": 1.9352, "step": 1705 }, { "epoch": 0.1666015625, "grad_norm": 0.4372797906398773, "learning_rate": 0.0004781479732608881, "loss": 1.9557, "step": 1706 }, { "epoch": 0.16669921875, "grad_norm": 0.2859649956226349, "learning_rate": 0.00047811739235078876, "loss": 1.9753, "step": 1707 }, { "epoch": 0.166796875, "grad_norm": 0.26134198904037476, "learning_rate": 0.00047808679115106135, "loss": 1.9513, "step": 1708 }, { "epoch": 0.16689453125, "grad_norm": 0.30859240889549255, "learning_rate": 0.0004780561696647627, "loss": 1.9084, "step": 1709 }, { "epoch": 0.1669921875, "grad_norm": 0.33629563450813293, "learning_rate": 0.0004780255278949516, "loss": 1.928, "step": 1710 }, { "epoch": 0.16708984375, "grad_norm": 0.2672228217124939, "learning_rate": 0.0004779948658446889, "loss": 1.9033, "step": 1711 }, { "epoch": 0.1671875, "grad_norm": 0.2845448851585388, "learning_rate": 0.00047796418351703744, "loss": 1.8892, "step": 1712 }, { "epoch": 0.16728515625, "grad_norm": 0.236465185880661, "learning_rate": 0.0004779334809150622, "loss": 1.9363, "step": 1713 }, { "epoch": 0.1673828125, "grad_norm": 0.2997010052204132, "learning_rate": 0.00047790275804183004, "loss": 1.8917, "step": 1714 }, { "epoch": 0.16748046875, "grad_norm": 0.3848778307437897, "learning_rate": 0.00047787201490040985, "loss": 1.959, "step": 1715 }, { "epoch": 0.167578125, "grad_norm": 0.2715388834476471, "learning_rate": 0.00047784125149387277, "loss": 1.9463, "step": 1716 }, { "epoch": 0.16767578125, "grad_norm": 0.3423638343811035, "learning_rate": 0.0004778104678252916, "loss": 1.9393, "step": 1717 }, { "epoch": 0.1677734375, "grad_norm": 0.3216153383255005, "learning_rate": 0.0004777796638977414, "loss": 1.9688, "step": 1718 }, { "epoch": 0.16787109375, "grad_norm": 0.24108828604221344, "learning_rate": 0.00047774883971429935, "loss": 1.8822, "step": 1719 }, { "epoch": 0.16796875, "grad_norm": 0.3516473174095154, "learning_rate": 0.0004777179952780443, "loss": 1.912, "step": 1720 }, { "epoch": 0.16806640625, "grad_norm": 0.2251645028591156, "learning_rate": 0.0004776871305920575, "loss": 1.8989, "step": 1721 }, { "epoch": 0.1681640625, "grad_norm": 0.282797634601593, "learning_rate": 0.000477656245659422, "loss": 1.8483, "step": 1722 }, { "epoch": 0.16826171875, "grad_norm": 0.3045579195022583, "learning_rate": 0.0004776253404832229, "loss": 1.8934, "step": 1723 }, { "epoch": 0.168359375, "grad_norm": 0.27416738867759705, "learning_rate": 0.00047759441506654745, "loss": 1.924, "step": 1724 }, { "epoch": 0.16845703125, "grad_norm": 0.2600431442260742, "learning_rate": 0.0004775634694124847, "loss": 1.9507, "step": 1725 }, { "epoch": 0.1685546875, "grad_norm": 0.22953875362873077, "learning_rate": 0.0004775325035241259, "loss": 1.9062, "step": 1726 }, { "epoch": 0.16865234375, "grad_norm": 0.20656803250312805, "learning_rate": 0.00047750151740456426, "loss": 1.9312, "step": 1727 }, { "epoch": 0.16875, "grad_norm": 0.21857194602489471, "learning_rate": 0.0004774705110568951, "loss": 1.931, "step": 1728 }, { "epoch": 0.16884765625, "grad_norm": 0.2585659325122833, "learning_rate": 
0.0004774394844842155, "loss": 1.9149, "step": 1729 }, { "epoch": 0.1689453125, "grad_norm": 0.23686473071575165, "learning_rate": 0.000477408437689625, "loss": 1.9316, "step": 1730 }, { "epoch": 0.16904296875, "grad_norm": 0.21281464397907257, "learning_rate": 0.0004773773706762247, "loss": 1.9278, "step": 1731 }, { "epoch": 0.169140625, "grad_norm": 0.2731045186519623, "learning_rate": 0.00047734628344711796, "loss": 1.9088, "step": 1732 }, { "epoch": 0.16923828125, "grad_norm": 0.21235254406929016, "learning_rate": 0.00047731517600541014, "loss": 1.9418, "step": 1733 }, { "epoch": 0.1693359375, "grad_norm": 0.2536609172821045, "learning_rate": 0.0004772840483542085, "loss": 1.9651, "step": 1734 }, { "epoch": 0.16943359375, "grad_norm": 0.22243930399417877, "learning_rate": 0.00047725290049662264, "loss": 1.9158, "step": 1735 }, { "epoch": 0.16953125, "grad_norm": 0.24414226412773132, "learning_rate": 0.00047722173243576374, "loss": 1.9167, "step": 1736 }, { "epoch": 0.16962890625, "grad_norm": 0.25493788719177246, "learning_rate": 0.0004771905441747453, "loss": 1.9244, "step": 1737 }, { "epoch": 0.1697265625, "grad_norm": 0.21703946590423584, "learning_rate": 0.0004771593357166827, "loss": 1.8882, "step": 1738 }, { "epoch": 0.16982421875, "grad_norm": 0.27353623509407043, "learning_rate": 0.00047712810706469354, "loss": 1.9134, "step": 1739 }, { "epoch": 0.169921875, "grad_norm": 0.2163049578666687, "learning_rate": 0.0004770968582218971, "loss": 1.9226, "step": 1740 }, { "epoch": 0.17001953125, "grad_norm": 0.1948620080947876, "learning_rate": 0.000477065589191415, "loss": 1.8663, "step": 1741 }, { "epoch": 0.1701171875, "grad_norm": 0.22654829919338226, "learning_rate": 0.0004770342999763706, "loss": 1.9327, "step": 1742 }, { "epoch": 0.17021484375, "grad_norm": 0.24024905264377594, "learning_rate": 0.00047700299057988957, "loss": 1.9581, "step": 1743 }, { "epoch": 0.1703125, "grad_norm": 0.2727830111980438, "learning_rate": 0.0004769716610050993, "loss": 1.9158, "step": 1744 }, { "epoch": 0.17041015625, "grad_norm": 0.2848643362522125, "learning_rate": 0.0004769403112551295, "loss": 1.8943, "step": 1745 }, { "epoch": 0.1705078125, "grad_norm": 0.29621654748916626, "learning_rate": 0.00047690894133311157, "loss": 1.9182, "step": 1746 }, { "epoch": 0.17060546875, "grad_norm": 0.317327618598938, "learning_rate": 0.00047687755124217914, "loss": 1.9624, "step": 1747 }, { "epoch": 0.170703125, "grad_norm": 0.3084838390350342, "learning_rate": 0.00047684614098546783, "loss": 1.919, "step": 1748 }, { "epoch": 0.17080078125, "grad_norm": 0.26487037539482117, "learning_rate": 0.00047681471056611527, "loss": 1.9005, "step": 1749 }, { "epoch": 0.1708984375, "grad_norm": 0.2641773223876953, "learning_rate": 0.000476783259987261, "loss": 1.9085, "step": 1750 }, { "epoch": 0.17099609375, "grad_norm": 0.30503541231155396, "learning_rate": 0.00047675178925204667, "loss": 1.9148, "step": 1751 }, { "epoch": 0.17109375, "grad_norm": 0.2899084985256195, "learning_rate": 0.00047672029836361596, "loss": 1.9369, "step": 1752 }, { "epoch": 0.17119140625, "grad_norm": 0.3330906629562378, "learning_rate": 0.00047668878732511457, "loss": 1.8529, "step": 1753 }, { "epoch": 0.1712890625, "grad_norm": 0.32898756861686707, "learning_rate": 0.00047665725613969015, "loss": 1.8918, "step": 1754 }, { "epoch": 0.17138671875, "grad_norm": 0.21951165795326233, "learning_rate": 0.0004766257048104923, "loss": 1.8868, "step": 1755 }, { "epoch": 0.171484375, "grad_norm": 0.20533274114131927, "learning_rate": 
0.00047659413334067276, "loss": 1.8993, "step": 1756 }, { "epoch": 0.17158203125, "grad_norm": 0.19753561913967133, "learning_rate": 0.0004765625417333853, "loss": 1.913, "step": 1757 }, { "epoch": 0.1716796875, "grad_norm": 0.20068642497062683, "learning_rate": 0.0004765309299917856, "loss": 1.8639, "step": 1758 }, { "epoch": 0.17177734375, "grad_norm": 0.22603540122509003, "learning_rate": 0.00047649929811903143, "loss": 1.9014, "step": 1759 }, { "epoch": 0.171875, "grad_norm": 0.18157346546649933, "learning_rate": 0.00047646764611828247, "loss": 1.8956, "step": 1760 }, { "epoch": 0.17197265625, "grad_norm": 0.21239453554153442, "learning_rate": 0.0004764359739927005, "loss": 1.9636, "step": 1761 }, { "epoch": 0.1720703125, "grad_norm": 0.25655651092529297, "learning_rate": 0.0004764042817454494, "loss": 1.8901, "step": 1762 }, { "epoch": 0.17216796875, "grad_norm": 0.20679731667041779, "learning_rate": 0.0004763725693796947, "loss": 1.8908, "step": 1763 }, { "epoch": 0.172265625, "grad_norm": 0.20754674077033997, "learning_rate": 0.0004763408368986043, "loss": 1.9445, "step": 1764 }, { "epoch": 0.17236328125, "grad_norm": 0.18912455439567566, "learning_rate": 0.0004763090843053481, "loss": 1.9176, "step": 1765 }, { "epoch": 0.1724609375, "grad_norm": 0.20957696437835693, "learning_rate": 0.0004762773116030978, "loss": 1.9206, "step": 1766 }, { "epoch": 0.17255859375, "grad_norm": 0.1931106299161911, "learning_rate": 0.0004762455187950272, "loss": 1.9051, "step": 1767 }, { "epoch": 0.17265625, "grad_norm": 0.218103289604187, "learning_rate": 0.00047621370588431217, "loss": 1.9326, "step": 1768 }, { "epoch": 0.17275390625, "grad_norm": 0.20649409294128418, "learning_rate": 0.0004761818728741305, "loss": 1.9156, "step": 1769 }, { "epoch": 0.1728515625, "grad_norm": 0.2224937528371811, "learning_rate": 0.000476150019767662, "loss": 1.9181, "step": 1770 }, { "epoch": 0.17294921875, "grad_norm": 0.30575814843177795, "learning_rate": 0.00047611814656808865, "loss": 1.9276, "step": 1771 }, { "epoch": 0.173046875, "grad_norm": 0.34954264760017395, "learning_rate": 0.0004760862532785942, "loss": 1.8818, "step": 1772 }, { "epoch": 0.17314453125, "grad_norm": 0.2726197838783264, "learning_rate": 0.0004760543399023644, "loss": 1.9279, "step": 1773 }, { "epoch": 0.1732421875, "grad_norm": 0.2664092779159546, "learning_rate": 0.00047602240644258726, "loss": 1.9055, "step": 1774 }, { "epoch": 0.17333984375, "grad_norm": 0.22661474347114563, "learning_rate": 0.00047599045290245277, "loss": 1.9406, "step": 1775 }, { "epoch": 0.1734375, "grad_norm": 0.20825809240341187, "learning_rate": 0.00047595847928515243, "loss": 1.9189, "step": 1776 }, { "epoch": 0.17353515625, "grad_norm": 0.200863778591156, "learning_rate": 0.0004759264855938805, "loss": 1.8985, "step": 1777 }, { "epoch": 0.1736328125, "grad_norm": 0.18549345433712006, "learning_rate": 0.00047589447183183254, "loss": 1.8855, "step": 1778 }, { "epoch": 0.17373046875, "grad_norm": 0.19504952430725098, "learning_rate": 0.00047586243800220673, "loss": 1.9494, "step": 1779 }, { "epoch": 0.173828125, "grad_norm": 0.2109365612268448, "learning_rate": 0.0004758303841082029, "loss": 1.9223, "step": 1780 }, { "epoch": 0.17392578125, "grad_norm": 0.20636534690856934, "learning_rate": 0.00047579831015302277, "loss": 1.8939, "step": 1781 }, { "epoch": 0.1740234375, "grad_norm": 0.2035193145275116, "learning_rate": 0.0004757662161398704, "loss": 1.933, "step": 1782 }, { "epoch": 0.17412109375, "grad_norm": 0.20754744112491608, "learning_rate": 
0.0004757341020719517, "loss": 1.8626, "step": 1783 }, { "epoch": 0.17421875, "grad_norm": 0.29153457283973694, "learning_rate": 0.00047570196795247455, "loss": 1.8666, "step": 1784 }, { "epoch": 0.17431640625, "grad_norm": 0.36487746238708496, "learning_rate": 0.00047566981378464884, "loss": 1.8917, "step": 1785 }, { "epoch": 0.1744140625, "grad_norm": 0.32802101969718933, "learning_rate": 0.0004756376395716865, "loss": 1.8797, "step": 1786 }, { "epoch": 0.17451171875, "grad_norm": 0.2676317095756531, "learning_rate": 0.0004756054453168015, "loss": 1.9127, "step": 1787 }, { "epoch": 0.174609375, "grad_norm": 0.21236379444599152, "learning_rate": 0.0004755732310232097, "loss": 1.8628, "step": 1788 }, { "epoch": 0.17470703125, "grad_norm": 0.2659597396850586, "learning_rate": 0.000475540996694129, "loss": 1.8725, "step": 1789 }, { "epoch": 0.1748046875, "grad_norm": 0.2773102819919586, "learning_rate": 0.00047550874233277937, "loss": 1.9394, "step": 1790 }, { "epoch": 0.17490234375, "grad_norm": 0.2603963613510132, "learning_rate": 0.00047547646794238277, "loss": 1.9164, "step": 1791 }, { "epoch": 0.175, "grad_norm": 0.21353478729724884, "learning_rate": 0.00047544417352616305, "loss": 1.9091, "step": 1792 }, { "epoch": 0.17509765625, "grad_norm": 0.29758280515670776, "learning_rate": 0.0004754118590873462, "loss": 1.9137, "step": 1793 }, { "epoch": 0.1751953125, "grad_norm": 0.27226370573043823, "learning_rate": 0.0004753795246291601, "loss": 1.944, "step": 1794 }, { "epoch": 0.17529296875, "grad_norm": 0.3030240833759308, "learning_rate": 0.0004753471701548347, "loss": 1.8922, "step": 1795 }, { "epoch": 0.175390625, "grad_norm": 0.2778298258781433, "learning_rate": 0.0004753147956676019, "loss": 1.9321, "step": 1796 }, { "epoch": 0.17548828125, "grad_norm": 0.2760380506515503, "learning_rate": 0.0004752824011706956, "loss": 1.9063, "step": 1797 }, { "epoch": 0.1755859375, "grad_norm": 0.2793836295604706, "learning_rate": 0.00047524998666735184, "loss": 1.9186, "step": 1798 }, { "epoch": 0.17568359375, "grad_norm": 0.25465574860572815, "learning_rate": 0.00047521755216080845, "loss": 1.9247, "step": 1799 }, { "epoch": 0.17578125, "grad_norm": 0.23424822092056274, "learning_rate": 0.00047518509765430536, "loss": 1.924, "step": 1800 }, { "epoch": 0.17587890625, "grad_norm": 0.25181999802589417, "learning_rate": 0.00047515262315108456, "loss": 1.9536, "step": 1801 }, { "epoch": 0.1759765625, "grad_norm": 0.2854491174221039, "learning_rate": 0.00047512012865438976, "loss": 1.9098, "step": 1802 }, { "epoch": 0.17607421875, "grad_norm": 0.3238297998905182, "learning_rate": 0.0004750876141674671, "loss": 1.8833, "step": 1803 }, { "epoch": 0.176171875, "grad_norm": 0.23803670704364777, "learning_rate": 0.0004750550796935644, "loss": 1.9685, "step": 1804 }, { "epoch": 0.17626953125, "grad_norm": 0.2600926458835602, "learning_rate": 0.00047502252523593154, "loss": 1.913, "step": 1805 }, { "epoch": 0.1763671875, "grad_norm": 0.30193689465522766, "learning_rate": 0.00047498995079782046, "loss": 1.9453, "step": 1806 }, { "epoch": 0.17646484375, "grad_norm": 0.2594965994358063, "learning_rate": 0.0004749573563824851, "loss": 1.8932, "step": 1807 }, { "epoch": 0.1765625, "grad_norm": 0.25451624393463135, "learning_rate": 0.0004749247419931812, "loss": 1.9312, "step": 1808 }, { "epoch": 0.17666015625, "grad_norm": 0.2194494754076004, "learning_rate": 0.00047489210763316674, "loss": 1.8783, "step": 1809 }, { "epoch": 0.1767578125, "grad_norm": 0.2316436916589737, "learning_rate": 0.00047485945330570173, 
"loss": 1.8924, "step": 1810 }, { "epoch": 0.17685546875, "grad_norm": 0.2362855076789856, "learning_rate": 0.00047482677901404785, "loss": 1.8883, "step": 1811 }, { "epoch": 0.176953125, "grad_norm": 0.19301187992095947, "learning_rate": 0.00047479408476146906, "loss": 1.8778, "step": 1812 }, { "epoch": 0.17705078125, "grad_norm": 0.2641616463661194, "learning_rate": 0.0004747613705512313, "loss": 1.8932, "step": 1813 }, { "epoch": 0.1771484375, "grad_norm": 0.24684876203536987, "learning_rate": 0.00047472863638660227, "loss": 1.8961, "step": 1814 }, { "epoch": 0.17724609375, "grad_norm": 0.26180848479270935, "learning_rate": 0.0004746958822708519, "loss": 1.8992, "step": 1815 }, { "epoch": 0.17734375, "grad_norm": 0.21258847415447235, "learning_rate": 0.0004746631082072521, "loss": 1.9513, "step": 1816 }, { "epoch": 0.17744140625, "grad_norm": 0.23707158863544464, "learning_rate": 0.00047463031419907656, "loss": 1.9015, "step": 1817 }, { "epoch": 0.1775390625, "grad_norm": 0.2716965675354004, "learning_rate": 0.0004745975002496012, "loss": 1.9023, "step": 1818 }, { "epoch": 0.17763671875, "grad_norm": 0.19453154504299164, "learning_rate": 0.0004745646663621038, "loss": 1.8883, "step": 1819 }, { "epoch": 0.177734375, "grad_norm": 0.28068456053733826, "learning_rate": 0.00047453181253986437, "loss": 1.893, "step": 1820 }, { "epoch": 0.17783203125, "grad_norm": 0.24416638910770416, "learning_rate": 0.0004744989387861645, "loss": 1.9337, "step": 1821 }, { "epoch": 0.1779296875, "grad_norm": 0.24311622977256775, "learning_rate": 0.000474466045104288, "loss": 1.9139, "step": 1822 }, { "epoch": 0.17802734375, "grad_norm": 0.3018793761730194, "learning_rate": 0.00047443313149752073, "loss": 1.8941, "step": 1823 }, { "epoch": 0.178125, "grad_norm": 0.2428286373615265, "learning_rate": 0.00047440019796915044, "loss": 1.9238, "step": 1824 }, { "epoch": 0.17822265625, "grad_norm": 0.2676742970943451, "learning_rate": 0.0004743672445224669, "loss": 1.8603, "step": 1825 }, { "epoch": 0.1783203125, "grad_norm": 0.27228862047195435, "learning_rate": 0.00047433427116076184, "loss": 1.9091, "step": 1826 }, { "epoch": 0.17841796875, "grad_norm": 0.23455603420734406, "learning_rate": 0.0004743012778873291, "loss": 1.8429, "step": 1827 }, { "epoch": 0.178515625, "grad_norm": 0.26803067326545715, "learning_rate": 0.0004742682647054643, "loss": 1.8386, "step": 1828 }, { "epoch": 0.17861328125, "grad_norm": 0.30642521381378174, "learning_rate": 0.0004742352316184652, "loss": 1.8997, "step": 1829 }, { "epoch": 0.1787109375, "grad_norm": 0.23812299966812134, "learning_rate": 0.00047420217862963155, "loss": 1.9538, "step": 1830 }, { "epoch": 0.17880859375, "grad_norm": 0.21221864223480225, "learning_rate": 0.000474169105742265, "loss": 1.9395, "step": 1831 }, { "epoch": 0.17890625, "grad_norm": 0.2294473797082901, "learning_rate": 0.0004741360129596693, "loss": 1.9051, "step": 1832 }, { "epoch": 0.17900390625, "grad_norm": 0.27911970019340515, "learning_rate": 0.00047410290028515004, "loss": 1.9032, "step": 1833 }, { "epoch": 0.1791015625, "grad_norm": 0.3017365336418152, "learning_rate": 0.000474069767722015, "loss": 1.9179, "step": 1834 }, { "epoch": 0.17919921875, "grad_norm": 0.21640396118164062, "learning_rate": 0.0004740366152735738, "loss": 1.9204, "step": 1835 }, { "epoch": 0.179296875, "grad_norm": 0.2559264302253723, "learning_rate": 0.0004740034429431379, "loss": 1.9145, "step": 1836 }, { "epoch": 0.17939453125, "grad_norm": 0.24724526703357697, "learning_rate": 0.00047397025073402116, "loss": 1.9107, 
"step": 1837 }, { "epoch": 0.1794921875, "grad_norm": 0.20995844900608063, "learning_rate": 0.0004739370386495391, "loss": 1.9408, "step": 1838 }, { "epoch": 0.17958984375, "grad_norm": 0.24171245098114014, "learning_rate": 0.00047390380669300923, "loss": 1.9023, "step": 1839 }, { "epoch": 0.1796875, "grad_norm": 0.2987540066242218, "learning_rate": 0.00047387055486775123, "loss": 1.932, "step": 1840 }, { "epoch": 0.17978515625, "grad_norm": 0.2575046420097351, "learning_rate": 0.0004738372831770866, "loss": 1.8957, "step": 1841 }, { "epoch": 0.1798828125, "grad_norm": 0.26643261313438416, "learning_rate": 0.00047380399162433903, "loss": 1.927, "step": 1842 }, { "epoch": 0.17998046875, "grad_norm": 0.25104060769081116, "learning_rate": 0.00047377068021283385, "loss": 1.9379, "step": 1843 }, { "epoch": 0.180078125, "grad_norm": 0.2125697284936905, "learning_rate": 0.00047373734894589877, "loss": 1.9262, "step": 1844 }, { "epoch": 0.18017578125, "grad_norm": 0.3097136616706848, "learning_rate": 0.0004737039978268631, "loss": 1.9351, "step": 1845 }, { "epoch": 0.1802734375, "grad_norm": 0.28434544801712036, "learning_rate": 0.00047367062685905835, "loss": 1.9399, "step": 1846 }, { "epoch": 0.18037109375, "grad_norm": 0.2924177944660187, "learning_rate": 0.00047363723604581815, "loss": 1.8924, "step": 1847 }, { "epoch": 0.18046875, "grad_norm": 0.2927818298339844, "learning_rate": 0.0004736038253904778, "loss": 1.8868, "step": 1848 }, { "epoch": 0.18056640625, "grad_norm": 0.24946413934230804, "learning_rate": 0.0004735703948963747, "loss": 1.9145, "step": 1849 }, { "epoch": 0.1806640625, "grad_norm": 0.2521260380744934, "learning_rate": 0.00047353694456684844, "loss": 1.9579, "step": 1850 }, { "epoch": 0.18076171875, "grad_norm": 0.30722346901893616, "learning_rate": 0.0004735034744052402, "loss": 1.9279, "step": 1851 }, { "epoch": 0.180859375, "grad_norm": 0.24488766491413116, "learning_rate": 0.00047346998441489353, "loss": 1.9055, "step": 1852 }, { "epoch": 0.18095703125, "grad_norm": 0.29834532737731934, "learning_rate": 0.00047343647459915364, "loss": 1.9511, "step": 1853 }, { "epoch": 0.1810546875, "grad_norm": 0.252108097076416, "learning_rate": 0.000473402944961368, "loss": 1.8903, "step": 1854 }, { "epoch": 0.18115234375, "grad_norm": 0.21066221594810486, "learning_rate": 0.00047336939550488575, "loss": 1.9162, "step": 1855 }, { "epoch": 0.18125, "grad_norm": 0.23784129321575165, "learning_rate": 0.0004733358262330584, "loss": 1.8824, "step": 1856 }, { "epoch": 0.18134765625, "grad_norm": 0.20668673515319824, "learning_rate": 0.000473302237149239, "loss": 1.9197, "step": 1857 }, { "epoch": 0.1814453125, "grad_norm": 0.21304477751255035, "learning_rate": 0.00047326862825678296, "loss": 1.883, "step": 1858 }, { "epoch": 0.18154296875, "grad_norm": 0.240382581949234, "learning_rate": 0.0004732349995590474, "loss": 1.9065, "step": 1859 }, { "epoch": 0.181640625, "grad_norm": 0.27892357110977173, "learning_rate": 0.00047320135105939165, "loss": 1.9338, "step": 1860 }, { "epoch": 0.18173828125, "grad_norm": 0.22719697654247284, "learning_rate": 0.0004731676827611768, "loss": 1.8927, "step": 1861 }, { "epoch": 0.1818359375, "grad_norm": 0.19273899495601654, "learning_rate": 0.00047313399466776605, "loss": 1.9132, "step": 1862 }, { "epoch": 0.18193359375, "grad_norm": 0.2563495934009552, "learning_rate": 0.0004731002867825246, "loss": 1.9206, "step": 1863 }, { "epoch": 0.18203125, "grad_norm": 0.29519474506378174, "learning_rate": 0.0004730665591088195, "loss": 1.9254, "step": 1864 }, { 
"epoch": 0.18212890625, "grad_norm": 0.2755618095397949, "learning_rate": 0.0004730328116500198, "loss": 1.9348, "step": 1865 }, { "epoch": 0.1822265625, "grad_norm": 0.25873810052871704, "learning_rate": 0.00047299904440949663, "loss": 1.9285, "step": 1866 }, { "epoch": 0.18232421875, "grad_norm": 0.19655470550060272, "learning_rate": 0.0004729652573906231, "loss": 1.9268, "step": 1867 }, { "epoch": 0.182421875, "grad_norm": 0.288585901260376, "learning_rate": 0.00047293145059677403, "loss": 1.9379, "step": 1868 }, { "epoch": 0.18251953125, "grad_norm": 0.2560109794139862, "learning_rate": 0.00047289762403132673, "loss": 1.9258, "step": 1869 }, { "epoch": 0.1826171875, "grad_norm": 0.2627304196357727, "learning_rate": 0.00047286377769766, "loss": 1.8885, "step": 1870 }, { "epoch": 0.18271484375, "grad_norm": 0.3108537793159485, "learning_rate": 0.0004728299115991547, "loss": 1.9106, "step": 1871 }, { "epoch": 0.1828125, "grad_norm": 0.24468936026096344, "learning_rate": 0.00047279602573919386, "loss": 1.9436, "step": 1872 }, { "epoch": 0.18291015625, "grad_norm": 0.3040415048599243, "learning_rate": 0.0004727621201211623, "loss": 1.977, "step": 1873 }, { "epoch": 0.1830078125, "grad_norm": 0.3214859366416931, "learning_rate": 0.0004727281947484471, "loss": 1.9259, "step": 1874 }, { "epoch": 0.18310546875, "grad_norm": 0.3081786632537842, "learning_rate": 0.0004726942496244369, "loss": 1.8833, "step": 1875 }, { "epoch": 0.183203125, "grad_norm": 0.2123297154903412, "learning_rate": 0.0004726602847525225, "loss": 1.9327, "step": 1876 }, { "epoch": 0.18330078125, "grad_norm": 0.20726516842842102, "learning_rate": 0.00047262630013609694, "loss": 1.932, "step": 1877 }, { "epoch": 0.1833984375, "grad_norm": 0.2831561863422394, "learning_rate": 0.0004725922957785547, "loss": 1.8966, "step": 1878 }, { "epoch": 0.18349609375, "grad_norm": 0.25883564352989197, "learning_rate": 0.00047255827168329255, "loss": 1.8872, "step": 1879 }, { "epoch": 0.18359375, "grad_norm": 0.24307870864868164, "learning_rate": 0.00047252422785370936, "loss": 1.935, "step": 1880 }, { "epoch": 0.18369140625, "grad_norm": 0.2524338662624359, "learning_rate": 0.00047249016429320567, "loss": 1.887, "step": 1881 }, { "epoch": 0.1837890625, "grad_norm": 0.22779789566993713, "learning_rate": 0.0004724560810051843, "loss": 1.9536, "step": 1882 }, { "epoch": 0.18388671875, "grad_norm": 0.23726516962051392, "learning_rate": 0.0004724219779930496, "loss": 1.9267, "step": 1883 }, { "epoch": 0.183984375, "grad_norm": 0.2780471742153168, "learning_rate": 0.0004723878552602083, "loss": 1.9097, "step": 1884 }, { "epoch": 0.18408203125, "grad_norm": 0.2215142548084259, "learning_rate": 0.000472353712810069, "loss": 1.8851, "step": 1885 }, { "epoch": 0.1841796875, "grad_norm": 0.22764411568641663, "learning_rate": 0.0004723195506460422, "loss": 1.9108, "step": 1886 }, { "epoch": 0.18427734375, "grad_norm": 0.2349424958229065, "learning_rate": 0.0004722853687715404, "loss": 1.9079, "step": 1887 }, { "epoch": 0.184375, "grad_norm": 0.23532803356647491, "learning_rate": 0.00047225116718997804, "loss": 1.9198, "step": 1888 }, { "epoch": 0.18447265625, "grad_norm": 0.2555566728115082, "learning_rate": 0.00047221694590477147, "loss": 1.8961, "step": 1889 }, { "epoch": 0.1845703125, "grad_norm": 0.30815714597702026, "learning_rate": 0.00047218270491933934, "loss": 1.8792, "step": 1890 }, { "epoch": 0.18466796875, "grad_norm": 0.21387292444705963, "learning_rate": 0.00047214844423710173, "loss": 1.918, "step": 1891 }, { "epoch": 0.184765625, 
"grad_norm": 0.24553145468235016, "learning_rate": 0.00047211416386148123, "loss": 1.8908, "step": 1892 }, { "epoch": 0.18486328125, "grad_norm": 0.3307762145996094, "learning_rate": 0.00047207986379590197, "loss": 1.9298, "step": 1893 }, { "epoch": 0.1849609375, "grad_norm": 0.22250519692897797, "learning_rate": 0.00047204554404379036, "loss": 1.886, "step": 1894 }, { "epoch": 0.18505859375, "grad_norm": 0.2863367199897766, "learning_rate": 0.0004720112046085745, "loss": 1.8674, "step": 1895 }, { "epoch": 0.18515625, "grad_norm": 0.34826767444610596, "learning_rate": 0.00047197684549368465, "loss": 1.9282, "step": 1896 }, { "epoch": 0.18525390625, "grad_norm": 0.289986252784729, "learning_rate": 0.00047194246670255295, "loss": 1.9078, "step": 1897 }, { "epoch": 0.1853515625, "grad_norm": 0.29067522287368774, "learning_rate": 0.0004719080682386137, "loss": 1.8943, "step": 1898 }, { "epoch": 0.18544921875, "grad_norm": 0.2805711627006531, "learning_rate": 0.0004718736501053028, "loss": 1.8729, "step": 1899 }, { "epoch": 0.185546875, "grad_norm": 0.1988450288772583, "learning_rate": 0.0004718392123060584, "loss": 1.897, "step": 1900 }, { "epoch": 0.18564453125, "grad_norm": 0.28334251046180725, "learning_rate": 0.0004718047548443206, "loss": 1.8796, "step": 1901 }, { "epoch": 0.1857421875, "grad_norm": 0.24370847642421722, "learning_rate": 0.00047177027772353134, "loss": 1.9136, "step": 1902 }, { "epoch": 0.18583984375, "grad_norm": 0.25666430592536926, "learning_rate": 0.0004717357809471344, "loss": 1.9396, "step": 1903 }, { "epoch": 0.1859375, "grad_norm": 0.3168308138847351, "learning_rate": 0.00047170126451857603, "loss": 1.8627, "step": 1904 }, { "epoch": 0.18603515625, "grad_norm": 0.3242661952972412, "learning_rate": 0.0004716667284413039, "loss": 1.9534, "step": 1905 }, { "epoch": 0.1861328125, "grad_norm": 0.23754087090492249, "learning_rate": 0.0004716321727187678, "loss": 1.935, "step": 1906 }, { "epoch": 0.18623046875, "grad_norm": 0.2671366035938263, "learning_rate": 0.0004715975973544198, "loss": 1.9543, "step": 1907 }, { "epoch": 0.186328125, "grad_norm": 0.2637098729610443, "learning_rate": 0.00047156300235171353, "loss": 1.8575, "step": 1908 }, { "epoch": 0.18642578125, "grad_norm": 0.22229737043380737, "learning_rate": 0.0004715283877141046, "loss": 1.8757, "step": 1909 }, { "epoch": 0.1865234375, "grad_norm": 0.2838973104953766, "learning_rate": 0.00047149375344505084, "loss": 1.9114, "step": 1910 }, { "epoch": 0.18662109375, "grad_norm": 0.30752044916152954, "learning_rate": 0.00047145909954801195, "loss": 1.8879, "step": 1911 }, { "epoch": 0.18671875, "grad_norm": 0.2020583301782608, "learning_rate": 0.0004714244260264495, "loss": 1.8957, "step": 1912 }, { "epoch": 0.18681640625, "grad_norm": 0.25839710235595703, "learning_rate": 0.000471389732883827, "loss": 1.9356, "step": 1913 }, { "epoch": 0.1869140625, "grad_norm": 0.2770155668258667, "learning_rate": 0.0004713550201236101, "loss": 1.9127, "step": 1914 }, { "epoch": 0.18701171875, "grad_norm": 0.20789627730846405, "learning_rate": 0.0004713202877492661, "loss": 1.9254, "step": 1915 }, { "epoch": 0.187109375, "grad_norm": 0.2471790611743927, "learning_rate": 0.00047128553576426477, "loss": 1.958, "step": 1916 }, { "epoch": 0.18720703125, "grad_norm": 0.2624439597129822, "learning_rate": 0.0004712507641720772, "loss": 1.9403, "step": 1917 }, { "epoch": 0.1873046875, "grad_norm": 0.2492697685956955, "learning_rate": 0.00047121597297617704, "loss": 1.8928, "step": 1918 }, { "epoch": 0.18740234375, "grad_norm": 
0.20261016488075256, "learning_rate": 0.0004711811621800394, "loss": 1.925, "step": 1919 }, { "epoch": 0.1875, "grad_norm": 0.20547007024288177, "learning_rate": 0.0004711463317871417, "loss": 1.9058, "step": 1920 }, { "epoch": 0.18759765625, "grad_norm": 0.24086418747901917, "learning_rate": 0.0004711114818009632, "loss": 1.9377, "step": 1921 }, { "epoch": 0.1876953125, "grad_norm": 0.28701573610305786, "learning_rate": 0.00047107661222498497, "loss": 1.8958, "step": 1922 }, { "epoch": 0.18779296875, "grad_norm": 0.2246810346841812, "learning_rate": 0.0004710417230626904, "loss": 1.8759, "step": 1923 }, { "epoch": 0.187890625, "grad_norm": 0.1889760047197342, "learning_rate": 0.00047100681431756433, "loss": 1.947, "step": 1924 }, { "epoch": 0.18798828125, "grad_norm": 0.22808609902858734, "learning_rate": 0.0004709718859930941, "loss": 1.9428, "step": 1925 }, { "epoch": 0.1880859375, "grad_norm": 0.2610793709754944, "learning_rate": 0.00047093693809276843, "loss": 1.964, "step": 1926 }, { "epoch": 0.18818359375, "grad_norm": 0.30671024322509766, "learning_rate": 0.00047090197062007864, "loss": 1.9101, "step": 1927 }, { "epoch": 0.18828125, "grad_norm": 0.27946990728378296, "learning_rate": 0.0004708669835785175, "loss": 1.9487, "step": 1928 }, { "epoch": 0.18837890625, "grad_norm": 0.2439918965101242, "learning_rate": 0.0004708319769715799, "loss": 1.9037, "step": 1929 }, { "epoch": 0.1884765625, "grad_norm": 0.2517712414264679, "learning_rate": 0.0004707969508027627, "loss": 1.9059, "step": 1930 }, { "epoch": 0.18857421875, "grad_norm": 0.2609822750091553, "learning_rate": 0.0004707619050755648, "loss": 1.9119, "step": 1931 }, { "epoch": 0.188671875, "grad_norm": 0.2705680727958679, "learning_rate": 0.0004707268397934868, "loss": 1.9264, "step": 1932 }, { "epoch": 0.18876953125, "grad_norm": 0.27703019976615906, "learning_rate": 0.00047069175496003147, "loss": 1.8859, "step": 1933 }, { "epoch": 0.1888671875, "grad_norm": 0.4434245228767395, "learning_rate": 0.00047065665057870355, "loss": 2.0193, "step": 1934 }, { "epoch": 0.18896484375, "grad_norm": 0.25824934244155884, "learning_rate": 0.0004706215266530096, "loss": 1.9491, "step": 1935 }, { "epoch": 0.1890625, "grad_norm": 0.3010888397693634, "learning_rate": 0.00047058638318645815, "loss": 1.9461, "step": 1936 }, { "epoch": 0.18916015625, "grad_norm": 0.2592420279979706, "learning_rate": 0.0004705512201825597, "loss": 1.9329, "step": 1937 }, { "epoch": 0.1892578125, "grad_norm": 0.2857649326324463, "learning_rate": 0.0004705160376448269, "loss": 1.913, "step": 1938 }, { "epoch": 0.18935546875, "grad_norm": 0.3039281964302063, "learning_rate": 0.000470480835576774, "loss": 1.9253, "step": 1939 }, { "epoch": 0.189453125, "grad_norm": 0.23744364082813263, "learning_rate": 0.00047044561398191744, "loss": 1.9015, "step": 1940 }, { "epoch": 0.18955078125, "grad_norm": 0.24613125622272491, "learning_rate": 0.0004704103728637756, "loss": 1.9214, "step": 1941 }, { "epoch": 0.1896484375, "grad_norm": 0.2505404055118561, "learning_rate": 0.0004703751122258686, "loss": 1.9532, "step": 1942 }, { "epoch": 0.18974609375, "grad_norm": 0.2505582571029663, "learning_rate": 0.0004703398320717188, "loss": 1.932, "step": 1943 }, { "epoch": 0.18984375, "grad_norm": 0.2656853199005127, "learning_rate": 0.00047030453240485035, "loss": 1.8801, "step": 1944 }, { "epoch": 0.18994140625, "grad_norm": 0.2200378179550171, "learning_rate": 0.0004702692132287892, "loss": 1.9601, "step": 1945 }, { "epoch": 0.1900390625, "grad_norm": 0.260431170463562, 
"learning_rate": 0.00047023387454706375, "loss": 1.9112, "step": 1946 }, { "epoch": 0.19013671875, "grad_norm": 0.2725105881690979, "learning_rate": 0.00047019851636320384, "loss": 1.9616, "step": 1947 }, { "epoch": 0.190234375, "grad_norm": 0.21269826591014862, "learning_rate": 0.0004701631386807414, "loss": 1.9063, "step": 1948 }, { "epoch": 0.19033203125, "grad_norm": 0.2665625512599945, "learning_rate": 0.0004701277415032104, "loss": 1.9361, "step": 1949 }, { "epoch": 0.1904296875, "grad_norm": 0.2765948176383972, "learning_rate": 0.0004700923248341467, "loss": 1.9528, "step": 1950 }, { "epoch": 0.19052734375, "grad_norm": 0.22930294275283813, "learning_rate": 0.00047005688867708814, "loss": 1.8957, "step": 1951 }, { "epoch": 0.190625, "grad_norm": 0.22256456315517426, "learning_rate": 0.00047002143303557435, "loss": 1.9489, "step": 1952 }, { "epoch": 0.19072265625, "grad_norm": 0.24130624532699585, "learning_rate": 0.00046998595791314726, "loss": 1.9574, "step": 1953 }, { "epoch": 0.1908203125, "grad_norm": 0.292375773191452, "learning_rate": 0.00046995046331335026, "loss": 1.9176, "step": 1954 }, { "epoch": 0.19091796875, "grad_norm": 0.3528386950492859, "learning_rate": 0.0004699149492397292, "loss": 1.9511, "step": 1955 }, { "epoch": 0.191015625, "grad_norm": 0.26164865493774414, "learning_rate": 0.00046987941569583147, "loss": 1.9094, "step": 1956 }, { "epoch": 0.19111328125, "grad_norm": 0.27652114629745483, "learning_rate": 0.0004698438626852066, "loss": 1.9011, "step": 1957 }, { "epoch": 0.1912109375, "grad_norm": 0.31081855297088623, "learning_rate": 0.00046980829021140596, "loss": 1.9041, "step": 1958 }, { "epoch": 0.19130859375, "grad_norm": 0.29934486746788025, "learning_rate": 0.00046977269827798304, "loss": 1.9301, "step": 1959 }, { "epoch": 0.19140625, "grad_norm": 0.31369319558143616, "learning_rate": 0.000469737086888493, "loss": 1.9194, "step": 1960 }, { "epoch": 0.19150390625, "grad_norm": 0.21880319714546204, "learning_rate": 0.00046970145604649326, "loss": 1.9029, "step": 1961 }, { "epoch": 0.1916015625, "grad_norm": 0.21874625980854034, "learning_rate": 0.00046966580575554304, "loss": 1.9073, "step": 1962 }, { "epoch": 0.19169921875, "grad_norm": 0.27970215678215027, "learning_rate": 0.0004696301360192033, "loss": 1.9589, "step": 1963 }, { "epoch": 0.191796875, "grad_norm": 0.22823897004127502, "learning_rate": 0.00046959444684103725, "loss": 1.8801, "step": 1964 }, { "epoch": 0.19189453125, "grad_norm": 0.22956214845180511, "learning_rate": 0.0004695587382246099, "loss": 1.908, "step": 1965 }, { "epoch": 0.1919921875, "grad_norm": 0.2367463856935501, "learning_rate": 0.00046952301017348826, "loss": 1.9049, "step": 1966 }, { "epoch": 0.19208984375, "grad_norm": 0.2357739508152008, "learning_rate": 0.0004694872626912412, "loss": 1.9173, "step": 1967 }, { "epoch": 0.1921875, "grad_norm": 0.25406715273857117, "learning_rate": 0.0004694514957814396, "loss": 1.9309, "step": 1968 }, { "epoch": 0.19228515625, "grad_norm": 0.2415982335805893, "learning_rate": 0.0004694157094476563, "loss": 1.8976, "step": 1969 }, { "epoch": 0.1923828125, "grad_norm": 0.2648661434650421, "learning_rate": 0.000469379903693466, "loss": 1.9001, "step": 1970 }, { "epoch": 0.19248046875, "grad_norm": 0.186269611120224, "learning_rate": 0.00046934407852244534, "loss": 1.8917, "step": 1971 }, { "epoch": 0.192578125, "grad_norm": 0.30947110056877136, "learning_rate": 0.00046930823393817296, "loss": 1.8892, "step": 1972 }, { "epoch": 0.19267578125, "grad_norm": 0.2893858850002289, "learning_rate": 
0.00046927236994422945, "loss": 1.9225, "step": 1973 }, { "epoch": 0.1927734375, "grad_norm": 0.24984197318553925, "learning_rate": 0.0004692364865441973, "loss": 1.9367, "step": 1974 }, { "epoch": 0.19287109375, "grad_norm": 0.27524879574775696, "learning_rate": 0.0004692005837416609, "loss": 1.9141, "step": 1975 }, { "epoch": 0.19296875, "grad_norm": 0.22581790387630463, "learning_rate": 0.00046916466154020656, "loss": 1.9093, "step": 1976 }, { "epoch": 0.19306640625, "grad_norm": 0.2497783899307251, "learning_rate": 0.0004691287199434228, "loss": 1.9136, "step": 1977 }, { "epoch": 0.1931640625, "grad_norm": 0.22558996081352234, "learning_rate": 0.0004690927589548997, "loss": 1.9372, "step": 1978 }, { "epoch": 0.19326171875, "grad_norm": 0.2091677039861679, "learning_rate": 0.0004690567785782295, "loss": 1.9425, "step": 1979 }, { "epoch": 0.193359375, "grad_norm": 0.26007091999053955, "learning_rate": 0.0004690207788170063, "loss": 1.8893, "step": 1980 }, { "epoch": 0.19345703125, "grad_norm": 0.2551250755786896, "learning_rate": 0.0004689847596748261, "loss": 1.924, "step": 1981 }, { "epoch": 0.1935546875, "grad_norm": 0.30021947622299194, "learning_rate": 0.000468948721155287, "loss": 1.9103, "step": 1982 }, { "epoch": 0.19365234375, "grad_norm": 0.2922191917896271, "learning_rate": 0.0004689126632619889, "loss": 1.9284, "step": 1983 }, { "epoch": 0.19375, "grad_norm": 0.24520905315876007, "learning_rate": 0.00046887658599853373, "loss": 1.8806, "step": 1984 }, { "epoch": 0.19384765625, "grad_norm": 0.27852240204811096, "learning_rate": 0.00046884048936852513, "loss": 1.9375, "step": 1985 }, { "epoch": 0.1939453125, "grad_norm": 0.26879197359085083, "learning_rate": 0.0004688043733755689, "loss": 1.9054, "step": 1986 }, { "epoch": 0.19404296875, "grad_norm": 0.2576071321964264, "learning_rate": 0.00046876823802327274, "loss": 1.9758, "step": 1987 }, { "epoch": 0.194140625, "grad_norm": 0.24778784811496735, "learning_rate": 0.0004687320833152463, "loss": 1.8767, "step": 1988 }, { "epoch": 0.19423828125, "grad_norm": 0.2549983859062195, "learning_rate": 0.000468695909255101, "loss": 1.9532, "step": 1989 }, { "epoch": 0.1943359375, "grad_norm": 0.24673303961753845, "learning_rate": 0.00046865971584645027, "loss": 1.9304, "step": 1990 }, { "epoch": 0.19443359375, "grad_norm": 0.23759478330612183, "learning_rate": 0.00046862350309290977, "loss": 1.8887, "step": 1991 }, { "epoch": 0.19453125, "grad_norm": 0.28576332330703735, "learning_rate": 0.00046858727099809643, "loss": 1.8766, "step": 1992 }, { "epoch": 0.19462890625, "grad_norm": 0.23105448484420776, "learning_rate": 0.00046855101956563, "loss": 1.8956, "step": 1993 }, { "epoch": 0.1947265625, "grad_norm": 0.27505502104759216, "learning_rate": 0.0004685147487991312, "loss": 1.8915, "step": 1994 }, { "epoch": 0.19482421875, "grad_norm": 0.2024049013853073, "learning_rate": 0.00046847845870222354, "loss": 1.8735, "step": 1995 }, { "epoch": 0.194921875, "grad_norm": 0.26097309589385986, "learning_rate": 0.0004684421492785318, "loss": 1.9298, "step": 1996 }, { "epoch": 0.19501953125, "grad_norm": 0.2358173280954361, "learning_rate": 0.0004684058205316832, "loss": 1.9121, "step": 1997 }, { "epoch": 0.1951171875, "grad_norm": 0.2725095748901367, "learning_rate": 0.00046836947246530646, "loss": 1.8684, "step": 1998 }, { "epoch": 0.19521484375, "grad_norm": 0.25193265080451965, "learning_rate": 0.0004683331050830326, "loss": 1.9188, "step": 1999 }, { "epoch": 0.1953125, "grad_norm": 0.23056617379188538, "learning_rate": 0.00046829671838849424, 
"loss": 1.8628, "step": 2000 }, { "epoch": 0.19541015625, "grad_norm": 0.21980516612529755, "learning_rate": 0.00046826031238532623, "loss": 1.9392, "step": 2001 }, { "epoch": 0.1955078125, "grad_norm": 0.244726300239563, "learning_rate": 0.0004682238870771651, "loss": 1.9079, "step": 2002 }, { "epoch": 0.19560546875, "grad_norm": 0.21440590918064117, "learning_rate": 0.00046818744246764955, "loss": 1.9059, "step": 2003 }, { "epoch": 0.195703125, "grad_norm": 0.25402185320854187, "learning_rate": 0.0004681509785604199, "loss": 1.9226, "step": 2004 }, { "epoch": 0.19580078125, "grad_norm": 0.24775418639183044, "learning_rate": 0.00046811449535911863, "loss": 1.8361, "step": 2005 }, { "epoch": 0.1958984375, "grad_norm": 0.2603761553764343, "learning_rate": 0.0004680779928673902, "loss": 1.9252, "step": 2006 }, { "epoch": 0.19599609375, "grad_norm": 0.25276273488998413, "learning_rate": 0.0004680414710888808, "loss": 1.9072, "step": 2007 }, { "epoch": 0.19609375, "grad_norm": 0.23971259593963623, "learning_rate": 0.00046800493002723854, "loss": 1.9073, "step": 2008 }, { "epoch": 0.19619140625, "grad_norm": 0.21876166760921478, "learning_rate": 0.00046796836968611373, "loss": 1.8839, "step": 2009 }, { "epoch": 0.1962890625, "grad_norm": 0.22767984867095947, "learning_rate": 0.00046793179006915825, "loss": 1.8852, "step": 2010 }, { "epoch": 0.19638671875, "grad_norm": 0.1997627317905426, "learning_rate": 0.0004678951911800262, "loss": 1.8831, "step": 2011 }, { "epoch": 0.196484375, "grad_norm": 0.23563456535339355, "learning_rate": 0.00046785857302237345, "loss": 1.8791, "step": 2012 }, { "epoch": 0.19658203125, "grad_norm": 0.1879824995994568, "learning_rate": 0.0004678219355998578, "loss": 1.8986, "step": 2013 }, { "epoch": 0.1966796875, "grad_norm": 0.26914268732070923, "learning_rate": 0.00046778527891613907, "loss": 1.9269, "step": 2014 }, { "epoch": 0.19677734375, "grad_norm": 0.20528461039066315, "learning_rate": 0.000467748602974879, "loss": 1.9475, "step": 2015 }, { "epoch": 0.196875, "grad_norm": 0.260937362909317, "learning_rate": 0.0004677119077797409, "loss": 1.9036, "step": 2016 }, { "epoch": 0.19697265625, "grad_norm": 0.233351469039917, "learning_rate": 0.0004676751933343906, "loss": 1.9155, "step": 2017 }, { "epoch": 0.1970703125, "grad_norm": 0.24918298423290253, "learning_rate": 0.0004676384596424954, "loss": 1.9169, "step": 2018 }, { "epoch": 0.19716796875, "grad_norm": 0.331938236951828, "learning_rate": 0.00046760170670772473, "loss": 1.8655, "step": 2019 }, { "epoch": 0.197265625, "grad_norm": 0.31642693281173706, "learning_rate": 0.0004675649345337498, "loss": 1.926, "step": 2020 }, { "epoch": 0.19736328125, "grad_norm": 0.28673624992370605, "learning_rate": 0.00046752814312424394, "loss": 1.8992, "step": 2021 }, { "epoch": 0.1974609375, "grad_norm": 0.27498120069503784, "learning_rate": 0.0004674913324828822, "loss": 1.9125, "step": 2022 }, { "epoch": 0.19755859375, "grad_norm": 0.3634410500526428, "learning_rate": 0.0004674545026133417, "loss": 1.9216, "step": 2023 }, { "epoch": 0.19765625, "grad_norm": 0.2195715606212616, "learning_rate": 0.00046741765351930137, "loss": 1.9071, "step": 2024 }, { "epoch": 0.19775390625, "grad_norm": 0.30638429522514343, "learning_rate": 0.0004673807852044421, "loss": 1.9037, "step": 2025 }, { "epoch": 0.1978515625, "grad_norm": 0.30112913250923157, "learning_rate": 0.0004673438976724468, "loss": 1.9117, "step": 2026 }, { "epoch": 0.19794921875, "grad_norm": 0.2405427247285843, "learning_rate": 0.00046730699092700003, "loss": 1.8978, 
"step": 2027 }, { "epoch": 0.198046875, "grad_norm": 0.38917067646980286, "learning_rate": 0.0004672700649717886, "loss": 1.9055, "step": 2028 }, { "epoch": 0.19814453125, "grad_norm": 0.31037402153015137, "learning_rate": 0.000467233119810501, "loss": 1.8982, "step": 2029 }, { "epoch": 0.1982421875, "grad_norm": 0.31107863783836365, "learning_rate": 0.00046719615544682784, "loss": 1.8812, "step": 2030 }, { "epoch": 0.19833984375, "grad_norm": 0.3467962145805359, "learning_rate": 0.00046715917188446134, "loss": 1.8957, "step": 2031 }, { "epoch": 0.1984375, "grad_norm": 0.25533223152160645, "learning_rate": 0.0004671221691270961, "loss": 1.9409, "step": 2032 }, { "epoch": 0.19853515625, "grad_norm": 0.3249166011810303, "learning_rate": 0.0004670851471784281, "loss": 1.9037, "step": 2033 }, { "epoch": 0.1986328125, "grad_norm": 0.2961365282535553, "learning_rate": 0.00046704810604215565, "loss": 1.8862, "step": 2034 }, { "epoch": 0.19873046875, "grad_norm": 0.30742213129997253, "learning_rate": 0.0004670110457219788, "loss": 1.8787, "step": 2035 }, { "epoch": 0.198828125, "grad_norm": 0.20632188022136688, "learning_rate": 0.00046697396622159956, "loss": 1.8716, "step": 2036 }, { "epoch": 0.19892578125, "grad_norm": 0.3098668158054352, "learning_rate": 0.0004669368675447218, "loss": 1.9131, "step": 2037 }, { "epoch": 0.1990234375, "grad_norm": 0.210395947098732, "learning_rate": 0.00046689974969505143, "loss": 1.8658, "step": 2038 }, { "epoch": 0.19912109375, "grad_norm": 0.2865216135978699, "learning_rate": 0.00046686261267629605, "loss": 1.9041, "step": 2039 }, { "epoch": 0.19921875, "grad_norm": 0.2984568774700165, "learning_rate": 0.00046682545649216544, "loss": 1.9122, "step": 2040 }, { "epoch": 0.19931640625, "grad_norm": 0.285224586725235, "learning_rate": 0.00046678828114637126, "loss": 1.9139, "step": 2041 }, { "epoch": 0.1994140625, "grad_norm": 0.23279589414596558, "learning_rate": 0.0004667510866426268, "loss": 1.8941, "step": 2042 }, { "epoch": 0.19951171875, "grad_norm": 0.20549869537353516, "learning_rate": 0.0004667138729846475, "loss": 1.8947, "step": 2043 }, { "epoch": 0.199609375, "grad_norm": 0.26036345958709717, "learning_rate": 0.0004666766401761508, "loss": 1.9111, "step": 2044 }, { "epoch": 0.19970703125, "grad_norm": 0.2697281837463379, "learning_rate": 0.00046663938822085583, "loss": 1.909, "step": 2045 }, { "epoch": 0.1998046875, "grad_norm": 0.23096928000450134, "learning_rate": 0.00046660211712248374, "loss": 1.9102, "step": 2046 }, { "epoch": 0.19990234375, "grad_norm": 0.21505551040172577, "learning_rate": 0.00046656482688475754, "loss": 1.9257, "step": 2047 }, { "epoch": 0.2, "grad_norm": 0.2264963835477829, "learning_rate": 0.0004665275175114024, "loss": 1.9256, "step": 2048 }, { "epoch": 0.20009765625, "grad_norm": 0.29406315088272095, "learning_rate": 0.0004664901890061449, "loss": 1.9512, "step": 2049 }, { "epoch": 0.2001953125, "grad_norm": 0.2077530175447464, "learning_rate": 0.0004664528413727139, "loss": 1.9162, "step": 2050 }, { "epoch": 0.20029296875, "grad_norm": 0.23434431850910187, "learning_rate": 0.00046641547461484035, "loss": 1.8639, "step": 2051 }, { "epoch": 0.200390625, "grad_norm": 0.3102989196777344, "learning_rate": 0.0004663780887362566, "loss": 1.8729, "step": 2052 }, { "epoch": 0.20048828125, "grad_norm": 0.2750473618507385, "learning_rate": 0.0004663406837406973, "loss": 1.8936, "step": 2053 }, { "epoch": 0.2005859375, "grad_norm": 0.24825125932693481, "learning_rate": 0.00046630325963189866, "loss": 1.8843, "step": 2054 }, { "epoch": 
0.20068359375, "grad_norm": 0.2034183144569397, "learning_rate": 0.0004662658164135993, "loss": 1.9049, "step": 2055 }, { "epoch": 0.20078125, "grad_norm": 0.26106199622154236, "learning_rate": 0.0004662283540895394, "loss": 1.9034, "step": 2056 }, { "epoch": 0.20087890625, "grad_norm": 0.22128944098949432, "learning_rate": 0.0004661908726634609, "loss": 1.8846, "step": 2057 }, { "epoch": 0.2009765625, "grad_norm": 0.19975769519805908, "learning_rate": 0.0004661533721391081, "loss": 1.9336, "step": 2058 }, { "epoch": 0.20107421875, "grad_norm": 0.21513822674751282, "learning_rate": 0.0004661158525202269, "loss": 1.8887, "step": 2059 }, { "epoch": 0.201171875, "grad_norm": 0.2806141972541809, "learning_rate": 0.00046607831381056513, "loss": 1.8993, "step": 2060 }, { "epoch": 0.20126953125, "grad_norm": 0.24645020067691803, "learning_rate": 0.00046604075601387263, "loss": 1.9206, "step": 2061 }, { "epoch": 0.2013671875, "grad_norm": 0.19376933574676514, "learning_rate": 0.0004660031791339011, "loss": 1.9024, "step": 2062 }, { "epoch": 0.20146484375, "grad_norm": 0.3149716556072235, "learning_rate": 0.00046596558317440415, "loss": 1.9294, "step": 2063 }, { "epoch": 0.2015625, "grad_norm": 0.2759716808795929, "learning_rate": 0.00046592796813913715, "loss": 1.8748, "step": 2064 }, { "epoch": 0.20166015625, "grad_norm": 0.2037348598241806, "learning_rate": 0.0004658903340318577, "loss": 1.8803, "step": 2065 }, { "epoch": 0.2017578125, "grad_norm": 0.21185782551765442, "learning_rate": 0.0004658526808563248, "loss": 1.9478, "step": 2066 }, { "epoch": 0.20185546875, "grad_norm": 0.2070467174053192, "learning_rate": 0.0004658150086163001, "loss": 1.9131, "step": 2067 }, { "epoch": 0.201953125, "grad_norm": 0.22816209495067596, "learning_rate": 0.00046577731731554646, "loss": 1.9053, "step": 2068 }, { "epoch": 0.20205078125, "grad_norm": 0.2772216796875, "learning_rate": 0.0004657396069578289, "loss": 1.9048, "step": 2069 }, { "epoch": 0.2021484375, "grad_norm": 0.27881917357444763, "learning_rate": 0.0004657018775469145, "loss": 1.9161, "step": 2070 }, { "epoch": 0.20224609375, "grad_norm": 0.2583853602409363, "learning_rate": 0.0004656641290865719, "loss": 1.8961, "step": 2071 }, { "epoch": 0.20234375, "grad_norm": 0.2798794209957123, "learning_rate": 0.00046562636158057203, "loss": 1.8912, "step": 2072 }, { "epoch": 0.20244140625, "grad_norm": 0.25222232937812805, "learning_rate": 0.0004655885750326874, "loss": 1.9522, "step": 2073 }, { "epoch": 0.2025390625, "grad_norm": 0.2819882929325104, "learning_rate": 0.0004655507694466925, "loss": 1.8891, "step": 2074 }, { "epoch": 0.20263671875, "grad_norm": 0.21219240128993988, "learning_rate": 0.0004655129448263639, "loss": 1.9383, "step": 2075 }, { "epoch": 0.202734375, "grad_norm": 0.2394660860300064, "learning_rate": 0.00046547510117547997, "loss": 1.9034, "step": 2076 }, { "epoch": 0.20283203125, "grad_norm": 0.22925956547260284, "learning_rate": 0.00046543723849782085, "loss": 1.9189, "step": 2077 }, { "epoch": 0.2029296875, "grad_norm": 0.27168065309524536, "learning_rate": 0.00046539935679716876, "loss": 1.8803, "step": 2078 }, { "epoch": 0.20302734375, "grad_norm": 0.3125360906124115, "learning_rate": 0.00046536145607730767, "loss": 1.9144, "step": 2079 }, { "epoch": 0.203125, "grad_norm": 0.2742547392845154, "learning_rate": 0.00046532353634202355, "loss": 1.9011, "step": 2080 }, { "epoch": 0.20322265625, "grad_norm": 0.30199679732322693, "learning_rate": 0.0004652855975951044, "loss": 1.8952, "step": 2081 }, { "epoch": 0.2033203125, 
"grad_norm": 0.2881486117839813, "learning_rate": 0.0004652476398403397, "loss": 1.8888, "step": 2082 }, { "epoch": 0.20341796875, "grad_norm": 0.30965685844421387, "learning_rate": 0.0004652096630815212, "loss": 1.9286, "step": 2083 }, { "epoch": 0.203515625, "grad_norm": 0.38387784361839294, "learning_rate": 0.00046517166732244254, "loss": 1.922, "step": 2084 }, { "epoch": 0.20361328125, "grad_norm": 0.35733339190483093, "learning_rate": 0.00046513365256689907, "loss": 1.9086, "step": 2085 }, { "epoch": 0.2037109375, "grad_norm": 0.28068143129348755, "learning_rate": 0.00046509561881868814, "loss": 1.8819, "step": 2086 }, { "epoch": 0.20380859375, "grad_norm": 0.2930566668510437, "learning_rate": 0.00046505756608160895, "loss": 1.8737, "step": 2087 }, { "epoch": 0.20390625, "grad_norm": 0.2810053527355194, "learning_rate": 0.0004650194943594627, "loss": 1.9112, "step": 2088 }, { "epoch": 0.20400390625, "grad_norm": 0.2667030394077301, "learning_rate": 0.00046498140365605243, "loss": 1.9051, "step": 2089 }, { "epoch": 0.2041015625, "grad_norm": 0.20515574514865875, "learning_rate": 0.00046494329397518284, "loss": 1.9115, "step": 2090 }, { "epoch": 0.20419921875, "grad_norm": 0.24670284986495972, "learning_rate": 0.00046490516532066107, "loss": 1.885, "step": 2091 }, { "epoch": 0.204296875, "grad_norm": 0.2236049324274063, "learning_rate": 0.0004648670176962957, "loss": 1.9239, "step": 2092 }, { "epoch": 0.20439453125, "grad_norm": 0.20630759000778198, "learning_rate": 0.00046482885110589727, "loss": 1.9762, "step": 2093 }, { "epoch": 0.2044921875, "grad_norm": 0.25111427903175354, "learning_rate": 0.0004647906655532784, "loss": 1.9006, "step": 2094 }, { "epoch": 0.20458984375, "grad_norm": 0.2501910626888275, "learning_rate": 0.0004647524610422535, "loss": 1.9586, "step": 2095 }, { "epoch": 0.2046875, "grad_norm": 0.27904069423675537, "learning_rate": 0.00046471423757663867, "loss": 1.8846, "step": 2096 }, { "epoch": 0.20478515625, "grad_norm": 0.24683643877506256, "learning_rate": 0.0004646759951602523, "loss": 1.8941, "step": 2097 }, { "epoch": 0.2048828125, "grad_norm": 0.21275731921195984, "learning_rate": 0.00046463773379691424, "loss": 1.9168, "step": 2098 }, { "epoch": 0.20498046875, "grad_norm": 0.2217152714729309, "learning_rate": 0.0004645994534904468, "loss": 1.8554, "step": 2099 }, { "epoch": 0.205078125, "grad_norm": 0.20951475203037262, "learning_rate": 0.0004645611542446736, "loss": 1.9226, "step": 2100 }, { "epoch": 0.20517578125, "grad_norm": 0.1962258368730545, "learning_rate": 0.00046452283606342046, "loss": 1.9244, "step": 2101 }, { "epoch": 0.2052734375, "grad_norm": 0.25039345026016235, "learning_rate": 0.000464484498950515, "loss": 1.9237, "step": 2102 }, { "epoch": 0.20537109375, "grad_norm": 0.2845058739185333, "learning_rate": 0.00046444614290978674, "loss": 1.8962, "step": 2103 }, { "epoch": 0.20546875, "grad_norm": 0.22429201006889343, "learning_rate": 0.0004644077679450672, "loss": 1.9632, "step": 2104 }, { "epoch": 0.20556640625, "grad_norm": 0.20088529586791992, "learning_rate": 0.00046436937406018963, "loss": 1.9211, "step": 2105 }, { "epoch": 0.2056640625, "grad_norm": 0.2091323882341385, "learning_rate": 0.0004643309612589893, "loss": 1.913, "step": 2106 }, { "epoch": 0.20576171875, "grad_norm": 0.24969804286956787, "learning_rate": 0.00046429252954530316, "loss": 1.8726, "step": 2107 }, { "epoch": 0.205859375, "grad_norm": 0.30802738666534424, "learning_rate": 0.00046425407892297025, "loss": 1.9256, "step": 2108 }, { "epoch": 0.20595703125, "grad_norm": 
0.27234166860580444, "learning_rate": 0.0004642156093958317, "loss": 1.9548, "step": 2109 }, { "epoch": 0.2060546875, "grad_norm": 0.19717903435230255, "learning_rate": 0.00046417712096772994, "loss": 1.9694, "step": 2110 }, { "epoch": 0.20615234375, "grad_norm": 0.3146752715110779, "learning_rate": 0.0004641386136425098, "loss": 1.922, "step": 2111 }, { "epoch": 0.20625, "grad_norm": 0.29423826932907104, "learning_rate": 0.0004641000874240178, "loss": 1.8961, "step": 2112 }, { "epoch": 0.20634765625, "grad_norm": 0.23182527720928192, "learning_rate": 0.0004640615423161022, "loss": 1.9062, "step": 2113 }, { "epoch": 0.2064453125, "grad_norm": 0.3178667426109314, "learning_rate": 0.00046402297832261354, "loss": 1.9098, "step": 2114 }, { "epoch": 0.20654296875, "grad_norm": 0.3038199841976166, "learning_rate": 0.000463984395447404, "loss": 1.8738, "step": 2115 }, { "epoch": 0.206640625, "grad_norm": 0.29474422335624695, "learning_rate": 0.00046394579369432755, "loss": 1.8506, "step": 2116 }, { "epoch": 0.20673828125, "grad_norm": 0.30578935146331787, "learning_rate": 0.00046390717306724023, "loss": 1.8775, "step": 2117 }, { "epoch": 0.2068359375, "grad_norm": 0.27422910928726196, "learning_rate": 0.0004638685335699999, "loss": 1.9101, "step": 2118 }, { "epoch": 0.20693359375, "grad_norm": 0.2473389059305191, "learning_rate": 0.00046382987520646624, "loss": 1.9342, "step": 2119 }, { "epoch": 0.20703125, "grad_norm": 0.2475956827402115, "learning_rate": 0.00046379119798050094, "loss": 1.9242, "step": 2120 }, { "epoch": 0.20712890625, "grad_norm": 0.2836126983165741, "learning_rate": 0.0004637525018959675, "loss": 1.9263, "step": 2121 }, { "epoch": 0.2072265625, "grad_norm": 0.20722109079360962, "learning_rate": 0.00046371378695673137, "loss": 1.9077, "step": 2122 }, { "epoch": 0.20732421875, "grad_norm": 0.2630356550216675, "learning_rate": 0.00046367505316665975, "loss": 1.9109, "step": 2123 }, { "epoch": 0.207421875, "grad_norm": 0.26636844873428345, "learning_rate": 0.0004636363005296218, "loss": 1.9011, "step": 2124 }, { "epoch": 0.20751953125, "grad_norm": 0.3061290681362152, "learning_rate": 0.0004635975290494887, "loss": 1.8875, "step": 2125 }, { "epoch": 0.2076171875, "grad_norm": 0.2455659955739975, "learning_rate": 0.00046355873873013315, "loss": 1.8745, "step": 2126 }, { "epoch": 0.20771484375, "grad_norm": 0.226226806640625, "learning_rate": 0.0004635199295754301, "loss": 1.9186, "step": 2127 }, { "epoch": 0.2078125, "grad_norm": 0.27755168080329895, "learning_rate": 0.0004634811015892563, "loss": 1.878, "step": 2128 }, { "epoch": 0.20791015625, "grad_norm": 0.24024835228919983, "learning_rate": 0.00046344225477549015, "loss": 1.8886, "step": 2129 }, { "epoch": 0.2080078125, "grad_norm": 0.2625919282436371, "learning_rate": 0.0004634033891380122, "loss": 1.8997, "step": 2130 }, { "epoch": 0.20810546875, "grad_norm": 0.2913138270378113, "learning_rate": 0.00046336450468070485, "loss": 1.8711, "step": 2131 }, { "epoch": 0.208203125, "grad_norm": 0.24820668995380402, "learning_rate": 0.00046332560140745216, "loss": 1.9243, "step": 2132 }, { "epoch": 0.20830078125, "grad_norm": 0.2551608085632324, "learning_rate": 0.00046328667932214035, "loss": 1.8974, "step": 2133 }, { "epoch": 0.2083984375, "grad_norm": 0.28112420439720154, "learning_rate": 0.0004632477384286574, "loss": 1.9122, "step": 2134 }, { "epoch": 0.20849609375, "grad_norm": 0.23654119670391083, "learning_rate": 0.0004632087787308931, "loss": 1.93, "step": 2135 }, { "epoch": 0.20859375, "grad_norm": 0.26119500398635864, 
"learning_rate": 0.00046316980023273905, "loss": 1.8936, "step": 2136 }, { "epoch": 0.20869140625, "grad_norm": 0.26187509298324585, "learning_rate": 0.0004631308029380891, "loss": 1.9257, "step": 2137 }, { "epoch": 0.2087890625, "grad_norm": 0.26531320810317993, "learning_rate": 0.00046309178685083856, "loss": 1.9203, "step": 2138 }, { "epoch": 0.20888671875, "grad_norm": 0.28738391399383545, "learning_rate": 0.00046305275197488496, "loss": 1.9092, "step": 2139 }, { "epoch": 0.208984375, "grad_norm": 0.2203925997018814, "learning_rate": 0.0004630136983141273, "loss": 1.9086, "step": 2140 }, { "epoch": 0.20908203125, "grad_norm": 0.337566077709198, "learning_rate": 0.0004629746258724669, "loss": 1.896, "step": 2141 }, { "epoch": 0.2091796875, "grad_norm": 0.22496585547924042, "learning_rate": 0.0004629355346538067, "loss": 1.9216, "step": 2142 }, { "epoch": 0.20927734375, "grad_norm": 0.22534677386283875, "learning_rate": 0.0004628964246620516, "loss": 1.9074, "step": 2143 }, { "epoch": 0.209375, "grad_norm": 0.24914197623729706, "learning_rate": 0.0004628572959011082, "loss": 1.9033, "step": 2144 }, { "epoch": 0.20947265625, "grad_norm": 0.20345696806907654, "learning_rate": 0.0004628181483748852, "loss": 1.9218, "step": 2145 }, { "epoch": 0.2095703125, "grad_norm": 0.22977322340011597, "learning_rate": 0.00046277898208729306, "loss": 1.8171, "step": 2146 }, { "epoch": 0.20966796875, "grad_norm": 0.24085818231105804, "learning_rate": 0.0004627397970422443, "loss": 1.867, "step": 2147 }, { "epoch": 0.209765625, "grad_norm": 0.26558682322502136, "learning_rate": 0.00046270059324365296, "loss": 1.9171, "step": 2148 }, { "epoch": 0.20986328125, "grad_norm": 0.27644988894462585, "learning_rate": 0.00046266137069543524, "loss": 1.908, "step": 2149 }, { "epoch": 0.2099609375, "grad_norm": 0.2609714865684509, "learning_rate": 0.00046262212940150914, "loss": 1.9649, "step": 2150 }, { "epoch": 0.21005859375, "grad_norm": 0.27116766571998596, "learning_rate": 0.00046258286936579453, "loss": 1.9123, "step": 2151 }, { "epoch": 0.21015625, "grad_norm": 0.21386384963989258, "learning_rate": 0.0004625435905922131, "loss": 1.9453, "step": 2152 }, { "epoch": 0.21025390625, "grad_norm": 0.21030083298683167, "learning_rate": 0.00046250429308468833, "loss": 1.9349, "step": 2153 }, { "epoch": 0.2103515625, "grad_norm": 0.267782062292099, "learning_rate": 0.0004624649768471459, "loss": 1.9266, "step": 2154 }, { "epoch": 0.21044921875, "grad_norm": 0.2850502133369446, "learning_rate": 0.0004624256418835131, "loss": 1.884, "step": 2155 }, { "epoch": 0.210546875, "grad_norm": 0.3452877700328827, "learning_rate": 0.0004623862881977191, "loss": 1.9651, "step": 2156 }, { "epoch": 0.21064453125, "grad_norm": 0.2617965638637543, "learning_rate": 0.00046234691579369504, "loss": 1.8952, "step": 2157 }, { "epoch": 0.2107421875, "grad_norm": 0.2741365134716034, "learning_rate": 0.00046230752467537375, "loss": 1.8951, "step": 2158 }, { "epoch": 0.21083984375, "grad_norm": 0.37253716588020325, "learning_rate": 0.00046226811484669015, "loss": 1.9242, "step": 2159 }, { "epoch": 0.2109375, "grad_norm": 0.28392404317855835, "learning_rate": 0.00046222868631158105, "loss": 1.9025, "step": 2160 }, { "epoch": 0.21103515625, "grad_norm": 0.2524137794971466, "learning_rate": 0.00046218923907398473, "loss": 1.9166, "step": 2161 }, { "epoch": 0.2111328125, "grad_norm": 0.2966980040073395, "learning_rate": 0.0004621497731378419, "loss": 1.9083, "step": 2162 }, { "epoch": 0.21123046875, "grad_norm": 0.28292515873908997, "learning_rate": 
0.0004621102885070946, "loss": 1.9049, "step": 2163 }, { "epoch": 0.211328125, "grad_norm": 0.2480284720659256, "learning_rate": 0.00046207078518568717, "loss": 1.9063, "step": 2164 }, { "epoch": 0.21142578125, "grad_norm": 0.2493746429681778, "learning_rate": 0.0004620312631775657, "loss": 1.8636, "step": 2165 }, { "epoch": 0.2115234375, "grad_norm": 0.19225762784481049, "learning_rate": 0.0004619917224866779, "loss": 1.8916, "step": 2166 }, { "epoch": 0.21162109375, "grad_norm": 0.24277755618095398, "learning_rate": 0.00046195216311697355, "loss": 1.9265, "step": 2167 }, { "epoch": 0.21171875, "grad_norm": 0.2281288206577301, "learning_rate": 0.0004619125850724045, "loss": 1.9044, "step": 2168 }, { "epoch": 0.21181640625, "grad_norm": 0.24480199813842773, "learning_rate": 0.000461872988356924, "loss": 1.8734, "step": 2169 }, { "epoch": 0.2119140625, "grad_norm": 0.2577948570251465, "learning_rate": 0.0004618333729744876, "loss": 1.9017, "step": 2170 }, { "epoch": 0.21201171875, "grad_norm": 0.22131475806236267, "learning_rate": 0.00046179373892905233, "loss": 1.9071, "step": 2171 }, { "epoch": 0.212109375, "grad_norm": 0.24380597472190857, "learning_rate": 0.00046175408622457743, "loss": 1.9291, "step": 2172 }, { "epoch": 0.21220703125, "grad_norm": 0.25301221013069153, "learning_rate": 0.0004617144148650238, "loss": 1.8888, "step": 2173 }, { "epoch": 0.2123046875, "grad_norm": 0.2644136846065521, "learning_rate": 0.00046167472485435424, "loss": 1.8995, "step": 2174 }, { "epoch": 0.21240234375, "grad_norm": 0.2486433982849121, "learning_rate": 0.0004616350161965335, "loss": 1.9229, "step": 2175 }, { "epoch": 0.2125, "grad_norm": 0.3275326192378998, "learning_rate": 0.0004615952888955281, "loss": 1.9174, "step": 2176 }, { "epoch": 0.21259765625, "grad_norm": 0.22458401322364807, "learning_rate": 0.00046155554295530636, "loss": 1.8847, "step": 2177 }, { "epoch": 0.2126953125, "grad_norm": 0.2412051111459732, "learning_rate": 0.0004615157783798387, "loss": 1.9229, "step": 2178 }, { "epoch": 0.21279296875, "grad_norm": 0.21972574293613434, "learning_rate": 0.00046147599517309707, "loss": 1.9243, "step": 2179 }, { "epoch": 0.212890625, "grad_norm": 0.2897114157676697, "learning_rate": 0.0004614361933390555, "loss": 1.9192, "step": 2180 }, { "epoch": 0.21298828125, "grad_norm": 0.2781403660774231, "learning_rate": 0.00046139637288169003, "loss": 1.8995, "step": 2181 }, { "epoch": 0.2130859375, "grad_norm": 0.2788412272930145, "learning_rate": 0.00046135653380497814, "loss": 1.9041, "step": 2182 }, { "epoch": 0.21318359375, "grad_norm": 0.28828147053718567, "learning_rate": 0.0004613166761128996, "loss": 1.9016, "step": 2183 }, { "epoch": 0.21328125, "grad_norm": 0.26774659752845764, "learning_rate": 0.00046127679980943563, "loss": 1.9703, "step": 2184 }, { "epoch": 0.21337890625, "grad_norm": 0.2517338991165161, "learning_rate": 0.00046123690489856966, "loss": 1.8761, "step": 2185 }, { "epoch": 0.2134765625, "grad_norm": 0.25602057576179504, "learning_rate": 0.00046119699138428676, "loss": 1.9538, "step": 2186 }, { "epoch": 0.21357421875, "grad_norm": 0.20147521793842316, "learning_rate": 0.0004611570592705741, "loss": 1.8697, "step": 2187 }, { "epoch": 0.213671875, "grad_norm": 0.22156640887260437, "learning_rate": 0.0004611171085614203, "loss": 1.8982, "step": 2188 }, { "epoch": 0.21376953125, "grad_norm": 0.23269224166870117, "learning_rate": 0.00046107713926081626, "loss": 1.8925, "step": 2189 }, { "epoch": 0.2138671875, "grad_norm": 0.21519561111927032, "learning_rate": 
0.00046103715137275447, "loss": 1.8476, "step": 2190 }, { "epoch": 0.21396484375, "grad_norm": 0.2535529136657715, "learning_rate": 0.0004609971449012294, "loss": 1.901, "step": 2191 }, { "epoch": 0.2140625, "grad_norm": 0.24704895913600922, "learning_rate": 0.0004609571198502374, "loss": 1.8946, "step": 2192 }, { "epoch": 0.21416015625, "grad_norm": 0.24865074455738068, "learning_rate": 0.00046091707622377646, "loss": 1.8576, "step": 2193 }, { "epoch": 0.2142578125, "grad_norm": 0.2663979232311249, "learning_rate": 0.00046087701402584683, "loss": 1.89, "step": 2194 }, { "epoch": 0.21435546875, "grad_norm": 0.2134150117635727, "learning_rate": 0.00046083693326045004, "loss": 1.9338, "step": 2195 }, { "epoch": 0.214453125, "grad_norm": 0.20297788083553314, "learning_rate": 0.00046079683393159004, "loss": 1.9193, "step": 2196 }, { "epoch": 0.21455078125, "grad_norm": 0.22827771306037903, "learning_rate": 0.0004607567160432724, "loss": 1.9089, "step": 2197 }, { "epoch": 0.2146484375, "grad_norm": 0.2234564572572708, "learning_rate": 0.0004607165795995044, "loss": 1.9069, "step": 2198 }, { "epoch": 0.21474609375, "grad_norm": 0.23381231725215912, "learning_rate": 0.0004606764246042955, "loss": 1.9059, "step": 2199 }, { "epoch": 0.21484375, "grad_norm": 0.24767504632472992, "learning_rate": 0.0004606362510616567, "loss": 1.9448, "step": 2200 }, { "epoch": 0.21494140625, "grad_norm": 0.23719260096549988, "learning_rate": 0.00046059605897560095, "loss": 1.8839, "step": 2201 }, { "epoch": 0.2150390625, "grad_norm": 0.35658907890319824, "learning_rate": 0.00046055584835014325, "loss": 1.912, "step": 2202 }, { "epoch": 0.21513671875, "grad_norm": 0.35971537232398987, "learning_rate": 0.0004605156191893001, "loss": 1.9476, "step": 2203 }, { "epoch": 0.215234375, "grad_norm": 0.242756187915802, "learning_rate": 0.0004604753714970901, "loss": 1.8945, "step": 2204 }, { "epoch": 0.21533203125, "grad_norm": 0.299610435962677, "learning_rate": 0.0004604351052775337, "loss": 1.9209, "step": 2205 }, { "epoch": 0.2154296875, "grad_norm": 0.21338649094104767, "learning_rate": 0.000460394820534653, "loss": 1.8975, "step": 2206 }, { "epoch": 0.21552734375, "grad_norm": 0.2629339098930359, "learning_rate": 0.00046035451727247226, "loss": 1.8938, "step": 2207 }, { "epoch": 0.215625, "grad_norm": 0.2574734091758728, "learning_rate": 0.00046031419549501733, "loss": 1.9167, "step": 2208 }, { "epoch": 0.21572265625, "grad_norm": 0.20906251668930054, "learning_rate": 0.000460273855206316, "loss": 1.8635, "step": 2209 }, { "epoch": 0.2158203125, "grad_norm": 0.265655517578125, "learning_rate": 0.00046023349641039786, "loss": 1.9223, "step": 2210 }, { "epoch": 0.21591796875, "grad_norm": 0.308352530002594, "learning_rate": 0.0004601931191112945, "loss": 1.9127, "step": 2211 }, { "epoch": 0.216015625, "grad_norm": 0.26350700855255127, "learning_rate": 0.0004601527233130392, "loss": 1.8908, "step": 2212 }, { "epoch": 0.21611328125, "grad_norm": 0.28734543919563293, "learning_rate": 0.00046011230901966716, "loss": 1.9426, "step": 2213 }, { "epoch": 0.2162109375, "grad_norm": 0.24283497035503387, "learning_rate": 0.0004600718762352154, "loss": 1.915, "step": 2214 }, { "epoch": 0.21630859375, "grad_norm": 0.24215170741081238, "learning_rate": 0.00046003142496372275, "loss": 1.9154, "step": 2215 }, { "epoch": 0.21640625, "grad_norm": 0.25598227977752686, "learning_rate": 0.0004599909552092301, "loss": 1.9225, "step": 2216 }, { "epoch": 0.21650390625, "grad_norm": 0.2031700313091278, "learning_rate": 0.00045995046697577975, 
"loss": 1.8965, "step": 2217 }, { "epoch": 0.2166015625, "grad_norm": 0.28480711579322815, "learning_rate": 0.0004599099602674163, "loss": 1.9212, "step": 2218 }, { "epoch": 0.21669921875, "grad_norm": 0.22612528502941132, "learning_rate": 0.000459869435088186, "loss": 1.9439, "step": 2219 }, { "epoch": 0.216796875, "grad_norm": 0.22312787175178528, "learning_rate": 0.0004598288914421369, "loss": 1.9238, "step": 2220 }, { "epoch": 0.21689453125, "grad_norm": 0.24936683475971222, "learning_rate": 0.000459788329333319, "loss": 1.8923, "step": 2221 }, { "epoch": 0.2169921875, "grad_norm": 0.2038477212190628, "learning_rate": 0.00045974774876578406, "loss": 1.8993, "step": 2222 }, { "epoch": 0.21708984375, "grad_norm": 0.20470038056373596, "learning_rate": 0.00045970714974358576, "loss": 1.901, "step": 2223 }, { "epoch": 0.2171875, "grad_norm": 0.20012640953063965, "learning_rate": 0.00045966653227077955, "loss": 1.8809, "step": 2224 }, { "epoch": 0.21728515625, "grad_norm": 0.1954420655965805, "learning_rate": 0.0004596258963514228, "loss": 1.9468, "step": 2225 }, { "epoch": 0.2173828125, "grad_norm": 0.19190649688243866, "learning_rate": 0.00045958524198957463, "loss": 1.8548, "step": 2226 }, { "epoch": 0.21748046875, "grad_norm": 0.2360774725675583, "learning_rate": 0.000459544569189296, "loss": 1.8779, "step": 2227 }, { "epoch": 0.217578125, "grad_norm": 0.24308523535728455, "learning_rate": 0.0004595038779546499, "loss": 1.9122, "step": 2228 }, { "epoch": 0.21767578125, "grad_norm": 0.31237441301345825, "learning_rate": 0.00045946316828970093, "loss": 1.9101, "step": 2229 }, { "epoch": 0.2177734375, "grad_norm": 0.26211169362068176, "learning_rate": 0.00045942244019851557, "loss": 1.8804, "step": 2230 }, { "epoch": 0.21787109375, "grad_norm": 0.2568318843841553, "learning_rate": 0.0004593816936851623, "loss": 1.8625, "step": 2231 }, { "epoch": 0.21796875, "grad_norm": 0.2667257487773895, "learning_rate": 0.00045934092875371134, "loss": 1.8856, "step": 2232 }, { "epoch": 0.21806640625, "grad_norm": 0.24612100422382355, "learning_rate": 0.0004593001454082347, "loss": 1.8921, "step": 2233 }, { "epoch": 0.2181640625, "grad_norm": 0.2511996626853943, "learning_rate": 0.0004592593436528063, "loss": 1.8955, "step": 2234 }, { "epoch": 0.21826171875, "grad_norm": 0.2945897877216339, "learning_rate": 0.0004592185234915019, "loss": 1.8911, "step": 2235 }, { "epoch": 0.218359375, "grad_norm": 0.34499260783195496, "learning_rate": 0.00045917768492839895, "loss": 1.9234, "step": 2236 }, { "epoch": 0.21845703125, "grad_norm": 0.20355650782585144, "learning_rate": 0.000459136827967577, "loss": 1.8892, "step": 2237 }, { "epoch": 0.2185546875, "grad_norm": 0.31010961532592773, "learning_rate": 0.0004590959526131172, "loss": 1.8915, "step": 2238 }, { "epoch": 0.21865234375, "grad_norm": 0.3139292001724243, "learning_rate": 0.0004590550588691028, "loss": 1.9084, "step": 2239 }, { "epoch": 0.21875, "grad_norm": 0.2442852258682251, "learning_rate": 0.00045901414673961844, "loss": 1.8947, "step": 2240 }, { "epoch": 0.21884765625, "grad_norm": 0.27955976128578186, "learning_rate": 0.00045897321622875105, "loss": 1.8675, "step": 2241 }, { "epoch": 0.2189453125, "grad_norm": 0.1819334477186203, "learning_rate": 0.00045893226734058923, "loss": 1.8999, "step": 2242 }, { "epoch": 0.21904296875, "grad_norm": 0.24583211541175842, "learning_rate": 0.00045889130007922347, "loss": 1.8846, "step": 2243 }, { "epoch": 0.219140625, "grad_norm": 0.2537577450275421, "learning_rate": 0.00045885031444874597, "loss": 1.9059, 
"step": 2244 }, { "epoch": 0.21923828125, "grad_norm": 0.22242943942546844, "learning_rate": 0.00045880931045325074, "loss": 1.8862, "step": 2245 }, { "epoch": 0.2193359375, "grad_norm": 0.2518673241138458, "learning_rate": 0.0004587682880968338, "loss": 1.9075, "step": 2246 }, { "epoch": 0.21943359375, "grad_norm": 0.2151048630475998, "learning_rate": 0.00045872724738359297, "loss": 1.9299, "step": 2247 }, { "epoch": 0.21953125, "grad_norm": 0.24207067489624023, "learning_rate": 0.00045868618831762775, "loss": 1.8986, "step": 2248 }, { "epoch": 0.21962890625, "grad_norm": 0.2483375072479248, "learning_rate": 0.0004586451109030397, "loss": 1.9244, "step": 2249 }, { "epoch": 0.2197265625, "grad_norm": 0.19953475892543793, "learning_rate": 0.000458604015143932, "loss": 1.9284, "step": 2250 }, { "epoch": 0.21982421875, "grad_norm": 0.20768243074417114, "learning_rate": 0.00045856290104440974, "loss": 1.8846, "step": 2251 }, { "epoch": 0.219921875, "grad_norm": 0.21332871913909912, "learning_rate": 0.0004585217686085799, "loss": 1.8777, "step": 2252 }, { "epoch": 0.22001953125, "grad_norm": 0.22202058136463165, "learning_rate": 0.0004584806178405512, "loss": 1.8778, "step": 2253 }, { "epoch": 0.2201171875, "grad_norm": 0.22092659771442413, "learning_rate": 0.00045843944874443435, "loss": 1.9006, "step": 2254 }, { "epoch": 0.22021484375, "grad_norm": 0.2076462358236313, "learning_rate": 0.0004583982613243416, "loss": 1.8781, "step": 2255 }, { "epoch": 0.2203125, "grad_norm": 0.26965686678886414, "learning_rate": 0.0004583570555843874, "loss": 1.8818, "step": 2256 }, { "epoch": 0.22041015625, "grad_norm": 0.23709794878959656, "learning_rate": 0.0004583158315286877, "loss": 1.799, "step": 2257 }, { "epoch": 0.2205078125, "grad_norm": 0.21832555532455444, "learning_rate": 0.00045827458916136043, "loss": 1.9321, "step": 2258 }, { "epoch": 0.22060546875, "grad_norm": 0.268564909696579, "learning_rate": 0.0004582333284865254, "loss": 1.9043, "step": 2259 }, { "epoch": 0.220703125, "grad_norm": 0.2340027242898941, "learning_rate": 0.00045819204950830426, "loss": 1.8435, "step": 2260 }, { "epoch": 0.22080078125, "grad_norm": 0.2653496563434601, "learning_rate": 0.00045815075223082016, "loss": 1.8857, "step": 2261 }, { "epoch": 0.2208984375, "grad_norm": 0.2956698536872864, "learning_rate": 0.00045810943665819853, "loss": 1.9683, "step": 2262 }, { "epoch": 0.22099609375, "grad_norm": 0.2561390697956085, "learning_rate": 0.0004580681027945663, "loss": 1.9258, "step": 2263 }, { "epoch": 0.22109375, "grad_norm": 0.2162081003189087, "learning_rate": 0.00045802675064405266, "loss": 1.9093, "step": 2264 }, { "epoch": 0.22119140625, "grad_norm": 0.2608626186847687, "learning_rate": 0.0004579853802107879, "loss": 1.8742, "step": 2265 }, { "epoch": 0.2212890625, "grad_norm": 0.2352752387523651, "learning_rate": 0.0004579439914989049, "loss": 1.9224, "step": 2266 }, { "epoch": 0.22138671875, "grad_norm": 0.20996929705142975, "learning_rate": 0.00045790258451253776, "loss": 1.9081, "step": 2267 }, { "epoch": 0.221484375, "grad_norm": 0.2689194083213806, "learning_rate": 0.00045786115925582283, "loss": 1.928, "step": 2268 }, { "epoch": 0.22158203125, "grad_norm": 0.27555009722709656, "learning_rate": 0.0004578197157328981, "loss": 1.8619, "step": 2269 }, { "epoch": 0.2216796875, "grad_norm": 0.23493286967277527, "learning_rate": 0.00045777825394790344, "loss": 1.9058, "step": 2270 }, { "epoch": 0.22177734375, "grad_norm": 0.18501000106334686, "learning_rate": 0.0004577367739049804, "loss": 1.935, "step": 2271 }, { 
"epoch": 0.221875, "grad_norm": 0.2200579047203064, "learning_rate": 0.00045769527560827263, "loss": 1.9219, "step": 2272 }, { "epoch": 0.22197265625, "grad_norm": 0.24917781352996826, "learning_rate": 0.0004576537590619254, "loss": 1.8729, "step": 2273 }, { "epoch": 0.2220703125, "grad_norm": 0.2548181116580963, "learning_rate": 0.0004576122242700857, "loss": 1.8637, "step": 2274 }, { "epoch": 0.22216796875, "grad_norm": 0.27391573786735535, "learning_rate": 0.0004575706712369027, "loss": 1.9145, "step": 2275 }, { "epoch": 0.222265625, "grad_norm": 0.21581903100013733, "learning_rate": 0.00045752909996652695, "loss": 1.9498, "step": 2276 }, { "epoch": 0.22236328125, "grad_norm": 0.2738785147666931, "learning_rate": 0.00045748751046311125, "loss": 1.9297, "step": 2277 }, { "epoch": 0.2224609375, "grad_norm": 0.2592880427837372, "learning_rate": 0.00045744590273080987, "loss": 1.9323, "step": 2278 }, { "epoch": 0.22255859375, "grad_norm": 0.21176159381866455, "learning_rate": 0.00045740427677377926, "loss": 1.9299, "step": 2279 }, { "epoch": 0.22265625, "grad_norm": 0.2628064751625061, "learning_rate": 0.00045736263259617727, "loss": 1.9047, "step": 2280 }, { "epoch": 0.22275390625, "grad_norm": 0.199008047580719, "learning_rate": 0.00045732097020216393, "loss": 1.8896, "step": 2281 }, { "epoch": 0.2228515625, "grad_norm": 0.2588101923465729, "learning_rate": 0.00045727928959590086, "loss": 1.9043, "step": 2282 }, { "epoch": 0.22294921875, "grad_norm": 0.2765114903450012, "learning_rate": 0.00045723759078155165, "loss": 1.8701, "step": 2283 }, { "epoch": 0.223046875, "grad_norm": 0.22020654380321503, "learning_rate": 0.0004571958737632815, "loss": 1.842, "step": 2284 }, { "epoch": 0.22314453125, "grad_norm": 0.25397929549217224, "learning_rate": 0.0004571541385452577, "loss": 1.9356, "step": 2285 }, { "epoch": 0.2232421875, "grad_norm": 0.3181978464126587, "learning_rate": 0.0004571123851316492, "loss": 1.8812, "step": 2286 }, { "epoch": 0.22333984375, "grad_norm": 0.24803847074508667, "learning_rate": 0.0004570706135266268, "loss": 1.9017, "step": 2287 }, { "epoch": 0.2234375, "grad_norm": 0.2591163218021393, "learning_rate": 0.00045702882373436317, "loss": 1.8597, "step": 2288 }, { "epoch": 0.22353515625, "grad_norm": 0.2701427936553955, "learning_rate": 0.00045698701575903265, "loss": 1.9202, "step": 2289 }, { "epoch": 0.2236328125, "grad_norm": 0.259224534034729, "learning_rate": 0.00045694518960481145, "loss": 1.8726, "step": 2290 }, { "epoch": 0.22373046875, "grad_norm": 0.2605198919773102, "learning_rate": 0.00045690334527587786, "loss": 1.8942, "step": 2291 }, { "epoch": 0.223828125, "grad_norm": 0.2564171254634857, "learning_rate": 0.00045686148277641143, "loss": 1.8893, "step": 2292 }, { "epoch": 0.22392578125, "grad_norm": 0.2904933989048004, "learning_rate": 0.0004568196021105941, "loss": 1.8712, "step": 2293 }, { "epoch": 0.2240234375, "grad_norm": 0.19899466633796692, "learning_rate": 0.0004567777032826092, "loss": 1.882, "step": 2294 }, { "epoch": 0.22412109375, "grad_norm": 0.3275931477546692, "learning_rate": 0.00045673578629664227, "loss": 1.9256, "step": 2295 }, { "epoch": 0.22421875, "grad_norm": 0.25155848264694214, "learning_rate": 0.0004566938511568802, "loss": 1.8914, "step": 2296 }, { "epoch": 0.22431640625, "grad_norm": 0.2259829044342041, "learning_rate": 0.00045665189786751214, "loss": 1.9638, "step": 2297 }, { "epoch": 0.2244140625, "grad_norm": 0.21916180849075317, "learning_rate": 0.0004566099264327288, "loss": 1.8701, "step": 2298 }, { "epoch": 0.22451171875, 
"grad_norm": 0.2307112216949463, "learning_rate": 0.0004565679368567227, "loss": 1.9039, "step": 2299 }, { "epoch": 0.224609375, "grad_norm": 0.25406181812286377, "learning_rate": 0.00045652592914368826, "loss": 1.8874, "step": 2300 }, { "epoch": 0.22470703125, "grad_norm": 0.23334388434886932, "learning_rate": 0.0004564839032978216, "loss": 1.9329, "step": 2301 }, { "epoch": 0.2248046875, "grad_norm": 0.312667578458786, "learning_rate": 0.0004564418593233209, "loss": 1.8981, "step": 2302 }, { "epoch": 0.22490234375, "grad_norm": 0.21629926562309265, "learning_rate": 0.00045639979722438586, "loss": 1.9121, "step": 2303 }, { "epoch": 0.225, "grad_norm": 0.2639700174331665, "learning_rate": 0.0004563577170052182, "loss": 1.8917, "step": 2304 }, { "epoch": 0.22509765625, "grad_norm": 0.28482767939567566, "learning_rate": 0.00045631561867002114, "loss": 1.8615, "step": 2305 }, { "epoch": 0.2251953125, "grad_norm": 0.21128278970718384, "learning_rate": 0.0004562735022230002, "loss": 1.8742, "step": 2306 }, { "epoch": 0.22529296875, "grad_norm": 0.22739477455615997, "learning_rate": 0.00045623136766836226, "loss": 1.8953, "step": 2307 }, { "epoch": 0.225390625, "grad_norm": 0.2244565635919571, "learning_rate": 0.0004561892150103163, "loss": 1.8773, "step": 2308 }, { "epoch": 0.22548828125, "grad_norm": 0.24845881760120392, "learning_rate": 0.00045614704425307295, "loss": 1.9182, "step": 2309 }, { "epoch": 0.2255859375, "grad_norm": 0.24752497673034668, "learning_rate": 0.0004561048554008447, "loss": 1.8817, "step": 2310 }, { "epoch": 0.22568359375, "grad_norm": 0.20452168583869934, "learning_rate": 0.00045606264845784577, "loss": 1.8662, "step": 2311 }, { "epoch": 0.22578125, "grad_norm": 0.22137118875980377, "learning_rate": 0.00045602042342829244, "loss": 1.8966, "step": 2312 }, { "epoch": 0.22587890625, "grad_norm": 0.2197723239660263, "learning_rate": 0.00045597818031640244, "loss": 1.9152, "step": 2313 }, { "epoch": 0.2259765625, "grad_norm": 0.19033396244049072, "learning_rate": 0.0004559359191263955, "loss": 1.92, "step": 2314 }, { "epoch": 0.22607421875, "grad_norm": 0.21937741339206696, "learning_rate": 0.0004558936398624933, "loss": 1.9554, "step": 2315 }, { "epoch": 0.226171875, "grad_norm": 0.20096588134765625, "learning_rate": 0.000455851342528919, "loss": 1.9401, "step": 2316 }, { "epoch": 0.22626953125, "grad_norm": 0.22804000973701477, "learning_rate": 0.00045580902712989783, "loss": 1.8753, "step": 2317 }, { "epoch": 0.2263671875, "grad_norm": 0.2619505226612091, "learning_rate": 0.0004557666936696566, "loss": 1.8913, "step": 2318 }, { "epoch": 0.22646484375, "grad_norm": 0.2752479016780853, "learning_rate": 0.0004557243421524241, "loss": 1.8744, "step": 2319 }, { "epoch": 0.2265625, "grad_norm": 0.2984503209590912, "learning_rate": 0.0004556819725824309, "loss": 1.8951, "step": 2320 }, { "epoch": 0.22666015625, "grad_norm": 0.22869320213794708, "learning_rate": 0.0004556395849639094, "loss": 1.9453, "step": 2321 }, { "epoch": 0.2267578125, "grad_norm": 0.2314172238111496, "learning_rate": 0.0004555971793010937, "loss": 1.8549, "step": 2322 }, { "epoch": 0.22685546875, "grad_norm": 0.2702145278453827, "learning_rate": 0.0004555547555982197, "loss": 1.8916, "step": 2323 }, { "epoch": 0.226953125, "grad_norm": 0.29122287034988403, "learning_rate": 0.00045551231385952516, "loss": 1.9072, "step": 2324 }, { "epoch": 0.22705078125, "grad_norm": 0.26239168643951416, "learning_rate": 0.0004554698540892497, "loss": 1.9243, "step": 2325 }, { "epoch": 0.2271484375, "grad_norm": 
0.25048989057540894, "learning_rate": 0.0004554273762916346, "loss": 1.8799, "step": 2326 }, { "epoch": 0.22724609375, "grad_norm": 0.21093380451202393, "learning_rate": 0.0004553848804709231, "loss": 1.928, "step": 2327 }, { "epoch": 0.22734375, "grad_norm": 0.24841107428073883, "learning_rate": 0.00045534236663136, "loss": 1.867, "step": 2328 }, { "epoch": 0.22744140625, "grad_norm": 0.3298115134239197, "learning_rate": 0.0004552998347771923, "loss": 1.9387, "step": 2329 }, { "epoch": 0.2275390625, "grad_norm": 0.2536830008029938, "learning_rate": 0.0004552572849126683, "loss": 1.8998, "step": 2330 }, { "epoch": 0.22763671875, "grad_norm": 0.25619152188301086, "learning_rate": 0.0004552147170420386, "loss": 1.9069, "step": 2331 }, { "epoch": 0.227734375, "grad_norm": 0.21483200788497925, "learning_rate": 0.00045517213116955514, "loss": 1.8838, "step": 2332 }, { "epoch": 0.22783203125, "grad_norm": 0.24529071152210236, "learning_rate": 0.00045512952729947196, "loss": 1.8954, "step": 2333 }, { "epoch": 0.2279296875, "grad_norm": 0.2747213840484619, "learning_rate": 0.0004550869054360448, "loss": 1.8707, "step": 2334 }, { "epoch": 0.22802734375, "grad_norm": 0.24370868504047394, "learning_rate": 0.00045504426558353123, "loss": 1.8723, "step": 2335 }, { "epoch": 0.228125, "grad_norm": 0.2064986526966095, "learning_rate": 0.0004550016077461906, "loss": 1.893, "step": 2336 }, { "epoch": 0.22822265625, "grad_norm": 0.28048041462898254, "learning_rate": 0.000454958931928284, "loss": 1.8731, "step": 2337 }, { "epoch": 0.2283203125, "grad_norm": 0.2515961527824402, "learning_rate": 0.0004549162381340744, "loss": 1.9322, "step": 2338 }, { "epoch": 0.22841796875, "grad_norm": 0.21575289964675903, "learning_rate": 0.0004548735263678266, "loss": 1.8622, "step": 2339 }, { "epoch": 0.228515625, "grad_norm": 0.22191932797431946, "learning_rate": 0.00045483079663380686, "loss": 1.9339, "step": 2340 }, { "epoch": 0.22861328125, "grad_norm": 0.27168577909469604, "learning_rate": 0.0004547880489362838, "loss": 1.8946, "step": 2341 }, { "epoch": 0.2287109375, "grad_norm": 0.2825971841812134, "learning_rate": 0.0004547452832795275, "loss": 1.9222, "step": 2342 }, { "epoch": 0.22880859375, "grad_norm": 0.2461196780204773, "learning_rate": 0.00045470249966780976, "loss": 1.8803, "step": 2343 }, { "epoch": 0.22890625, "grad_norm": 0.2580738067626953, "learning_rate": 0.00045465969810540427, "loss": 1.9044, "step": 2344 }, { "epoch": 0.22900390625, "grad_norm": 0.22742857038974762, "learning_rate": 0.0004546168785965866, "loss": 1.8423, "step": 2345 }, { "epoch": 0.2291015625, "grad_norm": 0.2607462406158447, "learning_rate": 0.00045457404114563406, "loss": 1.9238, "step": 2346 }, { "epoch": 0.22919921875, "grad_norm": 0.21950286626815796, "learning_rate": 0.00045453118575682565, "loss": 1.8932, "step": 2347 }, { "epoch": 0.229296875, "grad_norm": 0.2564532160758972, "learning_rate": 0.0004544883124344423, "loss": 1.8941, "step": 2348 }, { "epoch": 0.22939453125, "grad_norm": 0.2837105393409729, "learning_rate": 0.0004544454211827667, "loss": 1.9385, "step": 2349 }, { "epoch": 0.2294921875, "grad_norm": 0.268484503030777, "learning_rate": 0.00045440251200608316, "loss": 1.8984, "step": 2350 }, { "epoch": 0.22958984375, "grad_norm": 0.3416820466518402, "learning_rate": 0.0004543595849086782, "loss": 1.7954, "step": 2351 }, { "epoch": 0.2296875, "grad_norm": 0.3154846727848053, "learning_rate": 0.00045431663989483955, "loss": 1.8759, "step": 2352 }, { "epoch": 0.22978515625, "grad_norm": 0.29342061281204224, 
"learning_rate": 0.00045427367696885727, "loss": 1.8975, "step": 2353 }, { "epoch": 0.2298828125, "grad_norm": 0.27414578199386597, "learning_rate": 0.00045423069613502275, "loss": 1.8983, "step": 2354 }, { "epoch": 0.22998046875, "grad_norm": 0.2255915254354477, "learning_rate": 0.00045418769739762966, "loss": 1.9371, "step": 2355 }, { "epoch": 0.230078125, "grad_norm": 0.21754321455955505, "learning_rate": 0.000454144680760973, "loss": 1.8771, "step": 2356 }, { "epoch": 0.23017578125, "grad_norm": 0.26277226209640503, "learning_rate": 0.0004541016462293498, "loss": 1.8672, "step": 2357 }, { "epoch": 0.2302734375, "grad_norm": 0.22035324573516846, "learning_rate": 0.0004540585938070589, "loss": 1.8724, "step": 2358 }, { "epoch": 0.23037109375, "grad_norm": 0.22731097042560577, "learning_rate": 0.00045401552349840077, "loss": 1.9341, "step": 2359 }, { "epoch": 0.23046875, "grad_norm": 0.23395659029483795, "learning_rate": 0.00045397243530767773, "loss": 1.9038, "step": 2360 }, { "epoch": 0.23056640625, "grad_norm": 0.24531114101409912, "learning_rate": 0.000453929329239194, "loss": 1.8877, "step": 2361 }, { "epoch": 0.2306640625, "grad_norm": 0.25629279017448425, "learning_rate": 0.00045388620529725546, "loss": 1.8831, "step": 2362 }, { "epoch": 0.23076171875, "grad_norm": 0.24789990484714508, "learning_rate": 0.00045384306348616977, "loss": 1.8992, "step": 2363 }, { "epoch": 0.230859375, "grad_norm": 0.23063692450523376, "learning_rate": 0.00045379990381024644, "loss": 1.8645, "step": 2364 }, { "epoch": 0.23095703125, "grad_norm": 0.274730920791626, "learning_rate": 0.0004537567262737968, "loss": 1.8846, "step": 2365 }, { "epoch": 0.2310546875, "grad_norm": 0.21056944131851196, "learning_rate": 0.00045371353088113377, "loss": 1.8942, "step": 2366 }, { "epoch": 0.23115234375, "grad_norm": 0.2247105836868286, "learning_rate": 0.0004536703176365723, "loss": 1.9012, "step": 2367 }, { "epoch": 0.23125, "grad_norm": 0.23797035217285156, "learning_rate": 0.00045362708654442897, "loss": 1.8886, "step": 2368 }, { "epoch": 0.23134765625, "grad_norm": 0.2110954225063324, "learning_rate": 0.0004535838376090222, "loss": 1.8937, "step": 2369 }, { "epoch": 0.2314453125, "grad_norm": 0.2745433747768402, "learning_rate": 0.00045354057083467217, "loss": 1.8799, "step": 2370 }, { "epoch": 0.23154296875, "grad_norm": 0.20481610298156738, "learning_rate": 0.0004534972862257008, "loss": 1.8282, "step": 2371 }, { "epoch": 0.231640625, "grad_norm": 0.4868389368057251, "learning_rate": 0.00045345398378643186, "loss": 1.8975, "step": 2372 }, { "epoch": 0.23173828125, "grad_norm": 0.2749464213848114, "learning_rate": 0.00045341066352119096, "loss": 1.8683, "step": 2373 }, { "epoch": 0.2318359375, "grad_norm": 0.2716321051120758, "learning_rate": 0.00045336732543430536, "loss": 1.8833, "step": 2374 }, { "epoch": 0.23193359375, "grad_norm": 0.2876909077167511, "learning_rate": 0.00045332396953010415, "loss": 1.881, "step": 2375 }, { "epoch": 0.23203125, "grad_norm": 0.2515881061553955, "learning_rate": 0.0004532805958129181, "loss": 1.9023, "step": 2376 }, { "epoch": 0.23212890625, "grad_norm": 0.2852384150028229, "learning_rate": 0.00045323720428708003, "loss": 1.924, "step": 2377 }, { "epoch": 0.2322265625, "grad_norm": 0.22907400131225586, "learning_rate": 0.0004531937949569243, "loss": 1.8765, "step": 2378 }, { "epoch": 0.23232421875, "grad_norm": 0.2648322582244873, "learning_rate": 0.0004531503678267871, "loss": 1.9247, "step": 2379 }, { "epoch": 0.232421875, "grad_norm": 0.28034675121307373, "learning_rate": 
0.00045310692290100637, "loss": 1.8948, "step": 2380 }, { "epoch": 0.23251953125, "grad_norm": 0.29645946621894836, "learning_rate": 0.00045306346018392197, "loss": 1.9011, "step": 2381 }, { "epoch": 0.2326171875, "grad_norm": 0.24943086504936218, "learning_rate": 0.0004530199796798754, "loss": 1.9089, "step": 2382 }, { "epoch": 0.23271484375, "grad_norm": 0.279883474111557, "learning_rate": 0.00045297648139321, "loss": 1.8779, "step": 2383 }, { "epoch": 0.2328125, "grad_norm": 0.22576992213726044, "learning_rate": 0.00045293296532827074, "loss": 1.9037, "step": 2384 }, { "epoch": 0.23291015625, "grad_norm": 0.2487352192401886, "learning_rate": 0.0004528894314894047, "loss": 1.9089, "step": 2385 }, { "epoch": 0.2330078125, "grad_norm": 0.25155317783355713, "learning_rate": 0.0004528458798809603, "loss": 1.8979, "step": 2386 }, { "epoch": 0.23310546875, "grad_norm": 0.25363826751708984, "learning_rate": 0.00045280231050728815, "loss": 1.9251, "step": 2387 }, { "epoch": 0.233203125, "grad_norm": 0.2635079026222229, "learning_rate": 0.0004527587233727404, "loss": 1.9152, "step": 2388 }, { "epoch": 0.23330078125, "grad_norm": 0.2683078944683075, "learning_rate": 0.0004527151184816709, "loss": 1.8777, "step": 2389 }, { "epoch": 0.2333984375, "grad_norm": 0.23932483792304993, "learning_rate": 0.00045267149583843555, "loss": 1.8953, "step": 2390 }, { "epoch": 0.23349609375, "grad_norm": 0.20382820069789886, "learning_rate": 0.00045262785544739173, "loss": 1.9141, "step": 2391 }, { "epoch": 0.23359375, "grad_norm": 0.21496914327144623, "learning_rate": 0.00045258419731289884, "loss": 1.9053, "step": 2392 }, { "epoch": 0.23369140625, "grad_norm": 0.3160604238510132, "learning_rate": 0.00045254052143931786, "loss": 1.9264, "step": 2393 }, { "epoch": 0.2337890625, "grad_norm": 0.3175326883792877, "learning_rate": 0.0004524968278310117, "loss": 1.8834, "step": 2394 }, { "epoch": 0.23388671875, "grad_norm": 0.2783551514148712, "learning_rate": 0.0004524531164923448, "loss": 1.8688, "step": 2395 }, { "epoch": 0.233984375, "grad_norm": 0.24283266067504883, "learning_rate": 0.0004524093874276838, "loss": 1.9118, "step": 2396 }, { "epoch": 0.23408203125, "grad_norm": 0.26176488399505615, "learning_rate": 0.0004523656406413967, "loss": 1.9251, "step": 2397 }, { "epoch": 0.2341796875, "grad_norm": 0.24710685014724731, "learning_rate": 0.0004523218761378533, "loss": 1.8714, "step": 2398 }, { "epoch": 0.23427734375, "grad_norm": 0.21796317398548126, "learning_rate": 0.00045227809392142546, "loss": 1.8935, "step": 2399 }, { "epoch": 0.234375, "grad_norm": 0.25704821944236755, "learning_rate": 0.00045223429399648664, "loss": 1.876, "step": 2400 }, { "epoch": 0.23447265625, "grad_norm": 0.2729525864124298, "learning_rate": 0.0004521904763674119, "loss": 1.9012, "step": 2401 }, { "epoch": 0.2345703125, "grad_norm": 0.18755966424942017, "learning_rate": 0.00045214664103857845, "loss": 1.8964, "step": 2402 }, { "epoch": 0.23466796875, "grad_norm": 0.2789023816585541, "learning_rate": 0.0004521027880143648, "loss": 1.8949, "step": 2403 }, { "epoch": 0.234765625, "grad_norm": 0.21166810393333435, "learning_rate": 0.00045205891729915176, "loss": 1.9133, "step": 2404 }, { "epoch": 0.23486328125, "grad_norm": 0.22952231764793396, "learning_rate": 0.00045201502889732144, "loss": 1.9143, "step": 2405 }, { "epoch": 0.2349609375, "grad_norm": 0.23634375631809235, "learning_rate": 0.00045197112281325784, "loss": 1.8645, "step": 2406 }, { "epoch": 0.23505859375, "grad_norm": 0.25142142176628113, "learning_rate": 
0.00045192719905134705, "loss": 1.9343, "step": 2407 }, { "epoch": 0.23515625, "grad_norm": 0.2976498007774353, "learning_rate": 0.0004518832576159764, "loss": 1.918, "step": 2408 }, { "epoch": 0.23525390625, "grad_norm": 0.2667110562324524, "learning_rate": 0.0004518392985115354, "loss": 1.9093, "step": 2409 }, { "epoch": 0.2353515625, "grad_norm": 0.2608035206794739, "learning_rate": 0.00045179532174241516, "loss": 1.9234, "step": 2410 }, { "epoch": 0.23544921875, "grad_norm": 0.26505768299102783, "learning_rate": 0.0004517513273130085, "loss": 1.8844, "step": 2411 }, { "epoch": 0.235546875, "grad_norm": 0.31293320655822754, "learning_rate": 0.0004517073152277101, "loss": 1.907, "step": 2412 }, { "epoch": 0.23564453125, "grad_norm": 0.27297648787498474, "learning_rate": 0.00045166328549091643, "loss": 1.9158, "step": 2413 }, { "epoch": 0.2357421875, "grad_norm": 0.2741674780845642, "learning_rate": 0.00045161923810702563, "loss": 1.9011, "step": 2414 }, { "epoch": 0.23583984375, "grad_norm": 0.29327699542045593, "learning_rate": 0.00045157517308043774, "loss": 1.894, "step": 2415 }, { "epoch": 0.2359375, "grad_norm": 0.28643742203712463, "learning_rate": 0.00045153109041555425, "loss": 1.8993, "step": 2416 }, { "epoch": 0.23603515625, "grad_norm": 0.46471482515335083, "learning_rate": 0.00045148699011677873, "loss": 1.9068, "step": 2417 }, { "epoch": 0.2361328125, "grad_norm": 0.331559956073761, "learning_rate": 0.0004514428721885165, "loss": 1.9106, "step": 2418 }, { "epoch": 0.23623046875, "grad_norm": 0.2610962688922882, "learning_rate": 0.00045139873663517445, "loss": 1.8757, "step": 2419 }, { "epoch": 0.236328125, "grad_norm": 0.32770147919654846, "learning_rate": 0.0004513545834611614, "loss": 1.9095, "step": 2420 }, { "epoch": 0.23642578125, "grad_norm": 0.2246219664812088, "learning_rate": 0.00045131041267088787, "loss": 1.8643, "step": 2421 }, { "epoch": 0.2365234375, "grad_norm": 0.31902652978897095, "learning_rate": 0.00045126622426876596, "loss": 1.9061, "step": 2422 }, { "epoch": 0.23662109375, "grad_norm": 0.20017661154270172, "learning_rate": 0.00045122201825920983, "loss": 1.9162, "step": 2423 }, { "epoch": 0.23671875, "grad_norm": 0.30280932784080505, "learning_rate": 0.0004511777946466353, "loss": 1.8944, "step": 2424 }, { "epoch": 0.23681640625, "grad_norm": 0.20749028027057648, "learning_rate": 0.00045113355343545994, "loss": 1.871, "step": 2425 }, { "epoch": 0.2369140625, "grad_norm": 0.2936987280845642, "learning_rate": 0.000451089294630103, "loss": 1.8808, "step": 2426 }, { "epoch": 0.23701171875, "grad_norm": 0.18236121535301208, "learning_rate": 0.0004510450182349854, "loss": 1.8534, "step": 2427 }, { "epoch": 0.237109375, "grad_norm": 0.25331661105155945, "learning_rate": 0.0004510007242545302, "loss": 1.8707, "step": 2428 }, { "epoch": 0.23720703125, "grad_norm": 0.2117713838815689, "learning_rate": 0.00045095641269316184, "loss": 1.9254, "step": 2429 }, { "epoch": 0.2373046875, "grad_norm": 0.25148898363113403, "learning_rate": 0.0004509120835553067, "loss": 1.8973, "step": 2430 }, { "epoch": 0.23740234375, "grad_norm": 0.24941754341125488, "learning_rate": 0.0004508677368453929, "loss": 1.8605, "step": 2431 }, { "epoch": 0.2375, "grad_norm": 0.20781292021274567, "learning_rate": 0.00045082337256785015, "loss": 1.8802, "step": 2432 }, { "epoch": 0.23759765625, "grad_norm": 0.2777664363384247, "learning_rate": 0.0004507789907271102, "loss": 1.8958, "step": 2433 }, { "epoch": 0.2376953125, "grad_norm": 0.2164555788040161, "learning_rate": 0.0004507345913276063, 
"loss": 1.8586, "step": 2434 }, { "epoch": 0.23779296875, "grad_norm": 0.2553316354751587, "learning_rate": 0.0004506901743737736, "loss": 1.8892, "step": 2435 }, { "epoch": 0.237890625, "grad_norm": 0.23472066223621368, "learning_rate": 0.0004506457398700489, "loss": 1.8896, "step": 2436 }, { "epoch": 0.23798828125, "grad_norm": 0.2276161015033722, "learning_rate": 0.00045060128782087094, "loss": 1.8851, "step": 2437 }, { "epoch": 0.2380859375, "grad_norm": 0.21676012873649597, "learning_rate": 0.00045055681823068006, "loss": 1.8854, "step": 2438 }, { "epoch": 0.23818359375, "grad_norm": 0.22366516292095184, "learning_rate": 0.00045051233110391823, "loss": 1.9309, "step": 2439 }, { "epoch": 0.23828125, "grad_norm": 0.28527531027793884, "learning_rate": 0.00045046782644502946, "loss": 1.8901, "step": 2440 }, { "epoch": 0.23837890625, "grad_norm": 0.3597792088985443, "learning_rate": 0.0004504233042584594, "loss": 1.8695, "step": 2441 }, { "epoch": 0.2384765625, "grad_norm": 0.2790103554725647, "learning_rate": 0.00045037876454865526, "loss": 1.8756, "step": 2442 }, { "epoch": 0.23857421875, "grad_norm": 0.3042321503162384, "learning_rate": 0.0004503342073200663, "loss": 1.8643, "step": 2443 }, { "epoch": 0.238671875, "grad_norm": 0.24617038667201996, "learning_rate": 0.0004502896325771433, "loss": 1.9004, "step": 2444 }, { "epoch": 0.23876953125, "grad_norm": 0.29559648036956787, "learning_rate": 0.000450245040324339, "loss": 1.9126, "step": 2445 }, { "epoch": 0.2388671875, "grad_norm": 0.28401464223861694, "learning_rate": 0.0004502004305661077, "loss": 1.8639, "step": 2446 }, { "epoch": 0.23896484375, "grad_norm": 0.2113400399684906, "learning_rate": 0.00045015580330690547, "loss": 1.8863, "step": 2447 }, { "epoch": 0.2390625, "grad_norm": 0.28481796383857727, "learning_rate": 0.0004501111585511903, "loss": 1.8826, "step": 2448 }, { "epoch": 0.23916015625, "grad_norm": 0.25684455037117004, "learning_rate": 0.00045006649630342164, "loss": 1.8952, "step": 2449 }, { "epoch": 0.2392578125, "grad_norm": 0.23974579572677612, "learning_rate": 0.00045002181656806105, "loss": 1.8615, "step": 2450 }, { "epoch": 0.23935546875, "grad_norm": 0.24866057932376862, "learning_rate": 0.0004499771193495715, "loss": 1.9343, "step": 2451 }, { "epoch": 0.239453125, "grad_norm": 0.22083763778209686, "learning_rate": 0.0004499324046524179, "loss": 1.8349, "step": 2452 }, { "epoch": 0.23955078125, "grad_norm": 0.33529961109161377, "learning_rate": 0.00044988767248106676, "loss": 1.9153, "step": 2453 }, { "epoch": 0.2396484375, "grad_norm": 0.26694998145103455, "learning_rate": 0.0004498429228399866, "loss": 1.8993, "step": 2454 }, { "epoch": 0.23974609375, "grad_norm": 0.29110854864120483, "learning_rate": 0.0004497981557336474, "loss": 1.9531, "step": 2455 }, { "epoch": 0.23984375, "grad_norm": 0.3439907729625702, "learning_rate": 0.00044975337116652097, "loss": 1.8862, "step": 2456 }, { "epoch": 0.23994140625, "grad_norm": 0.2403963953256607, "learning_rate": 0.00044970856914308097, "loss": 1.8965, "step": 2457 }, { "epoch": 0.2400390625, "grad_norm": 0.28895774483680725, "learning_rate": 0.00044966374966780266, "loss": 1.8895, "step": 2458 }, { "epoch": 0.24013671875, "grad_norm": 0.2802990972995758, "learning_rate": 0.00044961891274516313, "loss": 1.8932, "step": 2459 }, { "epoch": 0.240234375, "grad_norm": 0.251863032579422, "learning_rate": 0.0004495740583796413, "loss": 1.9222, "step": 2460 }, { "epoch": 0.24033203125, "grad_norm": 0.22680626809597015, "learning_rate": 0.0004495291865757175, "loss": 
1.8676, "step": 2461 }, { "epoch": 0.2404296875, "grad_norm": 0.24981464445590973, "learning_rate": 0.0004494842973378742, "loss": 1.8868, "step": 2462 }, { "epoch": 0.24052734375, "grad_norm": 0.19781389832496643, "learning_rate": 0.0004494393906705954, "loss": 1.9079, "step": 2463 }, { "epoch": 0.240625, "grad_norm": 0.22644692659378052, "learning_rate": 0.00044939446657836686, "loss": 1.9074, "step": 2464 }, { "epoch": 0.24072265625, "grad_norm": 0.2107515037059784, "learning_rate": 0.0004493495250656761, "loss": 1.8994, "step": 2465 }, { "epoch": 0.2408203125, "grad_norm": 0.18987129628658295, "learning_rate": 0.00044930456613701236, "loss": 1.9397, "step": 2466 }, { "epoch": 0.24091796875, "grad_norm": 0.21435169875621796, "learning_rate": 0.00044925958979686676, "loss": 1.9007, "step": 2467 }, { "epoch": 0.241015625, "grad_norm": 0.1948396861553192, "learning_rate": 0.00044921459604973184, "loss": 1.8762, "step": 2468 }, { "epoch": 0.24111328125, "grad_norm": 0.2153381109237671, "learning_rate": 0.00044916958490010217, "loss": 1.9325, "step": 2469 }, { "epoch": 0.2412109375, "grad_norm": 0.21237823367118835, "learning_rate": 0.00044912455635247404, "loss": 1.9006, "step": 2470 }, { "epoch": 0.24130859375, "grad_norm": 0.19011497497558594, "learning_rate": 0.0004490795104113453, "loss": 1.9169, "step": 2471 }, { "epoch": 0.24140625, "grad_norm": 0.2259521484375, "learning_rate": 0.0004490344470812157, "loss": 1.8984, "step": 2472 }, { "epoch": 0.24150390625, "grad_norm": 0.1522991955280304, "learning_rate": 0.0004489893663665866, "loss": 1.838, "step": 2473 }, { "epoch": 0.2416015625, "grad_norm": 0.21235786378383636, "learning_rate": 0.00044894426827196124, "loss": 1.9033, "step": 2474 }, { "epoch": 0.24169921875, "grad_norm": 0.20965880155563354, "learning_rate": 0.0004488991528018445, "loss": 1.9381, "step": 2475 }, { "epoch": 0.241796875, "grad_norm": 0.25868213176727295, "learning_rate": 0.0004488540199607429, "loss": 1.8907, "step": 2476 }, { "epoch": 0.24189453125, "grad_norm": 0.2545277178287506, "learning_rate": 0.00044880886975316503, "loss": 1.8905, "step": 2477 }, { "epoch": 0.2419921875, "grad_norm": 0.17712853848934174, "learning_rate": 0.00044876370218362085, "loss": 1.9022, "step": 2478 }, { "epoch": 0.24208984375, "grad_norm": 0.24900510907173157, "learning_rate": 0.0004487185172566222, "loss": 1.9247, "step": 2479 }, { "epoch": 0.2421875, "grad_norm": 0.24943552911281586, "learning_rate": 0.0004486733149766827, "loss": 1.9378, "step": 2480 }, { "epoch": 0.24228515625, "grad_norm": 0.27004435658454895, "learning_rate": 0.0004486280953483177, "loss": 1.8644, "step": 2481 }, { "epoch": 0.2423828125, "grad_norm": 0.21977658569812775, "learning_rate": 0.0004485828583760441, "loss": 1.9166, "step": 2482 }, { "epoch": 0.24248046875, "grad_norm": 0.17602773010730743, "learning_rate": 0.0004485376040643808, "loss": 1.8733, "step": 2483 }, { "epoch": 0.242578125, "grad_norm": 0.2778310179710388, "learning_rate": 0.0004484923324178483, "loss": 1.8788, "step": 2484 }, { "epoch": 0.24267578125, "grad_norm": 0.33434948325157166, "learning_rate": 0.00044844704344096863, "loss": 1.8932, "step": 2485 }, { "epoch": 0.2427734375, "grad_norm": 0.2541770040988922, "learning_rate": 0.00044840173713826603, "loss": 1.8727, "step": 2486 }, { "epoch": 0.24287109375, "grad_norm": 0.21049551665782928, "learning_rate": 0.00044835641351426614, "loss": 1.8567, "step": 2487 }, { "epoch": 0.24296875, "grad_norm": 0.2456684708595276, "learning_rate": 0.0004483110725734962, "loss": 1.907, "step": 2488 
}, { "epoch": 0.24306640625, "grad_norm": 0.2708995044231415, "learning_rate": 0.0004482657143204857, "loss": 1.9146, "step": 2489 }, { "epoch": 0.2431640625, "grad_norm": 0.2767466604709625, "learning_rate": 0.00044822033875976513, "loss": 1.9058, "step": 2490 }, { "epoch": 0.24326171875, "grad_norm": 0.21536017954349518, "learning_rate": 0.00044817494589586746, "loss": 1.8867, "step": 2491 }, { "epoch": 0.243359375, "grad_norm": 0.32470428943634033, "learning_rate": 0.0004481295357333268, "loss": 1.8651, "step": 2492 }, { "epoch": 0.24345703125, "grad_norm": 0.35625115036964417, "learning_rate": 0.00044808410827667936, "loss": 1.8959, "step": 2493 }, { "epoch": 0.2435546875, "grad_norm": 0.2434857189655304, "learning_rate": 0.0004480386635304629, "loss": 1.8805, "step": 2494 }, { "epoch": 0.24365234375, "grad_norm": 0.2814987003803253, "learning_rate": 0.000447993201499217, "loss": 1.9388, "step": 2495 }, { "epoch": 0.24375, "grad_norm": 0.3357986807823181, "learning_rate": 0.00044794772218748275, "loss": 1.9182, "step": 2496 }, { "epoch": 0.24384765625, "grad_norm": 0.26190170645713806, "learning_rate": 0.00044790222559980313, "loss": 1.9124, "step": 2497 }, { "epoch": 0.2439453125, "grad_norm": 0.2307833731174469, "learning_rate": 0.0004478567117407231, "loss": 1.9049, "step": 2498 }, { "epoch": 0.24404296875, "grad_norm": 0.24210835993289948, "learning_rate": 0.00044781118061478897, "loss": 1.8924, "step": 2499 }, { "epoch": 0.244140625, "grad_norm": 0.2340785562992096, "learning_rate": 0.00044776563222654876, "loss": 1.8846, "step": 2500 }, { "epoch": 0.24423828125, "grad_norm": 0.20149527490139008, "learning_rate": 0.0004477200665805525, "loss": 1.8752, "step": 2501 }, { "epoch": 0.2443359375, "grad_norm": 0.21381939947605133, "learning_rate": 0.00044767448368135176, "loss": 1.8537, "step": 2502 }, { "epoch": 0.24443359375, "grad_norm": 0.20310865342617035, "learning_rate": 0.0004476288835334999, "loss": 1.926, "step": 2503 }, { "epoch": 0.24453125, "grad_norm": 0.21173352003097534, "learning_rate": 0.0004475832661415518, "loss": 1.9143, "step": 2504 }, { "epoch": 0.24462890625, "grad_norm": 0.22699382901191711, "learning_rate": 0.0004475376315100645, "loss": 1.9025, "step": 2505 }, { "epoch": 0.2447265625, "grad_norm": 0.2502591907978058, "learning_rate": 0.0004474919796435963, "loss": 1.9133, "step": 2506 }, { "epoch": 0.24482421875, "grad_norm": 0.18316835165023804, "learning_rate": 0.00044744631054670743, "loss": 1.8408, "step": 2507 }, { "epoch": 0.244921875, "grad_norm": 0.2320968210697174, "learning_rate": 0.00044740062422395994, "loss": 1.924, "step": 2508 }, { "epoch": 0.24501953125, "grad_norm": 0.22780783474445343, "learning_rate": 0.0004473549206799174, "loss": 1.8782, "step": 2509 }, { "epoch": 0.2451171875, "grad_norm": 0.22676895558834076, "learning_rate": 0.0004473091999191452, "loss": 1.8904, "step": 2510 }, { "epoch": 0.24521484375, "grad_norm": 0.22365321218967438, "learning_rate": 0.0004472634619462105, "loss": 1.9029, "step": 2511 }, { "epoch": 0.2453125, "grad_norm": 0.24881736934185028, "learning_rate": 0.00044721770676568203, "loss": 1.8722, "step": 2512 }, { "epoch": 0.24541015625, "grad_norm": 0.1900773048400879, "learning_rate": 0.0004471719343821303, "loss": 1.9069, "step": 2513 }, { "epoch": 0.2455078125, "grad_norm": 0.253597229719162, "learning_rate": 0.00044712614480012773, "loss": 1.9053, "step": 2514 }, { "epoch": 0.24560546875, "grad_norm": 0.2389373481273651, "learning_rate": 0.0004470803380242481, "loss": 1.892, "step": 2515 }, { "epoch": 
0.245703125, "grad_norm": 0.21958309412002563, "learning_rate": 0.0004470345140590673, "loss": 1.9074, "step": 2516 }, { "epoch": 0.24580078125, "grad_norm": 0.24225091934204102, "learning_rate": 0.00044698867290916263, "loss": 1.8854, "step": 2517 }, { "epoch": 0.2458984375, "grad_norm": 0.19851315021514893, "learning_rate": 0.0004469428145791132, "loss": 1.9327, "step": 2518 }, { "epoch": 0.24599609375, "grad_norm": 0.27460166811943054, "learning_rate": 0.0004468969390734998, "loss": 1.8819, "step": 2519 }, { "epoch": 0.24609375, "grad_norm": 0.250698447227478, "learning_rate": 0.00044685104639690515, "loss": 1.8329, "step": 2520 }, { "epoch": 0.24619140625, "grad_norm": 0.22741656005382538, "learning_rate": 0.0004468051365539133, "loss": 1.9376, "step": 2521 }, { "epoch": 0.2462890625, "grad_norm": 0.2052220106124878, "learning_rate": 0.00044675920954911045, "loss": 1.9158, "step": 2522 }, { "epoch": 0.24638671875, "grad_norm": 0.20518803596496582, "learning_rate": 0.0004467132653870842, "loss": 1.8723, "step": 2523 }, { "epoch": 0.246484375, "grad_norm": 0.2157951146364212, "learning_rate": 0.00044666730407242407, "loss": 1.8463, "step": 2524 }, { "epoch": 0.24658203125, "grad_norm": 0.2367670238018036, "learning_rate": 0.000446621325609721, "loss": 1.9047, "step": 2525 }, { "epoch": 0.2466796875, "grad_norm": 0.2216240018606186, "learning_rate": 0.00044657533000356793, "loss": 1.8725, "step": 2526 }, { "epoch": 0.24677734375, "grad_norm": 0.20380742847919464, "learning_rate": 0.00044652931725855946, "loss": 1.9393, "step": 2527 }, { "epoch": 0.246875, "grad_norm": 0.2439001351594925, "learning_rate": 0.00044648328737929176, "loss": 1.8832, "step": 2528 }, { "epoch": 0.24697265625, "grad_norm": 0.19296857714653015, "learning_rate": 0.00044643724037036293, "loss": 1.8894, "step": 2529 }, { "epoch": 0.2470703125, "grad_norm": 0.23245202004909515, "learning_rate": 0.00044639117623637253, "loss": 1.9035, "step": 2530 }, { "epoch": 0.24716796875, "grad_norm": 0.23975121974945068, "learning_rate": 0.00044634509498192197, "loss": 1.9336, "step": 2531 }, { "epoch": 0.247265625, "grad_norm": 0.20720966160297394, "learning_rate": 0.0004462989966116145, "loss": 1.9538, "step": 2532 }, { "epoch": 0.24736328125, "grad_norm": 0.23715616762638092, "learning_rate": 0.0004462528811300548, "loss": 1.8944, "step": 2533 }, { "epoch": 0.2474609375, "grad_norm": 0.22838465869426727, "learning_rate": 0.00044620674854184937, "loss": 1.8472, "step": 2534 }, { "epoch": 0.24755859375, "grad_norm": 0.19445407390594482, "learning_rate": 0.00044616059885160657, "loss": 1.9021, "step": 2535 }, { "epoch": 0.24765625, "grad_norm": 0.22580035030841827, "learning_rate": 0.00044611443206393634, "loss": 1.9035, "step": 2536 }, { "epoch": 0.24775390625, "grad_norm": 0.18850651383399963, "learning_rate": 0.0004460682481834503, "loss": 1.8769, "step": 2537 }, { "epoch": 0.2478515625, "grad_norm": 0.20952773094177246, "learning_rate": 0.0004460220472147617, "loss": 1.9035, "step": 2538 }, { "epoch": 0.24794921875, "grad_norm": 0.2210458666086197, "learning_rate": 0.00044597582916248567, "loss": 1.882, "step": 2539 }, { "epoch": 0.248046875, "grad_norm": 0.22584138810634613, "learning_rate": 0.0004459295940312391, "loss": 1.9031, "step": 2540 }, { "epoch": 0.24814453125, "grad_norm": 0.22548091411590576, "learning_rate": 0.0004458833418256404, "loss": 1.9215, "step": 2541 }, { "epoch": 0.2482421875, "grad_norm": 0.22244450449943542, "learning_rate": 0.0004458370725503097, "loss": 1.8845, "step": 2542 }, { "epoch": 
0.24833984375, "grad_norm": 0.25378334522247314, "learning_rate": 0.00044579078620986896, "loss": 1.908, "step": 2543 }, { "epoch": 0.2484375, "grad_norm": 0.3173237144947052, "learning_rate": 0.0004457444828089417, "loss": 1.9347, "step": 2544 }, { "epoch": 0.24853515625, "grad_norm": 0.3610992431640625, "learning_rate": 0.00044569816235215333, "loss": 1.8956, "step": 2545 }, { "epoch": 0.2486328125, "grad_norm": 0.24675659835338593, "learning_rate": 0.00044565182484413075, "loss": 1.905, "step": 2546 }, { "epoch": 0.24873046875, "grad_norm": 0.22318989038467407, "learning_rate": 0.0004456054702895027, "loss": 1.8726, "step": 2547 }, { "epoch": 0.248828125, "grad_norm": 0.3162762522697449, "learning_rate": 0.0004455590986928996, "loss": 1.8969, "step": 2548 }, { "epoch": 0.24892578125, "grad_norm": 0.2868858575820923, "learning_rate": 0.0004455127100589536, "loss": 1.8387, "step": 2549 }, { "epoch": 0.2490234375, "grad_norm": 0.20986978709697723, "learning_rate": 0.0004454663043922984, "loss": 1.86, "step": 2550 }, { "epoch": 0.24912109375, "grad_norm": 0.20684708654880524, "learning_rate": 0.0004454198816975697, "loss": 1.8546, "step": 2551 }, { "epoch": 0.24921875, "grad_norm": 0.2441062480211258, "learning_rate": 0.00044537344197940457, "loss": 1.8749, "step": 2552 }, { "epoch": 0.24931640625, "grad_norm": 0.2610075771808624, "learning_rate": 0.00044532698524244184, "loss": 1.8743, "step": 2553 }, { "epoch": 0.2494140625, "grad_norm": 0.21725575625896454, "learning_rate": 0.0004452805114913223, "loss": 1.942, "step": 2554 }, { "epoch": 0.24951171875, "grad_norm": 0.3064901828765869, "learning_rate": 0.0004452340207306883, "loss": 1.8555, "step": 2555 }, { "epoch": 0.249609375, "grad_norm": 0.3561396300792694, "learning_rate": 0.00044518751296518367, "loss": 1.9103, "step": 2556 }, { "epoch": 0.24970703125, "grad_norm": 0.22716249525547028, "learning_rate": 0.0004451409881994543, "loss": 1.8788, "step": 2557 }, { "epoch": 0.2498046875, "grad_norm": 0.20097126066684723, "learning_rate": 0.00044509444643814745, "loss": 1.8081, "step": 2558 }, { "epoch": 0.24990234375, "grad_norm": 0.2304898500442505, "learning_rate": 0.0004450478876859123, "loss": 1.8918, "step": 2559 }, { "epoch": 0.25, "grad_norm": 0.22153149545192719, "learning_rate": 0.00044500131194739976, "loss": 1.8949, "step": 2560 }, { "epoch": 0.25009765625, "grad_norm": 0.21728168427944183, "learning_rate": 0.0004449547192272622, "loss": 1.8991, "step": 2561 }, { "epoch": 0.2501953125, "grad_norm": 0.191215381026268, "learning_rate": 0.00044490810953015376, "loss": 1.8753, "step": 2562 }, { "epoch": 0.25029296875, "grad_norm": 0.20973442494869232, "learning_rate": 0.00044486148286073047, "loss": 1.8833, "step": 2563 }, { "epoch": 0.250390625, "grad_norm": 0.16450902819633484, "learning_rate": 0.00044481483922364986, "loss": 1.8508, "step": 2564 }, { "epoch": 0.25048828125, "grad_norm": 0.19612371921539307, "learning_rate": 0.00044476817862357135, "loss": 1.8916, "step": 2565 }, { "epoch": 0.2505859375, "grad_norm": 0.17144112288951874, "learning_rate": 0.00044472150106515565, "loss": 1.9255, "step": 2566 }, { "epoch": 0.25068359375, "grad_norm": 0.2133835256099701, "learning_rate": 0.00044467480655306565, "loss": 1.8678, "step": 2567 }, { "epoch": 0.25078125, "grad_norm": 0.21594852209091187, "learning_rate": 0.0004446280950919657, "loss": 1.8686, "step": 2568 }, { "epoch": 0.25087890625, "grad_norm": 0.24870210886001587, "learning_rate": 0.00044458136668652185, "loss": 1.9342, "step": 2569 }, { "epoch": 0.2509765625, 
"grad_norm": 0.2506871223449707, "learning_rate": 0.0004445346213414017, "loss": 1.9142, "step": 2570 }, { "epoch": 0.25107421875, "grad_norm": 0.21057821810245514, "learning_rate": 0.0004444878590612749, "loss": 1.8809, "step": 2571 }, { "epoch": 0.251171875, "grad_norm": 0.2578639090061188, "learning_rate": 0.0004444410798508125, "loss": 1.8706, "step": 2572 }, { "epoch": 0.25126953125, "grad_norm": 0.23633147776126862, "learning_rate": 0.00044439428371468724, "loss": 1.8912, "step": 2573 }, { "epoch": 0.2513671875, "grad_norm": 0.222493976354599, "learning_rate": 0.00044434747065757383, "loss": 1.8508, "step": 2574 }, { "epoch": 0.25146484375, "grad_norm": 0.2404196411371231, "learning_rate": 0.00044430064068414843, "loss": 1.8879, "step": 2575 }, { "epoch": 0.2515625, "grad_norm": 0.2464357167482376, "learning_rate": 0.0004442537937990889, "loss": 1.9289, "step": 2576 }, { "epoch": 0.25166015625, "grad_norm": 0.2768436372280121, "learning_rate": 0.0004442069300070747, "loss": 1.8553, "step": 2577 }, { "epoch": 0.2517578125, "grad_norm": 0.22688689827919006, "learning_rate": 0.0004441600493127873, "loss": 1.8665, "step": 2578 }, { "epoch": 0.25185546875, "grad_norm": 0.28142839670181274, "learning_rate": 0.0004441131517209096, "loss": 1.8982, "step": 2579 }, { "epoch": 0.251953125, "grad_norm": 0.22705985605716705, "learning_rate": 0.0004440662372361262, "loss": 1.8778, "step": 2580 }, { "epoch": 0.25205078125, "grad_norm": 0.24169547855854034, "learning_rate": 0.0004440193058631236, "loss": 1.9192, "step": 2581 }, { "epoch": 0.2521484375, "grad_norm": 0.2988471984863281, "learning_rate": 0.00044397235760658963, "loss": 1.8754, "step": 2582 }, { "epoch": 0.25224609375, "grad_norm": 0.343605101108551, "learning_rate": 0.0004439253924712141, "loss": 1.8933, "step": 2583 }, { "epoch": 0.25234375, "grad_norm": 0.3238380551338196, "learning_rate": 0.00044387841046168844, "loss": 1.8798, "step": 2584 }, { "epoch": 0.25244140625, "grad_norm": 0.21151965856552124, "learning_rate": 0.0004438314115827057, "loss": 1.88, "step": 2585 }, { "epoch": 0.2525390625, "grad_norm": 0.3310568630695343, "learning_rate": 0.00044378439583896064, "loss": 1.9042, "step": 2586 }, { "epoch": 0.25263671875, "grad_norm": 0.30944469571113586, "learning_rate": 0.00044373736323514973, "loss": 1.8837, "step": 2587 }, { "epoch": 0.252734375, "grad_norm": 0.23433548212051392, "learning_rate": 0.00044369031377597114, "loss": 1.9025, "step": 2588 }, { "epoch": 0.25283203125, "grad_norm": 0.2772672176361084, "learning_rate": 0.0004436432474661246, "loss": 1.8573, "step": 2589 }, { "epoch": 0.2529296875, "grad_norm": 0.251200407743454, "learning_rate": 0.00044359616431031176, "loss": 1.869, "step": 2590 }, { "epoch": 0.25302734375, "grad_norm": 0.28617560863494873, "learning_rate": 0.0004435490643132357, "loss": 1.91, "step": 2591 }, { "epoch": 0.253125, "grad_norm": 0.23629052937030792, "learning_rate": 0.0004435019474796013, "loss": 1.9149, "step": 2592 }, { "epoch": 0.25322265625, "grad_norm": 0.2794038653373718, "learning_rate": 0.0004434548138141152, "loss": 1.9415, "step": 2593 }, { "epoch": 0.2533203125, "grad_norm": 0.22255775332450867, "learning_rate": 0.0004434076633214855, "loss": 1.8657, "step": 2594 }, { "epoch": 0.25341796875, "grad_norm": 0.28047165274620056, "learning_rate": 0.00044336049600642225, "loss": 1.9193, "step": 2595 }, { "epoch": 0.253515625, "grad_norm": 0.16849219799041748, "learning_rate": 0.0004433133118736369, "loss": 1.8809, "step": 2596 }, { "epoch": 0.25361328125, "grad_norm": 
0.26295021176338196, "learning_rate": 0.00044326611092784297, "loss": 1.8732, "step": 2597 }, { "epoch": 0.2537109375, "grad_norm": 0.2265648990869522, "learning_rate": 0.00044321889317375517, "loss": 1.8861, "step": 2598 }, { "epoch": 0.25380859375, "grad_norm": 0.28177958726882935, "learning_rate": 0.0004431716586160902, "loss": 1.9275, "step": 2599 }, { "epoch": 0.25390625, "grad_norm": 0.34475746750831604, "learning_rate": 0.0004431244072595665, "loss": 1.8884, "step": 2600 }, { "epoch": 0.25400390625, "grad_norm": 0.32186293601989746, "learning_rate": 0.0004430771391089038, "loss": 1.9329, "step": 2601 }, { "epoch": 0.2541015625, "grad_norm": 0.2824218273162842, "learning_rate": 0.00044302985416882405, "loss": 1.9011, "step": 2602 }, { "epoch": 0.25419921875, "grad_norm": 0.33732739090919495, "learning_rate": 0.0004429825524440505, "loss": 1.9148, "step": 2603 }, { "epoch": 0.254296875, "grad_norm": 0.23914110660552979, "learning_rate": 0.00044293523393930807, "loss": 1.866, "step": 2604 }, { "epoch": 0.25439453125, "grad_norm": 0.2791132926940918, "learning_rate": 0.0004428878986593236, "loss": 1.8849, "step": 2605 }, { "epoch": 0.2544921875, "grad_norm": 0.20789825916290283, "learning_rate": 0.0004428405466088253, "loss": 1.9007, "step": 2606 }, { "epoch": 0.25458984375, "grad_norm": 0.27687475085258484, "learning_rate": 0.0004427931777925435, "loss": 1.8936, "step": 2607 }, { "epoch": 0.2546875, "grad_norm": 0.29951584339141846, "learning_rate": 0.00044274579221520964, "loss": 1.8735, "step": 2608 }, { "epoch": 0.25478515625, "grad_norm": 0.25301870703697205, "learning_rate": 0.00044269838988155726, "loss": 1.8829, "step": 2609 }, { "epoch": 0.2548828125, "grad_norm": 0.254138320684433, "learning_rate": 0.0004426509707963214, "loss": 1.8881, "step": 2610 }, { "epoch": 0.25498046875, "grad_norm": 0.2853766679763794, "learning_rate": 0.00044260353496423883, "loss": 1.9376, "step": 2611 }, { "epoch": 0.255078125, "grad_norm": 0.22820377349853516, "learning_rate": 0.00044255608239004795, "loss": 1.8961, "step": 2612 }, { "epoch": 0.25517578125, "grad_norm": 0.269482284784317, "learning_rate": 0.00044250861307848884, "loss": 1.8892, "step": 2613 }, { "epoch": 0.2552734375, "grad_norm": 0.21227295696735382, "learning_rate": 0.0004424611270343033, "loss": 1.9393, "step": 2614 }, { "epoch": 0.25537109375, "grad_norm": 0.21992765367031097, "learning_rate": 0.00044241362426223463, "loss": 1.8699, "step": 2615 }, { "epoch": 0.25546875, "grad_norm": 0.26295095682144165, "learning_rate": 0.0004423661047670282, "loss": 1.9119, "step": 2616 }, { "epoch": 0.25556640625, "grad_norm": 0.1936780959367752, "learning_rate": 0.00044231856855343055, "loss": 1.8939, "step": 2617 }, { "epoch": 0.2556640625, "grad_norm": 0.27874645590782166, "learning_rate": 0.0004422710156261903, "loss": 1.9553, "step": 2618 }, { "epoch": 0.25576171875, "grad_norm": 0.2704470753669739, "learning_rate": 0.0004422234459900574, "loss": 1.8742, "step": 2619 }, { "epoch": 0.255859375, "grad_norm": 0.29514744877815247, "learning_rate": 0.00044217585964978365, "loss": 1.8803, "step": 2620 }, { "epoch": 0.25595703125, "grad_norm": 0.22899918258190155, "learning_rate": 0.0004421282566101226, "loss": 1.8898, "step": 2621 }, { "epoch": 0.2560546875, "grad_norm": 0.3023613691329956, "learning_rate": 0.00044208063687582944, "loss": 1.917, "step": 2622 }, { "epoch": 0.25615234375, "grad_norm": 0.2797189950942993, "learning_rate": 0.0004420330004516608, "loss": 1.89, "step": 2623 }, { "epoch": 0.25625, "grad_norm": 0.23178371787071228, 
"learning_rate": 0.0004419853473423751, "loss": 1.8528, "step": 2624 }, { "epoch": 0.25634765625, "grad_norm": 0.2943453788757324, "learning_rate": 0.0004419376775527326, "loss": 1.8675, "step": 2625 }, { "epoch": 0.2564453125, "grad_norm": 0.2193489670753479, "learning_rate": 0.00044188999108749516, "loss": 1.8843, "step": 2626 }, { "epoch": 0.25654296875, "grad_norm": 0.2941931486129761, "learning_rate": 0.00044184228795142607, "loss": 1.8976, "step": 2627 }, { "epoch": 0.256640625, "grad_norm": 0.22620061039924622, "learning_rate": 0.00044179456814929044, "loss": 1.8674, "step": 2628 }, { "epoch": 0.25673828125, "grad_norm": 0.2290557473897934, "learning_rate": 0.0004417468316858552, "loss": 1.9054, "step": 2629 }, { "epoch": 0.2568359375, "grad_norm": 0.24180883169174194, "learning_rate": 0.00044169907856588865, "loss": 1.8484, "step": 2630 }, { "epoch": 0.25693359375, "grad_norm": 0.19765223562717438, "learning_rate": 0.00044165130879416097, "loss": 1.8613, "step": 2631 }, { "epoch": 0.25703125, "grad_norm": 0.20855452120304108, "learning_rate": 0.00044160352237544406, "loss": 1.8621, "step": 2632 }, { "epoch": 0.25712890625, "grad_norm": 0.23405295610427856, "learning_rate": 0.00044155571931451105, "loss": 1.8958, "step": 2633 }, { "epoch": 0.2572265625, "grad_norm": 0.27464863657951355, "learning_rate": 0.0004415078996161374, "loss": 1.8959, "step": 2634 }, { "epoch": 0.25732421875, "grad_norm": 0.2642795145511627, "learning_rate": 0.00044146006328509956, "loss": 1.8968, "step": 2635 }, { "epoch": 0.257421875, "grad_norm": 0.29816219210624695, "learning_rate": 0.00044141221032617626, "loss": 1.8845, "step": 2636 }, { "epoch": 0.25751953125, "grad_norm": 0.251708984375, "learning_rate": 0.0004413643407441473, "loss": 1.9025, "step": 2637 }, { "epoch": 0.2576171875, "grad_norm": 0.28949257731437683, "learning_rate": 0.0004413164545437946, "loss": 1.8946, "step": 2638 }, { "epoch": 0.25771484375, "grad_norm": 0.24704940617084503, "learning_rate": 0.0004412685517299015, "loss": 1.8745, "step": 2639 }, { "epoch": 0.2578125, "grad_norm": 0.25325044989585876, "learning_rate": 0.0004412206323072532, "loss": 1.8796, "step": 2640 }, { "epoch": 0.25791015625, "grad_norm": 0.22802412509918213, "learning_rate": 0.00044117269628063616, "loss": 1.909, "step": 2641 }, { "epoch": 0.2580078125, "grad_norm": 0.21644683182239532, "learning_rate": 0.00044112474365483903, "loss": 1.846, "step": 2642 }, { "epoch": 0.25810546875, "grad_norm": 0.20541459321975708, "learning_rate": 0.00044107677443465165, "loss": 1.8854, "step": 2643 }, { "epoch": 0.258203125, "grad_norm": 0.2531701326370239, "learning_rate": 0.00044102878862486587, "loss": 1.9159, "step": 2644 }, { "epoch": 0.25830078125, "grad_norm": 0.20743289589881897, "learning_rate": 0.00044098078623027495, "loss": 1.8419, "step": 2645 }, { "epoch": 0.2583984375, "grad_norm": 0.23902501165866852, "learning_rate": 0.00044093276725567403, "loss": 1.883, "step": 2646 }, { "epoch": 0.25849609375, "grad_norm": 0.260051965713501, "learning_rate": 0.00044088473170585964, "loss": 1.9066, "step": 2647 }, { "epoch": 0.25859375, "grad_norm": 0.22198320925235748, "learning_rate": 0.00044083667958563007, "loss": 1.8814, "step": 2648 }, { "epoch": 0.25869140625, "grad_norm": 0.20246773958206177, "learning_rate": 0.0004407886108997855, "loss": 1.923, "step": 2649 }, { "epoch": 0.2587890625, "grad_norm": 0.26774612069129944, "learning_rate": 0.0004407405256531274, "loss": 1.8811, "step": 2650 }, { "epoch": 0.25888671875, "grad_norm": 0.2821747064590454, 
"learning_rate": 0.0004406924238504592, "loss": 1.9521, "step": 2651 }, { "epoch": 0.258984375, "grad_norm": 0.2747217118740082, "learning_rate": 0.0004406443054965857, "loss": 1.863, "step": 2652 }, { "epoch": 0.25908203125, "grad_norm": 0.24007029831409454, "learning_rate": 0.0004405961705963135, "loss": 1.8777, "step": 2653 }, { "epoch": 0.2591796875, "grad_norm": 0.2279479056596756, "learning_rate": 0.000440548019154451, "loss": 1.8902, "step": 2654 }, { "epoch": 0.25927734375, "grad_norm": 0.24569466710090637, "learning_rate": 0.00044049985117580796, "loss": 1.8834, "step": 2655 }, { "epoch": 0.259375, "grad_norm": 0.22550983726978302, "learning_rate": 0.00044045166666519604, "loss": 1.8953, "step": 2656 }, { "epoch": 0.25947265625, "grad_norm": 0.2546038031578064, "learning_rate": 0.00044040346562742834, "loss": 1.8834, "step": 2657 }, { "epoch": 0.2595703125, "grad_norm": 0.21834610402584076, "learning_rate": 0.00044035524806731975, "loss": 1.8543, "step": 2658 }, { "epoch": 0.25966796875, "grad_norm": 0.22710272669792175, "learning_rate": 0.0004403070139896869, "loss": 1.8995, "step": 2659 }, { "epoch": 0.259765625, "grad_norm": 0.19748175144195557, "learning_rate": 0.0004402587633993477, "loss": 1.864, "step": 2660 }, { "epoch": 0.25986328125, "grad_norm": 0.23321975767612457, "learning_rate": 0.0004402104963011222, "loss": 1.9256, "step": 2661 }, { "epoch": 0.2599609375, "grad_norm": 0.19426730275154114, "learning_rate": 0.0004401622126998317, "loss": 1.8956, "step": 2662 }, { "epoch": 0.26005859375, "grad_norm": 0.2513621747493744, "learning_rate": 0.0004401139126002993, "loss": 1.9298, "step": 2663 }, { "epoch": 0.26015625, "grad_norm": 0.2774468660354614, "learning_rate": 0.00044006559600734993, "loss": 1.8946, "step": 2664 }, { "epoch": 0.26025390625, "grad_norm": 0.2116554081439972, "learning_rate": 0.00044001726292580977, "loss": 1.8713, "step": 2665 }, { "epoch": 0.2603515625, "grad_norm": 0.24314185976982117, "learning_rate": 0.000439968913360507, "loss": 1.8807, "step": 2666 }, { "epoch": 0.26044921875, "grad_norm": 0.22041741013526917, "learning_rate": 0.00043992054731627135, "loss": 1.9022, "step": 2667 }, { "epoch": 0.260546875, "grad_norm": 0.214686781167984, "learning_rate": 0.00043987216479793404, "loss": 1.8687, "step": 2668 }, { "epoch": 0.26064453125, "grad_norm": 0.254207581281662, "learning_rate": 0.0004398237658103281, "loss": 1.8489, "step": 2669 }, { "epoch": 0.2607421875, "grad_norm": 0.21913522481918335, "learning_rate": 0.0004397753503582881, "loss": 1.9079, "step": 2670 }, { "epoch": 0.26083984375, "grad_norm": 0.2150515615940094, "learning_rate": 0.0004397269184466505, "loss": 1.8985, "step": 2671 }, { "epoch": 0.2609375, "grad_norm": 0.26243162155151367, "learning_rate": 0.000439678470080253, "loss": 1.8512, "step": 2672 }, { "epoch": 0.26103515625, "grad_norm": 0.20125195384025574, "learning_rate": 0.0004396300052639353, "loss": 1.8621, "step": 2673 }, { "epoch": 0.2611328125, "grad_norm": 0.2900947630405426, "learning_rate": 0.00043958152400253855, "loss": 1.9354, "step": 2674 }, { "epoch": 0.26123046875, "grad_norm": 0.2357524037361145, "learning_rate": 0.00043953302630090566, "loss": 1.9136, "step": 2675 }, { "epoch": 0.261328125, "grad_norm": 0.23963971436023712, "learning_rate": 0.000439484512163881, "loss": 1.867, "step": 2676 }, { "epoch": 0.26142578125, "grad_norm": 0.27860602736473083, "learning_rate": 0.00043943598159631075, "loss": 1.9149, "step": 2677 }, { "epoch": 0.2615234375, "grad_norm": 0.22854892909526825, "learning_rate": 
0.00043938743460304275, "loss": 1.8415, "step": 2678 }, { "epoch": 0.26162109375, "grad_norm": 0.27040791511535645, "learning_rate": 0.0004393388711889264, "loss": 1.9017, "step": 2679 }, { "epoch": 0.26171875, "grad_norm": 0.2952374219894409, "learning_rate": 0.0004392902913588128, "loss": 1.9406, "step": 2680 }, { "epoch": 0.26181640625, "grad_norm": 0.27148759365081787, "learning_rate": 0.0004392416951175544, "loss": 1.8999, "step": 2681 }, { "epoch": 0.2619140625, "grad_norm": 0.24533145129680634, "learning_rate": 0.00043919308247000575, "loss": 1.8835, "step": 2682 }, { "epoch": 0.26201171875, "grad_norm": 0.2174772322177887, "learning_rate": 0.0004391444534210229, "loss": 1.8424, "step": 2683 }, { "epoch": 0.262109375, "grad_norm": 0.19030643999576569, "learning_rate": 0.0004390958079754633, "loss": 1.8736, "step": 2684 }, { "epoch": 0.26220703125, "grad_norm": 0.26530101895332336, "learning_rate": 0.0004390471461381862, "loss": 1.8988, "step": 2685 }, { "epoch": 0.2623046875, "grad_norm": 0.24065977334976196, "learning_rate": 0.0004389984679140525, "loss": 1.9052, "step": 2686 }, { "epoch": 0.26240234375, "grad_norm": 0.2812477946281433, "learning_rate": 0.00043894977330792486, "loss": 1.8987, "step": 2687 }, { "epoch": 0.2625, "grad_norm": 0.2505251169204712, "learning_rate": 0.00043890106232466724, "loss": 1.8922, "step": 2688 }, { "epoch": 0.26259765625, "grad_norm": 0.2238491326570511, "learning_rate": 0.0004388523349691455, "loss": 1.9183, "step": 2689 }, { "epoch": 0.2626953125, "grad_norm": 0.26334211230278015, "learning_rate": 0.00043880359124622714, "loss": 1.8899, "step": 2690 }, { "epoch": 0.26279296875, "grad_norm": 0.24818876385688782, "learning_rate": 0.0004387548311607812, "loss": 1.9063, "step": 2691 }, { "epoch": 0.262890625, "grad_norm": 0.22725768387317657, "learning_rate": 0.0004387060547176784, "loss": 1.8828, "step": 2692 }, { "epoch": 0.26298828125, "grad_norm": 0.2095717042684555, "learning_rate": 0.00043865726192179096, "loss": 1.8658, "step": 2693 }, { "epoch": 0.2630859375, "grad_norm": 0.2227015644311905, "learning_rate": 0.000438608452777993, "loss": 1.9546, "step": 2694 }, { "epoch": 0.26318359375, "grad_norm": 0.20986926555633545, "learning_rate": 0.00043855962729116, "loss": 1.878, "step": 2695 }, { "epoch": 0.26328125, "grad_norm": 0.24293240904808044, "learning_rate": 0.00043851078546616924, "loss": 1.889, "step": 2696 }, { "epoch": 0.26337890625, "grad_norm": 0.23340186476707458, "learning_rate": 0.0004384619273078996, "loss": 1.9285, "step": 2697 }, { "epoch": 0.2634765625, "grad_norm": 0.22993077337741852, "learning_rate": 0.00043841305282123157, "loss": 1.9004, "step": 2698 }, { "epoch": 0.26357421875, "grad_norm": 0.25231555104255676, "learning_rate": 0.00043836416201104727, "loss": 1.886, "step": 2699 }, { "epoch": 0.263671875, "grad_norm": 0.18800829350948334, "learning_rate": 0.0004383152548822304, "loss": 1.9054, "step": 2700 }, { "epoch": 0.26376953125, "grad_norm": 0.31697878241539, "learning_rate": 0.0004382663314396665, "loss": 1.8387, "step": 2701 }, { "epoch": 0.2638671875, "grad_norm": 0.31109264492988586, "learning_rate": 0.00043821739168824246, "loss": 1.922, "step": 2702 }, { "epoch": 0.26396484375, "grad_norm": 0.23077085614204407, "learning_rate": 0.00043816843563284697, "loss": 1.8771, "step": 2703 }, { "epoch": 0.2640625, "grad_norm": 0.31502625346183777, "learning_rate": 0.0004381194632783703, "loss": 1.8655, "step": 2704 }, { "epoch": 0.26416015625, "grad_norm": 0.24782267212867737, "learning_rate": 0.00043807047462970434, 
"loss": 1.8778, "step": 2705 }, { "epoch": 0.2642578125, "grad_norm": 0.25926321744918823, "learning_rate": 0.00043802146969174267, "loss": 1.8437, "step": 2706 }, { "epoch": 0.26435546875, "grad_norm": 0.27319055795669556, "learning_rate": 0.0004379724484693804, "loss": 1.8433, "step": 2707 }, { "epoch": 0.264453125, "grad_norm": 0.23524881899356842, "learning_rate": 0.0004379234109675143, "loss": 1.9148, "step": 2708 }, { "epoch": 0.26455078125, "grad_norm": 0.28222718834877014, "learning_rate": 0.0004378743571910429, "loss": 1.9035, "step": 2709 }, { "epoch": 0.2646484375, "grad_norm": 0.271083801984787, "learning_rate": 0.00043782528714486613, "loss": 1.8691, "step": 2710 }, { "epoch": 0.26474609375, "grad_norm": 0.23110386729240417, "learning_rate": 0.0004377762008338856, "loss": 1.8794, "step": 2711 }, { "epoch": 0.26484375, "grad_norm": 0.24147644639015198, "learning_rate": 0.0004377270982630048, "loss": 1.8577, "step": 2712 }, { "epoch": 0.26494140625, "grad_norm": 0.23246973752975464, "learning_rate": 0.0004376779794371284, "loss": 1.8876, "step": 2713 }, { "epoch": 0.2650390625, "grad_norm": 0.23443640768527985, "learning_rate": 0.00043762884436116315, "loss": 1.8693, "step": 2714 }, { "epoch": 0.26513671875, "grad_norm": 0.2223389744758606, "learning_rate": 0.00043757969304001704, "loss": 1.8308, "step": 2715 }, { "epoch": 0.265234375, "grad_norm": 0.25246211886405945, "learning_rate": 0.0004375305254785999, "loss": 1.8723, "step": 2716 }, { "epoch": 0.26533203125, "grad_norm": 0.2547222077846527, "learning_rate": 0.0004374813416818232, "loss": 1.8826, "step": 2717 }, { "epoch": 0.2654296875, "grad_norm": 0.2161521464586258, "learning_rate": 0.0004374321416545999, "loss": 1.9017, "step": 2718 }, { "epoch": 0.26552734375, "grad_norm": 0.2493196278810501, "learning_rate": 0.0004373829254018447, "loss": 1.86, "step": 2719 }, { "epoch": 0.265625, "grad_norm": 0.2375672310590744, "learning_rate": 0.00043733369292847386, "loss": 1.8784, "step": 2720 }, { "epoch": 0.26572265625, "grad_norm": 0.34022462368011475, "learning_rate": 0.00043728444423940516, "loss": 1.8915, "step": 2721 }, { "epoch": 0.2658203125, "grad_norm": 0.25120794773101807, "learning_rate": 0.0004372351793395582, "loss": 1.8894, "step": 2722 }, { "epoch": 0.26591796875, "grad_norm": 0.2032475620508194, "learning_rate": 0.0004371858982338542, "loss": 1.8942, "step": 2723 }, { "epoch": 0.266015625, "grad_norm": 0.2685857117176056, "learning_rate": 0.00043713660092721573, "loss": 1.9001, "step": 2724 }, { "epoch": 0.26611328125, "grad_norm": 0.296835333108902, "learning_rate": 0.00043708728742456723, "loss": 1.8994, "step": 2725 }, { "epoch": 0.2662109375, "grad_norm": 0.27104151248931885, "learning_rate": 0.00043703795773083467, "loss": 1.9119, "step": 2726 }, { "epoch": 0.26630859375, "grad_norm": 0.25494450330734253, "learning_rate": 0.0004369886118509457, "loss": 1.9188, "step": 2727 }, { "epoch": 0.26640625, "grad_norm": 0.3688213527202606, "learning_rate": 0.0004369392497898294, "loss": 1.9021, "step": 2728 }, { "epoch": 0.26650390625, "grad_norm": 0.24721217155456543, "learning_rate": 0.00043688987155241676, "loss": 1.8895, "step": 2729 }, { "epoch": 0.2666015625, "grad_norm": 0.290102481842041, "learning_rate": 0.0004368404771436402, "loss": 1.8565, "step": 2730 }, { "epoch": 0.26669921875, "grad_norm": 0.3468306064605713, "learning_rate": 0.0004367910665684338, "loss": 1.8795, "step": 2731 }, { "epoch": 0.266796875, "grad_norm": 0.23682735860347748, "learning_rate": 0.00043674163983173304, "loss": 1.8749, 
"step": 2732 }, { "epoch": 0.26689453125, "grad_norm": 0.24279217422008514, "learning_rate": 0.00043669219693847543, "loss": 1.887, "step": 2733 }, { "epoch": 0.2669921875, "grad_norm": 0.25867339968681335, "learning_rate": 0.0004366427378935998, "loss": 1.8708, "step": 2734 }, { "epoch": 0.26708984375, "grad_norm": 0.28321221470832825, "learning_rate": 0.0004365932627020467, "loss": 1.9077, "step": 2735 }, { "epoch": 0.2671875, "grad_norm": 0.2243577092885971, "learning_rate": 0.00043654377136875827, "loss": 1.8902, "step": 2736 }, { "epoch": 0.26728515625, "grad_norm": 0.27794283628463745, "learning_rate": 0.0004364942638986782, "loss": 1.8659, "step": 2737 }, { "epoch": 0.2673828125, "grad_norm": 0.2588692903518677, "learning_rate": 0.00043644474029675193, "loss": 1.8494, "step": 2738 }, { "epoch": 0.26748046875, "grad_norm": 0.22354848682880402, "learning_rate": 0.00043639520056792627, "loss": 1.895, "step": 2739 }, { "epoch": 0.267578125, "grad_norm": 0.3032642900943756, "learning_rate": 0.00043634564471715, "loss": 1.8643, "step": 2740 }, { "epoch": 0.26767578125, "grad_norm": 0.25967633724212646, "learning_rate": 0.0004362960727493732, "loss": 1.8712, "step": 2741 }, { "epoch": 0.2677734375, "grad_norm": 0.2637724280357361, "learning_rate": 0.0004362464846695476, "loss": 1.8512, "step": 2742 }, { "epoch": 0.26787109375, "grad_norm": 0.2793140709400177, "learning_rate": 0.00043619688048262677, "loss": 1.9086, "step": 2743 }, { "epoch": 0.26796875, "grad_norm": 0.2188546061515808, "learning_rate": 0.00043614726019356565, "loss": 1.8649, "step": 2744 }, { "epoch": 0.26806640625, "grad_norm": 0.21244989335536957, "learning_rate": 0.00043609762380732083, "loss": 1.8744, "step": 2745 }, { "epoch": 0.2681640625, "grad_norm": 0.2601993978023529, "learning_rate": 0.00043604797132885064, "loss": 1.8649, "step": 2746 }, { "epoch": 0.26826171875, "grad_norm": 0.2278032749891281, "learning_rate": 0.00043599830276311487, "loss": 1.8662, "step": 2747 }, { "epoch": 0.268359375, "grad_norm": 0.24005573987960815, "learning_rate": 0.000435948618115075, "loss": 1.8916, "step": 2748 }, { "epoch": 0.26845703125, "grad_norm": 0.28929322957992554, "learning_rate": 0.00043589891738969393, "loss": 1.9566, "step": 2749 }, { "epoch": 0.2685546875, "grad_norm": 0.21914543211460114, "learning_rate": 0.00043584920059193654, "loss": 1.9043, "step": 2750 }, { "epoch": 0.26865234375, "grad_norm": 0.24009595811367035, "learning_rate": 0.000435799467726769, "loss": 1.8493, "step": 2751 }, { "epoch": 0.26875, "grad_norm": 0.20415683090686798, "learning_rate": 0.0004357497187991593, "loss": 1.8954, "step": 2752 }, { "epoch": 0.26884765625, "grad_norm": 0.1941215842962265, "learning_rate": 0.0004356999538140766, "loss": 1.8634, "step": 2753 }, { "epoch": 0.2689453125, "grad_norm": 0.2202758938074112, "learning_rate": 0.0004356501727764923, "loss": 1.8706, "step": 2754 }, { "epoch": 0.26904296875, "grad_norm": 0.21078895032405853, "learning_rate": 0.000435600375691379, "loss": 1.8942, "step": 2755 }, { "epoch": 0.269140625, "grad_norm": 0.21176692843437195, "learning_rate": 0.00043555056256371083, "loss": 1.8814, "step": 2756 }, { "epoch": 0.26923828125, "grad_norm": 0.20595595240592957, "learning_rate": 0.00043550073339846394, "loss": 1.8919, "step": 2757 }, { "epoch": 0.2693359375, "grad_norm": 0.2053648680448532, "learning_rate": 0.00043545088820061564, "loss": 1.9387, "step": 2758 }, { "epoch": 0.26943359375, "grad_norm": 0.2367970496416092, "learning_rate": 0.00043540102697514513, "loss": 1.8863, "step": 2759 }, { 
"epoch": 0.26953125, "grad_norm": 0.1842896044254303, "learning_rate": 0.0004353511497270329, "loss": 1.8805, "step": 2760 }, { "epoch": 0.26962890625, "grad_norm": 0.25769177079200745, "learning_rate": 0.00043530125646126154, "loss": 1.8833, "step": 2761 }, { "epoch": 0.2697265625, "grad_norm": 0.2760721743106842, "learning_rate": 0.0004352513471828148, "loss": 1.8924, "step": 2762 }, { "epoch": 0.26982421875, "grad_norm": 0.17177268862724304, "learning_rate": 0.0004352014218966781, "loss": 1.8441, "step": 2763 }, { "epoch": 0.269921875, "grad_norm": 0.22797243297100067, "learning_rate": 0.0004351514806078387, "loss": 1.8753, "step": 2764 }, { "epoch": 0.27001953125, "grad_norm": 0.22956639528274536, "learning_rate": 0.0004351015233212851, "loss": 1.9303, "step": 2765 }, { "epoch": 0.2701171875, "grad_norm": 0.20838238298892975, "learning_rate": 0.0004350515500420077, "loss": 1.8689, "step": 2766 }, { "epoch": 0.27021484375, "grad_norm": 0.2128564864397049, "learning_rate": 0.00043500156077499834, "loss": 1.9195, "step": 2767 }, { "epoch": 0.2703125, "grad_norm": 0.21279101073741913, "learning_rate": 0.00043495155552525074, "loss": 1.8944, "step": 2768 }, { "epoch": 0.27041015625, "grad_norm": 0.22925125062465668, "learning_rate": 0.00043490153429775964, "loss": 1.8632, "step": 2769 }, { "epoch": 0.2705078125, "grad_norm": 0.23544587194919586, "learning_rate": 0.00043485149709752193, "loss": 1.9066, "step": 2770 }, { "epoch": 0.27060546875, "grad_norm": 0.22419407963752747, "learning_rate": 0.00043480144392953567, "loss": 1.8416, "step": 2771 }, { "epoch": 0.270703125, "grad_norm": 0.21770869195461273, "learning_rate": 0.00043475137479880103, "loss": 1.9117, "step": 2772 }, { "epoch": 0.27080078125, "grad_norm": 0.2358229011297226, "learning_rate": 0.0004347012897103192, "loss": 1.9187, "step": 2773 }, { "epoch": 0.2708984375, "grad_norm": 0.30673930048942566, "learning_rate": 0.00043465118866909346, "loss": 1.8928, "step": 2774 }, { "epoch": 0.27099609375, "grad_norm": 0.21168282628059387, "learning_rate": 0.00043460107168012837, "loss": 1.9002, "step": 2775 }, { "epoch": 0.27109375, "grad_norm": 0.22865557670593262, "learning_rate": 0.0004345509387484301, "loss": 1.8956, "step": 2776 }, { "epoch": 0.27119140625, "grad_norm": 0.3253214955329895, "learning_rate": 0.00043450078987900654, "loss": 1.9049, "step": 2777 }, { "epoch": 0.2712890625, "grad_norm": 0.2774486541748047, "learning_rate": 0.00043445062507686713, "loss": 1.8817, "step": 2778 }, { "epoch": 0.27138671875, "grad_norm": 0.22928157448768616, "learning_rate": 0.00043440044434702287, "loss": 1.9167, "step": 2779 }, { "epoch": 0.271484375, "grad_norm": 0.2905370891094208, "learning_rate": 0.0004343502476944864, "loss": 1.8434, "step": 2780 }, { "epoch": 0.27158203125, "grad_norm": 0.19285933673381805, "learning_rate": 0.0004343000351242719, "loss": 1.8287, "step": 2781 }, { "epoch": 0.2716796875, "grad_norm": 0.25820720195770264, "learning_rate": 0.0004342498066413951, "loss": 1.8488, "step": 2782 }, { "epoch": 0.27177734375, "grad_norm": 0.2616766095161438, "learning_rate": 0.00043419956225087354, "loss": 1.8627, "step": 2783 }, { "epoch": 0.271875, "grad_norm": 0.18560141324996948, "learning_rate": 0.000434149301957726, "loss": 1.8272, "step": 2784 }, { "epoch": 0.27197265625, "grad_norm": 0.3137165307998657, "learning_rate": 0.0004340990257669732, "loss": 1.8952, "step": 2785 }, { "epoch": 0.2720703125, "grad_norm": 0.231571763753891, "learning_rate": 0.00043404873368363726, "loss": 1.9044, "step": 2786 }, { "epoch": 
0.27216796875, "grad_norm": 0.2827242314815521, "learning_rate": 0.00043399842571274175, "loss": 1.8984, "step": 2787 }, { "epoch": 0.272265625, "grad_norm": 0.2350980043411255, "learning_rate": 0.0004339481018593121, "loss": 1.8902, "step": 2788 }, { "epoch": 0.27236328125, "grad_norm": 0.2104049175977707, "learning_rate": 0.00043389776212837537, "loss": 1.8853, "step": 2789 }, { "epoch": 0.2724609375, "grad_norm": 0.3253656327724457, "learning_rate": 0.0004338474065249598, "loss": 1.8982, "step": 2790 }, { "epoch": 0.27255859375, "grad_norm": 0.2520630359649658, "learning_rate": 0.0004337970350540956, "loss": 1.8806, "step": 2791 }, { "epoch": 0.27265625, "grad_norm": 0.2999001145362854, "learning_rate": 0.0004337466477208145, "loss": 1.9165, "step": 2792 }, { "epoch": 0.27275390625, "grad_norm": 0.20815780758857727, "learning_rate": 0.0004336962445301495, "loss": 1.9357, "step": 2793 }, { "epoch": 0.2728515625, "grad_norm": 0.24039196968078613, "learning_rate": 0.0004336458254871357, "loss": 1.84, "step": 2794 }, { "epoch": 0.27294921875, "grad_norm": 0.24862824380397797, "learning_rate": 0.0004335953905968094, "loss": 1.8991, "step": 2795 }, { "epoch": 0.273046875, "grad_norm": 0.2109254151582718, "learning_rate": 0.0004335449398642086, "loss": 1.9077, "step": 2796 }, { "epoch": 0.27314453125, "grad_norm": 0.27603134512901306, "learning_rate": 0.00043349447329437286, "loss": 1.8703, "step": 2797 }, { "epoch": 0.2732421875, "grad_norm": 0.20653647184371948, "learning_rate": 0.00043344399089234343, "loss": 1.9106, "step": 2798 }, { "epoch": 0.27333984375, "grad_norm": 0.26815375685691833, "learning_rate": 0.000433393492663163, "loss": 1.8934, "step": 2799 }, { "epoch": 0.2734375, "grad_norm": 0.2597907781600952, "learning_rate": 0.0004333429786118758, "loss": 1.8716, "step": 2800 }, { "epoch": 0.27353515625, "grad_norm": 0.2918902337551117, "learning_rate": 0.00043329244874352785, "loss": 1.8949, "step": 2801 }, { "epoch": 0.2736328125, "grad_norm": 0.24110248684883118, "learning_rate": 0.0004332419030631667, "loss": 1.91, "step": 2802 }, { "epoch": 0.27373046875, "grad_norm": 0.26264944672584534, "learning_rate": 0.00043319134157584127, "loss": 1.8656, "step": 2803 }, { "epoch": 0.273828125, "grad_norm": 0.30166885256767273, "learning_rate": 0.00043314076428660237, "loss": 1.896, "step": 2804 }, { "epoch": 0.27392578125, "grad_norm": 0.22887808084487915, "learning_rate": 0.000433090171200502, "loss": 1.8776, "step": 2805 }, { "epoch": 0.2740234375, "grad_norm": 0.3068729043006897, "learning_rate": 0.00043303956232259406, "loss": 1.9091, "step": 2806 }, { "epoch": 0.27412109375, "grad_norm": 0.20829269289970398, "learning_rate": 0.0004329889376579341, "loss": 1.8863, "step": 2807 }, { "epoch": 0.27421875, "grad_norm": 0.3112751245498657, "learning_rate": 0.00043293829721157876, "loss": 1.8886, "step": 2808 }, { "epoch": 0.27431640625, "grad_norm": 0.2697749137878418, "learning_rate": 0.00043288764098858677, "loss": 1.8382, "step": 2809 }, { "epoch": 0.2744140625, "grad_norm": 0.27988728880882263, "learning_rate": 0.0004328369689940182, "loss": 1.8885, "step": 2810 }, { "epoch": 0.27451171875, "grad_norm": 0.2760201394557953, "learning_rate": 0.0004327862812329348, "loss": 1.8547, "step": 2811 }, { "epoch": 0.274609375, "grad_norm": 0.2351469248533249, "learning_rate": 0.0004327355777103997, "loss": 1.9064, "step": 2812 }, { "epoch": 0.27470703125, "grad_norm": 0.2867816686630249, "learning_rate": 0.0004326848584314779, "loss": 1.9231, "step": 2813 }, { "epoch": 0.2748046875, "grad_norm": 
0.2675870954990387, "learning_rate": 0.00043263412340123556, "loss": 1.9473, "step": 2814 }, { "epoch": 0.27490234375, "grad_norm": 0.2948281466960907, "learning_rate": 0.0004325833726247409, "loss": 1.8853, "step": 2815 }, { "epoch": 0.275, "grad_norm": 0.2507423460483551, "learning_rate": 0.0004325326061070634, "loss": 1.8954, "step": 2816 }, { "epoch": 0.27509765625, "grad_norm": 0.328163743019104, "learning_rate": 0.0004324818238532741, "loss": 1.8834, "step": 2817 }, { "epoch": 0.2751953125, "grad_norm": 0.24358054995536804, "learning_rate": 0.0004324310258684458, "loss": 1.9065, "step": 2818 }, { "epoch": 0.27529296875, "grad_norm": 0.2406340092420578, "learning_rate": 0.0004323802121576527, "loss": 1.8833, "step": 2819 }, { "epoch": 0.275390625, "grad_norm": 0.22684375941753387, "learning_rate": 0.00043232938272597067, "loss": 1.8548, "step": 2820 }, { "epoch": 0.27548828125, "grad_norm": 0.26202863454818726, "learning_rate": 0.00043227853757847715, "loss": 1.8859, "step": 2821 }, { "epoch": 0.2755859375, "grad_norm": 0.3058609962463379, "learning_rate": 0.00043222767672025105, "loss": 1.8672, "step": 2822 }, { "epoch": 0.27568359375, "grad_norm": 0.20576193928718567, "learning_rate": 0.000432176800156373, "loss": 1.8819, "step": 2823 }, { "epoch": 0.27578125, "grad_norm": 0.2687084376811981, "learning_rate": 0.000432125907891925, "loss": 1.8617, "step": 2824 }, { "epoch": 0.27587890625, "grad_norm": 0.29834067821502686, "learning_rate": 0.00043207499993199084, "loss": 1.9432, "step": 2825 }, { "epoch": 0.2759765625, "grad_norm": 0.2804504930973053, "learning_rate": 0.0004320240762816558, "loss": 1.9183, "step": 2826 }, { "epoch": 0.27607421875, "grad_norm": 0.2958560883998871, "learning_rate": 0.00043197313694600666, "loss": 1.8613, "step": 2827 }, { "epoch": 0.276171875, "grad_norm": 0.19655700027942657, "learning_rate": 0.0004319221819301317, "loss": 1.9211, "step": 2828 }, { "epoch": 0.27626953125, "grad_norm": 0.24349285662174225, "learning_rate": 0.00043187121123912104, "loss": 1.8844, "step": 2829 }, { "epoch": 0.2763671875, "grad_norm": 0.21005815267562866, "learning_rate": 0.0004318202248780661, "loss": 1.8959, "step": 2830 }, { "epoch": 0.27646484375, "grad_norm": 0.20665428042411804, "learning_rate": 0.00043176922285205997, "loss": 1.9001, "step": 2831 }, { "epoch": 0.2765625, "grad_norm": 0.2562062740325928, "learning_rate": 0.00043171820516619744, "loss": 1.8792, "step": 2832 }, { "epoch": 0.27666015625, "grad_norm": 0.18265512585639954, "learning_rate": 0.00043166717182557455, "loss": 1.9092, "step": 2833 }, { "epoch": 0.2767578125, "grad_norm": 0.2603815793991089, "learning_rate": 0.0004316161228352891, "loss": 1.9011, "step": 2834 }, { "epoch": 0.27685546875, "grad_norm": 0.25518473982810974, "learning_rate": 0.0004315650582004405, "loss": 1.8912, "step": 2835 }, { "epoch": 0.276953125, "grad_norm": 0.20616333186626434, "learning_rate": 0.00043151397792612963, "loss": 1.8852, "step": 2836 }, { "epoch": 0.27705078125, "grad_norm": 0.21389137208461761, "learning_rate": 0.00043146288201745893, "loss": 1.8471, "step": 2837 }, { "epoch": 0.2771484375, "grad_norm": 0.20006729662418365, "learning_rate": 0.00043141177047953253, "loss": 1.8815, "step": 2838 }, { "epoch": 0.27724609375, "grad_norm": 0.22058548033237457, "learning_rate": 0.0004313606433174559, "loss": 1.879, "step": 2839 }, { "epoch": 0.27734375, "grad_norm": 0.21773791313171387, "learning_rate": 0.0004313095005363362, "loss": 1.8815, "step": 2840 }, { "epoch": 0.27744140625, "grad_norm": 0.20097985863685608, 
"learning_rate": 0.0004312583421412822, "loss": 1.8748, "step": 2841 }, { "epoch": 0.2775390625, "grad_norm": 0.22170375287532806, "learning_rate": 0.00043120716813740414, "loss": 1.8611, "step": 2842 }, { "epoch": 0.27763671875, "grad_norm": 0.20943517982959747, "learning_rate": 0.00043115597852981383, "loss": 1.8566, "step": 2843 }, { "epoch": 0.277734375, "grad_norm": 0.22400376200675964, "learning_rate": 0.0004311047733236247, "loss": 1.8881, "step": 2844 }, { "epoch": 0.27783203125, "grad_norm": 0.2491447776556015, "learning_rate": 0.0004310535525239517, "loss": 1.902, "step": 2845 }, { "epoch": 0.2779296875, "grad_norm": 0.2509153187274933, "learning_rate": 0.0004310023161359113, "loss": 1.8923, "step": 2846 }, { "epoch": 0.27802734375, "grad_norm": 0.25616857409477234, "learning_rate": 0.00043095106416462153, "loss": 1.9018, "step": 2847 }, { "epoch": 0.278125, "grad_norm": 0.2445756494998932, "learning_rate": 0.0004308997966152022, "loss": 1.8785, "step": 2848 }, { "epoch": 0.27822265625, "grad_norm": 0.2224990576505661, "learning_rate": 0.00043084851349277413, "loss": 1.8688, "step": 2849 }, { "epoch": 0.2783203125, "grad_norm": 0.2594734728336334, "learning_rate": 0.0004307972148024604, "loss": 1.8407, "step": 2850 }, { "epoch": 0.27841796875, "grad_norm": 0.2550109326839447, "learning_rate": 0.00043074590054938513, "loss": 1.9068, "step": 2851 }, { "epoch": 0.278515625, "grad_norm": 0.24641066789627075, "learning_rate": 0.0004306945707386741, "loss": 1.8777, "step": 2852 }, { "epoch": 0.27861328125, "grad_norm": 0.230759397149086, "learning_rate": 0.0004306432253754549, "loss": 1.8931, "step": 2853 }, { "epoch": 0.2787109375, "grad_norm": 0.25458577275276184, "learning_rate": 0.0004305918644648562, "loss": 1.9012, "step": 2854 }, { "epoch": 0.27880859375, "grad_norm": 0.23450514674186707, "learning_rate": 0.00043054048801200877, "loss": 1.8963, "step": 2855 }, { "epoch": 0.27890625, "grad_norm": 0.2019408941268921, "learning_rate": 0.0004304890960220446, "loss": 1.9174, "step": 2856 }, { "epoch": 0.27900390625, "grad_norm": 0.24433976411819458, "learning_rate": 0.0004304376885000972, "loss": 1.8657, "step": 2857 }, { "epoch": 0.2791015625, "grad_norm": 0.23637981712818146, "learning_rate": 0.0004303862654513017, "loss": 1.8746, "step": 2858 }, { "epoch": 0.27919921875, "grad_norm": 0.23872153460979462, "learning_rate": 0.000430334826880795, "loss": 1.8968, "step": 2859 }, { "epoch": 0.279296875, "grad_norm": 0.30130165815353394, "learning_rate": 0.0004302833727937151, "loss": 1.8725, "step": 2860 }, { "epoch": 0.27939453125, "grad_norm": 0.26016566157341003, "learning_rate": 0.00043023190319520205, "loss": 1.9012, "step": 2861 }, { "epoch": 0.2794921875, "grad_norm": 0.22780142724514008, "learning_rate": 0.0004301804180903971, "loss": 1.829, "step": 2862 }, { "epoch": 0.27958984375, "grad_norm": 0.23210953176021576, "learning_rate": 0.00043012891748444313, "loss": 1.893, "step": 2863 }, { "epoch": 0.2796875, "grad_norm": 0.2725372612476349, "learning_rate": 0.00043007740138248466, "loss": 1.8397, "step": 2864 }, { "epoch": 0.27978515625, "grad_norm": 0.23973391950130463, "learning_rate": 0.0004300258697896675, "loss": 1.8947, "step": 2865 }, { "epoch": 0.2798828125, "grad_norm": 0.2913033664226532, "learning_rate": 0.0004299743227111395, "loss": 1.863, "step": 2866 }, { "epoch": 0.27998046875, "grad_norm": 0.23055680096149445, "learning_rate": 0.0004299227601520496, "loss": 1.8307, "step": 2867 }, { "epoch": 0.280078125, "grad_norm": 0.26329392194747925, "learning_rate": 
0.00042987118211754844, "loss": 1.8753, "step": 2868 }, { "epoch": 0.28017578125, "grad_norm": 0.3165866732597351, "learning_rate": 0.0004298195886127882, "loss": 1.8636, "step": 2869 }, { "epoch": 0.2802734375, "grad_norm": 0.2055087685585022, "learning_rate": 0.0004297679796429226, "loss": 1.8981, "step": 2870 }, { "epoch": 0.28037109375, "grad_norm": 0.23618242144584656, "learning_rate": 0.00042971635521310705, "loss": 1.935, "step": 2871 }, { "epoch": 0.28046875, "grad_norm": 0.21628020703792572, "learning_rate": 0.0004296647153284982, "loss": 1.8618, "step": 2872 }, { "epoch": 0.28056640625, "grad_norm": 0.24062804877758026, "learning_rate": 0.0004296130599942546, "loss": 1.8927, "step": 2873 }, { "epoch": 0.2806640625, "grad_norm": 0.25762608647346497, "learning_rate": 0.00042956138921553596, "loss": 1.8769, "step": 2874 }, { "epoch": 0.28076171875, "grad_norm": 0.2140209972858429, "learning_rate": 0.00042950970299750386, "loss": 1.8871, "step": 2875 }, { "epoch": 0.280859375, "grad_norm": 0.24165524542331696, "learning_rate": 0.0004294580013453213, "loss": 1.9134, "step": 2876 }, { "epoch": 0.28095703125, "grad_norm": 0.21578289568424225, "learning_rate": 0.00042940628426415265, "loss": 1.868, "step": 2877 }, { "epoch": 0.2810546875, "grad_norm": 0.24762646853923798, "learning_rate": 0.00042935455175916414, "loss": 1.8629, "step": 2878 }, { "epoch": 0.28115234375, "grad_norm": 0.22082191705703735, "learning_rate": 0.0004293028038355235, "loss": 1.8574, "step": 2879 }, { "epoch": 0.28125, "grad_norm": 0.22337709367275238, "learning_rate": 0.0004292510404983997, "loss": 1.903, "step": 2880 }, { "epoch": 0.28134765625, "grad_norm": 0.23375888168811798, "learning_rate": 0.0004291992617529635, "loss": 1.8598, "step": 2881 }, { "epoch": 0.2814453125, "grad_norm": 0.23466967046260834, "learning_rate": 0.00042914746760438707, "loss": 1.9082, "step": 2882 }, { "epoch": 0.28154296875, "grad_norm": 0.2091575413942337, "learning_rate": 0.0004290956580578443, "loss": 1.8389, "step": 2883 }, { "epoch": 0.281640625, "grad_norm": 0.27531692385673523, "learning_rate": 0.0004290438331185105, "loss": 1.876, "step": 2884 }, { "epoch": 0.28173828125, "grad_norm": 0.24806031584739685, "learning_rate": 0.0004289919927915624, "loss": 1.8586, "step": 2885 }, { "epoch": 0.2818359375, "grad_norm": 0.19965526461601257, "learning_rate": 0.00042894013708217845, "loss": 1.8679, "step": 2886 }, { "epoch": 0.28193359375, "grad_norm": 0.2199181616306305, "learning_rate": 0.00042888826599553863, "loss": 1.9465, "step": 2887 }, { "epoch": 0.28203125, "grad_norm": 0.2529730200767517, "learning_rate": 0.0004288363795368244, "loss": 1.9187, "step": 2888 }, { "epoch": 0.28212890625, "grad_norm": 0.299998939037323, "learning_rate": 0.00042878447771121865, "loss": 1.8727, "step": 2889 }, { "epoch": 0.2822265625, "grad_norm": 0.30152031779289246, "learning_rate": 0.000428732560523906, "loss": 1.9078, "step": 2890 }, { "epoch": 0.28232421875, "grad_norm": 0.28815585374832153, "learning_rate": 0.0004286806279800726, "loss": 1.9571, "step": 2891 }, { "epoch": 0.282421875, "grad_norm": 0.2583948075771332, "learning_rate": 0.00042862868008490583, "loss": 1.9134, "step": 2892 }, { "epoch": 0.28251953125, "grad_norm": 0.23865336179733276, "learning_rate": 0.000428576716843595, "loss": 1.8894, "step": 2893 }, { "epoch": 0.2826171875, "grad_norm": 0.2828342020511627, "learning_rate": 0.00042852473826133076, "loss": 1.8806, "step": 2894 }, { "epoch": 0.28271484375, "grad_norm": 0.23666423559188843, "learning_rate": 
0.0004284727443433052, "loss": 1.8645, "step": 2895 }, { "epoch": 0.2828125, "grad_norm": 0.23653465509414673, "learning_rate": 0.0004284207350947122, "loss": 1.8928, "step": 2896 }, { "epoch": 0.28291015625, "grad_norm": 0.21783040463924408, "learning_rate": 0.0004283687105207469, "loss": 1.9072, "step": 2897 }, { "epoch": 0.2830078125, "grad_norm": 0.2457730621099472, "learning_rate": 0.0004283166706266061, "loss": 1.8927, "step": 2898 }, { "epoch": 0.28310546875, "grad_norm": 0.19717064499855042, "learning_rate": 0.0004282646154174882, "loss": 1.8715, "step": 2899 }, { "epoch": 0.283203125, "grad_norm": 0.22203285992145538, "learning_rate": 0.00042821254489859307, "loss": 1.8398, "step": 2900 }, { "epoch": 0.28330078125, "grad_norm": 0.21506066620349884, "learning_rate": 0.00042816045907512207, "loss": 1.9277, "step": 2901 }, { "epoch": 0.2833984375, "grad_norm": 0.268706351518631, "learning_rate": 0.00042810835795227797, "loss": 1.8684, "step": 2902 }, { "epoch": 0.28349609375, "grad_norm": 0.19960474967956543, "learning_rate": 0.0004280562415352654, "loss": 1.9536, "step": 2903 }, { "epoch": 0.28359375, "grad_norm": 0.216094970703125, "learning_rate": 0.00042800410982929025, "loss": 1.8531, "step": 2904 }, { "epoch": 0.28369140625, "grad_norm": 0.25142574310302734, "learning_rate": 0.00042795196283956007, "loss": 1.8876, "step": 2905 }, { "epoch": 0.2837890625, "grad_norm": 0.24804849922657013, "learning_rate": 0.0004278998005712837, "loss": 1.8942, "step": 2906 }, { "epoch": 0.28388671875, "grad_norm": 0.2586076557636261, "learning_rate": 0.000427847623029672, "loss": 1.9142, "step": 2907 }, { "epoch": 0.283984375, "grad_norm": 0.2308054268360138, "learning_rate": 0.0004277954302199368, "loss": 1.8652, "step": 2908 }, { "epoch": 0.28408203125, "grad_norm": 0.24985018372535706, "learning_rate": 0.00042774322214729173, "loss": 1.911, "step": 2909 }, { "epoch": 0.2841796875, "grad_norm": 0.3349003493785858, "learning_rate": 0.00042769099881695194, "loss": 1.955, "step": 2910 }, { "epoch": 0.28427734375, "grad_norm": 0.2801406979560852, "learning_rate": 0.0004276387602341341, "loss": 1.878, "step": 2911 }, { "epoch": 0.284375, "grad_norm": 0.2786557674407959, "learning_rate": 0.0004275865064040564, "loss": 1.9241, "step": 2912 }, { "epoch": 0.28447265625, "grad_norm": 0.3159611225128174, "learning_rate": 0.0004275342373319385, "loss": 1.8619, "step": 2913 }, { "epoch": 0.2845703125, "grad_norm": 0.3373079001903534, "learning_rate": 0.0004274819530230016, "loss": 1.8697, "step": 2914 }, { "epoch": 0.28466796875, "grad_norm": 0.22254876792430878, "learning_rate": 0.00042742965348246855, "loss": 1.9184, "step": 2915 }, { "epoch": 0.284765625, "grad_norm": 0.2410246878862381, "learning_rate": 0.00042737733871556335, "loss": 1.9146, "step": 2916 }, { "epoch": 0.28486328125, "grad_norm": 0.2892058491706848, "learning_rate": 0.0004273250087275121, "loss": 1.8803, "step": 2917 }, { "epoch": 0.2849609375, "grad_norm": 0.27044740319252014, "learning_rate": 0.0004272726635235419, "loss": 1.8896, "step": 2918 }, { "epoch": 0.28505859375, "grad_norm": 0.18537931144237518, "learning_rate": 0.0004272203031088816, "loss": 1.8655, "step": 2919 }, { "epoch": 0.28515625, "grad_norm": 0.307284414768219, "learning_rate": 0.00042716792748876157, "loss": 1.9111, "step": 2920 }, { "epoch": 0.28525390625, "grad_norm": 0.30498889088630676, "learning_rate": 0.00042711553666841375, "loss": 1.8797, "step": 2921 }, { "epoch": 0.2853515625, "grad_norm": 0.22686442732810974, "learning_rate": 0.0004270631306530714, 
"loss": 1.8822, "step": 2922 }, { "epoch": 0.28544921875, "grad_norm": 0.27979257702827454, "learning_rate": 0.0004270107094479694, "loss": 1.9223, "step": 2923 }, { "epoch": 0.285546875, "grad_norm": 0.2563009262084961, "learning_rate": 0.00042695827305834434, "loss": 1.8761, "step": 2924 }, { "epoch": 0.28564453125, "grad_norm": 0.2728877067565918, "learning_rate": 0.000426905821489434, "loss": 1.9284, "step": 2925 }, { "epoch": 0.2857421875, "grad_norm": 0.23300801217556, "learning_rate": 0.00042685335474647773, "loss": 1.8814, "step": 2926 }, { "epoch": 0.28583984375, "grad_norm": 0.3054649233818054, "learning_rate": 0.0004268008728347168, "loss": 1.8466, "step": 2927 }, { "epoch": 0.2859375, "grad_norm": 0.210355743765831, "learning_rate": 0.00042674837575939334, "loss": 1.8857, "step": 2928 }, { "epoch": 0.28603515625, "grad_norm": 0.25826016068458557, "learning_rate": 0.00042669586352575155, "loss": 1.8571, "step": 2929 }, { "epoch": 0.2861328125, "grad_norm": 0.231784388422966, "learning_rate": 0.00042664333613903695, "loss": 1.8342, "step": 2930 }, { "epoch": 0.28623046875, "grad_norm": 0.22335542738437653, "learning_rate": 0.00042659079360449654, "loss": 1.8573, "step": 2931 }, { "epoch": 0.286328125, "grad_norm": 0.22986647486686707, "learning_rate": 0.0004265382359273788, "loss": 1.9117, "step": 2932 }, { "epoch": 0.28642578125, "grad_norm": 0.27009549736976624, "learning_rate": 0.0004264856631129338, "loss": 1.8988, "step": 2933 }, { "epoch": 0.2865234375, "grad_norm": 0.23258306086063385, "learning_rate": 0.000426433075166413, "loss": 1.8654, "step": 2934 }, { "epoch": 0.28662109375, "grad_norm": 0.23832274973392487, "learning_rate": 0.0004263804720930697, "loss": 1.8759, "step": 2935 }, { "epoch": 0.28671875, "grad_norm": 0.31919005513191223, "learning_rate": 0.00042632785389815824, "loss": 1.8901, "step": 2936 }, { "epoch": 0.28681640625, "grad_norm": 0.2530565559864044, "learning_rate": 0.00042627522058693483, "loss": 1.8673, "step": 2937 }, { "epoch": 0.2869140625, "grad_norm": 0.26637616753578186, "learning_rate": 0.00042622257216465715, "loss": 1.8791, "step": 2938 }, { "epoch": 0.28701171875, "grad_norm": 0.2281026691198349, "learning_rate": 0.00042616990863658416, "loss": 1.8664, "step": 2939 }, { "epoch": 0.287109375, "grad_norm": 0.23909759521484375, "learning_rate": 0.0004261172300079766, "loss": 1.8193, "step": 2940 }, { "epoch": 0.28720703125, "grad_norm": 0.22489655017852783, "learning_rate": 0.0004260645362840965, "loss": 1.8929, "step": 2941 }, { "epoch": 0.2873046875, "grad_norm": 0.20088250935077667, "learning_rate": 0.00042601182747020744, "loss": 1.8831, "step": 2942 }, { "epoch": 0.28740234375, "grad_norm": 0.3041893541812897, "learning_rate": 0.0004259591035715748, "loss": 1.8802, "step": 2943 }, { "epoch": 0.2875, "grad_norm": 0.19707295298576355, "learning_rate": 0.00042590636459346505, "loss": 1.8729, "step": 2944 }, { "epoch": 0.28759765625, "grad_norm": 0.26492106914520264, "learning_rate": 0.00042585361054114636, "loss": 1.9214, "step": 2945 }, { "epoch": 0.2876953125, "grad_norm": 0.23438039422035217, "learning_rate": 0.00042580084141988846, "loss": 1.9079, "step": 2946 }, { "epoch": 0.28779296875, "grad_norm": 0.2153705209493637, "learning_rate": 0.0004257480572349625, "loss": 1.8923, "step": 2947 }, { "epoch": 0.287890625, "grad_norm": 0.2112143486738205, "learning_rate": 0.000425695257991641, "loss": 1.844, "step": 2948 }, { "epoch": 0.28798828125, "grad_norm": 0.25230222940444946, "learning_rate": 0.00042564244369519837, "loss": 1.8469, 
"step": 2949 }, { "epoch": 0.2880859375, "grad_norm": 0.2979770600795746, "learning_rate": 0.00042558961435091013, "loss": 1.8678, "step": 2950 }, { "epoch": 0.28818359375, "grad_norm": 0.21866418421268463, "learning_rate": 0.00042553676996405363, "loss": 1.8903, "step": 2951 }, { "epoch": 0.28828125, "grad_norm": 0.23139160871505737, "learning_rate": 0.0004254839105399073, "loss": 1.8967, "step": 2952 }, { "epoch": 0.28837890625, "grad_norm": 0.22103898227214813, "learning_rate": 0.0004254310360837516, "loss": 1.8925, "step": 2953 }, { "epoch": 0.2884765625, "grad_norm": 0.21849390864372253, "learning_rate": 0.00042537814660086797, "loss": 1.8718, "step": 2954 }, { "epoch": 0.28857421875, "grad_norm": 0.23933055996894836, "learning_rate": 0.0004253252420965398, "loss": 1.8892, "step": 2955 }, { "epoch": 0.288671875, "grad_norm": 0.20929864048957825, "learning_rate": 0.00042527232257605173, "loss": 1.8613, "step": 2956 }, { "epoch": 0.28876953125, "grad_norm": 0.24336673319339752, "learning_rate": 0.0004252193880446899, "loss": 1.9355, "step": 2957 }, { "epoch": 0.2888671875, "grad_norm": 0.2018166482448578, "learning_rate": 0.000425166438507742, "loss": 1.8684, "step": 2958 }, { "epoch": 0.28896484375, "grad_norm": 0.1958925426006317, "learning_rate": 0.0004251134739704973, "loss": 1.8739, "step": 2959 }, { "epoch": 0.2890625, "grad_norm": 0.2154635787010193, "learning_rate": 0.0004250604944382464, "loss": 1.8547, "step": 2960 }, { "epoch": 0.28916015625, "grad_norm": 0.21988588571548462, "learning_rate": 0.0004250074999162816, "loss": 1.8857, "step": 2961 }, { "epoch": 0.2892578125, "grad_norm": 0.1953074336051941, "learning_rate": 0.0004249544904098964, "loss": 1.8928, "step": 2962 }, { "epoch": 0.28935546875, "grad_norm": 0.18395958840847015, "learning_rate": 0.00042490146592438614, "loss": 1.8739, "step": 2963 }, { "epoch": 0.289453125, "grad_norm": 0.21299080550670624, "learning_rate": 0.0004248484264650474, "loss": 1.8542, "step": 2964 }, { "epoch": 0.28955078125, "grad_norm": 0.23363280296325684, "learning_rate": 0.0004247953720371784, "loss": 1.8985, "step": 2965 }, { "epoch": 0.2896484375, "grad_norm": 0.23393043875694275, "learning_rate": 0.00042474230264607886, "loss": 1.899, "step": 2966 }, { "epoch": 0.28974609375, "grad_norm": 0.202012300491333, "learning_rate": 0.0004246892182970499, "loss": 1.8999, "step": 2967 }, { "epoch": 0.28984375, "grad_norm": 0.23654010891914368, "learning_rate": 0.000424636118995394, "loss": 1.8621, "step": 2968 }, { "epoch": 0.28994140625, "grad_norm": 0.18624065816402435, "learning_rate": 0.0004245830047464156, "loss": 1.8794, "step": 2969 }, { "epoch": 0.2900390625, "grad_norm": 0.22178705036640167, "learning_rate": 0.00042452987555542025, "loss": 1.9132, "step": 2970 }, { "epoch": 0.29013671875, "grad_norm": 0.2666608691215515, "learning_rate": 0.0004244767314277149, "loss": 1.842, "step": 2971 }, { "epoch": 0.290234375, "grad_norm": 0.21921314299106598, "learning_rate": 0.0004244235723686084, "loss": 1.8707, "step": 2972 }, { "epoch": 0.29033203125, "grad_norm": 0.26558271050453186, "learning_rate": 0.00042437039838341087, "loss": 1.8919, "step": 2973 }, { "epoch": 0.2904296875, "grad_norm": 0.2301444709300995, "learning_rate": 0.00042431720947743373, "loss": 1.8578, "step": 2974 }, { "epoch": 0.29052734375, "grad_norm": 0.21512138843536377, "learning_rate": 0.0004242640056559902, "loss": 1.8769, "step": 2975 }, { "epoch": 0.290625, "grad_norm": 0.20394618809223175, "learning_rate": 0.00042421078692439496, "loss": 1.9135, "step": 2976 }, { 
"epoch": 0.29072265625, "grad_norm": 0.22402624785900116, "learning_rate": 0.000424157553287964, "loss": 1.9198, "step": 2977 }, { "epoch": 0.2908203125, "grad_norm": 0.2281763255596161, "learning_rate": 0.0004241043047520147, "loss": 1.8726, "step": 2978 }, { "epoch": 0.29091796875, "grad_norm": 0.22465281188488007, "learning_rate": 0.0004240510413218665, "loss": 1.8684, "step": 2979 }, { "epoch": 0.291015625, "grad_norm": 0.26606932282447815, "learning_rate": 0.00042399776300283963, "loss": 1.9309, "step": 2980 }, { "epoch": 0.29111328125, "grad_norm": 0.2602376639842987, "learning_rate": 0.0004239444698002563, "loss": 1.819, "step": 2981 }, { "epoch": 0.2912109375, "grad_norm": 0.2876492440700531, "learning_rate": 0.00042389116171943995, "loss": 1.8752, "step": 2982 }, { "epoch": 0.29130859375, "grad_norm": 0.24838502705097198, "learning_rate": 0.00042383783876571556, "loss": 1.8708, "step": 2983 }, { "epoch": 0.29140625, "grad_norm": 0.2866203188896179, "learning_rate": 0.0004237845009444097, "loss": 1.8873, "step": 2984 }, { "epoch": 0.29150390625, "grad_norm": 0.3191507160663605, "learning_rate": 0.0004237311482608503, "loss": 1.8515, "step": 2985 }, { "epoch": 0.2916015625, "grad_norm": 0.306711345911026, "learning_rate": 0.00042367778072036684, "loss": 1.8751, "step": 2986 }, { "epoch": 0.29169921875, "grad_norm": 0.23548844456672668, "learning_rate": 0.00042362439832829027, "loss": 1.867, "step": 2987 }, { "epoch": 0.291796875, "grad_norm": 0.27832359075546265, "learning_rate": 0.00042357100108995297, "loss": 1.8451, "step": 2988 }, { "epoch": 0.29189453125, "grad_norm": 0.35388869047164917, "learning_rate": 0.0004235175890106889, "loss": 1.8578, "step": 2989 }, { "epoch": 0.2919921875, "grad_norm": 0.21929793059825897, "learning_rate": 0.00042346416209583336, "loss": 1.8308, "step": 2990 }, { "epoch": 0.29208984375, "grad_norm": 0.2917014956474304, "learning_rate": 0.00042341072035072345, "loss": 1.888, "step": 2991 }, { "epoch": 0.2921875, "grad_norm": 0.2988588511943817, "learning_rate": 0.0004233572637806972, "loss": 1.8833, "step": 2992 }, { "epoch": 0.29228515625, "grad_norm": 0.24378128349781036, "learning_rate": 0.0004233037923910947, "loss": 1.8409, "step": 2993 }, { "epoch": 0.2923828125, "grad_norm": 0.2123136818408966, "learning_rate": 0.00042325030618725726, "loss": 1.8501, "step": 2994 }, { "epoch": 0.29248046875, "grad_norm": 0.2910783886909485, "learning_rate": 0.0004231968051745275, "loss": 1.8987, "step": 2995 }, { "epoch": 0.292578125, "grad_norm": 0.3038376271724701, "learning_rate": 0.00042314328935824994, "loss": 1.9294, "step": 2996 }, { "epoch": 0.29267578125, "grad_norm": 0.2226715087890625, "learning_rate": 0.0004230897587437701, "loss": 1.8779, "step": 2997 }, { "epoch": 0.2927734375, "grad_norm": 0.24885967373847961, "learning_rate": 0.0004230362133364354, "loss": 1.854, "step": 2998 }, { "epoch": 0.29287109375, "grad_norm": 0.22683629393577576, "learning_rate": 0.0004229826531415943, "loss": 1.8815, "step": 2999 }, { "epoch": 0.29296875, "grad_norm": 0.2502784729003906, "learning_rate": 0.00042292907816459736, "loss": 1.8787, "step": 3000 }, { "epoch": 0.29306640625, "grad_norm": 0.22668708860874176, "learning_rate": 0.00042287548841079593, "loss": 1.867, "step": 3001 }, { "epoch": 0.2931640625, "grad_norm": 0.2151390165090561, "learning_rate": 0.0004228218838855433, "loss": 1.9048, "step": 3002 }, { "epoch": 0.29326171875, "grad_norm": 0.2484009712934494, "learning_rate": 0.0004227682645941941, "loss": 1.8894, "step": 3003 }, { "epoch": 0.293359375, 
"grad_norm": 0.2573185861110687, "learning_rate": 0.0004227146305421043, "loss": 1.8898, "step": 3004 }, { "epoch": 0.29345703125, "grad_norm": 0.23028187453746796, "learning_rate": 0.0004226609817346316, "loss": 1.8766, "step": 3005 }, { "epoch": 0.2935546875, "grad_norm": 0.2626796364784241, "learning_rate": 0.00042260731817713495, "loss": 1.9076, "step": 3006 }, { "epoch": 0.29365234375, "grad_norm": 0.23662570118904114, "learning_rate": 0.0004225536398749749, "loss": 1.8151, "step": 3007 }, { "epoch": 0.29375, "grad_norm": 0.20050571858882904, "learning_rate": 0.00042249994683351346, "loss": 1.86, "step": 3008 }, { "epoch": 0.29384765625, "grad_norm": 0.2129492163658142, "learning_rate": 0.00042244623905811395, "loss": 1.9022, "step": 3009 }, { "epoch": 0.2939453125, "grad_norm": 0.19077609479427338, "learning_rate": 0.00042239251655414153, "loss": 1.8774, "step": 3010 }, { "epoch": 0.29404296875, "grad_norm": 0.21290133893489838, "learning_rate": 0.00042233877932696247, "loss": 1.8751, "step": 3011 }, { "epoch": 0.294140625, "grad_norm": 0.23995377123355865, "learning_rate": 0.0004222850273819447, "loss": 1.9138, "step": 3012 }, { "epoch": 0.29423828125, "grad_norm": 0.2578481435775757, "learning_rate": 0.00042223126072445745, "loss": 1.8922, "step": 3013 }, { "epoch": 0.2943359375, "grad_norm": 0.2387392222881317, "learning_rate": 0.00042217747935987166, "loss": 1.8643, "step": 3014 }, { "epoch": 0.29443359375, "grad_norm": 0.2922835946083069, "learning_rate": 0.00042212368329355953, "loss": 1.81, "step": 3015 }, { "epoch": 0.29453125, "grad_norm": 0.20787885785102844, "learning_rate": 0.00042206987253089477, "loss": 1.8931, "step": 3016 }, { "epoch": 0.29462890625, "grad_norm": 0.2633446753025055, "learning_rate": 0.0004220160470772528, "loss": 1.8675, "step": 3017 }, { "epoch": 0.2947265625, "grad_norm": 0.1937829703092575, "learning_rate": 0.0004219622069380101, "loss": 1.8966, "step": 3018 }, { "epoch": 0.29482421875, "grad_norm": 0.26633307337760925, "learning_rate": 0.0004219083521185449, "loss": 1.8361, "step": 3019 }, { "epoch": 0.294921875, "grad_norm": 0.2277202308177948, "learning_rate": 0.00042185448262423673, "loss": 1.8753, "step": 3020 }, { "epoch": 0.29501953125, "grad_norm": 0.19210180640220642, "learning_rate": 0.00042180059846046685, "loss": 1.8922, "step": 3021 }, { "epoch": 0.2951171875, "grad_norm": 0.20748457312583923, "learning_rate": 0.0004217466996326178, "loss": 1.898, "step": 3022 }, { "epoch": 0.29521484375, "grad_norm": 0.1865435540676117, "learning_rate": 0.0004216927861460733, "loss": 1.9276, "step": 3023 }, { "epoch": 0.2953125, "grad_norm": 0.2428085207939148, "learning_rate": 0.0004216388580062192, "loss": 1.8543, "step": 3024 }, { "epoch": 0.29541015625, "grad_norm": 0.22671908140182495, "learning_rate": 0.00042158491521844234, "loss": 1.8916, "step": 3025 }, { "epoch": 0.2955078125, "grad_norm": 0.22087739408016205, "learning_rate": 0.00042153095778813096, "loss": 1.8361, "step": 3026 }, { "epoch": 0.29560546875, "grad_norm": 0.21374981105327606, "learning_rate": 0.00042147698572067506, "loss": 1.8736, "step": 3027 }, { "epoch": 0.295703125, "grad_norm": 0.2175641655921936, "learning_rate": 0.000421422999021466, "loss": 1.8394, "step": 3028 }, { "epoch": 0.29580078125, "grad_norm": 0.28677627444267273, "learning_rate": 0.0004213689976958965, "loss": 1.8548, "step": 3029 }, { "epoch": 0.2958984375, "grad_norm": 0.2653857469558716, "learning_rate": 0.00042131498174936084, "loss": 1.873, "step": 3030 }, { "epoch": 0.29599609375, "grad_norm": 
0.23550917208194733, "learning_rate": 0.00042126095118725474, "loss": 1.8541, "step": 3031 }, { "epoch": 0.29609375, "grad_norm": 0.2541399598121643, "learning_rate": 0.00042120690601497537, "loss": 1.8607, "step": 3032 }, { "epoch": 0.29619140625, "grad_norm": 0.2753686010837555, "learning_rate": 0.00042115284623792137, "loss": 1.9019, "step": 3033 }, { "epoch": 0.2962890625, "grad_norm": 0.2758835554122925, "learning_rate": 0.0004210987718614928, "loss": 1.9017, "step": 3034 }, { "epoch": 0.29638671875, "grad_norm": 0.22177983820438385, "learning_rate": 0.0004210446828910913, "loss": 1.8971, "step": 3035 }, { "epoch": 0.296484375, "grad_norm": 0.22884881496429443, "learning_rate": 0.00042099057933211976, "loss": 1.8347, "step": 3036 }, { "epoch": 0.29658203125, "grad_norm": 0.27076879143714905, "learning_rate": 0.00042093646118998275, "loss": 1.86, "step": 3037 }, { "epoch": 0.2966796875, "grad_norm": 0.32488006353378296, "learning_rate": 0.0004208823284700861, "loss": 1.9228, "step": 3038 }, { "epoch": 0.29677734375, "grad_norm": 0.22296027839183807, "learning_rate": 0.00042082818117783726, "loss": 1.9108, "step": 3039 }, { "epoch": 0.296875, "grad_norm": 0.29426977038383484, "learning_rate": 0.00042077401931864505, "loss": 1.8609, "step": 3040 }, { "epoch": 0.29697265625, "grad_norm": 0.3135233521461487, "learning_rate": 0.00042071984289791976, "loss": 1.8902, "step": 3041 }, { "epoch": 0.2970703125, "grad_norm": 0.3182580769062042, "learning_rate": 0.00042066565192107314, "loss": 1.8717, "step": 3042 }, { "epoch": 0.29716796875, "grad_norm": 0.2791113257408142, "learning_rate": 0.00042061144639351833, "loss": 1.9108, "step": 3043 }, { "epoch": 0.297265625, "grad_norm": 0.24948930740356445, "learning_rate": 0.00042055722632067006, "loss": 1.8509, "step": 3044 }, { "epoch": 0.29736328125, "grad_norm": 0.270920991897583, "learning_rate": 0.0004205029917079444, "loss": 1.8868, "step": 3045 }, { "epoch": 0.2974609375, "grad_norm": 0.2594306766986847, "learning_rate": 0.0004204487425607589, "loss": 1.8583, "step": 3046 }, { "epoch": 0.29755859375, "grad_norm": 0.24974964559078217, "learning_rate": 0.00042039447888453255, "loss": 1.8907, "step": 3047 }, { "epoch": 0.29765625, "grad_norm": 0.25526463985443115, "learning_rate": 0.00042034020068468583, "loss": 1.904, "step": 3048 }, { "epoch": 0.29775390625, "grad_norm": 0.24909386038780212, "learning_rate": 0.00042028590796664073, "loss": 1.8768, "step": 3049 }, { "epoch": 0.2978515625, "grad_norm": 0.20612211525440216, "learning_rate": 0.0004202316007358205, "loss": 1.9036, "step": 3050 }, { "epoch": 0.29794921875, "grad_norm": 0.26715099811553955, "learning_rate": 0.00042017727899764995, "loss": 1.9315, "step": 3051 }, { "epoch": 0.298046875, "grad_norm": 0.21301884949207306, "learning_rate": 0.00042012294275755544, "loss": 1.8762, "step": 3052 }, { "epoch": 0.29814453125, "grad_norm": 0.23274759948253632, "learning_rate": 0.00042006859202096464, "loss": 1.9061, "step": 3053 }, { "epoch": 0.2982421875, "grad_norm": 0.23647235333919525, "learning_rate": 0.00042001422679330665, "loss": 1.8344, "step": 3054 }, { "epoch": 0.29833984375, "grad_norm": 0.22753413021564484, "learning_rate": 0.00041995984708001217, "loss": 1.8933, "step": 3055 }, { "epoch": 0.2984375, "grad_norm": 0.25154316425323486, "learning_rate": 0.0004199054528865131, "loss": 1.8551, "step": 3056 }, { "epoch": 0.29853515625, "grad_norm": 0.22566568851470947, "learning_rate": 0.0004198510442182431, "loss": 1.8548, "step": 3057 }, { "epoch": 0.2986328125, "grad_norm": 
0.21213006973266602, "learning_rate": 0.000419796621080637, "loss": 1.8801, "step": 3058 }, { "epoch": 0.29873046875, "grad_norm": 0.2567862570285797, "learning_rate": 0.00041974218347913127, "loss": 1.8701, "step": 3059 }, { "epoch": 0.298828125, "grad_norm": 0.25658440589904785, "learning_rate": 0.0004196877314191637, "loss": 1.9191, "step": 3060 }, { "epoch": 0.29892578125, "grad_norm": 0.21651284396648407, "learning_rate": 0.0004196332649061736, "loss": 1.8633, "step": 3061 }, { "epoch": 0.2990234375, "grad_norm": 0.23801001906394958, "learning_rate": 0.00041957878394560167, "loss": 1.9149, "step": 3062 }, { "epoch": 0.29912109375, "grad_norm": 0.21144436299800873, "learning_rate": 0.00041952428854289005, "loss": 1.8981, "step": 3063 }, { "epoch": 0.29921875, "grad_norm": 0.2747276723384857, "learning_rate": 0.0004194697787034824, "loss": 1.8633, "step": 3064 }, { "epoch": 0.29931640625, "grad_norm": 0.2292105257511139, "learning_rate": 0.0004194152544328237, "loss": 1.8304, "step": 3065 }, { "epoch": 0.2994140625, "grad_norm": 0.19762180745601654, "learning_rate": 0.0004193607157363606, "loss": 1.8328, "step": 3066 }, { "epoch": 0.29951171875, "grad_norm": 0.27750471234321594, "learning_rate": 0.0004193061626195408, "loss": 1.8736, "step": 3067 }, { "epoch": 0.299609375, "grad_norm": 0.19942927360534668, "learning_rate": 0.0004192515950878138, "loss": 1.8863, "step": 3068 }, { "epoch": 0.29970703125, "grad_norm": 0.23817166686058044, "learning_rate": 0.0004191970131466304, "loss": 1.8638, "step": 3069 }, { "epoch": 0.2998046875, "grad_norm": 0.20811405777931213, "learning_rate": 0.0004191424168014429, "loss": 1.9001, "step": 3070 }, { "epoch": 0.29990234375, "grad_norm": 0.23758384585380554, "learning_rate": 0.00041908780605770497, "loss": 1.9237, "step": 3071 }, { "epoch": 0.3, "grad_norm": 0.20853745937347412, "learning_rate": 0.0004190331809208717, "loss": 1.9003, "step": 3072 }, { "epoch": 0.30009765625, "grad_norm": 0.23026727139949799, "learning_rate": 0.00041897854139639963, "loss": 1.8439, "step": 3073 }, { "epoch": 0.3001953125, "grad_norm": 0.1935022622346878, "learning_rate": 0.00041892388748974687, "loss": 1.8977, "step": 3074 }, { "epoch": 0.30029296875, "grad_norm": 0.22622525691986084, "learning_rate": 0.0004188692192063727, "loss": 1.8909, "step": 3075 }, { "epoch": 0.300390625, "grad_norm": 0.2203913778066635, "learning_rate": 0.00041881453655173825, "loss": 1.8693, "step": 3076 }, { "epoch": 0.30048828125, "grad_norm": 0.18365997076034546, "learning_rate": 0.00041875983953130563, "loss": 1.8997, "step": 3077 }, { "epoch": 0.3005859375, "grad_norm": 0.1894032061100006, "learning_rate": 0.0004187051281505386, "loss": 1.884, "step": 3078 }, { "epoch": 0.30068359375, "grad_norm": 0.17142687737941742, "learning_rate": 0.0004186504024149023, "loss": 1.8654, "step": 3079 }, { "epoch": 0.30078125, "grad_norm": 0.20617616176605225, "learning_rate": 0.00041859566232986355, "loss": 1.8877, "step": 3080 }, { "epoch": 0.30087890625, "grad_norm": 0.2163764387369156, "learning_rate": 0.0004185409079008903, "loss": 1.8872, "step": 3081 }, { "epoch": 0.3009765625, "grad_norm": 0.22180713713169098, "learning_rate": 0.0004184861391334519, "loss": 1.8953, "step": 3082 }, { "epoch": 0.30107421875, "grad_norm": 0.24861370027065277, "learning_rate": 0.00041843135603301945, "loss": 1.9071, "step": 3083 }, { "epoch": 0.301171875, "grad_norm": 0.20080408453941345, "learning_rate": 0.0004183765586050652, "loss": 1.8864, "step": 3084 }, { "epoch": 0.30126953125, "grad_norm": 
0.22699226438999176, "learning_rate": 0.00041832174685506296, "loss": 1.8346, "step": 3085 }, { "epoch": 0.3013671875, "grad_norm": 0.23934027552604675, "learning_rate": 0.00041826692078848787, "loss": 1.9335, "step": 3086 }, { "epoch": 0.30146484375, "grad_norm": 0.2329765111207962, "learning_rate": 0.0004182120804108167, "loss": 1.8811, "step": 3087 }, { "epoch": 0.3015625, "grad_norm": 0.21421007812023163, "learning_rate": 0.0004181572257275274, "loss": 1.9029, "step": 3088 }, { "epoch": 0.30166015625, "grad_norm": 0.22248250246047974, "learning_rate": 0.00041810235674409954, "loss": 1.8565, "step": 3089 }, { "epoch": 0.3017578125, "grad_norm": 0.2047787755727768, "learning_rate": 0.0004180474734660139, "loss": 1.8361, "step": 3090 }, { "epoch": 0.30185546875, "grad_norm": 0.2110084891319275, "learning_rate": 0.00041799257589875306, "loss": 1.9514, "step": 3091 }, { "epoch": 0.301953125, "grad_norm": 0.2262723594903946, "learning_rate": 0.0004179376640478007, "loss": 1.8518, "step": 3092 }, { "epoch": 0.30205078125, "grad_norm": 0.23749500513076782, "learning_rate": 0.0004178827379186419, "loss": 1.895, "step": 3093 }, { "epoch": 0.3021484375, "grad_norm": 0.21547161042690277, "learning_rate": 0.0004178277975167635, "loss": 1.8941, "step": 3094 }, { "epoch": 0.30224609375, "grad_norm": 0.22931936383247375, "learning_rate": 0.00041777284284765344, "loss": 1.8962, "step": 3095 }, { "epoch": 0.30234375, "grad_norm": 0.21383483707904816, "learning_rate": 0.0004177178739168013, "loss": 1.8861, "step": 3096 }, { "epoch": 0.30244140625, "grad_norm": 0.21916463971138, "learning_rate": 0.00041766289072969786, "loss": 1.8892, "step": 3097 }, { "epoch": 0.3025390625, "grad_norm": 0.23424991965293884, "learning_rate": 0.00041760789329183554, "loss": 1.8532, "step": 3098 }, { "epoch": 0.30263671875, "grad_norm": 0.2427515983581543, "learning_rate": 0.0004175528816087081, "loss": 1.9044, "step": 3099 }, { "epoch": 0.302734375, "grad_norm": 0.17762315273284912, "learning_rate": 0.00041749785568581064, "loss": 1.8533, "step": 3100 }, { "epoch": 0.30283203125, "grad_norm": 0.24818706512451172, "learning_rate": 0.0004174428155286397, "loss": 1.8871, "step": 3101 }, { "epoch": 0.3029296875, "grad_norm": 0.26211288571357727, "learning_rate": 0.0004173877611426936, "loss": 1.8751, "step": 3102 }, { "epoch": 0.30302734375, "grad_norm": 0.27161601185798645, "learning_rate": 0.00041733269253347156, "loss": 1.9253, "step": 3103 }, { "epoch": 0.303125, "grad_norm": 0.2077367603778839, "learning_rate": 0.00041727760970647446, "loss": 1.8505, "step": 3104 }, { "epoch": 0.30322265625, "grad_norm": 0.19965888559818268, "learning_rate": 0.00041722251266720467, "loss": 1.8617, "step": 3105 }, { "epoch": 0.3033203125, "grad_norm": 0.2016630917787552, "learning_rate": 0.0004171674014211658, "loss": 1.892, "step": 3106 }, { "epoch": 0.30341796875, "grad_norm": 0.2344559282064438, "learning_rate": 0.000417112275973863, "loss": 1.8858, "step": 3107 }, { "epoch": 0.303515625, "grad_norm": 0.21691851317882538, "learning_rate": 0.00041705713633080285, "loss": 1.9133, "step": 3108 }, { "epoch": 0.30361328125, "grad_norm": 0.1909625083208084, "learning_rate": 0.00041700198249749326, "loss": 1.868, "step": 3109 }, { "epoch": 0.3037109375, "grad_norm": 0.1911030411720276, "learning_rate": 0.0004169468144794437, "loss": 1.8663, "step": 3110 }, { "epoch": 0.30380859375, "grad_norm": 0.23135457932949066, "learning_rate": 0.0004168916322821649, "loss": 1.8741, "step": 3111 }, { "epoch": 0.30390625, "grad_norm": 0.17381919920444489, 
"learning_rate": 0.0004168364359111691, "loss": 1.8367, "step": 3112 }, { "epoch": 0.30400390625, "grad_norm": 0.22277741134166718, "learning_rate": 0.0004167812253719698, "loss": 1.9282, "step": 3113 }, { "epoch": 0.3041015625, "grad_norm": 0.22977705299854279, "learning_rate": 0.0004167260006700823, "loss": 1.829, "step": 3114 }, { "epoch": 0.30419921875, "grad_norm": 0.17196373641490936, "learning_rate": 0.0004166707618110228, "loss": 1.9048, "step": 3115 }, { "epoch": 0.304296875, "grad_norm": 0.2016908824443817, "learning_rate": 0.0004166155088003094, "loss": 1.89, "step": 3116 }, { "epoch": 0.30439453125, "grad_norm": 0.20606733858585358, "learning_rate": 0.00041656024164346116, "loss": 1.8202, "step": 3117 }, { "epoch": 0.3044921875, "grad_norm": 0.22512635588645935, "learning_rate": 0.00041650496034599895, "loss": 1.8737, "step": 3118 }, { "epoch": 0.30458984375, "grad_norm": 0.23294253647327423, "learning_rate": 0.00041644966491344483, "loss": 1.8862, "step": 3119 }, { "epoch": 0.3046875, "grad_norm": 0.19793957471847534, "learning_rate": 0.0004163943553513222, "loss": 1.8846, "step": 3120 }, { "epoch": 0.30478515625, "grad_norm": 0.22563882172107697, "learning_rate": 0.00041633903166515623, "loss": 1.8554, "step": 3121 }, { "epoch": 0.3048828125, "grad_norm": 0.20656296610832214, "learning_rate": 0.00041628369386047313, "loss": 1.8328, "step": 3122 }, { "epoch": 0.30498046875, "grad_norm": 0.2028147280216217, "learning_rate": 0.00041622834194280067, "loss": 1.8821, "step": 3123 }, { "epoch": 0.305078125, "grad_norm": 0.22198504209518433, "learning_rate": 0.00041617297591766794, "loss": 1.8595, "step": 3124 }, { "epoch": 0.30517578125, "grad_norm": 0.18492154777050018, "learning_rate": 0.0004161175957906058, "loss": 1.8535, "step": 3125 }, { "epoch": 0.3052734375, "grad_norm": 0.28677722811698914, "learning_rate": 0.00041606220156714587, "loss": 1.8726, "step": 3126 }, { "epoch": 0.30537109375, "grad_norm": 0.28656336665153503, "learning_rate": 0.00041600679325282173, "loss": 1.8607, "step": 3127 }, { "epoch": 0.30546875, "grad_norm": 0.2700391709804535, "learning_rate": 0.0004159513708531681, "loss": 1.8796, "step": 3128 }, { "epoch": 0.30556640625, "grad_norm": 0.254393607378006, "learning_rate": 0.00041589593437372135, "loss": 1.9238, "step": 3129 }, { "epoch": 0.3056640625, "grad_norm": 0.21035775542259216, "learning_rate": 0.000415840483820019, "loss": 1.9173, "step": 3130 }, { "epoch": 0.30576171875, "grad_norm": 0.2469177544116974, "learning_rate": 0.0004157850191975999, "loss": 1.8812, "step": 3131 }, { "epoch": 0.305859375, "grad_norm": 0.3074721693992615, "learning_rate": 0.00041572954051200466, "loss": 1.8538, "step": 3132 }, { "epoch": 0.30595703125, "grad_norm": 0.25160425901412964, "learning_rate": 0.00041567404776877515, "loss": 1.9504, "step": 3133 }, { "epoch": 0.3060546875, "grad_norm": 0.24114885926246643, "learning_rate": 0.0004156185409734545, "loss": 1.8791, "step": 3134 }, { "epoch": 0.30615234375, "grad_norm": 0.26935282349586487, "learning_rate": 0.00041556302013158735, "loss": 1.9038, "step": 3135 }, { "epoch": 0.30625, "grad_norm": 0.23060311377048492, "learning_rate": 0.0004155074852487198, "loss": 1.8994, "step": 3136 }, { "epoch": 0.30634765625, "grad_norm": 0.2560269832611084, "learning_rate": 0.0004154519363303993, "loss": 1.858, "step": 3137 }, { "epoch": 0.3064453125, "grad_norm": 0.25312769412994385, "learning_rate": 0.0004153963733821745, "loss": 1.9092, "step": 3138 }, { "epoch": 0.30654296875, "grad_norm": 0.26236045360565186, "learning_rate": 
0.0004153407964095959, "loss": 1.9356, "step": 3139 }, { "epoch": 0.306640625, "grad_norm": 0.24468998610973358, "learning_rate": 0.0004152852054182151, "loss": 1.8854, "step": 3140 }, { "epoch": 0.30673828125, "grad_norm": 0.21663248538970947, "learning_rate": 0.000415229600413585, "loss": 1.7767, "step": 3141 }, { "epoch": 0.3068359375, "grad_norm": 0.2483324110507965, "learning_rate": 0.0004151739814012602, "loss": 1.876, "step": 3142 }, { "epoch": 0.30693359375, "grad_norm": 0.26244667172431946, "learning_rate": 0.00041511834838679643, "loss": 1.8613, "step": 3143 }, { "epoch": 0.30703125, "grad_norm": 0.23940621316432953, "learning_rate": 0.00041506270137575105, "loss": 1.8662, "step": 3144 }, { "epoch": 0.30712890625, "grad_norm": 0.2661633789539337, "learning_rate": 0.00041500704037368254, "loss": 1.8443, "step": 3145 }, { "epoch": 0.3072265625, "grad_norm": 0.24965199828147888, "learning_rate": 0.0004149513653861512, "loss": 1.8688, "step": 3146 }, { "epoch": 0.30732421875, "grad_norm": 0.25112828612327576, "learning_rate": 0.00041489567641871827, "loss": 1.8736, "step": 3147 }, { "epoch": 0.307421875, "grad_norm": 0.2768343985080719, "learning_rate": 0.00041483997347694653, "loss": 1.8818, "step": 3148 }, { "epoch": 0.30751953125, "grad_norm": 0.2519071698188782, "learning_rate": 0.0004147842565664004, "loss": 1.8826, "step": 3149 }, { "epoch": 0.3076171875, "grad_norm": 0.24958127737045288, "learning_rate": 0.00041472852569264545, "loss": 1.8985, "step": 3150 }, { "epoch": 0.30771484375, "grad_norm": 0.18655551970005035, "learning_rate": 0.0004146727808612486, "loss": 1.8777, "step": 3151 }, { "epoch": 0.3078125, "grad_norm": 0.21460086107254028, "learning_rate": 0.0004146170220777783, "loss": 1.8659, "step": 3152 }, { "epoch": 0.30791015625, "grad_norm": 0.20738814771175385, "learning_rate": 0.0004145612493478044, "loss": 1.882, "step": 3153 }, { "epoch": 0.3080078125, "grad_norm": 0.2415093630552292, "learning_rate": 0.00041450546267689817, "loss": 1.9349, "step": 3154 }, { "epoch": 0.30810546875, "grad_norm": 0.1886264681816101, "learning_rate": 0.0004144496620706321, "loss": 1.8221, "step": 3155 }, { "epoch": 0.308203125, "grad_norm": 0.21637070178985596, "learning_rate": 0.00041439384753458016, "loss": 1.886, "step": 3156 }, { "epoch": 0.30830078125, "grad_norm": 0.19392646849155426, "learning_rate": 0.0004143380190743178, "loss": 1.8497, "step": 3157 }, { "epoch": 0.3083984375, "grad_norm": 0.20178459584712982, "learning_rate": 0.0004142821766954218, "loss": 1.9139, "step": 3158 }, { "epoch": 0.30849609375, "grad_norm": 0.22789818048477173, "learning_rate": 0.0004142263204034702, "loss": 1.8608, "step": 3159 }, { "epoch": 0.30859375, "grad_norm": 0.19092601537704468, "learning_rate": 0.00041417045020404267, "loss": 1.9351, "step": 3160 }, { "epoch": 0.30869140625, "grad_norm": 0.2776362895965576, "learning_rate": 0.0004141145661027201, "loss": 1.9225, "step": 3161 }, { "epoch": 0.3087890625, "grad_norm": 0.2744249999523163, "learning_rate": 0.0004140586681050848, "loss": 1.8647, "step": 3162 }, { "epoch": 0.30888671875, "grad_norm": 0.2721487283706665, "learning_rate": 0.0004140027562167205, "loss": 1.859, "step": 3163 }, { "epoch": 0.308984375, "grad_norm": 0.29660922288894653, "learning_rate": 0.00041394683044321235, "loss": 1.9029, "step": 3164 }, { "epoch": 0.30908203125, "grad_norm": 0.28247225284576416, "learning_rate": 0.0004138908907901468, "loss": 1.8388, "step": 3165 }, { "epoch": 0.3091796875, "grad_norm": 0.18411090970039368, "learning_rate": 
0.0004138349372631116, "loss": 1.9018, "step": 3166 }, { "epoch": 0.30927734375, "grad_norm": 0.31455278396606445, "learning_rate": 0.00041377896986769625, "loss": 1.8912, "step": 3167 }, { "epoch": 0.309375, "grad_norm": 0.24204468727111816, "learning_rate": 0.0004137229886094913, "loss": 1.9221, "step": 3168 }, { "epoch": 0.30947265625, "grad_norm": 0.24073071777820587, "learning_rate": 0.0004136669934940886, "loss": 1.8923, "step": 3169 }, { "epoch": 0.3095703125, "grad_norm": 0.25627797842025757, "learning_rate": 0.00041361098452708195, "loss": 1.8898, "step": 3170 }, { "epoch": 0.30966796875, "grad_norm": 0.21789778769016266, "learning_rate": 0.0004135549617140658, "loss": 1.8953, "step": 3171 }, { "epoch": 0.309765625, "grad_norm": 0.2414090931415558, "learning_rate": 0.00041349892506063654, "loss": 1.8784, "step": 3172 }, { "epoch": 0.30986328125, "grad_norm": 0.25079676508903503, "learning_rate": 0.0004134428745723916, "loss": 1.9243, "step": 3173 }, { "epoch": 0.3099609375, "grad_norm": 0.261700838804245, "learning_rate": 0.00041338681025492995, "loss": 1.8871, "step": 3174 }, { "epoch": 0.31005859375, "grad_norm": 0.2135803997516632, "learning_rate": 0.000413330732113852, "loss": 1.8587, "step": 3175 }, { "epoch": 0.31015625, "grad_norm": 0.22568845748901367, "learning_rate": 0.00041327464015475943, "loss": 1.877, "step": 3176 }, { "epoch": 0.31025390625, "grad_norm": 0.2725401520729065, "learning_rate": 0.0004132185343832553, "loss": 1.8597, "step": 3177 }, { "epoch": 0.3103515625, "grad_norm": 0.24924452602863312, "learning_rate": 0.00041316241480494404, "loss": 1.9168, "step": 3178 }, { "epoch": 0.31044921875, "grad_norm": 0.24290919303894043, "learning_rate": 0.00041310628142543157, "loss": 1.9064, "step": 3179 }, { "epoch": 0.310546875, "grad_norm": 0.2438277006149292, "learning_rate": 0.00041305013425032514, "loss": 1.8389, "step": 3180 }, { "epoch": 0.31064453125, "grad_norm": 0.24335592985153198, "learning_rate": 0.00041299397328523323, "loss": 1.8927, "step": 3181 }, { "epoch": 0.3107421875, "grad_norm": 0.2722269296646118, "learning_rate": 0.00041293779853576604, "loss": 1.8622, "step": 3182 }, { "epoch": 0.31083984375, "grad_norm": 0.20185427367687225, "learning_rate": 0.0004128816100075347, "loss": 1.8776, "step": 3183 }, { "epoch": 0.3109375, "grad_norm": 0.24118156731128693, "learning_rate": 0.00041282540770615206, "loss": 1.8891, "step": 3184 }, { "epoch": 0.31103515625, "grad_norm": 0.2829108238220215, "learning_rate": 0.0004127691916372322, "loss": 1.893, "step": 3185 }, { "epoch": 0.3111328125, "grad_norm": 0.20308874547481537, "learning_rate": 0.00041271296180639064, "loss": 1.8062, "step": 3186 }, { "epoch": 0.31123046875, "grad_norm": 0.26868608593940735, "learning_rate": 0.00041265671821924424, "loss": 1.8488, "step": 3187 }, { "epoch": 0.311328125, "grad_norm": 0.20011785626411438, "learning_rate": 0.00041260046088141116, "loss": 1.901, "step": 3188 }, { "epoch": 0.31142578125, "grad_norm": 0.22014351189136505, "learning_rate": 0.0004125441897985111, "loss": 1.9075, "step": 3189 }, { "epoch": 0.3115234375, "grad_norm": 0.21280397474765778, "learning_rate": 0.0004124879049761651, "loss": 1.901, "step": 3190 }, { "epoch": 0.31162109375, "grad_norm": 0.19353657960891724, "learning_rate": 0.00041243160641999527, "loss": 1.8421, "step": 3191 }, { "epoch": 0.31171875, "grad_norm": 0.2556135952472687, "learning_rate": 0.00041237529413562556, "loss": 1.8846, "step": 3192 }, { "epoch": 0.31181640625, "grad_norm": 0.1932743787765503, "learning_rate": 
0.00041231896812868096, "loss": 1.8886, "step": 3193 }, { "epoch": 0.3119140625, "grad_norm": 0.21884669363498688, "learning_rate": 0.00041226262840478805, "loss": 1.923, "step": 3194 }, { "epoch": 0.31201171875, "grad_norm": 0.2124539613723755, "learning_rate": 0.0004122062749695746, "loss": 1.8899, "step": 3195 }, { "epoch": 0.312109375, "grad_norm": 0.20314374566078186, "learning_rate": 0.0004121499078286697, "loss": 1.8547, "step": 3196 }, { "epoch": 0.31220703125, "grad_norm": 0.24776391685009003, "learning_rate": 0.0004120935269877043, "loss": 1.8759, "step": 3197 }, { "epoch": 0.3123046875, "grad_norm": 0.1929536908864975, "learning_rate": 0.0004120371324523098, "loss": 1.8746, "step": 3198 }, { "epoch": 0.31240234375, "grad_norm": 0.21577607095241547, "learning_rate": 0.00041198072422812, "loss": 1.8879, "step": 3199 }, { "epoch": 0.3125, "grad_norm": 0.19796085357666016, "learning_rate": 0.00041192430232076926, "loss": 1.8855, "step": 3200 }, { "epoch": 0.31259765625, "grad_norm": 0.24222511053085327, "learning_rate": 0.00041186786673589384, "loss": 1.9066, "step": 3201 }, { "epoch": 0.3126953125, "grad_norm": 0.24613934755325317, "learning_rate": 0.00041181141747913106, "loss": 1.8442, "step": 3202 }, { "epoch": 0.31279296875, "grad_norm": 0.2534504532814026, "learning_rate": 0.0004117549545561197, "loss": 1.8432, "step": 3203 }, { "epoch": 0.312890625, "grad_norm": 0.23667122423648834, "learning_rate": 0.0004116984779724999, "loss": 1.8551, "step": 3204 }, { "epoch": 0.31298828125, "grad_norm": 0.21642574667930603, "learning_rate": 0.0004116419877339133, "loss": 1.9147, "step": 3205 }, { "epoch": 0.3130859375, "grad_norm": 0.2664399743080139, "learning_rate": 0.0004115854838460026, "loss": 1.9004, "step": 3206 }, { "epoch": 0.31318359375, "grad_norm": 0.27308887243270874, "learning_rate": 0.00041152896631441204, "loss": 1.8451, "step": 3207 }, { "epoch": 0.31328125, "grad_norm": 0.287526398897171, "learning_rate": 0.0004114724351447873, "loss": 1.8346, "step": 3208 }, { "epoch": 0.31337890625, "grad_norm": 0.24078041315078735, "learning_rate": 0.0004114158903427754, "loss": 1.8837, "step": 3209 }, { "epoch": 0.3134765625, "grad_norm": 0.2905030846595764, "learning_rate": 0.00041135933191402456, "loss": 1.8353, "step": 3210 }, { "epoch": 0.31357421875, "grad_norm": 0.3086714446544647, "learning_rate": 0.0004113027598641845, "loss": 1.8614, "step": 3211 }, { "epoch": 0.313671875, "grad_norm": 0.21693044900894165, "learning_rate": 0.00041124617419890626, "loss": 1.8456, "step": 3212 }, { "epoch": 0.31376953125, "grad_norm": 0.2788260579109192, "learning_rate": 0.0004111895749238422, "loss": 1.8971, "step": 3213 }, { "epoch": 0.3138671875, "grad_norm": 0.24722589552402496, "learning_rate": 0.0004111329620446462, "loss": 1.8583, "step": 3214 }, { "epoch": 0.31396484375, "grad_norm": 0.19354423880577087, "learning_rate": 0.00041107633556697326, "loss": 1.8987, "step": 3215 }, { "epoch": 0.3140625, "grad_norm": 0.2138962596654892, "learning_rate": 0.00041101969549648, "loss": 1.832, "step": 3216 }, { "epoch": 0.31416015625, "grad_norm": 0.23529689013957977, "learning_rate": 0.00041096304183882416, "loss": 1.8557, "step": 3217 }, { "epoch": 0.3142578125, "grad_norm": 0.2264302521944046, "learning_rate": 0.000410906374599665, "loss": 1.9008, "step": 3218 }, { "epoch": 0.31435546875, "grad_norm": 0.27234387397766113, "learning_rate": 0.00041084969378466294, "loss": 1.851, "step": 3219 }, { "epoch": 0.314453125, "grad_norm": 0.18110591173171997, "learning_rate": 0.00041079299939948, 
"loss": 1.8876, "step": 3220 }, { "epoch": 0.31455078125, "grad_norm": 0.3096265196800232, "learning_rate": 0.0004107362914497795, "loss": 1.8731, "step": 3221 }, { "epoch": 0.3146484375, "grad_norm": 0.26124435663223267, "learning_rate": 0.000410679569941226, "loss": 1.9025, "step": 3222 }, { "epoch": 0.31474609375, "grad_norm": 0.2412269413471222, "learning_rate": 0.00041062283487948545, "loss": 1.8518, "step": 3223 }, { "epoch": 0.31484375, "grad_norm": 0.29512709379196167, "learning_rate": 0.0004105660862702252, "loss": 1.8892, "step": 3224 }, { "epoch": 0.31494140625, "grad_norm": 0.18174058198928833, "learning_rate": 0.0004105093241191139, "loss": 1.8556, "step": 3225 }, { "epoch": 0.3150390625, "grad_norm": 0.30046752095222473, "learning_rate": 0.0004104525484318217, "loss": 1.8334, "step": 3226 }, { "epoch": 0.31513671875, "grad_norm": 0.21368588507175446, "learning_rate": 0.0004103957592140199, "loss": 1.8858, "step": 3227 }, { "epoch": 0.315234375, "grad_norm": 0.32934334874153137, "learning_rate": 0.00041033895647138126, "loss": 1.8602, "step": 3228 }, { "epoch": 0.31533203125, "grad_norm": 0.25783514976501465, "learning_rate": 0.0004102821402095798, "loss": 1.875, "step": 3229 }, { "epoch": 0.3154296875, "grad_norm": 0.280341774225235, "learning_rate": 0.0004102253104342911, "loss": 1.8705, "step": 3230 }, { "epoch": 0.31552734375, "grad_norm": 0.25460684299468994, "learning_rate": 0.00041016846715119184, "loss": 1.8636, "step": 3231 }, { "epoch": 0.315625, "grad_norm": 0.28985974192619324, "learning_rate": 0.0004101116103659602, "loss": 1.85, "step": 3232 }, { "epoch": 0.31572265625, "grad_norm": 0.22553981840610504, "learning_rate": 0.00041005474008427563, "loss": 1.8803, "step": 3233 }, { "epoch": 0.3158203125, "grad_norm": 0.23986418545246124, "learning_rate": 0.0004099978563118191, "loss": 1.8554, "step": 3234 }, { "epoch": 0.31591796875, "grad_norm": 0.2207421511411667, "learning_rate": 0.0004099409590542726, "loss": 1.9089, "step": 3235 }, { "epoch": 0.316015625, "grad_norm": 0.21956925094127655, "learning_rate": 0.00040988404831731977, "loss": 1.9027, "step": 3236 }, { "epoch": 0.31611328125, "grad_norm": 0.2621534466743469, "learning_rate": 0.00040982712410664557, "loss": 1.9038, "step": 3237 }, { "epoch": 0.3162109375, "grad_norm": 0.26143181324005127, "learning_rate": 0.000409770186427936, "loss": 1.8658, "step": 3238 }, { "epoch": 0.31630859375, "grad_norm": 0.25201505422592163, "learning_rate": 0.0004097132352868788, "loss": 1.8346, "step": 3239 }, { "epoch": 0.31640625, "grad_norm": 0.20869751274585724, "learning_rate": 0.0004096562706891629, "loss": 1.8839, "step": 3240 }, { "epoch": 0.31650390625, "grad_norm": 0.26148557662963867, "learning_rate": 0.00040959929264047855, "loss": 1.8852, "step": 3241 }, { "epoch": 0.3166015625, "grad_norm": 0.2597649097442627, "learning_rate": 0.0004095423011465172, "loss": 1.8635, "step": 3242 }, { "epoch": 0.31669921875, "grad_norm": 0.24144643545150757, "learning_rate": 0.0004094852962129719, "loss": 1.8667, "step": 3243 }, { "epoch": 0.316796875, "grad_norm": 0.2437174767255783, "learning_rate": 0.00040942827784553703, "loss": 1.8658, "step": 3244 }, { "epoch": 0.31689453125, "grad_norm": 0.20527783036231995, "learning_rate": 0.00040937124604990805, "loss": 1.8821, "step": 3245 }, { "epoch": 0.3169921875, "grad_norm": 0.23653388023376465, "learning_rate": 0.00040931420083178206, "loss": 1.8754, "step": 3246 }, { "epoch": 0.31708984375, "grad_norm": 0.2562256455421448, "learning_rate": 0.0004092571421968573, "loss": 1.8726, 
"step": 3247 }, { "epoch": 0.3171875, "grad_norm": 0.21176593005657196, "learning_rate": 0.0004092000701508335, "loss": 1.8885, "step": 3248 }, { "epoch": 0.31728515625, "grad_norm": 0.22442927956581116, "learning_rate": 0.0004091429846994115, "loss": 1.8794, "step": 3249 }, { "epoch": 0.3173828125, "grad_norm": 0.20349444448947906, "learning_rate": 0.0004090858858482938, "loss": 1.8958, "step": 3250 }, { "epoch": 0.31748046875, "grad_norm": 0.2182251513004303, "learning_rate": 0.000409028773603184, "loss": 1.8684, "step": 3251 }, { "epoch": 0.317578125, "grad_norm": 0.21095684170722961, "learning_rate": 0.00040897164796978704, "loss": 1.8564, "step": 3252 }, { "epoch": 0.31767578125, "grad_norm": 0.2585778534412384, "learning_rate": 0.0004089145089538094, "loss": 1.8582, "step": 3253 }, { "epoch": 0.3177734375, "grad_norm": 0.23634901642799377, "learning_rate": 0.00040885735656095865, "loss": 1.8564, "step": 3254 }, { "epoch": 0.31787109375, "grad_norm": 0.2584446966648102, "learning_rate": 0.0004088001907969439, "loss": 1.9066, "step": 3255 }, { "epoch": 0.31796875, "grad_norm": 0.24975843727588654, "learning_rate": 0.00040874301166747535, "loss": 1.8652, "step": 3256 }, { "epoch": 0.31806640625, "grad_norm": 0.2328788936138153, "learning_rate": 0.0004086858191782649, "loss": 1.8959, "step": 3257 }, { "epoch": 0.3181640625, "grad_norm": 0.198279470205307, "learning_rate": 0.00040862861333502536, "loss": 1.8361, "step": 3258 }, { "epoch": 0.31826171875, "grad_norm": 0.21981123089790344, "learning_rate": 0.00040857139414347127, "loss": 1.8791, "step": 3259 }, { "epoch": 0.318359375, "grad_norm": 0.19764897227287292, "learning_rate": 0.0004085141616093182, "loss": 1.8708, "step": 3260 }, { "epoch": 0.31845703125, "grad_norm": 0.23870030045509338, "learning_rate": 0.0004084569157382833, "loss": 1.8774, "step": 3261 }, { "epoch": 0.3185546875, "grad_norm": 0.23177386820316315, "learning_rate": 0.00040839965653608477, "loss": 1.9043, "step": 3262 }, { "epoch": 0.31865234375, "grad_norm": 0.21266654133796692, "learning_rate": 0.00040834238400844236, "loss": 1.9, "step": 3263 }, { "epoch": 0.31875, "grad_norm": 0.20720922946929932, "learning_rate": 0.0004082850981610771, "loss": 1.8461, "step": 3264 }, { "epoch": 0.31884765625, "grad_norm": 0.18572098016738892, "learning_rate": 0.00040822779899971133, "loss": 1.8771, "step": 3265 }, { "epoch": 0.3189453125, "grad_norm": 0.1953069120645523, "learning_rate": 0.0004081704865300688, "loss": 1.9015, "step": 3266 }, { "epoch": 0.31904296875, "grad_norm": 0.19447281956672668, "learning_rate": 0.00040811316075787434, "loss": 1.8834, "step": 3267 }, { "epoch": 0.319140625, "grad_norm": 0.17827379703521729, "learning_rate": 0.00040805582168885443, "loss": 1.7889, "step": 3268 }, { "epoch": 0.31923828125, "grad_norm": 0.20843571424484253, "learning_rate": 0.0004079984693287368, "loss": 1.8655, "step": 3269 }, { "epoch": 0.3193359375, "grad_norm": 0.1796947717666626, "learning_rate": 0.00040794110368325023, "loss": 1.9053, "step": 3270 }, { "epoch": 0.31943359375, "grad_norm": 0.1886366456747055, "learning_rate": 0.0004078837247581252, "loss": 1.8844, "step": 3271 }, { "epoch": 0.31953125, "grad_norm": 0.2169746607542038, "learning_rate": 0.00040782633255909324, "loss": 1.8976, "step": 3272 }, { "epoch": 0.31962890625, "grad_norm": 0.1918516904115677, "learning_rate": 0.00040776892709188745, "loss": 1.9083, "step": 3273 }, { "epoch": 0.3197265625, "grad_norm": 0.22316913306713104, "learning_rate": 0.0004077115083622421, "loss": 1.8755, "step": 3274 }, { 
"epoch": 0.31982421875, "grad_norm": 0.19910778105258942, "learning_rate": 0.0004076540763758927, "loss": 1.8486, "step": 3275 }, { "epoch": 0.319921875, "grad_norm": 0.23334568738937378, "learning_rate": 0.0004075966311385764, "loss": 1.8551, "step": 3276 }, { "epoch": 0.32001953125, "grad_norm": 0.25610196590423584, "learning_rate": 0.00040753917265603124, "loss": 1.8898, "step": 3277 }, { "epoch": 0.3201171875, "grad_norm": 0.245743066072464, "learning_rate": 0.000407481700933997, "loss": 1.8583, "step": 3278 }, { "epoch": 0.32021484375, "grad_norm": 0.3178372383117676, "learning_rate": 0.0004074242159782145, "loss": 1.875, "step": 3279 }, { "epoch": 0.3203125, "grad_norm": 0.2785772979259491, "learning_rate": 0.000407366717794426, "loss": 1.8141, "step": 3280 }, { "epoch": 0.32041015625, "grad_norm": 0.2157028317451477, "learning_rate": 0.000407309206388375, "loss": 1.8298, "step": 3281 }, { "epoch": 0.3205078125, "grad_norm": 0.28972703218460083, "learning_rate": 0.0004072516817658065, "loss": 1.8843, "step": 3282 }, { "epoch": 0.32060546875, "grad_norm": 0.2404652088880539, "learning_rate": 0.0004071941439324666, "loss": 1.9094, "step": 3283 }, { "epoch": 0.320703125, "grad_norm": 0.2777053117752075, "learning_rate": 0.0004071365928941029, "loss": 1.8456, "step": 3284 }, { "epoch": 0.32080078125, "grad_norm": 0.2563461363315582, "learning_rate": 0.00040707902865646405, "loss": 1.8749, "step": 3285 }, { "epoch": 0.3208984375, "grad_norm": 0.27938413619995117, "learning_rate": 0.00040702145122530054, "loss": 1.8811, "step": 3286 }, { "epoch": 0.32099609375, "grad_norm": 0.3102744519710541, "learning_rate": 0.00040696386060636356, "loss": 1.8909, "step": 3287 }, { "epoch": 0.32109375, "grad_norm": 0.23900067806243896, "learning_rate": 0.000406906256805406, "loss": 1.8604, "step": 3288 }, { "epoch": 0.32119140625, "grad_norm": 0.30121737718582153, "learning_rate": 0.000406848639828182, "loss": 1.8719, "step": 3289 }, { "epoch": 0.3212890625, "grad_norm": 0.25247108936309814, "learning_rate": 0.0004067910096804469, "loss": 1.8831, "step": 3290 }, { "epoch": 0.32138671875, "grad_norm": 0.32374677062034607, "learning_rate": 0.0004067333663679576, "loss": 1.8384, "step": 3291 }, { "epoch": 0.321484375, "grad_norm": 0.2805159091949463, "learning_rate": 0.00040667570989647196, "loss": 1.8153, "step": 3292 }, { "epoch": 0.32158203125, "grad_norm": 0.1982172280550003, "learning_rate": 0.00040661804027174945, "loss": 1.8686, "step": 3293 }, { "epoch": 0.3216796875, "grad_norm": 0.2745567560195923, "learning_rate": 0.0004065603574995508, "loss": 1.8745, "step": 3294 }, { "epoch": 0.32177734375, "grad_norm": 0.2640945315361023, "learning_rate": 0.000406502661585638, "loss": 1.8627, "step": 3295 }, { "epoch": 0.321875, "grad_norm": 0.21688511967658997, "learning_rate": 0.00040644495253577416, "loss": 1.8522, "step": 3296 }, { "epoch": 0.32197265625, "grad_norm": 0.22432774305343628, "learning_rate": 0.0004063872303557241, "loss": 1.8475, "step": 3297 }, { "epoch": 0.3220703125, "grad_norm": 0.27366623282432556, "learning_rate": 0.0004063294950512538, "loss": 1.8898, "step": 3298 }, { "epoch": 0.32216796875, "grad_norm": 0.23350487649440765, "learning_rate": 0.0004062717466281304, "loss": 1.8491, "step": 3299 }, { "epoch": 0.322265625, "grad_norm": 0.18709498643875122, "learning_rate": 0.0004062139850921225, "loss": 1.886, "step": 3300 }, { "epoch": 0.32236328125, "grad_norm": 0.23382367193698883, "learning_rate": 0.0004061562104489999, "loss": 1.8308, "step": 3301 }, { "epoch": 0.3224609375, 
"grad_norm": 0.22870516777038574, "learning_rate": 0.00040609842270453373, "loss": 1.8958, "step": 3302 }, { "epoch": 0.32255859375, "grad_norm": 0.20954212546348572, "learning_rate": 0.0004060406218644966, "loss": 1.8669, "step": 3303 }, { "epoch": 0.32265625, "grad_norm": 0.1893022060394287, "learning_rate": 0.00040598280793466236, "loss": 1.8881, "step": 3304 }, { "epoch": 0.32275390625, "grad_norm": 0.229387104511261, "learning_rate": 0.00040592498092080595, "loss": 1.8944, "step": 3305 }, { "epoch": 0.3228515625, "grad_norm": 0.22653591632843018, "learning_rate": 0.00040586714082870386, "loss": 1.8296, "step": 3306 }, { "epoch": 0.32294921875, "grad_norm": 0.18969039618968964, "learning_rate": 0.00040580928766413376, "loss": 1.8369, "step": 3307 }, { "epoch": 0.323046875, "grad_norm": 0.214459627866745, "learning_rate": 0.00040575142143287464, "loss": 1.8786, "step": 3308 }, { "epoch": 0.32314453125, "grad_norm": 0.20712082087993622, "learning_rate": 0.00040569354214070694, "loss": 1.8364, "step": 3309 }, { "epoch": 0.3232421875, "grad_norm": 0.20017264783382416, "learning_rate": 0.00040563564979341217, "loss": 1.8847, "step": 3310 }, { "epoch": 0.32333984375, "grad_norm": 0.20339415967464447, "learning_rate": 0.00040557774439677334, "loss": 1.829, "step": 3311 }, { "epoch": 0.3234375, "grad_norm": 0.2498069703578949, "learning_rate": 0.00040551982595657464, "loss": 1.8968, "step": 3312 }, { "epoch": 0.32353515625, "grad_norm": 0.1993158906698227, "learning_rate": 0.0004054618944786017, "loss": 1.8516, "step": 3313 }, { "epoch": 0.3236328125, "grad_norm": 0.21111157536506653, "learning_rate": 0.0004054039499686412, "loss": 1.8919, "step": 3314 }, { "epoch": 0.32373046875, "grad_norm": 0.20167800784111023, "learning_rate": 0.0004053459924324814, "loss": 1.863, "step": 3315 }, { "epoch": 0.323828125, "grad_norm": 0.26367267966270447, "learning_rate": 0.00040528802187591174, "loss": 1.8788, "step": 3316 }, { "epoch": 0.32392578125, "grad_norm": 0.18443547189235687, "learning_rate": 0.0004052300383047229, "loss": 1.8753, "step": 3317 }, { "epoch": 0.3240234375, "grad_norm": 0.20379985868930817, "learning_rate": 0.000405172041724707, "loss": 1.8347, "step": 3318 }, { "epoch": 0.32412109375, "grad_norm": 0.1891467273235321, "learning_rate": 0.0004051140321416574, "loss": 1.8697, "step": 3319 }, { "epoch": 0.32421875, "grad_norm": 0.19938014447689056, "learning_rate": 0.0004050560095613686, "loss": 1.9285, "step": 3320 }, { "epoch": 0.32431640625, "grad_norm": 0.23373723030090332, "learning_rate": 0.0004049979739896367, "loss": 1.9126, "step": 3321 }, { "epoch": 0.3244140625, "grad_norm": 0.199408158659935, "learning_rate": 0.00040493992543225887, "loss": 1.8732, "step": 3322 }, { "epoch": 0.32451171875, "grad_norm": 0.2107379138469696, "learning_rate": 0.00040488186389503364, "loss": 1.8799, "step": 3323 }, { "epoch": 0.324609375, "grad_norm": 0.1870202124118805, "learning_rate": 0.00040482378938376084, "loss": 1.8878, "step": 3324 }, { "epoch": 0.32470703125, "grad_norm": 0.19757448136806488, "learning_rate": 0.0004047657019042416, "loss": 1.8622, "step": 3325 }, { "epoch": 0.3248046875, "grad_norm": 0.21575550734996796, "learning_rate": 0.0004047076014622784, "loss": 1.82, "step": 3326 }, { "epoch": 0.32490234375, "grad_norm": 0.2778875231742859, "learning_rate": 0.00040464948806367496, "loss": 1.8722, "step": 3327 }, { "epoch": 0.325, "grad_norm": 0.27637016773223877, "learning_rate": 0.00040459136171423624, "loss": 1.9002, "step": 3328 }, { "epoch": 0.32509765625, "grad_norm": 
0.2065366804599762, "learning_rate": 0.0004045332224197685, "loss": 1.8722, "step": 3329 }, { "epoch": 0.3251953125, "grad_norm": 0.23534739017486572, "learning_rate": 0.00040447507018607945, "loss": 1.8721, "step": 3330 }, { "epoch": 0.32529296875, "grad_norm": 0.2959147095680237, "learning_rate": 0.0004044169050189779, "loss": 1.8888, "step": 3331 }, { "epoch": 0.325390625, "grad_norm": 0.2515638470649719, "learning_rate": 0.00040435872692427404, "loss": 1.8613, "step": 3332 }, { "epoch": 0.32548828125, "grad_norm": 0.2147361934185028, "learning_rate": 0.00040430053590777943, "loss": 1.8826, "step": 3333 }, { "epoch": 0.3255859375, "grad_norm": 0.26969170570373535, "learning_rate": 0.0004042423319753068, "loss": 1.8766, "step": 3334 }, { "epoch": 0.32568359375, "grad_norm": 0.20632268488407135, "learning_rate": 0.0004041841151326702, "loss": 1.859, "step": 3335 }, { "epoch": 0.32578125, "grad_norm": 0.21272492408752441, "learning_rate": 0.0004041258853856849, "loss": 1.8988, "step": 3336 }, { "epoch": 0.32587890625, "grad_norm": 0.18938429653644562, "learning_rate": 0.0004040676427401676, "loss": 1.8779, "step": 3337 }, { "epoch": 0.3259765625, "grad_norm": 0.21298819780349731, "learning_rate": 0.00040400938720193625, "loss": 1.9124, "step": 3338 }, { "epoch": 0.32607421875, "grad_norm": 0.24898596107959747, "learning_rate": 0.00040395111877681005, "loss": 1.8726, "step": 3339 }, { "epoch": 0.326171875, "grad_norm": 0.2002260535955429, "learning_rate": 0.0004038928374706095, "loss": 1.9205, "step": 3340 }, { "epoch": 0.32626953125, "grad_norm": 0.2754443287849426, "learning_rate": 0.0004038345432891564, "loss": 1.9182, "step": 3341 }, { "epoch": 0.3263671875, "grad_norm": 0.22315667569637299, "learning_rate": 0.0004037762362382737, "loss": 1.8561, "step": 3342 }, { "epoch": 0.32646484375, "grad_norm": 0.22198979556560516, "learning_rate": 0.0004037179163237859, "loss": 1.8672, "step": 3343 }, { "epoch": 0.3265625, "grad_norm": 0.19733594357967377, "learning_rate": 0.0004036595835515186, "loss": 1.8658, "step": 3344 }, { "epoch": 0.32666015625, "grad_norm": 0.213151216506958, "learning_rate": 0.00040360123792729875, "loss": 1.8732, "step": 3345 }, { "epoch": 0.3267578125, "grad_norm": 0.19012810289859772, "learning_rate": 0.0004035428794569545, "loss": 1.8884, "step": 3346 }, { "epoch": 0.32685546875, "grad_norm": 0.23143506050109863, "learning_rate": 0.0004034845081463154, "loss": 1.878, "step": 3347 }, { "epoch": 0.326953125, "grad_norm": 0.26487472653388977, "learning_rate": 0.0004034261240012122, "loss": 1.8587, "step": 3348 }, { "epoch": 0.32705078125, "grad_norm": 0.1884104460477829, "learning_rate": 0.000403367727027477, "loss": 1.9078, "step": 3349 }, { "epoch": 0.3271484375, "grad_norm": 0.2410648763179779, "learning_rate": 0.00040330931723094304, "loss": 1.8787, "step": 3350 }, { "epoch": 0.32724609375, "grad_norm": 0.20756936073303223, "learning_rate": 0.00040325089461744503, "loss": 1.8461, "step": 3351 }, { "epoch": 0.32734375, "grad_norm": 0.22132349014282227, "learning_rate": 0.0004031924591928189, "loss": 1.8745, "step": 3352 }, { "epoch": 0.32744140625, "grad_norm": 0.26123854517936707, "learning_rate": 0.0004031340109629017, "loss": 1.8589, "step": 3353 }, { "epoch": 0.3275390625, "grad_norm": 0.29072481393814087, "learning_rate": 0.000403075549933532, "loss": 1.866, "step": 3354 }, { "epoch": 0.32763671875, "grad_norm": 0.18452265858650208, "learning_rate": 0.0004030170761105495, "loss": 1.8764, "step": 3355 }, { "epoch": 0.327734375, "grad_norm": 0.28800269961357117, 
"learning_rate": 0.00040295858949979525, "loss": 1.8572, "step": 3356 }, { "epoch": 0.32783203125, "grad_norm": 0.21224476397037506, "learning_rate": 0.00040290009010711153, "loss": 1.8734, "step": 3357 }, { "epoch": 0.3279296875, "grad_norm": 0.4747043251991272, "learning_rate": 0.00040284157793834185, "loss": 1.8632, "step": 3358 }, { "epoch": 0.32802734375, "grad_norm": 0.2504129409790039, "learning_rate": 0.0004027830529993312, "loss": 1.92, "step": 3359 }, { "epoch": 0.328125, "grad_norm": 0.25447171926498413, "learning_rate": 0.0004027245152959256, "loss": 1.8547, "step": 3360 }, { "epoch": 0.32822265625, "grad_norm": 0.2585792541503906, "learning_rate": 0.00040266596483397234, "loss": 1.8402, "step": 3361 }, { "epoch": 0.3283203125, "grad_norm": 0.25113922357559204, "learning_rate": 0.00040260740161932024, "loss": 1.8564, "step": 3362 }, { "epoch": 0.32841796875, "grad_norm": 0.24170421063899994, "learning_rate": 0.0004025488256578193, "loss": 1.8804, "step": 3363 }, { "epoch": 0.328515625, "grad_norm": 0.20581528544425964, "learning_rate": 0.0004024902369553207, "loss": 1.8429, "step": 3364 }, { "epoch": 0.32861328125, "grad_norm": 0.24743464589118958, "learning_rate": 0.0004024316355176768, "loss": 1.8638, "step": 3365 }, { "epoch": 0.3287109375, "grad_norm": 0.1992998570203781, "learning_rate": 0.0004023730213507414, "loss": 1.852, "step": 3366 }, { "epoch": 0.32880859375, "grad_norm": 0.29643985629081726, "learning_rate": 0.00040231439446036967, "loss": 1.8801, "step": 3367 }, { "epoch": 0.32890625, "grad_norm": 0.19637344777584076, "learning_rate": 0.00040225575485241784, "loss": 1.9142, "step": 3368 }, { "epoch": 0.32900390625, "grad_norm": 0.3350497782230377, "learning_rate": 0.00040219710253274344, "loss": 1.8794, "step": 3369 }, { "epoch": 0.3291015625, "grad_norm": 0.33154067397117615, "learning_rate": 0.00040213843750720534, "loss": 1.8343, "step": 3370 }, { "epoch": 0.32919921875, "grad_norm": 0.34300824999809265, "learning_rate": 0.00040207975978166376, "loss": 1.8755, "step": 3371 }, { "epoch": 0.329296875, "grad_norm": 0.21365049481391907, "learning_rate": 0.00040202106936197994, "loss": 1.8392, "step": 3372 }, { "epoch": 0.32939453125, "grad_norm": 0.3048967123031616, "learning_rate": 0.00040196236625401666, "loss": 1.9099, "step": 3373 }, { "epoch": 0.3294921875, "grad_norm": 0.30764639377593994, "learning_rate": 0.00040190365046363776, "loss": 1.865, "step": 3374 }, { "epoch": 0.32958984375, "grad_norm": 0.2561836540699005, "learning_rate": 0.0004018449219967084, "loss": 1.8128, "step": 3375 }, { "epoch": 0.3296875, "grad_norm": 0.26492762565612793, "learning_rate": 0.0004017861808590951, "loss": 1.8447, "step": 3376 }, { "epoch": 0.32978515625, "grad_norm": 0.2783060073852539, "learning_rate": 0.00040172742705666553, "loss": 1.86, "step": 3377 }, { "epoch": 0.3298828125, "grad_norm": 0.2328444868326187, "learning_rate": 0.0004016686605952888, "loss": 1.893, "step": 3378 }, { "epoch": 0.32998046875, "grad_norm": 0.18817414343357086, "learning_rate": 0.0004016098814808349, "loss": 1.8713, "step": 3379 }, { "epoch": 0.330078125, "grad_norm": 0.22875960171222687, "learning_rate": 0.00040155108971917566, "loss": 1.882, "step": 3380 }, { "epoch": 0.33017578125, "grad_norm": 0.20584222674369812, "learning_rate": 0.00040149228531618364, "loss": 1.8385, "step": 3381 }, { "epoch": 0.3302734375, "grad_norm": 0.22708621621131897, "learning_rate": 0.00040143346827773297, "loss": 1.8348, "step": 3382 }, { "epoch": 0.33037109375, "grad_norm": 0.27723783254623413, 
"learning_rate": 0.0004013746386096988, "loss": 1.878, "step": 3383 }, { "epoch": 0.33046875, "grad_norm": 0.2413671463727951, "learning_rate": 0.0004013157963179579, "loss": 1.8662, "step": 3384 }, { "epoch": 0.33056640625, "grad_norm": 0.21263569593429565, "learning_rate": 0.00040125694140838805, "loss": 1.8447, "step": 3385 }, { "epoch": 0.3306640625, "grad_norm": 0.2437567114830017, "learning_rate": 0.0004011980738868682, "loss": 1.8617, "step": 3386 }, { "epoch": 0.33076171875, "grad_norm": 0.2796415686607361, "learning_rate": 0.00040113919375927887, "loss": 1.9222, "step": 3387 }, { "epoch": 0.330859375, "grad_norm": 0.22173389792442322, "learning_rate": 0.00040108030103150143, "loss": 1.8942, "step": 3388 }, { "epoch": 0.33095703125, "grad_norm": 0.23705333471298218, "learning_rate": 0.000401021395709419, "loss": 1.8621, "step": 3389 }, { "epoch": 0.3310546875, "grad_norm": 0.2618677020072937, "learning_rate": 0.0004009624777989156, "loss": 1.9274, "step": 3390 }, { "epoch": 0.33115234375, "grad_norm": 0.23887130618095398, "learning_rate": 0.00040090354730587654, "loss": 1.8938, "step": 3391 }, { "epoch": 0.33125, "grad_norm": 0.2295779585838318, "learning_rate": 0.0004008446042361885, "loss": 1.8543, "step": 3392 }, { "epoch": 0.33134765625, "grad_norm": 0.21946710348129272, "learning_rate": 0.0004007856485957394, "loss": 1.8599, "step": 3393 }, { "epoch": 0.3314453125, "grad_norm": 0.22274813055992126, "learning_rate": 0.00040072668039041844, "loss": 1.8412, "step": 3394 }, { "epoch": 0.33154296875, "grad_norm": 0.22093729674816132, "learning_rate": 0.0004006676996261159, "loss": 1.8838, "step": 3395 }, { "epoch": 0.331640625, "grad_norm": 0.20294345915317535, "learning_rate": 0.0004006087063087235, "loss": 1.8899, "step": 3396 }, { "epoch": 0.33173828125, "grad_norm": 0.19039151072502136, "learning_rate": 0.00040054970044413405, "loss": 1.9169, "step": 3397 }, { "epoch": 0.3318359375, "grad_norm": 0.2277788668870926, "learning_rate": 0.00040049068203824184, "loss": 1.8882, "step": 3398 }, { "epoch": 0.33193359375, "grad_norm": 0.22854270040988922, "learning_rate": 0.00040043165109694223, "loss": 1.8717, "step": 3399 }, { "epoch": 0.33203125, "grad_norm": 0.21665067970752716, "learning_rate": 0.00040037260762613194, "loss": 1.8951, "step": 3400 }, { "epoch": 0.33212890625, "grad_norm": 0.1944616436958313, "learning_rate": 0.00040031355163170886, "loss": 1.8353, "step": 3401 }, { "epoch": 0.3322265625, "grad_norm": 0.20269887149333954, "learning_rate": 0.0004002544831195721, "loss": 1.8445, "step": 3402 }, { "epoch": 0.33232421875, "grad_norm": 0.19559110701084137, "learning_rate": 0.00040019540209562215, "loss": 1.8275, "step": 3403 }, { "epoch": 0.332421875, "grad_norm": 0.215722918510437, "learning_rate": 0.0004001363085657606, "loss": 1.9083, "step": 3404 }, { "epoch": 0.33251953125, "grad_norm": 0.24516254663467407, "learning_rate": 0.00040007720253589055, "loss": 1.8533, "step": 3405 }, { "epoch": 0.3326171875, "grad_norm": 0.2360994964838028, "learning_rate": 0.000400018084011916, "loss": 1.8852, "step": 3406 }, { "epoch": 0.33271484375, "grad_norm": 0.20243117213249207, "learning_rate": 0.0003999589529997424, "loss": 1.8589, "step": 3407 }, { "epoch": 0.3328125, "grad_norm": 0.27010393142700195, "learning_rate": 0.00039989980950527645, "loss": 1.8809, "step": 3408 }, { "epoch": 0.33291015625, "grad_norm": 0.309101402759552, "learning_rate": 0.000399840653534426, "loss": 1.8543, "step": 3409 }, { "epoch": 0.3330078125, "grad_norm": 0.21303214132785797, "learning_rate": 
0.00039978148509310033, "loss": 1.8663, "step": 3410 }, { "epoch": 0.33310546875, "grad_norm": 0.21366028487682343, "learning_rate": 0.0003997223041872097, "loss": 1.88, "step": 3411 }, { "epoch": 0.333203125, "grad_norm": 0.24959337711334229, "learning_rate": 0.0003996631108226658, "loss": 1.8789, "step": 3412 }, { "epoch": 0.33330078125, "grad_norm": 0.2526727616786957, "learning_rate": 0.00039960390500538156, "loss": 1.8924, "step": 3413 }, { "epoch": 0.3333984375, "grad_norm": 0.1968771517276764, "learning_rate": 0.00039954468674127116, "loss": 1.8682, "step": 3414 }, { "epoch": 0.33349609375, "grad_norm": 0.22142840921878815, "learning_rate": 0.0003994854560362499, "loss": 1.8676, "step": 3415 }, { "epoch": 0.33359375, "grad_norm": 0.26841989159584045, "learning_rate": 0.00039942621289623437, "loss": 1.8776, "step": 3416 }, { "epoch": 0.33369140625, "grad_norm": 0.19234198331832886, "learning_rate": 0.0003993669573271426, "loss": 1.8561, "step": 3417 }, { "epoch": 0.3337890625, "grad_norm": 0.22837333381175995, "learning_rate": 0.0003993076893348935, "loss": 1.901, "step": 3418 }, { "epoch": 0.33388671875, "grad_norm": 0.2666394114494324, "learning_rate": 0.0003992484089254075, "loss": 1.894, "step": 3419 }, { "epoch": 0.333984375, "grad_norm": 0.21690601110458374, "learning_rate": 0.0003991891161046062, "loss": 1.8313, "step": 3420 }, { "epoch": 0.33408203125, "grad_norm": 0.2514253556728363, "learning_rate": 0.0003991298108784125, "loss": 1.8744, "step": 3421 }, { "epoch": 0.3341796875, "grad_norm": 0.21778957545757294, "learning_rate": 0.0003990704932527502, "loss": 1.84, "step": 3422 }, { "epoch": 0.33427734375, "grad_norm": 0.25223058462142944, "learning_rate": 0.00039901116323354496, "loss": 1.8226, "step": 3423 }, { "epoch": 0.334375, "grad_norm": 0.21998263895511627, "learning_rate": 0.0003989518208267231, "loss": 1.87, "step": 3424 }, { "epoch": 0.33447265625, "grad_norm": 0.21188127994537354, "learning_rate": 0.00039889246603821244, "loss": 1.8484, "step": 3425 }, { "epoch": 0.3345703125, "grad_norm": 0.18874864280223846, "learning_rate": 0.000398833098873942, "loss": 1.87, "step": 3426 }, { "epoch": 0.33466796875, "grad_norm": 0.22441589832305908, "learning_rate": 0.00039877371933984214, "loss": 1.8692, "step": 3427 }, { "epoch": 0.334765625, "grad_norm": 0.2002934217453003, "learning_rate": 0.0003987143274418441, "loss": 1.8813, "step": 3428 }, { "epoch": 0.33486328125, "grad_norm": 0.1878109574317932, "learning_rate": 0.0003986549231858808, "loss": 1.8756, "step": 3429 }, { "epoch": 0.3349609375, "grad_norm": 0.19219312071800232, "learning_rate": 0.00039859550657788615, "loss": 1.8489, "step": 3430 }, { "epoch": 0.33505859375, "grad_norm": 0.2394527792930603, "learning_rate": 0.0003985360776237955, "loss": 1.8379, "step": 3431 }, { "epoch": 0.33515625, "grad_norm": 0.25712698698043823, "learning_rate": 0.00039847663632954503, "loss": 1.888, "step": 3432 }, { "epoch": 0.33525390625, "grad_norm": 0.20241060853004456, "learning_rate": 0.00039841718270107246, "loss": 1.8531, "step": 3433 }, { "epoch": 0.3353515625, "grad_norm": 0.19888858497142792, "learning_rate": 0.00039835771674431674, "loss": 1.8525, "step": 3434 }, { "epoch": 0.33544921875, "grad_norm": 0.19226659834384918, "learning_rate": 0.00039829823846521795, "loss": 1.8784, "step": 3435 }, { "epoch": 0.335546875, "grad_norm": 0.22404858469963074, "learning_rate": 0.00039823874786971747, "loss": 1.8587, "step": 3436 }, { "epoch": 0.33564453125, "grad_norm": 0.2008732706308365, "learning_rate": 0.0003981792449637579, 
"loss": 1.8372, "step": 3437 }, { "epoch": 0.3357421875, "grad_norm": 0.21434251964092255, "learning_rate": 0.00039811972975328303, "loss": 1.886, "step": 3438 }, { "epoch": 0.33583984375, "grad_norm": 0.20970679819583893, "learning_rate": 0.0003980602022442379, "loss": 1.9008, "step": 3439 }, { "epoch": 0.3359375, "grad_norm": 0.25312310457229614, "learning_rate": 0.00039800066244256876, "loss": 1.8862, "step": 3440 }, { "epoch": 0.33603515625, "grad_norm": 0.21453332901000977, "learning_rate": 0.00039794111035422317, "loss": 1.8772, "step": 3441 }, { "epoch": 0.3361328125, "grad_norm": 0.22672387957572937, "learning_rate": 0.00039788154598514977, "loss": 1.8699, "step": 3442 }, { "epoch": 0.33623046875, "grad_norm": 0.21547792851924896, "learning_rate": 0.0003978219693412985, "loss": 1.9035, "step": 3443 }, { "epoch": 0.336328125, "grad_norm": 0.23252512514591217, "learning_rate": 0.0003977623804286207, "loss": 1.8595, "step": 3444 }, { "epoch": 0.33642578125, "grad_norm": 0.29749032855033875, "learning_rate": 0.0003977027792530687, "loss": 1.8509, "step": 3445 }, { "epoch": 0.3365234375, "grad_norm": 0.29424184560775757, "learning_rate": 0.00039764316582059596, "loss": 1.8583, "step": 3446 }, { "epoch": 0.33662109375, "grad_norm": 0.23205117881298065, "learning_rate": 0.00039758354013715757, "loss": 1.8871, "step": 3447 }, { "epoch": 0.33671875, "grad_norm": 0.23557694256305695, "learning_rate": 0.0003975239022087095, "loss": 1.851, "step": 3448 }, { "epoch": 0.33681640625, "grad_norm": 0.2064058780670166, "learning_rate": 0.00039746425204120907, "loss": 1.866, "step": 3449 }, { "epoch": 0.3369140625, "grad_norm": 0.19623836874961853, "learning_rate": 0.0003974045896406148, "loss": 1.8494, "step": 3450 }, { "epoch": 0.33701171875, "grad_norm": 0.20967812836170197, "learning_rate": 0.00039734491501288637, "loss": 1.8672, "step": 3451 }, { "epoch": 0.337109375, "grad_norm": 0.2055741846561432, "learning_rate": 0.00039728522816398487, "loss": 1.8739, "step": 3452 }, { "epoch": 0.33720703125, "grad_norm": 0.1989482045173645, "learning_rate": 0.00039722552909987244, "loss": 1.8595, "step": 3453 }, { "epoch": 0.3373046875, "grad_norm": 0.22619061172008514, "learning_rate": 0.00039716581782651245, "loss": 1.8641, "step": 3454 }, { "epoch": 0.33740234375, "grad_norm": 0.20724697411060333, "learning_rate": 0.00039710609434986954, "loss": 1.866, "step": 3455 }, { "epoch": 0.3375, "grad_norm": 0.20163816213607788, "learning_rate": 0.0003970463586759095, "loss": 1.8354, "step": 3456 }, { "epoch": 0.33759765625, "grad_norm": 0.22491896152496338, "learning_rate": 0.00039698661081059963, "loss": 1.8648, "step": 3457 }, { "epoch": 0.3376953125, "grad_norm": 0.19193711876869202, "learning_rate": 0.0003969268507599079, "loss": 1.8797, "step": 3458 }, { "epoch": 0.33779296875, "grad_norm": 0.2137691229581833, "learning_rate": 0.00039686707852980405, "loss": 1.862, "step": 3459 }, { "epoch": 0.337890625, "grad_norm": 0.2840065062046051, "learning_rate": 0.00039680729412625867, "loss": 1.8528, "step": 3460 }, { "epoch": 0.33798828125, "grad_norm": 0.22890503704547882, "learning_rate": 0.0003967474975552436, "loss": 1.8681, "step": 3461 }, { "epoch": 0.3380859375, "grad_norm": 0.20640310645103455, "learning_rate": 0.0003966876888227323, "loss": 1.8319, "step": 3462 }, { "epoch": 0.33818359375, "grad_norm": 0.20327606797218323, "learning_rate": 0.00039662786793469887, "loss": 1.8788, "step": 3463 }, { "epoch": 0.33828125, "grad_norm": 0.26422062516212463, "learning_rate": 0.0003965680348971189, "loss": 
1.8525, "step": 3464 }, { "epoch": 0.33837890625, "grad_norm": 0.23500823974609375, "learning_rate": 0.0003965081897159693, "loss": 1.8777, "step": 3465 }, { "epoch": 0.3384765625, "grad_norm": 0.19631804525852203, "learning_rate": 0.000396448332397228, "loss": 1.8702, "step": 3466 }, { "epoch": 0.33857421875, "grad_norm": 0.21247118711471558, "learning_rate": 0.0003963884629468742, "loss": 1.8706, "step": 3467 }, { "epoch": 0.338671875, "grad_norm": 0.23746398091316223, "learning_rate": 0.0003963285813708884, "loss": 1.8526, "step": 3468 }, { "epoch": 0.33876953125, "grad_norm": 0.2166304886341095, "learning_rate": 0.00039626868767525227, "loss": 1.8407, "step": 3469 }, { "epoch": 0.3388671875, "grad_norm": 0.18043971061706543, "learning_rate": 0.00039620878186594845, "loss": 1.8535, "step": 3470 }, { "epoch": 0.33896484375, "grad_norm": 0.20053336024284363, "learning_rate": 0.0003961488639489612, "loss": 1.7842, "step": 3471 }, { "epoch": 0.3390625, "grad_norm": 0.22193653881549835, "learning_rate": 0.0003960889339302756, "loss": 1.8312, "step": 3472 }, { "epoch": 0.33916015625, "grad_norm": 0.20127463340759277, "learning_rate": 0.00039602899181587843, "loss": 1.9094, "step": 3473 }, { "epoch": 0.3392578125, "grad_norm": 0.23692427575588226, "learning_rate": 0.0003959690376117571, "loss": 1.9139, "step": 3474 }, { "epoch": 0.33935546875, "grad_norm": 0.23256926238536835, "learning_rate": 0.0003959090713239006, "loss": 1.8432, "step": 3475 }, { "epoch": 0.339453125, "grad_norm": 0.1989685297012329, "learning_rate": 0.00039584909295829914, "loss": 1.8473, "step": 3476 }, { "epoch": 0.33955078125, "grad_norm": 0.2824874222278595, "learning_rate": 0.0003957891025209438, "loss": 1.9134, "step": 3477 }, { "epoch": 0.3396484375, "grad_norm": 0.293108731508255, "learning_rate": 0.0003957291000178273, "loss": 1.8542, "step": 3478 }, { "epoch": 0.33974609375, "grad_norm": 0.26237407326698303, "learning_rate": 0.00039566908545494317, "loss": 1.8847, "step": 3479 }, { "epoch": 0.33984375, "grad_norm": 0.2476871907711029, "learning_rate": 0.0003956090588382865, "loss": 1.8712, "step": 3480 }, { "epoch": 0.33994140625, "grad_norm": 0.29409369826316833, "learning_rate": 0.00039554902017385334, "loss": 1.8622, "step": 3481 }, { "epoch": 0.3400390625, "grad_norm": 0.24744457006454468, "learning_rate": 0.0003954889694676411, "loss": 1.8819, "step": 3482 }, { "epoch": 0.34013671875, "grad_norm": 0.2826077342033386, "learning_rate": 0.00039542890672564815, "loss": 1.8725, "step": 3483 }, { "epoch": 0.340234375, "grad_norm": 0.2639237940311432, "learning_rate": 0.00039536883195387434, "loss": 1.8544, "step": 3484 }, { "epoch": 0.34033203125, "grad_norm": 0.20325662195682526, "learning_rate": 0.0003953087451583206, "loss": 1.8691, "step": 3485 }, { "epoch": 0.3404296875, "grad_norm": 0.23570391535758972, "learning_rate": 0.00039524864634498904, "loss": 1.8841, "step": 3486 }, { "epoch": 0.34052734375, "grad_norm": 0.2665724754333496, "learning_rate": 0.00039518853551988303, "loss": 1.8977, "step": 3487 }, { "epoch": 0.340625, "grad_norm": 0.22694428265094757, "learning_rate": 0.0003951284126890071, "loss": 1.89, "step": 3488 }, { "epoch": 0.34072265625, "grad_norm": 0.20151232182979584, "learning_rate": 0.000395068277858367, "loss": 1.8981, "step": 3489 }, { "epoch": 0.3408203125, "grad_norm": 0.25018495321273804, "learning_rate": 0.0003950081310339697, "loss": 1.8307, "step": 3490 }, { "epoch": 0.34091796875, "grad_norm": 0.22320601344108582, "learning_rate": 0.00039494797222182316, "loss": 1.8672, "step": 
3491 }, { "epoch": 0.341015625, "grad_norm": 0.23789243400096893, "learning_rate": 0.00039488780142793694, "loss": 1.8801, "step": 3492 }, { "epoch": 0.34111328125, "grad_norm": 0.21100810170173645, "learning_rate": 0.0003948276186583214, "loss": 1.8492, "step": 3493 }, { "epoch": 0.3412109375, "grad_norm": 0.20523199439048767, "learning_rate": 0.00039476742391898837, "loss": 1.901, "step": 3494 }, { "epoch": 0.34130859375, "grad_norm": 0.20760668814182281, "learning_rate": 0.0003947072172159507, "loss": 1.8568, "step": 3495 }, { "epoch": 0.34140625, "grad_norm": 0.25376006960868835, "learning_rate": 0.0003946469985552226, "loss": 1.8542, "step": 3496 }, { "epoch": 0.34150390625, "grad_norm": 0.20873723924160004, "learning_rate": 0.0003945867679428193, "loss": 1.8735, "step": 3497 }, { "epoch": 0.3416015625, "grad_norm": 0.20614324510097504, "learning_rate": 0.0003945265253847573, "loss": 1.8462, "step": 3498 }, { "epoch": 0.34169921875, "grad_norm": 0.22653350234031677, "learning_rate": 0.0003944662708870544, "loss": 1.8914, "step": 3499 }, { "epoch": 0.341796875, "grad_norm": 0.18062424659729004, "learning_rate": 0.0003944060044557293, "loss": 1.8581, "step": 3500 }, { "epoch": 0.34189453125, "grad_norm": 0.22148776054382324, "learning_rate": 0.00039434572609680225, "loss": 1.8843, "step": 3501 }, { "epoch": 0.3419921875, "grad_norm": 0.19227011501789093, "learning_rate": 0.0003942854358162945, "loss": 1.8677, "step": 3502 }, { "epoch": 0.34208984375, "grad_norm": 0.20412443578243256, "learning_rate": 0.00039422513362022844, "loss": 1.835, "step": 3503 }, { "epoch": 0.3421875, "grad_norm": 0.19800935685634613, "learning_rate": 0.0003941648195146278, "loss": 1.9221, "step": 3504 }, { "epoch": 0.34228515625, "grad_norm": 0.2127007693052292, "learning_rate": 0.00039410449350551737, "loss": 1.8946, "step": 3505 }, { "epoch": 0.3423828125, "grad_norm": 0.22856412827968597, "learning_rate": 0.00039404415559892326, "loss": 1.8817, "step": 3506 }, { "epoch": 0.34248046875, "grad_norm": 0.18088886141777039, "learning_rate": 0.0003939838058008726, "loss": 1.8779, "step": 3507 }, { "epoch": 0.342578125, "grad_norm": 0.19799447059631348, "learning_rate": 0.0003939234441173938, "loss": 1.8329, "step": 3508 }, { "epoch": 0.34267578125, "grad_norm": 0.205015167593956, "learning_rate": 0.00039386307055451654, "loss": 1.9022, "step": 3509 }, { "epoch": 0.3427734375, "grad_norm": 0.19568204879760742, "learning_rate": 0.0003938026851182716, "loss": 1.8265, "step": 3510 }, { "epoch": 0.34287109375, "grad_norm": 0.1977141946554184, "learning_rate": 0.0003937422878146909, "loss": 1.8974, "step": 3511 }, { "epoch": 0.34296875, "grad_norm": 0.24509556591510773, "learning_rate": 0.0003936818786498076, "loss": 1.8637, "step": 3512 }, { "epoch": 0.34306640625, "grad_norm": 0.18602833151817322, "learning_rate": 0.000393621457629656, "loss": 1.8834, "step": 3513 }, { "epoch": 0.3431640625, "grad_norm": 0.25452902913093567, "learning_rate": 0.00039356102476027175, "loss": 1.7987, "step": 3514 }, { "epoch": 0.34326171875, "grad_norm": 0.2689136266708374, "learning_rate": 0.0003935005800476914, "loss": 1.8608, "step": 3515 }, { "epoch": 0.343359375, "grad_norm": 0.17565156519412994, "learning_rate": 0.000393440123497953, "loss": 1.8781, "step": 3516 }, { "epoch": 0.34345703125, "grad_norm": 0.2498348355293274, "learning_rate": 0.0003933796551170955, "loss": 1.8346, "step": 3517 }, { "epoch": 0.3435546875, "grad_norm": 0.2310560792684555, "learning_rate": 0.00039331917491115933, "loss": 1.8462, "step": 3518 }, { 
"epoch": 0.34365234375, "grad_norm": 0.24668028950691223, "learning_rate": 0.00039325868288618565, "loss": 1.9157, "step": 3519 }, { "epoch": 0.34375, "grad_norm": 0.20355354249477386, "learning_rate": 0.0003931981790482172, "loss": 1.9128, "step": 3520 }, { "epoch": 0.34384765625, "grad_norm": 0.25293999910354614, "learning_rate": 0.00039313766340329795, "loss": 1.8329, "step": 3521 }, { "epoch": 0.3439453125, "grad_norm": 0.22218835353851318, "learning_rate": 0.00039307713595747263, "loss": 1.8865, "step": 3522 }, { "epoch": 0.34404296875, "grad_norm": 0.22433200478553772, "learning_rate": 0.0003930165967167876, "loss": 1.8778, "step": 3523 }, { "epoch": 0.344140625, "grad_norm": 0.22571197152137756, "learning_rate": 0.0003929560456872899, "loss": 1.8835, "step": 3524 }, { "epoch": 0.34423828125, "grad_norm": 0.2275848686695099, "learning_rate": 0.00039289548287502847, "loss": 1.8552, "step": 3525 }, { "epoch": 0.3443359375, "grad_norm": 0.19883055984973907, "learning_rate": 0.00039283490828605254, "loss": 1.8562, "step": 3526 }, { "epoch": 0.34443359375, "grad_norm": 0.2009442299604416, "learning_rate": 0.00039277432192641335, "loss": 1.8354, "step": 3527 }, { "epoch": 0.34453125, "grad_norm": 0.20756255090236664, "learning_rate": 0.0003927137238021627, "loss": 1.8615, "step": 3528 }, { "epoch": 0.34462890625, "grad_norm": 0.21216446161270142, "learning_rate": 0.00039265311391935395, "loss": 1.9002, "step": 3529 }, { "epoch": 0.3447265625, "grad_norm": 0.21828250586986542, "learning_rate": 0.00039259249228404137, "loss": 1.9665, "step": 3530 }, { "epoch": 0.34482421875, "grad_norm": 0.22328948974609375, "learning_rate": 0.0003925318589022806, "loss": 1.8696, "step": 3531 }, { "epoch": 0.344921875, "grad_norm": 0.27527397871017456, "learning_rate": 0.00039247121378012845, "loss": 1.8477, "step": 3532 }, { "epoch": 0.34501953125, "grad_norm": 0.3070453703403473, "learning_rate": 0.00039241055692364256, "loss": 1.8264, "step": 3533 }, { "epoch": 0.3451171875, "grad_norm": 0.22435393929481506, "learning_rate": 0.0003923498883388824, "loss": 1.8824, "step": 3534 }, { "epoch": 0.34521484375, "grad_norm": 0.25616350769996643, "learning_rate": 0.00039228920803190793, "loss": 1.8645, "step": 3535 }, { "epoch": 0.3453125, "grad_norm": 0.3189918100833893, "learning_rate": 0.00039222851600878073, "loss": 1.843, "step": 3536 }, { "epoch": 0.34541015625, "grad_norm": 0.22394324839115143, "learning_rate": 0.0003921678122755632, "loss": 1.8536, "step": 3537 }, { "epoch": 0.3455078125, "grad_norm": 0.22344903647899628, "learning_rate": 0.00039210709683831936, "loss": 1.8434, "step": 3538 }, { "epoch": 0.34560546875, "grad_norm": 0.18886572122573853, "learning_rate": 0.000392046369703114, "loss": 1.8473, "step": 3539 }, { "epoch": 0.345703125, "grad_norm": 0.24359601736068726, "learning_rate": 0.00039198563087601313, "loss": 1.8687, "step": 3540 }, { "epoch": 0.34580078125, "grad_norm": 0.1771291345357895, "learning_rate": 0.0003919248803630843, "loss": 1.9114, "step": 3541 }, { "epoch": 0.3458984375, "grad_norm": 0.25192058086395264, "learning_rate": 0.0003918641181703957, "loss": 1.8616, "step": 3542 }, { "epoch": 0.34599609375, "grad_norm": 0.21408501267433167, "learning_rate": 0.000391803344304017, "loss": 1.8387, "step": 3543 }, { "epoch": 0.34609375, "grad_norm": 0.2607284486293793, "learning_rate": 0.000391742558770019, "loss": 1.8529, "step": 3544 }, { "epoch": 0.34619140625, "grad_norm": 0.2747364044189453, "learning_rate": 0.0003916817615744737, "loss": 1.8514, "step": 3545 }, { "epoch": 
0.3462890625, "grad_norm": 0.2552090585231781, "learning_rate": 0.000391620952723454, "loss": 1.8858, "step": 3546 }, { "epoch": 0.34638671875, "grad_norm": 0.27006518840789795, "learning_rate": 0.00039156013222303433, "loss": 1.8749, "step": 3547 }, { "epoch": 0.346484375, "grad_norm": 0.2662247121334076, "learning_rate": 0.00039149930007929015, "loss": 1.8688, "step": 3548 }, { "epoch": 0.34658203125, "grad_norm": 0.2671772539615631, "learning_rate": 0.00039143845629829795, "loss": 1.852, "step": 3549 }, { "epoch": 0.3466796875, "grad_norm": 0.204731747508049, "learning_rate": 0.0003913776008861355, "loss": 1.8848, "step": 3550 }, { "epoch": 0.34677734375, "grad_norm": 0.27976933121681213, "learning_rate": 0.00039131673384888173, "loss": 1.8698, "step": 3551 }, { "epoch": 0.346875, "grad_norm": 0.19963674247264862, "learning_rate": 0.0003912558551926168, "loss": 1.9433, "step": 3552 }, { "epoch": 0.34697265625, "grad_norm": 0.2379409819841385, "learning_rate": 0.00039119496492342175, "loss": 1.8777, "step": 3553 }, { "epoch": 0.3470703125, "grad_norm": 0.21821127831935883, "learning_rate": 0.0003911340630473791, "loss": 1.8634, "step": 3554 }, { "epoch": 0.34716796875, "grad_norm": 0.26188230514526367, "learning_rate": 0.0003910731495705725, "loss": 1.8684, "step": 3555 }, { "epoch": 0.347265625, "grad_norm": 0.2163819670677185, "learning_rate": 0.0003910122244990866, "loss": 1.8762, "step": 3556 }, { "epoch": 0.34736328125, "grad_norm": 0.2630750834941864, "learning_rate": 0.0003909512878390072, "loss": 1.8506, "step": 3557 }, { "epoch": 0.3474609375, "grad_norm": 0.229984313249588, "learning_rate": 0.0003908903395964213, "loss": 1.8492, "step": 3558 }, { "epoch": 0.34755859375, "grad_norm": 0.22928504645824432, "learning_rate": 0.0003908293797774172, "loss": 1.8541, "step": 3559 }, { "epoch": 0.34765625, "grad_norm": 0.22383026778697968, "learning_rate": 0.0003907684083880843, "loss": 1.8417, "step": 3560 }, { "epoch": 0.34775390625, "grad_norm": 0.23472000658512115, "learning_rate": 0.0003907074254345129, "loss": 1.8546, "step": 3561 }, { "epoch": 0.3478515625, "grad_norm": 0.20112402737140656, "learning_rate": 0.00039064643092279487, "loss": 1.8371, "step": 3562 }, { "epoch": 0.34794921875, "grad_norm": 0.23486244678497314, "learning_rate": 0.0003905854248590229, "loss": 1.8562, "step": 3563 }, { "epoch": 0.348046875, "grad_norm": 0.17908360064029694, "learning_rate": 0.000390524407249291, "loss": 1.828, "step": 3564 }, { "epoch": 0.34814453125, "grad_norm": 0.2056552618741989, "learning_rate": 0.00039046337809969415, "loss": 1.8765, "step": 3565 }, { "epoch": 0.3482421875, "grad_norm": 0.19951866567134857, "learning_rate": 0.0003904023374163289, "loss": 1.8921, "step": 3566 }, { "epoch": 0.34833984375, "grad_norm": 0.22525285184383392, "learning_rate": 0.0003903412852052925, "loss": 1.8943, "step": 3567 }, { "epoch": 0.3484375, "grad_norm": 0.27338072657585144, "learning_rate": 0.00039028022147268346, "loss": 1.8677, "step": 3568 }, { "epoch": 0.34853515625, "grad_norm": 0.287302166223526, "learning_rate": 0.00039021914622460157, "loss": 1.8679, "step": 3569 }, { "epoch": 0.3486328125, "grad_norm": 0.1998988389968872, "learning_rate": 0.0003901580594671478, "loss": 1.8138, "step": 3570 }, { "epoch": 0.34873046875, "grad_norm": 0.24139219522476196, "learning_rate": 0.00039009696120642407, "loss": 1.8438, "step": 3571 }, { "epoch": 0.348828125, "grad_norm": 0.21555127203464508, "learning_rate": 0.00039003585144853353, "loss": 1.8697, "step": 3572 }, { "epoch": 0.34892578125, 
"grad_norm": 0.20976285636425018, "learning_rate": 0.0003899747301995806, "loss": 1.8486, "step": 3573 }, { "epoch": 0.3490234375, "grad_norm": 0.22705312073230743, "learning_rate": 0.0003899135974656708, "loss": 1.8197, "step": 3574 }, { "epoch": 0.34912109375, "grad_norm": 0.21448858082294464, "learning_rate": 0.0003898524532529105, "loss": 1.8833, "step": 3575 }, { "epoch": 0.34921875, "grad_norm": 0.1872919797897339, "learning_rate": 0.00038979129756740774, "loss": 1.8701, "step": 3576 }, { "epoch": 0.34931640625, "grad_norm": 0.25819647312164307, "learning_rate": 0.00038973013041527127, "loss": 1.8932, "step": 3577 }, { "epoch": 0.3494140625, "grad_norm": 0.19156448543071747, "learning_rate": 0.0003896689518026112, "loss": 1.856, "step": 3578 }, { "epoch": 0.34951171875, "grad_norm": 0.20140022039413452, "learning_rate": 0.00038960776173553867, "loss": 1.8327, "step": 3579 }, { "epoch": 0.349609375, "grad_norm": 0.18098512291908264, "learning_rate": 0.0003895465602201661, "loss": 1.8791, "step": 3580 }, { "epoch": 0.34970703125, "grad_norm": 0.20784790813922882, "learning_rate": 0.000389485347262607, "loss": 1.8375, "step": 3581 }, { "epoch": 0.3498046875, "grad_norm": 0.22096554934978485, "learning_rate": 0.0003894241228689759, "loss": 1.8553, "step": 3582 }, { "epoch": 0.34990234375, "grad_norm": 0.24984371662139893, "learning_rate": 0.0003893628870453886, "loss": 1.8573, "step": 3583 }, { "epoch": 0.35, "grad_norm": 0.19057220220565796, "learning_rate": 0.0003893016397979621, "loss": 1.8712, "step": 3584 }, { "epoch": 0.35009765625, "grad_norm": 0.22978176176548004, "learning_rate": 0.0003892403811328144, "loss": 1.8274, "step": 3585 }, { "epoch": 0.3501953125, "grad_norm": 0.21941888332366943, "learning_rate": 0.00038917911105606476, "loss": 1.8153, "step": 3586 }, { "epoch": 0.35029296875, "grad_norm": 0.2456945776939392, "learning_rate": 0.0003891178295738335, "loss": 1.8303, "step": 3587 }, { "epoch": 0.350390625, "grad_norm": 0.2776344120502472, "learning_rate": 0.00038905653669224185, "loss": 1.8185, "step": 3588 }, { "epoch": 0.35048828125, "grad_norm": 0.2225499004125595, "learning_rate": 0.00038899523241741273, "loss": 1.8456, "step": 3589 }, { "epoch": 0.3505859375, "grad_norm": 0.2002578228712082, "learning_rate": 0.00038893391675546993, "loss": 1.8501, "step": 3590 }, { "epoch": 0.35068359375, "grad_norm": 0.21190977096557617, "learning_rate": 0.00038887258971253804, "loss": 1.8311, "step": 3591 }, { "epoch": 0.35078125, "grad_norm": 0.20795829594135284, "learning_rate": 0.00038881125129474334, "loss": 1.8079, "step": 3592 }, { "epoch": 0.35087890625, "grad_norm": 0.20573373138904572, "learning_rate": 0.0003887499015082129, "loss": 1.9012, "step": 3593 }, { "epoch": 0.3509765625, "grad_norm": 0.24483314156532288, "learning_rate": 0.000388688540359075, "loss": 1.841, "step": 3594 }, { "epoch": 0.35107421875, "grad_norm": 0.2204584926366806, "learning_rate": 0.00038862716785345916, "loss": 1.834, "step": 3595 }, { "epoch": 0.351171875, "grad_norm": 0.18937397003173828, "learning_rate": 0.00038856578399749583, "loss": 1.8726, "step": 3596 }, { "epoch": 0.35126953125, "grad_norm": 0.23919473588466644, "learning_rate": 0.0003885043887973168, "loss": 1.8745, "step": 3597 }, { "epoch": 0.3513671875, "grad_norm": 0.19273193180561066, "learning_rate": 0.000388442982259055, "loss": 1.833, "step": 3598 }, { "epoch": 0.35146484375, "grad_norm": 0.2513469159603119, "learning_rate": 0.0003883815643888441, "loss": 1.8519, "step": 3599 }, { "epoch": 0.3515625, "grad_norm": 
0.3091588318347931, "learning_rate": 0.0003883201351928195, "loss": 1.8621, "step": 3600 }, { "epoch": 0.35166015625, "grad_norm": 0.22832126915454865, "learning_rate": 0.0003882586946771173, "loss": 1.8215, "step": 3601 }, { "epoch": 0.3517578125, "grad_norm": 0.20138737559318542, "learning_rate": 0.00038819724284787496, "loss": 1.8488, "step": 3602 }, { "epoch": 0.35185546875, "grad_norm": 0.2468486875295639, "learning_rate": 0.00038813577971123076, "loss": 1.8553, "step": 3603 }, { "epoch": 0.351953125, "grad_norm": 0.2534483075141907, "learning_rate": 0.0003880743052733246, "loss": 1.8304, "step": 3604 }, { "epoch": 0.35205078125, "grad_norm": 0.17403554916381836, "learning_rate": 0.000388012819540297, "loss": 1.82, "step": 3605 }, { "epoch": 0.3521484375, "grad_norm": 0.23867322504520416, "learning_rate": 0.00038795132251828995, "loss": 1.8401, "step": 3606 }, { "epoch": 0.35224609375, "grad_norm": 0.3085857033729553, "learning_rate": 0.00038788981421344643, "loss": 1.8749, "step": 3607 }, { "epoch": 0.35234375, "grad_norm": 0.24236635863780975, "learning_rate": 0.00038782829463191056, "loss": 1.8541, "step": 3608 }, { "epoch": 0.35244140625, "grad_norm": 0.16068558394908905, "learning_rate": 0.00038776676377982763, "loss": 1.8867, "step": 3609 }, { "epoch": 0.3525390625, "grad_norm": 0.2600993514060974, "learning_rate": 0.00038770522166334405, "loss": 1.872, "step": 3610 }, { "epoch": 0.35263671875, "grad_norm": 0.21148955821990967, "learning_rate": 0.00038764366828860736, "loss": 1.8504, "step": 3611 }, { "epoch": 0.352734375, "grad_norm": 0.19798903167247772, "learning_rate": 0.00038758210366176605, "loss": 1.8641, "step": 3612 }, { "epoch": 0.35283203125, "grad_norm": 0.24468740820884705, "learning_rate": 0.00038752052778896995, "loss": 1.8666, "step": 3613 }, { "epoch": 0.3529296875, "grad_norm": 0.2498670220375061, "learning_rate": 0.00038745894067637005, "loss": 1.894, "step": 3614 }, { "epoch": 0.35302734375, "grad_norm": 0.192293182015419, "learning_rate": 0.0003873973423301183, "loss": 1.8973, "step": 3615 }, { "epoch": 0.353125, "grad_norm": 0.23599712550640106, "learning_rate": 0.00038733573275636767, "loss": 1.8605, "step": 3616 }, { "epoch": 0.35322265625, "grad_norm": 0.24024492502212524, "learning_rate": 0.0003872741119612726, "loss": 1.8377, "step": 3617 }, { "epoch": 0.3533203125, "grad_norm": 0.18636207282543182, "learning_rate": 0.00038721247995098846, "loss": 1.8966, "step": 3618 }, { "epoch": 0.35341796875, "grad_norm": 0.2846168577671051, "learning_rate": 0.0003871508367316717, "loss": 1.887, "step": 3619 }, { "epoch": 0.353515625, "grad_norm": 0.2541621923446655, "learning_rate": 0.0003870891823094798, "loss": 1.864, "step": 3620 }, { "epoch": 0.35361328125, "grad_norm": 0.2141297310590744, "learning_rate": 0.0003870275166905717, "loss": 1.8547, "step": 3621 }, { "epoch": 0.3537109375, "grad_norm": 0.22027063369750977, "learning_rate": 0.00038696583988110717, "loss": 1.881, "step": 3622 }, { "epoch": 0.35380859375, "grad_norm": 0.18557903170585632, "learning_rate": 0.0003869041518872471, "loss": 1.8469, "step": 3623 }, { "epoch": 0.35390625, "grad_norm": 0.202976256608963, "learning_rate": 0.00038684245271515365, "loss": 1.8438, "step": 3624 }, { "epoch": 0.35400390625, "grad_norm": 0.2126491665840149, "learning_rate": 0.00038678074237099, "loss": 1.8884, "step": 3625 }, { "epoch": 0.3541015625, "grad_norm": 0.2188192456960678, "learning_rate": 0.0003867190208609205, "loss": 1.8504, "step": 3626 }, { "epoch": 0.35419921875, "grad_norm": 0.2543421983718872, 
"learning_rate": 0.00038665728819111056, "loss": 1.8995, "step": 3627 }, { "epoch": 0.354296875, "grad_norm": 0.2173098921775818, "learning_rate": 0.0003865955443677267, "loss": 1.9028, "step": 3628 }, { "epoch": 0.35439453125, "grad_norm": 0.21151049435138702, "learning_rate": 0.0003865337893969367, "loss": 1.8721, "step": 3629 }, { "epoch": 0.3544921875, "grad_norm": 0.24614599347114563, "learning_rate": 0.0003864720232849091, "loss": 1.8269, "step": 3630 }, { "epoch": 0.35458984375, "grad_norm": 0.19173894822597504, "learning_rate": 0.000386410246037814, "loss": 1.9056, "step": 3631 }, { "epoch": 0.3546875, "grad_norm": 0.2210468053817749, "learning_rate": 0.00038634845766182235, "loss": 1.842, "step": 3632 }, { "epoch": 0.35478515625, "grad_norm": 0.23308482766151428, "learning_rate": 0.00038628665816310616, "loss": 1.8237, "step": 3633 }, { "epoch": 0.3548828125, "grad_norm": 0.23058968782424927, "learning_rate": 0.0003862248475478388, "loss": 1.8754, "step": 3634 }, { "epoch": 0.35498046875, "grad_norm": 0.2620023488998413, "learning_rate": 0.0003861630258221945, "loss": 1.873, "step": 3635 }, { "epoch": 0.355078125, "grad_norm": 0.24256622791290283, "learning_rate": 0.00038610119299234874, "loss": 1.8666, "step": 3636 }, { "epoch": 0.35517578125, "grad_norm": 0.23211470246315002, "learning_rate": 0.0003860393490644781, "loss": 1.8632, "step": 3637 }, { "epoch": 0.3552734375, "grad_norm": 0.22089043259620667, "learning_rate": 0.00038597749404476015, "loss": 1.8948, "step": 3638 }, { "epoch": 0.35537109375, "grad_norm": 0.20722050964832306, "learning_rate": 0.00038591562793937375, "loss": 1.8426, "step": 3639 }, { "epoch": 0.35546875, "grad_norm": 0.2407168745994568, "learning_rate": 0.00038585375075449874, "loss": 1.8757, "step": 3640 }, { "epoch": 0.35556640625, "grad_norm": 0.21094001829624176, "learning_rate": 0.0003857918624963161, "loss": 1.8471, "step": 3641 }, { "epoch": 0.3556640625, "grad_norm": 0.21768639981746674, "learning_rate": 0.00038572996317100787, "loss": 1.8501, "step": 3642 }, { "epoch": 0.35576171875, "grad_norm": 0.198587104678154, "learning_rate": 0.0003856680527847574, "loss": 1.8872, "step": 3643 }, { "epoch": 0.355859375, "grad_norm": 0.2296200841665268, "learning_rate": 0.0003856061313437489, "loss": 1.8713, "step": 3644 }, { "epoch": 0.35595703125, "grad_norm": 0.19478590786457062, "learning_rate": 0.00038554419885416765, "loss": 1.8553, "step": 3645 }, { "epoch": 0.3560546875, "grad_norm": 0.2553071081638336, "learning_rate": 0.0003854822553222004, "loss": 1.8828, "step": 3646 }, { "epoch": 0.35615234375, "grad_norm": 0.20786093175411224, "learning_rate": 0.0003854203007540345, "loss": 1.7858, "step": 3647 }, { "epoch": 0.35625, "grad_norm": 0.2537551820278168, "learning_rate": 0.00038535833515585883, "loss": 1.8407, "step": 3648 }, { "epoch": 0.35634765625, "grad_norm": 0.23400303721427917, "learning_rate": 0.0003852963585338631, "loss": 1.8623, "step": 3649 }, { "epoch": 0.3564453125, "grad_norm": 0.21759647130966187, "learning_rate": 0.0003852343708942385, "loss": 1.8419, "step": 3650 }, { "epoch": 0.35654296875, "grad_norm": 0.21964114904403687, "learning_rate": 0.00038517237224317664, "loss": 1.8366, "step": 3651 }, { "epoch": 0.356640625, "grad_norm": 0.21748779714107513, "learning_rate": 0.0003851103625868709, "loss": 1.8917, "step": 3652 }, { "epoch": 0.35673828125, "grad_norm": 0.216062992811203, "learning_rate": 0.00038504834193151547, "loss": 1.87, "step": 3653 }, { "epoch": 0.3568359375, "grad_norm": 0.223576620221138, "learning_rate": 
0.0003849863102833055, "loss": 1.8502, "step": 3654 }, { "epoch": 0.35693359375, "grad_norm": 0.19889536499977112, "learning_rate": 0.0003849242676484376, "loss": 1.8545, "step": 3655 }, { "epoch": 0.35703125, "grad_norm": 0.2517528533935547, "learning_rate": 0.0003848622140331092, "loss": 1.8575, "step": 3656 }, { "epoch": 0.35712890625, "grad_norm": 0.22014349699020386, "learning_rate": 0.000384800149443519, "loss": 1.9084, "step": 3657 }, { "epoch": 0.3572265625, "grad_norm": 0.2128230482339859, "learning_rate": 0.0003847380738858665, "loss": 1.8927, "step": 3658 }, { "epoch": 0.35732421875, "grad_norm": 0.2507193088531494, "learning_rate": 0.0003846759873663526, "loss": 1.8391, "step": 3659 }, { "epoch": 0.357421875, "grad_norm": 0.23501361906528473, "learning_rate": 0.00038461388989117926, "loss": 1.8985, "step": 3660 }, { "epoch": 0.35751953125, "grad_norm": 0.3247581422328949, "learning_rate": 0.00038455178146654936, "loss": 1.8718, "step": 3661 }, { "epoch": 0.3576171875, "grad_norm": 0.22511839866638184, "learning_rate": 0.00038448966209866703, "loss": 1.8715, "step": 3662 }, { "epoch": 0.35771484375, "grad_norm": 0.2150260955095291, "learning_rate": 0.00038442753179373753, "loss": 1.8549, "step": 3663 }, { "epoch": 0.3578125, "grad_norm": 0.2238318771123886, "learning_rate": 0.00038436539055796705, "loss": 1.873, "step": 3664 }, { "epoch": 0.35791015625, "grad_norm": 0.2875686287879944, "learning_rate": 0.0003843032383975628, "loss": 1.8757, "step": 3665 }, { "epoch": 0.3580078125, "grad_norm": 0.25750482082366943, "learning_rate": 0.00038424107531873346, "loss": 1.8343, "step": 3666 }, { "epoch": 0.35810546875, "grad_norm": 0.21657031774520874, "learning_rate": 0.00038417890132768853, "loss": 1.9179, "step": 3667 }, { "epoch": 0.358203125, "grad_norm": 0.2154138833284378, "learning_rate": 0.00038411671643063855, "loss": 1.8464, "step": 3668 }, { "epoch": 0.35830078125, "grad_norm": 0.19663992524147034, "learning_rate": 0.00038405452063379524, "loss": 1.8742, "step": 3669 }, { "epoch": 0.3583984375, "grad_norm": 0.21699386835098267, "learning_rate": 0.0003839923139433715, "loss": 1.8815, "step": 3670 }, { "epoch": 0.35849609375, "grad_norm": 0.23034091293811798, "learning_rate": 0.00038393009636558125, "loss": 1.8276, "step": 3671 }, { "epoch": 0.35859375, "grad_norm": 0.203604593873024, "learning_rate": 0.00038386786790663933, "loss": 1.9034, "step": 3672 }, { "epoch": 0.35869140625, "grad_norm": 0.23865197598934174, "learning_rate": 0.00038380562857276185, "loss": 1.9006, "step": 3673 }, { "epoch": 0.3587890625, "grad_norm": 0.18150199949741364, "learning_rate": 0.0003837433783701661, "loss": 1.8433, "step": 3674 }, { "epoch": 0.35888671875, "grad_norm": 0.2704537510871887, "learning_rate": 0.00038368111730507015, "loss": 1.8847, "step": 3675 }, { "epoch": 0.358984375, "grad_norm": 0.19624443352222443, "learning_rate": 0.00038361884538369345, "loss": 1.849, "step": 3676 }, { "epoch": 0.35908203125, "grad_norm": 0.26155540347099304, "learning_rate": 0.00038355656261225633, "loss": 1.8205, "step": 3677 }, { "epoch": 0.3591796875, "grad_norm": 0.2297595739364624, "learning_rate": 0.0003834942689969804, "loss": 1.8437, "step": 3678 }, { "epoch": 0.35927734375, "grad_norm": 0.2379789799451828, "learning_rate": 0.00038343196454408815, "loss": 1.8765, "step": 3679 }, { "epoch": 0.359375, "grad_norm": 0.24337363243103027, "learning_rate": 0.0003833696492598032, "loss": 1.8568, "step": 3680 }, { "epoch": 0.35947265625, "grad_norm": 0.23507575690746307, "learning_rate": 
0.0003833073231503505, "loss": 1.8405, "step": 3681 }, { "epoch": 0.3595703125, "grad_norm": 0.20884008705615997, "learning_rate": 0.0003832449862219556, "loss": 1.8292, "step": 3682 }, { "epoch": 0.35966796875, "grad_norm": 0.24468238651752472, "learning_rate": 0.0003831826384808456, "loss": 1.8841, "step": 3683 }, { "epoch": 0.359765625, "grad_norm": 0.19200499355793, "learning_rate": 0.0003831202799332484, "loss": 1.831, "step": 3684 }, { "epoch": 0.35986328125, "grad_norm": 0.2582682967185974, "learning_rate": 0.00038305791058539313, "loss": 1.9164, "step": 3685 }, { "epoch": 0.3599609375, "grad_norm": 0.2183380275964737, "learning_rate": 0.00038299553044350977, "loss": 1.8589, "step": 3686 }, { "epoch": 0.36005859375, "grad_norm": 0.2560194134712219, "learning_rate": 0.0003829331395138298, "loss": 1.8866, "step": 3687 }, { "epoch": 0.36015625, "grad_norm": 0.23860234022140503, "learning_rate": 0.0003828707378025854, "loss": 1.851, "step": 3688 }, { "epoch": 0.36025390625, "grad_norm": 0.2541812062263489, "learning_rate": 0.0003828083253160099, "loss": 1.827, "step": 3689 }, { "epoch": 0.3603515625, "grad_norm": 0.23112213611602783, "learning_rate": 0.0003827459020603378, "loss": 1.8759, "step": 3690 }, { "epoch": 0.36044921875, "grad_norm": 0.2021452635526657, "learning_rate": 0.0003826834680418047, "loss": 1.8809, "step": 3691 }, { "epoch": 0.360546875, "grad_norm": 0.21591801941394806, "learning_rate": 0.00038262102326664705, "loss": 1.8231, "step": 3692 }, { "epoch": 0.36064453125, "grad_norm": 0.23959480226039886, "learning_rate": 0.0003825585677411025, "loss": 1.8422, "step": 3693 }, { "epoch": 0.3607421875, "grad_norm": 0.22720707952976227, "learning_rate": 0.0003824961014714101, "loss": 1.8724, "step": 3694 }, { "epoch": 0.36083984375, "grad_norm": 0.19454148411750793, "learning_rate": 0.0003824336244638095, "loss": 1.842, "step": 3695 }, { "epoch": 0.3609375, "grad_norm": 0.31191563606262207, "learning_rate": 0.00038237113672454147, "loss": 1.8517, "step": 3696 }, { "epoch": 0.36103515625, "grad_norm": 0.22866396605968475, "learning_rate": 0.0003823086382598482, "loss": 1.8568, "step": 3697 }, { "epoch": 0.3611328125, "grad_norm": 0.265703946352005, "learning_rate": 0.00038224612907597255, "loss": 1.8486, "step": 3698 }, { "epoch": 0.36123046875, "grad_norm": 0.24628691375255585, "learning_rate": 0.00038218360917915885, "loss": 1.8597, "step": 3699 }, { "epoch": 0.361328125, "grad_norm": 0.23236118257045746, "learning_rate": 0.00038212107857565203, "loss": 1.8615, "step": 3700 }, { "epoch": 0.36142578125, "grad_norm": 0.308764785528183, "learning_rate": 0.00038205853727169855, "loss": 1.9187, "step": 3701 }, { "epoch": 0.3615234375, "grad_norm": 0.1871979832649231, "learning_rate": 0.0003819959852735456, "loss": 1.8889, "step": 3702 }, { "epoch": 0.36162109375, "grad_norm": 0.2598519027233124, "learning_rate": 0.0003819334225874416, "loss": 1.8565, "step": 3703 }, { "epoch": 0.36171875, "grad_norm": 0.2150181531906128, "learning_rate": 0.0003818708492196361, "loss": 1.8964, "step": 3704 }, { "epoch": 0.36181640625, "grad_norm": 0.24690137803554535, "learning_rate": 0.0003818082651763795, "loss": 1.8602, "step": 3705 }, { "epoch": 0.3619140625, "grad_norm": 0.24732674658298492, "learning_rate": 0.0003817456704639235, "loss": 1.8561, "step": 3706 }, { "epoch": 0.36201171875, "grad_norm": 0.18767625093460083, "learning_rate": 0.0003816830650885206, "loss": 1.8208, "step": 3707 }, { "epoch": 0.362109375, "grad_norm": 0.20773689448833466, "learning_rate": 0.0003816204490564247, 
"loss": 1.8739, "step": 3708 }, { "epoch": 0.36220703125, "grad_norm": 0.2196119874715805, "learning_rate": 0.00038155782237389054, "loss": 1.8929, "step": 3709 }, { "epoch": 0.3623046875, "grad_norm": 0.21558576822280884, "learning_rate": 0.00038149518504717385, "loss": 1.8732, "step": 3710 }, { "epoch": 0.36240234375, "grad_norm": 0.20954236388206482, "learning_rate": 0.00038143253708253173, "loss": 1.8729, "step": 3711 }, { "epoch": 0.3625, "grad_norm": 0.2093965858221054, "learning_rate": 0.00038136987848622197, "loss": 1.8957, "step": 3712 }, { "epoch": 0.36259765625, "grad_norm": 0.21315394341945648, "learning_rate": 0.00038130720926450384, "loss": 1.8769, "step": 3713 }, { "epoch": 0.3626953125, "grad_norm": 0.20709800720214844, "learning_rate": 0.00038124452942363713, "loss": 1.8633, "step": 3714 }, { "epoch": 0.36279296875, "grad_norm": 0.18485292792320251, "learning_rate": 0.0003811818389698833, "loss": 1.8707, "step": 3715 }, { "epoch": 0.362890625, "grad_norm": 0.21978256106376648, "learning_rate": 0.00038111913790950437, "loss": 1.8756, "step": 3716 }, { "epoch": 0.36298828125, "grad_norm": 0.22362416982650757, "learning_rate": 0.00038105642624876367, "loss": 1.8429, "step": 3717 }, { "epoch": 0.3630859375, "grad_norm": 0.20255053043365479, "learning_rate": 0.0003809937039939257, "loss": 1.8815, "step": 3718 }, { "epoch": 0.36318359375, "grad_norm": 0.25443053245544434, "learning_rate": 0.0003809309711512556, "loss": 1.8544, "step": 3719 }, { "epoch": 0.36328125, "grad_norm": 0.20008181035518646, "learning_rate": 0.00038086822772702, "loss": 1.8782, "step": 3720 }, { "epoch": 0.36337890625, "grad_norm": 0.25505152344703674, "learning_rate": 0.0003808054737274863, "loss": 1.8917, "step": 3721 }, { "epoch": 0.3634765625, "grad_norm": 0.23726782202720642, "learning_rate": 0.00038074270915892315, "loss": 1.8482, "step": 3722 }, { "epoch": 0.36357421875, "grad_norm": 0.21997596323490143, "learning_rate": 0.0003806799340276002, "loss": 1.878, "step": 3723 }, { "epoch": 0.363671875, "grad_norm": 0.27170437574386597, "learning_rate": 0.0003806171483397881, "loss": 1.825, "step": 3724 }, { "epoch": 0.36376953125, "grad_norm": 0.18107932806015015, "learning_rate": 0.00038055435210175846, "loss": 1.8275, "step": 3725 }, { "epoch": 0.3638671875, "grad_norm": 0.22321036458015442, "learning_rate": 0.00038049154531978423, "loss": 1.8339, "step": 3726 }, { "epoch": 0.36396484375, "grad_norm": 0.24020689725875854, "learning_rate": 0.0003804287280001392, "loss": 1.8945, "step": 3727 }, { "epoch": 0.3640625, "grad_norm": 0.21679888665676117, "learning_rate": 0.0003803659001490982, "loss": 1.871, "step": 3728 }, { "epoch": 0.36416015625, "grad_norm": 0.20395179092884064, "learning_rate": 0.00038030306177293726, "loss": 1.8462, "step": 3729 }, { "epoch": 0.3642578125, "grad_norm": 0.19003739953041077, "learning_rate": 0.0003802402128779334, "loss": 1.9009, "step": 3730 }, { "epoch": 0.36435546875, "grad_norm": 0.21998478472232819, "learning_rate": 0.00038017735347036457, "loss": 1.8584, "step": 3731 }, { "epoch": 0.364453125, "grad_norm": 0.19138023257255554, "learning_rate": 0.00038011448355650996, "loss": 1.8894, "step": 3732 }, { "epoch": 0.36455078125, "grad_norm": 0.20978252589702606, "learning_rate": 0.00038005160314264966, "loss": 1.808, "step": 3733 }, { "epoch": 0.3646484375, "grad_norm": 0.2341012954711914, "learning_rate": 0.00037998871223506487, "loss": 1.8356, "step": 3734 }, { "epoch": 0.36474609375, "grad_norm": 0.21801787614822388, "learning_rate": 0.0003799258108400378, "loss": 
1.819, "step": 3735 }, { "epoch": 0.36484375, "grad_norm": 0.21013173460960388, "learning_rate": 0.00037986289896385183, "loss": 1.8275, "step": 3736 }, { "epoch": 0.36494140625, "grad_norm": 0.24805255234241486, "learning_rate": 0.00037979997661279123, "loss": 1.9283, "step": 3737 }, { "epoch": 0.3650390625, "grad_norm": 0.21214769780635834, "learning_rate": 0.0003797370437931414, "loss": 1.8444, "step": 3738 }, { "epoch": 0.36513671875, "grad_norm": 0.2353964000940323, "learning_rate": 0.0003796741005111889, "loss": 1.8862, "step": 3739 }, { "epoch": 0.365234375, "grad_norm": 0.17549175024032593, "learning_rate": 0.00037961114677322093, "loss": 1.8671, "step": 3740 }, { "epoch": 0.36533203125, "grad_norm": 0.24111035466194153, "learning_rate": 0.0003795481825855263, "loss": 1.8462, "step": 3741 }, { "epoch": 0.3654296875, "grad_norm": 0.26238951086997986, "learning_rate": 0.00037948520795439436, "loss": 1.892, "step": 3742 }, { "epoch": 0.36552734375, "grad_norm": 0.21740008890628815, "learning_rate": 0.00037942222288611584, "loss": 1.8798, "step": 3743 }, { "epoch": 0.365625, "grad_norm": 0.22931286692619324, "learning_rate": 0.0003793592273869823, "loss": 1.8866, "step": 3744 }, { "epoch": 0.36572265625, "grad_norm": 0.19847440719604492, "learning_rate": 0.0003792962214632865, "loss": 1.8912, "step": 3745 }, { "epoch": 0.3658203125, "grad_norm": 0.186073899269104, "learning_rate": 0.00037923320512132225, "loss": 1.741, "step": 3746 }, { "epoch": 0.36591796875, "grad_norm": 0.21398687362670898, "learning_rate": 0.0003791701783673841, "loss": 1.8779, "step": 3747 }, { "epoch": 0.366015625, "grad_norm": 0.16714359819889069, "learning_rate": 0.00037910714120776814, "loss": 1.8772, "step": 3748 }, { "epoch": 0.36611328125, "grad_norm": 0.23024225234985352, "learning_rate": 0.0003790440936487709, "loss": 1.8416, "step": 3749 }, { "epoch": 0.3662109375, "grad_norm": 0.22035610675811768, "learning_rate": 0.0003789810356966906, "loss": 1.8962, "step": 3750 }, { "epoch": 0.36630859375, "grad_norm": 0.19260485470294952, "learning_rate": 0.0003789179673578259, "loss": 1.8683, "step": 3751 }, { "epoch": 0.36640625, "grad_norm": 0.2358340322971344, "learning_rate": 0.00037885488863847696, "loss": 1.871, "step": 3752 }, { "epoch": 0.36650390625, "grad_norm": 0.2558320462703705, "learning_rate": 0.00037879179954494475, "loss": 1.8232, "step": 3753 }, { "epoch": 0.3666015625, "grad_norm": 0.20952382683753967, "learning_rate": 0.00037872870008353124, "loss": 1.8833, "step": 3754 }, { "epoch": 0.36669921875, "grad_norm": 0.20016750693321228, "learning_rate": 0.0003786655902605396, "loss": 1.8461, "step": 3755 }, { "epoch": 0.366796875, "grad_norm": 0.21693620085716248, "learning_rate": 0.0003786024700822738, "loss": 1.8463, "step": 3756 }, { "epoch": 0.36689453125, "grad_norm": 0.18927980959415436, "learning_rate": 0.0003785393395550391, "loss": 1.8573, "step": 3757 }, { "epoch": 0.3669921875, "grad_norm": 0.21964512765407562, "learning_rate": 0.0003784761986851416, "loss": 1.8861, "step": 3758 }, { "epoch": 0.36708984375, "grad_norm": 0.2026022970676422, "learning_rate": 0.0003784130474788887, "loss": 1.8446, "step": 3759 }, { "epoch": 0.3671875, "grad_norm": 0.19343462586402893, "learning_rate": 0.00037834988594258843, "loss": 1.8459, "step": 3760 }, { "epoch": 0.36728515625, "grad_norm": 0.22341008484363556, "learning_rate": 0.0003782867140825502, "loss": 1.9195, "step": 3761 }, { "epoch": 0.3673828125, "grad_norm": 0.21624907851219177, "learning_rate": 0.0003782235319050842, "loss": 1.8169, "step": 
3762 }, { "epoch": 0.36748046875, "grad_norm": 0.2739900052547455, "learning_rate": 0.00037816033941650185, "loss": 1.8959, "step": 3763 }, { "epoch": 0.367578125, "grad_norm": 0.20762409269809723, "learning_rate": 0.0003780971366231156, "loss": 1.8501, "step": 3764 }, { "epoch": 0.36767578125, "grad_norm": 0.23242148756980896, "learning_rate": 0.0003780339235312387, "loss": 1.8915, "step": 3765 }, { "epoch": 0.3677734375, "grad_norm": 0.2940577268600464, "learning_rate": 0.00037797070014718555, "loss": 1.8851, "step": 3766 }, { "epoch": 0.36787109375, "grad_norm": 0.26492175459861755, "learning_rate": 0.0003779074664772718, "loss": 1.8035, "step": 3767 }, { "epoch": 0.36796875, "grad_norm": 0.23707738518714905, "learning_rate": 0.0003778442225278138, "loss": 1.8489, "step": 3768 }, { "epoch": 0.36806640625, "grad_norm": 0.1993209421634674, "learning_rate": 0.0003777809683051291, "loss": 1.8721, "step": 3769 }, { "epoch": 0.3681640625, "grad_norm": 0.24402756989002228, "learning_rate": 0.0003777177038155362, "loss": 1.8695, "step": 3770 }, { "epoch": 0.36826171875, "grad_norm": 0.21460117399692535, "learning_rate": 0.00037765442906535475, "loss": 1.8673, "step": 3771 }, { "epoch": 0.368359375, "grad_norm": 0.205267071723938, "learning_rate": 0.0003775911440609052, "loss": 1.8147, "step": 3772 }, { "epoch": 0.36845703125, "grad_norm": 0.20510420203208923, "learning_rate": 0.0003775278488085093, "loss": 1.854, "step": 3773 }, { "epoch": 0.3685546875, "grad_norm": 0.20449259877204895, "learning_rate": 0.0003774645433144896, "loss": 1.898, "step": 3774 }, { "epoch": 0.36865234375, "grad_norm": 0.24301192164421082, "learning_rate": 0.0003774012275851697, "loss": 1.8672, "step": 3775 }, { "epoch": 0.36875, "grad_norm": 0.18098856508731842, "learning_rate": 0.00037733790162687445, "loss": 1.8821, "step": 3776 }, { "epoch": 0.36884765625, "grad_norm": 0.2757241427898407, "learning_rate": 0.00037727456544592937, "loss": 1.8566, "step": 3777 }, { "epoch": 0.3689453125, "grad_norm": 0.20406556129455566, "learning_rate": 0.00037721121904866133, "loss": 1.8682, "step": 3778 }, { "epoch": 0.36904296875, "grad_norm": 0.26397988200187683, "learning_rate": 0.000377147862441398, "loss": 1.8397, "step": 3779 }, { "epoch": 0.369140625, "grad_norm": 0.22704710066318512, "learning_rate": 0.0003770844956304682, "loss": 1.8548, "step": 3780 }, { "epoch": 0.36923828125, "grad_norm": 0.2295246124267578, "learning_rate": 0.0003770211186222017, "loss": 1.8748, "step": 3781 }, { "epoch": 0.3693359375, "grad_norm": 0.2571696937084198, "learning_rate": 0.0003769577314229292, "loss": 1.8545, "step": 3782 }, { "epoch": 0.36943359375, "grad_norm": 0.22579611837863922, "learning_rate": 0.0003768943340389826, "loss": 1.8948, "step": 3783 }, { "epoch": 0.36953125, "grad_norm": 0.21409833431243896, "learning_rate": 0.0003768309264766947, "loss": 1.7853, "step": 3784 }, { "epoch": 0.36962890625, "grad_norm": 0.23976057767868042, "learning_rate": 0.0003767675087423995, "loss": 1.8401, "step": 3785 }, { "epoch": 0.3697265625, "grad_norm": 0.21009457111358643, "learning_rate": 0.0003767040808424317, "loss": 1.8773, "step": 3786 }, { "epoch": 0.36982421875, "grad_norm": 0.27418291568756104, "learning_rate": 0.0003766406427831272, "loss": 1.8874, "step": 3787 }, { "epoch": 0.369921875, "grad_norm": 0.20294521749019623, "learning_rate": 0.000376577194570823, "loss": 1.8804, "step": 3788 }, { "epoch": 0.37001953125, "grad_norm": 0.3269689679145813, "learning_rate": 0.00037651373621185697, "loss": 1.8547, "step": 3789 }, { "epoch": 
0.3701171875, "grad_norm": 0.2686477303504944, "learning_rate": 0.0003764502677125679, "loss": 1.8666, "step": 3790 }, { "epoch": 0.37021484375, "grad_norm": 0.2950127422809601, "learning_rate": 0.000376386789079296, "loss": 1.9096, "step": 3791 }, { "epoch": 0.3703125, "grad_norm": 0.19845204055309296, "learning_rate": 0.00037632330031838195, "loss": 1.8558, "step": 3792 }, { "epoch": 0.37041015625, "grad_norm": 0.27506914734840393, "learning_rate": 0.00037625980143616796, "loss": 1.8602, "step": 3793 }, { "epoch": 0.3705078125, "grad_norm": 0.23172548413276672, "learning_rate": 0.0003761962924389968, "loss": 1.8983, "step": 3794 }, { "epoch": 0.37060546875, "grad_norm": 0.19600559771060944, "learning_rate": 0.0003761327733332126, "loss": 1.8958, "step": 3795 }, { "epoch": 0.370703125, "grad_norm": 0.22822436690330505, "learning_rate": 0.0003760692441251602, "loss": 1.8606, "step": 3796 }, { "epoch": 0.37080078125, "grad_norm": 0.2549194097518921, "learning_rate": 0.00037600570482118585, "loss": 1.8825, "step": 3797 }, { "epoch": 0.3708984375, "grad_norm": 0.21062737703323364, "learning_rate": 0.00037594215542763624, "loss": 1.8963, "step": 3798 }, { "epoch": 0.37099609375, "grad_norm": 0.2261582314968109, "learning_rate": 0.0003758785959508598, "loss": 1.8917, "step": 3799 }, { "epoch": 0.37109375, "grad_norm": 0.22571882605552673, "learning_rate": 0.00037581502639720516, "loss": 1.802, "step": 3800 }, { "epoch": 0.37119140625, "grad_norm": 0.23393283784389496, "learning_rate": 0.0003757514467730225, "loss": 1.855, "step": 3801 }, { "epoch": 0.3712890625, "grad_norm": 0.21826542913913727, "learning_rate": 0.00037568785708466304, "loss": 1.8376, "step": 3802 }, { "epoch": 0.37138671875, "grad_norm": 0.19500795006752014, "learning_rate": 0.00037562425733847856, "loss": 1.8535, "step": 3803 }, { "epoch": 0.371484375, "grad_norm": 0.22810782492160797, "learning_rate": 0.00037556064754082233, "loss": 1.8501, "step": 3804 }, { "epoch": 0.37158203125, "grad_norm": 0.17076687514781952, "learning_rate": 0.00037549702769804815, "loss": 1.8216, "step": 3805 }, { "epoch": 0.3716796875, "grad_norm": 0.21661575138568878, "learning_rate": 0.00037543339781651134, "loss": 1.8498, "step": 3806 }, { "epoch": 0.37177734375, "grad_norm": 0.24469956755638123, "learning_rate": 0.00037536975790256776, "loss": 1.8986, "step": 3807 }, { "epoch": 0.371875, "grad_norm": 0.2178879976272583, "learning_rate": 0.0003753061079625746, "loss": 1.8269, "step": 3808 }, { "epoch": 0.37197265625, "grad_norm": 0.19541752338409424, "learning_rate": 0.00037524244800288985, "loss": 1.8767, "step": 3809 }, { "epoch": 0.3720703125, "grad_norm": 0.22389501333236694, "learning_rate": 0.0003751787780298727, "loss": 1.8638, "step": 3810 }, { "epoch": 0.37216796875, "grad_norm": 0.2582794427871704, "learning_rate": 0.0003751150980498831, "loss": 1.8045, "step": 3811 }, { "epoch": 0.372265625, "grad_norm": 0.22942645847797394, "learning_rate": 0.00037505140806928214, "loss": 1.8611, "step": 3812 }, { "epoch": 0.37236328125, "grad_norm": 0.20219996571540833, "learning_rate": 0.00037498770809443185, "loss": 1.8577, "step": 3813 }, { "epoch": 0.3724609375, "grad_norm": 0.21832026541233063, "learning_rate": 0.00037492399813169534, "loss": 1.8651, "step": 3814 }, { "epoch": 0.37255859375, "grad_norm": 0.2228987216949463, "learning_rate": 0.00037486027818743665, "loss": 1.8622, "step": 3815 }, { "epoch": 0.37265625, "grad_norm": 0.24790452420711517, "learning_rate": 0.0003747965482680209, "loss": 1.8527, "step": 3816 }, { "epoch": 
0.37275390625, "grad_norm": 0.19790831208229065, "learning_rate": 0.0003747328083798141, "loss": 1.9084, "step": 3817 }, { "epoch": 0.3728515625, "grad_norm": 0.21211573481559753, "learning_rate": 0.00037466905852918324, "loss": 1.8931, "step": 3818 }, { "epoch": 0.37294921875, "grad_norm": 0.20081430673599243, "learning_rate": 0.0003746052987224964, "loss": 1.7665, "step": 3819 }, { "epoch": 0.373046875, "grad_norm": 0.19182641804218292, "learning_rate": 0.0003745415289661228, "loss": 1.8826, "step": 3820 }, { "epoch": 0.37314453125, "grad_norm": 0.1944439709186554, "learning_rate": 0.0003744777492664321, "loss": 1.8696, "step": 3821 }, { "epoch": 0.3732421875, "grad_norm": 0.17847688496112823, "learning_rate": 0.00037441395962979575, "loss": 1.8371, "step": 3822 }, { "epoch": 0.37333984375, "grad_norm": 0.19860583543777466, "learning_rate": 0.00037435016006258544, "loss": 1.8642, "step": 3823 }, { "epoch": 0.3734375, "grad_norm": 0.18326810002326965, "learning_rate": 0.0003742863505711744, "loss": 1.8481, "step": 3824 }, { "epoch": 0.37353515625, "grad_norm": 0.2152925431728363, "learning_rate": 0.0003742225311619364, "loss": 1.8934, "step": 3825 }, { "epoch": 0.3736328125, "grad_norm": 0.20573176443576813, "learning_rate": 0.00037415870184124666, "loss": 1.8624, "step": 3826 }, { "epoch": 0.37373046875, "grad_norm": 0.23908239603042603, "learning_rate": 0.00037409486261548105, "loss": 1.8815, "step": 3827 }, { "epoch": 0.373828125, "grad_norm": 0.21868613362312317, "learning_rate": 0.0003740310134910165, "loss": 1.8917, "step": 3828 }, { "epoch": 0.37392578125, "grad_norm": 0.22748807072639465, "learning_rate": 0.00037396715447423116, "loss": 1.8948, "step": 3829 }, { "epoch": 0.3740234375, "grad_norm": 0.2231343686580658, "learning_rate": 0.00037390328557150376, "loss": 1.8595, "step": 3830 }, { "epoch": 0.37412109375, "grad_norm": 0.19311115145683289, "learning_rate": 0.0003738394067892144, "loss": 1.833, "step": 3831 }, { "epoch": 0.37421875, "grad_norm": 0.24683715403079987, "learning_rate": 0.00037377551813374394, "loss": 1.8881, "step": 3832 }, { "epoch": 0.37431640625, "grad_norm": 0.27622419595718384, "learning_rate": 0.0003737116196114742, "loss": 1.8784, "step": 3833 }, { "epoch": 0.3744140625, "grad_norm": 0.20628823339939117, "learning_rate": 0.0003736477112287883, "loss": 1.856, "step": 3834 }, { "epoch": 0.37451171875, "grad_norm": 0.2787981927394867, "learning_rate": 0.0003735837929920698, "loss": 1.8345, "step": 3835 }, { "epoch": 0.374609375, "grad_norm": 0.25385451316833496, "learning_rate": 0.00037351986490770394, "loss": 1.8824, "step": 3836 }, { "epoch": 0.37470703125, "grad_norm": 0.2456686645746231, "learning_rate": 0.0003734559269820763, "loss": 1.8494, "step": 3837 }, { "epoch": 0.3748046875, "grad_norm": 0.23777589201927185, "learning_rate": 0.0003733919792215738, "loss": 1.8332, "step": 3838 }, { "epoch": 0.37490234375, "grad_norm": 0.24769902229309082, "learning_rate": 0.0003733280216325843, "loss": 1.8698, "step": 3839 }, { "epoch": 0.375, "grad_norm": 0.23901763558387756, "learning_rate": 0.0003732640542214965, "loss": 1.8203, "step": 3840 }, { "epoch": 0.37509765625, "grad_norm": 0.20374193787574768, "learning_rate": 0.0003732000769947003, "loss": 1.8252, "step": 3841 }, { "epoch": 0.3751953125, "grad_norm": 0.2926974594593048, "learning_rate": 0.00037313608995858625, "loss": 1.8683, "step": 3842 }, { "epoch": 0.37529296875, "grad_norm": 0.19516809284687042, "learning_rate": 0.00037307209311954626, "loss": 1.8714, "step": 3843 }, { "epoch": 0.375390625, 
"grad_norm": 0.2639426589012146, "learning_rate": 0.00037300808648397306, "loss": 1.8392, "step": 3844 }, { "epoch": 0.37548828125, "grad_norm": 0.2614942193031311, "learning_rate": 0.00037294407005826024, "loss": 1.8272, "step": 3845 }, { "epoch": 0.3755859375, "grad_norm": 0.1904597282409668, "learning_rate": 0.0003728800438488026, "loss": 1.862, "step": 3846 }, { "epoch": 0.37568359375, "grad_norm": 0.2732922434806824, "learning_rate": 0.00037281600786199566, "loss": 1.86, "step": 3847 }, { "epoch": 0.37578125, "grad_norm": 0.19078542292118073, "learning_rate": 0.0003727519621042361, "loss": 1.8619, "step": 3848 }, { "epoch": 0.37587890625, "grad_norm": 0.2331223487854004, "learning_rate": 0.00037268790658192153, "loss": 1.8451, "step": 3849 }, { "epoch": 0.3759765625, "grad_norm": 0.2795408070087433, "learning_rate": 0.00037262384130145053, "loss": 1.8958, "step": 3850 }, { "epoch": 0.37607421875, "grad_norm": 0.18555857241153717, "learning_rate": 0.0003725597662692227, "loss": 1.8379, "step": 3851 }, { "epoch": 0.376171875, "grad_norm": 0.23913845419883728, "learning_rate": 0.00037249568149163854, "loss": 1.7977, "step": 3852 }, { "epoch": 0.37626953125, "grad_norm": 0.25755074620246887, "learning_rate": 0.0003724315869750995, "loss": 1.8633, "step": 3853 }, { "epoch": 0.3763671875, "grad_norm": 0.23956407606601715, "learning_rate": 0.00037236748272600806, "loss": 1.8842, "step": 3854 }, { "epoch": 0.37646484375, "grad_norm": 0.21027059853076935, "learning_rate": 0.00037230336875076776, "loss": 1.8684, "step": 3855 }, { "epoch": 0.3765625, "grad_norm": 0.20290538668632507, "learning_rate": 0.0003722392450557828, "loss": 1.8714, "step": 3856 }, { "epoch": 0.37666015625, "grad_norm": 0.24776442348957062, "learning_rate": 0.0003721751116474589, "loss": 1.8192, "step": 3857 }, { "epoch": 0.3767578125, "grad_norm": 0.28582367300987244, "learning_rate": 0.0003721109685322022, "loss": 1.8696, "step": 3858 }, { "epoch": 0.37685546875, "grad_norm": 0.2176811397075653, "learning_rate": 0.00037204681571642004, "loss": 1.8502, "step": 3859 }, { "epoch": 0.376953125, "grad_norm": 0.2929898500442505, "learning_rate": 0.00037198265320652064, "loss": 1.8552, "step": 3860 }, { "epoch": 0.37705078125, "grad_norm": 0.2156658172607422, "learning_rate": 0.0003719184810089135, "loss": 1.8439, "step": 3861 }, { "epoch": 0.3771484375, "grad_norm": 0.272320955991745, "learning_rate": 0.00037185429913000865, "loss": 1.8302, "step": 3862 }, { "epoch": 0.37724609375, "grad_norm": 0.21699823439121246, "learning_rate": 0.00037179010757621745, "loss": 1.8717, "step": 3863 }, { "epoch": 0.37734375, "grad_norm": 0.2614113390445709, "learning_rate": 0.00037172590635395196, "loss": 1.8684, "step": 3864 }, { "epoch": 0.37744140625, "grad_norm": 0.21246129274368286, "learning_rate": 0.0003716616954696254, "loss": 1.8775, "step": 3865 }, { "epoch": 0.3775390625, "grad_norm": 0.24341346323490143, "learning_rate": 0.0003715974749296517, "loss": 1.8622, "step": 3866 }, { "epoch": 0.37763671875, "grad_norm": 0.20468126237392426, "learning_rate": 0.00037153324474044607, "loss": 1.8373, "step": 3867 }, { "epoch": 0.377734375, "grad_norm": 0.22117452323436737, "learning_rate": 0.00037146900490842445, "loss": 1.8322, "step": 3868 }, { "epoch": 0.37783203125, "grad_norm": 0.21013975143432617, "learning_rate": 0.00037140475544000394, "loss": 1.8814, "step": 3869 }, { "epoch": 0.3779296875, "grad_norm": 0.19857852160930634, "learning_rate": 0.0003713404963416024, "loss": 1.8798, "step": 3870 }, { "epoch": 0.37802734375, "grad_norm": 
0.2867245078086853, "learning_rate": 0.0003712762276196389, "loss": 1.8232, "step": 3871 }, { "epoch": 0.378125, "grad_norm": 0.260097861289978, "learning_rate": 0.000371211949280533, "loss": 1.843, "step": 3872 }, { "epoch": 0.37822265625, "grad_norm": 0.23447772860527039, "learning_rate": 0.00037114766133070586, "loss": 1.8219, "step": 3873 }, { "epoch": 0.3783203125, "grad_norm": 0.223851278424263, "learning_rate": 0.0003710833637765791, "loss": 1.8897, "step": 3874 }, { "epoch": 0.37841796875, "grad_norm": 0.2399793565273285, "learning_rate": 0.00037101905662457546, "loss": 1.8636, "step": 3875 }, { "epoch": 0.378515625, "grad_norm": 0.20454320311546326, "learning_rate": 0.00037095473988111883, "loss": 1.8749, "step": 3876 }, { "epoch": 0.37861328125, "grad_norm": 0.22218739986419678, "learning_rate": 0.0003708904135526337, "loss": 1.8741, "step": 3877 }, { "epoch": 0.3787109375, "grad_norm": 0.2315162718296051, "learning_rate": 0.00037082607764554574, "loss": 1.8579, "step": 3878 }, { "epoch": 0.37880859375, "grad_norm": 0.21069717407226562, "learning_rate": 0.00037076173216628165, "loss": 1.9328, "step": 3879 }, { "epoch": 0.37890625, "grad_norm": 0.23605448007583618, "learning_rate": 0.00037069737712126886, "loss": 1.8225, "step": 3880 }, { "epoch": 0.37900390625, "grad_norm": 0.18359407782554626, "learning_rate": 0.00037063301251693593, "loss": 1.8626, "step": 3881 }, { "epoch": 0.3791015625, "grad_norm": 0.2351130247116089, "learning_rate": 0.00037056863835971214, "loss": 1.8855, "step": 3882 }, { "epoch": 0.37919921875, "grad_norm": 0.1951916217803955, "learning_rate": 0.0003705042546560282, "loss": 1.8423, "step": 3883 }, { "epoch": 0.379296875, "grad_norm": 0.2034117877483368, "learning_rate": 0.00037043986141231525, "loss": 1.8542, "step": 3884 }, { "epoch": 0.37939453125, "grad_norm": 0.2252059429883957, "learning_rate": 0.00037037545863500567, "loss": 1.8623, "step": 3885 }, { "epoch": 0.3794921875, "grad_norm": 0.17208275198936462, "learning_rate": 0.0003703110463305327, "loss": 1.8129, "step": 3886 }, { "epoch": 0.37958984375, "grad_norm": 0.19508133828639984, "learning_rate": 0.0003702466245053306, "loss": 1.8187, "step": 3887 }, { "epoch": 0.3796875, "grad_norm": 0.18515267968177795, "learning_rate": 0.0003701821931658345, "loss": 1.8416, "step": 3888 }, { "epoch": 0.37978515625, "grad_norm": 0.16859371960163116, "learning_rate": 0.0003701177523184806, "loss": 1.9132, "step": 3889 }, { "epoch": 0.3798828125, "grad_norm": 0.1977667212486267, "learning_rate": 0.0003700533019697059, "loss": 1.8912, "step": 3890 }, { "epoch": 0.37998046875, "grad_norm": 0.18891897797584534, "learning_rate": 0.0003699888421259483, "loss": 1.8595, "step": 3891 }, { "epoch": 0.380078125, "grad_norm": 0.21774737536907196, "learning_rate": 0.00036992437279364707, "loss": 1.911, "step": 3892 }, { "epoch": 0.38017578125, "grad_norm": 0.29687103629112244, "learning_rate": 0.00036985989397924187, "loss": 1.8465, "step": 3893 }, { "epoch": 0.3802734375, "grad_norm": 0.30139657855033875, "learning_rate": 0.0003697954056891736, "loss": 1.8941, "step": 3894 }, { "epoch": 0.38037109375, "grad_norm": 0.22587136924266815, "learning_rate": 0.00036973090792988424, "loss": 1.8231, "step": 3895 }, { "epoch": 0.38046875, "grad_norm": 0.2205764204263687, "learning_rate": 0.0003696664007078163, "loss": 1.8506, "step": 3896 }, { "epoch": 0.38056640625, "grad_norm": 0.1905178278684616, "learning_rate": 0.00036960188402941375, "loss": 1.8633, "step": 3897 }, { "epoch": 0.3806640625, "grad_norm": 0.24393075704574585, 
"learning_rate": 0.00036953735790112087, "loss": 1.8828, "step": 3898 }, { "epoch": 0.38076171875, "grad_norm": 0.23119643330574036, "learning_rate": 0.0003694728223293836, "loss": 1.8468, "step": 3899 }, { "epoch": 0.380859375, "grad_norm": 0.274705708026886, "learning_rate": 0.0003694082773206483, "loss": 1.889, "step": 3900 }, { "epoch": 0.38095703125, "grad_norm": 0.18551072478294373, "learning_rate": 0.0003693437228813624, "loss": 1.8425, "step": 3901 }, { "epoch": 0.3810546875, "grad_norm": 0.3081590533256531, "learning_rate": 0.0003692791590179746, "loss": 1.8491, "step": 3902 }, { "epoch": 0.38115234375, "grad_norm": 0.20071613788604736, "learning_rate": 0.00036921458573693387, "loss": 1.8793, "step": 3903 }, { "epoch": 0.38125, "grad_norm": 0.26677054166793823, "learning_rate": 0.0003691500030446908, "loss": 1.9019, "step": 3904 }, { "epoch": 0.38134765625, "grad_norm": 0.19375574588775635, "learning_rate": 0.0003690854109476964, "loss": 1.8623, "step": 3905 }, { "epoch": 0.3814453125, "grad_norm": 0.23315277695655823, "learning_rate": 0.00036902080945240307, "loss": 1.8494, "step": 3906 }, { "epoch": 0.38154296875, "grad_norm": 0.24586628377437592, "learning_rate": 0.00036895619856526377, "loss": 1.8783, "step": 3907 }, { "epoch": 0.381640625, "grad_norm": 0.18601994216442108, "learning_rate": 0.00036889157829273265, "loss": 1.7878, "step": 3908 }, { "epoch": 0.38173828125, "grad_norm": 0.2880648970603943, "learning_rate": 0.0003688269486412646, "loss": 1.8621, "step": 3909 }, { "epoch": 0.3818359375, "grad_norm": 0.20966652035713196, "learning_rate": 0.00036876230961731564, "loss": 1.8271, "step": 3910 }, { "epoch": 0.38193359375, "grad_norm": 0.22924384474754333, "learning_rate": 0.0003686976612273427, "loss": 1.8308, "step": 3911 }, { "epoch": 0.38203125, "grad_norm": 0.2276628315448761, "learning_rate": 0.0003686330034778034, "loss": 1.8565, "step": 3912 }, { "epoch": 0.38212890625, "grad_norm": 0.2096962332725525, "learning_rate": 0.00036856833637515657, "loss": 1.8429, "step": 3913 }, { "epoch": 0.3822265625, "grad_norm": 0.25153499841690063, "learning_rate": 0.0003685036599258619, "loss": 1.8352, "step": 3914 }, { "epoch": 0.38232421875, "grad_norm": 0.21400980651378632, "learning_rate": 0.00036843897413637986, "loss": 1.9016, "step": 3915 }, { "epoch": 0.382421875, "grad_norm": 0.2173420935869217, "learning_rate": 0.00036837427901317225, "loss": 1.861, "step": 3916 }, { "epoch": 0.38251953125, "grad_norm": 0.1773025095462799, "learning_rate": 0.0003683095745627014, "loss": 1.849, "step": 3917 }, { "epoch": 0.3826171875, "grad_norm": 0.20093916356563568, "learning_rate": 0.00036824486079143064, "loss": 1.8404, "step": 3918 }, { "epoch": 0.38271484375, "grad_norm": 0.20080716907978058, "learning_rate": 0.00036818013770582424, "loss": 1.8567, "step": 3919 }, { "epoch": 0.3828125, "grad_norm": 0.24920229613780975, "learning_rate": 0.0003681154053123478, "loss": 1.857, "step": 3920 }, { "epoch": 0.38291015625, "grad_norm": 0.19836358726024628, "learning_rate": 0.00036805066361746716, "loss": 1.8674, "step": 3921 }, { "epoch": 0.3830078125, "grad_norm": 0.2825253903865814, "learning_rate": 0.00036798591262764965, "loss": 1.8862, "step": 3922 }, { "epoch": 0.38310546875, "grad_norm": 0.25824931263923645, "learning_rate": 0.00036792115234936316, "loss": 1.8765, "step": 3923 }, { "epoch": 0.383203125, "grad_norm": 0.22667564451694489, "learning_rate": 0.00036785638278907687, "loss": 1.8585, "step": 3924 }, { "epoch": 0.38330078125, "grad_norm": 0.29045218229293823, 
"learning_rate": 0.0003677916039532605, "loss": 1.858, "step": 3925 }, { "epoch": 0.3833984375, "grad_norm": 0.20461486279964447, "learning_rate": 0.00036772681584838497, "loss": 1.8507, "step": 3926 }, { "epoch": 0.38349609375, "grad_norm": 0.25927817821502686, "learning_rate": 0.000367662018480922, "loss": 1.8297, "step": 3927 }, { "epoch": 0.38359375, "grad_norm": 0.22330361604690552, "learning_rate": 0.00036759721185734433, "loss": 1.8351, "step": 3928 }, { "epoch": 0.38369140625, "grad_norm": 0.26381710171699524, "learning_rate": 0.00036753239598412554, "loss": 1.9159, "step": 3929 }, { "epoch": 0.3837890625, "grad_norm": 0.2930384576320648, "learning_rate": 0.0003674675708677401, "loss": 1.857, "step": 3930 }, { "epoch": 0.38388671875, "grad_norm": 0.19331973791122437, "learning_rate": 0.0003674027365146636, "loss": 1.8628, "step": 3931 }, { "epoch": 0.383984375, "grad_norm": 0.30140820145606995, "learning_rate": 0.00036733789293137226, "loss": 1.8753, "step": 3932 }, { "epoch": 0.38408203125, "grad_norm": 0.306213915348053, "learning_rate": 0.0003672730401243435, "loss": 1.8682, "step": 3933 }, { "epoch": 0.3841796875, "grad_norm": 0.22284962236881256, "learning_rate": 0.00036720817810005554, "loss": 1.8159, "step": 3934 }, { "epoch": 0.38427734375, "grad_norm": 0.3082934319972992, "learning_rate": 0.0003671433068649874, "loss": 1.893, "step": 3935 }, { "epoch": 0.384375, "grad_norm": 0.264959454536438, "learning_rate": 0.0003670784264256192, "loss": 1.8645, "step": 3936 }, { "epoch": 0.38447265625, "grad_norm": 0.20448656380176544, "learning_rate": 0.000367013536788432, "loss": 1.8722, "step": 3937 }, { "epoch": 0.3845703125, "grad_norm": 0.2482098639011383, "learning_rate": 0.0003669486379599077, "loss": 1.8509, "step": 3938 }, { "epoch": 0.38466796875, "grad_norm": 0.24799376726150513, "learning_rate": 0.000366883729946529, "loss": 1.8262, "step": 3939 }, { "epoch": 0.384765625, "grad_norm": 0.21177054941654205, "learning_rate": 0.0003668188127547796, "loss": 1.8538, "step": 3940 }, { "epoch": 0.38486328125, "grad_norm": 0.2216721624135971, "learning_rate": 0.0003667538863911444, "loss": 1.8115, "step": 3941 }, { "epoch": 0.3849609375, "grad_norm": 0.2219676971435547, "learning_rate": 0.00036668895086210867, "loss": 1.852, "step": 3942 }, { "epoch": 0.38505859375, "grad_norm": 0.1981191635131836, "learning_rate": 0.00036662400617415913, "loss": 1.8699, "step": 3943 }, { "epoch": 0.38515625, "grad_norm": 0.19222186505794525, "learning_rate": 0.00036655905233378306, "loss": 1.864, "step": 3944 }, { "epoch": 0.38525390625, "grad_norm": 0.23437833786010742, "learning_rate": 0.00036649408934746885, "loss": 1.9019, "step": 3945 }, { "epoch": 0.3853515625, "grad_norm": 0.18790283799171448, "learning_rate": 0.00036642911722170563, "loss": 1.8592, "step": 3946 }, { "epoch": 0.38544921875, "grad_norm": 0.2296464890241623, "learning_rate": 0.00036636413596298365, "loss": 1.8829, "step": 3947 }, { "epoch": 0.385546875, "grad_norm": 0.21702931821346283, "learning_rate": 0.0003662991455777938, "loss": 1.868, "step": 3948 }, { "epoch": 0.38564453125, "grad_norm": 0.1856737732887268, "learning_rate": 0.0003662341460726282, "loss": 1.8538, "step": 3949 }, { "epoch": 0.3857421875, "grad_norm": 0.2527240216732025, "learning_rate": 0.0003661691374539797, "loss": 1.8328, "step": 3950 }, { "epoch": 0.38583984375, "grad_norm": 0.17746932804584503, "learning_rate": 0.00036610411972834195, "loss": 1.8069, "step": 3951 }, { "epoch": 0.3859375, "grad_norm": 0.19036374986171722, "learning_rate": 
0.00036603909290220993, "loss": 1.8435, "step": 3952 }, { "epoch": 0.38603515625, "grad_norm": 0.18192900717258453, "learning_rate": 0.0003659740569820789, "loss": 1.7831, "step": 3953 }, { "epoch": 0.3861328125, "grad_norm": 0.21768184006214142, "learning_rate": 0.0003659090119744456, "loss": 1.8316, "step": 3954 }, { "epoch": 0.38623046875, "grad_norm": 0.2052927315235138, "learning_rate": 0.0003658439578858074, "loss": 1.8758, "step": 3955 }, { "epoch": 0.386328125, "grad_norm": 0.1776941865682602, "learning_rate": 0.0003657788947226626, "loss": 1.8418, "step": 3956 }, { "epoch": 0.38642578125, "grad_norm": 0.23303310573101044, "learning_rate": 0.0003657138224915104, "loss": 1.8663, "step": 3957 }, { "epoch": 0.3865234375, "grad_norm": 0.20003245770931244, "learning_rate": 0.000365648741198851, "loss": 1.8711, "step": 3958 }, { "epoch": 0.38662109375, "grad_norm": 0.19043077528476715, "learning_rate": 0.00036558365085118545, "loss": 1.8747, "step": 3959 }, { "epoch": 0.38671875, "grad_norm": 0.18690763413906097, "learning_rate": 0.00036551855145501566, "loss": 1.8116, "step": 3960 }, { "epoch": 0.38681640625, "grad_norm": 0.25892189145088196, "learning_rate": 0.0003654534430168445, "loss": 1.8288, "step": 3961 }, { "epoch": 0.3869140625, "grad_norm": 0.2468033730983734, "learning_rate": 0.0003653883255431758, "loss": 1.8459, "step": 3962 }, { "epoch": 0.38701171875, "grad_norm": 0.1726692169904709, "learning_rate": 0.0003653231990405141, "loss": 1.8456, "step": 3963 }, { "epoch": 0.387109375, "grad_norm": 0.21686433255672455, "learning_rate": 0.00036525806351536493, "loss": 1.8196, "step": 3964 }, { "epoch": 0.38720703125, "grad_norm": 0.23522233963012695, "learning_rate": 0.00036519291897423495, "loss": 1.8325, "step": 3965 }, { "epoch": 0.3873046875, "grad_norm": 0.2262456715106964, "learning_rate": 0.00036512776542363135, "loss": 1.831, "step": 3966 }, { "epoch": 0.38740234375, "grad_norm": 0.2011958360671997, "learning_rate": 0.0003650626028700625, "loss": 1.8707, "step": 3967 }, { "epoch": 0.3875, "grad_norm": 0.297262579202652, "learning_rate": 0.0003649974313200374, "loss": 1.8487, "step": 3968 }, { "epoch": 0.38759765625, "grad_norm": 0.2600073516368866, "learning_rate": 0.00036493225078006625, "loss": 1.8283, "step": 3969 }, { "epoch": 0.3876953125, "grad_norm": 0.26020193099975586, "learning_rate": 0.00036486706125666, "loss": 1.853, "step": 3970 }, { "epoch": 0.38779296875, "grad_norm": 0.3049705922603607, "learning_rate": 0.0003648018627563305, "loss": 1.8563, "step": 3971 }, { "epoch": 0.387890625, "grad_norm": 0.22959887981414795, "learning_rate": 0.0003647366552855905, "loss": 1.8756, "step": 3972 }, { "epoch": 0.38798828125, "grad_norm": 0.2265494018793106, "learning_rate": 0.0003646714388509536, "loss": 1.8164, "step": 3973 }, { "epoch": 0.3880859375, "grad_norm": 0.22998474538326263, "learning_rate": 0.0003646062134589343, "loss": 1.8905, "step": 3974 }, { "epoch": 0.38818359375, "grad_norm": 0.19562913477420807, "learning_rate": 0.00036454097911604817, "loss": 1.864, "step": 3975 }, { "epoch": 0.38828125, "grad_norm": 0.23625966906547546, "learning_rate": 0.0003644757358288116, "loss": 1.8254, "step": 3976 }, { "epoch": 0.38837890625, "grad_norm": 0.25710153579711914, "learning_rate": 0.00036441048360374154, "loss": 1.8971, "step": 3977 }, { "epoch": 0.3884765625, "grad_norm": 0.22964118421077728, "learning_rate": 0.0003643452224473563, "loss": 1.8228, "step": 3978 }, { "epoch": 0.38857421875, "grad_norm": 0.22172428667545319, "learning_rate": 0.00036427995236617487, 
"loss": 1.8451, "step": 3979 }, { "epoch": 0.388671875, "grad_norm": 0.184449702501297, "learning_rate": 0.0003642146733667172, "loss": 1.8486, "step": 3980 }, { "epoch": 0.38876953125, "grad_norm": 0.2299502044916153, "learning_rate": 0.000364149385455504, "loss": 1.8722, "step": 3981 }, { "epoch": 0.3888671875, "grad_norm": 0.20748716592788696, "learning_rate": 0.00036408408863905696, "loss": 1.8757, "step": 3982 }, { "epoch": 0.38896484375, "grad_norm": 0.20994932949543, "learning_rate": 0.0003640187829238988, "loss": 1.8664, "step": 3983 }, { "epoch": 0.3890625, "grad_norm": 0.20542369782924652, "learning_rate": 0.0003639534683165527, "loss": 1.8165, "step": 3984 }, { "epoch": 0.38916015625, "grad_norm": 0.22056767344474792, "learning_rate": 0.00036388814482354333, "loss": 1.8286, "step": 3985 }, { "epoch": 0.3892578125, "grad_norm": 0.24840861558914185, "learning_rate": 0.00036382281245139563, "loss": 1.8559, "step": 3986 }, { "epoch": 0.38935546875, "grad_norm": 0.20917242765426636, "learning_rate": 0.0003637574712066359, "loss": 1.8134, "step": 3987 }, { "epoch": 0.389453125, "grad_norm": 0.21101856231689453, "learning_rate": 0.0003636921210957912, "loss": 1.8487, "step": 3988 }, { "epoch": 0.38955078125, "grad_norm": 0.2077265977859497, "learning_rate": 0.00036362676212538925, "loss": 1.8672, "step": 3989 }, { "epoch": 0.3896484375, "grad_norm": 0.21952399611473083, "learning_rate": 0.00036356139430195907, "loss": 1.8617, "step": 3990 }, { "epoch": 0.38974609375, "grad_norm": 0.2540169060230255, "learning_rate": 0.00036349601763203, "loss": 1.8471, "step": 3991 }, { "epoch": 0.38984375, "grad_norm": 0.18410301208496094, "learning_rate": 0.00036343063212213283, "loss": 1.8571, "step": 3992 }, { "epoch": 0.38994140625, "grad_norm": 0.22891919314861298, "learning_rate": 0.0003633652377787989, "loss": 1.8226, "step": 3993 }, { "epoch": 0.3900390625, "grad_norm": 0.2170734405517578, "learning_rate": 0.0003632998346085607, "loss": 1.8544, "step": 3994 }, { "epoch": 0.39013671875, "grad_norm": 0.2172975242137909, "learning_rate": 0.00036323442261795115, "loss": 1.8275, "step": 3995 }, { "epoch": 0.390234375, "grad_norm": 0.22081336379051208, "learning_rate": 0.0003631690018135045, "loss": 1.8478, "step": 3996 }, { "epoch": 0.39033203125, "grad_norm": 0.17948199808597565, "learning_rate": 0.00036310357220175574, "loss": 1.8671, "step": 3997 }, { "epoch": 0.3904296875, "grad_norm": 0.2276734560728073, "learning_rate": 0.0003630381337892406, "loss": 1.8256, "step": 3998 }, { "epoch": 0.39052734375, "grad_norm": 0.21136391162872314, "learning_rate": 0.0003629726865824958, "loss": 1.8463, "step": 3999 }, { "epoch": 0.390625, "grad_norm": 0.26860180497169495, "learning_rate": 0.000362907230588059, "loss": 1.8529, "step": 4000 }, { "epoch": 0.39072265625, "grad_norm": 0.22392459213733673, "learning_rate": 0.00036284176581246876, "loss": 1.8282, "step": 4001 }, { "epoch": 0.3908203125, "grad_norm": 0.22236575186252594, "learning_rate": 0.00036277629226226423, "loss": 1.8288, "step": 4002 }, { "epoch": 0.39091796875, "grad_norm": 0.2793997526168823, "learning_rate": 0.0003627108099439857, "loss": 1.8791, "step": 4003 }, { "epoch": 0.391015625, "grad_norm": 0.194699227809906, "learning_rate": 0.00036264531886417436, "loss": 1.8592, "step": 4004 }, { "epoch": 0.39111328125, "grad_norm": 0.27056989073753357, "learning_rate": 0.000362579819029372, "loss": 1.8255, "step": 4005 }, { "epoch": 0.3912109375, "grad_norm": 0.2623845338821411, "learning_rate": 0.0003625143104461218, "loss": 1.8555, "step": 
4006 }, { "epoch": 0.39130859375, "grad_norm": 0.2528698146343231, "learning_rate": 0.0003624487931209672, "loss": 1.8176, "step": 4007 }, { "epoch": 0.39140625, "grad_norm": 0.2843823730945587, "learning_rate": 0.000362383267060453, "loss": 1.8409, "step": 4008 }, { "epoch": 0.39150390625, "grad_norm": 0.196132093667984, "learning_rate": 0.0003623177322711244, "loss": 1.8331, "step": 4009 }, { "epoch": 0.3916015625, "grad_norm": 0.2398262619972229, "learning_rate": 0.000362252188759528, "loss": 1.867, "step": 4010 }, { "epoch": 0.39169921875, "grad_norm": 0.21861997246742249, "learning_rate": 0.0003621866365322109, "loss": 1.869, "step": 4011 }, { "epoch": 0.391796875, "grad_norm": 0.23221160471439362, "learning_rate": 0.0003621210755957212, "loss": 1.8816, "step": 4012 }, { "epoch": 0.39189453125, "grad_norm": 0.18605566024780273, "learning_rate": 0.0003620555059566079, "loss": 1.8516, "step": 4013 }, { "epoch": 0.3919921875, "grad_norm": 0.2481876164674759, "learning_rate": 0.0003619899276214208, "loss": 1.867, "step": 4014 }, { "epoch": 0.39208984375, "grad_norm": 0.22675789892673492, "learning_rate": 0.0003619243405967106, "loss": 1.8357, "step": 4015 }, { "epoch": 0.3921875, "grad_norm": 0.19097070395946503, "learning_rate": 0.0003618587448890289, "loss": 1.8706, "step": 4016 }, { "epoch": 0.39228515625, "grad_norm": 0.22903457283973694, "learning_rate": 0.00036179314050492796, "loss": 1.8436, "step": 4017 }, { "epoch": 0.3923828125, "grad_norm": 0.20662012696266174, "learning_rate": 0.0003617275274509613, "loss": 1.8151, "step": 4018 }, { "epoch": 0.39248046875, "grad_norm": 0.21021722257137299, "learning_rate": 0.0003616619057336829, "loss": 1.8779, "step": 4019 }, { "epoch": 0.392578125, "grad_norm": 0.22491545975208282, "learning_rate": 0.00036159627535964795, "loss": 1.8575, "step": 4020 }, { "epoch": 0.39267578125, "grad_norm": 0.24279853701591492, "learning_rate": 0.0003615306363354122, "loss": 1.8671, "step": 4021 }, { "epoch": 0.3927734375, "grad_norm": 0.23732434213161469, "learning_rate": 0.0003614649886675326, "loss": 1.8306, "step": 4022 }, { "epoch": 0.39287109375, "grad_norm": 0.17786726355552673, "learning_rate": 0.0003613993323625665, "loss": 1.8064, "step": 4023 }, { "epoch": 0.39296875, "grad_norm": 0.18526485562324524, "learning_rate": 0.00036133366742707257, "loss": 1.8427, "step": 4024 }, { "epoch": 0.39306640625, "grad_norm": 0.1718062311410904, "learning_rate": 0.00036126799386761025, "loss": 1.8645, "step": 4025 }, { "epoch": 0.3931640625, "grad_norm": 0.19269327819347382, "learning_rate": 0.00036120231169073944, "loss": 1.8668, "step": 4026 }, { "epoch": 0.39326171875, "grad_norm": 0.19223515689373016, "learning_rate": 0.0003611366209030214, "loss": 1.861, "step": 4027 }, { "epoch": 0.393359375, "grad_norm": 0.18523992598056793, "learning_rate": 0.000361070921511018, "loss": 1.8786, "step": 4028 }, { "epoch": 0.39345703125, "grad_norm": 0.18942400813102722, "learning_rate": 0.00036100521352129214, "loss": 1.8613, "step": 4029 }, { "epoch": 0.3935546875, "grad_norm": 0.20724228024482727, "learning_rate": 0.00036093949694040734, "loss": 1.7906, "step": 4030 }, { "epoch": 0.39365234375, "grad_norm": 0.21637628972530365, "learning_rate": 0.0003608737717749282, "loss": 1.89, "step": 4031 }, { "epoch": 0.39375, "grad_norm": 0.1924957036972046, "learning_rate": 0.00036080803803142003, "loss": 1.8543, "step": 4032 }, { "epoch": 0.39384765625, "grad_norm": 0.19536274671554565, "learning_rate": 0.00036074229571644894, "loss": 1.8353, "step": 4033 }, { "epoch": 
0.3939453125, "grad_norm": 0.21658040583133698, "learning_rate": 0.0003606765448365823, "loss": 1.8187, "step": 4034 }, { "epoch": 0.39404296875, "grad_norm": 0.20503970980644226, "learning_rate": 0.0003606107853983877, "loss": 1.8199, "step": 4035 }, { "epoch": 0.394140625, "grad_norm": 0.22070644795894623, "learning_rate": 0.00036054501740843416, "loss": 1.8097, "step": 4036 }, { "epoch": 0.39423828125, "grad_norm": 0.19360551238059998, "learning_rate": 0.0003604792408732911, "loss": 1.8259, "step": 4037 }, { "epoch": 0.3943359375, "grad_norm": 0.18369770050048828, "learning_rate": 0.0003604134557995294, "loss": 1.859, "step": 4038 }, { "epoch": 0.39443359375, "grad_norm": 0.219722718000412, "learning_rate": 0.00036034766219372, "loss": 1.8831, "step": 4039 }, { "epoch": 0.39453125, "grad_norm": 0.18859165906906128, "learning_rate": 0.0003602818600624353, "loss": 1.871, "step": 4040 }, { "epoch": 0.39462890625, "grad_norm": 0.16907556354999542, "learning_rate": 0.00036021604941224834, "loss": 1.8429, "step": 4041 }, { "epoch": 0.3947265625, "grad_norm": 0.20368745923042297, "learning_rate": 0.0003601502302497329, "loss": 1.8453, "step": 4042 }, { "epoch": 0.39482421875, "grad_norm": 0.21178042888641357, "learning_rate": 0.0003600844025814639, "loss": 1.8394, "step": 4043 }, { "epoch": 0.394921875, "grad_norm": 0.2171480804681778, "learning_rate": 0.0003600185664140168, "loss": 1.873, "step": 4044 }, { "epoch": 0.39501953125, "grad_norm": 0.2801247239112854, "learning_rate": 0.0003599527217539682, "loss": 1.892, "step": 4045 }, { "epoch": 0.3951171875, "grad_norm": 0.20998451113700867, "learning_rate": 0.00035988686860789526, "loss": 1.8469, "step": 4046 }, { "epoch": 0.39521484375, "grad_norm": 0.23407518863677979, "learning_rate": 0.0003598210069823761, "loss": 1.8476, "step": 4047 }, { "epoch": 0.3953125, "grad_norm": 0.27358564734458923, "learning_rate": 0.00035975513688398994, "loss": 1.8614, "step": 4048 }, { "epoch": 0.39541015625, "grad_norm": 0.25694334506988525, "learning_rate": 0.0003596892583193164, "loss": 1.8174, "step": 4049 }, { "epoch": 0.3955078125, "grad_norm": 0.24655091762542725, "learning_rate": 0.0003596233712949362, "loss": 1.8475, "step": 4050 }, { "epoch": 0.39560546875, "grad_norm": 0.23054777085781097, "learning_rate": 0.0003595574758174309, "loss": 1.7855, "step": 4051 }, { "epoch": 0.395703125, "grad_norm": 0.31576693058013916, "learning_rate": 0.00035949157189338294, "loss": 1.8487, "step": 4052 }, { "epoch": 0.39580078125, "grad_norm": 0.21702778339385986, "learning_rate": 0.00035942565952937547, "loss": 1.8384, "step": 4053 }, { "epoch": 0.3958984375, "grad_norm": 0.2344021499156952, "learning_rate": 0.0003593597387319925, "loss": 1.8384, "step": 4054 }, { "epoch": 0.39599609375, "grad_norm": 0.2818906903266907, "learning_rate": 0.000359293809507819, "loss": 1.8767, "step": 4055 }, { "epoch": 0.39609375, "grad_norm": 0.24886588752269745, "learning_rate": 0.00035922787186344076, "loss": 1.8564, "step": 4056 }, { "epoch": 0.39619140625, "grad_norm": 0.21671739220619202, "learning_rate": 0.00035916192580544434, "loss": 1.8982, "step": 4057 }, { "epoch": 0.3962890625, "grad_norm": 0.21123698353767395, "learning_rate": 0.000359095971340417, "loss": 1.924, "step": 4058 }, { "epoch": 0.39638671875, "grad_norm": 0.21664677560329437, "learning_rate": 0.00035903000847494724, "loss": 1.8998, "step": 4059 }, { "epoch": 0.396484375, "grad_norm": 0.19823968410491943, "learning_rate": 0.00035896403721562406, "loss": 1.8309, "step": 4060 }, { "epoch": 0.39658203125, 
"grad_norm": 0.22748810052871704, "learning_rate": 0.0003588980575690374, "loss": 1.8967, "step": 4061 }, { "epoch": 0.3966796875, "grad_norm": 0.23281057178974152, "learning_rate": 0.0003588320695417781, "loss": 1.8736, "step": 4062 }, { "epoch": 0.39677734375, "grad_norm": 0.22981210052967072, "learning_rate": 0.00035876607314043766, "loss": 1.8353, "step": 4063 }, { "epoch": 0.396875, "grad_norm": 0.22894325852394104, "learning_rate": 0.00035870006837160866, "loss": 1.8416, "step": 4064 }, { "epoch": 0.39697265625, "grad_norm": 0.22248433530330658, "learning_rate": 0.0003586340552418843, "loss": 1.8807, "step": 4065 }, { "epoch": 0.3970703125, "grad_norm": 0.237218976020813, "learning_rate": 0.00035856803375785884, "loss": 1.7974, "step": 4066 }, { "epoch": 0.39716796875, "grad_norm": 0.20639021694660187, "learning_rate": 0.00035850200392612713, "loss": 1.8686, "step": 4067 }, { "epoch": 0.397265625, "grad_norm": 0.21953007578849792, "learning_rate": 0.00035843596575328495, "loss": 1.8426, "step": 4068 }, { "epoch": 0.39736328125, "grad_norm": 0.21375709772109985, "learning_rate": 0.00035836991924592903, "loss": 1.8502, "step": 4069 }, { "epoch": 0.3974609375, "grad_norm": 0.26015183329582214, "learning_rate": 0.0003583038644106567, "loss": 1.8387, "step": 4070 }, { "epoch": 0.39755859375, "grad_norm": 0.22684982419013977, "learning_rate": 0.00035823780125406637, "loss": 1.8188, "step": 4071 }, { "epoch": 0.39765625, "grad_norm": 0.1907491832971573, "learning_rate": 0.0003581717297827571, "loss": 1.8611, "step": 4072 }, { "epoch": 0.39775390625, "grad_norm": 0.2399233877658844, "learning_rate": 0.0003581056500033289, "loss": 1.8722, "step": 4073 }, { "epoch": 0.3978515625, "grad_norm": 0.2267402708530426, "learning_rate": 0.0003580395619223824, "loss": 1.8556, "step": 4074 }, { "epoch": 0.39794921875, "grad_norm": 0.2641052007675171, "learning_rate": 0.0003579734655465194, "loss": 1.8526, "step": 4075 }, { "epoch": 0.398046875, "grad_norm": 0.21591079235076904, "learning_rate": 0.00035790736088234227, "loss": 1.8107, "step": 4076 }, { "epoch": 0.39814453125, "grad_norm": 0.23522786796092987, "learning_rate": 0.0003578412479364543, "loss": 1.8761, "step": 4077 }, { "epoch": 0.3982421875, "grad_norm": 0.21411925554275513, "learning_rate": 0.00035777512671545953, "loss": 1.8791, "step": 4078 }, { "epoch": 0.39833984375, "grad_norm": 0.21472613513469696, "learning_rate": 0.000357708997225963, "loss": 1.8194, "step": 4079 }, { "epoch": 0.3984375, "grad_norm": 0.23808172345161438, "learning_rate": 0.0003576428594745703, "loss": 1.8432, "step": 4080 }, { "epoch": 0.39853515625, "grad_norm": 0.21339382231235504, "learning_rate": 0.00035757671346788803, "loss": 1.8387, "step": 4081 }, { "epoch": 0.3986328125, "grad_norm": 0.22620201110839844, "learning_rate": 0.0003575105592125238, "loss": 1.8393, "step": 4082 }, { "epoch": 0.39873046875, "grad_norm": 0.25172725319862366, "learning_rate": 0.0003574443967150856, "loss": 1.8487, "step": 4083 }, { "epoch": 0.398828125, "grad_norm": 0.20012016594409943, "learning_rate": 0.00035737822598218256, "loss": 1.8415, "step": 4084 }, { "epoch": 0.39892578125, "grad_norm": 0.21950571238994598, "learning_rate": 0.0003573120470204246, "loss": 1.8832, "step": 4085 }, { "epoch": 0.3990234375, "grad_norm": 0.20533329248428345, "learning_rate": 0.00035724585983642234, "loss": 1.8099, "step": 4086 }, { "epoch": 0.39912109375, "grad_norm": 0.231434166431427, "learning_rate": 0.0003571796644367873, "loss": 1.8297, "step": 4087 }, { "epoch": 0.39921875, "grad_norm": 
0.20492421090602875, "learning_rate": 0.00035711346082813183, "loss": 1.8369, "step": 4088 }, { "epoch": 0.39931640625, "grad_norm": 0.23543506860733032, "learning_rate": 0.00035704724901706905, "loss": 1.8491, "step": 4089 }, { "epoch": 0.3994140625, "grad_norm": 0.227260559797287, "learning_rate": 0.00035698102901021304, "loss": 1.8817, "step": 4090 }, { "epoch": 0.39951171875, "grad_norm": 0.22218318283557892, "learning_rate": 0.00035691480081417843, "loss": 1.8093, "step": 4091 }, { "epoch": 0.399609375, "grad_norm": 0.20409689843654633, "learning_rate": 0.00035684856443558095, "loss": 1.8418, "step": 4092 }, { "epoch": 0.39970703125, "grad_norm": 0.21179689466953278, "learning_rate": 0.000356782319881037, "loss": 1.8685, "step": 4093 }, { "epoch": 0.3998046875, "grad_norm": 0.20115849375724792, "learning_rate": 0.0003567160671571639, "loss": 1.8759, "step": 4094 }, { "epoch": 0.39990234375, "grad_norm": 0.23569095134735107, "learning_rate": 0.00035664980627057955, "loss": 1.8217, "step": 4095 }, { "epoch": 0.4, "grad_norm": 0.21255749464035034, "learning_rate": 0.0003565835372279029, "loss": 1.8576, "step": 4096 }, { "epoch": 0.40009765625, "grad_norm": 0.20328132808208466, "learning_rate": 0.0003565172600357537, "loss": 1.8178, "step": 4097 }, { "epoch": 0.4001953125, "grad_norm": 0.18217550218105316, "learning_rate": 0.0003564509747007523, "loss": 1.8567, "step": 4098 }, { "epoch": 0.40029296875, "grad_norm": 0.24595117568969727, "learning_rate": 0.0003563846812295202, "loss": 1.8359, "step": 4099 }, { "epoch": 0.400390625, "grad_norm": 0.22450943291187286, "learning_rate": 0.0003563183796286794, "loss": 1.8305, "step": 4100 }, { "epoch": 0.40048828125, "grad_norm": 0.23122942447662354, "learning_rate": 0.00035625206990485295, "loss": 1.8603, "step": 4101 }, { "epoch": 0.4005859375, "grad_norm": 0.19824737310409546, "learning_rate": 0.0003561857520646645, "loss": 1.8617, "step": 4102 }, { "epoch": 0.40068359375, "grad_norm": 0.18395096063613892, "learning_rate": 0.0003561194261147386, "loss": 1.8598, "step": 4103 }, { "epoch": 0.40078125, "grad_norm": 0.2111394703388214, "learning_rate": 0.0003560530920617009, "loss": 1.8488, "step": 4104 }, { "epoch": 0.40087890625, "grad_norm": 0.20235751569271088, "learning_rate": 0.00035598674991217715, "loss": 1.8167, "step": 4105 }, { "epoch": 0.4009765625, "grad_norm": 0.20254381000995636, "learning_rate": 0.00035592039967279466, "loss": 1.8727, "step": 4106 }, { "epoch": 0.40107421875, "grad_norm": 0.19116955995559692, "learning_rate": 0.0003558540413501811, "loss": 1.8705, "step": 4107 }, { "epoch": 0.401171875, "grad_norm": 0.2449391633272171, "learning_rate": 0.00035578767495096517, "loss": 1.8497, "step": 4108 }, { "epoch": 0.40126953125, "grad_norm": 0.20743444561958313, "learning_rate": 0.00035572130048177616, "loss": 1.8824, "step": 4109 }, { "epoch": 0.4013671875, "grad_norm": 0.2034366875886917, "learning_rate": 0.00035565491794924435, "loss": 1.8859, "step": 4110 }, { "epoch": 0.40146484375, "grad_norm": 0.19879882037639618, "learning_rate": 0.0003555885273600009, "loss": 1.8771, "step": 4111 }, { "epoch": 0.4015625, "grad_norm": 0.1877567619085312, "learning_rate": 0.00035552212872067733, "loss": 1.8553, "step": 4112 }, { "epoch": 0.40166015625, "grad_norm": 0.20307008922100067, "learning_rate": 0.00035545572203790653, "loss": 1.8379, "step": 4113 }, { "epoch": 0.4017578125, "grad_norm": 0.1797867864370346, "learning_rate": 0.0003553893073183219, "loss": 1.8359, "step": 4114 }, { "epoch": 0.40185546875, "grad_norm": 
0.1826634407043457, "learning_rate": 0.0003553228845685577, "loss": 1.9227, "step": 4115 }, { "epoch": 0.401953125, "grad_norm": 0.17619703710079193, "learning_rate": 0.0003552564537952489, "loss": 1.8275, "step": 4116 }, { "epoch": 0.40205078125, "grad_norm": 0.1993313729763031, "learning_rate": 0.0003551900150050313, "loss": 1.87, "step": 4117 }, { "epoch": 0.4021484375, "grad_norm": 0.19552871584892273, "learning_rate": 0.00035512356820454173, "loss": 1.8668, "step": 4118 }, { "epoch": 0.40224609375, "grad_norm": 0.21230106055736542, "learning_rate": 0.00035505711340041746, "loss": 1.8461, "step": 4119 }, { "epoch": 0.40234375, "grad_norm": 0.22370152175426483, "learning_rate": 0.0003549906505992968, "loss": 1.8724, "step": 4120 }, { "epoch": 0.40244140625, "grad_norm": 0.20677858591079712, "learning_rate": 0.00035492417980781876, "loss": 1.8431, "step": 4121 }, { "epoch": 0.4025390625, "grad_norm": 0.2558901906013489, "learning_rate": 0.0003548577010326233, "loss": 1.8964, "step": 4122 }, { "epoch": 0.40263671875, "grad_norm": 0.27261173725128174, "learning_rate": 0.0003547912142803509, "loss": 1.886, "step": 4123 }, { "epoch": 0.402734375, "grad_norm": 0.1867993026971817, "learning_rate": 0.0003547247195576432, "loss": 1.8509, "step": 4124 }, { "epoch": 0.40283203125, "grad_norm": 0.2584269642829895, "learning_rate": 0.00035465821687114224, "loss": 1.87, "step": 4125 }, { "epoch": 0.4029296875, "grad_norm": 0.2057262659072876, "learning_rate": 0.00035459170622749117, "loss": 1.8496, "step": 4126 }, { "epoch": 0.40302734375, "grad_norm": 0.22833289206027985, "learning_rate": 0.0003545251876333337, "loss": 1.8487, "step": 4127 }, { "epoch": 0.403125, "grad_norm": 0.21056689321994781, "learning_rate": 0.00035445866109531455, "loss": 1.849, "step": 4128 }, { "epoch": 0.40322265625, "grad_norm": 0.18776793777942657, "learning_rate": 0.0003543921266200791, "loss": 1.8724, "step": 4129 }, { "epoch": 0.4033203125, "grad_norm": 0.2139902561903, "learning_rate": 0.0003543255842142736, "loss": 1.8522, "step": 4130 }, { "epoch": 0.40341796875, "grad_norm": 0.2051902860403061, "learning_rate": 0.0003542590338845449, "loss": 1.8631, "step": 4131 }, { "epoch": 0.403515625, "grad_norm": 0.21415852010250092, "learning_rate": 0.00035419247563754094, "loss": 1.8147, "step": 4132 }, { "epoch": 0.40361328125, "grad_norm": 0.19563965499401093, "learning_rate": 0.00035412590947991017, "loss": 1.81, "step": 4133 }, { "epoch": 0.4037109375, "grad_norm": 0.17605456709861755, "learning_rate": 0.0003540593354183022, "loss": 1.8285, "step": 4134 }, { "epoch": 0.40380859375, "grad_norm": 0.24080799520015717, "learning_rate": 0.0003539927534593668, "loss": 1.8393, "step": 4135 }, { "epoch": 0.40390625, "grad_norm": 0.24760200083255768, "learning_rate": 0.0003539261636097553, "loss": 1.8982, "step": 4136 }, { "epoch": 0.40400390625, "grad_norm": 0.21751075983047485, "learning_rate": 0.0003538595658761192, "loss": 1.8246, "step": 4137 }, { "epoch": 0.4041015625, "grad_norm": 0.19909746944904327, "learning_rate": 0.00035379296026511115, "loss": 1.8526, "step": 4138 }, { "epoch": 0.40419921875, "grad_norm": 0.24126000702381134, "learning_rate": 0.0003537263467833845, "loss": 1.865, "step": 4139 }, { "epoch": 0.404296875, "grad_norm": 0.22304266691207886, "learning_rate": 0.0003536597254375931, "loss": 1.8456, "step": 4140 }, { "epoch": 0.40439453125, "grad_norm": 0.20051011443138123, "learning_rate": 0.0003535930962343921, "loss": 1.8121, "step": 4141 }, { "epoch": 0.4044921875, "grad_norm": 0.2357867807149887, 
"learning_rate": 0.00035352645918043695, "loss": 1.8646, "step": 4142 }, { "epoch": 0.40458984375, "grad_norm": 0.25244826078414917, "learning_rate": 0.00035345981428238434, "loss": 1.892, "step": 4143 }, { "epoch": 0.4046875, "grad_norm": 0.2515060603618622, "learning_rate": 0.0003533931615468913, "loss": 1.8368, "step": 4144 }, { "epoch": 0.40478515625, "grad_norm": 0.2245102971792221, "learning_rate": 0.00035332650098061593, "loss": 1.8444, "step": 4145 }, { "epoch": 0.4048828125, "grad_norm": 0.3059788644313812, "learning_rate": 0.00035325983259021707, "loss": 1.8445, "step": 4146 }, { "epoch": 0.40498046875, "grad_norm": 0.2570042610168457, "learning_rate": 0.0003531931563823542, "loss": 1.8501, "step": 4147 }, { "epoch": 0.405078125, "grad_norm": 0.23951447010040283, "learning_rate": 0.0003531264723636877, "loss": 1.839, "step": 4148 }, { "epoch": 0.40517578125, "grad_norm": 0.28501424193382263, "learning_rate": 0.00035305978054087886, "loss": 1.823, "step": 4149 }, { "epoch": 0.4052734375, "grad_norm": 0.21223093569278717, "learning_rate": 0.0003529930809205894, "loss": 1.8349, "step": 4150 }, { "epoch": 0.40537109375, "grad_norm": 0.25295740365982056, "learning_rate": 0.00035292637350948207, "loss": 1.8081, "step": 4151 }, { "epoch": 0.40546875, "grad_norm": 0.2550055980682373, "learning_rate": 0.00035285965831422054, "loss": 1.8832, "step": 4152 }, { "epoch": 0.40556640625, "grad_norm": 0.2280154526233673, "learning_rate": 0.00035279293534146877, "loss": 1.841, "step": 4153 }, { "epoch": 0.4056640625, "grad_norm": 0.20514193177223206, "learning_rate": 0.00035272620459789205, "loss": 1.861, "step": 4154 }, { "epoch": 0.40576171875, "grad_norm": 0.2677548825740814, "learning_rate": 0.000352659466090156, "loss": 1.8526, "step": 4155 }, { "epoch": 0.405859375, "grad_norm": 0.23861974477767944, "learning_rate": 0.00035259271982492735, "loss": 1.8089, "step": 4156 }, { "epoch": 0.40595703125, "grad_norm": 0.22723138332366943, "learning_rate": 0.00035252596580887343, "loss": 1.8546, "step": 4157 }, { "epoch": 0.4060546875, "grad_norm": 0.2531287670135498, "learning_rate": 0.0003524592040486622, "loss": 1.8654, "step": 4158 }, { "epoch": 0.40615234375, "grad_norm": 0.2574120759963989, "learning_rate": 0.00035239243455096285, "loss": 1.8565, "step": 4159 }, { "epoch": 0.40625, "grad_norm": 0.20986592769622803, "learning_rate": 0.0003523256573224449, "loss": 1.836, "step": 4160 }, { "epoch": 0.40634765625, "grad_norm": 0.1971091628074646, "learning_rate": 0.0003522588723697789, "loss": 1.8979, "step": 4161 }, { "epoch": 0.4064453125, "grad_norm": 0.2572074234485626, "learning_rate": 0.00035219207969963587, "loss": 1.8226, "step": 4162 }, { "epoch": 0.40654296875, "grad_norm": 0.17880785465240479, "learning_rate": 0.0003521252793186879, "loss": 1.797, "step": 4163 }, { "epoch": 0.406640625, "grad_norm": 0.23887290060520172, "learning_rate": 0.00035205847123360794, "loss": 1.8581, "step": 4164 }, { "epoch": 0.40673828125, "grad_norm": 0.21064692735671997, "learning_rate": 0.00035199165545106925, "loss": 1.871, "step": 4165 }, { "epoch": 0.4068359375, "grad_norm": 0.19952860474586487, "learning_rate": 0.00035192483197774634, "loss": 1.8349, "step": 4166 }, { "epoch": 0.40693359375, "grad_norm": 0.19057486951351166, "learning_rate": 0.0003518580008203142, "loss": 1.8081, "step": 4167 }, { "epoch": 0.40703125, "grad_norm": 0.19488559663295746, "learning_rate": 0.00035179116198544877, "loss": 1.8361, "step": 4168 }, { "epoch": 0.40712890625, "grad_norm": 0.18777482211589813, "learning_rate": 
0.00035172431547982644, "loss": 1.8531, "step": 4169 }, { "epoch": 0.4072265625, "grad_norm": 0.2459339201450348, "learning_rate": 0.0003516574613101247, "loss": 1.8105, "step": 4170 }, { "epoch": 0.40732421875, "grad_norm": 0.19164249300956726, "learning_rate": 0.0003515905994830218, "loss": 1.8585, "step": 4171 }, { "epoch": 0.407421875, "grad_norm": 0.2304055094718933, "learning_rate": 0.00035152373000519644, "loss": 1.7608, "step": 4172 }, { "epoch": 0.40751953125, "grad_norm": 0.22517216205596924, "learning_rate": 0.00035145685288332846, "loss": 1.8473, "step": 4173 }, { "epoch": 0.4076171875, "grad_norm": 0.20082591474056244, "learning_rate": 0.0003513899681240981, "loss": 1.8695, "step": 4174 }, { "epoch": 0.40771484375, "grad_norm": 0.22921518981456757, "learning_rate": 0.0003513230757341869, "loss": 1.8218, "step": 4175 }, { "epoch": 0.4078125, "grad_norm": 0.19029761850833893, "learning_rate": 0.0003512561757202764, "loss": 1.8311, "step": 4176 }, { "epoch": 0.40791015625, "grad_norm": 0.19780279695987701, "learning_rate": 0.0003511892680890496, "loss": 1.8341, "step": 4177 }, { "epoch": 0.4080078125, "grad_norm": 0.19720710813999176, "learning_rate": 0.00035112235284718993, "loss": 1.8473, "step": 4178 }, { "epoch": 0.40810546875, "grad_norm": 0.19246746599674225, "learning_rate": 0.00035105543000138147, "loss": 1.8593, "step": 4179 }, { "epoch": 0.408203125, "grad_norm": 0.19287921488285065, "learning_rate": 0.00035098849955830943, "loss": 1.9073, "step": 4180 }, { "epoch": 0.40830078125, "grad_norm": 0.22399508953094482, "learning_rate": 0.0003509215615246595, "loss": 1.8363, "step": 4181 }, { "epoch": 0.4083984375, "grad_norm": 0.2298484742641449, "learning_rate": 0.00035085461590711817, "loss": 1.8709, "step": 4182 }, { "epoch": 0.40849609375, "grad_norm": 0.18680132925510406, "learning_rate": 0.0003507876627123727, "loss": 1.8364, "step": 4183 }, { "epoch": 0.40859375, "grad_norm": 0.21050740778446198, "learning_rate": 0.00035072070194711106, "loss": 1.8382, "step": 4184 }, { "epoch": 0.40869140625, "grad_norm": 0.24991776049137115, "learning_rate": 0.00035065373361802227, "loss": 1.8289, "step": 4185 }, { "epoch": 0.4087890625, "grad_norm": 0.22001494467258453, "learning_rate": 0.00035058675773179566, "loss": 1.8253, "step": 4186 }, { "epoch": 0.40888671875, "grad_norm": 0.23366527259349823, "learning_rate": 0.0003505197742951216, "loss": 1.8731, "step": 4187 }, { "epoch": 0.408984375, "grad_norm": 0.1992902010679245, "learning_rate": 0.0003504527833146911, "loss": 1.8637, "step": 4188 }, { "epoch": 0.40908203125, "grad_norm": 0.18898622691631317, "learning_rate": 0.000350385784797196, "loss": 1.8352, "step": 4189 }, { "epoch": 0.4091796875, "grad_norm": 0.24903878569602966, "learning_rate": 0.00035031877874932893, "loss": 1.8385, "step": 4190 }, { "epoch": 0.40927734375, "grad_norm": 0.20514540374279022, "learning_rate": 0.00035025176517778305, "loss": 1.85, "step": 4191 }, { "epoch": 0.409375, "grad_norm": 0.2659608721733093, "learning_rate": 0.00035018474408925253, "loss": 1.8314, "step": 4192 }, { "epoch": 0.40947265625, "grad_norm": 0.17859287559986115, "learning_rate": 0.0003501177154904321, "loss": 1.8438, "step": 4193 }, { "epoch": 0.4095703125, "grad_norm": 0.19967199862003326, "learning_rate": 0.00035005067938801744, "loss": 1.8129, "step": 4194 }, { "epoch": 0.40966796875, "grad_norm": 0.21078793704509735, "learning_rate": 0.0003499836357887048, "loss": 1.8088, "step": 4195 }, { "epoch": 0.409765625, "grad_norm": 0.16362448036670685, "learning_rate": 
0.00034991658469919117, "loss": 1.8544, "step": 4196 }, { "epoch": 0.40986328125, "grad_norm": 0.1888558268547058, "learning_rate": 0.0003498495261261746, "loss": 1.8535, "step": 4197 }, { "epoch": 0.4099609375, "grad_norm": 0.19681403040885925, "learning_rate": 0.00034978246007635335, "loss": 1.8286, "step": 4198 }, { "epoch": 0.41005859375, "grad_norm": 0.1948511153459549, "learning_rate": 0.0003497153865564268, "loss": 1.8237, "step": 4199 }, { "epoch": 0.41015625, "grad_norm": 0.21308070421218872, "learning_rate": 0.00034964830557309513, "loss": 1.8074, "step": 4200 }, { "epoch": 0.41025390625, "grad_norm": 0.20020486414432526, "learning_rate": 0.000349581217133059, "loss": 1.8793, "step": 4201 }, { "epoch": 0.4103515625, "grad_norm": 0.20001058280467987, "learning_rate": 0.00034951412124302006, "loss": 1.8133, "step": 4202 }, { "epoch": 0.41044921875, "grad_norm": 0.21066218614578247, "learning_rate": 0.00034944701790968054, "loss": 1.8185, "step": 4203 }, { "epoch": 0.410546875, "grad_norm": 0.22479777038097382, "learning_rate": 0.0003493799071397435, "loss": 1.8591, "step": 4204 }, { "epoch": 0.41064453125, "grad_norm": 0.1952837109565735, "learning_rate": 0.00034931278893991265, "loss": 1.8423, "step": 4205 }, { "epoch": 0.4107421875, "grad_norm": 0.28419220447540283, "learning_rate": 0.0003492456633168925, "loss": 1.8809, "step": 4206 }, { "epoch": 0.41083984375, "grad_norm": 0.22131340205669403, "learning_rate": 0.0003491785302773883, "loss": 1.8141, "step": 4207 }, { "epoch": 0.4109375, "grad_norm": 0.2682664096355438, "learning_rate": 0.0003491113898281062, "loss": 1.8872, "step": 4208 }, { "epoch": 0.41103515625, "grad_norm": 0.24773511290550232, "learning_rate": 0.0003490442419757528, "loss": 1.8325, "step": 4209 }, { "epoch": 0.4111328125, "grad_norm": 0.2604011595249176, "learning_rate": 0.0003489770867270356, "loss": 1.8726, "step": 4210 }, { "epoch": 0.41123046875, "grad_norm": 0.2504948377609253, "learning_rate": 0.00034890992408866285, "loss": 1.8481, "step": 4211 }, { "epoch": 0.411328125, "grad_norm": 0.32314008474349976, "learning_rate": 0.0003488427540673433, "loss": 1.8546, "step": 4212 }, { "epoch": 0.41142578125, "grad_norm": 0.285114049911499, "learning_rate": 0.000348775576669787, "loss": 1.8531, "step": 4213 }, { "epoch": 0.4115234375, "grad_norm": 0.25935712456703186, "learning_rate": 0.000348708391902704, "loss": 1.8106, "step": 4214 }, { "epoch": 0.41162109375, "grad_norm": 0.2512550950050354, "learning_rate": 0.00034864119977280584, "loss": 1.8648, "step": 4215 }, { "epoch": 0.41171875, "grad_norm": 0.333379864692688, "learning_rate": 0.00034857400028680415, "loss": 1.841, "step": 4216 }, { "epoch": 0.41181640625, "grad_norm": 0.25787755846977234, "learning_rate": 0.0003485067934514116, "loss": 1.869, "step": 4217 }, { "epoch": 0.4119140625, "grad_norm": 0.19393840432167053, "learning_rate": 0.0003484395792733416, "loss": 1.838, "step": 4218 }, { "epoch": 0.41201171875, "grad_norm": 0.2149869054555893, "learning_rate": 0.0003483723577593083, "loss": 1.8729, "step": 4219 }, { "epoch": 0.412109375, "grad_norm": 0.23125958442687988, "learning_rate": 0.0003483051289160265, "loss": 1.837, "step": 4220 }, { "epoch": 0.41220703125, "grad_norm": 0.1976689249277115, "learning_rate": 0.00034823789275021164, "loss": 1.8447, "step": 4221 }, { "epoch": 0.4123046875, "grad_norm": 0.24214595556259155, "learning_rate": 0.00034817064926858017, "loss": 1.8098, "step": 4222 }, { "epoch": 0.41240234375, "grad_norm": 0.22756950557231903, "learning_rate": 
0.00034810339847784913, "loss": 1.8572, "step": 4223 }, { "epoch": 0.4125, "grad_norm": 0.20165514945983887, "learning_rate": 0.0003480361403847361, "loss": 1.8555, "step": 4224 }, { "epoch": 0.41259765625, "grad_norm": 0.27508628368377686, "learning_rate": 0.0003479688749959598, "loss": 1.8739, "step": 4225 }, { "epoch": 0.4126953125, "grad_norm": 0.19843104481697083, "learning_rate": 0.00034790160231823925, "loss": 1.8419, "step": 4226 }, { "epoch": 0.41279296875, "grad_norm": 0.20928111672401428, "learning_rate": 0.0003478343223582946, "loss": 1.8809, "step": 4227 }, { "epoch": 0.412890625, "grad_norm": 0.21286095678806305, "learning_rate": 0.0003477670351228462, "loss": 1.8071, "step": 4228 }, { "epoch": 0.41298828125, "grad_norm": 0.183101087808609, "learning_rate": 0.0003476997406186158, "loss": 1.836, "step": 4229 }, { "epoch": 0.4130859375, "grad_norm": 0.16599592566490173, "learning_rate": 0.00034763243885232536, "loss": 1.8283, "step": 4230 }, { "epoch": 0.41318359375, "grad_norm": 0.2242308109998703, "learning_rate": 0.00034756512983069777, "loss": 1.8075, "step": 4231 }, { "epoch": 0.41328125, "grad_norm": 0.19931399822235107, "learning_rate": 0.0003474978135604565, "loss": 1.8351, "step": 4232 }, { "epoch": 0.41337890625, "grad_norm": 0.20005737245082855, "learning_rate": 0.000347430490048326, "loss": 1.8304, "step": 4233 }, { "epoch": 0.4134765625, "grad_norm": 0.23433679342269897, "learning_rate": 0.00034736315930103124, "loss": 1.861, "step": 4234 }, { "epoch": 0.41357421875, "grad_norm": 0.2258288711309433, "learning_rate": 0.0003472958213252978, "loss": 1.8316, "step": 4235 }, { "epoch": 0.413671875, "grad_norm": 0.19864246249198914, "learning_rate": 0.0003472284761278524, "loss": 1.8173, "step": 4236 }, { "epoch": 0.41376953125, "grad_norm": 0.2385239154100418, "learning_rate": 0.00034716112371542215, "loss": 1.8522, "step": 4237 }, { "epoch": 0.4138671875, "grad_norm": 0.2307146191596985, "learning_rate": 0.00034709376409473484, "loss": 1.8446, "step": 4238 }, { "epoch": 0.41396484375, "grad_norm": 0.22357000410556793, "learning_rate": 0.0003470263972725193, "loss": 1.8856, "step": 4239 }, { "epoch": 0.4140625, "grad_norm": 0.20348325371742249, "learning_rate": 0.0003469590232555046, "loss": 1.8688, "step": 4240 }, { "epoch": 0.41416015625, "grad_norm": 0.21446335315704346, "learning_rate": 0.0003468916420504211, "loss": 1.8158, "step": 4241 }, { "epoch": 0.4142578125, "grad_norm": 0.23739059269428253, "learning_rate": 0.00034682425366399944, "loss": 1.8127, "step": 4242 }, { "epoch": 0.41435546875, "grad_norm": 0.16460199654102325, "learning_rate": 0.0003467568581029712, "loss": 1.83, "step": 4243 }, { "epoch": 0.414453125, "grad_norm": 0.22891512513160706, "learning_rate": 0.0003466894553740685, "loss": 1.8249, "step": 4244 }, { "epoch": 0.41455078125, "grad_norm": 0.21211554110050201, "learning_rate": 0.00034662204548402425, "loss": 1.8282, "step": 4245 }, { "epoch": 0.4146484375, "grad_norm": 0.23098908364772797, "learning_rate": 0.00034655462843957225, "loss": 1.8365, "step": 4246 }, { "epoch": 0.41474609375, "grad_norm": 0.2643522024154663, "learning_rate": 0.0003464872042474468, "loss": 1.7939, "step": 4247 }, { "epoch": 0.41484375, "grad_norm": 0.179979145526886, "learning_rate": 0.00034641977291438293, "loss": 1.8654, "step": 4248 }, { "epoch": 0.41494140625, "grad_norm": 0.2652702331542969, "learning_rate": 0.00034635233444711645, "loss": 1.8313, "step": 4249 }, { "epoch": 0.4150390625, "grad_norm": 0.18599368631839752, "learning_rate": 0.00034628488885238393, 
"loss": 1.8835, "step": 4250 }, { "epoch": 0.41513671875, "grad_norm": 0.24554285407066345, "learning_rate": 0.00034621743613692254, "loss": 1.8121, "step": 4251 }, { "epoch": 0.415234375, "grad_norm": 0.2408851832151413, "learning_rate": 0.00034614997630747006, "loss": 1.8248, "step": 4252 }, { "epoch": 0.41533203125, "grad_norm": 0.2368103414773941, "learning_rate": 0.00034608250937076545, "loss": 1.8479, "step": 4253 }, { "epoch": 0.4154296875, "grad_norm": 0.20753245055675507, "learning_rate": 0.0003460150353335479, "loss": 1.865, "step": 4254 }, { "epoch": 0.41552734375, "grad_norm": 0.20561878383159637, "learning_rate": 0.00034594755420255737, "loss": 1.863, "step": 4255 }, { "epoch": 0.415625, "grad_norm": 0.24068519473075867, "learning_rate": 0.0003458800659845347, "loss": 1.8511, "step": 4256 }, { "epoch": 0.41572265625, "grad_norm": 0.23239868879318237, "learning_rate": 0.00034581257068622145, "loss": 1.9037, "step": 4257 }, { "epoch": 0.4158203125, "grad_norm": 0.23503153026103973, "learning_rate": 0.0003457450683143597, "loss": 1.7994, "step": 4258 }, { "epoch": 0.41591796875, "grad_norm": 0.22664009034633636, "learning_rate": 0.0003456775588756924, "loss": 1.8591, "step": 4259 }, { "epoch": 0.416015625, "grad_norm": 0.32614830136299133, "learning_rate": 0.00034561004237696304, "loss": 1.8632, "step": 4260 }, { "epoch": 0.41611328125, "grad_norm": 0.24125008285045624, "learning_rate": 0.0003455425188249161, "loss": 1.8563, "step": 4261 }, { "epoch": 0.4162109375, "grad_norm": 0.22518505156040192, "learning_rate": 0.00034547498822629644, "loss": 1.8402, "step": 4262 }, { "epoch": 0.41630859375, "grad_norm": 0.2516859471797943, "learning_rate": 0.0003454074505878498, "loss": 1.8494, "step": 4263 }, { "epoch": 0.41640625, "grad_norm": 0.2525559067726135, "learning_rate": 0.00034533990591632254, "loss": 1.8434, "step": 4264 }, { "epoch": 0.41650390625, "grad_norm": 0.16537804901599884, "learning_rate": 0.00034527235421846195, "loss": 1.8087, "step": 4265 }, { "epoch": 0.4166015625, "grad_norm": 0.2685443162918091, "learning_rate": 0.0003452047955010156, "loss": 1.827, "step": 4266 }, { "epoch": 0.41669921875, "grad_norm": 0.20350132882595062, "learning_rate": 0.0003451372297707322, "loss": 1.852, "step": 4267 }, { "epoch": 0.416796875, "grad_norm": 0.21128815412521362, "learning_rate": 0.00034506965703436093, "loss": 1.8463, "step": 4268 }, { "epoch": 0.41689453125, "grad_norm": 0.20447909832000732, "learning_rate": 0.00034500207729865165, "loss": 1.8083, "step": 4269 }, { "epoch": 0.4169921875, "grad_norm": 0.20351362228393555, "learning_rate": 0.00034493449057035504, "loss": 1.874, "step": 4270 }, { "epoch": 0.41708984375, "grad_norm": 0.2231147587299347, "learning_rate": 0.00034486689685622236, "loss": 1.8357, "step": 4271 }, { "epoch": 0.4171875, "grad_norm": 0.19443069398403168, "learning_rate": 0.00034479929616300565, "loss": 1.8425, "step": 4272 }, { "epoch": 0.41728515625, "grad_norm": 0.24243175983428955, "learning_rate": 0.00034473168849745764, "loss": 1.8664, "step": 4273 }, { "epoch": 0.4173828125, "grad_norm": 0.2010633796453476, "learning_rate": 0.0003446640738663316, "loss": 1.8539, "step": 4274 }, { "epoch": 0.41748046875, "grad_norm": 0.25846266746520996, "learning_rate": 0.000344596452276382, "loss": 1.829, "step": 4275 }, { "epoch": 0.417578125, "grad_norm": 0.22176463901996613, "learning_rate": 0.0003445288237343632, "loss": 1.8324, "step": 4276 }, { "epoch": 0.41767578125, "grad_norm": 0.21178513765335083, "learning_rate": 0.00034446118824703087, "loss": 
1.8216, "step": 4277 }, { "epoch": 0.4177734375, "grad_norm": 0.25420916080474854, "learning_rate": 0.0003443935458211413, "loss": 1.8528, "step": 4278 }, { "epoch": 0.41787109375, "grad_norm": 0.1733928769826889, "learning_rate": 0.0003443258964634512, "loss": 1.8562, "step": 4279 }, { "epoch": 0.41796875, "grad_norm": 0.22979426383972168, "learning_rate": 0.00034425824018071826, "loss": 1.8582, "step": 4280 }, { "epoch": 0.41806640625, "grad_norm": 0.18637573719024658, "learning_rate": 0.0003441905769797007, "loss": 1.8275, "step": 4281 }, { "epoch": 0.4181640625, "grad_norm": 0.18606674671173096, "learning_rate": 0.00034412290686715747, "loss": 1.8616, "step": 4282 }, { "epoch": 0.41826171875, "grad_norm": 0.18189047276973724, "learning_rate": 0.00034405522984984815, "loss": 1.8007, "step": 4283 }, { "epoch": 0.418359375, "grad_norm": 0.21638208627700806, "learning_rate": 0.0003439875459345332, "loss": 1.8307, "step": 4284 }, { "epoch": 0.41845703125, "grad_norm": 0.19453690946102142, "learning_rate": 0.00034391985512797357, "loss": 1.8311, "step": 4285 }, { "epoch": 0.4185546875, "grad_norm": 0.22609570622444153, "learning_rate": 0.000343852157436931, "loss": 1.8486, "step": 4286 }, { "epoch": 0.41865234375, "grad_norm": 0.1817261129617691, "learning_rate": 0.0003437844528681679, "loss": 1.866, "step": 4287 }, { "epoch": 0.41875, "grad_norm": 0.1934969276189804, "learning_rate": 0.00034371674142844727, "loss": 1.8448, "step": 4288 }, { "epoch": 0.41884765625, "grad_norm": 0.17022395133972168, "learning_rate": 0.00034364902312453307, "loss": 1.809, "step": 4289 }, { "epoch": 0.4189453125, "grad_norm": 0.221804678440094, "learning_rate": 0.00034358129796318947, "loss": 1.8872, "step": 4290 }, { "epoch": 0.41904296875, "grad_norm": 0.22451527416706085, "learning_rate": 0.0003435135659511819, "loss": 1.8112, "step": 4291 }, { "epoch": 0.419140625, "grad_norm": 0.23531721532344818, "learning_rate": 0.00034344582709527606, "loss": 1.8643, "step": 4292 }, { "epoch": 0.41923828125, "grad_norm": 0.206741601228714, "learning_rate": 0.00034337808140223844, "loss": 1.8382, "step": 4293 }, { "epoch": 0.4193359375, "grad_norm": 0.26458290219306946, "learning_rate": 0.0003433103288788362, "loss": 1.8481, "step": 4294 }, { "epoch": 0.41943359375, "grad_norm": 0.2532312870025635, "learning_rate": 0.0003432425695318373, "loss": 1.8158, "step": 4295 }, { "epoch": 0.41953125, "grad_norm": 0.20990796387195587, "learning_rate": 0.00034317480336801037, "loss": 1.8749, "step": 4296 }, { "epoch": 0.41962890625, "grad_norm": 0.23719432950019836, "learning_rate": 0.0003431070303941245, "loss": 1.8536, "step": 4297 }, { "epoch": 0.4197265625, "grad_norm": 0.22180777788162231, "learning_rate": 0.00034303925061694967, "loss": 1.8891, "step": 4298 }, { "epoch": 0.41982421875, "grad_norm": 0.225361168384552, "learning_rate": 0.00034297146404325653, "loss": 1.838, "step": 4299 }, { "epoch": 0.419921875, "grad_norm": 0.23712079226970673, "learning_rate": 0.0003429036706798162, "loss": 1.8225, "step": 4300 }, { "epoch": 0.42001953125, "grad_norm": 0.2806594967842102, "learning_rate": 0.00034283587053340084, "loss": 1.8636, "step": 4301 }, { "epoch": 0.4201171875, "grad_norm": 0.21580100059509277, "learning_rate": 0.0003427680636107829, "loss": 1.834, "step": 4302 }, { "epoch": 0.42021484375, "grad_norm": 0.24084138870239258, "learning_rate": 0.0003427002499187358, "loss": 1.8283, "step": 4303 }, { "epoch": 0.4203125, "grad_norm": 0.26236194372177124, "learning_rate": 0.00034263242946403356, "loss": 1.8105, "step": 4304 
}, { "epoch": 0.42041015625, "grad_norm": 0.24141961336135864, "learning_rate": 0.0003425646022534508, "loss": 1.8311, "step": 4305 }, { "epoch": 0.4205078125, "grad_norm": 0.17347706854343414, "learning_rate": 0.0003424967682937627, "loss": 1.7743, "step": 4306 }, { "epoch": 0.42060546875, "grad_norm": 0.27681705355644226, "learning_rate": 0.0003424289275917455, "loss": 1.8708, "step": 4307 }, { "epoch": 0.420703125, "grad_norm": 0.22059215605258942, "learning_rate": 0.00034236108015417584, "loss": 1.809, "step": 4308 }, { "epoch": 0.42080078125, "grad_norm": 0.2038775086402893, "learning_rate": 0.000342293225987831, "loss": 1.8531, "step": 4309 }, { "epoch": 0.4208984375, "grad_norm": 0.1890358030796051, "learning_rate": 0.0003422253650994891, "loss": 1.8161, "step": 4310 }, { "epoch": 0.42099609375, "grad_norm": 0.22576870024204254, "learning_rate": 0.00034215749749592873, "loss": 1.8652, "step": 4311 }, { "epoch": 0.42109375, "grad_norm": 0.2194606214761734, "learning_rate": 0.0003420896231839293, "loss": 1.7729, "step": 4312 }, { "epoch": 0.42119140625, "grad_norm": 0.2838427424430847, "learning_rate": 0.000342021742170271, "loss": 1.856, "step": 4313 }, { "epoch": 0.4212890625, "grad_norm": 0.23746374249458313, "learning_rate": 0.0003419538544617342, "loss": 1.8322, "step": 4314 }, { "epoch": 0.42138671875, "grad_norm": 0.1634446680545807, "learning_rate": 0.00034188596006510066, "loss": 1.8129, "step": 4315 }, { "epoch": 0.421484375, "grad_norm": 0.23567970097064972, "learning_rate": 0.00034181805898715216, "loss": 1.8431, "step": 4316 }, { "epoch": 0.42158203125, "grad_norm": 0.20782487094402313, "learning_rate": 0.0003417501512346717, "loss": 1.8508, "step": 4317 }, { "epoch": 0.4216796875, "grad_norm": 0.18531768023967743, "learning_rate": 0.0003416822368144424, "loss": 1.8645, "step": 4318 }, { "epoch": 0.42177734375, "grad_norm": 0.21057714521884918, "learning_rate": 0.0003416143157332483, "loss": 1.8604, "step": 4319 }, { "epoch": 0.421875, "grad_norm": 0.1936180740594864, "learning_rate": 0.0003415463879978743, "loss": 1.8278, "step": 4320 }, { "epoch": 0.42197265625, "grad_norm": 0.20181427896022797, "learning_rate": 0.0003414784536151056, "loss": 1.8653, "step": 4321 }, { "epoch": 0.4220703125, "grad_norm": 0.21293999254703522, "learning_rate": 0.00034141051259172845, "loss": 1.8462, "step": 4322 }, { "epoch": 0.42216796875, "grad_norm": 0.18978172540664673, "learning_rate": 0.0003413425649345293, "loss": 1.8836, "step": 4323 }, { "epoch": 0.422265625, "grad_norm": 0.2115108072757721, "learning_rate": 0.0003412746106502958, "loss": 1.8257, "step": 4324 }, { "epoch": 0.42236328125, "grad_norm": 0.19577565789222717, "learning_rate": 0.0003412066497458158, "loss": 1.8143, "step": 4325 }, { "epoch": 0.4224609375, "grad_norm": 0.18919500708580017, "learning_rate": 0.0003411386822278779, "loss": 1.8241, "step": 4326 }, { "epoch": 0.42255859375, "grad_norm": 0.248185396194458, "learning_rate": 0.0003410707081032717, "loss": 1.8358, "step": 4327 }, { "epoch": 0.42265625, "grad_norm": 0.17570361495018005, "learning_rate": 0.00034100272737878706, "loss": 1.8312, "step": 4328 }, { "epoch": 0.42275390625, "grad_norm": 0.24675165116786957, "learning_rate": 0.00034093474006121477, "loss": 1.8289, "step": 4329 }, { "epoch": 0.4228515625, "grad_norm": 0.1773347556591034, "learning_rate": 0.000340866746157346, "loss": 1.8583, "step": 4330 }, { "epoch": 0.42294921875, "grad_norm": 0.2368694543838501, "learning_rate": 0.00034079874567397283, "loss": 1.8379, "step": 4331 }, { "epoch": 
0.423046875, "grad_norm": 0.21391184628009796, "learning_rate": 0.00034073073861788785, "loss": 1.8469, "step": 4332 }, { "epoch": 0.42314453125, "grad_norm": 0.24347218871116638, "learning_rate": 0.00034066272499588444, "loss": 1.8379, "step": 4333 }, { "epoch": 0.4232421875, "grad_norm": 0.20501993596553802, "learning_rate": 0.0003405947048147566, "loss": 1.8698, "step": 4334 }, { "epoch": 0.42333984375, "grad_norm": 0.23922660946846008, "learning_rate": 0.00034052667808129864, "loss": 1.8463, "step": 4335 }, { "epoch": 0.4234375, "grad_norm": 0.22898203134536743, "learning_rate": 0.00034045864480230625, "loss": 1.838, "step": 4336 }, { "epoch": 0.42353515625, "grad_norm": 0.2185804545879364, "learning_rate": 0.0003403906049845751, "loss": 1.8265, "step": 4337 }, { "epoch": 0.4236328125, "grad_norm": 0.2115095853805542, "learning_rate": 0.00034032255863490184, "loss": 1.8322, "step": 4338 }, { "epoch": 0.42373046875, "grad_norm": 0.1770572066307068, "learning_rate": 0.00034025450576008373, "loss": 1.8171, "step": 4339 }, { "epoch": 0.423828125, "grad_norm": 0.2317141890525818, "learning_rate": 0.0003401864463669185, "loss": 1.8298, "step": 4340 }, { "epoch": 0.42392578125, "grad_norm": 0.20465056598186493, "learning_rate": 0.00034011838046220486, "loss": 1.8688, "step": 4341 }, { "epoch": 0.4240234375, "grad_norm": 0.1989961564540863, "learning_rate": 0.0003400503080527418, "loss": 1.8509, "step": 4342 }, { "epoch": 0.42412109375, "grad_norm": 0.2024098038673401, "learning_rate": 0.00033998222914532935, "loss": 1.8697, "step": 4343 }, { "epoch": 0.42421875, "grad_norm": 0.19737909734249115, "learning_rate": 0.0003399141437467678, "loss": 1.8403, "step": 4344 }, { "epoch": 0.42431640625, "grad_norm": 0.19448748230934143, "learning_rate": 0.0003398460518638585, "loss": 1.8132, "step": 4345 }, { "epoch": 0.4244140625, "grad_norm": 0.231892392039299, "learning_rate": 0.00033977795350340306, "loss": 1.8447, "step": 4346 }, { "epoch": 0.42451171875, "grad_norm": 0.24322080612182617, "learning_rate": 0.0003397098486722039, "loss": 1.8636, "step": 4347 }, { "epoch": 0.424609375, "grad_norm": 0.20982776582241058, "learning_rate": 0.0003396417373770642, "loss": 1.8177, "step": 4348 }, { "epoch": 0.42470703125, "grad_norm": 0.17859229445457458, "learning_rate": 0.0003395736196247875, "loss": 1.8398, "step": 4349 }, { "epoch": 0.4248046875, "grad_norm": 0.17166008055210114, "learning_rate": 0.0003395054954221784, "loss": 1.8346, "step": 4350 }, { "epoch": 0.42490234375, "grad_norm": 0.192952960729599, "learning_rate": 0.0003394373647760417, "loss": 1.856, "step": 4351 }, { "epoch": 0.425, "grad_norm": 0.18562056124210358, "learning_rate": 0.00033936922769318317, "loss": 1.8328, "step": 4352 }, { "epoch": 0.42509765625, "grad_norm": 0.18785525858402252, "learning_rate": 0.00033930108418040903, "loss": 1.8347, "step": 4353 }, { "epoch": 0.4251953125, "grad_norm": 0.20413319766521454, "learning_rate": 0.0003392329342445262, "loss": 1.8773, "step": 4354 }, { "epoch": 0.42529296875, "grad_norm": 0.227433443069458, "learning_rate": 0.0003391647778923424, "loss": 1.8488, "step": 4355 }, { "epoch": 0.425390625, "grad_norm": 0.1805514395236969, "learning_rate": 0.0003390966151306656, "loss": 1.8409, "step": 4356 }, { "epoch": 0.42548828125, "grad_norm": 0.21339501440525055, "learning_rate": 0.0003390284459663049, "loss": 1.8263, "step": 4357 }, { "epoch": 0.4255859375, "grad_norm": 0.21802109479904175, "learning_rate": 0.0003389602704060696, "loss": 1.8303, "step": 4358 }, { "epoch": 0.42568359375, 
"grad_norm": 0.1962195634841919, "learning_rate": 0.00033889208845676994, "loss": 1.8547, "step": 4359 }, { "epoch": 0.42578125, "grad_norm": 0.20132724940776825, "learning_rate": 0.00033882390012521675, "loss": 1.8268, "step": 4360 }, { "epoch": 0.42587890625, "grad_norm": 0.2011682391166687, "learning_rate": 0.0003387557054182214, "loss": 1.8384, "step": 4361 }, { "epoch": 0.4259765625, "grad_norm": 0.1959267556667328, "learning_rate": 0.0003386875043425958, "loss": 1.8443, "step": 4362 }, { "epoch": 0.42607421875, "grad_norm": 0.20704340934753418, "learning_rate": 0.0003386192969051527, "loss": 1.8025, "step": 4363 }, { "epoch": 0.426171875, "grad_norm": 0.22771066427230835, "learning_rate": 0.0003385510831127056, "loss": 1.8652, "step": 4364 }, { "epoch": 0.42626953125, "grad_norm": 0.20367157459259033, "learning_rate": 0.00033848286297206813, "loss": 1.8589, "step": 4365 }, { "epoch": 0.4263671875, "grad_norm": 0.21674802899360657, "learning_rate": 0.00033841463649005515, "loss": 1.8249, "step": 4366 }, { "epoch": 0.42646484375, "grad_norm": 0.23962077498435974, "learning_rate": 0.0003383464036734818, "loss": 1.8522, "step": 4367 }, { "epoch": 0.4265625, "grad_norm": 0.2480829656124115, "learning_rate": 0.00033827816452916396, "loss": 1.8578, "step": 4368 }, { "epoch": 0.42666015625, "grad_norm": 0.23992200195789337, "learning_rate": 0.00033820991906391814, "loss": 1.847, "step": 4369 }, { "epoch": 0.4267578125, "grad_norm": 0.16551890969276428, "learning_rate": 0.00033814166728456113, "loss": 1.8748, "step": 4370 }, { "epoch": 0.42685546875, "grad_norm": 0.2550032138824463, "learning_rate": 0.0003380734091979111, "loss": 1.8253, "step": 4371 }, { "epoch": 0.426953125, "grad_norm": 0.2584201395511627, "learning_rate": 0.0003380051448107863, "loss": 1.8391, "step": 4372 }, { "epoch": 0.42705078125, "grad_norm": 0.17704187333583832, "learning_rate": 0.0003379368741300057, "loss": 1.8121, "step": 4373 }, { "epoch": 0.4271484375, "grad_norm": 0.2456081509590149, "learning_rate": 0.0003378685971623889, "loss": 1.8527, "step": 4374 }, { "epoch": 0.42724609375, "grad_norm": 0.25017473101615906, "learning_rate": 0.00033780031391475625, "loss": 1.8178, "step": 4375 }, { "epoch": 0.42734375, "grad_norm": 0.18413616716861725, "learning_rate": 0.0003377320243939286, "loss": 1.8327, "step": 4376 }, { "epoch": 0.42744140625, "grad_norm": 0.28050103783607483, "learning_rate": 0.0003376637286067274, "loss": 1.8438, "step": 4377 }, { "epoch": 0.4275390625, "grad_norm": 0.23926864564418793, "learning_rate": 0.00033759542655997495, "loss": 1.7982, "step": 4378 }, { "epoch": 0.42763671875, "grad_norm": 0.24252592027187347, "learning_rate": 0.0003375271182604939, "loss": 1.8424, "step": 4379 }, { "epoch": 0.427734375, "grad_norm": 0.2253219336271286, "learning_rate": 0.00033745880371510766, "loss": 1.8144, "step": 4380 }, { "epoch": 0.42783203125, "grad_norm": 0.2022935003042221, "learning_rate": 0.0003373904829306402, "loss": 1.8445, "step": 4381 }, { "epoch": 0.4279296875, "grad_norm": 0.23967212438583374, "learning_rate": 0.0003373221559139164, "loss": 1.8364, "step": 4382 }, { "epoch": 0.42802734375, "grad_norm": 0.2502332925796509, "learning_rate": 0.00033725382267176126, "loss": 1.8337, "step": 4383 }, { "epoch": 0.428125, "grad_norm": 0.20870167016983032, "learning_rate": 0.0003371854832110007, "loss": 1.7944, "step": 4384 }, { "epoch": 0.42822265625, "grad_norm": 0.23509882390499115, "learning_rate": 0.0003371171375384614, "loss": 1.783, "step": 4385 }, { "epoch": 0.4283203125, "grad_norm": 
0.21745553612709045, "learning_rate": 0.00033704878566097026, "loss": 1.8586, "step": 4386 }, { "epoch": 0.42841796875, "grad_norm": 0.23707370460033417, "learning_rate": 0.0003369804275853551, "loss": 1.8661, "step": 4387 }, { "epoch": 0.428515625, "grad_norm": 0.2220204472541809, "learning_rate": 0.00033691206331844443, "loss": 1.8088, "step": 4388 }, { "epoch": 0.42861328125, "grad_norm": 0.21647056937217712, "learning_rate": 0.00033684369286706713, "loss": 1.8383, "step": 4389 }, { "epoch": 0.4287109375, "grad_norm": 0.21368125081062317, "learning_rate": 0.00033677531623805277, "loss": 1.8219, "step": 4390 }, { "epoch": 0.42880859375, "grad_norm": 0.28654998540878296, "learning_rate": 0.00033670693343823153, "loss": 1.8687, "step": 4391 }, { "epoch": 0.42890625, "grad_norm": 0.19630244374275208, "learning_rate": 0.00033663854447443445, "loss": 1.8421, "step": 4392 }, { "epoch": 0.42900390625, "grad_norm": 0.2457696795463562, "learning_rate": 0.00033657014935349275, "loss": 1.8398, "step": 4393 }, { "epoch": 0.4291015625, "grad_norm": 0.2542276680469513, "learning_rate": 0.0003365017480822385, "loss": 1.8567, "step": 4394 }, { "epoch": 0.42919921875, "grad_norm": 0.22448401153087616, "learning_rate": 0.0003364333406675046, "loss": 1.8188, "step": 4395 }, { "epoch": 0.429296875, "grad_norm": 0.2244417518377304, "learning_rate": 0.0003363649271161243, "loss": 1.798, "step": 4396 }, { "epoch": 0.42939453125, "grad_norm": 0.24035105109214783, "learning_rate": 0.0003362965074349313, "loss": 1.8418, "step": 4397 }, { "epoch": 0.4294921875, "grad_norm": 0.2114304155111313, "learning_rate": 0.0003362280816307602, "loss": 1.8185, "step": 4398 }, { "epoch": 0.42958984375, "grad_norm": 0.22470279037952423, "learning_rate": 0.0003361596497104463, "loss": 1.8282, "step": 4399 }, { "epoch": 0.4296875, "grad_norm": 0.22919301688671112, "learning_rate": 0.0003360912116808251, "loss": 1.8662, "step": 4400 }, { "epoch": 0.42978515625, "grad_norm": 0.24255748093128204, "learning_rate": 0.0003360227675487332, "loss": 1.8386, "step": 4401 }, { "epoch": 0.4298828125, "grad_norm": 0.2792574167251587, "learning_rate": 0.0003359543173210073, "loss": 1.8308, "step": 4402 }, { "epoch": 0.42998046875, "grad_norm": 0.22785964608192444, "learning_rate": 0.0003358858610044852, "loss": 1.8419, "step": 4403 }, { "epoch": 0.430078125, "grad_norm": 0.23355011641979218, "learning_rate": 0.00033581739860600495, "loss": 1.8332, "step": 4404 }, { "epoch": 0.43017578125, "grad_norm": 0.2188209593296051, "learning_rate": 0.00033574893013240537, "loss": 1.8468, "step": 4405 }, { "epoch": 0.4302734375, "grad_norm": 0.21975260972976685, "learning_rate": 0.00033568045559052596, "loss": 1.8454, "step": 4406 }, { "epoch": 0.43037109375, "grad_norm": 0.22873692214488983, "learning_rate": 0.00033561197498720646, "loss": 1.8758, "step": 4407 }, { "epoch": 0.43046875, "grad_norm": 0.25508803129196167, "learning_rate": 0.0003355434883292877, "loss": 1.882, "step": 4408 }, { "epoch": 0.43056640625, "grad_norm": 0.22015030682086945, "learning_rate": 0.00033547499562361084, "loss": 1.835, "step": 4409 }, { "epoch": 0.4306640625, "grad_norm": 0.2391720563173294, "learning_rate": 0.0003354064968770177, "loss": 1.8394, "step": 4410 }, { "epoch": 0.43076171875, "grad_norm": 0.23599278926849365, "learning_rate": 0.0003353379920963506, "loss": 1.8123, "step": 4411 }, { "epoch": 0.430859375, "grad_norm": 0.2925673723220825, "learning_rate": 0.00033526948128845263, "loss": 1.859, "step": 4412 }, { "epoch": 0.43095703125, "grad_norm": 
0.2177896946668625, "learning_rate": 0.0003352009644601675, "loss": 1.812, "step": 4413 }, { "epoch": 0.4310546875, "grad_norm": 0.18912582099437714, "learning_rate": 0.0003351324416183393, "loss": 1.8409, "step": 4414 }, { "epoch": 0.43115234375, "grad_norm": 0.197793111205101, "learning_rate": 0.00033506391276981294, "loss": 1.9291, "step": 4415 }, { "epoch": 0.43125, "grad_norm": 0.21789690852165222, "learning_rate": 0.0003349953779214338, "loss": 1.8663, "step": 4416 }, { "epoch": 0.43134765625, "grad_norm": 0.19930073618888855, "learning_rate": 0.00033492683708004795, "loss": 1.872, "step": 4417 }, { "epoch": 0.4314453125, "grad_norm": 0.19942253828048706, "learning_rate": 0.00033485829025250194, "loss": 1.8517, "step": 4418 }, { "epoch": 0.43154296875, "grad_norm": 0.21190962195396423, "learning_rate": 0.000334789737445643, "loss": 1.8754, "step": 4419 }, { "epoch": 0.431640625, "grad_norm": 0.17792046070098877, "learning_rate": 0.00033472117866631905, "loss": 1.8699, "step": 4420 }, { "epoch": 0.43173828125, "grad_norm": 0.2024158537387848, "learning_rate": 0.00033465261392137837, "loss": 1.8027, "step": 4421 }, { "epoch": 0.4318359375, "grad_norm": 0.16588537395000458, "learning_rate": 0.0003345840432176701, "loss": 1.8071, "step": 4422 }, { "epoch": 0.43193359375, "grad_norm": 0.1971169114112854, "learning_rate": 0.0003345154665620437, "loss": 1.7873, "step": 4423 }, { "epoch": 0.43203125, "grad_norm": 0.1795888990163803, "learning_rate": 0.0003344468839613495, "loss": 1.8088, "step": 4424 }, { "epoch": 0.43212890625, "grad_norm": 0.1773122400045395, "learning_rate": 0.0003343782954224382, "loss": 1.8417, "step": 4425 }, { "epoch": 0.4322265625, "grad_norm": 0.1802736520767212, "learning_rate": 0.0003343097009521613, "loss": 1.8258, "step": 4426 }, { "epoch": 0.43232421875, "grad_norm": 0.19189099967479706, "learning_rate": 0.0003342411005573707, "loss": 1.849, "step": 4427 }, { "epoch": 0.432421875, "grad_norm": 0.21457602083683014, "learning_rate": 0.0003341724942449189, "loss": 1.8498, "step": 4428 }, { "epoch": 0.43251953125, "grad_norm": 0.20727668702602386, "learning_rate": 0.0003341038820216592, "loss": 1.7926, "step": 4429 }, { "epoch": 0.4326171875, "grad_norm": 0.26479217410087585, "learning_rate": 0.00033403526389444535, "loss": 1.8665, "step": 4430 }, { "epoch": 0.43271484375, "grad_norm": 0.1955796629190445, "learning_rate": 0.0003339666398701316, "loss": 1.8521, "step": 4431 }, { "epoch": 0.4328125, "grad_norm": 0.22441504895687103, "learning_rate": 0.0003338980099555729, "loss": 1.8614, "step": 4432 }, { "epoch": 0.43291015625, "grad_norm": 0.20976248383522034, "learning_rate": 0.0003338293741576248, "loss": 1.8114, "step": 4433 }, { "epoch": 0.4330078125, "grad_norm": 0.20051336288452148, "learning_rate": 0.0003337607324831434, "loss": 1.848, "step": 4434 }, { "epoch": 0.43310546875, "grad_norm": 0.20708587765693665, "learning_rate": 0.00033369208493898544, "loss": 1.8375, "step": 4435 }, { "epoch": 0.433203125, "grad_norm": 0.23173046112060547, "learning_rate": 0.0003336234315320081, "loss": 1.8573, "step": 4436 }, { "epoch": 0.43330078125, "grad_norm": 0.2623552083969116, "learning_rate": 0.00033355477226906934, "loss": 1.8618, "step": 4437 }, { "epoch": 0.4333984375, "grad_norm": 0.2273312360048294, "learning_rate": 0.0003334861071570276, "loss": 1.8438, "step": 4438 }, { "epoch": 0.43349609375, "grad_norm": 0.16699610650539398, "learning_rate": 0.0003334174362027419, "loss": 1.7673, "step": 4439 }, { "epoch": 0.43359375, "grad_norm": 0.20863619446754456, 
"learning_rate": 0.0003333487594130718, "loss": 1.8566, "step": 4440 }, { "epoch": 0.43369140625, "grad_norm": 0.2478940486907959, "learning_rate": 0.0003332800767948776, "loss": 1.815, "step": 4441 }, { "epoch": 0.4337890625, "grad_norm": 0.24342428147792816, "learning_rate": 0.00033321138835502005, "loss": 1.844, "step": 4442 }, { "epoch": 0.43388671875, "grad_norm": 0.2302752137184143, "learning_rate": 0.0003331426941003605, "loss": 1.8641, "step": 4443 }, { "epoch": 0.433984375, "grad_norm": 0.16929592192173004, "learning_rate": 0.0003330739940377608, "loss": 1.861, "step": 4444 }, { "epoch": 0.43408203125, "grad_norm": 0.22187499701976776, "learning_rate": 0.0003330052881740838, "loss": 1.8441, "step": 4445 }, { "epoch": 0.4341796875, "grad_norm": 0.19175705313682556, "learning_rate": 0.00033293657651619225, "loss": 1.8116, "step": 4446 }, { "epoch": 0.43427734375, "grad_norm": 0.1969825178384781, "learning_rate": 0.00033286785907094994, "loss": 1.8518, "step": 4447 }, { "epoch": 0.434375, "grad_norm": 0.20496003329753876, "learning_rate": 0.0003327991358452213, "loss": 1.8396, "step": 4448 }, { "epoch": 0.43447265625, "grad_norm": 0.2111838310956955, "learning_rate": 0.000332730406845871, "loss": 1.8398, "step": 4449 }, { "epoch": 0.4345703125, "grad_norm": 0.18950366973876953, "learning_rate": 0.00033266167207976445, "loss": 1.8106, "step": 4450 }, { "epoch": 0.43466796875, "grad_norm": 0.20442768931388855, "learning_rate": 0.00033259293155376776, "loss": 1.8547, "step": 4451 }, { "epoch": 0.434765625, "grad_norm": 0.2146398425102234, "learning_rate": 0.0003325241852747474, "loss": 1.8698, "step": 4452 }, { "epoch": 0.43486328125, "grad_norm": 0.2124415785074234, "learning_rate": 0.0003324554332495705, "loss": 1.7994, "step": 4453 }, { "epoch": 0.4349609375, "grad_norm": 0.21013392508029938, "learning_rate": 0.0003323866754851049, "loss": 1.8857, "step": 4454 }, { "epoch": 0.43505859375, "grad_norm": 0.2193000763654709, "learning_rate": 0.0003323179119882189, "loss": 1.8585, "step": 4455 }, { "epoch": 0.43515625, "grad_norm": 0.19693540036678314, "learning_rate": 0.0003322491427657811, "loss": 1.823, "step": 4456 }, { "epoch": 0.43525390625, "grad_norm": 0.19659705460071564, "learning_rate": 0.00033218036782466115, "loss": 1.8586, "step": 4457 }, { "epoch": 0.4353515625, "grad_norm": 0.22691860795021057, "learning_rate": 0.00033211158717172905, "loss": 1.7934, "step": 4458 }, { "epoch": 0.43544921875, "grad_norm": 0.2183864265680313, "learning_rate": 0.00033204280081385544, "loss": 1.8764, "step": 4459 }, { "epoch": 0.435546875, "grad_norm": 0.2263919562101364, "learning_rate": 0.00033197400875791135, "loss": 1.8278, "step": 4460 }, { "epoch": 0.43564453125, "grad_norm": 0.2252654880285263, "learning_rate": 0.0003319052110107685, "loss": 1.8433, "step": 4461 }, { "epoch": 0.4357421875, "grad_norm": 0.2387869954109192, "learning_rate": 0.00033183640757929926, "loss": 1.8106, "step": 4462 }, { "epoch": 0.43583984375, "grad_norm": 0.2297157645225525, "learning_rate": 0.00033176759847037635, "loss": 1.8814, "step": 4463 }, { "epoch": 0.4359375, "grad_norm": 0.2321292757987976, "learning_rate": 0.0003316987836908733, "loss": 1.8298, "step": 4464 }, { "epoch": 0.43603515625, "grad_norm": 0.2759620249271393, "learning_rate": 0.00033162996324766404, "loss": 1.8671, "step": 4465 }, { "epoch": 0.4361328125, "grad_norm": 0.20262287557125092, "learning_rate": 0.0003315611371476232, "loss": 1.802, "step": 4466 }, { "epoch": 0.43623046875, "grad_norm": 0.23232300579547882, "learning_rate": 
0.00033149230539762577, "loss": 1.8417, "step": 4467 }, { "epoch": 0.436328125, "grad_norm": 0.21800456941127777, "learning_rate": 0.0003314234680045475, "loss": 1.8644, "step": 4468 }, { "epoch": 0.43642578125, "grad_norm": 0.19706159830093384, "learning_rate": 0.00033135462497526475, "loss": 1.8082, "step": 4469 }, { "epoch": 0.4365234375, "grad_norm": 0.2481984794139862, "learning_rate": 0.0003312857763166542, "loss": 1.8404, "step": 4470 }, { "epoch": 0.43662109375, "grad_norm": 0.22650815546512604, "learning_rate": 0.00033121692203559323, "loss": 1.8619, "step": 4471 }, { "epoch": 0.43671875, "grad_norm": 0.2094004899263382, "learning_rate": 0.0003311480621389597, "loss": 1.8826, "step": 4472 }, { "epoch": 0.43681640625, "grad_norm": 0.24898546934127808, "learning_rate": 0.0003310791966336323, "loss": 1.8522, "step": 4473 }, { "epoch": 0.4369140625, "grad_norm": 0.20734456181526184, "learning_rate": 0.0003310103255264899, "loss": 1.7986, "step": 4474 }, { "epoch": 0.43701171875, "grad_norm": 0.23696695268154144, "learning_rate": 0.0003309414488244122, "loss": 1.8541, "step": 4475 }, { "epoch": 0.437109375, "grad_norm": 0.24068109691143036, "learning_rate": 0.00033087256653427946, "loss": 1.8579, "step": 4476 }, { "epoch": 0.43720703125, "grad_norm": 0.19585324823856354, "learning_rate": 0.00033080367866297213, "loss": 1.8765, "step": 4477 }, { "epoch": 0.4373046875, "grad_norm": 0.2366010844707489, "learning_rate": 0.00033073478521737176, "loss": 1.8456, "step": 4478 }, { "epoch": 0.43740234375, "grad_norm": 0.18371711671352386, "learning_rate": 0.00033066588620436017, "loss": 1.817, "step": 4479 }, { "epoch": 0.4375, "grad_norm": 0.24547503888607025, "learning_rate": 0.0003305969816308197, "loss": 1.8545, "step": 4480 }, { "epoch": 0.43759765625, "grad_norm": 0.22530093789100647, "learning_rate": 0.00033052807150363316, "loss": 1.8209, "step": 4481 }, { "epoch": 0.4376953125, "grad_norm": 0.22203901410102844, "learning_rate": 0.0003304591558296844, "loss": 1.83, "step": 4482 }, { "epoch": 0.43779296875, "grad_norm": 0.2331925481557846, "learning_rate": 0.0003303902346158572, "loss": 1.8952, "step": 4483 }, { "epoch": 0.437890625, "grad_norm": 0.26899224519729614, "learning_rate": 0.0003303213078690363, "loss": 1.8432, "step": 4484 }, { "epoch": 0.43798828125, "grad_norm": 0.8147704005241394, "learning_rate": 0.00033025237559610685, "loss": 1.9056, "step": 4485 }, { "epoch": 0.4380859375, "grad_norm": 0.26135900616645813, "learning_rate": 0.0003301834378039545, "loss": 1.8493, "step": 4486 }, { "epoch": 0.43818359375, "grad_norm": 0.2273353487253189, "learning_rate": 0.00033011449449946574, "loss": 1.8561, "step": 4487 }, { "epoch": 0.43828125, "grad_norm": 0.2513292133808136, "learning_rate": 0.00033004554568952704, "loss": 1.851, "step": 4488 }, { "epoch": 0.43837890625, "grad_norm": 0.20417039096355438, "learning_rate": 0.0003299765913810261, "loss": 1.8483, "step": 4489 }, { "epoch": 0.4384765625, "grad_norm": 0.24769926071166992, "learning_rate": 0.0003299076315808508, "loss": 1.8546, "step": 4490 }, { "epoch": 0.43857421875, "grad_norm": 0.21822988986968994, "learning_rate": 0.00032983866629588944, "loss": 1.8686, "step": 4491 }, { "epoch": 0.438671875, "grad_norm": 0.25055810809135437, "learning_rate": 0.0003297696955330313, "loss": 1.8126, "step": 4492 }, { "epoch": 0.43876953125, "grad_norm": 0.1908411830663681, "learning_rate": 0.00032970071929916563, "loss": 1.8546, "step": 4493 }, { "epoch": 0.4388671875, "grad_norm": 0.19980601966381073, "learning_rate": 
0.00032963173760118276, "loss": 1.7861, "step": 4494 }, { "epoch": 0.43896484375, "grad_norm": 0.18650661408901215, "learning_rate": 0.0003295627504459732, "loss": 1.829, "step": 4495 }, { "epoch": 0.4390625, "grad_norm": 0.19553345441818237, "learning_rate": 0.0003294937578404284, "loss": 1.8121, "step": 4496 }, { "epoch": 0.43916015625, "grad_norm": 0.20082074403762817, "learning_rate": 0.00032942475979143987, "loss": 1.8404, "step": 4497 }, { "epoch": 0.4392578125, "grad_norm": 0.20696525275707245, "learning_rate": 0.00032935575630590005, "loss": 1.8435, "step": 4498 }, { "epoch": 0.43935546875, "grad_norm": 0.2295655757188797, "learning_rate": 0.00032928674739070177, "loss": 1.8257, "step": 4499 }, { "epoch": 0.439453125, "grad_norm": 0.22089581191539764, "learning_rate": 0.0003292177330527382, "loss": 1.8122, "step": 4500 }, { "epoch": 0.43955078125, "grad_norm": 0.21159827709197998, "learning_rate": 0.0003291487132989036, "loss": 1.8339, "step": 4501 }, { "epoch": 0.4396484375, "grad_norm": 0.2064434140920639, "learning_rate": 0.00032907968813609213, "loss": 1.7816, "step": 4502 }, { "epoch": 0.43974609375, "grad_norm": 0.2011205554008484, "learning_rate": 0.000329010657571199, "loss": 1.8245, "step": 4503 }, { "epoch": 0.43984375, "grad_norm": 0.23691421747207642, "learning_rate": 0.00032894162161111964, "loss": 1.9096, "step": 4504 }, { "epoch": 0.43994140625, "grad_norm": 0.19454586505889893, "learning_rate": 0.00032887258026275014, "loss": 1.8582, "step": 4505 }, { "epoch": 0.4400390625, "grad_norm": 0.18718378245830536, "learning_rate": 0.0003288035335329871, "loss": 1.8708, "step": 4506 }, { "epoch": 0.44013671875, "grad_norm": 0.19976142048835754, "learning_rate": 0.00032873448142872784, "loss": 1.8312, "step": 4507 }, { "epoch": 0.440234375, "grad_norm": 0.1672353744506836, "learning_rate": 0.0003286654239568699, "loss": 1.8541, "step": 4508 }, { "epoch": 0.44033203125, "grad_norm": 0.19212840497493744, "learning_rate": 0.00032859636112431145, "loss": 1.8372, "step": 4509 }, { "epoch": 0.4404296875, "grad_norm": 0.2028074860572815, "learning_rate": 0.0003285272929379514, "loss": 1.8201, "step": 4510 }, { "epoch": 0.44052734375, "grad_norm": 0.2094787210226059, "learning_rate": 0.00032845821940468903, "loss": 1.7874, "step": 4511 }, { "epoch": 0.440625, "grad_norm": 0.19495780766010284, "learning_rate": 0.00032838914053142407, "loss": 1.8786, "step": 4512 }, { "epoch": 0.44072265625, "grad_norm": 0.21458013355731964, "learning_rate": 0.00032832005632505703, "loss": 1.8543, "step": 4513 }, { "epoch": 0.4408203125, "grad_norm": 0.19527952373027802, "learning_rate": 0.00032825096679248866, "loss": 1.8582, "step": 4514 }, { "epoch": 0.44091796875, "grad_norm": 0.2177453637123108, "learning_rate": 0.00032818187194062054, "loss": 1.8508, "step": 4515 }, { "epoch": 0.441015625, "grad_norm": 0.19066031277179718, "learning_rate": 0.00032811277177635445, "loss": 1.8406, "step": 4516 }, { "epoch": 0.44111328125, "grad_norm": 0.2284795641899109, "learning_rate": 0.0003280436663065931, "loss": 1.8698, "step": 4517 }, { "epoch": 0.4412109375, "grad_norm": 0.1739828735589981, "learning_rate": 0.0003279745555382394, "loss": 1.8475, "step": 4518 }, { "epoch": 0.44130859375, "grad_norm": 0.20771338045597076, "learning_rate": 0.00032790543947819686, "loss": 1.8242, "step": 4519 }, { "epoch": 0.44140625, "grad_norm": 0.174216166138649, "learning_rate": 0.00032783631813336966, "loss": 1.8561, "step": 4520 }, { "epoch": 0.44150390625, "grad_norm": 0.21372954547405243, "learning_rate": 
0.0003277671915106624, "loss": 1.8698, "step": 4521 }, { "epoch": 0.4416015625, "grad_norm": 0.23978406190872192, "learning_rate": 0.0003276980596169801, "loss": 1.8271, "step": 4522 }, { "epoch": 0.44169921875, "grad_norm": 0.24592150747776031, "learning_rate": 0.00032762892245922847, "loss": 1.819, "step": 4523 }, { "epoch": 0.441796875, "grad_norm": 0.23378491401672363, "learning_rate": 0.0003275597800443139, "loss": 1.8596, "step": 4524 }, { "epoch": 0.44189453125, "grad_norm": 0.173017218708992, "learning_rate": 0.00032749063237914286, "loss": 1.8465, "step": 4525 }, { "epoch": 0.4419921875, "grad_norm": 0.2911878228187561, "learning_rate": 0.00032742147947062265, "loss": 1.8099, "step": 4526 }, { "epoch": 0.44208984375, "grad_norm": 0.23090296983718872, "learning_rate": 0.00032735232132566106, "loss": 1.8158, "step": 4527 }, { "epoch": 0.4421875, "grad_norm": 0.20182637870311737, "learning_rate": 0.00032728315795116635, "loss": 1.8516, "step": 4528 }, { "epoch": 0.44228515625, "grad_norm": 0.22502705454826355, "learning_rate": 0.00032721398935404744, "loss": 1.8343, "step": 4529 }, { "epoch": 0.4423828125, "grad_norm": 0.2155645191669464, "learning_rate": 0.00032714481554121347, "loss": 1.8259, "step": 4530 }, { "epoch": 0.44248046875, "grad_norm": 0.20365890860557556, "learning_rate": 0.00032707563651957443, "loss": 1.8103, "step": 4531 }, { "epoch": 0.442578125, "grad_norm": 0.20811273157596588, "learning_rate": 0.00032700645229604065, "loss": 1.8743, "step": 4532 }, { "epoch": 0.44267578125, "grad_norm": 0.2342660278081894, "learning_rate": 0.00032693726287752294, "loss": 1.8403, "step": 4533 }, { "epoch": 0.4427734375, "grad_norm": 0.15678024291992188, "learning_rate": 0.000326868068270933, "loss": 1.8119, "step": 4534 }, { "epoch": 0.44287109375, "grad_norm": 0.257323682308197, "learning_rate": 0.0003267988684831824, "loss": 1.8259, "step": 4535 }, { "epoch": 0.44296875, "grad_norm": 0.21096593141555786, "learning_rate": 0.00032672966352118374, "loss": 1.8489, "step": 4536 }, { "epoch": 0.44306640625, "grad_norm": 0.2324661761522293, "learning_rate": 0.00032666045339184994, "loss": 1.8353, "step": 4537 }, { "epoch": 0.4431640625, "grad_norm": 0.20259353518486023, "learning_rate": 0.0003265912381020946, "loss": 1.8492, "step": 4538 }, { "epoch": 0.44326171875, "grad_norm": 0.2664618492126465, "learning_rate": 0.0003265220176588315, "loss": 1.8471, "step": 4539 }, { "epoch": 0.443359375, "grad_norm": 0.22948837280273438, "learning_rate": 0.00032645279206897543, "loss": 1.8128, "step": 4540 }, { "epoch": 0.44345703125, "grad_norm": 0.23944346606731415, "learning_rate": 0.00032638356133944126, "loss": 1.817, "step": 4541 }, { "epoch": 0.4435546875, "grad_norm": 0.2528168261051178, "learning_rate": 0.00032631432547714444, "loss": 1.818, "step": 4542 }, { "epoch": 0.44365234375, "grad_norm": 0.22383758425712585, "learning_rate": 0.00032624508448900115, "loss": 1.8603, "step": 4543 }, { "epoch": 0.44375, "grad_norm": 0.2284606248140335, "learning_rate": 0.00032617583838192795, "loss": 1.8358, "step": 4544 }, { "epoch": 0.44384765625, "grad_norm": 0.21029891073703766, "learning_rate": 0.0003261065871628418, "loss": 1.8036, "step": 4545 }, { "epoch": 0.4439453125, "grad_norm": 0.22057075798511505, "learning_rate": 0.0003260373308386604, "loss": 1.8392, "step": 4546 }, { "epoch": 0.44404296875, "grad_norm": 0.20081530511379242, "learning_rate": 0.00032596806941630175, "loss": 1.8567, "step": 4547 }, { "epoch": 0.444140625, "grad_norm": 0.20286759734153748, "learning_rate": 
0.0003258988029026845, "loss": 1.7656, "step": 4548 }, { "epoch": 0.44423828125, "grad_norm": 0.22599783539772034, "learning_rate": 0.0003258295313047279, "loss": 1.8068, "step": 4549 }, { "epoch": 0.4443359375, "grad_norm": 0.1798899620771408, "learning_rate": 0.00032576025462935127, "loss": 1.8433, "step": 4550 }, { "epoch": 0.44443359375, "grad_norm": 0.2571936845779419, "learning_rate": 0.00032569097288347496, "loss": 1.8405, "step": 4551 }, { "epoch": 0.44453125, "grad_norm": 0.194194957613945, "learning_rate": 0.00032562168607401954, "loss": 1.8379, "step": 4552 }, { "epoch": 0.44462890625, "grad_norm": 0.21969552338123322, "learning_rate": 0.0003255523942079062, "loss": 1.863, "step": 4553 }, { "epoch": 0.4447265625, "grad_norm": 0.2289535403251648, "learning_rate": 0.0003254830972920564, "loss": 1.8698, "step": 4554 }, { "epoch": 0.44482421875, "grad_norm": 0.20062187314033508, "learning_rate": 0.0003254137953333926, "loss": 1.8851, "step": 4555 }, { "epoch": 0.444921875, "grad_norm": 0.26987403631210327, "learning_rate": 0.0003253444883388371, "loss": 1.8601, "step": 4556 }, { "epoch": 0.44501953125, "grad_norm": 0.20700135827064514, "learning_rate": 0.0003252751763153133, "loss": 1.8286, "step": 4557 }, { "epoch": 0.4451171875, "grad_norm": 0.275177001953125, "learning_rate": 0.0003252058592697448, "loss": 1.8307, "step": 4558 }, { "epoch": 0.44521484375, "grad_norm": 0.25656774640083313, "learning_rate": 0.0003251365372090558, "loss": 1.8535, "step": 4559 }, { "epoch": 0.4453125, "grad_norm": 0.20680679380893707, "learning_rate": 0.00032506721014017074, "loss": 1.8538, "step": 4560 }, { "epoch": 0.44541015625, "grad_norm": 0.21911300718784332, "learning_rate": 0.0003249978780700151, "loss": 1.8574, "step": 4561 }, { "epoch": 0.4455078125, "grad_norm": 0.26287397742271423, "learning_rate": 0.0003249285410055143, "loss": 1.8358, "step": 4562 }, { "epoch": 0.44560546875, "grad_norm": 0.2139604240655899, "learning_rate": 0.0003248591989535946, "loss": 1.8372, "step": 4563 }, { "epoch": 0.445703125, "grad_norm": 0.22483497858047485, "learning_rate": 0.0003247898519211826, "loss": 1.8617, "step": 4564 }, { "epoch": 0.44580078125, "grad_norm": 0.23549950122833252, "learning_rate": 0.0003247204999152055, "loss": 1.7976, "step": 4565 }, { "epoch": 0.4458984375, "grad_norm": 0.2233511060476303, "learning_rate": 0.000324651142942591, "loss": 1.8178, "step": 4566 }, { "epoch": 0.44599609375, "grad_norm": 0.23170800507068634, "learning_rate": 0.00032458178101026715, "loss": 1.826, "step": 4567 }, { "epoch": 0.44609375, "grad_norm": 0.2332051545381546, "learning_rate": 0.0003245124141251626, "loss": 1.7838, "step": 4568 }, { "epoch": 0.44619140625, "grad_norm": 0.2292395979166031, "learning_rate": 0.0003244430422942066, "loss": 1.8636, "step": 4569 }, { "epoch": 0.4462890625, "grad_norm": 0.19580964744091034, "learning_rate": 0.00032437366552432866, "loss": 1.8412, "step": 4570 }, { "epoch": 0.44638671875, "grad_norm": 0.22505177557468414, "learning_rate": 0.0003243042838224589, "loss": 1.8197, "step": 4571 }, { "epoch": 0.446484375, "grad_norm": 0.18000158667564392, "learning_rate": 0.000324234897195528, "loss": 1.8399, "step": 4572 }, { "epoch": 0.44658203125, "grad_norm": 0.2370178997516632, "learning_rate": 0.00032416550565046715, "loss": 1.842, "step": 4573 }, { "epoch": 0.4466796875, "grad_norm": 0.387603223323822, "learning_rate": 0.0003240961091942078, "loss": 1.8703, "step": 4574 }, { "epoch": 0.44677734375, "grad_norm": 0.2057318389415741, "learning_rate": 0.000324026707833682, 
"loss": 1.7813, "step": 4575 }, { "epoch": 0.446875, "grad_norm": 0.26612409949302673, "learning_rate": 0.0003239573015758225, "loss": 1.8523, "step": 4576 }, { "epoch": 0.44697265625, "grad_norm": 0.21448631584644318, "learning_rate": 0.00032388789042756233, "loss": 1.8147, "step": 4577 }, { "epoch": 0.4470703125, "grad_norm": 0.22870376706123352, "learning_rate": 0.00032381847439583503, "loss": 1.8709, "step": 4578 }, { "epoch": 0.44716796875, "grad_norm": 0.19087672233581543, "learning_rate": 0.00032374905348757447, "loss": 1.8498, "step": 4579 }, { "epoch": 0.447265625, "grad_norm": 0.20512868463993073, "learning_rate": 0.00032367962770971553, "loss": 1.8191, "step": 4580 }, { "epoch": 0.44736328125, "grad_norm": 0.22222061455249786, "learning_rate": 0.000323610197069193, "loss": 1.8537, "step": 4581 }, { "epoch": 0.4474609375, "grad_norm": 0.249727264046669, "learning_rate": 0.0003235407615729424, "loss": 1.8624, "step": 4582 }, { "epoch": 0.44755859375, "grad_norm": 0.19810056686401367, "learning_rate": 0.00032347132122789973, "loss": 1.8805, "step": 4583 }, { "epoch": 0.44765625, "grad_norm": 0.28888627886772156, "learning_rate": 0.0003234018760410015, "loss": 1.8309, "step": 4584 }, { "epoch": 0.44775390625, "grad_norm": 0.2219923883676529, "learning_rate": 0.0003233324260191846, "loss": 1.8829, "step": 4585 }, { "epoch": 0.4478515625, "grad_norm": 0.2210731953382492, "learning_rate": 0.00032326297116938666, "loss": 1.85, "step": 4586 }, { "epoch": 0.44794921875, "grad_norm": 0.2635856866836548, "learning_rate": 0.00032319351149854544, "loss": 1.8324, "step": 4587 }, { "epoch": 0.448046875, "grad_norm": 0.19042371213436127, "learning_rate": 0.0003231240470135993, "loss": 1.8668, "step": 4588 }, { "epoch": 0.44814453125, "grad_norm": 0.26712489128112793, "learning_rate": 0.0003230545777214872, "loss": 1.8891, "step": 4589 }, { "epoch": 0.4482421875, "grad_norm": 0.20257070660591125, "learning_rate": 0.0003229851036291486, "loss": 1.822, "step": 4590 }, { "epoch": 0.44833984375, "grad_norm": 0.24346715211868286, "learning_rate": 0.00032291562474352314, "loss": 1.845, "step": 4591 }, { "epoch": 0.4484375, "grad_norm": 0.19787989556789398, "learning_rate": 0.00032284614107155136, "loss": 1.8411, "step": 4592 }, { "epoch": 0.44853515625, "grad_norm": 0.20641331374645233, "learning_rate": 0.000322776652620174, "loss": 1.8279, "step": 4593 }, { "epoch": 0.4486328125, "grad_norm": 0.1935081034898758, "learning_rate": 0.0003227071593963322, "loss": 1.8451, "step": 4594 }, { "epoch": 0.44873046875, "grad_norm": 0.2476644665002823, "learning_rate": 0.00032263766140696784, "loss": 1.8205, "step": 4595 }, { "epoch": 0.448828125, "grad_norm": 0.22297869622707367, "learning_rate": 0.0003225681586590232, "loss": 1.8599, "step": 4596 }, { "epoch": 0.44892578125, "grad_norm": 0.2534295618534088, "learning_rate": 0.0003224986511594408, "loss": 1.8362, "step": 4597 }, { "epoch": 0.4490234375, "grad_norm": 0.2747684121131897, "learning_rate": 0.00032242913891516415, "loss": 1.8534, "step": 4598 }, { "epoch": 0.44912109375, "grad_norm": 0.2065267711877823, "learning_rate": 0.00032235962193313654, "loss": 1.8454, "step": 4599 }, { "epoch": 0.44921875, "grad_norm": 0.23845352232456207, "learning_rate": 0.0003222901002203023, "loss": 1.8486, "step": 4600 }, { "epoch": 0.44931640625, "grad_norm": 0.2285664677619934, "learning_rate": 0.0003222205737836061, "loss": 1.8862, "step": 4601 }, { "epoch": 0.4494140625, "grad_norm": 0.199177548289299, "learning_rate": 0.0003221510426299928, "loss": 1.8045, "step": 
4602 }, { "epoch": 0.44951171875, "grad_norm": 0.21812167763710022, "learning_rate": 0.00032208150676640813, "loss": 1.8548, "step": 4603 }, { "epoch": 0.449609375, "grad_norm": 0.19792795181274414, "learning_rate": 0.000322011966199798, "loss": 1.8192, "step": 4604 }, { "epoch": 0.44970703125, "grad_norm": 0.2088000774383545, "learning_rate": 0.000321942420937109, "loss": 1.8137, "step": 4605 }, { "epoch": 0.4498046875, "grad_norm": 0.1992752104997635, "learning_rate": 0.00032187287098528786, "loss": 1.8331, "step": 4606 }, { "epoch": 0.44990234375, "grad_norm": 0.24328278005123138, "learning_rate": 0.00032180331635128236, "loss": 1.91, "step": 4607 }, { "epoch": 0.45, "grad_norm": 0.21111084520816803, "learning_rate": 0.0003217337570420401, "loss": 1.8614, "step": 4608 }, { "epoch": 0.45009765625, "grad_norm": 0.22330433130264282, "learning_rate": 0.0003216641930645095, "loss": 1.8478, "step": 4609 }, { "epoch": 0.4501953125, "grad_norm": 0.22892868518829346, "learning_rate": 0.0003215946244256395, "loss": 1.856, "step": 4610 }, { "epoch": 0.45029296875, "grad_norm": 0.18284457921981812, "learning_rate": 0.0003215250511323792, "loss": 1.8035, "step": 4611 }, { "epoch": 0.450390625, "grad_norm": 0.21104735136032104, "learning_rate": 0.0003214554731916786, "loss": 1.814, "step": 4612 }, { "epoch": 0.45048828125, "grad_norm": 0.1928839236497879, "learning_rate": 0.00032138589061048774, "loss": 1.8403, "step": 4613 }, { "epoch": 0.4505859375, "grad_norm": 0.2700890004634857, "learning_rate": 0.00032131630339575734, "loss": 1.8487, "step": 4614 }, { "epoch": 0.45068359375, "grad_norm": 0.25254520773887634, "learning_rate": 0.0003212467115544386, "loss": 1.8956, "step": 4615 }, { "epoch": 0.45078125, "grad_norm": 0.19750279188156128, "learning_rate": 0.00032117711509348294, "loss": 1.8706, "step": 4616 }, { "epoch": 0.45087890625, "grad_norm": 0.2669914662837982, "learning_rate": 0.0003211075140198427, "loss": 1.8755, "step": 4617 }, { "epoch": 0.4509765625, "grad_norm": 0.20006148517131805, "learning_rate": 0.0003210379083404703, "loss": 1.8485, "step": 4618 }, { "epoch": 0.45107421875, "grad_norm": 0.22197791934013367, "learning_rate": 0.0003209682980623186, "loss": 1.8702, "step": 4619 }, { "epoch": 0.451171875, "grad_norm": 0.23509272933006287, "learning_rate": 0.00032089868319234126, "loss": 1.8208, "step": 4620 }, { "epoch": 0.45126953125, "grad_norm": 0.20819897949695587, "learning_rate": 0.0003208290637374921, "loss": 1.8235, "step": 4621 }, { "epoch": 0.4513671875, "grad_norm": 0.2182103544473648, "learning_rate": 0.0003207594397047255, "loss": 1.8122, "step": 4622 }, { "epoch": 0.45146484375, "grad_norm": 0.2312661111354828, "learning_rate": 0.00032068981110099613, "loss": 1.8667, "step": 4623 }, { "epoch": 0.4515625, "grad_norm": 0.22262756526470184, "learning_rate": 0.0003206201779332596, "loss": 1.8487, "step": 4624 }, { "epoch": 0.45166015625, "grad_norm": 0.19101223349571228, "learning_rate": 0.0003205505402084713, "loss": 1.858, "step": 4625 }, { "epoch": 0.4517578125, "grad_norm": 0.23975621163845062, "learning_rate": 0.0003204808979335877, "loss": 1.8766, "step": 4626 }, { "epoch": 0.45185546875, "grad_norm": 0.1717529445886612, "learning_rate": 0.0003204112511155652, "loss": 1.8459, "step": 4627 }, { "epoch": 0.451953125, "grad_norm": 0.2034163922071457, "learning_rate": 0.00032034159976136105, "loss": 1.8183, "step": 4628 }, { "epoch": 0.45205078125, "grad_norm": 0.2004881650209427, "learning_rate": 0.00032027194387793285, "loss": 1.8368, "step": 4629 }, { "epoch": 
0.4521484375, "grad_norm": 0.2012924998998642, "learning_rate": 0.0003202022834722384, "loss": 1.8415, "step": 4630 }, { "epoch": 0.45224609375, "grad_norm": 0.21484839916229248, "learning_rate": 0.00032013261855123637, "loss": 1.8123, "step": 4631 }, { "epoch": 0.45234375, "grad_norm": 0.18154776096343994, "learning_rate": 0.0003200629491218856, "loss": 1.864, "step": 4632 }, { "epoch": 0.45244140625, "grad_norm": 0.20559240877628326, "learning_rate": 0.0003199932751911454, "loss": 1.8732, "step": 4633 }, { "epoch": 0.4525390625, "grad_norm": 0.22608701884746552, "learning_rate": 0.0003199235967659756, "loss": 1.8225, "step": 4634 }, { "epoch": 0.45263671875, "grad_norm": 0.22271795570850372, "learning_rate": 0.0003198539138533365, "loss": 1.833, "step": 4635 }, { "epoch": 0.452734375, "grad_norm": 0.21005083620548248, "learning_rate": 0.00031978422646018886, "loss": 1.8308, "step": 4636 }, { "epoch": 0.45283203125, "grad_norm": 0.22713293135166168, "learning_rate": 0.0003197145345934935, "loss": 1.8179, "step": 4637 }, { "epoch": 0.4529296875, "grad_norm": 0.2946507930755615, "learning_rate": 0.0003196448382602125, "loss": 1.8665, "step": 4638 }, { "epoch": 0.45302734375, "grad_norm": 0.24628376960754395, "learning_rate": 0.0003195751374673076, "loss": 1.8204, "step": 4639 }, { "epoch": 0.453125, "grad_norm": 0.2525516152381897, "learning_rate": 0.0003195054322217414, "loss": 1.8574, "step": 4640 }, { "epoch": 0.45322265625, "grad_norm": 0.2724788188934326, "learning_rate": 0.0003194357225304768, "loss": 1.8386, "step": 4641 }, { "epoch": 0.4533203125, "grad_norm": 0.24351954460144043, "learning_rate": 0.00031936600840047706, "loss": 1.8179, "step": 4642 }, { "epoch": 0.45341796875, "grad_norm": 0.23132874071598053, "learning_rate": 0.0003192962898387063, "loss": 1.8607, "step": 4643 }, { "epoch": 0.453515625, "grad_norm": 0.24049578607082367, "learning_rate": 0.00031922656685212853, "loss": 1.8435, "step": 4644 }, { "epoch": 0.45361328125, "grad_norm": 0.19537390768527985, "learning_rate": 0.0003191568394477086, "loss": 1.8654, "step": 4645 }, { "epoch": 0.4537109375, "grad_norm": 0.22461603581905365, "learning_rate": 0.00031908710763241157, "loss": 1.8072, "step": 4646 }, { "epoch": 0.45380859375, "grad_norm": 0.1938980519771576, "learning_rate": 0.00031901737141320303, "loss": 1.8501, "step": 4647 }, { "epoch": 0.45390625, "grad_norm": 0.1806454211473465, "learning_rate": 0.000318947630797049, "loss": 1.8417, "step": 4648 }, { "epoch": 0.45400390625, "grad_norm": 0.18523812294006348, "learning_rate": 0.0003188778857909161, "loss": 1.8341, "step": 4649 }, { "epoch": 0.4541015625, "grad_norm": 0.18443694710731506, "learning_rate": 0.0003188081364017711, "loss": 1.8724, "step": 4650 }, { "epoch": 0.45419921875, "grad_norm": 0.201009139418602, "learning_rate": 0.0003187383826365813, "loss": 1.8353, "step": 4651 }, { "epoch": 0.454296875, "grad_norm": 0.18479257822036743, "learning_rate": 0.0003186686245023147, "loss": 1.8154, "step": 4652 }, { "epoch": 0.45439453125, "grad_norm": 0.22350969910621643, "learning_rate": 0.00031859886200593935, "loss": 1.8358, "step": 4653 }, { "epoch": 0.4544921875, "grad_norm": 0.19744180142879486, "learning_rate": 0.00031852909515442385, "loss": 1.8177, "step": 4654 }, { "epoch": 0.45458984375, "grad_norm": 0.22612422704696655, "learning_rate": 0.0003184593239547375, "loss": 1.8113, "step": 4655 }, { "epoch": 0.4546875, "grad_norm": 0.21232947707176208, "learning_rate": 0.0003183895484138496, "loss": 1.8488, "step": 4656 }, { "epoch": 0.45478515625, 
"grad_norm": 0.1647469848394394, "learning_rate": 0.0003183197685387303, "loss": 1.8376, "step": 4657 }, { "epoch": 0.4548828125, "grad_norm": 0.20144745707511902, "learning_rate": 0.00031824998433634973, "loss": 1.824, "step": 4658 }, { "epoch": 0.45498046875, "grad_norm": 0.1921469271183014, "learning_rate": 0.00031818019581367907, "loss": 1.8716, "step": 4659 }, { "epoch": 0.455078125, "grad_norm": 0.21708911657333374, "learning_rate": 0.0003181104029776893, "loss": 1.8351, "step": 4660 }, { "epoch": 0.45517578125, "grad_norm": 0.18313667178153992, "learning_rate": 0.00031804060583535217, "loss": 1.7719, "step": 4661 }, { "epoch": 0.4552734375, "grad_norm": 0.21659673750400543, "learning_rate": 0.00031797080439363996, "loss": 1.8509, "step": 4662 }, { "epoch": 0.45537109375, "grad_norm": 0.21164323389530182, "learning_rate": 0.00031790099865952495, "loss": 1.8348, "step": 4663 }, { "epoch": 0.45546875, "grad_norm": 0.19318445026874542, "learning_rate": 0.0003178311886399803, "loss": 1.8157, "step": 4664 }, { "epoch": 0.45556640625, "grad_norm": 0.2079407125711441, "learning_rate": 0.00031776137434197924, "loss": 1.8298, "step": 4665 }, { "epoch": 0.4556640625, "grad_norm": 0.23482640087604523, "learning_rate": 0.0003176915557724959, "loss": 1.8816, "step": 4666 }, { "epoch": 0.45576171875, "grad_norm": 0.2438378483057022, "learning_rate": 0.00031762173293850424, "loss": 1.8628, "step": 4667 }, { "epoch": 0.455859375, "grad_norm": 0.2278955578804016, "learning_rate": 0.0003175519058469791, "loss": 1.8159, "step": 4668 }, { "epoch": 0.45595703125, "grad_norm": 0.2014375478029251, "learning_rate": 0.0003174820745048955, "loss": 1.887, "step": 4669 }, { "epoch": 0.4560546875, "grad_norm": 0.24122567474842072, "learning_rate": 0.00031741223891922906, "loss": 1.8403, "step": 4670 }, { "epoch": 0.45615234375, "grad_norm": 0.1940990686416626, "learning_rate": 0.00031734239909695565, "loss": 1.8293, "step": 4671 }, { "epoch": 0.45625, "grad_norm": 0.21825401484966278, "learning_rate": 0.00031727255504505165, "loss": 1.8123, "step": 4672 }, { "epoch": 0.45634765625, "grad_norm": 0.22472091019153595, "learning_rate": 0.00031720270677049395, "loss": 1.8572, "step": 4673 }, { "epoch": 0.4564453125, "grad_norm": 0.2011629343032837, "learning_rate": 0.00031713285428025973, "loss": 1.8251, "step": 4674 }, { "epoch": 0.45654296875, "grad_norm": 0.19883796572685242, "learning_rate": 0.00031706299758132663, "loss": 1.8322, "step": 4675 }, { "epoch": 0.456640625, "grad_norm": 0.20913474261760712, "learning_rate": 0.00031699313668067275, "loss": 1.8519, "step": 4676 }, { "epoch": 0.45673828125, "grad_norm": 0.19562242925167084, "learning_rate": 0.0003169232715852765, "loss": 1.742, "step": 4677 }, { "epoch": 0.4568359375, "grad_norm": 0.19825175404548645, "learning_rate": 0.0003168534023021169, "loss": 1.8201, "step": 4678 }, { "epoch": 0.45693359375, "grad_norm": 0.19559837877750397, "learning_rate": 0.000316783528838173, "loss": 1.8539, "step": 4679 }, { "epoch": 0.45703125, "grad_norm": 0.2345285564661026, "learning_rate": 0.00031671365120042495, "loss": 1.7824, "step": 4680 }, { "epoch": 0.45712890625, "grad_norm": 0.26567429304122925, "learning_rate": 0.00031664376939585255, "loss": 1.8448, "step": 4681 }, { "epoch": 0.4572265625, "grad_norm": 0.24556522071361542, "learning_rate": 0.00031657388343143657, "loss": 1.8422, "step": 4682 }, { "epoch": 0.45732421875, "grad_norm": 0.19742777943611145, "learning_rate": 0.000316503993314158, "loss": 1.8493, "step": 4683 }, { "epoch": 0.457421875, "grad_norm": 
0.2256293147802353, "learning_rate": 0.0003164340990509982, "loss": 1.8233, "step": 4684 }, { "epoch": 0.45751953125, "grad_norm": 0.2693006098270416, "learning_rate": 0.0003163642006489389, "loss": 1.9092, "step": 4685 }, { "epoch": 0.4576171875, "grad_norm": 0.19180917739868164, "learning_rate": 0.00031629429811496235, "loss": 1.8163, "step": 4686 }, { "epoch": 0.45771484375, "grad_norm": 0.23703311383724213, "learning_rate": 0.0003162243914560514, "loss": 1.8646, "step": 4687 }, { "epoch": 0.4578125, "grad_norm": 0.22602078318595886, "learning_rate": 0.0003161544806791889, "loss": 1.8588, "step": 4688 }, { "epoch": 0.45791015625, "grad_norm": 0.22127455472946167, "learning_rate": 0.0003160845657913584, "loss": 1.8436, "step": 4689 }, { "epoch": 0.4580078125, "grad_norm": 0.25368431210517883, "learning_rate": 0.0003160146467995437, "loss": 1.803, "step": 4690 }, { "epoch": 0.45810546875, "grad_norm": 0.21059094369411469, "learning_rate": 0.00031594472371072917, "loss": 1.8458, "step": 4691 }, { "epoch": 0.458203125, "grad_norm": 0.19106703996658325, "learning_rate": 0.0003158747965318995, "loss": 1.8406, "step": 4692 }, { "epoch": 0.45830078125, "grad_norm": 0.20046930015087128, "learning_rate": 0.0003158048652700397, "loss": 1.8376, "step": 4693 }, { "epoch": 0.4583984375, "grad_norm": 0.17693589627742767, "learning_rate": 0.0003157349299321355, "loss": 1.8098, "step": 4694 }, { "epoch": 0.45849609375, "grad_norm": 0.19985699653625488, "learning_rate": 0.0003156649905251725, "loss": 1.8566, "step": 4695 }, { "epoch": 0.45859375, "grad_norm": 0.21398624777793884, "learning_rate": 0.0003155950470561373, "loss": 1.86, "step": 4696 }, { "epoch": 0.45869140625, "grad_norm": 0.19084253907203674, "learning_rate": 0.00031552509953201657, "loss": 1.847, "step": 4697 }, { "epoch": 0.4587890625, "grad_norm": 0.1904585063457489, "learning_rate": 0.0003154551479597974, "loss": 1.8069, "step": 4698 }, { "epoch": 0.45888671875, "grad_norm": 0.20789475739002228, "learning_rate": 0.0003153851923464673, "loss": 1.8255, "step": 4699 }, { "epoch": 0.458984375, "grad_norm": 0.19003227353096008, "learning_rate": 0.0003153152326990143, "loss": 1.8396, "step": 4700 }, { "epoch": 0.45908203125, "grad_norm": 0.17602834105491638, "learning_rate": 0.00031524526902442667, "loss": 1.8203, "step": 4701 }, { "epoch": 0.4591796875, "grad_norm": 0.2242649793624878, "learning_rate": 0.00031517530132969324, "loss": 1.8263, "step": 4702 }, { "epoch": 0.45927734375, "grad_norm": 0.21256732940673828, "learning_rate": 0.00031510532962180315, "loss": 1.8385, "step": 4703 }, { "epoch": 0.459375, "grad_norm": 0.23991239070892334, "learning_rate": 0.0003150353539077459, "loss": 1.7929, "step": 4704 }, { "epoch": 0.45947265625, "grad_norm": 0.21919970214366913, "learning_rate": 0.0003149653741945115, "loss": 1.8649, "step": 4705 }, { "epoch": 0.4595703125, "grad_norm": 0.20121543109416962, "learning_rate": 0.0003148953904890903, "loss": 1.8084, "step": 4706 }, { "epoch": 0.45966796875, "grad_norm": 0.2023702710866928, "learning_rate": 0.00031482540279847293, "loss": 1.8375, "step": 4707 }, { "epoch": 0.459765625, "grad_norm": 0.20311948657035828, "learning_rate": 0.0003147554111296508, "loss": 1.8536, "step": 4708 }, { "epoch": 0.45986328125, "grad_norm": 0.19386813044548035, "learning_rate": 0.00031468541548961514, "loss": 1.8395, "step": 4709 }, { "epoch": 0.4599609375, "grad_norm": 0.1801415979862213, "learning_rate": 0.00031461541588535813, "loss": 1.8171, "step": 4710 }, { "epoch": 0.46005859375, "grad_norm": 
0.22196736931800842, "learning_rate": 0.000314545412323872, "loss": 1.8151, "step": 4711 }, { "epoch": 0.46015625, "grad_norm": 0.21495920419692993, "learning_rate": 0.00031447540481214966, "loss": 1.7585, "step": 4712 }, { "epoch": 0.46025390625, "grad_norm": 0.21375064551830292, "learning_rate": 0.0003144053933571839, "loss": 1.8212, "step": 4713 }, { "epoch": 0.4603515625, "grad_norm": 0.23266538977622986, "learning_rate": 0.0003143353779659685, "loss": 1.8824, "step": 4714 }, { "epoch": 0.46044921875, "grad_norm": 0.20703117549419403, "learning_rate": 0.00031426535864549746, "loss": 1.8231, "step": 4715 }, { "epoch": 0.460546875, "grad_norm": 0.19576159119606018, "learning_rate": 0.0003141953354027648, "loss": 1.7841, "step": 4716 }, { "epoch": 0.46064453125, "grad_norm": 0.23798930644989014, "learning_rate": 0.00031412530824476544, "loss": 1.8671, "step": 4717 }, { "epoch": 0.4607421875, "grad_norm": 0.17755867540836334, "learning_rate": 0.0003140552771784943, "loss": 1.7874, "step": 4718 }, { "epoch": 0.46083984375, "grad_norm": 0.22959421575069427, "learning_rate": 0.0003139852422109471, "loss": 1.7994, "step": 4719 }, { "epoch": 0.4609375, "grad_norm": 0.1911461055278778, "learning_rate": 0.00031391520334911956, "loss": 1.8492, "step": 4720 }, { "epoch": 0.46103515625, "grad_norm": 0.20863354206085205, "learning_rate": 0.00031384516060000794, "loss": 1.8397, "step": 4721 }, { "epoch": 0.4611328125, "grad_norm": 0.21101114153862, "learning_rate": 0.0003137751139706089, "loss": 1.8613, "step": 4722 }, { "epoch": 0.46123046875, "grad_norm": 0.17822936177253723, "learning_rate": 0.0003137050634679195, "loss": 1.8547, "step": 4723 }, { "epoch": 0.461328125, "grad_norm": 0.18411372601985931, "learning_rate": 0.00031363500909893714, "loss": 1.7851, "step": 4724 }, { "epoch": 0.46142578125, "grad_norm": 0.2380722016096115, "learning_rate": 0.00031356495087065967, "loss": 1.8754, "step": 4725 }, { "epoch": 0.4615234375, "grad_norm": 0.1815214604139328, "learning_rate": 0.00031349488879008525, "loss": 1.8613, "step": 4726 }, { "epoch": 0.46162109375, "grad_norm": 0.2026418000459671, "learning_rate": 0.00031342482286421245, "loss": 1.8119, "step": 4727 }, { "epoch": 0.46171875, "grad_norm": 0.201633021235466, "learning_rate": 0.0003133547531000403, "loss": 1.8222, "step": 4728 }, { "epoch": 0.46181640625, "grad_norm": 0.20121082663536072, "learning_rate": 0.0003132846795045681, "loss": 1.8296, "step": 4729 }, { "epoch": 0.4619140625, "grad_norm": 0.20398394763469696, "learning_rate": 0.0003132146020847956, "loss": 1.8039, "step": 4730 }, { "epoch": 0.46201171875, "grad_norm": 0.23014071583747864, "learning_rate": 0.00031314452084772287, "loss": 1.8408, "step": 4731 }, { "epoch": 0.462109375, "grad_norm": 0.18318094313144684, "learning_rate": 0.00031307443580035044, "loss": 1.847, "step": 4732 }, { "epoch": 0.46220703125, "grad_norm": 0.24769657850265503, "learning_rate": 0.0003130043469496793, "loss": 1.7926, "step": 4733 }, { "epoch": 0.4623046875, "grad_norm": 0.23016546666622162, "learning_rate": 0.0003129342543027104, "loss": 1.829, "step": 4734 }, { "epoch": 0.46240234375, "grad_norm": 0.20693141222000122, "learning_rate": 0.00031286415786644566, "loss": 1.8134, "step": 4735 }, { "epoch": 0.4625, "grad_norm": 0.2622755467891693, "learning_rate": 0.000312794057647887, "loss": 1.7994, "step": 4736 }, { "epoch": 0.46259765625, "grad_norm": 0.2551204562187195, "learning_rate": 0.00031272395365403676, "loss": 1.8068, "step": 4737 }, { "epoch": 0.4626953125, "grad_norm": 0.2194601446390152, 
"learning_rate": 0.00031265384589189784, "loss": 1.8373, "step": 4738 }, { "epoch": 0.46279296875, "grad_norm": 0.2409636527299881, "learning_rate": 0.0003125837343684732, "loss": 1.8253, "step": 4739 }, { "epoch": 0.462890625, "grad_norm": 0.18212181329727173, "learning_rate": 0.00031251361909076655, "loss": 1.8611, "step": 4740 }, { "epoch": 0.46298828125, "grad_norm": 0.2114643007516861, "learning_rate": 0.0003124435000657817, "loss": 1.8038, "step": 4741 }, { "epoch": 0.4630859375, "grad_norm": 0.18933500349521637, "learning_rate": 0.0003123733773005228, "loss": 1.8489, "step": 4742 }, { "epoch": 0.46318359375, "grad_norm": 0.26138171553611755, "learning_rate": 0.00031230325080199467, "loss": 1.8637, "step": 4743 }, { "epoch": 0.46328125, "grad_norm": 0.19251863658428192, "learning_rate": 0.0003122331205772023, "loss": 1.8781, "step": 4744 }, { "epoch": 0.46337890625, "grad_norm": 0.22503794729709625, "learning_rate": 0.0003121629866331509, "loss": 1.8259, "step": 4745 }, { "epoch": 0.4634765625, "grad_norm": 0.18829478323459625, "learning_rate": 0.0003120928489768465, "loss": 1.8363, "step": 4746 }, { "epoch": 0.46357421875, "grad_norm": 0.23937945067882538, "learning_rate": 0.00031202270761529504, "loss": 1.8785, "step": 4747 }, { "epoch": 0.463671875, "grad_norm": 0.19133053719997406, "learning_rate": 0.0003119525625555031, "loss": 1.7037, "step": 4748 }, { "epoch": 0.46376953125, "grad_norm": 0.238962322473526, "learning_rate": 0.00031188241380447753, "loss": 1.8661, "step": 4749 }, { "epoch": 0.4638671875, "grad_norm": 0.19993072748184204, "learning_rate": 0.00031181226136922565, "loss": 1.8326, "step": 4750 }, { "epoch": 0.46396484375, "grad_norm": 0.22920852899551392, "learning_rate": 0.00031174210525675486, "loss": 1.8469, "step": 4751 }, { "epoch": 0.4640625, "grad_norm": 0.1875825673341751, "learning_rate": 0.0003116719454740733, "loss": 1.8428, "step": 4752 }, { "epoch": 0.46416015625, "grad_norm": 0.17680490016937256, "learning_rate": 0.0003116017820281893, "loss": 1.857, "step": 4753 }, { "epoch": 0.4642578125, "grad_norm": 0.19791361689567566, "learning_rate": 0.00031153161492611153, "loss": 1.8623, "step": 4754 }, { "epoch": 0.46435546875, "grad_norm": 0.19239719212055206, "learning_rate": 0.00031146144417484905, "loss": 1.8489, "step": 4755 }, { "epoch": 0.464453125, "grad_norm": 0.25808653235435486, "learning_rate": 0.0003113912697814113, "loss": 1.8682, "step": 4756 }, { "epoch": 0.46455078125, "grad_norm": 0.24941441416740417, "learning_rate": 0.0003113210917528082, "loss": 1.8667, "step": 4757 }, { "epoch": 0.4646484375, "grad_norm": 0.22093206644058228, "learning_rate": 0.00031125091009604974, "loss": 1.8292, "step": 4758 }, { "epoch": 0.46474609375, "grad_norm": 0.2532329559326172, "learning_rate": 0.00031118072481814646, "loss": 1.8661, "step": 4759 }, { "epoch": 0.46484375, "grad_norm": 0.23447300493717194, "learning_rate": 0.00031111053592610934, "loss": 1.856, "step": 4760 }, { "epoch": 0.46494140625, "grad_norm": 0.2655586898326874, "learning_rate": 0.00031104034342694964, "loss": 1.8637, "step": 4761 }, { "epoch": 0.4650390625, "grad_norm": 0.24939312040805817, "learning_rate": 0.00031097014732767885, "loss": 1.856, "step": 4762 }, { "epoch": 0.46513671875, "grad_norm": 0.22842450439929962, "learning_rate": 0.00031089994763530904, "loss": 1.8004, "step": 4763 }, { "epoch": 0.465234375, "grad_norm": 0.23652945458889008, "learning_rate": 0.00031082974435685257, "loss": 1.8637, "step": 4764 }, { "epoch": 0.46533203125, "grad_norm": 0.2600919306278229, 
"learning_rate": 0.00031075953749932193, "loss": 1.865, "step": 4765 }, { "epoch": 0.4654296875, "grad_norm": 0.23312941193580627, "learning_rate": 0.0003106893270697304, "loss": 1.8382, "step": 4766 }, { "epoch": 0.46552734375, "grad_norm": 0.22886423766613007, "learning_rate": 0.0003106191130750912, "loss": 1.8445, "step": 4767 }, { "epoch": 0.465625, "grad_norm": 0.21378353238105774, "learning_rate": 0.0003105488955224182, "loss": 1.8366, "step": 4768 }, { "epoch": 0.46572265625, "grad_norm": 0.20071300864219666, "learning_rate": 0.0003104786744187255, "loss": 1.8199, "step": 4769 }, { "epoch": 0.4658203125, "grad_norm": 0.19779184460639954, "learning_rate": 0.0003104084497710275, "loss": 1.8652, "step": 4770 }, { "epoch": 0.46591796875, "grad_norm": 0.22096706926822662, "learning_rate": 0.00031033822158633913, "loss": 1.8289, "step": 4771 }, { "epoch": 0.466015625, "grad_norm": 0.1866249144077301, "learning_rate": 0.0003102679898716754, "loss": 1.8178, "step": 4772 }, { "epoch": 0.46611328125, "grad_norm": 0.21741607785224915, "learning_rate": 0.000310197754634052, "loss": 1.8224, "step": 4773 }, { "epoch": 0.4662109375, "grad_norm": 0.18589475750923157, "learning_rate": 0.00031012751588048465, "loss": 1.8504, "step": 4774 }, { "epoch": 0.46630859375, "grad_norm": 0.18547822535037994, "learning_rate": 0.00031005727361798973, "loss": 1.8022, "step": 4775 }, { "epoch": 0.46640625, "grad_norm": 0.22344110906124115, "learning_rate": 0.0003099870278535837, "loss": 1.8399, "step": 4776 }, { "epoch": 0.46650390625, "grad_norm": 0.1753036230802536, "learning_rate": 0.00030991677859428364, "loss": 1.8744, "step": 4777 }, { "epoch": 0.4666015625, "grad_norm": 0.20883294939994812, "learning_rate": 0.00030984652584710665, "loss": 1.8307, "step": 4778 }, { "epoch": 0.46669921875, "grad_norm": 0.2109844982624054, "learning_rate": 0.0003097762696190704, "loss": 1.8164, "step": 4779 }, { "epoch": 0.466796875, "grad_norm": 0.18941885232925415, "learning_rate": 0.000309706009917193, "loss": 1.8122, "step": 4780 }, { "epoch": 0.46689453125, "grad_norm": 0.21899116039276123, "learning_rate": 0.00030963574674849263, "loss": 1.7913, "step": 4781 }, { "epoch": 0.4669921875, "grad_norm": 0.2173784077167511, "learning_rate": 0.0003095654801199881, "loss": 1.8328, "step": 4782 }, { "epoch": 0.46708984375, "grad_norm": 0.1898737996816635, "learning_rate": 0.00030949521003869826, "loss": 1.8587, "step": 4783 }, { "epoch": 0.4671875, "grad_norm": 0.22946122288703918, "learning_rate": 0.00030942493651164247, "loss": 1.869, "step": 4784 }, { "epoch": 0.46728515625, "grad_norm": 0.22063229978084564, "learning_rate": 0.00030935465954584065, "loss": 1.8588, "step": 4785 }, { "epoch": 0.4673828125, "grad_norm": 0.24954596161842346, "learning_rate": 0.0003092843791483125, "loss": 1.8617, "step": 4786 }, { "epoch": 0.46748046875, "grad_norm": 0.19222931563854218, "learning_rate": 0.0003092140953260788, "loss": 1.8434, "step": 4787 }, { "epoch": 0.467578125, "grad_norm": 0.26410526037216187, "learning_rate": 0.00030914380808615995, "loss": 1.8371, "step": 4788 }, { "epoch": 0.46767578125, "grad_norm": 0.208063542842865, "learning_rate": 0.00030907351743557723, "loss": 1.8176, "step": 4789 }, { "epoch": 0.4677734375, "grad_norm": 0.21440909802913666, "learning_rate": 0.00030900322338135196, "loss": 1.8456, "step": 4790 }, { "epoch": 0.46787109375, "grad_norm": 0.21355842053890228, "learning_rate": 0.00030893292593050587, "loss": 1.8031, "step": 4791 }, { "epoch": 0.46796875, "grad_norm": 0.17706333100795746, 
"learning_rate": 0.00030886262509006117, "loss": 1.8279, "step": 4792 }, { "epoch": 0.46806640625, "grad_norm": 0.18919691443443298, "learning_rate": 0.00030879232086704014, "loss": 1.8368, "step": 4793 }, { "epoch": 0.4681640625, "grad_norm": 0.18476110696792603, "learning_rate": 0.00030872201326846574, "loss": 1.8686, "step": 4794 }, { "epoch": 0.46826171875, "grad_norm": 0.20892632007598877, "learning_rate": 0.00030865170230136085, "loss": 1.8322, "step": 4795 }, { "epoch": 0.468359375, "grad_norm": 0.23286119103431702, "learning_rate": 0.00030858138797274913, "loss": 1.8497, "step": 4796 }, { "epoch": 0.46845703125, "grad_norm": 0.18783611059188843, "learning_rate": 0.00030851107028965417, "loss": 1.8145, "step": 4797 }, { "epoch": 0.4685546875, "grad_norm": 0.2191592901945114, "learning_rate": 0.00030844074925910023, "loss": 1.8404, "step": 4798 }, { "epoch": 0.46865234375, "grad_norm": 0.2329634130001068, "learning_rate": 0.0003083704248881117, "loss": 1.8082, "step": 4799 }, { "epoch": 0.46875, "grad_norm": 0.24307815730571747, "learning_rate": 0.0003083000971837133, "loss": 1.8451, "step": 4800 }, { "epoch": 0.46884765625, "grad_norm": 0.18854624032974243, "learning_rate": 0.00030822976615293025, "loss": 1.8779, "step": 4801 }, { "epoch": 0.4689453125, "grad_norm": 0.20139099657535553, "learning_rate": 0.00030815943180278793, "loss": 1.7947, "step": 4802 }, { "epoch": 0.46904296875, "grad_norm": 0.20154617726802826, "learning_rate": 0.00030808909414031224, "loss": 1.8244, "step": 4803 }, { "epoch": 0.469140625, "grad_norm": 0.2227020114660263, "learning_rate": 0.00030801875317252903, "loss": 1.8179, "step": 4804 }, { "epoch": 0.46923828125, "grad_norm": 0.19972631335258484, "learning_rate": 0.0003079484089064649, "loss": 1.8663, "step": 4805 }, { "epoch": 0.4693359375, "grad_norm": 0.2464173436164856, "learning_rate": 0.0003078780613491468, "loss": 1.8387, "step": 4806 }, { "epoch": 0.46943359375, "grad_norm": 0.16700857877731323, "learning_rate": 0.00030780771050760143, "loss": 1.8156, "step": 4807 }, { "epoch": 0.46953125, "grad_norm": 0.27698126435279846, "learning_rate": 0.0003077373563888565, "loss": 1.8527, "step": 4808 }, { "epoch": 0.46962890625, "grad_norm": 0.24625715613365173, "learning_rate": 0.0003076669989999398, "loss": 1.8459, "step": 4809 }, { "epoch": 0.4697265625, "grad_norm": 0.20767800509929657, "learning_rate": 0.00030759663834787915, "loss": 1.8617, "step": 4810 }, { "epoch": 0.46982421875, "grad_norm": 0.2153705656528473, "learning_rate": 0.0003075262744397032, "loss": 1.8623, "step": 4811 }, { "epoch": 0.469921875, "grad_norm": 0.26322141289711, "learning_rate": 0.00030745590728244057, "loss": 1.8434, "step": 4812 }, { "epoch": 0.47001953125, "grad_norm": 0.22390654683113098, "learning_rate": 0.0003073855368831204, "loss": 1.8188, "step": 4813 }, { "epoch": 0.4701171875, "grad_norm": 0.22162260115146637, "learning_rate": 0.0003073151632487719, "loss": 1.8019, "step": 4814 }, { "epoch": 0.47021484375, "grad_norm": 0.29895639419555664, "learning_rate": 0.00030724478638642486, "loss": 1.8382, "step": 4815 }, { "epoch": 0.4703125, "grad_norm": 0.22144287824630737, "learning_rate": 0.0003071744063031094, "loss": 1.8087, "step": 4816 }, { "epoch": 0.47041015625, "grad_norm": 0.21702681481838226, "learning_rate": 0.00030710402300585584, "loss": 1.8183, "step": 4817 }, { "epoch": 0.4705078125, "grad_norm": 0.19565224647521973, "learning_rate": 0.0003070336365016947, "loss": 1.8004, "step": 4818 }, { "epoch": 0.47060546875, "grad_norm": 0.21208736300468445, 
"learning_rate": 0.00030696324679765707, "loss": 1.8431, "step": 4819 }, { "epoch": 0.470703125, "grad_norm": 0.17192751169204712, "learning_rate": 0.0003068928539007743, "loss": 1.8063, "step": 4820 }, { "epoch": 0.47080078125, "grad_norm": 0.20137910544872284, "learning_rate": 0.00030682245781807793, "loss": 1.8505, "step": 4821 }, { "epoch": 0.4708984375, "grad_norm": 0.20366063714027405, "learning_rate": 0.0003067520585566, "loss": 1.8078, "step": 4822 }, { "epoch": 0.47099609375, "grad_norm": 0.20955240726470947, "learning_rate": 0.0003066816561233727, "loss": 1.8682, "step": 4823 }, { "epoch": 0.47109375, "grad_norm": 0.21498572826385498, "learning_rate": 0.0003066112505254286, "loss": 1.8392, "step": 4824 }, { "epoch": 0.47119140625, "grad_norm": 0.18648019433021545, "learning_rate": 0.0003065408417698006, "loss": 1.8007, "step": 4825 }, { "epoch": 0.4712890625, "grad_norm": 0.20473703742027283, "learning_rate": 0.00030647042986352204, "loss": 1.7969, "step": 4826 }, { "epoch": 0.47138671875, "grad_norm": 0.1886153668165207, "learning_rate": 0.0003064000148136262, "loss": 1.8533, "step": 4827 }, { "epoch": 0.471484375, "grad_norm": 0.23058368265628815, "learning_rate": 0.0003063295966271471, "loss": 1.8437, "step": 4828 }, { "epoch": 0.47158203125, "grad_norm": 0.20837700366973877, "learning_rate": 0.0003062591753111189, "loss": 1.8072, "step": 4829 }, { "epoch": 0.4716796875, "grad_norm": 0.1911066621541977, "learning_rate": 0.000306188750872576, "loss": 1.8039, "step": 4830 }, { "epoch": 0.47177734375, "grad_norm": 0.24263250827789307, "learning_rate": 0.0003061183233185532, "loss": 1.8207, "step": 4831 }, { "epoch": 0.471875, "grad_norm": 0.23435233533382416, "learning_rate": 0.00030604789265608554, "loss": 1.8643, "step": 4832 }, { "epoch": 0.47197265625, "grad_norm": 0.2540555000305176, "learning_rate": 0.0003059774588922085, "loss": 1.7786, "step": 4833 }, { "epoch": 0.4720703125, "grad_norm": 0.20325538516044617, "learning_rate": 0.00030590702203395784, "loss": 1.8221, "step": 4834 }, { "epoch": 0.47216796875, "grad_norm": 0.3231290876865387, "learning_rate": 0.00030583658208836934, "loss": 1.8664, "step": 4835 }, { "epoch": 0.472265625, "grad_norm": 0.23043441772460938, "learning_rate": 0.0003057661390624796, "loss": 1.8649, "step": 4836 }, { "epoch": 0.47236328125, "grad_norm": 0.2705766558647156, "learning_rate": 0.000305695692963325, "loss": 1.8615, "step": 4837 }, { "epoch": 0.4724609375, "grad_norm": 0.2516499161720276, "learning_rate": 0.00030562524379794273, "loss": 1.8781, "step": 4838 }, { "epoch": 0.47255859375, "grad_norm": 0.23899440467357635, "learning_rate": 0.0003055547915733699, "loss": 1.8527, "step": 4839 }, { "epoch": 0.47265625, "grad_norm": 0.23780567944049835, "learning_rate": 0.0003054843362966441, "loss": 1.8686, "step": 4840 }, { "epoch": 0.47275390625, "grad_norm": 0.2458800971508026, "learning_rate": 0.0003054138779748031, "loss": 1.8402, "step": 4841 }, { "epoch": 0.4728515625, "grad_norm": 0.21160176396369934, "learning_rate": 0.0003053434166148851, "loss": 1.84, "step": 4842 }, { "epoch": 0.47294921875, "grad_norm": 0.19955572485923767, "learning_rate": 0.0003052729522239287, "loss": 1.8212, "step": 4843 }, { "epoch": 0.473046875, "grad_norm": 0.19997259974479675, "learning_rate": 0.0003052024848089725, "loss": 1.8435, "step": 4844 }, { "epoch": 0.47314453125, "grad_norm": 0.21002988517284393, "learning_rate": 0.0003051320143770557, "loss": 1.8503, "step": 4845 }, { "epoch": 0.4732421875, "grad_norm": 0.20021061599254608, "learning_rate": 
0.00030506154093521754, "loss": 1.8227, "step": 4846 }, { "epoch": 0.47333984375, "grad_norm": 0.20722252130508423, "learning_rate": 0.0003049910644904978, "loss": 1.8288, "step": 4847 }, { "epoch": 0.4734375, "grad_norm": 0.23750610649585724, "learning_rate": 0.00030492058504993637, "loss": 1.8581, "step": 4848 }, { "epoch": 0.47353515625, "grad_norm": 0.19082392752170563, "learning_rate": 0.0003048501026205736, "loss": 1.8576, "step": 4849 }, { "epoch": 0.4736328125, "grad_norm": 0.23766285181045532, "learning_rate": 0.0003047796172094501, "loss": 1.8303, "step": 4850 }, { "epoch": 0.47373046875, "grad_norm": 0.21583178639411926, "learning_rate": 0.0003047091288236066, "loss": 1.789, "step": 4851 }, { "epoch": 0.473828125, "grad_norm": 0.21662573516368866, "learning_rate": 0.0003046386374700843, "loss": 1.8523, "step": 4852 }, { "epoch": 0.47392578125, "grad_norm": 0.22070710361003876, "learning_rate": 0.0003045681431559247, "loss": 1.8599, "step": 4853 }, { "epoch": 0.4740234375, "grad_norm": 0.24383734166622162, "learning_rate": 0.0003044976458881696, "loss": 1.8268, "step": 4854 }, { "epoch": 0.47412109375, "grad_norm": 0.2057112157344818, "learning_rate": 0.0003044271456738609, "loss": 1.8288, "step": 4855 }, { "epoch": 0.47421875, "grad_norm": 0.1968592405319214, "learning_rate": 0.0003043566425200411, "loss": 1.8348, "step": 4856 }, { "epoch": 0.47431640625, "grad_norm": 0.2273697406053543, "learning_rate": 0.0003042861364337529, "loss": 1.8611, "step": 4857 }, { "epoch": 0.4744140625, "grad_norm": 0.18474043905735016, "learning_rate": 0.00030421562742203895, "loss": 1.8141, "step": 4858 }, { "epoch": 0.47451171875, "grad_norm": 0.23579570651054382, "learning_rate": 0.00030414511549194275, "loss": 1.8601, "step": 4859 }, { "epoch": 0.474609375, "grad_norm": 0.21270345151424408, "learning_rate": 0.0003040746006505077, "loss": 1.8268, "step": 4860 }, { "epoch": 0.47470703125, "grad_norm": 0.2242332249879837, "learning_rate": 0.0003040040829047776, "loss": 1.8241, "step": 4861 }, { "epoch": 0.4748046875, "grad_norm": 0.23544490337371826, "learning_rate": 0.0003039335622617966, "loss": 1.8104, "step": 4862 }, { "epoch": 0.47490234375, "grad_norm": 0.2175990343093872, "learning_rate": 0.000303863038728609, "loss": 1.8609, "step": 4863 }, { "epoch": 0.475, "grad_norm": 0.21629612147808075, "learning_rate": 0.00030379251231225956, "loss": 1.8484, "step": 4864 }, { "epoch": 0.47509765625, "grad_norm": 0.24267800152301788, "learning_rate": 0.00030372198301979324, "loss": 1.809, "step": 4865 }, { "epoch": 0.4751953125, "grad_norm": 0.1799250841140747, "learning_rate": 0.0003036514508582553, "loss": 1.8257, "step": 4866 }, { "epoch": 0.47529296875, "grad_norm": 0.25859299302101135, "learning_rate": 0.00030358091583469114, "loss": 1.8236, "step": 4867 }, { "epoch": 0.475390625, "grad_norm": 0.18856181204319, "learning_rate": 0.00030351037795614685, "loss": 1.809, "step": 4868 }, { "epoch": 0.47548828125, "grad_norm": 0.24719011783599854, "learning_rate": 0.00030343983722966826, "loss": 1.8375, "step": 4869 }, { "epoch": 0.4755859375, "grad_norm": 0.23080545663833618, "learning_rate": 0.00030336929366230194, "loss": 1.9084, "step": 4870 }, { "epoch": 0.47568359375, "grad_norm": 0.2338431179523468, "learning_rate": 0.00030329874726109444, "loss": 1.8331, "step": 4871 }, { "epoch": 0.47578125, "grad_norm": 0.20135995745658875, "learning_rate": 0.0003032281980330929, "loss": 1.8057, "step": 4872 }, { "epoch": 0.47587890625, "grad_norm": 0.21494583785533905, "learning_rate": 0.0003031576459853444, 
"loss": 1.8325, "step": 4873 }, { "epoch": 0.4759765625, "grad_norm": 0.2113523930311203, "learning_rate": 0.0003030870911248965, "loss": 1.8472, "step": 4874 }, { "epoch": 0.47607421875, "grad_norm": 0.18979957699775696, "learning_rate": 0.0003030165334587971, "loss": 1.8702, "step": 4875 }, { "epoch": 0.476171875, "grad_norm": 0.22056496143341064, "learning_rate": 0.0003029459729940941, "loss": 1.8397, "step": 4876 }, { "epoch": 0.47626953125, "grad_norm": 0.23806160688400269, "learning_rate": 0.00030287540973783603, "loss": 1.8916, "step": 4877 }, { "epoch": 0.4763671875, "grad_norm": 0.21538598835468292, "learning_rate": 0.00030280484369707157, "loss": 1.845, "step": 4878 }, { "epoch": 0.47646484375, "grad_norm": 0.21205362677574158, "learning_rate": 0.0003027342748788494, "loss": 1.8413, "step": 4879 }, { "epoch": 0.4765625, "grad_norm": 0.23677507042884827, "learning_rate": 0.0003026637032902189, "loss": 1.8386, "step": 4880 }, { "epoch": 0.47666015625, "grad_norm": 0.18932418525218964, "learning_rate": 0.0003025931289382296, "loss": 1.8216, "step": 4881 }, { "epoch": 0.4767578125, "grad_norm": 0.21288220584392548, "learning_rate": 0.0003025225518299312, "loss": 1.8326, "step": 4882 }, { "epoch": 0.47685546875, "grad_norm": 0.19770745933055878, "learning_rate": 0.0003024519719723736, "loss": 1.8327, "step": 4883 }, { "epoch": 0.476953125, "grad_norm": 0.19045844674110413, "learning_rate": 0.00030238138937260724, "loss": 1.8104, "step": 4884 }, { "epoch": 0.47705078125, "grad_norm": 0.18676099181175232, "learning_rate": 0.0003023108040376827, "loss": 1.8833, "step": 4885 }, { "epoch": 0.4771484375, "grad_norm": 0.17938444018363953, "learning_rate": 0.0003022402159746507, "loss": 1.7904, "step": 4886 }, { "epoch": 0.47724609375, "grad_norm": 0.2277635782957077, "learning_rate": 0.0003021696251905625, "loss": 1.8592, "step": 4887 }, { "epoch": 0.47734375, "grad_norm": 0.22101110219955444, "learning_rate": 0.00030209903169246946, "loss": 1.8322, "step": 4888 }, { "epoch": 0.47744140625, "grad_norm": 0.20787768065929413, "learning_rate": 0.0003020284354874233, "loss": 1.8174, "step": 4889 }, { "epoch": 0.4775390625, "grad_norm": 0.21741506457328796, "learning_rate": 0.00030195783658247565, "loss": 1.858, "step": 4890 }, { "epoch": 0.47763671875, "grad_norm": 0.20655880868434906, "learning_rate": 0.0003018872349846791, "loss": 1.8527, "step": 4891 }, { "epoch": 0.477734375, "grad_norm": 0.25105416774749756, "learning_rate": 0.00030181663070108603, "loss": 1.8398, "step": 4892 }, { "epoch": 0.47783203125, "grad_norm": 0.23866187036037445, "learning_rate": 0.0003017460237387491, "loss": 1.8118, "step": 4893 }, { "epoch": 0.4779296875, "grad_norm": 0.20497925579547882, "learning_rate": 0.00030167541410472127, "loss": 1.8236, "step": 4894 }, { "epoch": 0.47802734375, "grad_norm": 0.2557959258556366, "learning_rate": 0.0003016048018060559, "loss": 1.828, "step": 4895 }, { "epoch": 0.478125, "grad_norm": 0.18973244726657867, "learning_rate": 0.0003015341868498066, "loss": 1.8348, "step": 4896 }, { "epoch": 0.47822265625, "grad_norm": 0.21844008564949036, "learning_rate": 0.0003014635692430271, "loss": 1.8442, "step": 4897 }, { "epoch": 0.4783203125, "grad_norm": 0.2102670669555664, "learning_rate": 0.0003013929489927714, "loss": 1.8281, "step": 4898 }, { "epoch": 0.47841796875, "grad_norm": 0.21705353260040283, "learning_rate": 0.000301322326106094, "loss": 1.8495, "step": 4899 }, { "epoch": 0.478515625, "grad_norm": 0.20494355261325836, "learning_rate": 0.0003012517005900494, "loss": 1.8531, 
"step": 4900 }, { "epoch": 0.47861328125, "grad_norm": 0.24969077110290527, "learning_rate": 0.0003011810724516924, "loss": 1.8269, "step": 4901 }, { "epoch": 0.4787109375, "grad_norm": 0.18864525854587555, "learning_rate": 0.00030111044169807834, "loss": 1.8504, "step": 4902 }, { "epoch": 0.47880859375, "grad_norm": 0.2562215328216553, "learning_rate": 0.00030103980833626236, "loss": 1.7692, "step": 4903 }, { "epoch": 0.47890625, "grad_norm": 0.16926217079162598, "learning_rate": 0.00030096917237330026, "loss": 1.8161, "step": 4904 }, { "epoch": 0.47900390625, "grad_norm": 0.26131317019462585, "learning_rate": 0.00030089853381624795, "loss": 1.8637, "step": 4905 }, { "epoch": 0.4791015625, "grad_norm": 0.1805618852376938, "learning_rate": 0.0003008278926721615, "loss": 1.8404, "step": 4906 }, { "epoch": 0.47919921875, "grad_norm": 0.21555235981941223, "learning_rate": 0.00030075724894809735, "loss": 1.8624, "step": 4907 }, { "epoch": 0.479296875, "grad_norm": 0.20038826763629913, "learning_rate": 0.00030068660265111227, "loss": 1.81, "step": 4908 }, { "epoch": 0.47939453125, "grad_norm": 0.20453526079654694, "learning_rate": 0.0003006159537882631, "loss": 1.8658, "step": 4909 }, { "epoch": 0.4794921875, "grad_norm": 0.23110945522785187, "learning_rate": 0.0003005453023666072, "loss": 1.8521, "step": 4910 }, { "epoch": 0.47958984375, "grad_norm": 0.25841307640075684, "learning_rate": 0.0003004746483932018, "loss": 1.825, "step": 4911 }, { "epoch": 0.4796875, "grad_norm": 0.21263115108013153, "learning_rate": 0.0003004039918751047, "loss": 1.8036, "step": 4912 }, { "epoch": 0.47978515625, "grad_norm": 0.21389488875865936, "learning_rate": 0.00030033333281937396, "loss": 1.7746, "step": 4913 }, { "epoch": 0.4798828125, "grad_norm": 0.22237850725650787, "learning_rate": 0.00030026267123306764, "loss": 1.8436, "step": 4914 }, { "epoch": 0.47998046875, "grad_norm": 0.2137548178434372, "learning_rate": 0.0003001920071232443, "loss": 1.8037, "step": 4915 }, { "epoch": 0.480078125, "grad_norm": 0.1943012923002243, "learning_rate": 0.0003001213404969626, "loss": 1.8613, "step": 4916 }, { "epoch": 0.48017578125, "grad_norm": 0.1761748194694519, "learning_rate": 0.00030005067136128157, "loss": 1.8841, "step": 4917 }, { "epoch": 0.4802734375, "grad_norm": 0.20015399158000946, "learning_rate": 0.00029997999972326036, "loss": 1.8309, "step": 4918 }, { "epoch": 0.48037109375, "grad_norm": 0.19679149985313416, "learning_rate": 0.00029990932558995854, "loss": 1.8015, "step": 4919 }, { "epoch": 0.48046875, "grad_norm": 0.22215259075164795, "learning_rate": 0.00029983864896843576, "loss": 1.837, "step": 4920 }, { "epoch": 0.48056640625, "grad_norm": 0.20847636461257935, "learning_rate": 0.00029976796986575194, "loss": 1.7845, "step": 4921 }, { "epoch": 0.4806640625, "grad_norm": 0.21888545155525208, "learning_rate": 0.0002996972882889674, "loss": 1.7938, "step": 4922 }, { "epoch": 0.48076171875, "grad_norm": 0.1888756901025772, "learning_rate": 0.00029962660424514247, "loss": 1.8435, "step": 4923 }, { "epoch": 0.480859375, "grad_norm": 0.2355894297361374, "learning_rate": 0.0002995559177413381, "loss": 1.8505, "step": 4924 }, { "epoch": 0.48095703125, "grad_norm": 0.18631896376609802, "learning_rate": 0.00029948522878461495, "loss": 1.84, "step": 4925 }, { "epoch": 0.4810546875, "grad_norm": 0.20177164673805237, "learning_rate": 0.0002994145373820345, "loss": 1.8834, "step": 4926 }, { "epoch": 0.48115234375, "grad_norm": 0.19149263203144073, "learning_rate": 0.000299343843540658, "loss": 1.825, "step": 4927 
}, { "epoch": 0.48125, "grad_norm": 0.2072160691022873, "learning_rate": 0.0002992731472675472, "loss": 1.8557, "step": 4928 }, { "epoch": 0.48134765625, "grad_norm": 0.19966818392276764, "learning_rate": 0.00029920244856976395, "loss": 1.8274, "step": 4929 }, { "epoch": 0.4814453125, "grad_norm": 0.21067434549331665, "learning_rate": 0.00029913174745437054, "loss": 1.8083, "step": 4930 }, { "epoch": 0.48154296875, "grad_norm": 0.21291854977607727, "learning_rate": 0.00029906104392842946, "loss": 1.8408, "step": 4931 }, { "epoch": 0.481640625, "grad_norm": 0.24560315907001495, "learning_rate": 0.0002989903379990031, "loss": 1.8039, "step": 4932 }, { "epoch": 0.48173828125, "grad_norm": 0.21008658409118652, "learning_rate": 0.0002989196296731547, "loss": 1.8344, "step": 4933 }, { "epoch": 0.4818359375, "grad_norm": 0.1998874843120575, "learning_rate": 0.00029884891895794706, "loss": 1.836, "step": 4934 }, { "epoch": 0.48193359375, "grad_norm": 0.1839420348405838, "learning_rate": 0.0002987782058604437, "loss": 1.8072, "step": 4935 }, { "epoch": 0.48203125, "grad_norm": 0.18092377483844757, "learning_rate": 0.00029870749038770825, "loss": 1.8041, "step": 4936 }, { "epoch": 0.48212890625, "grad_norm": 0.19177129864692688, "learning_rate": 0.0002986367725468045, "loss": 1.7981, "step": 4937 }, { "epoch": 0.4822265625, "grad_norm": 0.16073240339756012, "learning_rate": 0.0002985660523447967, "loss": 1.82, "step": 4938 }, { "epoch": 0.48232421875, "grad_norm": 0.19713576138019562, "learning_rate": 0.0002984953297887489, "loss": 1.8136, "step": 4939 }, { "epoch": 0.482421875, "grad_norm": 0.16476258635520935, "learning_rate": 0.0002984246048857259, "loss": 1.853, "step": 4940 }, { "epoch": 0.48251953125, "grad_norm": 0.20114773511886597, "learning_rate": 0.0002983538776427923, "loss": 1.8006, "step": 4941 }, { "epoch": 0.4826171875, "grad_norm": 0.18631845712661743, "learning_rate": 0.00029828314806701334, "loss": 1.8018, "step": 4942 }, { "epoch": 0.48271484375, "grad_norm": 0.1939045935869217, "learning_rate": 0.0002982124161654541, "loss": 1.7903, "step": 4943 }, { "epoch": 0.4828125, "grad_norm": 0.22019857168197632, "learning_rate": 0.0002981416819451801, "loss": 1.8579, "step": 4944 }, { "epoch": 0.48291015625, "grad_norm": 0.25916898250579834, "learning_rate": 0.0002980709454132571, "loss": 1.8313, "step": 4945 }, { "epoch": 0.4830078125, "grad_norm": 0.19881433248519897, "learning_rate": 0.00029800020657675107, "loss": 1.8632, "step": 4946 }, { "epoch": 0.48310546875, "grad_norm": 0.23610596358776093, "learning_rate": 0.0002979294654427282, "loss": 1.8515, "step": 4947 }, { "epoch": 0.483203125, "grad_norm": 0.290463387966156, "learning_rate": 0.00029785872201825483, "loss": 1.8176, "step": 4948 }, { "epoch": 0.48330078125, "grad_norm": 0.18031997978687286, "learning_rate": 0.00029778797631039764, "loss": 1.8665, "step": 4949 }, { "epoch": 0.4833984375, "grad_norm": 0.2531370520591736, "learning_rate": 0.00029771722832622357, "loss": 1.835, "step": 4950 }, { "epoch": 0.48349609375, "grad_norm": 0.22413069009780884, "learning_rate": 0.0002976464780727995, "loss": 1.7792, "step": 4951 }, { "epoch": 0.48359375, "grad_norm": 0.20282338559627533, "learning_rate": 0.000297575725557193, "loss": 1.8828, "step": 4952 }, { "epoch": 0.48369140625, "grad_norm": 0.22885966300964355, "learning_rate": 0.0002975049707864715, "loss": 1.8764, "step": 4953 }, { "epoch": 0.4837890625, "grad_norm": 0.23277723789215088, "learning_rate": 0.0002974342137677028, "loss": 1.8568, "step": 4954 }, { "epoch": 
0.48388671875, "grad_norm": 0.18464137613773346, "learning_rate": 0.00029736345450795485, "loss": 1.8235, "step": 4955 }, { "epoch": 0.483984375, "grad_norm": 0.2433868944644928, "learning_rate": 0.0002972926930142959, "loss": 1.8362, "step": 4956 }, { "epoch": 0.48408203125, "grad_norm": 0.2033810317516327, "learning_rate": 0.00029722192929379455, "loss": 1.8419, "step": 4957 }, { "epoch": 0.4841796875, "grad_norm": 0.19732214510440826, "learning_rate": 0.0002971511633535193, "loss": 1.8475, "step": 4958 }, { "epoch": 0.48427734375, "grad_norm": 0.2697364091873169, "learning_rate": 0.000297080395200539, "loss": 1.8306, "step": 4959 }, { "epoch": 0.484375, "grad_norm": 0.2421739399433136, "learning_rate": 0.00029700962484192277, "loss": 1.8013, "step": 4960 }, { "epoch": 0.48447265625, "grad_norm": 0.1866464763879776, "learning_rate": 0.00029693885228474013, "loss": 1.8413, "step": 4961 }, { "epoch": 0.4845703125, "grad_norm": 0.23989856243133545, "learning_rate": 0.00029686807753606046, "loss": 1.8138, "step": 4962 }, { "epoch": 0.48466796875, "grad_norm": 0.1735045164823532, "learning_rate": 0.00029679730060295356, "loss": 1.8457, "step": 4963 }, { "epoch": 0.484765625, "grad_norm": 0.2334936559200287, "learning_rate": 0.0002967265214924895, "loss": 1.8282, "step": 4964 }, { "epoch": 0.48486328125, "grad_norm": 0.19146543741226196, "learning_rate": 0.0002966557402117384, "loss": 1.844, "step": 4965 }, { "epoch": 0.4849609375, "grad_norm": 0.20807620882987976, "learning_rate": 0.00029658495676777077, "loss": 1.8132, "step": 4966 }, { "epoch": 0.48505859375, "grad_norm": 0.2114933729171753, "learning_rate": 0.0002965141711676569, "loss": 1.8179, "step": 4967 }, { "epoch": 0.48515625, "grad_norm": 0.20859596133232117, "learning_rate": 0.0002964433834184683, "loss": 1.8603, "step": 4968 }, { "epoch": 0.48525390625, "grad_norm": 0.1918727159500122, "learning_rate": 0.0002963725935272755, "loss": 1.7765, "step": 4969 }, { "epoch": 0.4853515625, "grad_norm": 0.21384978294372559, "learning_rate": 0.00029630180150114985, "loss": 1.8126, "step": 4970 }, { "epoch": 0.48544921875, "grad_norm": 0.2083677351474762, "learning_rate": 0.0002962310073471631, "loss": 1.8557, "step": 4971 }, { "epoch": 0.485546875, "grad_norm": 0.1978531926870346, "learning_rate": 0.0002961602110723868, "loss": 1.8052, "step": 4972 }, { "epoch": 0.48564453125, "grad_norm": 0.27248382568359375, "learning_rate": 0.00029608941268389284, "loss": 1.8608, "step": 4973 }, { "epoch": 0.4857421875, "grad_norm": 0.19011075794696808, "learning_rate": 0.0002960186121887533, "loss": 1.8241, "step": 4974 }, { "epoch": 0.48583984375, "grad_norm": 0.27825161814689636, "learning_rate": 0.00029594780959404075, "loss": 1.825, "step": 4975 }, { "epoch": 0.4859375, "grad_norm": 0.23274178802967072, "learning_rate": 0.00029587700490682755, "loss": 1.8529, "step": 4976 }, { "epoch": 0.48603515625, "grad_norm": 0.22092591226100922, "learning_rate": 0.00029580619813418643, "loss": 1.8793, "step": 4977 }, { "epoch": 0.4861328125, "grad_norm": 0.24535295367240906, "learning_rate": 0.00029573538928319063, "loss": 1.8331, "step": 4978 }, { "epoch": 0.48623046875, "grad_norm": 0.2540774941444397, "learning_rate": 0.000295664578360913, "loss": 1.8456, "step": 4979 }, { "epoch": 0.486328125, "grad_norm": 0.25606876611709595, "learning_rate": 0.0002955937653744271, "loss": 1.796, "step": 4980 }, { "epoch": 0.48642578125, "grad_norm": 0.23073682188987732, "learning_rate": 0.0002955229503308064, "loss": 1.8139, "step": 4981 }, { "epoch": 0.4865234375, 
"grad_norm": 0.2704898416996002, "learning_rate": 0.00029545213323712487, "loss": 1.857, "step": 4982 }, { "epoch": 0.48662109375, "grad_norm": 0.18947190046310425, "learning_rate": 0.0002953813141004564, "loss": 1.8144, "step": 4983 }, { "epoch": 0.48671875, "grad_norm": 0.25032109022140503, "learning_rate": 0.00029531049292787524, "loss": 1.8256, "step": 4984 }, { "epoch": 0.48681640625, "grad_norm": 0.2130419909954071, "learning_rate": 0.00029523966972645575, "loss": 1.8148, "step": 4985 }, { "epoch": 0.4869140625, "grad_norm": 0.19153909385204315, "learning_rate": 0.0002951688445032726, "loss": 1.796, "step": 4986 }, { "epoch": 0.48701171875, "grad_norm": 0.2378961592912674, "learning_rate": 0.0002950980172654005, "loss": 1.8424, "step": 4987 }, { "epoch": 0.487109375, "grad_norm": 0.21440565586090088, "learning_rate": 0.00029502718801991453, "loss": 1.8671, "step": 4988 }, { "epoch": 0.48720703125, "grad_norm": 0.22810520231723785, "learning_rate": 0.0002949563567738899, "loss": 1.8683, "step": 4989 }, { "epoch": 0.4873046875, "grad_norm": 0.19989818334579468, "learning_rate": 0.0002948855235344021, "loss": 1.8653, "step": 4990 }, { "epoch": 0.48740234375, "grad_norm": 0.1847819685935974, "learning_rate": 0.0002948146883085266, "loss": 1.8064, "step": 4991 }, { "epoch": 0.4875, "grad_norm": 0.201404869556427, "learning_rate": 0.0002947438511033393, "loss": 1.8708, "step": 4992 }, { "epoch": 0.48759765625, "grad_norm": 0.18044066429138184, "learning_rate": 0.0002946730119259162, "loss": 1.8017, "step": 4993 }, { "epoch": 0.4876953125, "grad_norm": 0.19026845693588257, "learning_rate": 0.00029460217078333353, "loss": 1.8469, "step": 4994 }, { "epoch": 0.48779296875, "grad_norm": 0.18897053599357605, "learning_rate": 0.0002945313276826675, "loss": 1.8525, "step": 4995 }, { "epoch": 0.487890625, "grad_norm": 0.18522904813289642, "learning_rate": 0.0002944604826309951, "loss": 1.8473, "step": 4996 }, { "epoch": 0.48798828125, "grad_norm": 0.20513242483139038, "learning_rate": 0.0002943896356353927, "loss": 1.8781, "step": 4997 }, { "epoch": 0.4880859375, "grad_norm": 0.19440455734729767, "learning_rate": 0.0002943187867029375, "loss": 1.8408, "step": 4998 }, { "epoch": 0.48818359375, "grad_norm": 0.18897245824337006, "learning_rate": 0.0002942479358407068, "loss": 1.8311, "step": 4999 }, { "epoch": 0.48828125, "grad_norm": 0.18528372049331665, "learning_rate": 0.0002941770830557777, "loss": 1.7681, "step": 5000 }, { "epoch": 0.48837890625, "grad_norm": 0.17310313880443573, "learning_rate": 0.00029410622835522794, "loss": 1.8423, "step": 5001 }, { "epoch": 0.4884765625, "grad_norm": 0.19823698699474335, "learning_rate": 0.00029403537174613523, "loss": 1.8481, "step": 5002 }, { "epoch": 0.48857421875, "grad_norm": 0.1616418957710266, "learning_rate": 0.0002939645132355775, "loss": 1.8059, "step": 5003 }, { "epoch": 0.488671875, "grad_norm": 0.2244088351726532, "learning_rate": 0.0002938936528306329, "loss": 1.8022, "step": 5004 }, { "epoch": 0.48876953125, "grad_norm": 0.17269083857536316, "learning_rate": 0.0002938227905383798, "loss": 1.8475, "step": 5005 }, { "epoch": 0.4888671875, "grad_norm": 0.21807514131069183, "learning_rate": 0.0002937519263658967, "loss": 1.7768, "step": 5006 }, { "epoch": 0.48896484375, "grad_norm": 0.207741379737854, "learning_rate": 0.0002936810603202622, "loss": 1.8705, "step": 5007 }, { "epoch": 0.4890625, "grad_norm": 0.2272951453924179, "learning_rate": 0.0002936101924085553, "loss": 1.8451, "step": 5008 }, { "epoch": 0.48916015625, "grad_norm": 
0.1937900185585022, "learning_rate": 0.0002935393226378551, "loss": 1.8091, "step": 5009 }, { "epoch": 0.4892578125, "grad_norm": 0.20884044468402863, "learning_rate": 0.0002934684510152408, "loss": 1.8087, "step": 5010 }, { "epoch": 0.48935546875, "grad_norm": 0.2534731328487396, "learning_rate": 0.00029339757754779185, "loss": 1.8153, "step": 5011 }, { "epoch": 0.489453125, "grad_norm": 0.19488762319087982, "learning_rate": 0.0002933267022425878, "loss": 1.8143, "step": 5012 }, { "epoch": 0.48955078125, "grad_norm": 0.2714734375476837, "learning_rate": 0.0002932558251067088, "loss": 1.8073, "step": 5013 }, { "epoch": 0.4896484375, "grad_norm": 0.2879018783569336, "learning_rate": 0.00029318494614723433, "loss": 1.8735, "step": 5014 }, { "epoch": 0.48974609375, "grad_norm": 0.1872895210981369, "learning_rate": 0.000293114065371245, "loss": 1.8632, "step": 5015 }, { "epoch": 0.48984375, "grad_norm": 0.2474721223115921, "learning_rate": 0.0002930431827858209, "loss": 1.81, "step": 5016 }, { "epoch": 0.48994140625, "grad_norm": 0.2217857390642166, "learning_rate": 0.00029297229839804283, "loss": 1.8247, "step": 5017 }, { "epoch": 0.4900390625, "grad_norm": 0.1853683590888977, "learning_rate": 0.00029290141221499134, "loss": 1.8166, "step": 5018 }, { "epoch": 0.49013671875, "grad_norm": 0.20162352919578552, "learning_rate": 0.00029283052424374726, "loss": 1.8109, "step": 5019 }, { "epoch": 0.490234375, "grad_norm": 0.21496646106243134, "learning_rate": 0.00029275963449139185, "loss": 1.8503, "step": 5020 }, { "epoch": 0.49033203125, "grad_norm": 0.18970216810703278, "learning_rate": 0.0002926887429650063, "loss": 1.8547, "step": 5021 }, { "epoch": 0.4904296875, "grad_norm": 0.20564354956150055, "learning_rate": 0.000292617849671672, "loss": 1.7995, "step": 5022 }, { "epoch": 0.49052734375, "grad_norm": 0.19551490247249603, "learning_rate": 0.00029254695461847056, "loss": 1.8874, "step": 5023 }, { "epoch": 0.490625, "grad_norm": 0.19914989173412323, "learning_rate": 0.0002924760578124839, "loss": 1.8238, "step": 5024 }, { "epoch": 0.49072265625, "grad_norm": 0.21502692997455597, "learning_rate": 0.0002924051592607939, "loss": 1.8423, "step": 5025 }, { "epoch": 0.4908203125, "grad_norm": 0.21075236797332764, "learning_rate": 0.00029233425897048253, "loss": 1.8564, "step": 5026 }, { "epoch": 0.49091796875, "grad_norm": 0.19957475364208221, "learning_rate": 0.00029226335694863245, "loss": 1.8435, "step": 5027 }, { "epoch": 0.491015625, "grad_norm": 0.21372473239898682, "learning_rate": 0.00029219245320232586, "loss": 1.8026, "step": 5028 }, { "epoch": 0.49111328125, "grad_norm": 0.21456246078014374, "learning_rate": 0.00029212154773864546, "loss": 1.8171, "step": 5029 }, { "epoch": 0.4912109375, "grad_norm": 0.2240496128797531, "learning_rate": 0.0002920506405646742, "loss": 1.7845, "step": 5030 }, { "epoch": 0.49130859375, "grad_norm": 0.22475247085094452, "learning_rate": 0.000291979731687495, "loss": 1.8036, "step": 5031 }, { "epoch": 0.49140625, "grad_norm": 0.20421327650547028, "learning_rate": 0.000291908821114191, "loss": 1.796, "step": 5032 }, { "epoch": 0.49150390625, "grad_norm": 0.19368720054626465, "learning_rate": 0.00029183790885184554, "loss": 1.8609, "step": 5033 }, { "epoch": 0.4916015625, "grad_norm": 0.2217213213443756, "learning_rate": 0.0002917669949075422, "loss": 1.8564, "step": 5034 }, { "epoch": 0.49169921875, "grad_norm": 0.17927753925323486, "learning_rate": 0.0002916960792883646, "loss": 1.8282, "step": 5035 }, { "epoch": 0.491796875, "grad_norm": 0.19984909892082214, 
"learning_rate": 0.00029162516200139656, "loss": 1.8092, "step": 5036 }, { "epoch": 0.49189453125, "grad_norm": 0.1873607486486435, "learning_rate": 0.00029155424305372214, "loss": 1.8537, "step": 5037 }, { "epoch": 0.4919921875, "grad_norm": 0.164852574467659, "learning_rate": 0.00029148332245242553, "loss": 1.9127, "step": 5038 }, { "epoch": 0.49208984375, "grad_norm": 0.18969477713108063, "learning_rate": 0.00029141240020459105, "loss": 1.86, "step": 5039 }, { "epoch": 0.4921875, "grad_norm": 0.15750835835933685, "learning_rate": 0.0002913414763173031, "loss": 1.7972, "step": 5040 }, { "epoch": 0.49228515625, "grad_norm": 0.20785696804523468, "learning_rate": 0.00029127055079764653, "loss": 1.8485, "step": 5041 }, { "epoch": 0.4923828125, "grad_norm": 0.16728556156158447, "learning_rate": 0.00029119962365270604, "loss": 1.8315, "step": 5042 }, { "epoch": 0.49248046875, "grad_norm": 0.21338783204555511, "learning_rate": 0.00029112869488956666, "loss": 1.8386, "step": 5043 }, { "epoch": 0.492578125, "grad_norm": 0.206341952085495, "learning_rate": 0.0002910577645153136, "loss": 1.8048, "step": 5044 }, { "epoch": 0.49267578125, "grad_norm": 0.19516567885875702, "learning_rate": 0.00029098683253703207, "loss": 1.8439, "step": 5045 }, { "epoch": 0.4927734375, "grad_norm": 0.2123129516839981, "learning_rate": 0.0002909158989618076, "loss": 1.8332, "step": 5046 }, { "epoch": 0.49287109375, "grad_norm": 0.25343215465545654, "learning_rate": 0.0002908449637967258, "loss": 1.8403, "step": 5047 }, { "epoch": 0.49296875, "grad_norm": 0.21154780685901642, "learning_rate": 0.0002907740270488725, "loss": 1.8634, "step": 5048 }, { "epoch": 0.49306640625, "grad_norm": 0.2604399621486664, "learning_rate": 0.00029070308872533373, "loss": 1.8023, "step": 5049 }, { "epoch": 0.4931640625, "grad_norm": 0.19805750250816345, "learning_rate": 0.0002906321488331954, "loss": 1.7975, "step": 5050 }, { "epoch": 0.49326171875, "grad_norm": 0.22015194594860077, "learning_rate": 0.000290561207379544, "loss": 1.866, "step": 5051 }, { "epoch": 0.493359375, "grad_norm": 0.18996109068393707, "learning_rate": 0.0002904902643714658, "loss": 1.8521, "step": 5052 }, { "epoch": 0.49345703125, "grad_norm": 0.20770952105522156, "learning_rate": 0.0002904193198160474, "loss": 1.8632, "step": 5053 }, { "epoch": 0.4935546875, "grad_norm": 0.19130323827266693, "learning_rate": 0.00029034837372037556, "loss": 1.8237, "step": 5054 }, { "epoch": 0.49365234375, "grad_norm": 0.20976804196834564, "learning_rate": 0.0002902774260915371, "loss": 1.8218, "step": 5055 }, { "epoch": 0.49375, "grad_norm": 0.22953778505325317, "learning_rate": 0.0002902064769366192, "loss": 1.7876, "step": 5056 }, { "epoch": 0.49384765625, "grad_norm": 0.22545038163661957, "learning_rate": 0.00029013552626270893, "loss": 1.8076, "step": 5057 }, { "epoch": 0.4939453125, "grad_norm": 0.2565915584564209, "learning_rate": 0.0002900645740768937, "loss": 1.8766, "step": 5058 }, { "epoch": 0.49404296875, "grad_norm": 0.17467525601387024, "learning_rate": 0.000289993620386261, "loss": 1.8522, "step": 5059 }, { "epoch": 0.494140625, "grad_norm": 0.2663061320781708, "learning_rate": 0.0002899226651978984, "loss": 1.8561, "step": 5060 }, { "epoch": 0.49423828125, "grad_norm": 0.22704920172691345, "learning_rate": 0.0002898517085188938, "loss": 1.8282, "step": 5061 }, { "epoch": 0.4943359375, "grad_norm": 0.23093096911907196, "learning_rate": 0.00028978075035633507, "loss": 1.8198, "step": 5062 }, { "epoch": 0.49443359375, "grad_norm": 0.22272057831287384, "learning_rate": 
0.00028970979071731034, "loss": 1.8303, "step": 5063 }, { "epoch": 0.49453125, "grad_norm": 0.21321052312850952, "learning_rate": 0.0002896388296089078, "loss": 1.8488, "step": 5064 }, { "epoch": 0.49462890625, "grad_norm": 0.20292551815509796, "learning_rate": 0.0002895678670382159, "loss": 1.8561, "step": 5065 }, { "epoch": 0.4947265625, "grad_norm": 0.16809150576591492, "learning_rate": 0.00028949690301232315, "loss": 1.8175, "step": 5066 }, { "epoch": 0.49482421875, "grad_norm": 0.22057975828647614, "learning_rate": 0.00028942593753831814, "loss": 1.8166, "step": 5067 }, { "epoch": 0.494921875, "grad_norm": 0.1739296317100525, "learning_rate": 0.0002893549706232899, "loss": 1.8326, "step": 5068 }, { "epoch": 0.49501953125, "grad_norm": 0.2019680142402649, "learning_rate": 0.00028928400227432716, "loss": 1.877, "step": 5069 }, { "epoch": 0.4951171875, "grad_norm": 0.15942968428134918, "learning_rate": 0.0002892130324985192, "loss": 1.807, "step": 5070 }, { "epoch": 0.49521484375, "grad_norm": 0.2469278872013092, "learning_rate": 0.00028914206130295516, "loss": 1.8168, "step": 5071 }, { "epoch": 0.4953125, "grad_norm": 0.1972155123949051, "learning_rate": 0.00028907108869472454, "loss": 1.8556, "step": 5072 }, { "epoch": 0.49541015625, "grad_norm": 0.22488188743591309, "learning_rate": 0.0002890001146809168, "loss": 1.82, "step": 5073 }, { "epoch": 0.4955078125, "grad_norm": 0.24358659982681274, "learning_rate": 0.0002889291392686216, "loss": 1.8294, "step": 5074 }, { "epoch": 0.49560546875, "grad_norm": 0.17525681853294373, "learning_rate": 0.00028885816246492884, "loss": 1.8843, "step": 5075 }, { "epoch": 0.495703125, "grad_norm": 0.23828233778476715, "learning_rate": 0.00028878718427692834, "loss": 1.8521, "step": 5076 }, { "epoch": 0.49580078125, "grad_norm": 0.21223755180835724, "learning_rate": 0.0002887162047117104, "loss": 1.8736, "step": 5077 }, { "epoch": 0.4958984375, "grad_norm": 0.22311273217201233, "learning_rate": 0.00028864522377636503, "loss": 1.803, "step": 5078 }, { "epoch": 0.49599609375, "grad_norm": 0.20526814460754395, "learning_rate": 0.00028857424147798273, "loss": 1.8422, "step": 5079 }, { "epoch": 0.49609375, "grad_norm": 0.25489121675491333, "learning_rate": 0.000288503257823654, "loss": 1.8151, "step": 5080 }, { "epoch": 0.49619140625, "grad_norm": 0.20456595718860626, "learning_rate": 0.0002884322728204693, "loss": 1.8229, "step": 5081 }, { "epoch": 0.4962890625, "grad_norm": 0.2624034285545349, "learning_rate": 0.0002883612864755197, "loss": 1.8209, "step": 5082 }, { "epoch": 0.49638671875, "grad_norm": 0.20233194530010223, "learning_rate": 0.0002882902987958958, "loss": 1.8302, "step": 5083 }, { "epoch": 0.496484375, "grad_norm": 0.22940693795681, "learning_rate": 0.00028821930978868894, "loss": 1.8517, "step": 5084 }, { "epoch": 0.49658203125, "grad_norm": 0.22241154313087463, "learning_rate": 0.00028814831946099006, "loss": 1.8617, "step": 5085 }, { "epoch": 0.4966796875, "grad_norm": 0.18967120349407196, "learning_rate": 0.0002880773278198906, "loss": 1.8286, "step": 5086 }, { "epoch": 0.49677734375, "grad_norm": 0.2510261535644531, "learning_rate": 0.00028800633487248195, "loss": 1.7856, "step": 5087 }, { "epoch": 0.496875, "grad_norm": 0.17186236381530762, "learning_rate": 0.0002879353406258555, "loss": 1.8164, "step": 5088 }, { "epoch": 0.49697265625, "grad_norm": 0.25492095947265625, "learning_rate": 0.00028786434508710335, "loss": 1.8563, "step": 5089 }, { "epoch": 0.4970703125, "grad_norm": 0.193217471241951, "learning_rate": 0.000287793348263317, 
"loss": 1.8097, "step": 5090 }, { "epoch": 0.49716796875, "grad_norm": 0.22380249202251434, "learning_rate": 0.00028772235016158855, "loss": 1.8412, "step": 5091 }, { "epoch": 0.497265625, "grad_norm": 0.19743509590625763, "learning_rate": 0.00028765135078900997, "loss": 1.8061, "step": 5092 }, { "epoch": 0.49736328125, "grad_norm": 0.2040206342935562, "learning_rate": 0.00028758035015267347, "loss": 1.8043, "step": 5093 }, { "epoch": 0.4974609375, "grad_norm": 0.20091480016708374, "learning_rate": 0.00028750934825967156, "loss": 1.8243, "step": 5094 }, { "epoch": 0.49755859375, "grad_norm": 0.2049509435892105, "learning_rate": 0.00028743834511709646, "loss": 1.8647, "step": 5095 }, { "epoch": 0.49765625, "grad_norm": 0.2169008105993271, "learning_rate": 0.00028736734073204096, "loss": 1.8461, "step": 5096 }, { "epoch": 0.49775390625, "grad_norm": 0.2207108587026596, "learning_rate": 0.0002872963351115977, "loss": 1.8217, "step": 5097 }, { "epoch": 0.4978515625, "grad_norm": 0.21978992223739624, "learning_rate": 0.00028722532826285944, "loss": 1.864, "step": 5098 }, { "epoch": 0.49794921875, "grad_norm": 0.18339653313159943, "learning_rate": 0.0002871543201929192, "loss": 1.8132, "step": 5099 }, { "epoch": 0.498046875, "grad_norm": 0.1972111016511917, "learning_rate": 0.0002870833109088701, "loss": 1.8268, "step": 5100 }, { "epoch": 0.49814453125, "grad_norm": 0.17766819894313812, "learning_rate": 0.0002870123004178053, "loss": 1.8822, "step": 5101 }, { "epoch": 0.4982421875, "grad_norm": 0.20298975706100464, "learning_rate": 0.00028694128872681804, "loss": 1.8423, "step": 5102 }, { "epoch": 0.49833984375, "grad_norm": 0.17096921801567078, "learning_rate": 0.0002868702758430019, "loss": 1.8473, "step": 5103 }, { "epoch": 0.4984375, "grad_norm": 0.22858570516109467, "learning_rate": 0.0002867992617734502, "loss": 1.7863, "step": 5104 }, { "epoch": 0.49853515625, "grad_norm": 0.198171466588974, "learning_rate": 0.000286728246525257, "loss": 1.8157, "step": 5105 }, { "epoch": 0.4986328125, "grad_norm": 0.1736242175102234, "learning_rate": 0.00028665723010551575, "loss": 1.8479, "step": 5106 }, { "epoch": 0.49873046875, "grad_norm": 0.211822047829628, "learning_rate": 0.0002865862125213205, "loss": 1.8409, "step": 5107 }, { "epoch": 0.498828125, "grad_norm": 0.20091445744037628, "learning_rate": 0.00028651519377976534, "loss": 1.845, "step": 5108 }, { "epoch": 0.49892578125, "grad_norm": 0.19759409129619598, "learning_rate": 0.0002864441738879442, "loss": 1.8142, "step": 5109 }, { "epoch": 0.4990234375, "grad_norm": 0.19233040511608124, "learning_rate": 0.00028637315285295157, "loss": 1.8023, "step": 5110 }, { "epoch": 0.49912109375, "grad_norm": 0.20535460114479065, "learning_rate": 0.00028630213068188165, "loss": 1.8339, "step": 5111 }, { "epoch": 0.49921875, "grad_norm": 0.24215444922447205, "learning_rate": 0.00028623110738182914, "loss": 1.8361, "step": 5112 }, { "epoch": 0.49931640625, "grad_norm": 0.1809164583683014, "learning_rate": 0.0002861600829598884, "loss": 1.7994, "step": 5113 }, { "epoch": 0.4994140625, "grad_norm": 0.19106461107730865, "learning_rate": 0.0002860890574231543, "loss": 1.8529, "step": 5114 }, { "epoch": 0.49951171875, "grad_norm": 0.2225295603275299, "learning_rate": 0.0002860180307787215, "loss": 1.708, "step": 5115 }, { "epoch": 0.499609375, "grad_norm": 0.1928769201040268, "learning_rate": 0.0002859470030336851, "loss": 1.8652, "step": 5116 }, { "epoch": 0.49970703125, "grad_norm": 0.21711425483226776, "learning_rate": 0.00028587597419514003, "loss": 1.8215, 
"step": 5117 }, { "epoch": 0.4998046875, "grad_norm": 0.17738524079322815, "learning_rate": 0.0002858049442701816, "loss": 1.8312, "step": 5118 }, { "epoch": 0.49990234375, "grad_norm": 0.2254190742969513, "learning_rate": 0.0002857339132659049, "loss": 1.798, "step": 5119 }, { "epoch": 0.5, "grad_norm": 0.17561078071594238, "learning_rate": 0.00028566288118940536, "loss": 1.8048, "step": 5120 }, { "epoch": 0.50009765625, "grad_norm": 0.18861603736877441, "learning_rate": 0.0002855918480477784, "loss": 1.8167, "step": 5121 }, { "epoch": 0.5001953125, "grad_norm": 0.1919478476047516, "learning_rate": 0.0002855208138481198, "loss": 1.8293, "step": 5122 }, { "epoch": 0.50029296875, "grad_norm": 0.16644835472106934, "learning_rate": 0.0002854497785975249, "loss": 1.8404, "step": 5123 }, { "epoch": 0.500390625, "grad_norm": 0.18720349669456482, "learning_rate": 0.00028537874230308985, "loss": 1.842, "step": 5124 }, { "epoch": 0.50048828125, "grad_norm": 0.1856456845998764, "learning_rate": 0.0002853077049719103, "loss": 1.8783, "step": 5125 }, { "epoch": 0.5005859375, "grad_norm": 0.17665253579616547, "learning_rate": 0.0002852366666110824, "loss": 1.8002, "step": 5126 }, { "epoch": 0.50068359375, "grad_norm": 0.2093137502670288, "learning_rate": 0.0002851656272277022, "loss": 1.8455, "step": 5127 }, { "epoch": 0.50078125, "grad_norm": 0.20296891033649445, "learning_rate": 0.0002850945868288659, "loss": 1.8384, "step": 5128 }, { "epoch": 0.50087890625, "grad_norm": 0.19395381212234497, "learning_rate": 0.0002850235454216698, "loss": 1.813, "step": 5129 }, { "epoch": 0.5009765625, "grad_norm": 0.22478842735290527, "learning_rate": 0.00028495250301321023, "loss": 1.8317, "step": 5130 }, { "epoch": 0.50107421875, "grad_norm": 0.2014487385749817, "learning_rate": 0.00028488145961058387, "loss": 1.815, "step": 5131 }, { "epoch": 0.501171875, "grad_norm": 0.21907967329025269, "learning_rate": 0.00028481041522088717, "loss": 1.8159, "step": 5132 }, { "epoch": 0.50126953125, "grad_norm": 0.24594099819660187, "learning_rate": 0.000284739369851217, "loss": 1.8346, "step": 5133 }, { "epoch": 0.5013671875, "grad_norm": 0.2633011043071747, "learning_rate": 0.00028466832350867007, "loss": 1.8594, "step": 5134 }, { "epoch": 0.50146484375, "grad_norm": 0.21223695576190948, "learning_rate": 0.0002845972762003432, "loss": 1.8595, "step": 5135 }, { "epoch": 0.5015625, "grad_norm": 0.2740681767463684, "learning_rate": 0.0002845262279333335, "loss": 1.851, "step": 5136 }, { "epoch": 0.50166015625, "grad_norm": 0.22165460884571075, "learning_rate": 0.0002844551787147379, "loss": 1.8282, "step": 5137 }, { "epoch": 0.5017578125, "grad_norm": 0.21008525788784027, "learning_rate": 0.00028438412855165385, "loss": 1.8341, "step": 5138 }, { "epoch": 0.50185546875, "grad_norm": 0.19968976080417633, "learning_rate": 0.0002843130774511785, "loss": 1.829, "step": 5139 }, { "epoch": 0.501953125, "grad_norm": 0.22986365854740143, "learning_rate": 0.00028424202542040914, "loss": 1.8063, "step": 5140 }, { "epoch": 0.50205078125, "grad_norm": 0.2158036231994629, "learning_rate": 0.0002841709724664434, "loss": 1.8717, "step": 5141 }, { "epoch": 0.5021484375, "grad_norm": 0.21261337399482727, "learning_rate": 0.00028409991859637866, "loss": 1.8121, "step": 5142 }, { "epoch": 0.50224609375, "grad_norm": 0.19104473292827606, "learning_rate": 0.0002840288638173128, "loss": 1.8203, "step": 5143 }, { "epoch": 0.50234375, "grad_norm": 0.21090002357959747, "learning_rate": 0.0002839578081363433, "loss": 1.8479, "step": 5144 }, { "epoch": 
0.50244140625, "grad_norm": 0.19558128714561462, "learning_rate": 0.0002838867515605682, "loss": 1.816, "step": 5145 }, { "epoch": 0.5025390625, "grad_norm": 0.2041911482810974, "learning_rate": 0.0002838156940970853, "loss": 1.8143, "step": 5146 }, { "epoch": 0.50263671875, "grad_norm": 0.20941641926765442, "learning_rate": 0.0002837446357529927, "loss": 1.8639, "step": 5147 }, { "epoch": 0.502734375, "grad_norm": 0.18806858360767365, "learning_rate": 0.00028367357653538846, "loss": 1.8245, "step": 5148 }, { "epoch": 0.50283203125, "grad_norm": 0.19874818623065948, "learning_rate": 0.0002836025164513708, "loss": 1.7998, "step": 5149 }, { "epoch": 0.5029296875, "grad_norm": 0.2484036535024643, "learning_rate": 0.00028353145550803787, "loss": 1.8529, "step": 5150 }, { "epoch": 0.50302734375, "grad_norm": 0.204560324549675, "learning_rate": 0.00028346039371248815, "loss": 1.843, "step": 5151 }, { "epoch": 0.503125, "grad_norm": 0.20437026023864746, "learning_rate": 0.00028338933107182007, "loss": 1.8029, "step": 5152 }, { "epoch": 0.50322265625, "grad_norm": 0.2271643728017807, "learning_rate": 0.00028331826759313213, "loss": 1.8464, "step": 5153 }, { "epoch": 0.5033203125, "grad_norm": 0.1757575124502182, "learning_rate": 0.0002832472032835229, "loss": 1.8076, "step": 5154 }, { "epoch": 0.50341796875, "grad_norm": 0.2290390431880951, "learning_rate": 0.0002831761381500912, "loss": 1.8048, "step": 5155 }, { "epoch": 0.503515625, "grad_norm": 0.2349993884563446, "learning_rate": 0.0002831050721999358, "loss": 1.8208, "step": 5156 }, { "epoch": 0.50361328125, "grad_norm": 0.20555686950683594, "learning_rate": 0.00028303400544015537, "loss": 1.8302, "step": 5157 }, { "epoch": 0.5037109375, "grad_norm": 0.21505816280841827, "learning_rate": 0.00028296293787784894, "loss": 1.837, "step": 5158 }, { "epoch": 0.50380859375, "grad_norm": 0.1931108981370926, "learning_rate": 0.00028289186952011564, "loss": 1.8477, "step": 5159 }, { "epoch": 0.50390625, "grad_norm": 0.19991987943649292, "learning_rate": 0.00028282080037405445, "loss": 1.7883, "step": 5160 }, { "epoch": 0.50400390625, "grad_norm": 0.19590038061141968, "learning_rate": 0.00028274973044676465, "loss": 1.8734, "step": 5161 }, { "epoch": 0.5041015625, "grad_norm": 0.23448602855205536, "learning_rate": 0.00028267865974534534, "loss": 1.7512, "step": 5162 }, { "epoch": 0.50419921875, "grad_norm": 0.19897404313087463, "learning_rate": 0.00028260758827689604, "loss": 1.8271, "step": 5163 }, { "epoch": 0.504296875, "grad_norm": 0.28172561526298523, "learning_rate": 0.0002825365160485161, "loss": 1.85, "step": 5164 }, { "epoch": 0.50439453125, "grad_norm": 0.21375127136707306, "learning_rate": 0.0002824654430673048, "loss": 1.8422, "step": 5165 }, { "epoch": 0.5044921875, "grad_norm": 0.19045016169548035, "learning_rate": 0.00028239436934036205, "loss": 1.8458, "step": 5166 }, { "epoch": 0.50458984375, "grad_norm": 0.19772474467754364, "learning_rate": 0.0002823232948747872, "loss": 1.8384, "step": 5167 }, { "epoch": 0.5046875, "grad_norm": 0.18995516002178192, "learning_rate": 0.0002822522196776801, "loss": 1.8226, "step": 5168 }, { "epoch": 0.50478515625, "grad_norm": 0.2061922252178192, "learning_rate": 0.0002821811437561405, "loss": 1.8285, "step": 5169 }, { "epoch": 0.5048828125, "grad_norm": 0.2006341964006424, "learning_rate": 0.0002821100671172683, "loss": 1.8217, "step": 5170 }, { "epoch": 0.50498046875, "grad_norm": 0.21055051684379578, "learning_rate": 0.0002820389897681634, "loss": 1.8004, "step": 5171 }, { "epoch": 0.505078125, 
"grad_norm": 0.20487499237060547, "learning_rate": 0.0002819679117159257, "loss": 1.8312, "step": 5172 }, { "epoch": 0.50517578125, "grad_norm": 0.21000224351882935, "learning_rate": 0.0002818968329676554, "loss": 1.8611, "step": 5173 }, { "epoch": 0.5052734375, "grad_norm": 0.1996726542711258, "learning_rate": 0.0002818257535304526, "loss": 1.8287, "step": 5174 }, { "epoch": 0.50537109375, "grad_norm": 0.23702484369277954, "learning_rate": 0.00028175467341141745, "loss": 1.8036, "step": 5175 }, { "epoch": 0.50546875, "grad_norm": 0.19969522953033447, "learning_rate": 0.0002816835926176503, "loss": 1.7761, "step": 5176 }, { "epoch": 0.50556640625, "grad_norm": 0.21039023995399475, "learning_rate": 0.0002816125111562515, "loss": 1.8185, "step": 5177 }, { "epoch": 0.5056640625, "grad_norm": 0.23262640833854675, "learning_rate": 0.0002815414290343213, "loss": 1.8209, "step": 5178 }, { "epoch": 0.50576171875, "grad_norm": 0.18955937027931213, "learning_rate": 0.00028147034625896033, "loss": 1.7986, "step": 5179 }, { "epoch": 0.505859375, "grad_norm": 0.28865572810173035, "learning_rate": 0.0002813992628372692, "loss": 1.8865, "step": 5180 }, { "epoch": 0.50595703125, "grad_norm": 0.18123073875904083, "learning_rate": 0.00028132817877634823, "loss": 1.8333, "step": 5181 }, { "epoch": 0.5060546875, "grad_norm": 0.23983605206012726, "learning_rate": 0.0002812570940832984, "loss": 1.8036, "step": 5182 }, { "epoch": 0.50615234375, "grad_norm": 0.2014663964509964, "learning_rate": 0.00028118600876522027, "loss": 1.851, "step": 5183 }, { "epoch": 0.50625, "grad_norm": 0.19235989451408386, "learning_rate": 0.0002811149228292147, "loss": 1.8291, "step": 5184 }, { "epoch": 0.50634765625, "grad_norm": 0.21703949570655823, "learning_rate": 0.00028104383628238245, "loss": 1.8344, "step": 5185 }, { "epoch": 0.5064453125, "grad_norm": 0.23909872770309448, "learning_rate": 0.0002809727491318245, "loss": 1.8389, "step": 5186 }, { "epoch": 0.50654296875, "grad_norm": 0.196349635720253, "learning_rate": 0.00028090166138464184, "loss": 1.7998, "step": 5187 }, { "epoch": 0.506640625, "grad_norm": 0.23255367577075958, "learning_rate": 0.00028083057304793547, "loss": 1.8491, "step": 5188 }, { "epoch": 0.50673828125, "grad_norm": 0.28333580493927, "learning_rate": 0.0002807594841288066, "loss": 1.8542, "step": 5189 }, { "epoch": 0.5068359375, "grad_norm": 0.18618254363536835, "learning_rate": 0.00028068839463435614, "loss": 1.7837, "step": 5190 }, { "epoch": 0.50693359375, "grad_norm": 0.2517181932926178, "learning_rate": 0.00028061730457168557, "loss": 1.7998, "step": 5191 }, { "epoch": 0.50703125, "grad_norm": 0.25809919834136963, "learning_rate": 0.000280546213947896, "loss": 1.8252, "step": 5192 }, { "epoch": 0.50712890625, "grad_norm": 0.2025863379240036, "learning_rate": 0.0002804751227700887, "loss": 1.8463, "step": 5193 }, { "epoch": 0.5072265625, "grad_norm": 0.2257523089647293, "learning_rate": 0.00028040403104536533, "loss": 1.8552, "step": 5194 }, { "epoch": 0.50732421875, "grad_norm": 0.22546495497226715, "learning_rate": 0.00028033293878082705, "loss": 1.8513, "step": 5195 }, { "epoch": 0.507421875, "grad_norm": 0.22426986694335938, "learning_rate": 0.0002802618459835754, "loss": 1.8308, "step": 5196 }, { "epoch": 0.50751953125, "grad_norm": 0.18422812223434448, "learning_rate": 0.0002801907526607119, "loss": 1.8332, "step": 5197 }, { "epoch": 0.5076171875, "grad_norm": 0.22385282814502716, "learning_rate": 0.0002801196588193384, "loss": 1.839, "step": 5198 }, { "epoch": 0.50771484375, "grad_norm": 
0.17488056421279907, "learning_rate": 0.00028004856446655615, "loss": 1.8156, "step": 5199 }, { "epoch": 0.5078125, "grad_norm": 0.21158188581466675, "learning_rate": 0.0002799774696094671, "loss": 1.8385, "step": 5200 }, { "epoch": 0.50791015625, "grad_norm": 0.2035149335861206, "learning_rate": 0.000279906374255173, "loss": 1.8716, "step": 5201 }, { "epoch": 0.5080078125, "grad_norm": 0.22821687161922455, "learning_rate": 0.00027983527841077537, "loss": 1.8212, "step": 5202 }, { "epoch": 0.50810546875, "grad_norm": 0.20443230867385864, "learning_rate": 0.0002797641820833764, "loss": 1.8456, "step": 5203 }, { "epoch": 0.508203125, "grad_norm": 0.206146702170372, "learning_rate": 0.00027969308528007783, "loss": 1.8515, "step": 5204 }, { "epoch": 0.50830078125, "grad_norm": 0.2013579159975052, "learning_rate": 0.00027962198800798166, "loss": 1.8478, "step": 5205 }, { "epoch": 0.5083984375, "grad_norm": 0.22122925519943237, "learning_rate": 0.00027955089027418986, "loss": 1.8315, "step": 5206 }, { "epoch": 0.50849609375, "grad_norm": 0.19927021861076355, "learning_rate": 0.0002794797920858043, "loss": 1.8308, "step": 5207 }, { "epoch": 0.50859375, "grad_norm": 0.18810036778450012, "learning_rate": 0.00027940869344992734, "loss": 1.8195, "step": 5208 }, { "epoch": 0.50869140625, "grad_norm": 0.20243652164936066, "learning_rate": 0.0002793375943736609, "loss": 1.8214, "step": 5209 }, { "epoch": 0.5087890625, "grad_norm": 0.18808774650096893, "learning_rate": 0.0002792664948641072, "loss": 1.8324, "step": 5210 }, { "epoch": 0.50888671875, "grad_norm": 0.18380095064640045, "learning_rate": 0.0002791953949283685, "loss": 1.8259, "step": 5211 }, { "epoch": 0.508984375, "grad_norm": 0.1954612135887146, "learning_rate": 0.00027912429457354693, "loss": 1.8463, "step": 5212 }, { "epoch": 0.50908203125, "grad_norm": 0.20272082090377808, "learning_rate": 0.00027905319380674493, "loss": 1.8104, "step": 5213 }, { "epoch": 0.5091796875, "grad_norm": 0.20339372754096985, "learning_rate": 0.0002789820926350648, "loss": 1.8512, "step": 5214 }, { "epoch": 0.50927734375, "grad_norm": 0.1901872456073761, "learning_rate": 0.00027891099106560886, "loss": 1.8355, "step": 5215 }, { "epoch": 0.509375, "grad_norm": 0.245780810713768, "learning_rate": 0.0002788398891054796, "loss": 1.8367, "step": 5216 }, { "epoch": 0.50947265625, "grad_norm": 0.21571531891822815, "learning_rate": 0.0002787687867617794, "loss": 1.8328, "step": 5217 }, { "epoch": 0.5095703125, "grad_norm": 0.23023070394992828, "learning_rate": 0.0002786976840416108, "loss": 1.8052, "step": 5218 }, { "epoch": 0.50966796875, "grad_norm": 0.18857774138450623, "learning_rate": 0.00027862658095207635, "loss": 1.7668, "step": 5219 }, { "epoch": 0.509765625, "grad_norm": 0.27064093947410583, "learning_rate": 0.00027855547750027857, "loss": 1.8479, "step": 5220 }, { "epoch": 0.50986328125, "grad_norm": 0.17316238582134247, "learning_rate": 0.00027848437369332, "loss": 1.8254, "step": 5221 }, { "epoch": 0.5099609375, "grad_norm": 0.2541126310825348, "learning_rate": 0.00027841326953830355, "loss": 1.881, "step": 5222 }, { "epoch": 0.51005859375, "grad_norm": 0.1796654611825943, "learning_rate": 0.0002783421650423315, "loss": 1.8222, "step": 5223 }, { "epoch": 0.51015625, "grad_norm": 0.24916066229343414, "learning_rate": 0.0002782710602125069, "loss": 1.7842, "step": 5224 }, { "epoch": 0.51025390625, "grad_norm": 0.2150392383337021, "learning_rate": 0.0002781999550559323, "loss": 1.8309, "step": 5225 }, { "epoch": 0.5103515625, "grad_norm": 0.2666669487953186, 
"learning_rate": 0.0002781288495797106, "loss": 1.7778, "step": 5226 }, { "epoch": 0.51044921875, "grad_norm": 0.22197528183460236, "learning_rate": 0.0002780577437909446, "loss": 1.8481, "step": 5227 }, { "epoch": 0.510546875, "grad_norm": 0.2138781100511551, "learning_rate": 0.0002779866376967369, "loss": 1.8323, "step": 5228 }, { "epoch": 0.51064453125, "grad_norm": 0.20169495046138763, "learning_rate": 0.0002779155313041908, "loss": 1.8512, "step": 5229 }, { "epoch": 0.5107421875, "grad_norm": 0.22633489966392517, "learning_rate": 0.00027784442462040884, "loss": 1.7964, "step": 5230 }, { "epoch": 0.51083984375, "grad_norm": 0.20830343663692474, "learning_rate": 0.000277773317652494, "loss": 1.8522, "step": 5231 }, { "epoch": 0.5109375, "grad_norm": 0.21799485385417938, "learning_rate": 0.0002777022104075493, "loss": 1.8263, "step": 5232 }, { "epoch": 0.51103515625, "grad_norm": 0.22696734964847565, "learning_rate": 0.00027763110289267775, "loss": 1.8187, "step": 5233 }, { "epoch": 0.5111328125, "grad_norm": 0.20702536404132843, "learning_rate": 0.0002775599951149823, "loss": 1.7891, "step": 5234 }, { "epoch": 0.51123046875, "grad_norm": 0.21518905460834503, "learning_rate": 0.0002774888870815661, "loss": 1.8306, "step": 5235 }, { "epoch": 0.511328125, "grad_norm": 0.24978503584861755, "learning_rate": 0.00027741777879953204, "loss": 1.85, "step": 5236 }, { "epoch": 0.51142578125, "grad_norm": 0.22853833436965942, "learning_rate": 0.00027734667027598335, "loss": 1.8444, "step": 5237 }, { "epoch": 0.5115234375, "grad_norm": 0.25632283091545105, "learning_rate": 0.00027727556151802306, "loss": 1.8295, "step": 5238 }, { "epoch": 0.51162109375, "grad_norm": 0.2526589334011078, "learning_rate": 0.00027720445253275435, "loss": 1.7868, "step": 5239 }, { "epoch": 0.51171875, "grad_norm": 0.21438026428222656, "learning_rate": 0.0002771333433272804, "loss": 1.8434, "step": 5240 }, { "epoch": 0.51181640625, "grad_norm": 0.21188658475875854, "learning_rate": 0.00027706223390870427, "loss": 1.8102, "step": 5241 }, { "epoch": 0.5119140625, "grad_norm": 0.2187248170375824, "learning_rate": 0.0002769911242841293, "loss": 1.826, "step": 5242 }, { "epoch": 0.51201171875, "grad_norm": 0.20622050762176514, "learning_rate": 0.0002769200144606587, "loss": 1.8182, "step": 5243 }, { "epoch": 0.512109375, "grad_norm": 0.19678251445293427, "learning_rate": 0.00027684890444539553, "loss": 1.8106, "step": 5244 }, { "epoch": 0.51220703125, "grad_norm": 0.17650017142295837, "learning_rate": 0.0002767777942454433, "loss": 1.7973, "step": 5245 }, { "epoch": 0.5123046875, "grad_norm": 0.1835285872220993, "learning_rate": 0.0002767066838679051, "loss": 1.8461, "step": 5246 }, { "epoch": 0.51240234375, "grad_norm": 0.20275579392910004, "learning_rate": 0.0002766355733198844, "loss": 1.8541, "step": 5247 }, { "epoch": 0.5125, "grad_norm": 0.20486068725585938, "learning_rate": 0.0002765644626084843, "loss": 1.834, "step": 5248 }, { "epoch": 0.51259765625, "grad_norm": 0.183939129114151, "learning_rate": 0.0002764933517408083, "loss": 1.8789, "step": 5249 }, { "epoch": 0.5126953125, "grad_norm": 0.18819910287857056, "learning_rate": 0.00027642224072395977, "loss": 1.8561, "step": 5250 }, { "epoch": 0.51279296875, "grad_norm": 0.1758238971233368, "learning_rate": 0.00027635112956504184, "loss": 1.7827, "step": 5251 }, { "epoch": 0.512890625, "grad_norm": 0.19081774353981018, "learning_rate": 0.00027628001827115814, "loss": 1.858, "step": 5252 }, { "epoch": 0.51298828125, "grad_norm": 0.20497320592403412, "learning_rate": 
0.0002762089068494119, "loss": 1.8091, "step": 5253 }, { "epoch": 0.5130859375, "grad_norm": 0.20026497542858124, "learning_rate": 0.00027613779530690657, "loss": 1.8437, "step": 5254 }, { "epoch": 0.51318359375, "grad_norm": 0.198089599609375, "learning_rate": 0.0002760666836507456, "loss": 1.851, "step": 5255 }, { "epoch": 0.51328125, "grad_norm": 0.17989836633205414, "learning_rate": 0.0002759955718880324, "loss": 1.8217, "step": 5256 }, { "epoch": 0.51337890625, "grad_norm": 0.20834361016750336, "learning_rate": 0.00027592446002587035, "loss": 1.8208, "step": 5257 }, { "epoch": 0.5134765625, "grad_norm": 0.2187504917383194, "learning_rate": 0.0002758533480713629, "loss": 1.7988, "step": 5258 }, { "epoch": 0.51357421875, "grad_norm": 0.19491791725158691, "learning_rate": 0.0002757822360316136, "loss": 1.8159, "step": 5259 }, { "epoch": 0.513671875, "grad_norm": 0.26729193329811096, "learning_rate": 0.0002757111239137259, "loss": 1.7805, "step": 5260 }, { "epoch": 0.51376953125, "grad_norm": 0.22634105384349823, "learning_rate": 0.00027564001172480316, "loss": 1.8904, "step": 5261 }, { "epoch": 0.5138671875, "grad_norm": 0.1892109513282776, "learning_rate": 0.0002755688994719489, "loss": 1.7876, "step": 5262 }, { "epoch": 0.51396484375, "grad_norm": 0.1953965425491333, "learning_rate": 0.00027549778716226667, "loss": 1.8182, "step": 5263 }, { "epoch": 0.5140625, "grad_norm": 0.20991718769073486, "learning_rate": 0.00027542667480285986, "loss": 1.8139, "step": 5264 }, { "epoch": 0.51416015625, "grad_norm": 0.18365232646465302, "learning_rate": 0.000275355562400832, "loss": 1.8111, "step": 5265 }, { "epoch": 0.5142578125, "grad_norm": 0.2225840985774994, "learning_rate": 0.0002752844499632867, "loss": 1.8177, "step": 5266 }, { "epoch": 0.51435546875, "grad_norm": 0.2332131564617157, "learning_rate": 0.0002752133374973273, "loss": 1.8341, "step": 5267 }, { "epoch": 0.514453125, "grad_norm": 0.19824832677841187, "learning_rate": 0.00027514222501005744, "loss": 1.871, "step": 5268 }, { "epoch": 0.51455078125, "grad_norm": 0.22547608613967896, "learning_rate": 0.0002750711125085804, "loss": 1.809, "step": 5269 }, { "epoch": 0.5146484375, "grad_norm": 0.20121882855892181, "learning_rate": 0.000275, "loss": 1.8416, "step": 5270 }, { "epoch": 0.51474609375, "grad_norm": 0.21252721548080444, "learning_rate": 0.0002749288874914196, "loss": 1.8267, "step": 5271 }, { "epoch": 0.51484375, "grad_norm": 0.21727199852466583, "learning_rate": 0.0002748577749899427, "loss": 1.824, "step": 5272 }, { "epoch": 0.51494140625, "grad_norm": 0.23070158064365387, "learning_rate": 0.0002747866625026728, "loss": 1.8394, "step": 5273 }, { "epoch": 0.5150390625, "grad_norm": 0.19188296794891357, "learning_rate": 0.00027471555003671336, "loss": 1.8037, "step": 5274 }, { "epoch": 0.51513671875, "grad_norm": 0.2062949240207672, "learning_rate": 0.00027464443759916794, "loss": 1.8275, "step": 5275 }, { "epoch": 0.515234375, "grad_norm": 0.17409728467464447, "learning_rate": 0.0002745733251971402, "loss": 1.8196, "step": 5276 }, { "epoch": 0.51533203125, "grad_norm": 0.420627236366272, "learning_rate": 0.0002745022128377334, "loss": 1.8713, "step": 5277 }, { "epoch": 0.5154296875, "grad_norm": 0.21082288026809692, "learning_rate": 0.00027443110052805116, "loss": 1.8463, "step": 5278 }, { "epoch": 0.51552734375, "grad_norm": 0.2221074104309082, "learning_rate": 0.00027435998827519693, "loss": 1.8335, "step": 5279 }, { "epoch": 0.515625, "grad_norm": 0.2161598950624466, "learning_rate": 0.00027428887608627425, "loss": 1.802, 
"step": 5280 }, { "epoch": 0.51572265625, "grad_norm": 0.2788350582122803, "learning_rate": 0.0002742177639683864, "loss": 1.7903, "step": 5281 }, { "epoch": 0.5158203125, "grad_norm": 0.19664369523525238, "learning_rate": 0.00027414665192863707, "loss": 1.8127, "step": 5282 }, { "epoch": 0.51591796875, "grad_norm": 0.22516986727714539, "learning_rate": 0.0002740755399741296, "loss": 1.8597, "step": 5283 }, { "epoch": 0.516015625, "grad_norm": 0.21816439926624298, "learning_rate": 0.0002740044281119677, "loss": 1.7709, "step": 5284 }, { "epoch": 0.51611328125, "grad_norm": 0.18468748033046722, "learning_rate": 0.00027393331634925443, "loss": 1.8002, "step": 5285 }, { "epoch": 0.5162109375, "grad_norm": 0.2160843163728714, "learning_rate": 0.00027386220469309346, "loss": 1.8275, "step": 5286 }, { "epoch": 0.51630859375, "grad_norm": 0.2091751992702484, "learning_rate": 0.0002737910931505882, "loss": 1.8597, "step": 5287 }, { "epoch": 0.51640625, "grad_norm": 0.2260010987520218, "learning_rate": 0.0002737199817288419, "loss": 1.8304, "step": 5288 }, { "epoch": 0.51650390625, "grad_norm": 0.18804267048835754, "learning_rate": 0.0002736488704349582, "loss": 1.8253, "step": 5289 }, { "epoch": 0.5166015625, "grad_norm": 0.2063298225402832, "learning_rate": 0.0002735777592760403, "loss": 1.9005, "step": 5290 }, { "epoch": 0.51669921875, "grad_norm": 0.17253370583057404, "learning_rate": 0.00027350664825919174, "loss": 1.8316, "step": 5291 }, { "epoch": 0.516796875, "grad_norm": 0.18270735442638397, "learning_rate": 0.0002734355373915157, "loss": 1.7906, "step": 5292 }, { "epoch": 0.51689453125, "grad_norm": 0.202109694480896, "learning_rate": 0.0002733644266801157, "loss": 1.8425, "step": 5293 }, { "epoch": 0.5169921875, "grad_norm": 0.1937425136566162, "learning_rate": 0.000273293316132095, "loss": 1.7975, "step": 5294 }, { "epoch": 0.51708984375, "grad_norm": 0.1813644915819168, "learning_rate": 0.00027322220575455667, "loss": 1.8253, "step": 5295 }, { "epoch": 0.5171875, "grad_norm": 0.21616274118423462, "learning_rate": 0.0002731510955546045, "loss": 1.8131, "step": 5296 }, { "epoch": 0.51728515625, "grad_norm": 0.22926616668701172, "learning_rate": 0.0002730799855393414, "loss": 1.803, "step": 5297 }, { "epoch": 0.5173828125, "grad_norm": 0.19997656345367432, "learning_rate": 0.00027300887571587073, "loss": 1.7951, "step": 5298 }, { "epoch": 0.51748046875, "grad_norm": 0.22420541942119598, "learning_rate": 0.00027293776609129577, "loss": 1.7845, "step": 5299 }, { "epoch": 0.517578125, "grad_norm": 0.20603252947330475, "learning_rate": 0.0002728666566727197, "loss": 1.8268, "step": 5300 }, { "epoch": 0.51767578125, "grad_norm": 0.26573997735977173, "learning_rate": 0.00027279554746724574, "loss": 1.8514, "step": 5301 }, { "epoch": 0.5177734375, "grad_norm": 0.21173542737960815, "learning_rate": 0.0002727244384819769, "loss": 1.8457, "step": 5302 }, { "epoch": 0.51787109375, "grad_norm": 0.22665506601333618, "learning_rate": 0.0002726533297240167, "loss": 1.8285, "step": 5303 }, { "epoch": 0.51796875, "grad_norm": 0.22150909900665283, "learning_rate": 0.000272582221200468, "loss": 1.8009, "step": 5304 }, { "epoch": 0.51806640625, "grad_norm": 0.2240312099456787, "learning_rate": 0.000272511112918434, "loss": 1.8244, "step": 5305 }, { "epoch": 0.5181640625, "grad_norm": 0.2355889528989792, "learning_rate": 0.0002724400048850177, "loss": 1.8305, "step": 5306 }, { "epoch": 0.51826171875, "grad_norm": 0.2118416428565979, "learning_rate": 0.00027236889710732234, "loss": 1.8589, "step": 5307 }, { 
"epoch": 0.518359375, "grad_norm": 0.21639370918273926, "learning_rate": 0.0002722977895924508, "loss": 1.8263, "step": 5308 }, { "epoch": 0.51845703125, "grad_norm": 0.19224722683429718, "learning_rate": 0.000272226682347506, "loss": 1.8154, "step": 5309 }, { "epoch": 0.5185546875, "grad_norm": 0.2449813187122345, "learning_rate": 0.0002721555753795913, "loss": 1.815, "step": 5310 }, { "epoch": 0.51865234375, "grad_norm": 0.18118219077587128, "learning_rate": 0.0002720844686958093, "loss": 1.7511, "step": 5311 }, { "epoch": 0.51875, "grad_norm": 0.21740548312664032, "learning_rate": 0.00027201336230326306, "loss": 1.8323, "step": 5312 }, { "epoch": 0.51884765625, "grad_norm": 0.2405717819929123, "learning_rate": 0.0002719422562090555, "loss": 1.8465, "step": 5313 }, { "epoch": 0.5189453125, "grad_norm": 0.1835559904575348, "learning_rate": 0.0002718711504202894, "loss": 1.8693, "step": 5314 }, { "epoch": 0.51904296875, "grad_norm": 0.23080873489379883, "learning_rate": 0.0002718000449440677, "loss": 1.8272, "step": 5315 }, { "epoch": 0.519140625, "grad_norm": 0.2529352605342865, "learning_rate": 0.0002717289397874931, "loss": 1.8342, "step": 5316 }, { "epoch": 0.51923828125, "grad_norm": 0.24486424028873444, "learning_rate": 0.0002716578349576685, "loss": 1.7871, "step": 5317 }, { "epoch": 0.5193359375, "grad_norm": 0.18374153971672058, "learning_rate": 0.0002715867304616966, "loss": 1.7643, "step": 5318 }, { "epoch": 0.51943359375, "grad_norm": 0.24394896626472473, "learning_rate": 0.00027151562630668, "loss": 1.835, "step": 5319 }, { "epoch": 0.51953125, "grad_norm": 0.22305937111377716, "learning_rate": 0.0002714445224997215, "loss": 1.8235, "step": 5320 }, { "epoch": 0.51962890625, "grad_norm": 0.2576371133327484, "learning_rate": 0.0002713734190479238, "loss": 1.8074, "step": 5321 }, { "epoch": 0.5197265625, "grad_norm": 0.19837586581707, "learning_rate": 0.0002713023159583893, "loss": 1.8607, "step": 5322 }, { "epoch": 0.51982421875, "grad_norm": 0.2343999594449997, "learning_rate": 0.00027123121323822057, "loss": 1.829, "step": 5323 }, { "epoch": 0.519921875, "grad_norm": 0.20340736210346222, "learning_rate": 0.00027116011089452045, "loss": 1.8105, "step": 5324 }, { "epoch": 0.52001953125, "grad_norm": 0.21537081897258759, "learning_rate": 0.00027108900893439117, "loss": 1.8187, "step": 5325 }, { "epoch": 0.5201171875, "grad_norm": 0.21169380843639374, "learning_rate": 0.0002710179073649353, "loss": 1.867, "step": 5326 }, { "epoch": 0.52021484375, "grad_norm": 0.2153967320919037, "learning_rate": 0.00027094680619325505, "loss": 1.832, "step": 5327 }, { "epoch": 0.5203125, "grad_norm": 0.19875775277614594, "learning_rate": 0.0002708757054264531, "loss": 1.7699, "step": 5328 }, { "epoch": 0.52041015625, "grad_norm": 0.22768448293209076, "learning_rate": 0.0002708046050716316, "loss": 1.8604, "step": 5329 }, { "epoch": 0.5205078125, "grad_norm": 0.23284323513507843, "learning_rate": 0.0002707335051358928, "loss": 1.863, "step": 5330 }, { "epoch": 0.52060546875, "grad_norm": 0.26372697949409485, "learning_rate": 0.0002706624056263391, "loss": 1.8398, "step": 5331 }, { "epoch": 0.520703125, "grad_norm": 0.21927045285701752, "learning_rate": 0.0002705913065500727, "loss": 1.8123, "step": 5332 }, { "epoch": 0.52080078125, "grad_norm": 0.23541967570781708, "learning_rate": 0.00027052020791419574, "loss": 1.8135, "step": 5333 }, { "epoch": 0.5208984375, "grad_norm": 0.22243377566337585, "learning_rate": 0.0002704491097258102, "loss": 1.8238, "step": 5334 }, { "epoch": 0.52099609375, 
"grad_norm": 0.22381052374839783, "learning_rate": 0.0002703780119920184, "loss": 1.774, "step": 5335 }, { "epoch": 0.52109375, "grad_norm": 0.23030735552310944, "learning_rate": 0.00027030691471992215, "loss": 1.7834, "step": 5336 }, { "epoch": 0.52119140625, "grad_norm": 0.18623344600200653, "learning_rate": 0.00027023581791662355, "loss": 1.7704, "step": 5337 }, { "epoch": 0.5212890625, "grad_norm": 0.21923956274986267, "learning_rate": 0.00027016472158922466, "loss": 1.8322, "step": 5338 }, { "epoch": 0.52138671875, "grad_norm": 0.18253932893276215, "learning_rate": 0.0002700936257448271, "loss": 1.8258, "step": 5339 }, { "epoch": 0.521484375, "grad_norm": 0.2143840491771698, "learning_rate": 0.000270022530390533, "loss": 1.8196, "step": 5340 }, { "epoch": 0.52158203125, "grad_norm": 0.21123650670051575, "learning_rate": 0.00026995143553344394, "loss": 1.8044, "step": 5341 }, { "epoch": 0.5216796875, "grad_norm": 0.23476621508598328, "learning_rate": 0.0002698803411806618, "loss": 1.8336, "step": 5342 }, { "epoch": 0.52177734375, "grad_norm": 0.19970251619815826, "learning_rate": 0.0002698092473392881, "loss": 1.7946, "step": 5343 }, { "epoch": 0.521875, "grad_norm": 0.23670539259910583, "learning_rate": 0.0002697381540164246, "loss": 1.8241, "step": 5344 }, { "epoch": 0.52197265625, "grad_norm": 0.2114463895559311, "learning_rate": 0.000269667061219173, "loss": 1.8313, "step": 5345 }, { "epoch": 0.5220703125, "grad_norm": 0.24083422124385834, "learning_rate": 0.0002695959689546347, "loss": 1.8623, "step": 5346 }, { "epoch": 0.52216796875, "grad_norm": 0.26950281858444214, "learning_rate": 0.00026952487722991126, "loss": 1.7974, "step": 5347 }, { "epoch": 0.522265625, "grad_norm": 0.19649575650691986, "learning_rate": 0.00026945378605210406, "loss": 1.8072, "step": 5348 }, { "epoch": 0.52236328125, "grad_norm": 0.20691539347171783, "learning_rate": 0.0002693826954283145, "loss": 1.7944, "step": 5349 }, { "epoch": 0.5224609375, "grad_norm": 0.2399909347295761, "learning_rate": 0.00026931160536564384, "loss": 1.8401, "step": 5350 }, { "epoch": 0.52255859375, "grad_norm": 0.18562795221805573, "learning_rate": 0.0002692405158711935, "loss": 1.8089, "step": 5351 }, { "epoch": 0.52265625, "grad_norm": 0.19294597208499908, "learning_rate": 0.0002691694269520645, "loss": 1.8241, "step": 5352 }, { "epoch": 0.52275390625, "grad_norm": 0.1945067197084427, "learning_rate": 0.0002690983386153582, "loss": 1.824, "step": 5353 }, { "epoch": 0.5228515625, "grad_norm": 0.22624894976615906, "learning_rate": 0.0002690272508681756, "loss": 1.8276, "step": 5354 }, { "epoch": 0.52294921875, "grad_norm": 0.1782267987728119, "learning_rate": 0.00026895616371761763, "loss": 1.8504, "step": 5355 }, { "epoch": 0.523046875, "grad_norm": 0.2316916137933731, "learning_rate": 0.0002688850771707854, "loss": 1.8127, "step": 5356 }, { "epoch": 0.52314453125, "grad_norm": 0.23209956288337708, "learning_rate": 0.0002688139912347797, "loss": 1.8079, "step": 5357 }, { "epoch": 0.5232421875, "grad_norm": 0.21986278891563416, "learning_rate": 0.0002687429059167016, "loss": 1.8538, "step": 5358 }, { "epoch": 0.52333984375, "grad_norm": 0.22243796288967133, "learning_rate": 0.00026867182122365175, "loss": 1.8458, "step": 5359 }, { "epoch": 0.5234375, "grad_norm": 0.23557168245315552, "learning_rate": 0.0002686007371627309, "loss": 1.795, "step": 5360 }, { "epoch": 0.52353515625, "grad_norm": 0.2438548058271408, "learning_rate": 0.0002685296537410397, "loss": 1.8702, "step": 5361 }, { "epoch": 0.5236328125, "grad_norm": 
0.2350616157054901, "learning_rate": 0.0002684585709656787, "loss": 1.8292, "step": 5362 }, { "epoch": 0.52373046875, "grad_norm": 0.23501183092594147, "learning_rate": 0.00026838748884374866, "loss": 1.8485, "step": 5363 }, { "epoch": 0.523828125, "grad_norm": 0.23806075751781464, "learning_rate": 0.0002683164073823497, "loss": 1.8387, "step": 5364 }, { "epoch": 0.52392578125, "grad_norm": 0.21280235052108765, "learning_rate": 0.00026824532658858253, "loss": 1.812, "step": 5365 }, { "epoch": 0.5240234375, "grad_norm": 0.18295879662036896, "learning_rate": 0.00026817424646954745, "loss": 1.8499, "step": 5366 }, { "epoch": 0.52412109375, "grad_norm": 0.2224673181772232, "learning_rate": 0.0002681031670323446, "loss": 1.7872, "step": 5367 }, { "epoch": 0.52421875, "grad_norm": 0.1947047859430313, "learning_rate": 0.0002680320882840743, "loss": 1.8096, "step": 5368 }, { "epoch": 0.52431640625, "grad_norm": 0.2096828669309616, "learning_rate": 0.0002679610102318367, "loss": 1.846, "step": 5369 }, { "epoch": 0.5244140625, "grad_norm": 0.20171454548835754, "learning_rate": 0.0002678899328827318, "loss": 1.8148, "step": 5370 }, { "epoch": 0.52451171875, "grad_norm": 0.20900505781173706, "learning_rate": 0.00026781885624385947, "loss": 1.8311, "step": 5371 }, { "epoch": 0.524609375, "grad_norm": 0.24200557172298431, "learning_rate": 0.00026774778032231987, "loss": 1.8416, "step": 5372 }, { "epoch": 0.52470703125, "grad_norm": 0.19616490602493286, "learning_rate": 0.00026767670512521283, "loss": 1.8496, "step": 5373 }, { "epoch": 0.5248046875, "grad_norm": 0.20534493029117584, "learning_rate": 0.000267605630659638, "loss": 1.7896, "step": 5374 }, { "epoch": 0.52490234375, "grad_norm": 0.2087337225675583, "learning_rate": 0.00026753455693269526, "loss": 1.8351, "step": 5375 }, { "epoch": 0.525, "grad_norm": 0.18226175010204315, "learning_rate": 0.000267463483951484, "loss": 1.7969, "step": 5376 }, { "epoch": 0.52509765625, "grad_norm": 0.23331479728221893, "learning_rate": 0.00026739241172310405, "loss": 1.8188, "step": 5377 }, { "epoch": 0.5251953125, "grad_norm": 0.2282353937625885, "learning_rate": 0.00026732134025465464, "loss": 1.8189, "step": 5378 }, { "epoch": 0.52529296875, "grad_norm": 0.1712988168001175, "learning_rate": 0.0002672502695532354, "loss": 1.8412, "step": 5379 }, { "epoch": 0.525390625, "grad_norm": 0.2554522752761841, "learning_rate": 0.00026717919962594553, "loss": 1.8529, "step": 5380 }, { "epoch": 0.52548828125, "grad_norm": 0.18042561411857605, "learning_rate": 0.0002671081304798844, "loss": 1.7873, "step": 5381 }, { "epoch": 0.5255859375, "grad_norm": 0.2376638799905777, "learning_rate": 0.00026703706212215114, "loss": 1.806, "step": 5382 }, { "epoch": 0.52568359375, "grad_norm": 0.16039441525936127, "learning_rate": 0.0002669659945598447, "loss": 1.8364, "step": 5383 }, { "epoch": 0.52578125, "grad_norm": 0.2442803978919983, "learning_rate": 0.00026689492780006437, "loss": 1.8323, "step": 5384 }, { "epoch": 0.52587890625, "grad_norm": 0.16879159212112427, "learning_rate": 0.0002668238618499088, "loss": 1.8731, "step": 5385 }, { "epoch": 0.5259765625, "grad_norm": 0.2303413450717926, "learning_rate": 0.00026675279671647706, "loss": 1.8368, "step": 5386 }, { "epoch": 0.52607421875, "grad_norm": 0.2049660086631775, "learning_rate": 0.00026668173240686796, "loss": 1.8327, "step": 5387 }, { "epoch": 0.526171875, "grad_norm": 0.19848890602588654, "learning_rate": 0.00026661066892817996, "loss": 1.7689, "step": 5388 }, { "epoch": 0.52626953125, "grad_norm": 0.19235661625862122, 
"learning_rate": 0.00026653960628751194, "loss": 1.8212, "step": 5389 }, { "epoch": 0.5263671875, "grad_norm": 0.1751050353050232, "learning_rate": 0.00026646854449196216, "loss": 1.8769, "step": 5390 }, { "epoch": 0.52646484375, "grad_norm": 0.2012677788734436, "learning_rate": 0.00026639748354862934, "loss": 1.8268, "step": 5391 }, { "epoch": 0.5265625, "grad_norm": 0.20840688049793243, "learning_rate": 0.00026632642346461157, "loss": 1.8284, "step": 5392 }, { "epoch": 0.52666015625, "grad_norm": 0.19536815583705902, "learning_rate": 0.0002662553642470073, "loss": 1.8327, "step": 5393 }, { "epoch": 0.5267578125, "grad_norm": 0.22179467976093292, "learning_rate": 0.00026618430590291475, "loss": 1.8766, "step": 5394 }, { "epoch": 0.52685546875, "grad_norm": 0.19378171861171722, "learning_rate": 0.00026611324843943185, "loss": 1.8399, "step": 5395 }, { "epoch": 0.526953125, "grad_norm": 0.21545320749282837, "learning_rate": 0.00026604219186365677, "loss": 1.8231, "step": 5396 }, { "epoch": 0.52705078125, "grad_norm": 0.19444288313388824, "learning_rate": 0.0002659711361826873, "loss": 1.8299, "step": 5397 }, { "epoch": 0.5271484375, "grad_norm": 0.2240353226661682, "learning_rate": 0.00026590008140362143, "loss": 1.8174, "step": 5398 }, { "epoch": 0.52724609375, "grad_norm": 0.2034679651260376, "learning_rate": 0.00026582902753355663, "loss": 1.8522, "step": 5399 }, { "epoch": 0.52734375, "grad_norm": 0.2095334678888321, "learning_rate": 0.0002657579745795909, "loss": 1.8709, "step": 5400 }, { "epoch": 0.52744140625, "grad_norm": 0.2119787037372589, "learning_rate": 0.0002656869225488216, "loss": 1.833, "step": 5401 }, { "epoch": 0.5275390625, "grad_norm": 0.18732643127441406, "learning_rate": 0.0002656158714483461, "loss": 1.7844, "step": 5402 }, { "epoch": 0.52763671875, "grad_norm": 0.2131863385438919, "learning_rate": 0.0002655448212852621, "loss": 1.8041, "step": 5403 }, { "epoch": 0.527734375, "grad_norm": 0.2099706530570984, "learning_rate": 0.00026547377206666663, "loss": 1.8845, "step": 5404 }, { "epoch": 0.52783203125, "grad_norm": 0.22966785728931427, "learning_rate": 0.00026540272379965696, "loss": 1.7975, "step": 5405 }, { "epoch": 0.5279296875, "grad_norm": 0.17860554158687592, "learning_rate": 0.00026533167649133, "loss": 1.7672, "step": 5406 }, { "epoch": 0.52802734375, "grad_norm": 0.23800534009933472, "learning_rate": 0.000265260630148783, "loss": 1.8196, "step": 5407 }, { "epoch": 0.528125, "grad_norm": 0.1917227953672409, "learning_rate": 0.0002651895847791128, "loss": 1.7954, "step": 5408 }, { "epoch": 0.52822265625, "grad_norm": 0.2600693702697754, "learning_rate": 0.00026511854038941616, "loss": 1.8506, "step": 5409 }, { "epoch": 0.5283203125, "grad_norm": 0.20854026079177856, "learning_rate": 0.0002650474969867898, "loss": 1.8381, "step": 5410 }, { "epoch": 0.52841796875, "grad_norm": 0.24315145611763, "learning_rate": 0.0002649764545783303, "loss": 1.83, "step": 5411 }, { "epoch": 0.528515625, "grad_norm": 0.22768034040927887, "learning_rate": 0.00026490541317113425, "loss": 1.8163, "step": 5412 }, { "epoch": 0.52861328125, "grad_norm": 0.2481195032596588, "learning_rate": 0.0002648343727722978, "loss": 1.8054, "step": 5413 }, { "epoch": 0.5287109375, "grad_norm": 0.20168271660804749, "learning_rate": 0.0002647633333889176, "loss": 1.8325, "step": 5414 }, { "epoch": 0.52880859375, "grad_norm": 0.24382466077804565, "learning_rate": 0.0002646922950280897, "loss": 1.8403, "step": 5415 }, { "epoch": 0.52890625, "grad_norm": 0.17584636807441711, "learning_rate": 
0.0002646212576969102, "loss": 1.8202, "step": 5416 }, { "epoch": 0.52900390625, "grad_norm": 0.23748265206813812, "learning_rate": 0.00026455022140247514, "loss": 1.7864, "step": 5417 }, { "epoch": 0.5291015625, "grad_norm": 0.18739382922649384, "learning_rate": 0.00026447918615188034, "loss": 1.8738, "step": 5418 }, { "epoch": 0.52919921875, "grad_norm": 0.23764699697494507, "learning_rate": 0.0002644081519522217, "loss": 1.8609, "step": 5419 }, { "epoch": 0.529296875, "grad_norm": 0.1940706968307495, "learning_rate": 0.0002643371188105947, "loss": 1.8363, "step": 5420 }, { "epoch": 0.52939453125, "grad_norm": 0.22153788805007935, "learning_rate": 0.00026426608673409515, "loss": 1.8102, "step": 5421 }, { "epoch": 0.5294921875, "grad_norm": 0.16070859134197235, "learning_rate": 0.0002641950557298185, "loss": 1.8349, "step": 5422 }, { "epoch": 0.52958984375, "grad_norm": 0.22653818130493164, "learning_rate": 0.00026412402580486, "loss": 1.844, "step": 5423 }, { "epoch": 0.5296875, "grad_norm": 0.22334744036197662, "learning_rate": 0.000264052996966315, "loss": 1.8311, "step": 5424 }, { "epoch": 0.52978515625, "grad_norm": 0.22687822580337524, "learning_rate": 0.0002639819692212786, "loss": 1.8033, "step": 5425 }, { "epoch": 0.5298828125, "grad_norm": 0.20469629764556885, "learning_rate": 0.0002639109425768459, "loss": 1.7667, "step": 5426 }, { "epoch": 0.52998046875, "grad_norm": 0.18325157463550568, "learning_rate": 0.0002638399170401117, "loss": 1.817, "step": 5427 }, { "epoch": 0.530078125, "grad_norm": 0.19420349597930908, "learning_rate": 0.00026376889261817095, "loss": 1.8075, "step": 5428 }, { "epoch": 0.53017578125, "grad_norm": 0.2145165205001831, "learning_rate": 0.0002636978693181183, "loss": 1.8228, "step": 5429 }, { "epoch": 0.5302734375, "grad_norm": 0.20965144038200378, "learning_rate": 0.0002636268471470485, "loss": 1.804, "step": 5430 }, { "epoch": 0.53037109375, "grad_norm": 0.1818566918373108, "learning_rate": 0.0002635558261120559, "loss": 1.7946, "step": 5431 }, { "epoch": 0.53046875, "grad_norm": 0.19721442461013794, "learning_rate": 0.0002634848062202348, "loss": 1.7748, "step": 5432 }, { "epoch": 0.53056640625, "grad_norm": 0.19389717280864716, "learning_rate": 0.0002634137874786796, "loss": 1.8455, "step": 5433 }, { "epoch": 0.5306640625, "grad_norm": 0.20727874338626862, "learning_rate": 0.0002633427698944843, "loss": 1.8315, "step": 5434 }, { "epoch": 0.53076171875, "grad_norm": 0.18529097735881805, "learning_rate": 0.0002632717534747431, "loss": 1.7471, "step": 5435 }, { "epoch": 0.530859375, "grad_norm": 0.24017512798309326, "learning_rate": 0.00026320073822654977, "loss": 1.8034, "step": 5436 }, { "epoch": 0.53095703125, "grad_norm": 0.2015991508960724, "learning_rate": 0.0002631297241569982, "loss": 1.8378, "step": 5437 }, { "epoch": 0.5310546875, "grad_norm": 0.2310858517885208, "learning_rate": 0.00026305871127318205, "loss": 1.8503, "step": 5438 }, { "epoch": 0.53115234375, "grad_norm": 0.16991358995437622, "learning_rate": 0.0002629876995821948, "loss": 1.7912, "step": 5439 }, { "epoch": 0.53125, "grad_norm": 0.21322619915008545, "learning_rate": 0.00026291668909113, "loss": 1.8179, "step": 5440 }, { "epoch": 0.53134765625, "grad_norm": 0.1953168362379074, "learning_rate": 0.0002628456798070808, "loss": 1.8134, "step": 5441 }, { "epoch": 0.5314453125, "grad_norm": 0.23283635079860687, "learning_rate": 0.0002627746717371406, "loss": 1.8024, "step": 5442 }, { "epoch": 0.53154296875, "grad_norm": 0.18258614838123322, "learning_rate": 0.00026270366488840235, 
"loss": 1.8251, "step": 5443 }, { "epoch": 0.531640625, "grad_norm": 0.2341890037059784, "learning_rate": 0.0002626326592679591, "loss": 1.8388, "step": 5444 }, { "epoch": 0.53173828125, "grad_norm": 0.19102002680301666, "learning_rate": 0.00026256165488290357, "loss": 1.8211, "step": 5445 }, { "epoch": 0.5318359375, "grad_norm": 0.20009714365005493, "learning_rate": 0.0002624906517403285, "loss": 1.8561, "step": 5446 }, { "epoch": 0.53193359375, "grad_norm": 0.16548582911491394, "learning_rate": 0.0002624196498473266, "loss": 1.8054, "step": 5447 }, { "epoch": 0.53203125, "grad_norm": 0.2068140059709549, "learning_rate": 0.0002623486492109901, "loss": 1.8565, "step": 5448 }, { "epoch": 0.53212890625, "grad_norm": 0.173319011926651, "learning_rate": 0.00026227764983841154, "loss": 1.8041, "step": 5449 }, { "epoch": 0.5322265625, "grad_norm": 0.18561090528964996, "learning_rate": 0.0002622066517366831, "loss": 1.8147, "step": 5450 }, { "epoch": 0.53232421875, "grad_norm": 0.1849651038646698, "learning_rate": 0.0002621356549128967, "loss": 1.7957, "step": 5451 }, { "epoch": 0.532421875, "grad_norm": 0.19743481278419495, "learning_rate": 0.0002620646593741445, "loss": 1.8438, "step": 5452 }, { "epoch": 0.53251953125, "grad_norm": 0.20082075893878937, "learning_rate": 0.00026199366512751814, "loss": 1.8054, "step": 5453 }, { "epoch": 0.5326171875, "grad_norm": 0.21201133728027344, "learning_rate": 0.0002619226721801094, "loss": 1.8407, "step": 5454 }, { "epoch": 0.53271484375, "grad_norm": 0.1894374042749405, "learning_rate": 0.00026185168053901, "loss": 1.8014, "step": 5455 }, { "epoch": 0.5328125, "grad_norm": 0.20400665700435638, "learning_rate": 0.00026178069021131104, "loss": 1.84, "step": 5456 }, { "epoch": 0.53291015625, "grad_norm": 0.18003985285758972, "learning_rate": 0.0002617097012041042, "loss": 1.7753, "step": 5457 }, { "epoch": 0.5330078125, "grad_norm": 0.23151762783527374, "learning_rate": 0.00026163871352448035, "loss": 1.8384, "step": 5458 }, { "epoch": 0.53310546875, "grad_norm": 0.2079254686832428, "learning_rate": 0.00026156772717953077, "loss": 1.8204, "step": 5459 }, { "epoch": 0.533203125, "grad_norm": 0.22848433256149292, "learning_rate": 0.0002614967421763461, "loss": 1.8062, "step": 5460 }, { "epoch": 0.53330078125, "grad_norm": 0.25768956542015076, "learning_rate": 0.0002614257585220173, "loss": 1.844, "step": 5461 }, { "epoch": 0.5333984375, "grad_norm": 0.22604383528232574, "learning_rate": 0.000261354776223635, "loss": 1.8169, "step": 5462 }, { "epoch": 0.53349609375, "grad_norm": 0.24321582913398743, "learning_rate": 0.00026128379528828966, "loss": 1.8239, "step": 5463 }, { "epoch": 0.53359375, "grad_norm": 0.20043078064918518, "learning_rate": 0.00026121281572307164, "loss": 1.8125, "step": 5464 }, { "epoch": 0.53369140625, "grad_norm": 0.1991351991891861, "learning_rate": 0.0002611418375350712, "loss": 1.8043, "step": 5465 }, { "epoch": 0.5337890625, "grad_norm": 0.24020060896873474, "learning_rate": 0.0002610708607313785, "loss": 1.8138, "step": 5466 }, { "epoch": 0.53388671875, "grad_norm": 0.18532365560531616, "learning_rate": 0.00026099988531908333, "loss": 1.8529, "step": 5467 }, { "epoch": 0.533984375, "grad_norm": 0.2191806435585022, "learning_rate": 0.00026092891130527544, "loss": 1.8232, "step": 5468 }, { "epoch": 0.53408203125, "grad_norm": 0.1792948991060257, "learning_rate": 0.0002608579386970448, "loss": 1.835, "step": 5469 }, { "epoch": 0.5341796875, "grad_norm": 0.21077221632003784, "learning_rate": 0.00026078696750148083, "loss": 1.7839, 
"step": 5470 }, { "epoch": 0.53427734375, "grad_norm": 0.1714853197336197, "learning_rate": 0.00026071599772567287, "loss": 1.8259, "step": 5471 }, { "epoch": 0.534375, "grad_norm": 0.22515757381916046, "learning_rate": 0.0002606450293767102, "loss": 1.8389, "step": 5472 }, { "epoch": 0.53447265625, "grad_norm": 0.19979530572891235, "learning_rate": 0.0002605740624616819, "loss": 1.7934, "step": 5473 }, { "epoch": 0.5345703125, "grad_norm": 0.22420503199100494, "learning_rate": 0.00026050309698767693, "loss": 1.843, "step": 5474 }, { "epoch": 0.53466796875, "grad_norm": 0.1782931685447693, "learning_rate": 0.00026043213296178416, "loss": 1.8518, "step": 5475 }, { "epoch": 0.534765625, "grad_norm": 0.24308639764785767, "learning_rate": 0.00026036117039109226, "loss": 1.8183, "step": 5476 }, { "epoch": 0.53486328125, "grad_norm": 0.29384294152259827, "learning_rate": 0.0002602902092826897, "loss": 1.823, "step": 5477 }, { "epoch": 0.5349609375, "grad_norm": 0.20736265182495117, "learning_rate": 0.000260219249643665, "loss": 1.8207, "step": 5478 }, { "epoch": 0.53505859375, "grad_norm": 0.20558327436447144, "learning_rate": 0.00026014829148110626, "loss": 1.8735, "step": 5479 }, { "epoch": 0.53515625, "grad_norm": 0.2373596876859665, "learning_rate": 0.0002600773348021017, "loss": 1.8703, "step": 5480 }, { "epoch": 0.53525390625, "grad_norm": 0.19190528988838196, "learning_rate": 0.00026000637961373906, "loss": 1.8284, "step": 5481 }, { "epoch": 0.5353515625, "grad_norm": 0.17614686489105225, "learning_rate": 0.00025993542592310634, "loss": 1.83, "step": 5482 }, { "epoch": 0.53544921875, "grad_norm": 0.19289451837539673, "learning_rate": 0.0002598644737372911, "loss": 1.85, "step": 5483 }, { "epoch": 0.535546875, "grad_norm": 0.18200995028018951, "learning_rate": 0.00025979352306338083, "loss": 1.8233, "step": 5484 }, { "epoch": 0.53564453125, "grad_norm": 0.18574489653110504, "learning_rate": 0.00025972257390846295, "loss": 1.8124, "step": 5485 }, { "epoch": 0.5357421875, "grad_norm": 0.18897280097007751, "learning_rate": 0.0002596516262796245, "loss": 1.8326, "step": 5486 }, { "epoch": 0.53583984375, "grad_norm": 0.18039488792419434, "learning_rate": 0.0002595806801839527, "loss": 1.8115, "step": 5487 }, { "epoch": 0.5359375, "grad_norm": 0.18147403001785278, "learning_rate": 0.0002595097356285344, "loss": 1.8547, "step": 5488 }, { "epoch": 0.53603515625, "grad_norm": 0.22002235054969788, "learning_rate": 0.00025943879262045605, "loss": 1.7807, "step": 5489 }, { "epoch": 0.5361328125, "grad_norm": 0.17554640769958496, "learning_rate": 0.00025936785116680456, "loss": 1.8027, "step": 5490 }, { "epoch": 0.53623046875, "grad_norm": 0.21823273599147797, "learning_rate": 0.0002592969112746663, "loss": 1.8096, "step": 5491 }, { "epoch": 0.536328125, "grad_norm": 0.2186814546585083, "learning_rate": 0.00025922597295112757, "loss": 1.8073, "step": 5492 }, { "epoch": 0.53642578125, "grad_norm": 0.18474088609218597, "learning_rate": 0.0002591550362032742, "loss": 1.8504, "step": 5493 }, { "epoch": 0.5365234375, "grad_norm": 0.21907639503479004, "learning_rate": 0.00025908410103819254, "loss": 1.8462, "step": 5494 }, { "epoch": 0.53662109375, "grad_norm": 0.17159883677959442, "learning_rate": 0.000259013167462968, "loss": 1.8614, "step": 5495 }, { "epoch": 0.53671875, "grad_norm": 0.19756270945072174, "learning_rate": 0.00025894223548468645, "loss": 1.8432, "step": 5496 }, { "epoch": 0.53681640625, "grad_norm": 0.2252732366323471, "learning_rate": 0.0002588713051104333, "loss": 1.816, "step": 5497 }, { 
"epoch": 0.5369140625, "grad_norm": 0.19140206277370453, "learning_rate": 0.000258800376347294, "loss": 1.8171, "step": 5498 }, { "epoch": 0.53701171875, "grad_norm": 0.21776041388511658, "learning_rate": 0.00025872944920235345, "loss": 1.8142, "step": 5499 }, { "epoch": 0.537109375, "grad_norm": 0.18836744129657745, "learning_rate": 0.0002586585236826969, "loss": 1.8195, "step": 5500 }, { "epoch": 0.53720703125, "grad_norm": 0.23153814673423767, "learning_rate": 0.0002585875997954091, "loss": 1.841, "step": 5501 }, { "epoch": 0.5373046875, "grad_norm": 0.2054804116487503, "learning_rate": 0.00025851667754757455, "loss": 1.8091, "step": 5502 }, { "epoch": 0.53740234375, "grad_norm": 0.2152758091688156, "learning_rate": 0.00025844575694627784, "loss": 1.7953, "step": 5503 }, { "epoch": 0.5375, "grad_norm": 0.24548089504241943, "learning_rate": 0.00025837483799860347, "loss": 1.8231, "step": 5504 }, { "epoch": 0.53759765625, "grad_norm": 0.20327144861221313, "learning_rate": 0.0002583039207116355, "loss": 1.8203, "step": 5505 }, { "epoch": 0.5376953125, "grad_norm": 0.21840816736221313, "learning_rate": 0.00025823300509245783, "loss": 1.8643, "step": 5506 }, { "epoch": 0.53779296875, "grad_norm": 0.21195746958255768, "learning_rate": 0.00025816209114815455, "loss": 1.838, "step": 5507 }, { "epoch": 0.537890625, "grad_norm": 0.2129138559103012, "learning_rate": 0.00025809117888580906, "loss": 1.8068, "step": 5508 }, { "epoch": 0.53798828125, "grad_norm": 0.2126106172800064, "learning_rate": 0.00025802026831250516, "loss": 1.8311, "step": 5509 }, { "epoch": 0.5380859375, "grad_norm": 0.2245035618543625, "learning_rate": 0.00025794935943532584, "loss": 1.8071, "step": 5510 }, { "epoch": 0.53818359375, "grad_norm": 0.20958508551120758, "learning_rate": 0.0002578784522613545, "loss": 1.822, "step": 5511 }, { "epoch": 0.53828125, "grad_norm": 0.19479487836360931, "learning_rate": 0.00025780754679767423, "loss": 1.7754, "step": 5512 }, { "epoch": 0.53837890625, "grad_norm": 0.2391282320022583, "learning_rate": 0.00025773664305136763, "loss": 1.8761, "step": 5513 }, { "epoch": 0.5384765625, "grad_norm": 0.2205953449010849, "learning_rate": 0.00025766574102951745, "loss": 1.8495, "step": 5514 }, { "epoch": 0.53857421875, "grad_norm": 0.2355017513036728, "learning_rate": 0.0002575948407392062, "loss": 1.8204, "step": 5515 }, { "epoch": 0.538671875, "grad_norm": 0.2196408212184906, "learning_rate": 0.00025752394218751616, "loss": 1.7814, "step": 5516 }, { "epoch": 0.53876953125, "grad_norm": 0.2433854341506958, "learning_rate": 0.0002574530453815294, "loss": 1.8491, "step": 5517 }, { "epoch": 0.5388671875, "grad_norm": 0.20816633105278015, "learning_rate": 0.00025738215032832803, "loss": 1.8251, "step": 5518 }, { "epoch": 0.53896484375, "grad_norm": 0.2403120994567871, "learning_rate": 0.00025731125703499374, "loss": 1.8206, "step": 5519 }, { "epoch": 0.5390625, "grad_norm": 0.20789697766304016, "learning_rate": 0.0002572403655086082, "loss": 1.8621, "step": 5520 }, { "epoch": 0.53916015625, "grad_norm": 0.23628948628902435, "learning_rate": 0.00025716947575625277, "loss": 1.8328, "step": 5521 }, { "epoch": 0.5392578125, "grad_norm": 0.1993051916360855, "learning_rate": 0.0002570985877850088, "loss": 1.7848, "step": 5522 }, { "epoch": 0.53935546875, "grad_norm": 0.19878573715686798, "learning_rate": 0.00025702770160195725, "loss": 1.8286, "step": 5523 }, { "epoch": 0.539453125, "grad_norm": 0.2252517193555832, "learning_rate": 0.00025695681721417906, "loss": 1.8257, "step": 5524 }, { "epoch": 
0.53955078125, "grad_norm": 0.18713703751564026, "learning_rate": 0.000256885934628755, "loss": 1.7943, "step": 5525 }, { "epoch": 0.5396484375, "grad_norm": 0.21350198984146118, "learning_rate": 0.00025681505385276564, "loss": 1.8182, "step": 5526 }, { "epoch": 0.53974609375, "grad_norm": 0.16980531811714172, "learning_rate": 0.0002567441748932913, "loss": 1.82, "step": 5527 }, { "epoch": 0.53984375, "grad_norm": 0.20884129405021667, "learning_rate": 0.0002566732977574122, "loss": 1.8644, "step": 5528 }, { "epoch": 0.53994140625, "grad_norm": 0.190639927983284, "learning_rate": 0.00025660242245220823, "loss": 1.8109, "step": 5529 }, { "epoch": 0.5400390625, "grad_norm": 0.20282813906669617, "learning_rate": 0.0002565315489847593, "loss": 1.8204, "step": 5530 }, { "epoch": 0.54013671875, "grad_norm": 0.19925494492053986, "learning_rate": 0.00025646067736214497, "loss": 1.8325, "step": 5531 }, { "epoch": 0.540234375, "grad_norm": 0.22936612367630005, "learning_rate": 0.0002563898075914447, "loss": 1.8147, "step": 5532 }, { "epoch": 0.54033203125, "grad_norm": 0.20881228148937225, "learning_rate": 0.00025631893967973784, "loss": 1.8105, "step": 5533 }, { "epoch": 0.5404296875, "grad_norm": 0.21756277978420258, "learning_rate": 0.0002562480736341034, "loss": 1.8219, "step": 5534 }, { "epoch": 0.54052734375, "grad_norm": 0.23426201939582825, "learning_rate": 0.00025617720946162026, "loss": 1.8387, "step": 5535 }, { "epoch": 0.540625, "grad_norm": 0.2312038242816925, "learning_rate": 0.0002561063471693671, "loss": 1.8096, "step": 5536 }, { "epoch": 0.54072265625, "grad_norm": 0.21664568781852722, "learning_rate": 0.0002560354867644226, "loss": 1.8192, "step": 5537 }, { "epoch": 0.5408203125, "grad_norm": 0.21599158644676208, "learning_rate": 0.00025596462825386486, "loss": 1.8381, "step": 5538 }, { "epoch": 0.54091796875, "grad_norm": 0.21335677802562714, "learning_rate": 0.00025589377164477204, "loss": 1.8241, "step": 5539 }, { "epoch": 0.541015625, "grad_norm": 0.22373104095458984, "learning_rate": 0.0002558229169442224, "loss": 1.8621, "step": 5540 }, { "epoch": 0.54111328125, "grad_norm": 0.19807226955890656, "learning_rate": 0.0002557520641592933, "loss": 1.8284, "step": 5541 }, { "epoch": 0.5412109375, "grad_norm": 0.19161798059940338, "learning_rate": 0.00025568121329706253, "loss": 1.7522, "step": 5542 }, { "epoch": 0.54130859375, "grad_norm": 0.25048694014549255, "learning_rate": 0.0002556103643646073, "loss": 1.8088, "step": 5543 }, { "epoch": 0.54140625, "grad_norm": 0.1828339844942093, "learning_rate": 0.00025553951736900505, "loss": 1.797, "step": 5544 }, { "epoch": 0.54150390625, "grad_norm": 0.2264985591173172, "learning_rate": 0.0002554686723173325, "loss": 1.8614, "step": 5545 }, { "epoch": 0.5416015625, "grad_norm": 0.18261796236038208, "learning_rate": 0.00025539782921666656, "loss": 1.8491, "step": 5546 }, { "epoch": 0.54169921875, "grad_norm": 0.22332465648651123, "learning_rate": 0.00025532698807408387, "loss": 1.8421, "step": 5547 }, { "epoch": 0.541796875, "grad_norm": 0.22376058995723724, "learning_rate": 0.0002552561488966607, "loss": 1.8088, "step": 5548 }, { "epoch": 0.54189453125, "grad_norm": 0.23659147322177887, "learning_rate": 0.00025518531169147347, "loss": 1.8348, "step": 5549 }, { "epoch": 0.5419921875, "grad_norm": 0.23126518726348877, "learning_rate": 0.000255114476465598, "loss": 1.8615, "step": 5550 }, { "epoch": 0.54208984375, "grad_norm": 0.22165727615356445, "learning_rate": 0.00025504364322611017, "loss": 1.8019, "step": 5551 }, { "epoch": 0.5421875, 
"grad_norm": 0.2730003595352173, "learning_rate": 0.0002549728119800855, "loss": 1.8646, "step": 5552 }, { "epoch": 0.54228515625, "grad_norm": 0.2216140478849411, "learning_rate": 0.0002549019827345995, "loss": 1.8174, "step": 5553 }, { "epoch": 0.5423828125, "grad_norm": 0.2471429854631424, "learning_rate": 0.0002548311554967275, "loss": 1.8314, "step": 5554 }, { "epoch": 0.54248046875, "grad_norm": 0.2024291455745697, "learning_rate": 0.0002547603302735443, "loss": 1.8185, "step": 5555 }, { "epoch": 0.542578125, "grad_norm": 0.20605266094207764, "learning_rate": 0.00025468950707212485, "loss": 1.8478, "step": 5556 }, { "epoch": 0.54267578125, "grad_norm": 0.22533302009105682, "learning_rate": 0.0002546186858995436, "loss": 1.8037, "step": 5557 }, { "epoch": 0.5427734375, "grad_norm": 0.16469420492649078, "learning_rate": 0.0002545478667628752, "loss": 1.8322, "step": 5558 }, { "epoch": 0.54287109375, "grad_norm": 0.22251582145690918, "learning_rate": 0.0002544770496691936, "loss": 1.8185, "step": 5559 }, { "epoch": 0.54296875, "grad_norm": 0.19479742646217346, "learning_rate": 0.0002544062346255729, "loss": 1.8316, "step": 5560 }, { "epoch": 0.54306640625, "grad_norm": 0.19660165905952454, "learning_rate": 0.0002543354216390871, "loss": 1.8092, "step": 5561 }, { "epoch": 0.5431640625, "grad_norm": 0.17606791853904724, "learning_rate": 0.0002542646107168095, "loss": 1.8037, "step": 5562 }, { "epoch": 0.54326171875, "grad_norm": 0.1711820363998413, "learning_rate": 0.0002541938018658136, "loss": 1.7779, "step": 5563 }, { "epoch": 0.543359375, "grad_norm": 0.19791677594184875, "learning_rate": 0.00025412299509317254, "loss": 1.7675, "step": 5564 }, { "epoch": 0.54345703125, "grad_norm": 0.18170693516731262, "learning_rate": 0.0002540521904059594, "loss": 1.7781, "step": 5565 }, { "epoch": 0.5435546875, "grad_norm": 0.21157903969287872, "learning_rate": 0.0002539813878112467, "loss": 1.8557, "step": 5566 }, { "epoch": 0.54365234375, "grad_norm": 0.18882139027118683, "learning_rate": 0.00025391058731610725, "loss": 1.8141, "step": 5567 }, { "epoch": 0.54375, "grad_norm": 0.19027011096477509, "learning_rate": 0.0002538397889276133, "loss": 1.822, "step": 5568 }, { "epoch": 0.54384765625, "grad_norm": 0.1943526566028595, "learning_rate": 0.0002537689926528369, "loss": 1.8345, "step": 5569 }, { "epoch": 0.5439453125, "grad_norm": 0.18600139021873474, "learning_rate": 0.0002536981984988502, "loss": 1.8058, "step": 5570 }, { "epoch": 0.54404296875, "grad_norm": 0.17392703890800476, "learning_rate": 0.0002536274064727246, "loss": 1.809, "step": 5571 }, { "epoch": 0.544140625, "grad_norm": 0.21319179236888885, "learning_rate": 0.0002535566165815317, "loss": 1.798, "step": 5572 }, { "epoch": 0.54423828125, "grad_norm": 0.20055635273456573, "learning_rate": 0.000253485828832343, "loss": 1.8006, "step": 5573 }, { "epoch": 0.5443359375, "grad_norm": 0.19154739379882812, "learning_rate": 0.00025341504323222937, "loss": 1.8017, "step": 5574 }, { "epoch": 0.54443359375, "grad_norm": 0.21032437682151794, "learning_rate": 0.0002533442597882617, "loss": 1.8161, "step": 5575 }, { "epoch": 0.54453125, "grad_norm": 0.18448124825954437, "learning_rate": 0.0002532734785075105, "loss": 1.7913, "step": 5576 }, { "epoch": 0.54462890625, "grad_norm": 0.19715270400047302, "learning_rate": 0.00025320269939704647, "loss": 1.826, "step": 5577 }, { "epoch": 0.5447265625, "grad_norm": 0.1870099902153015, "learning_rate": 0.00025313192246393957, "loss": 1.7861, "step": 5578 }, { "epoch": 0.54482421875, "grad_norm": 
0.19428372383117676, "learning_rate": 0.00025306114771525985, "loss": 1.8214, "step": 5579 }, { "epoch": 0.544921875, "grad_norm": 0.22493353486061096, "learning_rate": 0.0002529903751580772, "loss": 1.814, "step": 5580 }, { "epoch": 0.54501953125, "grad_norm": 0.26752540469169617, "learning_rate": 0.0002529196047994611, "loss": 1.812, "step": 5581 }, { "epoch": 0.5451171875, "grad_norm": 0.17355501651763916, "learning_rate": 0.00025284883664648084, "loss": 1.8521, "step": 5582 }, { "epoch": 0.54521484375, "grad_norm": 0.2585143446922302, "learning_rate": 0.0002527780707062055, "loss": 1.8169, "step": 5583 }, { "epoch": 0.5453125, "grad_norm": 0.1954551488161087, "learning_rate": 0.0002527073069857041, "loss": 1.8226, "step": 5584 }, { "epoch": 0.54541015625, "grad_norm": 0.22599788010120392, "learning_rate": 0.0002526365454920452, "loss": 1.7663, "step": 5585 }, { "epoch": 0.5455078125, "grad_norm": 0.2566271126270294, "learning_rate": 0.0002525657862322972, "loss": 1.8212, "step": 5586 }, { "epoch": 0.54560546875, "grad_norm": 0.2740970551967621, "learning_rate": 0.00025249502921352856, "loss": 1.7096, "step": 5587 }, { "epoch": 0.545703125, "grad_norm": 0.21593160927295685, "learning_rate": 0.00025242427444280703, "loss": 1.8239, "step": 5588 }, { "epoch": 0.54580078125, "grad_norm": 0.23447977006435394, "learning_rate": 0.0002523535219272005, "loss": 1.8375, "step": 5589 }, { "epoch": 0.5458984375, "grad_norm": 0.23651035130023956, "learning_rate": 0.0002522827716737766, "loss": 1.7495, "step": 5590 }, { "epoch": 0.54599609375, "grad_norm": 0.18802134692668915, "learning_rate": 0.00025221202368960245, "loss": 1.8186, "step": 5591 }, { "epoch": 0.54609375, "grad_norm": 0.23343928158283234, "learning_rate": 0.00025214127798174526, "loss": 1.8415, "step": 5592 }, { "epoch": 0.54619140625, "grad_norm": 0.15958987176418304, "learning_rate": 0.0002520705345572718, "loss": 1.8124, "step": 5593 }, { "epoch": 0.5462890625, "grad_norm": 0.260282427072525, "learning_rate": 0.0002519997934232489, "loss": 1.8048, "step": 5594 }, { "epoch": 0.54638671875, "grad_norm": 0.18524472415447235, "learning_rate": 0.00025192905458674286, "loss": 1.8342, "step": 5595 }, { "epoch": 0.546484375, "grad_norm": 0.22570255398750305, "learning_rate": 0.00025185831805481993, "loss": 1.8276, "step": 5596 }, { "epoch": 0.54658203125, "grad_norm": 0.22866535186767578, "learning_rate": 0.000251787583834546, "loss": 1.7834, "step": 5597 }, { "epoch": 0.5466796875, "grad_norm": 0.1903475970029831, "learning_rate": 0.00025171685193298675, "loss": 1.8312, "step": 5598 }, { "epoch": 0.54677734375, "grad_norm": 0.2691018879413605, "learning_rate": 0.00025164612235720776, "loss": 1.8453, "step": 5599 }, { "epoch": 0.546875, "grad_norm": 0.18015816807746887, "learning_rate": 0.0002515753951142741, "loss": 1.8247, "step": 5600 }, { "epoch": 0.54697265625, "grad_norm": 0.23992204666137695, "learning_rate": 0.00025150467021125113, "loss": 1.8134, "step": 5601 }, { "epoch": 0.5470703125, "grad_norm": 0.2217307984828949, "learning_rate": 0.0002514339476552034, "loss": 1.834, "step": 5602 }, { "epoch": 0.54716796875, "grad_norm": 0.21274347603321075, "learning_rate": 0.00025136322745319555, "loss": 1.7571, "step": 5603 }, { "epoch": 0.547265625, "grad_norm": 0.2632990777492523, "learning_rate": 0.00025129250961229184, "loss": 1.8381, "step": 5604 }, { "epoch": 0.54736328125, "grad_norm": 0.20031650364398956, "learning_rate": 0.00025122179413955635, "loss": 1.8103, "step": 5605 }, { "epoch": 0.5474609375, "grad_norm": 
0.2619915008544922, "learning_rate": 0.0002511510810420531, "loss": 1.8373, "step": 5606 }, { "epoch": 0.54755859375, "grad_norm": 0.21256199479103088, "learning_rate": 0.0002510803703268454, "loss": 1.7402, "step": 5607 }, { "epoch": 0.54765625, "grad_norm": 0.24447529017925262, "learning_rate": 0.0002510096620009969, "loss": 1.7873, "step": 5608 }, { "epoch": 0.54775390625, "grad_norm": 0.21407048404216766, "learning_rate": 0.0002509389560715706, "loss": 1.8152, "step": 5609 }, { "epoch": 0.5478515625, "grad_norm": 0.28688907623291016, "learning_rate": 0.00025086825254562944, "loss": 1.7828, "step": 5610 }, { "epoch": 0.54794921875, "grad_norm": 0.19840657711029053, "learning_rate": 0.0002507975514302361, "loss": 1.8365, "step": 5611 }, { "epoch": 0.548046875, "grad_norm": 0.261705219745636, "learning_rate": 0.00025072685273245295, "loss": 1.7812, "step": 5612 }, { "epoch": 0.54814453125, "grad_norm": 0.1972021460533142, "learning_rate": 0.0002506561564593421, "loss": 1.8304, "step": 5613 }, { "epoch": 0.5482421875, "grad_norm": 0.20700006186962128, "learning_rate": 0.0002505854626179655, "loss": 1.7967, "step": 5614 }, { "epoch": 0.54833984375, "grad_norm": 0.22016876935958862, "learning_rate": 0.0002505147712153851, "loss": 1.8218, "step": 5615 }, { "epoch": 0.5484375, "grad_norm": 0.2347933053970337, "learning_rate": 0.00025044408225866196, "loss": 1.8036, "step": 5616 }, { "epoch": 0.54853515625, "grad_norm": 0.23675140738487244, "learning_rate": 0.0002503733957548575, "loss": 1.7992, "step": 5617 }, { "epoch": 0.5486328125, "grad_norm": 0.21792195737361908, "learning_rate": 0.00025030271171103265, "loss": 1.7906, "step": 5618 }, { "epoch": 0.54873046875, "grad_norm": 0.21808528900146484, "learning_rate": 0.00025023203013424815, "loss": 1.7552, "step": 5619 }, { "epoch": 0.548828125, "grad_norm": 0.20091412961483002, "learning_rate": 0.0002501613510315643, "loss": 1.8115, "step": 5620 }, { "epoch": 0.54892578125, "grad_norm": 0.2109213024377823, "learning_rate": 0.00025009067441004144, "loss": 1.8295, "step": 5621 }, { "epoch": 0.5490234375, "grad_norm": 0.19148148596286774, "learning_rate": 0.00025002000027673967, "loss": 1.806, "step": 5622 }, { "epoch": 0.54912109375, "grad_norm": 0.22250260412693024, "learning_rate": 0.00024994932863871847, "loss": 1.8466, "step": 5623 }, { "epoch": 0.54921875, "grad_norm": 0.20109923183918, "learning_rate": 0.00024987865950303745, "loss": 1.8038, "step": 5624 }, { "epoch": 0.54931640625, "grad_norm": 0.21783488988876343, "learning_rate": 0.00024980799287675575, "loss": 1.8051, "step": 5625 }, { "epoch": 0.5494140625, "grad_norm": 0.2240290492773056, "learning_rate": 0.00024973732876693244, "loss": 1.7757, "step": 5626 }, { "epoch": 0.54951171875, "grad_norm": 0.28062963485717773, "learning_rate": 0.00024966666718062607, "loss": 1.812, "step": 5627 }, { "epoch": 0.549609375, "grad_norm": 0.18937014043331146, "learning_rate": 0.0002495960081248952, "loss": 1.8124, "step": 5628 }, { "epoch": 0.54970703125, "grad_norm": 0.2349611073732376, "learning_rate": 0.0002495253516067982, "loss": 1.7936, "step": 5629 }, { "epoch": 0.5498046875, "grad_norm": 0.19194374978542328, "learning_rate": 0.00024945469763339287, "loss": 1.824, "step": 5630 }, { "epoch": 0.54990234375, "grad_norm": 0.22067919373512268, "learning_rate": 0.0002493840462117369, "loss": 1.8503, "step": 5631 }, { "epoch": 0.55, "grad_norm": 0.19356147944927216, "learning_rate": 0.0002493133973488878, "loss": 1.8288, "step": 5632 }, { "epoch": 0.55009765625, "grad_norm": 0.2134767472743988, 
"learning_rate": 0.00024924275105190274, "loss": 1.8116, "step": 5633 }, { "epoch": 0.5501953125, "grad_norm": 0.19148734211921692, "learning_rate": 0.00024917210732783865, "loss": 1.8198, "step": 5634 }, { "epoch": 0.55029296875, "grad_norm": 0.2349918782711029, "learning_rate": 0.00024910146618375214, "loss": 1.845, "step": 5635 }, { "epoch": 0.550390625, "grad_norm": 0.1864594668149948, "learning_rate": 0.0002490308276266998, "loss": 1.7815, "step": 5636 }, { "epoch": 0.55048828125, "grad_norm": 0.24822095036506653, "learning_rate": 0.00024896019166373767, "loss": 1.8507, "step": 5637 }, { "epoch": 0.5505859375, "grad_norm": 0.17140084505081177, "learning_rate": 0.0002488895583019218, "loss": 1.8559, "step": 5638 }, { "epoch": 0.55068359375, "grad_norm": 0.27590298652648926, "learning_rate": 0.00024881892754830757, "loss": 1.8564, "step": 5639 }, { "epoch": 0.55078125, "grad_norm": 0.1764567643404007, "learning_rate": 0.00024874829940995074, "loss": 1.8322, "step": 5640 }, { "epoch": 0.55087890625, "grad_norm": 0.25785911083221436, "learning_rate": 0.00024867767389390606, "loss": 1.8247, "step": 5641 }, { "epoch": 0.5509765625, "grad_norm": 0.23109538853168488, "learning_rate": 0.0002486070510072286, "loss": 1.7906, "step": 5642 }, { "epoch": 0.55107421875, "grad_norm": 0.2514552175998688, "learning_rate": 0.00024853643075697294, "loss": 1.7986, "step": 5643 }, { "epoch": 0.551171875, "grad_norm": 0.2271673083305359, "learning_rate": 0.0002484658131501934, "loss": 1.8121, "step": 5644 }, { "epoch": 0.55126953125, "grad_norm": 0.26640182733535767, "learning_rate": 0.0002483951981939441, "loss": 1.808, "step": 5645 }, { "epoch": 0.5513671875, "grad_norm": 0.1872779130935669, "learning_rate": 0.00024832458589527876, "loss": 1.8316, "step": 5646 }, { "epoch": 0.55146484375, "grad_norm": 0.25839999318122864, "learning_rate": 0.000248253976261251, "loss": 1.8188, "step": 5647 }, { "epoch": 0.5515625, "grad_norm": 0.17632631957530975, "learning_rate": 0.00024818336929891406, "loss": 1.8427, "step": 5648 }, { "epoch": 0.55166015625, "grad_norm": 0.23481936752796173, "learning_rate": 0.0002481127650153209, "loss": 1.8178, "step": 5649 }, { "epoch": 0.5517578125, "grad_norm": 0.21371105313301086, "learning_rate": 0.0002480421634175243, "loss": 1.8441, "step": 5650 }, { "epoch": 0.55185546875, "grad_norm": 0.2622377872467041, "learning_rate": 0.00024797156451257686, "loss": 1.8329, "step": 5651 }, { "epoch": 0.551953125, "grad_norm": 0.1987522840499878, "learning_rate": 0.00024790096830753063, "loss": 1.8199, "step": 5652 }, { "epoch": 0.55205078125, "grad_norm": 0.22086186707019806, "learning_rate": 0.00024783037480943755, "loss": 1.8186, "step": 5653 }, { "epoch": 0.5521484375, "grad_norm": 0.1863006204366684, "learning_rate": 0.0002477597840253494, "loss": 1.8401, "step": 5654 }, { "epoch": 0.55224609375, "grad_norm": 0.23199500143527985, "learning_rate": 0.00024768919596231744, "loss": 1.8327, "step": 5655 }, { "epoch": 0.55234375, "grad_norm": 0.2213217318058014, "learning_rate": 0.0002476186106273928, "loss": 1.8026, "step": 5656 }, { "epoch": 0.55244140625, "grad_norm": 0.19168421626091003, "learning_rate": 0.00024754802802762644, "loss": 1.7974, "step": 5657 }, { "epoch": 0.5525390625, "grad_norm": 0.22788816690444946, "learning_rate": 0.00024747744817006883, "loss": 1.82, "step": 5658 }, { "epoch": 0.55263671875, "grad_norm": 0.2302289605140686, "learning_rate": 0.00024740687106177044, "loss": 1.8003, "step": 5659 }, { "epoch": 0.552734375, "grad_norm": 0.21979904174804688, 
"learning_rate": 0.00024733629670978106, "loss": 1.8258, "step": 5660 }, { "epoch": 0.55283203125, "grad_norm": 0.19267718493938446, "learning_rate": 0.00024726572512115066, "loss": 1.7861, "step": 5661 }, { "epoch": 0.5529296875, "grad_norm": 0.22639423608779907, "learning_rate": 0.0002471951563029286, "loss": 1.8169, "step": 5662 }, { "epoch": 0.55302734375, "grad_norm": 0.1872047781944275, "learning_rate": 0.00024712459026216395, "loss": 1.7864, "step": 5663 }, { "epoch": 0.553125, "grad_norm": 0.23099260032176971, "learning_rate": 0.0002470540270059059, "loss": 1.8282, "step": 5664 }, { "epoch": 0.55322265625, "grad_norm": 0.17061591148376465, "learning_rate": 0.00024698346654120296, "loss": 1.8057, "step": 5665 }, { "epoch": 0.5533203125, "grad_norm": 0.2588363289833069, "learning_rate": 0.00024691290887510355, "loss": 1.8012, "step": 5666 }, { "epoch": 0.55341796875, "grad_norm": 0.16465163230895996, "learning_rate": 0.00024684235401465564, "loss": 1.8399, "step": 5667 }, { "epoch": 0.553515625, "grad_norm": 0.22874172031879425, "learning_rate": 0.0002467718019669072, "loss": 1.8124, "step": 5668 }, { "epoch": 0.55361328125, "grad_norm": 0.1634414941072464, "learning_rate": 0.0002467012527389056, "loss": 1.8137, "step": 5669 }, { "epoch": 0.5537109375, "grad_norm": 0.20362968742847443, "learning_rate": 0.0002466307063376981, "loss": 1.8263, "step": 5670 }, { "epoch": 0.55380859375, "grad_norm": 0.17248284816741943, "learning_rate": 0.00024656016277033177, "loss": 1.8028, "step": 5671 }, { "epoch": 0.55390625, "grad_norm": 0.20407073199748993, "learning_rate": 0.00024648962204385324, "loss": 1.784, "step": 5672 }, { "epoch": 0.55400390625, "grad_norm": 0.17844130098819733, "learning_rate": 0.00024641908416530884, "loss": 1.8438, "step": 5673 }, { "epoch": 0.5541015625, "grad_norm": 0.20489801466464996, "learning_rate": 0.00024634854914174474, "loss": 1.8255, "step": 5674 }, { "epoch": 0.55419921875, "grad_norm": 0.18895667791366577, "learning_rate": 0.0002462780169802068, "loss": 1.8523, "step": 5675 }, { "epoch": 0.554296875, "grad_norm": 0.2184802144765854, "learning_rate": 0.0002462074876877405, "loss": 1.8185, "step": 5676 }, { "epoch": 0.55439453125, "grad_norm": 0.20135514438152313, "learning_rate": 0.000246136961271391, "loss": 1.8609, "step": 5677 }, { "epoch": 0.5544921875, "grad_norm": 0.21290968358516693, "learning_rate": 0.00024606643773820346, "loss": 1.8362, "step": 5678 }, { "epoch": 0.55458984375, "grad_norm": 0.23066574335098267, "learning_rate": 0.00024599591709522246, "loss": 1.7996, "step": 5679 }, { "epoch": 0.5546875, "grad_norm": 0.2147592008113861, "learning_rate": 0.00024592539934949236, "loss": 1.8152, "step": 5680 }, { "epoch": 0.55478515625, "grad_norm": 0.24552731215953827, "learning_rate": 0.00024585488450805734, "loss": 1.8196, "step": 5681 }, { "epoch": 0.5548828125, "grad_norm": 0.2122492492198944, "learning_rate": 0.0002457843725779611, "loss": 1.8456, "step": 5682 }, { "epoch": 0.55498046875, "grad_norm": 0.20884108543395996, "learning_rate": 0.00024571386356624726, "loss": 1.8161, "step": 5683 }, { "epoch": 0.555078125, "grad_norm": 0.20209017395973206, "learning_rate": 0.00024564335747995886, "loss": 1.8391, "step": 5684 }, { "epoch": 0.55517578125, "grad_norm": 0.19246253371238708, "learning_rate": 0.00024557285432613913, "loss": 1.8339, "step": 5685 }, { "epoch": 0.5552734375, "grad_norm": 0.21154919266700745, "learning_rate": 0.0002455023541118305, "loss": 1.8049, "step": 5686 }, { "epoch": 0.55537109375, "grad_norm": 0.1984611600637436, 
"learning_rate": 0.00024543185684407535, "loss": 1.8442, "step": 5687 }, { "epoch": 0.55546875, "grad_norm": 0.18544642627239227, "learning_rate": 0.00024536136252991574, "loss": 1.8456, "step": 5688 }, { "epoch": 0.55556640625, "grad_norm": 0.19832609593868256, "learning_rate": 0.00024529087117639356, "loss": 1.8242, "step": 5689 }, { "epoch": 0.5556640625, "grad_norm": 0.17086392641067505, "learning_rate": 0.00024522038279055, "loss": 1.8194, "step": 5690 }, { "epoch": 0.55576171875, "grad_norm": 0.20552946627140045, "learning_rate": 0.00024514989737942636, "loss": 1.8338, "step": 5691 }, { "epoch": 0.555859375, "grad_norm": 0.17606981098651886, "learning_rate": 0.00024507941495006367, "loss": 1.8282, "step": 5692 }, { "epoch": 0.55595703125, "grad_norm": 0.19542939960956573, "learning_rate": 0.0002450089355095023, "loss": 1.8211, "step": 5693 }, { "epoch": 0.5560546875, "grad_norm": 0.2150462120771408, "learning_rate": 0.00024493845906478254, "loss": 1.8638, "step": 5694 }, { "epoch": 0.55615234375, "grad_norm": 0.21000662446022034, "learning_rate": 0.00024486798562294444, "loss": 1.7942, "step": 5695 }, { "epoch": 0.55625, "grad_norm": 0.1968635618686676, "learning_rate": 0.0002447975151910276, "loss": 1.8046, "step": 5696 }, { "epoch": 0.55634765625, "grad_norm": 0.19422626495361328, "learning_rate": 0.0002447270477760714, "loss": 1.7975, "step": 5697 }, { "epoch": 0.5564453125, "grad_norm": 0.21265894174575806, "learning_rate": 0.00024465658338511495, "loss": 1.8269, "step": 5698 }, { "epoch": 0.55654296875, "grad_norm": 0.21565228700637817, "learning_rate": 0.000244586122025197, "loss": 1.8409, "step": 5699 }, { "epoch": 0.556640625, "grad_norm": 0.21961477398872375, "learning_rate": 0.00024451566370335605, "loss": 1.8221, "step": 5700 }, { "epoch": 0.55673828125, "grad_norm": 0.23264193534851074, "learning_rate": 0.0002444452084266302, "loss": 1.8078, "step": 5701 }, { "epoch": 0.5568359375, "grad_norm": 0.23000706732273102, "learning_rate": 0.00024437475620205735, "loss": 1.8193, "step": 5702 }, { "epoch": 0.55693359375, "grad_norm": 0.20064018666744232, "learning_rate": 0.00024430430703667504, "loss": 1.8204, "step": 5703 }, { "epoch": 0.55703125, "grad_norm": 0.2533198893070221, "learning_rate": 0.0002442338609375205, "loss": 1.8071, "step": 5704 }, { "epoch": 0.55712890625, "grad_norm": 0.18992823362350464, "learning_rate": 0.00024416341791163063, "loss": 1.8181, "step": 5705 }, { "epoch": 0.5572265625, "grad_norm": 0.19736839830875397, "learning_rate": 0.00024409297796604225, "loss": 1.8468, "step": 5706 }, { "epoch": 0.55732421875, "grad_norm": 0.22081637382507324, "learning_rate": 0.00024402254110779148, "loss": 1.81, "step": 5707 }, { "epoch": 0.557421875, "grad_norm": 0.1739356964826584, "learning_rate": 0.0002439521073439145, "loss": 1.8429, "step": 5708 }, { "epoch": 0.55751953125, "grad_norm": 0.22305378317832947, "learning_rate": 0.00024388167668144684, "loss": 1.7729, "step": 5709 }, { "epoch": 0.5576171875, "grad_norm": 0.17189301550388336, "learning_rate": 0.0002438112491274241, "loss": 1.8481, "step": 5710 }, { "epoch": 0.55771484375, "grad_norm": 0.19308514893054962, "learning_rate": 0.00024374082468888104, "loss": 1.8296, "step": 5711 }, { "epoch": 0.5578125, "grad_norm": 0.20334729552268982, "learning_rate": 0.00024367040337285284, "loss": 1.7512, "step": 5712 }, { "epoch": 0.55791015625, "grad_norm": 0.17574207484722137, "learning_rate": 0.00024359998518637383, "loss": 1.8463, "step": 5713 }, { "epoch": 0.5580078125, "grad_norm": 0.2026742696762085, 
"learning_rate": 0.00024352957013647802, "loss": 1.804, "step": 5714 }, { "epoch": 0.55810546875, "grad_norm": 0.17970959842205048, "learning_rate": 0.0002434591582301995, "loss": 1.8422, "step": 5715 }, { "epoch": 0.558203125, "grad_norm": 0.21098735928535461, "learning_rate": 0.00024338874947457147, "loss": 1.8473, "step": 5716 }, { "epoch": 0.55830078125, "grad_norm": 0.1758074164390564, "learning_rate": 0.00024331834387662744, "loss": 1.7975, "step": 5717 }, { "epoch": 0.5583984375, "grad_norm": 0.1915355622768402, "learning_rate": 0.0002432479414434, "loss": 1.8224, "step": 5718 }, { "epoch": 0.55849609375, "grad_norm": 0.16299821436405182, "learning_rate": 0.00024317754218192207, "loss": 1.8396, "step": 5719 }, { "epoch": 0.55859375, "grad_norm": 0.19891248643398285, "learning_rate": 0.00024310714609922568, "loss": 1.8234, "step": 5720 }, { "epoch": 0.55869140625, "grad_norm": 0.17823755741119385, "learning_rate": 0.00024303675320234297, "loss": 1.8284, "step": 5721 }, { "epoch": 0.5587890625, "grad_norm": 0.18366536498069763, "learning_rate": 0.00024296636349830535, "loss": 1.8385, "step": 5722 }, { "epoch": 0.55888671875, "grad_norm": 0.208879292011261, "learning_rate": 0.00024289597699414422, "loss": 1.8044, "step": 5723 }, { "epoch": 0.558984375, "grad_norm": 0.19340184330940247, "learning_rate": 0.00024282559369689067, "loss": 1.8443, "step": 5724 }, { "epoch": 0.55908203125, "grad_norm": 0.18234075605869293, "learning_rate": 0.00024275521361357506, "loss": 1.7926, "step": 5725 }, { "epoch": 0.5591796875, "grad_norm": 0.18023990094661713, "learning_rate": 0.00024268483675122816, "loss": 1.7938, "step": 5726 }, { "epoch": 0.55927734375, "grad_norm": 0.16844049096107483, "learning_rate": 0.0002426144631168797, "loss": 1.7979, "step": 5727 }, { "epoch": 0.559375, "grad_norm": 0.1664550006389618, "learning_rate": 0.00024254409271755946, "loss": 1.8028, "step": 5728 }, { "epoch": 0.55947265625, "grad_norm": 0.18516187369823456, "learning_rate": 0.00024247372556029684, "loss": 1.8328, "step": 5729 }, { "epoch": 0.5595703125, "grad_norm": 0.1614200621843338, "learning_rate": 0.0002424033616521209, "loss": 1.7865, "step": 5730 }, { "epoch": 0.55966796875, "grad_norm": 0.20548386871814728, "learning_rate": 0.0002423330010000604, "loss": 1.8078, "step": 5731 }, { "epoch": 0.559765625, "grad_norm": 0.19825580716133118, "learning_rate": 0.00024226264361114347, "loss": 1.8063, "step": 5732 }, { "epoch": 0.55986328125, "grad_norm": 0.1988549828529358, "learning_rate": 0.00024219228949239857, "loss": 1.7732, "step": 5733 }, { "epoch": 0.5599609375, "grad_norm": 0.21454189717769623, "learning_rate": 0.00024212193865085331, "loss": 1.8169, "step": 5734 }, { "epoch": 0.56005859375, "grad_norm": 0.19592006504535675, "learning_rate": 0.00024205159109353508, "loss": 1.8479, "step": 5735 }, { "epoch": 0.56015625, "grad_norm": 0.19390515983104706, "learning_rate": 0.00024198124682747103, "loss": 1.7987, "step": 5736 }, { "epoch": 0.56025390625, "grad_norm": 0.22296620905399323, "learning_rate": 0.00024191090585968795, "loss": 1.7989, "step": 5737 }, { "epoch": 0.5603515625, "grad_norm": 0.19524182379245758, "learning_rate": 0.00024184056819721213, "loss": 1.8049, "step": 5738 }, { "epoch": 0.56044921875, "grad_norm": 0.24143314361572266, "learning_rate": 0.00024177023384706976, "loss": 1.81, "step": 5739 }, { "epoch": 0.560546875, "grad_norm": 0.19502969086170197, "learning_rate": 0.00024169990281628674, "loss": 1.8267, "step": 5740 }, { "epoch": 0.56064453125, "grad_norm": 0.18794019520282745, 
"learning_rate": 0.00024162957511188833, "loss": 1.8053, "step": 5741 }, { "epoch": 0.5607421875, "grad_norm": 0.1919977366924286, "learning_rate": 0.00024155925074089985, "loss": 1.8174, "step": 5742 }, { "epoch": 0.56083984375, "grad_norm": 0.17232075333595276, "learning_rate": 0.00024148892971034586, "loss": 1.7414, "step": 5743 }, { "epoch": 0.5609375, "grad_norm": 0.17742431163787842, "learning_rate": 0.00024141861202725095, "loss": 1.8627, "step": 5744 }, { "epoch": 0.56103515625, "grad_norm": 0.1941133737564087, "learning_rate": 0.00024134829769863913, "loss": 1.786, "step": 5745 }, { "epoch": 0.5611328125, "grad_norm": 0.18315130472183228, "learning_rate": 0.0002412779867315343, "loss": 1.8181, "step": 5746 }, { "epoch": 0.56123046875, "grad_norm": 0.19053520262241364, "learning_rate": 0.0002412076791329598, "loss": 1.8089, "step": 5747 }, { "epoch": 0.561328125, "grad_norm": 0.18608012795448303, "learning_rate": 0.00024113737490993886, "loss": 1.8021, "step": 5748 }, { "epoch": 0.56142578125, "grad_norm": 0.1906263828277588, "learning_rate": 0.0002410670740694942, "loss": 1.8742, "step": 5749 }, { "epoch": 0.5615234375, "grad_norm": 0.18018518388271332, "learning_rate": 0.0002409967766186481, "loss": 1.8193, "step": 5750 }, { "epoch": 0.56162109375, "grad_norm": 0.18714922666549683, "learning_rate": 0.0002409264825644229, "loss": 1.8551, "step": 5751 }, { "epoch": 0.56171875, "grad_norm": 0.2062990665435791, "learning_rate": 0.0002408561919138401, "loss": 1.8358, "step": 5752 }, { "epoch": 0.56181640625, "grad_norm": 0.19944621622562408, "learning_rate": 0.00024078590467392126, "loss": 1.79, "step": 5753 }, { "epoch": 0.5619140625, "grad_norm": 0.24852348864078522, "learning_rate": 0.00024071562085168746, "loss": 1.8452, "step": 5754 }, { "epoch": 0.56201171875, "grad_norm": 0.1966351717710495, "learning_rate": 0.0002406453404541594, "loss": 1.8437, "step": 5755 }, { "epoch": 0.562109375, "grad_norm": 0.21869508922100067, "learning_rate": 0.00024057506348835756, "loss": 1.7797, "step": 5756 }, { "epoch": 0.56220703125, "grad_norm": 0.2181652933359146, "learning_rate": 0.00024050478996130183, "loss": 1.8295, "step": 5757 }, { "epoch": 0.5623046875, "grad_norm": 0.1720779836177826, "learning_rate": 0.00024043451988001196, "loss": 1.7724, "step": 5758 }, { "epoch": 0.56240234375, "grad_norm": 0.2307814657688141, "learning_rate": 0.0002403642532515074, "loss": 1.8202, "step": 5759 }, { "epoch": 0.5625, "grad_norm": 0.15321797132492065, "learning_rate": 0.00024029399008280696, "loss": 1.8091, "step": 5760 }, { "epoch": 0.56259765625, "grad_norm": 0.283995658159256, "learning_rate": 0.00024022373038092953, "loss": 1.8275, "step": 5761 }, { "epoch": 0.5626953125, "grad_norm": 0.18518809974193573, "learning_rate": 0.00024015347415289336, "loss": 1.8031, "step": 5762 }, { "epoch": 0.56279296875, "grad_norm": 0.23342187702655792, "learning_rate": 0.00024008322140571642, "loss": 1.7771, "step": 5763 }, { "epoch": 0.562890625, "grad_norm": 0.20206767320632935, "learning_rate": 0.00024001297214641632, "loss": 1.8494, "step": 5764 }, { "epoch": 0.56298828125, "grad_norm": 0.21536582708358765, "learning_rate": 0.00023994272638201036, "loss": 1.7981, "step": 5765 }, { "epoch": 0.5630859375, "grad_norm": 0.20568695664405823, "learning_rate": 0.0002398724841195154, "loss": 1.7603, "step": 5766 }, { "epoch": 0.56318359375, "grad_norm": 0.20718802511692047, "learning_rate": 0.00023980224536594802, "loss": 1.8304, "step": 5767 }, { "epoch": 0.56328125, "grad_norm": 0.19743190705776215, 
"learning_rate": 0.00023973201012832463, "loss": 1.8226, "step": 5768 }, { "epoch": 0.56337890625, "grad_norm": 0.24248574674129486, "learning_rate": 0.00023966177841366093, "loss": 1.8359, "step": 5769 }, { "epoch": 0.5634765625, "grad_norm": 0.19077816605567932, "learning_rate": 0.00023959155022897256, "loss": 1.8419, "step": 5770 }, { "epoch": 0.56357421875, "grad_norm": 0.24983060359954834, "learning_rate": 0.00023952132558127453, "loss": 1.7844, "step": 5771 }, { "epoch": 0.563671875, "grad_norm": 0.17045295238494873, "learning_rate": 0.00023945110447758184, "loss": 1.8418, "step": 5772 }, { "epoch": 0.56376953125, "grad_norm": 0.21654754877090454, "learning_rate": 0.00023938088692490884, "loss": 1.7959, "step": 5773 }, { "epoch": 0.5638671875, "grad_norm": 0.18142317235469818, "learning_rate": 0.0002393106729302696, "loss": 1.8108, "step": 5774 }, { "epoch": 0.56396484375, "grad_norm": 0.18149898946285248, "learning_rate": 0.00023924046250067805, "loss": 1.8039, "step": 5775 }, { "epoch": 0.5640625, "grad_norm": 0.16428892314434052, "learning_rate": 0.0002391702556431475, "loss": 1.8096, "step": 5776 }, { "epoch": 0.56416015625, "grad_norm": 0.2334410846233368, "learning_rate": 0.00023910005236469102, "loss": 1.828, "step": 5777 }, { "epoch": 0.5642578125, "grad_norm": 0.20832157135009766, "learning_rate": 0.00023902985267232118, "loss": 1.8301, "step": 5778 }, { "epoch": 0.56435546875, "grad_norm": 0.18299533426761627, "learning_rate": 0.00023895965657305047, "loss": 1.7888, "step": 5779 }, { "epoch": 0.564453125, "grad_norm": 0.20615266263484955, "learning_rate": 0.00023888946407389072, "loss": 1.8578, "step": 5780 }, { "epoch": 0.56455078125, "grad_norm": 0.21540112793445587, "learning_rate": 0.00023881927518185355, "loss": 1.8391, "step": 5781 }, { "epoch": 0.5646484375, "grad_norm": 0.22710773348808289, "learning_rate": 0.00023874908990395035, "loss": 1.7931, "step": 5782 }, { "epoch": 0.56474609375, "grad_norm": 0.2003847360610962, "learning_rate": 0.00023867890824719185, "loss": 1.785, "step": 5783 }, { "epoch": 0.56484375, "grad_norm": 0.20065125823020935, "learning_rate": 0.00023860873021858876, "loss": 1.8377, "step": 5784 }, { "epoch": 0.56494140625, "grad_norm": 0.23354925215244293, "learning_rate": 0.00023853855582515099, "loss": 1.8106, "step": 5785 }, { "epoch": 0.5650390625, "grad_norm": 0.2283407598733902, "learning_rate": 0.00023846838507388858, "loss": 1.8133, "step": 5786 }, { "epoch": 0.56513671875, "grad_norm": 0.1872320920228958, "learning_rate": 0.00023839821797181078, "loss": 1.8649, "step": 5787 }, { "epoch": 0.565234375, "grad_norm": 0.1979392170906067, "learning_rate": 0.00023832805452592671, "loss": 1.8017, "step": 5788 }, { "epoch": 0.56533203125, "grad_norm": 0.1842741072177887, "learning_rate": 0.00023825789474324517, "loss": 1.7922, "step": 5789 }, { "epoch": 0.5654296875, "grad_norm": 0.1707819402217865, "learning_rate": 0.0002381877386307744, "loss": 1.7923, "step": 5790 }, { "epoch": 0.56552734375, "grad_norm": 0.1861731857061386, "learning_rate": 0.00023811758619552253, "loss": 1.7853, "step": 5791 }, { "epoch": 0.565625, "grad_norm": 0.18668276071548462, "learning_rate": 0.0002380474374444969, "loss": 1.7662, "step": 5792 }, { "epoch": 0.56572265625, "grad_norm": 0.24352197349071503, "learning_rate": 0.00023797729238470505, "loss": 1.8305, "step": 5793 }, { "epoch": 0.5658203125, "grad_norm": 0.2274145632982254, "learning_rate": 0.00023790715102315358, "loss": 1.8105, "step": 5794 }, { "epoch": 0.56591796875, "grad_norm": 0.17751888930797577, 
"learning_rate": 0.0002378370133668491, "loss": 1.7844, "step": 5795 }, { "epoch": 0.566015625, "grad_norm": 0.2262359857559204, "learning_rate": 0.00023776687942279784, "loss": 1.8367, "step": 5796 }, { "epoch": 0.56611328125, "grad_norm": 0.18775540590286255, "learning_rate": 0.0002376967491980054, "loss": 1.7966, "step": 5797 }, { "epoch": 0.5662109375, "grad_norm": 0.2217329740524292, "learning_rate": 0.00023762662269947733, "loss": 1.7881, "step": 5798 }, { "epoch": 0.56630859375, "grad_norm": 0.2080443650484085, "learning_rate": 0.00023755649993421847, "loss": 1.8621, "step": 5799 }, { "epoch": 0.56640625, "grad_norm": 0.22089287638664246, "learning_rate": 0.00023748638090923357, "loss": 1.7738, "step": 5800 }, { "epoch": 0.56650390625, "grad_norm": 0.19419841468334198, "learning_rate": 0.00023741626563152684, "loss": 1.7843, "step": 5801 }, { "epoch": 0.5666015625, "grad_norm": 0.17229866981506348, "learning_rate": 0.00023734615410810216, "loss": 1.7844, "step": 5802 }, { "epoch": 0.56669921875, "grad_norm": 0.21409763395786285, "learning_rate": 0.00023727604634596322, "loss": 1.8353, "step": 5803 }, { "epoch": 0.566796875, "grad_norm": 0.18066783249378204, "learning_rate": 0.00023720594235211303, "loss": 1.7995, "step": 5804 }, { "epoch": 0.56689453125, "grad_norm": 0.24585050344467163, "learning_rate": 0.00023713584213355438, "loss": 1.8122, "step": 5805 }, { "epoch": 0.5669921875, "grad_norm": 0.19318366050720215, "learning_rate": 0.00023706574569728962, "loss": 1.8378, "step": 5806 }, { "epoch": 0.56708984375, "grad_norm": 0.1839967966079712, "learning_rate": 0.00023699565305032084, "loss": 1.7877, "step": 5807 }, { "epoch": 0.5671875, "grad_norm": 0.1931408941745758, "learning_rate": 0.0002369255641996495, "loss": 1.8032, "step": 5808 }, { "epoch": 0.56728515625, "grad_norm": 0.1600138396024704, "learning_rate": 0.0002368554791522771, "loss": 1.7912, "step": 5809 }, { "epoch": 0.5673828125, "grad_norm": 0.2113138735294342, "learning_rate": 0.00023678539791520444, "loss": 1.795, "step": 5810 }, { "epoch": 0.56748046875, "grad_norm": 0.17674539983272552, "learning_rate": 0.00023671532049543187, "loss": 1.7973, "step": 5811 }, { "epoch": 0.567578125, "grad_norm": 0.1920803189277649, "learning_rate": 0.0002366452468999597, "loss": 1.7893, "step": 5812 }, { "epoch": 0.56767578125, "grad_norm": 0.19549739360809326, "learning_rate": 0.00023657517713578753, "loss": 1.8511, "step": 5813 }, { "epoch": 0.5677734375, "grad_norm": 0.17303209006786346, "learning_rate": 0.0002365051112099148, "loss": 1.8339, "step": 5814 }, { "epoch": 0.56787109375, "grad_norm": 0.1878969967365265, "learning_rate": 0.00023643504912934033, "loss": 1.8186, "step": 5815 }, { "epoch": 0.56796875, "grad_norm": 0.15308107435703278, "learning_rate": 0.00023636499090106284, "loss": 1.8039, "step": 5816 }, { "epoch": 0.56806640625, "grad_norm": 0.1914999634027481, "learning_rate": 0.0002362949365320805, "loss": 1.736, "step": 5817 }, { "epoch": 0.5681640625, "grad_norm": 0.1814614236354828, "learning_rate": 0.00023622488602939113, "loss": 1.7941, "step": 5818 }, { "epoch": 0.56826171875, "grad_norm": 0.16493721306324005, "learning_rate": 0.00023615483939999217, "loss": 1.7825, "step": 5819 }, { "epoch": 0.568359375, "grad_norm": 0.20275363326072693, "learning_rate": 0.0002360847966508805, "loss": 1.8191, "step": 5820 }, { "epoch": 0.56845703125, "grad_norm": 0.20842303335666656, "learning_rate": 0.00023601475778905296, "loss": 1.8425, "step": 5821 }, { "epoch": 0.5685546875, "grad_norm": 0.20482796430587769, 
"learning_rate": 0.00023594472282150564, "loss": 1.8321, "step": 5822 }, { "epoch": 0.56865234375, "grad_norm": 0.18061845004558563, "learning_rate": 0.00023587469175523457, "loss": 1.8043, "step": 5823 }, { "epoch": 0.56875, "grad_norm": 0.22678276896476746, "learning_rate": 0.0002358046645972352, "loss": 1.8432, "step": 5824 }, { "epoch": 0.56884765625, "grad_norm": 0.17450343072414398, "learning_rate": 0.00023573464135450263, "loss": 1.8715, "step": 5825 }, { "epoch": 0.5689453125, "grad_norm": 0.18723644316196442, "learning_rate": 0.0002356646220340315, "loss": 1.809, "step": 5826 }, { "epoch": 0.56904296875, "grad_norm": 0.17098674178123474, "learning_rate": 0.00023559460664281613, "loss": 1.8366, "step": 5827 }, { "epoch": 0.569140625, "grad_norm": 0.17753233015537262, "learning_rate": 0.00023552459518785048, "loss": 1.8092, "step": 5828 }, { "epoch": 0.56923828125, "grad_norm": 0.1522631049156189, "learning_rate": 0.000235454587676128, "loss": 1.8266, "step": 5829 }, { "epoch": 0.5693359375, "grad_norm": 0.19509999454021454, "learning_rate": 0.00023538458411464188, "loss": 1.8025, "step": 5830 }, { "epoch": 0.56943359375, "grad_norm": 0.1844097077846527, "learning_rate": 0.00023531458451038484, "loss": 1.7896, "step": 5831 }, { "epoch": 0.56953125, "grad_norm": 0.18491721153259277, "learning_rate": 0.00023524458887034928, "loss": 1.8211, "step": 5832 }, { "epoch": 0.56962890625, "grad_norm": 0.19944505393505096, "learning_rate": 0.00023517459720152708, "loss": 1.7383, "step": 5833 }, { "epoch": 0.5697265625, "grad_norm": 0.1913967877626419, "learning_rate": 0.0002351046095109098, "loss": 1.8512, "step": 5834 }, { "epoch": 0.56982421875, "grad_norm": 0.22124047577381134, "learning_rate": 0.00023503462580548858, "loss": 1.815, "step": 5835 }, { "epoch": 0.569921875, "grad_norm": 0.21470440924167633, "learning_rate": 0.0002349646460922541, "loss": 1.8029, "step": 5836 }, { "epoch": 0.57001953125, "grad_norm": 0.24024231731891632, "learning_rate": 0.00023489467037819683, "loss": 1.7467, "step": 5837 }, { "epoch": 0.5701171875, "grad_norm": 0.22828757762908936, "learning_rate": 0.00023482469867030676, "loss": 1.7944, "step": 5838 }, { "epoch": 0.57021484375, "grad_norm": 0.24289603531360626, "learning_rate": 0.0002347547309755733, "loss": 1.8269, "step": 5839 }, { "epoch": 0.5703125, "grad_norm": 0.24885110557079315, "learning_rate": 0.00023468476730098575, "loss": 1.8184, "step": 5840 }, { "epoch": 0.57041015625, "grad_norm": 0.2282242327928543, "learning_rate": 0.00023461480765353277, "loss": 1.8193, "step": 5841 }, { "epoch": 0.5705078125, "grad_norm": 0.22720734775066376, "learning_rate": 0.0002345448520402027, "loss": 1.8278, "step": 5842 }, { "epoch": 0.57060546875, "grad_norm": 0.18812432885169983, "learning_rate": 0.00023447490046798347, "loss": 1.826, "step": 5843 }, { "epoch": 0.570703125, "grad_norm": 0.2212895303964615, "learning_rate": 0.00023440495294386272, "loss": 1.8028, "step": 5844 }, { "epoch": 0.57080078125, "grad_norm": 0.21255379915237427, "learning_rate": 0.0002343350094748275, "loss": 1.789, "step": 5845 }, { "epoch": 0.5708984375, "grad_norm": 0.21496912837028503, "learning_rate": 0.00023426507006786456, "loss": 1.8455, "step": 5846 }, { "epoch": 0.57099609375, "grad_norm": 0.24907967448234558, "learning_rate": 0.00023419513472996035, "loss": 1.8299, "step": 5847 }, { "epoch": 0.57109375, "grad_norm": 0.19135573506355286, "learning_rate": 0.00023412520346810056, "loss": 1.864, "step": 5848 }, { "epoch": 0.57119140625, "grad_norm": 0.2593901455402374, 
"learning_rate": 0.00023405527628927092, "loss": 1.7756, "step": 5849 }, { "epoch": 0.5712890625, "grad_norm": 0.21900144219398499, "learning_rate": 0.00023398535320045633, "loss": 1.8432, "step": 5850 }, { "epoch": 0.57138671875, "grad_norm": 0.23860612511634827, "learning_rate": 0.00023391543420864163, "loss": 1.7954, "step": 5851 }, { "epoch": 0.571484375, "grad_norm": 0.23289039731025696, "learning_rate": 0.00023384551932081118, "loss": 1.7943, "step": 5852 }, { "epoch": 0.57158203125, "grad_norm": 0.21105468273162842, "learning_rate": 0.00023377560854394863, "loss": 1.823, "step": 5853 }, { "epoch": 0.5716796875, "grad_norm": 0.22342118620872498, "learning_rate": 0.00023370570188503765, "loss": 1.8082, "step": 5854 }, { "epoch": 0.57177734375, "grad_norm": 0.21346870064735413, "learning_rate": 0.00023363579935106117, "loss": 1.7725, "step": 5855 }, { "epoch": 0.571875, "grad_norm": 0.20988278090953827, "learning_rate": 0.00023356590094900193, "loss": 1.7713, "step": 5856 }, { "epoch": 0.57197265625, "grad_norm": 0.21179601550102234, "learning_rate": 0.00023349600668584204, "loss": 1.8317, "step": 5857 }, { "epoch": 0.5720703125, "grad_norm": 0.21883586049079895, "learning_rate": 0.00023342611656856338, "loss": 1.8435, "step": 5858 }, { "epoch": 0.57216796875, "grad_norm": 0.17406393587589264, "learning_rate": 0.00023335623060414745, "loss": 1.8199, "step": 5859 }, { "epoch": 0.572265625, "grad_norm": 0.24157069623470306, "learning_rate": 0.0002332863487995751, "loss": 1.8292, "step": 5860 }, { "epoch": 0.57236328125, "grad_norm": 0.18727564811706543, "learning_rate": 0.00023321647116182704, "loss": 1.8033, "step": 5861 }, { "epoch": 0.5724609375, "grad_norm": 0.20670999586582184, "learning_rate": 0.0002331465976978832, "loss": 1.8311, "step": 5862 }, { "epoch": 0.57255859375, "grad_norm": 0.1828058809041977, "learning_rate": 0.00023307672841472361, "loss": 1.8008, "step": 5863 }, { "epoch": 0.57265625, "grad_norm": 0.19982615113258362, "learning_rate": 0.0002330068633193273, "loss": 1.8044, "step": 5864 }, { "epoch": 0.57275390625, "grad_norm": 0.18759532272815704, "learning_rate": 0.00023293700241867337, "loss": 1.805, "step": 5865 }, { "epoch": 0.5728515625, "grad_norm": 0.22639347612857819, "learning_rate": 0.00023286714571974028, "loss": 1.7705, "step": 5866 }, { "epoch": 0.57294921875, "grad_norm": 0.18828190863132477, "learning_rate": 0.00023279729322950605, "loss": 1.7804, "step": 5867 }, { "epoch": 0.573046875, "grad_norm": 0.2167462706565857, "learning_rate": 0.0002327274449549484, "loss": 1.8225, "step": 5868 }, { "epoch": 0.57314453125, "grad_norm": 0.19454501569271088, "learning_rate": 0.00023265760090304444, "loss": 1.7931, "step": 5869 }, { "epoch": 0.5732421875, "grad_norm": 0.1902778297662735, "learning_rate": 0.00023258776108077105, "loss": 1.8076, "step": 5870 }, { "epoch": 0.57333984375, "grad_norm": 0.22381719946861267, "learning_rate": 0.00023251792549510453, "loss": 1.7909, "step": 5871 }, { "epoch": 0.5734375, "grad_norm": 0.17360778152942657, "learning_rate": 0.00023244809415302092, "loss": 1.8091, "step": 5872 }, { "epoch": 0.57353515625, "grad_norm": 0.22898004949092865, "learning_rate": 0.00023237826706149585, "loss": 1.8141, "step": 5873 }, { "epoch": 0.5736328125, "grad_norm": 0.16430418193340302, "learning_rate": 0.00023230844422750415, "loss": 1.8335, "step": 5874 }, { "epoch": 0.57373046875, "grad_norm": 0.24011602997779846, "learning_rate": 0.0002322386256580208, "loss": 1.7898, "step": 5875 }, { "epoch": 0.573828125, "grad_norm": 0.17345397174358368, 
"learning_rate": 0.00023216881136001977, "loss": 1.7664, "step": 5876 }, { "epoch": 0.57392578125, "grad_norm": 0.24298913776874542, "learning_rate": 0.0002320990013404752, "loss": 1.7892, "step": 5877 }, { "epoch": 0.5740234375, "grad_norm": 0.22097083926200867, "learning_rate": 0.0002320291956063601, "loss": 1.8025, "step": 5878 }, { "epoch": 0.57412109375, "grad_norm": 0.17510278522968292, "learning_rate": 0.00023195939416464779, "loss": 1.7722, "step": 5879 }, { "epoch": 0.57421875, "grad_norm": 0.24010610580444336, "learning_rate": 0.00023188959702231078, "loss": 1.7976, "step": 5880 }, { "epoch": 0.57431640625, "grad_norm": 0.19529971480369568, "learning_rate": 0.000231819804186321, "loss": 1.8104, "step": 5881 }, { "epoch": 0.5744140625, "grad_norm": 0.23939085006713867, "learning_rate": 0.00023175001566365028, "loss": 1.7874, "step": 5882 }, { "epoch": 0.57451171875, "grad_norm": 0.20986172556877136, "learning_rate": 0.0002316802314612698, "loss": 1.8129, "step": 5883 }, { "epoch": 0.574609375, "grad_norm": 0.20623964071273804, "learning_rate": 0.0002316104515861505, "loss": 1.7942, "step": 5884 }, { "epoch": 0.57470703125, "grad_norm": 0.20922249555587769, "learning_rate": 0.00023154067604526257, "loss": 1.8184, "step": 5885 }, { "epoch": 0.5748046875, "grad_norm": 0.1990262269973755, "learning_rate": 0.00023147090484557616, "loss": 1.8364, "step": 5886 }, { "epoch": 0.57490234375, "grad_norm": 0.2150501012802124, "learning_rate": 0.00023140113799406076, "loss": 1.8288, "step": 5887 }, { "epoch": 0.575, "grad_norm": 0.18194036185741425, "learning_rate": 0.00023133137549768528, "loss": 1.8079, "step": 5888 }, { "epoch": 0.57509765625, "grad_norm": 0.17163975536823273, "learning_rate": 0.0002312616173634187, "loss": 1.788, "step": 5889 }, { "epoch": 0.5751953125, "grad_norm": 0.1753661334514618, "learning_rate": 0.00023119186359822896, "loss": 1.837, "step": 5890 }, { "epoch": 0.57529296875, "grad_norm": 0.18372245132923126, "learning_rate": 0.00023112211420908398, "loss": 1.8363, "step": 5891 }, { "epoch": 0.575390625, "grad_norm": 0.16989116370677948, "learning_rate": 0.00023105236920295096, "loss": 1.7838, "step": 5892 }, { "epoch": 0.57548828125, "grad_norm": 0.17176464200019836, "learning_rate": 0.00023098262858679703, "loss": 1.7661, "step": 5893 }, { "epoch": 0.5755859375, "grad_norm": 0.1610552966594696, "learning_rate": 0.0002309128923675885, "loss": 1.7517, "step": 5894 }, { "epoch": 0.57568359375, "grad_norm": 0.19519434869289398, "learning_rate": 0.00023084316055229149, "loss": 1.8143, "step": 5895 }, { "epoch": 0.57578125, "grad_norm": 0.16446132957935333, "learning_rate": 0.00023077343314787153, "loss": 1.7787, "step": 5896 }, { "epoch": 0.57587890625, "grad_norm": 0.19893570244312286, "learning_rate": 0.00023070371016129377, "loss": 1.8103, "step": 5897 }, { "epoch": 0.5759765625, "grad_norm": 0.16365966200828552, "learning_rate": 0.00023063399159952298, "loss": 1.8115, "step": 5898 }, { "epoch": 0.57607421875, "grad_norm": 0.18286655843257904, "learning_rate": 0.0002305642774695233, "loss": 1.8162, "step": 5899 }, { "epoch": 0.576171875, "grad_norm": 0.1785878688097, "learning_rate": 0.0002304945677782586, "loss": 1.7788, "step": 5900 }, { "epoch": 0.57626953125, "grad_norm": 0.17843903601169586, "learning_rate": 0.0002304248625326924, "loss": 1.8091, "step": 5901 }, { "epoch": 0.5763671875, "grad_norm": 0.2067364603281021, "learning_rate": 0.00023035516173978748, "loss": 1.7794, "step": 5902 }, { "epoch": 0.57646484375, "grad_norm": 0.18219676613807678, 
"learning_rate": 0.00023028546540650646, "loss": 1.7949, "step": 5903 }, { "epoch": 0.5765625, "grad_norm": 0.18007482588291168, "learning_rate": 0.00023021577353981126, "loss": 1.7838, "step": 5904 }, { "epoch": 0.57666015625, "grad_norm": 0.18739664554595947, "learning_rate": 0.0002301460861466636, "loss": 1.8007, "step": 5905 }, { "epoch": 0.5767578125, "grad_norm": 0.19373473525047302, "learning_rate": 0.0002300764032340244, "loss": 1.7936, "step": 5906 }, { "epoch": 0.57685546875, "grad_norm": 0.17329183220863342, "learning_rate": 0.00023000672480885458, "loss": 1.8256, "step": 5907 }, { "epoch": 0.576953125, "grad_norm": 0.17185525596141815, "learning_rate": 0.00022993705087811446, "loss": 1.7714, "step": 5908 }, { "epoch": 0.57705078125, "grad_norm": 0.19177356362342834, "learning_rate": 0.00022986738144876363, "loss": 1.8244, "step": 5909 }, { "epoch": 0.5771484375, "grad_norm": 0.19425243139266968, "learning_rate": 0.00022979771652776167, "loss": 1.8117, "step": 5910 }, { "epoch": 0.57724609375, "grad_norm": 0.21529611945152283, "learning_rate": 0.00022972805612206726, "loss": 1.8624, "step": 5911 }, { "epoch": 0.57734375, "grad_norm": 0.301800400018692, "learning_rate": 0.00022965840023863904, "loss": 1.806, "step": 5912 }, { "epoch": 0.57744140625, "grad_norm": 0.2577924430370331, "learning_rate": 0.00022958874888443482, "loss": 1.8253, "step": 5913 }, { "epoch": 0.5775390625, "grad_norm": 0.21474717557430267, "learning_rate": 0.00022951910206641242, "loss": 1.8261, "step": 5914 }, { "epoch": 0.57763671875, "grad_norm": 0.1987772434949875, "learning_rate": 0.00022944945979152875, "loss": 1.8172, "step": 5915 }, { "epoch": 0.577734375, "grad_norm": 0.18028530478477478, "learning_rate": 0.00022937982206674046, "loss": 1.802, "step": 5916 }, { "epoch": 0.57783203125, "grad_norm": 0.2041766345500946, "learning_rate": 0.0002293101888990039, "loss": 1.8419, "step": 5917 }, { "epoch": 0.5779296875, "grad_norm": 0.22396759688854218, "learning_rate": 0.00022924056029527458, "loss": 1.7519, "step": 5918 }, { "epoch": 0.57802734375, "grad_norm": 0.21696776151657104, "learning_rate": 0.00022917093626250796, "loss": 1.8326, "step": 5919 }, { "epoch": 0.578125, "grad_norm": 0.1985805779695511, "learning_rate": 0.00022910131680765872, "loss": 1.8115, "step": 5920 }, { "epoch": 0.57822265625, "grad_norm": 0.19348695874214172, "learning_rate": 0.00022903170193768136, "loss": 1.8109, "step": 5921 }, { "epoch": 0.5783203125, "grad_norm": 0.19444002211093903, "learning_rate": 0.0002289620916595298, "loss": 1.8108, "step": 5922 }, { "epoch": 0.57841796875, "grad_norm": 0.23987627029418945, "learning_rate": 0.00022889248598015734, "loss": 1.8053, "step": 5923 }, { "epoch": 0.578515625, "grad_norm": 0.1732168048620224, "learning_rate": 0.0002288228849065171, "loss": 1.83, "step": 5924 }, { "epoch": 0.57861328125, "grad_norm": 0.22279787063598633, "learning_rate": 0.00022875328844556154, "loss": 1.7885, "step": 5925 }, { "epoch": 0.5787109375, "grad_norm": 0.20709843933582306, "learning_rate": 0.00022868369660424267, "loss": 1.7972, "step": 5926 }, { "epoch": 0.57880859375, "grad_norm": 0.200025275349617, "learning_rate": 0.00022861410938951232, "loss": 1.7865, "step": 5927 }, { "epoch": 0.57890625, "grad_norm": 0.18898484110832214, "learning_rate": 0.00022854452680832145, "loss": 1.8002, "step": 5928 }, { "epoch": 0.57900390625, "grad_norm": 0.1793789118528366, "learning_rate": 0.00022847494886762076, "loss": 1.8085, "step": 5929 }, { "epoch": 0.5791015625, "grad_norm": 0.16998471319675446, 
"learning_rate": 0.00022840537557436059, "loss": 1.746, "step": 5930 }, { "epoch": 0.57919921875, "grad_norm": 0.1707158237695694, "learning_rate": 0.00022833580693549054, "loss": 1.82, "step": 5931 }, { "epoch": 0.579296875, "grad_norm": 0.17955337464809418, "learning_rate": 0.00022826624295796, "loss": 1.8359, "step": 5932 }, { "epoch": 0.57939453125, "grad_norm": 0.20279590785503387, "learning_rate": 0.0002281966836487177, "loss": 1.8033, "step": 5933 }, { "epoch": 0.5794921875, "grad_norm": 0.18756870925426483, "learning_rate": 0.00022812712901471211, "loss": 1.783, "step": 5934 }, { "epoch": 0.57958984375, "grad_norm": 0.17610830068588257, "learning_rate": 0.0002280575790628911, "loss": 1.822, "step": 5935 }, { "epoch": 0.5796875, "grad_norm": 0.18555815517902374, "learning_rate": 0.00022798803380020204, "loss": 1.7898, "step": 5936 }, { "epoch": 0.57978515625, "grad_norm": 0.15962721407413483, "learning_rate": 0.00022791849323359193, "loss": 1.7326, "step": 5937 }, { "epoch": 0.5798828125, "grad_norm": 0.18843944370746613, "learning_rate": 0.00022784895737000728, "loss": 1.7361, "step": 5938 }, { "epoch": 0.57998046875, "grad_norm": 0.1689312756061554, "learning_rate": 0.00022777942621639397, "loss": 1.8065, "step": 5939 }, { "epoch": 0.580078125, "grad_norm": 0.1914089471101761, "learning_rate": 0.00022770989977969765, "loss": 1.819, "step": 5940 }, { "epoch": 0.58017578125, "grad_norm": 0.17071323096752167, "learning_rate": 0.00022764037806686344, "loss": 1.7935, "step": 5941 }, { "epoch": 0.5802734375, "grad_norm": 0.184160515666008, "learning_rate": 0.00022757086108483594, "loss": 1.7965, "step": 5942 }, { "epoch": 0.58037109375, "grad_norm": 0.18327662348747253, "learning_rate": 0.00022750134884055918, "loss": 1.8204, "step": 5943 }, { "epoch": 0.58046875, "grad_norm": 0.19059066474437714, "learning_rate": 0.0002274318413409769, "loss": 1.801, "step": 5944 }, { "epoch": 0.58056640625, "grad_norm": 0.17623156309127808, "learning_rate": 0.00022736233859303224, "loss": 1.788, "step": 5945 }, { "epoch": 0.5806640625, "grad_norm": 0.2023933082818985, "learning_rate": 0.0002272928406036679, "loss": 1.752, "step": 5946 }, { "epoch": 0.58076171875, "grad_norm": 0.2118426412343979, "learning_rate": 0.00022722334737982608, "loss": 1.8366, "step": 5947 }, { "epoch": 0.580859375, "grad_norm": 0.187327042222023, "learning_rate": 0.00022715385892844864, "loss": 1.8065, "step": 5948 }, { "epoch": 0.58095703125, "grad_norm": 0.2185956835746765, "learning_rate": 0.00022708437525647686, "loss": 1.8512, "step": 5949 }, { "epoch": 0.5810546875, "grad_norm": 0.23365098237991333, "learning_rate": 0.00022701489637085142, "loss": 1.8213, "step": 5950 }, { "epoch": 0.58115234375, "grad_norm": 0.17716625332832336, "learning_rate": 0.00022694542227851285, "loss": 1.837, "step": 5951 }, { "epoch": 0.58125, "grad_norm": 0.19819201529026031, "learning_rate": 0.00022687595298640084, "loss": 1.8058, "step": 5952 }, { "epoch": 0.58134765625, "grad_norm": 0.21341556310653687, "learning_rate": 0.0002268064885014547, "loss": 1.8027, "step": 5953 }, { "epoch": 0.5814453125, "grad_norm": 0.1808188110589981, "learning_rate": 0.00022673702883061337, "loss": 1.7992, "step": 5954 }, { "epoch": 0.58154296875, "grad_norm": 0.2339494228363037, "learning_rate": 0.00022666757398081533, "loss": 1.8144, "step": 5955 }, { "epoch": 0.581640625, "grad_norm": 0.24668490886688232, "learning_rate": 0.00022659812395899853, "loss": 1.8251, "step": 5956 }, { "epoch": 0.58173828125, "grad_norm": 0.1760711967945099, "learning_rate": 
0.0002265286787721003, "loss": 1.8229, "step": 5957 }, { "epoch": 0.5818359375, "grad_norm": 0.22810928523540497, "learning_rate": 0.00022645923842705767, "loss": 1.7876, "step": 5958 }, { "epoch": 0.58193359375, "grad_norm": 0.1946016550064087, "learning_rate": 0.00022638980293080706, "loss": 1.7285, "step": 5959 }, { "epoch": 0.58203125, "grad_norm": 0.215434730052948, "learning_rate": 0.00022632037229028458, "loss": 1.7619, "step": 5960 }, { "epoch": 0.58212890625, "grad_norm": 0.2115330845117569, "learning_rate": 0.00022625094651242545, "loss": 1.8002, "step": 5961 }, { "epoch": 0.5822265625, "grad_norm": 0.19616055488586426, "learning_rate": 0.00022618152560416506, "loss": 1.8037, "step": 5962 }, { "epoch": 0.58232421875, "grad_norm": 0.21775726974010468, "learning_rate": 0.00022611210957243773, "loss": 1.8191, "step": 5963 }, { "epoch": 0.582421875, "grad_norm": 0.23273715376853943, "learning_rate": 0.00022604269842417746, "loss": 1.8329, "step": 5964 }, { "epoch": 0.58251953125, "grad_norm": 0.20015306770801544, "learning_rate": 0.00022597329216631802, "loss": 1.7884, "step": 5965 }, { "epoch": 0.5826171875, "grad_norm": 0.2166900485754013, "learning_rate": 0.00022590389080579226, "loss": 1.8675, "step": 5966 }, { "epoch": 0.58271484375, "grad_norm": 0.19326549768447876, "learning_rate": 0.00022583449434953297, "loss": 1.8094, "step": 5967 }, { "epoch": 0.5828125, "grad_norm": 0.2037588208913803, "learning_rate": 0.000225765102804472, "loss": 1.7768, "step": 5968 }, { "epoch": 0.58291015625, "grad_norm": 0.21830052137374878, "learning_rate": 0.00022569571617754109, "loss": 1.8351, "step": 5969 }, { "epoch": 0.5830078125, "grad_norm": 0.18288429081439972, "learning_rate": 0.00022562633447567137, "loss": 1.8439, "step": 5970 }, { "epoch": 0.58310546875, "grad_norm": 0.20786455273628235, "learning_rate": 0.00022555695770579343, "loss": 1.8372, "step": 5971 }, { "epoch": 0.583203125, "grad_norm": 0.22064544260501862, "learning_rate": 0.00022548758587483743, "loss": 1.8244, "step": 5972 }, { "epoch": 0.58330078125, "grad_norm": 0.19834762811660767, "learning_rate": 0.0002254182189897329, "loss": 1.7861, "step": 5973 }, { "epoch": 0.5833984375, "grad_norm": 0.2608279585838318, "learning_rate": 0.0002253488570574091, "loss": 1.783, "step": 5974 }, { "epoch": 0.58349609375, "grad_norm": 0.2500442564487457, "learning_rate": 0.00022527950008479448, "loss": 1.8389, "step": 5975 }, { "epoch": 0.58359375, "grad_norm": 0.1744433492422104, "learning_rate": 0.0002252101480788174, "loss": 1.7786, "step": 5976 }, { "epoch": 0.58369140625, "grad_norm": 0.22279968857765198, "learning_rate": 0.00022514080104640545, "loss": 1.7925, "step": 5977 }, { "epoch": 0.5837890625, "grad_norm": 0.16777446866035461, "learning_rate": 0.00022507145899448573, "loss": 1.8034, "step": 5978 }, { "epoch": 0.58388671875, "grad_norm": 0.20873762667179108, "learning_rate": 0.00022500212192998504, "loss": 1.803, "step": 5979 }, { "epoch": 0.583984375, "grad_norm": 0.1901940107345581, "learning_rate": 0.0002249327898598293, "loss": 1.7905, "step": 5980 }, { "epoch": 0.58408203125, "grad_norm": 0.2246997058391571, "learning_rate": 0.00022486346279094438, "loss": 1.7931, "step": 5981 }, { "epoch": 0.5841796875, "grad_norm": 0.1967909038066864, "learning_rate": 0.00022479414073025522, "loss": 1.802, "step": 5982 }, { "epoch": 0.58427734375, "grad_norm": 0.22402171790599823, "learning_rate": 0.00022472482368468666, "loss": 1.835, "step": 5983 }, { "epoch": 0.584375, "grad_norm": 0.17172573506832123, "learning_rate": 
0.00022465551166116294, "loss": 1.8402, "step": 5984 }, { "epoch": 0.58447265625, "grad_norm": 0.24722321331501007, "learning_rate": 0.0002245862046666075, "loss": 1.7867, "step": 5985 }, { "epoch": 0.5845703125, "grad_norm": 0.16767068207263947, "learning_rate": 0.00022451690270794362, "loss": 1.8119, "step": 5986 }, { "epoch": 0.58466796875, "grad_norm": 0.23585526645183563, "learning_rate": 0.00022444760579209388, "loss": 1.8405, "step": 5987 }, { "epoch": 0.584765625, "grad_norm": 0.16253513097763062, "learning_rate": 0.00022437831392598052, "loss": 1.7839, "step": 5988 }, { "epoch": 0.58486328125, "grad_norm": 0.20177872478961945, "learning_rate": 0.00022430902711652502, "loss": 1.8031, "step": 5989 }, { "epoch": 0.5849609375, "grad_norm": 0.17927582561969757, "learning_rate": 0.00022423974537064874, "loss": 1.8231, "step": 5990 }, { "epoch": 0.58505859375, "grad_norm": 0.20338888466358185, "learning_rate": 0.0002241704686952722, "loss": 1.8378, "step": 5991 }, { "epoch": 0.58515625, "grad_norm": 0.17618761956691742, "learning_rate": 0.00022410119709731548, "loss": 1.8546, "step": 5992 }, { "epoch": 0.58525390625, "grad_norm": 0.20496289432048798, "learning_rate": 0.0002240319305836983, "loss": 1.7878, "step": 5993 }, { "epoch": 0.5853515625, "grad_norm": 0.1795557141304016, "learning_rate": 0.00022396266916133967, "loss": 1.8441, "step": 5994 }, { "epoch": 0.58544921875, "grad_norm": 0.1833583116531372, "learning_rate": 0.0002238934128371583, "loss": 1.7609, "step": 5995 }, { "epoch": 0.585546875, "grad_norm": 0.19359761476516724, "learning_rate": 0.00022382416161807213, "loss": 1.8109, "step": 5996 }, { "epoch": 0.58564453125, "grad_norm": 0.1706235408782959, "learning_rate": 0.00022375491551099885, "loss": 1.7968, "step": 5997 }, { "epoch": 0.5857421875, "grad_norm": 0.19470283389091492, "learning_rate": 0.0002236856745228556, "loss": 1.8444, "step": 5998 }, { "epoch": 0.58583984375, "grad_norm": 0.17885595560073853, "learning_rate": 0.0002236164386605588, "loss": 1.8308, "step": 5999 }, { "epoch": 0.5859375, "grad_norm": 0.2032635360956192, "learning_rate": 0.00022354720793102463, "loss": 1.8297, "step": 6000 }, { "epoch": 0.58603515625, "grad_norm": 0.19302742183208466, "learning_rate": 0.00022347798234116845, "loss": 1.7838, "step": 6001 }, { "epoch": 0.5861328125, "grad_norm": 0.1792043000459671, "learning_rate": 0.0002234087618979055, "loss": 1.8175, "step": 6002 }, { "epoch": 0.58623046875, "grad_norm": 0.1926499903202057, "learning_rate": 0.0002233395466081501, "loss": 1.8069, "step": 6003 }, { "epoch": 0.586328125, "grad_norm": 0.20764052867889404, "learning_rate": 0.0002232703364788163, "loss": 1.7872, "step": 6004 }, { "epoch": 0.58642578125, "grad_norm": 0.1965053826570511, "learning_rate": 0.0002232011315168177, "loss": 1.8227, "step": 6005 }, { "epoch": 0.5865234375, "grad_norm": 0.2014039158821106, "learning_rate": 0.00022313193172906708, "loss": 1.7757, "step": 6006 }, { "epoch": 0.58662109375, "grad_norm": 0.1969563364982605, "learning_rate": 0.00022306273712247704, "loss": 1.811, "step": 6007 }, { "epoch": 0.58671875, "grad_norm": 0.19397762417793274, "learning_rate": 0.0002229935477039594, "loss": 1.8039, "step": 6008 }, { "epoch": 0.58681640625, "grad_norm": 0.2287880927324295, "learning_rate": 0.00022292436348042566, "loss": 1.8175, "step": 6009 }, { "epoch": 0.5869140625, "grad_norm": 0.21714447438716888, "learning_rate": 0.0002228551844587865, "loss": 1.7546, "step": 6010 }, { "epoch": 0.58701171875, "grad_norm": 0.17961035668849945, "learning_rate": 
0.00022278601064595262, "loss": 1.7789, "step": 6011 }, { "epoch": 0.587109375, "grad_norm": 0.22085663676261902, "learning_rate": 0.00022271684204883363, "loss": 1.7988, "step": 6012 }, { "epoch": 0.58720703125, "grad_norm": 0.18990541994571686, "learning_rate": 0.00022264767867433895, "loss": 1.7883, "step": 6013 }, { "epoch": 0.5873046875, "grad_norm": 0.21019993722438812, "learning_rate": 0.00022257852052937744, "loss": 1.7951, "step": 6014 }, { "epoch": 0.58740234375, "grad_norm": 0.23121781647205353, "learning_rate": 0.00022250936762085717, "loss": 1.8254, "step": 6015 }, { "epoch": 0.5875, "grad_norm": 0.20345181226730347, "learning_rate": 0.0002224402199556862, "loss": 1.7909, "step": 6016 }, { "epoch": 0.58759765625, "grad_norm": 0.19238971173763275, "learning_rate": 0.00022237107754077145, "loss": 1.8088, "step": 6017 }, { "epoch": 0.5876953125, "grad_norm": 0.20436668395996094, "learning_rate": 0.00022230194038301988, "loss": 1.7687, "step": 6018 }, { "epoch": 0.58779296875, "grad_norm": 0.17639438807964325, "learning_rate": 0.0002222328084893377, "loss": 1.7983, "step": 6019 }, { "epoch": 0.587890625, "grad_norm": 0.20139825344085693, "learning_rate": 0.00022216368186663032, "loss": 1.8115, "step": 6020 }, { "epoch": 0.58798828125, "grad_norm": 0.17256253957748413, "learning_rate": 0.00022209456052180314, "loss": 1.769, "step": 6021 }, { "epoch": 0.5880859375, "grad_norm": 0.20930175483226776, "learning_rate": 0.0002220254444617607, "loss": 1.8475, "step": 6022 }, { "epoch": 0.58818359375, "grad_norm": 0.2093883454799652, "learning_rate": 0.000221956333693407, "loss": 1.7944, "step": 6023 }, { "epoch": 0.58828125, "grad_norm": 0.20417183637619019, "learning_rate": 0.00022188722822364553, "loss": 1.7618, "step": 6024 }, { "epoch": 0.58837890625, "grad_norm": 0.24197806417942047, "learning_rate": 0.00022181812805937952, "loss": 1.7803, "step": 6025 }, { "epoch": 0.5884765625, "grad_norm": 0.17471839487552643, "learning_rate": 0.00022174903320751137, "loss": 1.7747, "step": 6026 }, { "epoch": 0.58857421875, "grad_norm": 0.22639308869838715, "learning_rate": 0.00022167994367494303, "loss": 1.8172, "step": 6027 }, { "epoch": 0.588671875, "grad_norm": 0.17517556250095367, "learning_rate": 0.00022161085946857602, "loss": 1.7871, "step": 6028 }, { "epoch": 0.58876953125, "grad_norm": 0.23104669153690338, "learning_rate": 0.00022154178059531106, "loss": 1.811, "step": 6029 }, { "epoch": 0.5888671875, "grad_norm": 0.21483208239078522, "learning_rate": 0.0002214727070620487, "loss": 1.7755, "step": 6030 }, { "epoch": 0.58896484375, "grad_norm": 0.2366401106119156, "learning_rate": 0.00022140363887568855, "loss": 1.7889, "step": 6031 }, { "epoch": 0.5890625, "grad_norm": 0.19270631670951843, "learning_rate": 0.00022133457604313017, "loss": 1.8242, "step": 6032 }, { "epoch": 0.58916015625, "grad_norm": 0.2212655395269394, "learning_rate": 0.00022126551857127224, "loss": 1.7908, "step": 6033 }, { "epoch": 0.5892578125, "grad_norm": 0.18300123512744904, "learning_rate": 0.00022119646646701286, "loss": 1.7958, "step": 6034 }, { "epoch": 0.58935546875, "grad_norm": 0.1906541883945465, "learning_rate": 0.00022112741973724992, "loss": 1.8123, "step": 6035 }, { "epoch": 0.589453125, "grad_norm": 0.19313155114650726, "learning_rate": 0.00022105837838888042, "loss": 1.808, "step": 6036 }, { "epoch": 0.58955078125, "grad_norm": 0.20650918781757355, "learning_rate": 0.00022098934242880108, "loss": 1.8405, "step": 6037 }, { "epoch": 0.5896484375, "grad_norm": 0.23968300223350525, "learning_rate": 
0.00022092031186390787, "loss": 1.7833, "step": 6038 }, { "epoch": 0.58974609375, "grad_norm": 0.18253521621227264, "learning_rate": 0.00022085128670109645, "loss": 1.7856, "step": 6039 }, { "epoch": 0.58984375, "grad_norm": 0.20907902717590332, "learning_rate": 0.00022078226694726178, "loss": 1.7983, "step": 6040 }, { "epoch": 0.58994140625, "grad_norm": 0.22630053758621216, "learning_rate": 0.00022071325260929832, "loss": 1.8146, "step": 6041 }, { "epoch": 0.5900390625, "grad_norm": 0.2044190913438797, "learning_rate": 0.0002206442436941, "loss": 1.7683, "step": 6042 }, { "epoch": 0.59013671875, "grad_norm": 0.23442262411117554, "learning_rate": 0.00022057524020856014, "loss": 1.7931, "step": 6043 }, { "epoch": 0.590234375, "grad_norm": 0.20358428359031677, "learning_rate": 0.0002205062421595716, "loss": 1.825, "step": 6044 }, { "epoch": 0.59033203125, "grad_norm": 0.23162676393985748, "learning_rate": 0.00022043724955402678, "loss": 1.8149, "step": 6045 }, { "epoch": 0.5904296875, "grad_norm": 0.2306784987449646, "learning_rate": 0.0002203682623988173, "loss": 1.8062, "step": 6046 }, { "epoch": 0.59052734375, "grad_norm": 0.2284695953130722, "learning_rate": 0.00022029928070083443, "loss": 1.8037, "step": 6047 }, { "epoch": 0.590625, "grad_norm": 0.22685149312019348, "learning_rate": 0.00022023030446696878, "loss": 1.7926, "step": 6048 }, { "epoch": 0.59072265625, "grad_norm": 0.21958404779434204, "learning_rate": 0.00022016133370411056, "loss": 1.8412, "step": 6049 }, { "epoch": 0.5908203125, "grad_norm": 0.21019254624843597, "learning_rate": 0.00022009236841914927, "loss": 1.8257, "step": 6050 }, { "epoch": 0.59091796875, "grad_norm": 0.21856051683425903, "learning_rate": 0.0002200234086189738, "loss": 1.7974, "step": 6051 }, { "epoch": 0.591015625, "grad_norm": 0.21175473928451538, "learning_rate": 0.00021995445431047288, "loss": 1.7976, "step": 6052 }, { "epoch": 0.59111328125, "grad_norm": 0.2119484692811966, "learning_rate": 0.00021988550550053432, "loss": 1.7706, "step": 6053 }, { "epoch": 0.5912109375, "grad_norm": 0.19626151025295258, "learning_rate": 0.0002198165621960455, "loss": 1.8337, "step": 6054 }, { "epoch": 0.59130859375, "grad_norm": 0.1788095235824585, "learning_rate": 0.00021974762440389324, "loss": 1.8279, "step": 6055 }, { "epoch": 0.59140625, "grad_norm": 0.18501365184783936, "learning_rate": 0.0002196786921309638, "loss": 1.8195, "step": 6056 }, { "epoch": 0.59150390625, "grad_norm": 0.20309464633464813, "learning_rate": 0.00021960976538414284, "loss": 1.8414, "step": 6057 }, { "epoch": 0.5916015625, "grad_norm": 0.20530590415000916, "learning_rate": 0.0002195408441703156, "loss": 1.8047, "step": 6058 }, { "epoch": 0.59169921875, "grad_norm": 0.20639538764953613, "learning_rate": 0.00021947192849636681, "loss": 1.7993, "step": 6059 }, { "epoch": 0.591796875, "grad_norm": 0.2447599619626999, "learning_rate": 0.00021940301836918035, "loss": 1.8334, "step": 6060 }, { "epoch": 0.59189453125, "grad_norm": 0.2148212045431137, "learning_rate": 0.0002193341137956399, "loss": 1.7669, "step": 6061 }, { "epoch": 0.5919921875, "grad_norm": 0.23096372187137604, "learning_rate": 0.00021926521478262824, "loss": 1.8126, "step": 6062 }, { "epoch": 0.59208984375, "grad_norm": 0.2068783938884735, "learning_rate": 0.00021919632133702793, "loss": 1.8149, "step": 6063 }, { "epoch": 0.5921875, "grad_norm": 0.21563322842121124, "learning_rate": 0.00021912743346572068, "loss": 1.8106, "step": 6064 }, { "epoch": 0.59228515625, "grad_norm": 0.19701358675956726, "learning_rate": 
0.00021905855117558776, "loss": 1.8045, "step": 6065 }, { "epoch": 0.5923828125, "grad_norm": 0.18591518700122833, "learning_rate": 0.00021898967447351014, "loss": 1.7808, "step": 6066 }, { "epoch": 0.59248046875, "grad_norm": 0.2422100156545639, "learning_rate": 0.00021892080336636776, "loss": 1.8409, "step": 6067 }, { "epoch": 0.592578125, "grad_norm": 0.19388064742088318, "learning_rate": 0.00021885193786104034, "loss": 1.7842, "step": 6068 }, { "epoch": 0.59267578125, "grad_norm": 0.2088778167963028, "learning_rate": 0.00021878307796440689, "loss": 1.7931, "step": 6069 }, { "epoch": 0.5927734375, "grad_norm": 0.21715819835662842, "learning_rate": 0.00021871422368334586, "loss": 1.8623, "step": 6070 }, { "epoch": 0.59287109375, "grad_norm": 0.17884254455566406, "learning_rate": 0.0002186453750247353, "loss": 1.8346, "step": 6071 }, { "epoch": 0.59296875, "grad_norm": 0.17933261394500732, "learning_rate": 0.0002185765319954524, "loss": 1.7936, "step": 6072 }, { "epoch": 0.59306640625, "grad_norm": 0.19158947467803955, "learning_rate": 0.00021850769460237424, "loss": 1.8363, "step": 6073 }, { "epoch": 0.5931640625, "grad_norm": 0.17418980598449707, "learning_rate": 0.00021843886285237684, "loss": 1.7742, "step": 6074 }, { "epoch": 0.59326171875, "grad_norm": 0.1857522577047348, "learning_rate": 0.00021837003675233602, "loss": 1.7661, "step": 6075 }, { "epoch": 0.593359375, "grad_norm": 0.18093770742416382, "learning_rate": 0.00021830121630912682, "loss": 1.7937, "step": 6076 }, { "epoch": 0.59345703125, "grad_norm": 0.16597363352775574, "learning_rate": 0.00021823240152962377, "loss": 1.8237, "step": 6077 }, { "epoch": 0.5935546875, "grad_norm": 0.17338544130325317, "learning_rate": 0.00021816359242070088, "loss": 1.7878, "step": 6078 }, { "epoch": 0.59365234375, "grad_norm": 0.19970352947711945, "learning_rate": 0.0002180947889892315, "loss": 1.8319, "step": 6079 }, { "epoch": 0.59375, "grad_norm": 0.18089355528354645, "learning_rate": 0.0002180259912420887, "loss": 1.8593, "step": 6080 }, { "epoch": 0.59384765625, "grad_norm": 0.22402983903884888, "learning_rate": 0.00021795719918614453, "loss": 1.8518, "step": 6081 }, { "epoch": 0.5939453125, "grad_norm": 0.1666041910648346, "learning_rate": 0.00021788841282827093, "loss": 1.8535, "step": 6082 }, { "epoch": 0.59404296875, "grad_norm": 0.21207848191261292, "learning_rate": 0.00021781963217533883, "loss": 1.8683, "step": 6083 }, { "epoch": 0.594140625, "grad_norm": 0.17553874850273132, "learning_rate": 0.00021775085723421895, "loss": 1.8202, "step": 6084 }, { "epoch": 0.59423828125, "grad_norm": 0.1913154572248459, "learning_rate": 0.00021768208801178123, "loss": 1.8066, "step": 6085 }, { "epoch": 0.5943359375, "grad_norm": 0.19232282042503357, "learning_rate": 0.00021761332451489506, "loss": 1.7937, "step": 6086 }, { "epoch": 0.59443359375, "grad_norm": 0.18931885063648224, "learning_rate": 0.0002175445667504295, "loss": 1.7946, "step": 6087 }, { "epoch": 0.59453125, "grad_norm": 0.1824049949645996, "learning_rate": 0.00021747581472525264, "loss": 1.8079, "step": 6088 }, { "epoch": 0.59462890625, "grad_norm": 0.1940474957227707, "learning_rate": 0.00021740706844623233, "loss": 1.8687, "step": 6089 }, { "epoch": 0.5947265625, "grad_norm": 0.1801449954509735, "learning_rate": 0.00021733832792023556, "loss": 1.8014, "step": 6090 }, { "epoch": 0.59482421875, "grad_norm": 0.1910218745470047, "learning_rate": 0.00021726959315412908, "loss": 1.8089, "step": 6091 }, { "epoch": 0.594921875, "grad_norm": 0.1901610642671585, "learning_rate": 
0.00021720086415477873, "loss": 1.811, "step": 6092 }, { "epoch": 0.59501953125, "grad_norm": 0.19574207067489624, "learning_rate": 0.00021713214092904999, "loss": 1.8198, "step": 6093 }, { "epoch": 0.5951171875, "grad_norm": 0.21282432973384857, "learning_rate": 0.00021706342348380776, "loss": 1.7843, "step": 6094 }, { "epoch": 0.59521484375, "grad_norm": 0.1983288675546646, "learning_rate": 0.00021699471182591625, "loss": 1.7782, "step": 6095 }, { "epoch": 0.5953125, "grad_norm": 0.20402789115905762, "learning_rate": 0.00021692600596223916, "loss": 1.8106, "step": 6096 }, { "epoch": 0.59541015625, "grad_norm": 0.2298576682806015, "learning_rate": 0.0002168573058996396, "loss": 1.8096, "step": 6097 }, { "epoch": 0.5955078125, "grad_norm": 0.19262485206127167, "learning_rate": 0.00021678861164498004, "loss": 1.7872, "step": 6098 }, { "epoch": 0.59560546875, "grad_norm": 0.23409678041934967, "learning_rate": 0.00021671992320512247, "loss": 1.809, "step": 6099 }, { "epoch": 0.595703125, "grad_norm": 0.18759672343730927, "learning_rate": 0.0002166512405869282, "loss": 1.8106, "step": 6100 }, { "epoch": 0.59580078125, "grad_norm": 0.23655839264392853, "learning_rate": 0.00021658256379725817, "loss": 1.8456, "step": 6101 }, { "epoch": 0.5958984375, "grad_norm": 0.1923137754201889, "learning_rate": 0.0002165138928429724, "loss": 1.7662, "step": 6102 }, { "epoch": 0.59599609375, "grad_norm": 0.1997900903224945, "learning_rate": 0.00021644522773093066, "loss": 1.8666, "step": 6103 }, { "epoch": 0.59609375, "grad_norm": 0.1940336674451828, "learning_rate": 0.00021637656846799192, "loss": 1.8298, "step": 6104 }, { "epoch": 0.59619140625, "grad_norm": 0.2312546819448471, "learning_rate": 0.00021630791506101465, "loss": 1.8026, "step": 6105 }, { "epoch": 0.5962890625, "grad_norm": 0.1886315941810608, "learning_rate": 0.0002162392675168567, "loss": 1.8113, "step": 6106 }, { "epoch": 0.59638671875, "grad_norm": 0.20764055848121643, "learning_rate": 0.00021617062584237522, "loss": 1.8112, "step": 6107 }, { "epoch": 0.596484375, "grad_norm": 0.22300314903259277, "learning_rate": 0.00021610199004442715, "loss": 1.826, "step": 6108 }, { "epoch": 0.59658203125, "grad_norm": 0.21806038916110992, "learning_rate": 0.00021603336012986847, "loss": 1.8216, "step": 6109 }, { "epoch": 0.5966796875, "grad_norm": 0.24024689197540283, "learning_rate": 0.0002159647361055548, "loss": 1.8278, "step": 6110 }, { "epoch": 0.59677734375, "grad_norm": 0.19355463981628418, "learning_rate": 0.00021589611797834086, "loss": 1.7777, "step": 6111 }, { "epoch": 0.596875, "grad_norm": 0.25856268405914307, "learning_rate": 0.00021582750575508113, "loss": 1.7916, "step": 6112 }, { "epoch": 0.59697265625, "grad_norm": 0.19265173375606537, "learning_rate": 0.00021575889944262943, "loss": 1.8187, "step": 6113 }, { "epoch": 0.5970703125, "grad_norm": 0.25137102603912354, "learning_rate": 0.00021569029904783876, "loss": 1.7592, "step": 6114 }, { "epoch": 0.59716796875, "grad_norm": 0.1922004371881485, "learning_rate": 0.00021562170457756186, "loss": 1.7992, "step": 6115 }, { "epoch": 0.597265625, "grad_norm": 0.22714975476264954, "learning_rate": 0.00021555311603865053, "loss": 1.8132, "step": 6116 }, { "epoch": 0.59736328125, "grad_norm": 0.1898004412651062, "learning_rate": 0.00021548453343795638, "loss": 1.8619, "step": 6117 }, { "epoch": 0.5974609375, "grad_norm": 0.19873394072055817, "learning_rate": 0.00021541595678233, "loss": 1.8025, "step": 6118 }, { "epoch": 0.59755859375, "grad_norm": 0.18881534039974213, "learning_rate": 
0.0002153473860786217, "loss": 1.848, "step": 6119 }, { "epoch": 0.59765625, "grad_norm": 0.18700185418128967, "learning_rate": 0.00021527882133368103, "loss": 1.798, "step": 6120 }, { "epoch": 0.59775390625, "grad_norm": 0.2004774510860443, "learning_rate": 0.000215210262554357, "loss": 1.8104, "step": 6121 }, { "epoch": 0.5978515625, "grad_norm": 0.163076251745224, "learning_rate": 0.00021514170974749812, "loss": 1.822, "step": 6122 }, { "epoch": 0.59794921875, "grad_norm": 0.18918396532535553, "learning_rate": 0.0002150731629199521, "loss": 1.7442, "step": 6123 }, { "epoch": 0.598046875, "grad_norm": 0.15878728032112122, "learning_rate": 0.00021500462207856623, "loss": 1.7844, "step": 6124 }, { "epoch": 0.59814453125, "grad_norm": 0.17013801634311676, "learning_rate": 0.0002149360872301871, "loss": 1.7894, "step": 6125 }, { "epoch": 0.5982421875, "grad_norm": 0.1773090958595276, "learning_rate": 0.00021486755838166078, "loss": 1.7861, "step": 6126 }, { "epoch": 0.59833984375, "grad_norm": 0.16531570255756378, "learning_rate": 0.00021479903553983255, "loss": 1.7883, "step": 6127 }, { "epoch": 0.5984375, "grad_norm": 0.18113350868225098, "learning_rate": 0.00021473051871154732, "loss": 1.8027, "step": 6128 }, { "epoch": 0.59853515625, "grad_norm": 0.1998959481716156, "learning_rate": 0.00021466200790364943, "loss": 1.8131, "step": 6129 }, { "epoch": 0.5986328125, "grad_norm": 0.19699302315711975, "learning_rate": 0.00021459350312298236, "loss": 1.7933, "step": 6130 }, { "epoch": 0.59873046875, "grad_norm": 0.23520085215568542, "learning_rate": 0.00021452500437638922, "loss": 1.8138, "step": 6131 }, { "epoch": 0.598828125, "grad_norm": 0.2062321901321411, "learning_rate": 0.00021445651167071228, "loss": 1.8175, "step": 6132 }, { "epoch": 0.59892578125, "grad_norm": 0.2537485957145691, "learning_rate": 0.00021438802501279358, "loss": 1.8388, "step": 6133 }, { "epoch": 0.5990234375, "grad_norm": 0.2082526832818985, "learning_rate": 0.00021431954440947416, "loss": 1.7959, "step": 6134 }, { "epoch": 0.59912109375, "grad_norm": 0.23269964754581451, "learning_rate": 0.00021425106986759459, "loss": 1.7628, "step": 6135 }, { "epoch": 0.59921875, "grad_norm": 0.21268583834171295, "learning_rate": 0.00021418260139399509, "loss": 1.8105, "step": 6136 }, { "epoch": 0.59931640625, "grad_norm": 0.258386492729187, "learning_rate": 0.0002141141389955148, "loss": 1.8187, "step": 6137 }, { "epoch": 0.5994140625, "grad_norm": 0.19568558037281036, "learning_rate": 0.0002140456826789927, "loss": 1.819, "step": 6138 }, { "epoch": 0.59951171875, "grad_norm": 0.2289549708366394, "learning_rate": 0.00021397723245126688, "loss": 1.803, "step": 6139 }, { "epoch": 0.599609375, "grad_norm": 0.2308306246995926, "learning_rate": 0.00021390878831917492, "loss": 1.7939, "step": 6140 }, { "epoch": 0.59970703125, "grad_norm": 0.20380614697933197, "learning_rate": 0.0002138403502895538, "loss": 1.7922, "step": 6141 }, { "epoch": 0.5998046875, "grad_norm": 0.23161107301712036, "learning_rate": 0.00021377191836923976, "loss": 1.7821, "step": 6142 }, { "epoch": 0.59990234375, "grad_norm": 0.19782742857933044, "learning_rate": 0.00021370349256506882, "loss": 1.8412, "step": 6143 }, { "epoch": 0.6, "grad_norm": 0.2351178228855133, "learning_rate": 0.0002136350728838758, "loss": 1.784, "step": 6144 }, { "epoch": 0.60009765625, "grad_norm": 0.22225898504257202, "learning_rate": 0.00021356665933249541, "loss": 1.8154, "step": 6145 }, { "epoch": 0.6001953125, "grad_norm": 0.17293980717658997, "learning_rate": 0.00021349825191776145, 
"loss": 1.8177, "step": 6146 }, { "epoch": 0.60029296875, "grad_norm": 0.2289429008960724, "learning_rate": 0.0002134298506465074, "loss": 1.8065, "step": 6147 }, { "epoch": 0.600390625, "grad_norm": 0.1761815845966339, "learning_rate": 0.0002133614555255657, "loss": 1.7977, "step": 6148 }, { "epoch": 0.60048828125, "grad_norm": 0.22960148751735687, "learning_rate": 0.00021329306656176845, "loss": 1.7184, "step": 6149 }, { "epoch": 0.6005859375, "grad_norm": 0.17355045676231384, "learning_rate": 0.00021322468376194732, "loss": 1.8288, "step": 6150 }, { "epoch": 0.60068359375, "grad_norm": 0.17345373332500458, "learning_rate": 0.00021315630713293293, "loss": 1.7954, "step": 6151 }, { "epoch": 0.60078125, "grad_norm": 0.182003915309906, "learning_rate": 0.00021308793668155563, "loss": 1.8075, "step": 6152 }, { "epoch": 0.60087890625, "grad_norm": 0.1681591272354126, "learning_rate": 0.00021301957241464487, "loss": 1.833, "step": 6153 }, { "epoch": 0.6009765625, "grad_norm": 0.18944664299488068, "learning_rate": 0.0002129512143390298, "loss": 1.8127, "step": 6154 }, { "epoch": 0.60107421875, "grad_norm": 0.16395343840122223, "learning_rate": 0.0002128828624615387, "loss": 1.7997, "step": 6155 }, { "epoch": 0.601171875, "grad_norm": 0.19553793966770172, "learning_rate": 0.00021281451678899926, "loss": 1.7859, "step": 6156 }, { "epoch": 0.60126953125, "grad_norm": 0.17499969899654388, "learning_rate": 0.0002127461773282388, "loss": 1.8443, "step": 6157 }, { "epoch": 0.6013671875, "grad_norm": 0.18928645551204681, "learning_rate": 0.00021267784408608365, "loss": 1.7826, "step": 6158 }, { "epoch": 0.60146484375, "grad_norm": 0.20224414765834808, "learning_rate": 0.00021260951706935976, "loss": 1.8021, "step": 6159 }, { "epoch": 0.6015625, "grad_norm": 0.2315026819705963, "learning_rate": 0.00021254119628489238, "loss": 1.8057, "step": 6160 }, { "epoch": 0.60166015625, "grad_norm": 0.1945047676563263, "learning_rate": 0.00021247288173950618, "loss": 1.7968, "step": 6161 }, { "epoch": 0.6017578125, "grad_norm": 0.25634056329727173, "learning_rate": 0.000212404573440025, "loss": 1.8152, "step": 6162 }, { "epoch": 0.60185546875, "grad_norm": 0.18256628513336182, "learning_rate": 0.00021233627139327255, "loss": 1.829, "step": 6163 }, { "epoch": 0.601953125, "grad_norm": 0.23352459073066711, "learning_rate": 0.00021226797560607147, "loss": 1.8647, "step": 6164 }, { "epoch": 0.60205078125, "grad_norm": 0.19065333902835846, "learning_rate": 0.00021219968608524376, "loss": 1.8029, "step": 6165 }, { "epoch": 0.6021484375, "grad_norm": 0.21321852505207062, "learning_rate": 0.00021213140283761112, "loss": 1.781, "step": 6166 }, { "epoch": 0.60224609375, "grad_norm": 0.17412959039211273, "learning_rate": 0.00021206312586999432, "loss": 1.7912, "step": 6167 }, { "epoch": 0.60234375, "grad_norm": 0.18121132254600525, "learning_rate": 0.00021199485518921373, "loss": 1.809, "step": 6168 }, { "epoch": 0.60244140625, "grad_norm": 0.17271718382835388, "learning_rate": 0.00021192659080208881, "loss": 1.7936, "step": 6169 }, { "epoch": 0.6025390625, "grad_norm": 0.20449566841125488, "learning_rate": 0.0002118583327154388, "loss": 1.753, "step": 6170 }, { "epoch": 0.60263671875, "grad_norm": 0.17554304003715515, "learning_rate": 0.00021179008093608198, "loss": 1.8024, "step": 6171 }, { "epoch": 0.602734375, "grad_norm": 0.24413961172103882, "learning_rate": 0.00021172183547083607, "loss": 1.8099, "step": 6172 }, { "epoch": 0.60283203125, "grad_norm": 0.15495261549949646, "learning_rate": 0.0002116535963265182, "loss": 
1.7946, "step": 6173 }, { "epoch": 0.6029296875, "grad_norm": 0.19747887551784515, "learning_rate": 0.00021158536350994483, "loss": 1.7887, "step": 6174 }, { "epoch": 0.60302734375, "grad_norm": 0.16191768646240234, "learning_rate": 0.0002115171370279319, "loss": 1.7913, "step": 6175 }, { "epoch": 0.603125, "grad_norm": 0.17747072875499725, "learning_rate": 0.00021144891688729445, "loss": 1.797, "step": 6176 }, { "epoch": 0.60322265625, "grad_norm": 0.1931808739900589, "learning_rate": 0.00021138070309484732, "loss": 1.8052, "step": 6177 }, { "epoch": 0.6033203125, "grad_norm": 0.17066913843154907, "learning_rate": 0.00021131249565740424, "loss": 1.789, "step": 6178 }, { "epoch": 0.60341796875, "grad_norm": 0.19153670966625214, "learning_rate": 0.00021124429458177867, "loss": 1.7916, "step": 6179 }, { "epoch": 0.603515625, "grad_norm": 0.17722178995609283, "learning_rate": 0.00021117609987478326, "loss": 1.8409, "step": 6180 }, { "epoch": 0.60361328125, "grad_norm": 0.18929599225521088, "learning_rate": 0.00021110791154323007, "loss": 1.7921, "step": 6181 }, { "epoch": 0.6037109375, "grad_norm": 0.17693963646888733, "learning_rate": 0.00021103972959393046, "loss": 1.759, "step": 6182 }, { "epoch": 0.60380859375, "grad_norm": 0.18351872265338898, "learning_rate": 0.00021097155403369513, "loss": 1.824, "step": 6183 }, { "epoch": 0.60390625, "grad_norm": 0.2246830314397812, "learning_rate": 0.00021090338486933442, "loss": 1.8349, "step": 6184 }, { "epoch": 0.60400390625, "grad_norm": 0.1977761685848236, "learning_rate": 0.00021083522210765766, "loss": 1.8087, "step": 6185 }, { "epoch": 0.6041015625, "grad_norm": 0.24857626855373383, "learning_rate": 0.00021076706575547384, "loss": 1.796, "step": 6186 }, { "epoch": 0.60419921875, "grad_norm": 0.2145289033651352, "learning_rate": 0.000210698915819591, "loss": 1.7655, "step": 6187 }, { "epoch": 0.604296875, "grad_norm": 0.248208686709404, "learning_rate": 0.00021063077230681687, "loss": 1.7826, "step": 6188 }, { "epoch": 0.60439453125, "grad_norm": 0.20095187425613403, "learning_rate": 0.0002105626352239583, "loss": 1.7768, "step": 6189 }, { "epoch": 0.6044921875, "grad_norm": 0.2546161413192749, "learning_rate": 0.0002104945045778216, "loss": 1.8018, "step": 6190 }, { "epoch": 0.60458984375, "grad_norm": 0.23642940819263458, "learning_rate": 0.00021042638037521244, "loss": 1.8317, "step": 6191 }, { "epoch": 0.6046875, "grad_norm": 0.22463467717170715, "learning_rate": 0.00021035826262293583, "loss": 1.8296, "step": 6192 }, { "epoch": 0.60478515625, "grad_norm": 0.19615551829338074, "learning_rate": 0.00021029015132779615, "loss": 1.8081, "step": 6193 }, { "epoch": 0.6048828125, "grad_norm": 0.19459615647792816, "learning_rate": 0.00021022204649659705, "loss": 1.8235, "step": 6194 }, { "epoch": 0.60498046875, "grad_norm": 0.1959541141986847, "learning_rate": 0.00021015394813614157, "loss": 1.8081, "step": 6195 }, { "epoch": 0.605078125, "grad_norm": 0.17641213536262512, "learning_rate": 0.00021008585625323224, "loss": 1.8406, "step": 6196 }, { "epoch": 0.60517578125, "grad_norm": 0.18726864457130432, "learning_rate": 0.00021001777085467066, "loss": 1.7954, "step": 6197 }, { "epoch": 0.6052734375, "grad_norm": 0.18067772686481476, "learning_rate": 0.00020994969194725822, "loss": 1.8242, "step": 6198 }, { "epoch": 0.60537109375, "grad_norm": 0.2052745819091797, "learning_rate": 0.00020988161953779523, "loss": 1.824, "step": 6199 }, { "epoch": 0.60546875, "grad_norm": 0.1806965321302414, "learning_rate": 0.0002098135536330815, "loss": 1.7809, 
"step": 6200 }, { "epoch": 0.60556640625, "grad_norm": 0.167053684592247, "learning_rate": 0.00020974549423991633, "loss": 1.7913, "step": 6201 }, { "epoch": 0.6056640625, "grad_norm": 0.17809627950191498, "learning_rate": 0.0002096774413650982, "loss": 1.8048, "step": 6202 }, { "epoch": 0.60576171875, "grad_norm": 0.16934284567832947, "learning_rate": 0.00020960939501542492, "loss": 1.8468, "step": 6203 }, { "epoch": 0.605859375, "grad_norm": 0.1873352825641632, "learning_rate": 0.00020954135519769373, "loss": 1.8365, "step": 6204 }, { "epoch": 0.60595703125, "grad_norm": 0.17160330712795258, "learning_rate": 0.00020947332191870134, "loss": 1.7918, "step": 6205 }, { "epoch": 0.6060546875, "grad_norm": 0.17128941416740417, "learning_rate": 0.00020940529518524348, "loss": 1.7953, "step": 6206 }, { "epoch": 0.60615234375, "grad_norm": 0.20491628348827362, "learning_rate": 0.0002093372750041156, "loss": 1.8162, "step": 6207 }, { "epoch": 0.60625, "grad_norm": 0.18326154351234436, "learning_rate": 0.00020926926138211215, "loss": 1.7831, "step": 6208 }, { "epoch": 0.60634765625, "grad_norm": 0.18223023414611816, "learning_rate": 0.00020920125432602726, "loss": 1.8257, "step": 6209 }, { "epoch": 0.6064453125, "grad_norm": 0.17580585181713104, "learning_rate": 0.00020913325384265408, "loss": 1.8658, "step": 6210 }, { "epoch": 0.60654296875, "grad_norm": 0.1916073113679886, "learning_rate": 0.0002090652599387853, "loss": 1.7768, "step": 6211 }, { "epoch": 0.606640625, "grad_norm": 0.1854121834039688, "learning_rate": 0.00020899727262121292, "loss": 1.8178, "step": 6212 }, { "epoch": 0.60673828125, "grad_norm": 0.21547947824001312, "learning_rate": 0.00020892929189672826, "loss": 1.8084, "step": 6213 }, { "epoch": 0.6068359375, "grad_norm": 0.1767367273569107, "learning_rate": 0.00020886131777212208, "loss": 1.7852, "step": 6214 }, { "epoch": 0.60693359375, "grad_norm": 0.2141261100769043, "learning_rate": 0.00020879335025418428, "loss": 1.8261, "step": 6215 }, { "epoch": 0.60703125, "grad_norm": 0.19064612686634064, "learning_rate": 0.0002087253893497043, "loss": 1.7815, "step": 6216 }, { "epoch": 0.60712890625, "grad_norm": 0.1841682344675064, "learning_rate": 0.0002086574350654707, "loss": 1.7905, "step": 6217 }, { "epoch": 0.6072265625, "grad_norm": 0.19844800233840942, "learning_rate": 0.0002085894874082716, "loss": 1.7922, "step": 6218 }, { "epoch": 0.60732421875, "grad_norm": 0.19095765054225922, "learning_rate": 0.0002085215463848944, "loss": 1.8011, "step": 6219 }, { "epoch": 0.607421875, "grad_norm": 0.1884656548500061, "learning_rate": 0.0002084536120021257, "loss": 1.8046, "step": 6220 }, { "epoch": 0.60751953125, "grad_norm": 0.19417649507522583, "learning_rate": 0.00020838568426675174, "loss": 1.8128, "step": 6221 }, { "epoch": 0.6076171875, "grad_norm": 0.19642497599124908, "learning_rate": 0.00020831776318555774, "loss": 1.8176, "step": 6222 }, { "epoch": 0.60771484375, "grad_norm": 0.1605791449546814, "learning_rate": 0.00020824984876532842, "loss": 1.8, "step": 6223 }, { "epoch": 0.6078125, "grad_norm": 0.20104624330997467, "learning_rate": 0.00020818194101284782, "loss": 1.763, "step": 6224 }, { "epoch": 0.60791015625, "grad_norm": 0.18333274126052856, "learning_rate": 0.00020811403993489935, "loss": 1.8129, "step": 6225 }, { "epoch": 0.6080078125, "grad_norm": 0.19909921288490295, "learning_rate": 0.00020804614553826577, "loss": 1.7972, "step": 6226 }, { "epoch": 0.60810546875, "grad_norm": 0.17691384255886078, "learning_rate": 0.00020797825782972913, "loss": 1.8027, "step": 
6227 }, { "epoch": 0.608203125, "grad_norm": 0.17926186323165894, "learning_rate": 0.00020791037681607073, "loss": 1.8237, "step": 6228 }, { "epoch": 0.60830078125, "grad_norm": 0.20887471735477448, "learning_rate": 0.00020784250250407133, "loss": 1.7907, "step": 6229 }, { "epoch": 0.6083984375, "grad_norm": 0.19959476590156555, "learning_rate": 0.00020777463490051102, "loss": 1.8082, "step": 6230 }, { "epoch": 0.60849609375, "grad_norm": 0.1845712810754776, "learning_rate": 0.00020770677401216908, "loss": 1.8227, "step": 6231 }, { "epoch": 0.60859375, "grad_norm": 0.1933058202266693, "learning_rate": 0.00020763891984582416, "loss": 1.7894, "step": 6232 }, { "epoch": 0.60869140625, "grad_norm": 0.18825393915176392, "learning_rate": 0.0002075710724082545, "loss": 1.7926, "step": 6233 }, { "epoch": 0.6087890625, "grad_norm": 0.20845210552215576, "learning_rate": 0.00020750323170623725, "loss": 1.7914, "step": 6234 }, { "epoch": 0.60888671875, "grad_norm": 0.17356601357460022, "learning_rate": 0.00020743539774654936, "loss": 1.834, "step": 6235 }, { "epoch": 0.608984375, "grad_norm": 0.2333955317735672, "learning_rate": 0.0002073675705359665, "loss": 1.812, "step": 6236 }, { "epoch": 0.60908203125, "grad_norm": 0.183224618434906, "learning_rate": 0.00020729975008126423, "loss": 1.7869, "step": 6237 }, { "epoch": 0.6091796875, "grad_norm": 0.22780759632587433, "learning_rate": 0.00020723193638921716, "loss": 1.8127, "step": 6238 }, { "epoch": 0.60927734375, "grad_norm": 0.18937449157238007, "learning_rate": 0.0002071641294665992, "loss": 1.8307, "step": 6239 }, { "epoch": 0.609375, "grad_norm": 0.240401953458786, "learning_rate": 0.00020709632932018385, "loss": 1.8049, "step": 6240 }, { "epoch": 0.60947265625, "grad_norm": 0.19335883855819702, "learning_rate": 0.00020702853595674353, "loss": 1.7938, "step": 6241 }, { "epoch": 0.6095703125, "grad_norm": 0.20582155883312225, "learning_rate": 0.00020696074938305036, "loss": 1.7854, "step": 6242 }, { "epoch": 0.60966796875, "grad_norm": 0.19017338752746582, "learning_rate": 0.0002068929696058755, "loss": 1.7997, "step": 6243 }, { "epoch": 0.609765625, "grad_norm": 0.168400377035141, "learning_rate": 0.00020682519663198974, "loss": 1.8081, "step": 6244 }, { "epoch": 0.60986328125, "grad_norm": 0.18282701075077057, "learning_rate": 0.0002067574304681627, "loss": 1.7998, "step": 6245 }, { "epoch": 0.6099609375, "grad_norm": 0.19018396735191345, "learning_rate": 0.00020668967112116377, "loss": 1.798, "step": 6246 }, { "epoch": 0.61005859375, "grad_norm": 0.17108431458473206, "learning_rate": 0.00020662191859776164, "loss": 1.815, "step": 6247 }, { "epoch": 0.61015625, "grad_norm": 0.1723288744688034, "learning_rate": 0.000206554172904724, "loss": 1.7932, "step": 6248 }, { "epoch": 0.61025390625, "grad_norm": 0.18096712231636047, "learning_rate": 0.00020648643404881818, "loss": 1.7718, "step": 6249 }, { "epoch": 0.6103515625, "grad_norm": 0.20423375070095062, "learning_rate": 0.00020641870203681056, "loss": 1.8432, "step": 6250 }, { "epoch": 0.61044921875, "grad_norm": 0.16024626791477203, "learning_rate": 0.00020635097687546705, "loss": 1.7992, "step": 6251 }, { "epoch": 0.610546875, "grad_norm": 0.19178245961666107, "learning_rate": 0.00020628325857155274, "loss": 1.8296, "step": 6252 }, { "epoch": 0.61064453125, "grad_norm": 0.1621384471654892, "learning_rate": 0.0002062155471318321, "loss": 1.7952, "step": 6253 }, { "epoch": 0.6107421875, "grad_norm": 0.19084779918193817, "learning_rate": 0.00020614784256306901, "loss": 1.7674, "step": 6254 }, { 
"epoch": 0.61083984375, "grad_norm": 0.17353181540966034, "learning_rate": 0.0002060801448720264, "loss": 1.8076, "step": 6255 }, { "epoch": 0.6109375, "grad_norm": 0.19326524436473846, "learning_rate": 0.00020601245406546686, "loss": 1.7972, "step": 6256 }, { "epoch": 0.61103515625, "grad_norm": 0.18858490884304047, "learning_rate": 0.00020594477015015188, "loss": 1.8043, "step": 6257 }, { "epoch": 0.6111328125, "grad_norm": 0.16944360733032227, "learning_rate": 0.00020587709313284265, "loss": 1.7977, "step": 6258 }, { "epoch": 0.61123046875, "grad_norm": 0.18798664212226868, "learning_rate": 0.00020580942302029937, "loss": 1.7831, "step": 6259 }, { "epoch": 0.611328125, "grad_norm": 0.16385523974895477, "learning_rate": 0.00020574175981928177, "loss": 1.7829, "step": 6260 }, { "epoch": 0.61142578125, "grad_norm": 0.1891658902168274, "learning_rate": 0.0002056741035365488, "loss": 1.7719, "step": 6261 }, { "epoch": 0.6115234375, "grad_norm": 0.16720306873321533, "learning_rate": 0.00020560645417885872, "loss": 1.8287, "step": 6262 }, { "epoch": 0.61162109375, "grad_norm": 0.20390987396240234, "learning_rate": 0.0002055388117529692, "loss": 1.8544, "step": 6263 }, { "epoch": 0.61171875, "grad_norm": 0.18311315774917603, "learning_rate": 0.0002054711762656369, "loss": 1.8221, "step": 6264 }, { "epoch": 0.61181640625, "grad_norm": 0.1740432232618332, "learning_rate": 0.00020540354772361814, "loss": 1.7887, "step": 6265 }, { "epoch": 0.6119140625, "grad_norm": 0.17014051973819733, "learning_rate": 0.00020533592613366836, "loss": 1.7707, "step": 6266 }, { "epoch": 0.61201171875, "grad_norm": 0.17353543639183044, "learning_rate": 0.0002052683115025424, "loss": 1.8133, "step": 6267 }, { "epoch": 0.612109375, "grad_norm": 0.17532706260681152, "learning_rate": 0.0002052007038369944, "loss": 1.7996, "step": 6268 }, { "epoch": 0.61220703125, "grad_norm": 0.1846437156200409, "learning_rate": 0.0002051331031437777, "loss": 1.8399, "step": 6269 }, { "epoch": 0.6123046875, "grad_norm": 0.1890735924243927, "learning_rate": 0.00020506550942964502, "loss": 1.755, "step": 6270 }, { "epoch": 0.61240234375, "grad_norm": 0.1725645661354065, "learning_rate": 0.00020499792270134838, "loss": 1.8526, "step": 6271 }, { "epoch": 0.6125, "grad_norm": 0.18746374547481537, "learning_rate": 0.00020493034296563918, "loss": 1.806, "step": 6272 }, { "epoch": 0.61259765625, "grad_norm": 0.20547369122505188, "learning_rate": 0.00020486277022926786, "loss": 1.8089, "step": 6273 }, { "epoch": 0.6126953125, "grad_norm": 0.16755247116088867, "learning_rate": 0.00020479520449898442, "loss": 1.7793, "step": 6274 }, { "epoch": 0.61279296875, "grad_norm": 0.21247141063213348, "learning_rate": 0.0002047276457815382, "loss": 1.8136, "step": 6275 }, { "epoch": 0.612890625, "grad_norm": 0.16563211381435394, "learning_rate": 0.00020466009408367746, "loss": 1.846, "step": 6276 }, { "epoch": 0.61298828125, "grad_norm": 0.19040432572364807, "learning_rate": 0.00020459254941215032, "loss": 1.805, "step": 6277 }, { "epoch": 0.6130859375, "grad_norm": 0.17477843165397644, "learning_rate": 0.00020452501177370365, "loss": 1.7841, "step": 6278 }, { "epoch": 0.61318359375, "grad_norm": 0.19525235891342163, "learning_rate": 0.000204457481175084, "loss": 1.8383, "step": 6279 }, { "epoch": 0.61328125, "grad_norm": 0.16099274158477783, "learning_rate": 0.00020438995762303696, "loss": 1.8275, "step": 6280 }, { "epoch": 0.61337890625, "grad_norm": 0.17238663136959076, "learning_rate": 0.00020432244112430765, "loss": 1.8259, "step": 6281 }, { "epoch": 
0.6134765625, "grad_norm": 0.17719082534313202, "learning_rate": 0.00020425493168564035, "loss": 1.7592, "step": 6282 }, { "epoch": 0.61357421875, "grad_norm": 0.1643250435590744, "learning_rate": 0.00020418742931377853, "loss": 1.8317, "step": 6283 }, { "epoch": 0.613671875, "grad_norm": 0.19490227103233337, "learning_rate": 0.00020411993401546531, "loss": 1.7808, "step": 6284 }, { "epoch": 0.61376953125, "grad_norm": 0.17311878502368927, "learning_rate": 0.0002040524457974427, "loss": 1.8101, "step": 6285 }, { "epoch": 0.6138671875, "grad_norm": 0.1899399608373642, "learning_rate": 0.00020398496466645223, "loss": 1.8288, "step": 6286 }, { "epoch": 0.61396484375, "grad_norm": 0.19409404695034027, "learning_rate": 0.00020391749062923453, "loss": 1.8314, "step": 6287 }, { "epoch": 0.6140625, "grad_norm": 0.21136577427387238, "learning_rate": 0.00020385002369252987, "loss": 1.8196, "step": 6288 }, { "epoch": 0.61416015625, "grad_norm": 0.20495185256004333, "learning_rate": 0.00020378256386307752, "loss": 1.8039, "step": 6289 }, { "epoch": 0.6142578125, "grad_norm": 0.22145520150661469, "learning_rate": 0.0002037151111476161, "loss": 1.8509, "step": 6290 }, { "epoch": 0.61435546875, "grad_norm": 0.22394141554832458, "learning_rate": 0.0002036476655528836, "loss": 1.7905, "step": 6291 }, { "epoch": 0.614453125, "grad_norm": 0.2093815952539444, "learning_rate": 0.00020358022708561713, "loss": 1.7776, "step": 6292 }, { "epoch": 0.61455078125, "grad_norm": 0.19375024735927582, "learning_rate": 0.00020351279575255333, "loss": 1.7736, "step": 6293 }, { "epoch": 0.6146484375, "grad_norm": 0.2177070826292038, "learning_rate": 0.00020344537156042776, "loss": 1.8194, "step": 6294 }, { "epoch": 0.61474609375, "grad_norm": 0.17821069061756134, "learning_rate": 0.00020337795451597573, "loss": 1.8201, "step": 6295 }, { "epoch": 0.61484375, "grad_norm": 0.2232263833284378, "learning_rate": 0.00020331054462593157, "loss": 1.8006, "step": 6296 }, { "epoch": 0.61494140625, "grad_norm": 0.1792532056570053, "learning_rate": 0.00020324314189702885, "loss": 1.7865, "step": 6297 }, { "epoch": 0.6150390625, "grad_norm": 0.23977532982826233, "learning_rate": 0.00020317574633600062, "loss": 1.7715, "step": 6298 }, { "epoch": 0.61513671875, "grad_norm": 0.218372642993927, "learning_rate": 0.00020310835794957893, "loss": 1.7826, "step": 6299 }, { "epoch": 0.615234375, "grad_norm": 0.2344183474779129, "learning_rate": 0.00020304097674449546, "loss": 1.8007, "step": 6300 }, { "epoch": 0.61533203125, "grad_norm": 0.21295252442359924, "learning_rate": 0.00020297360272748078, "loss": 1.786, "step": 6301 }, { "epoch": 0.6154296875, "grad_norm": 0.2057625949382782, "learning_rate": 0.00020290623590526514, "loss": 1.8572, "step": 6302 }, { "epoch": 0.61552734375, "grad_norm": 0.19968067109584808, "learning_rate": 0.0002028388762845779, "loss": 1.7695, "step": 6303 }, { "epoch": 0.615625, "grad_norm": 0.20274509489536285, "learning_rate": 0.0002027715238721476, "loss": 1.8178, "step": 6304 }, { "epoch": 0.61572265625, "grad_norm": 0.18402151763439178, "learning_rate": 0.00020270417867470226, "loss": 1.8038, "step": 6305 }, { "epoch": 0.6158203125, "grad_norm": 0.21641847491264343, "learning_rate": 0.00020263684069896887, "loss": 1.7739, "step": 6306 }, { "epoch": 0.61591796875, "grad_norm": 0.18694743514060974, "learning_rate": 0.00020256950995167411, "loss": 1.7647, "step": 6307 }, { "epoch": 0.616015625, "grad_norm": 0.17946001887321472, "learning_rate": 0.00020250218643954348, "loss": 1.8177, "step": 6308 }, { "epoch": 
0.61611328125, "grad_norm": 0.182112917304039, "learning_rate": 0.00020243487016930224, "loss": 1.8356, "step": 6309 }, { "epoch": 0.6162109375, "grad_norm": 0.19437265396118164, "learning_rate": 0.0002023675611476747, "loss": 1.7979, "step": 6310 }, { "epoch": 0.61630859375, "grad_norm": 0.18136169016361237, "learning_rate": 0.0002023002593813842, "loss": 1.8259, "step": 6311 }, { "epoch": 0.61640625, "grad_norm": 0.18213734030723572, "learning_rate": 0.0002022329648771538, "loss": 1.8127, "step": 6312 }, { "epoch": 0.61650390625, "grad_norm": 0.19535046815872192, "learning_rate": 0.00020216567764170553, "loss": 1.7853, "step": 6313 }, { "epoch": 0.6166015625, "grad_norm": 0.19164642691612244, "learning_rate": 0.00020209839768176082, "loss": 1.8406, "step": 6314 }, { "epoch": 0.61669921875, "grad_norm": 0.1941312700510025, "learning_rate": 0.0002020311250040402, "loss": 1.7759, "step": 6315 }, { "epoch": 0.616796875, "grad_norm": 0.18514040112495422, "learning_rate": 0.00020196385961526386, "loss": 1.8243, "step": 6316 }, { "epoch": 0.61689453125, "grad_norm": 0.20187190175056458, "learning_rate": 0.00020189660152215096, "loss": 1.7779, "step": 6317 }, { "epoch": 0.6169921875, "grad_norm": 0.1750413030385971, "learning_rate": 0.00020182935073141984, "loss": 1.8607, "step": 6318 }, { "epoch": 0.61708984375, "grad_norm": 0.2049160599708557, "learning_rate": 0.00020176210724978845, "loss": 1.8196, "step": 6319 }, { "epoch": 0.6171875, "grad_norm": 0.1685493290424347, "learning_rate": 0.0002016948710839736, "loss": 1.7876, "step": 6320 }, { "epoch": 0.61728515625, "grad_norm": 0.18984028697013855, "learning_rate": 0.0002016276422406918, "loss": 1.7939, "step": 6321 }, { "epoch": 0.6173828125, "grad_norm": 0.1730455607175827, "learning_rate": 0.00020156042072665836, "loss": 1.8219, "step": 6322 }, { "epoch": 0.61748046875, "grad_norm": 0.1970357894897461, "learning_rate": 0.0002014932065485884, "loss": 1.8173, "step": 6323 }, { "epoch": 0.617578125, "grad_norm": 0.16878363490104675, "learning_rate": 0.0002014259997131959, "loss": 1.8266, "step": 6324 }, { "epoch": 0.61767578125, "grad_norm": 0.1875246912240982, "learning_rate": 0.00020135880022719422, "loss": 1.7944, "step": 6325 }, { "epoch": 0.6177734375, "grad_norm": 0.1607278734445572, "learning_rate": 0.00020129160809729597, "loss": 1.8701, "step": 6326 }, { "epoch": 0.61787109375, "grad_norm": 0.19331388175487518, "learning_rate": 0.00020122442333021307, "loss": 1.7664, "step": 6327 }, { "epoch": 0.61796875, "grad_norm": 0.17490042746067047, "learning_rate": 0.0002011572459326567, "loss": 1.7784, "step": 6328 }, { "epoch": 0.61806640625, "grad_norm": 0.2111634910106659, "learning_rate": 0.00020109007591133726, "loss": 1.8036, "step": 6329 }, { "epoch": 0.6181640625, "grad_norm": 0.17160950601100922, "learning_rate": 0.00020102291327296447, "loss": 1.8126, "step": 6330 }, { "epoch": 0.61826171875, "grad_norm": 0.18259228765964508, "learning_rate": 0.00020095575802424727, "loss": 1.7904, "step": 6331 }, { "epoch": 0.618359375, "grad_norm": 0.21800462901592255, "learning_rate": 0.0002008886101718938, "loss": 1.7985, "step": 6332 }, { "epoch": 0.61845703125, "grad_norm": 0.18857115507125854, "learning_rate": 0.00020082146972261173, "loss": 1.7984, "step": 6333 }, { "epoch": 0.6185546875, "grad_norm": 0.20058943331241608, "learning_rate": 0.00020075433668310756, "loss": 1.8147, "step": 6334 }, { "epoch": 0.61865234375, "grad_norm": 0.1917010098695755, "learning_rate": 0.00020068721106008747, "loss": 1.7504, "step": 6335 }, { "epoch": 0.61875, 
"grad_norm": 0.18075354397296906, "learning_rate": 0.00020062009286025655, "loss": 1.7929, "step": 6336 }, { "epoch": 0.61884765625, "grad_norm": 0.18145160377025604, "learning_rate": 0.00020055298209031947, "loss": 1.7763, "step": 6337 }, { "epoch": 0.6189453125, "grad_norm": 0.16852670907974243, "learning_rate": 0.00020048587875698, "loss": 1.8057, "step": 6338 }, { "epoch": 0.61904296875, "grad_norm": 0.17546924948692322, "learning_rate": 0.00020041878286694098, "loss": 1.7791, "step": 6339 }, { "epoch": 0.619140625, "grad_norm": 0.17903289198875427, "learning_rate": 0.00020035169442690498, "loss": 1.8158, "step": 6340 }, { "epoch": 0.61923828125, "grad_norm": 0.1931469589471817, "learning_rate": 0.00020028461344357324, "loss": 1.801, "step": 6341 }, { "epoch": 0.6193359375, "grad_norm": 0.17481836676597595, "learning_rate": 0.00020021753992364676, "loss": 1.7806, "step": 6342 }, { "epoch": 0.61943359375, "grad_norm": 0.17235007882118225, "learning_rate": 0.0002001504738738255, "loss": 1.8014, "step": 6343 }, { "epoch": 0.61953125, "grad_norm": 0.1851324439048767, "learning_rate": 0.0002000834153008088, "loss": 1.8535, "step": 6344 }, { "epoch": 0.61962890625, "grad_norm": 0.17306001484394073, "learning_rate": 0.00020001636421129526, "loss": 1.7842, "step": 6345 }, { "epoch": 0.6197265625, "grad_norm": 0.17070123553276062, "learning_rate": 0.00019994932061198257, "loss": 1.8253, "step": 6346 }, { "epoch": 0.61982421875, "grad_norm": 0.17640306055545807, "learning_rate": 0.00019988228450956792, "loss": 1.7847, "step": 6347 }, { "epoch": 0.619921875, "grad_norm": 0.1855783611536026, "learning_rate": 0.0001998152559107475, "loss": 1.7534, "step": 6348 }, { "epoch": 0.62001953125, "grad_norm": 0.18514123558998108, "learning_rate": 0.00019974823482221704, "loss": 1.8465, "step": 6349 }, { "epoch": 0.6201171875, "grad_norm": 0.17067478597164154, "learning_rate": 0.0001996812212506711, "loss": 1.7947, "step": 6350 }, { "epoch": 0.62021484375, "grad_norm": 0.22773151099681854, "learning_rate": 0.00019961421520280399, "loss": 1.8247, "step": 6351 }, { "epoch": 0.6203125, "grad_norm": 0.19859768450260162, "learning_rate": 0.00019954721668530894, "loss": 1.8252, "step": 6352 }, { "epoch": 0.62041015625, "grad_norm": 0.18404893577098846, "learning_rate": 0.00019948022570487846, "loss": 1.7736, "step": 6353 }, { "epoch": 0.6205078125, "grad_norm": 0.18539828062057495, "learning_rate": 0.00019941324226820445, "loss": 1.7936, "step": 6354 }, { "epoch": 0.62060546875, "grad_norm": 0.16867733001708984, "learning_rate": 0.0001993462663819778, "loss": 1.8397, "step": 6355 }, { "epoch": 0.620703125, "grad_norm": 0.18858270347118378, "learning_rate": 0.00019927929805288897, "loss": 1.801, "step": 6356 }, { "epoch": 0.62080078125, "grad_norm": 0.16562695801258087, "learning_rate": 0.00019921233728762734, "loss": 1.7688, "step": 6357 }, { "epoch": 0.6208984375, "grad_norm": 0.1765696257352829, "learning_rate": 0.00019914538409288184, "loss": 1.8437, "step": 6358 }, { "epoch": 0.62099609375, "grad_norm": 0.18784905970096588, "learning_rate": 0.00019907843847534058, "loss": 1.8279, "step": 6359 }, { "epoch": 0.62109375, "grad_norm": 0.1885390728712082, "learning_rate": 0.00019901150044169058, "loss": 1.8, "step": 6360 }, { "epoch": 0.62119140625, "grad_norm": 0.16758742928504944, "learning_rate": 0.00019894456999861853, "loss": 1.7802, "step": 6361 }, { "epoch": 0.6212890625, "grad_norm": 0.16144059598445892, "learning_rate": 0.00019887764715281015, "loss": 1.8091, "step": 6362 }, { "epoch": 0.62138671875, 
"grad_norm": 0.1763240545988083, "learning_rate": 0.00019881073191095044, "loss": 1.7843, "step": 6363 }, { "epoch": 0.621484375, "grad_norm": 0.20969723165035248, "learning_rate": 0.00019874382427972358, "loss": 1.7959, "step": 6364 }, { "epoch": 0.62158203125, "grad_norm": 0.1811879277229309, "learning_rate": 0.0001986769242658132, "loss": 1.8039, "step": 6365 }, { "epoch": 0.6216796875, "grad_norm": 0.14748938381671906, "learning_rate": 0.00019861003187590187, "loss": 1.7827, "step": 6366 }, { "epoch": 0.62177734375, "grad_norm": 0.17503249645233154, "learning_rate": 0.0001985431471166716, "loss": 1.804, "step": 6367 }, { "epoch": 0.621875, "grad_norm": 0.15178577601909637, "learning_rate": 0.00019847626999480367, "loss": 1.8098, "step": 6368 }, { "epoch": 0.62197265625, "grad_norm": 0.1604599803686142, "learning_rate": 0.0001984094005169783, "loss": 1.8333, "step": 6369 }, { "epoch": 0.6220703125, "grad_norm": 0.17357830703258514, "learning_rate": 0.0001983425386898754, "loss": 1.7747, "step": 6370 }, { "epoch": 0.62216796875, "grad_norm": 0.17284920811653137, "learning_rate": 0.00019827568452017363, "loss": 1.8291, "step": 6371 }, { "epoch": 0.622265625, "grad_norm": 0.18875861167907715, "learning_rate": 0.00019820883801455132, "loss": 1.7665, "step": 6372 }, { "epoch": 0.62236328125, "grad_norm": 0.19278576970100403, "learning_rate": 0.00019814199917968583, "loss": 1.7782, "step": 6373 }, { "epoch": 0.6224609375, "grad_norm": 0.1745540052652359, "learning_rate": 0.00019807516802225367, "loss": 1.8027, "step": 6374 }, { "epoch": 0.62255859375, "grad_norm": 0.20537987351417542, "learning_rate": 0.0001980083445489308, "loss": 1.7989, "step": 6375 }, { "epoch": 0.62265625, "grad_norm": 0.1840182989835739, "learning_rate": 0.00019794152876639214, "loss": 1.8131, "step": 6376 }, { "epoch": 0.62275390625, "grad_norm": 0.18179085850715637, "learning_rate": 0.0001978747206813122, "loss": 1.8335, "step": 6377 }, { "epoch": 0.6228515625, "grad_norm": 0.16934886574745178, "learning_rate": 0.00019780792030036422, "loss": 1.8103, "step": 6378 }, { "epoch": 0.62294921875, "grad_norm": 0.20014256238937378, "learning_rate": 0.00019774112763022123, "loss": 1.8799, "step": 6379 }, { "epoch": 0.623046875, "grad_norm": 0.18609273433685303, "learning_rate": 0.00019767434267755517, "loss": 1.8143, "step": 6380 }, { "epoch": 0.62314453125, "grad_norm": 0.1936090886592865, "learning_rate": 0.00019760756544903718, "loss": 1.7941, "step": 6381 }, { "epoch": 0.6232421875, "grad_norm": 0.18705575168132782, "learning_rate": 0.00019754079595133784, "loss": 1.8095, "step": 6382 }, { "epoch": 0.62333984375, "grad_norm": 0.18813374638557434, "learning_rate": 0.00019747403419112663, "loss": 1.7832, "step": 6383 }, { "epoch": 0.6234375, "grad_norm": 0.1743713617324829, "learning_rate": 0.0001974072801750727, "loss": 1.8145, "step": 6384 }, { "epoch": 0.62353515625, "grad_norm": 0.19811700284481049, "learning_rate": 0.00019734053390984402, "loss": 1.8162, "step": 6385 }, { "epoch": 0.6236328125, "grad_norm": 0.18207187950611115, "learning_rate": 0.00019727379540210798, "loss": 1.8491, "step": 6386 }, { "epoch": 0.62373046875, "grad_norm": 0.2032071352005005, "learning_rate": 0.0001972070646585312, "loss": 1.7971, "step": 6387 }, { "epoch": 0.623828125, "grad_norm": 0.24725903570652008, "learning_rate": 0.00019714034168577955, "loss": 1.7853, "step": 6388 }, { "epoch": 0.62392578125, "grad_norm": 0.21602684259414673, "learning_rate": 0.00019707362649051797, "loss": 1.8087, "step": 6389 }, { "epoch": 0.6240234375, 
"grad_norm": 0.2196822166442871, "learning_rate": 0.0001970069190794107, "loss": 1.8316, "step": 6390 }, { "epoch": 0.62412109375, "grad_norm": 0.20243632793426514, "learning_rate": 0.00019694021945912128, "loss": 1.8163, "step": 6391 }, { "epoch": 0.62421875, "grad_norm": 0.22791379690170288, "learning_rate": 0.00019687352763631227, "loss": 1.7999, "step": 6392 }, { "epoch": 0.62431640625, "grad_norm": 0.23579448461532593, "learning_rate": 0.0001968068436176459, "loss": 1.7977, "step": 6393 }, { "epoch": 0.6244140625, "grad_norm": 0.1964924931526184, "learning_rate": 0.00019674016740978307, "loss": 1.8014, "step": 6394 }, { "epoch": 0.62451171875, "grad_norm": 0.21976277232170105, "learning_rate": 0.00019667349901938413, "loss": 1.8382, "step": 6395 }, { "epoch": 0.624609375, "grad_norm": 0.20140255987644196, "learning_rate": 0.00019660683845310878, "loss": 1.7837, "step": 6396 }, { "epoch": 0.62470703125, "grad_norm": 0.20451895892620087, "learning_rate": 0.00019654018571761572, "loss": 1.784, "step": 6397 }, { "epoch": 0.6248046875, "grad_norm": 0.22466418147087097, "learning_rate": 0.00019647354081956303, "loss": 1.8482, "step": 6398 }, { "epoch": 0.62490234375, "grad_norm": 0.17379209399223328, "learning_rate": 0.00019640690376560794, "loss": 1.797, "step": 6399 }, { "epoch": 0.625, "grad_norm": 0.2479766458272934, "learning_rate": 0.0001963402745624069, "loss": 1.782, "step": 6400 }, { "epoch": 0.62509765625, "grad_norm": 0.15700212121009827, "learning_rate": 0.00019627365321661555, "loss": 1.7774, "step": 6401 }, { "epoch": 0.6251953125, "grad_norm": 0.21858108043670654, "learning_rate": 0.00019620703973488886, "loss": 1.7746, "step": 6402 }, { "epoch": 0.62529296875, "grad_norm": 0.1855676770210266, "learning_rate": 0.00019614043412388083, "loss": 1.7425, "step": 6403 }, { "epoch": 0.625390625, "grad_norm": 0.21127770841121674, "learning_rate": 0.00019607383639024472, "loss": 1.8033, "step": 6404 }, { "epoch": 0.62548828125, "grad_norm": 0.18523213267326355, "learning_rate": 0.00019600724654063317, "loss": 1.7955, "step": 6405 }, { "epoch": 0.6255859375, "grad_norm": 0.23227810859680176, "learning_rate": 0.0001959406645816979, "loss": 1.8323, "step": 6406 }, { "epoch": 0.62568359375, "grad_norm": 0.20112544298171997, "learning_rate": 0.00019587409052008987, "loss": 1.7839, "step": 6407 }, { "epoch": 0.62578125, "grad_norm": 0.21015582978725433, "learning_rate": 0.00019580752436245912, "loss": 1.8394, "step": 6408 }, { "epoch": 0.62587890625, "grad_norm": 0.17113018035888672, "learning_rate": 0.00019574096611545515, "loss": 1.7929, "step": 6409 }, { "epoch": 0.6259765625, "grad_norm": 0.17410972714424133, "learning_rate": 0.00019567441578572652, "loss": 1.786, "step": 6410 }, { "epoch": 0.62607421875, "grad_norm": 0.20029860734939575, "learning_rate": 0.00019560787337992097, "loss": 1.7646, "step": 6411 }, { "epoch": 0.626171875, "grad_norm": 0.1726536601781845, "learning_rate": 0.00019554133890468546, "loss": 1.7911, "step": 6412 }, { "epoch": 0.62626953125, "grad_norm": 0.1996196210384369, "learning_rate": 0.00019547481236666632, "loss": 1.8185, "step": 6413 }, { "epoch": 0.6263671875, "grad_norm": 0.22197911143302917, "learning_rate": 0.0001954082937725089, "loss": 1.8504, "step": 6414 }, { "epoch": 0.62646484375, "grad_norm": 0.18105211853981018, "learning_rate": 0.00019534178312885774, "loss": 1.7508, "step": 6415 }, { "epoch": 0.6265625, "grad_norm": 0.22429277002811432, "learning_rate": 0.00019527528044235683, "loss": 1.7872, "step": 6416 }, { "epoch": 0.62666015625, 
"grad_norm": 0.21304799616336823, "learning_rate": 0.00019520878571964906, "loss": 1.8129, "step": 6417 }, { "epoch": 0.6267578125, "grad_norm": 0.2244589626789093, "learning_rate": 0.00019514229896737674, "loss": 1.7731, "step": 6418 }, { "epoch": 0.62685546875, "grad_norm": 0.1967083066701889, "learning_rate": 0.00019507582019218122, "loss": 1.7747, "step": 6419 }, { "epoch": 0.626953125, "grad_norm": 0.23493900895118713, "learning_rate": 0.00019500934940070325, "loss": 1.8171, "step": 6420 }, { "epoch": 0.62705078125, "grad_norm": 0.19364342093467712, "learning_rate": 0.0001949428865995826, "loss": 1.8512, "step": 6421 }, { "epoch": 0.6271484375, "grad_norm": 0.22769080102443695, "learning_rate": 0.00019487643179545833, "loss": 1.7934, "step": 6422 }, { "epoch": 0.62724609375, "grad_norm": 0.18702098727226257, "learning_rate": 0.00019480998499496872, "loss": 1.8003, "step": 6423 }, { "epoch": 0.62734375, "grad_norm": 0.19778092205524445, "learning_rate": 0.00019474354620475115, "loss": 1.8174, "step": 6424 }, { "epoch": 0.62744140625, "grad_norm": 0.19711503386497498, "learning_rate": 0.0001946771154314424, "loss": 1.7984, "step": 6425 }, { "epoch": 0.6275390625, "grad_norm": 0.1893034428358078, "learning_rate": 0.00019461069268167808, "loss": 1.8316, "step": 6426 }, { "epoch": 0.62763671875, "grad_norm": 0.2028658092021942, "learning_rate": 0.0001945442779620934, "loss": 1.8319, "step": 6427 }, { "epoch": 0.627734375, "grad_norm": 0.21889033913612366, "learning_rate": 0.0001944778712793227, "loss": 1.8462, "step": 6428 }, { "epoch": 0.62783203125, "grad_norm": 0.20555053651332855, "learning_rate": 0.0001944114726399992, "loss": 1.7993, "step": 6429 }, { "epoch": 0.6279296875, "grad_norm": 0.2105427384376526, "learning_rate": 0.00019434508205075569, "loss": 1.7959, "step": 6430 }, { "epoch": 0.62802734375, "grad_norm": 0.1896512508392334, "learning_rate": 0.00019427869951822385, "loss": 1.7947, "step": 6431 }, { "epoch": 0.628125, "grad_norm": 0.2234705239534378, "learning_rate": 0.00019421232504903492, "loss": 1.8061, "step": 6432 }, { "epoch": 0.62822265625, "grad_norm": 0.1813373565673828, "learning_rate": 0.00019414595864981888, "loss": 1.8111, "step": 6433 }, { "epoch": 0.6283203125, "grad_norm": 0.1964898258447647, "learning_rate": 0.00019407960032720534, "loss": 1.7768, "step": 6434 }, { "epoch": 0.62841796875, "grad_norm": 0.16714709997177124, "learning_rate": 0.00019401325008782283, "loss": 1.7747, "step": 6435 }, { "epoch": 0.628515625, "grad_norm": 0.2066795378923416, "learning_rate": 0.00019394690793829916, "loss": 1.7877, "step": 6436 }, { "epoch": 0.62861328125, "grad_norm": 0.18410496413707733, "learning_rate": 0.00019388057388526137, "loss": 1.8015, "step": 6437 }, { "epoch": 0.6287109375, "grad_norm": 0.1935044378042221, "learning_rate": 0.00019381424793533554, "loss": 1.7755, "step": 6438 }, { "epoch": 0.62880859375, "grad_norm": 0.18074935674667358, "learning_rate": 0.00019374793009514714, "loss": 1.781, "step": 6439 }, { "epoch": 0.62890625, "grad_norm": 0.2087641805410385, "learning_rate": 0.00019368162037132058, "loss": 1.7667, "step": 6440 }, { "epoch": 0.62900390625, "grad_norm": 0.17115138471126556, "learning_rate": 0.00019361531877047978, "loss": 1.7939, "step": 6441 }, { "epoch": 0.6291015625, "grad_norm": 0.23389017581939697, "learning_rate": 0.00019354902529924772, "loss": 1.8286, "step": 6442 }, { "epoch": 0.62919921875, "grad_norm": 0.1912841796875, "learning_rate": 0.00019348273996424637, "loss": 1.8121, "step": 6443 }, { "epoch": 0.629296875, 
"grad_norm": 0.20178472995758057, "learning_rate": 0.0001934164627720972, "loss": 1.8061, "step": 6444 }, { "epoch": 0.62939453125, "grad_norm": 0.16488952934741974, "learning_rate": 0.00019335019372942054, "loss": 1.7915, "step": 6445 }, { "epoch": 0.6294921875, "grad_norm": 0.173313707113266, "learning_rate": 0.00019328393284283625, "loss": 1.8242, "step": 6446 }, { "epoch": 0.62958984375, "grad_norm": 0.17539027333259583, "learning_rate": 0.00019321768011896297, "loss": 1.8216, "step": 6447 }, { "epoch": 0.6296875, "grad_norm": 0.16892239451408386, "learning_rate": 0.00019315143556441905, "loss": 1.79, "step": 6448 }, { "epoch": 0.62978515625, "grad_norm": 0.17803457379341125, "learning_rate": 0.00019308519918582158, "loss": 1.819, "step": 6449 }, { "epoch": 0.6298828125, "grad_norm": 0.1753336638212204, "learning_rate": 0.00019301897098978704, "loss": 1.8272, "step": 6450 }, { "epoch": 0.62998046875, "grad_norm": 0.19283145666122437, "learning_rate": 0.000192952750982931, "loss": 1.7676, "step": 6451 }, { "epoch": 0.630078125, "grad_norm": 0.1610206812620163, "learning_rate": 0.00019288653917186823, "loss": 1.7988, "step": 6452 }, { "epoch": 0.63017578125, "grad_norm": 0.1912800818681717, "learning_rate": 0.00019282033556321276, "loss": 1.7626, "step": 6453 }, { "epoch": 0.6302734375, "grad_norm": 0.20288285613059998, "learning_rate": 0.00019275414016357772, "loss": 1.7858, "step": 6454 }, { "epoch": 0.63037109375, "grad_norm": 0.19619113206863403, "learning_rate": 0.00019268795297957541, "loss": 1.8252, "step": 6455 }, { "epoch": 0.63046875, "grad_norm": 0.20519226789474487, "learning_rate": 0.00019262177401781745, "loss": 1.7746, "step": 6456 }, { "epoch": 0.63056640625, "grad_norm": 0.19905945658683777, "learning_rate": 0.0001925556032849144, "loss": 1.8143, "step": 6457 }, { "epoch": 0.6306640625, "grad_norm": 0.1838093400001526, "learning_rate": 0.00019248944078747627, "loss": 1.7964, "step": 6458 }, { "epoch": 0.63076171875, "grad_norm": 0.2028278112411499, "learning_rate": 0.00019242328653211195, "loss": 1.7787, "step": 6459 }, { "epoch": 0.630859375, "grad_norm": 0.20368026196956635, "learning_rate": 0.0001923571405254298, "loss": 1.7647, "step": 6460 }, { "epoch": 0.63095703125, "grad_norm": 0.17372117936611176, "learning_rate": 0.00019229100277403706, "loss": 1.7708, "step": 6461 }, { "epoch": 0.6310546875, "grad_norm": 0.19635634124279022, "learning_rate": 0.00019222487328454047, "loss": 1.7949, "step": 6462 }, { "epoch": 0.63115234375, "grad_norm": 0.1920703798532486, "learning_rate": 0.00019215875206354572, "loss": 1.8581, "step": 6463 }, { "epoch": 0.63125, "grad_norm": 0.180561825633049, "learning_rate": 0.00019209263911765774, "loss": 1.8184, "step": 6464 }, { "epoch": 0.63134765625, "grad_norm": 0.17893658578395844, "learning_rate": 0.0001920265344534806, "loss": 1.7365, "step": 6465 }, { "epoch": 0.6314453125, "grad_norm": 0.195560023188591, "learning_rate": 0.00019196043807761758, "loss": 1.7659, "step": 6466 }, { "epoch": 0.63154296875, "grad_norm": 0.16944506764411926, "learning_rate": 0.0001918943499966712, "loss": 1.7704, "step": 6467 }, { "epoch": 0.631640625, "grad_norm": 0.17622090876102448, "learning_rate": 0.00019182827021724293, "loss": 1.8259, "step": 6468 }, { "epoch": 0.63173828125, "grad_norm": 0.21229338645935059, "learning_rate": 0.00019176219874593364, "loss": 1.7856, "step": 6469 }, { "epoch": 0.6318359375, "grad_norm": 0.1871611475944519, "learning_rate": 0.0001916961355893433, "loss": 1.8249, "step": 6470 }, { "epoch": 0.63193359375, "grad_norm": 
0.1743934005498886, "learning_rate": 0.00019163008075407094, "loss": 1.8199, "step": 6471 }, { "epoch": 0.63203125, "grad_norm": 0.19774822890758514, "learning_rate": 0.00019156403424671508, "loss": 1.8418, "step": 6472 }, { "epoch": 0.63212890625, "grad_norm": 0.16966727375984192, "learning_rate": 0.0001914979960738729, "loss": 1.769, "step": 6473 }, { "epoch": 0.6322265625, "grad_norm": 0.22685794532299042, "learning_rate": 0.00019143196624214122, "loss": 1.7777, "step": 6474 }, { "epoch": 0.63232421875, "grad_norm": 0.18502193689346313, "learning_rate": 0.00019136594475811563, "loss": 1.7985, "step": 6475 }, { "epoch": 0.632421875, "grad_norm": 0.19318491220474243, "learning_rate": 0.00019129993162839132, "loss": 1.819, "step": 6476 }, { "epoch": 0.63251953125, "grad_norm": 0.19619008898735046, "learning_rate": 0.00019123392685956237, "loss": 1.7992, "step": 6477 }, { "epoch": 0.6326171875, "grad_norm": 0.21025346219539642, "learning_rate": 0.00019116793045822195, "loss": 1.764, "step": 6478 }, { "epoch": 0.63271484375, "grad_norm": 0.20766951143741608, "learning_rate": 0.00019110194243096264, "loss": 1.7674, "step": 6479 }, { "epoch": 0.6328125, "grad_norm": 0.2074117362499237, "learning_rate": 0.000191035962784376, "loss": 1.8118, "step": 6480 }, { "epoch": 0.63291015625, "grad_norm": 0.198982372879982, "learning_rate": 0.00019096999152505285, "loss": 1.7941, "step": 6481 }, { "epoch": 0.6330078125, "grad_norm": 0.1783127635717392, "learning_rate": 0.000190904028659583, "loss": 1.8244, "step": 6482 }, { "epoch": 0.63310546875, "grad_norm": 0.21847519278526306, "learning_rate": 0.00019083807419455572, "loss": 1.7944, "step": 6483 }, { "epoch": 0.633203125, "grad_norm": 0.20868249237537384, "learning_rate": 0.0001907721281365593, "loss": 1.8226, "step": 6484 }, { "epoch": 0.63330078125, "grad_norm": 0.23499496281147003, "learning_rate": 0.00019070619049218102, "loss": 1.7851, "step": 6485 }, { "epoch": 0.6333984375, "grad_norm": 0.23311583697795868, "learning_rate": 0.00019064026126800753, "loss": 1.8211, "step": 6486 }, { "epoch": 0.63349609375, "grad_norm": 0.23593838512897491, "learning_rate": 0.00019057434047062462, "loss": 1.8029, "step": 6487 }, { "epoch": 0.63359375, "grad_norm": 0.18262098729610443, "learning_rate": 0.00019050842810661717, "loss": 1.7866, "step": 6488 }, { "epoch": 0.63369140625, "grad_norm": 0.22920474410057068, "learning_rate": 0.00019044252418256907, "loss": 1.8228, "step": 6489 }, { "epoch": 0.6337890625, "grad_norm": 0.1713964194059372, "learning_rate": 0.00019037662870506378, "loss": 1.8011, "step": 6490 }, { "epoch": 0.63388671875, "grad_norm": 0.22496794164180756, "learning_rate": 0.00019031074168068368, "loss": 1.7921, "step": 6491 }, { "epoch": 0.633984375, "grad_norm": 0.2056436985731125, "learning_rate": 0.0001902448631160101, "loss": 1.8384, "step": 6492 }, { "epoch": 0.63408203125, "grad_norm": 0.20540577173233032, "learning_rate": 0.00019017899301762393, "loss": 1.7856, "step": 6493 }, { "epoch": 0.6341796875, "grad_norm": 0.19513583183288574, "learning_rate": 0.0001901131313921048, "loss": 1.8309, "step": 6494 }, { "epoch": 0.63427734375, "grad_norm": 0.1791575700044632, "learning_rate": 0.00019004727824603192, "loss": 1.8052, "step": 6495 }, { "epoch": 0.634375, "grad_norm": 0.17198437452316284, "learning_rate": 0.00018998143358598318, "loss": 1.7738, "step": 6496 }, { "epoch": 0.63447265625, "grad_norm": 0.18218986690044403, "learning_rate": 0.00018991559741853608, "loss": 1.7857, "step": 6497 }, { "epoch": 0.6345703125, "grad_norm": 
0.15654172003269196, "learning_rate": 0.00018984976975026707, "loss": 1.7683, "step": 6498 }, { "epoch": 0.63466796875, "grad_norm": 0.19699935615062714, "learning_rate": 0.00018978395058775172, "loss": 1.7614, "step": 6499 }, { "epoch": 0.634765625, "grad_norm": 0.15532132983207703, "learning_rate": 0.00018971813993756475, "loss": 1.8059, "step": 6500 }, { "epoch": 0.63486328125, "grad_norm": 0.16963636875152588, "learning_rate": 0.00018965233780628005, "loss": 1.7701, "step": 6501 }, { "epoch": 0.6349609375, "grad_norm": 0.16323351860046387, "learning_rate": 0.00018958654420047074, "loss": 1.8267, "step": 6502 }, { "epoch": 0.63505859375, "grad_norm": 0.17396698892116547, "learning_rate": 0.0001895207591267088, "loss": 1.8113, "step": 6503 }, { "epoch": 0.63515625, "grad_norm": 0.16565603017807007, "learning_rate": 0.00018945498259156584, "loss": 1.7714, "step": 6504 }, { "epoch": 0.63525390625, "grad_norm": 0.19854402542114258, "learning_rate": 0.00018938921460161234, "loss": 1.853, "step": 6505 }, { "epoch": 0.6353515625, "grad_norm": 0.16162553429603577, "learning_rate": 0.00018932345516341776, "loss": 1.7952, "step": 6506 }, { "epoch": 0.63544921875, "grad_norm": 0.20997078716754913, "learning_rate": 0.00018925770428355104, "loss": 1.7889, "step": 6507 }, { "epoch": 0.635546875, "grad_norm": 0.18748736381530762, "learning_rate": 0.00018919196196858008, "loss": 1.7461, "step": 6508 }, { "epoch": 0.63564453125, "grad_norm": 0.20092548429965973, "learning_rate": 0.00018912622822507192, "loss": 1.7981, "step": 6509 }, { "epoch": 0.6357421875, "grad_norm": 0.17963604629039764, "learning_rate": 0.00018906050305959266, "loss": 1.7789, "step": 6510 }, { "epoch": 0.63583984375, "grad_norm": 0.1960425227880478, "learning_rate": 0.00018899478647870787, "loss": 1.814, "step": 6511 }, { "epoch": 0.6359375, "grad_norm": 0.18307647109031677, "learning_rate": 0.000188929078488982, "loss": 1.7352, "step": 6512 }, { "epoch": 0.63603515625, "grad_norm": 0.18898005783557892, "learning_rate": 0.00018886337909697866, "loss": 1.8156, "step": 6513 }, { "epoch": 0.6361328125, "grad_norm": 0.18129074573516846, "learning_rate": 0.00018879768830926065, "loss": 1.8024, "step": 6514 }, { "epoch": 0.63623046875, "grad_norm": 0.18491913378238678, "learning_rate": 0.0001887320061323899, "loss": 1.845, "step": 6515 }, { "epoch": 0.636328125, "grad_norm": 0.19737549126148224, "learning_rate": 0.00018866633257292738, "loss": 1.7862, "step": 6516 }, { "epoch": 0.63642578125, "grad_norm": 0.18737675249576569, "learning_rate": 0.0001886006676374335, "loss": 1.7802, "step": 6517 }, { "epoch": 0.6365234375, "grad_norm": 0.19316722452640533, "learning_rate": 0.00018853501133246742, "loss": 1.7631, "step": 6518 }, { "epoch": 0.63662109375, "grad_norm": 0.1689293533563614, "learning_rate": 0.00018846936366458778, "loss": 1.8185, "step": 6519 }, { "epoch": 0.63671875, "grad_norm": 0.1816440224647522, "learning_rate": 0.00018840372464035206, "loss": 1.8483, "step": 6520 }, { "epoch": 0.63681640625, "grad_norm": 0.1835990697145462, "learning_rate": 0.0001883380942663171, "loss": 1.7962, "step": 6521 }, { "epoch": 0.6369140625, "grad_norm": 0.2125129997730255, "learning_rate": 0.0001882724725490388, "loss": 1.8417, "step": 6522 }, { "epoch": 0.63701171875, "grad_norm": 0.20010530948638916, "learning_rate": 0.000188206859495072, "loss": 1.7705, "step": 6523 }, { "epoch": 0.637109375, "grad_norm": 0.2108786702156067, "learning_rate": 0.00018814125511097118, "loss": 1.8343, "step": 6524 }, { "epoch": 0.63720703125, "grad_norm": 
0.1858823597431183, "learning_rate": 0.0001880756594032894, "loss": 1.7709, "step": 6525 }, { "epoch": 0.6373046875, "grad_norm": 0.1901320219039917, "learning_rate": 0.0001880100723785792, "loss": 1.7937, "step": 6526 }, { "epoch": 0.63740234375, "grad_norm": 0.19330911338329315, "learning_rate": 0.00018794449404339208, "loss": 1.7386, "step": 6527 }, { "epoch": 0.6375, "grad_norm": 0.18727366626262665, "learning_rate": 0.00018787892440427884, "loss": 1.8033, "step": 6528 }, { "epoch": 0.63759765625, "grad_norm": 0.17376196384429932, "learning_rate": 0.00018781336346778915, "loss": 1.7622, "step": 6529 }, { "epoch": 0.6376953125, "grad_norm": 0.2149302065372467, "learning_rate": 0.00018774781124047198, "loss": 1.8181, "step": 6530 }, { "epoch": 0.63779296875, "grad_norm": 0.1711289882659912, "learning_rate": 0.00018768226772887563, "loss": 1.8311, "step": 6531 }, { "epoch": 0.637890625, "grad_norm": 0.20867592096328735, "learning_rate": 0.00018761673293954707, "loss": 1.7815, "step": 6532 }, { "epoch": 0.63798828125, "grad_norm": 0.15529616177082062, "learning_rate": 0.00018755120687903283, "loss": 1.7792, "step": 6533 }, { "epoch": 0.6380859375, "grad_norm": 0.189920574426651, "learning_rate": 0.00018748568955387823, "loss": 1.8191, "step": 6534 }, { "epoch": 0.63818359375, "grad_norm": 0.16647270321846008, "learning_rate": 0.00018742018097062796, "loss": 1.8075, "step": 6535 }, { "epoch": 0.63828125, "grad_norm": 0.1696346551179886, "learning_rate": 0.0001873546811358257, "loss": 1.7614, "step": 6536 }, { "epoch": 0.63837890625, "grad_norm": 0.14949803054332733, "learning_rate": 0.0001872891900560143, "loss": 1.7515, "step": 6537 }, { "epoch": 0.6384765625, "grad_norm": 0.17705032229423523, "learning_rate": 0.00018722370773773583, "loss": 1.7862, "step": 6538 }, { "epoch": 0.63857421875, "grad_norm": 0.19289366900920868, "learning_rate": 0.0001871582341875313, "loss": 1.8043, "step": 6539 }, { "epoch": 0.638671875, "grad_norm": 0.18578293919563293, "learning_rate": 0.00018709276941194104, "loss": 1.8245, "step": 6540 }, { "epoch": 0.63876953125, "grad_norm": 0.20481370389461517, "learning_rate": 0.00018702731341750423, "loss": 1.7709, "step": 6541 }, { "epoch": 0.6388671875, "grad_norm": 0.15738046169281006, "learning_rate": 0.0001869618662107595, "loss": 1.8027, "step": 6542 }, { "epoch": 0.63896484375, "grad_norm": 0.191527858376503, "learning_rate": 0.00018689642779824435, "loss": 1.8383, "step": 6543 }, { "epoch": 0.6390625, "grad_norm": 0.18590493500232697, "learning_rate": 0.00018683099818649545, "loss": 1.8149, "step": 6544 }, { "epoch": 0.63916015625, "grad_norm": 0.19519750773906708, "learning_rate": 0.00018676557738204886, "loss": 1.8194, "step": 6545 }, { "epoch": 0.6392578125, "grad_norm": 0.19056223332881927, "learning_rate": 0.00018670016539143935, "loss": 1.7644, "step": 6546 }, { "epoch": 0.63935546875, "grad_norm": 0.19695255160331726, "learning_rate": 0.00018663476222120112, "loss": 1.7867, "step": 6547 }, { "epoch": 0.639453125, "grad_norm": 0.19194240868091583, "learning_rate": 0.0001865693678778672, "loss": 1.7725, "step": 6548 }, { "epoch": 0.63955078125, "grad_norm": 0.21404105424880981, "learning_rate": 0.00018650398236797012, "loss": 1.8127, "step": 6549 }, { "epoch": 0.6396484375, "grad_norm": 0.1939680129289627, "learning_rate": 0.0001864386056980411, "loss": 1.7956, "step": 6550 }, { "epoch": 0.63974609375, "grad_norm": 0.18326535820960999, "learning_rate": 0.00018637323787461076, "loss": 1.7909, "step": 6551 }, { "epoch": 0.63984375, "grad_norm": 
0.1802639663219452, "learning_rate": 0.00018630787890420886, "loss": 1.7896, "step": 6552 }, { "epoch": 0.63994140625, "grad_norm": 0.1807204782962799, "learning_rate": 0.00018624252879336406, "loss": 1.7904, "step": 6553 }, { "epoch": 0.6400390625, "grad_norm": 0.2175232470035553, "learning_rate": 0.0001861771875486044, "loss": 1.8119, "step": 6554 }, { "epoch": 0.64013671875, "grad_norm": 0.15637123584747314, "learning_rate": 0.00018611185517645678, "loss": 1.757, "step": 6555 }, { "epoch": 0.640234375, "grad_norm": 0.2093076854944229, "learning_rate": 0.0001860465316834473, "loss": 1.7989, "step": 6556 }, { "epoch": 0.64033203125, "grad_norm": 0.15806697309017181, "learning_rate": 0.00018598121707610134, "loss": 1.7481, "step": 6557 }, { "epoch": 0.6404296875, "grad_norm": 0.24754294753074646, "learning_rate": 0.000185915911360943, "loss": 1.8406, "step": 6558 }, { "epoch": 0.64052734375, "grad_norm": 0.19666072726249695, "learning_rate": 0.00018585061454449604, "loss": 1.8213, "step": 6559 }, { "epoch": 0.640625, "grad_norm": 0.20593535900115967, "learning_rate": 0.00018578532663328284, "loss": 1.7746, "step": 6560 }, { "epoch": 0.64072265625, "grad_norm": 0.16784875094890594, "learning_rate": 0.00018572004763382517, "loss": 1.7937, "step": 6561 }, { "epoch": 0.6408203125, "grad_norm": 0.18931801617145538, "learning_rate": 0.00018565477755264377, "loss": 1.7963, "step": 6562 }, { "epoch": 0.64091796875, "grad_norm": 0.17548868060112, "learning_rate": 0.0001855895163962586, "loss": 1.8134, "step": 6563 }, { "epoch": 0.641015625, "grad_norm": 0.17581219971179962, "learning_rate": 0.00018552426417118857, "loss": 1.8426, "step": 6564 }, { "epoch": 0.64111328125, "grad_norm": 0.17546550929546356, "learning_rate": 0.00018545902088395184, "loss": 1.784, "step": 6565 }, { "epoch": 0.6412109375, "grad_norm": 0.18853163719177246, "learning_rate": 0.00018539378654106576, "loss": 1.7456, "step": 6566 }, { "epoch": 0.64130859375, "grad_norm": 0.17986208200454712, "learning_rate": 0.0001853285611490465, "loss": 1.8086, "step": 6567 }, { "epoch": 0.64140625, "grad_norm": 0.22958388924598694, "learning_rate": 0.00018526334471440965, "loss": 1.8208, "step": 6568 }, { "epoch": 0.64150390625, "grad_norm": 0.18311670422554016, "learning_rate": 0.00018519813724366958, "loss": 1.793, "step": 6569 }, { "epoch": 0.6416015625, "grad_norm": 0.2273964136838913, "learning_rate": 0.00018513293874334008, "loss": 1.8253, "step": 6570 }, { "epoch": 0.64169921875, "grad_norm": 0.19004887342453003, "learning_rate": 0.0001850677492199338, "loss": 1.7819, "step": 6571 }, { "epoch": 0.641796875, "grad_norm": 0.22003008425235748, "learning_rate": 0.00018500256867996263, "loss": 1.8137, "step": 6572 }, { "epoch": 0.64189453125, "grad_norm": 0.17378896474838257, "learning_rate": 0.0001849373971299376, "loss": 1.7772, "step": 6573 }, { "epoch": 0.6419921875, "grad_norm": 0.23605036735534668, "learning_rate": 0.00018487223457636865, "loss": 1.8309, "step": 6574 }, { "epoch": 0.64208984375, "grad_norm": 0.1655806452035904, "learning_rate": 0.0001848070810257651, "loss": 1.8421, "step": 6575 }, { "epoch": 0.6421875, "grad_norm": 0.22382014989852905, "learning_rate": 0.00018474193648463505, "loss": 1.8011, "step": 6576 }, { "epoch": 0.64228515625, "grad_norm": 0.176531121134758, "learning_rate": 0.00018467680095948596, "loss": 1.836, "step": 6577 }, { "epoch": 0.6423828125, "grad_norm": 0.20035278797149658, "learning_rate": 0.00018461167445682426, "loss": 1.8297, "step": 6578 }, { "epoch": 0.64248046875, "grad_norm": 
0.18749552965164185, "learning_rate": 0.00018454655698315548, "loss": 1.8085, "step": 6579 }, { "epoch": 0.642578125, "grad_norm": 0.18144844472408295, "learning_rate": 0.00018448144854498434, "loss": 1.7764, "step": 6580 }, { "epoch": 0.64267578125, "grad_norm": 0.23240096867084503, "learning_rate": 0.00018441634914881456, "loss": 1.81, "step": 6581 }, { "epoch": 0.6427734375, "grad_norm": 0.19402514398097992, "learning_rate": 0.00018435125880114905, "loss": 1.7866, "step": 6582 }, { "epoch": 0.64287109375, "grad_norm": 0.2684725522994995, "learning_rate": 0.00018428617750848963, "loss": 1.8082, "step": 6583 }, { "epoch": 0.64296875, "grad_norm": 0.22399617731571198, "learning_rate": 0.00018422110527733753, "loss": 1.779, "step": 6584 }, { "epoch": 0.64306640625, "grad_norm": 0.21615202724933624, "learning_rate": 0.00018415604211419268, "loss": 1.8208, "step": 6585 }, { "epoch": 0.6431640625, "grad_norm": 0.2104441523551941, "learning_rate": 0.00018409098802555442, "loss": 1.8221, "step": 6586 }, { "epoch": 0.64326171875, "grad_norm": 0.19370757043361664, "learning_rate": 0.00018402594301792114, "loss": 1.7746, "step": 6587 }, { "epoch": 0.643359375, "grad_norm": 0.203922301530838, "learning_rate": 0.00018396090709779013, "loss": 1.8117, "step": 6588 }, { "epoch": 0.64345703125, "grad_norm": 0.16236847639083862, "learning_rate": 0.00018389588027165806, "loss": 1.8202, "step": 6589 }, { "epoch": 0.6435546875, "grad_norm": 0.19480746984481812, "learning_rate": 0.00018383086254602037, "loss": 1.7618, "step": 6590 }, { "epoch": 0.64365234375, "grad_norm": 0.17380346357822418, "learning_rate": 0.00018376585392737183, "loss": 1.7542, "step": 6591 }, { "epoch": 0.64375, "grad_norm": 0.1908940225839615, "learning_rate": 0.00018370085442220624, "loss": 1.7974, "step": 6592 }, { "epoch": 0.64384765625, "grad_norm": 0.163289412856102, "learning_rate": 0.00018363586403701639, "loss": 1.7816, "step": 6593 }, { "epoch": 0.6439453125, "grad_norm": 0.16874434053897858, "learning_rate": 0.00018357088277829443, "loss": 1.8044, "step": 6594 }, { "epoch": 0.64404296875, "grad_norm": 0.1882263422012329, "learning_rate": 0.0001835059106525312, "loss": 1.851, "step": 6595 }, { "epoch": 0.644140625, "grad_norm": 0.17016306519508362, "learning_rate": 0.000183440947666217, "loss": 1.8335, "step": 6596 }, { "epoch": 0.64423828125, "grad_norm": 0.15540587902069092, "learning_rate": 0.0001833759938258409, "loss": 1.8427, "step": 6597 }, { "epoch": 0.6443359375, "grad_norm": 0.15746371448040009, "learning_rate": 0.00018331104913789137, "loss": 1.752, "step": 6598 }, { "epoch": 0.64443359375, "grad_norm": 0.1669914275407791, "learning_rate": 0.00018324611360885567, "loss": 1.8007, "step": 6599 }, { "epoch": 0.64453125, "grad_norm": 0.18472078442573547, "learning_rate": 0.00018318118724522037, "loss": 1.7944, "step": 6600 }, { "epoch": 0.64462890625, "grad_norm": 0.19039252400398254, "learning_rate": 0.00018311627005347108, "loss": 1.7771, "step": 6601 }, { "epoch": 0.6447265625, "grad_norm": 0.19019733369350433, "learning_rate": 0.00018305136204009237, "loss": 1.7949, "step": 6602 }, { "epoch": 0.64482421875, "grad_norm": 0.17357708513736725, "learning_rate": 0.00018298646321156803, "loss": 1.7943, "step": 6603 }, { "epoch": 0.644921875, "grad_norm": 0.1723673790693283, "learning_rate": 0.00018292157357438084, "loss": 1.7639, "step": 6604 }, { "epoch": 0.64501953125, "grad_norm": 0.17414981126785278, "learning_rate": 0.0001828566931350127, "loss": 1.8162, "step": 6605 }, { "epoch": 0.6451171875, "grad_norm": 
0.17521241307258606, "learning_rate": 0.00018279182189994458, "loss": 1.7837, "step": 6606 }, { "epoch": 0.64521484375, "grad_norm": 0.19556209444999695, "learning_rate": 0.00018272695987565652, "loss": 1.8011, "step": 6607 }, { "epoch": 0.6453125, "grad_norm": 0.17891360819339752, "learning_rate": 0.0001826621070686278, "loss": 1.7864, "step": 6608 }, { "epoch": 0.64541015625, "grad_norm": 0.18578435480594635, "learning_rate": 0.00018259726348533644, "loss": 1.762, "step": 6609 }, { "epoch": 0.6455078125, "grad_norm": 0.1856592297554016, "learning_rate": 0.00018253242913225993, "loss": 1.7992, "step": 6610 }, { "epoch": 0.64560546875, "grad_norm": 0.17778022587299347, "learning_rate": 0.00018246760401587452, "loss": 1.7617, "step": 6611 }, { "epoch": 0.645703125, "grad_norm": 0.18594184517860413, "learning_rate": 0.00018240278814265576, "loss": 1.8189, "step": 6612 }, { "epoch": 0.64580078125, "grad_norm": 0.18069908022880554, "learning_rate": 0.00018233798151907804, "loss": 1.8252, "step": 6613 }, { "epoch": 0.6458984375, "grad_norm": 0.18020601570606232, "learning_rate": 0.00018227318415161504, "loss": 1.8138, "step": 6614 }, { "epoch": 0.64599609375, "grad_norm": 0.17145338654518127, "learning_rate": 0.00018220839604673957, "loss": 1.7755, "step": 6615 }, { "epoch": 0.64609375, "grad_norm": 0.187910258769989, "learning_rate": 0.00018214361721092322, "loss": 1.8021, "step": 6616 }, { "epoch": 0.64619140625, "grad_norm": 0.1596071571111679, "learning_rate": 0.00018207884765063687, "loss": 1.7953, "step": 6617 }, { "epoch": 0.6462890625, "grad_norm": 0.1951507180929184, "learning_rate": 0.0001820140873723504, "loss": 1.7596, "step": 6618 }, { "epoch": 0.64638671875, "grad_norm": 0.15308111906051636, "learning_rate": 0.00018194933638253293, "loss": 1.8214, "step": 6619 }, { "epoch": 0.646484375, "grad_norm": 0.1699024885892868, "learning_rate": 0.00018188459468765233, "loss": 1.7938, "step": 6620 }, { "epoch": 0.64658203125, "grad_norm": 0.15247894823551178, "learning_rate": 0.00018181986229417568, "loss": 1.7426, "step": 6621 }, { "epoch": 0.6466796875, "grad_norm": 0.17210711538791656, "learning_rate": 0.00018175513920856945, "loss": 1.7684, "step": 6622 }, { "epoch": 0.64677734375, "grad_norm": 0.1601727306842804, "learning_rate": 0.0001816904254372987, "loss": 1.8074, "step": 6623 }, { "epoch": 0.646875, "grad_norm": 0.17445522546768188, "learning_rate": 0.00018162572098682779, "loss": 1.8004, "step": 6624 }, { "epoch": 0.64697265625, "grad_norm": 0.1846470832824707, "learning_rate": 0.00018156102586362012, "loss": 1.8071, "step": 6625 }, { "epoch": 0.6470703125, "grad_norm": 0.1782231479883194, "learning_rate": 0.00018149634007413824, "loss": 1.8218, "step": 6626 }, { "epoch": 0.64716796875, "grad_norm": 0.18465591967105865, "learning_rate": 0.00018143166362484355, "loss": 1.7711, "step": 6627 }, { "epoch": 0.647265625, "grad_norm": 0.21036024391651154, "learning_rate": 0.00018136699652219663, "loss": 1.7966, "step": 6628 }, { "epoch": 0.64736328125, "grad_norm": 0.18975861370563507, "learning_rate": 0.00018130233877265733, "loss": 1.8055, "step": 6629 }, { "epoch": 0.6474609375, "grad_norm": 0.17114122211933136, "learning_rate": 0.00018123769038268434, "loss": 1.7909, "step": 6630 }, { "epoch": 0.64755859375, "grad_norm": 0.17959724366664886, "learning_rate": 0.00018117305135873546, "loss": 1.8131, "step": 6631 }, { "epoch": 0.64765625, "grad_norm": 0.18097108602523804, "learning_rate": 0.00018110842170726744, "loss": 1.7694, "step": 6632 }, { "epoch": 0.64775390625, "grad_norm": 
0.1652085781097412, "learning_rate": 0.00018104380143473626, "loss": 1.7934, "step": 6633 }, { "epoch": 0.6478515625, "grad_norm": 0.19862763583660126, "learning_rate": 0.000180979190547597, "loss": 1.8132, "step": 6634 }, { "epoch": 0.64794921875, "grad_norm": 0.17286108434200287, "learning_rate": 0.00018091458905230363, "loss": 1.7564, "step": 6635 }, { "epoch": 0.648046875, "grad_norm": 0.1997736096382141, "learning_rate": 0.00018084999695530928, "loss": 1.7758, "step": 6636 }, { "epoch": 0.64814453125, "grad_norm": 0.19438709318637848, "learning_rate": 0.0001807854142630661, "loss": 1.7878, "step": 6637 }, { "epoch": 0.6482421875, "grad_norm": 0.21814970672130585, "learning_rate": 0.00018072084098202552, "loss": 1.7816, "step": 6638 }, { "epoch": 0.64833984375, "grad_norm": 0.18091824650764465, "learning_rate": 0.00018065627711863757, "loss": 1.7827, "step": 6639 }, { "epoch": 0.6484375, "grad_norm": 0.21233859658241272, "learning_rate": 0.00018059172267935177, "loss": 1.807, "step": 6640 }, { "epoch": 0.64853515625, "grad_norm": 0.21697354316711426, "learning_rate": 0.00018052717767061643, "loss": 1.7976, "step": 6641 }, { "epoch": 0.6486328125, "grad_norm": 0.1806134581565857, "learning_rate": 0.00018046264209887913, "loss": 1.7615, "step": 6642 }, { "epoch": 0.64873046875, "grad_norm": 0.2275855988264084, "learning_rate": 0.00018039811597058637, "loss": 1.7797, "step": 6643 }, { "epoch": 0.648828125, "grad_norm": 0.19118793308734894, "learning_rate": 0.0001803335992921837, "loss": 1.7934, "step": 6644 }, { "epoch": 0.64892578125, "grad_norm": 0.21856589615345, "learning_rate": 0.00018026909207011582, "loss": 1.7977, "step": 6645 }, { "epoch": 0.6490234375, "grad_norm": 0.2057735025882721, "learning_rate": 0.0001802045943108264, "loss": 1.8211, "step": 6646 }, { "epoch": 0.64912109375, "grad_norm": 0.19466827809810638, "learning_rate": 0.0001801401060207582, "loss": 1.7959, "step": 6647 }, { "epoch": 0.64921875, "grad_norm": 0.18857146799564362, "learning_rate": 0.00018007562720635302, "loss": 1.8197, "step": 6648 }, { "epoch": 0.64931640625, "grad_norm": 0.20495252311229706, "learning_rate": 0.00018001115787405164, "loss": 1.7957, "step": 6649 }, { "epoch": 0.6494140625, "grad_norm": 0.20179209113121033, "learning_rate": 0.00017994669803029418, "loss": 1.8014, "step": 6650 }, { "epoch": 0.64951171875, "grad_norm": 0.16313163936138153, "learning_rate": 0.0001798822476815194, "loss": 1.8223, "step": 6651 }, { "epoch": 0.649609375, "grad_norm": 0.9002783894538879, "learning_rate": 0.00017981780683416554, "loss": 1.813, "step": 6652 }, { "epoch": 0.64970703125, "grad_norm": 0.1902114748954773, "learning_rate": 0.00017975337549466948, "loss": 1.8742, "step": 6653 }, { "epoch": 0.6498046875, "grad_norm": 0.18916437029838562, "learning_rate": 0.00017968895366946735, "loss": 1.8, "step": 6654 }, { "epoch": 0.64990234375, "grad_norm": 0.17160257697105408, "learning_rate": 0.00017962454136499434, "loss": 1.8173, "step": 6655 }, { "epoch": 0.65, "grad_norm": 0.20139747858047485, "learning_rate": 0.00017956013858768478, "loss": 1.8, "step": 6656 }, { "epoch": 0.65009765625, "grad_norm": 0.17342814803123474, "learning_rate": 0.00017949574534397185, "loss": 1.8104, "step": 6657 }, { "epoch": 0.6501953125, "grad_norm": 0.2100597470998764, "learning_rate": 0.00017943136164028778, "loss": 1.8052, "step": 6658 }, { "epoch": 0.65029296875, "grad_norm": 0.17557547986507416, "learning_rate": 0.00017936698748306423, "loss": 1.7853, "step": 6659 }, { "epoch": 0.650390625, "grad_norm": 0.1991180032491684, 
"learning_rate": 0.00017930262287873123, "loss": 1.837, "step": 6660 }, { "epoch": 0.65048828125, "grad_norm": 0.18550361692905426, "learning_rate": 0.0001792382678337184, "loss": 1.7838, "step": 6661 }, { "epoch": 0.6505859375, "grad_norm": 0.21125327050685883, "learning_rate": 0.00017917392235445423, "loss": 1.7959, "step": 6662 }, { "epoch": 0.65068359375, "grad_norm": 0.1706579625606537, "learning_rate": 0.0001791095864473663, "loss": 1.7563, "step": 6663 }, { "epoch": 0.65078125, "grad_norm": 0.19742251932621002, "learning_rate": 0.00017904526011888118, "loss": 1.8011, "step": 6664 }, { "epoch": 0.65087890625, "grad_norm": 0.16532139480113983, "learning_rate": 0.0001789809433754245, "loss": 1.7886, "step": 6665 }, { "epoch": 0.6509765625, "grad_norm": 0.1771339774131775, "learning_rate": 0.00017891663622342097, "loss": 1.7994, "step": 6666 }, { "epoch": 0.65107421875, "grad_norm": 0.19371575117111206, "learning_rate": 0.00017885233866929423, "loss": 1.7908, "step": 6667 }, { "epoch": 0.651171875, "grad_norm": 0.15499649941921234, "learning_rate": 0.00017878805071946702, "loss": 1.7711, "step": 6668 }, { "epoch": 0.65126953125, "grad_norm": 0.21640025079250336, "learning_rate": 0.00017872377238036118, "loss": 1.7952, "step": 6669 }, { "epoch": 0.6513671875, "grad_norm": 0.1708303540945053, "learning_rate": 0.0001786595036583976, "loss": 1.7663, "step": 6670 }, { "epoch": 0.65146484375, "grad_norm": 0.192025825381279, "learning_rate": 0.00017859524455999604, "loss": 1.8209, "step": 6671 }, { "epoch": 0.6515625, "grad_norm": 0.18873801827430725, "learning_rate": 0.0001785309950915755, "loss": 1.7575, "step": 6672 }, { "epoch": 0.65166015625, "grad_norm": 0.16461050510406494, "learning_rate": 0.00017846675525955402, "loss": 1.8007, "step": 6673 }, { "epoch": 0.6517578125, "grad_norm": 0.16708973050117493, "learning_rate": 0.00017840252507034837, "loss": 1.8122, "step": 6674 }, { "epoch": 0.65185546875, "grad_norm": 0.1955346316099167, "learning_rate": 0.00017833830453037468, "loss": 1.821, "step": 6675 }, { "epoch": 0.651953125, "grad_norm": 0.1643725037574768, "learning_rate": 0.00017827409364604808, "loss": 1.7475, "step": 6676 }, { "epoch": 0.65205078125, "grad_norm": 0.19626560807228088, "learning_rate": 0.0001782098924237826, "loss": 1.8254, "step": 6677 }, { "epoch": 0.6521484375, "grad_norm": 0.17043158411979675, "learning_rate": 0.00017814570086999133, "loss": 1.7895, "step": 6678 }, { "epoch": 0.65224609375, "grad_norm": 0.18180353939533234, "learning_rate": 0.00017808151899108653, "loss": 1.7952, "step": 6679 }, { "epoch": 0.65234375, "grad_norm": 0.18253888189792633, "learning_rate": 0.00017801734679347942, "loss": 1.8248, "step": 6680 }, { "epoch": 0.65244140625, "grad_norm": 0.16202782094478607, "learning_rate": 0.00017795318428358016, "loss": 1.8008, "step": 6681 }, { "epoch": 0.6525390625, "grad_norm": 0.18764299154281616, "learning_rate": 0.00017788903146779794, "loss": 1.7769, "step": 6682 }, { "epoch": 0.65263671875, "grad_norm": 0.16182288527488708, "learning_rate": 0.00017782488835254117, "loss": 1.7936, "step": 6683 }, { "epoch": 0.652734375, "grad_norm": 0.1879257708787918, "learning_rate": 0.00017776075494421723, "loss": 1.8363, "step": 6684 }, { "epoch": 0.65283203125, "grad_norm": 0.15209834277629852, "learning_rate": 0.00017769663124923236, "loss": 1.7414, "step": 6685 }, { "epoch": 0.6529296875, "grad_norm": 0.19562619924545288, "learning_rate": 0.000177632517273992, "loss": 1.8095, "step": 6686 }, { "epoch": 0.65302734375, "grad_norm": 0.1562444567680359, 
"learning_rate": 0.00017756841302490057, "loss": 1.7958, "step": 6687 }, { "epoch": 0.653125, "grad_norm": 0.19683891534805298, "learning_rate": 0.00017750431850836152, "loss": 1.7775, "step": 6688 }, { "epoch": 0.65322265625, "grad_norm": 0.1947227567434311, "learning_rate": 0.00017744023373077734, "loss": 1.8643, "step": 6689 }, { "epoch": 0.6533203125, "grad_norm": 0.1775227040052414, "learning_rate": 0.00017737615869854945, "loss": 1.7703, "step": 6690 }, { "epoch": 0.65341796875, "grad_norm": 0.20798581838607788, "learning_rate": 0.00017731209341807847, "loss": 1.7872, "step": 6691 }, { "epoch": 0.653515625, "grad_norm": 0.14988335967063904, "learning_rate": 0.0001772480378957639, "loss": 1.8208, "step": 6692 }, { "epoch": 0.65361328125, "grad_norm": 0.21855571866035461, "learning_rate": 0.00017718399213800435, "loss": 1.7972, "step": 6693 }, { "epoch": 0.6537109375, "grad_norm": 0.20375514030456543, "learning_rate": 0.00017711995615119747, "loss": 1.7989, "step": 6694 }, { "epoch": 0.65380859375, "grad_norm": 0.17546585202217102, "learning_rate": 0.0001770559299417398, "loss": 1.7935, "step": 6695 }, { "epoch": 0.65390625, "grad_norm": 0.222710520029068, "learning_rate": 0.000176991913516027, "loss": 1.7914, "step": 6696 }, { "epoch": 0.65400390625, "grad_norm": 0.15917977690696716, "learning_rate": 0.00017692790688045374, "loss": 1.7972, "step": 6697 }, { "epoch": 0.6541015625, "grad_norm": 0.2450484335422516, "learning_rate": 0.00017686391004141379, "loss": 1.7615, "step": 6698 }, { "epoch": 0.65419921875, "grad_norm": 0.15653565526008606, "learning_rate": 0.0001767999230052998, "loss": 1.7966, "step": 6699 }, { "epoch": 0.654296875, "grad_norm": 0.21703962981700897, "learning_rate": 0.0001767359457785035, "loss": 1.7667, "step": 6700 }, { "epoch": 0.65439453125, "grad_norm": 0.2027837336063385, "learning_rate": 0.00017667197836741577, "loss": 1.7982, "step": 6701 }, { "epoch": 0.6544921875, "grad_norm": 0.15651676058769226, "learning_rate": 0.00017660802077842623, "loss": 1.7457, "step": 6702 }, { "epoch": 0.65458984375, "grad_norm": 0.1962585151195526, "learning_rate": 0.00017654407301792373, "loss": 1.7826, "step": 6703 }, { "epoch": 0.6546875, "grad_norm": 0.14955860376358032, "learning_rate": 0.0001764801350922961, "loss": 1.8011, "step": 6704 }, { "epoch": 0.65478515625, "grad_norm": 0.21084991097450256, "learning_rate": 0.00017641620700793015, "loss": 1.8437, "step": 6705 }, { "epoch": 0.6548828125, "grad_norm": 0.15199366211891174, "learning_rate": 0.00017635228877121179, "loss": 1.7937, "step": 6706 }, { "epoch": 0.65498046875, "grad_norm": 0.17290577292442322, "learning_rate": 0.0001762883803885258, "loss": 1.8231, "step": 6707 }, { "epoch": 0.655078125, "grad_norm": 0.17053087055683136, "learning_rate": 0.00017622448186625618, "loss": 1.8027, "step": 6708 }, { "epoch": 0.65517578125, "grad_norm": 0.1526997685432434, "learning_rate": 0.0001761605932107857, "loss": 1.7629, "step": 6709 }, { "epoch": 0.6552734375, "grad_norm": 0.18400055170059204, "learning_rate": 0.00017609671442849628, "loss": 1.7904, "step": 6710 }, { "epoch": 0.65537109375, "grad_norm": 0.16712503135204315, "learning_rate": 0.00017603284552576893, "loss": 1.7984, "step": 6711 }, { "epoch": 0.65546875, "grad_norm": 0.18413284420967102, "learning_rate": 0.00017596898650898353, "loss": 1.8232, "step": 6712 }, { "epoch": 0.65556640625, "grad_norm": 0.18146318197250366, "learning_rate": 0.000175905137384519, "loss": 1.8149, "step": 6713 }, { "epoch": 0.6556640625, "grad_norm": 0.17898768186569214, 
"learning_rate": 0.00017584129815875337, "loss": 1.7926, "step": 6714 }, { "epoch": 0.65576171875, "grad_norm": 0.18604902923107147, "learning_rate": 0.00017577746883806368, "loss": 1.7697, "step": 6715 }, { "epoch": 0.655859375, "grad_norm": 0.16804422438144684, "learning_rate": 0.00017571364942882572, "loss": 1.7727, "step": 6716 }, { "epoch": 0.65595703125, "grad_norm": 0.19525182247161865, "learning_rate": 0.00017564983993741457, "loss": 1.838, "step": 6717 }, { "epoch": 0.6560546875, "grad_norm": 0.19974558055400848, "learning_rate": 0.0001755860403702043, "loss": 1.7995, "step": 6718 }, { "epoch": 0.65615234375, "grad_norm": 0.18463267385959625, "learning_rate": 0.00017552225073356786, "loss": 1.8073, "step": 6719 }, { "epoch": 0.65625, "grad_norm": 0.17929869890213013, "learning_rate": 0.00017545847103387724, "loss": 1.7459, "step": 6720 }, { "epoch": 0.65634765625, "grad_norm": 0.1914980262517929, "learning_rate": 0.0001753947012775035, "loss": 1.7303, "step": 6721 }, { "epoch": 0.6564453125, "grad_norm": 0.17289142310619354, "learning_rate": 0.00017533094147081685, "loss": 1.8047, "step": 6722 }, { "epoch": 0.65654296875, "grad_norm": 0.1822986602783203, "learning_rate": 0.000175267191620186, "loss": 1.7917, "step": 6723 }, { "epoch": 0.656640625, "grad_norm": 0.18381506204605103, "learning_rate": 0.00017520345173197916, "loss": 1.8077, "step": 6724 }, { "epoch": 0.65673828125, "grad_norm": 0.15967312455177307, "learning_rate": 0.00017513972181256338, "loss": 1.7824, "step": 6725 }, { "epoch": 0.6568359375, "grad_norm": 0.20369911193847656, "learning_rate": 0.0001750760018683047, "loss": 1.8234, "step": 6726 }, { "epoch": 0.65693359375, "grad_norm": 0.1579878181219101, "learning_rate": 0.00017501229190556816, "loss": 1.7657, "step": 6727 }, { "epoch": 0.65703125, "grad_norm": 0.1903339922428131, "learning_rate": 0.00017494859193071792, "loss": 1.8061, "step": 6728 }, { "epoch": 0.65712890625, "grad_norm": 0.16017843782901764, "learning_rate": 0.00017488490195011699, "loss": 1.7708, "step": 6729 }, { "epoch": 0.6572265625, "grad_norm": 0.19143888354301453, "learning_rate": 0.00017482122197012736, "loss": 1.8376, "step": 6730 }, { "epoch": 0.65732421875, "grad_norm": 0.19964507222175598, "learning_rate": 0.00017475755199711018, "loss": 1.8178, "step": 6731 }, { "epoch": 0.657421875, "grad_norm": 0.19024160504341125, "learning_rate": 0.00017469389203742541, "loss": 1.7528, "step": 6732 }, { "epoch": 0.65751953125, "grad_norm": 0.19692420959472656, "learning_rate": 0.00017463024209743227, "loss": 1.7634, "step": 6733 }, { "epoch": 0.6576171875, "grad_norm": 0.20431287586688995, "learning_rate": 0.00017456660218348872, "loss": 1.7925, "step": 6734 }, { "epoch": 0.65771484375, "grad_norm": 0.18051990866661072, "learning_rate": 0.00017450297230195183, "loss": 1.7719, "step": 6735 }, { "epoch": 0.6578125, "grad_norm": 0.21387675404548645, "learning_rate": 0.00017443935245917784, "loss": 1.7441, "step": 6736 }, { "epoch": 0.65791015625, "grad_norm": 0.16279537975788116, "learning_rate": 0.0001743757426615215, "loss": 1.7754, "step": 6737 }, { "epoch": 0.6580078125, "grad_norm": 0.19330066442489624, "learning_rate": 0.00017431214291533704, "loss": 1.755, "step": 6738 }, { "epoch": 0.65810546875, "grad_norm": 0.18594518303871155, "learning_rate": 0.00017424855322697747, "loss": 1.8167, "step": 6739 }, { "epoch": 0.658203125, "grad_norm": 0.2078866809606552, "learning_rate": 0.00017418497360279485, "loss": 1.777, "step": 6740 }, { "epoch": 0.65830078125, "grad_norm": 0.17232948541641235, 
"learning_rate": 0.00017412140404914024, "loss": 1.8026, "step": 6741 }, { "epoch": 0.6583984375, "grad_norm": 0.24474874138832092, "learning_rate": 0.00017405784457236368, "loss": 1.7968, "step": 6742 }, { "epoch": 0.65849609375, "grad_norm": 0.1580680012702942, "learning_rate": 0.00017399429517881427, "loss": 1.7805, "step": 6743 }, { "epoch": 0.65859375, "grad_norm": 0.22135238349437714, "learning_rate": 0.0001739307558748398, "loss": 1.8248, "step": 6744 }, { "epoch": 0.65869140625, "grad_norm": 0.17765362560749054, "learning_rate": 0.00017386722666678747, "loss": 1.7538, "step": 6745 }, { "epoch": 0.6587890625, "grad_norm": 0.20476053655147552, "learning_rate": 0.00017380370756100324, "loss": 1.7737, "step": 6746 }, { "epoch": 0.65888671875, "grad_norm": 0.19649860262870789, "learning_rate": 0.00017374019856383212, "loss": 1.7579, "step": 6747 }, { "epoch": 0.658984375, "grad_norm": 0.18356148898601532, "learning_rate": 0.00017367669968161803, "loss": 1.8151, "step": 6748 }, { "epoch": 0.65908203125, "grad_norm": 0.21151964366436005, "learning_rate": 0.00017361321092070403, "loss": 1.8144, "step": 6749 }, { "epoch": 0.6591796875, "grad_norm": 0.18294645845890045, "learning_rate": 0.00017354973228743214, "loss": 1.7923, "step": 6750 }, { "epoch": 0.65927734375, "grad_norm": 0.25080451369285583, "learning_rate": 0.00017348626378814317, "loss": 1.8008, "step": 6751 }, { "epoch": 0.659375, "grad_norm": 0.1969689130783081, "learning_rate": 0.000173422805429177, "loss": 1.795, "step": 6752 }, { "epoch": 0.65947265625, "grad_norm": 0.22283224761486053, "learning_rate": 0.00017335935721687285, "loss": 1.81, "step": 6753 }, { "epoch": 0.6595703125, "grad_norm": 0.19613216817378998, "learning_rate": 0.00017329591915756835, "loss": 1.8092, "step": 6754 }, { "epoch": 0.65966796875, "grad_norm": 0.22282087802886963, "learning_rate": 0.00017323249125760051, "loss": 1.7997, "step": 6755 }, { "epoch": 0.659765625, "grad_norm": 0.21357446908950806, "learning_rate": 0.00017316907352330524, "loss": 1.7547, "step": 6756 }, { "epoch": 0.65986328125, "grad_norm": 0.20224033296108246, "learning_rate": 0.00017310566596101746, "loss": 1.7539, "step": 6757 }, { "epoch": 0.6599609375, "grad_norm": 0.21612948179244995, "learning_rate": 0.0001730422685770709, "loss": 1.7912, "step": 6758 }, { "epoch": 0.66005859375, "grad_norm": 0.18233241140842438, "learning_rate": 0.0001729788813777983, "loss": 1.7855, "step": 6759 }, { "epoch": 0.66015625, "grad_norm": 0.22323350608348846, "learning_rate": 0.00017291550436953184, "loss": 1.7742, "step": 6760 }, { "epoch": 0.66025390625, "grad_norm": 0.17308898270130157, "learning_rate": 0.000172852137558602, "loss": 1.7783, "step": 6761 }, { "epoch": 0.6603515625, "grad_norm": 0.23073777556419373, "learning_rate": 0.00017278878095133864, "loss": 1.778, "step": 6762 }, { "epoch": 0.66044921875, "grad_norm": 0.1768324226140976, "learning_rate": 0.00017272543455407064, "loss": 1.7874, "step": 6763 }, { "epoch": 0.660546875, "grad_norm": 0.22257494926452637, "learning_rate": 0.00017266209837312567, "loss": 1.8085, "step": 6764 }, { "epoch": 0.66064453125, "grad_norm": 0.15608584880828857, "learning_rate": 0.00017259877241483036, "loss": 1.7914, "step": 6765 }, { "epoch": 0.6607421875, "grad_norm": 0.20792222023010254, "learning_rate": 0.0001725354566855104, "loss": 1.8054, "step": 6766 }, { "epoch": 0.66083984375, "grad_norm": 0.18167360126972198, "learning_rate": 0.00017247215119149076, "loss": 1.7713, "step": 6767 }, { "epoch": 0.6609375, "grad_norm": 0.20959608256816864, 
"learning_rate": 0.00017240885593909484, "loss": 1.7989, "step": 6768 }, { "epoch": 0.66103515625, "grad_norm": 0.21458731591701508, "learning_rate": 0.00017234557093464525, "loss": 1.7663, "step": 6769 }, { "epoch": 0.6611328125, "grad_norm": 0.16325435042381287, "learning_rate": 0.00017228229618446378, "loss": 1.7555, "step": 6770 }, { "epoch": 0.66123046875, "grad_norm": 0.19551919400691986, "learning_rate": 0.00017221903169487097, "loss": 1.7579, "step": 6771 }, { "epoch": 0.661328125, "grad_norm": 0.17660580575466156, "learning_rate": 0.00017215577747218625, "loss": 1.7496, "step": 6772 }, { "epoch": 0.66142578125, "grad_norm": 0.21222719550132751, "learning_rate": 0.00017209253352272813, "loss": 1.8029, "step": 6773 }, { "epoch": 0.6615234375, "grad_norm": 0.16924133896827698, "learning_rate": 0.00017202929985281446, "loss": 1.7708, "step": 6774 }, { "epoch": 0.66162109375, "grad_norm": 0.20815926790237427, "learning_rate": 0.00017196607646876138, "loss": 1.8054, "step": 6775 }, { "epoch": 0.66171875, "grad_norm": 0.15620411932468414, "learning_rate": 0.00017190286337688445, "loss": 1.7558, "step": 6776 }, { "epoch": 0.66181640625, "grad_norm": 0.1959308683872223, "learning_rate": 0.00017183966058349816, "loss": 1.7611, "step": 6777 }, { "epoch": 0.6619140625, "grad_norm": 0.16405551135540009, "learning_rate": 0.00017177646809491589, "loss": 1.7634, "step": 6778 }, { "epoch": 0.66201171875, "grad_norm": 0.18788251280784607, "learning_rate": 0.00017171328591744994, "loss": 1.8367, "step": 6779 }, { "epoch": 0.662109375, "grad_norm": 0.18898065388202667, "learning_rate": 0.00017165011405741157, "loss": 1.7735, "step": 6780 }, { "epoch": 0.66220703125, "grad_norm": 0.15792739391326904, "learning_rate": 0.00017158695252111138, "loss": 1.8312, "step": 6781 }, { "epoch": 0.6623046875, "grad_norm": 0.16342100501060486, "learning_rate": 0.00017152380131485838, "loss": 1.8466, "step": 6782 }, { "epoch": 0.66240234375, "grad_norm": 0.18232177197933197, "learning_rate": 0.00017146066044496094, "loss": 1.7966, "step": 6783 }, { "epoch": 0.6625, "grad_norm": 0.2075268030166626, "learning_rate": 0.00017139752991772624, "loss": 1.7523, "step": 6784 }, { "epoch": 0.66259765625, "grad_norm": 0.17881329357624054, "learning_rate": 0.00017133440973946055, "loss": 1.7736, "step": 6785 }, { "epoch": 0.6626953125, "grad_norm": 0.1827075481414795, "learning_rate": 0.00017127129991646882, "loss": 1.8097, "step": 6786 }, { "epoch": 0.66279296875, "grad_norm": 0.1748536229133606, "learning_rate": 0.00017120820045505526, "loss": 1.7981, "step": 6787 }, { "epoch": 0.662890625, "grad_norm": 0.19093164801597595, "learning_rate": 0.00017114511136152305, "loss": 1.7954, "step": 6788 }, { "epoch": 0.66298828125, "grad_norm": 0.18449187278747559, "learning_rate": 0.00017108203264217412, "loss": 1.8067, "step": 6789 }, { "epoch": 0.6630859375, "grad_norm": 0.19091874361038208, "learning_rate": 0.00017101896430330945, "loss": 1.7977, "step": 6790 }, { "epoch": 0.66318359375, "grad_norm": 0.1991206258535385, "learning_rate": 0.0001709559063512291, "loss": 1.7918, "step": 6791 }, { "epoch": 0.66328125, "grad_norm": 0.16191111505031586, "learning_rate": 0.000170892858792232, "loss": 1.7576, "step": 6792 }, { "epoch": 0.66337890625, "grad_norm": 0.22220070660114288, "learning_rate": 0.00017082982163261595, "loss": 1.7753, "step": 6793 }, { "epoch": 0.6634765625, "grad_norm": 0.1800573170185089, "learning_rate": 0.0001707667948786778, "loss": 1.7721, "step": 6794 }, { "epoch": 0.66357421875, "grad_norm": 0.21311317384243011, 
"learning_rate": 0.0001707037785367135, "loss": 1.8158, "step": 6795 }, { "epoch": 0.663671875, "grad_norm": 0.18075942993164062, "learning_rate": 0.00017064077261301776, "loss": 1.8177, "step": 6796 }, { "epoch": 0.66376953125, "grad_norm": 0.21312564611434937, "learning_rate": 0.00017057777711388422, "loss": 1.7365, "step": 6797 }, { "epoch": 0.6638671875, "grad_norm": 0.17900118231773376, "learning_rate": 0.00017051479204560567, "loss": 1.7706, "step": 6798 }, { "epoch": 0.66396484375, "grad_norm": 0.19088128209114075, "learning_rate": 0.0001704518174144738, "loss": 1.8338, "step": 6799 }, { "epoch": 0.6640625, "grad_norm": 0.1717141717672348, "learning_rate": 0.0001703888532267791, "loss": 1.7539, "step": 6800 }, { "epoch": 0.66416015625, "grad_norm": 0.16970093548297882, "learning_rate": 0.0001703258994888111, "loss": 1.8149, "step": 6801 }, { "epoch": 0.6642578125, "grad_norm": 0.1924213469028473, "learning_rate": 0.0001702629562068586, "loss": 1.7706, "step": 6802 }, { "epoch": 0.66435546875, "grad_norm": 0.17158691585063934, "learning_rate": 0.00017020002338720874, "loss": 1.7884, "step": 6803 }, { "epoch": 0.664453125, "grad_norm": 0.16739550232887268, "learning_rate": 0.00017013710103614817, "loss": 1.7834, "step": 6804 }, { "epoch": 0.66455078125, "grad_norm": 0.20774681866168976, "learning_rate": 0.0001700741891599622, "loss": 1.7986, "step": 6805 }, { "epoch": 0.6646484375, "grad_norm": 0.18969817459583282, "learning_rate": 0.00017001128776493525, "loss": 1.8014, "step": 6806 }, { "epoch": 0.66474609375, "grad_norm": 0.20991547405719757, "learning_rate": 0.00016994839685735043, "loss": 1.6974, "step": 6807 }, { "epoch": 0.66484375, "grad_norm": 0.20767414569854736, "learning_rate": 0.00016988551644349, "loss": 1.7779, "step": 6808 }, { "epoch": 0.66494140625, "grad_norm": 0.2222302258014679, "learning_rate": 0.00016982264652963544, "loss": 1.779, "step": 6809 }, { "epoch": 0.6650390625, "grad_norm": 0.21608057618141174, "learning_rate": 0.00016975978712206662, "loss": 1.7939, "step": 6810 }, { "epoch": 0.66513671875, "grad_norm": 0.16491436958312988, "learning_rate": 0.00016969693822706271, "loss": 1.7725, "step": 6811 }, { "epoch": 0.665234375, "grad_norm": 0.21504542231559753, "learning_rate": 0.00016963409985090177, "loss": 1.8223, "step": 6812 }, { "epoch": 0.66533203125, "grad_norm": 0.17450939118862152, "learning_rate": 0.0001695712719998609, "loss": 1.7902, "step": 6813 }, { "epoch": 0.6654296875, "grad_norm": 0.1837131530046463, "learning_rate": 0.00016950845468021586, "loss": 1.7958, "step": 6814 }, { "epoch": 0.66552734375, "grad_norm": 0.2253214418888092, "learning_rate": 0.00016944564789824151, "loss": 1.7688, "step": 6815 }, { "epoch": 0.665625, "grad_norm": 0.15871387720108032, "learning_rate": 0.000169382851660212, "loss": 1.8295, "step": 6816 }, { "epoch": 0.66572265625, "grad_norm": 0.2105254828929901, "learning_rate": 0.00016932006597239984, "loss": 1.7696, "step": 6817 }, { "epoch": 0.6658203125, "grad_norm": 0.1697392463684082, "learning_rate": 0.00016925729084107683, "loss": 1.8087, "step": 6818 }, { "epoch": 0.66591796875, "grad_norm": 0.20705845952033997, "learning_rate": 0.00016919452627251372, "loss": 1.7502, "step": 6819 }, { "epoch": 0.666015625, "grad_norm": 0.1619587540626526, "learning_rate": 0.0001691317722729801, "loss": 1.7471, "step": 6820 }, { "epoch": 0.66611328125, "grad_norm": 0.17735999822616577, "learning_rate": 0.00016906902884874448, "loss": 1.7635, "step": 6821 }, { "epoch": 0.6662109375, "grad_norm": 0.18751943111419678, 
"learning_rate": 0.0001690062960060743, "loss": 1.8023, "step": 6822 }, { "epoch": 0.66630859375, "grad_norm": 0.17518527805805206, "learning_rate": 0.0001689435737512363, "loss": 1.8305, "step": 6823 }, { "epoch": 0.66640625, "grad_norm": 0.2056555151939392, "learning_rate": 0.0001688808620904957, "loss": 1.7672, "step": 6824 }, { "epoch": 0.66650390625, "grad_norm": 0.32423311471939087, "learning_rate": 0.00016881816103011678, "loss": 1.7661, "step": 6825 }, { "epoch": 0.6666015625, "grad_norm": 0.33057934045791626, "learning_rate": 0.00016875547057636288, "loss": 1.8266, "step": 6826 }, { "epoch": 0.66669921875, "grad_norm": 0.19986188411712646, "learning_rate": 0.00016869279073549633, "loss": 1.7875, "step": 6827 }, { "epoch": 0.666796875, "grad_norm": 0.27870315313339233, "learning_rate": 0.0001686301215137781, "loss": 1.7438, "step": 6828 }, { "epoch": 0.66689453125, "grad_norm": 0.20533014833927155, "learning_rate": 0.0001685674629174683, "loss": 1.7693, "step": 6829 }, { "epoch": 0.6669921875, "grad_norm": 0.24424049258232117, "learning_rate": 0.0001685048149528262, "loss": 1.8411, "step": 6830 }, { "epoch": 0.66708984375, "grad_norm": 0.19368165731430054, "learning_rate": 0.00016844217762610952, "loss": 1.8003, "step": 6831 }, { "epoch": 0.6671875, "grad_norm": 0.22332055866718292, "learning_rate": 0.00016837955094357532, "loss": 1.8041, "step": 6832 }, { "epoch": 0.66728515625, "grad_norm": 0.19814680516719818, "learning_rate": 0.0001683169349114794, "loss": 1.7719, "step": 6833 }, { "epoch": 0.6673828125, "grad_norm": 0.22233711183071136, "learning_rate": 0.00016825432953607663, "loss": 1.7839, "step": 6834 }, { "epoch": 0.66748046875, "grad_norm": 0.2249952256679535, "learning_rate": 0.00016819173482362056, "loss": 1.7814, "step": 6835 }, { "epoch": 0.667578125, "grad_norm": 0.1995055377483368, "learning_rate": 0.0001681291507803639, "loss": 1.7832, "step": 6836 }, { "epoch": 0.66767578125, "grad_norm": 0.22346456348896027, "learning_rate": 0.00016806657741255844, "loss": 1.8225, "step": 6837 }, { "epoch": 0.6677734375, "grad_norm": 0.1739036738872528, "learning_rate": 0.00016800401472645443, "loss": 1.8169, "step": 6838 }, { "epoch": 0.66787109375, "grad_norm": 0.21818433701992035, "learning_rate": 0.00016794146272830151, "loss": 1.7832, "step": 6839 }, { "epoch": 0.66796875, "grad_norm": 0.20815785229206085, "learning_rate": 0.000167878921424348, "loss": 1.8324, "step": 6840 }, { "epoch": 0.66806640625, "grad_norm": 0.19075965881347656, "learning_rate": 0.00016781639082084126, "loss": 1.8087, "step": 6841 }, { "epoch": 0.6681640625, "grad_norm": 0.19678600132465363, "learning_rate": 0.0001677538709240275, "loss": 1.801, "step": 6842 }, { "epoch": 0.66826171875, "grad_norm": 0.1962931752204895, "learning_rate": 0.00016769136174015175, "loss": 1.8147, "step": 6843 }, { "epoch": 0.668359375, "grad_norm": 0.1802641749382019, "learning_rate": 0.00016762886327545846, "loss": 1.7621, "step": 6844 }, { "epoch": 0.66845703125, "grad_norm": 0.1762697696685791, "learning_rate": 0.0001675663755361906, "loss": 1.7682, "step": 6845 }, { "epoch": 0.6685546875, "grad_norm": 0.18440324068069458, "learning_rate": 0.00016750389852858988, "loss": 1.737, "step": 6846 }, { "epoch": 0.66865234375, "grad_norm": 0.17862525582313538, "learning_rate": 0.00016744143225889743, "loss": 1.7741, "step": 6847 }, { "epoch": 0.66875, "grad_norm": 0.19747160375118256, "learning_rate": 0.0001673789767333531, "loss": 1.7619, "step": 6848 }, { "epoch": 0.66884765625, "grad_norm": 0.1986236721277237, 
"learning_rate": 0.00016731653195819544, "loss": 1.7897, "step": 6849 }, { "epoch": 0.6689453125, "grad_norm": 0.17687690258026123, "learning_rate": 0.00016725409793966217, "loss": 1.7007, "step": 6850 }, { "epoch": 0.66904296875, "grad_norm": 0.17786692082881927, "learning_rate": 0.0001671916746839901, "loss": 1.83, "step": 6851 }, { "epoch": 0.669140625, "grad_norm": 0.18878452479839325, "learning_rate": 0.00016712926219741465, "loss": 1.7679, "step": 6852 }, { "epoch": 0.66923828125, "grad_norm": 0.1696411371231079, "learning_rate": 0.0001670668604861702, "loss": 1.8074, "step": 6853 }, { "epoch": 0.6693359375, "grad_norm": 0.1823590099811554, "learning_rate": 0.00016700446955649024, "loss": 1.7643, "step": 6854 }, { "epoch": 0.66943359375, "grad_norm": 0.16580212116241455, "learning_rate": 0.00016694208941460704, "loss": 1.7824, "step": 6855 }, { "epoch": 0.66953125, "grad_norm": 0.20189423859119415, "learning_rate": 0.00016687972006675173, "loss": 1.8029, "step": 6856 }, { "epoch": 0.66962890625, "grad_norm": 0.1672143191099167, "learning_rate": 0.00016681736151915444, "loss": 1.7957, "step": 6857 }, { "epoch": 0.6697265625, "grad_norm": 0.22320561110973358, "learning_rate": 0.00016675501377804442, "loss": 1.7772, "step": 6858 }, { "epoch": 0.66982421875, "grad_norm": 0.15218566358089447, "learning_rate": 0.00016669267684964962, "loss": 1.8085, "step": 6859 }, { "epoch": 0.669921875, "grad_norm": 0.1865987330675125, "learning_rate": 0.00016663035074019684, "loss": 1.7641, "step": 6860 }, { "epoch": 0.67001953125, "grad_norm": 0.17941977083683014, "learning_rate": 0.00016656803545591188, "loss": 1.7858, "step": 6861 }, { "epoch": 0.6701171875, "grad_norm": 0.15080498158931732, "learning_rate": 0.00016650573100301968, "loss": 1.7898, "step": 6862 }, { "epoch": 0.67021484375, "grad_norm": 0.1623527854681015, "learning_rate": 0.00016644343738774373, "loss": 1.8224, "step": 6863 }, { "epoch": 0.6703125, "grad_norm": 0.17387667298316956, "learning_rate": 0.00016638115461630658, "loss": 1.8279, "step": 6864 }, { "epoch": 0.67041015625, "grad_norm": 0.14696913957595825, "learning_rate": 0.00016631888269492983, "loss": 1.8095, "step": 6865 }, { "epoch": 0.6705078125, "grad_norm": 0.16662214696407318, "learning_rate": 0.000166256621629834, "loss": 1.8164, "step": 6866 }, { "epoch": 0.67060546875, "grad_norm": 0.15846864879131317, "learning_rate": 0.0001661943714272382, "loss": 1.7529, "step": 6867 }, { "epoch": 0.670703125, "grad_norm": 0.1668943613767624, "learning_rate": 0.00016613213209336076, "loss": 1.826, "step": 6868 }, { "epoch": 0.67080078125, "grad_norm": 0.1779264509677887, "learning_rate": 0.0001660699036344188, "loss": 1.7595, "step": 6869 }, { "epoch": 0.6708984375, "grad_norm": 0.17539826035499573, "learning_rate": 0.00016600768605662852, "loss": 1.8333, "step": 6870 }, { "epoch": 0.67099609375, "grad_norm": 0.18614362180233002, "learning_rate": 0.00016594547936620474, "loss": 1.7843, "step": 6871 }, { "epoch": 0.67109375, "grad_norm": 0.19493992626667023, "learning_rate": 0.0001658832835693615, "loss": 1.792, "step": 6872 }, { "epoch": 0.67119140625, "grad_norm": 0.15731357038021088, "learning_rate": 0.00016582109867231156, "loss": 1.7893, "step": 6873 }, { "epoch": 0.6712890625, "grad_norm": 0.20333430171012878, "learning_rate": 0.00016575892468126657, "loss": 1.7851, "step": 6874 }, { "epoch": 0.67138671875, "grad_norm": 0.160857692360878, "learning_rate": 0.00016569676160243724, "loss": 1.7464, "step": 6875 }, { "epoch": 0.671484375, "grad_norm": 0.20779138803482056, 
"learning_rate": 0.00016563460944203303, "loss": 1.7806, "step": 6876 }, { "epoch": 0.67158203125, "grad_norm": 0.16112321615219116, "learning_rate": 0.0001655724682062625, "loss": 1.7736, "step": 6877 }, { "epoch": 0.6716796875, "grad_norm": 0.18955856561660767, "learning_rate": 0.00016551033790133292, "loss": 1.7666, "step": 6878 }, { "epoch": 0.67177734375, "grad_norm": 0.19311009347438812, "learning_rate": 0.00016544821853345062, "loss": 1.7805, "step": 6879 }, { "epoch": 0.671875, "grad_norm": 0.17299115657806396, "learning_rate": 0.00016538611010882086, "loss": 1.8313, "step": 6880 }, { "epoch": 0.67197265625, "grad_norm": 0.18477989733219147, "learning_rate": 0.00016532401263364744, "loss": 1.7849, "step": 6881 }, { "epoch": 0.6720703125, "grad_norm": 0.16968649625778198, "learning_rate": 0.00016526192611413359, "loss": 1.7917, "step": 6882 }, { "epoch": 0.67216796875, "grad_norm": 0.18204939365386963, "learning_rate": 0.00016519985055648108, "loss": 1.7929, "step": 6883 }, { "epoch": 0.672265625, "grad_norm": 0.18578919768333435, "learning_rate": 0.0001651377859668908, "loss": 1.7605, "step": 6884 }, { "epoch": 0.67236328125, "grad_norm": 0.15926843881607056, "learning_rate": 0.00016507573235156236, "loss": 1.795, "step": 6885 }, { "epoch": 0.6724609375, "grad_norm": 0.21110518276691437, "learning_rate": 0.00016501368971669444, "loss": 1.7907, "step": 6886 }, { "epoch": 0.67255859375, "grad_norm": 0.1501876413822174, "learning_rate": 0.00016495165806848465, "loss": 1.8354, "step": 6887 }, { "epoch": 0.67265625, "grad_norm": 0.2440088987350464, "learning_rate": 0.00016488963741312915, "loss": 1.7597, "step": 6888 }, { "epoch": 0.67275390625, "grad_norm": 0.18479712307453156, "learning_rate": 0.0001648276277568234, "loss": 1.7642, "step": 6889 }, { "epoch": 0.6728515625, "grad_norm": 0.24002431333065033, "learning_rate": 0.00016476562910576158, "loss": 1.7987, "step": 6890 }, { "epoch": 0.67294921875, "grad_norm": 0.19026416540145874, "learning_rate": 0.00016470364146613687, "loss": 1.7985, "step": 6891 }, { "epoch": 0.673046875, "grad_norm": 0.19754427671432495, "learning_rate": 0.00016464166484414118, "loss": 1.8418, "step": 6892 }, { "epoch": 0.67314453125, "grad_norm": 0.2096160501241684, "learning_rate": 0.00016457969924596553, "loss": 1.7261, "step": 6893 }, { "epoch": 0.6732421875, "grad_norm": 0.17159762978553772, "learning_rate": 0.00016451774467779974, "loss": 1.7712, "step": 6894 }, { "epoch": 0.67333984375, "grad_norm": 0.2366153597831726, "learning_rate": 0.0001644558011458324, "loss": 1.8054, "step": 6895 }, { "epoch": 0.6734375, "grad_norm": 0.20519356429576874, "learning_rate": 0.0001643938686562512, "loss": 1.7703, "step": 6896 }, { "epoch": 0.67353515625, "grad_norm": 0.2375907450914383, "learning_rate": 0.0001643319472152426, "loss": 1.7932, "step": 6897 }, { "epoch": 0.6736328125, "grad_norm": 0.22247055172920227, "learning_rate": 0.0001642700368289921, "loss": 1.8056, "step": 6898 }, { "epoch": 0.67373046875, "grad_norm": 0.19705258309841156, "learning_rate": 0.00016420813750368392, "loss": 1.7976, "step": 6899 }, { "epoch": 0.673828125, "grad_norm": 0.21154995262622833, "learning_rate": 0.00016414624924550124, "loss": 1.7683, "step": 6900 }, { "epoch": 0.67392578125, "grad_norm": 0.1773930788040161, "learning_rate": 0.0001640843720606263, "loss": 1.8207, "step": 6901 }, { "epoch": 0.6740234375, "grad_norm": 0.19547690451145172, "learning_rate": 0.00016402250595523994, "loss": 1.8006, "step": 6902 }, { "epoch": 0.67412109375, "grad_norm": 0.15889418125152588, 
"learning_rate": 0.00016396065093552198, "loss": 1.7894, "step": 6903 }, { "epoch": 0.67421875, "grad_norm": 0.20280340313911438, "learning_rate": 0.00016389880700765132, "loss": 1.7846, "step": 6904 }, { "epoch": 0.67431640625, "grad_norm": 0.16782471537590027, "learning_rate": 0.00016383697417780556, "loss": 1.8028, "step": 6905 }, { "epoch": 0.6744140625, "grad_norm": 0.22529588639736176, "learning_rate": 0.0001637751524521612, "loss": 1.8394, "step": 6906 }, { "epoch": 0.67451171875, "grad_norm": 0.16469374299049377, "learning_rate": 0.00016371334183689384, "loss": 1.7948, "step": 6907 }, { "epoch": 0.674609375, "grad_norm": 0.19421949982643127, "learning_rate": 0.00016365154233817777, "loss": 1.7741, "step": 6908 }, { "epoch": 0.67470703125, "grad_norm": 0.16634027659893036, "learning_rate": 0.00016358975396218607, "loss": 1.7555, "step": 6909 }, { "epoch": 0.6748046875, "grad_norm": 0.17676925659179688, "learning_rate": 0.00016352797671509095, "loss": 1.8097, "step": 6910 }, { "epoch": 0.67490234375, "grad_norm": 0.18045926094055176, "learning_rate": 0.0001634662106030634, "loss": 1.7705, "step": 6911 }, { "epoch": 0.675, "grad_norm": 0.16005785763263702, "learning_rate": 0.0001634044556322733, "loss": 1.8078, "step": 6912 }, { "epoch": 0.67509765625, "grad_norm": 0.18497344851493835, "learning_rate": 0.00016334271180888944, "loss": 1.8283, "step": 6913 }, { "epoch": 0.6751953125, "grad_norm": 0.15121488273143768, "learning_rate": 0.0001632809791390795, "loss": 1.793, "step": 6914 }, { "epoch": 0.67529296875, "grad_norm": 0.17780368030071259, "learning_rate": 0.00016321925762901004, "loss": 1.7814, "step": 6915 }, { "epoch": 0.675390625, "grad_norm": 0.1537407636642456, "learning_rate": 0.0001631575472848464, "loss": 1.7888, "step": 6916 }, { "epoch": 0.67548828125, "grad_norm": 0.1656167209148407, "learning_rate": 0.00016309584811275297, "loss": 1.8084, "step": 6917 }, { "epoch": 0.6755859375, "grad_norm": 0.16236233711242676, "learning_rate": 0.00016303416011889291, "loss": 1.7974, "step": 6918 }, { "epoch": 0.67568359375, "grad_norm": 0.1652160882949829, "learning_rate": 0.0001629724833094283, "loss": 1.8102, "step": 6919 }, { "epoch": 0.67578125, "grad_norm": 0.15149018168449402, "learning_rate": 0.0001629108176905202, "loss": 1.7523, "step": 6920 }, { "epoch": 0.67587890625, "grad_norm": 0.14961397647857666, "learning_rate": 0.00016284916326832834, "loss": 1.7931, "step": 6921 }, { "epoch": 0.6759765625, "grad_norm": 0.1785857230424881, "learning_rate": 0.0001627875200490116, "loss": 1.7677, "step": 6922 }, { "epoch": 0.67607421875, "grad_norm": 0.1776105910539627, "learning_rate": 0.0001627258880387274, "loss": 1.7832, "step": 6923 }, { "epoch": 0.676171875, "grad_norm": 0.16321811079978943, "learning_rate": 0.0001626642672436323, "loss": 1.7944, "step": 6924 }, { "epoch": 0.67626953125, "grad_norm": 0.17351141571998596, "learning_rate": 0.00016260265766988177, "loss": 1.768, "step": 6925 }, { "epoch": 0.6763671875, "grad_norm": 0.16752485930919647, "learning_rate": 0.00016254105932362995, "loss": 1.8366, "step": 6926 }, { "epoch": 0.67646484375, "grad_norm": 0.19357427954673767, "learning_rate": 0.00016247947221102998, "loss": 1.7809, "step": 6927 }, { "epoch": 0.6765625, "grad_norm": 0.17808841168880463, "learning_rate": 0.00016241789633823396, "loss": 1.7605, "step": 6928 }, { "epoch": 0.67666015625, "grad_norm": 0.22302816808223724, "learning_rate": 0.00016235633171139276, "loss": 1.7688, "step": 6929 }, { "epoch": 0.6767578125, "grad_norm": 0.16737504303455353, 
"learning_rate": 0.00016229477833665598, "loss": 1.8147, "step": 6930 }, { "epoch": 0.67685546875, "grad_norm": 0.20709918439388275, "learning_rate": 0.0001622332362201724, "loss": 1.8314, "step": 6931 }, { "epoch": 0.676953125, "grad_norm": 0.16305744647979736, "learning_rate": 0.00016217170536808945, "loss": 1.7836, "step": 6932 }, { "epoch": 0.67705078125, "grad_norm": 0.18088005483150482, "learning_rate": 0.00016211018578655363, "loss": 1.7975, "step": 6933 }, { "epoch": 0.6771484375, "grad_norm": 0.18088318407535553, "learning_rate": 0.00016204867748171009, "loss": 1.8135, "step": 6934 }, { "epoch": 0.67724609375, "grad_norm": 0.17938348650932312, "learning_rate": 0.000161987180459703, "loss": 1.7924, "step": 6935 }, { "epoch": 0.67734375, "grad_norm": 0.16779905557632446, "learning_rate": 0.0001619256947266755, "loss": 1.7615, "step": 6936 }, { "epoch": 0.67744140625, "grad_norm": 0.1804201304912567, "learning_rate": 0.00016186422028876928, "loss": 1.806, "step": 6937 }, { "epoch": 0.6775390625, "grad_norm": 0.18485525250434875, "learning_rate": 0.0001618027571521251, "loss": 1.7875, "step": 6938 }, { "epoch": 0.67763671875, "grad_norm": 0.1624886393547058, "learning_rate": 0.00016174130532288267, "loss": 1.7667, "step": 6939 }, { "epoch": 0.677734375, "grad_norm": 0.19127444922924042, "learning_rate": 0.00016167986480718048, "loss": 1.8058, "step": 6940 }, { "epoch": 0.67783203125, "grad_norm": 0.16322876513004303, "learning_rate": 0.00016161843561115585, "loss": 1.8184, "step": 6941 }, { "epoch": 0.6779296875, "grad_norm": 0.1687088906764984, "learning_rate": 0.00016155701774094505, "loss": 1.7812, "step": 6942 }, { "epoch": 0.67802734375, "grad_norm": 0.16126418113708496, "learning_rate": 0.0001614956112026832, "loss": 1.7965, "step": 6943 }, { "epoch": 0.678125, "grad_norm": 0.15007956326007843, "learning_rate": 0.0001614342160025042, "loss": 1.8111, "step": 6944 }, { "epoch": 0.67822265625, "grad_norm": 0.17699624598026276, "learning_rate": 0.0001613728321465409, "loss": 1.7781, "step": 6945 }, { "epoch": 0.6783203125, "grad_norm": 0.18852028250694275, "learning_rate": 0.00016131145964092504, "loss": 1.7916, "step": 6946 }, { "epoch": 0.67841796875, "grad_norm": 0.1692580282688141, "learning_rate": 0.00016125009849178712, "loss": 1.8115, "step": 6947 }, { "epoch": 0.678515625, "grad_norm": 0.1754359006881714, "learning_rate": 0.0001611887487052567, "loss": 1.769, "step": 6948 }, { "epoch": 0.67861328125, "grad_norm": 0.20770426094532013, "learning_rate": 0.00016112741028746191, "loss": 1.7838, "step": 6949 }, { "epoch": 0.6787109375, "grad_norm": 0.18988534808158875, "learning_rate": 0.00016106608324453019, "loss": 1.8142, "step": 6950 }, { "epoch": 0.67880859375, "grad_norm": 0.22527390718460083, "learning_rate": 0.00016100476758258725, "loss": 1.81, "step": 6951 }, { "epoch": 0.67890625, "grad_norm": 0.164793461561203, "learning_rate": 0.00016094346330775816, "loss": 1.7938, "step": 6952 }, { "epoch": 0.67900390625, "grad_norm": 0.22036248445510864, "learning_rate": 0.00016088217042616658, "loss": 1.7987, "step": 6953 }, { "epoch": 0.6791015625, "grad_norm": 0.16861990094184875, "learning_rate": 0.00016082088894393525, "loss": 1.7804, "step": 6954 }, { "epoch": 0.67919921875, "grad_norm": 0.21151679754257202, "learning_rate": 0.00016075961886718556, "loss": 1.7862, "step": 6955 }, { "epoch": 0.679296875, "grad_norm": 0.16345049440860748, "learning_rate": 0.00016069836020203785, "loss": 1.7932, "step": 6956 }, { "epoch": 0.67939453125, "grad_norm": 0.1898726224899292, 
"learning_rate": 0.00016063711295461142, "loss": 1.8136, "step": 6957 }, { "epoch": 0.6794921875, "grad_norm": 0.16460607945919037, "learning_rate": 0.00016057587713102418, "loss": 1.8232, "step": 6958 }, { "epoch": 0.67958984375, "grad_norm": 0.21362203359603882, "learning_rate": 0.00016051465273739308, "loss": 1.8133, "step": 6959 }, { "epoch": 0.6796875, "grad_norm": 0.1831885725259781, "learning_rate": 0.00016045343977983396, "loss": 1.7508, "step": 6960 }, { "epoch": 0.67978515625, "grad_norm": 0.18768590688705444, "learning_rate": 0.00016039223826446136, "loss": 1.7558, "step": 6961 }, { "epoch": 0.6798828125, "grad_norm": 0.2103305459022522, "learning_rate": 0.00016033104819738886, "loss": 1.7416, "step": 6962 }, { "epoch": 0.67998046875, "grad_norm": 0.18648366630077362, "learning_rate": 0.00016026986958472876, "loss": 1.7796, "step": 6963 }, { "epoch": 0.680078125, "grad_norm": 0.19590221345424652, "learning_rate": 0.00016020870243259233, "loss": 1.8075, "step": 6964 }, { "epoch": 0.68017578125, "grad_norm": 0.1888592392206192, "learning_rate": 0.00016014754674708952, "loss": 1.7722, "step": 6965 }, { "epoch": 0.6802734375, "grad_norm": 0.1764562726020813, "learning_rate": 0.00016008640253432926, "loss": 1.8059, "step": 6966 }, { "epoch": 0.68037109375, "grad_norm": 0.19756117463111877, "learning_rate": 0.00016002526980041936, "loss": 1.7384, "step": 6967 }, { "epoch": 0.68046875, "grad_norm": 0.17833252251148224, "learning_rate": 0.00015996414855146645, "loss": 1.7642, "step": 6968 }, { "epoch": 0.68056640625, "grad_norm": 0.19569489359855652, "learning_rate": 0.00015990303879357594, "loss": 1.8023, "step": 6969 }, { "epoch": 0.6806640625, "grad_norm": 0.17910563945770264, "learning_rate": 0.0001598419405328522, "loss": 1.7703, "step": 6970 }, { "epoch": 0.68076171875, "grad_norm": 0.19319406151771545, "learning_rate": 0.00015978085377539846, "loss": 1.7665, "step": 6971 }, { "epoch": 0.680859375, "grad_norm": 0.18400554358959198, "learning_rate": 0.00015971977852731662, "loss": 1.7807, "step": 6972 }, { "epoch": 0.68095703125, "grad_norm": 0.18186430633068085, "learning_rate": 0.0001596587147947076, "loss": 1.7921, "step": 6973 }, { "epoch": 0.6810546875, "grad_norm": 0.17038005590438843, "learning_rate": 0.00015959766258367115, "loss": 1.7983, "step": 6974 }, { "epoch": 0.68115234375, "grad_norm": 0.1769455522298813, "learning_rate": 0.00015953662190030583, "loss": 1.7568, "step": 6975 }, { "epoch": 0.68125, "grad_norm": 0.17159441113471985, "learning_rate": 0.00015947559275070904, "loss": 1.7874, "step": 6976 }, { "epoch": 0.68134765625, "grad_norm": 0.21623560786247253, "learning_rate": 0.00015941457514097714, "loss": 1.7826, "step": 6977 }, { "epoch": 0.6814453125, "grad_norm": 0.1606811285018921, "learning_rate": 0.00015935356907720522, "loss": 1.7924, "step": 6978 }, { "epoch": 0.68154296875, "grad_norm": 0.17931677401065826, "learning_rate": 0.00015929257456548713, "loss": 1.8248, "step": 6979 }, { "epoch": 0.681640625, "grad_norm": 0.18598859012126923, "learning_rate": 0.0001592315916119158, "loss": 1.78, "step": 6980 }, { "epoch": 0.68173828125, "grad_norm": 0.1600993573665619, "learning_rate": 0.00015917062022258283, "loss": 1.791, "step": 6981 }, { "epoch": 0.6818359375, "grad_norm": 0.18051645159721375, "learning_rate": 0.00015910966040357874, "loss": 1.7979, "step": 6982 }, { "epoch": 0.68193359375, "grad_norm": 0.171726793050766, "learning_rate": 0.0001590487121609929, "loss": 1.7698, "step": 6983 }, { "epoch": 0.68203125, "grad_norm": 0.1995120793581009, 
"learning_rate": 0.0001589877755009135, "loss": 1.7896, "step": 6984 }, { "epoch": 0.68212890625, "grad_norm": 0.17130853235721588, "learning_rate": 0.00015892685042942755, "loss": 1.7214, "step": 6985 }, { "epoch": 0.6822265625, "grad_norm": 0.1644815355539322, "learning_rate": 0.0001588659369526209, "loss": 1.7954, "step": 6986 }, { "epoch": 0.68232421875, "grad_norm": 0.18016870319843292, "learning_rate": 0.00015880503507657834, "loss": 1.8235, "step": 6987 }, { "epoch": 0.682421875, "grad_norm": 0.17346541583538055, "learning_rate": 0.0001587441448073833, "loss": 1.8148, "step": 6988 }, { "epoch": 0.68251953125, "grad_norm": 0.18388555943965912, "learning_rate": 0.0001586832661511183, "loss": 1.7585, "step": 6989 }, { "epoch": 0.6826171875, "grad_norm": 0.15950731933116913, "learning_rate": 0.0001586223991138645, "loss": 1.7699, "step": 6990 }, { "epoch": 0.68271484375, "grad_norm": 0.1737283319234848, "learning_rate": 0.00015856154370170206, "loss": 1.824, "step": 6991 }, { "epoch": 0.6828125, "grad_norm": 0.1593412607908249, "learning_rate": 0.00015850069992070994, "loss": 1.7921, "step": 6992 }, { "epoch": 0.68291015625, "grad_norm": 0.18059980869293213, "learning_rate": 0.00015843986777696567, "loss": 1.7766, "step": 6993 }, { "epoch": 0.6830078125, "grad_norm": 0.1635483056306839, "learning_rate": 0.00015837904727654606, "loss": 1.7943, "step": 6994 }, { "epoch": 0.68310546875, "grad_norm": 0.16970890760421753, "learning_rate": 0.0001583182384255264, "loss": 1.7335, "step": 6995 }, { "epoch": 0.683203125, "grad_norm": 0.15687856078147888, "learning_rate": 0.00015825744122998105, "loss": 1.7729, "step": 6996 }, { "epoch": 0.68330078125, "grad_norm": 0.17292025685310364, "learning_rate": 0.000158196655695983, "loss": 1.8105, "step": 6997 }, { "epoch": 0.6833984375, "grad_norm": 0.16840864717960358, "learning_rate": 0.00015813588182960432, "loss": 1.7755, "step": 6998 }, { "epoch": 0.68349609375, "grad_norm": 0.18159016966819763, "learning_rate": 0.00015807511963691578, "loss": 1.8323, "step": 6999 }, { "epoch": 0.68359375, "grad_norm": 0.17530877888202667, "learning_rate": 0.0001580143691239869, "loss": 1.7844, "step": 7000 }, { "epoch": 0.68369140625, "grad_norm": 0.16628029942512512, "learning_rate": 0.0001579536302968861, "loss": 1.8202, "step": 7001 }, { "epoch": 0.6837890625, "grad_norm": 0.1802671104669571, "learning_rate": 0.0001578929031616807, "loss": 1.7986, "step": 7002 }, { "epoch": 0.68388671875, "grad_norm": 0.1900867372751236, "learning_rate": 0.0001578321877244368, "loss": 1.8371, "step": 7003 }, { "epoch": 0.683984375, "grad_norm": 0.18361061811447144, "learning_rate": 0.00015777148399121933, "loss": 1.791, "step": 7004 }, { "epoch": 0.68408203125, "grad_norm": 0.16304132342338562, "learning_rate": 0.00015771079196809208, "loss": 1.8344, "step": 7005 }, { "epoch": 0.6841796875, "grad_norm": 0.19150187075138092, "learning_rate": 0.0001576501116611177, "loss": 1.7739, "step": 7006 }, { "epoch": 0.68427734375, "grad_norm": 0.18854016065597534, "learning_rate": 0.00015758944307635742, "loss": 1.7764, "step": 7007 }, { "epoch": 0.684375, "grad_norm": 0.17195682227611542, "learning_rate": 0.00015752878621987166, "loss": 1.7829, "step": 7008 }, { "epoch": 0.68447265625, "grad_norm": 0.18562257289886475, "learning_rate": 0.00015746814109771944, "loss": 1.7989, "step": 7009 }, { "epoch": 0.6845703125, "grad_norm": 0.18442551791667938, "learning_rate": 0.0001574075077159587, "loss": 1.7817, "step": 7010 }, { "epoch": 0.68466796875, "grad_norm": 0.1659867763519287, 
"learning_rate": 0.0001573468860806461, "loss": 1.7948, "step": 7011 }, { "epoch": 0.684765625, "grad_norm": 0.17011453211307526, "learning_rate": 0.0001572862761978373, "loss": 1.8066, "step": 7012 }, { "epoch": 0.68486328125, "grad_norm": 0.1900080442428589, "learning_rate": 0.00015722567807358677, "loss": 1.7746, "step": 7013 }, { "epoch": 0.6849609375, "grad_norm": 0.17860539257526398, "learning_rate": 0.00015716509171394752, "loss": 1.7762, "step": 7014 }, { "epoch": 0.68505859375, "grad_norm": 0.17944754660129547, "learning_rate": 0.00015710451712497167, "loss": 1.7887, "step": 7015 }, { "epoch": 0.68515625, "grad_norm": 0.16661764681339264, "learning_rate": 0.00015704395431271012, "loss": 1.7602, "step": 7016 }, { "epoch": 0.68525390625, "grad_norm": 0.17134186625480652, "learning_rate": 0.0001569834032832125, "loss": 1.7794, "step": 7017 }, { "epoch": 0.6853515625, "grad_norm": 0.1614014208316803, "learning_rate": 0.00015692286404252737, "loss": 1.7211, "step": 7018 }, { "epoch": 0.68544921875, "grad_norm": 0.1677669882774353, "learning_rate": 0.00015686233659670208, "loss": 1.8292, "step": 7019 }, { "epoch": 0.685546875, "grad_norm": 0.21792346239089966, "learning_rate": 0.0001568018209517828, "loss": 1.785, "step": 7020 }, { "epoch": 0.68564453125, "grad_norm": 0.1642674058675766, "learning_rate": 0.00015674131711381446, "loss": 1.8068, "step": 7021 }, { "epoch": 0.6857421875, "grad_norm": 0.23985230922698975, "learning_rate": 0.0001566808250888408, "loss": 1.8086, "step": 7022 }, { "epoch": 0.68583984375, "grad_norm": 0.16522124409675598, "learning_rate": 0.0001566203448829045, "loss": 1.7566, "step": 7023 }, { "epoch": 0.6859375, "grad_norm": 0.21361692249774933, "learning_rate": 0.000156559876502047, "loss": 1.8032, "step": 7024 }, { "epoch": 0.68603515625, "grad_norm": 0.18104025721549988, "learning_rate": 0.0001564994199523086, "loss": 1.804, "step": 7025 }, { "epoch": 0.6861328125, "grad_norm": 0.22548973560333252, "learning_rate": 0.00015643897523972828, "loss": 1.8215, "step": 7026 }, { "epoch": 0.68623046875, "grad_norm": 0.17327019572257996, "learning_rate": 0.00015637854237034404, "loss": 1.8071, "step": 7027 }, { "epoch": 0.686328125, "grad_norm": 0.23282380402088165, "learning_rate": 0.0001563181213501925, "loss": 1.8057, "step": 7028 }, { "epoch": 0.68642578125, "grad_norm": 0.17707297205924988, "learning_rate": 0.00015625771218530919, "loss": 1.827, "step": 7029 }, { "epoch": 0.6865234375, "grad_norm": 0.2218504250049591, "learning_rate": 0.00015619731488172845, "loss": 1.7899, "step": 7030 }, { "epoch": 0.68662109375, "grad_norm": 0.1672588586807251, "learning_rate": 0.00015613692944548347, "loss": 1.7529, "step": 7031 }, { "epoch": 0.68671875, "grad_norm": 0.17623436450958252, "learning_rate": 0.00015607655588260622, "loss": 1.7842, "step": 7032 }, { "epoch": 0.68681640625, "grad_norm": 0.21378366649150848, "learning_rate": 0.00015601619419912743, "loss": 1.7788, "step": 7033 }, { "epoch": 0.6869140625, "grad_norm": 0.1768326610326767, "learning_rate": 0.00015595584440107686, "loss": 1.8052, "step": 7034 }, { "epoch": 0.68701171875, "grad_norm": 0.20340916514396667, "learning_rate": 0.0001558955064944827, "loss": 1.8394, "step": 7035 }, { "epoch": 0.687109375, "grad_norm": 0.17297755181789398, "learning_rate": 0.00015583518048537225, "loss": 1.7993, "step": 7036 }, { "epoch": 0.68720703125, "grad_norm": 0.21161890029907227, "learning_rate": 0.00015577486637977163, "loss": 1.8019, "step": 7037 }, { "epoch": 0.6873046875, "grad_norm": 0.15858536958694458, 
"learning_rate": 0.00015571456418370555, "loss": 1.797, "step": 7038 }, { "epoch": 0.68740234375, "grad_norm": 0.1983819603919983, "learning_rate": 0.00015565427390319775, "loss": 1.7731, "step": 7039 }, { "epoch": 0.6875, "grad_norm": 0.15905117988586426, "learning_rate": 0.0001555939955442707, "loss": 1.7715, "step": 7040 }, { "epoch": 0.68759765625, "grad_norm": 0.1989128589630127, "learning_rate": 0.0001555337291129457, "loss": 1.7871, "step": 7041 }, { "epoch": 0.6876953125, "grad_norm": 0.17942732572555542, "learning_rate": 0.00015547347461524274, "loss": 1.7925, "step": 7042 }, { "epoch": 0.68779296875, "grad_norm": 0.18206380307674408, "learning_rate": 0.0001554132320571808, "loss": 1.8274, "step": 7043 }, { "epoch": 0.687890625, "grad_norm": 0.19091618061065674, "learning_rate": 0.00015535300144477743, "loss": 1.7753, "step": 7044 }, { "epoch": 0.68798828125, "grad_norm": 0.15628927946090698, "learning_rate": 0.0001552927827840493, "loss": 1.821, "step": 7045 }, { "epoch": 0.6880859375, "grad_norm": 0.18300217390060425, "learning_rate": 0.00015523257608101166, "loss": 1.7936, "step": 7046 }, { "epoch": 0.68818359375, "grad_norm": 0.16574116051197052, "learning_rate": 0.0001551723813416786, "loss": 1.8038, "step": 7047 }, { "epoch": 0.68828125, "grad_norm": 0.18857359886169434, "learning_rate": 0.00015511219857206315, "loss": 1.79, "step": 7048 }, { "epoch": 0.68837890625, "grad_norm": 0.14915986359119415, "learning_rate": 0.0001550520277781769, "loss": 1.7769, "step": 7049 }, { "epoch": 0.6884765625, "grad_norm": 0.2018180787563324, "learning_rate": 0.0001549918689660304, "loss": 1.8446, "step": 7050 }, { "epoch": 0.68857421875, "grad_norm": 0.1732933074235916, "learning_rate": 0.00015493172214163303, "loss": 1.7896, "step": 7051 }, { "epoch": 0.688671875, "grad_norm": 0.1562858670949936, "learning_rate": 0.00015487158731099292, "loss": 1.7627, "step": 7052 }, { "epoch": 0.68876953125, "grad_norm": 0.20067474246025085, "learning_rate": 0.00015481146448011697, "loss": 1.7661, "step": 7053 }, { "epoch": 0.6888671875, "grad_norm": 0.16530682146549225, "learning_rate": 0.00015475135365501097, "loss": 1.788, "step": 7054 }, { "epoch": 0.68896484375, "grad_norm": 0.19360673427581787, "learning_rate": 0.0001546912548416795, "loss": 1.8198, "step": 7055 }, { "epoch": 0.6890625, "grad_norm": 0.17173407971858978, "learning_rate": 0.00015463116804612572, "loss": 1.7798, "step": 7056 }, { "epoch": 0.68916015625, "grad_norm": 0.17327551543712616, "learning_rate": 0.00015457109327435191, "loss": 1.8188, "step": 7057 }, { "epoch": 0.6892578125, "grad_norm": 0.17679515480995178, "learning_rate": 0.00015451103053235897, "loss": 1.8191, "step": 7058 }, { "epoch": 0.68935546875, "grad_norm": 0.1695164144039154, "learning_rate": 0.00015445097982614666, "loss": 1.7842, "step": 7059 }, { "epoch": 0.689453125, "grad_norm": 0.17532047629356384, "learning_rate": 0.00015439094116171355, "loss": 1.7803, "step": 7060 }, { "epoch": 0.68955078125, "grad_norm": 0.17038898169994354, "learning_rate": 0.0001543309145450568, "loss": 1.8086, "step": 7061 }, { "epoch": 0.6896484375, "grad_norm": 0.1681077778339386, "learning_rate": 0.0001542708999821728, "loss": 1.8086, "step": 7062 }, { "epoch": 0.68974609375, "grad_norm": 0.18737256526947021, "learning_rate": 0.00015421089747905624, "loss": 1.7863, "step": 7063 }, { "epoch": 0.68984375, "grad_norm": 0.1675270050764084, "learning_rate": 0.00015415090704170092, "loss": 1.8145, "step": 7064 }, { "epoch": 0.68994140625, "grad_norm": 0.17820687592029572, 
"learning_rate": 0.0001540909286760993, "loss": 1.7892, "step": 7065 }, { "epoch": 0.6900390625, "grad_norm": 0.16966290771961212, "learning_rate": 0.00015403096238824287, "loss": 1.7439, "step": 7066 }, { "epoch": 0.69013671875, "grad_norm": 0.17236657440662384, "learning_rate": 0.00015397100818412157, "loss": 1.7911, "step": 7067 }, { "epoch": 0.690234375, "grad_norm": 0.1600012332201004, "learning_rate": 0.00015391106606972433, "loss": 1.7943, "step": 7068 }, { "epoch": 0.69033203125, "grad_norm": 0.17807303369045258, "learning_rate": 0.00015385113605103889, "loss": 1.7727, "step": 7069 }, { "epoch": 0.6904296875, "grad_norm": 0.1634976863861084, "learning_rate": 0.0001537912181340516, "loss": 1.7891, "step": 7070 }, { "epoch": 0.69052734375, "grad_norm": 0.16638602316379547, "learning_rate": 0.00015373131232474785, "loss": 1.7959, "step": 7071 }, { "epoch": 0.690625, "grad_norm": 0.16791722178459167, "learning_rate": 0.00015367141862911155, "loss": 1.7681, "step": 7072 }, { "epoch": 0.69072265625, "grad_norm": 0.18002130091190338, "learning_rate": 0.0001536115370531258, "loss": 1.8083, "step": 7073 }, { "epoch": 0.6908203125, "grad_norm": 0.17238833010196686, "learning_rate": 0.000153551667602772, "loss": 1.8076, "step": 7074 }, { "epoch": 0.69091796875, "grad_norm": 0.16955898702144623, "learning_rate": 0.0001534918102840307, "loss": 1.8359, "step": 7075 }, { "epoch": 0.691015625, "grad_norm": 0.17133668065071106, "learning_rate": 0.00015343196510288115, "loss": 1.775, "step": 7076 }, { "epoch": 0.69111328125, "grad_norm": 0.1424286812543869, "learning_rate": 0.00015337213206530122, "loss": 1.8017, "step": 7077 }, { "epoch": 0.6912109375, "grad_norm": 0.16503962874412537, "learning_rate": 0.00015331231117726778, "loss": 1.8379, "step": 7078 }, { "epoch": 0.69130859375, "grad_norm": 0.17492873966693878, "learning_rate": 0.0001532525024447563, "loss": 1.8149, "step": 7079 }, { "epoch": 0.69140625, "grad_norm": 0.1776743233203888, "learning_rate": 0.0001531927058737414, "loss": 1.791, "step": 7080 }, { "epoch": 0.69150390625, "grad_norm": 0.167927548289299, "learning_rate": 0.00015313292147019604, "loss": 1.8002, "step": 7081 }, { "epoch": 0.6916015625, "grad_norm": 0.18689177930355072, "learning_rate": 0.0001530731492400921, "loss": 1.8125, "step": 7082 }, { "epoch": 0.69169921875, "grad_norm": 0.15624839067459106, "learning_rate": 0.0001530133891894005, "loss": 1.7524, "step": 7083 }, { "epoch": 0.691796875, "grad_norm": 0.2024654895067215, "learning_rate": 0.0001529536413240905, "loss": 1.8005, "step": 7084 }, { "epoch": 0.69189453125, "grad_norm": 0.18905296921730042, "learning_rate": 0.00015289390565013052, "loss": 1.8365, "step": 7085 }, { "epoch": 0.6919921875, "grad_norm": 0.15744717419147491, "learning_rate": 0.00015283418217348755, "loss": 1.7228, "step": 7086 }, { "epoch": 0.69208984375, "grad_norm": 0.19027529656887054, "learning_rate": 0.00015277447090012762, "loss": 1.8268, "step": 7087 }, { "epoch": 0.6921875, "grad_norm": 0.1656276136636734, "learning_rate": 0.00015271477183601516, "loss": 1.7744, "step": 7088 }, { "epoch": 0.69228515625, "grad_norm": 0.15903376042842865, "learning_rate": 0.00015265508498711361, "loss": 1.8102, "step": 7089 }, { "epoch": 0.6923828125, "grad_norm": 0.1556456983089447, "learning_rate": 0.00015259541035938522, "loss": 1.7855, "step": 7090 }, { "epoch": 0.69248046875, "grad_norm": 0.16047360002994537, "learning_rate": 0.00015253574795879102, "loss": 1.7942, "step": 7091 }, { "epoch": 0.692578125, "grad_norm": 0.16360905766487122, 
"learning_rate": 0.00015247609779129058, "loss": 1.8007, "step": 7092 }, { "epoch": 0.69267578125, "grad_norm": 0.15898703038692474, "learning_rate": 0.0001524164598628424, "loss": 1.8317, "step": 7093 }, { "epoch": 0.6927734375, "grad_norm": 0.19902659952640533, "learning_rate": 0.00015235683417940408, "loss": 1.7633, "step": 7094 }, { "epoch": 0.69287109375, "grad_norm": 0.17323528230190277, "learning_rate": 0.0001522972207469314, "loss": 1.7547, "step": 7095 }, { "epoch": 0.69296875, "grad_norm": 0.20322023332118988, "learning_rate": 0.00015223761957137934, "loss": 1.7973, "step": 7096 }, { "epoch": 0.69306640625, "grad_norm": 0.18776319921016693, "learning_rate": 0.0001521780306587015, "loss": 1.786, "step": 7097 }, { "epoch": 0.6931640625, "grad_norm": 0.20096303522586823, "learning_rate": 0.00015211845401485037, "loss": 1.7766, "step": 7098 }, { "epoch": 0.69326171875, "grad_norm": 0.217447429895401, "learning_rate": 0.00015205888964577694, "loss": 1.8052, "step": 7099 }, { "epoch": 0.693359375, "grad_norm": 0.1733197420835495, "learning_rate": 0.00015199933755743122, "loss": 1.7552, "step": 7100 }, { "epoch": 0.69345703125, "grad_norm": 0.20480690896511078, "learning_rate": 0.00015193979775576216, "loss": 1.8432, "step": 7101 }, { "epoch": 0.6935546875, "grad_norm": 0.21477128565311432, "learning_rate": 0.00015188027024671703, "loss": 1.8351, "step": 7102 }, { "epoch": 0.69365234375, "grad_norm": 0.19167658686637878, "learning_rate": 0.0001518207550362421, "loss": 1.7885, "step": 7103 }, { "epoch": 0.69375, "grad_norm": 0.22816379368305206, "learning_rate": 0.0001517612521302825, "loss": 1.7985, "step": 7104 }, { "epoch": 0.69384765625, "grad_norm": 0.19011880457401276, "learning_rate": 0.0001517017615347821, "loss": 1.7625, "step": 7105 }, { "epoch": 0.6939453125, "grad_norm": 0.18593943119049072, "learning_rate": 0.00015164228325568324, "loss": 1.8285, "step": 7106 }, { "epoch": 0.69404296875, "grad_norm": 0.17960593104362488, "learning_rate": 0.0001515828172989275, "loss": 1.8407, "step": 7107 }, { "epoch": 0.694140625, "grad_norm": 0.1839790940284729, "learning_rate": 0.00015152336367045506, "loss": 1.7834, "step": 7108 }, { "epoch": 0.69423828125, "grad_norm": 0.18928858637809753, "learning_rate": 0.00015146392237620453, "loss": 1.8345, "step": 7109 }, { "epoch": 0.6943359375, "grad_norm": 0.18579338490962982, "learning_rate": 0.00015140449342211383, "loss": 1.8304, "step": 7110 }, { "epoch": 0.69443359375, "grad_norm": 0.18886177241802216, "learning_rate": 0.00015134507681411916, "loss": 1.7446, "step": 7111 }, { "epoch": 0.69453125, "grad_norm": 0.1665489375591278, "learning_rate": 0.00015128567255815595, "loss": 1.7684, "step": 7112 }, { "epoch": 0.69462890625, "grad_norm": 0.1724035143852234, "learning_rate": 0.00015122628066015792, "loss": 1.7713, "step": 7113 }, { "epoch": 0.6947265625, "grad_norm": 0.1609368771314621, "learning_rate": 0.00015116690112605796, "loss": 1.7791, "step": 7114 }, { "epoch": 0.69482421875, "grad_norm": 0.19012293219566345, "learning_rate": 0.00015110753396178762, "loss": 1.736, "step": 7115 }, { "epoch": 0.694921875, "grad_norm": 0.18231430649757385, "learning_rate": 0.00015104817917327694, "loss": 1.8035, "step": 7116 }, { "epoch": 0.69501953125, "grad_norm": 0.19036336243152618, "learning_rate": 0.00015098883676645507, "loss": 1.7983, "step": 7117 }, { "epoch": 0.6951171875, "grad_norm": 0.17612749338150024, "learning_rate": 0.00015092950674724974, "loss": 1.7821, "step": 7118 }, { "epoch": 0.69521484375, "grad_norm": 0.16669610142707825, 
"learning_rate": 0.00015087018912158764, "loss": 1.7587, "step": 7119 }, { "epoch": 0.6953125, "grad_norm": 0.1986566036939621, "learning_rate": 0.0001508108838953938, "loss": 1.7929, "step": 7120 }, { "epoch": 0.69541015625, "grad_norm": 0.18402375280857086, "learning_rate": 0.00015075159107459248, "loss": 1.7679, "step": 7121 }, { "epoch": 0.6955078125, "grad_norm": 0.16872386634349823, "learning_rate": 0.00015069231066510655, "loss": 1.805, "step": 7122 }, { "epoch": 0.69560546875, "grad_norm": 0.20712271332740784, "learning_rate": 0.0001506330426728575, "loss": 1.7851, "step": 7123 }, { "epoch": 0.695703125, "grad_norm": 0.16342519223690033, "learning_rate": 0.0001505737871037656, "loss": 1.7644, "step": 7124 }, { "epoch": 0.69580078125, "grad_norm": 0.20487897098064423, "learning_rate": 0.0001505145439637501, "loss": 1.7991, "step": 7125 }, { "epoch": 0.6958984375, "grad_norm": 0.17124104499816895, "learning_rate": 0.0001504553132587289, "loss": 1.8306, "step": 7126 }, { "epoch": 0.69599609375, "grad_norm": 0.1997075080871582, "learning_rate": 0.0001503960949946184, "loss": 1.8521, "step": 7127 }, { "epoch": 0.69609375, "grad_norm": 0.17954126000404358, "learning_rate": 0.00015033688917733418, "loss": 1.7561, "step": 7128 }, { "epoch": 0.69619140625, "grad_norm": 0.16179515421390533, "learning_rate": 0.00015027769581279037, "loss": 1.7319, "step": 7129 }, { "epoch": 0.6962890625, "grad_norm": 0.170282244682312, "learning_rate": 0.00015021851490689976, "loss": 1.7524, "step": 7130 }, { "epoch": 0.69638671875, "grad_norm": 0.1912214756011963, "learning_rate": 0.00015015934646557403, "loss": 1.7828, "step": 7131 }, { "epoch": 0.696484375, "grad_norm": 0.1569763422012329, "learning_rate": 0.0001501001904947236, "loss": 1.7842, "step": 7132 }, { "epoch": 0.69658203125, "grad_norm": 0.24038471281528473, "learning_rate": 0.00015004104700025773, "loss": 1.8057, "step": 7133 }, { "epoch": 0.6966796875, "grad_norm": 0.1662021279335022, "learning_rate": 0.00014998191598808403, "loss": 1.8346, "step": 7134 }, { "epoch": 0.69677734375, "grad_norm": 0.19347864389419556, "learning_rate": 0.00014992279746410948, "loss": 1.8026, "step": 7135 }, { "epoch": 0.696875, "grad_norm": 0.18123270571231842, "learning_rate": 0.00014986369143423942, "loss": 1.804, "step": 7136 }, { "epoch": 0.69697265625, "grad_norm": 0.16862811148166656, "learning_rate": 0.0001498045979043779, "loss": 1.7759, "step": 7137 }, { "epoch": 0.6970703125, "grad_norm": 0.20176711678504944, "learning_rate": 0.00014974551688042798, "loss": 1.7613, "step": 7138 }, { "epoch": 0.69716796875, "grad_norm": 0.18291343748569489, "learning_rate": 0.0001496864483682912, "loss": 1.7923, "step": 7139 }, { "epoch": 0.697265625, "grad_norm": 0.1944350302219391, "learning_rate": 0.00014962739237386815, "loss": 1.8184, "step": 7140 }, { "epoch": 0.69736328125, "grad_norm": 0.1939167082309723, "learning_rate": 0.00014956834890305777, "loss": 1.8304, "step": 7141 }, { "epoch": 0.6974609375, "grad_norm": 0.19947269558906555, "learning_rate": 0.00014950931796175814, "loss": 1.8101, "step": 7142 }, { "epoch": 0.69755859375, "grad_norm": 0.18717512488365173, "learning_rate": 0.000149450299555866, "loss": 1.8247, "step": 7143 }, { "epoch": 0.69765625, "grad_norm": 0.1895373910665512, "learning_rate": 0.00014939129369127661, "loss": 1.7812, "step": 7144 }, { "epoch": 0.69775390625, "grad_norm": 0.15928807854652405, "learning_rate": 0.00014933230037388417, "loss": 1.7778, "step": 7145 }, { "epoch": 0.6978515625, "grad_norm": 0.1846880316734314, 
"learning_rate": 0.00014927331960958156, "loss": 1.7851, "step": 7146 }, { "epoch": 0.69794921875, "grad_norm": 0.18106119334697723, "learning_rate": 0.00014921435140426064, "loss": 1.796, "step": 7147 }, { "epoch": 0.698046875, "grad_norm": 0.153510183095932, "learning_rate": 0.00014915539576381147, "loss": 1.7811, "step": 7148 }, { "epoch": 0.69814453125, "grad_norm": 0.16683362424373627, "learning_rate": 0.0001490964526941234, "loss": 1.7634, "step": 7149 }, { "epoch": 0.6982421875, "grad_norm": 0.17028094828128815, "learning_rate": 0.0001490375222010845, "loss": 1.8077, "step": 7150 }, { "epoch": 0.69833984375, "grad_norm": 0.1741085648536682, "learning_rate": 0.00014897860429058103, "loss": 1.7934, "step": 7151 }, { "epoch": 0.6984375, "grad_norm": 0.19794243574142456, "learning_rate": 0.00014891969896849855, "loss": 1.8047, "step": 7152 }, { "epoch": 0.69853515625, "grad_norm": 0.1514754295349121, "learning_rate": 0.00014886080624072122, "loss": 1.7536, "step": 7153 }, { "epoch": 0.6986328125, "grad_norm": 0.18636241555213928, "learning_rate": 0.0001488019261131319, "loss": 1.7531, "step": 7154 }, { "epoch": 0.69873046875, "grad_norm": 0.1704765409231186, "learning_rate": 0.00014874305859161199, "loss": 1.8125, "step": 7155 }, { "epoch": 0.698828125, "grad_norm": 0.16350746154785156, "learning_rate": 0.00014868420368204205, "loss": 1.7859, "step": 7156 }, { "epoch": 0.69892578125, "grad_norm": 0.15320028364658356, "learning_rate": 0.00014862536139030122, "loss": 1.7951, "step": 7157 }, { "epoch": 0.6990234375, "grad_norm": 0.1737348437309265, "learning_rate": 0.00014856653172226711, "loss": 1.8255, "step": 7158 }, { "epoch": 0.69912109375, "grad_norm": 0.15953727066516876, "learning_rate": 0.00014850771468381642, "loss": 1.7923, "step": 7159 }, { "epoch": 0.69921875, "grad_norm": 0.17768947780132294, "learning_rate": 0.00014844891028082435, "loss": 1.8055, "step": 7160 }, { "epoch": 0.69931640625, "grad_norm": 0.17410802841186523, "learning_rate": 0.0001483901185191651, "loss": 1.7673, "step": 7161 }, { "epoch": 0.6994140625, "grad_norm": 0.17361333966255188, "learning_rate": 0.00014833133940471123, "loss": 1.7987, "step": 7162 }, { "epoch": 0.69951171875, "grad_norm": 0.1585068255662918, "learning_rate": 0.0001482725729433344, "loss": 1.8194, "step": 7163 }, { "epoch": 0.699609375, "grad_norm": 0.19176602363586426, "learning_rate": 0.00014821381914090494, "loss": 1.7756, "step": 7164 }, { "epoch": 0.69970703125, "grad_norm": 0.16472671926021576, "learning_rate": 0.00014815507800329164, "loss": 1.8002, "step": 7165 }, { "epoch": 0.6998046875, "grad_norm": 0.21169336140155792, "learning_rate": 0.0001480963495363623, "loss": 1.8058, "step": 7166 }, { "epoch": 0.69990234375, "grad_norm": 0.1583397388458252, "learning_rate": 0.00014803763374598332, "loss": 1.7802, "step": 7167 }, { "epoch": 0.7, "grad_norm": 0.20311272144317627, "learning_rate": 0.00014797893063802012, "loss": 1.8024, "step": 7168 }, { "epoch": 0.70009765625, "grad_norm": 0.19417189061641693, "learning_rate": 0.00014792024021833622, "loss": 1.7974, "step": 7169 }, { "epoch": 0.7001953125, "grad_norm": 0.1849735975265503, "learning_rate": 0.00014786156249279458, "loss": 1.7833, "step": 7170 }, { "epoch": 0.70029296875, "grad_norm": 0.1992449313402176, "learning_rate": 0.00014780289746725662, "loss": 1.8471, "step": 7171 }, { "epoch": 0.700390625, "grad_norm": 0.19165728986263275, "learning_rate": 0.00014774424514758225, "loss": 1.7878, "step": 7172 }, { "epoch": 0.70048828125, "grad_norm": 0.20915056765079498, 
"learning_rate": 0.00014768560553963037, "loss": 1.8021, "step": 7173 }, { "epoch": 0.7005859375, "grad_norm": 0.1690331995487213, "learning_rate": 0.00014762697864925862, "loss": 1.8099, "step": 7174 }, { "epoch": 0.70068359375, "grad_norm": 0.18815776705741882, "learning_rate": 0.00014756836448232336, "loss": 1.7787, "step": 7175 }, { "epoch": 0.70078125, "grad_norm": 0.17468397319316864, "learning_rate": 0.00014750976304467938, "loss": 1.7864, "step": 7176 }, { "epoch": 0.70087890625, "grad_norm": 0.19020915031433105, "learning_rate": 0.0001474511743421807, "loss": 1.7749, "step": 7177 }, { "epoch": 0.7009765625, "grad_norm": 0.17879393696784973, "learning_rate": 0.00014739259838067976, "loss": 1.8022, "step": 7178 }, { "epoch": 0.70107421875, "grad_norm": 0.19218885898590088, "learning_rate": 0.00014733403516602766, "loss": 1.7755, "step": 7179 }, { "epoch": 0.701171875, "grad_norm": 0.1966327279806137, "learning_rate": 0.00014727548470407447, "loss": 1.7721, "step": 7180 }, { "epoch": 0.70126953125, "grad_norm": 0.18549439311027527, "learning_rate": 0.00014721694700066884, "loss": 1.7463, "step": 7181 }, { "epoch": 0.7013671875, "grad_norm": 0.20020927488803864, "learning_rate": 0.00014715842206165818, "loss": 1.8411, "step": 7182 }, { "epoch": 0.70146484375, "grad_norm": 0.17020417749881744, "learning_rate": 0.00014709990989288847, "loss": 1.7995, "step": 7183 }, { "epoch": 0.7015625, "grad_norm": 0.1723005175590515, "learning_rate": 0.0001470414105002047, "loss": 1.7776, "step": 7184 }, { "epoch": 0.70166015625, "grad_norm": 0.17421451210975647, "learning_rate": 0.00014698292388945052, "loss": 1.8214, "step": 7185 }, { "epoch": 0.7017578125, "grad_norm": 0.1879092901945114, "learning_rate": 0.00014692445006646805, "loss": 1.7867, "step": 7186 }, { "epoch": 0.70185546875, "grad_norm": 0.171262726187706, "learning_rate": 0.0001468659890370983, "loss": 1.7658, "step": 7187 }, { "epoch": 0.701953125, "grad_norm": 0.1881995052099228, "learning_rate": 0.00014680754080718117, "loss": 1.7649, "step": 7188 }, { "epoch": 0.70205078125, "grad_norm": 0.17118552327156067, "learning_rate": 0.00014674910538255503, "loss": 1.7804, "step": 7189 }, { "epoch": 0.7021484375, "grad_norm": 0.17448610067367554, "learning_rate": 0.00014669068276905694, "loss": 1.7791, "step": 7190 }, { "epoch": 0.70224609375, "grad_norm": 0.1816215068101883, "learning_rate": 0.000146632272972523, "loss": 1.7777, "step": 7191 }, { "epoch": 0.70234375, "grad_norm": 0.1773415207862854, "learning_rate": 0.00014657387599878785, "loss": 1.7795, "step": 7192 }, { "epoch": 0.70244140625, "grad_norm": 0.19790494441986084, "learning_rate": 0.00014651549185368463, "loss": 1.785, "step": 7193 }, { "epoch": 0.7025390625, "grad_norm": 0.16788199543952942, "learning_rate": 0.00014645712054304553, "loss": 1.7861, "step": 7194 }, { "epoch": 0.70263671875, "grad_norm": 0.20210793614387512, "learning_rate": 0.00014639876207270128, "loss": 1.797, "step": 7195 }, { "epoch": 0.702734375, "grad_norm": 0.1523965448141098, "learning_rate": 0.00014634041644848148, "loss": 1.7369, "step": 7196 }, { "epoch": 0.70283203125, "grad_norm": 0.20466235280036926, "learning_rate": 0.0001462820836762141, "loss": 1.7736, "step": 7197 }, { "epoch": 0.7029296875, "grad_norm": 0.14303214848041534, "learning_rate": 0.0001462237637617263, "loss": 1.7611, "step": 7198 }, { "epoch": 0.70302734375, "grad_norm": 0.20729565620422363, "learning_rate": 0.00014616545671084375, "loss": 1.7635, "step": 7199 }, { "epoch": 0.703125, "grad_norm": 0.16768594086170197, 
"learning_rate": 0.00014610716252939058, "loss": 1.7414, "step": 7200 }, { "epoch": 0.70322265625, "grad_norm": 0.18160150945186615, "learning_rate": 0.00014604888122318998, "loss": 1.8077, "step": 7201 }, { "epoch": 0.7033203125, "grad_norm": 0.1779743731021881, "learning_rate": 0.0001459906127980638, "loss": 1.7958, "step": 7202 }, { "epoch": 0.70341796875, "grad_norm": 0.18949095904827118, "learning_rate": 0.00014593235725983251, "loss": 1.7696, "step": 7203 }, { "epoch": 0.703515625, "grad_norm": 0.1756759136915207, "learning_rate": 0.00014587411461431516, "loss": 1.7862, "step": 7204 }, { "epoch": 0.70361328125, "grad_norm": 0.22456172108650208, "learning_rate": 0.00014581588486732984, "loss": 1.7881, "step": 7205 }, { "epoch": 0.7037109375, "grad_norm": 0.19392071664333344, "learning_rate": 0.00014575766802469325, "loss": 1.8263, "step": 7206 }, { "epoch": 0.70380859375, "grad_norm": 0.2179490029811859, "learning_rate": 0.00014569946409222063, "loss": 1.8221, "step": 7207 }, { "epoch": 0.70390625, "grad_norm": 0.17002339661121368, "learning_rate": 0.000145641273075726, "loss": 1.7289, "step": 7208 }, { "epoch": 0.70400390625, "grad_norm": 0.21364550292491913, "learning_rate": 0.00014558309498102212, "loss": 1.7722, "step": 7209 }, { "epoch": 0.7041015625, "grad_norm": 0.19657489657402039, "learning_rate": 0.0001455249298139207, "loss": 1.8033, "step": 7210 }, { "epoch": 0.70419921875, "grad_norm": 0.20219051837921143, "learning_rate": 0.00014546677758023153, "loss": 1.7945, "step": 7211 }, { "epoch": 0.704296875, "grad_norm": 0.16790801286697388, "learning_rate": 0.0001454086382857638, "loss": 1.7981, "step": 7212 }, { "epoch": 0.70439453125, "grad_norm": 0.18352185189723969, "learning_rate": 0.00014535051193632513, "loss": 1.7615, "step": 7213 }, { "epoch": 0.7044921875, "grad_norm": 0.17234638333320618, "learning_rate": 0.00014529239853772161, "loss": 1.7728, "step": 7214 }, { "epoch": 0.70458984375, "grad_norm": 0.19208668172359467, "learning_rate": 0.0001452342980957584, "loss": 1.7969, "step": 7215 }, { "epoch": 0.7046875, "grad_norm": 0.1606440544128418, "learning_rate": 0.00014517621061623916, "loss": 1.7964, "step": 7216 }, { "epoch": 0.70478515625, "grad_norm": 0.18251542747020721, "learning_rate": 0.00014511813610496645, "loss": 1.7912, "step": 7217 }, { "epoch": 0.7048828125, "grad_norm": 0.1528090089559555, "learning_rate": 0.00014506007456774114, "loss": 1.7984, "step": 7218 }, { "epoch": 0.70498046875, "grad_norm": 0.19245217740535736, "learning_rate": 0.0001450020260103633, "loss": 1.7766, "step": 7219 }, { "epoch": 0.705078125, "grad_norm": 0.16308985650539398, "learning_rate": 0.00014494399043863147, "loss": 1.7904, "step": 7220 }, { "epoch": 0.70517578125, "grad_norm": 0.1835041493177414, "learning_rate": 0.00014488596785834268, "loss": 1.7483, "step": 7221 }, { "epoch": 0.7052734375, "grad_norm": 0.16805297136306763, "learning_rate": 0.00014482795827529303, "loss": 1.7944, "step": 7222 }, { "epoch": 0.70537109375, "grad_norm": 0.16445210576057434, "learning_rate": 0.00014476996169527712, "loss": 1.7721, "step": 7223 }, { "epoch": 0.70546875, "grad_norm": 0.16213344037532806, "learning_rate": 0.00014471197812408827, "loss": 1.7378, "step": 7224 }, { "epoch": 0.70556640625, "grad_norm": 0.1840299367904663, "learning_rate": 0.00014465400756751858, "loss": 1.7838, "step": 7225 }, { "epoch": 0.7056640625, "grad_norm": 0.18428277969360352, "learning_rate": 0.00014459605003135879, "loss": 1.7709, "step": 7226 }, { "epoch": 0.70576171875, "grad_norm": 0.16466780006885529, 
"learning_rate": 0.0001445381055213984, "loss": 1.7288, "step": 7227 }, { "epoch": 0.705859375, "grad_norm": 0.16679660975933075, "learning_rate": 0.0001444801740434254, "loss": 1.7903, "step": 7228 }, { "epoch": 0.70595703125, "grad_norm": 0.16120462119579315, "learning_rate": 0.00014442225560322672, "loss": 1.7553, "step": 7229 }, { "epoch": 0.7060546875, "grad_norm": 0.16525016725063324, "learning_rate": 0.00014436435020658784, "loss": 1.7864, "step": 7230 }, { "epoch": 0.70615234375, "grad_norm": 0.16459806263446808, "learning_rate": 0.0001443064578592931, "loss": 1.7616, "step": 7231 }, { "epoch": 0.70625, "grad_norm": 0.16287195682525635, "learning_rate": 0.00014424857856712537, "loss": 1.7692, "step": 7232 }, { "epoch": 0.70634765625, "grad_norm": 0.16694651544094086, "learning_rate": 0.00014419071233586628, "loss": 1.7717, "step": 7233 }, { "epoch": 0.7064453125, "grad_norm": 0.18044765293598175, "learning_rate": 0.00014413285917129622, "loss": 1.7781, "step": 7234 }, { "epoch": 0.70654296875, "grad_norm": 0.18464335799217224, "learning_rate": 0.00014407501907919414, "loss": 1.7863, "step": 7235 }, { "epoch": 0.706640625, "grad_norm": 0.20815010368824005, "learning_rate": 0.0001440171920653377, "loss": 1.7246, "step": 7236 }, { "epoch": 0.70673828125, "grad_norm": 0.18675754964351654, "learning_rate": 0.00014395937813550336, "loss": 1.8526, "step": 7237 }, { "epoch": 0.7068359375, "grad_norm": 0.1761256456375122, "learning_rate": 0.00014390157729546628, "loss": 1.7595, "step": 7238 }, { "epoch": 0.70693359375, "grad_norm": 0.20941513776779175, "learning_rate": 0.00014384378955100015, "loss": 1.7813, "step": 7239 }, { "epoch": 0.70703125, "grad_norm": 0.1767488270998001, "learning_rate": 0.00014378601490787755, "loss": 1.7979, "step": 7240 }, { "epoch": 0.70712890625, "grad_norm": 0.1953439712524414, "learning_rate": 0.00014372825337186969, "loss": 1.7863, "step": 7241 }, { "epoch": 0.7072265625, "grad_norm": 0.17770998179912567, "learning_rate": 0.00014367050494874624, "loss": 1.8131, "step": 7242 }, { "epoch": 0.70732421875, "grad_norm": 0.18517056107521057, "learning_rate": 0.00014361276964427588, "loss": 1.7511, "step": 7243 }, { "epoch": 0.707421875, "grad_norm": 0.17781153321266174, "learning_rate": 0.00014355504746422584, "loss": 1.7422, "step": 7244 }, { "epoch": 0.70751953125, "grad_norm": 0.17838197946548462, "learning_rate": 0.00014349733841436207, "loss": 1.7801, "step": 7245 }, { "epoch": 0.7076171875, "grad_norm": 0.166599839925766, "learning_rate": 0.00014343964250044923, "loss": 1.7521, "step": 7246 }, { "epoch": 0.70771484375, "grad_norm": 0.1974172592163086, "learning_rate": 0.00014338195972825053, "loss": 1.7599, "step": 7247 }, { "epoch": 0.7078125, "grad_norm": 0.17108438909053802, "learning_rate": 0.0001433242901035281, "loss": 1.7819, "step": 7248 }, { "epoch": 0.70791015625, "grad_norm": 0.18505041301250458, "learning_rate": 0.00014326663363204247, "loss": 1.791, "step": 7249 }, { "epoch": 0.7080078125, "grad_norm": 0.18089908361434937, "learning_rate": 0.0001432089903195531, "loss": 1.8079, "step": 7250 }, { "epoch": 0.70810546875, "grad_norm": 0.1572682410478592, "learning_rate": 0.00014315136017181804, "loss": 1.7804, "step": 7251 }, { "epoch": 0.708203125, "grad_norm": 0.1698255091905594, "learning_rate": 0.000143093743194594, "loss": 1.7596, "step": 7252 }, { "epoch": 0.70830078125, "grad_norm": 0.19964756071567535, "learning_rate": 0.00014303613939363648, "loss": 1.8064, "step": 7253 }, { "epoch": 0.7083984375, "grad_norm": 0.16591346263885498, 
"learning_rate": 0.00014297854877469952, "loss": 1.7921, "step": 7254 }, { "epoch": 0.70849609375, "grad_norm": 0.20315392315387726, "learning_rate": 0.00014292097134353595, "loss": 1.741, "step": 7255 }, { "epoch": 0.70859375, "grad_norm": 0.16226544976234436, "learning_rate": 0.00014286340710589724, "loss": 1.781, "step": 7256 }, { "epoch": 0.70869140625, "grad_norm": 0.18003511428833008, "learning_rate": 0.00014280585606753346, "loss": 1.7738, "step": 7257 }, { "epoch": 0.7087890625, "grad_norm": 0.15134800970554352, "learning_rate": 0.0001427483182341936, "loss": 1.8052, "step": 7258 }, { "epoch": 0.70888671875, "grad_norm": 0.20357757806777954, "learning_rate": 0.00014269079361162504, "loss": 1.7867, "step": 7259 }, { "epoch": 0.708984375, "grad_norm": 0.2080952525138855, "learning_rate": 0.00014263328220557407, "loss": 1.7852, "step": 7260 }, { "epoch": 0.70908203125, "grad_norm": 0.16084307432174683, "learning_rate": 0.00014257578402178557, "loss": 1.8095, "step": 7261 }, { "epoch": 0.7091796875, "grad_norm": 0.17504534125328064, "learning_rate": 0.0001425182990660031, "loss": 1.7816, "step": 7262 }, { "epoch": 0.70927734375, "grad_norm": 0.16018010675907135, "learning_rate": 0.00014246082734396882, "loss": 1.7881, "step": 7263 }, { "epoch": 0.709375, "grad_norm": 0.1809011995792389, "learning_rate": 0.00014240336886142368, "loss": 1.742, "step": 7264 }, { "epoch": 0.70947265625, "grad_norm": 0.16129007935523987, "learning_rate": 0.00014234592362410728, "loss": 1.7973, "step": 7265 }, { "epoch": 0.7095703125, "grad_norm": 0.16717775166034698, "learning_rate": 0.00014228849163775795, "loss": 1.7835, "step": 7266 }, { "epoch": 0.70966796875, "grad_norm": 0.15309709310531616, "learning_rate": 0.00014223107290811256, "loss": 1.789, "step": 7267 }, { "epoch": 0.709765625, "grad_norm": 0.18320903182029724, "learning_rate": 0.00014217366744090676, "loss": 1.7863, "step": 7268 }, { "epoch": 0.70986328125, "grad_norm": 0.16483837366104126, "learning_rate": 0.0001421162752418749, "loss": 1.7899, "step": 7269 }, { "epoch": 0.7099609375, "grad_norm": 0.17426174879074097, "learning_rate": 0.00014205889631674985, "loss": 1.8238, "step": 7270 }, { "epoch": 0.71005859375, "grad_norm": 0.17237837612628937, "learning_rate": 0.0001420015306712633, "loss": 1.7886, "step": 7271 }, { "epoch": 0.71015625, "grad_norm": 0.18018676340579987, "learning_rate": 0.0001419441783111456, "loss": 1.7746, "step": 7272 }, { "epoch": 0.71025390625, "grad_norm": 0.17349764704704285, "learning_rate": 0.00014188683924212567, "loss": 1.7844, "step": 7273 }, { "epoch": 0.7103515625, "grad_norm": 0.18277320265769958, "learning_rate": 0.00014182951346993126, "loss": 1.7881, "step": 7274 }, { "epoch": 0.71044921875, "grad_norm": 0.15737277269363403, "learning_rate": 0.00014177220100028868, "loss": 1.79, "step": 7275 }, { "epoch": 0.710546875, "grad_norm": 0.1631561666727066, "learning_rate": 0.00014171490183892296, "loss": 1.8216, "step": 7276 }, { "epoch": 0.71064453125, "grad_norm": 0.19738301634788513, "learning_rate": 0.0001416576159915577, "loss": 1.8053, "step": 7277 }, { "epoch": 0.7107421875, "grad_norm": 0.16368119418621063, "learning_rate": 0.0001416003434639153, "loss": 1.766, "step": 7278 }, { "epoch": 0.71083984375, "grad_norm": 0.1761307418346405, "learning_rate": 0.00014154308426171674, "loss": 1.7708, "step": 7279 }, { "epoch": 0.7109375, "grad_norm": 0.16696862876415253, "learning_rate": 0.0001414858383906818, "loss": 1.8212, "step": 7280 }, { "epoch": 0.71103515625, "grad_norm": 0.1651100367307663, 
"learning_rate": 0.0001414286058565287, "loss": 1.7991, "step": 7281 }, { "epoch": 0.7111328125, "grad_norm": 0.1816924810409546, "learning_rate": 0.0001413713866649746, "loss": 1.7866, "step": 7282 }, { "epoch": 0.71123046875, "grad_norm": 0.1608307808637619, "learning_rate": 0.00014131418082173519, "loss": 1.7945, "step": 7283 }, { "epoch": 0.711328125, "grad_norm": 0.17417645454406738, "learning_rate": 0.0001412569883325247, "loss": 1.8326, "step": 7284 }, { "epoch": 0.71142578125, "grad_norm": 0.20868746936321259, "learning_rate": 0.0001411998092030562, "loss": 1.7062, "step": 7285 }, { "epoch": 0.7115234375, "grad_norm": 0.19739334285259247, "learning_rate": 0.0001411426434390414, "loss": 1.8272, "step": 7286 }, { "epoch": 0.71162109375, "grad_norm": 0.15947982668876648, "learning_rate": 0.00014108549104619063, "loss": 1.7893, "step": 7287 }, { "epoch": 0.71171875, "grad_norm": 0.20264971256256104, "learning_rate": 0.00014102835203021297, "loss": 1.7432, "step": 7288 }, { "epoch": 0.71181640625, "grad_norm": 0.19442932307720184, "learning_rate": 0.00014097122639681602, "loss": 1.8233, "step": 7289 }, { "epoch": 0.7119140625, "grad_norm": 0.17889422178268433, "learning_rate": 0.00014091411415170628, "loss": 1.7704, "step": 7290 }, { "epoch": 0.71201171875, "grad_norm": 0.2012113332748413, "learning_rate": 0.00014085701530058855, "loss": 1.8274, "step": 7291 }, { "epoch": 0.712109375, "grad_norm": 0.17935416102409363, "learning_rate": 0.0001407999298491666, "loss": 1.8133, "step": 7292 }, { "epoch": 0.71220703125, "grad_norm": 0.18949976563453674, "learning_rate": 0.00014074285780314271, "loss": 1.7632, "step": 7293 }, { "epoch": 0.7123046875, "grad_norm": 0.1800801306962967, "learning_rate": 0.00014068579916821794, "loss": 1.8052, "step": 7294 }, { "epoch": 0.71240234375, "grad_norm": 0.17506444454193115, "learning_rate": 0.00014062875395009196, "loss": 1.7627, "step": 7295 }, { "epoch": 0.7125, "grad_norm": 0.18391722440719604, "learning_rate": 0.00014057172215446297, "loss": 1.7747, "step": 7296 }, { "epoch": 0.71259765625, "grad_norm": 0.20308047533035278, "learning_rate": 0.00014051470378702814, "loss": 1.7622, "step": 7297 }, { "epoch": 0.7126953125, "grad_norm": 0.21284976601600647, "learning_rate": 0.00014045769885348287, "loss": 1.7975, "step": 7298 }, { "epoch": 0.71279296875, "grad_norm": 0.20721568167209625, "learning_rate": 0.00014040070735952154, "loss": 1.7703, "step": 7299 }, { "epoch": 0.712890625, "grad_norm": 0.19586989283561707, "learning_rate": 0.0001403437293108371, "loss": 1.7923, "step": 7300 }, { "epoch": 0.71298828125, "grad_norm": 0.17941758036613464, "learning_rate": 0.0001402867647131212, "loss": 1.7804, "step": 7301 }, { "epoch": 0.7130859375, "grad_norm": 0.23242142796516418, "learning_rate": 0.00014022981357206398, "loss": 1.8029, "step": 7302 }, { "epoch": 0.71318359375, "grad_norm": 0.16339819133281708, "learning_rate": 0.00014017287589335447, "loss": 1.7708, "step": 7303 }, { "epoch": 0.71328125, "grad_norm": 0.22891005873680115, "learning_rate": 0.00014011595168268026, "loss": 1.7953, "step": 7304 }, { "epoch": 0.71337890625, "grad_norm": 0.19407442212104797, "learning_rate": 0.00014005904094572746, "loss": 1.8003, "step": 7305 }, { "epoch": 0.7134765625, "grad_norm": 0.20546594262123108, "learning_rate": 0.000140002143688181, "loss": 1.802, "step": 7306 }, { "epoch": 0.71357421875, "grad_norm": 0.19747906923294067, "learning_rate": 0.00013994525991572438, "loss": 1.7951, "step": 7307 }, { "epoch": 0.713671875, "grad_norm": 0.1798185110092163, 
"learning_rate": 0.0001398883896340399, "loss": 1.7616, "step": 7308 }, { "epoch": 0.71376953125, "grad_norm": 0.1720678061246872, "learning_rate": 0.0001398315328488082, "loss": 1.806, "step": 7309 }, { "epoch": 0.7138671875, "grad_norm": 0.17424190044403076, "learning_rate": 0.00013977468956570893, "loss": 1.7802, "step": 7310 }, { "epoch": 0.71396484375, "grad_norm": 0.19362299144268036, "learning_rate": 0.00013971785979042018, "loss": 1.7966, "step": 7311 }, { "epoch": 0.7140625, "grad_norm": 0.171610489487648, "learning_rate": 0.00013966104352861885, "loss": 1.767, "step": 7312 }, { "epoch": 0.71416015625, "grad_norm": 0.1866857260465622, "learning_rate": 0.00013960424078598017, "loss": 1.8027, "step": 7313 }, { "epoch": 0.7142578125, "grad_norm": 0.16544665396213531, "learning_rate": 0.00013954745156817834, "loss": 1.803, "step": 7314 }, { "epoch": 0.71435546875, "grad_norm": 0.17936468124389648, "learning_rate": 0.0001394906758808861, "loss": 1.7734, "step": 7315 }, { "epoch": 0.714453125, "grad_norm": 0.1764443814754486, "learning_rate": 0.00013943391372977482, "loss": 1.787, "step": 7316 }, { "epoch": 0.71455078125, "grad_norm": 0.1752578467130661, "learning_rate": 0.00013937716512051458, "loss": 1.7884, "step": 7317 }, { "epoch": 0.7146484375, "grad_norm": 0.17097489535808563, "learning_rate": 0.00013932043005877403, "loss": 1.7795, "step": 7318 }, { "epoch": 0.71474609375, "grad_norm": 0.17047381401062012, "learning_rate": 0.00013926370855022057, "loss": 1.781, "step": 7319 }, { "epoch": 0.71484375, "grad_norm": 0.18346750736236572, "learning_rate": 0.00013920700060052004, "loss": 1.8144, "step": 7320 }, { "epoch": 0.71494140625, "grad_norm": 0.16473081707954407, "learning_rate": 0.00013915030621533715, "loss": 1.7761, "step": 7321 }, { "epoch": 0.7150390625, "grad_norm": 0.17601098120212555, "learning_rate": 0.0001390936254003351, "loss": 1.8342, "step": 7322 }, { "epoch": 0.71513671875, "grad_norm": 0.1926531046628952, "learning_rate": 0.0001390369581611759, "loss": 1.7642, "step": 7323 }, { "epoch": 0.715234375, "grad_norm": 0.1662948876619339, "learning_rate": 0.00013898030450352, "loss": 1.8081, "step": 7324 }, { "epoch": 0.71533203125, "grad_norm": 0.19266390800476074, "learning_rate": 0.00013892366443302674, "loss": 1.7892, "step": 7325 }, { "epoch": 0.7154296875, "grad_norm": 0.1975274533033371, "learning_rate": 0.00013886703795535388, "loss": 1.7494, "step": 7326 }, { "epoch": 0.71552734375, "grad_norm": 0.15989366173744202, "learning_rate": 0.00013881042507615787, "loss": 1.7607, "step": 7327 }, { "epoch": 0.715625, "grad_norm": 0.20971594750881195, "learning_rate": 0.0001387538258010938, "loss": 1.7934, "step": 7328 }, { "epoch": 0.71572265625, "grad_norm": 0.15928344428539276, "learning_rate": 0.00013869724013581555, "loss": 1.8099, "step": 7329 }, { "epoch": 0.7158203125, "grad_norm": 0.19634442031383514, "learning_rate": 0.00013864066808597547, "loss": 1.7903, "step": 7330 }, { "epoch": 0.71591796875, "grad_norm": 0.17213429510593414, "learning_rate": 0.00013858410965722463, "loss": 1.8107, "step": 7331 }, { "epoch": 0.716015625, "grad_norm": 0.16371653974056244, "learning_rate": 0.00013852756485521268, "loss": 1.7654, "step": 7332 }, { "epoch": 0.71611328125, "grad_norm": 0.19284141063690186, "learning_rate": 0.000138471033685588, "loss": 1.7648, "step": 7333 }, { "epoch": 0.7162109375, "grad_norm": 0.17627480626106262, "learning_rate": 0.00013841451615399748, "loss": 1.7316, "step": 7334 }, { "epoch": 0.71630859375, "grad_norm": 0.2011154294013977, 
"learning_rate": 0.00013835801226608677, "loss": 1.7273, "step": 7335 }, { "epoch": 0.71640625, "grad_norm": 0.17693808674812317, "learning_rate": 0.00013830152202750007, "loss": 1.7363, "step": 7336 }, { "epoch": 0.71650390625, "grad_norm": 0.18761859834194183, "learning_rate": 0.00013824504544388027, "loss": 1.782, "step": 7337 }, { "epoch": 0.7166015625, "grad_norm": 0.16142895817756653, "learning_rate": 0.00013818858252086897, "loss": 1.8254, "step": 7338 }, { "epoch": 0.71669921875, "grad_norm": 0.19337977468967438, "learning_rate": 0.00013813213326410614, "loss": 1.7357, "step": 7339 }, { "epoch": 0.716796875, "grad_norm": 0.1727215051651001, "learning_rate": 0.00013807569767923078, "loss": 1.8089, "step": 7340 }, { "epoch": 0.71689453125, "grad_norm": 0.22707262635231018, "learning_rate": 0.00013801927577188013, "loss": 1.8412, "step": 7341 }, { "epoch": 0.7169921875, "grad_norm": 0.17474618554115295, "learning_rate": 0.00013796286754769014, "loss": 1.8188, "step": 7342 }, { "epoch": 0.71708984375, "grad_norm": 0.19014835357666016, "learning_rate": 0.00013790647301229584, "loss": 1.7849, "step": 7343 }, { "epoch": 0.7171875, "grad_norm": 0.16324470937252045, "learning_rate": 0.00013785009217133026, "loss": 1.7633, "step": 7344 }, { "epoch": 0.71728515625, "grad_norm": 0.16419410705566406, "learning_rate": 0.00013779372503042547, "loss": 1.8245, "step": 7345 }, { "epoch": 0.7173828125, "grad_norm": 0.17206589877605438, "learning_rate": 0.00013773737159521198, "loss": 1.77, "step": 7346 }, { "epoch": 0.71748046875, "grad_norm": 0.1604912430047989, "learning_rate": 0.00013768103187131907, "loss": 1.7897, "step": 7347 }, { "epoch": 0.717578125, "grad_norm": 0.187465101480484, "learning_rate": 0.00013762470586437453, "loss": 1.7967, "step": 7348 }, { "epoch": 0.71767578125, "grad_norm": 0.19727656245231628, "learning_rate": 0.00013756839358000474, "loss": 1.7942, "step": 7349 }, { "epoch": 0.7177734375, "grad_norm": 0.16194015741348267, "learning_rate": 0.00013751209502383503, "loss": 1.7845, "step": 7350 }, { "epoch": 0.71787109375, "grad_norm": 0.21801359951496124, "learning_rate": 0.0001374558102014889, "loss": 1.7814, "step": 7351 }, { "epoch": 0.71796875, "grad_norm": 0.19502060115337372, "learning_rate": 0.00013739953911858885, "loss": 1.7572, "step": 7352 }, { "epoch": 0.71806640625, "grad_norm": 0.20716382563114166, "learning_rate": 0.00013734328178075577, "loss": 1.792, "step": 7353 }, { "epoch": 0.7181640625, "grad_norm": 0.18944132328033447, "learning_rate": 0.00013728703819360942, "loss": 1.7792, "step": 7354 }, { "epoch": 0.71826171875, "grad_norm": 0.17309802770614624, "learning_rate": 0.00013723080836276785, "loss": 1.7399, "step": 7355 }, { "epoch": 0.718359375, "grad_norm": 0.1766345351934433, "learning_rate": 0.00013717459229384794, "loss": 1.8087, "step": 7356 }, { "epoch": 0.71845703125, "grad_norm": 0.17558035254478455, "learning_rate": 0.00013711838999246533, "loss": 1.7578, "step": 7357 }, { "epoch": 0.7185546875, "grad_norm": 0.1739203929901123, "learning_rate": 0.00013706220146423405, "loss": 1.7858, "step": 7358 }, { "epoch": 0.71865234375, "grad_norm": 0.20705237984657288, "learning_rate": 0.00013700602671476675, "loss": 1.8116, "step": 7359 }, { "epoch": 0.71875, "grad_norm": 0.18020687997341156, "learning_rate": 0.00013694986574967486, "loss": 1.808, "step": 7360 }, { "epoch": 0.71884765625, "grad_norm": 0.19875745475292206, "learning_rate": 0.0001368937185745685, "loss": 1.7559, "step": 7361 }, { "epoch": 0.7189453125, "grad_norm": 0.16574275493621826, 
"learning_rate": 0.00013683758519505602, "loss": 1.7762, "step": 7362 }, { "epoch": 0.71904296875, "grad_norm": 0.19599327445030212, "learning_rate": 0.0001367814656167447, "loss": 1.7547, "step": 7363 }, { "epoch": 0.719140625, "grad_norm": 0.18946218490600586, "learning_rate": 0.00013672535984524063, "loss": 1.7999, "step": 7364 }, { "epoch": 0.71923828125, "grad_norm": 0.1950998455286026, "learning_rate": 0.00013666926788614804, "loss": 1.7882, "step": 7365 }, { "epoch": 0.7193359375, "grad_norm": 0.16780412197113037, "learning_rate": 0.00013661318974507003, "loss": 1.8016, "step": 7366 }, { "epoch": 0.71943359375, "grad_norm": 0.2038903534412384, "learning_rate": 0.0001365571254276084, "loss": 1.7458, "step": 7367 }, { "epoch": 0.71953125, "grad_norm": 0.16975681483745575, "learning_rate": 0.00013650107493936355, "loss": 1.7599, "step": 7368 }, { "epoch": 0.71962890625, "grad_norm": 0.19641663134098053, "learning_rate": 0.00013644503828593425, "loss": 1.7768, "step": 7369 }, { "epoch": 0.7197265625, "grad_norm": 0.18100400269031525, "learning_rate": 0.00013638901547291806, "loss": 1.8069, "step": 7370 }, { "epoch": 0.71982421875, "grad_norm": 0.1927107721567154, "learning_rate": 0.00013633300650591135, "loss": 1.813, "step": 7371 }, { "epoch": 0.719921875, "grad_norm": 0.18132886290550232, "learning_rate": 0.00013627701139050876, "loss": 1.7772, "step": 7372 }, { "epoch": 0.72001953125, "grad_norm": 0.1746576726436615, "learning_rate": 0.00013622103013230375, "loss": 1.7727, "step": 7373 }, { "epoch": 0.7201171875, "grad_norm": 0.17436908185482025, "learning_rate": 0.00013616506273688838, "loss": 1.743, "step": 7374 }, { "epoch": 0.72021484375, "grad_norm": 0.20030182600021362, "learning_rate": 0.00013610910920985333, "loss": 1.8062, "step": 7375 }, { "epoch": 0.7203125, "grad_norm": 0.20663563907146454, "learning_rate": 0.00013605316955678777, "loss": 1.7762, "step": 7376 }, { "epoch": 0.72041015625, "grad_norm": 0.19021959602832794, "learning_rate": 0.00013599724378327948, "loss": 1.7844, "step": 7377 }, { "epoch": 0.7205078125, "grad_norm": 0.17393898963928223, "learning_rate": 0.00013594133189491524, "loss": 1.7849, "step": 7378 }, { "epoch": 0.72060546875, "grad_norm": 0.20883366465568542, "learning_rate": 0.00013588543389727998, "loss": 1.8112, "step": 7379 }, { "epoch": 0.720703125, "grad_norm": 0.17377766966819763, "learning_rate": 0.00013582954979595734, "loss": 1.7701, "step": 7380 }, { "epoch": 0.72080078125, "grad_norm": 0.1823614090681076, "learning_rate": 0.0001357736795965298, "loss": 1.786, "step": 7381 }, { "epoch": 0.7208984375, "grad_norm": 0.1657777577638626, "learning_rate": 0.0001357178233045783, "loss": 1.7645, "step": 7382 }, { "epoch": 0.72099609375, "grad_norm": 0.1628219038248062, "learning_rate": 0.00013566198092568224, "loss": 1.7643, "step": 7383 }, { "epoch": 0.72109375, "grad_norm": 0.1668781340122223, "learning_rate": 0.0001356061524654198, "loss": 1.7677, "step": 7384 }, { "epoch": 0.72119140625, "grad_norm": 0.16812074184417725, "learning_rate": 0.00013555033792936793, "loss": 1.7766, "step": 7385 }, { "epoch": 0.7212890625, "grad_norm": 0.19452530145645142, "learning_rate": 0.00013549453732310184, "loss": 1.7789, "step": 7386 }, { "epoch": 0.72138671875, "grad_norm": 0.1812771111726761, "learning_rate": 0.00013543875065219558, "loss": 1.7625, "step": 7387 }, { "epoch": 0.721484375, "grad_norm": 0.17223869264125824, "learning_rate": 0.00013538297792222172, "loss": 1.7948, "step": 7388 }, { "epoch": 0.72158203125, "grad_norm": 0.15679438412189484, 
"learning_rate": 0.00013532721913875152, "loss": 1.756, "step": 7389 }, { "epoch": 0.7216796875, "grad_norm": 0.17648717761039734, "learning_rate": 0.00013527147430735466, "loss": 1.7607, "step": 7390 }, { "epoch": 0.72177734375, "grad_norm": 0.16601228713989258, "learning_rate": 0.00013521574343359957, "loss": 1.7466, "step": 7391 }, { "epoch": 0.721875, "grad_norm": 0.19103634357452393, "learning_rate": 0.00013516002652305347, "loss": 1.8166, "step": 7392 }, { "epoch": 0.72197265625, "grad_norm": 0.1656274050474167, "learning_rate": 0.0001351043235812818, "loss": 1.7458, "step": 7393 }, { "epoch": 0.7220703125, "grad_norm": 0.19549719989299774, "learning_rate": 0.00013504863461384883, "loss": 1.7819, "step": 7394 }, { "epoch": 0.72216796875, "grad_norm": 0.14230042695999146, "learning_rate": 0.0001349929596263174, "loss": 1.8211, "step": 7395 }, { "epoch": 0.722265625, "grad_norm": 0.20790985226631165, "learning_rate": 0.0001349372986242491, "loss": 1.7745, "step": 7396 }, { "epoch": 0.72236328125, "grad_norm": 0.16216805577278137, "learning_rate": 0.00013488165161320366, "loss": 1.7744, "step": 7397 }, { "epoch": 0.7224609375, "grad_norm": 0.15003430843353271, "learning_rate": 0.00013482601859873982, "loss": 1.758, "step": 7398 }, { "epoch": 0.72255859375, "grad_norm": 0.1683526337146759, "learning_rate": 0.00013477039958641508, "loss": 1.7761, "step": 7399 }, { "epoch": 0.72265625, "grad_norm": 0.17012375593185425, "learning_rate": 0.00013471479458178497, "loss": 1.7978, "step": 7400 }, { "epoch": 0.72275390625, "grad_norm": 0.15678620338439941, "learning_rate": 0.0001346592035904041, "loss": 1.7436, "step": 7401 }, { "epoch": 0.7228515625, "grad_norm": 0.17875069379806519, "learning_rate": 0.00013460362661782548, "loss": 1.781, "step": 7402 }, { "epoch": 0.72294921875, "grad_norm": 0.15281298756599426, "learning_rate": 0.00013454806366960088, "loss": 1.8128, "step": 7403 }, { "epoch": 0.723046875, "grad_norm": 0.17729079723358154, "learning_rate": 0.00013449251475128028, "loss": 1.7846, "step": 7404 }, { "epoch": 0.72314453125, "grad_norm": 0.16915789246559143, "learning_rate": 0.00013443697986841263, "loss": 1.8492, "step": 7405 }, { "epoch": 0.7232421875, "grad_norm": 0.20500414073467255, "learning_rate": 0.00013438145902654552, "loss": 1.7713, "step": 7406 }, { "epoch": 0.72333984375, "grad_norm": 0.1689242422580719, "learning_rate": 0.00013432595223122491, "loss": 1.7709, "step": 7407 }, { "epoch": 0.7234375, "grad_norm": 0.18387208878993988, "learning_rate": 0.00013427045948799531, "loss": 1.7935, "step": 7408 }, { "epoch": 0.72353515625, "grad_norm": 0.1877800077199936, "learning_rate": 0.00013421498080240014, "loss": 1.7459, "step": 7409 }, { "epoch": 0.7236328125, "grad_norm": 0.17994552850723267, "learning_rate": 0.00013415951617998118, "loss": 1.7754, "step": 7410 }, { "epoch": 0.72373046875, "grad_norm": 0.2191135734319687, "learning_rate": 0.00013410406562627876, "loss": 1.7951, "step": 7411 }, { "epoch": 0.723828125, "grad_norm": 0.16902075707912445, "learning_rate": 0.00013404862914683186, "loss": 1.7552, "step": 7412 }, { "epoch": 0.72392578125, "grad_norm": 0.1902250349521637, "learning_rate": 0.00013399320674717833, "loss": 1.8083, "step": 7413 }, { "epoch": 0.7240234375, "grad_norm": 0.19407428801059723, "learning_rate": 0.00013393779843285416, "loss": 1.796, "step": 7414 }, { "epoch": 0.72412109375, "grad_norm": 0.15319876372814178, "learning_rate": 0.0001338824042093943, "loss": 1.7877, "step": 7415 }, { "epoch": 0.72421875, "grad_norm": 0.18018116056919098, 
"learning_rate": 0.00013382702408233199, "loss": 1.7752, "step": 7416 }, { "epoch": 0.72431640625, "grad_norm": 0.15209300816059113, "learning_rate": 0.0001337716580571994, "loss": 1.7709, "step": 7417 }, { "epoch": 0.7244140625, "grad_norm": 0.16800326108932495, "learning_rate": 0.0001337163061395269, "loss": 1.7587, "step": 7418 }, { "epoch": 0.72451171875, "grad_norm": 0.1575421839952469, "learning_rate": 0.00013366096833484372, "loss": 1.771, "step": 7419 }, { "epoch": 0.724609375, "grad_norm": 0.15844573080539703, "learning_rate": 0.0001336056446486778, "loss": 1.798, "step": 7420 }, { "epoch": 0.72470703125, "grad_norm": 0.17066654562950134, "learning_rate": 0.00013355033508655526, "loss": 1.7971, "step": 7421 }, { "epoch": 0.7248046875, "grad_norm": 0.14472037553787231, "learning_rate": 0.0001334950396540011, "loss": 1.7584, "step": 7422 }, { "epoch": 0.72490234375, "grad_norm": 0.19444644451141357, "learning_rate": 0.00013343975835653887, "loss": 1.8054, "step": 7423 }, { "epoch": 0.725, "grad_norm": 0.1636078804731369, "learning_rate": 0.00013338449119969074, "loss": 1.7343, "step": 7424 }, { "epoch": 0.72509765625, "grad_norm": 0.1884550303220749, "learning_rate": 0.00013332923818897723, "loss": 1.8074, "step": 7425 }, { "epoch": 0.7251953125, "grad_norm": 0.1609734296798706, "learning_rate": 0.0001332739993299177, "loss": 1.8006, "step": 7426 }, { "epoch": 0.72529296875, "grad_norm": 0.17826154828071594, "learning_rate": 0.00013321877462803023, "loss": 1.8105, "step": 7427 }, { "epoch": 0.725390625, "grad_norm": 0.18943947553634644, "learning_rate": 0.00013316356408883098, "loss": 1.751, "step": 7428 }, { "epoch": 0.72548828125, "grad_norm": 0.17735719680786133, "learning_rate": 0.00013310836771783513, "loss": 1.7973, "step": 7429 }, { "epoch": 0.7255859375, "grad_norm": 0.17730891704559326, "learning_rate": 0.0001330531855205563, "loss": 1.759, "step": 7430 }, { "epoch": 0.72568359375, "grad_norm": 0.17595741152763367, "learning_rate": 0.0001329980175025068, "loss": 1.7944, "step": 7431 }, { "epoch": 0.72578125, "grad_norm": 0.16178099811077118, "learning_rate": 0.0001329428636691972, "loss": 1.8019, "step": 7432 }, { "epoch": 0.72587890625, "grad_norm": 0.20344801247119904, "learning_rate": 0.000132887724026137, "loss": 1.7932, "step": 7433 }, { "epoch": 0.7259765625, "grad_norm": 0.1733372062444687, "learning_rate": 0.00013283259857883428, "loss": 1.786, "step": 7434 }, { "epoch": 0.72607421875, "grad_norm": 0.17082051932811737, "learning_rate": 0.0001327774873327954, "loss": 1.7751, "step": 7435 }, { "epoch": 0.726171875, "grad_norm": 0.17025423049926758, "learning_rate": 0.00013272239029352557, "loss": 1.7879, "step": 7436 }, { "epoch": 0.72626953125, "grad_norm": 0.18467023968696594, "learning_rate": 0.00013266730746652847, "loss": 1.8258, "step": 7437 }, { "epoch": 0.7263671875, "grad_norm": 0.1668064445257187, "learning_rate": 0.0001326122388573065, "loss": 1.7806, "step": 7438 }, { "epoch": 0.72646484375, "grad_norm": 0.17250077426433563, "learning_rate": 0.00013255718447136033, "loss": 1.8106, "step": 7439 }, { "epoch": 0.7265625, "grad_norm": 0.17105591297149658, "learning_rate": 0.0001325021443141894, "loss": 1.7943, "step": 7440 }, { "epoch": 0.72666015625, "grad_norm": 0.18120525777339935, "learning_rate": 0.00013244711839129202, "loss": 1.8372, "step": 7441 }, { "epoch": 0.7267578125, "grad_norm": 0.1756768822669983, "learning_rate": 0.00013239210670816455, "loss": 1.7701, "step": 7442 }, { "epoch": 0.72685546875, "grad_norm": 0.17965537309646606, "learning_rate": 
0.00013233710927030217, "loss": 1.7725, "step": 7443 }, { "epoch": 0.726953125, "grad_norm": 0.16301393508911133, "learning_rate": 0.00013228212608319874, "loss": 1.788, "step": 7444 }, { "epoch": 0.72705078125, "grad_norm": 0.19617660343647003, "learning_rate": 0.00013222715715234662, "loss": 1.7958, "step": 7445 }, { "epoch": 0.7271484375, "grad_norm": 0.14668630063533783, "learning_rate": 0.00013217220248323654, "loss": 1.6955, "step": 7446 }, { "epoch": 0.72724609375, "grad_norm": 0.2118115872144699, "learning_rate": 0.00013211726208135804, "loss": 1.7642, "step": 7447 }, { "epoch": 0.72734375, "grad_norm": 0.1551916003227234, "learning_rate": 0.00013206233595219942, "loss": 1.7364, "step": 7448 }, { "epoch": 0.72744140625, "grad_norm": 0.19006231427192688, "learning_rate": 0.000132007424101247, "loss": 1.7831, "step": 7449 }, { "epoch": 0.7275390625, "grad_norm": 0.1846655309200287, "learning_rate": 0.0001319525265339861, "loss": 1.8011, "step": 7450 }, { "epoch": 0.72763671875, "grad_norm": 0.16944247484207153, "learning_rate": 0.00013189764325590055, "loss": 1.7954, "step": 7451 }, { "epoch": 0.727734375, "grad_norm": 0.20167969167232513, "learning_rate": 0.0001318427742724727, "loss": 1.7585, "step": 7452 }, { "epoch": 0.72783203125, "grad_norm": 0.15925198793411255, "learning_rate": 0.0001317879195891834, "loss": 1.8218, "step": 7453 }, { "epoch": 0.7279296875, "grad_norm": 0.20472463965415955, "learning_rate": 0.0001317330792115121, "loss": 1.7457, "step": 7454 }, { "epoch": 0.72802734375, "grad_norm": 0.15267744660377502, "learning_rate": 0.00013167825314493712, "loss": 1.7747, "step": 7455 }, { "epoch": 0.728125, "grad_norm": 0.17882832884788513, "learning_rate": 0.00013162344139493485, "loss": 1.8094, "step": 7456 }, { "epoch": 0.72822265625, "grad_norm": 0.17465415596961975, "learning_rate": 0.00013156864396698058, "loss": 1.7849, "step": 7457 }, { "epoch": 0.7283203125, "grad_norm": 0.17538368701934814, "learning_rate": 0.0001315138608665481, "loss": 1.7774, "step": 7458 }, { "epoch": 0.72841796875, "grad_norm": 0.1678989827632904, "learning_rate": 0.00013145909209910984, "loss": 1.79, "step": 7459 }, { "epoch": 0.728515625, "grad_norm": 0.1680895984172821, "learning_rate": 0.00013140433767013643, "loss": 1.8101, "step": 7460 }, { "epoch": 0.72861328125, "grad_norm": 0.16433890163898468, "learning_rate": 0.00013134959758509762, "loss": 1.7772, "step": 7461 }, { "epoch": 0.7287109375, "grad_norm": 0.15576274693012238, "learning_rate": 0.00013129487184946147, "loss": 1.7893, "step": 7462 }, { "epoch": 0.72880859375, "grad_norm": 0.16480329632759094, "learning_rate": 0.00013124016046869448, "loss": 1.7872, "step": 7463 }, { "epoch": 0.72890625, "grad_norm": 0.17765314877033234, "learning_rate": 0.00013118546344826175, "loss": 1.7544, "step": 7464 }, { "epoch": 0.72900390625, "grad_norm": 0.16216062009334564, "learning_rate": 0.0001311307807936273, "loss": 1.7384, "step": 7465 }, { "epoch": 0.7291015625, "grad_norm": 0.2023918479681015, "learning_rate": 0.0001310761125102532, "loss": 1.794, "step": 7466 }, { "epoch": 0.72919921875, "grad_norm": 0.18014465272426605, "learning_rate": 0.00013102145860360032, "loss": 1.7585, "step": 7467 }, { "epoch": 0.729296875, "grad_norm": 0.19077914953231812, "learning_rate": 0.0001309668190791283, "loss": 1.7453, "step": 7468 }, { "epoch": 0.72939453125, "grad_norm": 0.16500376164913177, "learning_rate": 0.0001309121939422951, "loss": 1.7385, "step": 7469 }, { "epoch": 0.7294921875, "grad_norm": 0.15535402297973633, "learning_rate": 
0.0001308575831985571, "loss": 1.7784, "step": 7470 }, { "epoch": 0.72958984375, "grad_norm": 0.186142697930336, "learning_rate": 0.00013080298685336957, "loss": 1.7837, "step": 7471 }, { "epoch": 0.7296875, "grad_norm": 0.17004217207431793, "learning_rate": 0.0001307484049121862, "loss": 1.7583, "step": 7472 }, { "epoch": 0.72978515625, "grad_norm": 0.18570908904075623, "learning_rate": 0.0001306938373804593, "loss": 1.7382, "step": 7473 }, { "epoch": 0.7298828125, "grad_norm": 0.1649671494960785, "learning_rate": 0.00013063928426363948, "loss": 1.7918, "step": 7474 }, { "epoch": 0.72998046875, "grad_norm": 0.16941043734550476, "learning_rate": 0.00013058474556717624, "loss": 1.8358, "step": 7475 }, { "epoch": 0.730078125, "grad_norm": 0.21059249341487885, "learning_rate": 0.00013053022129651764, "loss": 1.738, "step": 7476 }, { "epoch": 0.73017578125, "grad_norm": 0.14668527245521545, "learning_rate": 0.00013047571145710998, "loss": 1.722, "step": 7477 }, { "epoch": 0.7302734375, "grad_norm": 0.17605291306972504, "learning_rate": 0.0001304212160543984, "loss": 1.7802, "step": 7478 }, { "epoch": 0.73037109375, "grad_norm": 0.18063002824783325, "learning_rate": 0.00013036673509382644, "loss": 1.8288, "step": 7479 }, { "epoch": 0.73046875, "grad_norm": 0.14899572730064392, "learning_rate": 0.00013031226858083636, "loss": 1.8049, "step": 7480 }, { "epoch": 0.73056640625, "grad_norm": 0.19755159318447113, "learning_rate": 0.0001302578165208687, "loss": 1.7771, "step": 7481 }, { "epoch": 0.7306640625, "grad_norm": 0.1508341282606125, "learning_rate": 0.000130203378919363, "loss": 1.7509, "step": 7482 }, { "epoch": 0.73076171875, "grad_norm": 0.17492534220218658, "learning_rate": 0.00013014895578175698, "loss": 1.7664, "step": 7483 }, { "epoch": 0.730859375, "grad_norm": 0.1679675132036209, "learning_rate": 0.00013009454711348695, "loss": 1.74, "step": 7484 }, { "epoch": 0.73095703125, "grad_norm": 0.16690431535243988, "learning_rate": 0.00013004015291998792, "loss": 1.73, "step": 7485 }, { "epoch": 0.7310546875, "grad_norm": 0.1829078495502472, "learning_rate": 0.00012998577320669336, "loss": 1.7988, "step": 7486 }, { "epoch": 0.73115234375, "grad_norm": 0.16561903059482574, "learning_rate": 0.00012993140797903547, "loss": 1.789, "step": 7487 }, { "epoch": 0.73125, "grad_norm": 0.18317846953868866, "learning_rate": 0.00012987705724244457, "loss": 1.7999, "step": 7488 }, { "epoch": 0.73134765625, "grad_norm": 0.18856428563594818, "learning_rate": 0.00012982272100234998, "loss": 1.799, "step": 7489 }, { "epoch": 0.7314453125, "grad_norm": 0.1738317459821701, "learning_rate": 0.00012976839926417957, "loss": 1.8097, "step": 7490 }, { "epoch": 0.73154296875, "grad_norm": 0.18362414836883545, "learning_rate": 0.00012971409203335933, "loss": 1.7717, "step": 7491 }, { "epoch": 0.731640625, "grad_norm": 0.20017080008983612, "learning_rate": 0.00012965979931531417, "loss": 1.7154, "step": 7492 }, { "epoch": 0.73173828125, "grad_norm": 0.15868380665779114, "learning_rate": 0.00012960552111546749, "loss": 1.7357, "step": 7493 }, { "epoch": 0.7318359375, "grad_norm": 0.1899017095565796, "learning_rate": 0.00012955125743924122, "loss": 1.7792, "step": 7494 }, { "epoch": 0.73193359375, "grad_norm": 0.14255568385124207, "learning_rate": 0.00012949700829205564, "loss": 1.7655, "step": 7495 }, { "epoch": 0.73203125, "grad_norm": 0.1990482062101364, "learning_rate": 0.00012944277367932998, "loss": 1.8125, "step": 7496 }, { "epoch": 0.73212890625, "grad_norm": 0.18005958199501038, "learning_rate": 
0.00012938855360648176, "loss": 1.7994, "step": 7497 }, { "epoch": 0.7322265625, "grad_norm": 0.19129250943660736, "learning_rate": 0.00012933434807892695, "loss": 1.7924, "step": 7498 }, { "epoch": 0.73232421875, "grad_norm": 0.168843612074852, "learning_rate": 0.00012928015710208027, "loss": 1.7974, "step": 7499 }, { "epoch": 0.732421875, "grad_norm": 0.1725001037120819, "learning_rate": 0.00012922598068135495, "loss": 1.7838, "step": 7500 }, { "epoch": 0.73251953125, "grad_norm": 0.20322178304195404, "learning_rate": 0.0001291718188221628, "loss": 1.7902, "step": 7501 }, { "epoch": 0.7326171875, "grad_norm": 0.1639525443315506, "learning_rate": 0.0001291176715299139, "loss": 1.8002, "step": 7502 }, { "epoch": 0.73271484375, "grad_norm": 0.20017413794994354, "learning_rate": 0.00012906353881001725, "loss": 1.7837, "step": 7503 }, { "epoch": 0.7328125, "grad_norm": 0.15371525287628174, "learning_rate": 0.0001290094206678803, "loss": 1.7317, "step": 7504 }, { "epoch": 0.73291015625, "grad_norm": 0.2154158055782318, "learning_rate": 0.00012895531710890878, "loss": 1.7727, "step": 7505 }, { "epoch": 0.7330078125, "grad_norm": 0.161368265748024, "learning_rate": 0.00012890122813850725, "loss": 1.7851, "step": 7506 }, { "epoch": 0.73310546875, "grad_norm": 0.1790924072265625, "learning_rate": 0.00012884715376207867, "loss": 1.7917, "step": 7507 }, { "epoch": 0.733203125, "grad_norm": 0.16642016172409058, "learning_rate": 0.00012879309398502474, "loss": 1.748, "step": 7508 }, { "epoch": 0.73330078125, "grad_norm": 0.1756594330072403, "learning_rate": 0.0001287390488127453, "loss": 1.7894, "step": 7509 }, { "epoch": 0.7333984375, "grad_norm": 0.1952272355556488, "learning_rate": 0.00012868501825063916, "loss": 1.8067, "step": 7510 }, { "epoch": 0.73349609375, "grad_norm": 0.18256641924381256, "learning_rate": 0.00012863100230410359, "loss": 1.7584, "step": 7511 }, { "epoch": 0.73359375, "grad_norm": 0.16635093092918396, "learning_rate": 0.0001285770009785341, "loss": 1.7825, "step": 7512 }, { "epoch": 0.73369140625, "grad_norm": 0.21549956500530243, "learning_rate": 0.000128523014279325, "loss": 1.765, "step": 7513 }, { "epoch": 0.7337890625, "grad_norm": 0.15596963465213776, "learning_rate": 0.0001284690422118691, "loss": 1.8136, "step": 7514 }, { "epoch": 0.73388671875, "grad_norm": 0.21942372620105743, "learning_rate": 0.0001284150847815578, "loss": 1.8031, "step": 7515 }, { "epoch": 0.733984375, "grad_norm": 0.18020014464855194, "learning_rate": 0.0001283611419937808, "loss": 1.7287, "step": 7516 }, { "epoch": 0.73408203125, "grad_norm": 0.18655022978782654, "learning_rate": 0.00012830721385392662, "loss": 1.7696, "step": 7517 }, { "epoch": 0.7341796875, "grad_norm": 0.18170595169067383, "learning_rate": 0.00012825330036738233, "loss": 1.7893, "step": 7518 }, { "epoch": 0.73427734375, "grad_norm": 0.14338816702365875, "learning_rate": 0.0001281994015395332, "loss": 1.7604, "step": 7519 }, { "epoch": 0.734375, "grad_norm": 0.16252291202545166, "learning_rate": 0.00012814551737576327, "loss": 1.7944, "step": 7520 }, { "epoch": 0.73447265625, "grad_norm": 0.19054804742336273, "learning_rate": 0.00012809164788145515, "loss": 1.8311, "step": 7521 }, { "epoch": 0.7345703125, "grad_norm": 0.16654053330421448, "learning_rate": 0.00012803779306199003, "loss": 1.8323, "step": 7522 }, { "epoch": 0.73466796875, "grad_norm": 0.17077161371707916, "learning_rate": 0.00012798395292274725, "loss": 1.7363, "step": 7523 }, { "epoch": 0.734765625, "grad_norm": 0.15966129302978516, "learning_rate": 
0.0001279301274691052, "loss": 1.7641, "step": 7524 }, { "epoch": 0.73486328125, "grad_norm": 0.15401005744934082, "learning_rate": 0.0001278763167064405, "loss": 1.7618, "step": 7525 }, { "epoch": 0.7349609375, "grad_norm": 0.15181098878383636, "learning_rate": 0.00012782252064012846, "loss": 1.7275, "step": 7526 }, { "epoch": 0.73505859375, "grad_norm": 0.1503584384918213, "learning_rate": 0.0001277687392755426, "loss": 1.7758, "step": 7527 }, { "epoch": 0.73515625, "grad_norm": 0.15785834193229675, "learning_rate": 0.00012771497261805537, "loss": 1.7765, "step": 7528 }, { "epoch": 0.73525390625, "grad_norm": 0.15471728146076202, "learning_rate": 0.0001276612206730376, "loss": 1.8108, "step": 7529 }, { "epoch": 0.7353515625, "grad_norm": 0.15729640424251556, "learning_rate": 0.00012760748344585842, "loss": 1.7741, "step": 7530 }, { "epoch": 0.73544921875, "grad_norm": 0.15733131766319275, "learning_rate": 0.00012755376094188598, "loss": 1.7741, "step": 7531 }, { "epoch": 0.735546875, "grad_norm": 0.15869319438934326, "learning_rate": 0.00012750005316648655, "loss": 1.813, "step": 7532 }, { "epoch": 0.73564453125, "grad_norm": 0.15959715843200684, "learning_rate": 0.00012744636012502515, "loss": 1.7686, "step": 7533 }, { "epoch": 0.7357421875, "grad_norm": 0.1653357595205307, "learning_rate": 0.00012739268182286508, "loss": 1.7871, "step": 7534 }, { "epoch": 0.73583984375, "grad_norm": 0.1749299168586731, "learning_rate": 0.00012733901826536846, "loss": 1.7723, "step": 7535 }, { "epoch": 0.7359375, "grad_norm": 0.1599985659122467, "learning_rate": 0.00012728536945789582, "loss": 1.7837, "step": 7536 }, { "epoch": 0.73603515625, "grad_norm": 0.18601056933403015, "learning_rate": 0.00012723173540580592, "loss": 1.7732, "step": 7537 }, { "epoch": 0.7361328125, "grad_norm": 0.1628461331129074, "learning_rate": 0.00012717811611445668, "loss": 1.7924, "step": 7538 }, { "epoch": 0.73623046875, "grad_norm": 0.1686810702085495, "learning_rate": 0.00012712451158920405, "loss": 1.7832, "step": 7539 }, { "epoch": 0.736328125, "grad_norm": 0.16788071393966675, "learning_rate": 0.0001270709218354027, "loss": 1.7301, "step": 7540 }, { "epoch": 0.73642578125, "grad_norm": 0.15346254408359528, "learning_rate": 0.00012701734685840565, "loss": 1.7799, "step": 7541 }, { "epoch": 0.7365234375, "grad_norm": 0.1681012511253357, "learning_rate": 0.00012696378666356468, "loss": 1.7575, "step": 7542 }, { "epoch": 0.73662109375, "grad_norm": 0.16378776729106903, "learning_rate": 0.00012691024125623002, "loss": 1.7736, "step": 7543 }, { "epoch": 0.73671875, "grad_norm": 0.15829038619995117, "learning_rate": 0.00012685671064175012, "loss": 1.7795, "step": 7544 }, { "epoch": 0.73681640625, "grad_norm": 0.17333818972110748, "learning_rate": 0.00012680319482547247, "loss": 1.7263, "step": 7545 }, { "epoch": 0.7369140625, "grad_norm": 0.16192445158958435, "learning_rate": 0.00012674969381274277, "loss": 1.814, "step": 7546 }, { "epoch": 0.73701171875, "grad_norm": 0.16561871767044067, "learning_rate": 0.00012669620760890534, "loss": 1.7385, "step": 7547 }, { "epoch": 0.737109375, "grad_norm": 0.15851564705371857, "learning_rate": 0.00012664273621930283, "loss": 1.7886, "step": 7548 }, { "epoch": 0.73720703125, "grad_norm": 0.1490420550107956, "learning_rate": 0.00012658927964927664, "loss": 1.7724, "step": 7549 }, { "epoch": 0.7373046875, "grad_norm": 0.16224917769432068, "learning_rate": 0.00012653583790416662, "loss": 1.7771, "step": 7550 }, { "epoch": 0.73740234375, "grad_norm": 0.16927330195903778, "learning_rate": 
0.00012648241098931113, "loss": 1.7774, "step": 7551 }, { "epoch": 0.7375, "grad_norm": 0.15276582539081573, "learning_rate": 0.00012642899891004706, "loss": 1.7742, "step": 7552 }, { "epoch": 0.73759765625, "grad_norm": 0.18307168781757355, "learning_rate": 0.00012637560167170974, "loss": 1.7986, "step": 7553 }, { "epoch": 0.7376953125, "grad_norm": 0.14568272233009338, "learning_rate": 0.00012632221927963322, "loss": 1.8525, "step": 7554 }, { "epoch": 0.73779296875, "grad_norm": 0.21095643937587738, "learning_rate": 0.00012626885173914977, "loss": 1.7828, "step": 7555 }, { "epoch": 0.737890625, "grad_norm": 0.16750939190387726, "learning_rate": 0.00012621549905559033, "loss": 1.8254, "step": 7556 }, { "epoch": 0.73798828125, "grad_norm": 0.17135192453861237, "learning_rate": 0.00012616216123428447, "loss": 1.8078, "step": 7557 }, { "epoch": 0.7380859375, "grad_norm": 0.19611434638500214, "learning_rate": 0.00012610883828056009, "loss": 1.841, "step": 7558 }, { "epoch": 0.73818359375, "grad_norm": 0.1688537746667862, "learning_rate": 0.00012605553019974373, "loss": 1.8194, "step": 7559 }, { "epoch": 0.73828125, "grad_norm": 0.19806645810604095, "learning_rate": 0.00012600223699716034, "loss": 1.815, "step": 7560 }, { "epoch": 0.73837890625, "grad_norm": 0.20967325568199158, "learning_rate": 0.0001259489586781336, "loss": 1.806, "step": 7561 }, { "epoch": 0.7384765625, "grad_norm": 0.17362962663173676, "learning_rate": 0.00012589569524798527, "loss": 1.7495, "step": 7562 }, { "epoch": 0.73857421875, "grad_norm": 0.19243395328521729, "learning_rate": 0.0001258424467120361, "loss": 1.7734, "step": 7563 }, { "epoch": 0.738671875, "grad_norm": 0.15935564041137695, "learning_rate": 0.0001257892130756051, "loss": 1.7541, "step": 7564 }, { "epoch": 0.73876953125, "grad_norm": 0.17546871304512024, "learning_rate": 0.00012573599434400977, "loss": 1.7851, "step": 7565 }, { "epoch": 0.7388671875, "grad_norm": 0.17721617221832275, "learning_rate": 0.00012568279052256625, "loss": 1.7674, "step": 7566 }, { "epoch": 0.73896484375, "grad_norm": 0.15278668701648712, "learning_rate": 0.00012562960161658916, "loss": 1.778, "step": 7567 }, { "epoch": 0.7390625, "grad_norm": 0.19184555113315582, "learning_rate": 0.0001255764276313916, "loss": 1.7588, "step": 7568 }, { "epoch": 0.73916015625, "grad_norm": 0.17690050601959229, "learning_rate": 0.0001255232685722851, "loss": 1.8292, "step": 7569 }, { "epoch": 0.7392578125, "grad_norm": 0.17668579518795013, "learning_rate": 0.00012547012444457983, "loss": 1.7686, "step": 7570 }, { "epoch": 0.73935546875, "grad_norm": 0.18591047823429108, "learning_rate": 0.0001254169952535844, "loss": 1.7978, "step": 7571 }, { "epoch": 0.739453125, "grad_norm": 0.15838639438152313, "learning_rate": 0.00012536388100460596, "loss": 1.7895, "step": 7572 }, { "epoch": 0.73955078125, "grad_norm": 0.17246530950069427, "learning_rate": 0.00012531078170295018, "loss": 1.753, "step": 7573 }, { "epoch": 0.7396484375, "grad_norm": 0.15871788561344147, "learning_rate": 0.00012525769735392112, "loss": 1.7565, "step": 7574 }, { "epoch": 0.73974609375, "grad_norm": 0.16924740374088287, "learning_rate": 0.00012520462796282162, "loss": 1.7686, "step": 7575 }, { "epoch": 0.73984375, "grad_norm": 0.15710416436195374, "learning_rate": 0.00012515157353495265, "loss": 1.7766, "step": 7576 }, { "epoch": 0.73994140625, "grad_norm": 0.15807384252548218, "learning_rate": 0.00012509853407561392, "loss": 1.8333, "step": 7577 }, { "epoch": 0.7400390625, "grad_norm": 0.17482024431228638, "learning_rate": 
0.00012504550959010363, "loss": 1.729, "step": 7578 }, { "epoch": 0.74013671875, "grad_norm": 0.15827827155590057, "learning_rate": 0.00012499250008371845, "loss": 1.7404, "step": 7579 }, { "epoch": 0.740234375, "grad_norm": 0.16501794755458832, "learning_rate": 0.00012493950556175358, "loss": 1.7985, "step": 7580 }, { "epoch": 0.74033203125, "grad_norm": 0.17801940441131592, "learning_rate": 0.00012488652602950267, "loss": 1.7844, "step": 7581 }, { "epoch": 0.7404296875, "grad_norm": 0.16047896444797516, "learning_rate": 0.00012483356149225802, "loss": 1.7643, "step": 7582 }, { "epoch": 0.74052734375, "grad_norm": 0.17623291909694672, "learning_rate": 0.00012478061195531016, "loss": 1.7713, "step": 7583 }, { "epoch": 0.740625, "grad_norm": 0.16305476427078247, "learning_rate": 0.0001247276774239483, "loss": 1.805, "step": 7584 }, { "epoch": 0.74072265625, "grad_norm": 0.1827363818883896, "learning_rate": 0.0001246747579034602, "loss": 1.8101, "step": 7585 }, { "epoch": 0.7408203125, "grad_norm": 0.17638084292411804, "learning_rate": 0.000124621853399132, "loss": 1.7945, "step": 7586 }, { "epoch": 0.74091796875, "grad_norm": 0.16983458399772644, "learning_rate": 0.00012456896391624842, "loss": 1.8368, "step": 7587 }, { "epoch": 0.741015625, "grad_norm": 0.17888088524341583, "learning_rate": 0.00012451608946009268, "loss": 1.8307, "step": 7588 }, { "epoch": 0.74111328125, "grad_norm": 0.1728510707616806, "learning_rate": 0.00012446323003594649, "loss": 1.7794, "step": 7589 }, { "epoch": 0.7412109375, "grad_norm": 0.16284529864788055, "learning_rate": 0.0001244103856490899, "loss": 1.7618, "step": 7590 }, { "epoch": 0.74130859375, "grad_norm": 0.16928517818450928, "learning_rate": 0.0001243575563048017, "loss": 1.7737, "step": 7591 }, { "epoch": 0.74140625, "grad_norm": 0.16028565168380737, "learning_rate": 0.00012430474200835905, "loss": 1.7723, "step": 7592 }, { "epoch": 0.74150390625, "grad_norm": 0.16314846277236938, "learning_rate": 0.0001242519427650376, "loss": 1.7354, "step": 7593 }, { "epoch": 0.7416015625, "grad_norm": 0.15943919122219086, "learning_rate": 0.00012419915858011157, "loss": 1.7875, "step": 7594 }, { "epoch": 0.74169921875, "grad_norm": 0.16565825045108795, "learning_rate": 0.00012414638945885365, "loss": 1.756, "step": 7595 }, { "epoch": 0.741796875, "grad_norm": 0.15424060821533203, "learning_rate": 0.00012409363540653504, "loss": 1.7439, "step": 7596 }, { "epoch": 0.74189453125, "grad_norm": 0.16673113405704498, "learning_rate": 0.00012404089642842526, "loss": 1.774, "step": 7597 }, { "epoch": 0.7419921875, "grad_norm": 0.16933690011501312, "learning_rate": 0.00012398817252979254, "loss": 1.7694, "step": 7598 }, { "epoch": 0.74208984375, "grad_norm": 0.14434173703193665, "learning_rate": 0.00012393546371590358, "loss": 1.7938, "step": 7599 }, { "epoch": 0.7421875, "grad_norm": 0.17025159299373627, "learning_rate": 0.00012388276999202347, "loss": 1.8139, "step": 7600 }, { "epoch": 0.74228515625, "grad_norm": 0.15207231044769287, "learning_rate": 0.00012383009136341587, "loss": 1.7689, "step": 7601 }, { "epoch": 0.7423828125, "grad_norm": 0.17351441085338593, "learning_rate": 0.00012377742783534285, "loss": 1.7831, "step": 7602 }, { "epoch": 0.74248046875, "grad_norm": 0.1589614748954773, "learning_rate": 0.0001237247794130652, "loss": 1.7653, "step": 7603 }, { "epoch": 0.742578125, "grad_norm": 0.2119864523410797, "learning_rate": 0.00012367214610184182, "loss": 1.8194, "step": 7604 }, { "epoch": 0.74267578125, "grad_norm": 0.1501256227493286, "learning_rate": 
0.00012361952790693038, "loss": 1.7806, "step": 7605 }, { "epoch": 0.7427734375, "grad_norm": 0.1690906584262848, "learning_rate": 0.000123566924833587, "loss": 1.7863, "step": 7606 }, { "epoch": 0.74287109375, "grad_norm": 0.18081210553646088, "learning_rate": 0.00012351433688706628, "loss": 1.7861, "step": 7607 }, { "epoch": 0.74296875, "grad_norm": 0.14885453879833221, "learning_rate": 0.00012346176407262126, "loss": 1.8146, "step": 7608 }, { "epoch": 0.74306640625, "grad_norm": 0.1973363757133484, "learning_rate": 0.00012340920639550347, "loss": 1.7351, "step": 7609 }, { "epoch": 0.7431640625, "grad_norm": 0.14890922605991364, "learning_rate": 0.00012335666386096306, "loss": 1.7311, "step": 7610 }, { "epoch": 0.74326171875, "grad_norm": 0.19316153228282928, "learning_rate": 0.00012330413647424843, "loss": 1.8116, "step": 7611 }, { "epoch": 0.743359375, "grad_norm": 0.1485978662967682, "learning_rate": 0.0001232516242406067, "loss": 1.7603, "step": 7612 }, { "epoch": 0.74345703125, "grad_norm": 0.1613076776266098, "learning_rate": 0.0001231991271652833, "loss": 1.7464, "step": 7613 }, { "epoch": 0.7435546875, "grad_norm": 0.17458224296569824, "learning_rate": 0.00012314664525352225, "loss": 1.7664, "step": 7614 }, { "epoch": 0.74365234375, "grad_norm": 0.16558660566806793, "learning_rate": 0.00012309417851056608, "loss": 1.762, "step": 7615 }, { "epoch": 0.74375, "grad_norm": 0.17161481082439423, "learning_rate": 0.00012304172694165566, "loss": 1.7206, "step": 7616 }, { "epoch": 0.74384765625, "grad_norm": 0.1617281585931778, "learning_rate": 0.00012298929055203062, "loss": 1.7593, "step": 7617 }, { "epoch": 0.7439453125, "grad_norm": 0.1592521220445633, "learning_rate": 0.00012293686934692865, "loss": 1.7777, "step": 7618 }, { "epoch": 0.74404296875, "grad_norm": 0.1733996719121933, "learning_rate": 0.00012288446333158628, "loss": 1.7999, "step": 7619 }, { "epoch": 0.744140625, "grad_norm": 0.16289527714252472, "learning_rate": 0.0001228320725112384, "loss": 1.8092, "step": 7620 }, { "epoch": 0.74423828125, "grad_norm": 0.18455855548381805, "learning_rate": 0.0001227796968911184, "loss": 1.7375, "step": 7621 }, { "epoch": 0.7443359375, "grad_norm": 0.14352737367153168, "learning_rate": 0.00012272733647645812, "loss": 1.7896, "step": 7622 }, { "epoch": 0.74443359375, "grad_norm": 0.19543327391147614, "learning_rate": 0.00012267499127248792, "loss": 1.8062, "step": 7623 }, { "epoch": 0.74453125, "grad_norm": 0.1651216596364975, "learning_rate": 0.00012262266128443668, "loss": 1.7971, "step": 7624 }, { "epoch": 0.74462890625, "grad_norm": 0.16934999823570251, "learning_rate": 0.00012257034651753156, "loss": 1.7936, "step": 7625 }, { "epoch": 0.7447265625, "grad_norm": 0.16619908809661865, "learning_rate": 0.00012251804697699844, "loss": 1.7747, "step": 7626 }, { "epoch": 0.74482421875, "grad_norm": 0.15581020712852478, "learning_rate": 0.00012246576266806153, "loss": 1.7768, "step": 7627 }, { "epoch": 0.744921875, "grad_norm": 0.1921054571866989, "learning_rate": 0.00012241349359594364, "loss": 1.83, "step": 7628 }, { "epoch": 0.74501953125, "grad_norm": 0.15963982045650482, "learning_rate": 0.0001223612397658659, "loss": 1.8223, "step": 7629 }, { "epoch": 0.7451171875, "grad_norm": 0.17645329236984253, "learning_rate": 0.00012230900118304804, "loss": 1.7681, "step": 7630 }, { "epoch": 0.74521484375, "grad_norm": 0.16951271891593933, "learning_rate": 0.00012225677785270835, "loss": 1.7953, "step": 7631 }, { "epoch": 0.7453125, "grad_norm": 0.19877351820468903, "learning_rate": 
0.0001222045697800633, "loss": 1.8184, "step": 7632 }, { "epoch": 0.74541015625, "grad_norm": 0.17049379646778107, "learning_rate": 0.00012215237697032805, "loss": 1.7884, "step": 7633 }, { "epoch": 0.7455078125, "grad_norm": 0.1721426397562027, "learning_rate": 0.00012210019942871626, "loss": 1.7749, "step": 7634 }, { "epoch": 0.74560546875, "grad_norm": 0.17262153327465057, "learning_rate": 0.00012204803716043996, "loss": 1.7788, "step": 7635 }, { "epoch": 0.745703125, "grad_norm": 0.21160109341144562, "learning_rate": 0.00012199589017070974, "loss": 1.7661, "step": 7636 }, { "epoch": 0.74580078125, "grad_norm": 0.15308937430381775, "learning_rate": 0.00012194375846473458, "loss": 1.8343, "step": 7637 }, { "epoch": 0.7458984375, "grad_norm": 0.17402414977550507, "learning_rate": 0.00012189164204772208, "loss": 1.7767, "step": 7638 }, { "epoch": 0.74599609375, "grad_norm": 0.17639994621276855, "learning_rate": 0.000121839540924878, "loss": 1.8153, "step": 7639 }, { "epoch": 0.74609375, "grad_norm": 0.17249640822410583, "learning_rate": 0.00012178745510140696, "loss": 1.7042, "step": 7640 }, { "epoch": 0.74619140625, "grad_norm": 0.17888103425502777, "learning_rate": 0.00012173538458251179, "loss": 1.8076, "step": 7641 }, { "epoch": 0.7462890625, "grad_norm": 0.16756807267665863, "learning_rate": 0.00012168332937339391, "loss": 1.7955, "step": 7642 }, { "epoch": 0.74638671875, "grad_norm": 0.19072501361370087, "learning_rate": 0.00012163128947925314, "loss": 1.7808, "step": 7643 }, { "epoch": 0.746484375, "grad_norm": 0.1527816206216812, "learning_rate": 0.00012157926490528785, "loss": 1.7892, "step": 7644 }, { "epoch": 0.74658203125, "grad_norm": 0.18487055599689484, "learning_rate": 0.00012152725565669486, "loss": 1.7662, "step": 7645 }, { "epoch": 0.7466796875, "grad_norm": 0.16346251964569092, "learning_rate": 0.00012147526173866932, "loss": 1.823, "step": 7646 }, { "epoch": 0.74677734375, "grad_norm": 0.18207375705242157, "learning_rate": 0.00012142328315640504, "loss": 1.8126, "step": 7647 }, { "epoch": 0.746875, "grad_norm": 0.1905677318572998, "learning_rate": 0.00012137131991509419, "loss": 1.8162, "step": 7648 }, { "epoch": 0.74697265625, "grad_norm": 0.1994086056947708, "learning_rate": 0.00012131937201992747, "loss": 1.7691, "step": 7649 }, { "epoch": 0.7470703125, "grad_norm": 0.17504943907260895, "learning_rate": 0.00012126743947609397, "loss": 1.7607, "step": 7650 }, { "epoch": 0.74716796875, "grad_norm": 0.1664980947971344, "learning_rate": 0.00012121552228878135, "loss": 1.8092, "step": 7651 }, { "epoch": 0.747265625, "grad_norm": 0.17842453718185425, "learning_rate": 0.00012116362046317569, "loss": 1.7525, "step": 7652 }, { "epoch": 0.74736328125, "grad_norm": 0.1539682000875473, "learning_rate": 0.00012111173400446143, "loss": 1.7913, "step": 7653 }, { "epoch": 0.7474609375, "grad_norm": 0.1590164601802826, "learning_rate": 0.0001210598629178216, "loss": 1.72, "step": 7654 }, { "epoch": 0.74755859375, "grad_norm": 0.18660178780555725, "learning_rate": 0.00012100800720843765, "loss": 1.8166, "step": 7655 }, { "epoch": 0.74765625, "grad_norm": 0.15589818358421326, "learning_rate": 0.00012095616688148958, "loss": 1.7629, "step": 7656 }, { "epoch": 0.74775390625, "grad_norm": 0.15937891602516174, "learning_rate": 0.00012090434194215574, "loss": 1.7779, "step": 7657 }, { "epoch": 0.7478515625, "grad_norm": 0.1904037445783615, "learning_rate": 0.00012085253239561295, "loss": 1.7737, "step": 7658 }, { "epoch": 0.74794921875, "grad_norm": 0.1592012345790863, "learning_rate": 
0.0001208007382470366, "loss": 1.7752, "step": 7659 }, { "epoch": 0.748046875, "grad_norm": 0.18533967435359955, "learning_rate": 0.0001207489595016004, "loss": 1.7743, "step": 7660 }, { "epoch": 0.74814453125, "grad_norm": 0.15914149582386017, "learning_rate": 0.00012069719616447656, "loss": 1.7716, "step": 7661 }, { "epoch": 0.7482421875, "grad_norm": 0.19323600828647614, "learning_rate": 0.00012064544824083584, "loss": 1.7741, "step": 7662 }, { "epoch": 0.74833984375, "grad_norm": 0.1805322766304016, "learning_rate": 0.0001205937157358474, "loss": 1.7966, "step": 7663 }, { "epoch": 0.7484375, "grad_norm": 0.1733737587928772, "learning_rate": 0.0001205419986546788, "loss": 1.7768, "step": 7664 }, { "epoch": 0.74853515625, "grad_norm": 0.16829119622707367, "learning_rate": 0.0001204902970024962, "loss": 1.7413, "step": 7665 }, { "epoch": 0.7486328125, "grad_norm": 0.15850205719470978, "learning_rate": 0.00012043861078446413, "loss": 1.7457, "step": 7666 }, { "epoch": 0.74873046875, "grad_norm": 0.18297319114208221, "learning_rate": 0.0001203869400057455, "loss": 1.793, "step": 7667 }, { "epoch": 0.748828125, "grad_norm": 0.15747644007205963, "learning_rate": 0.00012033528467150183, "loss": 1.8001, "step": 7668 }, { "epoch": 0.74892578125, "grad_norm": 0.15908674895763397, "learning_rate": 0.00012028364478689299, "loss": 1.7342, "step": 7669 }, { "epoch": 0.7490234375, "grad_norm": 0.17064425349235535, "learning_rate": 0.00012023202035707738, "loss": 1.7608, "step": 7670 }, { "epoch": 0.74912109375, "grad_norm": 0.16707949340343475, "learning_rate": 0.00012018041138721183, "loss": 1.7737, "step": 7671 }, { "epoch": 0.74921875, "grad_norm": 0.1632205843925476, "learning_rate": 0.00012012881788245158, "loss": 1.7598, "step": 7672 }, { "epoch": 0.74931640625, "grad_norm": 0.15969471633434296, "learning_rate": 0.0001200772398479505, "loss": 1.7686, "step": 7673 }, { "epoch": 0.7494140625, "grad_norm": 0.16376468539237976, "learning_rate": 0.00012002567728886055, "loss": 1.7579, "step": 7674 }, { "epoch": 0.74951171875, "grad_norm": 0.15174399316310883, "learning_rate": 0.0001199741302103325, "loss": 1.807, "step": 7675 }, { "epoch": 0.749609375, "grad_norm": 0.16173924505710602, "learning_rate": 0.00011992259861751546, "loss": 1.8257, "step": 7676 }, { "epoch": 0.74970703125, "grad_norm": 0.16815942525863647, "learning_rate": 0.00011987108251555692, "loss": 1.7648, "step": 7677 }, { "epoch": 0.7498046875, "grad_norm": 0.1523892879486084, "learning_rate": 0.00011981958190960294, "loss": 1.7775, "step": 7678 }, { "epoch": 0.74990234375, "grad_norm": 0.18168233335018158, "learning_rate": 0.00011976809680479797, "loss": 1.8447, "step": 7679 }, { "epoch": 0.75, "grad_norm": 0.16950511932373047, "learning_rate": 0.00011971662720628496, "loss": 1.7367, "step": 7680 }, { "epoch": 0.75009765625, "grad_norm": 0.17498576641082764, "learning_rate": 0.0001196651731192051, "loss": 1.8054, "step": 7681 }, { "epoch": 0.7501953125, "grad_norm": 0.20565645396709442, "learning_rate": 0.00011961373454869834, "loss": 1.7769, "step": 7682 }, { "epoch": 0.75029296875, "grad_norm": 0.17719954252243042, "learning_rate": 0.00011956231149990289, "loss": 1.7642, "step": 7683 }, { "epoch": 0.750390625, "grad_norm": 0.20949502289295197, "learning_rate": 0.00011951090397795545, "loss": 1.8146, "step": 7684 }, { "epoch": 0.75048828125, "grad_norm": 0.1640123724937439, "learning_rate": 0.00011945951198799124, "loss": 1.777, "step": 7685 }, { "epoch": 0.7505859375, "grad_norm": 0.16843876242637634, "learning_rate": 
0.0001194081355351438, "loss": 1.7565, "step": 7686 }, { "epoch": 0.75068359375, "grad_norm": 0.1698119044303894, "learning_rate": 0.00011935677462454525, "loss": 1.7787, "step": 7687 }, { "epoch": 0.75078125, "grad_norm": 0.1823517084121704, "learning_rate": 0.00011930542926132597, "loss": 1.7557, "step": 7688 }, { "epoch": 0.75087890625, "grad_norm": 0.14600202441215515, "learning_rate": 0.00011925409945061499, "loss": 1.8233, "step": 7689 }, { "epoch": 0.7509765625, "grad_norm": 0.16653165221214294, "learning_rate": 0.00011920278519753965, "loss": 1.7541, "step": 7690 }, { "epoch": 0.75107421875, "grad_norm": 0.17097769677639008, "learning_rate": 0.0001191514865072259, "loss": 1.7111, "step": 7691 }, { "epoch": 0.751171875, "grad_norm": 0.15861545503139496, "learning_rate": 0.0001191002033847979, "loss": 1.7652, "step": 7692 }, { "epoch": 0.75126953125, "grad_norm": 0.161625936627388, "learning_rate": 0.00011904893583537846, "loss": 1.7389, "step": 7693 }, { "epoch": 0.7513671875, "grad_norm": 0.16038577258586884, "learning_rate": 0.0001189976838640888, "loss": 1.7224, "step": 7694 }, { "epoch": 0.75146484375, "grad_norm": 0.15835894644260406, "learning_rate": 0.0001189464474760484, "loss": 1.7604, "step": 7695 }, { "epoch": 0.7515625, "grad_norm": 0.1626521795988083, "learning_rate": 0.00011889522667637528, "loss": 1.7535, "step": 7696 }, { "epoch": 0.75166015625, "grad_norm": 0.16321398317813873, "learning_rate": 0.00011884402147018623, "loss": 1.7773, "step": 7697 }, { "epoch": 0.7517578125, "grad_norm": 0.16762812435626984, "learning_rate": 0.00011879283186259592, "loss": 1.8089, "step": 7698 }, { "epoch": 0.75185546875, "grad_norm": 0.1494065672159195, "learning_rate": 0.00011874165785871785, "loss": 1.772, "step": 7699 }, { "epoch": 0.751953125, "grad_norm": 0.16971111297607422, "learning_rate": 0.00011869049946366385, "loss": 1.7904, "step": 7700 }, { "epoch": 0.75205078125, "grad_norm": 0.15486618876457214, "learning_rate": 0.00011863935668254423, "loss": 1.7469, "step": 7701 }, { "epoch": 0.7521484375, "grad_norm": 0.16619467735290527, "learning_rate": 0.00011858822952046758, "loss": 1.7357, "step": 7702 }, { "epoch": 0.75224609375, "grad_norm": 0.1598757803440094, "learning_rate": 0.00011853711798254103, "loss": 1.7671, "step": 7703 }, { "epoch": 0.75234375, "grad_norm": 0.1689143180847168, "learning_rate": 0.0001184860220738704, "loss": 1.7626, "step": 7704 }, { "epoch": 0.75244140625, "grad_norm": 0.17198412120342255, "learning_rate": 0.00011843494179955953, "loss": 1.8028, "step": 7705 }, { "epoch": 0.7525390625, "grad_norm": 0.1527240127325058, "learning_rate": 0.00011838387716471092, "loss": 1.7729, "step": 7706 }, { "epoch": 0.75263671875, "grad_norm": 0.15644703805446625, "learning_rate": 0.0001183328281744255, "loss": 1.7667, "step": 7707 }, { "epoch": 0.752734375, "grad_norm": 0.17615172266960144, "learning_rate": 0.00011828179483380266, "loss": 1.782, "step": 7708 }, { "epoch": 0.75283203125, "grad_norm": 0.1629565805196762, "learning_rate": 0.00011823077714794006, "loss": 1.7953, "step": 7709 }, { "epoch": 0.7529296875, "grad_norm": 0.16301971673965454, "learning_rate": 0.00011817977512193388, "loss": 1.7643, "step": 7710 }, { "epoch": 0.75302734375, "grad_norm": 0.16795071959495544, "learning_rate": 0.000118128788760879, "loss": 1.766, "step": 7711 }, { "epoch": 0.753125, "grad_norm": 0.18167097866535187, "learning_rate": 0.00011807781806986832, "loss": 1.7786, "step": 7712 }, { "epoch": 0.75322265625, "grad_norm": 0.1817089468240738, "learning_rate": 
0.0001180268630539934, "loss": 1.7745, "step": 7713 }, { "epoch": 0.7533203125, "grad_norm": 0.1957869529724121, "learning_rate": 0.0001179759237183442, "loss": 1.7647, "step": 7714 }, { "epoch": 0.75341796875, "grad_norm": 0.1481465995311737, "learning_rate": 0.0001179250000680092, "loss": 1.7697, "step": 7715 }, { "epoch": 0.753515625, "grad_norm": 0.20453698933124542, "learning_rate": 0.00011787409210807504, "loss": 1.7868, "step": 7716 }, { "epoch": 0.75361328125, "grad_norm": 0.15494777262210846, "learning_rate": 0.00011782319984362701, "loss": 1.7896, "step": 7717 }, { "epoch": 0.7537109375, "grad_norm": 0.18950289487838745, "learning_rate": 0.000117772323279749, "loss": 1.7521, "step": 7718 }, { "epoch": 0.75380859375, "grad_norm": 0.17174574732780457, "learning_rate": 0.00011772146242152291, "loss": 1.7924, "step": 7719 }, { "epoch": 0.75390625, "grad_norm": 0.1954811066389084, "learning_rate": 0.00011767061727402935, "loss": 1.767, "step": 7720 }, { "epoch": 0.75400390625, "grad_norm": 0.18024030327796936, "learning_rate": 0.00011761978784234734, "loss": 1.7789, "step": 7721 }, { "epoch": 0.7541015625, "grad_norm": 0.17860965430736542, "learning_rate": 0.0001175689741315543, "loss": 1.7616, "step": 7722 }, { "epoch": 0.75419921875, "grad_norm": 0.23329001665115356, "learning_rate": 0.00011751817614672597, "loss": 1.7822, "step": 7723 }, { "epoch": 0.754296875, "grad_norm": 0.18111950159072876, "learning_rate": 0.00011746739389293662, "loss": 1.7711, "step": 7724 }, { "epoch": 0.75439453125, "grad_norm": 0.1974589228630066, "learning_rate": 0.00011741662737525914, "loss": 1.8008, "step": 7725 }, { "epoch": 0.7544921875, "grad_norm": 0.19731736183166504, "learning_rate": 0.00011736587659876444, "loss": 1.7731, "step": 7726 }, { "epoch": 0.75458984375, "grad_norm": 0.16115997731685638, "learning_rate": 0.00011731514156852216, "loss": 1.8092, "step": 7727 }, { "epoch": 0.7546875, "grad_norm": 0.21382762491703033, "learning_rate": 0.00011726442228960028, "loss": 1.7156, "step": 7728 }, { "epoch": 0.75478515625, "grad_norm": 0.15696550905704498, "learning_rate": 0.00011721371876706528, "loss": 1.7387, "step": 7729 }, { "epoch": 0.7548828125, "grad_norm": 0.1797853261232376, "learning_rate": 0.00011716303100598184, "loss": 1.7198, "step": 7730 }, { "epoch": 0.75498046875, "grad_norm": 0.1709548979997635, "learning_rate": 0.00011711235901141322, "loss": 1.7697, "step": 7731 }, { "epoch": 0.755078125, "grad_norm": 0.16802732646465302, "learning_rate": 0.00011706170278842133, "loss": 1.8109, "step": 7732 }, { "epoch": 0.75517578125, "grad_norm": 0.19212961196899414, "learning_rate": 0.00011701106234206602, "loss": 1.8022, "step": 7733 }, { "epoch": 0.7552734375, "grad_norm": 0.15853872895240784, "learning_rate": 0.00011696043767740593, "loss": 1.7644, "step": 7734 }, { "epoch": 0.75537109375, "grad_norm": 0.17812687158584595, "learning_rate": 0.00011690982879949804, "loss": 1.7854, "step": 7735 }, { "epoch": 0.75546875, "grad_norm": 0.18812188506126404, "learning_rate": 0.00011685923571339776, "loss": 1.7311, "step": 7736 }, { "epoch": 0.75556640625, "grad_norm": 0.15274593234062195, "learning_rate": 0.00011680865842415878, "loss": 1.7601, "step": 7737 }, { "epoch": 0.7556640625, "grad_norm": 0.1952308714389801, "learning_rate": 0.00011675809693683328, "loss": 1.8205, "step": 7738 }, { "epoch": 0.75576171875, "grad_norm": 0.17739272117614746, "learning_rate": 0.00011670755125647214, "loss": 1.7797, "step": 7739 }, { "epoch": 0.755859375, "grad_norm": 0.16060391068458557, "learning_rate": 
0.00011665702138812421, "loss": 1.7333, "step": 7740 }, { "epoch": 0.75595703125, "grad_norm": 0.16423368453979492, "learning_rate": 0.00011660650733683707, "loss": 1.7768, "step": 7741 }, { "epoch": 0.7560546875, "grad_norm": 0.1475183665752411, "learning_rate": 0.0001165560091076566, "loss": 1.7573, "step": 7742 }, { "epoch": 0.75615234375, "grad_norm": 0.16978709399700165, "learning_rate": 0.00011650552670562722, "loss": 1.7278, "step": 7743 }, { "epoch": 0.75625, "grad_norm": 0.14882473647594452, "learning_rate": 0.00011645506013579146, "loss": 1.7818, "step": 7744 }, { "epoch": 0.75634765625, "grad_norm": 0.161651149392128, "learning_rate": 0.00011640460940319058, "loss": 1.7598, "step": 7745 }, { "epoch": 0.7564453125, "grad_norm": 0.16143260896205902, "learning_rate": 0.00011635417451286426, "loss": 1.7597, "step": 7746 }, { "epoch": 0.75654296875, "grad_norm": 0.15999546647071838, "learning_rate": 0.0001163037554698505, "loss": 1.7962, "step": 7747 }, { "epoch": 0.756640625, "grad_norm": 0.17096731066703796, "learning_rate": 0.00011625335227918559, "loss": 1.7583, "step": 7748 }, { "epoch": 0.75673828125, "grad_norm": 0.1843830794095993, "learning_rate": 0.00011620296494590441, "loss": 1.8026, "step": 7749 }, { "epoch": 0.7568359375, "grad_norm": 0.1477334052324295, "learning_rate": 0.00011615259347504027, "loss": 1.7288, "step": 7750 }, { "epoch": 0.75693359375, "grad_norm": 0.19715151190757751, "learning_rate": 0.00011610223787162474, "loss": 1.8017, "step": 7751 }, { "epoch": 0.75703125, "grad_norm": 0.15215277671813965, "learning_rate": 0.00011605189814068784, "loss": 1.7914, "step": 7752 }, { "epoch": 0.75712890625, "grad_norm": 0.16040953993797302, "learning_rate": 0.00011600157428725824, "loss": 1.7223, "step": 7753 }, { "epoch": 0.7572265625, "grad_norm": 0.173275426030159, "learning_rate": 0.00011595126631636286, "loss": 1.7997, "step": 7754 }, { "epoch": 0.75732421875, "grad_norm": 0.16508221626281738, "learning_rate": 0.00011590097423302682, "loss": 1.7379, "step": 7755 }, { "epoch": 0.757421875, "grad_norm": 0.179207444190979, "learning_rate": 0.000115850698042274, "loss": 1.7929, "step": 7756 }, { "epoch": 0.75751953125, "grad_norm": 0.15995274484157562, "learning_rate": 0.00011580043774912658, "loss": 1.7561, "step": 7757 }, { "epoch": 0.7576171875, "grad_norm": 0.17505817115306854, "learning_rate": 0.00011575019335860495, "loss": 1.7586, "step": 7758 }, { "epoch": 0.75771484375, "grad_norm": 0.17953334748744965, "learning_rate": 0.00011569996487572812, "loss": 1.76, "step": 7759 }, { "epoch": 0.7578125, "grad_norm": 0.17134277522563934, "learning_rate": 0.0001156497523055136, "loss": 1.8043, "step": 7760 }, { "epoch": 0.75791015625, "grad_norm": 0.16928955912590027, "learning_rate": 0.00011559955565297718, "loss": 1.7476, "step": 7761 }, { "epoch": 0.7580078125, "grad_norm": 0.1559809148311615, "learning_rate": 0.00011554937492313292, "loss": 1.788, "step": 7762 }, { "epoch": 0.75810546875, "grad_norm": 0.17514066398143768, "learning_rate": 0.0001154992101209935, "loss": 1.7745, "step": 7763 }, { "epoch": 0.758203125, "grad_norm": 0.16250185668468475, "learning_rate": 0.00011544906125157, "loss": 1.7744, "step": 7764 }, { "epoch": 0.75830078125, "grad_norm": 0.1742071956396103, "learning_rate": 0.00011539892831987173, "loss": 1.776, "step": 7765 }, { "epoch": 0.7583984375, "grad_norm": 0.19807018339633942, "learning_rate": 0.00011534881133090652, "loss": 1.7964, "step": 7766 }, { "epoch": 0.75849609375, "grad_norm": 0.1838013231754303, "learning_rate": 
0.00011529871028968073, "loss": 1.8234, "step": 7767 }, { "epoch": 0.75859375, "grad_norm": 0.18357525765895844, "learning_rate": 0.00011524862520119902, "loss": 1.8184, "step": 7768 }, { "epoch": 0.75869140625, "grad_norm": 0.17254237830638885, "learning_rate": 0.00011519855607046432, "loss": 1.8048, "step": 7769 }, { "epoch": 0.7587890625, "grad_norm": 0.190409317612648, "learning_rate": 0.00011514850290247817, "loss": 1.757, "step": 7770 }, { "epoch": 0.75888671875, "grad_norm": 0.1757277548313141, "learning_rate": 0.00011509846570224039, "loss": 1.7875, "step": 7771 }, { "epoch": 0.758984375, "grad_norm": 0.20494605600833893, "learning_rate": 0.00011504844447474936, "loss": 1.7307, "step": 7772 }, { "epoch": 0.75908203125, "grad_norm": 0.1619090437889099, "learning_rate": 0.00011499843922500158, "loss": 1.8104, "step": 7773 }, { "epoch": 0.7591796875, "grad_norm": 0.1816893219947815, "learning_rate": 0.00011494844995799228, "loss": 1.7857, "step": 7774 }, { "epoch": 0.75927734375, "grad_norm": 0.17407159507274628, "learning_rate": 0.00011489847667871498, "loss": 1.7635, "step": 7775 }, { "epoch": 0.759375, "grad_norm": 0.1534554660320282, "learning_rate": 0.00011484851939216141, "loss": 1.7669, "step": 7776 }, { "epoch": 0.75947265625, "grad_norm": 0.1848212629556656, "learning_rate": 0.00011479857810332193, "loss": 1.7438, "step": 7777 }, { "epoch": 0.7595703125, "grad_norm": 0.16246220469474792, "learning_rate": 0.00011474865281718526, "loss": 1.748, "step": 7778 }, { "epoch": 0.75966796875, "grad_norm": 0.18291136622428894, "learning_rate": 0.00011469874353873855, "loss": 1.8059, "step": 7779 }, { "epoch": 0.759765625, "grad_norm": 0.15974071621894836, "learning_rate": 0.00011464885027296706, "loss": 1.8269, "step": 7780 }, { "epoch": 0.75986328125, "grad_norm": 0.1748373955488205, "learning_rate": 0.00011459897302485493, "loss": 1.7588, "step": 7781 }, { "epoch": 0.7599609375, "grad_norm": 0.1877850592136383, "learning_rate": 0.00011454911179938441, "loss": 1.7389, "step": 7782 }, { "epoch": 0.76005859375, "grad_norm": 0.17578373849391937, "learning_rate": 0.00011449926660153611, "loss": 1.7952, "step": 7783 }, { "epoch": 0.76015625, "grad_norm": 0.20385147631168365, "learning_rate": 0.00011444943743628918, "loss": 1.7889, "step": 7784 }, { "epoch": 0.76025390625, "grad_norm": 0.15334489941596985, "learning_rate": 0.00011439962430862108, "loss": 1.7772, "step": 7785 }, { "epoch": 0.7603515625, "grad_norm": 0.1844075471162796, "learning_rate": 0.00011434982722350778, "loss": 1.7692, "step": 7786 }, { "epoch": 0.76044921875, "grad_norm": 0.1936715692281723, "learning_rate": 0.0001143000461859234, "loss": 1.7889, "step": 7787 }, { "epoch": 0.760546875, "grad_norm": 0.14805731177330017, "learning_rate": 0.00011425028120084077, "loss": 1.7454, "step": 7788 }, { "epoch": 0.76064453125, "grad_norm": 0.18926295638084412, "learning_rate": 0.00011420053227323102, "loss": 1.7636, "step": 7789 }, { "epoch": 0.7607421875, "grad_norm": 0.15696342289447784, "learning_rate": 0.00011415079940806348, "loss": 1.784, "step": 7790 }, { "epoch": 0.76083984375, "grad_norm": 0.1785881221294403, "learning_rate": 0.00011410108261030604, "loss": 1.7958, "step": 7791 }, { "epoch": 0.7609375, "grad_norm": 0.16061805188655853, "learning_rate": 0.00011405138188492506, "loss": 1.7595, "step": 7792 }, { "epoch": 0.76103515625, "grad_norm": 0.15648432075977325, "learning_rate": 0.0001140016972368852, "loss": 1.7476, "step": 7793 }, { "epoch": 0.7611328125, "grad_norm": 0.16853050887584686, "learning_rate": 
0.00011395202867114935, "loss": 1.7905, "step": 7794 }, { "epoch": 0.76123046875, "grad_norm": 0.13989965617656708, "learning_rate": 0.00011390237619267913, "loss": 1.7735, "step": 7795 }, { "epoch": 0.761328125, "grad_norm": 0.17202717065811157, "learning_rate": 0.00011385273980643442, "loss": 1.7597, "step": 7796 }, { "epoch": 0.76142578125, "grad_norm": 0.15987208485603333, "learning_rate": 0.00011380311951737327, "loss": 1.7644, "step": 7797 }, { "epoch": 0.7615234375, "grad_norm": 0.16264046728610992, "learning_rate": 0.00011375351533045243, "loss": 1.7761, "step": 7798 }, { "epoch": 0.76162109375, "grad_norm": 0.14572474360466003, "learning_rate": 0.00011370392725062687, "loss": 1.7766, "step": 7799 }, { "epoch": 0.76171875, "grad_norm": 0.16848650574684143, "learning_rate": 0.00011365435528285012, "loss": 1.7715, "step": 7800 }, { "epoch": 0.76181640625, "grad_norm": 0.1684025675058365, "learning_rate": 0.00011360479943207373, "loss": 1.7998, "step": 7801 }, { "epoch": 0.7619140625, "grad_norm": 0.16306045651435852, "learning_rate": 0.0001135552597032481, "loss": 1.8125, "step": 7802 }, { "epoch": 0.76201171875, "grad_norm": 0.16573961079120636, "learning_rate": 0.00011350573610132186, "loss": 1.7878, "step": 7803 }, { "epoch": 0.762109375, "grad_norm": 0.1655096411705017, "learning_rate": 0.00011345622863124177, "loss": 1.7951, "step": 7804 }, { "epoch": 0.76220703125, "grad_norm": 0.18864813446998596, "learning_rate": 0.0001134067372979533, "loss": 1.7201, "step": 7805 }, { "epoch": 0.7623046875, "grad_norm": 0.15294261276721954, "learning_rate": 0.00011335726210640019, "loss": 1.7608, "step": 7806 }, { "epoch": 0.76240234375, "grad_norm": 0.16524046659469604, "learning_rate": 0.00011330780306152463, "loss": 1.7909, "step": 7807 }, { "epoch": 0.7625, "grad_norm": 0.1627717763185501, "learning_rate": 0.00011325836016826695, "loss": 1.7827, "step": 7808 }, { "epoch": 0.76259765625, "grad_norm": 0.15350180864334106, "learning_rate": 0.00011320893343156627, "loss": 1.7704, "step": 7809 }, { "epoch": 0.7626953125, "grad_norm": 0.1808139830827713, "learning_rate": 0.00011315952285635984, "loss": 1.7741, "step": 7810 }, { "epoch": 0.76279296875, "grad_norm": 0.17194050550460815, "learning_rate": 0.00011311012844758325, "loss": 1.732, "step": 7811 }, { "epoch": 0.762890625, "grad_norm": 0.15617378056049347, "learning_rate": 0.00011306075021017059, "loss": 1.7037, "step": 7812 }, { "epoch": 0.76298828125, "grad_norm": 0.16638870537281036, "learning_rate": 0.00011301138814905435, "loss": 1.7501, "step": 7813 }, { "epoch": 0.7630859375, "grad_norm": 0.1813243180513382, "learning_rate": 0.00011296204226916543, "loss": 1.8113, "step": 7814 }, { "epoch": 0.76318359375, "grad_norm": 0.16186237335205078, "learning_rate": 0.0001129127125754328, "loss": 1.7992, "step": 7815 }, { "epoch": 0.76328125, "grad_norm": 0.18233315646648407, "learning_rate": 0.00011286339907278428, "loss": 1.7523, "step": 7816 }, { "epoch": 0.76337890625, "grad_norm": 0.16922254860401154, "learning_rate": 0.00011281410176614588, "loss": 1.7919, "step": 7817 }, { "epoch": 0.7634765625, "grad_norm": 0.17410801351070404, "learning_rate": 0.00011276482066044182, "loss": 1.7643, "step": 7818 }, { "epoch": 0.76357421875, "grad_norm": 0.17076055705547333, "learning_rate": 0.00011271555576059486, "loss": 1.7519, "step": 7819 }, { "epoch": 0.763671875, "grad_norm": 0.16231758892536163, "learning_rate": 0.00011266630707152621, "loss": 1.7804, "step": 7820 }, { "epoch": 0.76376953125, "grad_norm": 0.17753933370113373, "learning_rate": 
0.0001126170745981553, "loss": 1.7882, "step": 7821 }, { "epoch": 0.7638671875, "grad_norm": 0.14908809959888458, "learning_rate": 0.00011256785834540008, "loss": 1.7426, "step": 7822 }, { "epoch": 0.76396484375, "grad_norm": 0.21346202492713928, "learning_rate": 0.0001125186583181768, "loss": 1.7665, "step": 7823 }, { "epoch": 0.7640625, "grad_norm": 0.15225225687026978, "learning_rate": 0.00011246947452140016, "loss": 1.7344, "step": 7824 }, { "epoch": 0.76416015625, "grad_norm": 0.18218566477298737, "learning_rate": 0.00011242030695998304, "loss": 1.7397, "step": 7825 }, { "epoch": 0.7642578125, "grad_norm": 0.17848140001296997, "learning_rate": 0.00011237115563883693, "loss": 1.7806, "step": 7826 }, { "epoch": 0.76435546875, "grad_norm": 0.19018568098545074, "learning_rate": 0.00011232202056287162, "loss": 1.7631, "step": 7827 }, { "epoch": 0.764453125, "grad_norm": 0.18303264677524567, "learning_rate": 0.00011227290173699524, "loss": 1.758, "step": 7828 }, { "epoch": 0.76455078125, "grad_norm": 0.20678399503231049, "learning_rate": 0.00011222379916611439, "loss": 1.7095, "step": 7829 }, { "epoch": 0.7646484375, "grad_norm": 0.15957704186439514, "learning_rate": 0.00011217471285513389, "loss": 1.7648, "step": 7830 }, { "epoch": 0.76474609375, "grad_norm": 0.19906574487686157, "learning_rate": 0.00011212564280895713, "loss": 1.7517, "step": 7831 }, { "epoch": 0.76484375, "grad_norm": 0.16301403939723969, "learning_rate": 0.00011207658903248569, "loss": 1.7168, "step": 7832 }, { "epoch": 0.76494140625, "grad_norm": 0.18607556819915771, "learning_rate": 0.00011202755153061964, "loss": 1.7941, "step": 7833 }, { "epoch": 0.7650390625, "grad_norm": 0.16136088967323303, "learning_rate": 0.00011197853030825734, "loss": 1.7511, "step": 7834 }, { "epoch": 0.76513671875, "grad_norm": 0.16216447949409485, "learning_rate": 0.00011192952537029565, "loss": 1.7742, "step": 7835 }, { "epoch": 0.765234375, "grad_norm": 0.15974228084087372, "learning_rate": 0.00011188053672162971, "loss": 1.7931, "step": 7836 }, { "epoch": 0.76533203125, "grad_norm": 0.1639247089624405, "learning_rate": 0.00011183156436715302, "loss": 1.7769, "step": 7837 }, { "epoch": 0.7654296875, "grad_norm": 0.1645023077726364, "learning_rate": 0.0001117826083117576, "loss": 1.7506, "step": 7838 }, { "epoch": 0.76552734375, "grad_norm": 0.1647023856639862, "learning_rate": 0.00011173366856033354, "loss": 1.7571, "step": 7839 }, { "epoch": 0.765625, "grad_norm": 0.15368427336215973, "learning_rate": 0.00011168474511776962, "loss": 1.7884, "step": 7840 }, { "epoch": 0.76572265625, "grad_norm": 0.17961928248405457, "learning_rate": 0.0001116358379889528, "loss": 1.789, "step": 7841 }, { "epoch": 0.7658203125, "grad_norm": 0.1581445038318634, "learning_rate": 0.00011158694717876847, "loss": 1.7396, "step": 7842 }, { "epoch": 0.76591796875, "grad_norm": 0.1637241691350937, "learning_rate": 0.00011153807269210043, "loss": 1.7604, "step": 7843 }, { "epoch": 0.766015625, "grad_norm": 0.16436229646205902, "learning_rate": 0.00011148921453383077, "loss": 1.7459, "step": 7844 }, { "epoch": 0.76611328125, "grad_norm": 0.17879031598567963, "learning_rate": 0.00011144037270884009, "loss": 1.8421, "step": 7845 }, { "epoch": 0.7662109375, "grad_norm": 0.18955329060554504, "learning_rate": 0.00011139154722200707, "loss": 1.7311, "step": 7846 }, { "epoch": 0.76630859375, "grad_norm": 0.18493473529815674, "learning_rate": 0.00011134273807820907, "loss": 1.7702, "step": 7847 }, { "epoch": 0.76640625, "grad_norm": 0.17156429588794708, "learning_rate": 
0.00011129394528232165, "loss": 1.8019, "step": 7848 }, { "epoch": 0.76650390625, "grad_norm": 0.1929718554019928, "learning_rate": 0.00011124516883921878, "loss": 1.7474, "step": 7849 }, { "epoch": 0.7666015625, "grad_norm": 0.17022545635700226, "learning_rate": 0.00011119640875377285, "loss": 1.7721, "step": 7850 }, { "epoch": 0.76669921875, "grad_norm": 0.17937853932380676, "learning_rate": 0.00011114766503085447, "loss": 1.7454, "step": 7851 }, { "epoch": 0.766796875, "grad_norm": 0.19159084558486938, "learning_rate": 0.00011109893767533286, "loss": 1.7594, "step": 7852 }, { "epoch": 0.76689453125, "grad_norm": 0.19509002566337585, "learning_rate": 0.00011105022669207523, "loss": 1.7519, "step": 7853 }, { "epoch": 0.7669921875, "grad_norm": 0.17679280042648315, "learning_rate": 0.00011100153208594752, "loss": 1.7637, "step": 7854 }, { "epoch": 0.76708984375, "grad_norm": 0.16894085705280304, "learning_rate": 0.00011095285386181383, "loss": 1.8142, "step": 7855 }, { "epoch": 0.7671875, "grad_norm": 0.17472979426383972, "learning_rate": 0.00011090419202453678, "loss": 1.7978, "step": 7856 }, { "epoch": 0.76728515625, "grad_norm": 0.1617012470960617, "learning_rate": 0.00011085554657897711, "loss": 1.7834, "step": 7857 }, { "epoch": 0.7673828125, "grad_norm": 0.16980817914009094, "learning_rate": 0.0001108069175299942, "loss": 1.7784, "step": 7858 }, { "epoch": 0.76748046875, "grad_norm": 0.15959341824054718, "learning_rate": 0.00011075830488244565, "loss": 1.8073, "step": 7859 }, { "epoch": 0.767578125, "grad_norm": 0.15938346087932587, "learning_rate": 0.00011070970864118732, "loss": 1.7567, "step": 7860 }, { "epoch": 0.76767578125, "grad_norm": 0.15345844626426697, "learning_rate": 0.00011066112881107363, "loss": 1.7416, "step": 7861 }, { "epoch": 0.7677734375, "grad_norm": 0.16391496360301971, "learning_rate": 0.00011061256539695726, "loss": 1.7652, "step": 7862 }, { "epoch": 0.76787109375, "grad_norm": 0.17883946001529694, "learning_rate": 0.00011056401840368927, "loss": 1.8098, "step": 7863 }, { "epoch": 0.76796875, "grad_norm": 0.14473727345466614, "learning_rate": 0.00011051548783611906, "loss": 1.7665, "step": 7864 }, { "epoch": 0.76806640625, "grad_norm": 0.1757083386182785, "learning_rate": 0.00011046697369909443, "loss": 1.7643, "step": 7865 }, { "epoch": 0.7681640625, "grad_norm": 0.156539648771286, "learning_rate": 0.00011041847599746152, "loss": 1.7838, "step": 7866 }, { "epoch": 0.76826171875, "grad_norm": 0.1673443764448166, "learning_rate": 0.00011036999473606479, "loss": 1.8237, "step": 7867 }, { "epoch": 0.768359375, "grad_norm": 0.18954738974571228, "learning_rate": 0.00011032152991974706, "loss": 1.7644, "step": 7868 }, { "epoch": 0.76845703125, "grad_norm": 0.14470204710960388, "learning_rate": 0.00011027308155334958, "loss": 1.7938, "step": 7869 }, { "epoch": 0.7685546875, "grad_norm": 0.21683257818222046, "learning_rate": 0.00011022464964171189, "loss": 1.7577, "step": 7870 }, { "epoch": 0.76865234375, "grad_norm": 0.1849461793899536, "learning_rate": 0.00011017623418967194, "loss": 1.7796, "step": 7871 }, { "epoch": 0.76875, "grad_norm": 0.2103787064552307, "learning_rate": 0.00011012783520206598, "loss": 1.8103, "step": 7872 }, { "epoch": 0.76884765625, "grad_norm": 0.17644385993480682, "learning_rate": 0.0001100794526837287, "loss": 1.7174, "step": 7873 }, { "epoch": 0.7689453125, "grad_norm": 0.19126607477664948, "learning_rate": 0.000110031086639493, "loss": 1.8152, "step": 7874 }, { "epoch": 0.76904296875, "grad_norm": 0.15990057587623596, "learning_rate": 
0.00010998273707419023, "loss": 1.7047, "step": 7875 }, { "epoch": 0.769140625, "grad_norm": 0.20092828571796417, "learning_rate": 0.00010993440399265009, "loss": 1.7741, "step": 7876 }, { "epoch": 0.76923828125, "grad_norm": 0.1421423852443695, "learning_rate": 0.00010988608739970067, "loss": 1.7736, "step": 7877 }, { "epoch": 0.7693359375, "grad_norm": 0.18841204047203064, "learning_rate": 0.00010983778730016834, "loss": 1.7932, "step": 7878 }, { "epoch": 0.76943359375, "grad_norm": 0.159399151802063, "learning_rate": 0.00010978950369887784, "loss": 1.7822, "step": 7879 }, { "epoch": 0.76953125, "grad_norm": 0.1772158294916153, "learning_rate": 0.00010974123660065234, "loss": 1.7662, "step": 7880 }, { "epoch": 0.76962890625, "grad_norm": 0.16945457458496094, "learning_rate": 0.00010969298601031319, "loss": 1.8057, "step": 7881 }, { "epoch": 0.7697265625, "grad_norm": 0.1610037386417389, "learning_rate": 0.00010964475193268029, "loss": 1.781, "step": 7882 }, { "epoch": 0.76982421875, "grad_norm": 0.16635893285274506, "learning_rate": 0.00010959653437257169, "loss": 1.7827, "step": 7883 }, { "epoch": 0.769921875, "grad_norm": 0.16549155116081238, "learning_rate": 0.00010954833333480399, "loss": 1.7624, "step": 7884 }, { "epoch": 0.77001953125, "grad_norm": 0.15887917578220367, "learning_rate": 0.00010950014882419204, "loss": 1.7622, "step": 7885 }, { "epoch": 0.7701171875, "grad_norm": 0.17244558036327362, "learning_rate": 0.000109451980845549, "loss": 1.7886, "step": 7886 }, { "epoch": 0.77021484375, "grad_norm": 0.13878133893013, "learning_rate": 0.00010940382940368654, "loss": 1.7611, "step": 7887 }, { "epoch": 0.7703125, "grad_norm": 0.15785396099090576, "learning_rate": 0.00010935569450341437, "loss": 1.7419, "step": 7888 }, { "epoch": 0.77041015625, "grad_norm": 0.1751440465450287, "learning_rate": 0.00010930757614954087, "loss": 1.777, "step": 7889 }, { "epoch": 0.7705078125, "grad_norm": 0.18435901403427124, "learning_rate": 0.00010925947434687262, "loss": 1.7937, "step": 7890 }, { "epoch": 0.77060546875, "grad_norm": 0.16029374301433563, "learning_rate": 0.00010921138910021453, "loss": 1.7169, "step": 7891 }, { "epoch": 0.770703125, "grad_norm": 0.15113088488578796, "learning_rate": 0.00010916332041436991, "loss": 1.8117, "step": 7892 }, { "epoch": 0.77080078125, "grad_norm": 0.16612599790096283, "learning_rate": 0.00010911526829414042, "loss": 1.7599, "step": 7893 }, { "epoch": 0.7708984375, "grad_norm": 0.18088118731975555, "learning_rate": 0.0001090672327443261, "loss": 1.8276, "step": 7894 }, { "epoch": 0.77099609375, "grad_norm": 0.15257737040519714, "learning_rate": 0.00010901921376972509, "loss": 1.774, "step": 7895 }, { "epoch": 0.77109375, "grad_norm": 0.19613753259181976, "learning_rate": 0.00010897121137513419, "loss": 1.7936, "step": 7896 }, { "epoch": 0.77119140625, "grad_norm": 0.15222609043121338, "learning_rate": 0.00010892322556534838, "loss": 1.7758, "step": 7897 }, { "epoch": 0.7712890625, "grad_norm": 0.18619371950626373, "learning_rate": 0.00010887525634516104, "loss": 1.7888, "step": 7898 }, { "epoch": 0.77138671875, "grad_norm": 0.16074694693088531, "learning_rate": 0.00010882730371936386, "loss": 1.7831, "step": 7899 }, { "epoch": 0.771484375, "grad_norm": 0.14029549062252045, "learning_rate": 0.00010877936769274687, "loss": 1.7587, "step": 7900 }, { "epoch": 0.77158203125, "grad_norm": 0.1762377768754959, "learning_rate": 0.00010873144827009853, "loss": 1.7901, "step": 7901 }, { "epoch": 0.7716796875, "grad_norm": 0.14294849336147308, "learning_rate": 
0.00010868354545620543, "loss": 1.8007, "step": 7902 }, { "epoch": 0.77177734375, "grad_norm": 0.15748916566371918, "learning_rate": 0.00010863565925585273, "loss": 1.7707, "step": 7903 }, { "epoch": 0.771875, "grad_norm": 0.1540040671825409, "learning_rate": 0.0001085877896738238, "loss": 1.7955, "step": 7904 }, { "epoch": 0.77197265625, "grad_norm": 0.16300299763679504, "learning_rate": 0.00010853993671490041, "loss": 1.7782, "step": 7905 }, { "epoch": 0.7720703125, "grad_norm": 0.17450745403766632, "learning_rate": 0.00010849210038386264, "loss": 1.7811, "step": 7906 }, { "epoch": 0.77216796875, "grad_norm": 0.14275802671909332, "learning_rate": 0.00010844428068548892, "loss": 1.7353, "step": 7907 }, { "epoch": 0.772265625, "grad_norm": 0.17081297934055328, "learning_rate": 0.00010839647762455607, "loss": 1.7392, "step": 7908 }, { "epoch": 0.77236328125, "grad_norm": 0.17333891987800598, "learning_rate": 0.00010834869120583905, "loss": 1.7818, "step": 7909 }, { "epoch": 0.7724609375, "grad_norm": 0.1731674075126648, "learning_rate": 0.00010830092143411139, "loss": 1.799, "step": 7910 }, { "epoch": 0.77255859375, "grad_norm": 0.17279775440692902, "learning_rate": 0.00010825316831414486, "loss": 1.7779, "step": 7911 }, { "epoch": 0.77265625, "grad_norm": 0.1722838133573532, "learning_rate": 0.00010820543185070957, "loss": 1.723, "step": 7912 }, { "epoch": 0.77275390625, "grad_norm": 0.16671578586101532, "learning_rate": 0.00010815771204857397, "loss": 1.7969, "step": 7913 }, { "epoch": 0.7728515625, "grad_norm": 0.16792923212051392, "learning_rate": 0.00010811000891250486, "loss": 1.7589, "step": 7914 }, { "epoch": 0.77294921875, "grad_norm": 0.17990683019161224, "learning_rate": 0.00010806232244726741, "loss": 1.7491, "step": 7915 }, { "epoch": 0.773046875, "grad_norm": 0.15499593317508698, "learning_rate": 0.00010801465265762491, "loss": 1.797, "step": 7916 }, { "epoch": 0.77314453125, "grad_norm": 0.17780350148677826, "learning_rate": 0.0001079669995483393, "loss": 1.7362, "step": 7917 }, { "epoch": 0.7732421875, "grad_norm": 0.1547502726316452, "learning_rate": 0.00010791936312417063, "loss": 1.7861, "step": 7918 }, { "epoch": 0.77333984375, "grad_norm": 0.16669850051403046, "learning_rate": 0.00010787174338987739, "loss": 1.7456, "step": 7919 }, { "epoch": 0.7734375, "grad_norm": 0.16530273854732513, "learning_rate": 0.00010782414035021637, "loss": 1.7695, "step": 7920 }, { "epoch": 0.77353515625, "grad_norm": 0.15392063558101654, "learning_rate": 0.00010777655400994265, "loss": 1.735, "step": 7921 }, { "epoch": 0.7736328125, "grad_norm": 0.1474154144525528, "learning_rate": 0.00010772898437380985, "loss": 1.7593, "step": 7922 }, { "epoch": 0.77373046875, "grad_norm": 0.1689181923866272, "learning_rate": 0.0001076814314465695, "loss": 1.7945, "step": 7923 }, { "epoch": 0.773828125, "grad_norm": 0.16658832132816315, "learning_rate": 0.00010763389523297185, "loss": 1.7668, "step": 7924 }, { "epoch": 0.77392578125, "grad_norm": 0.1603284478187561, "learning_rate": 0.00010758637573776533, "loss": 1.7989, "step": 7925 }, { "epoch": 0.7740234375, "grad_norm": 0.15063580870628357, "learning_rate": 0.00010753887296569676, "loss": 1.7548, "step": 7926 }, { "epoch": 0.77412109375, "grad_norm": 0.15687081217765808, "learning_rate": 0.00010749138692151117, "loss": 1.7652, "step": 7927 }, { "epoch": 0.77421875, "grad_norm": 0.14713646471500397, "learning_rate": 0.00010744391760995205, "loss": 1.7896, "step": 7928 }, { "epoch": 0.77431640625, "grad_norm": 0.16969987750053406, "learning_rate": 
0.00010739646503576123, "loss": 1.7593, "step": 7929 }, { "epoch": 0.7744140625, "grad_norm": 0.15260957181453705, "learning_rate": 0.00010734902920367864, "loss": 1.7463, "step": 7930 }, { "epoch": 0.77451171875, "grad_norm": 0.14665037393569946, "learning_rate": 0.00010730161011844277, "loss": 1.7603, "step": 7931 }, { "epoch": 0.774609375, "grad_norm": 0.16129754483699799, "learning_rate": 0.00010725420778479036, "loss": 1.7396, "step": 7932 }, { "epoch": 0.77470703125, "grad_norm": 0.1598609983921051, "learning_rate": 0.00010720682220745653, "loss": 1.7466, "step": 7933 }, { "epoch": 0.7748046875, "grad_norm": 0.15438376367092133, "learning_rate": 0.00010715945339117464, "loss": 1.7461, "step": 7934 }, { "epoch": 0.77490234375, "grad_norm": 0.1486106514930725, "learning_rate": 0.00010711210134067641, "loss": 1.7779, "step": 7935 }, { "epoch": 0.775, "grad_norm": 0.17271201312541962, "learning_rate": 0.00010706476606069198, "loss": 1.718, "step": 7936 }, { "epoch": 0.77509765625, "grad_norm": 0.1585274487733841, "learning_rate": 0.00010701744755594959, "loss": 1.7867, "step": 7937 }, { "epoch": 0.7751953125, "grad_norm": 0.16222359240055084, "learning_rate": 0.00010697014583117598, "loss": 1.7619, "step": 7938 }, { "epoch": 0.77529296875, "grad_norm": 0.15124040842056274, "learning_rate": 0.00010692286089109621, "loss": 1.8014, "step": 7939 }, { "epoch": 0.775390625, "grad_norm": 0.1728062480688095, "learning_rate": 0.0001068755927404336, "loss": 1.738, "step": 7940 }, { "epoch": 0.77548828125, "grad_norm": 0.17949330806732178, "learning_rate": 0.00010682834138390982, "loss": 1.7748, "step": 7941 }, { "epoch": 0.7755859375, "grad_norm": 0.1780107617378235, "learning_rate": 0.00010678110682624486, "loss": 1.755, "step": 7942 }, { "epoch": 0.77568359375, "grad_norm": 0.17196132242679596, "learning_rate": 0.00010673388907215712, "loss": 1.7834, "step": 7943 }, { "epoch": 0.77578125, "grad_norm": 0.15627440810203552, "learning_rate": 0.0001066866881263631, "loss": 1.7485, "step": 7944 }, { "epoch": 0.77587890625, "grad_norm": 0.2197687029838562, "learning_rate": 0.00010663950399357781, "loss": 1.7906, "step": 7945 }, { "epoch": 0.7759765625, "grad_norm": 0.15199071168899536, "learning_rate": 0.00010659233667851453, "loss": 1.7742, "step": 7946 }, { "epoch": 0.77607421875, "grad_norm": 0.1941189020872116, "learning_rate": 0.00010654518618588486, "loss": 1.7623, "step": 7947 }, { "epoch": 0.776171875, "grad_norm": 0.1498612016439438, "learning_rate": 0.00010649805252039871, "loss": 1.7206, "step": 7948 }, { "epoch": 0.77626953125, "grad_norm": 0.17155885696411133, "learning_rate": 0.00010645093568676434, "loss": 1.7579, "step": 7949 }, { "epoch": 0.7763671875, "grad_norm": 0.14994072914123535, "learning_rate": 0.00010640383568968831, "loss": 1.777, "step": 7950 }, { "epoch": 0.77646484375, "grad_norm": 0.15250138938426971, "learning_rate": 0.00010635675253387545, "loss": 1.8018, "step": 7951 }, { "epoch": 0.7765625, "grad_norm": 0.16684897243976593, "learning_rate": 0.00010630968622402892, "loss": 1.8171, "step": 7952 }, { "epoch": 0.77666015625, "grad_norm": 0.1489727944135666, "learning_rate": 0.00010626263676485031, "loss": 1.7586, "step": 7953 }, { "epoch": 0.7767578125, "grad_norm": 0.16676881909370422, "learning_rate": 0.00010621560416103939, "loss": 1.7953, "step": 7954 }, { "epoch": 0.77685546875, "grad_norm": 0.1602635383605957, "learning_rate": 0.00010616858841729433, "loss": 1.7635, "step": 7955 }, { "epoch": 0.776953125, "grad_norm": 0.19999918341636658, "learning_rate": 
0.00010612158953831158, "loss": 1.7385, "step": 7956 }, { "epoch": 0.77705078125, "grad_norm": 0.1749488115310669, "learning_rate": 0.00010607460752878595, "loss": 1.7535, "step": 7957 }, { "epoch": 0.7771484375, "grad_norm": 0.15334823727607727, "learning_rate": 0.00010602764239341043, "loss": 1.7807, "step": 7958 }, { "epoch": 0.77724609375, "grad_norm": 0.18599779903888702, "learning_rate": 0.0001059806941368765, "loss": 1.7832, "step": 7959 }, { "epoch": 0.77734375, "grad_norm": 0.14852125942707062, "learning_rate": 0.00010593376276387381, "loss": 1.7708, "step": 7960 }, { "epoch": 0.77744140625, "grad_norm": 0.18457117676734924, "learning_rate": 0.00010588684827909044, "loss": 1.7695, "step": 7961 }, { "epoch": 0.7775390625, "grad_norm": 0.1540106236934662, "learning_rate": 0.00010583995068721272, "loss": 1.7774, "step": 7962 }, { "epoch": 0.77763671875, "grad_norm": 0.17137496173381805, "learning_rate": 0.00010579306999292532, "loss": 1.7631, "step": 7963 }, { "epoch": 0.777734375, "grad_norm": 0.15280619263648987, "learning_rate": 0.00010574620620091122, "loss": 1.7381, "step": 7964 }, { "epoch": 0.77783203125, "grad_norm": 0.1732707917690277, "learning_rate": 0.00010569935931585164, "loss": 1.7418, "step": 7965 }, { "epoch": 0.7779296875, "grad_norm": 0.14884692430496216, "learning_rate": 0.00010565252934242617, "loss": 1.7728, "step": 7966 }, { "epoch": 0.77802734375, "grad_norm": 0.19574162364006042, "learning_rate": 0.00010560571628531267, "loss": 1.7807, "step": 7967 }, { "epoch": 0.778125, "grad_norm": 0.15763074159622192, "learning_rate": 0.00010555892014918755, "loss": 1.8113, "step": 7968 }, { "epoch": 0.77822265625, "grad_norm": 0.1804940104484558, "learning_rate": 0.00010551214093872513, "loss": 1.7669, "step": 7969 }, { "epoch": 0.7783203125, "grad_norm": 0.18291085958480835, "learning_rate": 0.00010546537865859831, "loss": 1.7685, "step": 7970 }, { "epoch": 0.77841796875, "grad_norm": 0.19920875132083893, "learning_rate": 0.00010541863331347828, "loss": 1.7631, "step": 7971 }, { "epoch": 0.778515625, "grad_norm": 0.18716353178024292, "learning_rate": 0.00010537190490803437, "loss": 1.8, "step": 7972 }, { "epoch": 0.77861328125, "grad_norm": 0.1565290093421936, "learning_rate": 0.00010532519344693439, "loss": 1.7617, "step": 7973 }, { "epoch": 0.7787109375, "grad_norm": 0.1712377667427063, "learning_rate": 0.00010527849893484432, "loss": 1.7453, "step": 7974 }, { "epoch": 0.77880859375, "grad_norm": 0.14603757858276367, "learning_rate": 0.00010523182137642876, "loss": 1.7875, "step": 7975 }, { "epoch": 0.77890625, "grad_norm": 0.14508377015590668, "learning_rate": 0.00010518516077635015, "loss": 1.7471, "step": 7976 }, { "epoch": 0.77900390625, "grad_norm": 0.16750569641590118, "learning_rate": 0.00010513851713926957, "loss": 1.7586, "step": 7977 }, { "epoch": 0.7791015625, "grad_norm": 0.1641732007265091, "learning_rate": 0.00010509189046984635, "loss": 1.7889, "step": 7978 }, { "epoch": 0.77919921875, "grad_norm": 0.1760018914937973, "learning_rate": 0.00010504528077273795, "loss": 1.7895, "step": 7979 }, { "epoch": 0.779296875, "grad_norm": 0.15651258826255798, "learning_rate": 0.00010499868805260036, "loss": 1.7749, "step": 7980 }, { "epoch": 0.77939453125, "grad_norm": 0.1702810674905777, "learning_rate": 0.00010495211231408766, "loss": 1.7393, "step": 7981 }, { "epoch": 0.7794921875, "grad_norm": 0.16733390092849731, "learning_rate": 0.00010490555356185258, "loss": 1.757, "step": 7982 }, { "epoch": 0.77958984375, "grad_norm": 0.17077234387397766, "learning_rate": 
0.00010485901180054575, "loss": 1.7519, "step": 7983 }, { "epoch": 0.7796875, "grad_norm": 0.15117628872394562, "learning_rate": 0.00010481248703481632, "loss": 1.7989, "step": 7984 }, { "epoch": 0.77978515625, "grad_norm": 0.1655247062444687, "learning_rate": 0.00010476597926931178, "loss": 1.7572, "step": 7985 }, { "epoch": 0.7798828125, "grad_norm": 0.1998799443244934, "learning_rate": 0.00010471948850867772, "loss": 1.7831, "step": 7986 }, { "epoch": 0.77998046875, "grad_norm": 0.16943563520908356, "learning_rate": 0.00010467301475755819, "loss": 1.7282, "step": 7987 }, { "epoch": 0.780078125, "grad_norm": 0.1619543880224228, "learning_rate": 0.00010462655802059547, "loss": 1.7711, "step": 7988 }, { "epoch": 0.78017578125, "grad_norm": 0.18380972743034363, "learning_rate": 0.00010458011830243036, "loss": 1.829, "step": 7989 }, { "epoch": 0.7802734375, "grad_norm": 0.1529158353805542, "learning_rate": 0.00010453369560770159, "loss": 1.7491, "step": 7990 }, { "epoch": 0.78037109375, "grad_norm": 0.15557043254375458, "learning_rate": 0.00010448728994104642, "loss": 1.7503, "step": 7991 }, { "epoch": 0.78046875, "grad_norm": 0.15161892771720886, "learning_rate": 0.00010444090130710037, "loss": 1.7768, "step": 7992 }, { "epoch": 0.78056640625, "grad_norm": 0.15918317437171936, "learning_rate": 0.00010439452971049734, "loss": 1.7915, "step": 7993 }, { "epoch": 0.7806640625, "grad_norm": 0.16481783986091614, "learning_rate": 0.0001043481751558693, "loss": 1.7937, "step": 7994 }, { "epoch": 0.78076171875, "grad_norm": 0.15308032929897308, "learning_rate": 0.00010430183764784666, "loss": 1.7659, "step": 7995 }, { "epoch": 0.780859375, "grad_norm": 0.17077121138572693, "learning_rate": 0.0001042555171910583, "loss": 1.8099, "step": 7996 }, { "epoch": 0.78095703125, "grad_norm": 0.1401623785495758, "learning_rate": 0.00010420921379013107, "loss": 1.7263, "step": 7997 }, { "epoch": 0.7810546875, "grad_norm": 0.1741294413805008, "learning_rate": 0.0001041629274496903, "loss": 1.7009, "step": 7998 }, { "epoch": 0.78115234375, "grad_norm": 0.15183861553668976, "learning_rate": 0.00010411665817435961, "loss": 1.7771, "step": 7999 }, { "epoch": 0.78125, "grad_norm": 0.171812504529953, "learning_rate": 0.00010407040596876094, "loss": 1.7303, "step": 8000 }, { "epoch": 0.78134765625, "grad_norm": 0.15995043516159058, "learning_rate": 0.00010402417083751433, "loss": 1.811, "step": 8001 }, { "epoch": 0.7814453125, "grad_norm": 0.16174574196338654, "learning_rate": 0.0001039779527852383, "loss": 1.7297, "step": 8002 }, { "epoch": 0.78154296875, "grad_norm": 0.17340624332427979, "learning_rate": 0.00010393175181654979, "loss": 1.7749, "step": 8003 }, { "epoch": 0.781640625, "grad_norm": 0.1505582630634308, "learning_rate": 0.00010388556793606368, "loss": 1.7496, "step": 8004 }, { "epoch": 0.78173828125, "grad_norm": 0.16094879806041718, "learning_rate": 0.00010383940114839341, "loss": 1.7686, "step": 8005 }, { "epoch": 0.7818359375, "grad_norm": 0.14584773778915405, "learning_rate": 0.00010379325145815061, "loss": 1.7827, "step": 8006 }, { "epoch": 0.78193359375, "grad_norm": 0.17591993510723114, "learning_rate": 0.00010374711886994528, "loss": 1.7746, "step": 8007 }, { "epoch": 0.78203125, "grad_norm": 0.15134340524673462, "learning_rate": 0.00010370100338838556, "loss": 1.7448, "step": 8008 }, { "epoch": 0.78212890625, "grad_norm": 0.15398776531219482, "learning_rate": 0.00010365490501807797, "loss": 1.8028, "step": 8009 }, { "epoch": 0.7822265625, "grad_norm": 0.1586681306362152, "learning_rate": 
0.00010360882376362752, "loss": 1.7581, "step": 8010 }, { "epoch": 0.78232421875, "grad_norm": 0.17510618269443512, "learning_rate": 0.00010356275962963713, "loss": 1.7647, "step": 8011 }, { "epoch": 0.782421875, "grad_norm": 0.14512932300567627, "learning_rate": 0.00010351671262070821, "loss": 1.7883, "step": 8012 }, { "epoch": 0.78251953125, "grad_norm": 0.15925709903240204, "learning_rate": 0.00010347068274144053, "loss": 1.7806, "step": 8013 }, { "epoch": 0.7826171875, "grad_norm": 0.14268209040164948, "learning_rate": 0.0001034246699964321, "loss": 1.8011, "step": 8014 }, { "epoch": 0.78271484375, "grad_norm": 0.153525248169899, "learning_rate": 0.00010337867439027904, "loss": 1.8157, "step": 8015 }, { "epoch": 0.7828125, "grad_norm": 0.14614321291446686, "learning_rate": 0.00010333269592757591, "loss": 1.7532, "step": 8016 }, { "epoch": 0.78291015625, "grad_norm": 0.14757274091243744, "learning_rate": 0.00010328673461291577, "loss": 1.8108, "step": 8017 }, { "epoch": 0.7830078125, "grad_norm": 0.1567392796278, "learning_rate": 0.00010324079045088955, "loss": 1.8186, "step": 8018 }, { "epoch": 0.78310546875, "grad_norm": 0.15347546339035034, "learning_rate": 0.00010319486344608667, "loss": 1.8184, "step": 8019 }, { "epoch": 0.783203125, "grad_norm": 0.15745458006858826, "learning_rate": 0.00010314895360309488, "loss": 1.7694, "step": 8020 }, { "epoch": 0.78330078125, "grad_norm": 0.16925369203090668, "learning_rate": 0.00010310306092650023, "loss": 1.7662, "step": 8021 }, { "epoch": 0.7833984375, "grad_norm": 0.15049141645431519, "learning_rate": 0.0001030571854208869, "loss": 1.721, "step": 8022 }, { "epoch": 0.78349609375, "grad_norm": 0.1673462986946106, "learning_rate": 0.00010301132709083736, "loss": 1.7929, "step": 8023 }, { "epoch": 0.78359375, "grad_norm": 0.16047514975070953, "learning_rate": 0.00010296548594093272, "loss": 1.7595, "step": 8024 }, { "epoch": 0.78369140625, "grad_norm": 0.1714993417263031, "learning_rate": 0.00010291966197575187, "loss": 1.7583, "step": 8025 }, { "epoch": 0.7837890625, "grad_norm": 0.15753643214702606, "learning_rate": 0.00010287385519987228, "loss": 1.7613, "step": 8026 }, { "epoch": 0.78388671875, "grad_norm": 0.16978637874126434, "learning_rate": 0.00010282806561786968, "loss": 1.7847, "step": 8027 }, { "epoch": 0.783984375, "grad_norm": 0.16582293808460236, "learning_rate": 0.00010278229323431806, "loss": 1.7834, "step": 8028 }, { "epoch": 0.78408203125, "grad_norm": 0.20223289728164673, "learning_rate": 0.00010273653805378959, "loss": 1.8322, "step": 8029 }, { "epoch": 0.7841796875, "grad_norm": 0.17076505720615387, "learning_rate": 0.00010269080008085478, "loss": 1.7901, "step": 8030 }, { "epoch": 0.78427734375, "grad_norm": 0.21402543783187866, "learning_rate": 0.00010264507932008265, "loss": 1.7526, "step": 8031 }, { "epoch": 0.784375, "grad_norm": 0.1438603699207306, "learning_rate": 0.00010259937577604008, "loss": 1.7676, "step": 8032 }, { "epoch": 0.78447265625, "grad_norm": 0.20455195009708405, "learning_rate": 0.00010255368945329257, "loss": 1.7664, "step": 8033 }, { "epoch": 0.7845703125, "grad_norm": 0.1487656980752945, "learning_rate": 0.00010250802035640374, "loss": 1.7806, "step": 8034 }, { "epoch": 0.78466796875, "grad_norm": 0.1792377233505249, "learning_rate": 0.0001024623684899356, "loss": 1.7608, "step": 8035 }, { "epoch": 0.784765625, "grad_norm": 0.15126529335975647, "learning_rate": 0.00010241673385844823, "loss": 1.7718, "step": 8036 }, { "epoch": 0.78486328125, "grad_norm": 0.17155681550502777, "learning_rate": 
0.00010237111646650011, "loss": 1.8061, "step": 8037 }, { "epoch": 0.7849609375, "grad_norm": 0.1553521305322647, "learning_rate": 0.00010232551631864828, "loss": 1.8209, "step": 8038 }, { "epoch": 0.78505859375, "grad_norm": 0.16262586414813995, "learning_rate": 0.00010227993341944749, "loss": 1.7113, "step": 8039 }, { "epoch": 0.78515625, "grad_norm": 0.15454205870628357, "learning_rate": 0.00010223436777345124, "loss": 1.7511, "step": 8040 }, { "epoch": 0.78525390625, "grad_norm": 0.14583556354045868, "learning_rate": 0.00010218881938521107, "loss": 1.7678, "step": 8041 }, { "epoch": 0.7853515625, "grad_norm": 0.16507697105407715, "learning_rate": 0.00010214328825927694, "loss": 1.737, "step": 8042 }, { "epoch": 0.78544921875, "grad_norm": 0.19864971935749054, "learning_rate": 0.00010209777440019688, "loss": 1.7496, "step": 8043 }, { "epoch": 0.785546875, "grad_norm": 0.15158237516880035, "learning_rate": 0.0001020522778125173, "loss": 1.747, "step": 8044 }, { "epoch": 0.78564453125, "grad_norm": 0.17931552231311798, "learning_rate": 0.00010200679850078313, "loss": 1.7415, "step": 8045 }, { "epoch": 0.7857421875, "grad_norm": 0.1639794409275055, "learning_rate": 0.00010196133646953712, "loss": 1.7991, "step": 8046 }, { "epoch": 0.78583984375, "grad_norm": 0.167446106672287, "learning_rate": 0.00010191589172332063, "loss": 1.8013, "step": 8047 }, { "epoch": 0.7859375, "grad_norm": 0.19085575640201569, "learning_rate": 0.00010187046426667317, "loss": 1.7506, "step": 8048 }, { "epoch": 0.78603515625, "grad_norm": 0.157392218708992, "learning_rate": 0.00010182505410413261, "loss": 1.7461, "step": 8049 }, { "epoch": 0.7861328125, "grad_norm": 0.16675955057144165, "learning_rate": 0.00010177966124023488, "loss": 1.7617, "step": 8050 }, { "epoch": 0.78623046875, "grad_norm": 0.17736288905143738, "learning_rate": 0.00010173428567951435, "loss": 1.766, "step": 8051 }, { "epoch": 0.786328125, "grad_norm": 0.14527861773967743, "learning_rate": 0.00010168892742650379, "loss": 1.809, "step": 8052 }, { "epoch": 0.78642578125, "grad_norm": 0.1700144112110138, "learning_rate": 0.00010164358648573394, "loss": 1.7402, "step": 8053 }, { "epoch": 0.7865234375, "grad_norm": 0.15125516057014465, "learning_rate": 0.00010159826286173399, "loss": 1.7984, "step": 8054 }, { "epoch": 0.78662109375, "grad_norm": 0.21444730460643768, "learning_rate": 0.00010155295655903136, "loss": 1.7306, "step": 8055 }, { "epoch": 0.78671875, "grad_norm": 0.16613276302814484, "learning_rate": 0.00010150766758215185, "loss": 1.8151, "step": 8056 }, { "epoch": 0.78681640625, "grad_norm": 0.16605831682682037, "learning_rate": 0.00010146239593561924, "loss": 1.7432, "step": 8057 }, { "epoch": 0.7869140625, "grad_norm": 0.1695815920829773, "learning_rate": 0.00010141714162395587, "loss": 1.7693, "step": 8058 }, { "epoch": 0.78701171875, "grad_norm": 0.15307383239269257, "learning_rate": 0.00010137190465168238, "loss": 1.7626, "step": 8059 }, { "epoch": 0.787109375, "grad_norm": 0.16684018075466156, "learning_rate": 0.00010132668502331733, "loss": 1.7823, "step": 8060 }, { "epoch": 0.78720703125, "grad_norm": 0.1716863065958023, "learning_rate": 0.00010128148274337779, "loss": 1.748, "step": 8061 }, { "epoch": 0.7873046875, "grad_norm": 0.15178294479846954, "learning_rate": 0.00010123629781637916, "loss": 1.7542, "step": 8062 }, { "epoch": 0.78740234375, "grad_norm": 0.16175462305545807, "learning_rate": 0.00010119113024683503, "loss": 1.7395, "step": 8063 }, { "epoch": 0.7875, "grad_norm": 0.1596645563840866, "learning_rate": 
0.00010114598003925707, "loss": 1.7283, "step": 8064 }, { "epoch": 0.78759765625, "grad_norm": 0.1661875993013382, "learning_rate": 0.00010110084719815552, "loss": 1.7491, "step": 8065 }, { "epoch": 0.7876953125, "grad_norm": 0.16454742848873138, "learning_rate": 0.00010105573172803881, "loss": 1.7923, "step": 8066 }, { "epoch": 0.78779296875, "grad_norm": 0.17501476407051086, "learning_rate": 0.00010101063363341344, "loss": 1.7747, "step": 8067 }, { "epoch": 0.787890625, "grad_norm": 0.1777113974094391, "learning_rate": 0.00010096555291878435, "loss": 1.7579, "step": 8068 }, { "epoch": 0.78798828125, "grad_norm": 0.16878224909305573, "learning_rate": 0.00010092048958865471, "loss": 1.7587, "step": 8069 }, { "epoch": 0.7880859375, "grad_norm": 0.17391027510166168, "learning_rate": 0.00010087544364752604, "loss": 1.829, "step": 8070 }, { "epoch": 0.78818359375, "grad_norm": 0.16863572597503662, "learning_rate": 0.0001008304150998978, "loss": 1.7822, "step": 8071 }, { "epoch": 0.78828125, "grad_norm": 0.15384188294410706, "learning_rate": 0.00010078540395026816, "loss": 1.7862, "step": 8072 }, { "epoch": 0.78837890625, "grad_norm": 0.1729772686958313, "learning_rate": 0.00010074041020313332, "loss": 1.8084, "step": 8073 }, { "epoch": 0.7884765625, "grad_norm": 0.16926969587802887, "learning_rate": 0.00010069543386298763, "loss": 1.7661, "step": 8074 }, { "epoch": 0.78857421875, "grad_norm": 0.20843732357025146, "learning_rate": 0.00010065047493432392, "loss": 1.7556, "step": 8075 }, { "epoch": 0.788671875, "grad_norm": 0.18799719214439392, "learning_rate": 0.00010060553342163315, "loss": 1.7722, "step": 8076 }, { "epoch": 0.78876953125, "grad_norm": 0.1731976568698883, "learning_rate": 0.00010056060932940467, "loss": 1.8042, "step": 8077 }, { "epoch": 0.7888671875, "grad_norm": 0.20088277757167816, "learning_rate": 0.00010051570266212576, "loss": 1.7629, "step": 8078 }, { "epoch": 0.78896484375, "grad_norm": 0.1725306361913681, "learning_rate": 0.00010047081342428246, "loss": 1.7746, "step": 8079 }, { "epoch": 0.7890625, "grad_norm": 0.16228105127811432, "learning_rate": 0.00010042594162035878, "loss": 1.7588, "step": 8080 }, { "epoch": 0.78916015625, "grad_norm": 0.19001568853855133, "learning_rate": 0.00010038108725483686, "loss": 1.7238, "step": 8081 }, { "epoch": 0.7892578125, "grad_norm": 0.13994288444519043, "learning_rate": 0.00010033625033219734, "loss": 1.7579, "step": 8082 }, { "epoch": 0.78935546875, "grad_norm": 0.16057634353637695, "learning_rate": 0.00010029143085691905, "loss": 1.7694, "step": 8083 }, { "epoch": 0.789453125, "grad_norm": 0.16799655556678772, "learning_rate": 0.00010024662883347909, "loss": 1.7562, "step": 8084 }, { "epoch": 0.78955078125, "grad_norm": 0.17055276036262512, "learning_rate": 0.00010020184426635261, "loss": 1.7583, "step": 8085 }, { "epoch": 0.7896484375, "grad_norm": 0.16708062589168549, "learning_rate": 0.00010015707716001337, "loss": 1.797, "step": 8086 }, { "epoch": 0.78974609375, "grad_norm": 0.16499227285385132, "learning_rate": 0.00010011232751893323, "loss": 1.761, "step": 8087 }, { "epoch": 0.78984375, "grad_norm": 0.1706567108631134, "learning_rate": 0.00010006759534758214, "loss": 1.7801, "step": 8088 }, { "epoch": 0.78994140625, "grad_norm": 0.15282799303531647, "learning_rate": 0.0001000228806504285, "loss": 1.7638, "step": 8089 }, { "epoch": 0.7900390625, "grad_norm": 0.18718929588794708, "learning_rate": 9.997818343193896e-05, "loss": 1.7594, "step": 8090 }, { "epoch": 0.79013671875, "grad_norm": 0.1712728887796402, "learning_rate": 
9.993350369657836e-05, "loss": 1.7624, "step": 8091 }, { "epoch": 0.790234375, "grad_norm": 0.17446118593215942, "learning_rate": 9.98888414488097e-05, "loss": 1.7625, "step": 8092 }, { "epoch": 0.79033203125, "grad_norm": 0.1735602170228958, "learning_rate": 9.984419669309447e-05, "loss": 1.8136, "step": 8093 }, { "epoch": 0.7904296875, "grad_norm": 0.1697428971529007, "learning_rate": 9.979956943389232e-05, "loss": 1.7188, "step": 8094 }, { "epoch": 0.79052734375, "grad_norm": 0.18635618686676025, "learning_rate": 9.975495967566101e-05, "loss": 1.7782, "step": 8095 }, { "epoch": 0.790625, "grad_norm": 0.16187472641468048, "learning_rate": 9.971036742285666e-05, "loss": 1.7694, "step": 8096 }, { "epoch": 0.79072265625, "grad_norm": 0.1763584017753601, "learning_rate": 9.96657926799337e-05, "loss": 1.7485, "step": 8097 }, { "epoch": 0.7908203125, "grad_norm": 0.15549612045288086, "learning_rate": 9.96212354513448e-05, "loss": 1.7694, "step": 8098 }, { "epoch": 0.79091796875, "grad_norm": 0.17059172689914703, "learning_rate": 9.95766957415406e-05, "loss": 1.7721, "step": 8099 }, { "epoch": 0.791015625, "grad_norm": 0.16169373691082, "learning_rate": 9.953217355497049e-05, "loss": 1.8158, "step": 8100 }, { "epoch": 0.79111328125, "grad_norm": 0.15920744836330414, "learning_rate": 9.948766889608177e-05, "loss": 1.7475, "step": 8101 }, { "epoch": 0.7912109375, "grad_norm": 0.16203097999095917, "learning_rate": 9.944318176931996e-05, "loss": 1.734, "step": 8102 }, { "epoch": 0.79130859375, "grad_norm": 0.15868419408798218, "learning_rate": 9.939871217912902e-05, "loss": 1.7473, "step": 8103 }, { "epoch": 0.79140625, "grad_norm": 0.16520927846431732, "learning_rate": 9.935426012995105e-05, "loss": 1.7326, "step": 8104 }, { "epoch": 0.79150390625, "grad_norm": 0.16671302914619446, "learning_rate": 9.930982562622648e-05, "loss": 1.8103, "step": 8105 }, { "epoch": 0.7916015625, "grad_norm": 0.14874283969402313, "learning_rate": 9.926540867239372e-05, "loss": 1.7688, "step": 8106 }, { "epoch": 0.79169921875, "grad_norm": 0.17812982201576233, "learning_rate": 9.922100927288979e-05, "loss": 1.7612, "step": 8107 }, { "epoch": 0.791796875, "grad_norm": 0.1598200649023056, "learning_rate": 9.91766274321499e-05, "loss": 1.7694, "step": 8108 }, { "epoch": 0.79189453125, "grad_norm": 0.16262486577033997, "learning_rate": 9.913226315460717e-05, "loss": 1.776, "step": 8109 }, { "epoch": 0.7919921875, "grad_norm": 0.164301335811615, "learning_rate": 9.908791644469331e-05, "loss": 1.6952, "step": 8110 }, { "epoch": 0.79208984375, "grad_norm": 0.1732422262430191, "learning_rate": 9.904358730683815e-05, "loss": 1.7844, "step": 8111 }, { "epoch": 0.7921875, "grad_norm": 0.16472798585891724, "learning_rate": 9.899927574546988e-05, "loss": 1.7723, "step": 8112 }, { "epoch": 0.79228515625, "grad_norm": 0.15927135944366455, "learning_rate": 9.895498176501458e-05, "loss": 1.7753, "step": 8113 }, { "epoch": 0.7923828125, "grad_norm": 0.15669554471969604, "learning_rate": 9.891070536989705e-05, "loss": 1.7273, "step": 8114 }, { "epoch": 0.79248046875, "grad_norm": 0.14867709577083588, "learning_rate": 9.88664465645401e-05, "loss": 1.7609, "step": 8115 }, { "epoch": 0.792578125, "grad_norm": 0.15429994463920593, "learning_rate": 9.88222053533647e-05, "loss": 1.7288, "step": 8116 }, { "epoch": 0.79267578125, "grad_norm": 0.14516234397888184, "learning_rate": 9.877798174079016e-05, "loss": 1.7588, "step": 8117 }, { "epoch": 0.7927734375, "grad_norm": 0.15281657874584198, "learning_rate": 9.873377573123407e-05, "loss": 
1.752, "step": 8118 }, { "epoch": 0.79287109375, "grad_norm": 0.1544937789440155, "learning_rate": 9.868958732911228e-05, "loss": 1.8031, "step": 8119 }, { "epoch": 0.79296875, "grad_norm": 0.14326708018779755, "learning_rate": 9.864541653883862e-05, "loss": 1.7807, "step": 8120 }, { "epoch": 0.79306640625, "grad_norm": 0.15142633020877838, "learning_rate": 9.860126336482553e-05, "loss": 1.7872, "step": 8121 }, { "epoch": 0.7931640625, "grad_norm": 0.1475905328989029, "learning_rate": 9.855712781148355e-05, "loss": 1.7922, "step": 8122 }, { "epoch": 0.79326171875, "grad_norm": 0.16118106245994568, "learning_rate": 9.851300988322129e-05, "loss": 1.7784, "step": 8123 }, { "epoch": 0.793359375, "grad_norm": 0.15844540297985077, "learning_rate": 9.846890958444581e-05, "loss": 1.8162, "step": 8124 }, { "epoch": 0.79345703125, "grad_norm": 0.16065232455730438, "learning_rate": 9.842482691956234e-05, "loss": 1.8049, "step": 8125 }, { "epoch": 0.7935546875, "grad_norm": 0.17715489864349365, "learning_rate": 9.838076189297441e-05, "loss": 1.7335, "step": 8126 }, { "epoch": 0.79365234375, "grad_norm": 0.16543740034103394, "learning_rate": 9.833671450908355e-05, "loss": 1.7617, "step": 8127 }, { "epoch": 0.79375, "grad_norm": 0.16583222150802612, "learning_rate": 9.829268477228988e-05, "loss": 1.729, "step": 8128 }, { "epoch": 0.79384765625, "grad_norm": 0.16696074604988098, "learning_rate": 9.824867268699156e-05, "loss": 1.7532, "step": 8129 }, { "epoch": 0.7939453125, "grad_norm": 0.16066285967826843, "learning_rate": 9.820467825758489e-05, "loss": 1.7848, "step": 8130 }, { "epoch": 0.79404296875, "grad_norm": 0.1664610654115677, "learning_rate": 9.81607014884646e-05, "loss": 1.7572, "step": 8131 }, { "epoch": 0.794140625, "grad_norm": 0.1601037234067917, "learning_rate": 9.811674238402359e-05, "loss": 1.7719, "step": 8132 }, { "epoch": 0.79423828125, "grad_norm": 0.16935135424137115, "learning_rate": 9.807280094865303e-05, "loss": 1.7567, "step": 8133 }, { "epoch": 0.7943359375, "grad_norm": 0.1587732881307602, "learning_rate": 9.802887718674211e-05, "loss": 1.7592, "step": 8134 }, { "epoch": 0.79443359375, "grad_norm": 0.15791217982769012, "learning_rate": 9.798497110267858e-05, "loss": 1.7767, "step": 8135 }, { "epoch": 0.79453125, "grad_norm": 0.17662079632282257, "learning_rate": 9.79410827008483e-05, "loss": 1.7723, "step": 8136 }, { "epoch": 0.79462890625, "grad_norm": 0.16188281774520874, "learning_rate": 9.789721198563518e-05, "loss": 1.7811, "step": 8137 }, { "epoch": 0.7947265625, "grad_norm": 0.1789989024400711, "learning_rate": 9.785335896142161e-05, "loss": 1.7565, "step": 8138 }, { "epoch": 0.79482421875, "grad_norm": 0.17359992861747742, "learning_rate": 9.780952363258808e-05, "loss": 1.7928, "step": 8139 }, { "epoch": 0.794921875, "grad_norm": 0.15366967022418976, "learning_rate": 9.776570600351345e-05, "loss": 1.7551, "step": 8140 }, { "epoch": 0.79501953125, "grad_norm": 0.16422227025032043, "learning_rate": 9.772190607857451e-05, "loss": 1.7852, "step": 8141 }, { "epoch": 0.7951171875, "grad_norm": 0.17754937708377838, "learning_rate": 9.767812386214666e-05, "loss": 1.7727, "step": 8142 }, { "epoch": 0.79521484375, "grad_norm": 0.14544452726840973, "learning_rate": 9.763435935860339e-05, "loss": 1.7597, "step": 8143 }, { "epoch": 0.7953125, "grad_norm": 0.1603868007659912, "learning_rate": 9.759061257231622e-05, "loss": 1.7679, "step": 8144 }, { "epoch": 0.79541015625, "grad_norm": 0.16033457219600677, "learning_rate": 9.754688350765517e-05, "loss": 1.7584, "step": 8145 }, { 
"epoch": 0.7955078125, "grad_norm": 0.15280084311962128, "learning_rate": 9.750317216898835e-05, "loss": 1.8116, "step": 8146 }, { "epoch": 0.79560546875, "grad_norm": 0.16725927591323853, "learning_rate": 9.74594785606822e-05, "loss": 1.7485, "step": 8147 }, { "epoch": 0.795703125, "grad_norm": 0.15562520921230316, "learning_rate": 9.741580268710117e-05, "loss": 1.773, "step": 8148 }, { "epoch": 0.79580078125, "grad_norm": 0.17691388726234436, "learning_rate": 9.737214455260826e-05, "loss": 1.7388, "step": 8149 }, { "epoch": 0.7958984375, "grad_norm": 0.15958838164806366, "learning_rate": 9.732850416156452e-05, "loss": 1.7615, "step": 8150 }, { "epoch": 0.79599609375, "grad_norm": 0.16007177531719208, "learning_rate": 9.728488151832912e-05, "loss": 1.7636, "step": 8151 }, { "epoch": 0.79609375, "grad_norm": 0.17404800653457642, "learning_rate": 9.724127662725966e-05, "loss": 1.7666, "step": 8152 }, { "epoch": 0.79619140625, "grad_norm": 0.15104451775550842, "learning_rate": 9.719768949271185e-05, "loss": 1.7904, "step": 8153 }, { "epoch": 0.7962890625, "grad_norm": 0.19655390083789825, "learning_rate": 9.715412011903977e-05, "loss": 1.7914, "step": 8154 }, { "epoch": 0.79638671875, "grad_norm": 0.1672365665435791, "learning_rate": 9.711056851059533e-05, "loss": 1.7866, "step": 8155 }, { "epoch": 0.796484375, "grad_norm": 0.19428500533103943, "learning_rate": 9.706703467172923e-05, "loss": 1.7932, "step": 8156 }, { "epoch": 0.79658203125, "grad_norm": 0.17579397559165955, "learning_rate": 9.702351860679009e-05, "loss": 1.7561, "step": 8157 }, { "epoch": 0.7966796875, "grad_norm": 0.16000333428382874, "learning_rate": 9.698002032012465e-05, "loss": 1.764, "step": 8158 }, { "epoch": 0.79677734375, "grad_norm": 0.16930490732192993, "learning_rate": 9.693653981607809e-05, "loss": 1.7887, "step": 8159 }, { "epoch": 0.796875, "grad_norm": 0.1761905699968338, "learning_rate": 9.689307709899365e-05, "loss": 1.7992, "step": 8160 }, { "epoch": 0.79697265625, "grad_norm": 0.16088266670703888, "learning_rate": 9.684963217321302e-05, "loss": 1.7736, "step": 8161 }, { "epoch": 0.7970703125, "grad_norm": 0.1687707006931305, "learning_rate": 9.680620504307573e-05, "loss": 1.7824, "step": 8162 }, { "epoch": 0.79716796875, "grad_norm": 0.17093496024608612, "learning_rate": 9.676279571291996e-05, "loss": 1.8021, "step": 8163 }, { "epoch": 0.797265625, "grad_norm": 0.15577836334705353, "learning_rate": 9.671940418708192e-05, "loss": 1.7726, "step": 8164 }, { "epoch": 0.79736328125, "grad_norm": 0.16603948175907135, "learning_rate": 9.667603046989593e-05, "loss": 1.7434, "step": 8165 }, { "epoch": 0.7974609375, "grad_norm": 0.1703491061925888, "learning_rate": 9.663267456569468e-05, "loss": 1.7945, "step": 8166 }, { "epoch": 0.79755859375, "grad_norm": 0.1552608758211136, "learning_rate": 9.658933647880901e-05, "loss": 1.8153, "step": 8167 }, { "epoch": 0.79765625, "grad_norm": 0.16054320335388184, "learning_rate": 9.654601621356817e-05, "loss": 1.7534, "step": 8168 }, { "epoch": 0.79775390625, "grad_norm": 0.1653607338666916, "learning_rate": 9.65027137742992e-05, "loss": 1.8209, "step": 8169 }, { "epoch": 0.7978515625, "grad_norm": 0.18586716055870056, "learning_rate": 9.645942916532786e-05, "loss": 1.7592, "step": 8170 }, { "epoch": 0.79794921875, "grad_norm": 0.1610354632139206, "learning_rate": 9.641616239097787e-05, "loss": 1.7343, "step": 8171 }, { "epoch": 0.798046875, "grad_norm": 0.18218208849430084, "learning_rate": 9.637291345557108e-05, "loss": 1.7572, "step": 8172 }, { "epoch": 0.79814453125, 
"grad_norm": 0.1449289321899414, "learning_rate": 9.632968236342774e-05, "loss": 1.7827, "step": 8173 }, { "epoch": 0.7982421875, "grad_norm": 0.17308616638183594, "learning_rate": 9.628646911886626e-05, "loss": 1.7691, "step": 8174 }, { "epoch": 0.79833984375, "grad_norm": 0.1530013382434845, "learning_rate": 9.624327372620329e-05, "loss": 1.7901, "step": 8175 }, { "epoch": 0.7984375, "grad_norm": 0.14628000557422638, "learning_rate": 9.620009618975357e-05, "loss": 1.7831, "step": 8176 }, { "epoch": 0.79853515625, "grad_norm": 0.15641245245933533, "learning_rate": 9.615693651383026e-05, "loss": 1.7817, "step": 8177 }, { "epoch": 0.7986328125, "grad_norm": 0.15046559274196625, "learning_rate": 9.611379470274463e-05, "loss": 1.7971, "step": 8178 }, { "epoch": 0.79873046875, "grad_norm": 0.16089625656604767, "learning_rate": 9.607067076080605e-05, "loss": 1.7811, "step": 8179 }, { "epoch": 0.798828125, "grad_norm": 0.13809122145175934, "learning_rate": 9.60275646923223e-05, "loss": 1.7809, "step": 8180 }, { "epoch": 0.79892578125, "grad_norm": 0.15441864728927612, "learning_rate": 9.598447650159931e-05, "loss": 1.7686, "step": 8181 }, { "epoch": 0.7990234375, "grad_norm": 0.15981777012348175, "learning_rate": 9.594140619294115e-05, "loss": 1.7176, "step": 8182 }, { "epoch": 0.79912109375, "grad_norm": 0.14819484949111938, "learning_rate": 9.589835377065018e-05, "loss": 1.7755, "step": 8183 }, { "epoch": 0.79921875, "grad_norm": 0.18491797149181366, "learning_rate": 9.585531923902701e-05, "loss": 1.7435, "step": 8184 }, { "epoch": 0.79931640625, "grad_norm": 0.15217626094818115, "learning_rate": 9.58123026023704e-05, "loss": 1.771, "step": 8185 }, { "epoch": 0.7994140625, "grad_norm": 0.16853287816047668, "learning_rate": 9.576930386497726e-05, "loss": 1.7621, "step": 8186 }, { "epoch": 0.79951171875, "grad_norm": 0.170495867729187, "learning_rate": 9.572632303114282e-05, "loss": 1.7958, "step": 8187 }, { "epoch": 0.799609375, "grad_norm": 0.16692978143692017, "learning_rate": 9.568336010516049e-05, "loss": 1.7302, "step": 8188 }, { "epoch": 0.79970703125, "grad_norm": 0.16254328191280365, "learning_rate": 9.564041509132187e-05, "loss": 1.7722, "step": 8189 }, { "epoch": 0.7998046875, "grad_norm": 0.1572733074426651, "learning_rate": 9.559748799391681e-05, "loss": 1.7612, "step": 8190 }, { "epoch": 0.79990234375, "grad_norm": 0.16888238489627838, "learning_rate": 9.55545788172333e-05, "loss": 1.7364, "step": 8191 }, { "epoch": 0.8, "grad_norm": 0.15443305671215057, "learning_rate": 9.551168756555773e-05, "loss": 1.7846, "step": 8192 }, { "epoch": 0.80009765625, "grad_norm": 0.161743625998497, "learning_rate": 9.546881424317439e-05, "loss": 1.7835, "step": 8193 }, { "epoch": 0.8001953125, "grad_norm": 0.16679586470127106, "learning_rate": 9.542595885436599e-05, "loss": 1.78, "step": 8194 }, { "epoch": 0.80029296875, "grad_norm": 0.1508711278438568, "learning_rate": 9.538312140341341e-05, "loss": 1.7541, "step": 8195 }, { "epoch": 0.800390625, "grad_norm": 0.17870965600013733, "learning_rate": 9.534030189459575e-05, "loss": 1.7883, "step": 8196 }, { "epoch": 0.80048828125, "grad_norm": 0.14728200435638428, "learning_rate": 9.52975003321903e-05, "loss": 1.7627, "step": 8197 }, { "epoch": 0.8005859375, "grad_norm": 0.19367307424545288, "learning_rate": 9.52547167204725e-05, "loss": 1.8294, "step": 8198 }, { "epoch": 0.80068359375, "grad_norm": 0.15626779198646545, "learning_rate": 9.521195106371622e-05, "loss": 1.7997, "step": 8199 }, { "epoch": 0.80078125, "grad_norm": 0.16041617095470428, 
"learning_rate": 9.516920336619314e-05, "loss": 1.7855, "step": 8200 }, { "epoch": 0.80087890625, "grad_norm": 0.1865690052509308, "learning_rate": 9.51264736321735e-05, "loss": 1.7653, "step": 8201 }, { "epoch": 0.8009765625, "grad_norm": 0.2723311483860016, "learning_rate": 9.508376186592562e-05, "loss": 1.6967, "step": 8202 }, { "epoch": 0.80107421875, "grad_norm": 0.178361177444458, "learning_rate": 9.504106807171603e-05, "loss": 1.7221, "step": 8203 }, { "epoch": 0.801171875, "grad_norm": 0.18729250133037567, "learning_rate": 9.49983922538094e-05, "loss": 1.7743, "step": 8204 }, { "epoch": 0.80126953125, "grad_norm": 0.17930838465690613, "learning_rate": 9.495573441646876e-05, "loss": 1.7925, "step": 8205 }, { "epoch": 0.8013671875, "grad_norm": 0.19643360376358032, "learning_rate": 9.49130945639552e-05, "loss": 1.7846, "step": 8206 }, { "epoch": 0.80146484375, "grad_norm": 0.16372525691986084, "learning_rate": 9.48704727005281e-05, "loss": 1.7745, "step": 8207 }, { "epoch": 0.8015625, "grad_norm": 0.18729516863822937, "learning_rate": 9.482786883044492e-05, "loss": 1.7649, "step": 8208 }, { "epoch": 0.80166015625, "grad_norm": 0.1647922396659851, "learning_rate": 9.478528295796147e-05, "loss": 1.79, "step": 8209 }, { "epoch": 0.8017578125, "grad_norm": 0.1661582589149475, "learning_rate": 9.474271508733168e-05, "loss": 1.7982, "step": 8210 }, { "epoch": 0.80185546875, "grad_norm": 0.16948392987251282, "learning_rate": 9.470016522280775e-05, "loss": 1.7444, "step": 8211 }, { "epoch": 0.801953125, "grad_norm": 0.162079319357872, "learning_rate": 9.465763336863996e-05, "loss": 1.7894, "step": 8212 }, { "epoch": 0.80205078125, "grad_norm": 0.17512552440166473, "learning_rate": 9.461511952907693e-05, "loss": 1.7411, "step": 8213 }, { "epoch": 0.8021484375, "grad_norm": 0.1658283919095993, "learning_rate": 9.457262370836545e-05, "loss": 1.7678, "step": 8214 }, { "epoch": 0.80224609375, "grad_norm": 0.15746282041072845, "learning_rate": 9.453014591075035e-05, "loss": 1.7566, "step": 8215 }, { "epoch": 0.80234375, "grad_norm": 0.16000287234783173, "learning_rate": 9.448768614047487e-05, "loss": 1.766, "step": 8216 }, { "epoch": 0.80244140625, "grad_norm": 0.17093685269355774, "learning_rate": 9.444524440178034e-05, "loss": 1.7881, "step": 8217 }, { "epoch": 0.8025390625, "grad_norm": 0.15648116171360016, "learning_rate": 9.440282069890633e-05, "loss": 1.7889, "step": 8218 }, { "epoch": 0.80263671875, "grad_norm": 0.1828954815864563, "learning_rate": 9.436041503609058e-05, "loss": 1.7613, "step": 8219 }, { "epoch": 0.802734375, "grad_norm": 0.1645178347826004, "learning_rate": 9.431802741756903e-05, "loss": 1.8132, "step": 8220 }, { "epoch": 0.80283203125, "grad_norm": 0.14887890219688416, "learning_rate": 9.427565784757593e-05, "loss": 1.7573, "step": 8221 }, { "epoch": 0.8029296875, "grad_norm": 0.162347212433815, "learning_rate": 9.423330633034346e-05, "loss": 1.7644, "step": 8222 }, { "epoch": 0.80302734375, "grad_norm": 0.16091801226139069, "learning_rate": 9.419097287010226e-05, "loss": 1.6683, "step": 8223 }, { "epoch": 0.803125, "grad_norm": 0.17221833765506744, "learning_rate": 9.4148657471081e-05, "loss": 1.7992, "step": 8224 }, { "epoch": 0.80322265625, "grad_norm": 0.14379392564296722, "learning_rate": 9.410636013750673e-05, "loss": 1.7703, "step": 8225 }, { "epoch": 0.8033203125, "grad_norm": 0.39493706822395325, "learning_rate": 9.406408087360444e-05, "loss": 1.6513, "step": 8226 }, { "epoch": 0.80341796875, "grad_norm": 0.16395099461078644, "learning_rate": 
9.402181968359758e-05, "loss": 1.7469, "step": 8227 }, { "epoch": 0.803515625, "grad_norm": 0.1640930473804474, "learning_rate": 9.397957657170763e-05, "loss": 1.7427, "step": 8228 }, { "epoch": 0.80361328125, "grad_norm": 0.16049596667289734, "learning_rate": 9.393735154215424e-05, "loss": 1.7649, "step": 8229 }, { "epoch": 0.8037109375, "grad_norm": 0.16843025386333466, "learning_rate": 9.389514459915538e-05, "loss": 1.7818, "step": 8230 }, { "epoch": 0.80380859375, "grad_norm": 0.15883910655975342, "learning_rate": 9.38529557469271e-05, "loss": 1.728, "step": 8231 }, { "epoch": 0.80390625, "grad_norm": 0.15511289238929749, "learning_rate": 9.381078498968374e-05, "loss": 1.7605, "step": 8232 }, { "epoch": 0.80400390625, "grad_norm": 0.16047821938991547, "learning_rate": 9.376863233163774e-05, "loss": 1.73, "step": 8233 }, { "epoch": 0.8041015625, "grad_norm": 0.14889119565486908, "learning_rate": 9.372649777699985e-05, "loss": 1.7674, "step": 8234 }, { "epoch": 0.80419921875, "grad_norm": 0.16824239492416382, "learning_rate": 9.368438132997891e-05, "loss": 1.7558, "step": 8235 }, { "epoch": 0.804296875, "grad_norm": 0.1676420420408249, "learning_rate": 9.364228299478191e-05, "loss": 1.7955, "step": 8236 }, { "epoch": 0.80439453125, "grad_norm": 0.15610766410827637, "learning_rate": 9.360020277561417e-05, "loss": 1.778, "step": 8237 }, { "epoch": 0.8044921875, "grad_norm": 0.15133769810199738, "learning_rate": 9.355814067667911e-05, "loss": 1.7685, "step": 8238 }, { "epoch": 0.80458984375, "grad_norm": 0.14806745946407318, "learning_rate": 9.351609670217837e-05, "loss": 1.7578, "step": 8239 }, { "epoch": 0.8046875, "grad_norm": 0.15249191224575043, "learning_rate": 9.347407085631176e-05, "loss": 1.7329, "step": 8240 }, { "epoch": 0.80478515625, "grad_norm": 0.15358677506446838, "learning_rate": 9.343206314327729e-05, "loss": 1.7535, "step": 8241 }, { "epoch": 0.8048828125, "grad_norm": 0.15819285809993744, "learning_rate": 9.339007356727125e-05, "loss": 1.79, "step": 8242 }, { "epoch": 0.80498046875, "grad_norm": 0.1476673185825348, "learning_rate": 9.334810213248785e-05, "loss": 1.7754, "step": 8243 }, { "epoch": 0.805078125, "grad_norm": 0.15774017572402954, "learning_rate": 9.33061488431198e-05, "loss": 1.7593, "step": 8244 }, { "epoch": 0.80517578125, "grad_norm": 0.15487153828144073, "learning_rate": 9.326421370335778e-05, "loss": 1.7268, "step": 8245 }, { "epoch": 0.8052734375, "grad_norm": 0.16074545681476593, "learning_rate": 9.322229671739079e-05, "loss": 1.7525, "step": 8246 }, { "epoch": 0.80537109375, "grad_norm": 0.1488671898841858, "learning_rate": 9.318039788940594e-05, "loss": 1.7477, "step": 8247 }, { "epoch": 0.80546875, "grad_norm": 0.18319620192050934, "learning_rate": 9.313851722358857e-05, "loss": 1.8111, "step": 8248 }, { "epoch": 0.80556640625, "grad_norm": 0.17238126695156097, "learning_rate": 9.309665472412227e-05, "loss": 1.7828, "step": 8249 }, { "epoch": 0.8056640625, "grad_norm": 0.1798265129327774, "learning_rate": 9.305481039518854e-05, "loss": 1.7824, "step": 8250 }, { "epoch": 0.80576171875, "grad_norm": 0.20586949586868286, "learning_rate": 9.301298424096738e-05, "loss": 1.7835, "step": 8251 }, { "epoch": 0.805859375, "grad_norm": 0.19937917590141296, "learning_rate": 9.297117626563687e-05, "loss": 1.7993, "step": 8252 }, { "epoch": 0.80595703125, "grad_norm": 0.19686391949653625, "learning_rate": 9.292938647337318e-05, "loss": 1.7706, "step": 8253 }, { "epoch": 0.8060546875, "grad_norm": 0.18577376008033752, "learning_rate": 9.288761486835078e-05, 
"loss": 1.8271, "step": 8254 }, { "epoch": 0.80615234375, "grad_norm": 0.2155599594116211, "learning_rate": 9.284586145474228e-05, "loss": 1.7778, "step": 8255 }, { "epoch": 0.80625, "grad_norm": 0.1662072092294693, "learning_rate": 9.280412623671855e-05, "loss": 1.7636, "step": 8256 }, { "epoch": 0.80634765625, "grad_norm": 0.16998130083084106, "learning_rate": 9.276240921844845e-05, "loss": 1.7414, "step": 8257 }, { "epoch": 0.8064453125, "grad_norm": 0.19075410068035126, "learning_rate": 9.272071040409919e-05, "loss": 1.7963, "step": 8258 }, { "epoch": 0.80654296875, "grad_norm": 0.1711096316576004, "learning_rate": 9.26790297978361e-05, "loss": 1.7599, "step": 8259 }, { "epoch": 0.806640625, "grad_norm": 0.1745196431875229, "learning_rate": 9.263736740382273e-05, "loss": 1.8052, "step": 8260 }, { "epoch": 0.80673828125, "grad_norm": 0.17694589495658875, "learning_rate": 9.259572322622072e-05, "loss": 1.7444, "step": 8261 }, { "epoch": 0.8068359375, "grad_norm": 0.17240089178085327, "learning_rate": 9.255409726919007e-05, "loss": 1.7166, "step": 8262 }, { "epoch": 0.80693359375, "grad_norm": 0.17992395162582397, "learning_rate": 9.251248953688882e-05, "loss": 1.7398, "step": 8263 }, { "epoch": 0.80703125, "grad_norm": 0.17324623465538025, "learning_rate": 9.247090003347308e-05, "loss": 1.7785, "step": 8264 }, { "epoch": 0.80712890625, "grad_norm": 0.1614452600479126, "learning_rate": 9.242932876309739e-05, "loss": 1.7766, "step": 8265 }, { "epoch": 0.8072265625, "grad_norm": 0.14612555503845215, "learning_rate": 9.238777572991431e-05, "loss": 1.7812, "step": 8266 }, { "epoch": 0.80732421875, "grad_norm": 0.15819239616394043, "learning_rate": 9.234624093807467e-05, "loss": 1.7899, "step": 8267 }, { "epoch": 0.807421875, "grad_norm": 0.14068304002285004, "learning_rate": 9.230472439172738e-05, "loss": 1.797, "step": 8268 }, { "epoch": 0.80751953125, "grad_norm": 0.14454112946987152, "learning_rate": 9.226322609501956e-05, "loss": 1.7535, "step": 8269 }, { "epoch": 0.8076171875, "grad_norm": 0.16472674906253815, "learning_rate": 9.222174605209663e-05, "loss": 1.7729, "step": 8270 }, { "epoch": 0.80771484375, "grad_norm": 0.15428626537322998, "learning_rate": 9.218028426710194e-05, "loss": 1.7689, "step": 8271 }, { "epoch": 0.8078125, "grad_norm": 0.16885636746883392, "learning_rate": 9.21388407441772e-05, "loss": 1.764, "step": 8272 }, { "epoch": 0.80791015625, "grad_norm": 0.14385245740413666, "learning_rate": 9.20974154874623e-05, "loss": 1.714, "step": 8273 }, { "epoch": 0.8080078125, "grad_norm": 0.16081559658050537, "learning_rate": 9.205600850109519e-05, "loss": 1.7597, "step": 8274 }, { "epoch": 0.80810546875, "grad_norm": 0.15406763553619385, "learning_rate": 9.201461978921212e-05, "loss": 1.7532, "step": 8275 }, { "epoch": 0.808203125, "grad_norm": 0.16237413883209229, "learning_rate": 9.197324935594743e-05, "loss": 1.7726, "step": 8276 }, { "epoch": 0.80830078125, "grad_norm": 0.1691114902496338, "learning_rate": 9.193189720543367e-05, "loss": 1.7737, "step": 8277 }, { "epoch": 0.8083984375, "grad_norm": 0.1370273232460022, "learning_rate": 9.189056334180151e-05, "loss": 1.7632, "step": 8278 }, { "epoch": 0.80849609375, "grad_norm": 0.1829172521829605, "learning_rate": 9.184924776917988e-05, "loss": 1.7983, "step": 8279 }, { "epoch": 0.80859375, "grad_norm": 0.1694648414850235, "learning_rate": 9.180795049169583e-05, "loss": 1.7423, "step": 8280 }, { "epoch": 0.80869140625, "grad_norm": 0.16438308358192444, "learning_rate": 9.176667151347459e-05, "loss": 1.7452, "step": 8281 }, { 
"epoch": 0.8087890625, "grad_norm": 0.1778310388326645, "learning_rate": 9.172541083863955e-05, "loss": 1.792, "step": 8282 }, { "epoch": 0.80888671875, "grad_norm": 0.2031765878200531, "learning_rate": 9.168416847131231e-05, "loss": 1.7704, "step": 8283 }, { "epoch": 0.808984375, "grad_norm": 0.16840870678424835, "learning_rate": 9.164294441561266e-05, "loss": 1.7419, "step": 8284 }, { "epoch": 0.80908203125, "grad_norm": 0.21077099442481995, "learning_rate": 9.160173867565841e-05, "loss": 1.7638, "step": 8285 }, { "epoch": 0.8091796875, "grad_norm": 0.16832222044467926, "learning_rate": 9.156055125556572e-05, "loss": 1.7617, "step": 8286 }, { "epoch": 0.80927734375, "grad_norm": 0.14836810529232025, "learning_rate": 9.15193821594488e-05, "loss": 1.7783, "step": 8287 }, { "epoch": 0.809375, "grad_norm": 0.17416007816791534, "learning_rate": 9.147823139142012e-05, "loss": 1.7342, "step": 8288 }, { "epoch": 0.80947265625, "grad_norm": 0.14421667158603668, "learning_rate": 9.143709895559029e-05, "loss": 1.7747, "step": 8289 }, { "epoch": 0.8095703125, "grad_norm": 0.15789243578910828, "learning_rate": 9.139598485606804e-05, "loss": 1.7522, "step": 8290 }, { "epoch": 0.80966796875, "grad_norm": 0.1616830974817276, "learning_rate": 9.135488909696038e-05, "loss": 1.8028, "step": 8291 }, { "epoch": 0.809765625, "grad_norm": 0.1527157723903656, "learning_rate": 9.131381168237227e-05, "loss": 1.753, "step": 8292 }, { "epoch": 0.80986328125, "grad_norm": 0.15609319508075714, "learning_rate": 9.127275261640702e-05, "loss": 1.7788, "step": 8293 }, { "epoch": 0.8099609375, "grad_norm": 0.1495826244354248, "learning_rate": 9.123171190316623e-05, "loss": 1.7671, "step": 8294 }, { "epoch": 0.81005859375, "grad_norm": 0.15772373974323273, "learning_rate": 9.11906895467493e-05, "loss": 1.7269, "step": 8295 }, { "epoch": 0.81015625, "grad_norm": 0.17248567938804626, "learning_rate": 9.114968555125407e-05, "loss": 1.7921, "step": 8296 }, { "epoch": 0.81025390625, "grad_norm": 0.15786801278591156, "learning_rate": 9.110869992077654e-05, "loss": 1.777, "step": 8297 }, { "epoch": 0.8103515625, "grad_norm": 0.1588079333305359, "learning_rate": 9.106773265941078e-05, "loss": 1.7736, "step": 8298 }, { "epoch": 0.81044921875, "grad_norm": 0.1548602283000946, "learning_rate": 9.102678377124898e-05, "loss": 1.8043, "step": 8299 }, { "epoch": 0.810546875, "grad_norm": 0.15719951689243317, "learning_rate": 9.098585326038158e-05, "loss": 1.7605, "step": 8300 }, { "epoch": 0.81064453125, "grad_norm": 0.1649072915315628, "learning_rate": 9.094494113089732e-05, "loss": 1.7494, "step": 8301 }, { "epoch": 0.8107421875, "grad_norm": 0.14217153191566467, "learning_rate": 9.090404738688281e-05, "loss": 1.788, "step": 8302 }, { "epoch": 0.81083984375, "grad_norm": 0.16541776061058044, "learning_rate": 9.086317203242303e-05, "loss": 1.7654, "step": 8303 }, { "epoch": 0.8109375, "grad_norm": 0.14846445620059967, "learning_rate": 9.082231507160104e-05, "loss": 1.7689, "step": 8304 }, { "epoch": 0.81103515625, "grad_norm": 0.1556052267551422, "learning_rate": 9.078147650849816e-05, "loss": 1.7426, "step": 8305 }, { "epoch": 0.8111328125, "grad_norm": 0.17227458953857422, "learning_rate": 9.07406563471937e-05, "loss": 1.7069, "step": 8306 }, { "epoch": 0.81123046875, "grad_norm": 0.15408748388290405, "learning_rate": 9.069985459176524e-05, "loss": 1.7924, "step": 8307 }, { "epoch": 0.811328125, "grad_norm": 0.1750294715166092, "learning_rate": 9.065907124628865e-05, "loss": 1.8001, "step": 8308 }, { "epoch": 0.81142578125, 
"grad_norm": 0.1639668494462967, "learning_rate": 9.061830631483769e-05, "loss": 1.7698, "step": 8309 }, { "epoch": 0.8115234375, "grad_norm": 0.17342323064804077, "learning_rate": 9.057755980148443e-05, "loss": 1.736, "step": 8310 }, { "epoch": 0.81162109375, "grad_norm": 0.15612269937992096, "learning_rate": 9.053683171029914e-05, "loss": 1.7353, "step": 8311 }, { "epoch": 0.81171875, "grad_norm": 0.17296473681926727, "learning_rate": 9.049612204535018e-05, "loss": 1.7735, "step": 8312 }, { "epoch": 0.81181640625, "grad_norm": 0.14164748787879944, "learning_rate": 9.045543081070406e-05, "loss": 1.7446, "step": 8313 }, { "epoch": 0.8119140625, "grad_norm": 0.16752690076828003, "learning_rate": 9.04147580104254e-05, "loss": 1.7626, "step": 8314 }, { "epoch": 0.81201171875, "grad_norm": 0.14894674718379974, "learning_rate": 9.037410364857726e-05, "loss": 1.6989, "step": 8315 }, { "epoch": 0.812109375, "grad_norm": 0.18001200258731842, "learning_rate": 9.033346772922048e-05, "loss": 1.7015, "step": 8316 }, { "epoch": 0.81220703125, "grad_norm": 0.16753733158111572, "learning_rate": 9.029285025641426e-05, "loss": 1.7868, "step": 8317 }, { "epoch": 0.8123046875, "grad_norm": 0.14113029837608337, "learning_rate": 9.025225123421593e-05, "loss": 1.7224, "step": 8318 }, { "epoch": 0.81240234375, "grad_norm": 0.2000410109758377, "learning_rate": 9.021167066668107e-05, "loss": 1.742, "step": 8319 }, { "epoch": 0.8125, "grad_norm": 0.15376614034175873, "learning_rate": 9.017110855786317e-05, "loss": 1.7245, "step": 8320 }, { "epoch": 0.81259765625, "grad_norm": 0.1504804939031601, "learning_rate": 9.013056491181399e-05, "loss": 1.7609, "step": 8321 }, { "epoch": 0.8126953125, "grad_norm": 0.20194365084171295, "learning_rate": 9.009003973258373e-05, "loss": 1.7832, "step": 8322 }, { "epoch": 0.81279296875, "grad_norm": 0.15205655992031097, "learning_rate": 9.004953302422029e-05, "loss": 1.7487, "step": 8323 }, { "epoch": 0.812890625, "grad_norm": 0.17037037014961243, "learning_rate": 9.000904479077e-05, "loss": 1.7384, "step": 8324 }, { "epoch": 0.81298828125, "grad_norm": 0.1496766209602356, "learning_rate": 8.996857503627722e-05, "loss": 1.7472, "step": 8325 }, { "epoch": 0.8130859375, "grad_norm": 0.15858812630176544, "learning_rate": 8.992812376478466e-05, "loss": 1.7445, "step": 8326 }, { "epoch": 0.81318359375, "grad_norm": 0.16192281246185303, "learning_rate": 8.988769098033286e-05, "loss": 1.7889, "step": 8327 }, { "epoch": 0.81328125, "grad_norm": 0.15642178058624268, "learning_rate": 8.984727668696075e-05, "loss": 1.7575, "step": 8328 }, { "epoch": 0.81337890625, "grad_norm": 0.16848362982273102, "learning_rate": 8.98068808887055e-05, "loss": 1.726, "step": 8329 }, { "epoch": 0.8134765625, "grad_norm": 0.14426805078983307, "learning_rate": 8.976650358960211e-05, "loss": 1.788, "step": 8330 }, { "epoch": 0.81357421875, "grad_norm": 0.1498468518257141, "learning_rate": 8.972614479368404e-05, "loss": 1.7801, "step": 8331 }, { "epoch": 0.813671875, "grad_norm": 0.16322986781597137, "learning_rate": 8.968580450498267e-05, "loss": 1.7526, "step": 8332 }, { "epoch": 0.81376953125, "grad_norm": 0.16089117527008057, "learning_rate": 8.964548272752779e-05, "loss": 1.8002, "step": 8333 }, { "epoch": 0.8138671875, "grad_norm": 0.16589125990867615, "learning_rate": 8.960517946534704e-05, "loss": 1.7303, "step": 8334 }, { "epoch": 0.81396484375, "grad_norm": 0.20223082602024078, "learning_rate": 8.956489472246632e-05, "loss": 1.8201, "step": 8335 }, { "epoch": 0.8140625, "grad_norm": 0.14411646127700806, 
"learning_rate": 8.952462850290993e-05, "loss": 1.7591, "step": 8336 }, { "epoch": 0.81416015625, "grad_norm": 0.16750937700271606, "learning_rate": 8.948438081069996e-05, "loss": 1.7752, "step": 8337 }, { "epoch": 0.8142578125, "grad_norm": 0.15201875567436218, "learning_rate": 8.944415164985681e-05, "loss": 1.7974, "step": 8338 }, { "epoch": 0.81435546875, "grad_norm": 0.15292836725711823, "learning_rate": 8.940394102439903e-05, "loss": 1.752, "step": 8339 }, { "epoch": 0.814453125, "grad_norm": 0.15420450270175934, "learning_rate": 8.936374893834335e-05, "loss": 1.76, "step": 8340 }, { "epoch": 0.81455078125, "grad_norm": 0.1460917592048645, "learning_rate": 8.932357539570454e-05, "loss": 1.7455, "step": 8341 }, { "epoch": 0.8146484375, "grad_norm": 0.1694454550743103, "learning_rate": 8.928342040049554e-05, "loss": 1.7815, "step": 8342 }, { "epoch": 0.81474609375, "grad_norm": 0.15859417617321014, "learning_rate": 8.924328395672762e-05, "loss": 1.7796, "step": 8343 }, { "epoch": 0.81484375, "grad_norm": 0.16326412558555603, "learning_rate": 8.920316606840997e-05, "loss": 1.7158, "step": 8344 }, { "epoch": 0.81494140625, "grad_norm": 0.16647277772426605, "learning_rate": 8.916306673954996e-05, "loss": 1.7795, "step": 8345 }, { "epoch": 0.8150390625, "grad_norm": 0.1539127081632614, "learning_rate": 8.912298597415325e-05, "loss": 1.7729, "step": 8346 }, { "epoch": 0.81513671875, "grad_norm": 0.14485040307044983, "learning_rate": 8.908292377622359e-05, "loss": 1.7887, "step": 8347 }, { "epoch": 0.815234375, "grad_norm": 0.15973401069641113, "learning_rate": 8.904288014976268e-05, "loss": 1.7483, "step": 8348 }, { "epoch": 0.81533203125, "grad_norm": 0.162570521235466, "learning_rate": 8.90028550987706e-05, "loss": 1.7801, "step": 8349 }, { "epoch": 0.8154296875, "grad_norm": 0.1585955172777176, "learning_rate": 8.896284862724556e-05, "loss": 1.7138, "step": 8350 }, { "epoch": 0.81552734375, "grad_norm": 0.17353785037994385, "learning_rate": 8.892286073918379e-05, "loss": 1.7499, "step": 8351 }, { "epoch": 0.815625, "grad_norm": 0.19171768426895142, "learning_rate": 8.888289143857973e-05, "loss": 1.7543, "step": 8352 }, { "epoch": 0.81572265625, "grad_norm": 0.17081120610237122, "learning_rate": 8.884294072942595e-05, "loss": 1.7521, "step": 8353 }, { "epoch": 0.8158203125, "grad_norm": 0.1563846617937088, "learning_rate": 8.880300861571326e-05, "loss": 1.7695, "step": 8354 }, { "epoch": 0.81591796875, "grad_norm": 0.15054002404212952, "learning_rate": 8.876309510143038e-05, "loss": 1.7808, "step": 8355 }, { "epoch": 0.816015625, "grad_norm": 0.16492918133735657, "learning_rate": 8.872320019056435e-05, "loss": 1.7529, "step": 8356 }, { "epoch": 0.81611328125, "grad_norm": 0.16009050607681274, "learning_rate": 8.868332388710045e-05, "loss": 1.789, "step": 8357 }, { "epoch": 0.8162109375, "grad_norm": 0.18431679904460907, "learning_rate": 8.864346619502185e-05, "loss": 1.7636, "step": 8358 }, { "epoch": 0.81630859375, "grad_norm": 0.16306042671203613, "learning_rate": 8.860362711830999e-05, "loss": 1.7541, "step": 8359 }, { "epoch": 0.81640625, "grad_norm": 0.15795700252056122, "learning_rate": 8.856380666094446e-05, "loss": 1.7742, "step": 8360 }, { "epoch": 0.81650390625, "grad_norm": 0.16943824291229248, "learning_rate": 8.8524004826903e-05, "loss": 1.8224, "step": 8361 }, { "epoch": 0.8166015625, "grad_norm": 0.17146413028240204, "learning_rate": 8.848422162016139e-05, "loss": 1.7409, "step": 8362 }, { "epoch": 0.81669921875, "grad_norm": 0.1564481407403946, "learning_rate": 
8.844445704469363e-05, "loss": 1.7269, "step": 8363 }, { "epoch": 0.816796875, "grad_norm": 0.17175661027431488, "learning_rate": 8.840471110447195e-05, "loss": 1.7991, "step": 8364 }, { "epoch": 0.81689453125, "grad_norm": 0.17451311647891998, "learning_rate": 8.836498380346653e-05, "loss": 1.8006, "step": 8365 }, { "epoch": 0.8169921875, "grad_norm": 0.1590086668729782, "learning_rate": 8.832527514564579e-05, "loss": 1.7706, "step": 8366 }, { "epoch": 0.81708984375, "grad_norm": 0.17662088572978973, "learning_rate": 8.828558513497622e-05, "loss": 1.7648, "step": 8367 }, { "epoch": 0.8171875, "grad_norm": 0.18401403725147247, "learning_rate": 8.824591377542266e-05, "loss": 1.7545, "step": 8368 }, { "epoch": 0.81728515625, "grad_norm": 0.16471339762210846, "learning_rate": 8.820626107094773e-05, "loss": 1.7591, "step": 8369 }, { "epoch": 0.8173828125, "grad_norm": 0.18486930429935455, "learning_rate": 8.816662702551245e-05, "loss": 1.7311, "step": 8370 }, { "epoch": 0.81748046875, "grad_norm": 0.15122559666633606, "learning_rate": 8.812701164307605e-05, "loss": 1.7724, "step": 8371 }, { "epoch": 0.817578125, "grad_norm": 0.1851777583360672, "learning_rate": 8.808741492759556e-05, "loss": 1.8022, "step": 8372 }, { "epoch": 0.81767578125, "grad_norm": 0.15879541635513306, "learning_rate": 8.804783688302643e-05, "loss": 1.7143, "step": 8373 }, { "epoch": 0.8177734375, "grad_norm": 0.14798934757709503, "learning_rate": 8.800827751332217e-05, "loss": 1.7838, "step": 8374 }, { "epoch": 0.81787109375, "grad_norm": 0.16698406636714935, "learning_rate": 8.79687368224344e-05, "loss": 1.7926, "step": 8375 }, { "epoch": 0.81796875, "grad_norm": 0.15668557584285736, "learning_rate": 8.792921481431284e-05, "loss": 1.7956, "step": 8376 }, { "epoch": 0.81806640625, "grad_norm": 0.15291263163089752, "learning_rate": 8.788971149290536e-05, "loss": 1.7674, "step": 8377 }, { "epoch": 0.8181640625, "grad_norm": 0.17963092029094696, "learning_rate": 8.785022686215822e-05, "loss": 1.752, "step": 8378 }, { "epoch": 0.81826171875, "grad_norm": 0.1435219645500183, "learning_rate": 8.781076092601531e-05, "loss": 1.7123, "step": 8379 }, { "epoch": 0.818359375, "grad_norm": 0.1852775365114212, "learning_rate": 8.777131368841901e-05, "loss": 1.7361, "step": 8380 }, { "epoch": 0.81845703125, "grad_norm": 0.166164830327034, "learning_rate": 8.773188515330984e-05, "loss": 1.7643, "step": 8381 }, { "epoch": 0.8185546875, "grad_norm": 0.1496143937110901, "learning_rate": 8.769247532462633e-05, "loss": 1.7482, "step": 8382 }, { "epoch": 0.81865234375, "grad_norm": 0.1749107390642166, "learning_rate": 8.765308420630505e-05, "loss": 1.778, "step": 8383 }, { "epoch": 0.81875, "grad_norm": 0.1457228809595108, "learning_rate": 8.761371180228091e-05, "loss": 1.7677, "step": 8384 }, { "epoch": 0.81884765625, "grad_norm": 0.15485554933547974, "learning_rate": 8.757435811648696e-05, "loss": 1.7279, "step": 8385 }, { "epoch": 0.8189453125, "grad_norm": 0.174740269780159, "learning_rate": 8.753502315285411e-05, "loss": 1.7497, "step": 8386 }, { "epoch": 0.81904296875, "grad_norm": 0.15298032760620117, "learning_rate": 8.749570691531166e-05, "loss": 1.7386, "step": 8387 }, { "epoch": 0.819140625, "grad_norm": 0.18783634901046753, "learning_rate": 8.745640940778698e-05, "loss": 1.8286, "step": 8388 }, { "epoch": 0.81923828125, "grad_norm": 0.15853796899318695, "learning_rate": 8.74171306342056e-05, "loss": 1.7672, "step": 8389 }, { "epoch": 0.8193359375, "grad_norm": 0.14881697297096252, "learning_rate": 8.737787059849092e-05, "loss": 
1.7288, "step": 8390 }, { "epoch": 0.81943359375, "grad_norm": 0.179273784160614, "learning_rate": 8.733862930456474e-05, "loss": 1.769, "step": 8391 }, { "epoch": 0.81953125, "grad_norm": 0.16236259043216705, "learning_rate": 8.729940675634709e-05, "loss": 1.7447, "step": 8392 }, { "epoch": 0.81962890625, "grad_norm": 0.16828756034374237, "learning_rate": 8.726020295775575e-05, "loss": 1.735, "step": 8393 }, { "epoch": 0.8197265625, "grad_norm": 0.19058358669281006, "learning_rate": 8.72210179127069e-05, "loss": 1.7773, "step": 8394 }, { "epoch": 0.81982421875, "grad_norm": 0.1357003003358841, "learning_rate": 8.718185162511483e-05, "loss": 1.7661, "step": 8395 }, { "epoch": 0.819921875, "grad_norm": 0.18297109007835388, "learning_rate": 8.714270409889189e-05, "loss": 1.7821, "step": 8396 }, { "epoch": 0.82001953125, "grad_norm": 0.1431070864200592, "learning_rate": 8.710357533794849e-05, "loss": 1.7829, "step": 8397 }, { "epoch": 0.8201171875, "grad_norm": 0.17153435945510864, "learning_rate": 8.706446534619328e-05, "loss": 1.7738, "step": 8398 }, { "epoch": 0.82021484375, "grad_norm": 0.15614938735961914, "learning_rate": 8.702537412753311e-05, "loss": 1.7651, "step": 8399 }, { "epoch": 0.8203125, "grad_norm": 0.15831902623176575, "learning_rate": 8.69863016858727e-05, "loss": 1.7393, "step": 8400 }, { "epoch": 0.82041015625, "grad_norm": 0.1600690633058548, "learning_rate": 8.694724802511511e-05, "loss": 1.7314, "step": 8401 }, { "epoch": 0.8205078125, "grad_norm": 0.1663656085729599, "learning_rate": 8.690821314916145e-05, "loss": 1.7691, "step": 8402 }, { "epoch": 0.82060546875, "grad_norm": 0.15876898169517517, "learning_rate": 8.686919706191098e-05, "loss": 1.7846, "step": 8403 }, { "epoch": 0.820703125, "grad_norm": 0.15833954513072968, "learning_rate": 8.6830199767261e-05, "loss": 1.7197, "step": 8404 }, { "epoch": 0.82080078125, "grad_norm": 0.15698617696762085, "learning_rate": 8.679122126910696e-05, "loss": 1.7862, "step": 8405 }, { "epoch": 0.8208984375, "grad_norm": 0.15046310424804688, "learning_rate": 8.675226157134266e-05, "loss": 1.7478, "step": 8406 }, { "epoch": 0.82099609375, "grad_norm": 0.1650071144104004, "learning_rate": 8.671332067785965e-05, "loss": 1.7751, "step": 8407 }, { "epoch": 0.82109375, "grad_norm": 0.16982229053974152, "learning_rate": 8.667439859254783e-05, "loss": 1.7844, "step": 8408 }, { "epoch": 0.82119140625, "grad_norm": 0.16051845252513885, "learning_rate": 8.663549531929518e-05, "loss": 1.7381, "step": 8409 }, { "epoch": 0.8212890625, "grad_norm": 0.16729596257209778, "learning_rate": 8.659661086198783e-05, "loss": 1.7593, "step": 8410 }, { "epoch": 0.82138671875, "grad_norm": 0.17896196246147156, "learning_rate": 8.655774522450987e-05, "loss": 1.7623, "step": 8411 }, { "epoch": 0.821484375, "grad_norm": 0.15487608313560486, "learning_rate": 8.651889841074374e-05, "loss": 1.7735, "step": 8412 }, { "epoch": 0.82158203125, "grad_norm": 0.15870285034179688, "learning_rate": 8.648007042456993e-05, "loss": 1.8011, "step": 8413 }, { "epoch": 0.8216796875, "grad_norm": 0.15496724843978882, "learning_rate": 8.64412612698669e-05, "loss": 1.747, "step": 8414 }, { "epoch": 0.82177734375, "grad_norm": 0.1451869159936905, "learning_rate": 8.64024709505114e-05, "loss": 1.7816, "step": 8415 }, { "epoch": 0.821875, "grad_norm": 0.15706181526184082, "learning_rate": 8.636369947037818e-05, "loss": 1.7463, "step": 8416 }, { "epoch": 0.82197265625, "grad_norm": 0.1472255140542984, "learning_rate": 8.632494683334032e-05, "loss": 1.7879, "step": 8417 }, { "epoch": 
0.8220703125, "grad_norm": 0.14138470590114594, "learning_rate": 8.628621304326861e-05, "loss": 1.7744, "step": 8418 }, { "epoch": 0.82216796875, "grad_norm": 0.15681752562522888, "learning_rate": 8.624749810403246e-05, "loss": 1.77, "step": 8419 }, { "epoch": 0.822265625, "grad_norm": 0.14971685409545898, "learning_rate": 8.62088020194991e-05, "loss": 1.7279, "step": 8420 }, { "epoch": 0.82236328125, "grad_norm": 0.1646648794412613, "learning_rate": 8.61701247935338e-05, "loss": 1.7722, "step": 8421 }, { "epoch": 0.8224609375, "grad_norm": 0.14597146213054657, "learning_rate": 8.613146643000017e-05, "loss": 1.7234, "step": 8422 }, { "epoch": 0.82255859375, "grad_norm": 0.14998579025268555, "learning_rate": 8.60928269327598e-05, "loss": 1.7858, "step": 8423 }, { "epoch": 0.82265625, "grad_norm": 0.17277736961841583, "learning_rate": 8.605420630567254e-05, "loss": 1.7715, "step": 8424 }, { "epoch": 0.82275390625, "grad_norm": 0.1474238932132721, "learning_rate": 8.601560455259602e-05, "loss": 1.7787, "step": 8425 }, { "epoch": 0.8228515625, "grad_norm": 0.16638347506523132, "learning_rate": 8.597702167738641e-05, "loss": 1.7652, "step": 8426 }, { "epoch": 0.82294921875, "grad_norm": 0.15399546921253204, "learning_rate": 8.593845768389777e-05, "loss": 1.7992, "step": 8427 }, { "epoch": 0.823046875, "grad_norm": 0.1469046175479889, "learning_rate": 8.589991257598231e-05, "loss": 1.8078, "step": 8428 }, { "epoch": 0.82314453125, "grad_norm": 0.1573394536972046, "learning_rate": 8.586138635749024e-05, "loss": 1.7857, "step": 8429 }, { "epoch": 0.8232421875, "grad_norm": 0.15053480863571167, "learning_rate": 8.58228790322701e-05, "loss": 1.7587, "step": 8430 }, { "epoch": 0.82333984375, "grad_norm": 0.15259774029254913, "learning_rate": 8.57843906041684e-05, "loss": 1.6979, "step": 8431 }, { "epoch": 0.8234375, "grad_norm": 0.17417371273040771, "learning_rate": 8.57459210770297e-05, "loss": 1.765, "step": 8432 }, { "epoch": 0.82353515625, "grad_norm": 0.14357277750968933, "learning_rate": 8.570747045469687e-05, "loss": 1.7688, "step": 8433 }, { "epoch": 0.8236328125, "grad_norm": 0.17706722021102905, "learning_rate": 8.566903874101076e-05, "loss": 1.7996, "step": 8434 }, { "epoch": 0.82373046875, "grad_norm": 0.1508440524339676, "learning_rate": 8.563062593981042e-05, "loss": 1.7973, "step": 8435 }, { "epoch": 0.823828125, "grad_norm": 0.16880780458450317, "learning_rate": 8.559223205493282e-05, "loss": 1.748, "step": 8436 }, { "epoch": 0.82392578125, "grad_norm": 0.15065091848373413, "learning_rate": 8.55538570902133e-05, "loss": 1.748, "step": 8437 }, { "epoch": 0.8240234375, "grad_norm": 0.16556096076965332, "learning_rate": 8.551550104948508e-05, "loss": 1.723, "step": 8438 }, { "epoch": 0.82412109375, "grad_norm": 0.14696374535560608, "learning_rate": 8.547716393657956e-05, "loss": 1.7302, "step": 8439 }, { "epoch": 0.82421875, "grad_norm": 0.1390758901834488, "learning_rate": 8.54388457553264e-05, "loss": 1.7312, "step": 8440 }, { "epoch": 0.82431640625, "grad_norm": 0.15431326627731323, "learning_rate": 8.54005465095532e-05, "loss": 1.7702, "step": 8441 }, { "epoch": 0.8244140625, "grad_norm": 0.16889570653438568, "learning_rate": 8.536226620308574e-05, "loss": 1.7939, "step": 8442 }, { "epoch": 0.82451171875, "grad_norm": 0.15394927561283112, "learning_rate": 8.532400483974778e-05, "loss": 1.7469, "step": 8443 }, { "epoch": 0.824609375, "grad_norm": 0.15335671603679657, "learning_rate": 8.528576242336136e-05, "loss": 1.7653, "step": 8444 }, { "epoch": 0.82470703125, "grad_norm": 
0.14125685393810272, "learning_rate": 8.524753895774663e-05, "loss": 1.7743, "step": 8445 }, { "epoch": 0.8248046875, "grad_norm": 0.1555527150630951, "learning_rate": 8.520933444672161e-05, "loss": 1.8224, "step": 8446 }, { "epoch": 0.82490234375, "grad_norm": 0.1414199322462082, "learning_rate": 8.51711488941027e-05, "loss": 1.7712, "step": 8447 }, { "epoch": 0.825, "grad_norm": 0.13463635742664337, "learning_rate": 8.51329823037043e-05, "loss": 1.7865, "step": 8448 }, { "epoch": 0.82509765625, "grad_norm": 0.16051934659481049, "learning_rate": 8.509483467933895e-05, "loss": 1.7033, "step": 8449 }, { "epoch": 0.8251953125, "grad_norm": 0.14151304960250854, "learning_rate": 8.505670602481714e-05, "loss": 1.7416, "step": 8450 }, { "epoch": 0.82529296875, "grad_norm": 0.15918949246406555, "learning_rate": 8.501859634394768e-05, "loss": 1.7259, "step": 8451 }, { "epoch": 0.825390625, "grad_norm": 0.13954640924930573, "learning_rate": 8.498050564053734e-05, "loss": 1.7793, "step": 8452 }, { "epoch": 0.82548828125, "grad_norm": 0.14561396837234497, "learning_rate": 8.494243391839107e-05, "loss": 1.7416, "step": 8453 }, { "epoch": 0.8255859375, "grad_norm": 0.15425601601600647, "learning_rate": 8.490438118131188e-05, "loss": 1.7616, "step": 8454 }, { "epoch": 0.82568359375, "grad_norm": 0.13874293863773346, "learning_rate": 8.486634743310094e-05, "loss": 1.7696, "step": 8455 }, { "epoch": 0.82578125, "grad_norm": 0.16830553114414215, "learning_rate": 8.482833267755751e-05, "loss": 1.8068, "step": 8456 }, { "epoch": 0.82587890625, "grad_norm": 0.13877147436141968, "learning_rate": 8.479033691847881e-05, "loss": 1.7623, "step": 8457 }, { "epoch": 0.8259765625, "grad_norm": 0.15236462652683258, "learning_rate": 8.475236015966035e-05, "loss": 1.7683, "step": 8458 }, { "epoch": 0.82607421875, "grad_norm": 0.1528972089290619, "learning_rate": 8.471440240489569e-05, "loss": 1.7116, "step": 8459 }, { "epoch": 0.826171875, "grad_norm": 0.14276178181171417, "learning_rate": 8.467646365797642e-05, "loss": 1.7533, "step": 8460 }, { "epoch": 0.82626953125, "grad_norm": 0.14902713894844055, "learning_rate": 8.463854392269233e-05, "loss": 1.7684, "step": 8461 }, { "epoch": 0.8263671875, "grad_norm": 0.15813300013542175, "learning_rate": 8.460064320283123e-05, "loss": 1.7761, "step": 8462 }, { "epoch": 0.82646484375, "grad_norm": 0.16702793538570404, "learning_rate": 8.456276150217918e-05, "loss": 1.7498, "step": 8463 }, { "epoch": 0.8265625, "grad_norm": 0.17834414541721344, "learning_rate": 8.452489882452005e-05, "loss": 1.7556, "step": 8464 }, { "epoch": 0.82666015625, "grad_norm": 0.15532442927360535, "learning_rate": 8.448705517363608e-05, "loss": 1.7405, "step": 8465 }, { "epoch": 0.8267578125, "grad_norm": 0.16517142951488495, "learning_rate": 8.444923055330751e-05, "loss": 1.7568, "step": 8466 }, { "epoch": 0.82685546875, "grad_norm": 0.1669408231973648, "learning_rate": 8.441142496731266e-05, "loss": 1.7875, "step": 8467 }, { "epoch": 0.826953125, "grad_norm": 0.13819104433059692, "learning_rate": 8.437363841942803e-05, "loss": 1.7284, "step": 8468 }, { "epoch": 0.82705078125, "grad_norm": 0.16980837285518646, "learning_rate": 8.43358709134281e-05, "loss": 1.7515, "step": 8469 }, { "epoch": 0.8271484375, "grad_norm": 0.15128012001514435, "learning_rate": 8.429812245308558e-05, "loss": 1.7235, "step": 8470 }, { "epoch": 0.82724609375, "grad_norm": 0.14332225918769836, "learning_rate": 8.426039304217111e-05, "loss": 1.7728, "step": 8471 }, { "epoch": 0.82734375, "grad_norm": 0.13301967084407806, 
"learning_rate": 8.42226826844536e-05, "loss": 1.7542, "step": 8472 }, { "epoch": 0.82744140625, "grad_norm": 0.16014280915260315, "learning_rate": 8.418499138369992e-05, "loss": 1.7468, "step": 8473 }, { "epoch": 0.8275390625, "grad_norm": 0.15309569239616394, "learning_rate": 8.414731914367516e-05, "loss": 1.7556, "step": 8474 }, { "epoch": 0.82763671875, "grad_norm": 0.15653081238269806, "learning_rate": 8.41096659681424e-05, "loss": 1.7757, "step": 8475 }, { "epoch": 0.827734375, "grad_norm": 0.1739870309829712, "learning_rate": 8.407203186086287e-05, "loss": 1.7789, "step": 8476 }, { "epoch": 0.82783203125, "grad_norm": 0.14959277212619781, "learning_rate": 8.403441682559593e-05, "loss": 1.7617, "step": 8477 }, { "epoch": 0.8279296875, "grad_norm": 0.18382064998149872, "learning_rate": 8.399682086609892e-05, "loss": 1.8045, "step": 8478 }, { "epoch": 0.82802734375, "grad_norm": 0.15641963481903076, "learning_rate": 8.395924398612736e-05, "loss": 1.7523, "step": 8479 }, { "epoch": 0.828125, "grad_norm": 0.17453552782535553, "learning_rate": 8.392168618943485e-05, "loss": 1.7348, "step": 8480 }, { "epoch": 0.82822265625, "grad_norm": 0.16665078699588776, "learning_rate": 8.38841474797731e-05, "loss": 1.7405, "step": 8481 }, { "epoch": 0.8283203125, "grad_norm": 0.1649959683418274, "learning_rate": 8.38466278608919e-05, "loss": 1.7536, "step": 8482 }, { "epoch": 0.82841796875, "grad_norm": 0.17497532069683075, "learning_rate": 8.380912733653908e-05, "loss": 1.7587, "step": 8483 }, { "epoch": 0.828515625, "grad_norm": 0.1554289162158966, "learning_rate": 8.377164591046069e-05, "loss": 1.7484, "step": 8484 }, { "epoch": 0.82861328125, "grad_norm": 0.1688387542963028, "learning_rate": 8.373418358640072e-05, "loss": 1.7601, "step": 8485 }, { "epoch": 0.8287109375, "grad_norm": 0.16524885594844818, "learning_rate": 8.369674036810133e-05, "loss": 1.7888, "step": 8486 }, { "epoch": 0.82880859375, "grad_norm": 0.17288674414157867, "learning_rate": 8.365931625930278e-05, "loss": 1.7657, "step": 8487 }, { "epoch": 0.82890625, "grad_norm": 0.16294053196907043, "learning_rate": 8.362191126374341e-05, "loss": 1.7522, "step": 8488 }, { "epoch": 0.82900390625, "grad_norm": 0.16123893857002258, "learning_rate": 8.358452538515964e-05, "loss": 1.7783, "step": 8489 }, { "epoch": 0.8291015625, "grad_norm": 0.15825918316841125, "learning_rate": 8.354715862728601e-05, "loss": 1.7681, "step": 8490 }, { "epoch": 0.82919921875, "grad_norm": 0.15531764924526215, "learning_rate": 8.350981099385519e-05, "loss": 1.7769, "step": 8491 }, { "epoch": 0.829296875, "grad_norm": 0.15621967613697052, "learning_rate": 8.347248248859768e-05, "loss": 1.7455, "step": 8492 }, { "epoch": 0.82939453125, "grad_norm": 0.14952236413955688, "learning_rate": 8.343517311524243e-05, "loss": 1.7187, "step": 8493 }, { "epoch": 0.8294921875, "grad_norm": 0.15428759157657623, "learning_rate": 8.33978828775163e-05, "loss": 1.773, "step": 8494 }, { "epoch": 0.82958984375, "grad_norm": 0.1409579962491989, "learning_rate": 8.336061177914419e-05, "loss": 1.7926, "step": 8495 }, { "epoch": 0.8296875, "grad_norm": 0.1452881097793579, "learning_rate": 8.33233598238492e-05, "loss": 1.7496, "step": 8496 }, { "epoch": 0.82978515625, "grad_norm": 0.15713250637054443, "learning_rate": 8.328612701535249e-05, "loss": 1.8167, "step": 8497 }, { "epoch": 0.8298828125, "grad_norm": 0.14852602779865265, "learning_rate": 8.324891335737328e-05, "loss": 1.7608, "step": 8498 }, { "epoch": 0.82998046875, "grad_norm": 0.14997956156730652, "learning_rate": 
8.32117188536288e-05, "loss": 1.7624, "step": 8499 }, { "epoch": 0.830078125, "grad_norm": 0.15187768638134003, "learning_rate": 8.317454350783455e-05, "loss": 1.7794, "step": 8500 }, { "epoch": 0.83017578125, "grad_norm": 0.16927991807460785, "learning_rate": 8.313738732370397e-05, "loss": 1.7728, "step": 8501 }, { "epoch": 0.8302734375, "grad_norm": 0.14679668843746185, "learning_rate": 8.310025030494862e-05, "loss": 1.7736, "step": 8502 }, { "epoch": 0.83037109375, "grad_norm": 0.1658899039030075, "learning_rate": 8.306313245527822e-05, "loss": 1.7511, "step": 8503 }, { "epoch": 0.83046875, "grad_norm": 0.16163282096385956, "learning_rate": 8.302603377840046e-05, "loss": 1.7788, "step": 8504 }, { "epoch": 0.83056640625, "grad_norm": 0.16019085049629211, "learning_rate": 8.298895427802128e-05, "loss": 1.7559, "step": 8505 }, { "epoch": 0.8306640625, "grad_norm": 0.16204088926315308, "learning_rate": 8.295189395784438e-05, "loss": 1.8029, "step": 8506 }, { "epoch": 0.83076171875, "grad_norm": 0.14610053598880768, "learning_rate": 8.291485282157193e-05, "loss": 1.7499, "step": 8507 }, { "epoch": 0.830859375, "grad_norm": 0.15891225636005402, "learning_rate": 8.287783087290395e-05, "loss": 1.7751, "step": 8508 }, { "epoch": 0.83095703125, "grad_norm": 0.14721770584583282, "learning_rate": 8.284082811553864e-05, "loss": 1.8152, "step": 8509 }, { "epoch": 0.8310546875, "grad_norm": 0.15692001581192017, "learning_rate": 8.280384455317219e-05, "loss": 1.7367, "step": 8510 }, { "epoch": 0.83115234375, "grad_norm": 0.14958809316158295, "learning_rate": 8.276688018949899e-05, "loss": 1.7455, "step": 8511 }, { "epoch": 0.83125, "grad_norm": 0.16367430984973907, "learning_rate": 8.272993502821147e-05, "loss": 1.7711, "step": 8512 }, { "epoch": 0.83134765625, "grad_norm": 0.16750970482826233, "learning_rate": 8.269300907300003e-05, "loss": 1.7515, "step": 8513 }, { "epoch": 0.8314453125, "grad_norm": 0.15963725745677948, "learning_rate": 8.265610232755331e-05, "loss": 1.7628, "step": 8514 }, { "epoch": 0.83154296875, "grad_norm": 0.18168629705905914, "learning_rate": 8.26192147955579e-05, "loss": 1.7194, "step": 8515 }, { "epoch": 0.831640625, "grad_norm": 0.16153965890407562, "learning_rate": 8.258234648069865e-05, "loss": 1.7795, "step": 8516 }, { "epoch": 0.83173828125, "grad_norm": 0.15666508674621582, "learning_rate": 8.254549738665829e-05, "loss": 1.7417, "step": 8517 }, { "epoch": 0.8318359375, "grad_norm": 0.17527411878108978, "learning_rate": 8.250866751711777e-05, "loss": 1.7467, "step": 8518 }, { "epoch": 0.83193359375, "grad_norm": 0.16105444729328156, "learning_rate": 8.247185687575608e-05, "loss": 1.7531, "step": 8519 }, { "epoch": 0.83203125, "grad_norm": 0.1744052916765213, "learning_rate": 8.243506546625021e-05, "loss": 1.7515, "step": 8520 }, { "epoch": 0.83212890625, "grad_norm": 0.17629374563694, "learning_rate": 8.239829329227532e-05, "loss": 1.7915, "step": 8521 }, { "epoch": 0.8322265625, "grad_norm": 0.15314772725105286, "learning_rate": 8.236154035750462e-05, "loss": 1.8162, "step": 8522 }, { "epoch": 0.83232421875, "grad_norm": 0.15998722612857819, "learning_rate": 8.232480666560942e-05, "loss": 1.7695, "step": 8523 }, { "epoch": 0.832421875, "grad_norm": 0.15197938680648804, "learning_rate": 8.228809222025907e-05, "loss": 1.7757, "step": 8524 }, { "epoch": 0.83251953125, "grad_norm": 0.15860462188720703, "learning_rate": 8.225139702512107e-05, "loss": 1.742, "step": 8525 }, { "epoch": 0.8326171875, "grad_norm": 0.15002824366092682, "learning_rate": 8.221472108386094e-05, 
"loss": 1.8051, "step": 8526 }, { "epoch": 0.83271484375, "grad_norm": 0.1461990475654602, "learning_rate": 8.21780644001422e-05, "loss": 1.7587, "step": 8527 }, { "epoch": 0.8328125, "grad_norm": 0.15719151496887207, "learning_rate": 8.214142697762657e-05, "loss": 1.7566, "step": 8528 }, { "epoch": 0.83291015625, "grad_norm": 0.15825308859348297, "learning_rate": 8.210480881997382e-05, "loss": 1.7561, "step": 8529 }, { "epoch": 0.8330078125, "grad_norm": 0.14952531456947327, "learning_rate": 8.206820993084177e-05, "loss": 1.7884, "step": 8530 }, { "epoch": 0.83310546875, "grad_norm": 0.14304599165916443, "learning_rate": 8.203163031388633e-05, "loss": 1.7909, "step": 8531 }, { "epoch": 0.833203125, "grad_norm": 0.16494962573051453, "learning_rate": 8.199506997276146e-05, "loss": 1.8184, "step": 8532 }, { "epoch": 0.83330078125, "grad_norm": 0.14188691973686218, "learning_rate": 8.195852891111932e-05, "loss": 1.7267, "step": 8533 }, { "epoch": 0.8333984375, "grad_norm": 0.16217146813869476, "learning_rate": 8.192200713260986e-05, "loss": 1.765, "step": 8534 }, { "epoch": 0.83349609375, "grad_norm": 0.1558942198753357, "learning_rate": 8.188550464088138e-05, "loss": 1.7849, "step": 8535 }, { "epoch": 0.83359375, "grad_norm": 0.17038816213607788, "learning_rate": 8.184902143958013e-05, "loss": 1.788, "step": 8536 }, { "epoch": 0.83369140625, "grad_norm": 0.14825069904327393, "learning_rate": 8.18125575323505e-05, "loss": 1.7454, "step": 8537 }, { "epoch": 0.8337890625, "grad_norm": 0.16248475015163422, "learning_rate": 8.177611292283487e-05, "loss": 1.7904, "step": 8538 }, { "epoch": 0.83388671875, "grad_norm": 0.1667545884847641, "learning_rate": 8.173968761467378e-05, "loss": 1.795, "step": 8539 }, { "epoch": 0.833984375, "grad_norm": 0.1498890072107315, "learning_rate": 8.170328161150578e-05, "loss": 1.7722, "step": 8540 }, { "epoch": 0.83408203125, "grad_norm": 0.1526694893836975, "learning_rate": 8.166689491696745e-05, "loss": 1.7878, "step": 8541 }, { "epoch": 0.8341796875, "grad_norm": 0.16158267855644226, "learning_rate": 8.163052753469356e-05, "loss": 1.7875, "step": 8542 }, { "epoch": 0.83427734375, "grad_norm": 0.16205233335494995, "learning_rate": 8.159417946831685e-05, "loss": 1.775, "step": 8543 }, { "epoch": 0.834375, "grad_norm": 0.14739322662353516, "learning_rate": 8.155785072146821e-05, "loss": 1.8382, "step": 8544 }, { "epoch": 0.83447265625, "grad_norm": 0.15730011463165283, "learning_rate": 8.152154129777651e-05, "loss": 1.7943, "step": 8545 }, { "epoch": 0.8345703125, "grad_norm": 0.1416599601507187, "learning_rate": 8.148525120086879e-05, "loss": 1.8072, "step": 8546 }, { "epoch": 0.83466796875, "grad_norm": 0.15465784072875977, "learning_rate": 8.144898043437013e-05, "loss": 1.7336, "step": 8547 }, { "epoch": 0.834765625, "grad_norm": 0.16040630638599396, "learning_rate": 8.141272900190357e-05, "loss": 1.7667, "step": 8548 }, { "epoch": 0.83486328125, "grad_norm": 0.14462073147296906, "learning_rate": 8.137649690709035e-05, "loss": 1.7355, "step": 8549 }, { "epoch": 0.8349609375, "grad_norm": 0.1550874412059784, "learning_rate": 8.134028415354975e-05, "loss": 1.7773, "step": 8550 }, { "epoch": 0.83505859375, "grad_norm": 0.14693886041641235, "learning_rate": 8.130409074489908e-05, "loss": 1.7612, "step": 8551 }, { "epoch": 0.83515625, "grad_norm": 0.14603029191493988, "learning_rate": 8.126791668475375e-05, "loss": 1.7961, "step": 8552 }, { "epoch": 0.83525390625, "grad_norm": 0.15795499086380005, "learning_rate": 8.123176197672726e-05, "loss": 1.8052, "step": 8553 
}, { "epoch": 0.8353515625, "grad_norm": 0.21764862537384033, "learning_rate": 8.119562662443116e-05, "loss": 1.7919, "step": 8554 }, { "epoch": 0.83544921875, "grad_norm": 0.17565517127513885, "learning_rate": 8.115951063147492e-05, "loss": 1.7536, "step": 8555 }, { "epoch": 0.835546875, "grad_norm": 0.16344936192035675, "learning_rate": 8.112341400146632e-05, "loss": 1.797, "step": 8556 }, { "epoch": 0.83564453125, "grad_norm": 0.16120970249176025, "learning_rate": 8.108733673801108e-05, "loss": 1.7773, "step": 8557 }, { "epoch": 0.8357421875, "grad_norm": 0.17257001996040344, "learning_rate": 8.105127884471299e-05, "loss": 1.735, "step": 8558 }, { "epoch": 0.83583984375, "grad_norm": 0.15913689136505127, "learning_rate": 8.101524032517389e-05, "loss": 1.7703, "step": 8559 }, { "epoch": 0.8359375, "grad_norm": 0.1579756736755371, "learning_rate": 8.097922118299374e-05, "loss": 1.8086, "step": 8560 }, { "epoch": 0.83603515625, "grad_norm": 0.1562652289867401, "learning_rate": 8.094322142177057e-05, "loss": 1.7643, "step": 8561 }, { "epoch": 0.8361328125, "grad_norm": 0.1768665462732315, "learning_rate": 8.090724104510037e-05, "loss": 1.7434, "step": 8562 }, { "epoch": 0.83623046875, "grad_norm": 0.16803112626075745, "learning_rate": 8.087128005657726e-05, "loss": 1.7788, "step": 8563 }, { "epoch": 0.836328125, "grad_norm": 0.1503148078918457, "learning_rate": 8.083533845979343e-05, "loss": 1.8024, "step": 8564 }, { "epoch": 0.83642578125, "grad_norm": 0.15047983825206757, "learning_rate": 8.079941625833918e-05, "loss": 1.7459, "step": 8565 }, { "epoch": 0.8365234375, "grad_norm": 0.17495952546596527, "learning_rate": 8.076351345580278e-05, "loss": 1.7475, "step": 8566 }, { "epoch": 0.83662109375, "grad_norm": 0.17406727373600006, "learning_rate": 8.072763005577058e-05, "loss": 1.7368, "step": 8567 }, { "epoch": 0.83671875, "grad_norm": 0.14824235439300537, "learning_rate": 8.06917660618271e-05, "loss": 1.7542, "step": 8568 }, { "epoch": 0.83681640625, "grad_norm": 0.17104069888591766, "learning_rate": 8.065592147755472e-05, "loss": 1.7942, "step": 8569 }, { "epoch": 0.8369140625, "grad_norm": 0.1718413084745407, "learning_rate": 8.062009630653406e-05, "loss": 1.7385, "step": 8570 }, { "epoch": 0.83701171875, "grad_norm": 0.17480698227882385, "learning_rate": 8.058429055234371e-05, "loss": 1.724, "step": 8571 }, { "epoch": 0.837109375, "grad_norm": 0.1672711819410324, "learning_rate": 8.05485042185604e-05, "loss": 1.7953, "step": 8572 }, { "epoch": 0.83720703125, "grad_norm": 0.17301736772060394, "learning_rate": 8.05127373087588e-05, "loss": 1.7275, "step": 8573 }, { "epoch": 0.8373046875, "grad_norm": 0.1736856997013092, "learning_rate": 8.047698982651178e-05, "loss": 1.7513, "step": 8574 }, { "epoch": 0.83740234375, "grad_norm": 0.15342216193675995, "learning_rate": 8.044126177539015e-05, "loss": 1.747, "step": 8575 }, { "epoch": 0.8375, "grad_norm": 0.19653311371803284, "learning_rate": 8.040555315896284e-05, "loss": 1.7688, "step": 8576 }, { "epoch": 0.83759765625, "grad_norm": 0.17135022580623627, "learning_rate": 8.036986398079678e-05, "loss": 1.783, "step": 8577 }, { "epoch": 0.8376953125, "grad_norm": 0.16897425055503845, "learning_rate": 8.033419424445706e-05, "loss": 1.7214, "step": 8578 }, { "epoch": 0.83779296875, "grad_norm": 0.16988372802734375, "learning_rate": 8.029854395350675e-05, "loss": 1.7332, "step": 8579 }, { "epoch": 0.837890625, "grad_norm": 0.15007410943508148, "learning_rate": 8.026291311150699e-05, "loss": 1.7667, "step": 8580 }, { "epoch": 0.83798828125, 
"grad_norm": 0.17450401186943054, "learning_rate": 8.0227301722017e-05, "loss": 1.7678, "step": 8581 }, { "epoch": 0.8380859375, "grad_norm": 0.16680696606636047, "learning_rate": 8.019170978859408e-05, "loss": 1.748, "step": 8582 }, { "epoch": 0.83818359375, "grad_norm": 0.18496227264404297, "learning_rate": 8.015613731479346e-05, "loss": 1.7773, "step": 8583 }, { "epoch": 0.83828125, "grad_norm": 0.1511314958333969, "learning_rate": 8.012058430416859e-05, "loss": 1.7844, "step": 8584 }, { "epoch": 0.83837890625, "grad_norm": 0.1696626991033554, "learning_rate": 8.008505076027081e-05, "loss": 1.7599, "step": 8585 }, { "epoch": 0.8384765625, "grad_norm": 0.1464739888906479, "learning_rate": 8.004953668664972e-05, "loss": 1.7536, "step": 8586 }, { "epoch": 0.83857421875, "grad_norm": 0.16891834139823914, "learning_rate": 8.00140420868528e-05, "loss": 1.7368, "step": 8587 }, { "epoch": 0.838671875, "grad_norm": 0.15555822849273682, "learning_rate": 7.997856696442564e-05, "loss": 1.7651, "step": 8588 }, { "epoch": 0.83876953125, "grad_norm": 0.15834428369998932, "learning_rate": 7.994311132291197e-05, "loss": 1.7716, "step": 8589 }, { "epoch": 0.8388671875, "grad_norm": 0.15188704431056976, "learning_rate": 7.990767516585338e-05, "loss": 1.7531, "step": 8590 }, { "epoch": 0.83896484375, "grad_norm": 0.14837466180324554, "learning_rate": 7.987225849678968e-05, "loss": 1.7287, "step": 8591 }, { "epoch": 0.8390625, "grad_norm": 0.13774575293064117, "learning_rate": 7.983686131925866e-05, "loss": 1.7711, "step": 8592 }, { "epoch": 0.83916015625, "grad_norm": 0.1446312665939331, "learning_rate": 7.980148363679624e-05, "loss": 1.7518, "step": 8593 }, { "epoch": 0.8392578125, "grad_norm": 0.1608283668756485, "learning_rate": 7.976612545293626e-05, "loss": 1.7919, "step": 8594 }, { "epoch": 0.83935546875, "grad_norm": 0.14625351130962372, "learning_rate": 7.973078677121076e-05, "loss": 1.7453, "step": 8595 }, { "epoch": 0.839453125, "grad_norm": 0.1667589247226715, "learning_rate": 7.969546759514978e-05, "loss": 1.7713, "step": 8596 }, { "epoch": 0.83955078125, "grad_norm": 0.1762782484292984, "learning_rate": 7.966016792828127e-05, "loss": 1.7888, "step": 8597 }, { "epoch": 0.8396484375, "grad_norm": 0.13893422484397888, "learning_rate": 7.962488777413147e-05, "loss": 1.7615, "step": 8598 }, { "epoch": 0.83974609375, "grad_norm": 0.18336449563503265, "learning_rate": 7.958962713622448e-05, "loss": 1.7463, "step": 8599 }, { "epoch": 0.83984375, "grad_norm": 0.1516614705324173, "learning_rate": 7.955438601808257e-05, "loss": 1.7973, "step": 8600 }, { "epoch": 0.83994140625, "grad_norm": 0.16402161121368408, "learning_rate": 7.9519164423226e-05, "loss": 1.7904, "step": 8601 }, { "epoch": 0.8400390625, "grad_norm": 0.1652250587940216, "learning_rate": 7.94839623551731e-05, "loss": 1.7654, "step": 8602 }, { "epoch": 0.84013671875, "grad_norm": 0.13700176775455475, "learning_rate": 7.94487798174403e-05, "loss": 1.7581, "step": 8603 }, { "epoch": 0.840234375, "grad_norm": 0.15450945496559143, "learning_rate": 7.941361681354193e-05, "loss": 1.7985, "step": 8604 }, { "epoch": 0.84033203125, "grad_norm": 0.15956947207450867, "learning_rate": 7.937847334699047e-05, "loss": 1.7505, "step": 8605 }, { "epoch": 0.8404296875, "grad_norm": 0.14806438982486725, "learning_rate": 7.934334942129649e-05, "loss": 1.7404, "step": 8606 }, { "epoch": 0.84052734375, "grad_norm": 0.15551333129405975, "learning_rate": 7.930824503996856e-05, "loss": 1.7272, "step": 8607 }, { "epoch": 0.840625, "grad_norm": 0.17026452720165253, 
"learning_rate": 7.927316020651326e-05, "loss": 1.7721, "step": 8608 }, { "epoch": 0.84072265625, "grad_norm": 0.2556716501712799, "learning_rate": 7.923809492443526e-05, "loss": 1.7503, "step": 8609 }, { "epoch": 0.8408203125, "grad_norm": 0.17214985191822052, "learning_rate": 7.920304919723737e-05, "loss": 1.782, "step": 8610 }, { "epoch": 0.84091796875, "grad_norm": 0.1682480424642563, "learning_rate": 7.916802302842017e-05, "loss": 1.7736, "step": 8611 }, { "epoch": 0.841015625, "grad_norm": 0.1469680815935135, "learning_rate": 7.913301642148256e-05, "loss": 1.7448, "step": 8612 }, { "epoch": 0.84111328125, "grad_norm": 0.15450184047222137, "learning_rate": 7.909802937992138e-05, "loss": 1.7453, "step": 8613 }, { "epoch": 0.8412109375, "grad_norm": 0.16066943109035492, "learning_rate": 7.906306190723155e-05, "loss": 1.7657, "step": 8614 }, { "epoch": 0.84130859375, "grad_norm": 0.1590714454650879, "learning_rate": 7.902811400690598e-05, "loss": 1.7279, "step": 8615 }, { "epoch": 0.84140625, "grad_norm": 0.15845851600170135, "learning_rate": 7.899318568243568e-05, "loss": 1.7967, "step": 8616 }, { "epoch": 0.84150390625, "grad_norm": 0.15010811388492584, "learning_rate": 7.89582769373097e-05, "loss": 1.7647, "step": 8617 }, { "epoch": 0.8416015625, "grad_norm": 0.1659887731075287, "learning_rate": 7.892338777501504e-05, "loss": 1.7668, "step": 8618 }, { "epoch": 0.84169921875, "grad_norm": 0.1445939540863037, "learning_rate": 7.888851819903686e-05, "loss": 1.7893, "step": 8619 }, { "epoch": 0.841796875, "grad_norm": 0.17217402160167694, "learning_rate": 7.885366821285831e-05, "loss": 1.7773, "step": 8620 }, { "epoch": 0.84189453125, "grad_norm": 0.1595388650894165, "learning_rate": 7.88188378199606e-05, "loss": 1.7607, "step": 8621 }, { "epoch": 0.8419921875, "grad_norm": 0.16421262919902802, "learning_rate": 7.878402702382301e-05, "loss": 1.7734, "step": 8622 }, { "epoch": 0.84208984375, "grad_norm": 0.14159685373306274, "learning_rate": 7.874923582792278e-05, "loss": 1.7962, "step": 8623 }, { "epoch": 0.8421875, "grad_norm": 0.14571677148342133, "learning_rate": 7.871446423573532e-05, "loss": 1.8013, "step": 8624 }, { "epoch": 0.84228515625, "grad_norm": 0.1525963693857193, "learning_rate": 7.86797122507339e-05, "loss": 1.7328, "step": 8625 }, { "epoch": 0.8423828125, "grad_norm": 0.15472987294197083, "learning_rate": 7.864497987638999e-05, "loss": 1.7792, "step": 8626 }, { "epoch": 0.84248046875, "grad_norm": 0.1684083640575409, "learning_rate": 7.861026711617305e-05, "loss": 1.75, "step": 8627 }, { "epoch": 0.842578125, "grad_norm": 0.1579395979642868, "learning_rate": 7.857557397355054e-05, "loss": 1.7409, "step": 8628 }, { "epoch": 0.84267578125, "grad_norm": 0.14995913207530975, "learning_rate": 7.854090045198806e-05, "loss": 1.7767, "step": 8629 }, { "epoch": 0.8427734375, "grad_norm": 0.1905980408191681, "learning_rate": 7.850624655494913e-05, "loss": 1.7499, "step": 8630 }, { "epoch": 0.84287109375, "grad_norm": 0.15442153811454773, "learning_rate": 7.847161228589546e-05, "loss": 1.7476, "step": 8631 }, { "epoch": 0.84296875, "grad_norm": 0.19490738213062286, "learning_rate": 7.843699764828658e-05, "loss": 1.7539, "step": 8632 }, { "epoch": 0.84306640625, "grad_norm": 0.17873917520046234, "learning_rate": 7.840240264558025e-05, "loss": 1.7879, "step": 8633 }, { "epoch": 0.8431640625, "grad_norm": 0.16108116507530212, "learning_rate": 7.836782728123218e-05, "loss": 1.7693, "step": 8634 }, { "epoch": 0.84326171875, "grad_norm": 0.21398577094078064, "learning_rate": 
7.833327155869617e-05, "loss": 1.7569, "step": 8635 }, { "epoch": 0.843359375, "grad_norm": 0.15447008609771729, "learning_rate": 7.829873548142402e-05, "loss": 1.793, "step": 8636 }, { "epoch": 0.84345703125, "grad_norm": 0.15315519273281097, "learning_rate": 7.826421905286559e-05, "loss": 1.8179, "step": 8637 }, { "epoch": 0.8435546875, "grad_norm": 0.17110691964626312, "learning_rate": 7.822972227646876e-05, "loss": 1.723, "step": 8638 }, { "epoch": 0.84365234375, "grad_norm": 0.15883760154247284, "learning_rate": 7.819524515567944e-05, "loss": 1.7921, "step": 8639 }, { "epoch": 0.84375, "grad_norm": 0.1550407111644745, "learning_rate": 7.81607876939416e-05, "loss": 1.7835, "step": 8640 }, { "epoch": 0.84384765625, "grad_norm": 0.17537052929401398, "learning_rate": 7.812634989469723e-05, "loss": 1.7529, "step": 8641 }, { "epoch": 0.8439453125, "grad_norm": 0.14970663189888, "learning_rate": 7.809193176138633e-05, "loss": 1.8304, "step": 8642 }, { "epoch": 0.84404296875, "grad_norm": 0.1806529015302658, "learning_rate": 7.805753329744704e-05, "loss": 1.7863, "step": 8643 }, { "epoch": 0.844140625, "grad_norm": 0.18027731776237488, "learning_rate": 7.802315450631538e-05, "loss": 1.7356, "step": 8644 }, { "epoch": 0.84423828125, "grad_norm": 0.15332059562206268, "learning_rate": 7.79887953914256e-05, "loss": 1.7558, "step": 8645 }, { "epoch": 0.8443359375, "grad_norm": 0.17090369760990143, "learning_rate": 7.795445595620974e-05, "loss": 1.7639, "step": 8646 }, { "epoch": 0.84443359375, "grad_norm": 0.18502339720726013, "learning_rate": 7.792013620409803e-05, "loss": 1.7739, "step": 8647 }, { "epoch": 0.84453125, "grad_norm": 0.16128988564014435, "learning_rate": 7.788583613851878e-05, "loss": 1.7688, "step": 8648 }, { "epoch": 0.84462890625, "grad_norm": 0.1567084938287735, "learning_rate": 7.785155576289829e-05, "loss": 1.7429, "step": 8649 }, { "epoch": 0.8447265625, "grad_norm": 0.17000479996204376, "learning_rate": 7.781729508066072e-05, "loss": 1.7634, "step": 8650 }, { "epoch": 0.84482421875, "grad_norm": 0.16767150163650513, "learning_rate": 7.778305409522851e-05, "loss": 1.7696, "step": 8651 }, { "epoch": 0.844921875, "grad_norm": 0.1420109122991562, "learning_rate": 7.774883281002205e-05, "loss": 1.7948, "step": 8652 }, { "epoch": 0.84501953125, "grad_norm": 0.17132557928562164, "learning_rate": 7.771463122845966e-05, "loss": 1.7642, "step": 8653 }, { "epoch": 0.8451171875, "grad_norm": 0.17003366351127625, "learning_rate": 7.768044935395779e-05, "loss": 1.7813, "step": 8654 }, { "epoch": 0.84521484375, "grad_norm": 0.1520814299583435, "learning_rate": 7.764628718993097e-05, "loss": 1.7875, "step": 8655 }, { "epoch": 0.8453125, "grad_norm": 0.1612364649772644, "learning_rate": 7.761214473979173e-05, "loss": 1.8121, "step": 8656 }, { "epoch": 0.84541015625, "grad_norm": 0.17314405739307404, "learning_rate": 7.757802200695044e-05, "loss": 1.764, "step": 8657 }, { "epoch": 0.8455078125, "grad_norm": 0.16880205273628235, "learning_rate": 7.754391899481579e-05, "loss": 1.7537, "step": 8658 }, { "epoch": 0.84560546875, "grad_norm": 0.1528070867061615, "learning_rate": 7.750983570679433e-05, "loss": 1.7563, "step": 8659 }, { "epoch": 0.845703125, "grad_norm": 0.1710202395915985, "learning_rate": 7.747577214629067e-05, "loss": 1.7924, "step": 8660 }, { "epoch": 0.84580078125, "grad_norm": 0.15477795898914337, "learning_rate": 7.744172831670742e-05, "loss": 1.7839, "step": 8661 }, { "epoch": 0.8458984375, "grad_norm": 0.14182984828948975, "learning_rate": 7.740770422144533e-05, "loss": 
1.7546, "step": 8662 }, { "epoch": 0.84599609375, "grad_norm": 0.1772458255290985, "learning_rate": 7.737369986390314e-05, "loss": 1.761, "step": 8663 }, { "epoch": 0.84609375, "grad_norm": 0.17876631021499634, "learning_rate": 7.733971524747747e-05, "loss": 1.7727, "step": 8664 }, { "epoch": 0.84619140625, "grad_norm": 0.14362557232379913, "learning_rate": 7.730575037556314e-05, "loss": 1.7753, "step": 8665 }, { "epoch": 0.8462890625, "grad_norm": 0.18804088234901428, "learning_rate": 7.727180525155298e-05, "loss": 1.7908, "step": 8666 }, { "epoch": 0.84638671875, "grad_norm": 0.16888779401779175, "learning_rate": 7.72378798788377e-05, "loss": 1.8028, "step": 8667 }, { "epoch": 0.846484375, "grad_norm": 0.1549595147371292, "learning_rate": 7.720397426080617e-05, "loss": 1.7758, "step": 8668 }, { "epoch": 0.84658203125, "grad_norm": 0.15022696554660797, "learning_rate": 7.717008840084534e-05, "loss": 1.7604, "step": 8669 }, { "epoch": 0.8466796875, "grad_norm": 0.1932218074798584, "learning_rate": 7.71362223023401e-05, "loss": 1.7881, "step": 8670 }, { "epoch": 0.84677734375, "grad_norm": 0.15454979240894318, "learning_rate": 7.710237596867332e-05, "loss": 1.7285, "step": 8671 }, { "epoch": 0.846875, "grad_norm": 0.16503454744815826, "learning_rate": 7.706854940322594e-05, "loss": 1.7618, "step": 8672 }, { "epoch": 0.84697265625, "grad_norm": 0.1545722782611847, "learning_rate": 7.703474260937697e-05, "loss": 1.7687, "step": 8673 }, { "epoch": 0.8470703125, "grad_norm": 0.1735692173242569, "learning_rate": 7.700095559050343e-05, "loss": 1.735, "step": 8674 }, { "epoch": 0.84716796875, "grad_norm": 0.14508718252182007, "learning_rate": 7.696718834998024e-05, "loss": 1.7097, "step": 8675 }, { "epoch": 0.847265625, "grad_norm": 0.14594249427318573, "learning_rate": 7.693344089118056e-05, "loss": 1.7387, "step": 8676 }, { "epoch": 0.84736328125, "grad_norm": 0.15676791965961456, "learning_rate": 7.689971321747546e-05, "loss": 1.7158, "step": 8677 }, { "epoch": 0.8474609375, "grad_norm": 0.16458246111869812, "learning_rate": 7.686600533223395e-05, "loss": 1.7604, "step": 8678 }, { "epoch": 0.84755859375, "grad_norm": 0.14343498647212982, "learning_rate": 7.683231723882324e-05, "loss": 1.7618, "step": 8679 }, { "epoch": 0.84765625, "grad_norm": 0.1660676896572113, "learning_rate": 7.679864894060838e-05, "loss": 1.7291, "step": 8680 }, { "epoch": 0.84775390625, "grad_norm": 0.1771949976682663, "learning_rate": 7.676500044095267e-05, "loss": 1.8026, "step": 8681 }, { "epoch": 0.8478515625, "grad_norm": 0.15079796314239502, "learning_rate": 7.67313717432171e-05, "loss": 1.7737, "step": 8682 }, { "epoch": 0.84794921875, "grad_norm": 0.1753825843334198, "learning_rate": 7.669776285076102e-05, "loss": 1.7652, "step": 8683 }, { "epoch": 0.848046875, "grad_norm": 0.1469496637582779, "learning_rate": 7.666417376694171e-05, "loss": 1.7419, "step": 8684 }, { "epoch": 0.84814453125, "grad_norm": 0.15480691194534302, "learning_rate": 7.663060449511429e-05, "loss": 1.7578, "step": 8685 }, { "epoch": 0.8482421875, "grad_norm": 0.1576964259147644, "learning_rate": 7.659705503863207e-05, "loss": 1.7451, "step": 8686 }, { "epoch": 0.84833984375, "grad_norm": 0.14825838804244995, "learning_rate": 7.656352540084637e-05, "loss": 1.7831, "step": 8687 }, { "epoch": 0.8484375, "grad_norm": 0.15359289944171906, "learning_rate": 7.653001558510656e-05, "loss": 1.8275, "step": 8688 }, { "epoch": 0.84853515625, "grad_norm": 0.15602609515190125, "learning_rate": 7.64965255947598e-05, "loss": 1.7567, "step": 8689 }, { 
"epoch": 0.8486328125, "grad_norm": 0.14219003915786743, "learning_rate": 7.646305543315158e-05, "loss": 1.7638, "step": 8690 }, { "epoch": 0.84873046875, "grad_norm": 0.15562419593334198, "learning_rate": 7.642960510362528e-05, "loss": 1.7474, "step": 8691 }, { "epoch": 0.848828125, "grad_norm": 0.13905146718025208, "learning_rate": 7.639617460952224e-05, "loss": 1.7722, "step": 8692 }, { "epoch": 0.84892578125, "grad_norm": 0.15757344663143158, "learning_rate": 7.636276395418188e-05, "loss": 1.7893, "step": 8693 }, { "epoch": 0.8490234375, "grad_norm": 0.1389034241437912, "learning_rate": 7.632937314094163e-05, "loss": 1.7598, "step": 8694 }, { "epoch": 0.84912109375, "grad_norm": 0.15515516698360443, "learning_rate": 7.629600217313699e-05, "loss": 1.7898, "step": 8695 }, { "epoch": 0.84921875, "grad_norm": 0.1466154307126999, "learning_rate": 7.626265105410127e-05, "loss": 1.7718, "step": 8696 }, { "epoch": 0.84931640625, "grad_norm": 0.16003058850765228, "learning_rate": 7.62293197871661e-05, "loss": 1.7118, "step": 8697 }, { "epoch": 0.8494140625, "grad_norm": 0.16343791782855988, "learning_rate": 7.6196008375661e-05, "loss": 1.7855, "step": 8698 }, { "epoch": 0.84951171875, "grad_norm": 0.14182841777801514, "learning_rate": 7.616271682291337e-05, "loss": 1.7716, "step": 8699 }, { "epoch": 0.849609375, "grad_norm": 0.15275156497955322, "learning_rate": 7.612944513224876e-05, "loss": 1.7362, "step": 8700 }, { "epoch": 0.84970703125, "grad_norm": 0.16155919432640076, "learning_rate": 7.609619330699077e-05, "loss": 1.7521, "step": 8701 }, { "epoch": 0.8498046875, "grad_norm": 0.15020422637462616, "learning_rate": 7.6062961350461e-05, "loss": 1.7615, "step": 8702 }, { "epoch": 0.84990234375, "grad_norm": 0.14441651105880737, "learning_rate": 7.602974926597885e-05, "loss": 1.7737, "step": 8703 }, { "epoch": 0.85, "grad_norm": 0.1595880687236786, "learning_rate": 7.599655705686209e-05, "loss": 1.7909, "step": 8704 }, { "epoch": 0.85009765625, "grad_norm": 0.16762593388557434, "learning_rate": 7.59633847264263e-05, "loss": 1.7461, "step": 8705 }, { "epoch": 0.8501953125, "grad_norm": 0.14934410154819489, "learning_rate": 7.593023227798504e-05, "loss": 1.7953, "step": 8706 }, { "epoch": 0.85029296875, "grad_norm": 0.1482257843017578, "learning_rate": 7.589709971484995e-05, "loss": 1.7606, "step": 8707 }, { "epoch": 0.850390625, "grad_norm": 0.1473088264465332, "learning_rate": 7.586398704033072e-05, "loss": 1.7633, "step": 8708 }, { "epoch": 0.85048828125, "grad_norm": 0.16974741220474243, "learning_rate": 7.583089425773507e-05, "loss": 1.767, "step": 8709 }, { "epoch": 0.8505859375, "grad_norm": 0.14213930070400238, "learning_rate": 7.57978213703685e-05, "loss": 1.7667, "step": 8710 }, { "epoch": 0.85068359375, "grad_norm": 0.16614030301570892, "learning_rate": 7.576476838153483e-05, "loss": 1.7636, "step": 8711 }, { "epoch": 0.85078125, "grad_norm": 0.15503565967082977, "learning_rate": 7.57317352945358e-05, "loss": 1.7255, "step": 8712 }, { "epoch": 0.85087890625, "grad_norm": 0.16029350459575653, "learning_rate": 7.569872211267098e-05, "loss": 1.7651, "step": 8713 }, { "epoch": 0.8509765625, "grad_norm": 0.14866706728935242, "learning_rate": 7.566572883923819e-05, "loss": 1.8018, "step": 8714 }, { "epoch": 0.85107421875, "grad_norm": 0.16018620133399963, "learning_rate": 7.563275547753315e-05, "loss": 1.7982, "step": 8715 }, { "epoch": 0.851171875, "grad_norm": 0.15006540715694427, "learning_rate": 7.559980203084966e-05, "loss": 1.7684, "step": 8716 }, { "epoch": 0.85126953125, 
"grad_norm": 0.15156207978725433, "learning_rate": 7.556686850247929e-05, "loss": 1.745, "step": 8717 }, { "epoch": 0.8513671875, "grad_norm": 0.1522635966539383, "learning_rate": 7.553395489571202e-05, "loss": 1.7361, "step": 8718 }, { "epoch": 0.85146484375, "grad_norm": 0.16044947504997253, "learning_rate": 7.550106121383558e-05, "loss": 1.7878, "step": 8719 }, { "epoch": 0.8515625, "grad_norm": 0.15144410729408264, "learning_rate": 7.546818746013567e-05, "loss": 1.7493, "step": 8720 }, { "epoch": 0.85166015625, "grad_norm": 0.15889763832092285, "learning_rate": 7.543533363789614e-05, "loss": 1.7664, "step": 8721 }, { "epoch": 0.8517578125, "grad_norm": 0.15334145724773407, "learning_rate": 7.540249975039881e-05, "loss": 1.7591, "step": 8722 }, { "epoch": 0.85185546875, "grad_norm": 0.15576831996440887, "learning_rate": 7.536968580092354e-05, "loss": 1.7627, "step": 8723 }, { "epoch": 0.851953125, "grad_norm": 0.13994479179382324, "learning_rate": 7.533689179274796e-05, "loss": 1.7186, "step": 8724 }, { "epoch": 0.85205078125, "grad_norm": 0.14032498002052307, "learning_rate": 7.530411772914811e-05, "loss": 1.7802, "step": 8725 }, { "epoch": 0.8521484375, "grad_norm": 0.16669344902038574, "learning_rate": 7.527136361339779e-05, "loss": 1.7878, "step": 8726 }, { "epoch": 0.85224609375, "grad_norm": 0.1448146104812622, "learning_rate": 7.523862944876876e-05, "loss": 1.7799, "step": 8727 }, { "epoch": 0.85234375, "grad_norm": 0.15872935950756073, "learning_rate": 7.520591523853093e-05, "loss": 1.772, "step": 8728 }, { "epoch": 0.85244140625, "grad_norm": 0.1280146837234497, "learning_rate": 7.517322098595215e-05, "loss": 1.7286, "step": 8729 }, { "epoch": 0.8525390625, "grad_norm": 0.14689016342163086, "learning_rate": 7.514054669429831e-05, "loss": 1.725, "step": 8730 }, { "epoch": 0.85263671875, "grad_norm": 0.14435580372810364, "learning_rate": 7.51078923668332e-05, "loss": 1.7688, "step": 8731 }, { "epoch": 0.852734375, "grad_norm": 0.1471579223871231, "learning_rate": 7.507525800681879e-05, "loss": 1.7353, "step": 8732 }, { "epoch": 0.85283203125, "grad_norm": 0.14269834756851196, "learning_rate": 7.504264361751497e-05, "loss": 1.7944, "step": 8733 }, { "epoch": 0.8529296875, "grad_norm": 0.140056312084198, "learning_rate": 7.501004920217956e-05, "loss": 1.7701, "step": 8734 }, { "epoch": 0.85302734375, "grad_norm": 0.1485159695148468, "learning_rate": 7.497747476406848e-05, "loss": 1.7069, "step": 8735 }, { "epoch": 0.853125, "grad_norm": 0.1469627320766449, "learning_rate": 7.494492030643563e-05, "loss": 1.7868, "step": 8736 }, { "epoch": 0.85322265625, "grad_norm": 0.15003561973571777, "learning_rate": 7.491238583253296e-05, "loss": 1.7378, "step": 8737 }, { "epoch": 0.8533203125, "grad_norm": 0.1634555459022522, "learning_rate": 7.487987134561023e-05, "loss": 1.7635, "step": 8738 }, { "epoch": 0.85341796875, "grad_norm": 0.15713946521282196, "learning_rate": 7.484737684891549e-05, "loss": 1.7506, "step": 8739 }, { "epoch": 0.853515625, "grad_norm": 0.1392672210931778, "learning_rate": 7.481490234569466e-05, "loss": 1.6874, "step": 8740 }, { "epoch": 0.85361328125, "grad_norm": 0.14243671298027039, "learning_rate": 7.478244783919157e-05, "loss": 1.7386, "step": 8741 }, { "epoch": 0.8537109375, "grad_norm": 0.15178564190864563, "learning_rate": 7.475001333264816e-05, "loss": 1.689, "step": 8742 }, { "epoch": 0.85380859375, "grad_norm": 0.14381247758865356, "learning_rate": 7.471759882930438e-05, "loss": 1.8276, "step": 8743 }, { "epoch": 0.85390625, "grad_norm": 0.16713084280490875, 
"learning_rate": 7.468520433239816e-05, "loss": 1.7631, "step": 8744 }, { "epoch": 0.85400390625, "grad_norm": 0.14407901465892792, "learning_rate": 7.465282984516532e-05, "loss": 1.7607, "step": 8745 }, { "epoch": 0.8541015625, "grad_norm": 0.15529708564281464, "learning_rate": 7.462047537083989e-05, "loss": 1.7452, "step": 8746 }, { "epoch": 0.85419921875, "grad_norm": 0.14500491321086884, "learning_rate": 7.458814091265384e-05, "loss": 1.7839, "step": 8747 }, { "epoch": 0.854296875, "grad_norm": 0.15329240262508392, "learning_rate": 7.455582647383697e-05, "loss": 1.7626, "step": 8748 }, { "epoch": 0.85439453125, "grad_norm": 0.16472281515598297, "learning_rate": 7.452353205761725e-05, "loss": 1.7939, "step": 8749 }, { "epoch": 0.8544921875, "grad_norm": 0.14227651059627533, "learning_rate": 7.449125766722062e-05, "loss": 1.7709, "step": 8750 }, { "epoch": 0.85458984375, "grad_norm": 0.19374686479568481, "learning_rate": 7.445900330587105e-05, "loss": 1.7504, "step": 8751 }, { "epoch": 0.8546875, "grad_norm": 0.14485880732536316, "learning_rate": 7.442676897679032e-05, "loss": 1.7907, "step": 8752 }, { "epoch": 0.85478515625, "grad_norm": 0.1893666535615921, "learning_rate": 7.439455468319848e-05, "loss": 1.7538, "step": 8753 }, { "epoch": 0.8548828125, "grad_norm": 0.15806758403778076, "learning_rate": 7.43623604283135e-05, "loss": 1.7221, "step": 8754 }, { "epoch": 0.85498046875, "grad_norm": 0.15597805380821228, "learning_rate": 7.433018621535116e-05, "loss": 1.7556, "step": 8755 }, { "epoch": 0.855078125, "grad_norm": 0.15900593996047974, "learning_rate": 7.429803204752544e-05, "loss": 1.7635, "step": 8756 }, { "epoch": 0.85517578125, "grad_norm": 0.154429629445076, "learning_rate": 7.426589792804829e-05, "loss": 1.7529, "step": 8757 }, { "epoch": 0.8552734375, "grad_norm": 0.17240235209465027, "learning_rate": 7.423378386012962e-05, "loss": 1.7617, "step": 8758 }, { "epoch": 0.85537109375, "grad_norm": 0.14883705973625183, "learning_rate": 7.420168984697722e-05, "loss": 1.7216, "step": 8759 }, { "epoch": 0.85546875, "grad_norm": 0.1630050092935562, "learning_rate": 7.416961589179713e-05, "loss": 1.7801, "step": 8760 }, { "epoch": 0.85556640625, "grad_norm": 0.15738067030906677, "learning_rate": 7.413756199779326e-05, "loss": 1.7846, "step": 8761 }, { "epoch": 0.8556640625, "grad_norm": 0.1670789122581482, "learning_rate": 7.410552816816743e-05, "loss": 1.7524, "step": 8762 }, { "epoch": 0.85576171875, "grad_norm": 0.16231949627399445, "learning_rate": 7.407351440611957e-05, "loss": 1.7863, "step": 8763 }, { "epoch": 0.855859375, "grad_norm": 0.15692614018917084, "learning_rate": 7.404152071484757e-05, "loss": 1.7636, "step": 8764 }, { "epoch": 0.85595703125, "grad_norm": 0.1578112095594406, "learning_rate": 7.400954709754731e-05, "loss": 1.7422, "step": 8765 }, { "epoch": 0.8560546875, "grad_norm": 0.1631753146648407, "learning_rate": 7.39775935574127e-05, "loss": 1.7351, "step": 8766 }, { "epoch": 0.85615234375, "grad_norm": 0.1701115071773529, "learning_rate": 7.394566009763558e-05, "loss": 1.7814, "step": 8767 }, { "epoch": 0.85625, "grad_norm": 0.15621617436408997, "learning_rate": 7.39137467214059e-05, "loss": 1.7355, "step": 8768 }, { "epoch": 0.85634765625, "grad_norm": 0.168375626206398, "learning_rate": 7.38818534319114e-05, "loss": 1.7974, "step": 8769 }, { "epoch": 0.8564453125, "grad_norm": 0.1658746302127838, "learning_rate": 7.3849980232338e-05, "loss": 1.7864, "step": 8770 }, { "epoch": 0.85654296875, "grad_norm": 0.15553300082683563, "learning_rate": 
7.381812712586953e-05, "loss": 1.7477, "step": 8771 }, { "epoch": 0.856640625, "grad_norm": 0.17264670133590698, "learning_rate": 7.378629411568786e-05, "loss": 1.7735, "step": 8772 }, { "epoch": 0.85673828125, "grad_norm": 0.15423953533172607, "learning_rate": 7.375448120497281e-05, "loss": 1.7777, "step": 8773 }, { "epoch": 0.8568359375, "grad_norm": 0.15888544917106628, "learning_rate": 7.372268839690221e-05, "loss": 1.7336, "step": 8774 }, { "epoch": 0.85693359375, "grad_norm": 0.17169474065303802, "learning_rate": 7.369091569465193e-05, "loss": 1.8107, "step": 8775 }, { "epoch": 0.85703125, "grad_norm": 0.15917547047138214, "learning_rate": 7.365916310139572e-05, "loss": 1.7477, "step": 8776 }, { "epoch": 0.85712890625, "grad_norm": 0.1485113799571991, "learning_rate": 7.362743062030536e-05, "loss": 1.7536, "step": 8777 }, { "epoch": 0.8572265625, "grad_norm": 0.18282170593738556, "learning_rate": 7.359571825455069e-05, "loss": 1.7633, "step": 8778 }, { "epoch": 0.85732421875, "grad_norm": 0.19662846624851227, "learning_rate": 7.356402600729949e-05, "loss": 1.7491, "step": 8779 }, { "epoch": 0.857421875, "grad_norm": 0.17176972329616547, "learning_rate": 7.353235388171752e-05, "loss": 1.7473, "step": 8780 }, { "epoch": 0.85751953125, "grad_norm": 0.18337056040763855, "learning_rate": 7.350070188096859e-05, "loss": 1.7406, "step": 8781 }, { "epoch": 0.8576171875, "grad_norm": 0.14200235903263092, "learning_rate": 7.34690700082144e-05, "loss": 1.8304, "step": 8782 }, { "epoch": 0.85771484375, "grad_norm": 0.16962622106075287, "learning_rate": 7.343745826661473e-05, "loss": 1.7759, "step": 8783 }, { "epoch": 0.8578125, "grad_norm": 0.2012474536895752, "learning_rate": 7.340586665932726e-05, "loss": 1.7786, "step": 8784 }, { "epoch": 0.85791015625, "grad_norm": 0.148380845785141, "learning_rate": 7.337429518950774e-05, "loss": 1.7419, "step": 8785 }, { "epoch": 0.8580078125, "grad_norm": 0.14242947101593018, "learning_rate": 7.334274386030993e-05, "loss": 1.7418, "step": 8786 }, { "epoch": 0.85810546875, "grad_norm": 0.18385373055934906, "learning_rate": 7.331121267488544e-05, "loss": 1.7948, "step": 8787 }, { "epoch": 0.858203125, "grad_norm": 0.1508544534444809, "learning_rate": 7.327970163638403e-05, "loss": 1.7853, "step": 8788 }, { "epoch": 0.85830078125, "grad_norm": 0.13989749550819397, "learning_rate": 7.324821074795339e-05, "loss": 1.7503, "step": 8789 }, { "epoch": 0.8583984375, "grad_norm": 0.17195872962474823, "learning_rate": 7.321674001273907e-05, "loss": 1.7774, "step": 8790 }, { "epoch": 0.85849609375, "grad_norm": 0.14921835064888, "learning_rate": 7.31852894338848e-05, "loss": 1.745, "step": 8791 }, { "epoch": 0.85859375, "grad_norm": 0.1356123685836792, "learning_rate": 7.315385901453222e-05, "loss": 1.7576, "step": 8792 }, { "epoch": 0.85869140625, "grad_norm": 0.17748121917247772, "learning_rate": 7.312244875782091e-05, "loss": 1.7723, "step": 8793 }, { "epoch": 0.8587890625, "grad_norm": 0.13847695291042328, "learning_rate": 7.309105866688848e-05, "loss": 1.7303, "step": 8794 }, { "epoch": 0.85888671875, "grad_norm": 0.1754063218832016, "learning_rate": 7.305968874487055e-05, "loss": 1.7417, "step": 8795 }, { "epoch": 0.858984375, "grad_norm": 0.15101860463619232, "learning_rate": 7.302833899490072e-05, "loss": 1.8408, "step": 8796 }, { "epoch": 0.85908203125, "grad_norm": 0.16354958713054657, "learning_rate": 7.299700942011048e-05, "loss": 1.7675, "step": 8797 }, { "epoch": 0.8591796875, "grad_norm": 0.15012097358703613, "learning_rate": 7.296570002362941e-05, 
"loss": 1.7431, "step": 8798 }, { "epoch": 0.85927734375, "grad_norm": 0.1581173986196518, "learning_rate": 7.293441080858504e-05, "loss": 1.7668, "step": 8799 }, { "epoch": 0.859375, "grad_norm": 0.1562919020652771, "learning_rate": 7.290314177810289e-05, "loss": 1.7646, "step": 8800 }, { "epoch": 0.85947265625, "grad_norm": 0.13811513781547546, "learning_rate": 7.287189293530648e-05, "loss": 1.7459, "step": 8801 }, { "epoch": 0.8595703125, "grad_norm": 0.16014476120471954, "learning_rate": 7.284066428331726e-05, "loss": 1.7789, "step": 8802 }, { "epoch": 0.85966796875, "grad_norm": 0.15447679162025452, "learning_rate": 7.280945582525477e-05, "loss": 1.7614, "step": 8803 }, { "epoch": 0.859765625, "grad_norm": 0.15808089077472687, "learning_rate": 7.277826756423632e-05, "loss": 1.7565, "step": 8804 }, { "epoch": 0.85986328125, "grad_norm": 0.14857396483421326, "learning_rate": 7.274709950337743e-05, "loss": 1.7905, "step": 8805 }, { "epoch": 0.8599609375, "grad_norm": 0.16069890558719635, "learning_rate": 7.27159516457915e-05, "loss": 1.7789, "step": 8806 }, { "epoch": 0.86005859375, "grad_norm": 0.1338893622159958, "learning_rate": 7.268482399458992e-05, "loss": 1.7423, "step": 8807 }, { "epoch": 0.86015625, "grad_norm": 0.1512761116027832, "learning_rate": 7.265371655288208e-05, "loss": 1.7226, "step": 8808 }, { "epoch": 0.86025390625, "grad_norm": 0.15068042278289795, "learning_rate": 7.262262932377534e-05, "loss": 1.7252, "step": 8809 }, { "epoch": 0.8603515625, "grad_norm": 0.13870441913604736, "learning_rate": 7.259156231037506e-05, "loss": 1.7386, "step": 8810 }, { "epoch": 0.86044921875, "grad_norm": 0.1601220667362213, "learning_rate": 7.256051551578448e-05, "loss": 1.8066, "step": 8811 }, { "epoch": 0.860546875, "grad_norm": 0.14539878070354462, "learning_rate": 7.252948894310494e-05, "loss": 1.7534, "step": 8812 }, { "epoch": 0.86064453125, "grad_norm": 0.14514930546283722, "learning_rate": 7.249848259543575e-05, "loss": 1.7501, "step": 8813 }, { "epoch": 0.8607421875, "grad_norm": 0.15810328722000122, "learning_rate": 7.246749647587413e-05, "loss": 1.7614, "step": 8814 }, { "epoch": 0.86083984375, "grad_norm": 0.14284087717533112, "learning_rate": 7.243653058751535e-05, "loss": 1.7516, "step": 8815 }, { "epoch": 0.8609375, "grad_norm": 0.14629770815372467, "learning_rate": 7.240558493345261e-05, "loss": 1.7284, "step": 8816 }, { "epoch": 0.86103515625, "grad_norm": 0.16380690038204193, "learning_rate": 7.237465951677715e-05, "loss": 1.7545, "step": 8817 }, { "epoch": 0.8611328125, "grad_norm": 0.1430993527173996, "learning_rate": 7.234375434057805e-05, "loss": 1.7336, "step": 8818 }, { "epoch": 0.86123046875, "grad_norm": 0.15947818756103516, "learning_rate": 7.231286940794252e-05, "loss": 1.7476, "step": 8819 }, { "epoch": 0.861328125, "grad_norm": 0.1458704173564911, "learning_rate": 7.228200472195573e-05, "loss": 1.7523, "step": 8820 }, { "epoch": 0.86142578125, "grad_norm": 0.15504522621631622, "learning_rate": 7.225116028570071e-05, "loss": 1.7746, "step": 8821 }, { "epoch": 0.8615234375, "grad_norm": 0.14377470314502716, "learning_rate": 7.222033610225858e-05, "loss": 1.7571, "step": 8822 }, { "epoch": 0.86162109375, "grad_norm": 0.15027034282684326, "learning_rate": 7.218953217470842e-05, "loss": 1.7807, "step": 8823 }, { "epoch": 0.86171875, "grad_norm": 0.1504157930612564, "learning_rate": 7.215874850612731e-05, "loss": 1.797, "step": 8824 }, { "epoch": 0.86181640625, "grad_norm": 0.14749808609485626, "learning_rate": 7.212798509959015e-05, "loss": 1.8078, "step": 
8825 }, { "epoch": 0.8619140625, "grad_norm": 0.1344059705734253, "learning_rate": 7.209724195817001e-05, "loss": 1.7739, "step": 8826 }, { "epoch": 0.86201171875, "grad_norm": 0.15152817964553833, "learning_rate": 7.206651908493783e-05, "loss": 1.6924, "step": 8827 }, { "epoch": 0.862109375, "grad_norm": 0.14291062951087952, "learning_rate": 7.203581648296256e-05, "loss": 1.7444, "step": 8828 }, { "epoch": 0.86220703125, "grad_norm": 0.1345437467098236, "learning_rate": 7.200513415531114e-05, "loss": 1.7767, "step": 8829 }, { "epoch": 0.8623046875, "grad_norm": 0.1475142538547516, "learning_rate": 7.197447210504845e-05, "loss": 1.7591, "step": 8830 }, { "epoch": 0.86240234375, "grad_norm": 0.14127394556999207, "learning_rate": 7.194383033523736e-05, "loss": 1.7257, "step": 8831 }, { "epoch": 0.8625, "grad_norm": 0.1493285596370697, "learning_rate": 7.191320884893869e-05, "loss": 1.732, "step": 8832 }, { "epoch": 0.86259765625, "grad_norm": 0.1443900614976883, "learning_rate": 7.188260764921128e-05, "loss": 1.7395, "step": 8833 }, { "epoch": 0.8626953125, "grad_norm": 0.1696087270975113, "learning_rate": 7.185202673911186e-05, "loss": 1.7004, "step": 8834 }, { "epoch": 0.86279296875, "grad_norm": 0.1520203799009323, "learning_rate": 7.18214661216953e-05, "loss": 1.7691, "step": 8835 }, { "epoch": 0.862890625, "grad_norm": 0.14679810404777527, "learning_rate": 7.179092580001425e-05, "loss": 1.738, "step": 8836 }, { "epoch": 0.86298828125, "grad_norm": 0.16233095526695251, "learning_rate": 7.176040577711943e-05, "loss": 1.7628, "step": 8837 }, { "epoch": 0.8630859375, "grad_norm": 0.1333589404821396, "learning_rate": 7.172990605605957e-05, "loss": 1.7499, "step": 8838 }, { "epoch": 0.86318359375, "grad_norm": 0.14552268385887146, "learning_rate": 7.169942663988127e-05, "loss": 1.7576, "step": 8839 }, { "epoch": 0.86328125, "grad_norm": 0.15766772627830505, "learning_rate": 7.166896753162916e-05, "loss": 1.7472, "step": 8840 }, { "epoch": 0.86337890625, "grad_norm": 0.15369905531406403, "learning_rate": 7.163852873434582e-05, "loss": 1.7815, "step": 8841 }, { "epoch": 0.8634765625, "grad_norm": 0.16127027571201324, "learning_rate": 7.160811025107187e-05, "loss": 1.8064, "step": 8842 }, { "epoch": 0.86357421875, "grad_norm": 0.15213990211486816, "learning_rate": 7.157771208484578e-05, "loss": 1.7921, "step": 8843 }, { "epoch": 0.863671875, "grad_norm": 0.16379162669181824, "learning_rate": 7.154733423870415e-05, "loss": 1.7859, "step": 8844 }, { "epoch": 0.86376953125, "grad_norm": 0.1482042521238327, "learning_rate": 7.151697671568138e-05, "loss": 1.7244, "step": 8845 }, { "epoch": 0.8638671875, "grad_norm": 0.18187211453914642, "learning_rate": 7.148663951880994e-05, "loss": 1.7894, "step": 8846 }, { "epoch": 0.86396484375, "grad_norm": 0.16086414456367493, "learning_rate": 7.145632265112023e-05, "loss": 1.7485, "step": 8847 }, { "epoch": 0.8640625, "grad_norm": 0.15109635889530182, "learning_rate": 7.142602611564065e-05, "loss": 1.7267, "step": 8848 }, { "epoch": 0.86416015625, "grad_norm": 0.15219809114933014, "learning_rate": 7.139574991539756e-05, "loss": 1.7272, "step": 8849 }, { "epoch": 0.8642578125, "grad_norm": 0.17691610753536224, "learning_rate": 7.136549405341529e-05, "loss": 1.7265, "step": 8850 }, { "epoch": 0.86435546875, "grad_norm": 0.14727534353733063, "learning_rate": 7.133525853271609e-05, "loss": 1.7376, "step": 8851 }, { "epoch": 0.864453125, "grad_norm": 0.15109893679618835, "learning_rate": 7.130504335632035e-05, "loss": 1.7652, "step": 8852 }, { "epoch": 
0.86455078125, "grad_norm": 0.15290209650993347, "learning_rate": 7.127484852724613e-05, "loss": 1.7624, "step": 8853 }, { "epoch": 0.8646484375, "grad_norm": 0.1534123569726944, "learning_rate": 7.12446740485097e-05, "loss": 1.7874, "step": 8854 }, { "epoch": 0.86474609375, "grad_norm": 0.13519920408725739, "learning_rate": 7.121451992312525e-05, "loss": 1.7444, "step": 8855 }, { "epoch": 0.86484375, "grad_norm": 0.15635065734386444, "learning_rate": 7.118438615410484e-05, "loss": 1.778, "step": 8856 }, { "epoch": 0.86494140625, "grad_norm": 0.1575852930545807, "learning_rate": 7.115427274445864e-05, "loss": 1.7501, "step": 8857 }, { "epoch": 0.8650390625, "grad_norm": 0.1726468801498413, "learning_rate": 7.112417969719467e-05, "loss": 1.7194, "step": 8858 }, { "epoch": 0.86513671875, "grad_norm": 0.16890330612659454, "learning_rate": 7.109410701531902e-05, "loss": 1.7992, "step": 8859 }, { "epoch": 0.865234375, "grad_norm": 0.1441158950328827, "learning_rate": 7.106405470183559e-05, "loss": 1.7696, "step": 8860 }, { "epoch": 0.86533203125, "grad_norm": 0.17783862352371216, "learning_rate": 7.103402275974641e-05, "loss": 1.8078, "step": 8861 }, { "epoch": 0.8654296875, "grad_norm": 0.1490647792816162, "learning_rate": 7.100401119205135e-05, "loss": 1.771, "step": 8862 }, { "epoch": 0.86552734375, "grad_norm": 0.14158184826374054, "learning_rate": 7.097402000174832e-05, "loss": 1.7382, "step": 8863 }, { "epoch": 0.865625, "grad_norm": 0.15081503987312317, "learning_rate": 7.09440491918332e-05, "loss": 1.7872, "step": 8864 }, { "epoch": 0.86572265625, "grad_norm": 0.15064221620559692, "learning_rate": 7.091409876529982e-05, "loss": 1.7583, "step": 8865 }, { "epoch": 0.8658203125, "grad_norm": 0.14604799449443817, "learning_rate": 7.088416872513995e-05, "loss": 1.7763, "step": 8866 }, { "epoch": 0.86591796875, "grad_norm": 0.15429124236106873, "learning_rate": 7.085425907434326e-05, "loss": 1.7142, "step": 8867 }, { "epoch": 0.866015625, "grad_norm": 0.15713296830654144, "learning_rate": 7.082436981589755e-05, "loss": 1.7423, "step": 8868 }, { "epoch": 0.86611328125, "grad_norm": 0.14792172610759735, "learning_rate": 7.079450095278845e-05, "loss": 1.757, "step": 8869 }, { "epoch": 0.8662109375, "grad_norm": 0.15563976764678955, "learning_rate": 7.076465248799963e-05, "loss": 1.8126, "step": 8870 }, { "epoch": 0.86630859375, "grad_norm": 0.14956894516944885, "learning_rate": 7.073482442451262e-05, "loss": 1.7215, "step": 8871 }, { "epoch": 0.86640625, "grad_norm": 0.14671257138252258, "learning_rate": 7.070501676530705e-05, "loss": 1.7715, "step": 8872 }, { "epoch": 0.86650390625, "grad_norm": 0.1661485880613327, "learning_rate": 7.067522951336045e-05, "loss": 1.7335, "step": 8873 }, { "epoch": 0.8666015625, "grad_norm": 0.1444893330335617, "learning_rate": 7.064546267164823e-05, "loss": 1.7086, "step": 8874 }, { "epoch": 0.86669921875, "grad_norm": 0.16219966113567352, "learning_rate": 7.06157162431439e-05, "loss": 1.8001, "step": 8875 }, { "epoch": 0.866796875, "grad_norm": 0.15740177035331726, "learning_rate": 7.058599023081879e-05, "loss": 1.7677, "step": 8876 }, { "epoch": 0.86689453125, "grad_norm": 0.1564018726348877, "learning_rate": 7.055628463764233e-05, "loss": 1.7619, "step": 8877 }, { "epoch": 0.8669921875, "grad_norm": 0.14710108935832977, "learning_rate": 7.052659946658181e-05, "loss": 1.7776, "step": 8878 }, { "epoch": 0.86708984375, "grad_norm": 0.16611860692501068, "learning_rate": 7.049693472060257e-05, "loss": 1.7855, "step": 8879 }, { "epoch": 0.8671875, "grad_norm": 
0.14660517871379852, "learning_rate": 7.04672904026678e-05, "loss": 1.7454, "step": 8880 }, { "epoch": 0.86728515625, "grad_norm": 0.15910331904888153, "learning_rate": 7.043766651573872e-05, "loss": 1.7567, "step": 8881 }, { "epoch": 0.8673828125, "grad_norm": 0.15078842639923096, "learning_rate": 7.040806306277452e-05, "loss": 1.7536, "step": 8882 }, { "epoch": 0.86748046875, "grad_norm": 0.1525779366493225, "learning_rate": 7.037848004673219e-05, "loss": 1.7691, "step": 8883 }, { "epoch": 0.867578125, "grad_norm": 0.14805588126182556, "learning_rate": 7.034891747056706e-05, "loss": 1.7925, "step": 8884 }, { "epoch": 0.86767578125, "grad_norm": 0.16027145087718964, "learning_rate": 7.031937533723197e-05, "loss": 1.7441, "step": 8885 }, { "epoch": 0.8677734375, "grad_norm": 0.14382827281951904, "learning_rate": 7.028985364967798e-05, "loss": 1.7875, "step": 8886 }, { "epoch": 0.86787109375, "grad_norm": 0.1611250936985016, "learning_rate": 7.026035241085409e-05, "loss": 1.7559, "step": 8887 }, { "epoch": 0.86796875, "grad_norm": 0.1449974626302719, "learning_rate": 7.02308716237071e-05, "loss": 1.7781, "step": 8888 }, { "epoch": 0.86806640625, "grad_norm": 0.15636828541755676, "learning_rate": 7.020141129118197e-05, "loss": 1.7877, "step": 8889 }, { "epoch": 0.8681640625, "grad_norm": 0.15541024506092072, "learning_rate": 7.017197141622145e-05, "loss": 1.7808, "step": 8890 }, { "epoch": 0.86826171875, "grad_norm": 0.19486522674560547, "learning_rate": 7.014255200176643e-05, "loss": 1.7654, "step": 8891 }, { "epoch": 0.868359375, "grad_norm": 0.1529075652360916, "learning_rate": 7.011315305075557e-05, "loss": 1.7405, "step": 8892 }, { "epoch": 0.86845703125, "grad_norm": 0.15753358602523804, "learning_rate": 7.00837745661256e-05, "loss": 1.7594, "step": 8893 }, { "epoch": 0.8685546875, "grad_norm": 0.17173835635185242, "learning_rate": 7.005441655081113e-05, "loss": 1.8152, "step": 8894 }, { "epoch": 0.86865234375, "grad_norm": 0.14495635032653809, "learning_rate": 7.002507900774485e-05, "loss": 1.8105, "step": 8895 }, { "epoch": 0.86875, "grad_norm": 0.18149994313716888, "learning_rate": 6.99957619398572e-05, "loss": 1.81, "step": 8896 }, { "epoch": 0.86884765625, "grad_norm": 0.16213470697402954, "learning_rate": 6.996646535007675e-05, "loss": 1.7048, "step": 8897 }, { "epoch": 0.8689453125, "grad_norm": 0.16573619842529297, "learning_rate": 6.993718924133005e-05, "loss": 1.7462, "step": 8898 }, { "epoch": 0.86904296875, "grad_norm": 0.16498231887817383, "learning_rate": 6.990793361654141e-05, "loss": 1.7622, "step": 8899 }, { "epoch": 0.869140625, "grad_norm": 0.15104158222675323, "learning_rate": 6.987869847863323e-05, "loss": 1.7418, "step": 8900 }, { "epoch": 0.86923828125, "grad_norm": 0.16500724852085114, "learning_rate": 6.984948383052588e-05, "loss": 1.8108, "step": 8901 }, { "epoch": 0.8693359375, "grad_norm": 0.15470604598522186, "learning_rate": 6.982028967513768e-05, "loss": 1.7739, "step": 8902 }, { "epoch": 0.86943359375, "grad_norm": 0.14457210898399353, "learning_rate": 6.979111601538474e-05, "loss": 1.7828, "step": 8903 }, { "epoch": 0.86953125, "grad_norm": 0.1662529557943344, "learning_rate": 6.976196285418131e-05, "loss": 1.7686, "step": 8904 }, { "epoch": 0.86962890625, "grad_norm": 0.13921701908111572, "learning_rate": 6.97328301944396e-05, "loss": 1.7123, "step": 8905 }, { "epoch": 0.8697265625, "grad_norm": 0.15632519125938416, "learning_rate": 6.970371803906963e-05, "loss": 1.7563, "step": 8906 }, { "epoch": 0.86982421875, "grad_norm": 0.16243310272693634, 
"learning_rate": 6.967462639097946e-05, "loss": 1.7262, "step": 8907 }, { "epoch": 0.869921875, "grad_norm": 0.15929079055786133, "learning_rate": 6.96455552530751e-05, "loss": 1.7769, "step": 8908 }, { "epoch": 0.87001953125, "grad_norm": 0.16266091167926788, "learning_rate": 6.961650462826053e-05, "loss": 1.7942, "step": 8909 }, { "epoch": 0.8701171875, "grad_norm": 0.14858464896678925, "learning_rate": 6.958747451943757e-05, "loss": 1.7745, "step": 8910 }, { "epoch": 0.87021484375, "grad_norm": 0.1757795214653015, "learning_rate": 6.955846492950608e-05, "loss": 1.7689, "step": 8911 }, { "epoch": 0.8703125, "grad_norm": 0.15529392659664154, "learning_rate": 6.952947586136396e-05, "loss": 1.7242, "step": 8912 }, { "epoch": 0.87041015625, "grad_norm": 0.153442844748497, "learning_rate": 6.950050731790686e-05, "loss": 1.7287, "step": 8913 }, { "epoch": 0.8705078125, "grad_norm": 0.17016641795635223, "learning_rate": 6.947155930202853e-05, "loss": 1.7776, "step": 8914 }, { "epoch": 0.87060546875, "grad_norm": 0.16354811191558838, "learning_rate": 6.944263181662062e-05, "loss": 1.7807, "step": 8915 }, { "epoch": 0.870703125, "grad_norm": 0.15941888093948364, "learning_rate": 6.941372486457272e-05, "loss": 1.735, "step": 8916 }, { "epoch": 0.87080078125, "grad_norm": 0.16541604697704315, "learning_rate": 6.938483844877239e-05, "loss": 1.7453, "step": 8917 }, { "epoch": 0.8708984375, "grad_norm": 0.15651006996631622, "learning_rate": 6.935597257210503e-05, "loss": 1.7185, "step": 8918 }, { "epoch": 0.87099609375, "grad_norm": 0.15301112830638885, "learning_rate": 6.932712723745424e-05, "loss": 1.7543, "step": 8919 }, { "epoch": 0.87109375, "grad_norm": 0.17927753925323486, "learning_rate": 6.929830244770134e-05, "loss": 1.7673, "step": 8920 }, { "epoch": 0.87119140625, "grad_norm": 0.15526936948299408, "learning_rate": 6.926949820572565e-05, "loss": 1.75, "step": 8921 }, { "epoch": 0.8712890625, "grad_norm": 0.15466520190238953, "learning_rate": 6.924071451440448e-05, "loss": 1.7932, "step": 8922 }, { "epoch": 0.87138671875, "grad_norm": 0.17954225838184357, "learning_rate": 6.92119513766131e-05, "loss": 1.7503, "step": 8923 }, { "epoch": 0.871484375, "grad_norm": 0.24672746658325195, "learning_rate": 6.918320879522463e-05, "loss": 1.7605, "step": 8924 }, { "epoch": 0.87158203125, "grad_norm": 0.15830571949481964, "learning_rate": 6.915448677311018e-05, "loss": 1.7831, "step": 8925 }, { "epoch": 0.8716796875, "grad_norm": 0.19131062924861908, "learning_rate": 6.912578531313897e-05, "loss": 1.7736, "step": 8926 }, { "epoch": 0.87177734375, "grad_norm": 0.15627118945121765, "learning_rate": 6.909710441817789e-05, "loss": 1.771, "step": 8927 }, { "epoch": 0.871875, "grad_norm": 0.14758579432964325, "learning_rate": 6.906844409109191e-05, "loss": 1.7139, "step": 8928 }, { "epoch": 0.87197265625, "grad_norm": 0.18663953244686127, "learning_rate": 6.903980433474402e-05, "loss": 1.8003, "step": 8929 }, { "epoch": 0.8720703125, "grad_norm": 0.14489060640335083, "learning_rate": 6.901118515199503e-05, "loss": 1.7778, "step": 8930 }, { "epoch": 0.87216796875, "grad_norm": 0.1438188999891281, "learning_rate": 6.898258654570374e-05, "loss": 1.7512, "step": 8931 }, { "epoch": 0.872265625, "grad_norm": 0.14114484190940857, "learning_rate": 6.895400851872685e-05, "loss": 1.7489, "step": 8932 }, { "epoch": 0.87236328125, "grad_norm": 0.15697292983531952, "learning_rate": 6.89254510739192e-05, "loss": 1.7926, "step": 8933 }, { "epoch": 0.8724609375, "grad_norm": 0.1565883904695511, "learning_rate": 
6.889691421413327e-05, "loss": 1.776, "step": 8934 }, { "epoch": 0.87255859375, "grad_norm": 0.13779932260513306, "learning_rate": 6.886839794221972e-05, "loss": 1.7414, "step": 8935 }, { "epoch": 0.87265625, "grad_norm": 0.15274763107299805, "learning_rate": 6.883990226102704e-05, "loss": 1.8274, "step": 8936 }, { "epoch": 0.87275390625, "grad_norm": 0.1495838463306427, "learning_rate": 6.881142717340176e-05, "loss": 1.7249, "step": 8937 }, { "epoch": 0.8728515625, "grad_norm": 0.158989816904068, "learning_rate": 6.878297268218819e-05, "loss": 1.7292, "step": 8938 }, { "epoch": 0.87294921875, "grad_norm": 0.14992043375968933, "learning_rate": 6.87545387902287e-05, "loss": 1.7597, "step": 8939 }, { "epoch": 0.873046875, "grad_norm": 0.15427491068840027, "learning_rate": 6.872612550036369e-05, "loss": 1.7405, "step": 8940 }, { "epoch": 0.87314453125, "grad_norm": 0.1506883054971695, "learning_rate": 6.869773281543127e-05, "loss": 1.7136, "step": 8941 }, { "epoch": 0.8732421875, "grad_norm": 0.14017271995544434, "learning_rate": 6.866936073826768e-05, "loss": 1.7563, "step": 8942 }, { "epoch": 0.87333984375, "grad_norm": 0.15259890258312225, "learning_rate": 6.864100927170703e-05, "loss": 1.7696, "step": 8943 }, { "epoch": 0.8734375, "grad_norm": 0.14505590498447418, "learning_rate": 6.861267841858142e-05, "loss": 1.7882, "step": 8944 }, { "epoch": 0.87353515625, "grad_norm": 0.15889889001846313, "learning_rate": 6.858436818172075e-05, "loss": 1.802, "step": 8945 }, { "epoch": 0.8736328125, "grad_norm": 0.1486695408821106, "learning_rate": 6.855607856395299e-05, "loss": 1.7525, "step": 8946 }, { "epoch": 0.87373046875, "grad_norm": 0.1625017672777176, "learning_rate": 6.852780956810413e-05, "loss": 1.7583, "step": 8947 }, { "epoch": 0.873828125, "grad_norm": 0.16133128106594086, "learning_rate": 6.84995611969979e-05, "loss": 1.7802, "step": 8948 }, { "epoch": 0.87392578125, "grad_norm": 0.15485677123069763, "learning_rate": 6.847133345345607e-05, "loss": 1.7187, "step": 8949 }, { "epoch": 0.8740234375, "grad_norm": 0.14030379056930542, "learning_rate": 6.844312634029833e-05, "loss": 1.7495, "step": 8950 }, { "epoch": 0.87412109375, "grad_norm": 0.17834116518497467, "learning_rate": 6.841493986034238e-05, "loss": 1.7412, "step": 8951 }, { "epoch": 0.87421875, "grad_norm": 0.14275477826595306, "learning_rate": 6.838677401640372e-05, "loss": 1.7587, "step": 8952 }, { "epoch": 0.87431640625, "grad_norm": 0.1668403297662735, "learning_rate": 6.835862881129586e-05, "loss": 1.7149, "step": 8953 }, { "epoch": 0.8744140625, "grad_norm": 0.16045695543289185, "learning_rate": 6.833050424783043e-05, "loss": 1.7369, "step": 8954 }, { "epoch": 0.87451171875, "grad_norm": 0.17281466722488403, "learning_rate": 6.830240032881663e-05, "loss": 1.7768, "step": 8955 }, { "epoch": 0.874609375, "grad_norm": 0.1683100461959839, "learning_rate": 6.827431705706184e-05, "loss": 1.7811, "step": 8956 }, { "epoch": 0.87470703125, "grad_norm": 0.14729095995426178, "learning_rate": 6.824625443537138e-05, "loss": 1.7927, "step": 8957 }, { "epoch": 0.8748046875, "grad_norm": 0.15930943191051483, "learning_rate": 6.821821246654845e-05, "loss": 1.7736, "step": 8958 }, { "epoch": 0.87490234375, "grad_norm": 0.16301298141479492, "learning_rate": 6.819019115339413e-05, "loss": 1.699, "step": 8959 }, { "epoch": 0.875, "grad_norm": 0.15527383983135223, "learning_rate": 6.816219049870752e-05, "loss": 1.7669, "step": 8960 }, { "epoch": 0.87509765625, "grad_norm": 0.16341912746429443, "learning_rate": 6.813421050528576e-05, "loss": 
1.7678, "step": 8961 }, { "epoch": 0.8751953125, "grad_norm": 0.16938906908035278, "learning_rate": 6.810625117592364e-05, "loss": 1.7523, "step": 8962 }, { "epoch": 0.87529296875, "grad_norm": 0.15817390382289886, "learning_rate": 6.807831251341411e-05, "loss": 1.7574, "step": 8963 }, { "epoch": 0.875390625, "grad_norm": 0.14526911079883575, "learning_rate": 6.805039452054799e-05, "loss": 1.7803, "step": 8964 }, { "epoch": 0.87548828125, "grad_norm": 0.16385993361473083, "learning_rate": 6.80224972001141e-05, "loss": 1.7495, "step": 8965 }, { "epoch": 0.8755859375, "grad_norm": 0.1443818211555481, "learning_rate": 6.799462055489905e-05, "loss": 1.7241, "step": 8966 }, { "epoch": 0.87568359375, "grad_norm": 0.1553456336259842, "learning_rate": 6.796676458768747e-05, "loss": 1.6997, "step": 8967 }, { "epoch": 0.87578125, "grad_norm": 0.14642269909381866, "learning_rate": 6.793892930126201e-05, "loss": 1.7504, "step": 8968 }, { "epoch": 0.87587890625, "grad_norm": 0.14061753451824188, "learning_rate": 6.791111469840311e-05, "loss": 1.7348, "step": 8969 }, { "epoch": 0.8759765625, "grad_norm": 0.14389000833034515, "learning_rate": 6.788332078188917e-05, "loss": 1.7404, "step": 8970 }, { "epoch": 0.87607421875, "grad_norm": 0.13614484667778015, "learning_rate": 6.785554755449664e-05, "loss": 1.7146, "step": 8971 }, { "epoch": 0.876171875, "grad_norm": 0.13347987830638885, "learning_rate": 6.78277950189998e-05, "loss": 1.7852, "step": 8972 }, { "epoch": 0.87626953125, "grad_norm": 0.13198091089725494, "learning_rate": 6.78000631781708e-05, "loss": 1.7293, "step": 8973 }, { "epoch": 0.8763671875, "grad_norm": 0.1395569145679474, "learning_rate": 6.777235203477985e-05, "loss": 1.8125, "step": 8974 }, { "epoch": 0.87646484375, "grad_norm": 0.13994932174682617, "learning_rate": 6.774466159159511e-05, "loss": 1.7693, "step": 8975 }, { "epoch": 0.8765625, "grad_norm": 0.1416814774274826, "learning_rate": 6.771699185138253e-05, "loss": 1.7632, "step": 8976 }, { "epoch": 0.87666015625, "grad_norm": 0.15085849165916443, "learning_rate": 6.768934281690611e-05, "loss": 1.792, "step": 8977 }, { "epoch": 0.8767578125, "grad_norm": 0.14363664388656616, "learning_rate": 6.766171449092774e-05, "loss": 1.7492, "step": 8978 }, { "epoch": 0.87685546875, "grad_norm": 0.16478238999843597, "learning_rate": 6.763410687620728e-05, "loss": 1.7784, "step": 8979 }, { "epoch": 0.876953125, "grad_norm": 0.14044199883937836, "learning_rate": 6.76065199755024e-05, "loss": 1.7766, "step": 8980 }, { "epoch": 0.87705078125, "grad_norm": 0.1719333678483963, "learning_rate": 6.757895379156878e-05, "loss": 1.7502, "step": 8981 }, { "epoch": 0.8771484375, "grad_norm": 0.14690881967544556, "learning_rate": 6.755140832716019e-05, "loss": 1.7634, "step": 8982 }, { "epoch": 0.87724609375, "grad_norm": 0.15851426124572754, "learning_rate": 6.752388358502803e-05, "loss": 1.784, "step": 8983 }, { "epoch": 0.87734375, "grad_norm": 0.1610487997531891, "learning_rate": 6.749637956792183e-05, "loss": 1.7835, "step": 8984 }, { "epoch": 0.87744140625, "grad_norm": 0.14001230895519257, "learning_rate": 6.7468896278589e-05, "loss": 1.7339, "step": 8985 }, { "epoch": 0.8775390625, "grad_norm": 0.1654079407453537, "learning_rate": 6.74414337197749e-05, "loss": 1.7646, "step": 8986 }, { "epoch": 0.87763671875, "grad_norm": 0.13475671410560608, "learning_rate": 6.741399189422276e-05, "loss": 1.7462, "step": 8987 }, { "epoch": 0.877734375, "grad_norm": 0.14911594986915588, "learning_rate": 6.738657080467369e-05, "loss": 1.7452, "step": 8988 }, { 
"epoch": 0.87783203125, "grad_norm": 0.13710373640060425, "learning_rate": 6.735917045386703e-05, "loss": 1.8211, "step": 8989 }, { "epoch": 0.8779296875, "grad_norm": 0.1564307063817978, "learning_rate": 6.733179084453966e-05, "loss": 1.778, "step": 8990 }, { "epoch": 0.87802734375, "grad_norm": 0.1486091911792755, "learning_rate": 6.730443197942661e-05, "loss": 1.7858, "step": 8991 }, { "epoch": 0.878125, "grad_norm": 0.1428772658109665, "learning_rate": 6.727709386126076e-05, "loss": 1.7399, "step": 8992 }, { "epoch": 0.87822265625, "grad_norm": 0.14292004704475403, "learning_rate": 6.724977649277305e-05, "loss": 1.7733, "step": 8993 }, { "epoch": 0.8783203125, "grad_norm": 0.15667007863521576, "learning_rate": 6.722247987669212e-05, "loss": 1.7501, "step": 8994 }, { "epoch": 0.87841796875, "grad_norm": 0.14067308604717255, "learning_rate": 6.719520401574467e-05, "loss": 1.7529, "step": 8995 }, { "epoch": 0.878515625, "grad_norm": 0.14893601834774017, "learning_rate": 6.716794891265542e-05, "loss": 1.7438, "step": 8996 }, { "epoch": 0.87861328125, "grad_norm": 0.18535102903842926, "learning_rate": 6.714071457014681e-05, "loss": 1.7315, "step": 8997 }, { "epoch": 0.8787109375, "grad_norm": 0.15291054546833038, "learning_rate": 6.711350099093936e-05, "loss": 1.7544, "step": 8998 }, { "epoch": 0.87880859375, "grad_norm": 0.15590864419937134, "learning_rate": 6.708630817775145e-05, "loss": 1.7345, "step": 8999 }, { "epoch": 0.87890625, "grad_norm": 0.16943489015102386, "learning_rate": 6.705913613329944e-05, "loss": 1.7696, "step": 9000 }, { "epoch": 0.87900390625, "grad_norm": 0.1424112319946289, "learning_rate": 6.703198486029743e-05, "loss": 1.7485, "step": 9001 }, { "epoch": 0.8791015625, "grad_norm": 0.15409930050373077, "learning_rate": 6.70048543614578e-05, "loss": 1.7536, "step": 9002 }, { "epoch": 0.87919921875, "grad_norm": 0.1362774670124054, "learning_rate": 6.697774463949054e-05, "loss": 1.7571, "step": 9003 }, { "epoch": 0.879296875, "grad_norm": 0.13522306084632874, "learning_rate": 6.695065569710366e-05, "loss": 1.7464, "step": 9004 }, { "epoch": 0.87939453125, "grad_norm": 0.14759598672389984, "learning_rate": 6.692358753700312e-05, "loss": 1.7617, "step": 9005 }, { "epoch": 0.8794921875, "grad_norm": 0.14188162982463837, "learning_rate": 6.68965401618928e-05, "loss": 1.7753, "step": 9006 }, { "epoch": 0.87958984375, "grad_norm": 0.14505144953727722, "learning_rate": 6.68695135744745e-05, "loss": 1.7703, "step": 9007 }, { "epoch": 0.8796875, "grad_norm": 0.1606588363647461, "learning_rate": 6.684250777744789e-05, "loss": 1.7345, "step": 9008 }, { "epoch": 0.87978515625, "grad_norm": 0.15906259417533875, "learning_rate": 6.681552277351067e-05, "loss": 1.7446, "step": 9009 }, { "epoch": 0.8798828125, "grad_norm": 0.14095468819141388, "learning_rate": 6.678855856535839e-05, "loss": 1.7472, "step": 9010 }, { "epoch": 0.87998046875, "grad_norm": 0.1462736427783966, "learning_rate": 6.67616151556845e-05, "loss": 1.7588, "step": 9011 }, { "epoch": 0.880078125, "grad_norm": 0.16378334164619446, "learning_rate": 6.673469254718043e-05, "loss": 1.758, "step": 9012 }, { "epoch": 0.88017578125, "grad_norm": 0.14525917172431946, "learning_rate": 6.67077907425355e-05, "loss": 1.7826, "step": 9013 }, { "epoch": 0.8802734375, "grad_norm": 0.16450010240077972, "learning_rate": 6.668090974443703e-05, "loss": 1.7828, "step": 9014 }, { "epoch": 0.88037109375, "grad_norm": 0.1559068262577057, "learning_rate": 6.665404955557004e-05, "loss": 1.7806, "step": 9015 }, { "epoch": 0.88046875, 
"grad_norm": 0.15184302628040314, "learning_rate": 6.662721017861778e-05, "loss": 1.7446, "step": 9016 }, { "epoch": 0.88056640625, "grad_norm": 0.16201409697532654, "learning_rate": 6.660039161626122e-05, "loss": 1.7413, "step": 9017 }, { "epoch": 0.8806640625, "grad_norm": 0.14214251935482025, "learning_rate": 6.657359387117926e-05, "loss": 1.7535, "step": 9018 }, { "epoch": 0.88076171875, "grad_norm": 0.14862637221813202, "learning_rate": 6.654681694604878e-05, "loss": 1.7707, "step": 9019 }, { "epoch": 0.880859375, "grad_norm": 0.14617513120174408, "learning_rate": 6.652006084354455e-05, "loss": 1.7547, "step": 9020 }, { "epoch": 0.88095703125, "grad_norm": 0.1521608531475067, "learning_rate": 6.649332556633932e-05, "loss": 1.7703, "step": 9021 }, { "epoch": 0.8810546875, "grad_norm": 0.15547946095466614, "learning_rate": 6.646661111710359e-05, "loss": 1.7585, "step": 9022 }, { "epoch": 0.88115234375, "grad_norm": 0.14925965666770935, "learning_rate": 6.643991749850601e-05, "loss": 1.7319, "step": 9023 }, { "epoch": 0.88125, "grad_norm": 0.1541021466255188, "learning_rate": 6.641324471321302e-05, "loss": 1.7583, "step": 9024 }, { "epoch": 0.88134765625, "grad_norm": 0.1536448746919632, "learning_rate": 6.638659276388893e-05, "loss": 1.7527, "step": 9025 }, { "epoch": 0.8814453125, "grad_norm": 0.15031377971172333, "learning_rate": 6.635996165319609e-05, "loss": 1.7916, "step": 9026 }, { "epoch": 0.88154296875, "grad_norm": 0.14721260964870453, "learning_rate": 6.63333513837947e-05, "loss": 1.7679, "step": 9027 }, { "epoch": 0.881640625, "grad_norm": 0.1566339135169983, "learning_rate": 6.630676195834291e-05, "loss": 1.7257, "step": 9028 }, { "epoch": 0.88173828125, "grad_norm": 0.14985966682434082, "learning_rate": 6.628019337949668e-05, "loss": 1.7522, "step": 9029 }, { "epoch": 0.8818359375, "grad_norm": 0.16468752920627594, "learning_rate": 6.625364564991007e-05, "loss": 1.7674, "step": 9030 }, { "epoch": 0.88193359375, "grad_norm": 0.15011411905288696, "learning_rate": 6.6227118772235e-05, "loss": 1.7505, "step": 9031 }, { "epoch": 0.88203125, "grad_norm": 0.15570296347141266, "learning_rate": 6.620061274912112e-05, "loss": 1.7218, "step": 9032 }, { "epoch": 0.88212890625, "grad_norm": 0.15863768756389618, "learning_rate": 6.617412758321627e-05, "loss": 1.8046, "step": 9033 }, { "epoch": 0.8822265625, "grad_norm": 0.14682498574256897, "learning_rate": 6.614766327716601e-05, "loss": 1.7917, "step": 9034 }, { "epoch": 0.88232421875, "grad_norm": 0.17875529825687408, "learning_rate": 6.6121219833614e-05, "loss": 1.7326, "step": 9035 }, { "epoch": 0.882421875, "grad_norm": 0.14025868475437164, "learning_rate": 6.609479725520151e-05, "loss": 1.7048, "step": 9036 }, { "epoch": 0.88251953125, "grad_norm": 0.15017220377922058, "learning_rate": 6.60683955445681e-05, "loss": 1.7032, "step": 9037 }, { "epoch": 0.8826171875, "grad_norm": 0.14794805645942688, "learning_rate": 6.604201470435105e-05, "loss": 1.7421, "step": 9038 }, { "epoch": 0.88271484375, "grad_norm": 0.1439708173274994, "learning_rate": 6.601565473718547e-05, "loss": 1.7417, "step": 9039 }, { "epoch": 0.8828125, "grad_norm": 0.13389739394187927, "learning_rate": 6.598931564570458e-05, "loss": 1.784, "step": 9040 }, { "epoch": 0.88291015625, "grad_norm": 0.13837160170078278, "learning_rate": 6.596299743253937e-05, "loss": 1.7917, "step": 9041 }, { "epoch": 0.8830078125, "grad_norm": 0.145233154296875, "learning_rate": 6.593670010031885e-05, "loss": 1.7433, "step": 9042 }, { "epoch": 0.88310546875, "grad_norm": 
0.1386052668094635, "learning_rate": 6.591042365166979e-05, "loss": 1.7529, "step": 9043 }, { "epoch": 0.883203125, "grad_norm": 0.1408596634864807, "learning_rate": 6.588416808921707e-05, "loss": 1.7571, "step": 9044 }, { "epoch": 0.88330078125, "grad_norm": 0.144602432847023, "learning_rate": 6.58579334155834e-05, "loss": 1.7537, "step": 9045 }, { "epoch": 0.8833984375, "grad_norm": 0.14536438882350922, "learning_rate": 6.583171963338932e-05, "loss": 1.7609, "step": 9046 }, { "epoch": 0.88349609375, "grad_norm": 0.14387334883213043, "learning_rate": 6.580552674525337e-05, "loss": 1.8401, "step": 9047 }, { "epoch": 0.88359375, "grad_norm": 0.15393562614917755, "learning_rate": 6.577935475379202e-05, "loss": 1.7655, "step": 9048 }, { "epoch": 0.88369140625, "grad_norm": 0.15046003460884094, "learning_rate": 6.575320366161964e-05, "loss": 1.7298, "step": 9049 }, { "epoch": 0.8837890625, "grad_norm": 0.1527356505393982, "learning_rate": 6.572707347134835e-05, "loss": 1.7789, "step": 9050 }, { "epoch": 0.88388671875, "grad_norm": 0.14552420377731323, "learning_rate": 6.570096418558849e-05, "loss": 1.7855, "step": 9051 }, { "epoch": 0.883984375, "grad_norm": 0.14477133750915527, "learning_rate": 6.567487580694812e-05, "loss": 1.7255, "step": 9052 }, { "epoch": 0.88408203125, "grad_norm": 0.14964592456817627, "learning_rate": 6.564880833803317e-05, "loss": 1.8029, "step": 9053 }, { "epoch": 0.8841796875, "grad_norm": 0.1447337120771408, "learning_rate": 6.562276178144758e-05, "loss": 1.7316, "step": 9054 }, { "epoch": 0.88427734375, "grad_norm": 0.1464676409959793, "learning_rate": 6.559673613979315e-05, "loss": 1.7678, "step": 9055 }, { "epoch": 0.884375, "grad_norm": 0.1426936686038971, "learning_rate": 6.557073141566969e-05, "loss": 1.782, "step": 9056 }, { "epoch": 0.88447265625, "grad_norm": 0.14879420399665833, "learning_rate": 6.554474761167471e-05, "loss": 1.6856, "step": 9057 }, { "epoch": 0.8845703125, "grad_norm": 0.145311176776886, "learning_rate": 6.551878473040387e-05, "loss": 1.7558, "step": 9058 }, { "epoch": 0.88466796875, "grad_norm": 0.1379217505455017, "learning_rate": 6.549284277445065e-05, "loss": 1.7682, "step": 9059 }, { "epoch": 0.884765625, "grad_norm": 0.142307847738266, "learning_rate": 6.546692174640633e-05, "loss": 1.7325, "step": 9060 }, { "epoch": 0.88486328125, "grad_norm": 0.1364014744758606, "learning_rate": 6.544102164886022e-05, "loss": 1.7221, "step": 9061 }, { "epoch": 0.8849609375, "grad_norm": 0.1519678384065628, "learning_rate": 6.541514248439955e-05, "loss": 1.7438, "step": 9062 }, { "epoch": 0.88505859375, "grad_norm": 0.1340998411178589, "learning_rate": 6.53892842556094e-05, "loss": 1.772, "step": 9063 }, { "epoch": 0.88515625, "grad_norm": 0.1457136571407318, "learning_rate": 6.53634469650727e-05, "loss": 1.7478, "step": 9064 }, { "epoch": 0.88525390625, "grad_norm": 0.13674339652061462, "learning_rate": 6.533763061537048e-05, "loss": 1.7716, "step": 9065 }, { "epoch": 0.8853515625, "grad_norm": 0.1629391312599182, "learning_rate": 6.531183520908153e-05, "loss": 1.7574, "step": 9066 }, { "epoch": 0.88544921875, "grad_norm": 0.1528262048959732, "learning_rate": 6.528606074878259e-05, "loss": 1.6812, "step": 9067 }, { "epoch": 0.885546875, "grad_norm": 0.16109755635261536, "learning_rate": 6.526030723704823e-05, "loss": 1.74, "step": 9068 }, { "epoch": 0.88564453125, "grad_norm": 0.15532805025577545, "learning_rate": 6.523457467645109e-05, "loss": 1.7778, "step": 9069 }, { "epoch": 0.8857421875, "grad_norm": 0.15064042806625366, "learning_rate": 
6.520886306956158e-05, "loss": 1.7593, "step": 9070 }, { "epoch": 0.88583984375, "grad_norm": 0.1535540372133255, "learning_rate": 6.518317241894802e-05, "loss": 1.7936, "step": 9071 }, { "epoch": 0.8859375, "grad_norm": 0.14110004901885986, "learning_rate": 6.515750272717677e-05, "loss": 1.7021, "step": 9072 }, { "epoch": 0.88603515625, "grad_norm": 0.1728866994380951, "learning_rate": 6.513185399681197e-05, "loss": 1.7444, "step": 9073 }, { "epoch": 0.8861328125, "grad_norm": 0.143694669008255, "learning_rate": 6.510622623041569e-05, "loss": 1.8082, "step": 9074 }, { "epoch": 0.88623046875, "grad_norm": 0.17309200763702393, "learning_rate": 6.50806194305479e-05, "loss": 1.7641, "step": 9075 }, { "epoch": 0.886328125, "grad_norm": 0.15016314387321472, "learning_rate": 6.50550335997665e-05, "loss": 1.8078, "step": 9076 }, { "epoch": 0.88642578125, "grad_norm": 0.14868691563606262, "learning_rate": 6.502946874062736e-05, "loss": 1.7699, "step": 9077 }, { "epoch": 0.8865234375, "grad_norm": 0.14277659356594086, "learning_rate": 6.500392485568404e-05, "loss": 1.7474, "step": 9078 }, { "epoch": 0.88662109375, "grad_norm": 0.14956317842006683, "learning_rate": 6.497840194748825e-05, "loss": 1.7838, "step": 9079 }, { "epoch": 0.88671875, "grad_norm": 0.143964484333992, "learning_rate": 6.495290001858952e-05, "loss": 1.7851, "step": 9080 }, { "epoch": 0.88681640625, "grad_norm": 0.1391940861940384, "learning_rate": 6.49274190715352e-05, "loss": 1.7664, "step": 9081 }, { "epoch": 0.8869140625, "grad_norm": 0.1579352170228958, "learning_rate": 6.490195910887065e-05, "loss": 1.7437, "step": 9082 }, { "epoch": 0.88701171875, "grad_norm": 0.15662088990211487, "learning_rate": 6.487652013313908e-05, "loss": 1.7831, "step": 9083 }, { "epoch": 0.887109375, "grad_norm": 0.14468419551849365, "learning_rate": 6.485110214688163e-05, "loss": 1.7628, "step": 9084 }, { "epoch": 0.88720703125, "grad_norm": 0.16219808161258698, "learning_rate": 6.48257051526373e-05, "loss": 1.7636, "step": 9085 }, { "epoch": 0.8873046875, "grad_norm": 0.13354481756687164, "learning_rate": 6.480032915294304e-05, "loss": 1.7518, "step": 9086 }, { "epoch": 0.88740234375, "grad_norm": 0.15562786161899567, "learning_rate": 6.477497415033375e-05, "loss": 1.7481, "step": 9087 }, { "epoch": 0.8875, "grad_norm": 0.1527252495288849, "learning_rate": 6.474964014734209e-05, "loss": 1.7586, "step": 9088 }, { "epoch": 0.88759765625, "grad_norm": 0.16971968114376068, "learning_rate": 6.47243271464987e-05, "loss": 1.7967, "step": 9089 }, { "epoch": 0.8876953125, "grad_norm": 0.14753904938697815, "learning_rate": 6.46990351503322e-05, "loss": 1.7214, "step": 9090 }, { "epoch": 0.88779296875, "grad_norm": 0.1403457075357437, "learning_rate": 6.467376416136898e-05, "loss": 1.7394, "step": 9091 }, { "epoch": 0.887890625, "grad_norm": 0.1408848762512207, "learning_rate": 6.464851418213334e-05, "loss": 1.6965, "step": 9092 }, { "epoch": 0.88798828125, "grad_norm": 0.14575520157814026, "learning_rate": 6.462328521514762e-05, "loss": 1.7401, "step": 9093 }, { "epoch": 0.8880859375, "grad_norm": 0.14113909006118774, "learning_rate": 6.459807726293198e-05, "loss": 1.7141, "step": 9094 }, { "epoch": 0.88818359375, "grad_norm": 0.1427791267633438, "learning_rate": 6.457289032800441e-05, "loss": 1.7621, "step": 9095 }, { "epoch": 0.88828125, "grad_norm": 0.13991563022136688, "learning_rate": 6.454772441288085e-05, "loss": 1.7869, "step": 9096 }, { "epoch": 0.88837890625, "grad_norm": 0.1381155252456665, "learning_rate": 6.452257952007519e-05, "loss": 1.7569, 
"step": 9097 }, { "epoch": 0.8884765625, "grad_norm": 0.14110061526298523, "learning_rate": 6.449745565209924e-05, "loss": 1.7803, "step": 9098 }, { "epoch": 0.88857421875, "grad_norm": 0.14035402238368988, "learning_rate": 6.447235281146247e-05, "loss": 1.7175, "step": 9099 }, { "epoch": 0.888671875, "grad_norm": 0.15068140625953674, "learning_rate": 6.44472710006726e-05, "loss": 1.7708, "step": 9100 }, { "epoch": 0.88876953125, "grad_norm": 0.14246007800102234, "learning_rate": 6.442221022223509e-05, "loss": 1.7667, "step": 9101 }, { "epoch": 0.8888671875, "grad_norm": 0.13406963646411896, "learning_rate": 6.439717047865314e-05, "loss": 1.8004, "step": 9102 }, { "epoch": 0.88896484375, "grad_norm": 0.13929390907287598, "learning_rate": 6.437215177242813e-05, "loss": 1.7265, "step": 9103 }, { "epoch": 0.8890625, "grad_norm": 0.1474560648202896, "learning_rate": 6.434715410605914e-05, "loss": 1.771, "step": 9104 }, { "epoch": 0.88916015625, "grad_norm": 0.13814018666744232, "learning_rate": 6.432217748204329e-05, "loss": 1.796, "step": 9105 }, { "epoch": 0.8892578125, "grad_norm": 0.14969682693481445, "learning_rate": 6.429722190287539e-05, "loss": 1.7568, "step": 9106 }, { "epoch": 0.88935546875, "grad_norm": 0.16294366121292114, "learning_rate": 6.427228737104838e-05, "loss": 1.7431, "step": 9107 }, { "epoch": 0.889453125, "grad_norm": 0.15061712265014648, "learning_rate": 6.424737388905302e-05, "loss": 1.7856, "step": 9108 }, { "epoch": 0.88955078125, "grad_norm": 0.13982802629470825, "learning_rate": 6.422248145937791e-05, "loss": 1.7486, "step": 9109 }, { "epoch": 0.8896484375, "grad_norm": 0.15623891353607178, "learning_rate": 6.419761008450957e-05, "loss": 1.7839, "step": 9110 }, { "epoch": 0.88974609375, "grad_norm": 0.14623387157917023, "learning_rate": 6.417275976693244e-05, "loss": 1.7943, "step": 9111 }, { "epoch": 0.88984375, "grad_norm": 0.14919981360435486, "learning_rate": 6.414793050912889e-05, "loss": 1.8367, "step": 9112 }, { "epoch": 0.88994140625, "grad_norm": 0.15404553711414337, "learning_rate": 6.412312231357902e-05, "loss": 1.7654, "step": 9113 }, { "epoch": 0.8900390625, "grad_norm": 0.13683779537677765, "learning_rate": 6.409833518276106e-05, "loss": 1.7657, "step": 9114 }, { "epoch": 0.89013671875, "grad_norm": 0.1433734893798828, "learning_rate": 6.407356911915098e-05, "loss": 1.7339, "step": 9115 }, { "epoch": 0.890234375, "grad_norm": 0.14284566044807434, "learning_rate": 6.404882412522277e-05, "loss": 1.7528, "step": 9116 }, { "epoch": 0.89033203125, "grad_norm": 0.13847610354423523, "learning_rate": 6.402410020344813e-05, "loss": 1.7097, "step": 9117 }, { "epoch": 0.8904296875, "grad_norm": 0.1466650515794754, "learning_rate": 6.399939735629679e-05, "loss": 1.7476, "step": 9118 }, { "epoch": 0.89052734375, "grad_norm": 0.13822951912879944, "learning_rate": 6.397471558623637e-05, "loss": 1.7473, "step": 9119 }, { "epoch": 0.890625, "grad_norm": 0.14378994703292847, "learning_rate": 6.395005489573232e-05, "loss": 1.7444, "step": 9120 }, { "epoch": 0.89072265625, "grad_norm": 0.15439282357692719, "learning_rate": 6.392541528724807e-05, "loss": 1.796, "step": 9121 }, { "epoch": 0.8908203125, "grad_norm": 0.13169732689857483, "learning_rate": 6.39007967632449e-05, "loss": 1.7058, "step": 9122 }, { "epoch": 0.89091796875, "grad_norm": 0.14256511628627777, "learning_rate": 6.387619932618197e-05, "loss": 1.7784, "step": 9123 }, { "epoch": 0.891015625, "grad_norm": 0.14332593977451324, "learning_rate": 6.385162297851632e-05, "loss": 1.7855, "step": 9124 }, { 
"epoch": 0.89111328125, "grad_norm": 0.13421782851219177, "learning_rate": 6.38270677227029e-05, "loss": 1.8024, "step": 9125 }, { "epoch": 0.8912109375, "grad_norm": 0.1416565626859665, "learning_rate": 6.380253356119463e-05, "loss": 1.7556, "step": 9126 }, { "epoch": 0.89130859375, "grad_norm": 0.14670808613300323, "learning_rate": 6.377802049644222e-05, "loss": 1.77, "step": 9127 }, { "epoch": 0.89140625, "grad_norm": 0.14956800639629364, "learning_rate": 6.375352853089425e-05, "loss": 1.7327, "step": 9128 }, { "epoch": 0.89150390625, "grad_norm": 0.15839111804962158, "learning_rate": 6.372905766699734e-05, "loss": 1.7388, "step": 9129 }, { "epoch": 0.8916015625, "grad_norm": 0.15262062847614288, "learning_rate": 6.370460790719591e-05, "loss": 1.751, "step": 9130 }, { "epoch": 0.89169921875, "grad_norm": 0.15131323039531708, "learning_rate": 6.368017925393221e-05, "loss": 1.7882, "step": 9131 }, { "epoch": 0.891796875, "grad_norm": 0.15755966305732727, "learning_rate": 6.365577170964646e-05, "loss": 1.7412, "step": 9132 }, { "epoch": 0.89189453125, "grad_norm": 0.16295813024044037, "learning_rate": 6.36313852767768e-05, "loss": 1.7643, "step": 9133 }, { "epoch": 0.8919921875, "grad_norm": 0.16187523305416107, "learning_rate": 6.360701995775919e-05, "loss": 1.7746, "step": 9134 }, { "epoch": 0.89208984375, "grad_norm": 0.18614119291305542, "learning_rate": 6.358267575502753e-05, "loss": 1.788, "step": 9135 }, { "epoch": 0.8921875, "grad_norm": 0.14622071385383606, "learning_rate": 6.35583526710135e-05, "loss": 1.7307, "step": 9136 }, { "epoch": 0.89228515625, "grad_norm": 0.1564631164073944, "learning_rate": 6.353405070814694e-05, "loss": 1.7428, "step": 9137 }, { "epoch": 0.8923828125, "grad_norm": 0.16559675335884094, "learning_rate": 6.350976986885525e-05, "loss": 1.7339, "step": 9138 }, { "epoch": 0.89248046875, "grad_norm": 0.1618010252714157, "learning_rate": 6.348551015556389e-05, "loss": 1.7499, "step": 9139 }, { "epoch": 0.892578125, "grad_norm": 0.15735086798667908, "learning_rate": 6.346127157069621e-05, "loss": 1.7296, "step": 9140 }, { "epoch": 0.89267578125, "grad_norm": 0.15961287915706635, "learning_rate": 6.343705411667347e-05, "loss": 1.7646, "step": 9141 }, { "epoch": 0.8927734375, "grad_norm": 0.1535537838935852, "learning_rate": 6.341285779591474e-05, "loss": 1.7195, "step": 9142 }, { "epoch": 0.89287109375, "grad_norm": 0.16102269291877747, "learning_rate": 6.3388682610837e-05, "loss": 1.7585, "step": 9143 }, { "epoch": 0.89296875, "grad_norm": 0.17645186185836792, "learning_rate": 6.33645285638552e-05, "loss": 1.7661, "step": 9144 }, { "epoch": 0.89306640625, "grad_norm": 0.13636209070682526, "learning_rate": 6.334039565738205e-05, "loss": 1.7709, "step": 9145 }, { "epoch": 0.8931640625, "grad_norm": 0.17085476219654083, "learning_rate": 6.331628389382823e-05, "loss": 1.7587, "step": 9146 }, { "epoch": 0.89326171875, "grad_norm": 0.1724812239408493, "learning_rate": 6.329219327560233e-05, "loss": 1.7341, "step": 9147 }, { "epoch": 0.893359375, "grad_norm": 0.1587895303964615, "learning_rate": 6.326812380511074e-05, "loss": 1.7764, "step": 9148 }, { "epoch": 0.89345703125, "grad_norm": 0.15250571072101593, "learning_rate": 6.324407548475779e-05, "loss": 1.7804, "step": 9149 }, { "epoch": 0.8935546875, "grad_norm": 0.16541774570941925, "learning_rate": 6.322004831694572e-05, "loss": 1.7643, "step": 9150 }, { "epoch": 0.89365234375, "grad_norm": 0.15906384587287903, "learning_rate": 6.319604230407464e-05, "loss": 1.7389, "step": 9151 }, { "epoch": 0.89375, 
"grad_norm": 0.16064941883087158, "learning_rate": 6.317205744854252e-05, "loss": 1.7806, "step": 9152 }, { "epoch": 0.89384765625, "grad_norm": 0.16109368205070496, "learning_rate": 6.314809375274523e-05, "loss": 1.7808, "step": 9153 }, { "epoch": 0.8939453125, "grad_norm": 0.14454041421413422, "learning_rate": 6.312415121907651e-05, "loss": 1.7284, "step": 9154 }, { "epoch": 0.89404296875, "grad_norm": 0.19029155373573303, "learning_rate": 6.310022984992803e-05, "loss": 1.7709, "step": 9155 }, { "epoch": 0.894140625, "grad_norm": 0.14939555525779724, "learning_rate": 6.307632964768936e-05, "loss": 1.7388, "step": 9156 }, { "epoch": 0.89423828125, "grad_norm": 0.15885646641254425, "learning_rate": 6.305245061474787e-05, "loss": 1.7464, "step": 9157 }, { "epoch": 0.8943359375, "grad_norm": 0.18014976382255554, "learning_rate": 6.302859275348889e-05, "loss": 1.7504, "step": 9158 }, { "epoch": 0.89443359375, "grad_norm": 0.1360538899898529, "learning_rate": 6.300475606629558e-05, "loss": 1.7559, "step": 9159 }, { "epoch": 0.89453125, "grad_norm": 0.1506013721227646, "learning_rate": 6.298094055554902e-05, "loss": 1.6943, "step": 9160 }, { "epoch": 0.89462890625, "grad_norm": 0.14334556460380554, "learning_rate": 6.295714622362819e-05, "loss": 1.7229, "step": 9161 }, { "epoch": 0.8947265625, "grad_norm": 0.1452188342809677, "learning_rate": 6.293337307290992e-05, "loss": 1.7733, "step": 9162 }, { "epoch": 0.89482421875, "grad_norm": 0.1456870138645172, "learning_rate": 6.290962110576894e-05, "loss": 1.7243, "step": 9163 }, { "epoch": 0.894921875, "grad_norm": 0.14569108188152313, "learning_rate": 6.288589032457785e-05, "loss": 1.7743, "step": 9164 }, { "epoch": 0.89501953125, "grad_norm": 0.1497659534215927, "learning_rate": 6.286218073170721e-05, "loss": 1.7238, "step": 9165 }, { "epoch": 0.8951171875, "grad_norm": 0.15693412721157074, "learning_rate": 6.283849232952529e-05, "loss": 1.7737, "step": 9166 }, { "epoch": 0.89521484375, "grad_norm": 0.14548145234584808, "learning_rate": 6.281482512039841e-05, "loss": 1.7817, "step": 9167 }, { "epoch": 0.8953125, "grad_norm": 0.1730877310037613, "learning_rate": 6.27911791066907e-05, "loss": 1.7772, "step": 9168 }, { "epoch": 0.89541015625, "grad_norm": 0.1413734257221222, "learning_rate": 6.276755429076422e-05, "loss": 1.7288, "step": 9169 }, { "epoch": 0.8955078125, "grad_norm": 0.163112074136734, "learning_rate": 6.274395067497885e-05, "loss": 1.7655, "step": 9170 }, { "epoch": 0.89560546875, "grad_norm": 0.15582147240638733, "learning_rate": 6.272036826169236e-05, "loss": 1.8023, "step": 9171 }, { "epoch": 0.895703125, "grad_norm": 0.16078436374664307, "learning_rate": 6.269680705326052e-05, "loss": 1.756, "step": 9172 }, { "epoch": 0.89580078125, "grad_norm": 0.1939917504787445, "learning_rate": 6.267326705203676e-05, "loss": 1.7477, "step": 9173 }, { "epoch": 0.8958984375, "grad_norm": 0.1409221589565277, "learning_rate": 6.264974826037261e-05, "loss": 1.7503, "step": 9174 }, { "epoch": 0.89599609375, "grad_norm": 0.16805116832256317, "learning_rate": 6.262625068061729e-05, "loss": 1.7405, "step": 9175 }, { "epoch": 0.89609375, "grad_norm": 0.16198095679283142, "learning_rate": 6.260277431511812e-05, "loss": 1.809, "step": 9176 }, { "epoch": 0.89619140625, "grad_norm": 0.15132038295269012, "learning_rate": 6.257931916622013e-05, "loss": 1.7694, "step": 9177 }, { "epoch": 0.8962890625, "grad_norm": 0.14060378074645996, "learning_rate": 6.255588523626626e-05, "loss": 1.7311, "step": 9178 }, { "epoch": 0.89638671875, "grad_norm": 
0.17685578763484955, "learning_rate": 6.253247252759739e-05, "loss": 1.8004, "step": 9179 }, { "epoch": 0.896484375, "grad_norm": 0.14892169833183289, "learning_rate": 6.25090810425522e-05, "loss": 1.7731, "step": 9180 }, { "epoch": 0.89658203125, "grad_norm": 0.15961338579654694, "learning_rate": 6.248571078346732e-05, "loss": 1.7576, "step": 9181 }, { "epoch": 0.8966796875, "grad_norm": 0.14980192482471466, "learning_rate": 6.246236175267722e-05, "loss": 1.7606, "step": 9182 }, { "epoch": 0.89677734375, "grad_norm": 0.1498861163854599, "learning_rate": 6.243903395251427e-05, "loss": 1.7496, "step": 9183 }, { "epoch": 0.896875, "grad_norm": 0.14739753305912018, "learning_rate": 6.241572738530871e-05, "loss": 1.778, "step": 9184 }, { "epoch": 0.89697265625, "grad_norm": 0.16511006653308868, "learning_rate": 6.239244205338865e-05, "loss": 1.7203, "step": 9185 }, { "epoch": 0.8970703125, "grad_norm": 0.15513190627098083, "learning_rate": 6.236917795908012e-05, "loss": 1.7757, "step": 9186 }, { "epoch": 0.89716796875, "grad_norm": 0.14113330841064453, "learning_rate": 6.234593510470695e-05, "loss": 1.7637, "step": 9187 }, { "epoch": 0.897265625, "grad_norm": 0.16250328719615936, "learning_rate": 6.232271349259093e-05, "loss": 1.7543, "step": 9188 }, { "epoch": 0.89736328125, "grad_norm": 0.1584545075893402, "learning_rate": 6.229951312505167e-05, "loss": 1.7575, "step": 9189 }, { "epoch": 0.8974609375, "grad_norm": 0.1363738477230072, "learning_rate": 6.22763340044067e-05, "loss": 1.7576, "step": 9190 }, { "epoch": 0.89755859375, "grad_norm": 0.18538932502269745, "learning_rate": 6.225317613297137e-05, "loss": 1.7255, "step": 9191 }, { "epoch": 0.89765625, "grad_norm": 0.1626472771167755, "learning_rate": 6.223003951305901e-05, "loss": 1.7303, "step": 9192 }, { "epoch": 0.89775390625, "grad_norm": 0.17060506343841553, "learning_rate": 6.220692414698075e-05, "loss": 1.7276, "step": 9193 }, { "epoch": 0.8978515625, "grad_norm": 0.17397665977478027, "learning_rate": 6.218383003704558e-05, "loss": 1.7361, "step": 9194 }, { "epoch": 0.89794921875, "grad_norm": 0.1625894457101822, "learning_rate": 6.216075718556039e-05, "loss": 1.7689, "step": 9195 }, { "epoch": 0.898046875, "grad_norm": 0.16648687422275543, "learning_rate": 6.213770559482996e-05, "loss": 1.7416, "step": 9196 }, { "epoch": 0.89814453125, "grad_norm": 0.21388018131256104, "learning_rate": 6.211467526715698e-05, "loss": 1.7421, "step": 9197 }, { "epoch": 0.8982421875, "grad_norm": 0.14452412724494934, "learning_rate": 6.209166620484192e-05, "loss": 1.7609, "step": 9198 }, { "epoch": 0.89833984375, "grad_norm": 0.20300887525081635, "learning_rate": 6.206867841018322e-05, "loss": 1.8124, "step": 9199 }, { "epoch": 0.8984375, "grad_norm": 0.17442427575588226, "learning_rate": 6.204571188547717e-05, "loss": 1.749, "step": 9200 }, { "epoch": 0.89853515625, "grad_norm": 0.16951903700828552, "learning_rate": 6.202276663301787e-05, "loss": 1.7872, "step": 9201 }, { "epoch": 0.8986328125, "grad_norm": 0.15523983538150787, "learning_rate": 6.199984265509738e-05, "loss": 1.7313, "step": 9202 }, { "epoch": 0.89873046875, "grad_norm": 0.18640001118183136, "learning_rate": 6.197693995400558e-05, "loss": 1.7979, "step": 9203 }, { "epoch": 0.898828125, "grad_norm": 0.1760626584291458, "learning_rate": 6.195405853203028e-05, "loss": 1.7485, "step": 9204 }, { "epoch": 0.89892578125, "grad_norm": 0.16055090725421906, "learning_rate": 6.193119839145709e-05, "loss": 1.746, "step": 9205 }, { "epoch": 0.8990234375, "grad_norm": 0.19048671424388885, 
"learning_rate": 6.19083595345696e-05, "loss": 1.7611, "step": 9206 }, { "epoch": 0.89912109375, "grad_norm": 0.16136963665485382, "learning_rate": 6.188554196364918e-05, "loss": 1.7356, "step": 9207 }, { "epoch": 0.89921875, "grad_norm": 0.1458498239517212, "learning_rate": 6.186274568097507e-05, "loss": 1.7773, "step": 9208 }, { "epoch": 0.89931640625, "grad_norm": 0.15970750153064728, "learning_rate": 6.183997068882442e-05, "loss": 1.74, "step": 9209 }, { "epoch": 0.8994140625, "grad_norm": 0.14664967358112335, "learning_rate": 6.18172169894723e-05, "loss": 1.7482, "step": 9210 }, { "epoch": 0.89951171875, "grad_norm": 0.14693546295166016, "learning_rate": 6.179448458519155e-05, "loss": 1.8152, "step": 9211 }, { "epoch": 0.899609375, "grad_norm": 0.13705562055110931, "learning_rate": 6.177177347825298e-05, "loss": 1.7612, "step": 9212 }, { "epoch": 0.89970703125, "grad_norm": 0.17192956805229187, "learning_rate": 6.17490836709252e-05, "loss": 1.7784, "step": 9213 }, { "epoch": 0.8998046875, "grad_norm": 0.15427419543266296, "learning_rate": 6.172641516547476e-05, "loss": 1.7875, "step": 9214 }, { "epoch": 0.89990234375, "grad_norm": 0.15456998348236084, "learning_rate": 6.170376796416598e-05, "loss": 1.7405, "step": 9215 }, { "epoch": 0.9, "grad_norm": 0.1818009614944458, "learning_rate": 6.168114206926115e-05, "loss": 1.7738, "step": 9216 }, { "epoch": 0.90009765625, "grad_norm": 0.14666244387626648, "learning_rate": 6.165853748302038e-05, "loss": 1.753, "step": 9217 }, { "epoch": 0.9001953125, "grad_norm": 0.161835715174675, "learning_rate": 6.163595420770167e-05, "loss": 1.7843, "step": 9218 }, { "epoch": 0.90029296875, "grad_norm": 0.17683003842830658, "learning_rate": 6.161339224556092e-05, "loss": 1.7823, "step": 9219 }, { "epoch": 0.900390625, "grad_norm": 0.1464739888906479, "learning_rate": 6.159085159885186e-05, "loss": 1.7453, "step": 9220 }, { "epoch": 0.90048828125, "grad_norm": 0.1449514925479889, "learning_rate": 6.15683322698261e-05, "loss": 1.766, "step": 9221 }, { "epoch": 0.9005859375, "grad_norm": 0.15959210693836212, "learning_rate": 6.15458342607331e-05, "loss": 1.7717, "step": 9222 }, { "epoch": 0.90068359375, "grad_norm": 0.14225754141807556, "learning_rate": 6.152335757382018e-05, "loss": 1.7088, "step": 9223 }, { "epoch": 0.90078125, "grad_norm": 0.14151281118392944, "learning_rate": 6.150090221133264e-05, "loss": 1.8027, "step": 9224 }, { "epoch": 0.90087890625, "grad_norm": 0.14406703412532806, "learning_rate": 6.147846817551354e-05, "loss": 1.7555, "step": 9225 }, { "epoch": 0.9009765625, "grad_norm": 0.15447044372558594, "learning_rate": 6.145605546860382e-05, "loss": 1.7213, "step": 9226 }, { "epoch": 0.90107421875, "grad_norm": 0.13846048712730408, "learning_rate": 6.143366409284236e-05, "loss": 1.7403, "step": 9227 }, { "epoch": 0.901171875, "grad_norm": 0.14559496939182281, "learning_rate": 6.141129405046585e-05, "loss": 1.7424, "step": 9228 }, { "epoch": 0.90126953125, "grad_norm": 0.15747955441474915, "learning_rate": 6.138894534370878e-05, "loss": 1.8032, "step": 9229 }, { "epoch": 0.9013671875, "grad_norm": 0.1382923275232315, "learning_rate": 6.136661797480368e-05, "loss": 1.7522, "step": 9230 }, { "epoch": 0.90146484375, "grad_norm": 0.14610570669174194, "learning_rate": 6.134431194598081e-05, "loss": 1.7617, "step": 9231 }, { "epoch": 0.9015625, "grad_norm": 0.14669254422187805, "learning_rate": 6.132202725946837e-05, "loss": 1.7067, "step": 9232 }, { "epoch": 0.90166015625, "grad_norm": 0.1702256202697754, "learning_rate": 6.129976391749237e-05, 
"loss": 1.8126, "step": 9233 }, { "epoch": 0.9017578125, "grad_norm": 0.14415661990642548, "learning_rate": 6.127752192227676e-05, "loss": 1.7295, "step": 9234 }, { "epoch": 0.90185546875, "grad_norm": 0.13894160091876984, "learning_rate": 6.12553012760433e-05, "loss": 1.7845, "step": 9235 }, { "epoch": 0.901953125, "grad_norm": 0.15731094777584076, "learning_rate": 6.123310198101164e-05, "loss": 1.7664, "step": 9236 }, { "epoch": 0.90205078125, "grad_norm": 0.14713801443576813, "learning_rate": 6.121092403939924e-05, "loss": 1.793, "step": 9237 }, { "epoch": 0.9021484375, "grad_norm": 0.1550867259502411, "learning_rate": 6.11887674534216e-05, "loss": 1.7568, "step": 9238 }, { "epoch": 0.90224609375, "grad_norm": 0.15890029072761536, "learning_rate": 6.116663222529183e-05, "loss": 1.749, "step": 9239 }, { "epoch": 0.90234375, "grad_norm": 0.13564661145210266, "learning_rate": 6.114451835722114e-05, "loss": 1.7908, "step": 9240 }, { "epoch": 0.90244140625, "grad_norm": 0.15463151037693024, "learning_rate": 6.112242585141848e-05, "loss": 1.7621, "step": 9241 }, { "epoch": 0.9025390625, "grad_norm": 0.16384688019752502, "learning_rate": 6.110035471009071e-05, "loss": 1.7603, "step": 9242 }, { "epoch": 0.90263671875, "grad_norm": 0.14952410757541656, "learning_rate": 6.107830493544248e-05, "loss": 1.7949, "step": 9243 }, { "epoch": 0.902734375, "grad_norm": 0.1597311794757843, "learning_rate": 6.10562765296764e-05, "loss": 1.7782, "step": 9244 }, { "epoch": 0.90283203125, "grad_norm": 0.1529453694820404, "learning_rate": 6.103426949499299e-05, "loss": 1.7514, "step": 9245 }, { "epoch": 0.9029296875, "grad_norm": 0.15587131679058075, "learning_rate": 6.1012283833590465e-05, "loss": 1.7459, "step": 9246 }, { "epoch": 0.90302734375, "grad_norm": 0.1479564905166626, "learning_rate": 6.099031954766501e-05, "loss": 1.7804, "step": 9247 }, { "epoch": 0.903125, "grad_norm": 0.15163537859916687, "learning_rate": 6.0968376639410676e-05, "loss": 1.7353, "step": 9248 }, { "epoch": 0.90322265625, "grad_norm": 0.15399906039237976, "learning_rate": 6.0946455111019396e-05, "loss": 1.8013, "step": 9249 }, { "epoch": 0.9033203125, "grad_norm": 0.15000757575035095, "learning_rate": 6.092455496468088e-05, "loss": 1.7958, "step": 9250 }, { "epoch": 0.90341796875, "grad_norm": 0.14150485396385193, "learning_rate": 6.090267620258276e-05, "loss": 1.7698, "step": 9251 }, { "epoch": 0.903515625, "grad_norm": 0.13819342851638794, "learning_rate": 6.088081882691061e-05, "loss": 1.7441, "step": 9252 }, { "epoch": 0.90361328125, "grad_norm": 0.14259935915470123, "learning_rate": 6.085898283984773e-05, "loss": 1.8225, "step": 9253 }, { "epoch": 0.9037109375, "grad_norm": 0.15385563671588898, "learning_rate": 6.08371682435753e-05, "loss": 1.7581, "step": 9254 }, { "epoch": 0.90380859375, "grad_norm": 0.14810112118721008, "learning_rate": 6.081537504027247e-05, "loss": 1.7336, "step": 9255 }, { "epoch": 0.90390625, "grad_norm": 0.16262555122375488, "learning_rate": 6.07936032321162e-05, "loss": 1.7644, "step": 9256 }, { "epoch": 0.90400390625, "grad_norm": 0.15924111008644104, "learning_rate": 6.077185282128124e-05, "loss": 1.736, "step": 9257 }, { "epoch": 0.9041015625, "grad_norm": 0.15045799314975739, "learning_rate": 6.0750123809940286e-05, "loss": 1.7618, "step": 9258 }, { "epoch": 0.90419921875, "grad_norm": 0.15301521122455597, "learning_rate": 6.0728416200263915e-05, "loss": 1.7552, "step": 9259 }, { "epoch": 0.904296875, "grad_norm": 0.15323443710803986, "learning_rate": 6.070672999442046e-05, "loss": 1.7594, "step": 
9260 }, { "epoch": 0.90439453125, "grad_norm": 0.15122078359127045, "learning_rate": 6.068506519457622e-05, "loss": 1.7956, "step": 9261 }, { "epoch": 0.9044921875, "grad_norm": 0.16282887756824493, "learning_rate": 6.0663421802895304e-05, "loss": 1.7741, "step": 9262 }, { "epoch": 0.90458984375, "grad_norm": 0.1625196784734726, "learning_rate": 6.0641799821539725e-05, "loss": 1.785, "step": 9263 }, { "epoch": 0.9046875, "grad_norm": 0.16526097059249878, "learning_rate": 6.0620199252669274e-05, "loss": 1.7409, "step": 9264 }, { "epoch": 0.90478515625, "grad_norm": 0.17061421275138855, "learning_rate": 6.0598620098441655e-05, "loss": 1.7957, "step": 9265 }, { "epoch": 0.9048828125, "grad_norm": 0.17854280769824982, "learning_rate": 6.0577062361012506e-05, "loss": 1.7685, "step": 9266 }, { "epoch": 0.90498046875, "grad_norm": 0.14576488733291626, "learning_rate": 6.0555526042535214e-05, "loss": 1.7656, "step": 9267 }, { "epoch": 0.905078125, "grad_norm": 0.15388718247413635, "learning_rate": 6.0534011145161034e-05, "loss": 1.7502, "step": 9268 }, { "epoch": 0.90517578125, "grad_norm": 0.1552794724702835, "learning_rate": 6.051251767103916e-05, "loss": 1.7912, "step": 9269 }, { "epoch": 0.9052734375, "grad_norm": 0.13973233103752136, "learning_rate": 6.049104562231661e-05, "loss": 1.7722, "step": 9270 }, { "epoch": 0.90537109375, "grad_norm": 0.1374737024307251, "learning_rate": 6.0469595001138176e-05, "loss": 1.7612, "step": 9271 }, { "epoch": 0.90546875, "grad_norm": 0.16162343323230743, "learning_rate": 6.044816580964662e-05, "loss": 1.7567, "step": 9272 }, { "epoch": 0.90556640625, "grad_norm": 0.13844813406467438, "learning_rate": 6.04267580499826e-05, "loss": 1.7628, "step": 9273 }, { "epoch": 0.9056640625, "grad_norm": 0.13303472101688385, "learning_rate": 6.040537172428448e-05, "loss": 1.7786, "step": 9274 }, { "epoch": 0.90576171875, "grad_norm": 0.17523930966854095, "learning_rate": 6.038400683468857e-05, "loss": 1.769, "step": 9275 }, { "epoch": 0.905859375, "grad_norm": 0.14389726519584656, "learning_rate": 6.036266338332906e-05, "loss": 1.7658, "step": 9276 }, { "epoch": 0.90595703125, "grad_norm": 0.13554732501506805, "learning_rate": 6.0341341372338e-05, "loss": 1.7781, "step": 9277 }, { "epoch": 0.9060546875, "grad_norm": 0.143146350979805, "learning_rate": 6.032004080384519e-05, "loss": 1.7729, "step": 9278 }, { "epoch": 0.90615234375, "grad_norm": 0.19705277681350708, "learning_rate": 6.029876167997841e-05, "loss": 1.7495, "step": 9279 }, { "epoch": 0.90625, "grad_norm": 0.1463702768087387, "learning_rate": 6.0277504002863274e-05, "loss": 1.7329, "step": 9280 }, { "epoch": 0.90634765625, "grad_norm": 0.14149829745292664, "learning_rate": 6.025626777462322e-05, "loss": 1.7838, "step": 9281 }, { "epoch": 0.9064453125, "grad_norm": 0.14343585073947906, "learning_rate": 6.0235052997379544e-05, "loss": 1.7598, "step": 9282 }, { "epoch": 0.90654296875, "grad_norm": 0.1432090699672699, "learning_rate": 6.021385967325142e-05, "loss": 1.7686, "step": 9283 }, { "epoch": 0.906640625, "grad_norm": 0.1552753448486328, "learning_rate": 6.0192687804355926e-05, "loss": 1.701, "step": 9284 }, { "epoch": 0.90673828125, "grad_norm": 0.13807594776153564, "learning_rate": 6.017153739280787e-05, "loss": 1.7894, "step": 9285 }, { "epoch": 0.9068359375, "grad_norm": 0.1494501680135727, "learning_rate": 6.015040844071997e-05, "loss": 1.7299, "step": 9286 }, { "epoch": 0.90693359375, "grad_norm": 0.14437496662139893, "learning_rate": 6.012930095020295e-05, "loss": 1.7429, "step": 9287 }, { "epoch": 
0.90703125, "grad_norm": 0.14841453731060028, "learning_rate": 6.010821492336513e-05, "loss": 1.7726, "step": 9288 }, { "epoch": 0.90712890625, "grad_norm": 0.14920414984226227, "learning_rate": 6.008715036231289e-05, "loss": 1.7512, "step": 9289 }, { "epoch": 0.9072265625, "grad_norm": 0.15237167477607727, "learning_rate": 6.006610726915036e-05, "loss": 1.7484, "step": 9290 }, { "epoch": 0.90732421875, "grad_norm": 0.15943516790866852, "learning_rate": 6.0045085645979626e-05, "loss": 1.7373, "step": 9291 }, { "epoch": 0.907421875, "grad_norm": 0.15199552476406097, "learning_rate": 6.002408549490046e-05, "loss": 1.7478, "step": 9292 }, { "epoch": 0.90751953125, "grad_norm": 0.14914779365062714, "learning_rate": 6.00031068180106e-05, "loss": 1.7197, "step": 9293 }, { "epoch": 0.9076171875, "grad_norm": 0.14346224069595337, "learning_rate": 5.9982149617405734e-05, "loss": 1.7882, "step": 9294 }, { "epoch": 0.90771484375, "grad_norm": 0.13666944205760956, "learning_rate": 5.996121389517921e-05, "loss": 1.7287, "step": 9295 }, { "epoch": 0.9078125, "grad_norm": 0.16056902706623077, "learning_rate": 5.994029965342233e-05, "loss": 1.743, "step": 9296 }, { "epoch": 0.90791015625, "grad_norm": 0.15428398549556732, "learning_rate": 5.9919406894224274e-05, "loss": 1.7574, "step": 9297 }, { "epoch": 0.9080078125, "grad_norm": 0.15970277786254883, "learning_rate": 5.9898535619672037e-05, "loss": 1.7446, "step": 9298 }, { "epoch": 0.90810546875, "grad_norm": 0.13793352246284485, "learning_rate": 5.987768583185045e-05, "loss": 1.7157, "step": 9299 }, { "epoch": 0.908203125, "grad_norm": 0.14548243582248688, "learning_rate": 5.985685753284221e-05, "loss": 1.7681, "step": 9300 }, { "epoch": 0.90830078125, "grad_norm": 0.16567453742027283, "learning_rate": 5.983605072472796e-05, "loss": 1.7677, "step": 9301 }, { "epoch": 0.9083984375, "grad_norm": 0.15076477825641632, "learning_rate": 5.981526540958604e-05, "loss": 1.7566, "step": 9302 }, { "epoch": 0.90849609375, "grad_norm": 0.141315296292305, "learning_rate": 5.9794501589492736e-05, "loss": 1.7538, "step": 9303 }, { "epoch": 0.90859375, "grad_norm": 0.16835889220237732, "learning_rate": 5.9773759266522164e-05, "loss": 1.676, "step": 9304 }, { "epoch": 0.90869140625, "grad_norm": 0.14413350820541382, "learning_rate": 5.975303844274634e-05, "loss": 1.7602, "step": 9305 }, { "epoch": 0.9087890625, "grad_norm": 0.14835183322429657, "learning_rate": 5.973233912023503e-05, "loss": 1.7419, "step": 9306 }, { "epoch": 0.90888671875, "grad_norm": 0.17893259227275848, "learning_rate": 5.971166130105592e-05, "loss": 1.8058, "step": 9307 }, { "epoch": 0.908984375, "grad_norm": 0.18946197628974915, "learning_rate": 5.969100498727461e-05, "loss": 1.6477, "step": 9308 }, { "epoch": 0.90908203125, "grad_norm": 0.15877147018909454, "learning_rate": 5.967037018095442e-05, "loss": 1.7656, "step": 9309 }, { "epoch": 0.9091796875, "grad_norm": 0.157100111246109, "learning_rate": 5.96497568841566e-05, "loss": 1.7412, "step": 9310 }, { "epoch": 0.90927734375, "grad_norm": 0.18372835218906403, "learning_rate": 5.962916509894023e-05, "loss": 1.7523, "step": 9311 }, { "epoch": 0.909375, "grad_norm": 0.13422641158103943, "learning_rate": 5.9608594827362295e-05, "loss": 1.6902, "step": 9312 }, { "epoch": 0.90947265625, "grad_norm": 0.16665253043174744, "learning_rate": 5.95880460714775e-05, "loss": 1.6881, "step": 9313 }, { "epoch": 0.9095703125, "grad_norm": 0.15215176343917847, "learning_rate": 5.9567518833338496e-05, "loss": 1.7138, "step": 9314 }, { "epoch": 0.90966796875, 
"grad_norm": 0.14013466238975525, "learning_rate": 5.9547013114995835e-05, "loss": 1.7598, "step": 9315 }, { "epoch": 0.909765625, "grad_norm": 0.14868174493312836, "learning_rate": 5.952652891849784e-05, "loss": 1.7966, "step": 9316 }, { "epoch": 0.90986328125, "grad_norm": 0.15703502297401428, "learning_rate": 5.950606624589065e-05, "loss": 1.7621, "step": 9317 }, { "epoch": 0.9099609375, "grad_norm": 0.1504364311695099, "learning_rate": 5.9485625099218356e-05, "loss": 1.7695, "step": 9318 }, { "epoch": 0.91005859375, "grad_norm": 0.1360432505607605, "learning_rate": 5.946520548052283e-05, "loss": 1.7477, "step": 9319 }, { "epoch": 0.91015625, "grad_norm": 0.16781561076641083, "learning_rate": 5.944480739184381e-05, "loss": 1.7246, "step": 9320 }, { "epoch": 0.91025390625, "grad_norm": 0.14611288905143738, "learning_rate": 5.942443083521884e-05, "loss": 1.7314, "step": 9321 }, { "epoch": 0.9103515625, "grad_norm": 0.14283151924610138, "learning_rate": 5.9404075812683465e-05, "loss": 1.735, "step": 9322 }, { "epoch": 0.91044921875, "grad_norm": 0.1421424001455307, "learning_rate": 5.9383742326270905e-05, "loss": 1.7368, "step": 9323 }, { "epoch": 0.910546875, "grad_norm": 0.1430794894695282, "learning_rate": 5.936343037801227e-05, "loss": 1.7487, "step": 9324 }, { "epoch": 0.91064453125, "grad_norm": 0.13414336740970612, "learning_rate": 5.934313996993658e-05, "loss": 1.7731, "step": 9325 }, { "epoch": 0.9107421875, "grad_norm": 0.13213711977005005, "learning_rate": 5.932287110407068e-05, "loss": 1.7263, "step": 9326 }, { "epoch": 0.91083984375, "grad_norm": 0.1516907811164856, "learning_rate": 5.93026237824392e-05, "loss": 1.8018, "step": 9327 }, { "epoch": 0.9109375, "grad_norm": 0.1508868932723999, "learning_rate": 5.928239800706467e-05, "loss": 1.732, "step": 9328 }, { "epoch": 0.91103515625, "grad_norm": 0.14662234485149384, "learning_rate": 5.9262193779967525e-05, "loss": 1.7322, "step": 9329 }, { "epoch": 0.9111328125, "grad_norm": 0.1758737415075302, "learning_rate": 5.924201110316599e-05, "loss": 1.7412, "step": 9330 }, { "epoch": 0.91123046875, "grad_norm": 0.13626569509506226, "learning_rate": 5.9221849978676053e-05, "loss": 1.7578, "step": 9331 }, { "epoch": 0.911328125, "grad_norm": 0.16815687716007233, "learning_rate": 5.92017104085117e-05, "loss": 1.7707, "step": 9332 }, { "epoch": 0.91142578125, "grad_norm": 0.14118772745132446, "learning_rate": 5.9181592394684706e-05, "loss": 1.7753, "step": 9333 }, { "epoch": 0.9115234375, "grad_norm": 0.14078889787197113, "learning_rate": 5.916149593920462e-05, "loss": 1.7752, "step": 9334 }, { "epoch": 0.91162109375, "grad_norm": 0.16563326120376587, "learning_rate": 5.914142104407892e-05, "loss": 1.7564, "step": 9335 }, { "epoch": 0.91171875, "grad_norm": 0.13450175523757935, "learning_rate": 5.912136771131293e-05, "loss": 1.7303, "step": 9336 }, { "epoch": 0.91181640625, "grad_norm": 0.13636405766010284, "learning_rate": 5.910133594290981e-05, "loss": 1.7561, "step": 9337 }, { "epoch": 0.9119140625, "grad_norm": 0.1531520038843155, "learning_rate": 5.908132574087054e-05, "loss": 1.7729, "step": 9338 }, { "epoch": 0.91201171875, "grad_norm": 0.13433964550495148, "learning_rate": 5.906133710719395e-05, "loss": 1.7457, "step": 9339 }, { "epoch": 0.912109375, "grad_norm": 0.17327332496643066, "learning_rate": 5.904137004387677e-05, "loss": 1.7449, "step": 9340 }, { "epoch": 0.91220703125, "grad_norm": 0.15173473954200745, "learning_rate": 5.902142455291348e-05, "loss": 1.7265, "step": 9341 }, { "epoch": 0.9123046875, "grad_norm": 
0.1575126051902771, "learning_rate": 5.9001500636296436e-05, "loss": 1.7796, "step": 9342 }, { "epoch": 0.91240234375, "grad_norm": 0.14806540310382843, "learning_rate": 5.8981598296015944e-05, "loss": 1.719, "step": 9343 }, { "epoch": 0.9125, "grad_norm": 0.15365523099899292, "learning_rate": 5.896171753406005e-05, "loss": 1.7969, "step": 9344 }, { "epoch": 0.91259765625, "grad_norm": 0.1324182152748108, "learning_rate": 5.894185835241462e-05, "loss": 1.7681, "step": 9345 }, { "epoch": 0.9126953125, "grad_norm": 0.15972810983657837, "learning_rate": 5.892202075306344e-05, "loss": 1.7637, "step": 9346 }, { "epoch": 0.91279296875, "grad_norm": 0.147715762257576, "learning_rate": 5.890220473798813e-05, "loss": 1.737, "step": 9347 }, { "epoch": 0.912890625, "grad_norm": 0.1416546255350113, "learning_rate": 5.888241030916811e-05, "loss": 1.7322, "step": 9348 }, { "epoch": 0.91298828125, "grad_norm": 0.15389801561832428, "learning_rate": 5.88626374685806e-05, "loss": 1.7591, "step": 9349 }, { "epoch": 0.9130859375, "grad_norm": 0.13949310779571533, "learning_rate": 5.884288621820087e-05, "loss": 1.7616, "step": 9350 }, { "epoch": 0.91318359375, "grad_norm": 0.13609060645103455, "learning_rate": 5.882315656000184e-05, "loss": 1.7329, "step": 9351 }, { "epoch": 0.91328125, "grad_norm": 0.15569041669368744, "learning_rate": 5.8803448495954296e-05, "loss": 1.7799, "step": 9352 }, { "epoch": 0.91337890625, "grad_norm": 0.13317731022834778, "learning_rate": 5.878376202802689e-05, "loss": 1.7511, "step": 9353 }, { "epoch": 0.9134765625, "grad_norm": 0.15188027918338776, "learning_rate": 5.876409715818619e-05, "loss": 1.7512, "step": 9354 }, { "epoch": 0.91357421875, "grad_norm": 0.14733240008354187, "learning_rate": 5.874445388839648e-05, "loss": 1.7279, "step": 9355 }, { "epoch": 0.913671875, "grad_norm": 0.1406514048576355, "learning_rate": 5.8724832220619995e-05, "loss": 1.7828, "step": 9356 }, { "epoch": 0.91376953125, "grad_norm": 0.18344494700431824, "learning_rate": 5.870523215681672e-05, "loss": 1.7749, "step": 9357 }, { "epoch": 0.9138671875, "grad_norm": 0.14417676627635956, "learning_rate": 5.8685653698944596e-05, "loss": 1.7977, "step": 9358 }, { "epoch": 0.91396484375, "grad_norm": 0.15694917738437653, "learning_rate": 5.8666096848959276e-05, "loss": 1.7238, "step": 9359 }, { "epoch": 0.9140625, "grad_norm": 0.13868515193462372, "learning_rate": 5.864656160881432e-05, "loss": 1.7361, "step": 9360 }, { "epoch": 0.91416015625, "grad_norm": 0.14001043140888214, "learning_rate": 5.862704798046114e-05, "loss": 1.7533, "step": 9361 }, { "epoch": 0.9142578125, "grad_norm": 0.14931344985961914, "learning_rate": 5.860755596584899e-05, "loss": 1.7435, "step": 9362 }, { "epoch": 0.91435546875, "grad_norm": 0.15377135574817657, "learning_rate": 5.858808556692493e-05, "loss": 1.7562, "step": 9363 }, { "epoch": 0.914453125, "grad_norm": 0.1485341638326645, "learning_rate": 5.856863678563388e-05, "loss": 1.7997, "step": 9364 }, { "epoch": 0.91455078125, "grad_norm": 0.14647462964057922, "learning_rate": 5.854920962391865e-05, "loss": 1.7179, "step": 9365 }, { "epoch": 0.9146484375, "grad_norm": 0.17826461791992188, "learning_rate": 5.852980408371976e-05, "loss": 1.7955, "step": 9366 }, { "epoch": 0.91474609375, "grad_norm": 0.12972669303417206, "learning_rate": 5.851042016697566e-05, "loss": 1.7671, "step": 9367 }, { "epoch": 0.91484375, "grad_norm": 0.16971151530742645, "learning_rate": 5.849105787562269e-05, "loss": 1.7517, "step": 9368 }, { "epoch": 0.91494140625, "grad_norm": 0.14884422719478607, 
"learning_rate": 5.8471717211594927e-05, "loss": 1.8095, "step": 9369 }, { "epoch": 0.9150390625, "grad_norm": 0.14797349274158478, "learning_rate": 5.845239817682438e-05, "loss": 1.7613, "step": 9370 }, { "epoch": 0.91513671875, "grad_norm": 0.1686272919178009, "learning_rate": 5.843310077324077e-05, "loss": 1.7928, "step": 9371 }, { "epoch": 0.915234375, "grad_norm": 0.153645321726799, "learning_rate": 5.841382500277183e-05, "loss": 1.7499, "step": 9372 }, { "epoch": 0.91533203125, "grad_norm": 0.14327919483184814, "learning_rate": 5.8394570867342984e-05, "loss": 1.786, "step": 9373 }, { "epoch": 0.9154296875, "grad_norm": 0.16660326719284058, "learning_rate": 5.837533836887753e-05, "loss": 1.7173, "step": 9374 }, { "epoch": 0.91552734375, "grad_norm": 0.152215376496315, "learning_rate": 5.835612750929666e-05, "loss": 1.7412, "step": 9375 }, { "epoch": 0.915625, "grad_norm": 0.16111895442008972, "learning_rate": 5.833693829051936e-05, "loss": 1.7801, "step": 9376 }, { "epoch": 0.91572265625, "grad_norm": 0.1694248616695404, "learning_rate": 5.831777071446246e-05, "loss": 1.7582, "step": 9377 }, { "epoch": 0.9158203125, "grad_norm": 0.16651666164398193, "learning_rate": 5.829862478304064e-05, "loss": 1.7431, "step": 9378 }, { "epoch": 0.91591796875, "grad_norm": 0.15165120363235474, "learning_rate": 5.8279500498166446e-05, "loss": 1.7553, "step": 9379 }, { "epoch": 0.916015625, "grad_norm": 0.16584962606430054, "learning_rate": 5.826039786175013e-05, "loss": 1.7638, "step": 9380 }, { "epoch": 0.91611328125, "grad_norm": 0.15173600614070892, "learning_rate": 5.824131687569994e-05, "loss": 1.7656, "step": 9381 }, { "epoch": 0.9162109375, "grad_norm": 0.1453447937965393, "learning_rate": 5.822225754192188e-05, "loss": 1.7828, "step": 9382 }, { "epoch": 0.91630859375, "grad_norm": 0.16904257237911224, "learning_rate": 5.8203219862319816e-05, "loss": 1.7805, "step": 9383 }, { "epoch": 0.91640625, "grad_norm": 0.14732715487480164, "learning_rate": 5.818420383879543e-05, "loss": 1.7398, "step": 9384 }, { "epoch": 0.91650390625, "grad_norm": 0.15373316407203674, "learning_rate": 5.816520947324826e-05, "loss": 1.7766, "step": 9385 }, { "epoch": 0.9166015625, "grad_norm": 0.15531432628631592, "learning_rate": 5.814623676757573e-05, "loss": 1.7635, "step": 9386 }, { "epoch": 0.91669921875, "grad_norm": 0.1328270137310028, "learning_rate": 5.812728572367296e-05, "loss": 1.7728, "step": 9387 }, { "epoch": 0.916796875, "grad_norm": 0.14958934485912323, "learning_rate": 5.8108356343433026e-05, "loss": 1.748, "step": 9388 }, { "epoch": 0.91689453125, "grad_norm": 0.15696552395820618, "learning_rate": 5.8089448628746795e-05, "loss": 1.7404, "step": 9389 }, { "epoch": 0.9169921875, "grad_norm": 0.14154352247714996, "learning_rate": 5.8070562581503e-05, "loss": 1.6923, "step": 9390 }, { "epoch": 0.91708984375, "grad_norm": 0.14306579530239105, "learning_rate": 5.805169820358819e-05, "loss": 1.8048, "step": 9391 }, { "epoch": 0.9171875, "grad_norm": 0.13463035225868225, "learning_rate": 5.803285549688675e-05, "loss": 1.7431, "step": 9392 }, { "epoch": 0.91728515625, "grad_norm": 0.14674513041973114, "learning_rate": 5.80140344632809e-05, "loss": 1.7196, "step": 9393 }, { "epoch": 0.9173828125, "grad_norm": 0.1416177898645401, "learning_rate": 5.799523510465064e-05, "loss": 1.7322, "step": 9394 }, { "epoch": 0.91748046875, "grad_norm": 0.13252533972263336, "learning_rate": 5.797645742287393e-05, "loss": 1.7526, "step": 9395 }, { "epoch": 0.917578125, "grad_norm": 0.1400875598192215, "learning_rate": 
5.7957701419826465e-05, "loss": 1.7541, "step": 9396 }, { "epoch": 0.91767578125, "grad_norm": 0.14313450455665588, "learning_rate": 5.79389670973818e-05, "loss": 1.7708, "step": 9397 }, { "epoch": 0.9177734375, "grad_norm": 0.12603671848773956, "learning_rate": 5.7920254457411353e-05, "loss": 1.7712, "step": 9398 }, { "epoch": 0.91787109375, "grad_norm": 0.13917255401611328, "learning_rate": 5.790156350178431e-05, "loss": 1.7676, "step": 9399 }, { "epoch": 0.91796875, "grad_norm": 0.13565994799137115, "learning_rate": 5.7882894232367786e-05, "loss": 1.7272, "step": 9400 }, { "epoch": 0.91806640625, "grad_norm": 0.13158077001571655, "learning_rate": 5.7864246651026616e-05, "loss": 1.7265, "step": 9401 }, { "epoch": 0.9181640625, "grad_norm": 0.14835377037525177, "learning_rate": 5.784562075962357e-05, "loss": 1.7556, "step": 9402 }, { "epoch": 0.91826171875, "grad_norm": 0.14352284371852875, "learning_rate": 5.782701656001918e-05, "loss": 1.7887, "step": 9403 }, { "epoch": 0.918359375, "grad_norm": 0.13186237215995789, "learning_rate": 5.780843405407186e-05, "loss": 1.7512, "step": 9404 }, { "epoch": 0.91845703125, "grad_norm": 0.1529168039560318, "learning_rate": 5.7789873243637826e-05, "loss": 1.7703, "step": 9405 }, { "epoch": 0.9185546875, "grad_norm": 0.13531440496444702, "learning_rate": 5.7771334130571166e-05, "loss": 1.7903, "step": 9406 }, { "epoch": 0.91865234375, "grad_norm": 0.1580737680196762, "learning_rate": 5.7752816716723756e-05, "loss": 1.7863, "step": 9407 }, { "epoch": 0.91875, "grad_norm": 0.1917700469493866, "learning_rate": 5.773432100394531e-05, "loss": 1.7333, "step": 9408 }, { "epoch": 0.91884765625, "grad_norm": 0.16823706030845642, "learning_rate": 5.771584699408341e-05, "loss": 1.7585, "step": 9409 }, { "epoch": 0.9189453125, "grad_norm": 0.19027896225452423, "learning_rate": 5.7697394688983394e-05, "loss": 1.764, "step": 9410 }, { "epoch": 0.91904296875, "grad_norm": 0.18697969615459442, "learning_rate": 5.767896409048853e-05, "loss": 1.7621, "step": 9411 }, { "epoch": 0.919140625, "grad_norm": 0.14698876440525055, "learning_rate": 5.7660555200439876e-05, "loss": 1.7277, "step": 9412 }, { "epoch": 0.91923828125, "grad_norm": 0.1716105043888092, "learning_rate": 5.7642168020676306e-05, "loss": 1.7608, "step": 9413 }, { "epoch": 0.9193359375, "grad_norm": 0.1644965261220932, "learning_rate": 5.762380255303457e-05, "loss": 1.7516, "step": 9414 }, { "epoch": 0.91943359375, "grad_norm": 0.15965566039085388, "learning_rate": 5.760545879934913e-05, "loss": 1.6899, "step": 9415 }, { "epoch": 0.91953125, "grad_norm": 0.15675337612628937, "learning_rate": 5.758713676145241e-05, "loss": 1.7409, "step": 9416 }, { "epoch": 0.91962890625, "grad_norm": 0.14612145721912384, "learning_rate": 5.7568836441174646e-05, "loss": 1.7358, "step": 9417 }, { "epoch": 0.9197265625, "grad_norm": 0.16682296991348267, "learning_rate": 5.7550557840343826e-05, "loss": 1.7675, "step": 9418 }, { "epoch": 0.91982421875, "grad_norm": 0.14535261690616608, "learning_rate": 5.753230096078588e-05, "loss": 1.7875, "step": 9419 }, { "epoch": 0.919921875, "grad_norm": 0.15981604158878326, "learning_rate": 5.751406580432446e-05, "loss": 1.7806, "step": 9420 }, { "epoch": 0.92001953125, "grad_norm": 0.15392103791236877, "learning_rate": 5.7495852372781126e-05, "loss": 1.6871, "step": 9421 }, { "epoch": 0.9201171875, "grad_norm": 0.14338837563991547, "learning_rate": 5.7477660667975235e-05, "loss": 1.7396, "step": 9422 }, { "epoch": 0.92021484375, "grad_norm": 0.16205459833145142, "learning_rate": 
5.745949069172394e-05, "loss": 1.7611, "step": 9423 }, { "epoch": 0.9203125, "grad_norm": 0.15047021210193634, "learning_rate": 5.74413424458423e-05, "loss": 1.7229, "step": 9424 }, { "epoch": 0.92041015625, "grad_norm": 0.15595224499702454, "learning_rate": 5.742321593214316e-05, "loss": 1.7576, "step": 9425 }, { "epoch": 0.9205078125, "grad_norm": 0.17403143644332886, "learning_rate": 5.7405111152437186e-05, "loss": 1.7645, "step": 9426 }, { "epoch": 0.92060546875, "grad_norm": 0.13309882581233978, "learning_rate": 5.738702810853289e-05, "loss": 1.7308, "step": 9427 }, { "epoch": 0.920703125, "grad_norm": 0.1503812074661255, "learning_rate": 5.7368966802236633e-05, "loss": 1.7938, "step": 9428 }, { "epoch": 0.92080078125, "grad_norm": 0.146943598985672, "learning_rate": 5.7350927235352543e-05, "loss": 1.7884, "step": 9429 }, { "epoch": 0.9208984375, "grad_norm": 0.16252662241458893, "learning_rate": 5.733290940968261e-05, "loss": 1.7377, "step": 9430 }, { "epoch": 0.92099609375, "grad_norm": 0.1559133231639862, "learning_rate": 5.7314913327026666e-05, "loss": 1.7314, "step": 9431 }, { "epoch": 0.92109375, "grad_norm": 0.13345488905906677, "learning_rate": 5.7296938989182376e-05, "loss": 1.7267, "step": 9432 }, { "epoch": 0.92119140625, "grad_norm": 0.1785549521446228, "learning_rate": 5.727898639794519e-05, "loss": 1.7626, "step": 9433 }, { "epoch": 0.9212890625, "grad_norm": 0.14996975660324097, "learning_rate": 5.726105555510845e-05, "loss": 1.7237, "step": 9434 }, { "epoch": 0.92138671875, "grad_norm": 0.1440293788909912, "learning_rate": 5.72431464624633e-05, "loss": 1.7437, "step": 9435 }, { "epoch": 0.921484375, "grad_norm": 0.16825884580612183, "learning_rate": 5.722525912179861e-05, "loss": 1.6891, "step": 9436 }, { "epoch": 0.92158203125, "grad_norm": 0.155620738863945, "learning_rate": 5.720739353490124e-05, "loss": 1.7386, "step": 9437 }, { "epoch": 0.9216796875, "grad_norm": 0.1486097276210785, "learning_rate": 5.718954970355579e-05, "loss": 1.7556, "step": 9438 }, { "epoch": 0.92177734375, "grad_norm": 0.17195436358451843, "learning_rate": 5.717172762954469e-05, "loss": 1.778, "step": 9439 }, { "epoch": 0.921875, "grad_norm": 0.13883517682552338, "learning_rate": 5.715392731464822e-05, "loss": 1.7422, "step": 9440 }, { "epoch": 0.92197265625, "grad_norm": 0.16298969089984894, "learning_rate": 5.713614876064447e-05, "loss": 1.7265, "step": 9441 }, { "epoch": 0.9220703125, "grad_norm": 0.17330047488212585, "learning_rate": 5.7118391969309384e-05, "loss": 1.7497, "step": 9442 }, { "epoch": 0.92216796875, "grad_norm": 0.15852926671504974, "learning_rate": 5.710065694241664e-05, "loss": 1.7466, "step": 9443 }, { "epoch": 0.922265625, "grad_norm": 0.1654817909002304, "learning_rate": 5.708294368173787e-05, "loss": 1.7189, "step": 9444 }, { "epoch": 0.92236328125, "grad_norm": 0.15202441811561584, "learning_rate": 5.7065252189042446e-05, "loss": 1.813, "step": 9445 }, { "epoch": 0.9224609375, "grad_norm": 0.14371302723884583, "learning_rate": 5.704758246609761e-05, "loss": 1.7215, "step": 9446 }, { "epoch": 0.92255859375, "grad_norm": 0.1512024849653244, "learning_rate": 5.702993451466839e-05, "loss": 1.7933, "step": 9447 }, { "epoch": 0.92265625, "grad_norm": 0.13477680087089539, "learning_rate": 5.70123083365177e-05, "loss": 1.7307, "step": 9448 }, { "epoch": 0.92275390625, "grad_norm": 0.13969910144805908, "learning_rate": 5.6994703933406205e-05, "loss": 1.7167, "step": 9449 }, { "epoch": 0.9228515625, "grad_norm": 0.13844826817512512, "learning_rate": 5.697712130709242e-05, 
"loss": 1.7419, "step": 9450 }, { "epoch": 0.92294921875, "grad_norm": 0.13640819489955902, "learning_rate": 5.69595604593327e-05, "loss": 1.6961, "step": 9451 }, { "epoch": 0.923046875, "grad_norm": 0.13366495072841644, "learning_rate": 5.694202139188123e-05, "loss": 1.7748, "step": 9452 }, { "epoch": 0.92314453125, "grad_norm": 0.13441388309001923, "learning_rate": 5.6924504106489996e-05, "loss": 1.752, "step": 9453 }, { "epoch": 0.9232421875, "grad_norm": 0.13825014233589172, "learning_rate": 5.690700860490885e-05, "loss": 1.7573, "step": 9454 }, { "epoch": 0.92333984375, "grad_norm": 0.14114603400230408, "learning_rate": 5.6889534888885405e-05, "loss": 1.7571, "step": 9455 }, { "epoch": 0.9234375, "grad_norm": 0.14767256379127502, "learning_rate": 5.6872082960165146e-05, "loss": 1.7435, "step": 9456 }, { "epoch": 0.92353515625, "grad_norm": 0.16233301162719727, "learning_rate": 5.685465282049136e-05, "loss": 1.7729, "step": 9457 }, { "epoch": 0.9236328125, "grad_norm": 0.16140328347682953, "learning_rate": 5.683724447160514e-05, "loss": 1.7048, "step": 9458 }, { "epoch": 0.92373046875, "grad_norm": 0.13763490319252014, "learning_rate": 5.6819857915245456e-05, "loss": 1.764, "step": 9459 }, { "epoch": 0.923828125, "grad_norm": 0.16076315939426422, "learning_rate": 5.68024931531491e-05, "loss": 1.7869, "step": 9460 }, { "epoch": 0.92392578125, "grad_norm": 0.15337146818637848, "learning_rate": 5.678515018705059e-05, "loss": 1.7939, "step": 9461 }, { "epoch": 0.9240234375, "grad_norm": 0.1495906263589859, "learning_rate": 5.676782901868235e-05, "loss": 1.728, "step": 9462 }, { "epoch": 0.92412109375, "grad_norm": 0.15392597019672394, "learning_rate": 5.675052964977467e-05, "loss": 1.7518, "step": 9463 }, { "epoch": 0.92421875, "grad_norm": 0.1441519558429718, "learning_rate": 5.6733252082055554e-05, "loss": 1.8042, "step": 9464 }, { "epoch": 0.92431640625, "grad_norm": 0.13910412788391113, "learning_rate": 5.671599631725086e-05, "loss": 1.7665, "step": 9465 }, { "epoch": 0.9244140625, "grad_norm": 0.14773640036582947, "learning_rate": 5.6698762357084326e-05, "loss": 1.7542, "step": 9466 }, { "epoch": 0.92451171875, "grad_norm": 0.1474866271018982, "learning_rate": 5.668155020327745e-05, "loss": 1.8299, "step": 9467 }, { "epoch": 0.924609375, "grad_norm": 0.13713574409484863, "learning_rate": 5.666435985754958e-05, "loss": 1.768, "step": 9468 }, { "epoch": 0.92470703125, "grad_norm": 0.15397492051124573, "learning_rate": 5.6647191321617886e-05, "loss": 1.8021, "step": 9469 }, { "epoch": 0.9248046875, "grad_norm": 0.1514683961868286, "learning_rate": 5.663004459719738e-05, "loss": 1.763, "step": 9470 }, { "epoch": 0.92490234375, "grad_norm": 0.1635110080242157, "learning_rate": 5.661291968600081e-05, "loss": 1.7732, "step": 9471 }, { "epoch": 0.925, "grad_norm": 0.1550174355506897, "learning_rate": 5.6595816589738824e-05, "loss": 1.7115, "step": 9472 }, { "epoch": 0.92509765625, "grad_norm": 0.13811564445495605, "learning_rate": 5.657873531011985e-05, "loss": 1.7399, "step": 9473 }, { "epoch": 0.9251953125, "grad_norm": 0.1545625776052475, "learning_rate": 5.656167584885021e-05, "loss": 1.7433, "step": 9474 }, { "epoch": 0.92529296875, "grad_norm": 0.14624635875225067, "learning_rate": 5.654463820763395e-05, "loss": 1.7627, "step": 9475 }, { "epoch": 0.925390625, "grad_norm": 0.1487712264060974, "learning_rate": 5.652762238817299e-05, "loss": 1.7652, "step": 9476 }, { "epoch": 0.92548828125, "grad_norm": 0.1666698455810547, "learning_rate": 5.651062839216711e-05, "loss": 1.7446, "step": 
9477 }, { "epoch": 0.9255859375, "grad_norm": 0.1706482321023941, "learning_rate": 5.649365622131376e-05, "loss": 1.7988, "step": 9478 }, { "epoch": 0.92568359375, "grad_norm": 0.14570626616477966, "learning_rate": 5.6476705877308366e-05, "loss": 1.7507, "step": 9479 }, { "epoch": 0.92578125, "grad_norm": 0.14290036261081696, "learning_rate": 5.645977736184412e-05, "loss": 1.7535, "step": 9480 }, { "epoch": 0.92587890625, "grad_norm": 0.14502593874931335, "learning_rate": 5.644287067661205e-05, "loss": 1.7533, "step": 9481 }, { "epoch": 0.9259765625, "grad_norm": 0.15559139847755432, "learning_rate": 5.642598582330092e-05, "loss": 1.7666, "step": 9482 }, { "epoch": 0.92607421875, "grad_norm": 0.13931894302368164, "learning_rate": 5.640912280359742e-05, "loss": 1.778, "step": 9483 }, { "epoch": 0.926171875, "grad_norm": 0.15563459694385529, "learning_rate": 5.639228161918606e-05, "loss": 1.7675, "step": 9484 }, { "epoch": 0.92626953125, "grad_norm": 0.16149772703647614, "learning_rate": 5.6375462271749025e-05, "loss": 1.7776, "step": 9485 }, { "epoch": 0.9263671875, "grad_norm": 0.14971859753131866, "learning_rate": 5.635866476296647e-05, "loss": 1.7789, "step": 9486 }, { "epoch": 0.92646484375, "grad_norm": 0.1447547823190689, "learning_rate": 5.634188909451634e-05, "loss": 1.7745, "step": 9487 }, { "epoch": 0.9265625, "grad_norm": 0.1592385172843933, "learning_rate": 5.632513526807434e-05, "loss": 1.7854, "step": 9488 }, { "epoch": 0.92666015625, "grad_norm": 0.15546225011348724, "learning_rate": 5.630840328531403e-05, "loss": 1.7592, "step": 9489 }, { "epoch": 0.9267578125, "grad_norm": 0.14432300627231598, "learning_rate": 5.629169314790682e-05, "loss": 1.7443, "step": 9490 }, { "epoch": 0.92685546875, "grad_norm": 0.14211204648017883, "learning_rate": 5.6275004857521875e-05, "loss": 1.7289, "step": 9491 }, { "epoch": 0.926953125, "grad_norm": 0.16861510276794434, "learning_rate": 5.625833841582622e-05, "loss": 1.7675, "step": 9492 }, { "epoch": 0.92705078125, "grad_norm": 0.14296334981918335, "learning_rate": 5.624169382448469e-05, "loss": 1.7317, "step": 9493 }, { "epoch": 0.9271484375, "grad_norm": 0.14908485114574432, "learning_rate": 5.6225071085159905e-05, "loss": 1.7516, "step": 9494 }, { "epoch": 0.92724609375, "grad_norm": 0.15136855840682983, "learning_rate": 5.6208470199512336e-05, "loss": 1.7385, "step": 9495 }, { "epoch": 0.92734375, "grad_norm": 0.15880393981933594, "learning_rate": 5.61918911692003e-05, "loss": 1.7847, "step": 9496 }, { "epoch": 0.92744140625, "grad_norm": 0.14415612816810608, "learning_rate": 5.6175333995879866e-05, "loss": 1.7414, "step": 9497 }, { "epoch": 0.9275390625, "grad_norm": 0.1558876782655716, "learning_rate": 5.615879868120498e-05, "loss": 1.7771, "step": 9498 }, { "epoch": 0.92763671875, "grad_norm": 0.14913725852966309, "learning_rate": 5.614228522682731e-05, "loss": 1.7011, "step": 9499 }, { "epoch": 0.927734375, "grad_norm": 0.1698063760995865, "learning_rate": 5.612579363439644e-05, "loss": 1.7637, "step": 9500 }, { "epoch": 0.92783203125, "grad_norm": 0.14392510056495667, "learning_rate": 5.610932390555973e-05, "loss": 1.7323, "step": 9501 }, { "epoch": 0.9279296875, "grad_norm": 0.163613960146904, "learning_rate": 5.609287604196239e-05, "loss": 1.7299, "step": 9502 }, { "epoch": 0.92802734375, "grad_norm": 0.15262150764465332, "learning_rate": 5.607645004524738e-05, "loss": 1.7465, "step": 9503 }, { "epoch": 0.928125, "grad_norm": 0.1377040445804596, "learning_rate": 5.606004591705552e-05, "loss": 1.7953, "step": 9504 }, { "epoch": 
0.92822265625, "grad_norm": 0.15916667878627777, "learning_rate": 5.604366365902546e-05, "loss": 1.7504, "step": 9505 }, { "epoch": 0.9283203125, "grad_norm": 0.1433727890253067, "learning_rate": 5.6027303272793594e-05, "loss": 1.7595, "step": 9506 }, { "epoch": 0.92841796875, "grad_norm": 0.14865882694721222, "learning_rate": 5.6010964759994225e-05, "loss": 1.7757, "step": 9507 }, { "epoch": 0.928515625, "grad_norm": 0.14403073489665985, "learning_rate": 5.599464812225939e-05, "loss": 1.7617, "step": 9508 }, { "epoch": 0.92861328125, "grad_norm": 0.14790786802768707, "learning_rate": 5.597835336121901e-05, "loss": 1.7781, "step": 9509 }, { "epoch": 0.9287109375, "grad_norm": 0.1547408550977707, "learning_rate": 5.596208047850076e-05, "loss": 1.776, "step": 9510 }, { "epoch": 0.92880859375, "grad_norm": 0.14770439267158508, "learning_rate": 5.594582947573018e-05, "loss": 1.7991, "step": 9511 }, { "epoch": 0.92890625, "grad_norm": 0.15731871128082275, "learning_rate": 5.5929600354530614e-05, "loss": 1.7319, "step": 9512 }, { "epoch": 0.92900390625, "grad_norm": 0.14880222082138062, "learning_rate": 5.591339311652316e-05, "loss": 1.7524, "step": 9513 }, { "epoch": 0.9291015625, "grad_norm": 0.14780579507350922, "learning_rate": 5.589720776332678e-05, "loss": 1.7121, "step": 9514 }, { "epoch": 0.92919921875, "grad_norm": 0.1407061070203781, "learning_rate": 5.5881044296558286e-05, "loss": 1.708, "step": 9515 }, { "epoch": 0.929296875, "grad_norm": 0.13748209178447723, "learning_rate": 5.586490271783226e-05, "loss": 1.8049, "step": 9516 }, { "epoch": 0.92939453125, "grad_norm": 0.14713209867477417, "learning_rate": 5.584878302876108e-05, "loss": 1.7501, "step": 9517 }, { "epoch": 0.9294921875, "grad_norm": 0.13495796918869019, "learning_rate": 5.583268523095494e-05, "loss": 1.7382, "step": 9518 }, { "epoch": 0.92958984375, "grad_norm": 0.14138571918010712, "learning_rate": 5.581660932602195e-05, "loss": 1.7685, "step": 9519 }, { "epoch": 0.9296875, "grad_norm": 0.14172746241092682, "learning_rate": 5.580055531556789e-05, "loss": 1.7516, "step": 9520 }, { "epoch": 0.92978515625, "grad_norm": 0.14048714935779572, "learning_rate": 5.57845232011964e-05, "loss": 1.7742, "step": 9521 }, { "epoch": 0.9298828125, "grad_norm": 0.14172911643981934, "learning_rate": 5.576851298450897e-05, "loss": 1.7816, "step": 9522 }, { "epoch": 0.92998046875, "grad_norm": 0.13133904337882996, "learning_rate": 5.575252466710488e-05, "loss": 1.7145, "step": 9523 }, { "epoch": 0.930078125, "grad_norm": 0.13715589046478271, "learning_rate": 5.573655825058122e-05, "loss": 1.7451, "step": 9524 }, { "epoch": 0.93017578125, "grad_norm": 0.13348396122455597, "learning_rate": 5.5720613736532877e-05, "loss": 1.7202, "step": 9525 }, { "epoch": 0.9302734375, "grad_norm": 0.1313544660806656, "learning_rate": 5.5704691126552635e-05, "loss": 1.7661, "step": 9526 }, { "epoch": 0.93037109375, "grad_norm": 0.15201818943023682, "learning_rate": 5.5688790422230915e-05, "loss": 1.755, "step": 9527 }, { "epoch": 0.93046875, "grad_norm": 0.12813700735569, "learning_rate": 5.5672911625156126e-05, "loss": 1.7439, "step": 9528 }, { "epoch": 0.93056640625, "grad_norm": 0.14766767621040344, "learning_rate": 5.5657054736914395e-05, "loss": 1.7376, "step": 9529 }, { "epoch": 0.9306640625, "grad_norm": 0.15958207845687866, "learning_rate": 5.564121975908969e-05, "loss": 1.7951, "step": 9530 }, { "epoch": 0.93076171875, "grad_norm": 0.14209003746509552, "learning_rate": 5.56254066932638e-05, "loss": 1.7745, "step": 9531 }, { "epoch": 0.930859375, 
"grad_norm": 0.14810065925121307, "learning_rate": 5.560961554101629e-05, "loss": 1.7366, "step": 9532 }, { "epoch": 0.93095703125, "grad_norm": 0.1396838128566742, "learning_rate": 5.559384630392459e-05, "loss": 1.7305, "step": 9533 }, { "epoch": 0.9310546875, "grad_norm": 0.13612063229084015, "learning_rate": 5.557809898356385e-05, "loss": 1.7819, "step": 9534 }, { "epoch": 0.93115234375, "grad_norm": 0.14099407196044922, "learning_rate": 5.556237358150713e-05, "loss": 1.7362, "step": 9535 }, { "epoch": 0.93125, "grad_norm": 0.14185141026973724, "learning_rate": 5.5546670099325244e-05, "loss": 1.7414, "step": 9536 }, { "epoch": 0.93134765625, "grad_norm": 0.15764866769313812, "learning_rate": 5.553098853858682e-05, "loss": 1.7806, "step": 9537 }, { "epoch": 0.9314453125, "grad_norm": 0.1490548700094223, "learning_rate": 5.551532890085834e-05, "loss": 1.7459, "step": 9538 }, { "epoch": 0.93154296875, "grad_norm": 0.15033231675624847, "learning_rate": 5.549969118770403e-05, "loss": 1.7523, "step": 9539 }, { "epoch": 0.931640625, "grad_norm": 0.2158004194498062, "learning_rate": 5.548407540068598e-05, "loss": 1.7721, "step": 9540 }, { "epoch": 0.93173828125, "grad_norm": 0.13820262253284454, "learning_rate": 5.5468481541364067e-05, "loss": 1.7411, "step": 9541 }, { "epoch": 0.9318359375, "grad_norm": 0.1663188636302948, "learning_rate": 5.5452909611295963e-05, "loss": 1.7611, "step": 9542 }, { "epoch": 0.93193359375, "grad_norm": 0.15785641968250275, "learning_rate": 5.5437359612037175e-05, "loss": 1.7491, "step": 9543 }, { "epoch": 0.93203125, "grad_norm": 0.15088915824890137, "learning_rate": 5.5421831545141016e-05, "loss": 1.7447, "step": 9544 }, { "epoch": 0.93212890625, "grad_norm": 0.16930924355983734, "learning_rate": 5.540632541215862e-05, "loss": 1.7611, "step": 9545 }, { "epoch": 0.9322265625, "grad_norm": 0.16358640789985657, "learning_rate": 5.5390841214638864e-05, "loss": 1.8034, "step": 9546 }, { "epoch": 0.93232421875, "grad_norm": 0.13486115634441376, "learning_rate": 5.5375378954128525e-05, "loss": 1.7276, "step": 9547 }, { "epoch": 0.932421875, "grad_norm": 0.17850340902805328, "learning_rate": 5.5359938632172126e-05, "loss": 1.7891, "step": 9548 }, { "epoch": 0.93251953125, "grad_norm": 0.1514258235692978, "learning_rate": 5.5344520250312e-05, "loss": 1.7549, "step": 9549 }, { "epoch": 0.9326171875, "grad_norm": 0.14744509756565094, "learning_rate": 5.5329123810088326e-05, "loss": 1.7738, "step": 9550 }, { "epoch": 0.93271484375, "grad_norm": 0.15231208503246307, "learning_rate": 5.5313749313039104e-05, "loss": 1.7106, "step": 9551 }, { "epoch": 0.9328125, "grad_norm": 0.1492513120174408, "learning_rate": 5.5298396760700085e-05, "loss": 1.701, "step": 9552 }, { "epoch": 0.93291015625, "grad_norm": 0.13931752741336823, "learning_rate": 5.528306615460484e-05, "loss": 1.7214, "step": 9553 }, { "epoch": 0.9330078125, "grad_norm": 0.16987617313861847, "learning_rate": 5.526775749628476e-05, "loss": 1.781, "step": 9554 }, { "epoch": 0.93310546875, "grad_norm": 0.14695048332214355, "learning_rate": 5.525247078726905e-05, "loss": 1.7262, "step": 9555 }, { "epoch": 0.933203125, "grad_norm": 0.14402338862419128, "learning_rate": 5.523720602908473e-05, "loss": 1.7489, "step": 9556 }, { "epoch": 0.93330078125, "grad_norm": 0.1395728886127472, "learning_rate": 5.5221963223256593e-05, "loss": 1.7632, "step": 9557 }, { "epoch": 0.9333984375, "grad_norm": 0.16045847535133362, "learning_rate": 5.520674237130729e-05, "loss": 1.738, "step": 9558 }, { "epoch": 0.93349609375, "grad_norm": 
0.141441211104393, "learning_rate": 5.5191543474757236e-05, "loss": 1.76, "step": 9559 }, { "epoch": 0.93359375, "grad_norm": 0.14369556307792664, "learning_rate": 5.517636653512465e-05, "loss": 1.7444, "step": 9560 }, { "epoch": 0.93369140625, "grad_norm": 0.1454237401485443, "learning_rate": 5.51612115539256e-05, "loss": 1.724, "step": 9561 }, { "epoch": 0.9337890625, "grad_norm": 0.13717947900295258, "learning_rate": 5.514607853267388e-05, "loss": 1.7635, "step": 9562 }, { "epoch": 0.93388671875, "grad_norm": 0.14188235998153687, "learning_rate": 5.513096747288122e-05, "loss": 1.7291, "step": 9563 }, { "epoch": 0.933984375, "grad_norm": 0.1474727839231491, "learning_rate": 5.5115878376057006e-05, "loss": 1.7555, "step": 9564 }, { "epoch": 0.93408203125, "grad_norm": 0.1366715282201767, "learning_rate": 5.510081124370857e-05, "loss": 1.7707, "step": 9565 }, { "epoch": 0.9341796875, "grad_norm": 0.14980633556842804, "learning_rate": 5.508576607734098e-05, "loss": 1.7302, "step": 9566 }, { "epoch": 0.93427734375, "grad_norm": 0.13902385532855988, "learning_rate": 5.507074287845707e-05, "loss": 1.7161, "step": 9567 }, { "epoch": 0.934375, "grad_norm": 0.1371515542268753, "learning_rate": 5.5055741648557564e-05, "loss": 1.7866, "step": 9568 }, { "epoch": 0.93447265625, "grad_norm": 0.15206152200698853, "learning_rate": 5.5040762389140915e-05, "loss": 1.7392, "step": 9569 }, { "epoch": 0.9345703125, "grad_norm": 0.14321306347846985, "learning_rate": 5.5025805101703454e-05, "loss": 1.7103, "step": 9570 }, { "epoch": 0.93466796875, "grad_norm": 0.13413956761360168, "learning_rate": 5.501086978773923e-05, "loss": 1.7642, "step": 9571 }, { "epoch": 0.934765625, "grad_norm": 0.15593856573104858, "learning_rate": 5.499595644874026e-05, "loss": 1.7888, "step": 9572 }, { "epoch": 0.93486328125, "grad_norm": 0.13974013924598694, "learning_rate": 5.498106508619612e-05, "loss": 1.7139, "step": 9573 }, { "epoch": 0.9349609375, "grad_norm": 0.1345612108707428, "learning_rate": 5.496619570159442e-05, "loss": 1.691, "step": 9574 }, { "epoch": 0.93505859375, "grad_norm": 0.1356162428855896, "learning_rate": 5.495134829642042e-05, "loss": 1.7277, "step": 9575 }, { "epoch": 0.93515625, "grad_norm": 0.1436801701784134, "learning_rate": 5.493652287215731e-05, "loss": 1.7603, "step": 9576 }, { "epoch": 0.93525390625, "grad_norm": 0.14131833612918854, "learning_rate": 5.492171943028594e-05, "loss": 1.7498, "step": 9577 }, { "epoch": 0.9353515625, "grad_norm": 0.1388276219367981, "learning_rate": 5.490693797228506e-05, "loss": 1.7305, "step": 9578 }, { "epoch": 0.93544921875, "grad_norm": 0.15177448093891144, "learning_rate": 5.4892178499631284e-05, "loss": 1.7411, "step": 9579 }, { "epoch": 0.935546875, "grad_norm": 0.14594286680221558, "learning_rate": 5.487744101379886e-05, "loss": 1.7622, "step": 9580 }, { "epoch": 0.93564453125, "grad_norm": 0.1676730066537857, "learning_rate": 5.4862725516259975e-05, "loss": 1.7511, "step": 9581 }, { "epoch": 0.9357421875, "grad_norm": 0.151187926530838, "learning_rate": 5.484803200848458e-05, "loss": 1.7245, "step": 9582 }, { "epoch": 0.93583984375, "grad_norm": 0.15270192921161652, "learning_rate": 5.483336049194042e-05, "loss": 1.7413, "step": 9583 }, { "epoch": 0.9359375, "grad_norm": 0.14444199204444885, "learning_rate": 5.481871096809304e-05, "loss": 1.7883, "step": 9584 }, { "epoch": 0.93603515625, "grad_norm": 0.14779460430145264, "learning_rate": 5.480408343840579e-05, "loss": 1.7783, "step": 9585 }, { "epoch": 0.9361328125, "grad_norm": 0.1475062221288681, 
"learning_rate": 5.4789477904339854e-05, "loss": 1.7391, "step": 9586 }, { "epoch": 0.93623046875, "grad_norm": 0.14178816974163055, "learning_rate": 5.477489436735418e-05, "loss": 1.75, "step": 9587 }, { "epoch": 0.936328125, "grad_norm": 0.14421983063220978, "learning_rate": 5.4760332828905554e-05, "loss": 1.7247, "step": 9588 }, { "epoch": 0.93642578125, "grad_norm": 0.1500566005706787, "learning_rate": 5.474579329044852e-05, "loss": 1.7547, "step": 9589 }, { "epoch": 0.9365234375, "grad_norm": 0.1402856409549713, "learning_rate": 5.473127575343548e-05, "loss": 1.7832, "step": 9590 }, { "epoch": 0.93662109375, "grad_norm": 0.14690731465816498, "learning_rate": 5.471678021931655e-05, "loss": 1.7683, "step": 9591 }, { "epoch": 0.93671875, "grad_norm": 0.15186801552772522, "learning_rate": 5.470230668953977e-05, "loss": 1.8045, "step": 9592 }, { "epoch": 0.93681640625, "grad_norm": 0.16504254937171936, "learning_rate": 5.468785516555088e-05, "loss": 1.7675, "step": 9593 }, { "epoch": 0.9369140625, "grad_norm": 0.17105290293693542, "learning_rate": 5.467342564879348e-05, "loss": 1.782, "step": 9594 }, { "epoch": 0.93701171875, "grad_norm": 0.16295786201953888, "learning_rate": 5.465901814070893e-05, "loss": 1.7477, "step": 9595 }, { "epoch": 0.937109375, "grad_norm": 0.15577486157417297, "learning_rate": 5.464463264273642e-05, "loss": 1.7328, "step": 9596 }, { "epoch": 0.93720703125, "grad_norm": 0.16691681742668152, "learning_rate": 5.4630269156312944e-05, "loss": 1.7607, "step": 9597 }, { "epoch": 0.9373046875, "grad_norm": 0.15616802871227264, "learning_rate": 5.461592768287325e-05, "loss": 1.7243, "step": 9598 }, { "epoch": 0.93740234375, "grad_norm": 0.1552988588809967, "learning_rate": 5.460160822385e-05, "loss": 1.75, "step": 9599 }, { "epoch": 0.9375, "grad_norm": 0.1737525463104248, "learning_rate": 5.4587310780673526e-05, "loss": 1.8073, "step": 9600 }, { "epoch": 0.93759765625, "grad_norm": 0.13688310980796814, "learning_rate": 5.457303535477202e-05, "loss": 1.757, "step": 9601 }, { "epoch": 0.9376953125, "grad_norm": 0.14564920961856842, "learning_rate": 5.455878194757145e-05, "loss": 1.7513, "step": 9602 }, { "epoch": 0.93779296875, "grad_norm": 0.16988873481750488, "learning_rate": 5.454455056049564e-05, "loss": 1.8049, "step": 9603 }, { "epoch": 0.937890625, "grad_norm": 0.15057744085788727, "learning_rate": 5.453034119496621e-05, "loss": 1.7606, "step": 9604 }, { "epoch": 0.93798828125, "grad_norm": 0.14949153363704681, "learning_rate": 5.4516153852402455e-05, "loss": 1.7396, "step": 9605 }, { "epoch": 0.9380859375, "grad_norm": 0.14138539135456085, "learning_rate": 5.450198853422162e-05, "loss": 1.7338, "step": 9606 }, { "epoch": 0.93818359375, "grad_norm": 0.15464581549167633, "learning_rate": 5.448784524183874e-05, "loss": 1.7613, "step": 9607 }, { "epoch": 0.93828125, "grad_norm": 0.1454283446073532, "learning_rate": 5.447372397666652e-05, "loss": 1.753, "step": 9608 }, { "epoch": 0.93837890625, "grad_norm": 0.16514913737773895, "learning_rate": 5.4459624740115605e-05, "loss": 1.7552, "step": 9609 }, { "epoch": 0.9384765625, "grad_norm": 0.13445134460926056, "learning_rate": 5.444554753359436e-05, "loss": 1.7582, "step": 9610 }, { "epoch": 0.93857421875, "grad_norm": 0.1601143777370453, "learning_rate": 5.443149235850899e-05, "loss": 1.7952, "step": 9611 }, { "epoch": 0.938671875, "grad_norm": 0.14449697732925415, "learning_rate": 5.4417459216263456e-05, "loss": 1.7436, "step": 9612 }, { "epoch": 0.93876953125, "grad_norm": 0.14362715184688568, "learning_rate": 
5.440344810825957e-05, "loss": 1.7773, "step": 9613 }, { "epoch": 0.9388671875, "grad_norm": 0.1551503986120224, "learning_rate": 5.4389459035896924e-05, "loss": 1.7496, "step": 9614 }, { "epoch": 0.93896484375, "grad_norm": 0.14954043924808502, "learning_rate": 5.437549200057287e-05, "loss": 1.708, "step": 9615 }, { "epoch": 0.9390625, "grad_norm": 0.15258921682834625, "learning_rate": 5.4361547003682594e-05, "loss": 1.7642, "step": 9616 }, { "epoch": 0.93916015625, "grad_norm": 0.1503753662109375, "learning_rate": 5.434762404661911e-05, "loss": 1.7685, "step": 9617 }, { "epoch": 0.9392578125, "grad_norm": 0.14526794850826263, "learning_rate": 5.43337231307732e-05, "loss": 1.7947, "step": 9618 }, { "epoch": 0.93935546875, "grad_norm": 0.1331133097410202, "learning_rate": 5.4319844257533376e-05, "loss": 1.7159, "step": 9619 }, { "epoch": 0.939453125, "grad_norm": 0.1675090789794922, "learning_rate": 5.430598742828609e-05, "loss": 1.7374, "step": 9620 }, { "epoch": 0.93955078125, "grad_norm": 0.14366990327835083, "learning_rate": 5.429215264441551e-05, "loss": 1.7573, "step": 9621 }, { "epoch": 0.9396484375, "grad_norm": 0.14041373133659363, "learning_rate": 5.4278339907303534e-05, "loss": 1.7703, "step": 9622 }, { "epoch": 0.93974609375, "grad_norm": 0.14375340938568115, "learning_rate": 5.426454921833e-05, "loss": 1.7684, "step": 9623 }, { "epoch": 0.93984375, "grad_norm": 0.17110306024551392, "learning_rate": 5.425078057887246e-05, "loss": 1.7494, "step": 9624 }, { "epoch": 0.93994140625, "grad_norm": 0.1486978828907013, "learning_rate": 5.42370339903063e-05, "loss": 1.7436, "step": 9625 }, { "epoch": 0.9400390625, "grad_norm": 0.14208245277404785, "learning_rate": 5.422330945400461e-05, "loss": 1.7644, "step": 9626 }, { "epoch": 0.94013671875, "grad_norm": 0.1485341191291809, "learning_rate": 5.420960697133844e-05, "loss": 1.7326, "step": 9627 }, { "epoch": 0.940234375, "grad_norm": 0.1504320353269577, "learning_rate": 5.419592654367651e-05, "loss": 1.7501, "step": 9628 }, { "epoch": 0.94033203125, "grad_norm": 0.15165887773036957, "learning_rate": 5.418226817238537e-05, "loss": 1.7787, "step": 9629 }, { "epoch": 0.9404296875, "grad_norm": 0.1346198320388794, "learning_rate": 5.4168631858829365e-05, "loss": 1.7543, "step": 9630 }, { "epoch": 0.94052734375, "grad_norm": 0.1407509744167328, "learning_rate": 5.415501760437063e-05, "loss": 1.7567, "step": 9631 }, { "epoch": 0.940625, "grad_norm": 0.15087643265724182, "learning_rate": 5.4141425410369165e-05, "loss": 1.7572, "step": 9632 }, { "epoch": 0.94072265625, "grad_norm": 0.14333799481391907, "learning_rate": 5.412785527818261e-05, "loss": 1.748, "step": 9633 }, { "epoch": 0.9408203125, "grad_norm": 0.1463695764541626, "learning_rate": 5.411430720916661e-05, "loss": 1.778, "step": 9634 }, { "epoch": 0.94091796875, "grad_norm": 0.16157643496990204, "learning_rate": 5.4100781204674456e-05, "loss": 1.7544, "step": 9635 }, { "epoch": 0.941015625, "grad_norm": 0.16833308339118958, "learning_rate": 5.408727726605723e-05, "loss": 1.7465, "step": 9636 }, { "epoch": 0.94111328125, "grad_norm": 0.1465718299150467, "learning_rate": 5.407379539466391e-05, "loss": 1.7457, "step": 9637 }, { "epoch": 0.9412109375, "grad_norm": 0.16391253471374512, "learning_rate": 5.4060335591841216e-05, "loss": 1.7602, "step": 9638 }, { "epoch": 0.94130859375, "grad_norm": 0.15320904552936554, "learning_rate": 5.404689785893363e-05, "loss": 1.7299, "step": 9639 }, { "epoch": 0.94140625, "grad_norm": 0.14962805807590485, "learning_rate": 5.4033482197283475e-05, 
"loss": 1.7376, "step": 9640 }, { "epoch": 0.94150390625, "grad_norm": 0.14151069521903992, "learning_rate": 5.402008860823086e-05, "loss": 1.7284, "step": 9641 }, { "epoch": 0.9416015625, "grad_norm": 0.1704002171754837, "learning_rate": 5.4006717093113735e-05, "loss": 1.7401, "step": 9642 }, { "epoch": 0.94169921875, "grad_norm": 0.14413423836231232, "learning_rate": 5.399336765326769e-05, "loss": 1.7443, "step": 9643 }, { "epoch": 0.941796875, "grad_norm": 0.15171261131763458, "learning_rate": 5.3980040290026315e-05, "loss": 1.7714, "step": 9644 }, { "epoch": 0.94189453125, "grad_norm": 0.1556403636932373, "learning_rate": 5.3966735004720815e-05, "loss": 1.7268, "step": 9645 }, { "epoch": 0.9419921875, "grad_norm": 0.14903421700000763, "learning_rate": 5.3953451798680367e-05, "loss": 1.7355, "step": 9646 }, { "epoch": 0.94208984375, "grad_norm": 0.16280169785022736, "learning_rate": 5.3940190673231715e-05, "loss": 1.756, "step": 9647 }, { "epoch": 0.9421875, "grad_norm": 0.15408851206302643, "learning_rate": 5.3926951629699634e-05, "loss": 1.7606, "step": 9648 }, { "epoch": 0.94228515625, "grad_norm": 0.13501505553722382, "learning_rate": 5.391373466940657e-05, "loss": 1.6955, "step": 9649 }, { "epoch": 0.9423828125, "grad_norm": 0.13672678172588348, "learning_rate": 5.3900539793672755e-05, "loss": 1.7639, "step": 9650 }, { "epoch": 0.94248046875, "grad_norm": 0.14011350274085999, "learning_rate": 5.3887367003816254e-05, "loss": 1.7569, "step": 9651 }, { "epoch": 0.942578125, "grad_norm": 0.14124925434589386, "learning_rate": 5.387421630115289e-05, "loss": 1.7139, "step": 9652 }, { "epoch": 0.94267578125, "grad_norm": 0.15335984528064728, "learning_rate": 5.3861087686996334e-05, "loss": 1.7416, "step": 9653 }, { "epoch": 0.9427734375, "grad_norm": 0.1359250545501709, "learning_rate": 5.3847981162658e-05, "loss": 1.744, "step": 9654 }, { "epoch": 0.94287109375, "grad_norm": 0.15394507348537445, "learning_rate": 5.383489672944711e-05, "loss": 1.7669, "step": 9655 }, { "epoch": 0.94296875, "grad_norm": 0.13463802635669708, "learning_rate": 5.382183438867071e-05, "loss": 1.7303, "step": 9656 }, { "epoch": 0.94306640625, "grad_norm": 0.1461130827665329, "learning_rate": 5.3808794141633574e-05, "loss": 1.7733, "step": 9657 }, { "epoch": 0.9431640625, "grad_norm": 0.15436963737010956, "learning_rate": 5.379577598963833e-05, "loss": 1.7876, "step": 9658 }, { "epoch": 0.94326171875, "grad_norm": 0.15938116610050201, "learning_rate": 5.378277993398537e-05, "loss": 1.7371, "step": 9659 }, { "epoch": 0.943359375, "grad_norm": 0.16502906382083893, "learning_rate": 5.3769805975972905e-05, "loss": 1.7428, "step": 9660 }, { "epoch": 0.94345703125, "grad_norm": 0.15102067589759827, "learning_rate": 5.375685411689686e-05, "loss": 1.7257, "step": 9661 }, { "epoch": 0.9435546875, "grad_norm": 0.15799298882484436, "learning_rate": 5.374392435805108e-05, "loss": 1.7718, "step": 9662 }, { "epoch": 0.94365234375, "grad_norm": 0.15762917697429657, "learning_rate": 5.3731016700727116e-05, "loss": 1.7555, "step": 9663 }, { "epoch": 0.94375, "grad_norm": 0.14687925577163696, "learning_rate": 5.3718131146214293e-05, "loss": 1.7357, "step": 9664 }, { "epoch": 0.94384765625, "grad_norm": 0.15379135310649872, "learning_rate": 5.3705267695799794e-05, "loss": 1.776, "step": 9665 }, { "epoch": 0.9439453125, "grad_norm": 0.15780843794345856, "learning_rate": 5.3692426350768573e-05, "loss": 1.7389, "step": 9666 }, { "epoch": 0.94404296875, "grad_norm": 0.14754267036914825, "learning_rate": 5.367960711240335e-05, "loss": 
1.7753, "step": 9667 }, { "epoch": 0.944140625, "grad_norm": 0.13812269270420074, "learning_rate": 5.3666809981984626e-05, "loss": 1.7823, "step": 9668 }, { "epoch": 0.94423828125, "grad_norm": 0.1568593978881836, "learning_rate": 5.365403496079075e-05, "loss": 1.7525, "step": 9669 }, { "epoch": 0.9443359375, "grad_norm": 0.14504589140415192, "learning_rate": 5.3641282050097874e-05, "loss": 1.7751, "step": 9670 }, { "epoch": 0.94443359375, "grad_norm": 0.13136795163154602, "learning_rate": 5.362855125117982e-05, "loss": 1.7585, "step": 9671 }, { "epoch": 0.94453125, "grad_norm": 0.14085446298122406, "learning_rate": 5.361584256530832e-05, "loss": 1.7362, "step": 9672 }, { "epoch": 0.94462890625, "grad_norm": 0.1519334316253662, "learning_rate": 5.3603155993752897e-05, "loss": 1.7191, "step": 9673 }, { "epoch": 0.9447265625, "grad_norm": 0.1419547200202942, "learning_rate": 5.359049153778077e-05, "loss": 1.7326, "step": 9674 }, { "epoch": 0.94482421875, "grad_norm": 0.14287972450256348, "learning_rate": 5.3577849198657024e-05, "loss": 1.7945, "step": 9675 }, { "epoch": 0.944921875, "grad_norm": 0.16628387570381165, "learning_rate": 5.35652289776445e-05, "loss": 1.79, "step": 9676 }, { "epoch": 0.94501953125, "grad_norm": 0.14882385730743408, "learning_rate": 5.355263087600391e-05, "loss": 1.7875, "step": 9677 }, { "epoch": 0.9451171875, "grad_norm": 0.15155582129955292, "learning_rate": 5.354005489499361e-05, "loss": 1.7594, "step": 9678 }, { "epoch": 0.94521484375, "grad_norm": 0.16570313274860382, "learning_rate": 5.3527501035869895e-05, "loss": 1.7556, "step": 9679 }, { "epoch": 0.9453125, "grad_norm": 0.16755105555057526, "learning_rate": 5.351496929988675e-05, "loss": 1.7726, "step": 9680 }, { "epoch": 0.94541015625, "grad_norm": 0.13984628021717072, "learning_rate": 5.350245968829599e-05, "loss": 1.7843, "step": 9681 }, { "epoch": 0.9455078125, "grad_norm": 0.1524660736322403, "learning_rate": 5.348997220234721e-05, "loss": 1.753, "step": 9682 }, { "epoch": 0.94560546875, "grad_norm": 0.14554443955421448, "learning_rate": 5.347750684328779e-05, "loss": 1.768, "step": 9683 }, { "epoch": 0.945703125, "grad_norm": 0.16853901743888855, "learning_rate": 5.3465063612362946e-05, "loss": 1.8187, "step": 9684 }, { "epoch": 0.94580078125, "grad_norm": 0.13927994668483734, "learning_rate": 5.345264251081564e-05, "loss": 1.7431, "step": 9685 }, { "epoch": 0.9458984375, "grad_norm": 0.1512438803911209, "learning_rate": 5.344024353988659e-05, "loss": 1.7469, "step": 9686 }, { "epoch": 0.94599609375, "grad_norm": 0.1369524598121643, "learning_rate": 5.342786670081436e-05, "loss": 1.7686, "step": 9687 }, { "epoch": 0.94609375, "grad_norm": 0.15294590592384338, "learning_rate": 5.3415511994835325e-05, "loss": 1.733, "step": 9688 }, { "epoch": 0.94619140625, "grad_norm": 0.1492982804775238, "learning_rate": 5.3403179423183556e-05, "loss": 1.7657, "step": 9689 }, { "epoch": 0.9462890625, "grad_norm": 0.17701078951358795, "learning_rate": 5.3390868987090966e-05, "loss": 1.7519, "step": 9690 }, { "epoch": 0.94638671875, "grad_norm": 0.1309693455696106, "learning_rate": 5.337858068778733e-05, "loss": 1.7553, "step": 9691 }, { "epoch": 0.946484375, "grad_norm": 0.15269261598587036, "learning_rate": 5.336631452650009e-05, "loss": 1.7393, "step": 9692 }, { "epoch": 0.94658203125, "grad_norm": 0.17046058177947998, "learning_rate": 5.335407050445452e-05, "loss": 1.7759, "step": 9693 }, { "epoch": 0.9466796875, "grad_norm": 0.13652271032333374, "learning_rate": 5.334184862287368e-05, "loss": 1.7678, "step": 9694 
}, { "epoch": 0.94677734375, "grad_norm": 0.14945974946022034, "learning_rate": 5.33296488829785e-05, "loss": 1.7354, "step": 9695 }, { "epoch": 0.946875, "grad_norm": 0.16588227450847626, "learning_rate": 5.331747128598751e-05, "loss": 1.707, "step": 9696 }, { "epoch": 0.94697265625, "grad_norm": 0.14763125777244568, "learning_rate": 5.3305315833117253e-05, "loss": 1.7853, "step": 9697 }, { "epoch": 0.9470703125, "grad_norm": 0.15380235016345978, "learning_rate": 5.3293182525581895e-05, "loss": 1.7193, "step": 9698 }, { "epoch": 0.94716796875, "grad_norm": 0.14665217697620392, "learning_rate": 5.3281071364593455e-05, "loss": 1.7336, "step": 9699 }, { "epoch": 0.947265625, "grad_norm": 0.1597006469964981, "learning_rate": 5.326898235136174e-05, "loss": 1.767, "step": 9700 }, { "epoch": 0.94736328125, "grad_norm": 0.14159587025642395, "learning_rate": 5.3256915487094315e-05, "loss": 1.7311, "step": 9701 }, { "epoch": 0.9474609375, "grad_norm": 0.15877284109592438, "learning_rate": 5.324487077299658e-05, "loss": 1.7709, "step": 9702 }, { "epoch": 0.94755859375, "grad_norm": 0.14029107987880707, "learning_rate": 5.323284821027166e-05, "loss": 1.7171, "step": 9703 }, { "epoch": 0.94765625, "grad_norm": 0.15219879150390625, "learning_rate": 5.322084780012055e-05, "loss": 1.7349, "step": 9704 }, { "epoch": 0.94775390625, "grad_norm": 0.15058782696723938, "learning_rate": 5.3208869543741945e-05, "loss": 1.739, "step": 9705 }, { "epoch": 0.9478515625, "grad_norm": 0.1473449319601059, "learning_rate": 5.3196913442332377e-05, "loss": 1.7408, "step": 9706 }, { "epoch": 0.94794921875, "grad_norm": 0.15796789526939392, "learning_rate": 5.318497949708617e-05, "loss": 1.7908, "step": 9707 }, { "epoch": 0.948046875, "grad_norm": 0.13812652230262756, "learning_rate": 5.317306770919541e-05, "loss": 1.7441, "step": 9708 }, { "epoch": 0.94814453125, "grad_norm": 0.17296795547008514, "learning_rate": 5.316117807984995e-05, "loss": 1.743, "step": 9709 }, { "epoch": 0.9482421875, "grad_norm": 0.1425560861825943, "learning_rate": 5.3149310610237504e-05, "loss": 1.7148, "step": 9710 }, { "epoch": 0.94833984375, "grad_norm": 0.15279194712638855, "learning_rate": 5.3137465301543494e-05, "loss": 1.7655, "step": 9711 }, { "epoch": 0.9484375, "grad_norm": 0.14186939597129822, "learning_rate": 5.3125642154951206e-05, "loss": 1.7221, "step": 9712 }, { "epoch": 0.94853515625, "grad_norm": 0.14186277985572815, "learning_rate": 5.311384117164161e-05, "loss": 1.7308, "step": 9713 }, { "epoch": 0.9486328125, "grad_norm": 0.13877104222774506, "learning_rate": 5.310206235279354e-05, "loss": 1.777, "step": 9714 }, { "epoch": 0.94873046875, "grad_norm": 0.1527310311794281, "learning_rate": 5.309030569958363e-05, "loss": 1.761, "step": 9715 }, { "epoch": 0.948828125, "grad_norm": 0.1486811637878418, "learning_rate": 5.307857121318621e-05, "loss": 1.7085, "step": 9716 }, { "epoch": 0.94892578125, "grad_norm": 0.14378154277801514, "learning_rate": 5.306685889477351e-05, "loss": 1.7623, "step": 9717 }, { "epoch": 0.9490234375, "grad_norm": 0.13761432468891144, "learning_rate": 5.3055168745515424e-05, "loss": 1.7556, "step": 9718 }, { "epoch": 0.94912109375, "grad_norm": 0.16176234185695648, "learning_rate": 5.304350076657976e-05, "loss": 1.8014, "step": 9719 }, { "epoch": 0.94921875, "grad_norm": 0.13901932537555695, "learning_rate": 5.3031854959131995e-05, "loss": 1.7836, "step": 9720 }, { "epoch": 0.94931640625, "grad_norm": 0.13584284484386444, "learning_rate": 5.3020231324335444e-05, "loss": 1.7345, "step": 9721 }, { "epoch": 
0.9494140625, "grad_norm": 0.15879127383232117, "learning_rate": 5.3008629863351225e-05, "loss": 1.7163, "step": 9722 }, { "epoch": 0.94951171875, "grad_norm": 0.13192866742610931, "learning_rate": 5.2997050577338224e-05, "loss": 1.7408, "step": 9723 }, { "epoch": 0.949609375, "grad_norm": 0.14464986324310303, "learning_rate": 5.29854934674531e-05, "loss": 1.7986, "step": 9724 }, { "epoch": 0.94970703125, "grad_norm": 0.15369461476802826, "learning_rate": 5.297395853485031e-05, "loss": 1.7453, "step": 9725 }, { "epoch": 0.9498046875, "grad_norm": 0.13490596413612366, "learning_rate": 5.2962445780682115e-05, "loss": 1.7302, "step": 9726 }, { "epoch": 0.94990234375, "grad_norm": 0.12921904027462006, "learning_rate": 5.295095520609847e-05, "loss": 1.7552, "step": 9727 }, { "epoch": 0.95, "grad_norm": 0.13702909648418427, "learning_rate": 5.2939486812247234e-05, "loss": 1.7732, "step": 9728 }, { "epoch": 0.95009765625, "grad_norm": 0.1478365957736969, "learning_rate": 5.2928040600273996e-05, "loss": 1.7784, "step": 9729 }, { "epoch": 0.9501953125, "grad_norm": 0.14154841005802155, "learning_rate": 5.2916616571322095e-05, "loss": 1.7795, "step": 9730 }, { "epoch": 0.95029296875, "grad_norm": 0.15265358984470367, "learning_rate": 5.290521472653273e-05, "loss": 1.7711, "step": 9731 }, { "epoch": 0.950390625, "grad_norm": 0.14045588672161102, "learning_rate": 5.2893835067044834e-05, "loss": 1.7292, "step": 9732 }, { "epoch": 0.95048828125, "grad_norm": 0.145099475979805, "learning_rate": 5.288247759399515e-05, "loss": 1.7083, "step": 9733 }, { "epoch": 0.9505859375, "grad_norm": 0.1357146054506302, "learning_rate": 5.2871142308518176e-05, "loss": 1.775, "step": 9734 }, { "epoch": 0.95068359375, "grad_norm": 0.14485229551792145, "learning_rate": 5.2859829211746166e-05, "loss": 1.747, "step": 9735 }, { "epoch": 0.95078125, "grad_norm": 0.16009047627449036, "learning_rate": 5.284853830480923e-05, "loss": 1.7143, "step": 9736 }, { "epoch": 0.95087890625, "grad_norm": 0.14395591616630554, "learning_rate": 5.283726958883526e-05, "loss": 1.7683, "step": 9737 }, { "epoch": 0.9509765625, "grad_norm": 0.15190599858760834, "learning_rate": 5.282602306494986e-05, "loss": 1.744, "step": 9738 }, { "epoch": 0.95107421875, "grad_norm": 0.18286320567131042, "learning_rate": 5.281479873427647e-05, "loss": 1.7717, "step": 9739 }, { "epoch": 0.951171875, "grad_norm": 0.14734326303005219, "learning_rate": 5.2803596597936305e-05, "loss": 1.7183, "step": 9740 }, { "epoch": 0.95126953125, "grad_norm": 0.16295954585075378, "learning_rate": 5.2792416657048367e-05, "loss": 1.7446, "step": 9741 }, { "epoch": 0.9513671875, "grad_norm": 0.158379465341568, "learning_rate": 5.278125891272942e-05, "loss": 1.758, "step": 9742 }, { "epoch": 0.95146484375, "grad_norm": 0.14285267889499664, "learning_rate": 5.277012336609403e-05, "loss": 1.7556, "step": 9743 }, { "epoch": 0.9515625, "grad_norm": 0.16852611303329468, "learning_rate": 5.275901001825452e-05, "loss": 1.7881, "step": 9744 }, { "epoch": 0.95166015625, "grad_norm": 0.14829829335212708, "learning_rate": 5.274791887032102e-05, "loss": 1.7389, "step": 9745 }, { "epoch": 0.9517578125, "grad_norm": 0.14372125267982483, "learning_rate": 5.273684992340148e-05, "loss": 1.7633, "step": 9746 }, { "epoch": 0.95185546875, "grad_norm": 0.152838796377182, "learning_rate": 5.272580317860158e-05, "loss": 1.7318, "step": 9747 }, { "epoch": 0.951953125, "grad_norm": 0.13861367106437683, "learning_rate": 5.27147786370247e-05, "loss": 1.7503, "step": 9748 }, { "epoch": 0.95205078125, 
"grad_norm": 0.14429891109466553, "learning_rate": 5.2703776299772213e-05, "loss": 1.7608, "step": 9749 }, { "epoch": 0.9521484375, "grad_norm": 0.14219792187213898, "learning_rate": 5.269279616794311e-05, "loss": 1.7477, "step": 9750 }, { "epoch": 0.95224609375, "grad_norm": 0.1500730812549591, "learning_rate": 5.268183824263416e-05, "loss": 1.79, "step": 9751 }, { "epoch": 0.95234375, "grad_norm": 0.15124870836734772, "learning_rate": 5.2670902524940046e-05, "loss": 1.7524, "step": 9752 }, { "epoch": 0.95244140625, "grad_norm": 0.1608966439962387, "learning_rate": 5.26599890159531e-05, "loss": 1.7691, "step": 9753 }, { "epoch": 0.9525390625, "grad_norm": 0.14132200181484222, "learning_rate": 5.264909771676353e-05, "loss": 1.7435, "step": 9754 }, { "epoch": 0.95263671875, "grad_norm": 0.15569786727428436, "learning_rate": 5.263822862845921e-05, "loss": 1.7505, "step": 9755 }, { "epoch": 0.952734375, "grad_norm": 0.14820490777492523, "learning_rate": 5.262738175212589e-05, "loss": 1.7071, "step": 9756 }, { "epoch": 0.95283203125, "grad_norm": 0.15012401342391968, "learning_rate": 5.2616557088847105e-05, "loss": 1.7811, "step": 9757 }, { "epoch": 0.9529296875, "grad_norm": 0.14126309752464294, "learning_rate": 5.2605754639704135e-05, "loss": 1.7465, "step": 9758 }, { "epoch": 0.95302734375, "grad_norm": 0.13762986660003662, "learning_rate": 5.259497440577603e-05, "loss": 1.7552, "step": 9759 }, { "epoch": 0.953125, "grad_norm": 0.14950351417064667, "learning_rate": 5.258421638813965e-05, "loss": 1.7667, "step": 9760 }, { "epoch": 0.95322265625, "grad_norm": 0.13981014490127563, "learning_rate": 5.257348058786965e-05, "loss": 1.7584, "step": 9761 }, { "epoch": 0.9533203125, "grad_norm": 0.13902729749679565, "learning_rate": 5.256276700603841e-05, "loss": 1.791, "step": 9762 }, { "epoch": 0.95341796875, "grad_norm": 0.1501462459564209, "learning_rate": 5.2552075643716125e-05, "loss": 1.7876, "step": 9763 }, { "epoch": 0.953515625, "grad_norm": 0.13712246716022491, "learning_rate": 5.254140650197075e-05, "loss": 1.7515, "step": 9764 }, { "epoch": 0.95361328125, "grad_norm": 0.16392797231674194, "learning_rate": 5.253075958186807e-05, "loss": 1.7437, "step": 9765 }, { "epoch": 0.9537109375, "grad_norm": 0.18154552578926086, "learning_rate": 5.252013488447162e-05, "loss": 1.7452, "step": 9766 }, { "epoch": 0.95380859375, "grad_norm": 0.1564246565103531, "learning_rate": 5.250953241084269e-05, "loss": 1.7419, "step": 9767 }, { "epoch": 0.95390625, "grad_norm": 0.14539532363414764, "learning_rate": 5.249895216204042e-05, "loss": 1.7661, "step": 9768 }, { "epoch": 0.95400390625, "grad_norm": 0.15641264617443085, "learning_rate": 5.2488394139121605e-05, "loss": 1.7485, "step": 9769 }, { "epoch": 0.9541015625, "grad_norm": 0.1653488725423813, "learning_rate": 5.2477858343140954e-05, "loss": 1.7701, "step": 9770 }, { "epoch": 0.95419921875, "grad_norm": 0.14094914495944977, "learning_rate": 5.2467344775150884e-05, "loss": 1.7372, "step": 9771 }, { "epoch": 0.954296875, "grad_norm": 0.16770319640636444, "learning_rate": 5.2456853436201585e-05, "loss": 1.7794, "step": 9772 }, { "epoch": 0.95439453125, "grad_norm": 0.1647878736257553, "learning_rate": 5.2446384327341117e-05, "loss": 1.7727, "step": 9773 }, { "epoch": 0.9544921875, "grad_norm": 0.15557368099689484, "learning_rate": 5.2435937449615183e-05, "loss": 1.7577, "step": 9774 }, { "epoch": 0.95458984375, "grad_norm": 0.16647598147392273, "learning_rate": 5.2425512804067365e-05, "loss": 1.7664, "step": 9775 }, { "epoch": 0.9546875, "grad_norm": 
0.15995268523693085, "learning_rate": 5.241511039173899e-05, "loss": 1.7826, "step": 9776 }, { "epoch": 0.95478515625, "grad_norm": 0.16113397479057312, "learning_rate": 5.240473021366915e-05, "loss": 1.7832, "step": 9777 }, { "epoch": 0.9548828125, "grad_norm": 0.1500106006860733, "learning_rate": 5.239437227089476e-05, "loss": 1.7402, "step": 9778 }, { "epoch": 0.95498046875, "grad_norm": 0.14547623693943024, "learning_rate": 5.2384036564450454e-05, "loss": 1.7364, "step": 9779 }, { "epoch": 0.955078125, "grad_norm": 0.13164393603801727, "learning_rate": 5.23737230953687e-05, "loss": 1.7206, "step": 9780 }, { "epoch": 0.95517578125, "grad_norm": 0.14686040580272675, "learning_rate": 5.236343186467971e-05, "loss": 1.7463, "step": 9781 }, { "epoch": 0.9552734375, "grad_norm": 0.159920334815979, "learning_rate": 5.235316287341151e-05, "loss": 1.7411, "step": 9782 }, { "epoch": 0.95537109375, "grad_norm": 0.14460837841033936, "learning_rate": 5.234291612258987e-05, "loss": 1.7771, "step": 9783 }, { "epoch": 0.95546875, "grad_norm": 0.13917967677116394, "learning_rate": 5.233269161323835e-05, "loss": 1.7748, "step": 9784 }, { "epoch": 0.95556640625, "grad_norm": 0.16187816858291626, "learning_rate": 5.232248934637825e-05, "loss": 1.7449, "step": 9785 }, { "epoch": 0.9556640625, "grad_norm": 0.13739722967147827, "learning_rate": 5.231230932302875e-05, "loss": 1.7692, "step": 9786 }, { "epoch": 0.95576171875, "grad_norm": 0.13613706827163696, "learning_rate": 5.230215154420671e-05, "loss": 1.7541, "step": 9787 }, { "epoch": 0.955859375, "grad_norm": 0.14893119037151337, "learning_rate": 5.229201601092679e-05, "loss": 1.734, "step": 9788 }, { "epoch": 0.95595703125, "grad_norm": 0.14092542231082916, "learning_rate": 5.228190272420148e-05, "loss": 1.7765, "step": 9789 }, { "epoch": 0.9560546875, "grad_norm": 0.1454109102487564, "learning_rate": 5.227181168504095e-05, "loss": 1.7871, "step": 9790 }, { "epoch": 0.95615234375, "grad_norm": 0.13866956532001495, "learning_rate": 5.2261742894453256e-05, "loss": 1.7639, "step": 9791 }, { "epoch": 0.95625, "grad_norm": 0.14176131784915924, "learning_rate": 5.225169635344417e-05, "loss": 1.7935, "step": 9792 }, { "epoch": 0.95634765625, "grad_norm": 0.14462070167064667, "learning_rate": 5.2241672063017234e-05, "loss": 1.7091, "step": 9793 }, { "epoch": 0.9564453125, "grad_norm": 0.15206368267536163, "learning_rate": 5.22316700241738e-05, "loss": 1.7396, "step": 9794 }, { "epoch": 0.95654296875, "grad_norm": 0.1302817463874817, "learning_rate": 5.222169023791297e-05, "loss": 1.7586, "step": 9795 }, { "epoch": 0.956640625, "grad_norm": 0.14827409386634827, "learning_rate": 5.2211732705231664e-05, "loss": 1.7389, "step": 9796 }, { "epoch": 0.95673828125, "grad_norm": 0.14333099126815796, "learning_rate": 5.220179742712454e-05, "loss": 1.76, "step": 9797 }, { "epoch": 0.9568359375, "grad_norm": 0.1335545778274536, "learning_rate": 5.2191884404584034e-05, "loss": 1.751, "step": 9798 }, { "epoch": 0.95693359375, "grad_norm": 0.14895343780517578, "learning_rate": 5.218199363860035e-05, "loss": 1.7136, "step": 9799 }, { "epoch": 0.95703125, "grad_norm": 0.13867689669132233, "learning_rate": 5.217212513016155e-05, "loss": 1.7607, "step": 9800 }, { "epoch": 0.95712890625, "grad_norm": 0.13837990164756775, "learning_rate": 5.216227888025334e-05, "loss": 1.7758, "step": 9801 }, { "epoch": 0.9572265625, "grad_norm": 0.15990902483463287, "learning_rate": 5.215245488985932e-05, "loss": 1.7848, "step": 9802 }, { "epoch": 0.95732421875, "grad_norm": 0.13140006363391876, 
"learning_rate": 5.214265315996079e-05, "loss": 1.7364, "step": 9803 }, { "epoch": 0.957421875, "grad_norm": 0.14013227820396423, "learning_rate": 5.21328736915369e-05, "loss": 1.7482, "step": 9804 }, { "epoch": 0.95751953125, "grad_norm": 0.16975870728492737, "learning_rate": 5.212311648556449e-05, "loss": 1.7438, "step": 9805 }, { "epoch": 0.9576171875, "grad_norm": 0.13773271441459656, "learning_rate": 5.2113381543018236e-05, "loss": 1.742, "step": 9806 }, { "epoch": 0.95771484375, "grad_norm": 0.14341530203819275, "learning_rate": 5.210366886487056e-05, "loss": 1.7333, "step": 9807 }, { "epoch": 0.9578125, "grad_norm": 0.1522994041442871, "learning_rate": 5.209397845209168e-05, "loss": 1.7167, "step": 9808 }, { "epoch": 0.95791015625, "grad_norm": 0.1510804146528244, "learning_rate": 5.208431030564959e-05, "loss": 1.7285, "step": 9809 }, { "epoch": 0.9580078125, "grad_norm": 0.13801011443138123, "learning_rate": 5.207466442651004e-05, "loss": 1.7215, "step": 9810 }, { "epoch": 0.95810546875, "grad_norm": 0.15379686653614044, "learning_rate": 5.206504081563659e-05, "loss": 1.7522, "step": 9811 }, { "epoch": 0.958203125, "grad_norm": 0.15224523842334747, "learning_rate": 5.2055439473990506e-05, "loss": 1.7505, "step": 9812 }, { "epoch": 0.95830078125, "grad_norm": 0.14360292255878448, "learning_rate": 5.2045860402530934e-05, "loss": 1.7347, "step": 9813 }, { "epoch": 0.9583984375, "grad_norm": 0.14182303845882416, "learning_rate": 5.203630360221469e-05, "loss": 1.7343, "step": 9814 }, { "epoch": 0.95849609375, "grad_norm": 0.14070861041545868, "learning_rate": 5.202676907399645e-05, "loss": 1.7484, "step": 9815 }, { "epoch": 0.95859375, "grad_norm": 0.13198833167552948, "learning_rate": 5.201725681882861e-05, "loss": 1.7027, "step": 9816 }, { "epoch": 0.95869140625, "grad_norm": 0.1440756618976593, "learning_rate": 5.2007766837661355e-05, "loss": 1.7197, "step": 9817 }, { "epoch": 0.9587890625, "grad_norm": 0.14051511883735657, "learning_rate": 5.19982991314427e-05, "loss": 1.7064, "step": 9818 }, { "epoch": 0.95888671875, "grad_norm": 0.14933715760707855, "learning_rate": 5.1988853701118286e-05, "loss": 1.7128, "step": 9819 }, { "epoch": 0.958984375, "grad_norm": 0.14994874596595764, "learning_rate": 5.1979430547631725e-05, "loss": 1.7411, "step": 9820 }, { "epoch": 0.95908203125, "grad_norm": 0.1473350077867508, "learning_rate": 5.197002967192426e-05, "loss": 1.7286, "step": 9821 }, { "epoch": 0.9591796875, "grad_norm": 0.14892680943012238, "learning_rate": 5.196065107493494e-05, "loss": 1.7405, "step": 9822 }, { "epoch": 0.95927734375, "grad_norm": 0.1871940940618515, "learning_rate": 5.195129475760067e-05, "loss": 1.7441, "step": 9823 }, { "epoch": 0.959375, "grad_norm": 0.14954039454460144, "learning_rate": 5.1941960720855994e-05, "loss": 1.737, "step": 9824 }, { "epoch": 0.95947265625, "grad_norm": 0.14618945121765137, "learning_rate": 5.193264896563333e-05, "loss": 1.7968, "step": 9825 }, { "epoch": 0.9595703125, "grad_norm": 0.13943913578987122, "learning_rate": 5.192335949286284e-05, "loss": 1.7392, "step": 9826 }, { "epoch": 0.95966796875, "grad_norm": 0.14575551450252533, "learning_rate": 5.191409230347245e-05, "loss": 1.7017, "step": 9827 }, { "epoch": 0.959765625, "grad_norm": 0.1514337658882141, "learning_rate": 5.190484739838787e-05, "loss": 1.771, "step": 9828 }, { "epoch": 0.95986328125, "grad_norm": 0.1423460841178894, "learning_rate": 5.189562477853259e-05, "loss": 1.741, "step": 9829 }, { "epoch": 0.9599609375, "grad_norm": 0.14361444115638733, "learning_rate": 
5.1886424444827874e-05, "loss": 1.7636, "step": 9830 }, { "epoch": 0.96005859375, "grad_norm": 0.13859564065933228, "learning_rate": 5.187724639819276e-05, "loss": 1.8191, "step": 9831 }, { "epoch": 0.96015625, "grad_norm": 0.15787574648857117, "learning_rate": 5.1868090639544046e-05, "loss": 1.7485, "step": 9832 }, { "epoch": 0.96025390625, "grad_norm": 0.1522829383611679, "learning_rate": 5.185895716979629e-05, "loss": 1.7554, "step": 9833 }, { "epoch": 0.9603515625, "grad_norm": 0.14407368004322052, "learning_rate": 5.184984598986184e-05, "loss": 1.778, "step": 9834 }, { "epoch": 0.96044921875, "grad_norm": 0.14093250036239624, "learning_rate": 5.184075710065087e-05, "loss": 1.666, "step": 9835 }, { "epoch": 0.960546875, "grad_norm": 0.1399955004453659, "learning_rate": 5.183169050307125e-05, "loss": 1.7275, "step": 9836 }, { "epoch": 0.96064453125, "grad_norm": 0.1418941766023636, "learning_rate": 5.182264619802864e-05, "loss": 1.7545, "step": 9837 }, { "epoch": 0.9607421875, "grad_norm": 0.15154936909675598, "learning_rate": 5.181362418642651e-05, "loss": 1.799, "step": 9838 }, { "epoch": 0.96083984375, "grad_norm": 0.13488145172595978, "learning_rate": 5.180462446916611e-05, "loss": 1.7388, "step": 9839 }, { "epoch": 0.9609375, "grad_norm": 0.15648405253887177, "learning_rate": 5.179564704714634e-05, "loss": 1.7644, "step": 9840 }, { "epoch": 0.96103515625, "grad_norm": 0.15131159126758575, "learning_rate": 5.178669192126402e-05, "loss": 1.735, "step": 9841 }, { "epoch": 0.9611328125, "grad_norm": 0.1400216966867447, "learning_rate": 5.177775909241371e-05, "loss": 1.7799, "step": 9842 }, { "epoch": 0.96123046875, "grad_norm": 0.1512681394815445, "learning_rate": 5.1768848561487695e-05, "loss": 1.7462, "step": 9843 }, { "epoch": 0.961328125, "grad_norm": 0.15432795882225037, "learning_rate": 5.1759960329376036e-05, "loss": 1.7829, "step": 9844 }, { "epoch": 0.96142578125, "grad_norm": 0.14041361212730408, "learning_rate": 5.175109439696662e-05, "loss": 1.7631, "step": 9845 }, { "epoch": 0.9615234375, "grad_norm": 0.13926494121551514, "learning_rate": 5.17422507651451e-05, "loss": 1.7573, "step": 9846 }, { "epoch": 0.96162109375, "grad_norm": 0.14673393964767456, "learning_rate": 5.173342943479482e-05, "loss": 1.7561, "step": 9847 }, { "epoch": 0.96171875, "grad_norm": 0.13796427845954895, "learning_rate": 5.172463040679694e-05, "loss": 1.7559, "step": 9848 }, { "epoch": 0.96181640625, "grad_norm": 0.1413441002368927, "learning_rate": 5.171585368203049e-05, "loss": 1.7521, "step": 9849 }, { "epoch": 0.9619140625, "grad_norm": 0.14839547872543335, "learning_rate": 5.1707099261372117e-05, "loss": 1.8004, "step": 9850 }, { "epoch": 0.96201171875, "grad_norm": 0.1462574005126953, "learning_rate": 5.169836714569633e-05, "loss": 1.7598, "step": 9851 }, { "epoch": 0.962109375, "grad_norm": 0.14214396476745605, "learning_rate": 5.168965733587539e-05, "loss": 1.7499, "step": 9852 }, { "epoch": 0.96220703125, "grad_norm": 0.1537688970565796, "learning_rate": 5.168096983277935e-05, "loss": 1.7341, "step": 9853 }, { "epoch": 0.9623046875, "grad_norm": 0.133389413356781, "learning_rate": 5.167230463727597e-05, "loss": 1.733, "step": 9854 }, { "epoch": 0.96240234375, "grad_norm": 0.1608576774597168, "learning_rate": 5.166366175023084e-05, "loss": 1.7036, "step": 9855 }, { "epoch": 0.9625, "grad_norm": 0.1406978964805603, "learning_rate": 5.165504117250736e-05, "loss": 1.6998, "step": 9856 }, { "epoch": 0.96259765625, "grad_norm": 0.14797349274158478, "learning_rate": 5.1646442904966595e-05, "loss": 
1.7241, "step": 9857 }, { "epoch": 0.9626953125, "grad_norm": 0.13762392103672028, "learning_rate": 5.163786694846743e-05, "loss": 1.721, "step": 9858 }, { "epoch": 0.96279296875, "grad_norm": 0.16401667892932892, "learning_rate": 5.1629313303866564e-05, "loss": 1.7831, "step": 9859 }, { "epoch": 0.962890625, "grad_norm": 0.1603327989578247, "learning_rate": 5.162078197201842e-05, "loss": 1.7365, "step": 9860 }, { "epoch": 0.96298828125, "grad_norm": 0.13740214705467224, "learning_rate": 5.1612272953775176e-05, "loss": 1.7812, "step": 9861 }, { "epoch": 0.9630859375, "grad_norm": 0.1615331768989563, "learning_rate": 5.160378624998683e-05, "loss": 1.7407, "step": 9862 }, { "epoch": 0.96318359375, "grad_norm": 0.15644821524620056, "learning_rate": 5.159532186150113e-05, "loss": 1.7229, "step": 9863 }, { "epoch": 0.96328125, "grad_norm": 0.14624477922916412, "learning_rate": 5.15868797891636e-05, "loss": 1.7648, "step": 9864 }, { "epoch": 0.96337890625, "grad_norm": 0.18547755479812622, "learning_rate": 5.157846003381751e-05, "loss": 1.7873, "step": 9865 }, { "epoch": 0.9634765625, "grad_norm": 0.14698432385921478, "learning_rate": 5.157006259630391e-05, "loss": 1.7485, "step": 9866 }, { "epoch": 0.96357421875, "grad_norm": 0.1551302969455719, "learning_rate": 5.1561687477461673e-05, "loss": 1.5796, "step": 9867 }, { "epoch": 0.963671875, "grad_norm": 0.16691991686820984, "learning_rate": 5.155333467812735e-05, "loss": 1.7491, "step": 9868 }, { "epoch": 0.96376953125, "grad_norm": 0.17548663914203644, "learning_rate": 5.1545004199135345e-05, "loss": 1.7065, "step": 9869 }, { "epoch": 0.9638671875, "grad_norm": 0.15538671612739563, "learning_rate": 5.1536696041317786e-05, "loss": 1.7271, "step": 9870 }, { "epoch": 0.96396484375, "grad_norm": 0.16135118901729584, "learning_rate": 5.1528410205504586e-05, "loss": 1.7329, "step": 9871 }, { "epoch": 0.9640625, "grad_norm": 0.15775802731513977, "learning_rate": 5.1520146692523425e-05, "loss": 1.7196, "step": 9872 }, { "epoch": 0.96416015625, "grad_norm": 0.14341408014297485, "learning_rate": 5.151190550319973e-05, "loss": 1.7416, "step": 9873 }, { "epoch": 0.9642578125, "grad_norm": 0.15412406623363495, "learning_rate": 5.150368663835681e-05, "loss": 1.7404, "step": 9874 }, { "epoch": 0.96435546875, "grad_norm": 0.15843328833580017, "learning_rate": 5.1495490098815536e-05, "loss": 1.7015, "step": 9875 }, { "epoch": 0.964453125, "grad_norm": 0.1646747589111328, "learning_rate": 5.148731588539474e-05, "loss": 1.7421, "step": 9876 }, { "epoch": 0.96455078125, "grad_norm": 0.14387108385562897, "learning_rate": 5.1479163998910975e-05, "loss": 1.7193, "step": 9877 }, { "epoch": 0.9646484375, "grad_norm": 0.15753188729286194, "learning_rate": 5.147103444017848e-05, "loss": 1.8003, "step": 9878 }, { "epoch": 0.96474609375, "grad_norm": 0.1470082700252533, "learning_rate": 5.1462927210009384e-05, "loss": 1.7025, "step": 9879 }, { "epoch": 0.96484375, "grad_norm": 0.13114619255065918, "learning_rate": 5.145484230921346e-05, "loss": 1.7178, "step": 9880 }, { "epoch": 0.96494140625, "grad_norm": 0.13801923394203186, "learning_rate": 5.14467797385984e-05, "loss": 1.7945, "step": 9881 }, { "epoch": 0.9650390625, "grad_norm": 0.15197636187076569, "learning_rate": 5.143873949896952e-05, "loss": 1.735, "step": 9882 }, { "epoch": 0.96513671875, "grad_norm": 0.14089657366275787, "learning_rate": 5.1430721591129975e-05, "loss": 1.7298, "step": 9883 }, { "epoch": 0.965234375, "grad_norm": 0.1410873681306839, "learning_rate": 5.1422726015880746e-05, "loss": 1.6795, 
"step": 9884 }, { "epoch": 0.96533203125, "grad_norm": 0.16113001108169556, "learning_rate": 5.141475277402042e-05, "loss": 1.7103, "step": 9885 }, { "epoch": 0.9654296875, "grad_norm": 0.1360035389661789, "learning_rate": 5.140680186634552e-05, "loss": 1.7945, "step": 9886 }, { "epoch": 0.96552734375, "grad_norm": 0.15370120108127594, "learning_rate": 5.139887329365026e-05, "loss": 1.7006, "step": 9887 }, { "epoch": 0.965625, "grad_norm": 0.13792259991168976, "learning_rate": 5.139096705672664e-05, "loss": 1.7301, "step": 9888 }, { "epoch": 0.96572265625, "grad_norm": 0.13873735070228577, "learning_rate": 5.1383083156364416e-05, "loss": 1.7548, "step": 9889 }, { "epoch": 0.9658203125, "grad_norm": 0.13634537160396576, "learning_rate": 5.137522159335109e-05, "loss": 1.754, "step": 9890 }, { "epoch": 0.96591796875, "grad_norm": 0.1475607007741928, "learning_rate": 5.136738236847203e-05, "loss": 1.7216, "step": 9891 }, { "epoch": 0.966015625, "grad_norm": 0.1404491513967514, "learning_rate": 5.135956548251023e-05, "loss": 1.8324, "step": 9892 }, { "epoch": 0.96611328125, "grad_norm": 0.14163148403167725, "learning_rate": 5.135177093624655e-05, "loss": 1.7918, "step": 9893 }, { "epoch": 0.9662109375, "grad_norm": 0.15781231224536896, "learning_rate": 5.134399873045964e-05, "loss": 1.7291, "step": 9894 }, { "epoch": 0.96630859375, "grad_norm": 0.14131808280944824, "learning_rate": 5.133624886592585e-05, "loss": 1.7687, "step": 9895 }, { "epoch": 0.96640625, "grad_norm": 0.14461414515972137, "learning_rate": 5.1328521343419296e-05, "loss": 1.766, "step": 9896 }, { "epoch": 0.96650390625, "grad_norm": 0.14314833283424377, "learning_rate": 5.1320816163711894e-05, "loss": 1.7612, "step": 9897 }, { "epoch": 0.9666015625, "grad_norm": 0.15486958622932434, "learning_rate": 5.131313332757337e-05, "loss": 1.7339, "step": 9898 }, { "epoch": 0.96669921875, "grad_norm": 0.13304464519023895, "learning_rate": 5.1305472835771125e-05, "loss": 1.7414, "step": 9899 }, { "epoch": 0.966796875, "grad_norm": 0.14099562168121338, "learning_rate": 5.129783468907039e-05, "loss": 1.7884, "step": 9900 }, { "epoch": 0.96689453125, "grad_norm": 0.14239823818206787, "learning_rate": 5.129021888823415e-05, "loss": 1.7145, "step": 9901 }, { "epoch": 0.9669921875, "grad_norm": 0.13633406162261963, "learning_rate": 5.1282625434023156e-05, "loss": 1.7538, "step": 9902 }, { "epoch": 0.96708984375, "grad_norm": 0.14578616619110107, "learning_rate": 5.127505432719593e-05, "loss": 1.7599, "step": 9903 }, { "epoch": 0.9671875, "grad_norm": 0.15483273565769196, "learning_rate": 5.126750556850872e-05, "loss": 1.7929, "step": 9904 }, { "epoch": 0.96728515625, "grad_norm": 0.13346710801124573, "learning_rate": 5.125997915871564e-05, "loss": 1.7698, "step": 9905 }, { "epoch": 0.9673828125, "grad_norm": 0.1595238596200943, "learning_rate": 5.1252475098568475e-05, "loss": 1.745, "step": 9906 }, { "epoch": 0.96748046875, "grad_norm": 0.14916718006134033, "learning_rate": 5.1244993388816816e-05, "loss": 1.7739, "step": 9907 }, { "epoch": 0.967578125, "grad_norm": 0.1502687782049179, "learning_rate": 5.1237534030208046e-05, "loss": 1.7652, "step": 9908 }, { "epoch": 0.96767578125, "grad_norm": 0.1394888460636139, "learning_rate": 5.123009702348726e-05, "loss": 1.7827, "step": 9909 }, { "epoch": 0.9677734375, "grad_norm": 0.15174581110477448, "learning_rate": 5.1222682369397354e-05, "loss": 1.7722, "step": 9910 }, { "epoch": 0.96787109375, "grad_norm": 0.15026740729808807, "learning_rate": 5.1215290068679e-05, "loss": 1.7768, "step": 9911 }, { 
"epoch": 0.96796875, "grad_norm": 0.13054323196411133, "learning_rate": 5.12079201220706e-05, "loss": 1.7212, "step": 9912 }, { "epoch": 0.96806640625, "grad_norm": 0.14113520085811615, "learning_rate": 5.1200572530308384e-05, "loss": 1.6693, "step": 9913 }, { "epoch": 0.9681640625, "grad_norm": 0.14104501903057098, "learning_rate": 5.1193247294126246e-05, "loss": 1.7605, "step": 9914 }, { "epoch": 0.96826171875, "grad_norm": 0.13340340554714203, "learning_rate": 5.1185944414256e-05, "loss": 1.7402, "step": 9915 }, { "epoch": 0.968359375, "grad_norm": 0.1344335824251175, "learning_rate": 5.1178663891427086e-05, "loss": 1.7589, "step": 9916 }, { "epoch": 0.96845703125, "grad_norm": 0.160738006234169, "learning_rate": 5.117140572636674e-05, "loss": 1.7468, "step": 9917 }, { "epoch": 0.9685546875, "grad_norm": 0.14838218688964844, "learning_rate": 5.116416991980004e-05, "loss": 1.7648, "step": 9918 }, { "epoch": 0.96865234375, "grad_norm": 0.1391696184873581, "learning_rate": 5.115695647244975e-05, "loss": 1.7235, "step": 9919 }, { "epoch": 0.96875, "grad_norm": 0.14682884514331818, "learning_rate": 5.114976538503643e-05, "loss": 1.7268, "step": 9920 }, { "epoch": 0.96884765625, "grad_norm": 0.17881524562835693, "learning_rate": 5.1142596658278455e-05, "loss": 1.7596, "step": 9921 }, { "epoch": 0.9689453125, "grad_norm": 0.13998842239379883, "learning_rate": 5.1135450292891845e-05, "loss": 1.7534, "step": 9922 }, { "epoch": 0.96904296875, "grad_norm": 0.15185463428497314, "learning_rate": 5.1128326289590496e-05, "loss": 1.75, "step": 9923 }, { "epoch": 0.969140625, "grad_norm": 0.15092071890830994, "learning_rate": 5.1121224649086016e-05, "loss": 1.6823, "step": 9924 }, { "epoch": 0.96923828125, "grad_norm": 0.1554771512746811, "learning_rate": 5.11141453720878e-05, "loss": 1.787, "step": 9925 }, { "epoch": 0.9693359375, "grad_norm": 0.14879174530506134, "learning_rate": 5.1107088459303044e-05, "loss": 1.7677, "step": 9926 }, { "epoch": 0.96943359375, "grad_norm": 0.15773294866085052, "learning_rate": 5.110005391143661e-05, "loss": 1.766, "step": 9927 }, { "epoch": 0.96953125, "grad_norm": 0.15567466616630554, "learning_rate": 5.109304172919122e-05, "loss": 1.717, "step": 9928 }, { "epoch": 0.96962890625, "grad_norm": 0.14783085882663727, "learning_rate": 5.108605191326733e-05, "loss": 1.7095, "step": 9929 }, { "epoch": 0.9697265625, "grad_norm": 0.1461096853017807, "learning_rate": 5.107908446436316e-05, "loss": 1.769, "step": 9930 }, { "epoch": 0.96982421875, "grad_norm": 0.15029358863830566, "learning_rate": 5.107213938317468e-05, "loss": 1.7443, "step": 9931 }, { "epoch": 0.969921875, "grad_norm": 0.14144311845302582, "learning_rate": 5.106521667039564e-05, "loss": 1.7605, "step": 9932 }, { "epoch": 0.97001953125, "grad_norm": 0.14020927250385284, "learning_rate": 5.105831632671761e-05, "loss": 1.7702, "step": 9933 }, { "epoch": 0.9701171875, "grad_norm": 0.14846843481063843, "learning_rate": 5.105143835282981e-05, "loss": 1.7366, "step": 9934 }, { "epoch": 0.97021484375, "grad_norm": 0.13858196139335632, "learning_rate": 5.104458274941932e-05, "loss": 1.7205, "step": 9935 }, { "epoch": 0.9703125, "grad_norm": 0.1554141342639923, "learning_rate": 5.103774951717093e-05, "loss": 1.7795, "step": 9936 }, { "epoch": 0.97041015625, "grad_norm": 0.14348344504833221, "learning_rate": 5.1030938656767274e-05, "loss": 1.7558, "step": 9937 }, { "epoch": 0.9705078125, "grad_norm": 0.15078876912593842, "learning_rate": 5.1024150168888614e-05, "loss": 1.7667, "step": 9938 }, { "epoch": 0.97060546875, 
"grad_norm": 0.13944028317928314, "learning_rate": 5.1017384054213124e-05, "loss": 1.7138, "step": 9939 }, { "epoch": 0.970703125, "grad_norm": 0.13910841941833496, "learning_rate": 5.101064031341666e-05, "loss": 1.7544, "step": 9940 }, { "epoch": 0.97080078125, "grad_norm": 0.14166489243507385, "learning_rate": 5.1003918947172866e-05, "loss": 1.7466, "step": 9941 }, { "epoch": 0.9708984375, "grad_norm": 0.13841886818408966, "learning_rate": 5.099721995615313e-05, "loss": 1.7226, "step": 9942 }, { "epoch": 0.97099609375, "grad_norm": 0.14389757812023163, "learning_rate": 5.0990543341026635e-05, "loss": 1.7681, "step": 9943 }, { "epoch": 0.97109375, "grad_norm": 0.15168921649456024, "learning_rate": 5.098388910246035e-05, "loss": 1.7942, "step": 9944 }, { "epoch": 0.97119140625, "grad_norm": 0.1431775987148285, "learning_rate": 5.0977257241118885e-05, "loss": 1.8038, "step": 9945 }, { "epoch": 0.9712890625, "grad_norm": 0.15917684137821198, "learning_rate": 5.0970647757664796e-05, "loss": 1.7445, "step": 9946 }, { "epoch": 0.97138671875, "grad_norm": 0.16595634818077087, "learning_rate": 5.096406065275826e-05, "loss": 1.8153, "step": 9947 }, { "epoch": 0.971484375, "grad_norm": 0.17123891413211823, "learning_rate": 5.09574959270573e-05, "loss": 1.7562, "step": 9948 }, { "epoch": 0.97158203125, "grad_norm": 0.1477079838514328, "learning_rate": 5.095095358121764e-05, "loss": 1.7173, "step": 9949 }, { "epoch": 0.9716796875, "grad_norm": 0.15175674855709076, "learning_rate": 5.094443361589284e-05, "loss": 1.767, "step": 9950 }, { "epoch": 0.97177734375, "grad_norm": 0.17104843258857727, "learning_rate": 5.093793603173417e-05, "loss": 1.7097, "step": 9951 }, { "epoch": 0.971875, "grad_norm": 0.15471631288528442, "learning_rate": 5.093146082939065e-05, "loss": 1.7413, "step": 9952 }, { "epoch": 0.97197265625, "grad_norm": 0.14385907351970673, "learning_rate": 5.092500800950915e-05, "loss": 1.7279, "step": 9953 }, { "epoch": 0.9720703125, "grad_norm": 0.16724316775798798, "learning_rate": 5.091857757273424e-05, "loss": 1.7735, "step": 9954 }, { "epoch": 0.97216796875, "grad_norm": 0.16895835101604462, "learning_rate": 5.0912169519708204e-05, "loss": 1.7791, "step": 9955 }, { "epoch": 0.972265625, "grad_norm": 0.1446603238582611, "learning_rate": 5.090578385107121e-05, "loss": 1.7451, "step": 9956 }, { "epoch": 0.97236328125, "grad_norm": 0.16485725343227386, "learning_rate": 5.089942056746113e-05, "loss": 1.7624, "step": 9957 }, { "epoch": 0.9724609375, "grad_norm": 0.15510082244873047, "learning_rate": 5.089307966951357e-05, "loss": 1.7365, "step": 9958 }, { "epoch": 0.97255859375, "grad_norm": 0.15497490763664246, "learning_rate": 5.088676115786195e-05, "loss": 1.7731, "step": 9959 }, { "epoch": 0.97265625, "grad_norm": 0.13933759927749634, "learning_rate": 5.0880465033137426e-05, "loss": 1.7387, "step": 9960 }, { "epoch": 0.97275390625, "grad_norm": 0.160038024187088, "learning_rate": 5.087419129596892e-05, "loss": 1.7537, "step": 9961 }, { "epoch": 0.9728515625, "grad_norm": 0.13751892745494843, "learning_rate": 5.0867939946983114e-05, "loss": 1.7024, "step": 9962 }, { "epoch": 0.97294921875, "grad_norm": 0.13433068990707397, "learning_rate": 5.0861710986804505e-05, "loss": 1.7222, "step": 9963 }, { "epoch": 0.973046875, "grad_norm": 0.15366089344024658, "learning_rate": 5.085550441605526e-05, "loss": 1.7354, "step": 9964 }, { "epoch": 0.97314453125, "grad_norm": 0.15698228776454926, "learning_rate": 5.084932023535538e-05, "loss": 1.708, "step": 9965 }, { "epoch": 0.9732421875, "grad_norm": 
0.14866618812084198, "learning_rate": 5.084315844532262e-05, "loss": 1.7999, "step": 9966 }, { "epoch": 0.97333984375, "grad_norm": 0.16936221718788147, "learning_rate": 5.083701904657249e-05, "loss": 1.7664, "step": 9967 }, { "epoch": 0.9734375, "grad_norm": 0.14828476309776306, "learning_rate": 5.083090203971824e-05, "loss": 1.7645, "step": 9968 }, { "epoch": 0.97353515625, "grad_norm": 0.14929825067520142, "learning_rate": 5.082480742537092e-05, "loss": 1.7359, "step": 9969 }, { "epoch": 0.9736328125, "grad_norm": 0.1630171835422516, "learning_rate": 5.081873520413931e-05, "loss": 1.7473, "step": 9970 }, { "epoch": 0.97373046875, "grad_norm": 0.1458127349615097, "learning_rate": 5.081268537663e-05, "loss": 1.7064, "step": 9971 }, { "epoch": 0.973828125, "grad_norm": 0.14303037524223328, "learning_rate": 5.080665794344731e-05, "loss": 1.7145, "step": 9972 }, { "epoch": 0.97392578125, "grad_norm": 0.13988932967185974, "learning_rate": 5.080065290519331e-05, "loss": 1.7384, "step": 9973 }, { "epoch": 0.9740234375, "grad_norm": 0.1487143337726593, "learning_rate": 5.079467026246786e-05, "loss": 1.7677, "step": 9974 }, { "epoch": 0.97412109375, "grad_norm": 0.14473266899585724, "learning_rate": 5.078871001586857e-05, "loss": 1.7441, "step": 9975 }, { "epoch": 0.97421875, "grad_norm": 0.1285949945449829, "learning_rate": 5.0782772165990805e-05, "loss": 1.7442, "step": 9976 }, { "epoch": 0.97431640625, "grad_norm": 0.1450158953666687, "learning_rate": 5.0776856713427714e-05, "loss": 1.7421, "step": 9977 }, { "epoch": 0.9744140625, "grad_norm": 0.14845162630081177, "learning_rate": 5.077096365877021e-05, "loss": 1.7456, "step": 9978 }, { "epoch": 0.97451171875, "grad_norm": 0.14813317358493805, "learning_rate": 5.0765093002606965e-05, "loss": 1.7409, "step": 9979 }, { "epoch": 0.974609375, "grad_norm": 0.15383249521255493, "learning_rate": 5.075924474552436e-05, "loss": 1.7729, "step": 9980 }, { "epoch": 0.97470703125, "grad_norm": 0.13455945253372192, "learning_rate": 5.075341888810659e-05, "loss": 1.7536, "step": 9981 }, { "epoch": 0.9748046875, "grad_norm": 0.14359785616397858, "learning_rate": 5.0747615430935665e-05, "loss": 1.7576, "step": 9982 }, { "epoch": 0.97490234375, "grad_norm": 0.148885577917099, "learning_rate": 5.0741834374591257e-05, "loss": 1.7586, "step": 9983 }, { "epoch": 0.975, "grad_norm": 0.14037886261940002, "learning_rate": 5.073607571965083e-05, "loss": 1.7676, "step": 9984 }, { "epoch": 0.97509765625, "grad_norm": 0.14097924530506134, "learning_rate": 5.0730339466689646e-05, "loss": 1.7348, "step": 9985 }, { "epoch": 0.9751953125, "grad_norm": 0.15750406682491302, "learning_rate": 5.07246256162807e-05, "loss": 1.7317, "step": 9986 }, { "epoch": 0.97529296875, "grad_norm": 0.15389131009578705, "learning_rate": 5.071893416899477e-05, "loss": 1.7749, "step": 9987 }, { "epoch": 0.975390625, "grad_norm": 0.142015740275383, "learning_rate": 5.071326512540034e-05, "loss": 1.7113, "step": 9988 }, { "epoch": 0.97548828125, "grad_norm": 0.14819012582302094, "learning_rate": 5.070761848606373e-05, "loss": 1.7705, "step": 9989 }, { "epoch": 0.9755859375, "grad_norm": 0.15923024713993073, "learning_rate": 5.070199425154899e-05, "loss": 1.776, "step": 9990 }, { "epoch": 0.97568359375, "grad_norm": 0.15409564971923828, "learning_rate": 5.069639242241793e-05, "loss": 1.7891, "step": 9991 }, { "epoch": 0.97578125, "grad_norm": 0.14092694222927094, "learning_rate": 5.069081299923009e-05, "loss": 1.7329, "step": 9992 }, { "epoch": 0.97587890625, "grad_norm": 0.13956573605537415, 
"learning_rate": 5.068525598254286e-05, "loss": 1.7483, "step": 9993 }, { "epoch": 0.9759765625, "grad_norm": 0.1400446891784668, "learning_rate": 5.067972137291126e-05, "loss": 1.7566, "step": 9994 }, { "epoch": 0.97607421875, "grad_norm": 0.14099180698394775, "learning_rate": 5.0674209170888234e-05, "loss": 1.769, "step": 9995 }, { "epoch": 0.976171875, "grad_norm": 0.15091805160045624, "learning_rate": 5.0668719377024384e-05, "loss": 1.7642, "step": 9996 }, { "epoch": 0.97626953125, "grad_norm": 0.12742038071155548, "learning_rate": 5.0663251991868025e-05, "loss": 1.7058, "step": 9997 }, { "epoch": 0.9763671875, "grad_norm": 0.15031276643276215, "learning_rate": 5.0657807015965365e-05, "loss": 1.7287, "step": 9998 }, { "epoch": 0.97646484375, "grad_norm": 0.14556948840618134, "learning_rate": 5.0652384449860295e-05, "loss": 1.7035, "step": 9999 }, { "epoch": 0.9765625, "grad_norm": 0.16276825964450836, "learning_rate": 5.064698429409449e-05, "loss": 1.7755, "step": 10000 }, { "epoch": 0.97666015625, "grad_norm": 0.13123583793640137, "learning_rate": 5.0641606549207336e-05, "loss": 1.7489, "step": 10001 }, { "epoch": 0.9767578125, "grad_norm": 0.1650131493806839, "learning_rate": 5.063625121573609e-05, "loss": 1.6951, "step": 10002 }, { "epoch": 0.97685546875, "grad_norm": 0.17374928295612335, "learning_rate": 5.0630918294215626e-05, "loss": 1.7596, "step": 10003 }, { "epoch": 0.976953125, "grad_norm": 0.14619632065296173, "learning_rate": 5.062560778517871e-05, "loss": 1.7754, "step": 10004 }, { "epoch": 0.97705078125, "grad_norm": 0.16286182403564453, "learning_rate": 5.062031968915582e-05, "loss": 1.7367, "step": 10005 }, { "epoch": 0.9771484375, "grad_norm": 0.1506439745426178, "learning_rate": 5.061505400667514e-05, "loss": 1.7827, "step": 10006 }, { "epoch": 0.97724609375, "grad_norm": 0.1669347584247589, "learning_rate": 5.0609810738262694e-05, "loss": 1.7698, "step": 10007 }, { "epoch": 0.97734375, "grad_norm": 0.15124599635601044, "learning_rate": 5.0604589884442235e-05, "loss": 1.7815, "step": 10008 }, { "epoch": 0.97744140625, "grad_norm": 0.14549654722213745, "learning_rate": 5.0599391445735284e-05, "loss": 1.7787, "step": 10009 }, { "epoch": 0.9775390625, "grad_norm": 0.1585574895143509, "learning_rate": 5.059421542266113e-05, "loss": 1.7688, "step": 10010 }, { "epoch": 0.97763671875, "grad_norm": 0.14852789044380188, "learning_rate": 5.0589061815736794e-05, "loss": 1.7787, "step": 10011 }, { "epoch": 0.977734375, "grad_norm": 0.15427833795547485, "learning_rate": 5.058393062547708e-05, "loss": 1.7721, "step": 10012 }, { "epoch": 0.97783203125, "grad_norm": 0.15052808821201324, "learning_rate": 5.057882185239453e-05, "loss": 1.7817, "step": 10013 }, { "epoch": 0.9779296875, "grad_norm": 0.13668851554393768, "learning_rate": 5.057373549699951e-05, "loss": 1.7545, "step": 10014 }, { "epoch": 0.97802734375, "grad_norm": 0.13895921409130096, "learning_rate": 5.056867155980007e-05, "loss": 1.7395, "step": 10015 }, { "epoch": 0.978125, "grad_norm": 0.15127162635326385, "learning_rate": 5.056363004130205e-05, "loss": 1.7144, "step": 10016 }, { "epoch": 0.97822265625, "grad_norm": 0.13936050236225128, "learning_rate": 5.055861094200907e-05, "loss": 1.7818, "step": 10017 }, { "epoch": 0.9783203125, "grad_norm": 0.16788840293884277, "learning_rate": 5.0553614262422504e-05, "loss": 1.7733, "step": 10018 }, { "epoch": 0.97841796875, "grad_norm": 0.13920214772224426, "learning_rate": 5.054864000304143e-05, "loss": 1.7574, "step": 10019 }, { "epoch": 0.978515625, "grad_norm": 
0.1373671591281891, "learning_rate": 5.0543688164362755e-05, "loss": 1.7677, "step": 10020 }, { "epoch": 0.97861328125, "grad_norm": 0.1493276208639145, "learning_rate": 5.053875874688116e-05, "loss": 1.735, "step": 10021 }, { "epoch": 0.9787109375, "grad_norm": 0.14521166682243347, "learning_rate": 5.053385175108898e-05, "loss": 1.7494, "step": 10022 }, { "epoch": 0.97880859375, "grad_norm": 0.14841167628765106, "learning_rate": 5.052896717747645e-05, "loss": 1.7783, "step": 10023 }, { "epoch": 0.97890625, "grad_norm": 0.1405102163553238, "learning_rate": 5.052410502653144e-05, "loss": 1.7202, "step": 10024 }, { "epoch": 0.97900390625, "grad_norm": 0.1468498706817627, "learning_rate": 5.051926529873968e-05, "loss": 1.7243, "step": 10025 }, { "epoch": 0.9791015625, "grad_norm": 0.1444617360830307, "learning_rate": 5.05144479945846e-05, "loss": 1.7854, "step": 10026 }, { "epoch": 0.97919921875, "grad_norm": 0.1368781477212906, "learning_rate": 5.050965311454739e-05, "loss": 1.7589, "step": 10027 }, { "epoch": 0.979296875, "grad_norm": 0.15048636496067047, "learning_rate": 5.050488065910702e-05, "loss": 1.729, "step": 10028 }, { "epoch": 0.97939453125, "grad_norm": 0.14686833322048187, "learning_rate": 5.0500130628740234e-05, "loss": 1.7705, "step": 10029 }, { "epoch": 0.9794921875, "grad_norm": 0.14009863138198853, "learning_rate": 5.049540302392151e-05, "loss": 1.7625, "step": 10030 }, { "epoch": 0.97958984375, "grad_norm": 0.14694584906101227, "learning_rate": 5.049069784512308e-05, "loss": 1.7474, "step": 10031 }, { "epoch": 0.9796875, "grad_norm": 0.13915352523326874, "learning_rate": 5.048601509281499e-05, "loss": 1.7185, "step": 10032 }, { "epoch": 0.97978515625, "grad_norm": 0.15277601778507233, "learning_rate": 5.048135476746496e-05, "loss": 1.7843, "step": 10033 }, { "epoch": 0.9798828125, "grad_norm": 0.13953673839569092, "learning_rate": 5.047671686953853e-05, "loss": 1.8036, "step": 10034 }, { "epoch": 0.97998046875, "grad_norm": 0.14567483961582184, "learning_rate": 5.0472101399498994e-05, "loss": 1.7269, "step": 10035 }, { "epoch": 0.980078125, "grad_norm": 0.14738798141479492, "learning_rate": 5.046750835780739e-05, "loss": 1.776, "step": 10036 }, { "epoch": 0.98017578125, "grad_norm": 0.14349795877933502, "learning_rate": 5.046293774492252e-05, "loss": 1.7682, "step": 10037 }, { "epoch": 0.9802734375, "grad_norm": 0.14158543944358826, "learning_rate": 5.045838956130095e-05, "loss": 1.7785, "step": 10038 }, { "epoch": 0.98037109375, "grad_norm": 0.14729897677898407, "learning_rate": 5.045386380739701e-05, "loss": 1.7235, "step": 10039 }, { "epoch": 0.98046875, "grad_norm": 0.14754866063594818, "learning_rate": 5.044936048366277e-05, "loss": 1.7368, "step": 10040 }, { "epoch": 0.98056640625, "grad_norm": 0.1458842009305954, "learning_rate": 5.044487959054806e-05, "loss": 1.7805, "step": 10041 }, { "epoch": 0.9806640625, "grad_norm": 0.1598397195339203, "learning_rate": 5.044042112850054e-05, "loss": 1.7546, "step": 10042 }, { "epoch": 0.98076171875, "grad_norm": 0.13843289017677307, "learning_rate": 5.043598509796548e-05, "loss": 1.7382, "step": 10043 }, { "epoch": 0.980859375, "grad_norm": 0.15034233033657074, "learning_rate": 5.0431571499386096e-05, "loss": 1.8033, "step": 10044 }, { "epoch": 0.98095703125, "grad_norm": 0.13841871917247772, "learning_rate": 5.0427180333203194e-05, "loss": 1.727, "step": 10045 }, { "epoch": 0.9810546875, "grad_norm": 0.142915740609169, "learning_rate": 5.042281159985547e-05, "loss": 1.7749, "step": 10046 }, { "epoch": 0.98115234375, 
"grad_norm": 0.1465313732624054, "learning_rate": 5.0418465299779254e-05, "loss": 1.7743, "step": 10047 }, { "epoch": 0.98125, "grad_norm": 0.14435917139053345, "learning_rate": 5.0414141433408776e-05, "loss": 1.6938, "step": 10048 }, { "epoch": 0.98134765625, "grad_norm": 0.13682590425014496, "learning_rate": 5.040984000117591e-05, "loss": 1.7505, "step": 10049 }, { "epoch": 0.9814453125, "grad_norm": 0.1413172334432602, "learning_rate": 5.0405561003510305e-05, "loss": 1.7371, "step": 10050 }, { "epoch": 0.98154296875, "grad_norm": 0.1585964858531952, "learning_rate": 5.040130444083947e-05, "loss": 1.7305, "step": 10051 }, { "epoch": 0.981640625, "grad_norm": 0.16102387011051178, "learning_rate": 5.0397070313588535e-05, "loss": 1.7342, "step": 10052 }, { "epoch": 0.98173828125, "grad_norm": 0.1384226679801941, "learning_rate": 5.039285862218048e-05, "loss": 1.7318, "step": 10053 }, { "epoch": 0.9818359375, "grad_norm": 0.15216033160686493, "learning_rate": 5.0388669367035994e-05, "loss": 1.7922, "step": 10054 }, { "epoch": 0.98193359375, "grad_norm": 0.14792785048484802, "learning_rate": 5.0384502548573574e-05, "loss": 1.7775, "step": 10055 }, { "epoch": 0.98203125, "grad_norm": 0.14774584770202637, "learning_rate": 5.0380358167209435e-05, "loss": 1.7682, "step": 10056 }, { "epoch": 0.98212890625, "grad_norm": 0.14679698646068573, "learning_rate": 5.0376236223357566e-05, "loss": 1.7178, "step": 10057 }, { "epoch": 0.9822265625, "grad_norm": 0.16715483367443085, "learning_rate": 5.037213671742972e-05, "loss": 1.707, "step": 10058 }, { "epoch": 0.98232421875, "grad_norm": 0.15510253608226776, "learning_rate": 5.036805964983536e-05, "loss": 1.7553, "step": 10059 }, { "epoch": 0.982421875, "grad_norm": 0.14839808642864227, "learning_rate": 5.036400502098182e-05, "loss": 1.8029, "step": 10060 }, { "epoch": 0.98251953125, "grad_norm": 0.14031124114990234, "learning_rate": 5.0359972831274064e-05, "loss": 1.7198, "step": 10061 }, { "epoch": 0.9826171875, "grad_norm": 0.1507638692855835, "learning_rate": 5.035596308111489e-05, "loss": 1.7524, "step": 10062 }, { "epoch": 0.98271484375, "grad_norm": 0.1588565707206726, "learning_rate": 5.035197577090483e-05, "loss": 1.7873, "step": 10063 }, { "epoch": 0.9828125, "grad_norm": 0.1563289314508438, "learning_rate": 5.034801090104221e-05, "loss": 1.7611, "step": 10064 }, { "epoch": 0.98291015625, "grad_norm": 0.15962998569011688, "learning_rate": 5.034406847192304e-05, "loss": 1.7513, "step": 10065 }, { "epoch": 0.9830078125, "grad_norm": 0.13859353959560394, "learning_rate": 5.0340148483941176e-05, "loss": 1.7635, "step": 10066 }, { "epoch": 0.98310546875, "grad_norm": 0.13959355652332306, "learning_rate": 5.033625093748817e-05, "loss": 1.7348, "step": 10067 }, { "epoch": 0.983203125, "grad_norm": 0.16250212490558624, "learning_rate": 5.033237583295334e-05, "loss": 1.7404, "step": 10068 }, { "epoch": 0.98330078125, "grad_norm": 0.14631067216396332, "learning_rate": 5.0328523170723794e-05, "loss": 1.746, "step": 10069 }, { "epoch": 0.9833984375, "grad_norm": 0.13938458263874054, "learning_rate": 5.0324692951184384e-05, "loss": 1.7785, "step": 10070 }, { "epoch": 0.98349609375, "grad_norm": 0.1456540822982788, "learning_rate": 5.0320885174717676e-05, "loss": 1.7374, "step": 10071 }, { "epoch": 0.98359375, "grad_norm": 0.1378055363893509, "learning_rate": 5.0317099841704084e-05, "loss": 1.7307, "step": 10072 }, { "epoch": 0.98369140625, "grad_norm": 0.1414007991552353, "learning_rate": 5.0313336952521716e-05, "loss": 1.7571, "step": 10073 }, { "epoch": 
0.9837890625, "grad_norm": 0.1453467458486557, "learning_rate": 5.030959650754644e-05, "loss": 1.7744, "step": 10074 }, { "epoch": 0.98388671875, "grad_norm": 0.13391396403312683, "learning_rate": 5.030587850715188e-05, "loss": 1.7186, "step": 10075 }, { "epoch": 0.983984375, "grad_norm": 0.1405848264694214, "learning_rate": 5.030218295170944e-05, "loss": 1.7468, "step": 10076 }, { "epoch": 0.98408203125, "grad_norm": 0.14227747917175293, "learning_rate": 5.029850984158828e-05, "loss": 1.752, "step": 10077 }, { "epoch": 0.9841796875, "grad_norm": 0.13619476556777954, "learning_rate": 5.029485917715533e-05, "loss": 1.7727, "step": 10078 }, { "epoch": 0.98427734375, "grad_norm": 0.1472906470298767, "learning_rate": 5.029123095877524e-05, "loss": 1.7461, "step": 10079 }, { "epoch": 0.984375, "grad_norm": 0.1447203904390335, "learning_rate": 5.0287625186810404e-05, "loss": 1.7669, "step": 10080 }, { "epoch": 0.98447265625, "grad_norm": 0.14600208401679993, "learning_rate": 5.0284041861621085e-05, "loss": 1.757, "step": 10081 }, { "epoch": 0.9845703125, "grad_norm": 0.14102114737033844, "learning_rate": 5.0280480983565164e-05, "loss": 1.7914, "step": 10082 }, { "epoch": 0.98466796875, "grad_norm": 0.14788223803043365, "learning_rate": 5.027694255299834e-05, "loss": 1.779, "step": 10083 }, { "epoch": 0.984765625, "grad_norm": 0.14173395931720734, "learning_rate": 5.0273426570274097e-05, "loss": 1.77, "step": 10084 }, { "epoch": 0.98486328125, "grad_norm": 0.13825592398643494, "learning_rate": 5.026993303574366e-05, "loss": 1.7185, "step": 10085 }, { "epoch": 0.9849609375, "grad_norm": 0.15453357994556427, "learning_rate": 5.0266461949755944e-05, "loss": 1.7782, "step": 10086 }, { "epoch": 0.98505859375, "grad_norm": 0.13705924153327942, "learning_rate": 5.0263013312657756e-05, "loss": 1.7026, "step": 10087 }, { "epoch": 0.98515625, "grad_norm": 0.15381409227848053, "learning_rate": 5.0259587124793534e-05, "loss": 1.7632, "step": 10088 }, { "epoch": 0.98525390625, "grad_norm": 0.1557580679655075, "learning_rate": 5.025618338650554e-05, "loss": 1.7563, "step": 10089 }, { "epoch": 0.9853515625, "grad_norm": 0.15564167499542236, "learning_rate": 5.025280209813377e-05, "loss": 1.7729, "step": 10090 }, { "epoch": 0.98544921875, "grad_norm": 0.16429518163204193, "learning_rate": 5.0249443260016e-05, "loss": 1.7656, "step": 10091 }, { "epoch": 0.985546875, "grad_norm": 0.13806460797786713, "learning_rate": 5.024610687248773e-05, "loss": 1.7179, "step": 10092 }, { "epoch": 0.98564453125, "grad_norm": 0.14527757465839386, "learning_rate": 5.024279293588224e-05, "loss": 1.7525, "step": 10093 }, { "epoch": 0.9857421875, "grad_norm": 0.14801572263240814, "learning_rate": 5.023950145053057e-05, "loss": 1.7354, "step": 10094 }, { "epoch": 0.98583984375, "grad_norm": 0.13225778937339783, "learning_rate": 5.0236232416761514e-05, "loss": 1.7709, "step": 10095 }, { "epoch": 0.9859375, "grad_norm": 0.14504064619541168, "learning_rate": 5.023298583490159e-05, "loss": 1.8066, "step": 10096 }, { "epoch": 0.98603515625, "grad_norm": 0.14867286384105682, "learning_rate": 5.022976170527514e-05, "loss": 1.7347, "step": 10097 }, { "epoch": 0.9861328125, "grad_norm": 0.14233332872390747, "learning_rate": 5.022656002820422e-05, "loss": 1.7516, "step": 10098 }, { "epoch": 0.98623046875, "grad_norm": 0.1479499340057373, "learning_rate": 5.0223380804008616e-05, "loss": 1.719, "step": 10099 }, { "epoch": 0.986328125, "grad_norm": 0.13828901946544647, "learning_rate": 5.022022403300594e-05, "loss": 1.7589, "step": 10100 }, { 
"epoch": 0.98642578125, "grad_norm": 0.13755843043327332, "learning_rate": 5.021708971551152e-05, "loss": 1.7623, "step": 10101 }, { "epoch": 0.9865234375, "grad_norm": 0.14124304056167603, "learning_rate": 5.0213977851838424e-05, "loss": 1.7787, "step": 10102 }, { "epoch": 0.98662109375, "grad_norm": 0.13150203227996826, "learning_rate": 5.0210888442297536e-05, "loss": 1.6959, "step": 10103 }, { "epoch": 0.98671875, "grad_norm": 0.13045498728752136, "learning_rate": 5.020782148719743e-05, "loss": 1.7436, "step": 10104 }, { "epoch": 0.98681640625, "grad_norm": 0.13220655918121338, "learning_rate": 5.020477698684448e-05, "loss": 1.7599, "step": 10105 }, { "epoch": 0.9869140625, "grad_norm": 0.13759960234165192, "learning_rate": 5.02017549415428e-05, "loss": 1.7758, "step": 10106 }, { "epoch": 0.98701171875, "grad_norm": 0.15984517335891724, "learning_rate": 5.019875535159427e-05, "loss": 1.7525, "step": 10107 }, { "epoch": 0.987109375, "grad_norm": 0.13346801698207855, "learning_rate": 5.019577821729851e-05, "loss": 1.7547, "step": 10108 }, { "epoch": 0.98720703125, "grad_norm": 0.15822958946228027, "learning_rate": 5.0192823538952944e-05, "loss": 1.732, "step": 10109 }, { "epoch": 0.9873046875, "grad_norm": 0.14846542477607727, "learning_rate": 5.018989131685269e-05, "loss": 1.7771, "step": 10110 }, { "epoch": 0.98740234375, "grad_norm": 0.13655339181423187, "learning_rate": 5.018698155129064e-05, "loss": 1.7151, "step": 10111 }, { "epoch": 0.9875, "grad_norm": 0.16769932210445404, "learning_rate": 5.018409424255747e-05, "loss": 1.7404, "step": 10112 }, { "epoch": 0.98759765625, "grad_norm": 0.15097911655902863, "learning_rate": 5.0181229390941605e-05, "loss": 1.7374, "step": 10113 }, { "epoch": 0.9876953125, "grad_norm": 0.14887021481990814, "learning_rate": 5.0178386996729204e-05, "loss": 1.6724, "step": 10114 }, { "epoch": 0.98779296875, "grad_norm": 0.16513672471046448, "learning_rate": 5.017556706020419e-05, "loss": 1.7263, "step": 10115 }, { "epoch": 0.987890625, "grad_norm": 0.1365361511707306, "learning_rate": 5.017276958164829e-05, "loss": 1.7384, "step": 10116 }, { "epoch": 0.98798828125, "grad_norm": 0.14326900243759155, "learning_rate": 5.01699945613409e-05, "loss": 1.8079, "step": 10117 }, { "epoch": 0.9880859375, "grad_norm": 0.15236935019493103, "learning_rate": 5.016724199955924e-05, "loss": 1.7644, "step": 10118 }, { "epoch": 0.98818359375, "grad_norm": 0.1410330832004547, "learning_rate": 5.016451189657828e-05, "loss": 1.7813, "step": 10119 }, { "epoch": 0.98828125, "grad_norm": 0.15914198756217957, "learning_rate": 5.0161804252670696e-05, "loss": 1.8094, "step": 10120 }, { "epoch": 0.98837890625, "grad_norm": 0.14066147804260254, "learning_rate": 5.0159119068106996e-05, "loss": 1.759, "step": 10121 }, { "epoch": 0.9884765625, "grad_norm": 0.12801553308963776, "learning_rate": 5.015645634315539e-05, "loss": 1.7099, "step": 10122 }, { "epoch": 0.98857421875, "grad_norm": 0.1386307030916214, "learning_rate": 5.0153816078081846e-05, "loss": 1.732, "step": 10123 }, { "epoch": 0.988671875, "grad_norm": 0.15575361251831055, "learning_rate": 5.015119827315012e-05, "loss": 1.743, "step": 10124 }, { "epoch": 0.98876953125, "grad_norm": 0.13394734263420105, "learning_rate": 5.014860292862173e-05, "loss": 1.7763, "step": 10125 }, { "epoch": 0.9888671875, "grad_norm": 0.14078542590141296, "learning_rate": 5.0146030044755865e-05, "loss": 1.787, "step": 10126 }, { "epoch": 0.98896484375, "grad_norm": 0.13918502628803253, "learning_rate": 5.014347962180961e-05, "loss": 1.7653, "step": 
10127 }, { "epoch": 0.9890625, "grad_norm": 0.13404348492622375, "learning_rate": 5.014095166003767e-05, "loss": 1.7404, "step": 10128 }, { "epoch": 0.98916015625, "grad_norm": 0.13534797728061676, "learning_rate": 5.01384461596926e-05, "loss": 1.7438, "step": 10129 }, { "epoch": 0.9892578125, "grad_norm": 0.1437867432832718, "learning_rate": 5.013596312102465e-05, "loss": 1.8015, "step": 10130 }, { "epoch": 0.98935546875, "grad_norm": 0.1403081864118576, "learning_rate": 5.0133502544281867e-05, "loss": 1.7151, "step": 10131 }, { "epoch": 0.989453125, "grad_norm": 0.14355938136577606, "learning_rate": 5.013106442971005e-05, "loss": 1.7463, "step": 10132 }, { "epoch": 0.98955078125, "grad_norm": 0.1525718718767166, "learning_rate": 5.012864877755275e-05, "loss": 1.7494, "step": 10133 }, { "epoch": 0.9896484375, "grad_norm": 0.1537349820137024, "learning_rate": 5.0126255588051226e-05, "loss": 1.763, "step": 10134 }, { "epoch": 0.98974609375, "grad_norm": 0.14247439801692963, "learning_rate": 5.01238848614446e-05, "loss": 1.7815, "step": 10135 }, { "epoch": 0.98984375, "grad_norm": 0.1480875015258789, "learning_rate": 5.012153659796963e-05, "loss": 1.7646, "step": 10136 }, { "epoch": 0.98994140625, "grad_norm": 0.13995063304901123, "learning_rate": 5.011921079786093e-05, "loss": 1.7604, "step": 10137 }, { "epoch": 0.9900390625, "grad_norm": 0.14824624359607697, "learning_rate": 5.011690746135078e-05, "loss": 1.7562, "step": 10138 }, { "epoch": 0.99013671875, "grad_norm": 0.1353156417608261, "learning_rate": 5.0114626588669305e-05, "loss": 1.7871, "step": 10139 }, { "epoch": 0.990234375, "grad_norm": 0.1616741269826889, "learning_rate": 5.011236818004434e-05, "loss": 1.7525, "step": 10140 }, { "epoch": 0.99033203125, "grad_norm": 0.14534015953540802, "learning_rate": 5.011013223570144e-05, "loss": 1.7606, "step": 10141 }, { "epoch": 0.9904296875, "grad_norm": 0.16772140562534332, "learning_rate": 5.010791875586402e-05, "loss": 1.6973, "step": 10142 }, { "epoch": 0.99052734375, "grad_norm": 0.15803009271621704, "learning_rate": 5.010572774075313e-05, "loss": 1.7371, "step": 10143 }, { "epoch": 0.990625, "grad_norm": 0.13514499366283417, "learning_rate": 5.0103559190587665e-05, "loss": 1.7341, "step": 10144 }, { "epoch": 0.99072265625, "grad_norm": 0.1545884907245636, "learning_rate": 5.0101413105584205e-05, "loss": 1.7231, "step": 10145 }, { "epoch": 0.9908203125, "grad_norm": 0.16269631683826447, "learning_rate": 5.009928948595718e-05, "loss": 1.7941, "step": 10146 }, { "epoch": 0.99091796875, "grad_norm": 0.1419914960861206, "learning_rate": 5.00971883319187e-05, "loss": 1.7399, "step": 10147 }, { "epoch": 0.991015625, "grad_norm": 0.15757222473621368, "learning_rate": 5.009510964367864e-05, "loss": 1.7715, "step": 10148 }, { "epoch": 0.99111328125, "grad_norm": 0.15623074769973755, "learning_rate": 5.0093053421444626e-05, "loss": 1.7085, "step": 10149 }, { "epoch": 0.9912109375, "grad_norm": 0.1376177966594696, "learning_rate": 5.0091019665422115e-05, "loss": 1.7355, "step": 10150 }, { "epoch": 0.99130859375, "grad_norm": 0.14206235110759735, "learning_rate": 5.0089008375814203e-05, "loss": 1.7068, "step": 10151 }, { "epoch": 0.99140625, "grad_norm": 0.15313436090946198, "learning_rate": 5.0087019552821835e-05, "loss": 1.749, "step": 10152 }, { "epoch": 0.99150390625, "grad_norm": 0.16709040105342865, "learning_rate": 5.008505319664365e-05, "loss": 1.7124, "step": 10153 }, { "epoch": 0.9916015625, "grad_norm": 0.1569378674030304, "learning_rate": 5.00831093074761e-05, "loss": 1.7126, "step": 
10154 }, { "epoch": 0.99169921875, "grad_norm": 0.16699160635471344, "learning_rate": 5.008118788551333e-05, "loss": 1.776, "step": 10155 }, { "epoch": 0.991796875, "grad_norm": 0.15098391473293304, "learning_rate": 5.007928893094729e-05, "loss": 1.8092, "step": 10156 }, { "epoch": 0.99189453125, "grad_norm": 0.15102660655975342, "learning_rate": 5.0077412443967687e-05, "loss": 1.7358, "step": 10157 }, { "epoch": 0.9919921875, "grad_norm": 0.13465797901153564, "learning_rate": 5.007555842476193e-05, "loss": 1.7543, "step": 10158 }, { "epoch": 0.99208984375, "grad_norm": 0.14491602778434753, "learning_rate": 5.0073726873515235e-05, "loss": 1.7615, "step": 10159 }, { "epoch": 0.9921875, "grad_norm": 0.151759535074234, "learning_rate": 5.007191779041057e-05, "loss": 1.7557, "step": 10160 }, { "epoch": 0.99228515625, "grad_norm": 0.13345809280872345, "learning_rate": 5.007013117562862e-05, "loss": 1.7948, "step": 10161 }, { "epoch": 0.9923828125, "grad_norm": 0.13175331056118011, "learning_rate": 5.006836702934786e-05, "loss": 1.7621, "step": 10162 }, { "epoch": 0.99248046875, "grad_norm": 0.14332705736160278, "learning_rate": 5.006662535174453e-05, "loss": 1.7579, "step": 10163 }, { "epoch": 0.992578125, "grad_norm": 0.13660448789596558, "learning_rate": 5.006490614299261e-05, "loss": 1.7678, "step": 10164 }, { "epoch": 0.99267578125, "grad_norm": 0.14487048983573914, "learning_rate": 5.00632094032638e-05, "loss": 1.7504, "step": 10165 }, { "epoch": 0.9927734375, "grad_norm": 0.15858644247055054, "learning_rate": 5.0061535132727634e-05, "loss": 1.7445, "step": 10166 }, { "epoch": 0.99287109375, "grad_norm": 0.1380414515733719, "learning_rate": 5.005988333155129e-05, "loss": 1.7076, "step": 10167 }, { "epoch": 0.99296875, "grad_norm": 0.1447162926197052, "learning_rate": 5.005825399989985e-05, "loss": 1.7802, "step": 10168 }, { "epoch": 0.99306640625, "grad_norm": 0.12982869148254395, "learning_rate": 5.0056647137936035e-05, "loss": 1.7742, "step": 10169 }, { "epoch": 0.9931640625, "grad_norm": 0.1475370079278946, "learning_rate": 5.005506274582033e-05, "loss": 1.6968, "step": 10170 }, { "epoch": 0.99326171875, "grad_norm": 0.1361812949180603, "learning_rate": 5.005350082371103e-05, "loss": 1.716, "step": 10171 }, { "epoch": 0.993359375, "grad_norm": 0.13730302453041077, "learning_rate": 5.0051961371764164e-05, "loss": 1.7607, "step": 10172 }, { "epoch": 0.99345703125, "grad_norm": 0.1577260047197342, "learning_rate": 5.0050444390133473e-05, "loss": 1.7096, "step": 10173 }, { "epoch": 0.9935546875, "grad_norm": 0.14537537097930908, "learning_rate": 5.004894987897052e-05, "loss": 1.7576, "step": 10174 }, { "epoch": 0.99365234375, "grad_norm": 0.1382758766412735, "learning_rate": 5.004747783842459e-05, "loss": 1.7662, "step": 10175 }, { "epoch": 0.99375, "grad_norm": 0.14768566191196442, "learning_rate": 5.004602826864273e-05, "loss": 1.7625, "step": 10176 }, { "epoch": 0.99384765625, "grad_norm": 0.14900919795036316, "learning_rate": 5.004460116976974e-05, "loss": 1.8079, "step": 10177 }, { "epoch": 0.9939453125, "grad_norm": 0.14206573367118835, "learning_rate": 5.004319654194815e-05, "loss": 1.7322, "step": 10178 }, { "epoch": 0.99404296875, "grad_norm": 0.15075136721134186, "learning_rate": 5.00418143853183e-05, "loss": 1.7507, "step": 10179 }, { "epoch": 0.994140625, "grad_norm": 0.14910390973091125, "learning_rate": 5.0040454700018254e-05, "loss": 1.7645, "step": 10180 }, { "epoch": 0.99423828125, "grad_norm": 0.1473272740840912, "learning_rate": 5.003911748618379e-05, "loss": 1.7229, 
"step": 10181 }, { "epoch": 0.9943359375, "grad_norm": 0.1749107986688614, "learning_rate": 5.003780274394853e-05, "loss": 1.7595, "step": 10182 }, { "epoch": 0.99443359375, "grad_norm": 0.13767200708389282, "learning_rate": 5.00365104734438e-05, "loss": 1.7795, "step": 10183 }, { "epoch": 0.99453125, "grad_norm": 0.1729462891817093, "learning_rate": 5.003524067479867e-05, "loss": 1.7072, "step": 10184 }, { "epoch": 0.99462890625, "grad_norm": 0.16077080368995667, "learning_rate": 5.003399334813999e-05, "loss": 1.7867, "step": 10185 }, { "epoch": 0.9947265625, "grad_norm": 0.1510819047689438, "learning_rate": 5.0032768493592355e-05, "loss": 1.7614, "step": 10186 }, { "epoch": 0.99482421875, "grad_norm": 0.14747557044029236, "learning_rate": 5.003156611127811e-05, "loss": 1.7072, "step": 10187 }, { "epoch": 0.994921875, "grad_norm": 0.15875203907489777, "learning_rate": 5.003038620131738e-05, "loss": 1.7709, "step": 10188 }, { "epoch": 0.99501953125, "grad_norm": 0.1523493379354477, "learning_rate": 5.0029228763827995e-05, "loss": 1.7632, "step": 10189 }, { "epoch": 0.9951171875, "grad_norm": 0.15394005179405212, "learning_rate": 5.002809379892562e-05, "loss": 1.7273, "step": 10190 }, { "epoch": 0.99521484375, "grad_norm": 0.15556710958480835, "learning_rate": 5.0026981306723575e-05, "loss": 1.7727, "step": 10191 }, { "epoch": 0.9953125, "grad_norm": 0.1474258005619049, "learning_rate": 5.002589128733306e-05, "loss": 1.763, "step": 10192 }, { "epoch": 0.99541015625, "grad_norm": 0.15444007515907288, "learning_rate": 5.002482374086288e-05, "loss": 1.719, "step": 10193 }, { "epoch": 0.9955078125, "grad_norm": 0.15633748471736908, "learning_rate": 5.002377866741972e-05, "loss": 1.7388, "step": 10194 }, { "epoch": 0.99560546875, "grad_norm": 0.14781521260738373, "learning_rate": 5.0022756067107945e-05, "loss": 1.7419, "step": 10195 }, { "epoch": 0.995703125, "grad_norm": 0.14431701600551605, "learning_rate": 5.002175594002974e-05, "loss": 1.7533, "step": 10196 }, { "epoch": 0.99580078125, "grad_norm": 0.15154847502708435, "learning_rate": 5.002077828628498e-05, "loss": 1.7395, "step": 10197 }, { "epoch": 0.9958984375, "grad_norm": 0.1440584808588028, "learning_rate": 5.001982310597134e-05, "loss": 1.751, "step": 10198 }, { "epoch": 0.99599609375, "grad_norm": 0.148819699883461, "learning_rate": 5.0018890399184235e-05, "loss": 1.7785, "step": 10199 }, { "epoch": 0.99609375, "grad_norm": 0.15343788266181946, "learning_rate": 5.001798016601679e-05, "loss": 1.7481, "step": 10200 } ], "logging_steps": 1.0, "max_steps": 10240, "num_input_tokens_seen": 0, "num_train_epochs": 9223372036854775807, "save_steps": 200, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 6.2449560641234534e+19, "train_batch_size": 16, "trial_name": null, "trial_params": null }