diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,64433 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.8984375, + "eval_steps": 500, + "global_step": 9200, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 9.765625e-05, + "grad_norm": 79.3869857788086, + "learning_rate": 3.3333333333333337e-06, + "loss": 7.7773, + "step": 1 + }, + { + "epoch": 0.0001953125, + "grad_norm": 80.6998291015625, + "learning_rate": 6.6666666666666675e-06, + "loss": 7.7617, + "step": 2 + }, + { + "epoch": 0.00029296875, + "grad_norm": 67.86173248291016, + "learning_rate": 1e-05, + "loss": 7.6523, + "step": 3 + }, + { + "epoch": 0.000390625, + "grad_norm": 31.295886993408203, + "learning_rate": 1.3333333333333335e-05, + "loss": 7.3242, + "step": 4 + }, + { + "epoch": 0.00048828125, + "grad_norm": 27.488962173461914, + "learning_rate": 1.6666666666666667e-05, + "loss": 7.2773, + "step": 5 + }, + { + "epoch": 0.0005859375, + "grad_norm": 16.267820358276367, + "learning_rate": 2e-05, + "loss": 7.1641, + "step": 6 + }, + { + "epoch": 0.00068359375, + "grad_norm": 37.23747253417969, + "learning_rate": 2.3333333333333336e-05, + "loss": 7.2812, + "step": 7 + }, + { + "epoch": 0.00078125, + "grad_norm": 17.467384338378906, + "learning_rate": 2.666666666666667e-05, + "loss": 7.0508, + "step": 8 + }, + { + "epoch": 0.00087890625, + "grad_norm": 9.459285736083984, + "learning_rate": 3e-05, + "loss": 6.9375, + "step": 9 + }, + { + "epoch": 0.0009765625, + "grad_norm": 8.56839656829834, + "learning_rate": 3.3333333333333335e-05, + "loss": 6.8789, + "step": 10 + }, + { + "epoch": 0.00107421875, + "grad_norm": 24.074443817138672, + "learning_rate": 3.6666666666666666e-05, + "loss": 6.8672, + "step": 11 + }, + { + "epoch": 0.001171875, + "grad_norm": 11.778696060180664, + "learning_rate": 4e-05, + "loss": 6.7969, + "step": 12 + }, + { + "epoch": 0.00126953125, + "grad_norm": 14.282492637634277, + "learning_rate": 4.3333333333333334e-05, + "loss": 6.8047, + "step": 13 + }, + { + "epoch": 0.0013671875, + "grad_norm": 8.801307678222656, + "learning_rate": 4.666666666666667e-05, + "loss": 6.7188, + "step": 14 + }, + { + "epoch": 0.00146484375, + "grad_norm": 8.514760971069336, + "learning_rate": 5e-05, + "loss": 6.7148, + "step": 15 + }, + { + "epoch": 0.0015625, + "grad_norm": 9.48074722290039, + "learning_rate": 5.333333333333334e-05, + "loss": 6.6641, + "step": 16 + }, + { + "epoch": 0.00166015625, + "grad_norm": 7.557260036468506, + "learning_rate": 5.6666666666666664e-05, + "loss": 6.6172, + "step": 17 + }, + { + "epoch": 0.0017578125, + "grad_norm": 6.613265037536621, + "learning_rate": 6e-05, + "loss": 6.5977, + "step": 18 + }, + { + "epoch": 0.00185546875, + "grad_norm": 13.777167320251465, + "learning_rate": 6.333333333333335e-05, + "loss": 6.6328, + "step": 19 + }, + { + "epoch": 0.001953125, + "grad_norm": 6.775121688842773, + "learning_rate": 6.666666666666667e-05, + "loss": 6.5391, + "step": 20 + }, + { + "epoch": 0.00205078125, + "grad_norm": 9.053375244140625, + "learning_rate": 7.000000000000001e-05, + "loss": 6.543, + "step": 21 + }, + { + "epoch": 0.0021484375, + "grad_norm": 5.147669792175293, + "learning_rate": 7.333333333333333e-05, + "loss": 6.5039, + "step": 22 + }, + { + "epoch": 0.00224609375, + "grad_norm": 9.595170974731445, + "learning_rate": 7.666666666666667e-05, + "loss": 6.5, + "step": 23 + }, + { + "epoch": 
0.00234375, + "grad_norm": 6.9720964431762695, + "learning_rate": 8e-05, + "loss": 6.4883, + "step": 24 + }, + { + "epoch": 0.00244140625, + "grad_norm": 9.805245399475098, + "learning_rate": 8.333333333333333e-05, + "loss": 6.4414, + "step": 25 + }, + { + "epoch": 0.0025390625, + "grad_norm": 5.9478044509887695, + "learning_rate": 8.666666666666667e-05, + "loss": 6.4297, + "step": 26 + }, + { + "epoch": 0.00263671875, + "grad_norm": 8.962249755859375, + "learning_rate": 8.999999999999999e-05, + "loss": 6.4297, + "step": 27 + }, + { + "epoch": 0.002734375, + "grad_norm": 5.202462673187256, + "learning_rate": 9.333333333333334e-05, + "loss": 6.375, + "step": 28 + }, + { + "epoch": 0.00283203125, + "grad_norm": 5.034671306610107, + "learning_rate": 9.666666666666667e-05, + "loss": 6.3555, + "step": 29 + }, + { + "epoch": 0.0029296875, + "grad_norm": 7.446832656860352, + "learning_rate": 0.0001, + "loss": 6.3477, + "step": 30 + }, + { + "epoch": 0.00302734375, + "grad_norm": 5.554169654846191, + "learning_rate": 0.00010333333333333333, + "loss": 6.3086, + "step": 31 + }, + { + "epoch": 0.003125, + "grad_norm": 5.340260028839111, + "learning_rate": 0.00010666666666666668, + "loss": 6.293, + "step": 32 + }, + { + "epoch": 0.00322265625, + "grad_norm": 6.529757976531982, + "learning_rate": 0.00011, + "loss": 6.2812, + "step": 33 + }, + { + "epoch": 0.0033203125, + "grad_norm": 8.140447616577148, + "learning_rate": 0.00011333333333333333, + "loss": 6.2656, + "step": 34 + }, + { + "epoch": 0.00341796875, + "grad_norm": 5.15455961227417, + "learning_rate": 0.00011666666666666667, + "loss": 6.2266, + "step": 35 + }, + { + "epoch": 0.003515625, + "grad_norm": 7.219542980194092, + "learning_rate": 0.00012, + "loss": 6.2539, + "step": 36 + }, + { + "epoch": 0.00361328125, + "grad_norm": 5.522006988525391, + "learning_rate": 0.00012333333333333334, + "loss": 6.2305, + "step": 37 + }, + { + "epoch": 0.0037109375, + "grad_norm": 4.108240127563477, + "learning_rate": 0.0001266666666666667, + "loss": 6.2109, + "step": 38 + }, + { + "epoch": 0.00380859375, + "grad_norm": 5.117391586303711, + "learning_rate": 0.00013000000000000002, + "loss": 6.1992, + "step": 39 + }, + { + "epoch": 0.00390625, + "grad_norm": 9.84037971496582, + "learning_rate": 0.00013333333333333334, + "loss": 6.2227, + "step": 40 + }, + { + "epoch": 0.00400390625, + "grad_norm": 5.794247150421143, + "learning_rate": 0.00013666666666666666, + "loss": 6.1953, + "step": 41 + }, + { + "epoch": 0.0041015625, + "grad_norm": 5.9350810050964355, + "learning_rate": 0.00014000000000000001, + "loss": 6.1836, + "step": 42 + }, + { + "epoch": 0.00419921875, + "grad_norm": 6.219013214111328, + "learning_rate": 0.00014333333333333334, + "loss": 6.168, + "step": 43 + }, + { + "epoch": 0.004296875, + "grad_norm": 4.501729488372803, + "learning_rate": 0.00014666666666666666, + "loss": 6.1055, + "step": 44 + }, + { + "epoch": 0.00439453125, + "grad_norm": 6.2689948081970215, + "learning_rate": 0.00015, + "loss": 6.1328, + "step": 45 + }, + { + "epoch": 0.0044921875, + "grad_norm": 4.54839563369751, + "learning_rate": 0.00015333333333333334, + "loss": 6.1055, + "step": 46 + }, + { + "epoch": 0.00458984375, + "grad_norm": 4.619740009307861, + "learning_rate": 0.0001566666666666667, + "loss": 6.082, + "step": 47 + }, + { + "epoch": 0.0046875, + "grad_norm": 7.260074615478516, + "learning_rate": 0.00016, + "loss": 6.0938, + "step": 48 + }, + { + "epoch": 0.00478515625, + "grad_norm": 5.270298480987549, + "learning_rate": 0.00016333333333333334, + "loss": 6.0859, 
+ "step": 49 + }, + { + "epoch": 0.0048828125, + "grad_norm": 6.634472846984863, + "learning_rate": 0.00016666666666666666, + "loss": 6.0625, + "step": 50 + }, + { + "epoch": 0.00498046875, + "grad_norm": 4.172934532165527, + "learning_rate": 0.00017, + "loss": 6.0469, + "step": 51 + }, + { + "epoch": 0.005078125, + "grad_norm": 5.719873905181885, + "learning_rate": 0.00017333333333333334, + "loss": 6.0273, + "step": 52 + }, + { + "epoch": 0.00517578125, + "grad_norm": 4.851602554321289, + "learning_rate": 0.00017666666666666666, + "loss": 6.0234, + "step": 53 + }, + { + "epoch": 0.0052734375, + "grad_norm": 4.784290313720703, + "learning_rate": 0.00017999999999999998, + "loss": 5.9883, + "step": 54 + }, + { + "epoch": 0.00537109375, + "grad_norm": 5.738783359527588, + "learning_rate": 0.00018333333333333334, + "loss": 6.0078, + "step": 55 + }, + { + "epoch": 0.00546875, + "grad_norm": 4.525517463684082, + "learning_rate": 0.0001866666666666667, + "loss": 5.9844, + "step": 56 + }, + { + "epoch": 0.00556640625, + "grad_norm": 5.545010566711426, + "learning_rate": 0.00019, + "loss": 6.0078, + "step": 57 + }, + { + "epoch": 0.0056640625, + "grad_norm": 5.725442409515381, + "learning_rate": 0.00019333333333333333, + "loss": 5.9688, + "step": 58 + }, + { + "epoch": 0.00576171875, + "grad_norm": 4.413902282714844, + "learning_rate": 0.00019666666666666666, + "loss": 5.9414, + "step": 59 + }, + { + "epoch": 0.005859375, + "grad_norm": 4.734899520874023, + "learning_rate": 0.0002, + "loss": 5.9609, + "step": 60 + }, + { + "epoch": 0.00595703125, + "grad_norm": 4.493259906768799, + "learning_rate": 0.00020333333333333333, + "loss": 5.9648, + "step": 61 + }, + { + "epoch": 0.0060546875, + "grad_norm": 4.654330253601074, + "learning_rate": 0.00020666666666666666, + "loss": 5.957, + "step": 62 + }, + { + "epoch": 0.00615234375, + "grad_norm": 5.410141944885254, + "learning_rate": 0.00021, + "loss": 5.9766, + "step": 63 + }, + { + "epoch": 0.00625, + "grad_norm": 3.376593589782715, + "learning_rate": 0.00021333333333333336, + "loss": 5.9062, + "step": 64 + }, + { + "epoch": 0.00634765625, + "grad_norm": 5.334528923034668, + "learning_rate": 0.00021666666666666668, + "loss": 5.9141, + "step": 65 + }, + { + "epoch": 0.0064453125, + "grad_norm": 4.844014644622803, + "learning_rate": 0.00022, + "loss": 5.9062, + "step": 66 + }, + { + "epoch": 0.00654296875, + "grad_norm": 4.3729939460754395, + "learning_rate": 0.00022333333333333333, + "loss": 5.9023, + "step": 67 + }, + { + "epoch": 0.006640625, + "grad_norm": 6.744809627532959, + "learning_rate": 0.00022666666666666666, + "loss": 5.9648, + "step": 68 + }, + { + "epoch": 0.00673828125, + "grad_norm": 4.010498046875, + "learning_rate": 0.00023, + "loss": 5.8945, + "step": 69 + }, + { + "epoch": 0.0068359375, + "grad_norm": 5.671877384185791, + "learning_rate": 0.00023333333333333333, + "loss": 5.9414, + "step": 70 + }, + { + "epoch": 0.00693359375, + "grad_norm": 3.9647581577301025, + "learning_rate": 0.00023666666666666668, + "loss": 5.8672, + "step": 71 + }, + { + "epoch": 0.00703125, + "grad_norm": 4.879064083099365, + "learning_rate": 0.00024, + "loss": 5.8672, + "step": 72 + }, + { + "epoch": 0.00712890625, + "grad_norm": 5.130377292633057, + "learning_rate": 0.00024333333333333336, + "loss": 5.8711, + "step": 73 + }, + { + "epoch": 0.0072265625, + "grad_norm": 5.8605122566223145, + "learning_rate": 0.0002466666666666667, + "loss": 5.875, + "step": 74 + }, + { + "epoch": 0.00732421875, + "grad_norm": 5.346503257751465, + "learning_rate": 0.00025, + 
"loss": 5.9023, + "step": 75 + }, + { + "epoch": 0.007421875, + "grad_norm": 5.273228645324707, + "learning_rate": 0.0002533333333333334, + "loss": 5.8867, + "step": 76 + }, + { + "epoch": 0.00751953125, + "grad_norm": 3.8530828952789307, + "learning_rate": 0.00025666666666666665, + "loss": 5.8477, + "step": 77 + }, + { + "epoch": 0.0076171875, + "grad_norm": 5.188773155212402, + "learning_rate": 0.00026000000000000003, + "loss": 5.8633, + "step": 78 + }, + { + "epoch": 0.00771484375, + "grad_norm": 3.4620485305786133, + "learning_rate": 0.0002633333333333333, + "loss": 5.8398, + "step": 79 + }, + { + "epoch": 0.0078125, + "grad_norm": 4.68696403503418, + "learning_rate": 0.0002666666666666667, + "loss": 5.8359, + "step": 80 + }, + { + "epoch": 0.00791015625, + "grad_norm": 4.243693828582764, + "learning_rate": 0.00027, + "loss": 5.8555, + "step": 81 + }, + { + "epoch": 0.0080078125, + "grad_norm": 5.6479363441467285, + "learning_rate": 0.00027333333333333333, + "loss": 5.832, + "step": 82 + }, + { + "epoch": 0.00810546875, + "grad_norm": 3.7417171001434326, + "learning_rate": 0.00027666666666666665, + "loss": 5.8281, + "step": 83 + }, + { + "epoch": 0.008203125, + "grad_norm": 4.857771873474121, + "learning_rate": 0.00028000000000000003, + "loss": 5.8086, + "step": 84 + }, + { + "epoch": 0.00830078125, + "grad_norm": 4.6594085693359375, + "learning_rate": 0.00028333333333333335, + "loss": 5.832, + "step": 85 + }, + { + "epoch": 0.0083984375, + "grad_norm": 4.48290491104126, + "learning_rate": 0.0002866666666666667, + "loss": 5.8164, + "step": 86 + }, + { + "epoch": 0.00849609375, + "grad_norm": 6.183064937591553, + "learning_rate": 0.00029, + "loss": 5.8398, + "step": 87 + }, + { + "epoch": 0.00859375, + "grad_norm": 4.494201183319092, + "learning_rate": 0.0002933333333333333, + "loss": 5.7969, + "step": 88 + }, + { + "epoch": 0.00869140625, + "grad_norm": 5.650720596313477, + "learning_rate": 0.0002966666666666667, + "loss": 5.8438, + "step": 89 + }, + { + "epoch": 0.0087890625, + "grad_norm": 5.558387756347656, + "learning_rate": 0.0003, + "loss": 5.8555, + "step": 90 + }, + { + "epoch": 0.00888671875, + "grad_norm": 5.16208553314209, + "learning_rate": 0.00030333333333333335, + "loss": 5.8086, + "step": 91 + }, + { + "epoch": 0.008984375, + "grad_norm": 6.598042964935303, + "learning_rate": 0.0003066666666666667, + "loss": 5.8516, + "step": 92 + }, + { + "epoch": 0.00908203125, + "grad_norm": 4.591591835021973, + "learning_rate": 0.00031, + "loss": 5.8555, + "step": 93 + }, + { + "epoch": 0.0091796875, + "grad_norm": 4.879761695861816, + "learning_rate": 0.0003133333333333334, + "loss": 5.8203, + "step": 94 + }, + { + "epoch": 0.00927734375, + "grad_norm": 5.21987771987915, + "learning_rate": 0.00031666666666666665, + "loss": 5.8203, + "step": 95 + }, + { + "epoch": 0.009375, + "grad_norm": 4.257232666015625, + "learning_rate": 0.00032, + "loss": 5.8047, + "step": 96 + }, + { + "epoch": 0.00947265625, + "grad_norm": 6.07011604309082, + "learning_rate": 0.0003233333333333333, + "loss": 5.8359, + "step": 97 + }, + { + "epoch": 0.0095703125, + "grad_norm": 5.662813186645508, + "learning_rate": 0.0003266666666666667, + "loss": 5.8633, + "step": 98 + }, + { + "epoch": 0.00966796875, + "grad_norm": 4.59433650970459, + "learning_rate": 0.00033, + "loss": 5.8086, + "step": 99 + }, + { + "epoch": 0.009765625, + "grad_norm": 4.295780181884766, + "learning_rate": 0.0003333333333333333, + "loss": 5.8008, + "step": 100 + }, + { + "epoch": 0.00986328125, + "grad_norm": 4.587396144866943, + 
"learning_rate": 0.0003366666666666667, + "loss": 5.8008, + "step": 101 + }, + { + "epoch": 0.0099609375, + "grad_norm": 4.299502849578857, + "learning_rate": 0.00034, + "loss": 5.7773, + "step": 102 + }, + { + "epoch": 0.01005859375, + "grad_norm": 4.284260272979736, + "learning_rate": 0.00034333333333333335, + "loss": 5.793, + "step": 103 + }, + { + "epoch": 0.01015625, + "grad_norm": 6.048828601837158, + "learning_rate": 0.00034666666666666667, + "loss": 5.8008, + "step": 104 + }, + { + "epoch": 0.01025390625, + "grad_norm": 4.235161304473877, + "learning_rate": 0.00035, + "loss": 5.7656, + "step": 105 + }, + { + "epoch": 0.0103515625, + "grad_norm": 5.187899589538574, + "learning_rate": 0.0003533333333333333, + "loss": 5.8125, + "step": 106 + }, + { + "epoch": 0.01044921875, + "grad_norm": 4.133037567138672, + "learning_rate": 0.0003566666666666667, + "loss": 5.8047, + "step": 107 + }, + { + "epoch": 0.010546875, + "grad_norm": 5.822926044464111, + "learning_rate": 0.00035999999999999997, + "loss": 5.8281, + "step": 108 + }, + { + "epoch": 0.01064453125, + "grad_norm": 4.3357672691345215, + "learning_rate": 0.00036333333333333335, + "loss": 5.7734, + "step": 109 + }, + { + "epoch": 0.0107421875, + "grad_norm": 4.930606842041016, + "learning_rate": 0.00036666666666666667, + "loss": 5.7812, + "step": 110 + }, + { + "epoch": 0.01083984375, + "grad_norm": 4.797028064727783, + "learning_rate": 0.00037, + "loss": 5.7695, + "step": 111 + }, + { + "epoch": 0.0109375, + "grad_norm": 3.1973586082458496, + "learning_rate": 0.0003733333333333334, + "loss": 5.7539, + "step": 112 + }, + { + "epoch": 0.01103515625, + "grad_norm": 4.5399980545043945, + "learning_rate": 0.00037666666666666664, + "loss": 5.7852, + "step": 113 + }, + { + "epoch": 0.0111328125, + "grad_norm": 4.550619602203369, + "learning_rate": 0.00038, + "loss": 5.7773, + "step": 114 + }, + { + "epoch": 0.01123046875, + "grad_norm": 5.377904415130615, + "learning_rate": 0.00038333333333333334, + "loss": 5.7891, + "step": 115 + }, + { + "epoch": 0.011328125, + "grad_norm": 4.06483268737793, + "learning_rate": 0.00038666666666666667, + "loss": 5.6992, + "step": 116 + }, + { + "epoch": 0.01142578125, + "grad_norm": 3.8046791553497314, + "learning_rate": 0.00039000000000000005, + "loss": 5.7344, + "step": 117 + }, + { + "epoch": 0.0115234375, + "grad_norm": 4.709420204162598, + "learning_rate": 0.0003933333333333333, + "loss": 5.7461, + "step": 118 + }, + { + "epoch": 0.01162109375, + "grad_norm": 4.36158561706543, + "learning_rate": 0.0003966666666666667, + "loss": 5.75, + "step": 119 + }, + { + "epoch": 0.01171875, + "grad_norm": 4.972657680511475, + "learning_rate": 0.0004, + "loss": 5.7188, + "step": 120 + }, + { + "epoch": 0.01181640625, + "grad_norm": 3.862230062484741, + "learning_rate": 0.00040333333333333334, + "loss": 5.7617, + "step": 121 + }, + { + "epoch": 0.0119140625, + "grad_norm": 6.694273948669434, + "learning_rate": 0.00040666666666666667, + "loss": 5.7734, + "step": 122 + }, + { + "epoch": 0.01201171875, + "grad_norm": 4.289857864379883, + "learning_rate": 0.00041, + "loss": 5.7539, + "step": 123 + }, + { + "epoch": 0.012109375, + "grad_norm": 4.241764545440674, + "learning_rate": 0.0004133333333333333, + "loss": 5.7422, + "step": 124 + }, + { + "epoch": 0.01220703125, + "grad_norm": 5.350276947021484, + "learning_rate": 0.0004166666666666667, + "loss": 5.7578, + "step": 125 + }, + { + "epoch": 0.0123046875, + "grad_norm": 4.058553695678711, + "learning_rate": 0.00042, + "loss": 5.6992, + "step": 126 + }, + { + "epoch": 
0.01240234375, + "grad_norm": 4.70885705947876, + "learning_rate": 0.00042333333333333334, + "loss": 5.7383, + "step": 127 + }, + { + "epoch": 0.0125, + "grad_norm": 4.190490245819092, + "learning_rate": 0.0004266666666666667, + "loss": 5.7305, + "step": 128 + }, + { + "epoch": 0.01259765625, + "grad_norm": 6.176610469818115, + "learning_rate": 0.00043, + "loss": 5.7344, + "step": 129 + }, + { + "epoch": 0.0126953125, + "grad_norm": 4.583580017089844, + "learning_rate": 0.00043333333333333337, + "loss": 5.7266, + "step": 130 + }, + { + "epoch": 0.01279296875, + "grad_norm": 3.8871593475341797, + "learning_rate": 0.00043666666666666664, + "loss": 5.7422, + "step": 131 + }, + { + "epoch": 0.012890625, + "grad_norm": 6.952763557434082, + "learning_rate": 0.00044, + "loss": 5.7812, + "step": 132 + }, + { + "epoch": 0.01298828125, + "grad_norm": 4.236309051513672, + "learning_rate": 0.00044333333333333334, + "loss": 5.75, + "step": 133 + }, + { + "epoch": 0.0130859375, + "grad_norm": 7.308345794677734, + "learning_rate": 0.00044666666666666666, + "loss": 5.8125, + "step": 134 + }, + { + "epoch": 0.01318359375, + "grad_norm": 8.599268913269043, + "learning_rate": 0.00045000000000000004, + "loss": 5.918, + "step": 135 + }, + { + "epoch": 0.01328125, + "grad_norm": 4.937841415405273, + "learning_rate": 0.0004533333333333333, + "loss": 5.8242, + "step": 136 + }, + { + "epoch": 0.01337890625, + "grad_norm": 4.781856536865234, + "learning_rate": 0.0004566666666666667, + "loss": 5.7891, + "step": 137 + }, + { + "epoch": 0.0134765625, + "grad_norm": 7.261330604553223, + "learning_rate": 0.00046, + "loss": 5.8203, + "step": 138 + }, + { + "epoch": 0.01357421875, + "grad_norm": 5.891605854034424, + "learning_rate": 0.00046333333333333334, + "loss": 5.875, + "step": 139 + }, + { + "epoch": 0.013671875, + "grad_norm": 7.775816440582275, + "learning_rate": 0.00046666666666666666, + "loss": 5.8672, + "step": 140 + }, + { + "epoch": 0.01376953125, + "grad_norm": 3.8871777057647705, + "learning_rate": 0.00047, + "loss": 5.8359, + "step": 141 + }, + { + "epoch": 0.0138671875, + "grad_norm": 5.772259712219238, + "learning_rate": 0.00047333333333333336, + "loss": 5.8477, + "step": 142 + }, + { + "epoch": 0.01396484375, + "grad_norm": 4.485783100128174, + "learning_rate": 0.0004766666666666667, + "loss": 5.793, + "step": 143 + }, + { + "epoch": 0.0140625, + "grad_norm": 4.418201446533203, + "learning_rate": 0.00048, + "loss": 5.7812, + "step": 144 + }, + { + "epoch": 0.01416015625, + "grad_norm": 7.3408074378967285, + "learning_rate": 0.00048333333333333334, + "loss": 5.8516, + "step": 145 + }, + { + "epoch": 0.0142578125, + "grad_norm": 5.989964962005615, + "learning_rate": 0.0004866666666666667, + "loss": 5.8945, + "step": 146 + }, + { + "epoch": 0.01435546875, + "grad_norm": 4.477835655212402, + "learning_rate": 0.00049, + "loss": 5.8477, + "step": 147 + }, + { + "epoch": 0.014453125, + "grad_norm": 6.493783950805664, + "learning_rate": 0.0004933333333333334, + "loss": 5.8594, + "step": 148 + }, + { + "epoch": 0.01455078125, + "grad_norm": 6.190314292907715, + "learning_rate": 0.0004966666666666666, + "loss": 5.8906, + "step": 149 + }, + { + "epoch": 0.0146484375, + "grad_norm": 6.708803653717041, + "learning_rate": 0.0005, + "loss": 5.8711, + "step": 150 + }, + { + "epoch": 0.01474609375, + "grad_norm": 5.7710113525390625, + "learning_rate": 0.0004999999890938886, + "loss": 5.8633, + "step": 151 + }, + { + "epoch": 0.01484375, + "grad_norm": 5.553577899932861, + "learning_rate": 0.0004999999563755552, + 
"loss": 5.8594, + "step": 152 + }, + { + "epoch": 0.01494140625, + "grad_norm": 4.852464199066162, + "learning_rate": 0.0004999999018450032, + "loss": 5.8555, + "step": 153 + }, + { + "epoch": 0.0150390625, + "grad_norm": 4.127274990081787, + "learning_rate": 0.0004999998255022377, + "loss": 5.793, + "step": 154 + }, + { + "epoch": 0.01513671875, + "grad_norm": 5.139339923858643, + "learning_rate": 0.0004999997273472664, + "loss": 5.8398, + "step": 155 + }, + { + "epoch": 0.015234375, + "grad_norm": 3.9165873527526855, + "learning_rate": 0.0004999996073800985, + "loss": 5.7852, + "step": 156 + }, + { + "epoch": 0.01533203125, + "grad_norm": 4.544178485870361, + "learning_rate": 0.0004999994656007457, + "loss": 5.8125, + "step": 157 + }, + { + "epoch": 0.0154296875, + "grad_norm": 4.998808860778809, + "learning_rate": 0.0004999993020092219, + "loss": 5.8359, + "step": 158 + }, + { + "epoch": 0.01552734375, + "grad_norm": 5.184920787811279, + "learning_rate": 0.0004999991166055426, + "loss": 5.8281, + "step": 159 + }, + { + "epoch": 0.015625, + "grad_norm": 3.716250419616699, + "learning_rate": 0.0004999989093897262, + "loss": 5.7969, + "step": 160 + }, + { + "epoch": 0.01572265625, + "grad_norm": 3.8317952156066895, + "learning_rate": 0.0004999986803617926, + "loss": 5.7617, + "step": 161 + }, + { + "epoch": 0.0158203125, + "grad_norm": 4.461795330047607, + "learning_rate": 0.0004999984295217641, + "loss": 5.8047, + "step": 162 + }, + { + "epoch": 0.01591796875, + "grad_norm": 3.0825703144073486, + "learning_rate": 0.0004999981568696648, + "loss": 5.7656, + "step": 163 + }, + { + "epoch": 0.016015625, + "grad_norm": 4.151459217071533, + "learning_rate": 0.0004999978624055212, + "loss": 5.7617, + "step": 164 + }, + { + "epoch": 0.01611328125, + "grad_norm": 3.66987681388855, + "learning_rate": 0.0004999975461293621, + "loss": 5.7461, + "step": 165 + }, + { + "epoch": 0.0162109375, + "grad_norm": 4.092290878295898, + "learning_rate": 0.0004999972080412177, + "loss": 5.75, + "step": 166 + }, + { + "epoch": 0.01630859375, + "grad_norm": 4.104146480560303, + "learning_rate": 0.0004999968481411212, + "loss": 5.7656, + "step": 167 + }, + { + "epoch": 0.01640625, + "grad_norm": 4.437919616699219, + "learning_rate": 0.0004999964664291073, + "loss": 5.7812, + "step": 168 + }, + { + "epoch": 0.01650390625, + "grad_norm": 7.200653553009033, + "learning_rate": 0.0004999960629052131, + "loss": 5.7578, + "step": 169 + }, + { + "epoch": 0.0166015625, + "grad_norm": 2.731268882751465, + "learning_rate": 0.0004999956375694776, + "loss": 5.707, + "step": 170 + }, + { + "epoch": 0.01669921875, + "grad_norm": 5.332357406616211, + "learning_rate": 0.0004999951904219421, + "loss": 5.7461, + "step": 171 + }, + { + "epoch": 0.016796875, + "grad_norm": 3.130514144897461, + "learning_rate": 0.0004999947214626501, + "loss": 5.7109, + "step": 172 + }, + { + "epoch": 0.01689453125, + "grad_norm": 3.6535258293151855, + "learning_rate": 0.0004999942306916466, + "loss": 5.7344, + "step": 173 + }, + { + "epoch": 0.0169921875, + "grad_norm": 3.5438027381896973, + "learning_rate": 0.0004999937181089796, + "loss": 5.6953, + "step": 174 + }, + { + "epoch": 0.01708984375, + "grad_norm": 4.228607177734375, + "learning_rate": 0.0004999931837146987, + "loss": 5.7031, + "step": 175 + }, + { + "epoch": 0.0171875, + "grad_norm": 3.217113971710205, + "learning_rate": 0.0004999926275088556, + "loss": 5.7148, + "step": 176 + }, + { + "epoch": 0.01728515625, + "grad_norm": 4.9072041511535645, + "learning_rate": 0.0004999920494915043, + 
"loss": 5.75, + "step": 177 + }, + { + "epoch": 0.0173828125, + "grad_norm": 2.8025128841400146, + "learning_rate": 0.0004999914496627009, + "loss": 5.7148, + "step": 178 + }, + { + "epoch": 0.01748046875, + "grad_norm": 2.481431245803833, + "learning_rate": 0.0004999908280225035, + "loss": 5.6445, + "step": 179 + }, + { + "epoch": 0.017578125, + "grad_norm": 3.0694146156311035, + "learning_rate": 0.0004999901845709722, + "loss": 5.6602, + "step": 180 + }, + { + "epoch": 0.01767578125, + "grad_norm": 2.6255242824554443, + "learning_rate": 0.0004999895193081698, + "loss": 5.6523, + "step": 181 + }, + { + "epoch": 0.0177734375, + "grad_norm": 3.0787289142608643, + "learning_rate": 0.0004999888322341602, + "loss": 5.6367, + "step": 182 + }, + { + "epoch": 0.01787109375, + "grad_norm": 3.5675113201141357, + "learning_rate": 0.0004999881233490104, + "loss": 5.6875, + "step": 183 + }, + { + "epoch": 0.01796875, + "grad_norm": 3.278024673461914, + "learning_rate": 0.0004999873926527891, + "loss": 5.6406, + "step": 184 + }, + { + "epoch": 0.01806640625, + "grad_norm": 3.4470205307006836, + "learning_rate": 0.0004999866401455671, + "loss": 5.6328, + "step": 185 + }, + { + "epoch": 0.0181640625, + "grad_norm": 2.537705659866333, + "learning_rate": 0.0004999858658274172, + "loss": 5.625, + "step": 186 + }, + { + "epoch": 0.01826171875, + "grad_norm": 3.446849822998047, + "learning_rate": 0.0004999850696984147, + "loss": 5.6367, + "step": 187 + }, + { + "epoch": 0.018359375, + "grad_norm": 2.9557571411132812, + "learning_rate": 0.0004999842517586367, + "loss": 5.6523, + "step": 188 + }, + { + "epoch": 0.01845703125, + "grad_norm": 2.224954605102539, + "learning_rate": 0.0004999834120081624, + "loss": 5.6133, + "step": 189 + }, + { + "epoch": 0.0185546875, + "grad_norm": 2.847534418106079, + "learning_rate": 0.0004999825504470732, + "loss": 5.5977, + "step": 190 + }, + { + "epoch": 0.01865234375, + "grad_norm": 3.08176851272583, + "learning_rate": 0.0004999816670754527, + "loss": 5.6094, + "step": 191 + }, + { + "epoch": 0.01875, + "grad_norm": 2.509631872177124, + "learning_rate": 0.0004999807618933866, + "loss": 5.582, + "step": 192 + }, + { + "epoch": 0.01884765625, + "grad_norm": 2.551470994949341, + "learning_rate": 0.0004999798349009626, + "loss": 5.582, + "step": 193 + }, + { + "epoch": 0.0189453125, + "grad_norm": 2.8080625534057617, + "learning_rate": 0.0004999788860982706, + "loss": 5.5898, + "step": 194 + }, + { + "epoch": 0.01904296875, + "grad_norm": 2.784714698791504, + "learning_rate": 0.0004999779154854024, + "loss": 5.6016, + "step": 195 + }, + { + "epoch": 0.019140625, + "grad_norm": 2.6262359619140625, + "learning_rate": 0.0004999769230624524, + "loss": 5.5664, + "step": 196 + }, + { + "epoch": 0.01923828125, + "grad_norm": 2.2458934783935547, + "learning_rate": 0.0004999759088295165, + "loss": 5.5508, + "step": 197 + }, + { + "epoch": 0.0193359375, + "grad_norm": 2.5689525604248047, + "learning_rate": 0.0004999748727866932, + "loss": 5.5625, + "step": 198 + }, + { + "epoch": 0.01943359375, + "grad_norm": 2.44174861907959, + "learning_rate": 0.0004999738149340828, + "loss": 5.5352, + "step": 199 + }, + { + "epoch": 0.01953125, + "grad_norm": 2.5862767696380615, + "learning_rate": 0.000499972735271788, + "loss": 5.5234, + "step": 200 + }, + { + "epoch": 0.01962890625, + "grad_norm": 2.2569644451141357, + "learning_rate": 0.0004999716337999135, + "loss": 5.5078, + "step": 201 + }, + { + "epoch": 0.0197265625, + "grad_norm": 2.5576300621032715, + "learning_rate": 0.0004999705105185659, 
+ "loss": 5.5391, + "step": 202 + }, + { + "epoch": 0.01982421875, + "grad_norm": 2.7671685218811035, + "learning_rate": 0.0004999693654278542, + "loss": 5.5234, + "step": 203 + }, + { + "epoch": 0.019921875, + "grad_norm": 1.9716945886611938, + "learning_rate": 0.0004999681985278894, + "loss": 5.4961, + "step": 204 + }, + { + "epoch": 0.02001953125, + "grad_norm": 2.345658302307129, + "learning_rate": 0.0004999670098187846, + "loss": 5.4961, + "step": 205 + }, + { + "epoch": 0.0201171875, + "grad_norm": 2.1516318321228027, + "learning_rate": 0.0004999657993006551, + "loss": 5.5078, + "step": 206 + }, + { + "epoch": 0.02021484375, + "grad_norm": 2.983196258544922, + "learning_rate": 0.0004999645669736181, + "loss": 5.5273, + "step": 207 + }, + { + "epoch": 0.0203125, + "grad_norm": 2.5061614513397217, + "learning_rate": 0.0004999633128377932, + "loss": 5.5195, + "step": 208 + }, + { + "epoch": 0.02041015625, + "grad_norm": 1.7015206813812256, + "learning_rate": 0.000499962036893302, + "loss": 5.4961, + "step": 209 + }, + { + "epoch": 0.0205078125, + "grad_norm": 2.2488410472869873, + "learning_rate": 0.0004999607391402681, + "loss": 5.4258, + "step": 210 + }, + { + "epoch": 0.02060546875, + "grad_norm": 2.7714498043060303, + "learning_rate": 0.0004999594195788175, + "loss": 5.5039, + "step": 211 + }, + { + "epoch": 0.020703125, + "grad_norm": 2.1639745235443115, + "learning_rate": 0.0004999580782090778, + "loss": 5.4922, + "step": 212 + }, + { + "epoch": 0.02080078125, + "grad_norm": 1.9490896463394165, + "learning_rate": 0.0004999567150311793, + "loss": 5.4492, + "step": 213 + }, + { + "epoch": 0.0208984375, + "grad_norm": 2.291276454925537, + "learning_rate": 0.0004999553300452541, + "loss": 5.4648, + "step": 214 + }, + { + "epoch": 0.02099609375, + "grad_norm": 2.4977705478668213, + "learning_rate": 0.0004999539232514363, + "loss": 5.4688, + "step": 215 + }, + { + "epoch": 0.02109375, + "grad_norm": 2.013157367706299, + "learning_rate": 0.0004999524946498624, + "loss": 5.4727, + "step": 216 + }, + { + "epoch": 0.02119140625, + "grad_norm": 2.35416579246521, + "learning_rate": 0.000499951044240671, + "loss": 5.457, + "step": 217 + }, + { + "epoch": 0.0212890625, + "grad_norm": 1.7172154188156128, + "learning_rate": 0.0004999495720240027, + "loss": 5.4531, + "step": 218 + }, + { + "epoch": 0.02138671875, + "grad_norm": 2.345547676086426, + "learning_rate": 0.000499948078, + "loss": 5.4609, + "step": 219 + }, + { + "epoch": 0.021484375, + "grad_norm": 2.2435221672058105, + "learning_rate": 0.0004999465621688078, + "loss": 5.4453, + "step": 220 + }, + { + "epoch": 0.02158203125, + "grad_norm": 2.306879997253418, + "learning_rate": 0.0004999450245305732, + "loss": 5.4414, + "step": 221 + }, + { + "epoch": 0.0216796875, + "grad_norm": 2.2427804470062256, + "learning_rate": 0.0004999434650854452, + "loss": 5.4531, + "step": 222 + }, + { + "epoch": 0.02177734375, + "grad_norm": 1.790202260017395, + "learning_rate": 0.000499941883833575, + "loss": 5.457, + "step": 223 + }, + { + "epoch": 0.021875, + "grad_norm": 1.838149070739746, + "learning_rate": 0.0004999402807751157, + "loss": 5.4297, + "step": 224 + }, + { + "epoch": 0.02197265625, + "grad_norm": 2.016653299331665, + "learning_rate": 0.000499938655910223, + "loss": 5.4141, + "step": 225 + }, + { + "epoch": 0.0220703125, + "grad_norm": 2.0161890983581543, + "learning_rate": 0.0004999370092390541, + "loss": 5.3906, + "step": 226 + }, + { + "epoch": 0.02216796875, + "grad_norm": 2.036649465560913, + "learning_rate": 0.0004999353407617689, + 
"loss": 5.4297, + "step": 227 + }, + { + "epoch": 0.022265625, + "grad_norm": 1.5296927690505981, + "learning_rate": 0.000499933650478529, + "loss": 5.4023, + "step": 228 + }, + { + "epoch": 0.02236328125, + "grad_norm": 1.713457703590393, + "learning_rate": 0.0004999319383894985, + "loss": 5.4219, + "step": 229 + }, + { + "epoch": 0.0224609375, + "grad_norm": 2.3026342391967773, + "learning_rate": 0.000499930204494843, + "loss": 5.4453, + "step": 230 + }, + { + "epoch": 0.02255859375, + "grad_norm": 2.2491424083709717, + "learning_rate": 0.0004999284487947308, + "loss": 5.4219, + "step": 231 + }, + { + "epoch": 0.02265625, + "grad_norm": 2.2838447093963623, + "learning_rate": 0.0004999266712893322, + "loss": 5.4336, + "step": 232 + }, + { + "epoch": 0.02275390625, + "grad_norm": 1.7964040040969849, + "learning_rate": 0.0004999248719788193, + "loss": 5.4141, + "step": 233 + }, + { + "epoch": 0.0228515625, + "grad_norm": 2.0387890338897705, + "learning_rate": 0.0004999230508633667, + "loss": 5.4102, + "step": 234 + }, + { + "epoch": 0.02294921875, + "grad_norm": 2.3577651977539062, + "learning_rate": 0.0004999212079431507, + "loss": 5.4297, + "step": 235 + }, + { + "epoch": 0.023046875, + "grad_norm": 1.8143409490585327, + "learning_rate": 0.0004999193432183504, + "loss": 5.4023, + "step": 236 + }, + { + "epoch": 0.02314453125, + "grad_norm": 1.6521782875061035, + "learning_rate": 0.0004999174566891461, + "loss": 5.3906, + "step": 237 + }, + { + "epoch": 0.0232421875, + "grad_norm": 1.8481347560882568, + "learning_rate": 0.000499915548355721, + "loss": 5.3984, + "step": 238 + }, + { + "epoch": 0.02333984375, + "grad_norm": 2.1952035427093506, + "learning_rate": 0.0004999136182182601, + "loss": 5.3984, + "step": 239 + }, + { + "epoch": 0.0234375, + "grad_norm": 2.0808660984039307, + "learning_rate": 0.0004999116662769502, + "loss": 5.3945, + "step": 240 + }, + { + "epoch": 0.02353515625, + "grad_norm": 2.293430805206299, + "learning_rate": 0.0004999096925319808, + "loss": 5.4102, + "step": 241 + }, + { + "epoch": 0.0236328125, + "grad_norm": 1.6741044521331787, + "learning_rate": 0.0004999076969835432, + "loss": 5.4062, + "step": 242 + }, + { + "epoch": 0.02373046875, + "grad_norm": 1.95404851436615, + "learning_rate": 0.0004999056796318308, + "loss": 5.3672, + "step": 243 + }, + { + "epoch": 0.023828125, + "grad_norm": 1.8848334550857544, + "learning_rate": 0.0004999036404770391, + "loss": 5.375, + "step": 244 + }, + { + "epoch": 0.02392578125, + "grad_norm": 1.6347148418426514, + "learning_rate": 0.0004999015795193661, + "loss": 5.3711, + "step": 245 + }, + { + "epoch": 0.0240234375, + "grad_norm": 2.3380987644195557, + "learning_rate": 0.0004998994967590113, + "loss": 5.4023, + "step": 246 + }, + { + "epoch": 0.02412109375, + "grad_norm": 1.4733819961547852, + "learning_rate": 0.0004998973921961767, + "loss": 5.3711, + "step": 247 + }, + { + "epoch": 0.02421875, + "grad_norm": 2.443174362182617, + "learning_rate": 0.0004998952658310662, + "loss": 5.3633, + "step": 248 + }, + { + "epoch": 0.02431640625, + "grad_norm": 1.9105093479156494, + "learning_rate": 0.0004998931176638861, + "loss": 5.375, + "step": 249 + }, + { + "epoch": 0.0244140625, + "grad_norm": 2.14117431640625, + "learning_rate": 0.0004998909476948446, + "loss": 5.3672, + "step": 250 + }, + { + "epoch": 0.02451171875, + "grad_norm": 1.5175701379776, + "learning_rate": 0.0004998887559241521, + "loss": 5.3711, + "step": 251 + }, + { + "epoch": 0.024609375, + "grad_norm": 1.7954213619232178, + "learning_rate": 
0.000499886542352021, + "loss": 5.3359, + "step": 252 + }, + { + "epoch": 0.02470703125, + "grad_norm": 2.0219264030456543, + "learning_rate": 0.0004998843069786659, + "loss": 5.3516, + "step": 253 + }, + { + "epoch": 0.0248046875, + "grad_norm": 2.42618465423584, + "learning_rate": 0.0004998820498043036, + "loss": 5.3594, + "step": 254 + }, + { + "epoch": 0.02490234375, + "grad_norm": 2.4111266136169434, + "learning_rate": 0.0004998797708291528, + "loss": 5.3477, + "step": 255 + }, + { + "epoch": 0.025, + "grad_norm": 1.6236835718154907, + "learning_rate": 0.0004998774700534346, + "loss": 5.3516, + "step": 256 + }, + { + "epoch": 0.02509765625, + "grad_norm": 2.249431610107422, + "learning_rate": 0.0004998751474773718, + "loss": 5.3789, + "step": 257 + }, + { + "epoch": 0.0251953125, + "grad_norm": 1.878957986831665, + "learning_rate": 0.0004998728031011897, + "loss": 5.3359, + "step": 258 + }, + { + "epoch": 0.02529296875, + "grad_norm": 1.7500239610671997, + "learning_rate": 0.0004998704369251156, + "loss": 5.3086, + "step": 259 + }, + { + "epoch": 0.025390625, + "grad_norm": 1.5108247995376587, + "learning_rate": 0.0004998680489493788, + "loss": 5.3008, + "step": 260 + }, + { + "epoch": 0.02548828125, + "grad_norm": 1.8362010717391968, + "learning_rate": 0.0004998656391742108, + "loss": 5.3398, + "step": 261 + }, + { + "epoch": 0.0255859375, + "grad_norm": 1.7218053340911865, + "learning_rate": 0.0004998632075998453, + "loss": 5.3633, + "step": 262 + }, + { + "epoch": 0.02568359375, + "grad_norm": 1.8602724075317383, + "learning_rate": 0.000499860754226518, + "loss": 5.3477, + "step": 263 + }, + { + "epoch": 0.02578125, + "grad_norm": 1.3672821521759033, + "learning_rate": 0.0004998582790544667, + "loss": 5.3047, + "step": 264 + }, + { + "epoch": 0.02587890625, + "grad_norm": 1.5733098983764648, + "learning_rate": 0.0004998557820839313, + "loss": 5.3203, + "step": 265 + }, + { + "epoch": 0.0259765625, + "grad_norm": 1.547677993774414, + "learning_rate": 0.0004998532633151539, + "loss": 5.3203, + "step": 266 + }, + { + "epoch": 0.02607421875, + "grad_norm": 1.8635348081588745, + "learning_rate": 0.0004998507227483789, + "loss": 5.332, + "step": 267 + }, + { + "epoch": 0.026171875, + "grad_norm": 1.7884019613265991, + "learning_rate": 0.0004998481603838523, + "loss": 5.3281, + "step": 268 + }, + { + "epoch": 0.02626953125, + "grad_norm": 1.6009703874588013, + "learning_rate": 0.0004998455762218225, + "loss": 5.3125, + "step": 269 + }, + { + "epoch": 0.0263671875, + "grad_norm": 1.79560387134552, + "learning_rate": 0.0004998429702625401, + "loss": 5.3125, + "step": 270 + }, + { + "epoch": 0.02646484375, + "grad_norm": 2.2909066677093506, + "learning_rate": 0.0004998403425062579, + "loss": 5.3242, + "step": 271 + }, + { + "epoch": 0.0265625, + "grad_norm": 1.209547758102417, + "learning_rate": 0.0004998376929532305, + "loss": 5.3164, + "step": 272 + }, + { + "epoch": 0.02666015625, + "grad_norm": 1.811440110206604, + "learning_rate": 0.0004998350216037146, + "loss": 5.3047, + "step": 273 + }, + { + "epoch": 0.0267578125, + "grad_norm": 1.956322193145752, + "learning_rate": 0.0004998323284579694, + "loss": 5.3086, + "step": 274 + }, + { + "epoch": 0.02685546875, + "grad_norm": 2.4985668659210205, + "learning_rate": 0.000499829613516256, + "loss": 5.3047, + "step": 275 + }, + { + "epoch": 0.026953125, + "grad_norm": 2.2196872234344482, + "learning_rate": 0.0004998268767788373, + "loss": 5.3125, + "step": 276 + }, + { + "epoch": 0.02705078125, + "grad_norm": 2.2135283946990967, + 
"learning_rate": 0.0004998241182459789, + "loss": 5.3242, + "step": 277 + }, + { + "epoch": 0.0271484375, + "grad_norm": 1.4025847911834717, + "learning_rate": 0.0004998213379179481, + "loss": 5.3398, + "step": 278 + }, + { + "epoch": 0.02724609375, + "grad_norm": 2.832472801208496, + "learning_rate": 0.0004998185357950144, + "loss": 5.3047, + "step": 279 + }, + { + "epoch": 0.02734375, + "grad_norm": 1.4372154474258423, + "learning_rate": 0.0004998157118774496, + "loss": 5.3047, + "step": 280 + }, + { + "epoch": 0.02744140625, + "grad_norm": 2.209719657897949, + "learning_rate": 0.0004998128661655274, + "loss": 5.3008, + "step": 281 + }, + { + "epoch": 0.0275390625, + "grad_norm": 2.680424928665161, + "learning_rate": 0.0004998099986595235, + "loss": 5.3047, + "step": 282 + }, + { + "epoch": 0.02763671875, + "grad_norm": 2.1611225605010986, + "learning_rate": 0.0004998071093597162, + "loss": 5.3008, + "step": 283 + }, + { + "epoch": 0.027734375, + "grad_norm": 1.9571424722671509, + "learning_rate": 0.0004998041982663851, + "loss": 5.2891, + "step": 284 + }, + { + "epoch": 0.02783203125, + "grad_norm": 1.3447707891464233, + "learning_rate": 0.000499801265379813, + "loss": 5.2852, + "step": 285 + }, + { + "epoch": 0.0279296875, + "grad_norm": 2.0131936073303223, + "learning_rate": 0.0004997983107002838, + "loss": 5.3086, + "step": 286 + }, + { + "epoch": 0.02802734375, + "grad_norm": 1.1873258352279663, + "learning_rate": 0.0004997953342280843, + "loss": 5.2656, + "step": 287 + }, + { + "epoch": 0.028125, + "grad_norm": 2.239952564239502, + "learning_rate": 0.0004997923359635027, + "loss": 5.2773, + "step": 288 + }, + { + "epoch": 0.02822265625, + "grad_norm": 1.3604589700698853, + "learning_rate": 0.0004997893159068297, + "loss": 5.2773, + "step": 289 + }, + { + "epoch": 0.0283203125, + "grad_norm": 1.888407826423645, + "learning_rate": 0.0004997862740583584, + "loss": 5.2852, + "step": 290 + }, + { + "epoch": 0.02841796875, + "grad_norm": 1.8944205045700073, + "learning_rate": 0.0004997832104183833, + "loss": 5.2812, + "step": 291 + }, + { + "epoch": 0.028515625, + "grad_norm": 1.1297378540039062, + "learning_rate": 0.0004997801249872016, + "loss": 5.25, + "step": 292 + }, + { + "epoch": 0.02861328125, + "grad_norm": 1.6378464698791504, + "learning_rate": 0.0004997770177651123, + "loss": 5.2461, + "step": 293 + }, + { + "epoch": 0.0287109375, + "grad_norm": 1.3006982803344727, + "learning_rate": 0.0004997738887524168, + "loss": 5.2578, + "step": 294 + }, + { + "epoch": 0.02880859375, + "grad_norm": 1.8940247297286987, + "learning_rate": 0.0004997707379494183, + "loss": 5.2773, + "step": 295 + }, + { + "epoch": 0.02890625, + "grad_norm": 1.5350399017333984, + "learning_rate": 0.0004997675653564223, + "loss": 5.2656, + "step": 296 + }, + { + "epoch": 0.02900390625, + "grad_norm": 1.58504056930542, + "learning_rate": 0.0004997643709737363, + "loss": 5.2695, + "step": 297 + }, + { + "epoch": 0.0291015625, + "grad_norm": 1.5148996114730835, + "learning_rate": 0.0004997611548016699, + "loss": 5.2734, + "step": 298 + }, + { + "epoch": 0.02919921875, + "grad_norm": 1.2484545707702637, + "learning_rate": 0.0004997579168405352, + "loss": 5.25, + "step": 299 + }, + { + "epoch": 0.029296875, + "grad_norm": 1.4745291471481323, + "learning_rate": 0.0004997546570906458, + "loss": 5.2773, + "step": 300 + }, + { + "epoch": 0.02939453125, + "grad_norm": 1.0975582599639893, + "learning_rate": 0.0004997513755523179, + "loss": 5.2344, + "step": 301 + }, + { + "epoch": 0.0294921875, + "grad_norm": 
1.4826176166534424, + "learning_rate": 0.0004997480722258694, + "loss": 5.2617, + "step": 302 + }, + { + "epoch": 0.02958984375, + "grad_norm": 1.3512847423553467, + "learning_rate": 0.0004997447471116207, + "loss": 5.2422, + "step": 303 + }, + { + "epoch": 0.0296875, + "grad_norm": 1.2088016271591187, + "learning_rate": 0.0004997414002098941, + "loss": 5.2539, + "step": 304 + }, + { + "epoch": 0.02978515625, + "grad_norm": 1.3594310283660889, + "learning_rate": 0.0004997380315210142, + "loss": 5.2695, + "step": 305 + }, + { + "epoch": 0.0298828125, + "grad_norm": 1.6119142770767212, + "learning_rate": 0.0004997346410453074, + "loss": 5.2383, + "step": 306 + }, + { + "epoch": 0.02998046875, + "grad_norm": 1.2976305484771729, + "learning_rate": 0.0004997312287831024, + "loss": 5.2773, + "step": 307 + }, + { + "epoch": 0.030078125, + "grad_norm": 1.1194605827331543, + "learning_rate": 0.00049972779473473, + "loss": 5.2656, + "step": 308 + }, + { + "epoch": 0.03017578125, + "grad_norm": 1.244971752166748, + "learning_rate": 0.0004997243389005232, + "loss": 5.2383, + "step": 309 + }, + { + "epoch": 0.0302734375, + "grad_norm": 1.1144415140151978, + "learning_rate": 0.0004997208612808168, + "loss": 5.2188, + "step": 310 + }, + { + "epoch": 0.03037109375, + "grad_norm": 1.1849431991577148, + "learning_rate": 0.0004997173618759482, + "loss": 5.2422, + "step": 311 + }, + { + "epoch": 0.03046875, + "grad_norm": 1.3395856618881226, + "learning_rate": 0.0004997138406862565, + "loss": 5.2422, + "step": 312 + }, + { + "epoch": 0.03056640625, + "grad_norm": 1.1774636507034302, + "learning_rate": 0.0004997102977120832, + "loss": 5.2188, + "step": 313 + }, + { + "epoch": 0.0306640625, + "grad_norm": 1.4251941442489624, + "learning_rate": 0.0004997067329537714, + "loss": 5.2578, + "step": 314 + }, + { + "epoch": 0.03076171875, + "grad_norm": 1.4117344617843628, + "learning_rate": 0.0004997031464116672, + "loss": 5.207, + "step": 315 + }, + { + "epoch": 0.030859375, + "grad_norm": 1.3700896501541138, + "learning_rate": 0.0004996995380861178, + "loss": 5.2305, + "step": 316 + }, + { + "epoch": 0.03095703125, + "grad_norm": 1.682862401008606, + "learning_rate": 0.0004996959079774734, + "loss": 5.25, + "step": 317 + }, + { + "epoch": 0.0310546875, + "grad_norm": 1.1571077108383179, + "learning_rate": 0.0004996922560860856, + "loss": 5.2109, + "step": 318 + }, + { + "epoch": 0.03115234375, + "grad_norm": 1.1706202030181885, + "learning_rate": 0.0004996885824123087, + "loss": 5.2344, + "step": 319 + }, + { + "epoch": 0.03125, + "grad_norm": 1.116774082183838, + "learning_rate": 0.0004996848869564986, + "loss": 5.2227, + "step": 320 + }, + { + "epoch": 0.03134765625, + "grad_norm": 1.3174951076507568, + "learning_rate": 0.0004996811697190137, + "loss": 5.207, + "step": 321 + }, + { + "epoch": 0.0314453125, + "grad_norm": 1.7159594297409058, + "learning_rate": 0.0004996774307002142, + "loss": 5.2305, + "step": 322 + }, + { + "epoch": 0.03154296875, + "grad_norm": 1.1668405532836914, + "learning_rate": 0.0004996736699004628, + "loss": 5.2344, + "step": 323 + }, + { + "epoch": 0.031640625, + "grad_norm": 1.4254777431488037, + "learning_rate": 0.000499669887320124, + "loss": 5.2188, + "step": 324 + }, + { + "epoch": 0.03173828125, + "grad_norm": 1.4150874614715576, + "learning_rate": 0.0004996660829595643, + "loss": 5.2461, + "step": 325 + }, + { + "epoch": 0.0318359375, + "grad_norm": 1.4464291334152222, + "learning_rate": 0.0004996622568191529, + "loss": 5.207, + "step": 326 + }, + { + "epoch": 0.03193359375, + 
"grad_norm": 1.3123974800109863, + "learning_rate": 0.0004996584088992603, + "loss": 5.2227, + "step": 327 + }, + { + "epoch": 0.03203125, + "grad_norm": 1.2821930646896362, + "learning_rate": 0.0004996545392002597, + "loss": 5.2344, + "step": 328 + }, + { + "epoch": 0.03212890625, + "grad_norm": 1.6003687381744385, + "learning_rate": 0.0004996506477225264, + "loss": 5.1836, + "step": 329 + }, + { + "epoch": 0.0322265625, + "grad_norm": 1.305853009223938, + "learning_rate": 0.0004996467344664374, + "loss": 5.2266, + "step": 330 + }, + { + "epoch": 0.03232421875, + "grad_norm": 1.6081973314285278, + "learning_rate": 0.0004996427994323723, + "loss": 5.2031, + "step": 331 + }, + { + "epoch": 0.032421875, + "grad_norm": 1.0995270013809204, + "learning_rate": 0.0004996388426207123, + "loss": 5.1797, + "step": 332 + }, + { + "epoch": 0.03251953125, + "grad_norm": 1.1569955348968506, + "learning_rate": 0.0004996348640318412, + "loss": 5.2305, + "step": 333 + }, + { + "epoch": 0.0326171875, + "grad_norm": 1.3775110244750977, + "learning_rate": 0.0004996308636661447, + "loss": 5.2266, + "step": 334 + }, + { + "epoch": 0.03271484375, + "grad_norm": 1.2185978889465332, + "learning_rate": 0.0004996268415240104, + "loss": 5.1641, + "step": 335 + }, + { + "epoch": 0.0328125, + "grad_norm": 1.349692702293396, + "learning_rate": 0.0004996227976058285, + "loss": 5.1914, + "step": 336 + }, + { + "epoch": 0.03291015625, + "grad_norm": 0.9755382537841797, + "learning_rate": 0.0004996187319119908, + "loss": 5.1914, + "step": 337 + }, + { + "epoch": 0.0330078125, + "grad_norm": 1.187360405921936, + "learning_rate": 0.0004996146444428916, + "loss": 5.1875, + "step": 338 + }, + { + "epoch": 0.03310546875, + "grad_norm": 1.2067431211471558, + "learning_rate": 0.000499610535198927, + "loss": 5.2109, + "step": 339 + }, + { + "epoch": 0.033203125, + "grad_norm": 1.2557358741760254, + "learning_rate": 0.0004996064041804956, + "loss": 5.1953, + "step": 340 + }, + { + "epoch": 0.03330078125, + "grad_norm": 1.191425085067749, + "learning_rate": 0.0004996022513879977, + "loss": 5.1719, + "step": 341 + }, + { + "epoch": 0.0333984375, + "grad_norm": 0.985287606716156, + "learning_rate": 0.0004995980768218358, + "loss": 5.1719, + "step": 342 + }, + { + "epoch": 0.03349609375, + "grad_norm": 2.132200002670288, + "learning_rate": 0.0004995938804824146, + "loss": 5.2109, + "step": 343 + }, + { + "epoch": 0.03359375, + "grad_norm": 0.8363533616065979, + "learning_rate": 0.0004995896623701412, + "loss": 5.1914, + "step": 344 + }, + { + "epoch": 0.03369140625, + "grad_norm": 1.090147614479065, + "learning_rate": 0.0004995854224854243, + "loss": 5.2031, + "step": 345 + }, + { + "epoch": 0.0337890625, + "grad_norm": 1.313947319984436, + "learning_rate": 0.000499581160828675, + "loss": 5.1523, + "step": 346 + }, + { + "epoch": 0.03388671875, + "grad_norm": 1.3441343307495117, + "learning_rate": 0.0004995768774003062, + "loss": 5.1797, + "step": 347 + }, + { + "epoch": 0.033984375, + "grad_norm": 1.4852529764175415, + "learning_rate": 0.0004995725722007335, + "loss": 5.2031, + "step": 348 + }, + { + "epoch": 0.03408203125, + "grad_norm": 1.9018231630325317, + "learning_rate": 0.0004995682452303741, + "loss": 5.1797, + "step": 349 + }, + { + "epoch": 0.0341796875, + "grad_norm": 0.8266469836235046, + "learning_rate": 0.0004995638964896475, + "loss": 5.1758, + "step": 350 + }, + { + "epoch": 0.03427734375, + "grad_norm": 0.971101701259613, + "learning_rate": 0.0004995595259789752, + "loss": 5.168, + "step": 351 + }, + { + "epoch": 
0.034375, + "grad_norm": 1.2720019817352295, + "learning_rate": 0.0004995551336987809, + "loss": 5.1914, + "step": 352 + }, + { + "epoch": 0.03447265625, + "grad_norm": 1.524877667427063, + "learning_rate": 0.0004995507196494905, + "loss": 5.1836, + "step": 353 + }, + { + "epoch": 0.0345703125, + "grad_norm": 1.155196189880371, + "learning_rate": 0.0004995462838315319, + "loss": 5.1484, + "step": 354 + }, + { + "epoch": 0.03466796875, + "grad_norm": 1.2725322246551514, + "learning_rate": 0.0004995418262453351, + "loss": 5.1562, + "step": 355 + }, + { + "epoch": 0.034765625, + "grad_norm": 1.064315915107727, + "learning_rate": 0.000499537346891332, + "loss": 5.1641, + "step": 356 + }, + { + "epoch": 0.03486328125, + "grad_norm": 1.1174203157424927, + "learning_rate": 0.0004995328457699573, + "loss": 5.1289, + "step": 357 + }, + { + "epoch": 0.0349609375, + "grad_norm": 1.5893704891204834, + "learning_rate": 0.000499528322881647, + "loss": 5.1523, + "step": 358 + }, + { + "epoch": 0.03505859375, + "grad_norm": 1.3740280866622925, + "learning_rate": 0.0004995237782268397, + "loss": 5.1289, + "step": 359 + }, + { + "epoch": 0.03515625, + "grad_norm": 1.560889720916748, + "learning_rate": 0.000499519211805976, + "loss": 5.1875, + "step": 360 + }, + { + "epoch": 0.03525390625, + "grad_norm": 1.1352694034576416, + "learning_rate": 0.0004995146236194984, + "loss": 5.1758, + "step": 361 + }, + { + "epoch": 0.0353515625, + "grad_norm": 1.4046801328659058, + "learning_rate": 0.0004995100136678519, + "loss": 5.1758, + "step": 362 + }, + { + "epoch": 0.03544921875, + "grad_norm": 1.10200035572052, + "learning_rate": 0.0004995053819514834, + "loss": 5.1523, + "step": 363 + }, + { + "epoch": 0.035546875, + "grad_norm": 1.179033875465393, + "learning_rate": 0.0004995007284708418, + "loss": 5.1367, + "step": 364 + }, + { + "epoch": 0.03564453125, + "grad_norm": 1.1701732873916626, + "learning_rate": 0.0004994960532263782, + "loss": 5.1406, + "step": 365 + }, + { + "epoch": 0.0357421875, + "grad_norm": 1.096832513809204, + "learning_rate": 0.000499491356218546, + "loss": 5.1484, + "step": 366 + }, + { + "epoch": 0.03583984375, + "grad_norm": 1.4660563468933105, + "learning_rate": 0.0004994866374478003, + "loss": 5.1562, + "step": 367 + }, + { + "epoch": 0.0359375, + "grad_norm": 1.2725329399108887, + "learning_rate": 0.0004994818969145989, + "loss": 5.1719, + "step": 368 + }, + { + "epoch": 0.03603515625, + "grad_norm": 1.5378856658935547, + "learning_rate": 0.000499477134619401, + "loss": 5.1758, + "step": 369 + }, + { + "epoch": 0.0361328125, + "grad_norm": 1.0054930448532104, + "learning_rate": 0.0004994723505626685, + "loss": 5.2031, + "step": 370 + }, + { + "epoch": 0.03623046875, + "grad_norm": 1.3905240297317505, + "learning_rate": 0.0004994675447448651, + "loss": 5.1562, + "step": 371 + }, + { + "epoch": 0.036328125, + "grad_norm": 1.2946956157684326, + "learning_rate": 0.0004994627171664565, + "loss": 5.1641, + "step": 372 + }, + { + "epoch": 0.03642578125, + "grad_norm": 0.8594855070114136, + "learning_rate": 0.0004994578678279112, + "loss": 5.1133, + "step": 373 + }, + { + "epoch": 0.0365234375, + "grad_norm": 1.0982156991958618, + "learning_rate": 0.0004994529967296989, + "loss": 5.1445, + "step": 374 + }, + { + "epoch": 0.03662109375, + "grad_norm": 1.197832703590393, + "learning_rate": 0.0004994481038722919, + "loss": 5.1875, + "step": 375 + }, + { + "epoch": 0.03671875, + "grad_norm": 0.903097927570343, + "learning_rate": 0.0004994431892561646, + "loss": 5.1523, + "step": 376 + }, + { + 
"epoch": 0.03681640625, + "grad_norm": 1.064278483390808, + "learning_rate": 0.0004994382528817935, + "loss": 5.1602, + "step": 377 + }, + { + "epoch": 0.0369140625, + "grad_norm": 1.0776413679122925, + "learning_rate": 0.0004994332947496568, + "loss": 5.1406, + "step": 378 + }, + { + "epoch": 0.03701171875, + "grad_norm": 0.99156254529953, + "learning_rate": 0.0004994283148602357, + "loss": 5.1484, + "step": 379 + }, + { + "epoch": 0.037109375, + "grad_norm": 1.25286066532135, + "learning_rate": 0.0004994233132140125, + "loss": 5.1367, + "step": 380 + }, + { + "epoch": 0.03720703125, + "grad_norm": 0.9926852583885193, + "learning_rate": 0.0004994182898114722, + "loss": 5.1328, + "step": 381 + }, + { + "epoch": 0.0373046875, + "grad_norm": 1.0031254291534424, + "learning_rate": 0.0004994132446531019, + "loss": 5.125, + "step": 382 + }, + { + "epoch": 0.03740234375, + "grad_norm": 1.1352587938308716, + "learning_rate": 0.0004994081777393907, + "loss": 5.1719, + "step": 383 + }, + { + "epoch": 0.0375, + "grad_norm": 1.145836591720581, + "learning_rate": 0.0004994030890708297, + "loss": 5.1211, + "step": 384 + }, + { + "epoch": 0.03759765625, + "grad_norm": 1.125534176826477, + "learning_rate": 0.0004993979786479121, + "loss": 5.125, + "step": 385 + }, + { + "epoch": 0.0376953125, + "grad_norm": 0.9138064384460449, + "learning_rate": 0.0004993928464711337, + "loss": 5.1289, + "step": 386 + }, + { + "epoch": 0.03779296875, + "grad_norm": 1.0376123189926147, + "learning_rate": 0.0004993876925409917, + "loss": 5.1328, + "step": 387 + }, + { + "epoch": 0.037890625, + "grad_norm": 1.0181726217269897, + "learning_rate": 0.0004993825168579857, + "loss": 5.1289, + "step": 388 + }, + { + "epoch": 0.03798828125, + "grad_norm": 1.191735029220581, + "learning_rate": 0.0004993773194226178, + "loss": 5.1172, + "step": 389 + }, + { + "epoch": 0.0380859375, + "grad_norm": 1.0628931522369385, + "learning_rate": 0.0004993721002353915, + "loss": 5.1133, + "step": 390 + }, + { + "epoch": 0.03818359375, + "grad_norm": 1.2050843238830566, + "learning_rate": 0.0004993668592968129, + "loss": 5.1172, + "step": 391 + }, + { + "epoch": 0.03828125, + "grad_norm": 0.8847583532333374, + "learning_rate": 0.0004993615966073902, + "loss": 5.1328, + "step": 392 + }, + { + "epoch": 0.03837890625, + "grad_norm": 0.9037215709686279, + "learning_rate": 0.0004993563121676332, + "loss": 5.1406, + "step": 393 + }, + { + "epoch": 0.0384765625, + "grad_norm": 0.8788461685180664, + "learning_rate": 0.0004993510059780546, + "loss": 5.1523, + "step": 394 + }, + { + "epoch": 0.03857421875, + "grad_norm": 0.788327693939209, + "learning_rate": 0.0004993456780391686, + "loss": 5.1016, + "step": 395 + }, + { + "epoch": 0.038671875, + "grad_norm": 0.874692440032959, + "learning_rate": 0.0004993403283514919, + "loss": 5.0938, + "step": 396 + }, + { + "epoch": 0.03876953125, + "grad_norm": 1.0633156299591064, + "learning_rate": 0.0004993349569155428, + "loss": 5.1289, + "step": 397 + }, + { + "epoch": 0.0388671875, + "grad_norm": 1.076167106628418, + "learning_rate": 0.0004993295637318423, + "loss": 5.1172, + "step": 398 + }, + { + "epoch": 0.03896484375, + "grad_norm": 0.9785029292106628, + "learning_rate": 0.0004993241488009131, + "loss": 5.0977, + "step": 399 + }, + { + "epoch": 0.0390625, + "grad_norm": 1.0597959756851196, + "learning_rate": 0.0004993187121232801, + "loss": 5.1094, + "step": 400 + }, + { + "epoch": 0.03916015625, + "grad_norm": 1.2436349391937256, + "learning_rate": 0.0004993132536994705, + "loss": 5.1289, + "step": 401 + }, 
+ { + "epoch": 0.0392578125, + "grad_norm": 1.0709871053695679, + "learning_rate": 0.0004993077735300133, + "loss": 5.1328, + "step": 402 + }, + { + "epoch": 0.03935546875, + "grad_norm": 1.018754005432129, + "learning_rate": 0.0004993022716154399, + "loss": 5.1094, + "step": 403 + }, + { + "epoch": 0.039453125, + "grad_norm": 0.8745573163032532, + "learning_rate": 0.0004992967479562836, + "loss": 5.1172, + "step": 404 + }, + { + "epoch": 0.03955078125, + "grad_norm": 0.8379338979721069, + "learning_rate": 0.0004992912025530799, + "loss": 5.1406, + "step": 405 + }, + { + "epoch": 0.0396484375, + "grad_norm": 0.8244563341140747, + "learning_rate": 0.0004992856354063663, + "loss": 5.1328, + "step": 406 + }, + { + "epoch": 0.03974609375, + "grad_norm": 1.0282996892929077, + "learning_rate": 0.0004992800465166826, + "loss": 5.1016, + "step": 407 + }, + { + "epoch": 0.03984375, + "grad_norm": 0.8590074777603149, + "learning_rate": 0.0004992744358845707, + "loss": 5.1094, + "step": 408 + }, + { + "epoch": 0.03994140625, + "grad_norm": 0.7324565052986145, + "learning_rate": 0.0004992688035105744, + "loss": 5.1094, + "step": 409 + }, + { + "epoch": 0.0400390625, + "grad_norm": 0.8005876541137695, + "learning_rate": 0.0004992631493952395, + "loss": 5.1094, + "step": 410 + }, + { + "epoch": 0.04013671875, + "grad_norm": 1.217882513999939, + "learning_rate": 0.0004992574735391144, + "loss": 5.0938, + "step": 411 + }, + { + "epoch": 0.040234375, + "grad_norm": 1.186151385307312, + "learning_rate": 0.0004992517759427494, + "loss": 5.1094, + "step": 412 + }, + { + "epoch": 0.04033203125, + "grad_norm": 0.851876974105835, + "learning_rate": 0.0004992460566066967, + "loss": 5.0781, + "step": 413 + }, + { + "epoch": 0.0404296875, + "grad_norm": 0.8220931887626648, + "learning_rate": 0.0004992403155315107, + "loss": 5.0977, + "step": 414 + }, + { + "epoch": 0.04052734375, + "grad_norm": 0.848565399646759, + "learning_rate": 0.0004992345527177482, + "loss": 5.0898, + "step": 415 + }, + { + "epoch": 0.040625, + "grad_norm": 1.1592659950256348, + "learning_rate": 0.0004992287681659676, + "loss": 5.0781, + "step": 416 + }, + { + "epoch": 0.04072265625, + "grad_norm": 1.0059559345245361, + "learning_rate": 0.0004992229618767298, + "loss": 5.0781, + "step": 417 + }, + { + "epoch": 0.0408203125, + "grad_norm": 0.9418927431106567, + "learning_rate": 0.0004992171338505975, + "loss": 5.0898, + "step": 418 + }, + { + "epoch": 0.04091796875, + "grad_norm": 0.8450960516929626, + "learning_rate": 0.0004992112840881359, + "loss": 5.0898, + "step": 419 + }, + { + "epoch": 0.041015625, + "grad_norm": 0.739745557308197, + "learning_rate": 0.0004992054125899122, + "loss": 5.0625, + "step": 420 + }, + { + "epoch": 0.04111328125, + "grad_norm": 0.6499938368797302, + "learning_rate": 0.0004991995193564953, + "loss": 5.0742, + "step": 421 + }, + { + "epoch": 0.0412109375, + "grad_norm": 0.5470448136329651, + "learning_rate": 0.0004991936043884566, + "loss": 5.0859, + "step": 422 + }, + { + "epoch": 0.04130859375, + "grad_norm": 0.582874059677124, + "learning_rate": 0.0004991876676863697, + "loss": 5.0664, + "step": 423 + }, + { + "epoch": 0.04140625, + "grad_norm": 0.61722731590271, + "learning_rate": 0.0004991817092508099, + "loss": 5.0898, + "step": 424 + }, + { + "epoch": 0.04150390625, + "grad_norm": 0.6884714365005493, + "learning_rate": 0.0004991757290823548, + "loss": 5.0703, + "step": 425 + }, + { + "epoch": 0.0416015625, + "grad_norm": 0.8316398859024048, + "learning_rate": 0.0004991697271815845, + "loss": 5.1367, + 
"step": 426 + }, + { + "epoch": 0.04169921875, + "grad_norm": 0.9839829802513123, + "learning_rate": 0.0004991637035490805, + "loss": 5.0703, + "step": 427 + }, + { + "epoch": 0.041796875, + "grad_norm": 1.0404824018478394, + "learning_rate": 0.0004991576581854267, + "loss": 5.0586, + "step": 428 + }, + { + "epoch": 0.04189453125, + "grad_norm": 0.8250628709793091, + "learning_rate": 0.0004991515910912095, + "loss": 5.0625, + "step": 429 + }, + { + "epoch": 0.0419921875, + "grad_norm": 0.7997605204582214, + "learning_rate": 0.0004991455022670168, + "loss": 5.0938, + "step": 430 + }, + { + "epoch": 0.04208984375, + "grad_norm": 0.6865594983100891, + "learning_rate": 0.0004991393917134388, + "loss": 5.0781, + "step": 431 + }, + { + "epoch": 0.0421875, + "grad_norm": 0.6902148723602295, + "learning_rate": 0.0004991332594310682, + "loss": 5.0586, + "step": 432 + }, + { + "epoch": 0.04228515625, + "grad_norm": 0.6424334645271301, + "learning_rate": 0.0004991271054204992, + "loss": 5.0742, + "step": 433 + }, + { + "epoch": 0.0423828125, + "grad_norm": 0.6057596802711487, + "learning_rate": 0.0004991209296823285, + "loss": 5.0625, + "step": 434 + }, + { + "epoch": 0.04248046875, + "grad_norm": 0.6817944645881653, + "learning_rate": 0.0004991147322171548, + "loss": 5.082, + "step": 435 + }, + { + "epoch": 0.042578125, + "grad_norm": 0.7707593441009521, + "learning_rate": 0.0004991085130255789, + "loss": 5.0586, + "step": 436 + }, + { + "epoch": 0.04267578125, + "grad_norm": 0.9107893109321594, + "learning_rate": 0.0004991022721082037, + "loss": 5.0898, + "step": 437 + }, + { + "epoch": 0.0427734375, + "grad_norm": 1.1807111501693726, + "learning_rate": 0.0004990960094656342, + "loss": 5.0195, + "step": 438 + }, + { + "epoch": 0.04287109375, + "grad_norm": 1.1950851678848267, + "learning_rate": 0.0004990897250984774, + "loss": 5.0469, + "step": 439 + }, + { + "epoch": 0.04296875, + "grad_norm": 0.7908074855804443, + "learning_rate": 0.0004990834190073428, + "loss": 5.1016, + "step": 440 + }, + { + "epoch": 0.04306640625, + "grad_norm": 0.7892453670501709, + "learning_rate": 0.0004990770911928416, + "loss": 5.0938, + "step": 441 + }, + { + "epoch": 0.0431640625, + "grad_norm": 0.7331576347351074, + "learning_rate": 0.0004990707416555871, + "loss": 5.0508, + "step": 442 + }, + { + "epoch": 0.04326171875, + "grad_norm": 0.7229627966880798, + "learning_rate": 0.000499064370396195, + "loss": 5.0234, + "step": 443 + }, + { + "epoch": 0.043359375, + "grad_norm": 0.8491083979606628, + "learning_rate": 0.000499057977415283, + "loss": 5.0859, + "step": 444 + }, + { + "epoch": 0.04345703125, + "grad_norm": 1.1333506107330322, + "learning_rate": 0.0004990515627134706, + "loss": 5.0859, + "step": 445 + }, + { + "epoch": 0.0435546875, + "grad_norm": 1.0400718450546265, + "learning_rate": 0.00049904512629138, + "loss": 5.0703, + "step": 446 + }, + { + "epoch": 0.04365234375, + "grad_norm": 0.8677104115486145, + "learning_rate": 0.000499038668149635, + "loss": 5.0273, + "step": 447 + }, + { + "epoch": 0.04375, + "grad_norm": 1.0702966451644897, + "learning_rate": 0.0004990321882888617, + "loss": 5.0664, + "step": 448 + }, + { + "epoch": 0.04384765625, + "grad_norm": 0.9019585847854614, + "learning_rate": 0.0004990256867096881, + "loss": 5.0352, + "step": 449 + }, + { + "epoch": 0.0439453125, + "grad_norm": 0.7547096014022827, + "learning_rate": 0.0004990191634127448, + "loss": 5.0664, + "step": 450 + }, + { + "epoch": 0.04404296875, + "grad_norm": 0.6731748580932617, + "learning_rate": 0.0004990126183986639, + 
"loss": 5.0625, + "step": 451 + }, + { + "epoch": 0.044140625, + "grad_norm": 0.738862931728363, + "learning_rate": 0.0004990060516680802, + "loss": 5.082, + "step": 452 + }, + { + "epoch": 0.04423828125, + "grad_norm": 0.7783480286598206, + "learning_rate": 0.00049899946322163, + "loss": 5.0703, + "step": 453 + }, + { + "epoch": 0.0443359375, + "grad_norm": 0.7385779023170471, + "learning_rate": 0.0004989928530599521, + "loss": 5.0547, + "step": 454 + }, + { + "epoch": 0.04443359375, + "grad_norm": 1.394942283630371, + "learning_rate": 0.0004989862211836873, + "loss": 5.0547, + "step": 455 + }, + { + "epoch": 0.04453125, + "grad_norm": 0.5369975566864014, + "learning_rate": 0.0004989795675934787, + "loss": 5.0781, + "step": 456 + }, + { + "epoch": 0.04462890625, + "grad_norm": 0.6641459465026855, + "learning_rate": 0.0004989728922899712, + "loss": 5.0117, + "step": 457 + }, + { + "epoch": 0.0447265625, + "grad_norm": 0.7093449831008911, + "learning_rate": 0.0004989661952738118, + "loss": 5.0586, + "step": 458 + }, + { + "epoch": 0.04482421875, + "grad_norm": 0.8085068464279175, + "learning_rate": 0.0004989594765456498, + "loss": 5.0508, + "step": 459 + }, + { + "epoch": 0.044921875, + "grad_norm": 1.188184142112732, + "learning_rate": 0.0004989527361061366, + "loss": 5.0391, + "step": 460 + }, + { + "epoch": 0.04501953125, + "grad_norm": 0.9588766098022461, + "learning_rate": 0.0004989459739559257, + "loss": 5.0195, + "step": 461 + }, + { + "epoch": 0.0451171875, + "grad_norm": 0.8630503416061401, + "learning_rate": 0.0004989391900956725, + "loss": 5.0195, + "step": 462 + }, + { + "epoch": 0.04521484375, + "grad_norm": 0.8916110396385193, + "learning_rate": 0.0004989323845260346, + "loss": 5.0586, + "step": 463 + }, + { + "epoch": 0.0453125, + "grad_norm": 0.8974289298057556, + "learning_rate": 0.0004989255572476719, + "loss": 5.0547, + "step": 464 + }, + { + "epoch": 0.04541015625, + "grad_norm": 0.9970776438713074, + "learning_rate": 0.0004989187082612462, + "loss": 5.0469, + "step": 465 + }, + { + "epoch": 0.0455078125, + "grad_norm": 0.885099470615387, + "learning_rate": 0.0004989118375674216, + "loss": 5.043, + "step": 466 + }, + { + "epoch": 0.04560546875, + "grad_norm": 0.659532368183136, + "learning_rate": 0.0004989049451668639, + "loss": 5.0547, + "step": 467 + }, + { + "epoch": 0.045703125, + "grad_norm": 0.8999412655830383, + "learning_rate": 0.0004988980310602415, + "loss": 5.0117, + "step": 468 + }, + { + "epoch": 0.04580078125, + "grad_norm": 0.7855193018913269, + "learning_rate": 0.0004988910952482246, + "loss": 5.0859, + "step": 469 + }, + { + "epoch": 0.0458984375, + "grad_norm": 0.9827584028244019, + "learning_rate": 0.0004988841377314855, + "loss": 5.0586, + "step": 470 + }, + { + "epoch": 0.04599609375, + "grad_norm": 1.13511323928833, + "learning_rate": 0.0004988771585106988, + "loss": 5.0547, + "step": 471 + }, + { + "epoch": 0.04609375, + "grad_norm": 0.8385241627693176, + "learning_rate": 0.000498870157586541, + "loss": 5.0586, + "step": 472 + }, + { + "epoch": 0.04619140625, + "grad_norm": 0.7410266995429993, + "learning_rate": 0.0004988631349596909, + "loss": 5.0664, + "step": 473 + }, + { + "epoch": 0.0462890625, + "grad_norm": 0.6939350962638855, + "learning_rate": 0.0004988560906308291, + "loss": 5.043, + "step": 474 + }, + { + "epoch": 0.04638671875, + "grad_norm": 0.7858873605728149, + "learning_rate": 0.0004988490246006388, + "loss": 5.0469, + "step": 475 + }, + { + "epoch": 0.046484375, + "grad_norm": 1.0619468688964844, + "learning_rate": 
0.0004988419368698047, + "loss": 5.0234, + "step": 476 + }, + { + "epoch": 0.04658203125, + "grad_norm": 1.3020342588424683, + "learning_rate": 0.0004988348274390141, + "loss": 5.0469, + "step": 477 + }, + { + "epoch": 0.0466796875, + "grad_norm": 0.7949604988098145, + "learning_rate": 0.0004988276963089561, + "loss": 5.0391, + "step": 478 + }, + { + "epoch": 0.04677734375, + "grad_norm": 0.8782525658607483, + "learning_rate": 0.0004988205434803222, + "loss": 5.043, + "step": 479 + }, + { + "epoch": 0.046875, + "grad_norm": 0.9638606309890747, + "learning_rate": 0.0004988133689538055, + "loss": 5.0703, + "step": 480 + }, + { + "epoch": 0.04697265625, + "grad_norm": 0.8633697032928467, + "learning_rate": 0.0004988061727301019, + "loss": 5.0, + "step": 481 + }, + { + "epoch": 0.0470703125, + "grad_norm": 0.719272255897522, + "learning_rate": 0.0004987989548099087, + "loss": 5.043, + "step": 482 + }, + { + "epoch": 0.04716796875, + "grad_norm": 0.7947590947151184, + "learning_rate": 0.0004987917151939257, + "loss": 5.0273, + "step": 483 + }, + { + "epoch": 0.047265625, + "grad_norm": 0.8721004128456116, + "learning_rate": 0.000498784453882855, + "loss": 5.0469, + "step": 484 + }, + { + "epoch": 0.04736328125, + "grad_norm": 0.840782880783081, + "learning_rate": 0.0004987771708774001, + "loss": 5.0273, + "step": 485 + }, + { + "epoch": 0.0474609375, + "grad_norm": 0.6539798974990845, + "learning_rate": 0.0004987698661782674, + "loss": 5.0273, + "step": 486 + }, + { + "epoch": 0.04755859375, + "grad_norm": 0.6629400253295898, + "learning_rate": 0.0004987625397861649, + "loss": 5.0273, + "step": 487 + }, + { + "epoch": 0.04765625, + "grad_norm": 0.69793301820755, + "learning_rate": 0.0004987551917018028, + "loss": 4.9961, + "step": 488 + }, + { + "epoch": 0.04775390625, + "grad_norm": 0.8525344133377075, + "learning_rate": 0.0004987478219258936, + "loss": 5.0508, + "step": 489 + }, + { + "epoch": 0.0478515625, + "grad_norm": 0.9601410627365112, + "learning_rate": 0.0004987404304591514, + "loss": 5.0391, + "step": 490 + }, + { + "epoch": 0.04794921875, + "grad_norm": 0.8520921468734741, + "learning_rate": 0.0004987330173022932, + "loss": 5.0273, + "step": 491 + }, + { + "epoch": 0.048046875, + "grad_norm": 0.8635746240615845, + "learning_rate": 0.0004987255824560374, + "loss": 5.0234, + "step": 492 + }, + { + "epoch": 0.04814453125, + "grad_norm": 0.998202383518219, + "learning_rate": 0.0004987181259211048, + "loss": 5.0273, + "step": 493 + }, + { + "epoch": 0.0482421875, + "grad_norm": 1.2011066675186157, + "learning_rate": 0.0004987106476982184, + "loss": 5.0312, + "step": 494 + }, + { + "epoch": 0.04833984375, + "grad_norm": 0.7638375163078308, + "learning_rate": 0.0004987031477881029, + "loss": 5.0312, + "step": 495 + }, + { + "epoch": 0.0484375, + "grad_norm": 0.70158451795578, + "learning_rate": 0.0004986956261914856, + "loss": 5.0156, + "step": 496 + }, + { + "epoch": 0.04853515625, + "grad_norm": 0.7986351847648621, + "learning_rate": 0.0004986880829090954, + "loss": 5.0234, + "step": 497 + }, + { + "epoch": 0.0486328125, + "grad_norm": 0.8812294006347656, + "learning_rate": 0.0004986805179416638, + "loss": 5.0156, + "step": 498 + }, + { + "epoch": 0.04873046875, + "grad_norm": 0.8842159509658813, + "learning_rate": 0.0004986729312899242, + "loss": 5.0156, + "step": 499 + }, + { + "epoch": 0.048828125, + "grad_norm": 0.8196664452552795, + "learning_rate": 0.000498665322954612, + "loss": 5.0078, + "step": 500 + }, + { + "epoch": 0.04892578125, + "grad_norm": 0.774849534034729, + 
"learning_rate": 0.0004986576929364646, + "loss": 5.0273, + "step": 501 + }, + { + "epoch": 0.0490234375, + "grad_norm": 0.6376418471336365, + "learning_rate": 0.0004986500412362219, + "loss": 5.0234, + "step": 502 + }, + { + "epoch": 0.04912109375, + "grad_norm": 0.49704745411872864, + "learning_rate": 0.0004986423678546257, + "loss": 4.9922, + "step": 503 + }, + { + "epoch": 0.04921875, + "grad_norm": 0.49575215578079224, + "learning_rate": 0.0004986346727924197, + "loss": 5.0234, + "step": 504 + }, + { + "epoch": 0.04931640625, + "grad_norm": 0.5091367363929749, + "learning_rate": 0.0004986269560503501, + "loss": 4.9961, + "step": 505 + }, + { + "epoch": 0.0494140625, + "grad_norm": 0.7224910855293274, + "learning_rate": 0.0004986192176291647, + "loss": 4.9922, + "step": 506 + }, + { + "epoch": 0.04951171875, + "grad_norm": 0.8590059280395508, + "learning_rate": 0.0004986114575296141, + "loss": 5.0234, + "step": 507 + }, + { + "epoch": 0.049609375, + "grad_norm": 0.7906845808029175, + "learning_rate": 0.0004986036757524501, + "loss": 5.0195, + "step": 508 + }, + { + "epoch": 0.04970703125, + "grad_norm": 0.6573899388313293, + "learning_rate": 0.0004985958722984275, + "loss": 5.0117, + "step": 509 + }, + { + "epoch": 0.0498046875, + "grad_norm": 0.5708054900169373, + "learning_rate": 0.0004985880471683026, + "loss": 5.0, + "step": 510 + }, + { + "epoch": 0.04990234375, + "grad_norm": 0.6391012668609619, + "learning_rate": 0.000498580200362834, + "loss": 5.0156, + "step": 511 + }, + { + "epoch": 0.05, + "grad_norm": 0.6893157362937927, + "learning_rate": 0.0004985723318827824, + "loss": 5.0078, + "step": 512 + }, + { + "epoch": 0.05009765625, + "grad_norm": 0.7473337054252625, + "learning_rate": 0.0004985644417289107, + "loss": 5.0352, + "step": 513 + }, + { + "epoch": 0.0501953125, + "grad_norm": 0.6992818713188171, + "learning_rate": 0.0004985565299019836, + "loss": 4.9766, + "step": 514 + }, + { + "epoch": 0.05029296875, + "grad_norm": 0.5503939986228943, + "learning_rate": 0.0004985485964027682, + "loss": 5.0156, + "step": 515 + }, + { + "epoch": 0.050390625, + "grad_norm": 0.5531403422355652, + "learning_rate": 0.0004985406412320337, + "loss": 5.0234, + "step": 516 + }, + { + "epoch": 0.05048828125, + "grad_norm": 0.5232397317886353, + "learning_rate": 0.0004985326643905511, + "loss": 4.9922, + "step": 517 + }, + { + "epoch": 0.0505859375, + "grad_norm": 0.5153787732124329, + "learning_rate": 0.0004985246658790939, + "loss": 4.9922, + "step": 518 + }, + { + "epoch": 0.05068359375, + "grad_norm": 0.7395737767219543, + "learning_rate": 0.0004985166456984375, + "loss": 4.9961, + "step": 519 + }, + { + "epoch": 0.05078125, + "grad_norm": 0.7297258973121643, + "learning_rate": 0.0004985086038493591, + "loss": 5.0195, + "step": 520 + }, + { + "epoch": 0.05087890625, + "grad_norm": 0.8792772889137268, + "learning_rate": 0.0004985005403326387, + "loss": 5.0195, + "step": 521 + }, + { + "epoch": 0.0509765625, + "grad_norm": 0.8485317230224609, + "learning_rate": 0.0004984924551490577, + "loss": 5.0039, + "step": 522 + }, + { + "epoch": 0.05107421875, + "grad_norm": 0.7504170536994934, + "learning_rate": 0.0004984843482994001, + "loss": 5.0391, + "step": 523 + }, + { + "epoch": 0.051171875, + "grad_norm": 0.6931923031806946, + "learning_rate": 0.0004984762197844516, + "loss": 4.9883, + "step": 524 + }, + { + "epoch": 0.05126953125, + "grad_norm": 0.7201303839683533, + "learning_rate": 0.0004984680696050004, + "loss": 4.9844, + "step": 525 + }, + { + "epoch": 0.0513671875, + "grad_norm": 
0.8321727514266968, + "learning_rate": 0.0004984598977618366, + "loss": 5.0234, + "step": 526 + }, + { + "epoch": 0.05146484375, + "grad_norm": 1.0283933877944946, + "learning_rate": 0.0004984517042557523, + "loss": 5.0156, + "step": 527 + }, + { + "epoch": 0.0515625, + "grad_norm": 1.3810787200927734, + "learning_rate": 0.0004984434890875417, + "loss": 5.0273, + "step": 528 + }, + { + "epoch": 0.05166015625, + "grad_norm": 0.75733482837677, + "learning_rate": 0.0004984352522580015, + "loss": 4.9844, + "step": 529 + }, + { + "epoch": 0.0517578125, + "grad_norm": 0.7140693068504333, + "learning_rate": 0.0004984269937679301, + "loss": 4.9961, + "step": 530 + }, + { + "epoch": 0.05185546875, + "grad_norm": 0.9317624568939209, + "learning_rate": 0.000498418713618128, + "loss": 4.9805, + "step": 531 + }, + { + "epoch": 0.051953125, + "grad_norm": 0.8527510762214661, + "learning_rate": 0.0004984104118093979, + "loss": 4.9961, + "step": 532 + }, + { + "epoch": 0.05205078125, + "grad_norm": 0.8085970878601074, + "learning_rate": 0.0004984020883425447, + "loss": 4.9883, + "step": 533 + }, + { + "epoch": 0.0521484375, + "grad_norm": 0.8414132595062256, + "learning_rate": 0.0004983937432183754, + "loss": 4.9727, + "step": 534 + }, + { + "epoch": 0.05224609375, + "grad_norm": 0.7883987426757812, + "learning_rate": 0.0004983853764376987, + "loss": 4.9766, + "step": 535 + }, + { + "epoch": 0.05234375, + "grad_norm": 0.8301931619644165, + "learning_rate": 0.000498376988001326, + "loss": 4.9688, + "step": 536 + }, + { + "epoch": 0.05244140625, + "grad_norm": 0.815565288066864, + "learning_rate": 0.0004983685779100702, + "loss": 5.0273, + "step": 537 + }, + { + "epoch": 0.0525390625, + "grad_norm": 0.7249122262001038, + "learning_rate": 0.0004983601461647469, + "loss": 5.0234, + "step": 538 + }, + { + "epoch": 0.05263671875, + "grad_norm": 0.6938775181770325, + "learning_rate": 0.0004983516927661733, + "loss": 4.9805, + "step": 539 + }, + { + "epoch": 0.052734375, + "grad_norm": 0.6271894574165344, + "learning_rate": 0.0004983432177151691, + "loss": 4.9922, + "step": 540 + }, + { + "epoch": 0.05283203125, + "grad_norm": 0.5879946351051331, + "learning_rate": 0.0004983347210125558, + "loss": 4.9883, + "step": 541 + }, + { + "epoch": 0.0529296875, + "grad_norm": 0.6193601489067078, + "learning_rate": 0.0004983262026591569, + "loss": 4.9688, + "step": 542 + }, + { + "epoch": 0.05302734375, + "grad_norm": 0.7117279767990112, + "learning_rate": 0.0004983176626557986, + "loss": 5.0195, + "step": 543 + }, + { + "epoch": 0.053125, + "grad_norm": 0.6014550924301147, + "learning_rate": 0.0004983091010033083, + "loss": 5.0, + "step": 544 + }, + { + "epoch": 0.05322265625, + "grad_norm": 0.5970187783241272, + "learning_rate": 0.0004983005177025164, + "loss": 4.9688, + "step": 545 + }, + { + "epoch": 0.0533203125, + "grad_norm": 0.602258563041687, + "learning_rate": 0.0004982919127542549, + "loss": 5.0156, + "step": 546 + }, + { + "epoch": 0.05341796875, + "grad_norm": 0.5443257689476013, + "learning_rate": 0.0004982832861593579, + "loss": 4.9805, + "step": 547 + }, + { + "epoch": 0.053515625, + "grad_norm": 0.5458142161369324, + "learning_rate": 0.0004982746379186616, + "loss": 4.9531, + "step": 548 + }, + { + "epoch": 0.05361328125, + "grad_norm": 0.5225309729576111, + "learning_rate": 0.0004982659680330047, + "loss": 5.0039, + "step": 549 + }, + { + "epoch": 0.0537109375, + "grad_norm": 0.5642147660255432, + "learning_rate": 0.0004982572765032275, + "loss": 4.9961, + "step": 550 + }, + { + "epoch": 0.05380859375, + 
"grad_norm": 0.6520267724990845, + "learning_rate": 0.0004982485633301725, + "loss": 5.0078, + "step": 551 + }, + { + "epoch": 0.05390625, + "grad_norm": 0.7051550149917603, + "learning_rate": 0.0004982398285146846, + "loss": 5.0, + "step": 552 + }, + { + "epoch": 0.05400390625, + "grad_norm": 0.6687464714050293, + "learning_rate": 0.0004982310720576103, + "loss": 4.9766, + "step": 553 + }, + { + "epoch": 0.0541015625, + "grad_norm": 0.60595703125, + "learning_rate": 0.0004982222939597989, + "loss": 4.957, + "step": 554 + }, + { + "epoch": 0.05419921875, + "grad_norm": 0.5594586133956909, + "learning_rate": 0.0004982134942221009, + "loss": 4.9883, + "step": 555 + }, + { + "epoch": 0.054296875, + "grad_norm": 0.5148446559906006, + "learning_rate": 0.0004982046728453696, + "loss": 4.9844, + "step": 556 + }, + { + "epoch": 0.05439453125, + "grad_norm": 0.5946763753890991, + "learning_rate": 0.0004981958298304602, + "loss": 5.0156, + "step": 557 + }, + { + "epoch": 0.0544921875, + "grad_norm": 0.716316282749176, + "learning_rate": 0.0004981869651782299, + "loss": 4.9805, + "step": 558 + }, + { + "epoch": 0.05458984375, + "grad_norm": 0.794476330280304, + "learning_rate": 0.0004981780788895382, + "loss": 5.0117, + "step": 559 + }, + { + "epoch": 0.0546875, + "grad_norm": 0.7105295658111572, + "learning_rate": 0.0004981691709652464, + "loss": 4.9766, + "step": 560 + }, + { + "epoch": 0.05478515625, + "grad_norm": 0.5912784337997437, + "learning_rate": 0.0004981602414062181, + "loss": 4.9414, + "step": 561 + }, + { + "epoch": 0.0548828125, + "grad_norm": 0.5801540613174438, + "learning_rate": 0.000498151290213319, + "loss": 4.9883, + "step": 562 + }, + { + "epoch": 0.05498046875, + "grad_norm": 0.6369305849075317, + "learning_rate": 0.0004981423173874169, + "loss": 5.0039, + "step": 563 + }, + { + "epoch": 0.055078125, + "grad_norm": 0.6735507845878601, + "learning_rate": 0.0004981333229293816, + "loss": 4.9648, + "step": 564 + }, + { + "epoch": 0.05517578125, + "grad_norm": 0.6315239667892456, + "learning_rate": 0.000498124306840085, + "loss": 5.0078, + "step": 565 + }, + { + "epoch": 0.0552734375, + "grad_norm": 0.710253119468689, + "learning_rate": 0.0004981152691204011, + "loss": 4.9844, + "step": 566 + }, + { + "epoch": 0.05537109375, + "grad_norm": 0.6913606524467468, + "learning_rate": 0.0004981062097712064, + "loss": 4.9766, + "step": 567 + }, + { + "epoch": 0.05546875, + "grad_norm": 0.5899546146392822, + "learning_rate": 0.0004980971287933787, + "loss": 4.9688, + "step": 568 + }, + { + "epoch": 0.05556640625, + "grad_norm": 0.5400623083114624, + "learning_rate": 0.0004980880261877986, + "loss": 4.9805, + "step": 569 + }, + { + "epoch": 0.0556640625, + "grad_norm": 0.5193256139755249, + "learning_rate": 0.0004980789019553484, + "loss": 4.9453, + "step": 570 + }, + { + "epoch": 0.05576171875, + "grad_norm": 0.5622848272323608, + "learning_rate": 0.0004980697560969127, + "loss": 4.957, + "step": 571 + }, + { + "epoch": 0.055859375, + "grad_norm": 0.6163820028305054, + "learning_rate": 0.0004980605886133782, + "loss": 4.9766, + "step": 572 + }, + { + "epoch": 0.05595703125, + "grad_norm": 0.6021759510040283, + "learning_rate": 0.0004980513995056334, + "loss": 4.957, + "step": 573 + }, + { + "epoch": 0.0560546875, + "grad_norm": 0.5321788191795349, + "learning_rate": 0.0004980421887745695, + "loss": 4.9805, + "step": 574 + }, + { + "epoch": 0.05615234375, + "grad_norm": 0.4939315617084503, + "learning_rate": 0.0004980329564210792, + "loss": 5.0195, + "step": 575 + }, + { + "epoch": 0.05625, 
+ "grad_norm": 0.4567241370677948, + "learning_rate": 0.0004980237024460573, + "loss": 4.9648, + "step": 576 + }, + { + "epoch": 0.05634765625, + "grad_norm": 0.44762229919433594, + "learning_rate": 0.0004980144268504012, + "loss": 4.9961, + "step": 577 + }, + { + "epoch": 0.0564453125, + "grad_norm": 0.5556433200836182, + "learning_rate": 0.00049800512963501, + "loss": 4.9844, + "step": 578 + }, + { + "epoch": 0.05654296875, + "grad_norm": 0.6940642595291138, + "learning_rate": 0.0004979958108007851, + "loss": 4.9844, + "step": 579 + }, + { + "epoch": 0.056640625, + "grad_norm": 0.7792614102363586, + "learning_rate": 0.0004979864703486297, + "loss": 4.9648, + "step": 580 + }, + { + "epoch": 0.05673828125, + "grad_norm": 0.8071824908256531, + "learning_rate": 0.0004979771082794495, + "loss": 4.9727, + "step": 581 + }, + { + "epoch": 0.0568359375, + "grad_norm": 0.9120080471038818, + "learning_rate": 0.0004979677245941519, + "loss": 4.9883, + "step": 582 + }, + { + "epoch": 0.05693359375, + "grad_norm": 0.9239040017127991, + "learning_rate": 0.0004979583192936468, + "loss": 4.9609, + "step": 583 + }, + { + "epoch": 0.05703125, + "grad_norm": 1.002406120300293, + "learning_rate": 0.0004979488923788459, + "loss": 4.9766, + "step": 584 + }, + { + "epoch": 0.05712890625, + "grad_norm": 0.8661404848098755, + "learning_rate": 0.0004979394438506629, + "loss": 5.0, + "step": 585 + }, + { + "epoch": 0.0572265625, + "grad_norm": 0.6225184798240662, + "learning_rate": 0.000497929973710014, + "loss": 4.9727, + "step": 586 + }, + { + "epoch": 0.05732421875, + "grad_norm": 0.5944101810455322, + "learning_rate": 0.0004979204819578172, + "loss": 4.9414, + "step": 587 + }, + { + "epoch": 0.057421875, + "grad_norm": 0.5297685265541077, + "learning_rate": 0.0004979109685949926, + "loss": 4.9844, + "step": 588 + }, + { + "epoch": 0.05751953125, + "grad_norm": 0.43607059121131897, + "learning_rate": 0.0004979014336224625, + "loss": 4.9961, + "step": 589 + }, + { + "epoch": 0.0576171875, + "grad_norm": 0.48334574699401855, + "learning_rate": 0.0004978918770411513, + "loss": 4.9648, + "step": 590 + }, + { + "epoch": 0.05771484375, + "grad_norm": 0.5257386565208435, + "learning_rate": 0.0004978822988519853, + "loss": 4.9766, + "step": 591 + }, + { + "epoch": 0.0578125, + "grad_norm": 0.6425731182098389, + "learning_rate": 0.0004978726990558931, + "loss": 4.9492, + "step": 592 + }, + { + "epoch": 0.05791015625, + "grad_norm": 1.0080450773239136, + "learning_rate": 0.0004978630776538056, + "loss": 4.9727, + "step": 593 + }, + { + "epoch": 0.0580078125, + "grad_norm": 1.2617547512054443, + "learning_rate": 0.000497853434646655, + "loss": 5.0156, + "step": 594 + }, + { + "epoch": 0.05810546875, + "grad_norm": 0.8185104727745056, + "learning_rate": 0.0004978437700353766, + "loss": 4.9648, + "step": 595 + }, + { + "epoch": 0.058203125, + "grad_norm": 0.996583104133606, + "learning_rate": 0.0004978340838209071, + "loss": 4.9336, + "step": 596 + }, + { + "epoch": 0.05830078125, + "grad_norm": 0.8999793529510498, + "learning_rate": 0.0004978243760041855, + "loss": 4.9805, + "step": 597 + }, + { + "epoch": 0.0583984375, + "grad_norm": 0.7666017413139343, + "learning_rate": 0.0004978146465861531, + "loss": 4.918, + "step": 598 + }, + { + "epoch": 0.05849609375, + "grad_norm": 0.7835460901260376, + "learning_rate": 0.0004978048955677529, + "loss": 4.9766, + "step": 599 + }, + { + "epoch": 0.05859375, + "grad_norm": 0.7298296689987183, + "learning_rate": 0.0004977951229499302, + "loss": 4.9648, + "step": 600 + }, + { + 
"epoch": 0.05869140625, + "grad_norm": 0.7160419225692749, + "learning_rate": 0.0004977853287336325, + "loss": 4.9531, + "step": 601 + }, + { + "epoch": 0.0587890625, + "grad_norm": 0.9255022406578064, + "learning_rate": 0.0004977755129198092, + "loss": 4.9727, + "step": 602 + }, + { + "epoch": 0.05888671875, + "grad_norm": 1.1262624263763428, + "learning_rate": 0.0004977656755094119, + "loss": 4.9766, + "step": 603 + }, + { + "epoch": 0.058984375, + "grad_norm": 0.8687927722930908, + "learning_rate": 0.0004977558165033942, + "loss": 4.9727, + "step": 604 + }, + { + "epoch": 0.05908203125, + "grad_norm": 0.827407717704773, + "learning_rate": 0.0004977459359027121, + "loss": 4.957, + "step": 605 + }, + { + "epoch": 0.0591796875, + "grad_norm": 0.7043539881706238, + "learning_rate": 0.0004977360337083232, + "loss": 4.9961, + "step": 606 + }, + { + "epoch": 0.05927734375, + "grad_norm": 0.6146707534790039, + "learning_rate": 0.0004977261099211876, + "loss": 4.957, + "step": 607 + }, + { + "epoch": 0.059375, + "grad_norm": 0.5867034196853638, + "learning_rate": 0.0004977161645422672, + "loss": 4.9648, + "step": 608 + }, + { + "epoch": 0.05947265625, + "grad_norm": 0.456230491399765, + "learning_rate": 0.0004977061975725264, + "loss": 4.9805, + "step": 609 + }, + { + "epoch": 0.0595703125, + "grad_norm": 0.4468291997909546, + "learning_rate": 0.000497696209012931, + "loss": 4.9844, + "step": 610 + }, + { + "epoch": 0.05966796875, + "grad_norm": 0.4772776961326599, + "learning_rate": 0.0004976861988644498, + "loss": 4.9453, + "step": 611 + }, + { + "epoch": 0.059765625, + "grad_norm": 0.510353147983551, + "learning_rate": 0.0004976761671280529, + "loss": 4.9688, + "step": 612 + }, + { + "epoch": 0.05986328125, + "grad_norm": 0.4383707046508789, + "learning_rate": 0.0004976661138047128, + "loss": 4.9531, + "step": 613 + }, + { + "epoch": 0.0599609375, + "grad_norm": 0.42035961151123047, + "learning_rate": 0.0004976560388954044, + "loss": 4.9453, + "step": 614 + }, + { + "epoch": 0.06005859375, + "grad_norm": 0.45699962973594666, + "learning_rate": 0.0004976459424011041, + "loss": 4.9531, + "step": 615 + }, + { + "epoch": 0.06015625, + "grad_norm": 0.47027596831321716, + "learning_rate": 0.0004976358243227908, + "loss": 4.9219, + "step": 616 + }, + { + "epoch": 0.06025390625, + "grad_norm": 0.4678795337677002, + "learning_rate": 0.0004976256846614454, + "loss": 4.9609, + "step": 617 + }, + { + "epoch": 0.0603515625, + "grad_norm": 0.4823755919933319, + "learning_rate": 0.0004976155234180507, + "loss": 4.957, + "step": 618 + }, + { + "epoch": 0.06044921875, + "grad_norm": 0.4279949367046356, + "learning_rate": 0.0004976053405935921, + "loss": 4.918, + "step": 619 + }, + { + "epoch": 0.060546875, + "grad_norm": 0.44116416573524475, + "learning_rate": 0.0004975951361890565, + "loss": 4.9297, + "step": 620 + }, + { + "epoch": 0.06064453125, + "grad_norm": 0.5708990097045898, + "learning_rate": 0.0004975849102054332, + "loss": 4.9492, + "step": 621 + }, + { + "epoch": 0.0607421875, + "grad_norm": 0.6033137440681458, + "learning_rate": 0.0004975746626437135, + "loss": 4.9336, + "step": 622 + }, + { + "epoch": 0.06083984375, + "grad_norm": 0.7036421895027161, + "learning_rate": 0.0004975643935048908, + "loss": 4.9297, + "step": 623 + }, + { + "epoch": 0.0609375, + "grad_norm": 0.6817371249198914, + "learning_rate": 0.0004975541027899609, + "loss": 4.9805, + "step": 624 + }, + { + "epoch": 0.06103515625, + "grad_norm": 0.6419718265533447, + "learning_rate": 0.0004975437904999211, + "loss": 4.9375, + 
"step": 625 + }, + { + "epoch": 0.0611328125, + "grad_norm": 0.6562728881835938, + "learning_rate": 0.0004975334566357712, + "loss": 4.957, + "step": 626 + }, + { + "epoch": 0.06123046875, + "grad_norm": 0.7153109312057495, + "learning_rate": 0.000497523101198513, + "loss": 4.9336, + "step": 627 + }, + { + "epoch": 0.061328125, + "grad_norm": 0.7886594533920288, + "learning_rate": 0.0004975127241891505, + "loss": 4.9492, + "step": 628 + }, + { + "epoch": 0.06142578125, + "grad_norm": 0.8648955225944519, + "learning_rate": 0.0004975023256086896, + "loss": 4.9453, + "step": 629 + }, + { + "epoch": 0.0615234375, + "grad_norm": 0.7228974103927612, + "learning_rate": 0.0004974919054581382, + "loss": 4.9531, + "step": 630 + }, + { + "epoch": 0.06162109375, + "grad_norm": 0.563917875289917, + "learning_rate": 0.0004974814637385067, + "loss": 4.9219, + "step": 631 + }, + { + "epoch": 0.06171875, + "grad_norm": 0.6323292851448059, + "learning_rate": 0.0004974710004508073, + "loss": 4.957, + "step": 632 + }, + { + "epoch": 0.06181640625, + "grad_norm": 0.6532883048057556, + "learning_rate": 0.0004974605155960545, + "loss": 4.957, + "step": 633 + }, + { + "epoch": 0.0619140625, + "grad_norm": 0.5852863788604736, + "learning_rate": 0.0004974500091752643, + "loss": 4.9492, + "step": 634 + }, + { + "epoch": 0.06201171875, + "grad_norm": 0.5243951082229614, + "learning_rate": 0.0004974394811894555, + "loss": 4.9102, + "step": 635 + }, + { + "epoch": 0.062109375, + "grad_norm": 0.4946766495704651, + "learning_rate": 0.0004974289316396487, + "loss": 4.957, + "step": 636 + }, + { + "epoch": 0.06220703125, + "grad_norm": 0.5417998433113098, + "learning_rate": 0.0004974183605268667, + "loss": 4.9531, + "step": 637 + }, + { + "epoch": 0.0623046875, + "grad_norm": 0.5387380719184875, + "learning_rate": 0.0004974077678521343, + "loss": 4.9805, + "step": 638 + }, + { + "epoch": 0.06240234375, + "grad_norm": 0.4830368161201477, + "learning_rate": 0.000497397153616478, + "loss": 4.9375, + "step": 639 + }, + { + "epoch": 0.0625, + "grad_norm": 0.5016372203826904, + "learning_rate": 0.0004973865178209274, + "loss": 4.9297, + "step": 640 + }, + { + "epoch": 0.06259765625, + "grad_norm": 0.5527303814888, + "learning_rate": 0.000497375860466513, + "loss": 4.9297, + "step": 641 + }, + { + "epoch": 0.0626953125, + "grad_norm": 0.5242739319801331, + "learning_rate": 0.0004973651815542682, + "loss": 4.9805, + "step": 642 + }, + { + "epoch": 0.06279296875, + "grad_norm": 0.48130086064338684, + "learning_rate": 0.0004973544810852284, + "loss": 4.957, + "step": 643 + }, + { + "epoch": 0.062890625, + "grad_norm": 0.5349105596542358, + "learning_rate": 0.0004973437590604307, + "loss": 4.9648, + "step": 644 + }, + { + "epoch": 0.06298828125, + "grad_norm": 0.5169801712036133, + "learning_rate": 0.0004973330154809146, + "loss": 4.9375, + "step": 645 + }, + { + "epoch": 0.0630859375, + "grad_norm": 0.480214387178421, + "learning_rate": 0.0004973222503477216, + "loss": 4.9531, + "step": 646 + }, + { + "epoch": 0.06318359375, + "grad_norm": 0.4224573075771332, + "learning_rate": 0.0004973114636618954, + "loss": 4.9219, + "step": 647 + }, + { + "epoch": 0.06328125, + "grad_norm": 0.41642358899116516, + "learning_rate": 0.0004973006554244816, + "loss": 4.918, + "step": 648 + }, + { + "epoch": 0.06337890625, + "grad_norm": 0.4622966945171356, + "learning_rate": 0.000497289825636528, + "loss": 4.9297, + "step": 649 + }, + { + "epoch": 0.0634765625, + "grad_norm": 0.5849423408508301, + "learning_rate": 0.0004972789742990846, + "loss": 
4.9258, + "step": 650 + }, + { + "epoch": 0.06357421875, + "grad_norm": 0.5855206847190857, + "learning_rate": 0.0004972681014132031, + "loss": 4.9414, + "step": 651 + }, + { + "epoch": 0.063671875, + "grad_norm": 0.6138719320297241, + "learning_rate": 0.0004972572069799378, + "loss": 4.9141, + "step": 652 + }, + { + "epoch": 0.06376953125, + "grad_norm": 0.5948016047477722, + "learning_rate": 0.0004972462910003447, + "loss": 4.9414, + "step": 653 + }, + { + "epoch": 0.0638671875, + "grad_norm": 0.558039665222168, + "learning_rate": 0.0004972353534754821, + "loss": 4.918, + "step": 654 + }, + { + "epoch": 0.06396484375, + "grad_norm": 0.6696071624755859, + "learning_rate": 0.0004972243944064103, + "loss": 4.9258, + "step": 655 + }, + { + "epoch": 0.0640625, + "grad_norm": 0.7494735717773438, + "learning_rate": 0.0004972134137941918, + "loss": 4.8945, + "step": 656 + }, + { + "epoch": 0.06416015625, + "grad_norm": 0.8260038495063782, + "learning_rate": 0.0004972024116398908, + "loss": 4.9336, + "step": 657 + }, + { + "epoch": 0.0642578125, + "grad_norm": 0.9088923931121826, + "learning_rate": 0.0004971913879445742, + "loss": 4.9531, + "step": 658 + }, + { + "epoch": 0.06435546875, + "grad_norm": 0.7802959084510803, + "learning_rate": 0.0004971803427093105, + "loss": 4.9219, + "step": 659 + }, + { + "epoch": 0.064453125, + "grad_norm": 0.6456225514411926, + "learning_rate": 0.0004971692759351705, + "loss": 4.9414, + "step": 660 + }, + { + "epoch": 0.06455078125, + "grad_norm": 0.6266503930091858, + "learning_rate": 0.0004971581876232272, + "loss": 4.9219, + "step": 661 + }, + { + "epoch": 0.0646484375, + "grad_norm": 0.5530596971511841, + "learning_rate": 0.0004971470777745553, + "loss": 4.9336, + "step": 662 + }, + { + "epoch": 0.06474609375, + "grad_norm": 0.5629117488861084, + "learning_rate": 0.0004971359463902319, + "loss": 4.9727, + "step": 663 + }, + { + "epoch": 0.06484375, + "grad_norm": 0.5490591526031494, + "learning_rate": 0.0004971247934713362, + "loss": 4.9453, + "step": 664 + }, + { + "epoch": 0.06494140625, + "grad_norm": 0.5852833986282349, + "learning_rate": 0.0004971136190189494, + "loss": 4.9414, + "step": 665 + }, + { + "epoch": 0.0650390625, + "grad_norm": 0.6156993508338928, + "learning_rate": 0.0004971024230341546, + "loss": 4.957, + "step": 666 + }, + { + "epoch": 0.06513671875, + "grad_norm": 0.6128381490707397, + "learning_rate": 0.0004970912055180373, + "loss": 4.8867, + "step": 667 + }, + { + "epoch": 0.065234375, + "grad_norm": 0.4521043598651886, + "learning_rate": 0.000497079966471685, + "loss": 4.9141, + "step": 668 + }, + { + "epoch": 0.06533203125, + "grad_norm": 0.42642197012901306, + "learning_rate": 0.0004970687058961873, + "loss": 4.9336, + "step": 669 + }, + { + "epoch": 0.0654296875, + "grad_norm": 0.41460588574409485, + "learning_rate": 0.0004970574237926356, + "loss": 4.9062, + "step": 670 + }, + { + "epoch": 0.06552734375, + "grad_norm": 0.4534105956554413, + "learning_rate": 0.000497046120162124, + "loss": 4.8945, + "step": 671 + }, + { + "epoch": 0.065625, + "grad_norm": 0.45969158411026, + "learning_rate": 0.0004970347950057478, + "loss": 4.918, + "step": 672 + }, + { + "epoch": 0.06572265625, + "grad_norm": 0.45954084396362305, + "learning_rate": 0.0004970234483246053, + "loss": 4.9375, + "step": 673 + }, + { + "epoch": 0.0658203125, + "grad_norm": 0.5340402126312256, + "learning_rate": 0.0004970120801197964, + "loss": 4.9219, + "step": 674 + }, + { + "epoch": 0.06591796875, + "grad_norm": 0.5017228722572327, + "learning_rate": 
0.0004970006903924231, + "loss": 4.9141, + "step": 675 + }, + { + "epoch": 0.066015625, + "grad_norm": 0.4929012060165405, + "learning_rate": 0.0004969892791435896, + "loss": 4.9062, + "step": 676 + }, + { + "epoch": 0.06611328125, + "grad_norm": 0.5938189625740051, + "learning_rate": 0.0004969778463744021, + "loss": 4.9414, + "step": 677 + }, + { + "epoch": 0.0662109375, + "grad_norm": 0.6704724431037903, + "learning_rate": 0.000496966392085969, + "loss": 4.9297, + "step": 678 + }, + { + "epoch": 0.06630859375, + "grad_norm": 0.6303629875183105, + "learning_rate": 0.0004969549162794007, + "loss": 4.9258, + "step": 679 + }, + { + "epoch": 0.06640625, + "grad_norm": 0.5393560528755188, + "learning_rate": 0.0004969434189558096, + "loss": 4.9258, + "step": 680 + }, + { + "epoch": 0.06650390625, + "grad_norm": 0.4611497223377228, + "learning_rate": 0.0004969319001163104, + "loss": 4.9102, + "step": 681 + }, + { + "epoch": 0.0666015625, + "grad_norm": 0.47802746295928955, + "learning_rate": 0.0004969203597620197, + "loss": 4.9219, + "step": 682 + }, + { + "epoch": 0.06669921875, + "grad_norm": 0.5258490443229675, + "learning_rate": 0.0004969087978940564, + "loss": 4.9492, + "step": 683 + }, + { + "epoch": 0.066796875, + "grad_norm": 0.5325090289115906, + "learning_rate": 0.0004968972145135412, + "loss": 4.9727, + "step": 684 + }, + { + "epoch": 0.06689453125, + "grad_norm": 0.5076428651809692, + "learning_rate": 0.0004968856096215971, + "loss": 4.8984, + "step": 685 + }, + { + "epoch": 0.0669921875, + "grad_norm": 0.5641031861305237, + "learning_rate": 0.000496873983219349, + "loss": 4.9414, + "step": 686 + }, + { + "epoch": 0.06708984375, + "grad_norm": 0.7779815793037415, + "learning_rate": 0.0004968623353079242, + "loss": 4.9102, + "step": 687 + }, + { + "epoch": 0.0671875, + "grad_norm": 0.618226170539856, + "learning_rate": 0.0004968506658884517, + "loss": 4.9336, + "step": 688 + }, + { + "epoch": 0.06728515625, + "grad_norm": 0.6069994568824768, + "learning_rate": 0.0004968389749620629, + "loss": 4.9453, + "step": 689 + }, + { + "epoch": 0.0673828125, + "grad_norm": 0.602695643901825, + "learning_rate": 0.000496827262529891, + "loss": 4.9141, + "step": 690 + }, + { + "epoch": 0.06748046875, + "grad_norm": 0.8353970050811768, + "learning_rate": 0.0004968155285930717, + "loss": 4.957, + "step": 691 + }, + { + "epoch": 0.067578125, + "grad_norm": 0.7601311206817627, + "learning_rate": 0.0004968037731527422, + "loss": 4.8867, + "step": 692 + }, + { + "epoch": 0.06767578125, + "grad_norm": 0.8417662978172302, + "learning_rate": 0.0004967919962100424, + "loss": 4.918, + "step": 693 + }, + { + "epoch": 0.0677734375, + "grad_norm": 0.864201009273529, + "learning_rate": 0.0004967801977661138, + "loss": 4.9336, + "step": 694 + }, + { + "epoch": 0.06787109375, + "grad_norm": 0.6699989438056946, + "learning_rate": 0.0004967683778221003, + "loss": 4.9531, + "step": 695 + }, + { + "epoch": 0.06796875, + "grad_norm": 0.5989636778831482, + "learning_rate": 0.0004967565363791478, + "loss": 4.9023, + "step": 696 + }, + { + "epoch": 0.06806640625, + "grad_norm": 0.5926920771598816, + "learning_rate": 0.000496744673438404, + "loss": 4.9141, + "step": 697 + }, + { + "epoch": 0.0681640625, + "grad_norm": 0.6211676597595215, + "learning_rate": 0.0004967327890010192, + "loss": 4.8984, + "step": 698 + }, + { + "epoch": 0.06826171875, + "grad_norm": 0.985095739364624, + "learning_rate": 0.0004967208830681454, + "loss": 4.9102, + "step": 699 + }, + { + "epoch": 0.068359375, + "grad_norm": 0.8903228044509888, + 
"learning_rate": 0.0004967089556409367, + "loss": 4.918, + "step": 700 + }, + { + "epoch": 0.06845703125, + "grad_norm": 0.7326058745384216, + "learning_rate": 0.0004966970067205496, + "loss": 4.9219, + "step": 701 + }, + { + "epoch": 0.0685546875, + "grad_norm": 0.5591720938682556, + "learning_rate": 0.0004966850363081423, + "loss": 4.9062, + "step": 702 + }, + { + "epoch": 0.06865234375, + "grad_norm": 0.5330259799957275, + "learning_rate": 0.0004966730444048754, + "loss": 4.9258, + "step": 703 + }, + { + "epoch": 0.06875, + "grad_norm": 0.5547099113464355, + "learning_rate": 0.0004966610310119113, + "loss": 4.9141, + "step": 704 + }, + { + "epoch": 0.06884765625, + "grad_norm": 0.5156508088111877, + "learning_rate": 0.0004966489961304147, + "loss": 4.918, + "step": 705 + }, + { + "epoch": 0.0689453125, + "grad_norm": 0.5710839629173279, + "learning_rate": 0.0004966369397615522, + "loss": 4.875, + "step": 706 + }, + { + "epoch": 0.06904296875, + "grad_norm": 0.6018747091293335, + "learning_rate": 0.0004966248619064927, + "loss": 4.9102, + "step": 707 + }, + { + "epoch": 0.069140625, + "grad_norm": 0.7630068063735962, + "learning_rate": 0.000496612762566407, + "loss": 4.9375, + "step": 708 + }, + { + "epoch": 0.06923828125, + "grad_norm": 0.9202441573143005, + "learning_rate": 0.000496600641742468, + "loss": 4.9375, + "step": 709 + }, + { + "epoch": 0.0693359375, + "grad_norm": 0.801213800907135, + "learning_rate": 0.0004965884994358508, + "loss": 4.9102, + "step": 710 + }, + { + "epoch": 0.06943359375, + "grad_norm": 0.6991446614265442, + "learning_rate": 0.0004965763356477326, + "loss": 4.9219, + "step": 711 + }, + { + "epoch": 0.06953125, + "grad_norm": 0.6677018404006958, + "learning_rate": 0.0004965641503792924, + "loss": 4.8984, + "step": 712 + }, + { + "epoch": 0.06962890625, + "grad_norm": 0.5393925905227661, + "learning_rate": 0.0004965519436317115, + "loss": 4.9141, + "step": 713 + }, + { + "epoch": 0.0697265625, + "grad_norm": 0.4885493814945221, + "learning_rate": 0.0004965397154061736, + "loss": 4.9102, + "step": 714 + }, + { + "epoch": 0.06982421875, + "grad_norm": 0.42598259449005127, + "learning_rate": 0.0004965274657038637, + "loss": 4.8867, + "step": 715 + }, + { + "epoch": 0.069921875, + "grad_norm": 0.3984520733356476, + "learning_rate": 0.0004965151945259696, + "loss": 4.918, + "step": 716 + }, + { + "epoch": 0.07001953125, + "grad_norm": 0.4602973163127899, + "learning_rate": 0.0004965029018736807, + "loss": 4.9336, + "step": 717 + }, + { + "epoch": 0.0701171875, + "grad_norm": 0.4962187111377716, + "learning_rate": 0.0004964905877481889, + "loss": 4.9141, + "step": 718 + }, + { + "epoch": 0.07021484375, + "grad_norm": 0.572699785232544, + "learning_rate": 0.0004964782521506879, + "loss": 4.9219, + "step": 719 + }, + { + "epoch": 0.0703125, + "grad_norm": 0.6187204718589783, + "learning_rate": 0.0004964658950823734, + "loss": 4.9219, + "step": 720 + }, + { + "epoch": 0.07041015625, + "grad_norm": 0.5086959600448608, + "learning_rate": 0.0004964535165444436, + "loss": 4.8945, + "step": 721 + }, + { + "epoch": 0.0705078125, + "grad_norm": 0.4606127142906189, + "learning_rate": 0.0004964411165380983, + "loss": 4.9102, + "step": 722 + }, + { + "epoch": 0.07060546875, + "grad_norm": 0.4611072242259979, + "learning_rate": 0.0004964286950645397, + "loss": 4.8945, + "step": 723 + }, + { + "epoch": 0.070703125, + "grad_norm": 0.4993979036808014, + "learning_rate": 0.000496416252124972, + "loss": 4.9023, + "step": 724 + }, + { + "epoch": 0.07080078125, + "grad_norm": 
0.5374334454536438, + "learning_rate": 0.0004964037877206014, + "loss": 4.9023, + "step": 725 + }, + { + "epoch": 0.0708984375, + "grad_norm": 0.5764836668968201, + "learning_rate": 0.0004963913018526363, + "loss": 4.8789, + "step": 726 + }, + { + "epoch": 0.07099609375, + "grad_norm": 0.8342440724372864, + "learning_rate": 0.000496378794522287, + "loss": 4.8945, + "step": 727 + }, + { + "epoch": 0.07109375, + "grad_norm": 0.8987082839012146, + "learning_rate": 0.0004963662657307661, + "loss": 4.9375, + "step": 728 + }, + { + "epoch": 0.07119140625, + "grad_norm": 0.7202743291854858, + "learning_rate": 0.0004963537154792881, + "loss": 4.918, + "step": 729 + }, + { + "epoch": 0.0712890625, + "grad_norm": 0.8998580574989319, + "learning_rate": 0.0004963411437690696, + "loss": 4.8867, + "step": 730 + }, + { + "epoch": 0.07138671875, + "grad_norm": 0.808499813079834, + "learning_rate": 0.0004963285506013297, + "loss": 4.9414, + "step": 731 + }, + { + "epoch": 0.071484375, + "grad_norm": 0.5454872250556946, + "learning_rate": 0.0004963159359772889, + "loss": 4.9219, + "step": 732 + }, + { + "epoch": 0.07158203125, + "grad_norm": 0.4970705807209015, + "learning_rate": 0.0004963032998981702, + "loss": 4.918, + "step": 733 + }, + { + "epoch": 0.0716796875, + "grad_norm": 0.4672599732875824, + "learning_rate": 0.0004962906423651985, + "loss": 4.9102, + "step": 734 + }, + { + "epoch": 0.07177734375, + "grad_norm": 0.4656890630722046, + "learning_rate": 0.000496277963379601, + "loss": 4.9062, + "step": 735 + }, + { + "epoch": 0.071875, + "grad_norm": 0.4242008626461029, + "learning_rate": 0.0004962652629426068, + "loss": 4.9023, + "step": 736 + }, + { + "epoch": 0.07197265625, + "grad_norm": 0.38737159967422485, + "learning_rate": 0.000496252541055447, + "loss": 4.9102, + "step": 737 + }, + { + "epoch": 0.0720703125, + "grad_norm": 0.4093025326728821, + "learning_rate": 0.000496239797719355, + "loss": 4.8984, + "step": 738 + }, + { + "epoch": 0.07216796875, + "grad_norm": 0.4339083135128021, + "learning_rate": 0.0004962270329355662, + "loss": 4.9414, + "step": 739 + }, + { + "epoch": 0.072265625, + "grad_norm": 0.5110611915588379, + "learning_rate": 0.000496214246705318, + "loss": 4.9141, + "step": 740 + }, + { + "epoch": 0.07236328125, + "grad_norm": 0.5828584432601929, + "learning_rate": 0.00049620143902985, + "loss": 4.918, + "step": 741 + }, + { + "epoch": 0.0724609375, + "grad_norm": 0.5776299834251404, + "learning_rate": 0.0004961886099104038, + "loss": 4.8945, + "step": 742 + }, + { + "epoch": 0.07255859375, + "grad_norm": 0.48184171319007874, + "learning_rate": 0.0004961757593482229, + "loss": 4.8945, + "step": 743 + }, + { + "epoch": 0.07265625, + "grad_norm": 0.4601769745349884, + "learning_rate": 0.0004961628873445535, + "loss": 4.9023, + "step": 744 + }, + { + "epoch": 0.07275390625, + "grad_norm": 0.44548672437667847, + "learning_rate": 0.0004961499939006431, + "loss": 4.9141, + "step": 745 + }, + { + "epoch": 0.0728515625, + "grad_norm": 0.5392785668373108, + "learning_rate": 0.0004961370790177418, + "loss": 4.8945, + "step": 746 + }, + { + "epoch": 0.07294921875, + "grad_norm": 0.5730900764465332, + "learning_rate": 0.0004961241426971014, + "loss": 4.9258, + "step": 747 + }, + { + "epoch": 0.073046875, + "grad_norm": 0.6566439270973206, + "learning_rate": 0.0004961111849399763, + "loss": 4.9102, + "step": 748 + }, + { + "epoch": 0.07314453125, + "grad_norm": 0.6477753520011902, + "learning_rate": 0.0004960982057476224, + "loss": 4.918, + "step": 749 + }, + { + "epoch": 0.0732421875, + 
"grad_norm": 0.5533462166786194, + "learning_rate": 0.0004960852051212982, + "loss": 4.8711, + "step": 750 + }, + { + "epoch": 0.07333984375, + "grad_norm": 0.5516164302825928, + "learning_rate": 0.0004960721830622637, + "loss": 4.8828, + "step": 751 + }, + { + "epoch": 0.0734375, + "grad_norm": 0.5473514795303345, + "learning_rate": 0.0004960591395717816, + "loss": 4.9297, + "step": 752 + }, + { + "epoch": 0.07353515625, + "grad_norm": 0.5541347861289978, + "learning_rate": 0.0004960460746511162, + "loss": 4.8906, + "step": 753 + }, + { + "epoch": 0.0736328125, + "grad_norm": 0.5999252200126648, + "learning_rate": 0.0004960329883015341, + "loss": 4.9023, + "step": 754 + }, + { + "epoch": 0.07373046875, + "grad_norm": 0.5422800779342651, + "learning_rate": 0.0004960198805243039, + "loss": 4.9023, + "step": 755 + }, + { + "epoch": 0.073828125, + "grad_norm": 0.4828493595123291, + "learning_rate": 0.0004960067513206964, + "loss": 4.8555, + "step": 756 + }, + { + "epoch": 0.07392578125, + "grad_norm": 0.44730255007743835, + "learning_rate": 0.0004959936006919843, + "loss": 4.9141, + "step": 757 + }, + { + "epoch": 0.0740234375, + "grad_norm": 0.5136485695838928, + "learning_rate": 0.0004959804286394425, + "loss": 4.9062, + "step": 758 + }, + { + "epoch": 0.07412109375, + "grad_norm": 0.558743417263031, + "learning_rate": 0.000495967235164348, + "loss": 4.9297, + "step": 759 + }, + { + "epoch": 0.07421875, + "grad_norm": 0.503806471824646, + "learning_rate": 0.0004959540202679797, + "loss": 4.9102, + "step": 760 + }, + { + "epoch": 0.07431640625, + "grad_norm": 0.4152722954750061, + "learning_rate": 0.0004959407839516188, + "loss": 4.8711, + "step": 761 + }, + { + "epoch": 0.0744140625, + "grad_norm": 0.38589704036712646, + "learning_rate": 0.0004959275262165485, + "loss": 4.8906, + "step": 762 + }, + { + "epoch": 0.07451171875, + "grad_norm": 0.43141141533851624, + "learning_rate": 0.0004959142470640539, + "loss": 4.8516, + "step": 763 + }, + { + "epoch": 0.074609375, + "grad_norm": 0.42636358737945557, + "learning_rate": 0.0004959009464954224, + "loss": 4.8789, + "step": 764 + }, + { + "epoch": 0.07470703125, + "grad_norm": 0.4754694700241089, + "learning_rate": 0.0004958876245119433, + "loss": 4.8828, + "step": 765 + }, + { + "epoch": 0.0748046875, + "grad_norm": 0.4849173128604889, + "learning_rate": 0.0004958742811149083, + "loss": 4.9062, + "step": 766 + }, + { + "epoch": 0.07490234375, + "grad_norm": 0.5047995448112488, + "learning_rate": 0.0004958609163056108, + "loss": 4.8828, + "step": 767 + }, + { + "epoch": 0.075, + "grad_norm": 0.5260794758796692, + "learning_rate": 0.0004958475300853464, + "loss": 4.8828, + "step": 768 + }, + { + "epoch": 0.07509765625, + "grad_norm": 0.4804491102695465, + "learning_rate": 0.0004958341224554129, + "loss": 4.8906, + "step": 769 + }, + { + "epoch": 0.0751953125, + "grad_norm": 0.4267667233943939, + "learning_rate": 0.00049582069341711, + "loss": 4.875, + "step": 770 + }, + { + "epoch": 0.07529296875, + "grad_norm": 0.460480660200119, + "learning_rate": 0.0004958072429717395, + "loss": 4.8789, + "step": 771 + }, + { + "epoch": 0.075390625, + "grad_norm": 0.4774636924266815, + "learning_rate": 0.0004957937711206055, + "loss": 4.8945, + "step": 772 + }, + { + "epoch": 0.07548828125, + "grad_norm": 0.44169384241104126, + "learning_rate": 0.000495780277865014, + "loss": 4.875, + "step": 773 + }, + { + "epoch": 0.0755859375, + "grad_norm": 0.4531261622905731, + "learning_rate": 0.000495766763206273, + "loss": 4.8945, + "step": 774 + }, + { + "epoch": 
0.07568359375, + "grad_norm": 0.49369335174560547, + "learning_rate": 0.0004957532271456926, + "loss": 4.9023, + "step": 775 + }, + { + "epoch": 0.07578125, + "grad_norm": 0.5407163500785828, + "learning_rate": 0.000495739669684585, + "loss": 4.9023, + "step": 776 + }, + { + "epoch": 0.07587890625, + "grad_norm": 0.6588068008422852, + "learning_rate": 0.0004957260908242647, + "loss": 4.8906, + "step": 777 + }, + { + "epoch": 0.0759765625, + "grad_norm": 0.708760142326355, + "learning_rate": 0.000495712490566048, + "loss": 4.8828, + "step": 778 + }, + { + "epoch": 0.07607421875, + "grad_norm": 0.6345651745796204, + "learning_rate": 0.0004956988689112533, + "loss": 4.8984, + "step": 779 + }, + { + "epoch": 0.076171875, + "grad_norm": 0.5177327990531921, + "learning_rate": 0.0004956852258612011, + "loss": 4.8906, + "step": 780 + }, + { + "epoch": 0.07626953125, + "grad_norm": 0.5784163475036621, + "learning_rate": 0.0004956715614172141, + "loss": 4.8906, + "step": 781 + }, + { + "epoch": 0.0763671875, + "grad_norm": 0.4898063540458679, + "learning_rate": 0.0004956578755806168, + "loss": 4.8945, + "step": 782 + }, + { + "epoch": 0.07646484375, + "grad_norm": 0.43215012550354004, + "learning_rate": 0.0004956441683527361, + "loss": 4.9219, + "step": 783 + }, + { + "epoch": 0.0765625, + "grad_norm": 0.42248815298080444, + "learning_rate": 0.0004956304397349009, + "loss": 4.8711, + "step": 784 + }, + { + "epoch": 0.07666015625, + "grad_norm": 0.4588373303413391, + "learning_rate": 0.0004956166897284419, + "loss": 4.8828, + "step": 785 + }, + { + "epoch": 0.0767578125, + "grad_norm": 0.5150899887084961, + "learning_rate": 0.0004956029183346922, + "loss": 4.875, + "step": 786 + }, + { + "epoch": 0.07685546875, + "grad_norm": 0.49109452962875366, + "learning_rate": 0.0004955891255549868, + "loss": 4.8828, + "step": 787 + }, + { + "epoch": 0.076953125, + "grad_norm": 0.5134442448616028, + "learning_rate": 0.0004955753113906629, + "loss": 4.8789, + "step": 788 + }, + { + "epoch": 0.07705078125, + "grad_norm": 0.5372751951217651, + "learning_rate": 0.0004955614758430594, + "loss": 4.8711, + "step": 789 + }, + { + "epoch": 0.0771484375, + "grad_norm": 0.5520480871200562, + "learning_rate": 0.0004955476189135179, + "loss": 4.875, + "step": 790 + }, + { + "epoch": 0.07724609375, + "grad_norm": 0.5645185112953186, + "learning_rate": 0.0004955337406033817, + "loss": 4.8555, + "step": 791 + }, + { + "epoch": 0.07734375, + "grad_norm": 0.5430117249488831, + "learning_rate": 0.000495519840913996, + "loss": 4.8906, + "step": 792 + }, + { + "epoch": 0.07744140625, + "grad_norm": 0.5487677454948425, + "learning_rate": 0.0004955059198467085, + "loss": 4.8906, + "step": 793 + }, + { + "epoch": 0.0775390625, + "grad_norm": 0.5324546098709106, + "learning_rate": 0.0004954919774028685, + "loss": 4.8672, + "step": 794 + }, + { + "epoch": 0.07763671875, + "grad_norm": 0.48905646800994873, + "learning_rate": 0.0004954780135838278, + "loss": 4.8906, + "step": 795 + }, + { + "epoch": 0.077734375, + "grad_norm": 0.49760356545448303, + "learning_rate": 0.0004954640283909401, + "loss": 4.8984, + "step": 796 + }, + { + "epoch": 0.07783203125, + "grad_norm": 0.5531620979309082, + "learning_rate": 0.0004954500218255613, + "loss": 4.9062, + "step": 797 + }, + { + "epoch": 0.0779296875, + "grad_norm": 0.5735150575637817, + "learning_rate": 0.0004954359938890489, + "loss": 4.8594, + "step": 798 + }, + { + "epoch": 0.07802734375, + "grad_norm": 0.5884649753570557, + "learning_rate": 0.000495421944582763, + "loss": 4.918, + "step": 
799 + }, + { + "epoch": 0.078125, + "grad_norm": 0.48785004019737244, + "learning_rate": 0.0004954078739080656, + "loss": 4.9102, + "step": 800 + }, + { + "epoch": 0.07822265625, + "grad_norm": 0.47542282938957214, + "learning_rate": 0.0004953937818663208, + "loss": 4.8789, + "step": 801 + }, + { + "epoch": 0.0783203125, + "grad_norm": 0.5039560794830322, + "learning_rate": 0.0004953796684588946, + "loss": 4.8906, + "step": 802 + }, + { + "epoch": 0.07841796875, + "grad_norm": 0.47064536809921265, + "learning_rate": 0.0004953655336871553, + "loss": 4.8594, + "step": 803 + }, + { + "epoch": 0.078515625, + "grad_norm": 0.44826674461364746, + "learning_rate": 0.0004953513775524731, + "loss": 4.8633, + "step": 804 + }, + { + "epoch": 0.07861328125, + "grad_norm": 0.403845876455307, + "learning_rate": 0.0004953372000562204, + "loss": 4.8984, + "step": 805 + }, + { + "epoch": 0.0787109375, + "grad_norm": 0.4083589017391205, + "learning_rate": 0.0004953230011997716, + "loss": 4.9141, + "step": 806 + }, + { + "epoch": 0.07880859375, + "grad_norm": 0.3878018260002136, + "learning_rate": 0.0004953087809845031, + "loss": 4.875, + "step": 807 + }, + { + "epoch": 0.07890625, + "grad_norm": 0.39088737964630127, + "learning_rate": 0.0004952945394117936, + "loss": 4.9336, + "step": 808 + }, + { + "epoch": 0.07900390625, + "grad_norm": 0.46204832196235657, + "learning_rate": 0.0004952802764830236, + "loss": 4.8477, + "step": 809 + }, + { + "epoch": 0.0791015625, + "grad_norm": 0.5762801766395569, + "learning_rate": 0.0004952659921995758, + "loss": 4.8945, + "step": 810 + }, + { + "epoch": 0.07919921875, + "grad_norm": 0.6087040305137634, + "learning_rate": 0.0004952516865628352, + "loss": 4.8945, + "step": 811 + }, + { + "epoch": 0.079296875, + "grad_norm": 0.6062765717506409, + "learning_rate": 0.0004952373595741883, + "loss": 4.8711, + "step": 812 + }, + { + "epoch": 0.07939453125, + "grad_norm": 0.6468778252601624, + "learning_rate": 0.0004952230112350241, + "loss": 4.8633, + "step": 813 + }, + { + "epoch": 0.0794921875, + "grad_norm": 0.6803391575813293, + "learning_rate": 0.0004952086415467337, + "loss": 4.8945, + "step": 814 + }, + { + "epoch": 0.07958984375, + "grad_norm": 0.6036785244941711, + "learning_rate": 0.0004951942505107101, + "loss": 4.8633, + "step": 815 + }, + { + "epoch": 0.0796875, + "grad_norm": 0.5079777240753174, + "learning_rate": 0.0004951798381283482, + "loss": 4.8555, + "step": 816 + }, + { + "epoch": 0.07978515625, + "grad_norm": 0.47106999158859253, + "learning_rate": 0.0004951654044010455, + "loss": 4.832, + "step": 817 + }, + { + "epoch": 0.0798828125, + "grad_norm": 0.418499618768692, + "learning_rate": 0.0004951509493302011, + "loss": 4.8711, + "step": 818 + }, + { + "epoch": 0.07998046875, + "grad_norm": 0.3655601143836975, + "learning_rate": 0.0004951364729172163, + "loss": 4.8516, + "step": 819 + }, + { + "epoch": 0.080078125, + "grad_norm": 0.42946258187294006, + "learning_rate": 0.0004951219751634945, + "loss": 4.8789, + "step": 820 + }, + { + "epoch": 0.08017578125, + "grad_norm": 0.412626177072525, + "learning_rate": 0.0004951074560704412, + "loss": 4.8828, + "step": 821 + }, + { + "epoch": 0.0802734375, + "grad_norm": 0.44532856345176697, + "learning_rate": 0.0004950929156394639, + "loss": 4.8828, + "step": 822 + }, + { + "epoch": 0.08037109375, + "grad_norm": 0.4855706989765167, + "learning_rate": 0.0004950783538719723, + "loss": 4.9062, + "step": 823 + }, + { + "epoch": 0.08046875, + "grad_norm": 0.504410982131958, + "learning_rate": 0.0004950637707693779, + 
"loss": 4.8945, + "step": 824 + }, + { + "epoch": 0.08056640625, + "grad_norm": 0.5246967077255249, + "learning_rate": 0.0004950491663330946, + "loss": 4.875, + "step": 825 + }, + { + "epoch": 0.0806640625, + "grad_norm": 0.5594045519828796, + "learning_rate": 0.000495034540564538, + "loss": 4.8867, + "step": 826 + }, + { + "epoch": 0.08076171875, + "grad_norm": 0.5870455503463745, + "learning_rate": 0.000495019893465126, + "loss": 4.8789, + "step": 827 + }, + { + "epoch": 0.080859375, + "grad_norm": 0.5400936007499695, + "learning_rate": 0.0004950052250362786, + "loss": 4.8906, + "step": 828 + }, + { + "epoch": 0.08095703125, + "grad_norm": 0.4675130546092987, + "learning_rate": 0.000494990535279418, + "loss": 4.8789, + "step": 829 + }, + { + "epoch": 0.0810546875, + "grad_norm": 0.45326483249664307, + "learning_rate": 0.0004949758241959679, + "loss": 4.8906, + "step": 830 + }, + { + "epoch": 0.08115234375, + "grad_norm": 0.43965944647789, + "learning_rate": 0.0004949610917873547, + "loss": 4.8555, + "step": 831 + }, + { + "epoch": 0.08125, + "grad_norm": 0.5271297693252563, + "learning_rate": 0.0004949463380550065, + "loss": 4.8398, + "step": 832 + }, + { + "epoch": 0.08134765625, + "grad_norm": 0.6631085872650146, + "learning_rate": 0.0004949315630003537, + "loss": 4.8711, + "step": 833 + }, + { + "epoch": 0.0814453125, + "grad_norm": 0.5798559188842773, + "learning_rate": 0.0004949167666248285, + "loss": 4.8672, + "step": 834 + }, + { + "epoch": 0.08154296875, + "grad_norm": 0.45223212242126465, + "learning_rate": 0.0004949019489298653, + "loss": 4.8555, + "step": 835 + }, + { + "epoch": 0.081640625, + "grad_norm": 0.4504897892475128, + "learning_rate": 0.0004948871099169006, + "loss": 4.8555, + "step": 836 + }, + { + "epoch": 0.08173828125, + "grad_norm": 0.4103120267391205, + "learning_rate": 0.0004948722495873732, + "loss": 4.8906, + "step": 837 + }, + { + "epoch": 0.0818359375, + "grad_norm": 0.4391990602016449, + "learning_rate": 0.0004948573679427233, + "loss": 4.8633, + "step": 838 + }, + { + "epoch": 0.08193359375, + "grad_norm": 0.5236042141914368, + "learning_rate": 0.0004948424649843938, + "loss": 4.8633, + "step": 839 + }, + { + "epoch": 0.08203125, + "grad_norm": 0.6059775948524475, + "learning_rate": 0.0004948275407138293, + "loss": 4.8477, + "step": 840 + }, + { + "epoch": 0.08212890625, + "grad_norm": 0.6779847145080566, + "learning_rate": 0.0004948125951324768, + "loss": 4.8828, + "step": 841 + }, + { + "epoch": 0.0822265625, + "grad_norm": 0.6596094369888306, + "learning_rate": 0.000494797628241785, + "loss": 4.8633, + "step": 842 + }, + { + "epoch": 0.08232421875, + "grad_norm": 0.550703763961792, + "learning_rate": 0.000494782640043205, + "loss": 4.8672, + "step": 843 + }, + { + "epoch": 0.082421875, + "grad_norm": 0.4469245970249176, + "learning_rate": 0.0004947676305381897, + "loss": 4.8984, + "step": 844 + }, + { + "epoch": 0.08251953125, + "grad_norm": 0.4176386892795563, + "learning_rate": 0.0004947525997281941, + "loss": 4.8516, + "step": 845 + }, + { + "epoch": 0.0826171875, + "grad_norm": 0.362447589635849, + "learning_rate": 0.0004947375476146755, + "loss": 4.8633, + "step": 846 + }, + { + "epoch": 0.08271484375, + "grad_norm": 0.39331042766571045, + "learning_rate": 0.000494722474199093, + "loss": 4.8711, + "step": 847 + }, + { + "epoch": 0.0828125, + "grad_norm": 0.37120670080184937, + "learning_rate": 0.0004947073794829079, + "loss": 4.8828, + "step": 848 + }, + { + "epoch": 0.08291015625, + "grad_norm": 0.40028196573257446, + "learning_rate": 
0.0004946922634675836, + "loss": 4.8789, + "step": 849 + }, + { + "epoch": 0.0830078125, + "grad_norm": 0.47256696224212646, + "learning_rate": 0.0004946771261545853, + "loss": 4.8828, + "step": 850 + }, + { + "epoch": 0.08310546875, + "grad_norm": 0.5021369457244873, + "learning_rate": 0.0004946619675453806, + "loss": 4.8555, + "step": 851 + }, + { + "epoch": 0.083203125, + "grad_norm": 0.4181872010231018, + "learning_rate": 0.000494646787641439, + "loss": 4.8672, + "step": 852 + }, + { + "epoch": 0.08330078125, + "grad_norm": 0.40219172835350037, + "learning_rate": 0.000494631586444232, + "loss": 4.8672, + "step": 853 + }, + { + "epoch": 0.0833984375, + "grad_norm": 0.42699527740478516, + "learning_rate": 0.0004946163639552335, + "loss": 4.8984, + "step": 854 + }, + { + "epoch": 0.08349609375, + "grad_norm": 0.46114829182624817, + "learning_rate": 0.0004946011201759189, + "loss": 4.832, + "step": 855 + }, + { + "epoch": 0.08359375, + "grad_norm": 0.5074323415756226, + "learning_rate": 0.0004945858551077662, + "loss": 4.8789, + "step": 856 + }, + { + "epoch": 0.08369140625, + "grad_norm": 0.5219331979751587, + "learning_rate": 0.0004945705687522552, + "loss": 4.8438, + "step": 857 + }, + { + "epoch": 0.0837890625, + "grad_norm": 0.6114364266395569, + "learning_rate": 0.0004945552611108679, + "loss": 4.8672, + "step": 858 + }, + { + "epoch": 0.08388671875, + "grad_norm": 0.7798656821250916, + "learning_rate": 0.0004945399321850879, + "loss": 4.8789, + "step": 859 + }, + { + "epoch": 0.083984375, + "grad_norm": 0.7879101634025574, + "learning_rate": 0.0004945245819764016, + "loss": 4.8828, + "step": 860 + }, + { + "epoch": 0.08408203125, + "grad_norm": 0.7016037106513977, + "learning_rate": 0.0004945092104862971, + "loss": 4.875, + "step": 861 + }, + { + "epoch": 0.0841796875, + "grad_norm": 0.49669718742370605, + "learning_rate": 0.0004944938177162644, + "loss": 4.8359, + "step": 862 + }, + { + "epoch": 0.08427734375, + "grad_norm": 0.4770626425743103, + "learning_rate": 0.0004944784036677958, + "loss": 4.8594, + "step": 863 + }, + { + "epoch": 0.084375, + "grad_norm": 0.4248046278953552, + "learning_rate": 0.0004944629683423855, + "loss": 4.8867, + "step": 864 + }, + { + "epoch": 0.08447265625, + "grad_norm": 0.4632871747016907, + "learning_rate": 0.00049444751174153, + "loss": 4.8594, + "step": 865 + }, + { + "epoch": 0.0845703125, + "grad_norm": 0.48169225454330444, + "learning_rate": 0.0004944320338667276, + "loss": 4.8633, + "step": 866 + }, + { + "epoch": 0.08466796875, + "grad_norm": 0.42320355772972107, + "learning_rate": 0.0004944165347194788, + "loss": 4.8867, + "step": 867 + }, + { + "epoch": 0.084765625, + "grad_norm": 0.39030560851097107, + "learning_rate": 0.0004944010143012861, + "loss": 4.8828, + "step": 868 + }, + { + "epoch": 0.08486328125, + "grad_norm": 0.4671940207481384, + "learning_rate": 0.0004943854726136542, + "loss": 4.8203, + "step": 869 + }, + { + "epoch": 0.0849609375, + "grad_norm": 0.4762789309024811, + "learning_rate": 0.0004943699096580897, + "loss": 4.8906, + "step": 870 + }, + { + "epoch": 0.08505859375, + "grad_norm": 0.5251975655555725, + "learning_rate": 0.0004943543254361013, + "loss": 4.8672, + "step": 871 + }, + { + "epoch": 0.08515625, + "grad_norm": 0.5693690180778503, + "learning_rate": 0.0004943387199491998, + "loss": 4.9102, + "step": 872 + }, + { + "epoch": 0.08525390625, + "grad_norm": 0.5571799278259277, + "learning_rate": 0.0004943230931988981, + "loss": 4.8672, + "step": 873 + }, + { + "epoch": 0.0853515625, + "grad_norm": 
0.4701318144798279, + "learning_rate": 0.0004943074451867111, + "loss": 4.8789, + "step": 874 + }, + { + "epoch": 0.08544921875, + "grad_norm": 0.43155744671821594, + "learning_rate": 0.0004942917759141556, + "loss": 4.8555, + "step": 875 + }, + { + "epoch": 0.085546875, + "grad_norm": 0.43980658054351807, + "learning_rate": 0.0004942760853827509, + "loss": 4.8789, + "step": 876 + }, + { + "epoch": 0.08564453125, + "grad_norm": 0.46005746722221375, + "learning_rate": 0.0004942603735940179, + "loss": 4.8867, + "step": 877 + }, + { + "epoch": 0.0857421875, + "grad_norm": 0.4600110948085785, + "learning_rate": 0.0004942446405494798, + "loss": 4.8633, + "step": 878 + }, + { + "epoch": 0.08583984375, + "grad_norm": 0.40931013226509094, + "learning_rate": 0.0004942288862506618, + "loss": 4.8516, + "step": 879 + }, + { + "epoch": 0.0859375, + "grad_norm": 0.4222780466079712, + "learning_rate": 0.0004942131106990911, + "loss": 4.8789, + "step": 880 + }, + { + "epoch": 0.08603515625, + "grad_norm": 0.44859063625335693, + "learning_rate": 0.0004941973138962973, + "loss": 4.8672, + "step": 881 + }, + { + "epoch": 0.0861328125, + "grad_norm": 0.48440831899642944, + "learning_rate": 0.0004941814958438115, + "loss": 4.8555, + "step": 882 + }, + { + "epoch": 0.08623046875, + "grad_norm": 0.525878369808197, + "learning_rate": 0.0004941656565431673, + "loss": 4.8281, + "step": 883 + }, + { + "epoch": 0.086328125, + "grad_norm": 0.6066216230392456, + "learning_rate": 0.0004941497959959003, + "loss": 4.8477, + "step": 884 + }, + { + "epoch": 0.08642578125, + "grad_norm": 0.6154394149780273, + "learning_rate": 0.0004941339142035478, + "loss": 4.875, + "step": 885 + }, + { + "epoch": 0.0865234375, + "grad_norm": 0.5415538549423218, + "learning_rate": 0.0004941180111676497, + "loss": 4.8359, + "step": 886 + }, + { + "epoch": 0.08662109375, + "grad_norm": 0.43851184844970703, + "learning_rate": 0.0004941020868897474, + "loss": 4.8477, + "step": 887 + }, + { + "epoch": 0.08671875, + "grad_norm": 0.4420389235019684, + "learning_rate": 0.0004940861413713849, + "loss": 4.8516, + "step": 888 + }, + { + "epoch": 0.08681640625, + "grad_norm": 0.49204596877098083, + "learning_rate": 0.000494070174614108, + "loss": 4.8516, + "step": 889 + }, + { + "epoch": 0.0869140625, + "grad_norm": 0.4975183308124542, + "learning_rate": 0.0004940541866194645, + "loss": 4.875, + "step": 890 + }, + { + "epoch": 0.08701171875, + "grad_norm": 0.4811098873615265, + "learning_rate": 0.0004940381773890043, + "loss": 4.8555, + "step": 891 + }, + { + "epoch": 0.087109375, + "grad_norm": 0.5079295039176941, + "learning_rate": 0.0004940221469242794, + "loss": 4.8633, + "step": 892 + }, + { + "epoch": 0.08720703125, + "grad_norm": 0.5155340433120728, + "learning_rate": 0.000494006095226844, + "loss": 4.8828, + "step": 893 + }, + { + "epoch": 0.0873046875, + "grad_norm": 0.47939589619636536, + "learning_rate": 0.0004939900222982539, + "loss": 4.8164, + "step": 894 + }, + { + "epoch": 0.08740234375, + "grad_norm": 0.5215303897857666, + "learning_rate": 0.0004939739281400674, + "loss": 4.8633, + "step": 895 + }, + { + "epoch": 0.0875, + "grad_norm": 0.48365482687950134, + "learning_rate": 0.0004939578127538449, + "loss": 4.8711, + "step": 896 + }, + { + "epoch": 0.08759765625, + "grad_norm": 0.414248526096344, + "learning_rate": 0.0004939416761411484, + "loss": 4.8594, + "step": 897 + }, + { + "epoch": 0.0876953125, + "grad_norm": 0.4160768389701843, + "learning_rate": 0.0004939255183035424, + "loss": 4.8633, + "step": 898 + }, + { + "epoch": 
0.08779296875, + "grad_norm": 0.4438508152961731, + "learning_rate": 0.0004939093392425933, + "loss": 4.8398, + "step": 899 + }, + { + "epoch": 0.087890625, + "grad_norm": 0.5406934022903442, + "learning_rate": 0.0004938931389598695, + "loss": 4.832, + "step": 900 + }, + { + "epoch": 0.08798828125, + "grad_norm": 0.6314405202865601, + "learning_rate": 0.0004938769174569413, + "loss": 4.8672, + "step": 901 + }, + { + "epoch": 0.0880859375, + "grad_norm": 0.6316208839416504, + "learning_rate": 0.0004938606747353818, + "loss": 4.8711, + "step": 902 + }, + { + "epoch": 0.08818359375, + "grad_norm": 0.49028605222702026, + "learning_rate": 0.0004938444107967651, + "loss": 4.8398, + "step": 903 + }, + { + "epoch": 0.08828125, + "grad_norm": 0.40383410453796387, + "learning_rate": 0.0004938281256426681, + "loss": 4.8281, + "step": 904 + }, + { + "epoch": 0.08837890625, + "grad_norm": 0.4249477982521057, + "learning_rate": 0.0004938118192746695, + "loss": 4.8633, + "step": 905 + }, + { + "epoch": 0.0884765625, + "grad_norm": 0.4824749827384949, + "learning_rate": 0.0004937954916943502, + "loss": 4.8164, + "step": 906 + }, + { + "epoch": 0.08857421875, + "grad_norm": 0.5105504989624023, + "learning_rate": 0.0004937791429032929, + "loss": 4.8477, + "step": 907 + }, + { + "epoch": 0.088671875, + "grad_norm": 0.4530431032180786, + "learning_rate": 0.0004937627729030825, + "loss": 4.8672, + "step": 908 + }, + { + "epoch": 0.08876953125, + "grad_norm": 0.37337881326675415, + "learning_rate": 0.0004937463816953061, + "loss": 4.875, + "step": 909 + }, + { + "epoch": 0.0888671875, + "grad_norm": 0.3546503484249115, + "learning_rate": 0.0004937299692815525, + "loss": 4.8555, + "step": 910 + }, + { + "epoch": 0.08896484375, + "grad_norm": 0.3876660466194153, + "learning_rate": 0.000493713535663413, + "loss": 4.8594, + "step": 911 + }, + { + "epoch": 0.0890625, + "grad_norm": 0.4789651036262512, + "learning_rate": 0.0004936970808424807, + "loss": 4.8477, + "step": 912 + }, + { + "epoch": 0.08916015625, + "grad_norm": 0.5593496561050415, + "learning_rate": 0.0004936806048203506, + "loss": 4.8711, + "step": 913 + }, + { + "epoch": 0.0892578125, + "grad_norm": 0.6056161522865295, + "learning_rate": 0.0004936641075986201, + "loss": 4.8516, + "step": 914 + }, + { + "epoch": 0.08935546875, + "grad_norm": 0.5245595574378967, + "learning_rate": 0.0004936475891788883, + "loss": 4.8164, + "step": 915 + }, + { + "epoch": 0.089453125, + "grad_norm": 0.49999815225601196, + "learning_rate": 0.0004936310495627569, + "loss": 4.832, + "step": 916 + }, + { + "epoch": 0.08955078125, + "grad_norm": 0.6258394718170166, + "learning_rate": 0.0004936144887518291, + "loss": 4.8711, + "step": 917 + }, + { + "epoch": 0.0896484375, + "grad_norm": 0.6583293080329895, + "learning_rate": 0.0004935979067477103, + "loss": 4.8633, + "step": 918 + }, + { + "epoch": 0.08974609375, + "grad_norm": 0.5308080911636353, + "learning_rate": 0.000493581303552008, + "loss": 4.8672, + "step": 919 + }, + { + "epoch": 0.08984375, + "grad_norm": 0.42577865719795227, + "learning_rate": 0.0004935646791663318, + "loss": 4.8555, + "step": 920 + }, + { + "epoch": 0.08994140625, + "grad_norm": 0.3947119414806366, + "learning_rate": 0.0004935480335922935, + "loss": 4.8359, + "step": 921 + }, + { + "epoch": 0.0900390625, + "grad_norm": 0.400806725025177, + "learning_rate": 0.0004935313668315065, + "loss": 4.8359, + "step": 922 + }, + { + "epoch": 0.09013671875, + "grad_norm": 0.3871156573295593, + "learning_rate": 0.0004935146788855868, + "loss": 4.875, + "step": 
923 + }, + { + "epoch": 0.090234375, + "grad_norm": 0.3819091022014618, + "learning_rate": 0.000493497969756152, + "loss": 4.8242, + "step": 924 + }, + { + "epoch": 0.09033203125, + "grad_norm": 0.3758518397808075, + "learning_rate": 0.000493481239444822, + "loss": 4.8477, + "step": 925 + }, + { + "epoch": 0.0904296875, + "grad_norm": 0.4341889023780823, + "learning_rate": 0.0004934644879532185, + "loss": 4.832, + "step": 926 + }, + { + "epoch": 0.09052734375, + "grad_norm": 0.44294634461402893, + "learning_rate": 0.0004934477152829658, + "loss": 4.832, + "step": 927 + }, + { + "epoch": 0.090625, + "grad_norm": 0.4348030984401703, + "learning_rate": 0.0004934309214356897, + "loss": 4.8203, + "step": 928 + }, + { + "epoch": 0.09072265625, + "grad_norm": 0.497432142496109, + "learning_rate": 0.0004934141064130181, + "loss": 4.8633, + "step": 929 + }, + { + "epoch": 0.0908203125, + "grad_norm": 0.5283389687538147, + "learning_rate": 0.0004933972702165814, + "loss": 4.8672, + "step": 930 + }, + { + "epoch": 0.09091796875, + "grad_norm": 0.5673421025276184, + "learning_rate": 0.0004933804128480117, + "loss": 4.8359, + "step": 931 + }, + { + "epoch": 0.091015625, + "grad_norm": 0.5137133002281189, + "learning_rate": 0.000493363534308943, + "loss": 4.8359, + "step": 932 + }, + { + "epoch": 0.09111328125, + "grad_norm": 0.40789496898651123, + "learning_rate": 0.0004933466346010117, + "loss": 4.8359, + "step": 933 + }, + { + "epoch": 0.0912109375, + "grad_norm": 0.4613848328590393, + "learning_rate": 0.0004933297137258561, + "loss": 4.8594, + "step": 934 + }, + { + "epoch": 0.09130859375, + "grad_norm": 0.49260833859443665, + "learning_rate": 0.0004933127716851167, + "loss": 4.8359, + "step": 935 + }, + { + "epoch": 0.09140625, + "grad_norm": 0.4962104856967926, + "learning_rate": 0.0004932958084804356, + "loss": 4.8594, + "step": 936 + }, + { + "epoch": 0.09150390625, + "grad_norm": 0.5404455661773682, + "learning_rate": 0.0004932788241134576, + "loss": 4.8438, + "step": 937 + }, + { + "epoch": 0.0916015625, + "grad_norm": 0.5084996819496155, + "learning_rate": 0.0004932618185858288, + "loss": 4.8633, + "step": 938 + }, + { + "epoch": 0.09169921875, + "grad_norm": 0.4125216603279114, + "learning_rate": 0.0004932447918991983, + "loss": 4.8438, + "step": 939 + }, + { + "epoch": 0.091796875, + "grad_norm": 0.3724025785923004, + "learning_rate": 0.0004932277440552164, + "loss": 4.8359, + "step": 940 + }, + { + "epoch": 0.09189453125, + "grad_norm": 0.4485456645488739, + "learning_rate": 0.0004932106750555358, + "loss": 4.8594, + "step": 941 + }, + { + "epoch": 0.0919921875, + "grad_norm": 0.4383455216884613, + "learning_rate": 0.0004931935849018112, + "loss": 4.8359, + "step": 942 + }, + { + "epoch": 0.09208984375, + "grad_norm": 0.4460577368736267, + "learning_rate": 0.0004931764735956995, + "loss": 4.8516, + "step": 943 + }, + { + "epoch": 0.0921875, + "grad_norm": 0.4385360777378082, + "learning_rate": 0.0004931593411388594, + "loss": 4.8633, + "step": 944 + }, + { + "epoch": 0.09228515625, + "grad_norm": 0.44357937574386597, + "learning_rate": 0.0004931421875329519, + "loss": 4.8398, + "step": 945 + }, + { + "epoch": 0.0923828125, + "grad_norm": 0.5505833625793457, + "learning_rate": 0.0004931250127796398, + "loss": 4.8398, + "step": 946 + }, + { + "epoch": 0.09248046875, + "grad_norm": 0.5369737148284912, + "learning_rate": 0.0004931078168805881, + "loss": 4.8555, + "step": 947 + }, + { + "epoch": 0.092578125, + "grad_norm": 0.5083466172218323, + "learning_rate": 0.0004930905998374639, + "loss": 
4.8398, + "step": 948 + }, + { + "epoch": 0.09267578125, + "grad_norm": 0.48525020480155945, + "learning_rate": 0.0004930733616519363, + "loss": 4.8594, + "step": 949 + }, + { + "epoch": 0.0927734375, + "grad_norm": 0.42595598101615906, + "learning_rate": 0.0004930561023256762, + "loss": 4.8359, + "step": 950 + }, + { + "epoch": 0.09287109375, + "grad_norm": 0.40371179580688477, + "learning_rate": 0.000493038821860357, + "loss": 4.8711, + "step": 951 + }, + { + "epoch": 0.09296875, + "grad_norm": 0.4778882563114166, + "learning_rate": 0.0004930215202576539, + "loss": 4.8359, + "step": 952 + }, + { + "epoch": 0.09306640625, + "grad_norm": 0.5457689166069031, + "learning_rate": 0.000493004197519244, + "loss": 4.8242, + "step": 953 + }, + { + "epoch": 0.0931640625, + "grad_norm": 0.4889971613883972, + "learning_rate": 0.0004929868536468069, + "loss": 4.8477, + "step": 954 + }, + { + "epoch": 0.09326171875, + "grad_norm": 0.4244244396686554, + "learning_rate": 0.0004929694886420239, + "loss": 4.8672, + "step": 955 + }, + { + "epoch": 0.093359375, + "grad_norm": 0.44931143522262573, + "learning_rate": 0.0004929521025065782, + "loss": 4.8594, + "step": 956 + }, + { + "epoch": 0.09345703125, + "grad_norm": 0.5113864541053772, + "learning_rate": 0.0004929346952421553, + "loss": 4.8359, + "step": 957 + }, + { + "epoch": 0.0935546875, + "grad_norm": 0.47967684268951416, + "learning_rate": 0.000492917266850443, + "loss": 4.8438, + "step": 958 + }, + { + "epoch": 0.09365234375, + "grad_norm": 0.41450557112693787, + "learning_rate": 0.0004928998173331306, + "loss": 4.8594, + "step": 959 + }, + { + "epoch": 0.09375, + "grad_norm": 0.4654901325702667, + "learning_rate": 0.0004928823466919098, + "loss": 4.8672, + "step": 960 + }, + { + "epoch": 0.09384765625, + "grad_norm": 0.3816904127597809, + "learning_rate": 0.0004928648549284743, + "loss": 4.8164, + "step": 961 + }, + { + "epoch": 0.0939453125, + "grad_norm": 0.37036556005477905, + "learning_rate": 0.0004928473420445198, + "loss": 4.8359, + "step": 962 + }, + { + "epoch": 0.09404296875, + "grad_norm": 0.427219420671463, + "learning_rate": 0.0004928298080417439, + "loss": 4.8516, + "step": 963 + }, + { + "epoch": 0.094140625, + "grad_norm": 0.47933682799339294, + "learning_rate": 0.0004928122529218467, + "loss": 4.8359, + "step": 964 + }, + { + "epoch": 0.09423828125, + "grad_norm": 0.5197924375534058, + "learning_rate": 0.0004927946766865298, + "loss": 4.8281, + "step": 965 + }, + { + "epoch": 0.0943359375, + "grad_norm": 0.5642294883728027, + "learning_rate": 0.0004927770793374971, + "loss": 4.8203, + "step": 966 + }, + { + "epoch": 0.09443359375, + "grad_norm": 0.5989418029785156, + "learning_rate": 0.0004927594608764546, + "loss": 4.8086, + "step": 967 + }, + { + "epoch": 0.09453125, + "grad_norm": 0.5477548837661743, + "learning_rate": 0.0004927418213051104, + "loss": 4.8555, + "step": 968 + }, + { + "epoch": 0.09462890625, + "grad_norm": 0.43198466300964355, + "learning_rate": 0.0004927241606251745, + "loss": 4.8125, + "step": 969 + }, + { + "epoch": 0.0947265625, + "grad_norm": 0.42610111832618713, + "learning_rate": 0.0004927064788383587, + "loss": 4.8477, + "step": 970 + }, + { + "epoch": 0.09482421875, + "grad_norm": 0.4663979709148407, + "learning_rate": 0.0004926887759463776, + "loss": 4.8281, + "step": 971 + }, + { + "epoch": 0.094921875, + "grad_norm": 0.46881988644599915, + "learning_rate": 0.000492671051950947, + "loss": 4.8438, + "step": 972 + }, + { + "epoch": 0.09501953125, + "grad_norm": 0.36195266246795654, + "learning_rate": 
0.0004926533068537852, + "loss": 4.8477, + "step": 973 + }, + { + "epoch": 0.0951171875, + "grad_norm": 0.3922168016433716, + "learning_rate": 0.0004926355406566127, + "loss": 4.8555, + "step": 974 + }, + { + "epoch": 0.09521484375, + "grad_norm": 0.425672709941864, + "learning_rate": 0.0004926177533611514, + "loss": 4.8164, + "step": 975 + }, + { + "epoch": 0.0953125, + "grad_norm": 0.5194612145423889, + "learning_rate": 0.0004925999449691261, + "loss": 4.8281, + "step": 976 + }, + { + "epoch": 0.09541015625, + "grad_norm": 0.5651121735572815, + "learning_rate": 0.000492582115482263, + "loss": 4.8164, + "step": 977 + }, + { + "epoch": 0.0955078125, + "grad_norm": 0.5677608847618103, + "learning_rate": 0.0004925642649022903, + "loss": 4.8711, + "step": 978 + }, + { + "epoch": 0.09560546875, + "grad_norm": 0.5311588644981384, + "learning_rate": 0.000492546393230939, + "loss": 4.8242, + "step": 979 + }, + { + "epoch": 0.095703125, + "grad_norm": 0.5084513425827026, + "learning_rate": 0.0004925285004699411, + "loss": 4.832, + "step": 980 + }, + { + "epoch": 0.09580078125, + "grad_norm": 0.6232282519340515, + "learning_rate": 0.0004925105866210316, + "loss": 4.8672, + "step": 981 + }, + { + "epoch": 0.0958984375, + "grad_norm": 0.695502519607544, + "learning_rate": 0.0004924926516859469, + "loss": 4.8281, + "step": 982 + }, + { + "epoch": 0.09599609375, + "grad_norm": 0.6766898036003113, + "learning_rate": 0.000492474695666426, + "loss": 4.8203, + "step": 983 + }, + { + "epoch": 0.09609375, + "grad_norm": 0.5815792679786682, + "learning_rate": 0.0004924567185642091, + "loss": 4.8359, + "step": 984 + }, + { + "epoch": 0.09619140625, + "grad_norm": 0.48480623960494995, + "learning_rate": 0.0004924387203810393, + "loss": 4.8633, + "step": 985 + }, + { + "epoch": 0.0962890625, + "grad_norm": 0.46070924401283264, + "learning_rate": 0.0004924207011186613, + "loss": 4.8516, + "step": 986 + }, + { + "epoch": 0.09638671875, + "grad_norm": 0.4500672221183777, + "learning_rate": 0.0004924026607788219, + "loss": 4.8438, + "step": 987 + }, + { + "epoch": 0.096484375, + "grad_norm": 0.42609307169914246, + "learning_rate": 0.0004923845993632702, + "loss": 4.8164, + "step": 988 + }, + { + "epoch": 0.09658203125, + "grad_norm": 0.4055596590042114, + "learning_rate": 0.000492366516873757, + "loss": 4.8633, + "step": 989 + }, + { + "epoch": 0.0966796875, + "grad_norm": 0.37348657846450806, + "learning_rate": 0.0004923484133120351, + "loss": 4.8203, + "step": 990 + }, + { + "epoch": 0.09677734375, + "grad_norm": 0.3880004584789276, + "learning_rate": 0.0004923302886798598, + "loss": 4.8125, + "step": 991 + }, + { + "epoch": 0.096875, + "grad_norm": 0.381511926651001, + "learning_rate": 0.000492312142978988, + "loss": 4.8359, + "step": 992 + }, + { + "epoch": 0.09697265625, + "grad_norm": 0.4143732786178589, + "learning_rate": 0.0004922939762111788, + "loss": 4.8281, + "step": 993 + }, + { + "epoch": 0.0970703125, + "grad_norm": 0.4248274266719818, + "learning_rate": 0.0004922757883781934, + "loss": 4.8555, + "step": 994 + }, + { + "epoch": 0.09716796875, + "grad_norm": 0.43763217329978943, + "learning_rate": 0.000492257579481795, + "loss": 4.8047, + "step": 995 + }, + { + "epoch": 0.097265625, + "grad_norm": 0.4777696430683136, + "learning_rate": 0.0004922393495237488, + "loss": 4.8125, + "step": 996 + }, + { + "epoch": 0.09736328125, + "grad_norm": 0.5218069553375244, + "learning_rate": 0.000492221098505822, + "loss": 4.8125, + "step": 997 + }, + { + "epoch": 0.0974609375, + "grad_norm": 0.5178037285804749, + 
"learning_rate": 0.000492202826429784, + "loss": 4.8242, + "step": 998 + }, + { + "epoch": 0.09755859375, + "grad_norm": 0.45934903621673584, + "learning_rate": 0.0004921845332974062, + "loss": 4.8125, + "step": 999 + }, + { + "epoch": 0.09765625, + "grad_norm": 0.4091397821903229, + "learning_rate": 0.0004921662191104619, + "loss": 4.8359, + "step": 1000 + }, + { + "epoch": 0.09775390625, + "grad_norm": 0.3937183618545532, + "learning_rate": 0.0004921478838707266, + "loss": 4.8359, + "step": 1001 + }, + { + "epoch": 0.0978515625, + "grad_norm": 0.4575563073158264, + "learning_rate": 0.0004921295275799778, + "loss": 4.8281, + "step": 1002 + }, + { + "epoch": 0.09794921875, + "grad_norm": 0.490375280380249, + "learning_rate": 0.0004921111502399949, + "loss": 4.832, + "step": 1003 + }, + { + "epoch": 0.098046875, + "grad_norm": 0.4874227046966553, + "learning_rate": 0.0004920927518525594, + "loss": 4.8203, + "step": 1004 + }, + { + "epoch": 0.09814453125, + "grad_norm": 0.42199403047561646, + "learning_rate": 0.0004920743324194552, + "loss": 4.8555, + "step": 1005 + }, + { + "epoch": 0.0982421875, + "grad_norm": 0.49443531036376953, + "learning_rate": 0.0004920558919424677, + "loss": 4.8047, + "step": 1006 + }, + { + "epoch": 0.09833984375, + "grad_norm": 0.39844444394111633, + "learning_rate": 0.0004920374304233846, + "loss": 4.8203, + "step": 1007 + }, + { + "epoch": 0.0984375, + "grad_norm": 0.462501585483551, + "learning_rate": 0.0004920189478639957, + "loss": 4.8477, + "step": 1008 + }, + { + "epoch": 0.09853515625, + "grad_norm": 0.5070082545280457, + "learning_rate": 0.0004920004442660927, + "loss": 4.8203, + "step": 1009 + }, + { + "epoch": 0.0986328125, + "grad_norm": 0.47550520300865173, + "learning_rate": 0.0004919819196314695, + "loss": 4.8008, + "step": 1010 + }, + { + "epoch": 0.09873046875, + "grad_norm": 0.4895987808704376, + "learning_rate": 0.0004919633739619218, + "loss": 4.8125, + "step": 1011 + }, + { + "epoch": 0.098828125, + "grad_norm": 0.48678672313690186, + "learning_rate": 0.0004919448072592474, + "loss": 4.8086, + "step": 1012 + }, + { + "epoch": 0.09892578125, + "grad_norm": 0.4505287706851959, + "learning_rate": 0.0004919262195252465, + "loss": 4.8281, + "step": 1013 + }, + { + "epoch": 0.0990234375, + "grad_norm": 0.8226277232170105, + "learning_rate": 0.0004919076107617209, + "loss": 4.8047, + "step": 1014 + }, + { + "epoch": 0.09912109375, + "grad_norm": 0.6768991947174072, + "learning_rate": 0.0004918889809704745, + "loss": 4.8359, + "step": 1015 + }, + { + "epoch": 0.09921875, + "grad_norm": 0.573598325252533, + "learning_rate": 0.0004918703301533135, + "loss": 4.832, + "step": 1016 + }, + { + "epoch": 0.09931640625, + "grad_norm": 0.49422210454940796, + "learning_rate": 0.0004918516583120458, + "loss": 4.8555, + "step": 1017 + }, + { + "epoch": 0.0994140625, + "grad_norm": 0.5521137714385986, + "learning_rate": 0.0004918329654484817, + "loss": 4.8203, + "step": 1018 + }, + { + "epoch": 0.09951171875, + "grad_norm": 0.6036257743835449, + "learning_rate": 0.0004918142515644332, + "loss": 4.8008, + "step": 1019 + }, + { + "epoch": 0.099609375, + "grad_norm": 0.8331734538078308, + "learning_rate": 0.0004917955166617146, + "loss": 4.8203, + "step": 1020 + }, + { + "epoch": 0.09970703125, + "grad_norm": 1.3778009414672852, + "learning_rate": 0.000491776760742142, + "loss": 4.8281, + "step": 1021 + }, + { + "epoch": 0.0998046875, + "grad_norm": 0.785008430480957, + "learning_rate": 0.0004917579838075337, + "loss": 4.8477, + "step": 1022 + }, + { + "epoch": 
0.09990234375, + "grad_norm": 0.7883325815200806, + "learning_rate": 0.0004917391858597099, + "loss": 4.8398, + "step": 1023 + }, + { + "epoch": 0.1, + "grad_norm": 0.5901327729225159, + "learning_rate": 0.0004917203669004932, + "loss": 4.8555, + "step": 1024 + }, + { + "epoch": 0.10009765625, + "grad_norm": 0.4483931064605713, + "learning_rate": 0.0004917015269317079, + "loss": 4.832, + "step": 1025 + }, + { + "epoch": 0.1001953125, + "grad_norm": 0.6025552749633789, + "learning_rate": 0.0004916826659551802, + "loss": 4.8438, + "step": 1026 + }, + { + "epoch": 0.10029296875, + "grad_norm": 0.691528856754303, + "learning_rate": 0.0004916637839727387, + "loss": 4.8477, + "step": 1027 + }, + { + "epoch": 0.100390625, + "grad_norm": 0.922143816947937, + "learning_rate": 0.0004916448809862137, + "loss": 4.8398, + "step": 1028 + }, + { + "epoch": 0.10048828125, + "grad_norm": 0.7501768469810486, + "learning_rate": 0.000491625956997438, + "loss": 4.8438, + "step": 1029 + }, + { + "epoch": 0.1005859375, + "grad_norm": 0.5718377828598022, + "learning_rate": 0.000491607012008246, + "loss": 4.8047, + "step": 1030 + }, + { + "epoch": 0.10068359375, + "grad_norm": 0.5166970491409302, + "learning_rate": 0.0004915880460204743, + "loss": 4.832, + "step": 1031 + }, + { + "epoch": 0.10078125, + "grad_norm": 0.41339635848999023, + "learning_rate": 0.0004915690590359615, + "loss": 4.832, + "step": 1032 + }, + { + "epoch": 0.10087890625, + "grad_norm": 0.42061278223991394, + "learning_rate": 0.0004915500510565483, + "loss": 4.832, + "step": 1033 + }, + { + "epoch": 0.1009765625, + "grad_norm": 0.8583418130874634, + "learning_rate": 0.0004915310220840774, + "loss": 4.8047, + "step": 1034 + }, + { + "epoch": 0.10107421875, + "grad_norm": 1.503048062324524, + "learning_rate": 0.0004915119721203935, + "loss": 4.8555, + "step": 1035 + }, + { + "epoch": 0.101171875, + "grad_norm": 1.4394514560699463, + "learning_rate": 0.0004914929011673434, + "loss": 4.8281, + "step": 1036 + }, + { + "epoch": 0.10126953125, + "grad_norm": 1.117308259010315, + "learning_rate": 0.0004914738092267758, + "loss": 4.8398, + "step": 1037 + }, + { + "epoch": 0.1013671875, + "grad_norm": 0.6745375990867615, + "learning_rate": 0.0004914546963005416, + "loss": 4.8438, + "step": 1038 + }, + { + "epoch": 0.10146484375, + "grad_norm": 1.0236302614212036, + "learning_rate": 0.0004914355623904938, + "loss": 4.8398, + "step": 1039 + }, + { + "epoch": 0.1015625, + "grad_norm": 0.8115978240966797, + "learning_rate": 0.0004914164074984872, + "loss": 4.8633, + "step": 1040 + }, + { + "epoch": 0.10166015625, + "grad_norm": 0.5963826775550842, + "learning_rate": 0.0004913972316263785, + "loss": 4.8203, + "step": 1041 + }, + { + "epoch": 0.1017578125, + "grad_norm": 0.49297934770584106, + "learning_rate": 0.000491378034776027, + "loss": 4.8086, + "step": 1042 + }, + { + "epoch": 0.10185546875, + "grad_norm": 0.4009886085987091, + "learning_rate": 0.0004913588169492937, + "loss": 4.8203, + "step": 1043 + }, + { + "epoch": 0.101953125, + "grad_norm": 0.4827432930469513, + "learning_rate": 0.0004913395781480414, + "loss": 4.8398, + "step": 1044 + }, + { + "epoch": 0.10205078125, + "grad_norm": 0.402046799659729, + "learning_rate": 0.0004913203183741354, + "loss": 4.832, + "step": 1045 + }, + { + "epoch": 0.1021484375, + "grad_norm": 0.4627341330051422, + "learning_rate": 0.0004913010376294425, + "loss": 4.8125, + "step": 1046 + }, + { + "epoch": 0.10224609375, + "grad_norm": 0.4753624200820923, + "learning_rate": 0.0004912817359158322, + "loss": 4.8203, + 
"step": 1047 + }, + { + "epoch": 0.10234375, + "grad_norm": 0.5196328163146973, + "learning_rate": 0.0004912624132351755, + "loss": 4.7969, + "step": 1048 + }, + { + "epoch": 0.10244140625, + "grad_norm": 0.9553819894790649, + "learning_rate": 0.0004912430695893456, + "loss": 4.7969, + "step": 1049 + }, + { + "epoch": 0.1025390625, + "grad_norm": 0.7087034583091736, + "learning_rate": 0.0004912237049802178, + "loss": 4.8008, + "step": 1050 + }, + { + "epoch": 0.10263671875, + "grad_norm": 0.6159484386444092, + "learning_rate": 0.0004912043194096693, + "loss": 4.8047, + "step": 1051 + }, + { + "epoch": 0.102734375, + "grad_norm": 0.5763092637062073, + "learning_rate": 0.0004911849128795793, + "loss": 4.8359, + "step": 1052 + }, + { + "epoch": 0.10283203125, + "grad_norm": 0.5660399198532104, + "learning_rate": 0.0004911654853918293, + "loss": 4.7891, + "step": 1053 + }, + { + "epoch": 0.1029296875, + "grad_norm": 0.5883915424346924, + "learning_rate": 0.0004911460369483026, + "loss": 4.8203, + "step": 1054 + }, + { + "epoch": 0.10302734375, + "grad_norm": 0.5926139950752258, + "learning_rate": 0.0004911265675508847, + "loss": 4.8438, + "step": 1055 + }, + { + "epoch": 0.103125, + "grad_norm": 0.5742599964141846, + "learning_rate": 0.000491107077201463, + "loss": 4.8203, + "step": 1056 + }, + { + "epoch": 0.10322265625, + "grad_norm": 0.4908924102783203, + "learning_rate": 0.0004910875659019267, + "loss": 4.8164, + "step": 1057 + }, + { + "epoch": 0.1033203125, + "grad_norm": 0.42929133772850037, + "learning_rate": 0.0004910680336541676, + "loss": 4.8125, + "step": 1058 + }, + { + "epoch": 0.10341796875, + "grad_norm": 0.384483277797699, + "learning_rate": 0.000491048480460079, + "loss": 4.8008, + "step": 1059 + }, + { + "epoch": 0.103515625, + "grad_norm": 0.36012735962867737, + "learning_rate": 0.0004910289063215564, + "loss": 4.8203, + "step": 1060 + }, + { + "epoch": 0.10361328125, + "grad_norm": 0.3993228077888489, + "learning_rate": 0.0004910093112404978, + "loss": 4.8477, + "step": 1061 + }, + { + "epoch": 0.1037109375, + "grad_norm": 0.3865804970264435, + "learning_rate": 0.0004909896952188024, + "loss": 4.8125, + "step": 1062 + }, + { + "epoch": 0.10380859375, + "grad_norm": 0.4794972240924835, + "learning_rate": 0.0004909700582583721, + "loss": 4.7969, + "step": 1063 + }, + { + "epoch": 0.10390625, + "grad_norm": 0.5843653082847595, + "learning_rate": 0.0004909504003611103, + "loss": 4.8008, + "step": 1064 + }, + { + "epoch": 0.10400390625, + "grad_norm": 0.7108316421508789, + "learning_rate": 0.000490930721528923, + "loss": 4.8164, + "step": 1065 + }, + { + "epoch": 0.1041015625, + "grad_norm": 0.7192037105560303, + "learning_rate": 0.0004909110217637177, + "loss": 4.8477, + "step": 1066 + }, + { + "epoch": 0.10419921875, + "grad_norm": 0.6272011995315552, + "learning_rate": 0.0004908913010674041, + "loss": 4.8125, + "step": 1067 + }, + { + "epoch": 0.104296875, + "grad_norm": 0.5953921675682068, + "learning_rate": 0.0004908715594418942, + "loss": 4.8398, + "step": 1068 + }, + { + "epoch": 0.10439453125, + "grad_norm": 0.4558872878551483, + "learning_rate": 0.0004908517968891018, + "loss": 4.8125, + "step": 1069 + }, + { + "epoch": 0.1044921875, + "grad_norm": 0.3889329135417938, + "learning_rate": 0.0004908320134109427, + "loss": 4.8281, + "step": 1070 + }, + { + "epoch": 0.10458984375, + "grad_norm": 0.3157101571559906, + "learning_rate": 0.0004908122090093347, + "loss": 4.7969, + "step": 1071 + }, + { + "epoch": 0.1046875, + "grad_norm": 0.3802729547023773, + "learning_rate": 
0.0004907923836861978, + "loss": 4.8086, + "step": 1072 + }, + { + "epoch": 0.10478515625, + "grad_norm": 0.402535080909729, + "learning_rate": 0.0004907725374434539, + "loss": 4.8086, + "step": 1073 + }, + { + "epoch": 0.1048828125, + "grad_norm": 0.4550858438014984, + "learning_rate": 0.0004907526702830268, + "loss": 4.8125, + "step": 1074 + }, + { + "epoch": 0.10498046875, + "grad_norm": 0.45571887493133545, + "learning_rate": 0.0004907327822068427, + "loss": 4.8086, + "step": 1075 + }, + { + "epoch": 0.105078125, + "grad_norm": 0.3914882242679596, + "learning_rate": 0.0004907128732168297, + "loss": 4.8242, + "step": 1076 + }, + { + "epoch": 0.10517578125, + "grad_norm": 0.34561851620674133, + "learning_rate": 0.0004906929433149175, + "loss": 4.8203, + "step": 1077 + }, + { + "epoch": 0.1052734375, + "grad_norm": 0.3450526297092438, + "learning_rate": 0.0004906729925030385, + "loss": 4.8203, + "step": 1078 + }, + { + "epoch": 0.10537109375, + "grad_norm": 0.3435506224632263, + "learning_rate": 0.0004906530207831266, + "loss": 4.7969, + "step": 1079 + }, + { + "epoch": 0.10546875, + "grad_norm": 0.3784290552139282, + "learning_rate": 0.0004906330281571179, + "loss": 4.8359, + "step": 1080 + }, + { + "epoch": 0.10556640625, + "grad_norm": 0.4976685345172882, + "learning_rate": 0.0004906130146269507, + "loss": 4.793, + "step": 1081 + }, + { + "epoch": 0.1056640625, + "grad_norm": 0.5724058747291565, + "learning_rate": 0.000490592980194565, + "loss": 4.8008, + "step": 1082 + }, + { + "epoch": 0.10576171875, + "grad_norm": 0.546415388584137, + "learning_rate": 0.0004905729248619032, + "loss": 4.7891, + "step": 1083 + }, + { + "epoch": 0.105859375, + "grad_norm": 0.5786755681037903, + "learning_rate": 0.0004905528486309095, + "loss": 4.8125, + "step": 1084 + }, + { + "epoch": 0.10595703125, + "grad_norm": 0.46551647782325745, + "learning_rate": 0.00049053275150353, + "loss": 4.793, + "step": 1085 + }, + { + "epoch": 0.1060546875, + "grad_norm": 0.3895544707775116, + "learning_rate": 0.0004905126334817131, + "loss": 4.7891, + "step": 1086 + }, + { + "epoch": 0.10615234375, + "grad_norm": 0.3977148234844208, + "learning_rate": 0.000490492494567409, + "loss": 4.8516, + "step": 1087 + }, + { + "epoch": 0.10625, + "grad_norm": 0.41639018058776855, + "learning_rate": 0.0004904723347625702, + "loss": 4.8125, + "step": 1088 + }, + { + "epoch": 0.10634765625, + "grad_norm": 0.4226835370063782, + "learning_rate": 0.0004904521540691509, + "loss": 4.8242, + "step": 1089 + }, + { + "epoch": 0.1064453125, + "grad_norm": 0.47172993421554565, + "learning_rate": 0.0004904319524891076, + "loss": 4.7734, + "step": 1090 + }, + { + "epoch": 0.10654296875, + "grad_norm": 0.438104510307312, + "learning_rate": 0.0004904117300243986, + "loss": 4.8203, + "step": 1091 + }, + { + "epoch": 0.106640625, + "grad_norm": 0.38510963320732117, + "learning_rate": 0.0004903914866769843, + "loss": 4.8164, + "step": 1092 + }, + { + "epoch": 0.10673828125, + "grad_norm": 0.404243528842926, + "learning_rate": 0.0004903712224488273, + "loss": 4.8359, + "step": 1093 + }, + { + "epoch": 0.1068359375, + "grad_norm": 0.4017542898654938, + "learning_rate": 0.0004903509373418921, + "loss": 4.8242, + "step": 1094 + }, + { + "epoch": 0.10693359375, + "grad_norm": 0.4477319121360779, + "learning_rate": 0.000490330631358145, + "loss": 4.7969, + "step": 1095 + }, + { + "epoch": 0.10703125, + "grad_norm": 0.3956342041492462, + "learning_rate": 0.0004903103044995548, + "loss": 4.8047, + "step": 1096 + }, + { + "epoch": 0.10712890625, + 
"grad_norm": 0.43987393379211426, + "learning_rate": 0.0004902899567680917, + "loss": 4.7734, + "step": 1097 + }, + { + "epoch": 0.1072265625, + "grad_norm": 0.4343857169151306, + "learning_rate": 0.0004902695881657286, + "loss": 4.8477, + "step": 1098 + }, + { + "epoch": 0.10732421875, + "grad_norm": 0.3674246370792389, + "learning_rate": 0.0004902491986944399, + "loss": 4.8125, + "step": 1099 + }, + { + "epoch": 0.107421875, + "grad_norm": 0.3673463761806488, + "learning_rate": 0.0004902287883562023, + "loss": 4.8125, + "step": 1100 + }, + { + "epoch": 0.10751953125, + "grad_norm": 0.4112495481967926, + "learning_rate": 0.0004902083571529944, + "loss": 4.8047, + "step": 1101 + }, + { + "epoch": 0.1076171875, + "grad_norm": 0.4617629647254944, + "learning_rate": 0.000490187905086797, + "loss": 4.7695, + "step": 1102 + }, + { + "epoch": 0.10771484375, + "grad_norm": 0.49318692088127136, + "learning_rate": 0.0004901674321595925, + "loss": 4.7812, + "step": 1103 + }, + { + "epoch": 0.1078125, + "grad_norm": 0.5360081195831299, + "learning_rate": 0.0004901469383733659, + "loss": 4.8242, + "step": 1104 + }, + { + "epoch": 0.10791015625, + "grad_norm": 0.5867539048194885, + "learning_rate": 0.0004901264237301039, + "loss": 4.7852, + "step": 1105 + }, + { + "epoch": 0.1080078125, + "grad_norm": 0.5440882444381714, + "learning_rate": 0.0004901058882317951, + "loss": 4.8203, + "step": 1106 + }, + { + "epoch": 0.10810546875, + "grad_norm": 0.4591827094554901, + "learning_rate": 0.0004900853318804304, + "loss": 4.8164, + "step": 1107 + }, + { + "epoch": 0.108203125, + "grad_norm": 0.4257602095603943, + "learning_rate": 0.0004900647546780026, + "loss": 4.8008, + "step": 1108 + }, + { + "epoch": 0.10830078125, + "grad_norm": 0.418489009141922, + "learning_rate": 0.0004900441566265064, + "loss": 4.832, + "step": 1109 + }, + { + "epoch": 0.1083984375, + "grad_norm": 0.4998464286327362, + "learning_rate": 0.0004900235377279388, + "loss": 4.7773, + "step": 1110 + }, + { + "epoch": 0.10849609375, + "grad_norm": 0.47938549518585205, + "learning_rate": 0.0004900028979842986, + "loss": 4.8125, + "step": 1111 + }, + { + "epoch": 0.10859375, + "grad_norm": 0.4215286672115326, + "learning_rate": 0.0004899822373975866, + "loss": 4.7812, + "step": 1112 + }, + { + "epoch": 0.10869140625, + "grad_norm": 0.41954976320266724, + "learning_rate": 0.0004899615559698058, + "loss": 4.8047, + "step": 1113 + }, + { + "epoch": 0.1087890625, + "grad_norm": 0.4177006781101227, + "learning_rate": 0.0004899408537029611, + "loss": 4.8047, + "step": 1114 + }, + { + "epoch": 0.10888671875, + "grad_norm": 0.5628485679626465, + "learning_rate": 0.0004899201305990594, + "loss": 4.7969, + "step": 1115 + }, + { + "epoch": 0.108984375, + "grad_norm": 0.6657236814498901, + "learning_rate": 0.0004898993866601098, + "loss": 4.8086, + "step": 1116 + }, + { + "epoch": 0.10908203125, + "grad_norm": 0.6637690663337708, + "learning_rate": 0.0004898786218881232, + "loss": 4.8203, + "step": 1117 + }, + { + "epoch": 0.1091796875, + "grad_norm": 0.5784990787506104, + "learning_rate": 0.0004898578362851124, + "loss": 4.832, + "step": 1118 + }, + { + "epoch": 0.10927734375, + "grad_norm": 0.42443713545799255, + "learning_rate": 0.0004898370298530928, + "loss": 4.8008, + "step": 1119 + }, + { + "epoch": 0.109375, + "grad_norm": 0.3830028474330902, + "learning_rate": 0.0004898162025940812, + "loss": 4.8398, + "step": 1120 + }, + { + "epoch": 0.10947265625, + "grad_norm": 0.45647111535072327, + "learning_rate": 0.0004897953545100966, + "loss": 4.8086, + 
"step": 1121 + }, + { + "epoch": 0.1095703125, + "grad_norm": 0.49465152621269226, + "learning_rate": 0.0004897744856031604, + "loss": 4.8477, + "step": 1122 + }, + { + "epoch": 0.10966796875, + "grad_norm": 0.5248700380325317, + "learning_rate": 0.0004897535958752954, + "loss": 4.7891, + "step": 1123 + }, + { + "epoch": 0.109765625, + "grad_norm": 0.5552516579627991, + "learning_rate": 0.0004897326853285268, + "loss": 4.8125, + "step": 1124 + }, + { + "epoch": 0.10986328125, + "grad_norm": 0.4863637387752533, + "learning_rate": 0.0004897117539648818, + "loss": 4.8203, + "step": 1125 + }, + { + "epoch": 0.1099609375, + "grad_norm": 0.4048306941986084, + "learning_rate": 0.0004896908017863895, + "loss": 4.7852, + "step": 1126 + }, + { + "epoch": 0.11005859375, + "grad_norm": 0.4323672652244568, + "learning_rate": 0.0004896698287950812, + "loss": 4.7695, + "step": 1127 + }, + { + "epoch": 0.11015625, + "grad_norm": 0.5875673890113831, + "learning_rate": 0.0004896488349929898, + "loss": 4.793, + "step": 1128 + }, + { + "epoch": 0.11025390625, + "grad_norm": 0.5504245758056641, + "learning_rate": 0.0004896278203821506, + "loss": 4.7852, + "step": 1129 + }, + { + "epoch": 0.1103515625, + "grad_norm": 0.44111916422843933, + "learning_rate": 0.0004896067849646011, + "loss": 4.7852, + "step": 1130 + }, + { + "epoch": 0.11044921875, + "grad_norm": 0.37442272901535034, + "learning_rate": 0.0004895857287423802, + "loss": 4.7578, + "step": 1131 + }, + { + "epoch": 0.110546875, + "grad_norm": 0.38579708337783813, + "learning_rate": 0.0004895646517175294, + "loss": 4.7734, + "step": 1132 + }, + { + "epoch": 0.11064453125, + "grad_norm": 0.350492388010025, + "learning_rate": 0.0004895435538920918, + "loss": 4.7969, + "step": 1133 + }, + { + "epoch": 0.1107421875, + "grad_norm": 0.3320639431476593, + "learning_rate": 0.0004895224352681127, + "loss": 4.7969, + "step": 1134 + }, + { + "epoch": 0.11083984375, + "grad_norm": 0.33712780475616455, + "learning_rate": 0.0004895012958476396, + "loss": 4.793, + "step": 1135 + }, + { + "epoch": 0.1109375, + "grad_norm": 0.3295286297798157, + "learning_rate": 0.0004894801356327217, + "loss": 4.8047, + "step": 1136 + }, + { + "epoch": 0.11103515625, + "grad_norm": 0.3195537030696869, + "learning_rate": 0.0004894589546254102, + "loss": 4.8047, + "step": 1137 + }, + { + "epoch": 0.1111328125, + "grad_norm": 0.36060598492622375, + "learning_rate": 0.0004894377528277587, + "loss": 4.8008, + "step": 1138 + }, + { + "epoch": 0.11123046875, + "grad_norm": 0.4128583073616028, + "learning_rate": 0.0004894165302418224, + "loss": 4.8125, + "step": 1139 + }, + { + "epoch": 0.111328125, + "grad_norm": 0.6390661001205444, + "learning_rate": 0.0004893952868696588, + "loss": 4.8086, + "step": 1140 + }, + { + "epoch": 0.11142578125, + "grad_norm": 0.8795298933982849, + "learning_rate": 0.0004893740227133272, + "loss": 4.793, + "step": 1141 + }, + { + "epoch": 0.1115234375, + "grad_norm": 0.5830650329589844, + "learning_rate": 0.000489352737774889, + "loss": 4.8164, + "step": 1142 + }, + { + "epoch": 0.11162109375, + "grad_norm": 0.6256621479988098, + "learning_rate": 0.0004893314320564078, + "loss": 4.8086, + "step": 1143 + }, + { + "epoch": 0.11171875, + "grad_norm": 0.6185265779495239, + "learning_rate": 0.0004893101055599488, + "loss": 4.7969, + "step": 1144 + }, + { + "epoch": 0.11181640625, + "grad_norm": 0.5876705646514893, + "learning_rate": 0.0004892887582875797, + "loss": 4.7969, + "step": 1145 + }, + { + "epoch": 0.1119140625, + "grad_norm": 0.5868121981620789, + 
"learning_rate": 0.0004892673902413699, + "loss": 4.8125, + "step": 1146 + }, + { + "epoch": 0.11201171875, + "grad_norm": 0.46918195486068726, + "learning_rate": 0.0004892460014233907, + "loss": 4.8086, + "step": 1147 + }, + { + "epoch": 0.112109375, + "grad_norm": 0.3593907058238983, + "learning_rate": 0.0004892245918357159, + "loss": 4.832, + "step": 1148 + }, + { + "epoch": 0.11220703125, + "grad_norm": 0.4415476620197296, + "learning_rate": 0.0004892031614804208, + "loss": 4.7852, + "step": 1149 + }, + { + "epoch": 0.1123046875, + "grad_norm": 0.7689213156700134, + "learning_rate": 0.000489181710359583, + "loss": 4.7891, + "step": 1150 + }, + { + "epoch": 0.11240234375, + "grad_norm": 1.3944097757339478, + "learning_rate": 0.000489160238475282, + "loss": 4.8086, + "step": 1151 + }, + { + "epoch": 0.1125, + "grad_norm": 0.6750385165214539, + "learning_rate": 0.0004891387458295995, + "loss": 4.8047, + "step": 1152 + }, + { + "epoch": 0.11259765625, + "grad_norm": 0.6533737778663635, + "learning_rate": 0.0004891172324246189, + "loss": 4.8164, + "step": 1153 + }, + { + "epoch": 0.1126953125, + "grad_norm": 0.698330283164978, + "learning_rate": 0.0004890956982624258, + "loss": 4.8203, + "step": 1154 + }, + { + "epoch": 0.11279296875, + "grad_norm": 0.5275533199310303, + "learning_rate": 0.0004890741433451079, + "loss": 4.7969, + "step": 1155 + }, + { + "epoch": 0.112890625, + "grad_norm": 0.39748457074165344, + "learning_rate": 0.0004890525676747547, + "loss": 4.8086, + "step": 1156 + }, + { + "epoch": 0.11298828125, + "grad_norm": 0.49489057064056396, + "learning_rate": 0.0004890309712534578, + "loss": 4.8047, + "step": 1157 + }, + { + "epoch": 0.1130859375, + "grad_norm": 0.4166368544101715, + "learning_rate": 0.0004890093540833108, + "loss": 4.832, + "step": 1158 + }, + { + "epoch": 0.11318359375, + "grad_norm": 0.3709654211997986, + "learning_rate": 0.0004889877161664096, + "loss": 4.8281, + "step": 1159 + }, + { + "epoch": 0.11328125, + "grad_norm": 0.41362860798835754, + "learning_rate": 0.0004889660575048515, + "loss": 4.7773, + "step": 1160 + }, + { + "epoch": 0.11337890625, + "grad_norm": 0.3981860280036926, + "learning_rate": 0.0004889443781007364, + "loss": 4.7969, + "step": 1161 + }, + { + "epoch": 0.1134765625, + "grad_norm": 0.4014473557472229, + "learning_rate": 0.000488922677956166, + "loss": 4.793, + "step": 1162 + }, + { + "epoch": 0.11357421875, + "grad_norm": 0.5000125765800476, + "learning_rate": 0.0004889009570732436, + "loss": 4.7891, + "step": 1163 + }, + { + "epoch": 0.113671875, + "grad_norm": 0.6138625144958496, + "learning_rate": 0.0004888792154540753, + "loss": 4.7812, + "step": 1164 + }, + { + "epoch": 0.11376953125, + "grad_norm": 0.6484094858169556, + "learning_rate": 0.0004888574531007687, + "loss": 4.8008, + "step": 1165 + }, + { + "epoch": 0.1138671875, + "grad_norm": 0.44802355766296387, + "learning_rate": 0.0004888356700154333, + "loss": 4.8047, + "step": 1166 + }, + { + "epoch": 0.11396484375, + "grad_norm": 0.42703965306282043, + "learning_rate": 0.0004888138662001813, + "loss": 4.7969, + "step": 1167 + }, + { + "epoch": 0.1140625, + "grad_norm": 0.41006794571876526, + "learning_rate": 0.0004887920416571259, + "loss": 4.793, + "step": 1168 + }, + { + "epoch": 0.11416015625, + "grad_norm": 0.39348462224006653, + "learning_rate": 0.0004887701963883831, + "loss": 4.793, + "step": 1169 + }, + { + "epoch": 0.1142578125, + "grad_norm": 0.3154531419277191, + "learning_rate": 0.0004887483303960706, + "loss": 4.793, + "step": 1170 + }, + { + "epoch": 
0.11435546875, + "grad_norm": 0.3364889323711395, + "learning_rate": 0.0004887264436823083, + "loss": 4.7812, + "step": 1171 + }, + { + "epoch": 0.114453125, + "grad_norm": 0.3992083966732025, + "learning_rate": 0.0004887045362492178, + "loss": 4.793, + "step": 1172 + }, + { + "epoch": 0.11455078125, + "grad_norm": 0.42253056168556213, + "learning_rate": 0.0004886826080989229, + "loss": 4.7734, + "step": 1173 + }, + { + "epoch": 0.1146484375, + "grad_norm": 0.36724087595939636, + "learning_rate": 0.0004886606592335495, + "loss": 4.793, + "step": 1174 + }, + { + "epoch": 0.11474609375, + "grad_norm": 0.3279463052749634, + "learning_rate": 0.0004886386896552252, + "loss": 4.7891, + "step": 1175 + }, + { + "epoch": 0.11484375, + "grad_norm": 0.3356574773788452, + "learning_rate": 0.0004886166993660799, + "loss": 4.7891, + "step": 1176 + }, + { + "epoch": 0.11494140625, + "grad_norm": 0.42385661602020264, + "learning_rate": 0.0004885946883682455, + "loss": 4.7969, + "step": 1177 + }, + { + "epoch": 0.1150390625, + "grad_norm": 0.582700252532959, + "learning_rate": 0.0004885726566638557, + "loss": 4.7812, + "step": 1178 + }, + { + "epoch": 0.11513671875, + "grad_norm": 0.5969910025596619, + "learning_rate": 0.0004885506042550464, + "loss": 4.8359, + "step": 1179 + }, + { + "epoch": 0.115234375, + "grad_norm": 0.5986462831497192, + "learning_rate": 0.0004885285311439553, + "loss": 4.7891, + "step": 1180 + }, + { + "epoch": 0.11533203125, + "grad_norm": 0.49811697006225586, + "learning_rate": 0.0004885064373327223, + "loss": 4.7773, + "step": 1181 + }, + { + "epoch": 0.1154296875, + "grad_norm": 0.38979536294937134, + "learning_rate": 0.0004884843228234895, + "loss": 4.8047, + "step": 1182 + }, + { + "epoch": 0.11552734375, + "grad_norm": 0.40310540795326233, + "learning_rate": 0.0004884621876184004, + "loss": 4.8047, + "step": 1183 + }, + { + "epoch": 0.115625, + "grad_norm": 0.42200982570648193, + "learning_rate": 0.0004884400317196009, + "loss": 4.8203, + "step": 1184 + }, + { + "epoch": 0.11572265625, + "grad_norm": 0.4246567487716675, + "learning_rate": 0.000488417855129239, + "loss": 4.7812, + "step": 1185 + }, + { + "epoch": 0.1158203125, + "grad_norm": 0.4499056935310364, + "learning_rate": 0.0004883956578494645, + "loss": 4.7891, + "step": 1186 + }, + { + "epoch": 0.11591796875, + "grad_norm": 0.42340394854545593, + "learning_rate": 0.0004883734398824294, + "loss": 4.7734, + "step": 1187 + }, + { + "epoch": 0.116015625, + "grad_norm": 0.42388978600502014, + "learning_rate": 0.0004883512012302874, + "loss": 4.8125, + "step": 1188 + }, + { + "epoch": 0.11611328125, + "grad_norm": 0.4048875868320465, + "learning_rate": 0.0004883289418951943, + "loss": 4.7812, + "step": 1189 + }, + { + "epoch": 0.1162109375, + "grad_norm": 0.39005008339881897, + "learning_rate": 0.0004883066618793083, + "loss": 4.7891, + "step": 1190 + }, + { + "epoch": 0.11630859375, + "grad_norm": 0.38380053639411926, + "learning_rate": 0.0004882843611847892, + "loss": 4.7852, + "step": 1191 + }, + { + "epoch": 0.11640625, + "grad_norm": 0.35080215334892273, + "learning_rate": 0.0004882620398137988, + "loss": 4.8125, + "step": 1192 + }, + { + "epoch": 0.11650390625, + "grad_norm": 0.3683309555053711, + "learning_rate": 0.00048823969776850103, + "loss": 4.8086, + "step": 1193 + }, + { + "epoch": 0.1166015625, + "grad_norm": 0.39639347791671753, + "learning_rate": 0.00048821733505106186, + "loss": 4.8164, + "step": 1194 + }, + { + "epoch": 0.11669921875, + "grad_norm": 0.41327810287475586, + "learning_rate": 
0.0004881949516636491, + "loss": 4.7539, + "step": 1195 + }, + { + "epoch": 0.116796875, + "grad_norm": 0.4666888117790222, + "learning_rate": 0.0004881725476084328, + "loss": 4.7539, + "step": 1196 + }, + { + "epoch": 0.11689453125, + "grad_norm": 0.6009286046028137, + "learning_rate": 0.00048815012288758484, + "loss": 4.8086, + "step": 1197 + }, + { + "epoch": 0.1169921875, + "grad_norm": 0.6095237731933594, + "learning_rate": 0.00048812767750327905, + "loss": 4.8203, + "step": 1198 + }, + { + "epoch": 0.11708984375, + "grad_norm": 0.5841214656829834, + "learning_rate": 0.0004881052114576915, + "loss": 4.7852, + "step": 1199 + }, + { + "epoch": 0.1171875, + "grad_norm": 0.5666095614433289, + "learning_rate": 0.00048808272475299994, + "loss": 4.8164, + "step": 1200 + }, + { + "epoch": 0.11728515625, + "grad_norm": 0.4737398624420166, + "learning_rate": 0.00048806021739138453, + "loss": 4.7891, + "step": 1201 + }, + { + "epoch": 0.1173828125, + "grad_norm": 0.48223602771759033, + "learning_rate": 0.000488037689375027, + "loss": 4.7812, + "step": 1202 + }, + { + "epoch": 0.11748046875, + "grad_norm": 0.5186586976051331, + "learning_rate": 0.00048801514070611143, + "loss": 4.7812, + "step": 1203 + }, + { + "epoch": 0.117578125, + "grad_norm": 0.499870628118515, + "learning_rate": 0.0004879925713868236, + "loss": 4.7812, + "step": 1204 + }, + { + "epoch": 0.11767578125, + "grad_norm": 0.42764419317245483, + "learning_rate": 0.0004879699814193517, + "loss": 4.7617, + "step": 1205 + }, + { + "epoch": 0.1177734375, + "grad_norm": 0.42484956979751587, + "learning_rate": 0.0004879473708058855, + "loss": 4.7852, + "step": 1206 + }, + { + "epoch": 0.11787109375, + "grad_norm": 0.35906827449798584, + "learning_rate": 0.00048792473954861694, + "loss": 4.7852, + "step": 1207 + }, + { + "epoch": 0.11796875, + "grad_norm": 0.32980072498321533, + "learning_rate": 0.00048790208764973997, + "loss": 4.7695, + "step": 1208 + }, + { + "epoch": 0.11806640625, + "grad_norm": 0.33791640400886536, + "learning_rate": 0.0004878794151114507, + "loss": 4.8008, + "step": 1209 + }, + { + "epoch": 0.1181640625, + "grad_norm": 0.30102401971817017, + "learning_rate": 0.0004878567219359469, + "loss": 4.7852, + "step": 1210 + }, + { + "epoch": 0.11826171875, + "grad_norm": 0.32327136397361755, + "learning_rate": 0.0004878340081254285, + "loss": 4.7852, + "step": 1211 + }, + { + "epoch": 0.118359375, + "grad_norm": 0.37602269649505615, + "learning_rate": 0.0004878112736820976, + "loss": 4.7852, + "step": 1212 + }, + { + "epoch": 0.11845703125, + "grad_norm": 0.4111282229423523, + "learning_rate": 0.000487788518608158, + "loss": 4.7734, + "step": 1213 + }, + { + "epoch": 0.1185546875, + "grad_norm": 0.4097578525543213, + "learning_rate": 0.0004877657429058158, + "loss": 4.7852, + "step": 1214 + }, + { + "epoch": 0.11865234375, + "grad_norm": 0.3875778615474701, + "learning_rate": 0.0004877429465772788, + "loss": 4.7734, + "step": 1215 + }, + { + "epoch": 0.11875, + "grad_norm": 0.372007817029953, + "learning_rate": 0.000487720129624757, + "loss": 4.7852, + "step": 1216 + }, + { + "epoch": 0.11884765625, + "grad_norm": 0.3939751088619232, + "learning_rate": 0.00048769729205046247, + "loss": 4.7773, + "step": 1217 + }, + { + "epoch": 0.1189453125, + "grad_norm": 0.4261675775051117, + "learning_rate": 0.000487674433856609, + "loss": 4.7812, + "step": 1218 + }, + { + "epoch": 0.11904296875, + "grad_norm": 0.5432133078575134, + "learning_rate": 0.00048765155504541265, + "loss": 4.8086, + "step": 1219 + }, + { + "epoch": 
0.119140625, + "grad_norm": 0.5995720624923706, + "learning_rate": 0.0004876286556190912, + "loss": 4.8125, + "step": 1220 + }, + { + "epoch": 0.11923828125, + "grad_norm": 0.6350622773170471, + "learning_rate": 0.00048760573557986476, + "loss": 4.7812, + "step": 1221 + }, + { + "epoch": 0.1193359375, + "grad_norm": 0.5896903872489929, + "learning_rate": 0.00048758279492995527, + "loss": 4.7852, + "step": 1222 + }, + { + "epoch": 0.11943359375, + "grad_norm": 0.4300802946090698, + "learning_rate": 0.0004875598336715865, + "loss": 4.7773, + "step": 1223 + }, + { + "epoch": 0.11953125, + "grad_norm": 0.45555663108825684, + "learning_rate": 0.00048753685180698465, + "loss": 4.7852, + "step": 1224 + }, + { + "epoch": 0.11962890625, + "grad_norm": 0.42764827609062195, + "learning_rate": 0.00048751384933837737, + "loss": 4.7812, + "step": 1225 + }, + { + "epoch": 0.1197265625, + "grad_norm": 0.37654969096183777, + "learning_rate": 0.0004874908262679948, + "loss": 4.7891, + "step": 1226 + }, + { + "epoch": 0.11982421875, + "grad_norm": 0.32424551248550415, + "learning_rate": 0.00048746778259806876, + "loss": 4.8086, + "step": 1227 + }, + { + "epoch": 0.119921875, + "grad_norm": 0.35854217410087585, + "learning_rate": 0.0004874447183308333, + "loss": 4.8086, + "step": 1228 + }, + { + "epoch": 0.12001953125, + "grad_norm": 0.38739636540412903, + "learning_rate": 0.0004874216334685242, + "loss": 4.7812, + "step": 1229 + }, + { + "epoch": 0.1201171875, + "grad_norm": 0.38605567812919617, + "learning_rate": 0.0004873985280133795, + "loss": 4.8086, + "step": 1230 + }, + { + "epoch": 0.12021484375, + "grad_norm": 0.36838725209236145, + "learning_rate": 0.00048737540196763904, + "loss": 4.7812, + "step": 1231 + }, + { + "epoch": 0.1203125, + "grad_norm": 0.3206044137477875, + "learning_rate": 0.0004873522553335447, + "loss": 4.7969, + "step": 1232 + }, + { + "epoch": 0.12041015625, + "grad_norm": 0.3341485261917114, + "learning_rate": 0.00048732908811334046, + "loss": 4.793, + "step": 1233 + }, + { + "epoch": 0.1205078125, + "grad_norm": 0.38392874598503113, + "learning_rate": 0.00048730590030927217, + "loss": 4.7852, + "step": 1234 + }, + { + "epoch": 0.12060546875, + "grad_norm": 0.42469480633735657, + "learning_rate": 0.0004872826919235879, + "loss": 4.7578, + "step": 1235 + }, + { + "epoch": 0.120703125, + "grad_norm": 0.5113615989685059, + "learning_rate": 0.00048725946295853737, + "loss": 4.8008, + "step": 1236 + }, + { + "epoch": 0.12080078125, + "grad_norm": 0.5344850420951843, + "learning_rate": 0.0004872362134163724, + "loss": 4.7969, + "step": 1237 + }, + { + "epoch": 0.1208984375, + "grad_norm": 0.4661124050617218, + "learning_rate": 0.0004872129432993471, + "loss": 4.8008, + "step": 1238 + }, + { + "epoch": 0.12099609375, + "grad_norm": 0.45510703325271606, + "learning_rate": 0.00048718965260971726, + "loss": 4.7812, + "step": 1239 + }, + { + "epoch": 0.12109375, + "grad_norm": 0.4054417610168457, + "learning_rate": 0.0004871663413497407, + "loss": 4.8203, + "step": 1240 + }, + { + "epoch": 0.12119140625, + "grad_norm": 0.450013130903244, + "learning_rate": 0.0004871430095216773, + "loss": 4.7812, + "step": 1241 + }, + { + "epoch": 0.1212890625, + "grad_norm": 0.46598708629608154, + "learning_rate": 0.000487119657127789, + "loss": 4.832, + "step": 1242 + }, + { + "epoch": 0.12138671875, + "grad_norm": 0.4022948741912842, + "learning_rate": 0.00048709628417033956, + "loss": 4.7891, + "step": 1243 + }, + { + "epoch": 0.121484375, + "grad_norm": 0.3764280378818512, + "learning_rate": 
0.00048707289065159486, + "loss": 4.7852, + "step": 1244 + }, + { + "epoch": 0.12158203125, + "grad_norm": 0.3581683039665222, + "learning_rate": 0.0004870494765738228, + "loss": 4.7578, + "step": 1245 + }, + { + "epoch": 0.1216796875, + "grad_norm": 0.3513264060020447, + "learning_rate": 0.0004870260419392931, + "loss": 4.7734, + "step": 1246 + }, + { + "epoch": 0.12177734375, + "grad_norm": 0.3424482047557831, + "learning_rate": 0.00048700258675027776, + "loss": 4.8125, + "step": 1247 + }, + { + "epoch": 0.121875, + "grad_norm": 0.3603288233280182, + "learning_rate": 0.0004869791110090504, + "loss": 4.7969, + "step": 1248 + }, + { + "epoch": 0.12197265625, + "grad_norm": 0.3708992600440979, + "learning_rate": 0.00048695561471788696, + "loss": 4.7734, + "step": 1249 + }, + { + "epoch": 0.1220703125, + "grad_norm": 0.4168321490287781, + "learning_rate": 0.0004869320978790653, + "loss": 4.8008, + "step": 1250 + }, + { + "epoch": 0.12216796875, + "grad_norm": 0.4125996530056, + "learning_rate": 0.0004869085604948651, + "loss": 4.7734, + "step": 1251 + }, + { + "epoch": 0.122265625, + "grad_norm": 0.42782822251319885, + "learning_rate": 0.0004868850025675681, + "loss": 4.7656, + "step": 1252 + }, + { + "epoch": 0.12236328125, + "grad_norm": 0.34903016686439514, + "learning_rate": 0.0004868614240994583, + "loss": 4.7812, + "step": 1253 + }, + { + "epoch": 0.1224609375, + "grad_norm": 0.37179067730903625, + "learning_rate": 0.00048683782509282127, + "loss": 4.7578, + "step": 1254 + }, + { + "epoch": 0.12255859375, + "grad_norm": 0.3796963393688202, + "learning_rate": 0.0004868142055499448, + "loss": 4.7617, + "step": 1255 + }, + { + "epoch": 0.12265625, + "grad_norm": 0.4198116362094879, + "learning_rate": 0.0004867905654731187, + "loss": 4.8047, + "step": 1256 + }, + { + "epoch": 0.12275390625, + "grad_norm": 0.4762858748435974, + "learning_rate": 0.00048676690486463474, + "loss": 4.7617, + "step": 1257 + }, + { + "epoch": 0.1228515625, + "grad_norm": 0.5248888731002808, + "learning_rate": 0.0004867432237267867, + "loss": 4.7383, + "step": 1258 + }, + { + "epoch": 0.12294921875, + "grad_norm": 0.547907829284668, + "learning_rate": 0.00048671952206187007, + "loss": 4.8047, + "step": 1259 + }, + { + "epoch": 0.123046875, + "grad_norm": 0.42803826928138733, + "learning_rate": 0.00048669579987218285, + "loss": 4.7969, + "step": 1260 + }, + { + "epoch": 0.12314453125, + "grad_norm": 0.322132408618927, + "learning_rate": 0.00048667205716002455, + "loss": 4.7852, + "step": 1261 + }, + { + "epoch": 0.1232421875, + "grad_norm": 0.38514775037765503, + "learning_rate": 0.0004866482939276969, + "loss": 4.7695, + "step": 1262 + }, + { + "epoch": 0.12333984375, + "grad_norm": 0.4669879674911499, + "learning_rate": 0.00048662451017750377, + "loss": 4.7969, + "step": 1263 + }, + { + "epoch": 0.1234375, + "grad_norm": 0.47549909353256226, + "learning_rate": 0.0004866007059117505, + "loss": 4.7773, + "step": 1264 + }, + { + "epoch": 0.12353515625, + "grad_norm": 0.3875703513622284, + "learning_rate": 0.00048657688113274507, + "loss": 4.7852, + "step": 1265 + }, + { + "epoch": 0.1236328125, + "grad_norm": 0.31729233264923096, + "learning_rate": 0.00048655303584279686, + "loss": 4.7773, + "step": 1266 + }, + { + "epoch": 0.12373046875, + "grad_norm": 0.33539122343063354, + "learning_rate": 0.0004865291700442177, + "loss": 4.7617, + "step": 1267 + }, + { + "epoch": 0.123828125, + "grad_norm": 0.3871065676212311, + "learning_rate": 0.0004865052837393212, + "loss": 4.7773, + "step": 1268 + }, + { + "epoch": 
0.12392578125, + "grad_norm": 0.40682950615882874, + "learning_rate": 0.00048648137693042283, + "loss": 4.7812, + "step": 1269 + }, + { + "epoch": 0.1240234375, + "grad_norm": 0.4068271219730377, + "learning_rate": 0.0004864574496198404, + "loss": 4.8008, + "step": 1270 + }, + { + "epoch": 0.12412109375, + "grad_norm": 0.3539895713329315, + "learning_rate": 0.0004864335018098933, + "loss": 4.7891, + "step": 1271 + }, + { + "epoch": 0.12421875, + "grad_norm": 0.34347373247146606, + "learning_rate": 0.00048640953350290324, + "loss": 4.7773, + "step": 1272 + }, + { + "epoch": 0.12431640625, + "grad_norm": 0.40734362602233887, + "learning_rate": 0.0004863855447011938, + "loss": 4.7734, + "step": 1273 + }, + { + "epoch": 0.1244140625, + "grad_norm": 0.4735758900642395, + "learning_rate": 0.00048636153540709045, + "loss": 4.7656, + "step": 1274 + }, + { + "epoch": 0.12451171875, + "grad_norm": 0.5711696147918701, + "learning_rate": 0.0004863375056229208, + "loss": 4.8203, + "step": 1275 + }, + { + "epoch": 0.124609375, + "grad_norm": 0.5063640475273132, + "learning_rate": 0.00048631345535101426, + "loss": 4.7617, + "step": 1276 + }, + { + "epoch": 0.12470703125, + "grad_norm": 0.35711005330085754, + "learning_rate": 0.0004862893845937024, + "loss": 4.8125, + "step": 1277 + }, + { + "epoch": 0.1248046875, + "grad_norm": 0.3374655842781067, + "learning_rate": 0.0004862652933533188, + "loss": 4.7734, + "step": 1278 + }, + { + "epoch": 0.12490234375, + "grad_norm": 0.3303259015083313, + "learning_rate": 0.00048624118163219875, + "loss": 4.7812, + "step": 1279 + }, + { + "epoch": 0.125, + "grad_norm": 0.31290751695632935, + "learning_rate": 0.00048621704943267995, + "loss": 4.7539, + "step": 1280 + }, + { + "epoch": 0.12509765625, + "grad_norm": 0.3193962574005127, + "learning_rate": 0.00048619289675710177, + "loss": 4.7578, + "step": 1281 + }, + { + "epoch": 0.1251953125, + "grad_norm": 0.2991279065608978, + "learning_rate": 0.0004861687236078055, + "loss": 4.7734, + "step": 1282 + }, + { + "epoch": 0.12529296875, + "grad_norm": 0.3635973334312439, + "learning_rate": 0.0004861445299871348, + "loss": 4.7344, + "step": 1283 + }, + { + "epoch": 0.125390625, + "grad_norm": 0.4499048888683319, + "learning_rate": 0.0004861203158974349, + "loss": 4.7812, + "step": 1284 + }, + { + "epoch": 0.12548828125, + "grad_norm": 0.6076017022132874, + "learning_rate": 0.00048609608134105324, + "loss": 4.7773, + "step": 1285 + }, + { + "epoch": 0.1255859375, + "grad_norm": 0.6585017442703247, + "learning_rate": 0.0004860718263203393, + "loss": 4.7812, + "step": 1286 + }, + { + "epoch": 0.12568359375, + "grad_norm": 0.48385024070739746, + "learning_rate": 0.0004860475508376442, + "loss": 4.7695, + "step": 1287 + }, + { + "epoch": 0.12578125, + "grad_norm": 0.3241603374481201, + "learning_rate": 0.00048602325489532146, + "loss": 4.7617, + "step": 1288 + }, + { + "epoch": 0.12587890625, + "grad_norm": 0.4593919813632965, + "learning_rate": 0.00048599893849572646, + "loss": 4.7734, + "step": 1289 + }, + { + "epoch": 0.1259765625, + "grad_norm": 0.46703073382377625, + "learning_rate": 0.00048597460164121636, + "loss": 4.7617, + "step": 1290 + }, + { + "epoch": 0.12607421875, + "grad_norm": 0.3566134572029114, + "learning_rate": 0.00048595024433415054, + "loss": 4.7695, + "step": 1291 + }, + { + "epoch": 0.126171875, + "grad_norm": 0.3164132833480835, + "learning_rate": 0.0004859258665768903, + "loss": 4.793, + "step": 1292 + }, + { + "epoch": 0.12626953125, + "grad_norm": 0.37039509415626526, + "learning_rate": 
0.00048590146837179876, + "loss": 4.7773, + "step": 1293 + }, + { + "epoch": 0.1263671875, + "grad_norm": 0.37872517108917236, + "learning_rate": 0.00048587704972124135, + "loss": 4.8164, + "step": 1294 + }, + { + "epoch": 0.12646484375, + "grad_norm": 0.35898569226264954, + "learning_rate": 0.0004858526106275851, + "loss": 4.7773, + "step": 1295 + }, + { + "epoch": 0.1265625, + "grad_norm": 0.33450818061828613, + "learning_rate": 0.00048582815109319936, + "loss": 4.7734, + "step": 1296 + }, + { + "epoch": 0.12666015625, + "grad_norm": 0.3074091970920563, + "learning_rate": 0.0004858036711204553, + "loss": 4.7656, + "step": 1297 + }, + { + "epoch": 0.1267578125, + "grad_norm": 0.32790830731391907, + "learning_rate": 0.000485779170711726, + "loss": 4.7852, + "step": 1298 + }, + { + "epoch": 0.12685546875, + "grad_norm": 0.33045750856399536, + "learning_rate": 0.00048575464986938674, + "loss": 4.7383, + "step": 1299 + }, + { + "epoch": 0.126953125, + "grad_norm": 0.3500734269618988, + "learning_rate": 0.0004857301085958145, + "loss": 4.7578, + "step": 1300 + }, + { + "epoch": 0.12705078125, + "grad_norm": 0.39776694774627686, + "learning_rate": 0.0004857055468933885, + "loss": 4.7617, + "step": 1301 + }, + { + "epoch": 0.1271484375, + "grad_norm": 0.36641398072242737, + "learning_rate": 0.0004856809647644897, + "loss": 4.7773, + "step": 1302 + }, + { + "epoch": 0.12724609375, + "grad_norm": 0.3984200656414032, + "learning_rate": 0.00048565636221150135, + "loss": 4.7852, + "step": 1303 + }, + { + "epoch": 0.12734375, + "grad_norm": 0.40479159355163574, + "learning_rate": 0.0004856317392368084, + "loss": 4.7578, + "step": 1304 + }, + { + "epoch": 0.12744140625, + "grad_norm": 0.38542646169662476, + "learning_rate": 0.0004856070958427979, + "loss": 4.7969, + "step": 1305 + }, + { + "epoch": 0.1275390625, + "grad_norm": 0.41619765758514404, + "learning_rate": 0.0004855824320318589, + "loss": 4.7578, + "step": 1306 + }, + { + "epoch": 0.12763671875, + "grad_norm": 0.5476976633071899, + "learning_rate": 0.0004855577478063822, + "loss": 4.7656, + "step": 1307 + }, + { + "epoch": 0.127734375, + "grad_norm": 0.7227765917778015, + "learning_rate": 0.000485533043168761, + "loss": 4.7734, + "step": 1308 + }, + { + "epoch": 0.12783203125, + "grad_norm": 0.6441882848739624, + "learning_rate": 0.0004855083181213902, + "loss": 4.7695, + "step": 1309 + }, + { + "epoch": 0.1279296875, + "grad_norm": 0.5060856938362122, + "learning_rate": 0.00048548357266666657, + "loss": 4.793, + "step": 1310 + }, + { + "epoch": 0.12802734375, + "grad_norm": 0.46759769320487976, + "learning_rate": 0.0004854588068069892, + "loss": 4.7773, + "step": 1311 + }, + { + "epoch": 0.128125, + "grad_norm": 0.40848907828330994, + "learning_rate": 0.0004854340205447589, + "loss": 4.7578, + "step": 1312 + }, + { + "epoch": 0.12822265625, + "grad_norm": 0.3958721160888672, + "learning_rate": 0.00048540921388237856, + "loss": 4.7773, + "step": 1313 + }, + { + "epoch": 0.1283203125, + "grad_norm": 0.4191996157169342, + "learning_rate": 0.0004853843868222529, + "loss": 4.7891, + "step": 1314 + }, + { + "epoch": 0.12841796875, + "grad_norm": 0.38634946942329407, + "learning_rate": 0.00048535953936678885, + "loss": 4.8047, + "step": 1315 + }, + { + "epoch": 0.128515625, + "grad_norm": 0.36390602588653564, + "learning_rate": 0.00048533467151839517, + "loss": 4.7656, + "step": 1316 + }, + { + "epoch": 0.12861328125, + "grad_norm": 0.32959216833114624, + "learning_rate": 0.0004853097832794827, + "loss": 4.7461, + "step": 1317 + }, + { + "epoch": 
0.1287109375, + "grad_norm": 0.3805353045463562, + "learning_rate": 0.000485284874652464, + "loss": 4.7695, + "step": 1318 + }, + { + "epoch": 0.12880859375, + "grad_norm": 0.37332767248153687, + "learning_rate": 0.000485259945639754, + "loss": 4.7734, + "step": 1319 + }, + { + "epoch": 0.12890625, + "grad_norm": 0.3370415270328522, + "learning_rate": 0.00048523499624376925, + "loss": 4.7695, + "step": 1320 + }, + { + "epoch": 0.12900390625, + "grad_norm": 0.32865846157073975, + "learning_rate": 0.00048521002646692855, + "loss": 4.8008, + "step": 1321 + }, + { + "epoch": 0.1291015625, + "grad_norm": 0.35084402561187744, + "learning_rate": 0.0004851850363116524, + "loss": 4.7656, + "step": 1322 + }, + { + "epoch": 0.12919921875, + "grad_norm": 0.38480809330940247, + "learning_rate": 0.0004851600257803636, + "loss": 4.7734, + "step": 1323 + }, + { + "epoch": 0.129296875, + "grad_norm": 0.36452239751815796, + "learning_rate": 0.00048513499487548665, + "loss": 4.7461, + "step": 1324 + }, + { + "epoch": 0.12939453125, + "grad_norm": 0.36055848002433777, + "learning_rate": 0.00048510994359944804, + "loss": 4.7656, + "step": 1325 + }, + { + "epoch": 0.1294921875, + "grad_norm": 0.3260059952735901, + "learning_rate": 0.00048508487195467653, + "loss": 4.7344, + "step": 1326 + }, + { + "epoch": 0.12958984375, + "grad_norm": 0.339093953371048, + "learning_rate": 0.0004850597799436025, + "loss": 4.7617, + "step": 1327 + }, + { + "epoch": 0.1296875, + "grad_norm": 0.40155380964279175, + "learning_rate": 0.00048503466756865847, + "loss": 4.7695, + "step": 1328 + }, + { + "epoch": 0.12978515625, + "grad_norm": 0.578285813331604, + "learning_rate": 0.00048500953483227895, + "loss": 4.7344, + "step": 1329 + }, + { + "epoch": 0.1298828125, + "grad_norm": 0.7531477212905884, + "learning_rate": 0.0004849843817369003, + "loss": 4.7812, + "step": 1330 + }, + { + "epoch": 0.12998046875, + "grad_norm": 0.6945119500160217, + "learning_rate": 0.0004849592082849611, + "loss": 4.7617, + "step": 1331 + }, + { + "epoch": 0.130078125, + "grad_norm": 0.40566393733024597, + "learning_rate": 0.0004849340144789016, + "loss": 4.7969, + "step": 1332 + }, + { + "epoch": 0.13017578125, + "grad_norm": 0.4642469584941864, + "learning_rate": 0.00048490880032116425, + "loss": 4.7539, + "step": 1333 + }, + { + "epoch": 0.1302734375, + "grad_norm": 0.5508970618247986, + "learning_rate": 0.0004848835658141934, + "loss": 4.7539, + "step": 1334 + }, + { + "epoch": 0.13037109375, + "grad_norm": 0.4666309356689453, + "learning_rate": 0.00048485831096043526, + "loss": 4.8047, + "step": 1335 + }, + { + "epoch": 0.13046875, + "grad_norm": 0.3999099135398865, + "learning_rate": 0.0004848330357623382, + "loss": 4.7617, + "step": 1336 + }, + { + "epoch": 0.13056640625, + "grad_norm": 0.41524437069892883, + "learning_rate": 0.0004848077402223524, + "loss": 4.75, + "step": 1337 + }, + { + "epoch": 0.1306640625, + "grad_norm": 0.37947431206703186, + "learning_rate": 0.0004847824243429302, + "loss": 4.7734, + "step": 1338 + }, + { + "epoch": 0.13076171875, + "grad_norm": 0.39928561449050903, + "learning_rate": 0.0004847570881265259, + "loss": 4.7852, + "step": 1339 + }, + { + "epoch": 0.130859375, + "grad_norm": 0.3333352208137512, + "learning_rate": 0.0004847317315755953, + "loss": 4.7891, + "step": 1340 + }, + { + "epoch": 0.13095703125, + "grad_norm": 0.3555909991264343, + "learning_rate": 0.00048470635469259697, + "loss": 4.7344, + "step": 1341 + }, + { + "epoch": 0.1310546875, + "grad_norm": 0.3695686459541321, + "learning_rate": 
0.00048468095747999067, + "loss": 4.75, + "step": 1342 + }, + { + "epoch": 0.13115234375, + "grad_norm": 0.32535481452941895, + "learning_rate": 0.00048465553994023875, + "loss": 4.7852, + "step": 1343 + }, + { + "epoch": 0.13125, + "grad_norm": 0.3166685402393341, + "learning_rate": 0.00048463010207580517, + "loss": 4.7656, + "step": 1344 + }, + { + "epoch": 0.13134765625, + "grad_norm": 0.30036693811416626, + "learning_rate": 0.000484604643889156, + "loss": 4.7773, + "step": 1345 + }, + { + "epoch": 0.1314453125, + "grad_norm": 0.3111598491668701, + "learning_rate": 0.0004845791653827591, + "loss": 4.7695, + "step": 1346 + }, + { + "epoch": 0.13154296875, + "grad_norm": 0.3160509765148163, + "learning_rate": 0.00048455366655908455, + "loss": 4.7695, + "step": 1347 + }, + { + "epoch": 0.131640625, + "grad_norm": 0.3423873782157898, + "learning_rate": 0.0004845281474206043, + "loss": 4.7422, + "step": 1348 + }, + { + "epoch": 0.13173828125, + "grad_norm": 0.34536194801330566, + "learning_rate": 0.00048450260796979223, + "loss": 4.7891, + "step": 1349 + }, + { + "epoch": 0.1318359375, + "grad_norm": 0.347699373960495, + "learning_rate": 0.0004844770482091242, + "loss": 4.7773, + "step": 1350 + }, + { + "epoch": 0.13193359375, + "grad_norm": 0.3164156675338745, + "learning_rate": 0.00048445146814107804, + "loss": 4.7656, + "step": 1351 + }, + { + "epoch": 0.13203125, + "grad_norm": 0.2843954265117645, + "learning_rate": 0.00048442586776813363, + "loss": 4.7695, + "step": 1352 + }, + { + "epoch": 0.13212890625, + "grad_norm": 0.34582918882369995, + "learning_rate": 0.00048440024709277274, + "loss": 4.7734, + "step": 1353 + }, + { + "epoch": 0.1322265625, + "grad_norm": 0.4413771331310272, + "learning_rate": 0.00048437460611747916, + "loss": 4.7461, + "step": 1354 + }, + { + "epoch": 0.13232421875, + "grad_norm": 0.4797145426273346, + "learning_rate": 0.0004843489448447385, + "loss": 4.793, + "step": 1355 + }, + { + "epoch": 0.132421875, + "grad_norm": 0.47246068716049194, + "learning_rate": 0.0004843232632770384, + "loss": 4.8008, + "step": 1356 + }, + { + "epoch": 0.13251953125, + "grad_norm": 0.39250800013542175, + "learning_rate": 0.00048429756141686863, + "loss": 4.7773, + "step": 1357 + }, + { + "epoch": 0.1326171875, + "grad_norm": 0.3332385718822479, + "learning_rate": 0.00048427183926672083, + "loss": 4.7773, + "step": 1358 + }, + { + "epoch": 0.13271484375, + "grad_norm": 0.3847937285900116, + "learning_rate": 0.00048424609682908856, + "loss": 4.7617, + "step": 1359 + }, + { + "epoch": 0.1328125, + "grad_norm": 0.3584742546081543, + "learning_rate": 0.0004842203341064673, + "loss": 4.7734, + "step": 1360 + }, + { + "epoch": 0.13291015625, + "grad_norm": 0.34857839345932007, + "learning_rate": 0.0004841945511013547, + "loss": 4.7344, + "step": 1361 + }, + { + "epoch": 0.1330078125, + "grad_norm": 0.3411064147949219, + "learning_rate": 0.00048416874781625016, + "loss": 4.793, + "step": 1362 + }, + { + "epoch": 0.13310546875, + "grad_norm": 0.3658686578273773, + "learning_rate": 0.00048414292425365507, + "loss": 4.7617, + "step": 1363 + }, + { + "epoch": 0.133203125, + "grad_norm": 0.40414950251579285, + "learning_rate": 0.00048411708041607305, + "loss": 4.7539, + "step": 1364 + }, + { + "epoch": 0.13330078125, + "grad_norm": 0.37495216727256775, + "learning_rate": 0.0004840912163060093, + "loss": 4.7773, + "step": 1365 + }, + { + "epoch": 0.1333984375, + "grad_norm": 0.40422677993774414, + "learning_rate": 0.00048406533192597124, + "loss": 4.7539, + "step": 1366 + }, + { + "epoch": 
0.13349609375, + "grad_norm": 0.454258531332016, + "learning_rate": 0.0004840394272784682, + "loss": 4.7812, + "step": 1367 + }, + { + "epoch": 0.13359375, + "grad_norm": 0.39893728494644165, + "learning_rate": 0.00048401350236601146, + "loss": 4.7695, + "step": 1368 + }, + { + "epoch": 0.13369140625, + "grad_norm": 0.3277254104614258, + "learning_rate": 0.00048398755719111417, + "loss": 4.8086, + "step": 1369 + }, + { + "epoch": 0.1337890625, + "grad_norm": 0.436477392911911, + "learning_rate": 0.00048396159175629174, + "loss": 4.7461, + "step": 1370 + }, + { + "epoch": 0.13388671875, + "grad_norm": 0.33649933338165283, + "learning_rate": 0.00048393560606406114, + "loss": 4.7539, + "step": 1371 + }, + { + "epoch": 0.133984375, + "grad_norm": 0.34157708287239075, + "learning_rate": 0.0004839096001169416, + "loss": 4.7539, + "step": 1372 + }, + { + "epoch": 0.13408203125, + "grad_norm": 0.4028213322162628, + "learning_rate": 0.0004838835739174541, + "loss": 4.7773, + "step": 1373 + }, + { + "epoch": 0.1341796875, + "grad_norm": 0.46322396397590637, + "learning_rate": 0.0004838575274681219, + "loss": 4.7617, + "step": 1374 + }, + { + "epoch": 0.13427734375, + "grad_norm": 0.5127856731414795, + "learning_rate": 0.0004838314607714699, + "loss": 4.7266, + "step": 1375 + }, + { + "epoch": 0.134375, + "grad_norm": 0.5536565780639648, + "learning_rate": 0.00048380537383002517, + "loss": 4.75, + "step": 1376 + }, + { + "epoch": 0.13447265625, + "grad_norm": 0.5256456732749939, + "learning_rate": 0.0004837792666463166, + "loss": 4.7695, + "step": 1377 + }, + { + "epoch": 0.1345703125, + "grad_norm": 0.5643973350524902, + "learning_rate": 0.00048375313922287505, + "loss": 4.7656, + "step": 1378 + }, + { + "epoch": 0.13466796875, + "grad_norm": 0.8128441572189331, + "learning_rate": 0.00048372699156223355, + "loss": 4.7734, + "step": 1379 + }, + { + "epoch": 0.134765625, + "grad_norm": 1.026166558265686, + "learning_rate": 0.0004837008236669268, + "loss": 4.7227, + "step": 1380 + }, + { + "epoch": 0.13486328125, + "grad_norm": 0.4733608365058899, + "learning_rate": 0.00048367463553949166, + "loss": 4.7734, + "step": 1381 + }, + { + "epoch": 0.1349609375, + "grad_norm": 0.46688416600227356, + "learning_rate": 0.00048364842718246685, + "loss": 4.7461, + "step": 1382 + }, + { + "epoch": 0.13505859375, + "grad_norm": 0.42120811343193054, + "learning_rate": 0.00048362219859839317, + "loss": 4.7734, + "step": 1383 + }, + { + "epoch": 0.13515625, + "grad_norm": 0.4719356298446655, + "learning_rate": 0.0004835959497898133, + "loss": 4.7695, + "step": 1384 + }, + { + "epoch": 0.13525390625, + "grad_norm": 0.5680752992630005, + "learning_rate": 0.0004835696807592718, + "loss": 4.7383, + "step": 1385 + }, + { + "epoch": 0.1353515625, + "grad_norm": 0.7138311862945557, + "learning_rate": 0.0004835433915093153, + "loss": 4.7812, + "step": 1386 + }, + { + "epoch": 0.13544921875, + "grad_norm": 0.8844993114471436, + "learning_rate": 0.00048351708204249247, + "loss": 4.7383, + "step": 1387 + }, + { + "epoch": 0.135546875, + "grad_norm": 1.3054742813110352, + "learning_rate": 0.00048349075236135366, + "loss": 4.7773, + "step": 1388 + }, + { + "epoch": 0.13564453125, + "grad_norm": 0.5796347260475159, + "learning_rate": 0.0004834644024684515, + "loss": 4.7578, + "step": 1389 + }, + { + "epoch": 0.1357421875, + "grad_norm": 0.5956249833106995, + "learning_rate": 0.00048343803236634023, + "loss": 4.7461, + "step": 1390 + }, + { + "epoch": 0.13583984375, + "grad_norm": 0.53680819272995, + "learning_rate": 
0.00048341164205757654, + "loss": 4.7773, + "step": 1391 + }, + { + "epoch": 0.1359375, + "grad_norm": 0.4875905513763428, + "learning_rate": 0.0004833852315447186, + "loss": 4.75, + "step": 1392 + }, + { + "epoch": 0.13603515625, + "grad_norm": 0.4100656807422638, + "learning_rate": 0.0004833588008303267, + "loss": 4.7305, + "step": 1393 + }, + { + "epoch": 0.1361328125, + "grad_norm": 0.344696044921875, + "learning_rate": 0.00048333234991696335, + "loss": 4.7891, + "step": 1394 + }, + { + "epoch": 0.13623046875, + "grad_norm": 0.37795841693878174, + "learning_rate": 0.0004833058788071925, + "loss": 4.7383, + "step": 1395 + }, + { + "epoch": 0.136328125, + "grad_norm": 0.41078394651412964, + "learning_rate": 0.0004832793875035805, + "loss": 4.7148, + "step": 1396 + }, + { + "epoch": 0.13642578125, + "grad_norm": 0.3967403471469879, + "learning_rate": 0.0004832528760086956, + "loss": 4.7734, + "step": 1397 + }, + { + "epoch": 0.1365234375, + "grad_norm": 0.3479665517807007, + "learning_rate": 0.00048322634432510766, + "loss": 4.7539, + "step": 1398 + }, + { + "epoch": 0.13662109375, + "grad_norm": 0.3690146803855896, + "learning_rate": 0.00048319979245538887, + "loss": 4.7695, + "step": 1399 + }, + { + "epoch": 0.13671875, + "grad_norm": 0.3363431394100189, + "learning_rate": 0.0004831732204021134, + "loss": 4.7734, + "step": 1400 + }, + { + "epoch": 0.13681640625, + "grad_norm": 0.32045620679855347, + "learning_rate": 0.00048314662816785687, + "loss": 4.7734, + "step": 1401 + }, + { + "epoch": 0.1369140625, + "grad_norm": 0.3061015009880066, + "learning_rate": 0.00048312001575519757, + "loss": 4.7656, + "step": 1402 + }, + { + "epoch": 0.13701171875, + "grad_norm": 0.38208630681037903, + "learning_rate": 0.0004830933831667152, + "loss": 4.7539, + "step": 1403 + }, + { + "epoch": 0.137109375, + "grad_norm": 0.3805338442325592, + "learning_rate": 0.0004830667304049918, + "loss": 4.7461, + "step": 1404 + }, + { + "epoch": 0.13720703125, + "grad_norm": 0.5099437236785889, + "learning_rate": 0.0004830400574726109, + "loss": 4.7812, + "step": 1405 + }, + { + "epoch": 0.1373046875, + "grad_norm": 0.6449255347251892, + "learning_rate": 0.00048301336437215844, + "loss": 4.7812, + "step": 1406 + }, + { + "epoch": 0.13740234375, + "grad_norm": 0.49494004249572754, + "learning_rate": 0.0004829866511062221, + "loss": 4.7734, + "step": 1407 + }, + { + "epoch": 0.1375, + "grad_norm": 0.34257587790489197, + "learning_rate": 0.0004829599176773916, + "loss": 4.7617, + "step": 1408 + }, + { + "epoch": 0.13759765625, + "grad_norm": 0.37970221042633057, + "learning_rate": 0.0004829331640882584, + "loss": 4.7539, + "step": 1409 + }, + { + "epoch": 0.1376953125, + "grad_norm": 0.48333415389060974, + "learning_rate": 0.0004829063903414162, + "loss": 4.7773, + "step": 1410 + }, + { + "epoch": 0.13779296875, + "grad_norm": 0.3654765486717224, + "learning_rate": 0.00048287959643946056, + "loss": 4.7578, + "step": 1411 + }, + { + "epoch": 0.137890625, + "grad_norm": 0.5381461381912231, + "learning_rate": 0.0004828527823849889, + "loss": 4.7695, + "step": 1412 + }, + { + "epoch": 0.13798828125, + "grad_norm": 0.5346956849098206, + "learning_rate": 0.0004828259481806007, + "loss": 4.7383, + "step": 1413 + }, + { + "epoch": 0.1380859375, + "grad_norm": 0.5548373460769653, + "learning_rate": 0.00048279909382889735, + "loss": 4.7773, + "step": 1414 + }, + { + "epoch": 0.13818359375, + "grad_norm": 0.6224201917648315, + "learning_rate": 0.00048277221933248226, + "loss": 4.7422, + "step": 1415 + }, + { + "epoch": 
0.13828125, + "grad_norm": 0.5974454283714294, + "learning_rate": 0.0004827453246939606, + "loss": 4.7734, + "step": 1416 + }, + { + "epoch": 0.13837890625, + "grad_norm": 0.4552706182003021, + "learning_rate": 0.00048271840991593966, + "loss": 4.75, + "step": 1417 + }, + { + "epoch": 0.1384765625, + "grad_norm": 0.5470201373100281, + "learning_rate": 0.00048269147500102873, + "loss": 4.7695, + "step": 1418 + }, + { + "epoch": 0.13857421875, + "grad_norm": 0.48209336400032043, + "learning_rate": 0.00048266451995183885, + "loss": 4.793, + "step": 1419 + }, + { + "epoch": 0.138671875, + "grad_norm": 0.43414023518562317, + "learning_rate": 0.0004826375447709832, + "loss": 4.7695, + "step": 1420 + }, + { + "epoch": 0.13876953125, + "grad_norm": 0.3743106424808502, + "learning_rate": 0.00048261054946107686, + "loss": 4.75, + "step": 1421 + }, + { + "epoch": 0.1388671875, + "grad_norm": 0.3445884585380554, + "learning_rate": 0.0004825835340247368, + "loss": 4.7188, + "step": 1422 + }, + { + "epoch": 0.13896484375, + "grad_norm": 0.3742680847644806, + "learning_rate": 0.00048255649846458187, + "loss": 4.7617, + "step": 1423 + }, + { + "epoch": 0.1390625, + "grad_norm": 0.4068181812763214, + "learning_rate": 0.00048252944278323324, + "loss": 4.7227, + "step": 1424 + }, + { + "epoch": 0.13916015625, + "grad_norm": 0.33727332949638367, + "learning_rate": 0.0004825023669833136, + "loss": 4.7734, + "step": 1425 + }, + { + "epoch": 0.1392578125, + "grad_norm": 0.27452924847602844, + "learning_rate": 0.00048247527106744784, + "loss": 4.7539, + "step": 1426 + }, + { + "epoch": 0.13935546875, + "grad_norm": 0.323626309633255, + "learning_rate": 0.00048244815503826257, + "loss": 4.7422, + "step": 1427 + }, + { + "epoch": 0.139453125, + "grad_norm": 0.33976978063583374, + "learning_rate": 0.0004824210188983867, + "loss": 4.7617, + "step": 1428 + }, + { + "epoch": 0.13955078125, + "grad_norm": 0.3159013092517853, + "learning_rate": 0.0004823938626504508, + "loss": 4.7617, + "step": 1429 + }, + { + "epoch": 0.1396484375, + "grad_norm": 0.3308809995651245, + "learning_rate": 0.0004823666862970876, + "loss": 4.7734, + "step": 1430 + }, + { + "epoch": 0.13974609375, + "grad_norm": 0.37637704610824585, + "learning_rate": 0.00048233948984093147, + "loss": 4.7617, + "step": 1431 + }, + { + "epoch": 0.13984375, + "grad_norm": 0.3979220986366272, + "learning_rate": 0.000482312273284619, + "loss": 4.7617, + "step": 1432 + }, + { + "epoch": 0.13994140625, + "grad_norm": 0.41099435091018677, + "learning_rate": 0.0004822850366307887, + "loss": 4.7734, + "step": 1433 + }, + { + "epoch": 0.1400390625, + "grad_norm": 0.4159080684185028, + "learning_rate": 0.00048225777988208094, + "loss": 4.75, + "step": 1434 + }, + { + "epoch": 0.14013671875, + "grad_norm": 0.440417617559433, + "learning_rate": 0.0004822305030411381, + "loss": 4.7344, + "step": 1435 + }, + { + "epoch": 0.140234375, + "grad_norm": 0.35094818472862244, + "learning_rate": 0.0004822032061106045, + "loss": 4.7422, + "step": 1436 + }, + { + "epoch": 0.14033203125, + "grad_norm": 0.33545538783073425, + "learning_rate": 0.00048217588909312627, + "loss": 4.7656, + "step": 1437 + }, + { + "epoch": 0.1404296875, + "grad_norm": 0.363555908203125, + "learning_rate": 0.0004821485519913518, + "loss": 4.7305, + "step": 1438 + }, + { + "epoch": 0.14052734375, + "grad_norm": 0.39379504323005676, + "learning_rate": 0.00048212119480793103, + "loss": 4.7656, + "step": 1439 + }, + { + "epoch": 0.140625, + "grad_norm": 0.3747332692146301, + "learning_rate": 
0.00048209381754551616, + "loss": 4.7266, + "step": 1440 + }, + { + "epoch": 0.14072265625, + "grad_norm": 0.38222065567970276, + "learning_rate": 0.00048206642020676133, + "loss": 4.7656, + "step": 1441 + }, + { + "epoch": 0.1408203125, + "grad_norm": 0.4481048583984375, + "learning_rate": 0.00048203900279432247, + "loss": 4.7383, + "step": 1442 + }, + { + "epoch": 0.14091796875, + "grad_norm": 0.5370760560035706, + "learning_rate": 0.0004820115653108573, + "loss": 4.75, + "step": 1443 + }, + { + "epoch": 0.141015625, + "grad_norm": 0.5712428092956543, + "learning_rate": 0.00048198410775902596, + "loss": 4.7539, + "step": 1444 + }, + { + "epoch": 0.14111328125, + "grad_norm": 0.4819755256175995, + "learning_rate": 0.0004819566301414901, + "loss": 4.75, + "step": 1445 + }, + { + "epoch": 0.1412109375, + "grad_norm": 0.39142805337905884, + "learning_rate": 0.0004819291324609137, + "loss": 4.7188, + "step": 1446 + }, + { + "epoch": 0.14130859375, + "grad_norm": 0.391284316778183, + "learning_rate": 0.0004819016147199622, + "loss": 4.7539, + "step": 1447 + }, + { + "epoch": 0.14140625, + "grad_norm": 0.37572604417800903, + "learning_rate": 0.00048187407692130345, + "loss": 4.7539, + "step": 1448 + }, + { + "epoch": 0.14150390625, + "grad_norm": 0.40346717834472656, + "learning_rate": 0.000481846519067607, + "loss": 4.7383, + "step": 1449 + }, + { + "epoch": 0.1416015625, + "grad_norm": 0.39226216077804565, + "learning_rate": 0.00048181894116154444, + "loss": 4.7656, + "step": 1450 + }, + { + "epoch": 0.14169921875, + "grad_norm": 0.3652878403663635, + "learning_rate": 0.0004817913432057892, + "loss": 4.7461, + "step": 1451 + }, + { + "epoch": 0.141796875, + "grad_norm": 0.3091000020503998, + "learning_rate": 0.0004817637252030167, + "loss": 4.7734, + "step": 1452 + }, + { + "epoch": 0.14189453125, + "grad_norm": 0.2839193344116211, + "learning_rate": 0.00048173608715590437, + "loss": 4.7539, + "step": 1453 + }, + { + "epoch": 0.1419921875, + "grad_norm": 0.3217602074146271, + "learning_rate": 0.0004817084290671315, + "loss": 4.7031, + "step": 1454 + }, + { + "epoch": 0.14208984375, + "grad_norm": 0.3004675805568695, + "learning_rate": 0.00048168075093937943, + "loss": 4.7109, + "step": 1455 + }, + { + "epoch": 0.1421875, + "grad_norm": 0.4077158570289612, + "learning_rate": 0.0004816530527753312, + "loss": 4.7539, + "step": 1456 + }, + { + "epoch": 0.14228515625, + "grad_norm": 0.3414487838745117, + "learning_rate": 0.0004816253345776722, + "loss": 4.7695, + "step": 1457 + }, + { + "epoch": 0.1423828125, + "grad_norm": 0.30606889724731445, + "learning_rate": 0.0004815975963490893, + "loss": 4.7188, + "step": 1458 + }, + { + "epoch": 0.14248046875, + "grad_norm": 0.35760390758514404, + "learning_rate": 0.0004815698380922716, + "loss": 4.7695, + "step": 1459 + }, + { + "epoch": 0.142578125, + "grad_norm": 0.4264749586582184, + "learning_rate": 0.0004815420598099101, + "loss": 4.7344, + "step": 1460 + }, + { + "epoch": 0.14267578125, + "grad_norm": 0.48403796553611755, + "learning_rate": 0.00048151426150469767, + "loss": 4.7656, + "step": 1461 + }, + { + "epoch": 0.1427734375, + "grad_norm": 0.4852380156517029, + "learning_rate": 0.00048148644317932927, + "loss": 4.75, + "step": 1462 + }, + { + "epoch": 0.14287109375, + "grad_norm": 0.46373042464256287, + "learning_rate": 0.0004814586048365016, + "loss": 4.7539, + "step": 1463 + }, + { + "epoch": 0.14296875, + "grad_norm": 0.41835281252861023, + "learning_rate": 0.00048143074647891344, + "loss": 4.7422, + "step": 1464 + }, + { + "epoch": 
0.14306640625, + "grad_norm": 0.34529751539230347, + "learning_rate": 0.00048140286810926543, + "loss": 4.7578, + "step": 1465 + }, + { + "epoch": 0.1431640625, + "grad_norm": 0.32219383120536804, + "learning_rate": 0.0004813749697302603, + "loss": 4.7695, + "step": 1466 + }, + { + "epoch": 0.14326171875, + "grad_norm": 0.33539384603500366, + "learning_rate": 0.0004813470513446025, + "loss": 4.7617, + "step": 1467 + }, + { + "epoch": 0.143359375, + "grad_norm": 0.27315089106559753, + "learning_rate": 0.00048131911295499845, + "loss": 4.7578, + "step": 1468 + }, + { + "epoch": 0.14345703125, + "grad_norm": 0.29842984676361084, + "learning_rate": 0.00048129115456415684, + "loss": 4.7188, + "step": 1469 + }, + { + "epoch": 0.1435546875, + "grad_norm": 0.3583134710788727, + "learning_rate": 0.00048126317617478776, + "loss": 4.7734, + "step": 1470 + }, + { + "epoch": 0.14365234375, + "grad_norm": 0.41695231199264526, + "learning_rate": 0.00048123517778960373, + "loss": 4.7617, + "step": 1471 + }, + { + "epoch": 0.14375, + "grad_norm": 0.5007773637771606, + "learning_rate": 0.00048120715941131893, + "loss": 4.7305, + "step": 1472 + }, + { + "epoch": 0.14384765625, + "grad_norm": 0.6086747050285339, + "learning_rate": 0.00048117912104264957, + "loss": 4.7734, + "step": 1473 + }, + { + "epoch": 0.1439453125, + "grad_norm": 0.5901089310646057, + "learning_rate": 0.00048115106268631374, + "loss": 4.7344, + "step": 1474 + }, + { + "epoch": 0.14404296875, + "grad_norm": 0.5069621205329895, + "learning_rate": 0.00048112298434503154, + "loss": 4.7773, + "step": 1475 + }, + { + "epoch": 0.144140625, + "grad_norm": 0.4707411527633667, + "learning_rate": 0.00048109488602152503, + "loss": 4.7578, + "step": 1476 + }, + { + "epoch": 0.14423828125, + "grad_norm": 0.4005588889122009, + "learning_rate": 0.00048106676771851804, + "loss": 4.7422, + "step": 1477 + }, + { + "epoch": 0.1443359375, + "grad_norm": 0.3813304007053375, + "learning_rate": 0.00048103862943873647, + "loss": 4.7461, + "step": 1478 + }, + { + "epoch": 0.14443359375, + "grad_norm": 0.3576384484767914, + "learning_rate": 0.0004810104711849082, + "loss": 4.7695, + "step": 1479 + }, + { + "epoch": 0.14453125, + "grad_norm": 0.3286953270435333, + "learning_rate": 0.0004809822929597629, + "loss": 4.7461, + "step": 1480 + }, + { + "epoch": 0.14462890625, + "grad_norm": 0.32706767320632935, + "learning_rate": 0.00048095409476603233, + "loss": 4.75, + "step": 1481 + }, + { + "epoch": 0.1447265625, + "grad_norm": 0.33062857389450073, + "learning_rate": 0.0004809258766064501, + "loss": 4.7578, + "step": 1482 + }, + { + "epoch": 0.14482421875, + "grad_norm": 0.32870644330978394, + "learning_rate": 0.0004808976384837518, + "loss": 4.7461, + "step": 1483 + }, + { + "epoch": 0.144921875, + "grad_norm": 0.28449103236198425, + "learning_rate": 0.00048086938040067486, + "loss": 4.7539, + "step": 1484 + }, + { + "epoch": 0.14501953125, + "grad_norm": 0.2756710946559906, + "learning_rate": 0.00048084110235995864, + "loss": 4.7266, + "step": 1485 + }, + { + "epoch": 0.1451171875, + "grad_norm": 0.28041645884513855, + "learning_rate": 0.00048081280436434467, + "loss": 4.7188, + "step": 1486 + }, + { + "epoch": 0.14521484375, + "grad_norm": 0.3484303951263428, + "learning_rate": 0.0004807844864165761, + "loss": 4.7383, + "step": 1487 + }, + { + "epoch": 0.1453125, + "grad_norm": 0.4143260419368744, + "learning_rate": 0.0004807561485193983, + "loss": 4.7578, + "step": 1488 + }, + { + "epoch": 0.14541015625, + "grad_norm": 0.42858263850212097, + "learning_rate": 
0.00048072779067555847, + "loss": 4.7188, + "step": 1489 + }, + { + "epoch": 0.1455078125, + "grad_norm": 0.482190877199173, + "learning_rate": 0.00048069941288780545, + "loss": 4.7578, + "step": 1490 + }, + { + "epoch": 0.14560546875, + "grad_norm": 0.4588344991207123, + "learning_rate": 0.0004806710151588905, + "loss": 4.7266, + "step": 1491 + }, + { + "epoch": 0.145703125, + "grad_norm": 0.4522359371185303, + "learning_rate": 0.0004806425974915665, + "loss": 4.7773, + "step": 1492 + }, + { + "epoch": 0.14580078125, + "grad_norm": 0.47912994027137756, + "learning_rate": 0.0004806141598885884, + "loss": 4.707, + "step": 1493 + }, + { + "epoch": 0.1458984375, + "grad_norm": 0.4162638187408447, + "learning_rate": 0.000480585702352713, + "loss": 4.7422, + "step": 1494 + }, + { + "epoch": 0.14599609375, + "grad_norm": 0.3735276460647583, + "learning_rate": 0.0004805572248866991, + "loss": 4.7422, + "step": 1495 + }, + { + "epoch": 0.14609375, + "grad_norm": 0.33471450209617615, + "learning_rate": 0.0004805287274933073, + "loss": 4.7344, + "step": 1496 + }, + { + "epoch": 0.14619140625, + "grad_norm": 0.3382073640823364, + "learning_rate": 0.00048050021017530037, + "loss": 4.7461, + "step": 1497 + }, + { + "epoch": 0.1462890625, + "grad_norm": 0.35437050461769104, + "learning_rate": 0.0004804716729354428, + "loss": 4.7109, + "step": 1498 + }, + { + "epoch": 0.14638671875, + "grad_norm": 0.3948093056678772, + "learning_rate": 0.000480443115776501, + "loss": 4.707, + "step": 1499 + }, + { + "epoch": 0.146484375, + "grad_norm": 0.41001394391059875, + "learning_rate": 0.0004804145387012435, + "loss": 4.7852, + "step": 1500 + }, + { + "epoch": 0.14658203125, + "grad_norm": 0.41534173488616943, + "learning_rate": 0.00048038594171244067, + "loss": 4.7383, + "step": 1501 + }, + { + "epoch": 0.1466796875, + "grad_norm": 0.3207942247390747, + "learning_rate": 0.0004803573248128647, + "loss": 4.75, + "step": 1502 + }, + { + "epoch": 0.14677734375, + "grad_norm": 0.32587486505508423, + "learning_rate": 0.0004803286880052899, + "loss": 4.7461, + "step": 1503 + }, + { + "epoch": 0.146875, + "grad_norm": 0.33651435375213623, + "learning_rate": 0.0004803000312924923, + "loss": 4.7617, + "step": 1504 + }, + { + "epoch": 0.14697265625, + "grad_norm": 0.3294571340084076, + "learning_rate": 0.00048027135467725013, + "loss": 4.7422, + "step": 1505 + }, + { + "epoch": 0.1470703125, + "grad_norm": 0.32122352719306946, + "learning_rate": 0.00048024265816234323, + "loss": 4.7344, + "step": 1506 + }, + { + "epoch": 0.14716796875, + "grad_norm": 0.33909428119659424, + "learning_rate": 0.00048021394175055363, + "loss": 4.7461, + "step": 1507 + }, + { + "epoch": 0.147265625, + "grad_norm": 0.47826051712036133, + "learning_rate": 0.00048018520544466513, + "loss": 4.7422, + "step": 1508 + }, + { + "epoch": 0.14736328125, + "grad_norm": 0.6739643812179565, + "learning_rate": 0.00048015644924746364, + "loss": 4.7031, + "step": 1509 + }, + { + "epoch": 0.1474609375, + "grad_norm": 0.7271323204040527, + "learning_rate": 0.00048012767316173675, + "loss": 4.7461, + "step": 1510 + }, + { + "epoch": 0.14755859375, + "grad_norm": 0.632926881313324, + "learning_rate": 0.0004800988771902742, + "loss": 4.7148, + "step": 1511 + }, + { + "epoch": 0.14765625, + "grad_norm": 0.4113217294216156, + "learning_rate": 0.00048007006133586746, + "loss": 4.7344, + "step": 1512 + }, + { + "epoch": 0.14775390625, + "grad_norm": 0.37485432624816895, + "learning_rate": 0.00048004122560131016, + "loss": 4.7188, + "step": 1513 + }, + { + "epoch": 
0.1478515625, + "grad_norm": 0.484247624874115, + "learning_rate": 0.00048001236998939756, + "loss": 4.7305, + "step": 1514 + }, + { + "epoch": 0.14794921875, + "grad_norm": 0.43032926321029663, + "learning_rate": 0.0004799834945029272, + "loss": 4.7344, + "step": 1515 + }, + { + "epoch": 0.148046875, + "grad_norm": 0.5900740623474121, + "learning_rate": 0.00047995459914469826, + "loss": 4.7695, + "step": 1516 + }, + { + "epoch": 0.14814453125, + "grad_norm": 0.31932970881462097, + "learning_rate": 0.0004799256839175119, + "loss": 4.7188, + "step": 1517 + }, + { + "epoch": 0.1482421875, + "grad_norm": 0.3383445739746094, + "learning_rate": 0.0004798967488241714, + "loss": 4.7305, + "step": 1518 + }, + { + "epoch": 0.14833984375, + "grad_norm": 0.3197227120399475, + "learning_rate": 0.00047986779386748166, + "loss": 4.75, + "step": 1519 + }, + { + "epoch": 0.1484375, + "grad_norm": 0.3146499693393707, + "learning_rate": 0.0004798388190502497, + "loss": 4.7422, + "step": 1520 + }, + { + "epoch": 0.14853515625, + "grad_norm": 0.3118762969970703, + "learning_rate": 0.0004798098243752846, + "loss": 4.7266, + "step": 1521 + }, + { + "epoch": 0.1486328125, + "grad_norm": 0.3077457547187805, + "learning_rate": 0.0004797808098453971, + "loss": 4.7305, + "step": 1522 + }, + { + "epoch": 0.14873046875, + "grad_norm": 0.3942987024784088, + "learning_rate": 0.0004797517754633998, + "loss": 4.7148, + "step": 1523 + }, + { + "epoch": 0.148828125, + "grad_norm": 0.6512936353683472, + "learning_rate": 0.0004797227212321076, + "loss": 4.7305, + "step": 1524 + }, + { + "epoch": 0.14892578125, + "grad_norm": 1.190845251083374, + "learning_rate": 0.00047969364715433707, + "loss": 4.7461, + "step": 1525 + }, + { + "epoch": 0.1490234375, + "grad_norm": 0.8728731870651245, + "learning_rate": 0.0004796645532329066, + "loss": 4.7734, + "step": 1526 + }, + { + "epoch": 0.14912109375, + "grad_norm": 0.551252543926239, + "learning_rate": 0.00047963543947063687, + "loss": 4.75, + "step": 1527 + }, + { + "epoch": 0.14921875, + "grad_norm": 0.6479698419570923, + "learning_rate": 0.0004796063058703501, + "loss": 4.7617, + "step": 1528 + }, + { + "epoch": 0.14931640625, + "grad_norm": 0.5585426688194275, + "learning_rate": 0.0004795771524348707, + "loss": 4.7578, + "step": 1529 + }, + { + "epoch": 0.1494140625, + "grad_norm": 0.5894700884819031, + "learning_rate": 0.0004795479791670247, + "loss": 4.7578, + "step": 1530 + }, + { + "epoch": 0.14951171875, + "grad_norm": 0.6230053305625916, + "learning_rate": 0.0004795187860696405, + "loss": 4.7461, + "step": 1531 + }, + { + "epoch": 0.149609375, + "grad_norm": 0.4770023226737976, + "learning_rate": 0.00047948957314554805, + "loss": 4.7305, + "step": 1532 + }, + { + "epoch": 0.14970703125, + "grad_norm": 0.3334863781929016, + "learning_rate": 0.00047946034039757934, + "loss": 4.7656, + "step": 1533 + }, + { + "epoch": 0.1498046875, + "grad_norm": 0.3881434500217438, + "learning_rate": 0.0004794310878285683, + "loss": 4.7227, + "step": 1534 + }, + { + "epoch": 0.14990234375, + "grad_norm": 0.3674808740615845, + "learning_rate": 0.0004794018154413508, + "loss": 4.7422, + "step": 1535 + }, + { + "epoch": 0.15, + "grad_norm": 0.3127332627773285, + "learning_rate": 0.0004793725232387646, + "loss": 4.7344, + "step": 1536 + }, + { + "epoch": 0.15009765625, + "grad_norm": 0.3168219327926636, + "learning_rate": 0.00047934321122364927, + "loss": 4.7344, + "step": 1537 + }, + { + "epoch": 0.1501953125, + "grad_norm": 0.3331605792045593, + "learning_rate": 0.00047931387939884653, + 
"loss": 4.7305, + "step": 1538 + }, + { + "epoch": 0.15029296875, + "grad_norm": 0.3181023597717285, + "learning_rate": 0.0004792845277671999, + "loss": 4.7188, + "step": 1539 + }, + { + "epoch": 0.150390625, + "grad_norm": 0.3030186891555786, + "learning_rate": 0.0004792551563315547, + "loss": 4.75, + "step": 1540 + }, + { + "epoch": 0.15048828125, + "grad_norm": 0.30499133467674255, + "learning_rate": 0.00047922576509475844, + "loss": 4.7539, + "step": 1541 + }, + { + "epoch": 0.1505859375, + "grad_norm": 0.30657604336738586, + "learning_rate": 0.0004791963540596603, + "loss": 4.7305, + "step": 1542 + }, + { + "epoch": 0.15068359375, + "grad_norm": 0.34554725885391235, + "learning_rate": 0.0004791669232291116, + "loss": 4.7031, + "step": 1543 + }, + { + "epoch": 0.15078125, + "grad_norm": 0.3566804528236389, + "learning_rate": 0.00047913747260596535, + "loss": 4.7031, + "step": 1544 + }, + { + "epoch": 0.15087890625, + "grad_norm": 0.41564902663230896, + "learning_rate": 0.00047910800219307657, + "loss": 4.7109, + "step": 1545 + }, + { + "epoch": 0.1509765625, + "grad_norm": 0.48682817816734314, + "learning_rate": 0.00047907851199330227, + "loss": 4.7188, + "step": 1546 + }, + { + "epoch": 0.15107421875, + "grad_norm": 0.5196331739425659, + "learning_rate": 0.00047904900200950134, + "loss": 4.7578, + "step": 1547 + }, + { + "epoch": 0.151171875, + "grad_norm": 0.47747114300727844, + "learning_rate": 0.0004790194722445345, + "loss": 4.7188, + "step": 1548 + }, + { + "epoch": 0.15126953125, + "grad_norm": 0.36361950635910034, + "learning_rate": 0.0004789899227012646, + "loss": 4.75, + "step": 1549 + }, + { + "epoch": 0.1513671875, + "grad_norm": 0.3436301052570343, + "learning_rate": 0.0004789603533825562, + "loss": 4.7617, + "step": 1550 + }, + { + "epoch": 0.15146484375, + "grad_norm": 0.3718210458755493, + "learning_rate": 0.00047893076429127575, + "loss": 4.7695, + "step": 1551 + }, + { + "epoch": 0.1515625, + "grad_norm": 0.4168568253517151, + "learning_rate": 0.00047890115543029174, + "loss": 4.7461, + "step": 1552 + }, + { + "epoch": 0.15166015625, + "grad_norm": 0.4080406129360199, + "learning_rate": 0.00047887152680247465, + "loss": 4.7188, + "step": 1553 + }, + { + "epoch": 0.1517578125, + "grad_norm": 0.4210541546344757, + "learning_rate": 0.0004788418784106967, + "loss": 4.7383, + "step": 1554 + }, + { + "epoch": 0.15185546875, + "grad_norm": 0.38915780186653137, + "learning_rate": 0.0004788122102578321, + "loss": 4.7266, + "step": 1555 + }, + { + "epoch": 0.151953125, + "grad_norm": 0.40426111221313477, + "learning_rate": 0.0004787825223467571, + "loss": 4.7031, + "step": 1556 + }, + { + "epoch": 0.15205078125, + "grad_norm": 0.3467988669872284, + "learning_rate": 0.0004787528146803495, + "loss": 4.7344, + "step": 1557 + }, + { + "epoch": 0.1521484375, + "grad_norm": 0.30397987365722656, + "learning_rate": 0.00047872308726148945, + "loss": 4.7422, + "step": 1558 + }, + { + "epoch": 0.15224609375, + "grad_norm": 0.2704983055591583, + "learning_rate": 0.00047869334009305873, + "loss": 4.7227, + "step": 1559 + }, + { + "epoch": 0.15234375, + "grad_norm": 0.28865277767181396, + "learning_rate": 0.00047866357317794125, + "loss": 4.7461, + "step": 1560 + }, + { + "epoch": 0.15244140625, + "grad_norm": 0.3506476581096649, + "learning_rate": 0.0004786337865190225, + "loss": 4.7539, + "step": 1561 + }, + { + "epoch": 0.1525390625, + "grad_norm": 0.38137906789779663, + "learning_rate": 0.00047860398011919026, + "loss": 4.7578, + "step": 1562 + }, + { + "epoch": 0.15263671875, + 
"grad_norm": 0.35326069593429565, + "learning_rate": 0.00047857415398133403, + "loss": 4.7305, + "step": 1563 + }, + { + "epoch": 0.152734375, + "grad_norm": 0.33928734064102173, + "learning_rate": 0.0004785443081083452, + "loss": 4.7266, + "step": 1564 + }, + { + "epoch": 0.15283203125, + "grad_norm": 0.5283599495887756, + "learning_rate": 0.0004785144425031172, + "loss": 4.7188, + "step": 1565 + }, + { + "epoch": 0.1529296875, + "grad_norm": 0.5691794157028198, + "learning_rate": 0.00047848455716854524, + "loss": 4.7266, + "step": 1566 + }, + { + "epoch": 0.15302734375, + "grad_norm": 0.4781295657157898, + "learning_rate": 0.0004784546521075265, + "loss": 4.7578, + "step": 1567 + }, + { + "epoch": 0.153125, + "grad_norm": 0.46981513500213623, + "learning_rate": 0.00047842472732296013, + "loss": 4.7344, + "step": 1568 + }, + { + "epoch": 0.15322265625, + "grad_norm": 0.4642745852470398, + "learning_rate": 0.0004783947828177471, + "loss": 4.7578, + "step": 1569 + }, + { + "epoch": 0.1533203125, + "grad_norm": 0.4203815758228302, + "learning_rate": 0.00047836481859479026, + "loss": 4.7305, + "step": 1570 + }, + { + "epoch": 0.15341796875, + "grad_norm": 0.472530335187912, + "learning_rate": 0.0004783348346569945, + "loss": 4.7578, + "step": 1571 + }, + { + "epoch": 0.153515625, + "grad_norm": 0.5050036311149597, + "learning_rate": 0.00047830483100726656, + "loss": 4.7227, + "step": 1572 + }, + { + "epoch": 0.15361328125, + "grad_norm": 0.4915096163749695, + "learning_rate": 0.0004782748076485151, + "loss": 4.7578, + "step": 1573 + }, + { + "epoch": 0.1537109375, + "grad_norm": 0.38450387120246887, + "learning_rate": 0.00047824476458365074, + "loss": 4.7344, + "step": 1574 + }, + { + "epoch": 0.15380859375, + "grad_norm": 0.3322133719921112, + "learning_rate": 0.0004782147018155858, + "loss": 4.75, + "step": 1575 + }, + { + "epoch": 0.15390625, + "grad_norm": 0.3817479908466339, + "learning_rate": 0.0004781846193472348, + "loss": 4.7461, + "step": 1576 + }, + { + "epoch": 0.15400390625, + "grad_norm": 0.411730021238327, + "learning_rate": 0.00047815451718151387, + "loss": 4.7148, + "step": 1577 + }, + { + "epoch": 0.1541015625, + "grad_norm": 0.43964841961860657, + "learning_rate": 0.00047812439532134144, + "loss": 4.7305, + "step": 1578 + }, + { + "epoch": 0.15419921875, + "grad_norm": 0.39244744181632996, + "learning_rate": 0.0004780942537696374, + "loss": 4.7344, + "step": 1579 + }, + { + "epoch": 0.154296875, + "grad_norm": 0.3104197084903717, + "learning_rate": 0.0004780640925293239, + "loss": 4.7344, + "step": 1580 + }, + { + "epoch": 0.15439453125, + "grad_norm": 0.34375083446502686, + "learning_rate": 0.00047803391160332483, + "loss": 4.7344, + "step": 1581 + }, + { + "epoch": 0.1544921875, + "grad_norm": 0.3174738585948944, + "learning_rate": 0.000478003710994566, + "loss": 4.6992, + "step": 1582 + }, + { + "epoch": 0.15458984375, + "grad_norm": 0.3029673993587494, + "learning_rate": 0.0004779734907059752, + "loss": 4.7227, + "step": 1583 + }, + { + "epoch": 0.1546875, + "grad_norm": 0.30898958444595337, + "learning_rate": 0.0004779432507404821, + "loss": 4.7148, + "step": 1584 + }, + { + "epoch": 0.15478515625, + "grad_norm": 0.31820470094680786, + "learning_rate": 0.00047791299110101814, + "loss": 4.707, + "step": 1585 + }, + { + "epoch": 0.1548828125, + "grad_norm": 0.32890087366104126, + "learning_rate": 0.0004778827117905169, + "loss": 4.7188, + "step": 1586 + }, + { + "epoch": 0.15498046875, + "grad_norm": 0.2951774597167969, + "learning_rate": 0.0004778524128119138, + "loss": 
4.7344, + "step": 1587 + }, + { + "epoch": 0.155078125, + "grad_norm": 0.30525726079940796, + "learning_rate": 0.00047782209416814586, + "loss": 4.707, + "step": 1588 + }, + { + "epoch": 0.15517578125, + "grad_norm": 0.29748696088790894, + "learning_rate": 0.0004777917558621526, + "loss": 4.7188, + "step": 1589 + }, + { + "epoch": 0.1552734375, + "grad_norm": 0.35524943470954895, + "learning_rate": 0.0004777613978968749, + "loss": 4.7461, + "step": 1590 + }, + { + "epoch": 0.15537109375, + "grad_norm": 0.49947482347488403, + "learning_rate": 0.0004777310202752558, + "loss": 4.6875, + "step": 1591 + }, + { + "epoch": 0.15546875, + "grad_norm": 0.6647739410400391, + "learning_rate": 0.00047770062300024033, + "loss": 4.7383, + "step": 1592 + }, + { + "epoch": 0.15556640625, + "grad_norm": 0.6732831597328186, + "learning_rate": 0.00047767020607477505, + "loss": 4.75, + "step": 1593 + }, + { + "epoch": 0.1556640625, + "grad_norm": 0.4741779863834381, + "learning_rate": 0.0004776397695018089, + "loss": 4.7188, + "step": 1594 + }, + { + "epoch": 0.15576171875, + "grad_norm": 0.4117373824119568, + "learning_rate": 0.00047760931328429243, + "loss": 4.7383, + "step": 1595 + }, + { + "epoch": 0.155859375, + "grad_norm": 0.4464336037635803, + "learning_rate": 0.0004775788374251782, + "loss": 4.7461, + "step": 1596 + }, + { + "epoch": 0.15595703125, + "grad_norm": 0.3815647065639496, + "learning_rate": 0.00047754834192742046, + "loss": 4.75, + "step": 1597 + }, + { + "epoch": 0.1560546875, + "grad_norm": 0.3951696455478668, + "learning_rate": 0.00047751782679397573, + "loss": 4.7383, + "step": 1598 + }, + { + "epoch": 0.15615234375, + "grad_norm": 0.49301767349243164, + "learning_rate": 0.0004774872920278022, + "loss": 4.7148, + "step": 1599 + }, + { + "epoch": 0.15625, + "grad_norm": 0.5991638898849487, + "learning_rate": 0.0004774567376318599, + "loss": 4.7266, + "step": 1600 + }, + { + "epoch": 0.15634765625, + "grad_norm": 0.5442166328430176, + "learning_rate": 0.00047742616360911105, + "loss": 4.7344, + "step": 1601 + }, + { + "epoch": 0.1564453125, + "grad_norm": 0.474050909280777, + "learning_rate": 0.0004773955699625196, + "loss": 4.6992, + "step": 1602 + }, + { + "epoch": 0.15654296875, + "grad_norm": 0.38881492614746094, + "learning_rate": 0.0004773649566950512, + "loss": 4.7344, + "step": 1603 + }, + { + "epoch": 0.156640625, + "grad_norm": 0.3230612576007843, + "learning_rate": 0.0004773343238096737, + "loss": 4.7383, + "step": 1604 + }, + { + "epoch": 0.15673828125, + "grad_norm": 0.3541710078716278, + "learning_rate": 0.0004773036713093567, + "loss": 4.7305, + "step": 1605 + }, + { + "epoch": 0.1568359375, + "grad_norm": 0.3545769453048706, + "learning_rate": 0.0004772729991970719, + "loss": 4.7148, + "step": 1606 + }, + { + "epoch": 0.15693359375, + "grad_norm": 0.376462459564209, + "learning_rate": 0.0004772423074757926, + "loss": 4.7383, + "step": 1607 + }, + { + "epoch": 0.15703125, + "grad_norm": 0.40618640184402466, + "learning_rate": 0.00047721159614849424, + "loss": 4.7031, + "step": 1608 + }, + { + "epoch": 0.15712890625, + "grad_norm": 0.5097858905792236, + "learning_rate": 0.000477180865218154, + "loss": 4.7695, + "step": 1609 + }, + { + "epoch": 0.1572265625, + "grad_norm": 0.6011024117469788, + "learning_rate": 0.00047715011468775124, + "loss": 4.7188, + "step": 1610 + }, + { + "epoch": 0.15732421875, + "grad_norm": 0.5320466756820679, + "learning_rate": 0.00047711934456026674, + "loss": 4.7305, + "step": 1611 + }, + { + "epoch": 0.157421875, + "grad_norm": 
0.3568183481693268, + "learning_rate": 0.0004770885548386835, + "loss": 4.7422, + "step": 1612 + }, + { + "epoch": 0.15751953125, + "grad_norm": 0.35290566086769104, + "learning_rate": 0.0004770577455259865, + "loss": 4.7227, + "step": 1613 + }, + { + "epoch": 0.1576171875, + "grad_norm": 0.3121565878391266, + "learning_rate": 0.0004770269166251625, + "loss": 4.7461, + "step": 1614 + }, + { + "epoch": 0.15771484375, + "grad_norm": 0.3387918174266815, + "learning_rate": 0.00047699606813920005, + "loss": 4.7031, + "step": 1615 + }, + { + "epoch": 0.1578125, + "grad_norm": 0.3331822156906128, + "learning_rate": 0.0004769652000710898, + "loss": 4.7578, + "step": 1616 + }, + { + "epoch": 0.15791015625, + "grad_norm": 0.38116782903671265, + "learning_rate": 0.00047693431242382405, + "loss": 4.7539, + "step": 1617 + }, + { + "epoch": 0.1580078125, + "grad_norm": 0.38010165095329285, + "learning_rate": 0.0004769034052003973, + "loss": 4.7305, + "step": 1618 + }, + { + "epoch": 0.15810546875, + "grad_norm": 0.3130066692829132, + "learning_rate": 0.00047687247840380576, + "loss": 4.7422, + "step": 1619 + }, + { + "epoch": 0.158203125, + "grad_norm": 0.38829630613327026, + "learning_rate": 0.00047684153203704754, + "loss": 4.7734, + "step": 1620 + }, + { + "epoch": 0.15830078125, + "grad_norm": 0.5447909832000732, + "learning_rate": 0.00047681056610312264, + "loss": 4.7383, + "step": 1621 + }, + { + "epoch": 0.1583984375, + "grad_norm": 0.6322894096374512, + "learning_rate": 0.0004767795806050331, + "loss": 4.7305, + "step": 1622 + }, + { + "epoch": 0.15849609375, + "grad_norm": 0.6145194172859192, + "learning_rate": 0.00047674857554578273, + "loss": 4.7148, + "step": 1623 + }, + { + "epoch": 0.15859375, + "grad_norm": 0.3967234492301941, + "learning_rate": 0.0004767175509283772, + "loss": 4.7578, + "step": 1624 + }, + { + "epoch": 0.15869140625, + "grad_norm": 0.40282419323921204, + "learning_rate": 0.00047668650675582413, + "loss": 4.7227, + "step": 1625 + }, + { + "epoch": 0.1587890625, + "grad_norm": 0.37083151936531067, + "learning_rate": 0.00047665544303113314, + "loss": 4.7539, + "step": 1626 + }, + { + "epoch": 0.15888671875, + "grad_norm": 0.376194566488266, + "learning_rate": 0.0004766243597573155, + "loss": 4.7305, + "step": 1627 + }, + { + "epoch": 0.158984375, + "grad_norm": 0.3005055785179138, + "learning_rate": 0.0004765932569373847, + "loss": 4.7305, + "step": 1628 + }, + { + "epoch": 0.15908203125, + "grad_norm": 0.30844777822494507, + "learning_rate": 0.0004765621345743558, + "loss": 4.7188, + "step": 1629 + }, + { + "epoch": 0.1591796875, + "grad_norm": 0.30865785479545593, + "learning_rate": 0.000476530992671246, + "loss": 4.7383, + "step": 1630 + }, + { + "epoch": 0.15927734375, + "grad_norm": 0.34530970454216003, + "learning_rate": 0.0004764998312310742, + "loss": 4.7422, + "step": 1631 + }, + { + "epoch": 0.159375, + "grad_norm": 0.3432547152042389, + "learning_rate": 0.00047646865025686134, + "loss": 4.7383, + "step": 1632 + }, + { + "epoch": 0.15947265625, + "grad_norm": 0.32131803035736084, + "learning_rate": 0.0004764374497516303, + "loss": 4.7109, + "step": 1633 + }, + { + "epoch": 0.1595703125, + "grad_norm": 0.31007057428359985, + "learning_rate": 0.0004764062297184056, + "loss": 4.7383, + "step": 1634 + }, + { + "epoch": 0.15966796875, + "grad_norm": 0.33511513471603394, + "learning_rate": 0.0004763749901602139, + "loss": 4.7266, + "step": 1635 + }, + { + "epoch": 0.159765625, + "grad_norm": 0.41421255469322205, + "learning_rate": 0.0004763437310800837, + "loss": 
4.7188, + "step": 1636 + }, + { + "epoch": 0.15986328125, + "grad_norm": 0.3609168231487274, + "learning_rate": 0.0004763124524810452, + "loss": 4.75, + "step": 1637 + }, + { + "epoch": 0.1599609375, + "grad_norm": 0.32799166440963745, + "learning_rate": 0.0004762811543661308, + "loss": 4.6914, + "step": 1638 + }, + { + "epoch": 0.16005859375, + "grad_norm": 0.33859479427337646, + "learning_rate": 0.0004762498367383746, + "loss": 4.7188, + "step": 1639 + }, + { + "epoch": 0.16015625, + "grad_norm": 0.32123857736587524, + "learning_rate": 0.0004762184996008125, + "loss": 4.7266, + "step": 1640 + }, + { + "epoch": 0.16025390625, + "grad_norm": 0.33974528312683105, + "learning_rate": 0.00047618714295648263, + "loss": 4.7617, + "step": 1641 + }, + { + "epoch": 0.1603515625, + "grad_norm": 0.34150657057762146, + "learning_rate": 0.0004761557668084247, + "loss": 4.7148, + "step": 1642 + }, + { + "epoch": 0.16044921875, + "grad_norm": 0.30895042419433594, + "learning_rate": 0.00047612437115968043, + "loss": 4.7109, + "step": 1643 + }, + { + "epoch": 0.160546875, + "grad_norm": 0.30955177545547485, + "learning_rate": 0.0004760929560132934, + "loss": 4.7031, + "step": 1644 + }, + { + "epoch": 0.16064453125, + "grad_norm": 0.2998163104057312, + "learning_rate": 0.00047606152137230905, + "loss": 4.7227, + "step": 1645 + }, + { + "epoch": 0.1607421875, + "grad_norm": 0.3299749195575714, + "learning_rate": 0.0004760300672397749, + "loss": 4.7188, + "step": 1646 + }, + { + "epoch": 0.16083984375, + "grad_norm": 0.3188712000846863, + "learning_rate": 0.0004759985936187401, + "loss": 4.7188, + "step": 1647 + }, + { + "epoch": 0.1609375, + "grad_norm": 0.2848448157310486, + "learning_rate": 0.00047596710051225585, + "loss": 4.7422, + "step": 1648 + }, + { + "epoch": 0.16103515625, + "grad_norm": 0.32674816250801086, + "learning_rate": 0.00047593558792337516, + "loss": 4.7344, + "step": 1649 + }, + { + "epoch": 0.1611328125, + "grad_norm": 0.30364173650741577, + "learning_rate": 0.00047590405585515294, + "loss": 4.7266, + "step": 1650 + }, + { + "epoch": 0.16123046875, + "grad_norm": 0.31012097001075745, + "learning_rate": 0.0004758725043106461, + "loss": 4.75, + "step": 1651 + }, + { + "epoch": 0.161328125, + "grad_norm": 0.3971497416496277, + "learning_rate": 0.0004758409332929133, + "loss": 4.7148, + "step": 1652 + }, + { + "epoch": 0.16142578125, + "grad_norm": 0.47804024815559387, + "learning_rate": 0.0004758093428050151, + "loss": 4.7344, + "step": 1653 + }, + { + "epoch": 0.1615234375, + "grad_norm": 0.6461985111236572, + "learning_rate": 0.00047577773285001395, + "loss": 4.7109, + "step": 1654 + }, + { + "epoch": 0.16162109375, + "grad_norm": 0.692113995552063, + "learning_rate": 0.0004757461034309744, + "loss": 4.7188, + "step": 1655 + }, + { + "epoch": 0.16171875, + "grad_norm": 0.5253254771232605, + "learning_rate": 0.0004757144545509625, + "loss": 4.7188, + "step": 1656 + }, + { + "epoch": 0.16181640625, + "grad_norm": 0.3248235285282135, + "learning_rate": 0.0004756827862130465, + "loss": 4.6953, + "step": 1657 + }, + { + "epoch": 0.1619140625, + "grad_norm": 0.40135297179222107, + "learning_rate": 0.0004756510984202964, + "loss": 4.7148, + "step": 1658 + }, + { + "epoch": 0.16201171875, + "grad_norm": 0.4172717332839966, + "learning_rate": 0.0004756193911757842, + "loss": 4.7305, + "step": 1659 + }, + { + "epoch": 0.162109375, + "grad_norm": 0.3560163974761963, + "learning_rate": 0.00047558766448258357, + "loss": 4.7695, + "step": 1660 + }, + { + "epoch": 0.16220703125, + "grad_norm": 
0.28156837821006775, + "learning_rate": 0.0004755559183437703, + "loss": 4.75, + "step": 1661 + }, + { + "epoch": 0.1623046875, + "grad_norm": 0.3130258321762085, + "learning_rate": 0.00047552415276242186, + "loss": 4.7109, + "step": 1662 + }, + { + "epoch": 0.16240234375, + "grad_norm": 0.3561786115169525, + "learning_rate": 0.0004754923677416178, + "loss": 4.7422, + "step": 1663 + }, + { + "epoch": 0.1625, + "grad_norm": 0.3439064621925354, + "learning_rate": 0.00047546056328443947, + "loss": 4.7383, + "step": 1664 + }, + { + "epoch": 0.16259765625, + "grad_norm": 0.3227904438972473, + "learning_rate": 0.00047542873939397006, + "loss": 4.707, + "step": 1665 + }, + { + "epoch": 0.1626953125, + "grad_norm": 0.29672226309776306, + "learning_rate": 0.00047539689607329467, + "loss": 4.707, + "step": 1666 + }, + { + "epoch": 0.16279296875, + "grad_norm": 0.3555067479610443, + "learning_rate": 0.00047536503332550024, + "loss": 4.6875, + "step": 1667 + }, + { + "epoch": 0.162890625, + "grad_norm": 0.3557547330856323, + "learning_rate": 0.00047533315115367577, + "loss": 4.7305, + "step": 1668 + }, + { + "epoch": 0.16298828125, + "grad_norm": 0.28665271401405334, + "learning_rate": 0.000475301249560912, + "loss": 4.7266, + "step": 1669 + }, + { + "epoch": 0.1630859375, + "grad_norm": 0.2968331575393677, + "learning_rate": 0.0004752693285503014, + "loss": 4.7305, + "step": 1670 + }, + { + "epoch": 0.16318359375, + "grad_norm": 0.33975455164909363, + "learning_rate": 0.0004752373881249387, + "loss": 4.7344, + "step": 1671 + }, + { + "epoch": 0.16328125, + "grad_norm": 0.36191651225090027, + "learning_rate": 0.0004752054282879203, + "loss": 4.7148, + "step": 1672 + }, + { + "epoch": 0.16337890625, + "grad_norm": 0.3275497853755951, + "learning_rate": 0.0004751734490423444, + "loss": 4.7227, + "step": 1673 + }, + { + "epoch": 0.1634765625, + "grad_norm": 0.3115062713623047, + "learning_rate": 0.00047514145039131116, + "loss": 4.6953, + "step": 1674 + }, + { + "epoch": 0.16357421875, + "grad_norm": 0.33993664383888245, + "learning_rate": 0.0004751094323379227, + "loss": 4.7188, + "step": 1675 + }, + { + "epoch": 0.163671875, + "grad_norm": 0.33467310667037964, + "learning_rate": 0.0004750773948852829, + "loss": 4.7148, + "step": 1676 + }, + { + "epoch": 0.16376953125, + "grad_norm": 0.3193880021572113, + "learning_rate": 0.00047504533803649756, + "loss": 4.7227, + "step": 1677 + }, + { + "epoch": 0.1638671875, + "grad_norm": 0.30603379011154175, + "learning_rate": 0.0004750132617946745, + "loss": 4.7578, + "step": 1678 + }, + { + "epoch": 0.16396484375, + "grad_norm": 0.2963157594203949, + "learning_rate": 0.0004749811661629231, + "loss": 4.6953, + "step": 1679 + }, + { + "epoch": 0.1640625, + "grad_norm": 0.3166114091873169, + "learning_rate": 0.000474949051144355, + "loss": 4.6914, + "step": 1680 + }, + { + "epoch": 0.16416015625, + "grad_norm": 0.29735228419303894, + "learning_rate": 0.00047491691674208343, + "loss": 4.75, + "step": 1681 + }, + { + "epoch": 0.1642578125, + "grad_norm": 0.3676241636276245, + "learning_rate": 0.0004748847629592236, + "loss": 4.6992, + "step": 1682 + }, + { + "epoch": 0.16435546875, + "grad_norm": 0.4388222396373749, + "learning_rate": 0.00047485258979889267, + "loss": 4.7422, + "step": 1683 + }, + { + "epoch": 0.164453125, + "grad_norm": 0.5815744400024414, + "learning_rate": 0.0004748203972642096, + "loss": 4.6875, + "step": 1684 + }, + { + "epoch": 0.16455078125, + "grad_norm": 0.6494610905647278, + "learning_rate": 0.00047478818535829503, + "loss": 4.7383, + 
"step": 1685 + }, + { + "epoch": 0.1646484375, + "grad_norm": 0.5286709070205688, + "learning_rate": 0.00047475595408427197, + "loss": 4.707, + "step": 1686 + }, + { + "epoch": 0.16474609375, + "grad_norm": 0.3297274708747864, + "learning_rate": 0.00047472370344526496, + "loss": 4.7188, + "step": 1687 + }, + { + "epoch": 0.16484375, + "grad_norm": 0.3907630741596222, + "learning_rate": 0.0004746914334444003, + "loss": 4.7305, + "step": 1688 + }, + { + "epoch": 0.16494140625, + "grad_norm": 0.4345732033252716, + "learning_rate": 0.00047465914408480653, + "loss": 4.7188, + "step": 1689 + }, + { + "epoch": 0.1650390625, + "grad_norm": 0.3433893024921417, + "learning_rate": 0.00047462683536961383, + "loss": 4.7461, + "step": 1690 + }, + { + "epoch": 0.16513671875, + "grad_norm": 0.2961682379245758, + "learning_rate": 0.0004745945073019543, + "loss": 4.7305, + "step": 1691 + }, + { + "epoch": 0.165234375, + "grad_norm": 0.3548338711261749, + "learning_rate": 0.000474562159884962, + "loss": 4.7188, + "step": 1692 + }, + { + "epoch": 0.16533203125, + "grad_norm": 0.3313615322113037, + "learning_rate": 0.0004745297931217727, + "loss": 4.6914, + "step": 1693 + }, + { + "epoch": 0.1654296875, + "grad_norm": 0.29579681158065796, + "learning_rate": 0.0004744974070155242, + "loss": 4.7305, + "step": 1694 + }, + { + "epoch": 0.16552734375, + "grad_norm": 0.3224891722202301, + "learning_rate": 0.000474465001569356, + "loss": 4.7422, + "step": 1695 + }, + { + "epoch": 0.165625, + "grad_norm": 0.37823259830474854, + "learning_rate": 0.0004744325767864097, + "loss": 4.6875, + "step": 1696 + }, + { + "epoch": 0.16572265625, + "grad_norm": 0.4113744795322418, + "learning_rate": 0.00047440013266982867, + "loss": 4.7109, + "step": 1697 + }, + { + "epoch": 0.1658203125, + "grad_norm": 0.39243578910827637, + "learning_rate": 0.00047436766922275805, + "loss": 4.7461, + "step": 1698 + }, + { + "epoch": 0.16591796875, + "grad_norm": 0.34951266646385193, + "learning_rate": 0.0004743351864483451, + "loss": 4.7227, + "step": 1699 + }, + { + "epoch": 0.166015625, + "grad_norm": 0.29626819491386414, + "learning_rate": 0.00047430268434973873, + "loss": 4.6953, + "step": 1700 + }, + { + "epoch": 0.16611328125, + "grad_norm": 0.29469388723373413, + "learning_rate": 0.0004742701629300896, + "loss": 4.7305, + "step": 1701 + }, + { + "epoch": 0.1662109375, + "grad_norm": 0.2781272232532501, + "learning_rate": 0.0004742376221925508, + "loss": 4.75, + "step": 1702 + }, + { + "epoch": 0.16630859375, + "grad_norm": 0.32316306233406067, + "learning_rate": 0.0004742050621402767, + "loss": 4.6992, + "step": 1703 + }, + { + "epoch": 0.16640625, + "grad_norm": 0.36234429478645325, + "learning_rate": 0.00047417248277642385, + "loss": 4.7031, + "step": 1704 + }, + { + "epoch": 0.16650390625, + "grad_norm": 0.4072574973106384, + "learning_rate": 0.0004741398841041506, + "loss": 4.7266, + "step": 1705 + }, + { + "epoch": 0.1666015625, + "grad_norm": 0.34564459323883057, + "learning_rate": 0.0004741072661266171, + "loss": 4.6953, + "step": 1706 + }, + { + "epoch": 0.16669921875, + "grad_norm": 0.30190330743789673, + "learning_rate": 0.0004740746288469855, + "loss": 4.7188, + "step": 1707 + }, + { + "epoch": 0.166796875, + "grad_norm": 0.33523130416870117, + "learning_rate": 0.00047404197226841977, + "loss": 4.7344, + "step": 1708 + }, + { + "epoch": 0.16689453125, + "grad_norm": 0.3154967725276947, + "learning_rate": 0.0004740092963940858, + "loss": 4.7188, + "step": 1709 + }, + { + "epoch": 0.1669921875, + "grad_norm": 0.29154172539711, + 
"learning_rate": 0.00047397660122715116, + "loss": 4.6914, + "step": 1710 + }, + { + "epoch": 0.16708984375, + "grad_norm": 0.29590922594070435, + "learning_rate": 0.0004739438867707855, + "loss": 4.7266, + "step": 1711 + }, + { + "epoch": 0.1671875, + "grad_norm": 0.31150999665260315, + "learning_rate": 0.00047391115302816017, + "loss": 4.7461, + "step": 1712 + }, + { + "epoch": 0.16728515625, + "grad_norm": 0.3638792037963867, + "learning_rate": 0.00047387840000244865, + "loss": 4.7227, + "step": 1713 + }, + { + "epoch": 0.1673828125, + "grad_norm": 0.41439005732536316, + "learning_rate": 0.000473845627696826, + "loss": 4.7109, + "step": 1714 + }, + { + "epoch": 0.16748046875, + "grad_norm": 0.4629388451576233, + "learning_rate": 0.0004738128361144693, + "loss": 4.6797, + "step": 1715 + }, + { + "epoch": 0.167578125, + "grad_norm": 0.48057830333709717, + "learning_rate": 0.00047378002525855765, + "loss": 4.6953, + "step": 1716 + }, + { + "epoch": 0.16767578125, + "grad_norm": 0.4065990746021271, + "learning_rate": 0.00047374719513227156, + "loss": 4.7031, + "step": 1717 + }, + { + "epoch": 0.1677734375, + "grad_norm": 0.2860855758190155, + "learning_rate": 0.00047371434573879374, + "loss": 4.7031, + "step": 1718 + }, + { + "epoch": 0.16787109375, + "grad_norm": 0.28443628549575806, + "learning_rate": 0.0004736814770813089, + "loss": 4.707, + "step": 1719 + }, + { + "epoch": 0.16796875, + "grad_norm": 0.3028384745121002, + "learning_rate": 0.00047364858916300323, + "loss": 4.7188, + "step": 1720 + }, + { + "epoch": 0.16806640625, + "grad_norm": 0.33343642950057983, + "learning_rate": 0.00047361568198706506, + "loss": 4.7109, + "step": 1721 + }, + { + "epoch": 0.1681640625, + "grad_norm": 0.30160287022590637, + "learning_rate": 0.0004735827555566846, + "loss": 4.7422, + "step": 1722 + }, + { + "epoch": 0.16826171875, + "grad_norm": 0.31500527262687683, + "learning_rate": 0.00047354980987505377, + "loss": 4.7344, + "step": 1723 + }, + { + "epoch": 0.168359375, + "grad_norm": 0.29347512125968933, + "learning_rate": 0.00047351684494536633, + "loss": 4.6953, + "step": 1724 + }, + { + "epoch": 0.16845703125, + "grad_norm": 0.29177677631378174, + "learning_rate": 0.00047348386077081825, + "loss": 4.7148, + "step": 1725 + }, + { + "epoch": 0.1685546875, + "grad_norm": 0.27817559242248535, + "learning_rate": 0.0004734508573546069, + "loss": 4.7188, + "step": 1726 + }, + { + "epoch": 0.16865234375, + "grad_norm": 0.2869120240211487, + "learning_rate": 0.0004734178346999318, + "loss": 4.7148, + "step": 1727 + }, + { + "epoch": 0.16875, + "grad_norm": 0.27404704689979553, + "learning_rate": 0.00047338479280999434, + "loss": 4.6992, + "step": 1728 + }, + { + "epoch": 0.16884765625, + "grad_norm": 0.30236533284187317, + "learning_rate": 0.0004733517316879977, + "loss": 4.707, + "step": 1729 + }, + { + "epoch": 0.1689453125, + "grad_norm": 0.3438217043876648, + "learning_rate": 0.0004733186513371468, + "loss": 4.7109, + "step": 1730 + }, + { + "epoch": 0.16904296875, + "grad_norm": 0.42064616084098816, + "learning_rate": 0.00047328555176064866, + "loss": 4.7031, + "step": 1731 + }, + { + "epoch": 0.169140625, + "grad_norm": 0.5444338321685791, + "learning_rate": 0.00047325243296171207, + "loss": 4.7227, + "step": 1732 + }, + { + "epoch": 0.16923828125, + "grad_norm": 0.6516023874282837, + "learning_rate": 0.00047321929494354765, + "loss": 4.6953, + "step": 1733 + }, + { + "epoch": 0.1693359375, + "grad_norm": 0.5612179040908813, + "learning_rate": 0.0004731861377093679, + "loss": 4.7266, + "step": 1734 
+ }, + { + "epoch": 0.16943359375, + "grad_norm": 0.36678099632263184, + "learning_rate": 0.00047315296126238714, + "loss": 4.7188, + "step": 1735 + }, + { + "epoch": 0.16953125, + "grad_norm": 0.35155007243156433, + "learning_rate": 0.0004731197656058218, + "loss": 4.6953, + "step": 1736 + }, + { + "epoch": 0.16962890625, + "grad_norm": 0.4132586419582367, + "learning_rate": 0.0004730865507428897, + "loss": 4.7148, + "step": 1737 + }, + { + "epoch": 0.1697265625, + "grad_norm": 0.3739107847213745, + "learning_rate": 0.00047305331667681094, + "loss": 4.707, + "step": 1738 + }, + { + "epoch": 0.16982421875, + "grad_norm": 0.27712637186050415, + "learning_rate": 0.0004730200634108073, + "loss": 4.707, + "step": 1739 + }, + { + "epoch": 0.169921875, + "grad_norm": 0.3548940122127533, + "learning_rate": 0.00047298679094810253, + "loss": 4.7695, + "step": 1740 + }, + { + "epoch": 0.17001953125, + "grad_norm": 0.39106234908103943, + "learning_rate": 0.0004729534992919221, + "loss": 4.7383, + "step": 1741 + }, + { + "epoch": 0.1701171875, + "grad_norm": 0.31391414999961853, + "learning_rate": 0.00047292018844549347, + "loss": 4.7344, + "step": 1742 + }, + { + "epoch": 0.17021484375, + "grad_norm": 0.31894975900650024, + "learning_rate": 0.00047288685841204576, + "loss": 4.7109, + "step": 1743 + }, + { + "epoch": 0.1703125, + "grad_norm": 0.3426681458950043, + "learning_rate": 0.0004728535091948103, + "loss": 4.7266, + "step": 1744 + }, + { + "epoch": 0.17041015625, + "grad_norm": 0.3944070339202881, + "learning_rate": 0.0004728201407970199, + "loss": 4.7383, + "step": 1745 + }, + { + "epoch": 0.1705078125, + "grad_norm": 0.33504220843315125, + "learning_rate": 0.00047278675322190943, + "loss": 4.7031, + "step": 1746 + }, + { + "epoch": 0.17060546875, + "grad_norm": 0.29810965061187744, + "learning_rate": 0.00047275334647271566, + "loss": 4.7188, + "step": 1747 + }, + { + "epoch": 0.170703125, + "grad_norm": 0.40818923711776733, + "learning_rate": 0.00047271992055267713, + "loss": 4.7344, + "step": 1748 + }, + { + "epoch": 0.17080078125, + "grad_norm": 0.4044819474220276, + "learning_rate": 0.0004726864754650342, + "loss": 4.7227, + "step": 1749 + }, + { + "epoch": 0.1708984375, + "grad_norm": 0.3737597167491913, + "learning_rate": 0.00047265301121302917, + "loss": 4.7031, + "step": 1750 + }, + { + "epoch": 0.17099609375, + "grad_norm": 0.346331387758255, + "learning_rate": 0.0004726195277999062, + "loss": 4.707, + "step": 1751 + }, + { + "epoch": 0.17109375, + "grad_norm": 0.3120872378349304, + "learning_rate": 0.00047258602522891124, + "loss": 4.7109, + "step": 1752 + }, + { + "epoch": 0.17119140625, + "grad_norm": 0.3185526132583618, + "learning_rate": 0.0004725525035032921, + "loss": 4.7109, + "step": 1753 + }, + { + "epoch": 0.1712890625, + "grad_norm": 0.35277417302131653, + "learning_rate": 0.0004725189626262986, + "loss": 4.7148, + "step": 1754 + }, + { + "epoch": 0.17138671875, + "grad_norm": 0.3095724880695343, + "learning_rate": 0.00047248540260118224, + "loss": 4.7109, + "step": 1755 + }, + { + "epoch": 0.171484375, + "grad_norm": 0.30601778626441956, + "learning_rate": 0.0004724518234311964, + "loss": 4.6914, + "step": 1756 + }, + { + "epoch": 0.17158203125, + "grad_norm": 0.2815275490283966, + "learning_rate": 0.0004724182251195964, + "loss": 4.7148, + "step": 1757 + }, + { + "epoch": 0.1716796875, + "grad_norm": 0.2875475585460663, + "learning_rate": 0.0004723846076696395, + "loss": 4.7109, + "step": 1758 + }, + { + "epoch": 0.17177734375, + "grad_norm": 0.3428354263305664, + 
"learning_rate": 0.00047235097108458434, + "loss": 4.7188, + "step": 1759 + }, + { + "epoch": 0.171875, + "grad_norm": 0.4065704643726349, + "learning_rate": 0.00047231731536769207, + "loss": 4.6875, + "step": 1760 + }, + { + "epoch": 0.17197265625, + "grad_norm": 0.47537028789520264, + "learning_rate": 0.00047228364052222515, + "loss": 4.7383, + "step": 1761 + }, + { + "epoch": 0.1720703125, + "grad_norm": 0.46676430106163025, + "learning_rate": 0.0004722499465514484, + "loss": 4.7266, + "step": 1762 + }, + { + "epoch": 0.17216796875, + "grad_norm": 0.35790807008743286, + "learning_rate": 0.00047221623345862794, + "loss": 4.6992, + "step": 1763 + }, + { + "epoch": 0.172265625, + "grad_norm": 0.30717775225639343, + "learning_rate": 0.00047218250124703224, + "loss": 4.7031, + "step": 1764 + }, + { + "epoch": 0.17236328125, + "grad_norm": 0.36534759402275085, + "learning_rate": 0.00047214874991993136, + "loss": 4.7109, + "step": 1765 + }, + { + "epoch": 0.1724609375, + "grad_norm": 0.39176201820373535, + "learning_rate": 0.0004721149794805971, + "loss": 4.707, + "step": 1766 + }, + { + "epoch": 0.17255859375, + "grad_norm": 0.3514379560947418, + "learning_rate": 0.0004720811899323035, + "loss": 4.7305, + "step": 1767 + }, + { + "epoch": 0.17265625, + "grad_norm": 0.3105420768260956, + "learning_rate": 0.00047204738127832615, + "loss": 4.7344, + "step": 1768 + }, + { + "epoch": 0.17275390625, + "grad_norm": 0.27019309997558594, + "learning_rate": 0.0004720135535219425, + "loss": 4.6953, + "step": 1769 + }, + { + "epoch": 0.1728515625, + "grad_norm": 0.29011520743370056, + "learning_rate": 0.000471979706666432, + "loss": 4.7148, + "step": 1770 + }, + { + "epoch": 0.17294921875, + "grad_norm": 0.3539043068885803, + "learning_rate": 0.00047194584071507577, + "loss": 4.7266, + "step": 1771 + }, + { + "epoch": 0.173046875, + "grad_norm": 0.33324524760246277, + "learning_rate": 0.000471911955671157, + "loss": 4.6914, + "step": 1772 + }, + { + "epoch": 0.17314453125, + "grad_norm": 0.3441627323627472, + "learning_rate": 0.0004718780515379606, + "loss": 4.707, + "step": 1773 + }, + { + "epoch": 0.1732421875, + "grad_norm": 0.3166051506996155, + "learning_rate": 0.0004718441283187733, + "loss": 4.7344, + "step": 1774 + }, + { + "epoch": 0.17333984375, + "grad_norm": 0.32287782430648804, + "learning_rate": 0.0004718101860168837, + "loss": 4.6953, + "step": 1775 + }, + { + "epoch": 0.1734375, + "grad_norm": 0.3744673430919647, + "learning_rate": 0.0004717762246355824, + "loss": 4.7227, + "step": 1776 + }, + { + "epoch": 0.17353515625, + "grad_norm": 0.4147856831550598, + "learning_rate": 0.0004717422441781616, + "loss": 4.707, + "step": 1777 + }, + { + "epoch": 0.1736328125, + "grad_norm": 0.44820207357406616, + "learning_rate": 0.0004717082446479156, + "loss": 4.6992, + "step": 1778 + }, + { + "epoch": 0.17373046875, + "grad_norm": 0.42957448959350586, + "learning_rate": 0.0004716742260481402, + "loss": 4.6992, + "step": 1779 + }, + { + "epoch": 0.173828125, + "grad_norm": 0.35971277952194214, + "learning_rate": 0.00047164018838213346, + "loss": 4.707, + "step": 1780 + }, + { + "epoch": 0.17392578125, + "grad_norm": 0.29600271582603455, + "learning_rate": 0.0004716061316531951, + "loss": 4.6641, + "step": 1781 + }, + { + "epoch": 0.1740234375, + "grad_norm": 0.3103143870830536, + "learning_rate": 0.00047157205586462664, + "loss": 4.7148, + "step": 1782 + }, + { + "epoch": 0.17412109375, + "grad_norm": 0.2851412892341614, + "learning_rate": 0.0004715379610197315, + "loss": 4.7422, + "step": 1783 + }, + 
{ + "epoch": 0.17421875, + "grad_norm": 0.2680378258228302, + "learning_rate": 0.000471503847121815, + "loss": 4.7188, + "step": 1784 + }, + { + "epoch": 0.17431640625, + "grad_norm": 0.2789458632469177, + "learning_rate": 0.00047146971417418417, + "loss": 4.6836, + "step": 1785 + }, + { + "epoch": 0.1744140625, + "grad_norm": 0.3282760679721832, + "learning_rate": 0.0004714355621801479, + "loss": 4.7305, + "step": 1786 + }, + { + "epoch": 0.17451171875, + "grad_norm": 0.35235342383384705, + "learning_rate": 0.00047140139114301715, + "loss": 4.707, + "step": 1787 + }, + { + "epoch": 0.174609375, + "grad_norm": 0.3573938310146332, + "learning_rate": 0.0004713672010661045, + "loss": 4.7148, + "step": 1788 + }, + { + "epoch": 0.17470703125, + "grad_norm": 0.3707910478115082, + "learning_rate": 0.0004713329919527245, + "loss": 4.7227, + "step": 1789 + }, + { + "epoch": 0.1748046875, + "grad_norm": 0.3104837238788605, + "learning_rate": 0.0004712987638061934, + "loss": 4.6914, + "step": 1790 + }, + { + "epoch": 0.17490234375, + "grad_norm": 0.3016248047351837, + "learning_rate": 0.00047126451662982946, + "loss": 4.707, + "step": 1791 + }, + { + "epoch": 0.175, + "grad_norm": 0.299649715423584, + "learning_rate": 0.00047123025042695267, + "loss": 4.6953, + "step": 1792 + }, + { + "epoch": 0.17509765625, + "grad_norm": 0.3024265170097351, + "learning_rate": 0.0004711959652008849, + "loss": 4.707, + "step": 1793 + }, + { + "epoch": 0.1751953125, + "grad_norm": 0.3478015065193176, + "learning_rate": 0.0004711616609549499, + "loss": 4.7109, + "step": 1794 + }, + { + "epoch": 0.17529296875, + "grad_norm": 0.38603729009628296, + "learning_rate": 0.0004711273376924732, + "loss": 4.7188, + "step": 1795 + }, + { + "epoch": 0.175390625, + "grad_norm": 0.48535963892936707, + "learning_rate": 0.0004710929954167823, + "loss": 4.6914, + "step": 1796 + }, + { + "epoch": 0.17548828125, + "grad_norm": 0.5580281019210815, + "learning_rate": 0.0004710586341312064, + "loss": 4.7148, + "step": 1797 + }, + { + "epoch": 0.1755859375, + "grad_norm": 0.44946280121803284, + "learning_rate": 0.0004710242538390765, + "loss": 4.75, + "step": 1798 + }, + { + "epoch": 0.17568359375, + "grad_norm": 0.3055347502231598, + "learning_rate": 0.0004709898545437256, + "loss": 4.707, + "step": 1799 + }, + { + "epoch": 0.17578125, + "grad_norm": 0.39318904280662537, + "learning_rate": 0.00047095543624848857, + "loss": 4.6836, + "step": 1800 + }, + { + "epoch": 0.17587890625, + "grad_norm": 0.4857766330242157, + "learning_rate": 0.0004709209989567019, + "loss": 4.6992, + "step": 1801 + }, + { + "epoch": 0.1759765625, + "grad_norm": 0.3867643177509308, + "learning_rate": 0.00047088654267170413, + "loss": 4.7148, + "step": 1802 + }, + { + "epoch": 0.17607421875, + "grad_norm": 0.28597569465637207, + "learning_rate": 0.00047085206739683547, + "loss": 4.7109, + "step": 1803 + }, + { + "epoch": 0.176171875, + "grad_norm": 0.322764128446579, + "learning_rate": 0.00047081757313543815, + "loss": 4.6914, + "step": 1804 + }, + { + "epoch": 0.17626953125, + "grad_norm": 0.33930692076683044, + "learning_rate": 0.0004707830598908561, + "loss": 4.7109, + "step": 1805 + }, + { + "epoch": 0.1763671875, + "grad_norm": 0.31630805134773254, + "learning_rate": 0.0004707485276664353, + "loss": 4.7031, + "step": 1806 + }, + { + "epoch": 0.17646484375, + "grad_norm": 0.3265106976032257, + "learning_rate": 0.0004707139764655232, + "loss": 4.7148, + "step": 1807 + }, + { + "epoch": 0.1765625, + "grad_norm": 0.320544570684433, + "learning_rate": 
0.0004706794062914694, + "loss": 4.7266, + "step": 1808 + }, + { + "epoch": 0.17666015625, + "grad_norm": 0.35693979263305664, + "learning_rate": 0.00047064481714762527, + "loss": 4.7109, + "step": 1809 + }, + { + "epoch": 0.1767578125, + "grad_norm": 0.3931410014629364, + "learning_rate": 0.00047061020903734396, + "loss": 4.7227, + "step": 1810 + }, + { + "epoch": 0.17685546875, + "grad_norm": 0.4092164635658264, + "learning_rate": 0.0004705755819639804, + "loss": 4.6797, + "step": 1811 + }, + { + "epoch": 0.176953125, + "grad_norm": 0.3898935317993164, + "learning_rate": 0.00047054093593089163, + "loss": 4.7109, + "step": 1812 + }, + { + "epoch": 0.17705078125, + "grad_norm": 0.30018892884254456, + "learning_rate": 0.0004705062709414363, + "loss": 4.7344, + "step": 1813 + }, + { + "epoch": 0.1771484375, + "grad_norm": 0.3135061264038086, + "learning_rate": 0.00047047158699897485, + "loss": 4.7461, + "step": 1814 + }, + { + "epoch": 0.17724609375, + "grad_norm": 0.33332744240760803, + "learning_rate": 0.00047043688410686977, + "loss": 4.6875, + "step": 1815 + }, + { + "epoch": 0.17734375, + "grad_norm": 0.3224795460700989, + "learning_rate": 0.0004704021622684851, + "loss": 4.7148, + "step": 1816 + }, + { + "epoch": 0.17744140625, + "grad_norm": 0.3023662865161896, + "learning_rate": 0.0004703674214871871, + "loss": 4.7148, + "step": 1817 + }, + { + "epoch": 0.1775390625, + "grad_norm": 0.32896143198013306, + "learning_rate": 0.00047033266176634356, + "loss": 4.6992, + "step": 1818 + }, + { + "epoch": 0.17763671875, + "grad_norm": 0.27983352541923523, + "learning_rate": 0.00047029788310932406, + "loss": 4.7266, + "step": 1819 + }, + { + "epoch": 0.177734375, + "grad_norm": 0.29561662673950195, + "learning_rate": 0.0004702630855195003, + "loss": 4.6914, + "step": 1820 + }, + { + "epoch": 0.17783203125, + "grad_norm": 0.2955554127693176, + "learning_rate": 0.0004702282690002458, + "loss": 4.6875, + "step": 1821 + }, + { + "epoch": 0.1779296875, + "grad_norm": 0.31012076139450073, + "learning_rate": 0.00047019343355493554, + "loss": 4.7148, + "step": 1822 + }, + { + "epoch": 0.17802734375, + "grad_norm": 0.3344959318637848, + "learning_rate": 0.00047015857918694665, + "loss": 4.7578, + "step": 1823 + }, + { + "epoch": 0.178125, + "grad_norm": 0.40076538920402527, + "learning_rate": 0.00047012370589965814, + "loss": 4.7227, + "step": 1824 + }, + { + "epoch": 0.17822265625, + "grad_norm": 0.42246541380882263, + "learning_rate": 0.0004700888136964506, + "loss": 4.707, + "step": 1825 + }, + { + "epoch": 0.1783203125, + "grad_norm": 0.4614593982696533, + "learning_rate": 0.00047005390258070663, + "loss": 4.6836, + "step": 1826 + }, + { + "epoch": 0.17841796875, + "grad_norm": 0.4689350426197052, + "learning_rate": 0.0004700189725558107, + "loss": 4.6875, + "step": 1827 + }, + { + "epoch": 0.178515625, + "grad_norm": 0.4179334044456482, + "learning_rate": 0.00046998402362514893, + "loss": 4.6797, + "step": 1828 + }, + { + "epoch": 0.17861328125, + "grad_norm": 0.3429284989833832, + "learning_rate": 0.00046994905579210937, + "loss": 4.707, + "step": 1829 + }, + { + "epoch": 0.1787109375, + "grad_norm": 0.3537569046020508, + "learning_rate": 0.00046991406906008213, + "loss": 4.7305, + "step": 1830 + }, + { + "epoch": 0.17880859375, + "grad_norm": 0.38520169258117676, + "learning_rate": 0.00046987906343245865, + "loss": 4.7031, + "step": 1831 + }, + { + "epoch": 0.17890625, + "grad_norm": 0.3677808940410614, + "learning_rate": 0.0004698440389126327, + "loss": 4.6953, + "step": 1832 + }, + { + 
"epoch": 0.17900390625, + "grad_norm": 0.2886373698711395, + "learning_rate": 0.0004698089955039995, + "loss": 4.7383, + "step": 1833 + }, + { + "epoch": 0.1791015625, + "grad_norm": 0.2950337529182434, + "learning_rate": 0.0004697739332099564, + "loss": 4.7227, + "step": 1834 + }, + { + "epoch": 0.17919921875, + "grad_norm": 0.37891969084739685, + "learning_rate": 0.00046973885203390245, + "loss": 4.707, + "step": 1835 + }, + { + "epoch": 0.179296875, + "grad_norm": 0.3522384464740753, + "learning_rate": 0.0004697037519792384, + "loss": 4.7031, + "step": 1836 + }, + { + "epoch": 0.17939453125, + "grad_norm": 0.35117805004119873, + "learning_rate": 0.0004696686330493672, + "loss": 4.6953, + "step": 1837 + }, + { + "epoch": 0.1794921875, + "grad_norm": 0.30079415440559387, + "learning_rate": 0.0004696334952476931, + "loss": 4.7266, + "step": 1838 + }, + { + "epoch": 0.17958984375, + "grad_norm": 0.32153111696243286, + "learning_rate": 0.0004695983385776227, + "loss": 4.7617, + "step": 1839 + }, + { + "epoch": 0.1796875, + "grad_norm": 0.3145742416381836, + "learning_rate": 0.0004695631630425641, + "loss": 4.7344, + "step": 1840 + }, + { + "epoch": 0.17978515625, + "grad_norm": 0.30176612734794617, + "learning_rate": 0.00046952796864592727, + "loss": 4.7383, + "step": 1841 + }, + { + "epoch": 0.1798828125, + "grad_norm": 0.3110758364200592, + "learning_rate": 0.00046949275539112423, + "loss": 4.6797, + "step": 1842 + }, + { + "epoch": 0.17998046875, + "grad_norm": 0.29200172424316406, + "learning_rate": 0.0004694575232815686, + "loss": 4.7109, + "step": 1843 + }, + { + "epoch": 0.180078125, + "grad_norm": 0.311064213514328, + "learning_rate": 0.0004694222723206759, + "loss": 4.6992, + "step": 1844 + }, + { + "epoch": 0.18017578125, + "grad_norm": 0.29677876830101013, + "learning_rate": 0.0004693870025118633, + "loss": 4.6719, + "step": 1845 + }, + { + "epoch": 0.1802734375, + "grad_norm": 0.31672269105911255, + "learning_rate": 0.00046935171385855025, + "loss": 4.6602, + "step": 1846 + }, + { + "epoch": 0.18037109375, + "grad_norm": 0.34578704833984375, + "learning_rate": 0.00046931640636415755, + "loss": 4.6875, + "step": 1847 + }, + { + "epoch": 0.18046875, + "grad_norm": 0.3357842266559601, + "learning_rate": 0.00046928108003210805, + "loss": 4.6914, + "step": 1848 + }, + { + "epoch": 0.18056640625, + "grad_norm": 0.35017260909080505, + "learning_rate": 0.0004692457348658265, + "loss": 4.707, + "step": 1849 + }, + { + "epoch": 0.1806640625, + "grad_norm": 0.38192495703697205, + "learning_rate": 0.00046921037086873927, + "loss": 4.6719, + "step": 1850 + }, + { + "epoch": 0.18076171875, + "grad_norm": 0.38436266779899597, + "learning_rate": 0.0004691749880442747, + "loss": 4.7109, + "step": 1851 + }, + { + "epoch": 0.180859375, + "grad_norm": 0.3527016341686249, + "learning_rate": 0.00046913958639586295, + "loss": 4.6914, + "step": 1852 + }, + { + "epoch": 0.18095703125, + "grad_norm": 0.3024858236312866, + "learning_rate": 0.0004691041659269359, + "loss": 4.6875, + "step": 1853 + }, + { + "epoch": 0.1810546875, + "grad_norm": 0.2871468663215637, + "learning_rate": 0.00046906872664092734, + "loss": 4.6875, + "step": 1854 + }, + { + "epoch": 0.18115234375, + "grad_norm": 0.3229468762874603, + "learning_rate": 0.00046903326854127287, + "loss": 4.7109, + "step": 1855 + }, + { + "epoch": 0.18125, + "grad_norm": 0.3446221351623535, + "learning_rate": 0.0004689977916314099, + "loss": 4.6914, + "step": 1856 + }, + { + "epoch": 0.18134765625, + "grad_norm": 0.39268386363983154, + "learning_rate": 
0.0004689622959147778, + "loss": 4.7031, + "step": 1857 + }, + { + "epoch": 0.1814453125, + "grad_norm": 0.4465988874435425, + "learning_rate": 0.00046892678139481744, + "loss": 4.7109, + "step": 1858 + }, + { + "epoch": 0.18154296875, + "grad_norm": 0.44179853796958923, + "learning_rate": 0.0004688912480749718, + "loss": 4.6953, + "step": 1859 + }, + { + "epoch": 0.181640625, + "grad_norm": 0.3754976987838745, + "learning_rate": 0.0004688556959586857, + "loss": 4.6836, + "step": 1860 + }, + { + "epoch": 0.18173828125, + "grad_norm": 0.3733668029308319, + "learning_rate": 0.0004688201250494055, + "loss": 4.6914, + "step": 1861 + }, + { + "epoch": 0.1818359375, + "grad_norm": 0.33280256390571594, + "learning_rate": 0.00046878453535057965, + "loss": 4.6875, + "step": 1862 + }, + { + "epoch": 0.18193359375, + "grad_norm": 0.36261236667633057, + "learning_rate": 0.00046874892686565834, + "loss": 4.6992, + "step": 1863 + }, + { + "epoch": 0.18203125, + "grad_norm": 0.3954784572124481, + "learning_rate": 0.0004687132995980935, + "loss": 4.7031, + "step": 1864 + }, + { + "epoch": 0.18212890625, + "grad_norm": 0.3743366003036499, + "learning_rate": 0.00046867765355133905, + "loss": 4.7266, + "step": 1865 + }, + { + "epoch": 0.1822265625, + "grad_norm": 0.325110524892807, + "learning_rate": 0.0004686419887288506, + "loss": 4.7188, + "step": 1866 + }, + { + "epoch": 0.18232421875, + "grad_norm": 0.28901422023773193, + "learning_rate": 0.0004686063051340856, + "loss": 4.7031, + "step": 1867 + }, + { + "epoch": 0.182421875, + "grad_norm": 0.26248887181282043, + "learning_rate": 0.00046857060277050324, + "loss": 4.6875, + "step": 1868 + }, + { + "epoch": 0.18251953125, + "grad_norm": 0.29704171419143677, + "learning_rate": 0.00046853488164156476, + "loss": 4.6953, + "step": 1869 + }, + { + "epoch": 0.1826171875, + "grad_norm": 0.3862922787666321, + "learning_rate": 0.00046849914175073305, + "loss": 4.6758, + "step": 1870 + }, + { + "epoch": 0.18271484375, + "grad_norm": 0.44130098819732666, + "learning_rate": 0.0004684633831014728, + "loss": 4.6992, + "step": 1871 + }, + { + "epoch": 0.1828125, + "grad_norm": 0.44974687695503235, + "learning_rate": 0.00046842760569725065, + "loss": 4.7188, + "step": 1872 + }, + { + "epoch": 0.18291015625, + "grad_norm": 0.3364246189594269, + "learning_rate": 0.00046839180954153485, + "loss": 4.6836, + "step": 1873 + }, + { + "epoch": 0.1830078125, + "grad_norm": 0.2785428762435913, + "learning_rate": 0.00046835599463779573, + "loss": 4.6914, + "step": 1874 + }, + { + "epoch": 0.18310546875, + "grad_norm": 0.3570761978626251, + "learning_rate": 0.0004683201609895052, + "loss": 4.7031, + "step": 1875 + }, + { + "epoch": 0.183203125, + "grad_norm": 0.4263145923614502, + "learning_rate": 0.0004682843086001371, + "loss": 4.6875, + "step": 1876 + }, + { + "epoch": 0.18330078125, + "grad_norm": 0.4154239594936371, + "learning_rate": 0.00046824843747316717, + "loss": 4.7031, + "step": 1877 + }, + { + "epoch": 0.1833984375, + "grad_norm": 0.34865015745162964, + "learning_rate": 0.0004682125476120728, + "loss": 4.7109, + "step": 1878 + }, + { + "epoch": 0.18349609375, + "grad_norm": 0.3163270950317383, + "learning_rate": 0.00046817663902033323, + "loss": 4.6953, + "step": 1879 + }, + { + "epoch": 0.18359375, + "grad_norm": 0.3564985394477844, + "learning_rate": 0.00046814071170142964, + "loss": 4.6914, + "step": 1880 + }, + { + "epoch": 0.18369140625, + "grad_norm": 0.36978796124458313, + "learning_rate": 0.0004681047656588449, + "loss": 4.6914, + "step": 1881 + }, + { + 
"epoch": 0.1837890625, + "grad_norm": 0.310011625289917, + "learning_rate": 0.00046806880089606375, + "loss": 4.7109, + "step": 1882 + }, + { + "epoch": 0.18388671875, + "grad_norm": 0.25916099548339844, + "learning_rate": 0.00046803281741657264, + "loss": 4.7031, + "step": 1883 + }, + { + "epoch": 0.183984375, + "grad_norm": 0.3122245669364929, + "learning_rate": 0.00046799681522386013, + "loss": 4.6953, + "step": 1884 + }, + { + "epoch": 0.18408203125, + "grad_norm": 0.3948974013328552, + "learning_rate": 0.0004679607943214162, + "loss": 4.7383, + "step": 1885 + }, + { + "epoch": 0.1841796875, + "grad_norm": 0.38093316555023193, + "learning_rate": 0.00046792475471273283, + "loss": 4.6719, + "step": 1886 + }, + { + "epoch": 0.18427734375, + "grad_norm": 0.33030474185943604, + "learning_rate": 0.0004678886964013039, + "loss": 4.7188, + "step": 1887 + }, + { + "epoch": 0.184375, + "grad_norm": 0.3063085675239563, + "learning_rate": 0.000467852619390625, + "loss": 4.6719, + "step": 1888 + }, + { + "epoch": 0.18447265625, + "grad_norm": 0.3019408881664276, + "learning_rate": 0.0004678165236841936, + "loss": 4.7031, + "step": 1889 + }, + { + "epoch": 0.1845703125, + "grad_norm": 0.32429105043411255, + "learning_rate": 0.0004677804092855088, + "loss": 4.7031, + "step": 1890 + }, + { + "epoch": 0.18466796875, + "grad_norm": 0.3127208650112152, + "learning_rate": 0.00046774427619807176, + "loss": 4.6875, + "step": 1891 + }, + { + "epoch": 0.184765625, + "grad_norm": 0.33778461813926697, + "learning_rate": 0.0004677081244253853, + "loss": 4.6523, + "step": 1892 + }, + { + "epoch": 0.18486328125, + "grad_norm": 0.32395920157432556, + "learning_rate": 0.00046767195397095406, + "loss": 4.7031, + "step": 1893 + }, + { + "epoch": 0.1849609375, + "grad_norm": 0.27252617478370667, + "learning_rate": 0.0004676357648382846, + "loss": 4.6992, + "step": 1894 + }, + { + "epoch": 0.18505859375, + "grad_norm": 0.2853223979473114, + "learning_rate": 0.0004675995570308852, + "loss": 4.6992, + "step": 1895 + }, + { + "epoch": 0.18515625, + "grad_norm": 0.2883679270744324, + "learning_rate": 0.0004675633305522658, + "loss": 4.6719, + "step": 1896 + }, + { + "epoch": 0.18525390625, + "grad_norm": 0.25876960158348083, + "learning_rate": 0.00046752708540593853, + "loss": 4.6875, + "step": 1897 + }, + { + "epoch": 0.1853515625, + "grad_norm": 0.2769707143306732, + "learning_rate": 0.00046749082159541696, + "loss": 4.707, + "step": 1898 + }, + { + "epoch": 0.18544921875, + "grad_norm": 0.3227038085460663, + "learning_rate": 0.0004674545391242167, + "loss": 4.707, + "step": 1899 + }, + { + "epoch": 0.185546875, + "grad_norm": 0.38810545206069946, + "learning_rate": 0.0004674182379958551, + "loss": 4.7305, + "step": 1900 + }, + { + "epoch": 0.18564453125, + "grad_norm": 0.479206383228302, + "learning_rate": 0.0004673819182138512, + "loss": 4.7266, + "step": 1901 + }, + { + "epoch": 0.1857421875, + "grad_norm": 0.531310498714447, + "learning_rate": 0.0004673455797817261, + "loss": 4.7109, + "step": 1902 + }, + { + "epoch": 0.18583984375, + "grad_norm": 0.4943915009498596, + "learning_rate": 0.0004673092227030024, + "loss": 4.7109, + "step": 1903 + }, + { + "epoch": 0.1859375, + "grad_norm": 0.3636752963066101, + "learning_rate": 0.0004672728469812049, + "loss": 4.7148, + "step": 1904 + }, + { + "epoch": 0.18603515625, + "grad_norm": 0.30614086985588074, + "learning_rate": 0.0004672364526198598, + "loss": 4.6953, + "step": 1905 + }, + { + "epoch": 0.1861328125, + "grad_norm": 0.36997532844543457, + "learning_rate": 
0.00046720003962249525, + "loss": 4.6758, + "step": 1906 + }, + { + "epoch": 0.18623046875, + "grad_norm": 0.3727617859840393, + "learning_rate": 0.00046716360799264135, + "loss": 4.6875, + "step": 1907 + }, + { + "epoch": 0.186328125, + "grad_norm": 0.3147006034851074, + "learning_rate": 0.00046712715773382986, + "loss": 4.7109, + "step": 1908 + }, + { + "epoch": 0.18642578125, + "grad_norm": 0.3325425088405609, + "learning_rate": 0.0004670906888495945, + "loss": 4.7109, + "step": 1909 + }, + { + "epoch": 0.1865234375, + "grad_norm": 0.34814977645874023, + "learning_rate": 0.0004670542013434705, + "loss": 4.7031, + "step": 1910 + }, + { + "epoch": 0.18662109375, + "grad_norm": 0.395072340965271, + "learning_rate": 0.0004670176952189952, + "loss": 4.6914, + "step": 1911 + }, + { + "epoch": 0.18671875, + "grad_norm": 0.3087127208709717, + "learning_rate": 0.0004669811704797075, + "loss": 4.6875, + "step": 1912 + }, + { + "epoch": 0.18681640625, + "grad_norm": 0.2690333425998688, + "learning_rate": 0.00046694462712914837, + "loss": 4.6797, + "step": 1913 + }, + { + "epoch": 0.1869140625, + "grad_norm": 0.2913764417171478, + "learning_rate": 0.0004669080651708604, + "loss": 4.707, + "step": 1914 + }, + { + "epoch": 0.18701171875, + "grad_norm": 0.28636500239372253, + "learning_rate": 0.0004668714846083879, + "loss": 4.6953, + "step": 1915 + }, + { + "epoch": 0.187109375, + "grad_norm": 0.27798396348953247, + "learning_rate": 0.0004668348854452772, + "loss": 4.6953, + "step": 1916 + }, + { + "epoch": 0.18720703125, + "grad_norm": 0.3135933578014374, + "learning_rate": 0.00046679826768507646, + "loss": 4.7344, + "step": 1917 + }, + { + "epoch": 0.1873046875, + "grad_norm": 0.2792069911956787, + "learning_rate": 0.0004667616313313353, + "loss": 4.6562, + "step": 1918 + }, + { + "epoch": 0.18740234375, + "grad_norm": 0.2715051472187042, + "learning_rate": 0.00046672497638760555, + "loss": 4.707, + "step": 1919 + }, + { + "epoch": 0.1875, + "grad_norm": 0.2932470142841339, + "learning_rate": 0.0004666883028574405, + "loss": 4.7109, + "step": 1920 + }, + { + "epoch": 0.18759765625, + "grad_norm": 0.3258669078350067, + "learning_rate": 0.0004666516107443956, + "loss": 4.6953, + "step": 1921 + }, + { + "epoch": 0.1876953125, + "grad_norm": 0.35614919662475586, + "learning_rate": 0.0004666149000520277, + "loss": 4.6953, + "step": 1922 + }, + { + "epoch": 0.18779296875, + "grad_norm": 0.35679420828819275, + "learning_rate": 0.0004665781707838957, + "loss": 4.707, + "step": 1923 + }, + { + "epoch": 0.187890625, + "grad_norm": 0.3622376024723053, + "learning_rate": 0.00046654142294356033, + "loss": 4.7109, + "step": 1924 + }, + { + "epoch": 0.18798828125, + "grad_norm": 0.400008887052536, + "learning_rate": 0.00046650465653458404, + "loss": 4.6836, + "step": 1925 + }, + { + "epoch": 0.1880859375, + "grad_norm": 0.40335577726364136, + "learning_rate": 0.00046646787156053097, + "loss": 4.6875, + "step": 1926 + }, + { + "epoch": 0.18818359375, + "grad_norm": 0.37399107217788696, + "learning_rate": 0.0004664310680249673, + "loss": 4.7031, + "step": 1927 + }, + { + "epoch": 0.18828125, + "grad_norm": 0.34047555923461914, + "learning_rate": 0.0004663942459314608, + "loss": 4.6836, + "step": 1928 + }, + { + "epoch": 0.18837890625, + "grad_norm": 0.2900930643081665, + "learning_rate": 0.00046635740528358125, + "loss": 4.6914, + "step": 1929 + }, + { + "epoch": 0.1884765625, + "grad_norm": 0.3214733600616455, + "learning_rate": 0.0004663205460848999, + "loss": 4.6445, + "step": 1930 + }, + { + "epoch": 
0.18857421875, + "grad_norm": 0.29667600989341736, + "learning_rate": 0.0004662836683389901, + "loss": 4.6992, + "step": 1931 + }, + { + "epoch": 0.188671875, + "grad_norm": 0.28890740871429443, + "learning_rate": 0.0004662467720494269, + "loss": 4.6836, + "step": 1932 + }, + { + "epoch": 0.18876953125, + "grad_norm": 0.3385658264160156, + "learning_rate": 0.0004662098572197872, + "loss": 4.6914, + "step": 1933 + }, + { + "epoch": 0.1888671875, + "grad_norm": 0.26928526163101196, + "learning_rate": 0.00046617292385364956, + "loss": 4.7188, + "step": 1934 + }, + { + "epoch": 0.18896484375, + "grad_norm": 0.3358452618122101, + "learning_rate": 0.00046613597195459445, + "loss": 4.6875, + "step": 1935 + }, + { + "epoch": 0.1890625, + "grad_norm": 0.3048867881298065, + "learning_rate": 0.0004660990015262041, + "loss": 4.6836, + "step": 1936 + }, + { + "epoch": 0.18916015625, + "grad_norm": 0.2835708558559418, + "learning_rate": 0.00046606201257206253, + "loss": 4.7031, + "step": 1937 + }, + { + "epoch": 0.1892578125, + "grad_norm": 0.29097795486450195, + "learning_rate": 0.0004660250050957556, + "loss": 4.6875, + "step": 1938 + }, + { + "epoch": 0.18935546875, + "grad_norm": 0.3146597445011139, + "learning_rate": 0.00046598797910087086, + "loss": 4.6992, + "step": 1939 + }, + { + "epoch": 0.189453125, + "grad_norm": 0.37111765146255493, + "learning_rate": 0.00046595093459099793, + "loss": 4.668, + "step": 1940 + }, + { + "epoch": 0.18955078125, + "grad_norm": 0.4816044867038727, + "learning_rate": 0.0004659138715697278, + "loss": 4.6641, + "step": 1941 + }, + { + "epoch": 0.1896484375, + "grad_norm": 0.6332393884658813, + "learning_rate": 0.0004658767900406535, + "loss": 4.7031, + "step": 1942 + }, + { + "epoch": 0.18974609375, + "grad_norm": 0.598022997379303, + "learning_rate": 0.00046583969000737, + "loss": 4.7148, + "step": 1943 + }, + { + "epoch": 0.18984375, + "grad_norm": 0.37639319896698, + "learning_rate": 0.00046580257147347366, + "loss": 4.6797, + "step": 1944 + }, + { + "epoch": 0.18994140625, + "grad_norm": 0.411358505487442, + "learning_rate": 0.00046576543444256307, + "loss": 4.6875, + "step": 1945 + }, + { + "epoch": 0.1900390625, + "grad_norm": 0.5302320718765259, + "learning_rate": 0.00046572827891823833, + "loss": 4.6992, + "step": 1946 + }, + { + "epoch": 0.19013671875, + "grad_norm": 0.41106823086738586, + "learning_rate": 0.0004656911049041014, + "loss": 4.6602, + "step": 1947 + }, + { + "epoch": 0.190234375, + "grad_norm": 0.2646370232105255, + "learning_rate": 0.00046565391240375607, + "loss": 4.7031, + "step": 1948 + }, + { + "epoch": 0.19033203125, + "grad_norm": 0.34403374791145325, + "learning_rate": 0.000465616701420808, + "loss": 4.7422, + "step": 1949 + }, + { + "epoch": 0.1904296875, + "grad_norm": 0.32895779609680176, + "learning_rate": 0.00046557947195886433, + "loss": 4.6992, + "step": 1950 + }, + { + "epoch": 0.19052734375, + "grad_norm": 0.2670215666294098, + "learning_rate": 0.0004655422240215344, + "loss": 4.6992, + "step": 1951 + }, + { + "epoch": 0.190625, + "grad_norm": 0.2915528118610382, + "learning_rate": 0.00046550495761242907, + "loss": 4.6875, + "step": 1952 + }, + { + "epoch": 0.19072265625, + "grad_norm": 0.30449408292770386, + "learning_rate": 0.000465467672735161, + "loss": 4.6523, + "step": 1953 + }, + { + "epoch": 0.1908203125, + "grad_norm": 0.30098956823349, + "learning_rate": 0.00046543036939334476, + "loss": 4.6914, + "step": 1954 + }, + { + "epoch": 0.19091796875, + "grad_norm": 0.31237950921058655, + "learning_rate": 
0.0004653930475905967, + "loss": 4.7031, + "step": 1955 + }, + { + "epoch": 0.191015625, + "grad_norm": 0.3021314740180969, + "learning_rate": 0.0004653557073305349, + "loss": 4.6562, + "step": 1956 + }, + { + "epoch": 0.19111328125, + "grad_norm": 0.29421451687812805, + "learning_rate": 0.0004653183486167792, + "loss": 4.6758, + "step": 1957 + }, + { + "epoch": 0.1912109375, + "grad_norm": 0.3067154884338379, + "learning_rate": 0.0004652809714529512, + "loss": 4.7344, + "step": 1958 + }, + { + "epoch": 0.19130859375, + "grad_norm": 0.3098396360874176, + "learning_rate": 0.0004652435758426746, + "loss": 4.6836, + "step": 1959 + }, + { + "epoch": 0.19140625, + "grad_norm": 0.3248749375343323, + "learning_rate": 0.00046520616178957454, + "loss": 4.6797, + "step": 1960 + }, + { + "epoch": 0.19150390625, + "grad_norm": 0.36084792017936707, + "learning_rate": 0.00046516872929727786, + "loss": 4.6875, + "step": 1961 + }, + { + "epoch": 0.1916015625, + "grad_norm": 0.3105924725532532, + "learning_rate": 0.0004651312783694137, + "loss": 4.707, + "step": 1962 + }, + { + "epoch": 0.19169921875, + "grad_norm": 0.2764946520328522, + "learning_rate": 0.0004650938090096125, + "loss": 4.6797, + "step": 1963 + }, + { + "epoch": 0.191796875, + "grad_norm": 0.2781250476837158, + "learning_rate": 0.0004650563212215066, + "loss": 4.6914, + "step": 1964 + }, + { + "epoch": 0.19189453125, + "grad_norm": 0.3017931282520294, + "learning_rate": 0.00046501881500873036, + "loss": 4.6914, + "step": 1965 + }, + { + "epoch": 0.1919921875, + "grad_norm": 0.32291722297668457, + "learning_rate": 0.0004649812903749196, + "loss": 4.6562, + "step": 1966 + }, + { + "epoch": 0.19208984375, + "grad_norm": 0.30429041385650635, + "learning_rate": 0.0004649437473237122, + "loss": 4.6953, + "step": 1967 + }, + { + "epoch": 0.1921875, + "grad_norm": 0.30845269560813904, + "learning_rate": 0.0004649061858587476, + "loss": 4.6914, + "step": 1968 + }, + { + "epoch": 0.19228515625, + "grad_norm": 0.28626877069473267, + "learning_rate": 0.0004648686059836672, + "loss": 4.7188, + "step": 1969 + }, + { + "epoch": 0.1923828125, + "grad_norm": 0.2860487997531891, + "learning_rate": 0.0004648310077021141, + "loss": 4.6875, + "step": 1970 + }, + { + "epoch": 0.19248046875, + "grad_norm": 0.2899281978607178, + "learning_rate": 0.00046479339101773313, + "loss": 4.7266, + "step": 1971 + }, + { + "epoch": 0.192578125, + "grad_norm": 0.3460501432418823, + "learning_rate": 0.0004647557559341712, + "loss": 4.6797, + "step": 1972 + }, + { + "epoch": 0.19267578125, + "grad_norm": 0.32573822140693665, + "learning_rate": 0.00046471810245507646, + "loss": 4.6562, + "step": 1973 + }, + { + "epoch": 0.1927734375, + "grad_norm": 0.31463614106178284, + "learning_rate": 0.00046468043058409933, + "loss": 4.668, + "step": 1974 + }, + { + "epoch": 0.19287109375, + "grad_norm": 0.34310510754585266, + "learning_rate": 0.0004646427403248919, + "loss": 4.6914, + "step": 1975 + }, + { + "epoch": 0.19296875, + "grad_norm": 0.43290114402770996, + "learning_rate": 0.0004646050316811078, + "loss": 4.6758, + "step": 1976 + }, + { + "epoch": 0.19306640625, + "grad_norm": 0.5048022270202637, + "learning_rate": 0.00046456730465640274, + "loss": 4.6719, + "step": 1977 + }, + { + "epoch": 0.1931640625, + "grad_norm": 0.5019853711128235, + "learning_rate": 0.00046452955925443414, + "loss": 4.6992, + "step": 1978 + }, + { + "epoch": 0.19326171875, + "grad_norm": 0.354935884475708, + "learning_rate": 0.00046449179547886104, + "loss": 4.6602, + "step": 1979 + }, + { + "epoch": 
0.193359375, + "grad_norm": 0.29509827494621277, + "learning_rate": 0.00046445401333334457, + "loss": 4.6758, + "step": 1980 + }, + { + "epoch": 0.19345703125, + "grad_norm": 0.3451140224933624, + "learning_rate": 0.00046441621282154727, + "loss": 4.7148, + "step": 1981 + }, + { + "epoch": 0.1935546875, + "grad_norm": 0.3382677435874939, + "learning_rate": 0.00046437839394713364, + "loss": 4.6992, + "step": 1982 + }, + { + "epoch": 0.19365234375, + "grad_norm": 0.32868877053260803, + "learning_rate": 0.0004643405567137702, + "loss": 4.6914, + "step": 1983 + }, + { + "epoch": 0.19375, + "grad_norm": 0.3491230607032776, + "learning_rate": 0.00046430270112512474, + "loss": 4.6953, + "step": 1984 + }, + { + "epoch": 0.19384765625, + "grad_norm": 0.36351510882377625, + "learning_rate": 0.00046426482718486725, + "loss": 4.6797, + "step": 1985 + }, + { + "epoch": 0.1939453125, + "grad_norm": 0.32560887932777405, + "learning_rate": 0.00046422693489666923, + "loss": 4.6758, + "step": 1986 + }, + { + "epoch": 0.19404296875, + "grad_norm": 0.28270575404167175, + "learning_rate": 0.0004641890242642042, + "loss": 4.707, + "step": 1987 + }, + { + "epoch": 0.194140625, + "grad_norm": 0.2650778889656067, + "learning_rate": 0.0004641510952911473, + "loss": 4.6602, + "step": 1988 + }, + { + "epoch": 0.19423828125, + "grad_norm": 0.2566971182823181, + "learning_rate": 0.00046411314798117543, + "loss": 4.6797, + "step": 1989 + }, + { + "epoch": 0.1943359375, + "grad_norm": 0.25341349840164185, + "learning_rate": 0.00046407518233796747, + "loss": 4.6641, + "step": 1990 + }, + { + "epoch": 0.19443359375, + "grad_norm": 0.24855858087539673, + "learning_rate": 0.0004640371983652038, + "loss": 4.6914, + "step": 1991 + }, + { + "epoch": 0.19453125, + "grad_norm": 0.2411157190799713, + "learning_rate": 0.0004639991960665668, + "loss": 4.6875, + "step": 1992 + }, + { + "epoch": 0.19462890625, + "grad_norm": 0.28344714641571045, + "learning_rate": 0.00046396117544574033, + "loss": 4.6719, + "step": 1993 + }, + { + "epoch": 0.1947265625, + "grad_norm": 0.26913028955459595, + "learning_rate": 0.00046392313650641054, + "loss": 4.6953, + "step": 1994 + }, + { + "epoch": 0.19482421875, + "grad_norm": 0.26939257979393005, + "learning_rate": 0.0004638850792522649, + "loss": 4.7266, + "step": 1995 + }, + { + "epoch": 0.194921875, + "grad_norm": 0.26686719059944153, + "learning_rate": 0.0004638470036869927, + "loss": 4.6836, + "step": 1996 + }, + { + "epoch": 0.19501953125, + "grad_norm": 0.27638185024261475, + "learning_rate": 0.00046380890981428523, + "loss": 4.6797, + "step": 1997 + }, + { + "epoch": 0.1951171875, + "grad_norm": 0.3258664608001709, + "learning_rate": 0.00046377079763783535, + "loss": 4.6562, + "step": 1998 + }, + { + "epoch": 0.19521484375, + "grad_norm": 0.3155481219291687, + "learning_rate": 0.0004637326671613379, + "loss": 4.6836, + "step": 1999 + }, + { + "epoch": 0.1953125, + "grad_norm": 0.3607693314552307, + "learning_rate": 0.0004636945183884893, + "loss": 4.6641, + "step": 2000 + }, + { + "epoch": 0.19541015625, + "grad_norm": 0.4845200181007385, + "learning_rate": 0.00046365635132298785, + "loss": 4.6758, + "step": 2001 + }, + { + "epoch": 0.1955078125, + "grad_norm": 0.49261143803596497, + "learning_rate": 0.0004636181659685335, + "loss": 4.7227, + "step": 2002 + }, + { + "epoch": 0.19560546875, + "grad_norm": 0.571151077747345, + "learning_rate": 0.00046357996232882805, + "loss": 4.7031, + "step": 2003 + }, + { + "epoch": 0.195703125, + "grad_norm": 0.5619701743125916, + "learning_rate": 
0.00046354174040757524, + "loss": 4.7188, + "step": 2004 + }, + { + "epoch": 0.19580078125, + "grad_norm": 0.3851298987865448, + "learning_rate": 0.00046350350020848036, + "loss": 4.6875, + "step": 2005 + }, + { + "epoch": 0.1958984375, + "grad_norm": 0.30450525879859924, + "learning_rate": 0.0004634652417352504, + "loss": 4.6953, + "step": 2006 + }, + { + "epoch": 0.19599609375, + "grad_norm": 0.3562672734260559, + "learning_rate": 0.0004634269649915944, + "loss": 4.6562, + "step": 2007 + }, + { + "epoch": 0.19609375, + "grad_norm": 0.3731500804424286, + "learning_rate": 0.0004633886699812231, + "loss": 4.6836, + "step": 2008 + }, + { + "epoch": 0.19619140625, + "grad_norm": 0.3278704285621643, + "learning_rate": 0.00046335035670784877, + "loss": 4.6797, + "step": 2009 + }, + { + "epoch": 0.1962890625, + "grad_norm": 0.2733931541442871, + "learning_rate": 0.00046331202517518573, + "loss": 4.6641, + "step": 2010 + }, + { + "epoch": 0.19638671875, + "grad_norm": 0.2989756166934967, + "learning_rate": 0.00046327367538694987, + "loss": 4.6914, + "step": 2011 + }, + { + "epoch": 0.196484375, + "grad_norm": 0.2858913242816925, + "learning_rate": 0.00046323530734685906, + "loss": 4.668, + "step": 2012 + }, + { + "epoch": 0.19658203125, + "grad_norm": 0.272771954536438, + "learning_rate": 0.0004631969210586327, + "loss": 4.6836, + "step": 2013 + }, + { + "epoch": 0.1966796875, + "grad_norm": 0.2855667769908905, + "learning_rate": 0.00046315851652599214, + "loss": 4.6875, + "step": 2014 + }, + { + "epoch": 0.19677734375, + "grad_norm": 0.28688544034957886, + "learning_rate": 0.00046312009375266055, + "loss": 4.668, + "step": 2015 + }, + { + "epoch": 0.196875, + "grad_norm": 0.3098505735397339, + "learning_rate": 0.0004630816527423625, + "loss": 4.6758, + "step": 2016 + }, + { + "epoch": 0.19697265625, + "grad_norm": 0.31781521439552307, + "learning_rate": 0.0004630431934988248, + "loss": 4.6641, + "step": 2017 + }, + { + "epoch": 0.1970703125, + "grad_norm": 0.33543410897254944, + "learning_rate": 0.00046300471602577577, + "loss": 4.6953, + "step": 2018 + }, + { + "epoch": 0.19716796875, + "grad_norm": 0.31457531452178955, + "learning_rate": 0.0004629662203269455, + "loss": 4.6953, + "step": 2019 + }, + { + "epoch": 0.197265625, + "grad_norm": 0.3110297918319702, + "learning_rate": 0.00046292770640606593, + "loss": 4.7031, + "step": 2020 + }, + { + "epoch": 0.19736328125, + "grad_norm": 0.2973058521747589, + "learning_rate": 0.00046288917426687054, + "loss": 4.6875, + "step": 2021 + }, + { + "epoch": 0.1974609375, + "grad_norm": 0.3128877878189087, + "learning_rate": 0.000462850623913095, + "loss": 4.6797, + "step": 2022 + }, + { + "epoch": 0.19755859375, + "grad_norm": 0.3213253319263458, + "learning_rate": 0.00046281205534847645, + "loss": 4.6914, + "step": 2023 + }, + { + "epoch": 0.19765625, + "grad_norm": 0.3947957754135132, + "learning_rate": 0.0004627734685767538, + "loss": 4.6797, + "step": 2024 + }, + { + "epoch": 0.19775390625, + "grad_norm": 0.4552753269672394, + "learning_rate": 0.00046273486360166784, + "loss": 4.6602, + "step": 2025 + }, + { + "epoch": 0.1978515625, + "grad_norm": 0.41214901208877563, + "learning_rate": 0.00046269624042696096, + "loss": 4.6641, + "step": 2026 + }, + { + "epoch": 0.19794921875, + "grad_norm": 0.3428378105163574, + "learning_rate": 0.0004626575990563775, + "loss": 4.6875, + "step": 2027 + }, + { + "epoch": 0.198046875, + "grad_norm": 0.29760101437568665, + "learning_rate": 0.0004626189394936634, + "loss": 4.6797, + "step": 2028 + }, + { + "epoch": 
0.19814453125, + "grad_norm": 0.2713153064250946, + "learning_rate": 0.0004625802617425665, + "loss": 4.6992, + "step": 2029 + }, + { + "epoch": 0.1982421875, + "grad_norm": 0.29948198795318604, + "learning_rate": 0.00046254156580683635, + "loss": 4.6719, + "step": 2030 + }, + { + "epoch": 0.19833984375, + "grad_norm": 0.32214468717575073, + "learning_rate": 0.00046250285169022426, + "loss": 4.6719, + "step": 2031 + }, + { + "epoch": 0.1984375, + "grad_norm": 0.3129555881023407, + "learning_rate": 0.0004624641193964833, + "loss": 4.668, + "step": 2032 + }, + { + "epoch": 0.19853515625, + "grad_norm": 0.28778257966041565, + "learning_rate": 0.0004624253689293682, + "loss": 4.6992, + "step": 2033 + }, + { + "epoch": 0.1986328125, + "grad_norm": 0.2705881595611572, + "learning_rate": 0.00046238660029263576, + "loss": 4.6641, + "step": 2034 + }, + { + "epoch": 0.19873046875, + "grad_norm": 0.3296143412590027, + "learning_rate": 0.0004623478134900441, + "loss": 4.6406, + "step": 2035 + }, + { + "epoch": 0.198828125, + "grad_norm": 0.3862660229206085, + "learning_rate": 0.00046230900852535354, + "loss": 4.6797, + "step": 2036 + }, + { + "epoch": 0.19892578125, + "grad_norm": 0.3979128301143646, + "learning_rate": 0.00046227018540232585, + "loss": 4.7109, + "step": 2037 + }, + { + "epoch": 0.1990234375, + "grad_norm": 0.37791329622268677, + "learning_rate": 0.00046223134412472466, + "loss": 4.6719, + "step": 2038 + }, + { + "epoch": 0.19912109375, + "grad_norm": 0.28339502215385437, + "learning_rate": 0.00046219248469631547, + "loss": 4.6992, + "step": 2039 + }, + { + "epoch": 0.19921875, + "grad_norm": 0.2708699703216553, + "learning_rate": 0.0004621536071208653, + "loss": 4.6875, + "step": 2040 + }, + { + "epoch": 0.19931640625, + "grad_norm": 0.28507518768310547, + "learning_rate": 0.00046211471140214315, + "loss": 4.6836, + "step": 2041 + }, + { + "epoch": 0.1994140625, + "grad_norm": 0.3589359521865845, + "learning_rate": 0.0004620757975439197, + "loss": 4.6875, + "step": 2042 + }, + { + "epoch": 0.19951171875, + "grad_norm": 0.4032973051071167, + "learning_rate": 0.00046203686554996734, + "loss": 4.6914, + "step": 2043 + }, + { + "epoch": 0.199609375, + "grad_norm": 0.41511282324790955, + "learning_rate": 0.0004619979154240603, + "loss": 4.6758, + "step": 2044 + }, + { + "epoch": 0.19970703125, + "grad_norm": 0.32864612340927124, + "learning_rate": 0.00046195894716997456, + "loss": 4.6562, + "step": 2045 + }, + { + "epoch": 0.1998046875, + "grad_norm": 0.32496780157089233, + "learning_rate": 0.0004619199607914877, + "loss": 4.6797, + "step": 2046 + }, + { + "epoch": 0.19990234375, + "grad_norm": 0.3588610291481018, + "learning_rate": 0.00046188095629237934, + "loss": 4.6641, + "step": 2047 + }, + { + "epoch": 0.2, + "grad_norm": 0.37276491522789, + "learning_rate": 0.00046184193367643055, + "loss": 4.6875, + "step": 2048 + }, + { + "epoch": 0.20009765625, + "grad_norm": 0.38757872581481934, + "learning_rate": 0.0004618028929474245, + "loss": 4.6875, + "step": 2049 + }, + { + "epoch": 0.2001953125, + "grad_norm": 0.3839094340801239, + "learning_rate": 0.00046176383410914576, + "loss": 4.6758, + "step": 2050 + }, + { + "epoch": 0.20029296875, + "grad_norm": 0.3270066976547241, + "learning_rate": 0.0004617247571653809, + "loss": 4.6836, + "step": 2051 + }, + { + "epoch": 0.200390625, + "grad_norm": 0.26854777336120605, + "learning_rate": 0.00046168566211991807, + "loss": 4.7109, + "step": 2052 + }, + { + "epoch": 0.20048828125, + "grad_norm": 0.35013115406036377, + "learning_rate": 
0.00046164654897654745, + "loss": 4.6641, + "step": 2053 + }, + { + "epoch": 0.2005859375, + "grad_norm": 0.3897053599357605, + "learning_rate": 0.00046160741773906063, + "loss": 4.6875, + "step": 2054 + }, + { + "epoch": 0.20068359375, + "grad_norm": 0.3971879780292511, + "learning_rate": 0.00046156826841125116, + "loss": 4.7109, + "step": 2055 + }, + { + "epoch": 0.20078125, + "grad_norm": 0.3414461612701416, + "learning_rate": 0.00046152910099691425, + "loss": 4.6719, + "step": 2056 + }, + { + "epoch": 0.20087890625, + "grad_norm": 0.3158649206161499, + "learning_rate": 0.00046148991549984703, + "loss": 4.668, + "step": 2057 + }, + { + "epoch": 0.2009765625, + "grad_norm": 0.2903972268104553, + "learning_rate": 0.00046145071192384824, + "loss": 4.7227, + "step": 2058 + }, + { + "epoch": 0.20107421875, + "grad_norm": 0.27911853790283203, + "learning_rate": 0.0004614114902727183, + "loss": 4.6719, + "step": 2059 + }, + { + "epoch": 0.201171875, + "grad_norm": 0.3521988093852997, + "learning_rate": 0.0004613722505502596, + "loss": 4.6836, + "step": 2060 + }, + { + "epoch": 0.20126953125, + "grad_norm": 0.3642941415309906, + "learning_rate": 0.0004613329927602762, + "loss": 4.6602, + "step": 2061 + }, + { + "epoch": 0.2013671875, + "grad_norm": 0.322725772857666, + "learning_rate": 0.0004612937169065737, + "loss": 4.6445, + "step": 2062 + }, + { + "epoch": 0.20146484375, + "grad_norm": 0.2506091594696045, + "learning_rate": 0.0004612544229929597, + "loss": 4.6836, + "step": 2063 + }, + { + "epoch": 0.2015625, + "grad_norm": 0.29945215582847595, + "learning_rate": 0.00046121511102324356, + "loss": 4.6797, + "step": 2064 + }, + { + "epoch": 0.20166015625, + "grad_norm": 0.3536456227302551, + "learning_rate": 0.00046117578100123626, + "loss": 4.6641, + "step": 2065 + }, + { + "epoch": 0.2017578125, + "grad_norm": 0.34393638372421265, + "learning_rate": 0.0004611364329307505, + "loss": 4.6836, + "step": 2066 + }, + { + "epoch": 0.20185546875, + "grad_norm": 0.3276403248310089, + "learning_rate": 0.000461097066815601, + "loss": 4.6875, + "step": 2067 + }, + { + "epoch": 0.201953125, + "grad_norm": 0.3227459192276001, + "learning_rate": 0.00046105768265960383, + "loss": 4.7031, + "step": 2068 + }, + { + "epoch": 0.20205078125, + "grad_norm": 0.290600448846817, + "learning_rate": 0.00046101828046657704, + "loss": 4.6641, + "step": 2069 + }, + { + "epoch": 0.2021484375, + "grad_norm": 0.2874350845813751, + "learning_rate": 0.0004609788602403406, + "loss": 4.6641, + "step": 2070 + }, + { + "epoch": 0.20224609375, + "grad_norm": 0.2911100685596466, + "learning_rate": 0.0004609394219847159, + "loss": 4.6797, + "step": 2071 + }, + { + "epoch": 0.20234375, + "grad_norm": 0.26147720217704773, + "learning_rate": 0.00046089996570352617, + "loss": 4.6797, + "step": 2072 + }, + { + "epoch": 0.20244140625, + "grad_norm": 0.2521381676197052, + "learning_rate": 0.0004608604914005964, + "loss": 4.6953, + "step": 2073 + }, + { + "epoch": 0.2025390625, + "grad_norm": 0.25944972038269043, + "learning_rate": 0.0004608209990797536, + "loss": 4.6914, + "step": 2074 + }, + { + "epoch": 0.20263671875, + "grad_norm": 0.25827011466026306, + "learning_rate": 0.000460781488744826, + "loss": 4.7109, + "step": 2075 + }, + { + "epoch": 0.202734375, + "grad_norm": 0.2571582496166229, + "learning_rate": 0.00046074196039964395, + "loss": 4.6797, + "step": 2076 + }, + { + "epoch": 0.20283203125, + "grad_norm": 0.25578388571739197, + "learning_rate": 0.00046070241404803946, + "loss": 4.6562, + "step": 2077 + }, + { + "epoch": 
0.2029296875, + "grad_norm": 0.2714509069919586, + "learning_rate": 0.00046066284969384635, + "loss": 4.6797, + "step": 2078 + }, + { + "epoch": 0.20302734375, + "grad_norm": 0.30416423082351685, + "learning_rate": 0.0004606232673409, + "loss": 4.6914, + "step": 2079 + }, + { + "epoch": 0.203125, + "grad_norm": 0.29807695746421814, + "learning_rate": 0.00046058366699303776, + "loss": 4.6875, + "step": 2080 + }, + { + "epoch": 0.20322265625, + "grad_norm": 0.2698603868484497, + "learning_rate": 0.00046054404865409856, + "loss": 4.6445, + "step": 2081 + }, + { + "epoch": 0.2033203125, + "grad_norm": 0.29726746678352356, + "learning_rate": 0.0004605044123279232, + "loss": 4.7031, + "step": 2082 + }, + { + "epoch": 0.20341796875, + "grad_norm": 0.30807045102119446, + "learning_rate": 0.000460464758018354, + "loss": 4.6953, + "step": 2083 + }, + { + "epoch": 0.203515625, + "grad_norm": 0.3420082926750183, + "learning_rate": 0.00046042508572923527, + "loss": 4.6797, + "step": 2084 + }, + { + "epoch": 0.20361328125, + "grad_norm": 0.4511962831020355, + "learning_rate": 0.00046038539546441296, + "loss": 4.7031, + "step": 2085 + }, + { + "epoch": 0.2037109375, + "grad_norm": 0.7175701856613159, + "learning_rate": 0.00046034568722773476, + "loss": 4.6953, + "step": 2086 + }, + { + "epoch": 0.20380859375, + "grad_norm": 0.7793725728988647, + "learning_rate": 0.0004603059610230502, + "loss": 4.6641, + "step": 2087 + }, + { + "epoch": 0.20390625, + "grad_norm": 0.4079972207546234, + "learning_rate": 0.0004602662168542103, + "loss": 4.668, + "step": 2088 + }, + { + "epoch": 0.20400390625, + "grad_norm": 0.47446468472480774, + "learning_rate": 0.00046022645472506814, + "loss": 4.6719, + "step": 2089 + }, + { + "epoch": 0.2041015625, + "grad_norm": 0.6069327592849731, + "learning_rate": 0.00046018667463947836, + "loss": 4.6484, + "step": 2090 + }, + { + "epoch": 0.20419921875, + "grad_norm": 0.37124356627464294, + "learning_rate": 0.0004601468766012973, + "loss": 4.6562, + "step": 2091 + }, + { + "epoch": 0.204296875, + "grad_norm": 0.35738274455070496, + "learning_rate": 0.0004601070606143831, + "loss": 4.668, + "step": 2092 + }, + { + "epoch": 0.20439453125, + "grad_norm": 0.35003790259361267, + "learning_rate": 0.00046006722668259575, + "loss": 4.668, + "step": 2093 + }, + { + "epoch": 0.2044921875, + "grad_norm": 0.31405922770500183, + "learning_rate": 0.00046002737480979687, + "loss": 4.707, + "step": 2094 + }, + { + "epoch": 0.20458984375, + "grad_norm": 0.33099257946014404, + "learning_rate": 0.0004599875049998497, + "loss": 4.6797, + "step": 2095 + }, + { + "epoch": 0.2046875, + "grad_norm": 0.3042897880077362, + "learning_rate": 0.00045994761725661956, + "loss": 4.6445, + "step": 2096 + }, + { + "epoch": 0.20478515625, + "grad_norm": 0.27573198080062866, + "learning_rate": 0.000459907711583973, + "loss": 4.6719, + "step": 2097 + }, + { + "epoch": 0.2048828125, + "grad_norm": 0.2779269814491272, + "learning_rate": 0.0004598677879857789, + "loss": 4.6719, + "step": 2098 + }, + { + "epoch": 0.20498046875, + "grad_norm": 0.2661430835723877, + "learning_rate": 0.00045982784646590735, + "loss": 4.6523, + "step": 2099 + }, + { + "epoch": 0.205078125, + "grad_norm": 0.25000330805778503, + "learning_rate": 0.0004597878870282306, + "loss": 4.6875, + "step": 2100 + }, + { + "epoch": 0.20517578125, + "grad_norm": 0.27923548221588135, + "learning_rate": 0.00045974790967662243, + "loss": 4.6914, + "step": 2101 + }, + { + "epoch": 0.2052734375, + "grad_norm": 0.268139123916626, + "learning_rate": 
0.0004597079144149582, + "loss": 4.7148, + "step": 2102 + }, + { + "epoch": 0.20537109375, + "grad_norm": 0.23905864357948303, + "learning_rate": 0.0004596679012471153, + "loss": 4.6523, + "step": 2103 + }, + { + "epoch": 0.20546875, + "grad_norm": 0.2799278199672699, + "learning_rate": 0.0004596278701769727, + "loss": 4.707, + "step": 2104 + }, + { + "epoch": 0.20556640625, + "grad_norm": 0.3097785413265228, + "learning_rate": 0.0004595878212084112, + "loss": 4.668, + "step": 2105 + }, + { + "epoch": 0.2056640625, + "grad_norm": 0.29371100664138794, + "learning_rate": 0.0004595477543453132, + "loss": 4.6836, + "step": 2106 + }, + { + "epoch": 0.20576171875, + "grad_norm": 0.27600544691085815, + "learning_rate": 0.00045950766959156297, + "loss": 4.6914, + "step": 2107 + }, + { + "epoch": 0.205859375, + "grad_norm": 0.2540012001991272, + "learning_rate": 0.0004594675669510464, + "loss": 4.6836, + "step": 2108 + }, + { + "epoch": 0.20595703125, + "grad_norm": 0.2706865072250366, + "learning_rate": 0.00045942744642765124, + "loss": 4.6992, + "step": 2109 + }, + { + "epoch": 0.2060546875, + "grad_norm": 0.26352304220199585, + "learning_rate": 0.00045938730802526687, + "loss": 4.6992, + "step": 2110 + }, + { + "epoch": 0.20615234375, + "grad_norm": 0.25635385513305664, + "learning_rate": 0.0004593471517477844, + "loss": 4.6875, + "step": 2111 + }, + { + "epoch": 0.20625, + "grad_norm": 0.27660059928894043, + "learning_rate": 0.0004593069775990967, + "loss": 4.6836, + "step": 2112 + }, + { + "epoch": 0.20634765625, + "grad_norm": 0.26834970712661743, + "learning_rate": 0.00045926678558309847, + "loss": 4.6797, + "step": 2113 + }, + { + "epoch": 0.2064453125, + "grad_norm": 0.2626284062862396, + "learning_rate": 0.000459226575703686, + "loss": 4.6602, + "step": 2114 + }, + { + "epoch": 0.20654296875, + "grad_norm": 0.29228803515434265, + "learning_rate": 0.0004591863479647573, + "loss": 4.7031, + "step": 2115 + }, + { + "epoch": 0.206640625, + "grad_norm": 0.3252204656600952, + "learning_rate": 0.00045914610237021236, + "loss": 4.6602, + "step": 2116 + }, + { + "epoch": 0.20673828125, + "grad_norm": 0.37165123224258423, + "learning_rate": 0.00045910583892395246, + "loss": 4.6523, + "step": 2117 + }, + { + "epoch": 0.2068359375, + "grad_norm": 0.40894705057144165, + "learning_rate": 0.0004590655576298811, + "loss": 4.6445, + "step": 2118 + }, + { + "epoch": 0.20693359375, + "grad_norm": 0.40036091208457947, + "learning_rate": 0.0004590252584919031, + "loss": 4.6797, + "step": 2119 + }, + { + "epoch": 0.20703125, + "grad_norm": 0.3519115149974823, + "learning_rate": 0.00045898494151392537, + "loss": 4.6406, + "step": 2120 + }, + { + "epoch": 0.20712890625, + "grad_norm": 0.2667848467826843, + "learning_rate": 0.0004589446066998563, + "loss": 4.6484, + "step": 2121 + }, + { + "epoch": 0.2072265625, + "grad_norm": 0.2844058573246002, + "learning_rate": 0.00045890425405360595, + "loss": 4.6758, + "step": 2122 + }, + { + "epoch": 0.20732421875, + "grad_norm": 0.30774012207984924, + "learning_rate": 0.00045886388357908636, + "loss": 4.6719, + "step": 2123 + }, + { + "epoch": 0.207421875, + "grad_norm": 0.3178056478500366, + "learning_rate": 0.0004588234952802112, + "loss": 4.6523, + "step": 2124 + }, + { + "epoch": 0.20751953125, + "grad_norm": 0.3244422972202301, + "learning_rate": 0.00045878308916089567, + "loss": 4.6797, + "step": 2125 + }, + { + "epoch": 0.2076171875, + "grad_norm": 0.34837037324905396, + "learning_rate": 0.00045874266522505705, + "loss": 4.6719, + "step": 2126 + }, + { + "epoch": 
0.20771484375, + "grad_norm": 0.3734824061393738, + "learning_rate": 0.0004587022234766141, + "loss": 4.668, + "step": 2127 + }, + { + "epoch": 0.2078125, + "grad_norm": 0.338059663772583, + "learning_rate": 0.0004586617639194873, + "loss": 4.668, + "step": 2128 + }, + { + "epoch": 0.20791015625, + "grad_norm": 0.29814890027046204, + "learning_rate": 0.00045862128655759914, + "loss": 4.6719, + "step": 2129 + }, + { + "epoch": 0.2080078125, + "grad_norm": 0.2901763916015625, + "learning_rate": 0.00045858079139487345, + "loss": 4.6914, + "step": 2130 + }, + { + "epoch": 0.20810546875, + "grad_norm": 0.2638110816478729, + "learning_rate": 0.0004585402784352359, + "loss": 4.6836, + "step": 2131 + }, + { + "epoch": 0.208203125, + "grad_norm": 0.3158838748931885, + "learning_rate": 0.00045849974768261413, + "loss": 4.6758, + "step": 2132 + }, + { + "epoch": 0.20830078125, + "grad_norm": 0.3720110058784485, + "learning_rate": 0.0004584591991409373, + "loss": 4.6602, + "step": 2133 + }, + { + "epoch": 0.2083984375, + "grad_norm": 0.3769083023071289, + "learning_rate": 0.00045841863281413615, + "loss": 4.6367, + "step": 2134 + }, + { + "epoch": 0.20849609375, + "grad_norm": 0.3428402543067932, + "learning_rate": 0.0004583780487061435, + "loss": 4.6758, + "step": 2135 + }, + { + "epoch": 0.20859375, + "grad_norm": 0.2708068788051605, + "learning_rate": 0.0004583374468208935, + "loss": 4.6719, + "step": 2136 + }, + { + "epoch": 0.20869140625, + "grad_norm": 0.278901606798172, + "learning_rate": 0.00045829682716232254, + "loss": 4.6367, + "step": 2137 + }, + { + "epoch": 0.2087890625, + "grad_norm": 0.3805471956729889, + "learning_rate": 0.00045825618973436807, + "loss": 4.6836, + "step": 2138 + }, + { + "epoch": 0.20888671875, + "grad_norm": 0.39566126465797424, + "learning_rate": 0.00045821553454096975, + "loss": 4.6836, + "step": 2139 + }, + { + "epoch": 0.208984375, + "grad_norm": 0.3299342095851898, + "learning_rate": 0.0004581748615860689, + "loss": 4.668, + "step": 2140 + }, + { + "epoch": 0.20908203125, + "grad_norm": 0.34395065903663635, + "learning_rate": 0.00045813417087360846, + "loss": 4.6445, + "step": 2141 + }, + { + "epoch": 0.2091796875, + "grad_norm": 0.30713319778442383, + "learning_rate": 0.0004580934624075331, + "loss": 4.6719, + "step": 2142 + }, + { + "epoch": 0.20927734375, + "grad_norm": 0.360476016998291, + "learning_rate": 0.0004580527361917891, + "loss": 4.6797, + "step": 2143 + }, + { + "epoch": 0.209375, + "grad_norm": 0.35653045773506165, + "learning_rate": 0.0004580119922303248, + "loss": 4.6523, + "step": 2144 + }, + { + "epoch": 0.20947265625, + "grad_norm": 0.3684094250202179, + "learning_rate": 0.0004579712305270899, + "loss": 4.6992, + "step": 2145 + }, + { + "epoch": 0.2095703125, + "grad_norm": 0.3084180951118469, + "learning_rate": 0.0004579304510860361, + "loss": 4.6836, + "step": 2146 + }, + { + "epoch": 0.20966796875, + "grad_norm": 0.2643337845802307, + "learning_rate": 0.0004578896539111166, + "loss": 4.668, + "step": 2147 + }, + { + "epoch": 0.209765625, + "grad_norm": 0.29848313331604004, + "learning_rate": 0.00045784883900628644, + "loss": 4.6562, + "step": 2148 + }, + { + "epoch": 0.20986328125, + "grad_norm": 0.3210330903530121, + "learning_rate": 0.0004578080063755023, + "loss": 4.7031, + "step": 2149 + }, + { + "epoch": 0.2099609375, + "grad_norm": 0.5079271197319031, + "learning_rate": 0.00045776715602272267, + "loss": 4.668, + "step": 2150 + }, + { + "epoch": 0.21005859375, + "grad_norm": 0.5599049925804138, + "learning_rate": 
0.00045772628795190775, + "loss": 4.6953, + "step": 2151 + }, + { + "epoch": 0.21015625, + "grad_norm": 0.3838483989238739, + "learning_rate": 0.00045768540216701935, + "loss": 4.6836, + "step": 2152 + }, + { + "epoch": 0.21025390625, + "grad_norm": 0.3279328942298889, + "learning_rate": 0.00045764449867202105, + "loss": 4.6875, + "step": 2153 + }, + { + "epoch": 0.2103515625, + "grad_norm": 0.3579613268375397, + "learning_rate": 0.00045760357747087836, + "loss": 4.6523, + "step": 2154 + }, + { + "epoch": 0.21044921875, + "grad_norm": 0.4003296196460724, + "learning_rate": 0.000457562638567558, + "loss": 4.6875, + "step": 2155 + }, + { + "epoch": 0.210546875, + "grad_norm": 0.36784183979034424, + "learning_rate": 0.000457521681966029, + "loss": 4.6758, + "step": 2156 + }, + { + "epoch": 0.21064453125, + "grad_norm": 0.30184993147850037, + "learning_rate": 0.00045748070767026166, + "loss": 4.668, + "step": 2157 + }, + { + "epoch": 0.2107421875, + "grad_norm": 0.28590184450149536, + "learning_rate": 0.00045743971568422827, + "loss": 4.6914, + "step": 2158 + }, + { + "epoch": 0.21083984375, + "grad_norm": 0.34186697006225586, + "learning_rate": 0.0004573987060119026, + "loss": 4.6836, + "step": 2159 + }, + { + "epoch": 0.2109375, + "grad_norm": 0.5154711604118347, + "learning_rate": 0.0004573576786572603, + "loss": 4.6992, + "step": 2160 + }, + { + "epoch": 0.21103515625, + "grad_norm": 1.0158944129943848, + "learning_rate": 0.0004573166336242788, + "loss": 4.6758, + "step": 2161 + }, + { + "epoch": 0.2111328125, + "grad_norm": 1.0703473091125488, + "learning_rate": 0.000457275570916937, + "loss": 4.6953, + "step": 2162 + }, + { + "epoch": 0.21123046875, + "grad_norm": 0.4153207242488861, + "learning_rate": 0.0004572344905392158, + "loss": 4.6758, + "step": 2163 + }, + { + "epoch": 0.211328125, + "grad_norm": 0.5086596608161926, + "learning_rate": 0.00045719339249509746, + "loss": 4.6836, + "step": 2164 + }, + { + "epoch": 0.21142578125, + "grad_norm": 0.43769127130508423, + "learning_rate": 0.0004571522767885663, + "loss": 4.6836, + "step": 2165 + }, + { + "epoch": 0.2115234375, + "grad_norm": 0.45974478125572205, + "learning_rate": 0.00045711114342360823, + "loss": 4.6562, + "step": 2166 + }, + { + "epoch": 0.21162109375, + "grad_norm": 0.4274415969848633, + "learning_rate": 0.0004570699924042108, + "loss": 4.6602, + "step": 2167 + }, + { + "epoch": 0.21171875, + "grad_norm": 2.324859857559204, + "learning_rate": 0.00045702882373436317, + "loss": 4.668, + "step": 2168 + }, + { + "epoch": 0.21181640625, + "grad_norm": 1.2677239179611206, + "learning_rate": 0.00045698763741805666, + "loss": 4.6953, + "step": 2169 + }, + { + "epoch": 0.2119140625, + "grad_norm": 0.7512508630752563, + "learning_rate": 0.0004569464334592838, + "loss": 4.6953, + "step": 2170 + }, + { + "epoch": 0.21201171875, + "grad_norm": 0.6013326048851013, + "learning_rate": 0.0004569052118620391, + "loss": 4.6602, + "step": 2171 + }, + { + "epoch": 0.212109375, + "grad_norm": 0.4670730531215668, + "learning_rate": 0.00045686397263031863, + "loss": 4.6719, + "step": 2172 + }, + { + "epoch": 0.21220703125, + "grad_norm": 0.682818591594696, + "learning_rate": 0.0004568227157681205, + "loss": 4.6875, + "step": 2173 + }, + { + "epoch": 0.2123046875, + "grad_norm": 0.7088087201118469, + "learning_rate": 0.000456781441279444, + "loss": 4.6602, + "step": 2174 + }, + { + "epoch": 0.21240234375, + "grad_norm": 0.42867234349250793, + "learning_rate": 0.0004567401491682905, + "loss": 4.668, + "step": 2175 + }, + { + "epoch": 0.2125, + 
"grad_norm": 0.36468663811683655, + "learning_rate": 0.00045669883943866307, + "loss": 4.6523, + "step": 2176 + }, + { + "epoch": 0.21259765625, + "grad_norm": 0.28905150294303894, + "learning_rate": 0.0004566575120945663, + "loss": 4.6758, + "step": 2177 + }, + { + "epoch": 0.2126953125, + "grad_norm": 0.3054339289665222, + "learning_rate": 0.0004566161671400067, + "loss": 4.6484, + "step": 2178 + }, + { + "epoch": 0.21279296875, + "grad_norm": 0.2752300798892975, + "learning_rate": 0.0004565748045789923, + "loss": 4.6875, + "step": 2179 + }, + { + "epoch": 0.212890625, + "grad_norm": 0.28184881806373596, + "learning_rate": 0.0004565334244155329, + "loss": 4.6602, + "step": 2180 + }, + { + "epoch": 0.21298828125, + "grad_norm": 0.35365766286849976, + "learning_rate": 0.00045649202665364014, + "loss": 4.6484, + "step": 2181 + }, + { + "epoch": 0.2130859375, + "grad_norm": 0.41137388348579407, + "learning_rate": 0.0004564506112973272, + "loss": 4.6875, + "step": 2182 + }, + { + "epoch": 0.21318359375, + "grad_norm": 0.3376198709011078, + "learning_rate": 0.000456409178350609, + "loss": 4.6523, + "step": 2183 + }, + { + "epoch": 0.21328125, + "grad_norm": 0.28121715784072876, + "learning_rate": 0.0004563677278175021, + "loss": 4.6992, + "step": 2184 + }, + { + "epoch": 0.21337890625, + "grad_norm": 0.2534632086753845, + "learning_rate": 0.0004563262597020251, + "loss": 4.6445, + "step": 2185 + }, + { + "epoch": 0.2134765625, + "grad_norm": 0.24922828376293182, + "learning_rate": 0.00045628477400819776, + "loss": 4.6875, + "step": 2186 + }, + { + "epoch": 0.21357421875, + "grad_norm": 0.32425588369369507, + "learning_rate": 0.0004562432707400419, + "loss": 4.6602, + "step": 2187 + }, + { + "epoch": 0.213671875, + "grad_norm": 0.3913766145706177, + "learning_rate": 0.0004562017499015812, + "loss": 4.668, + "step": 2188 + }, + { + "epoch": 0.21376953125, + "grad_norm": 0.37380436062812805, + "learning_rate": 0.00045616021149684055, + "loss": 4.668, + "step": 2189 + }, + { + "epoch": 0.2138671875, + "grad_norm": 0.4003354609012604, + "learning_rate": 0.00045611865552984697, + "loss": 4.6602, + "step": 2190 + }, + { + "epoch": 0.21396484375, + "grad_norm": 0.42664554715156555, + "learning_rate": 0.000456077082004629, + "loss": 4.668, + "step": 2191 + }, + { + "epoch": 0.2140625, + "grad_norm": 0.4501902461051941, + "learning_rate": 0.00045603549092521696, + "loss": 4.668, + "step": 2192 + }, + { + "epoch": 0.21416015625, + "grad_norm": 0.33647021651268005, + "learning_rate": 0.0004559938822956427, + "loss": 4.6367, + "step": 2193 + }, + { + "epoch": 0.2142578125, + "grad_norm": 0.2943326234817505, + "learning_rate": 0.00045595225611993995, + "loss": 4.6719, + "step": 2194 + }, + { + "epoch": 0.21435546875, + "grad_norm": 0.2942591607570648, + "learning_rate": 0.00045591061240214415, + "loss": 4.6289, + "step": 2195 + }, + { + "epoch": 0.214453125, + "grad_norm": 0.2917780578136444, + "learning_rate": 0.00045586895114629227, + "loss": 4.6953, + "step": 2196 + }, + { + "epoch": 0.21455078125, + "grad_norm": 0.31660106778144836, + "learning_rate": 0.0004558272723564231, + "loss": 4.6445, + "step": 2197 + }, + { + "epoch": 0.2146484375, + "grad_norm": 0.3451404571533203, + "learning_rate": 0.00045578557603657727, + "loss": 4.668, + "step": 2198 + }, + { + "epoch": 0.21474609375, + "grad_norm": 0.3630112409591675, + "learning_rate": 0.0004557438621907968, + "loss": 4.6719, + "step": 2199 + }, + { + "epoch": 0.21484375, + "grad_norm": 0.32094573974609375, + "learning_rate": 0.0004557021308231256, + 
"loss": 4.668, + "step": 2200 + }, + { + "epoch": 0.21494140625, + "grad_norm": 0.2666279971599579, + "learning_rate": 0.0004556603819376092, + "loss": 4.6602, + "step": 2201 + }, + { + "epoch": 0.2150390625, + "grad_norm": 0.2945239841938019, + "learning_rate": 0.000455618615538295, + "loss": 4.6758, + "step": 2202 + }, + { + "epoch": 0.21513671875, + "grad_norm": 0.3576738238334656, + "learning_rate": 0.00045557683162923175, + "loss": 4.6719, + "step": 2203 + }, + { + "epoch": 0.215234375, + "grad_norm": 0.40367263555526733, + "learning_rate": 0.0004555350302144703, + "loss": 4.7031, + "step": 2204 + }, + { + "epoch": 0.21533203125, + "grad_norm": 0.4846717417240143, + "learning_rate": 0.00045549321129806304, + "loss": 4.6719, + "step": 2205 + }, + { + "epoch": 0.2154296875, + "grad_norm": 0.5076466202735901, + "learning_rate": 0.0004554513748840639, + "loss": 4.6914, + "step": 2206 + }, + { + "epoch": 0.21552734375, + "grad_norm": 0.48110252618789673, + "learning_rate": 0.0004554095209765288, + "loss": 4.6758, + "step": 2207 + }, + { + "epoch": 0.215625, + "grad_norm": 0.39325347542762756, + "learning_rate": 0.00045536764957951494, + "loss": 4.6953, + "step": 2208 + }, + { + "epoch": 0.21572265625, + "grad_norm": 0.3244827091693878, + "learning_rate": 0.00045532576069708163, + "loss": 4.6328, + "step": 2209 + }, + { + "epoch": 0.2158203125, + "grad_norm": 0.3849143087863922, + "learning_rate": 0.0004552838543332897, + "loss": 4.6445, + "step": 2210 + }, + { + "epoch": 0.21591796875, + "grad_norm": 0.3335340917110443, + "learning_rate": 0.00045524193049220174, + "loss": 4.6484, + "step": 2211 + }, + { + "epoch": 0.216015625, + "grad_norm": 0.32355350255966187, + "learning_rate": 0.0004551999891778819, + "loss": 4.6562, + "step": 2212 + }, + { + "epoch": 0.21611328125, + "grad_norm": 0.282289981842041, + "learning_rate": 0.00045515803039439614, + "loss": 4.6797, + "step": 2213 + }, + { + "epoch": 0.2162109375, + "grad_norm": 0.35372570157051086, + "learning_rate": 0.0004551160541458121, + "loss": 4.6992, + "step": 2214 + }, + { + "epoch": 0.21630859375, + "grad_norm": 0.31764501333236694, + "learning_rate": 0.000455074060436199, + "loss": 4.6602, + "step": 2215 + }, + { + "epoch": 0.21640625, + "grad_norm": 0.2709023356437683, + "learning_rate": 0.0004550320492696279, + "loss": 4.6523, + "step": 2216 + }, + { + "epoch": 0.21650390625, + "grad_norm": 0.2475263476371765, + "learning_rate": 0.0004549900206501716, + "loss": 4.6484, + "step": 2217 + }, + { + "epoch": 0.2166015625, + "grad_norm": 0.2568281590938568, + "learning_rate": 0.00045494797458190425, + "loss": 4.6562, + "step": 2218 + }, + { + "epoch": 0.21669921875, + "grad_norm": 0.2753889262676239, + "learning_rate": 0.00045490591106890215, + "loss": 4.6484, + "step": 2219 + }, + { + "epoch": 0.216796875, + "grad_norm": 0.24511894583702087, + "learning_rate": 0.000454863830115243, + "loss": 4.6484, + "step": 2220 + }, + { + "epoch": 0.21689453125, + "grad_norm": 0.24880538880825043, + "learning_rate": 0.00045482173172500627, + "loss": 4.6367, + "step": 2221 + }, + { + "epoch": 0.2169921875, + "grad_norm": 0.3005264103412628, + "learning_rate": 0.00045477961590227313, + "loss": 4.6797, + "step": 2222 + }, + { + "epoch": 0.21708984375, + "grad_norm": 0.36041325330734253, + "learning_rate": 0.0004547374826511263, + "loss": 4.7109, + "step": 2223 + }, + { + "epoch": 0.2171875, + "grad_norm": 0.39673399925231934, + "learning_rate": 0.00045469533197565044, + "loss": 4.6953, + "step": 2224 + }, + { + "epoch": 0.21728515625, + "grad_norm": 
0.41942891478538513, + "learning_rate": 0.00045465316387993177, + "loss": 4.6836, + "step": 2225 + }, + { + "epoch": 0.2173828125, + "grad_norm": 0.3348380923271179, + "learning_rate": 0.0004546109783680582, + "loss": 4.668, + "step": 2226 + }, + { + "epoch": 0.21748046875, + "grad_norm": 0.30513080954551697, + "learning_rate": 0.0004545687754441192, + "loss": 4.6797, + "step": 2227 + }, + { + "epoch": 0.217578125, + "grad_norm": 0.27896392345428467, + "learning_rate": 0.0004545265551122063, + "loss": 4.6602, + "step": 2228 + }, + { + "epoch": 0.21767578125, + "grad_norm": 0.3201639950275421, + "learning_rate": 0.0004544843173764122, + "loss": 4.6719, + "step": 2229 + }, + { + "epoch": 0.2177734375, + "grad_norm": 0.40271514654159546, + "learning_rate": 0.0004544420622408318, + "loss": 4.6445, + "step": 2230 + }, + { + "epoch": 0.21787109375, + "grad_norm": 0.5087426900863647, + "learning_rate": 0.0004543997897095613, + "loss": 4.6445, + "step": 2231 + }, + { + "epoch": 0.21796875, + "grad_norm": 0.5589426755905151, + "learning_rate": 0.0004543574997866987, + "loss": 4.6992, + "step": 2232 + }, + { + "epoch": 0.21806640625, + "grad_norm": 0.4211796522140503, + "learning_rate": 0.0004543151924763439, + "loss": 4.6562, + "step": 2233 + }, + { + "epoch": 0.2181640625, + "grad_norm": 0.2638166844844818, + "learning_rate": 0.0004542728677825982, + "loss": 4.668, + "step": 2234 + }, + { + "epoch": 0.21826171875, + "grad_norm": 0.417805939912796, + "learning_rate": 0.00045423052570956466, + "loss": 4.6562, + "step": 2235 + }, + { + "epoch": 0.218359375, + "grad_norm": 0.46362581849098206, + "learning_rate": 0.00045418816626134807, + "loss": 4.6914, + "step": 2236 + }, + { + "epoch": 0.21845703125, + "grad_norm": 0.2860087454319, + "learning_rate": 0.000454145789442055, + "loss": 4.6602, + "step": 2237 + }, + { + "epoch": 0.2185546875, + "grad_norm": 0.28476911783218384, + "learning_rate": 0.00045410339525579334, + "loss": 4.6328, + "step": 2238 + }, + { + "epoch": 0.21865234375, + "grad_norm": 0.32708337903022766, + "learning_rate": 0.0004540609837066733, + "loss": 4.6836, + "step": 2239 + }, + { + "epoch": 0.21875, + "grad_norm": 0.3355824649333954, + "learning_rate": 0.00045401855479880606, + "loss": 4.6602, + "step": 2240 + }, + { + "epoch": 0.21884765625, + "grad_norm": 0.24798880517482758, + "learning_rate": 0.000453976108536305, + "loss": 4.6836, + "step": 2241 + }, + { + "epoch": 0.2189453125, + "grad_norm": 0.2865526080131531, + "learning_rate": 0.00045393364492328487, + "loss": 4.668, + "step": 2242 + }, + { + "epoch": 0.21904296875, + "grad_norm": 0.2711332142353058, + "learning_rate": 0.0004538911639638623, + "loss": 4.6602, + "step": 2243 + }, + { + "epoch": 0.219140625, + "grad_norm": 0.24825860559940338, + "learning_rate": 0.0004538486656621556, + "loss": 4.6836, + "step": 2244 + }, + { + "epoch": 0.21923828125, + "grad_norm": 0.2643031179904938, + "learning_rate": 0.0004538061500222845, + "loss": 4.6758, + "step": 2245 + }, + { + "epoch": 0.2193359375, + "grad_norm": 0.2740848958492279, + "learning_rate": 0.00045376361704837077, + "loss": 4.6562, + "step": 2246 + }, + { + "epoch": 0.21943359375, + "grad_norm": 0.266102135181427, + "learning_rate": 0.0004537210667445376, + "loss": 4.6445, + "step": 2247 + }, + { + "epoch": 0.21953125, + "grad_norm": 0.33453354239463806, + "learning_rate": 0.00045367849911491005, + "loss": 4.6914, + "step": 2248 + }, + { + "epoch": 0.21962890625, + "grad_norm": 0.3667253851890564, + "learning_rate": 0.00045363591416361474, + "loss": 4.6484, + "step": 
2249 + }, + { + "epoch": 0.2197265625, + "grad_norm": 0.36363911628723145, + "learning_rate": 0.0004535933118947799, + "loss": 4.6719, + "step": 2250 + }, + { + "epoch": 0.21982421875, + "grad_norm": 0.33891692757606506, + "learning_rate": 0.0004535506923125355, + "loss": 4.6758, + "step": 2251 + }, + { + "epoch": 0.219921875, + "grad_norm": 0.2655598521232605, + "learning_rate": 0.00045350805542101346, + "loss": 4.668, + "step": 2252 + }, + { + "epoch": 0.22001953125, + "grad_norm": 0.2713790535926819, + "learning_rate": 0.0004534654012243469, + "loss": 4.6836, + "step": 2253 + }, + { + "epoch": 0.2201171875, + "grad_norm": 0.3348010182380676, + "learning_rate": 0.000453422729726671, + "loss": 4.6133, + "step": 2254 + }, + { + "epoch": 0.22021484375, + "grad_norm": 0.31185048818588257, + "learning_rate": 0.0004533800409321223, + "loss": 4.6562, + "step": 2255 + }, + { + "epoch": 0.2203125, + "grad_norm": 0.25817927718162537, + "learning_rate": 0.0004533373348448394, + "loss": 4.6406, + "step": 2256 + }, + { + "epoch": 0.22041015625, + "grad_norm": 0.32583194971084595, + "learning_rate": 0.00045329461146896224, + "loss": 4.668, + "step": 2257 + }, + { + "epoch": 0.2205078125, + "grad_norm": 0.3231263756752014, + "learning_rate": 0.0004532518708086326, + "loss": 4.6953, + "step": 2258 + }, + { + "epoch": 0.22060546875, + "grad_norm": 0.29835695028305054, + "learning_rate": 0.00045320911286799386, + "loss": 4.668, + "step": 2259 + }, + { + "epoch": 0.220703125, + "grad_norm": 0.38220587372779846, + "learning_rate": 0.00045316633765119115, + "loss": 4.6875, + "step": 2260 + }, + { + "epoch": 0.22080078125, + "grad_norm": 0.4456774592399597, + "learning_rate": 0.0004531235451623712, + "loss": 4.6367, + "step": 2261 + }, + { + "epoch": 0.2208984375, + "grad_norm": 0.5475150346755981, + "learning_rate": 0.0004530807354056825, + "loss": 4.6641, + "step": 2262 + }, + { + "epoch": 0.22099609375, + "grad_norm": 0.5256811380386353, + "learning_rate": 0.0004530379083852752, + "loss": 4.6758, + "step": 2263 + }, + { + "epoch": 0.22109375, + "grad_norm": 0.3401896357536316, + "learning_rate": 0.0004529950641053009, + "loss": 4.6328, + "step": 2264 + }, + { + "epoch": 0.22119140625, + "grad_norm": 0.31122711300849915, + "learning_rate": 0.00045295220256991327, + "loss": 4.6758, + "step": 2265 + }, + { + "epoch": 0.2212890625, + "grad_norm": 0.41514527797698975, + "learning_rate": 0.0004529093237832674, + "loss": 4.6484, + "step": 2266 + }, + { + "epoch": 0.22138671875, + "grad_norm": 0.29372674226760864, + "learning_rate": 0.00045286642774951995, + "loss": 4.6445, + "step": 2267 + }, + { + "epoch": 0.221484375, + "grad_norm": 0.29551562666893005, + "learning_rate": 0.00045282351447282967, + "loss": 4.6641, + "step": 2268 + }, + { + "epoch": 0.22158203125, + "grad_norm": 0.34244853258132935, + "learning_rate": 0.0004527805839573564, + "loss": 4.6758, + "step": 2269 + }, + { + "epoch": 0.2216796875, + "grad_norm": 0.3226219415664673, + "learning_rate": 0.0004527376362072622, + "loss": 4.6758, + "step": 2270 + }, + { + "epoch": 0.22177734375, + "grad_norm": 0.30032727122306824, + "learning_rate": 0.00045269467122671046, + "loss": 4.6836, + "step": 2271 + }, + { + "epoch": 0.221875, + "grad_norm": 0.2800855338573456, + "learning_rate": 0.0004526516890198663, + "loss": 4.6641, + "step": 2272 + }, + { + "epoch": 0.22197265625, + "grad_norm": 0.31681379675865173, + "learning_rate": 0.00045260868959089666, + "loss": 4.6641, + "step": 2273 + }, + { + "epoch": 0.2220703125, + "grad_norm": 0.331043004989624, + 
"learning_rate": 0.00045256567294397007, + "loss": 4.6055, + "step": 2274 + }, + { + "epoch": 0.22216796875, + "grad_norm": 0.28322121500968933, + "learning_rate": 0.00045252263908325655, + "loss": 4.6133, + "step": 2275 + }, + { + "epoch": 0.222265625, + "grad_norm": 0.2832389175891876, + "learning_rate": 0.00045247958801292805, + "loss": 4.6641, + "step": 2276 + }, + { + "epoch": 0.22236328125, + "grad_norm": 0.2613706886768341, + "learning_rate": 0.000452436519737158, + "loss": 4.6289, + "step": 2277 + }, + { + "epoch": 0.2224609375, + "grad_norm": 0.28807371854782104, + "learning_rate": 0.0004523934342601218, + "loss": 4.6484, + "step": 2278 + }, + { + "epoch": 0.22255859375, + "grad_norm": 0.3016647398471832, + "learning_rate": 0.0004523503315859959, + "loss": 4.6328, + "step": 2279 + }, + { + "epoch": 0.22265625, + "grad_norm": 0.30947428941726685, + "learning_rate": 0.0004523072117189591, + "loss": 4.6836, + "step": 2280 + }, + { + "epoch": 0.22275390625, + "grad_norm": 0.2584306597709656, + "learning_rate": 0.0004522640746631916, + "loss": 4.6562, + "step": 2281 + }, + { + "epoch": 0.2228515625, + "grad_norm": 0.2622096836566925, + "learning_rate": 0.00045222092042287505, + "loss": 4.6719, + "step": 2282 + }, + { + "epoch": 0.22294921875, + "grad_norm": 0.2631734013557434, + "learning_rate": 0.00045217774900219306, + "loss": 4.625, + "step": 2283 + }, + { + "epoch": 0.223046875, + "grad_norm": 0.26475849747657776, + "learning_rate": 0.0004521345604053309, + "loss": 4.6602, + "step": 2284 + }, + { + "epoch": 0.22314453125, + "grad_norm": 0.27246424555778503, + "learning_rate": 0.00045209135463647525, + "loss": 4.6523, + "step": 2285 + }, + { + "epoch": 0.2232421875, + "grad_norm": 0.26996609568595886, + "learning_rate": 0.00045204813169981477, + "loss": 4.6562, + "step": 2286 + }, + { + "epoch": 0.22333984375, + "grad_norm": 0.25562581419944763, + "learning_rate": 0.0004520048915995395, + "loss": 4.6602, + "step": 2287 + }, + { + "epoch": 0.2234375, + "grad_norm": 0.26432788372039795, + "learning_rate": 0.00045196163433984125, + "loss": 4.6719, + "step": 2288 + }, + { + "epoch": 0.22353515625, + "grad_norm": 0.2868981957435608, + "learning_rate": 0.00045191835992491376, + "loss": 4.6484, + "step": 2289 + }, + { + "epoch": 0.2236328125, + "grad_norm": 0.27830618619918823, + "learning_rate": 0.0004518750683589519, + "loss": 4.6602, + "step": 2290 + }, + { + "epoch": 0.22373046875, + "grad_norm": 0.28667259216308594, + "learning_rate": 0.0004518317596461527, + "loss": 4.668, + "step": 2291 + }, + { + "epoch": 0.223828125, + "grad_norm": 0.29964327812194824, + "learning_rate": 0.00045178843379071445, + "loss": 4.6602, + "step": 2292 + }, + { + "epoch": 0.22392578125, + "grad_norm": 0.3300611078739166, + "learning_rate": 0.00045174509079683753, + "loss": 4.6875, + "step": 2293 + }, + { + "epoch": 0.2240234375, + "grad_norm": 0.3876355290412903, + "learning_rate": 0.00045170173066872354, + "loss": 4.6406, + "step": 2294 + }, + { + "epoch": 0.22412109375, + "grad_norm": 0.50432288646698, + "learning_rate": 0.0004516583534105761, + "loss": 4.6562, + "step": 2295 + }, + { + "epoch": 0.22421875, + "grad_norm": 0.5751743316650391, + "learning_rate": 0.00045161495902660035, + "loss": 4.668, + "step": 2296 + }, + { + "epoch": 0.22431640625, + "grad_norm": 0.46651872992515564, + "learning_rate": 0.0004515715475210028, + "loss": 4.6484, + "step": 2297 + }, + { + "epoch": 0.2244140625, + "grad_norm": 0.29591864347457886, + "learning_rate": 0.0004515281188979923, + "loss": 4.6758, + "step": 2298 + 
}, + { + "epoch": 0.22451171875, + "grad_norm": 0.4172205924987793, + "learning_rate": 0.00045148467316177864, + "loss": 4.668, + "step": 2299 + }, + { + "epoch": 0.224609375, + "grad_norm": 0.4134126901626587, + "learning_rate": 0.0004514412103165738, + "loss": 4.6406, + "step": 2300 + }, + { + "epoch": 0.22470703125, + "grad_norm": 0.28592267632484436, + "learning_rate": 0.00045139773036659113, + "loss": 4.6641, + "step": 2301 + }, + { + "epoch": 0.2248046875, + "grad_norm": 0.3129323422908783, + "learning_rate": 0.00045135423331604574, + "loss": 4.6523, + "step": 2302 + }, + { + "epoch": 0.22490234375, + "grad_norm": 0.36891794204711914, + "learning_rate": 0.00045131071916915426, + "loss": 4.6367, + "step": 2303 + }, + { + "epoch": 0.225, + "grad_norm": 0.28493818640708923, + "learning_rate": 0.00045126718793013525, + "loss": 4.6641, + "step": 2304 + }, + { + "epoch": 0.22509765625, + "grad_norm": 0.2816375195980072, + "learning_rate": 0.0004512236396032087, + "loss": 4.6875, + "step": 2305 + }, + { + "epoch": 0.2251953125, + "grad_norm": 0.2998899519443512, + "learning_rate": 0.00045118007419259627, + "loss": 4.6797, + "step": 2306 + }, + { + "epoch": 0.22529296875, + "grad_norm": 0.27575162053108215, + "learning_rate": 0.0004511364917025214, + "loss": 4.6641, + "step": 2307 + }, + { + "epoch": 0.225390625, + "grad_norm": 0.24693450331687927, + "learning_rate": 0.00045109289213720916, + "loss": 4.6719, + "step": 2308 + }, + { + "epoch": 0.22548828125, + "grad_norm": 0.29853010177612305, + "learning_rate": 0.0004510492755008861, + "loss": 4.6406, + "step": 2309 + }, + { + "epoch": 0.2255859375, + "grad_norm": 0.2853658199310303, + "learning_rate": 0.0004510056417977807, + "loss": 4.6719, + "step": 2310 + }, + { + "epoch": 0.22568359375, + "grad_norm": 0.28219833970069885, + "learning_rate": 0.0004509619910321229, + "loss": 4.668, + "step": 2311 + }, + { + "epoch": 0.22578125, + "grad_norm": 0.3548610508441925, + "learning_rate": 0.00045091832320814424, + "loss": 4.6719, + "step": 2312 + }, + { + "epoch": 0.22587890625, + "grad_norm": 0.31323540210723877, + "learning_rate": 0.0004508746383300781, + "loss": 4.6445, + "step": 2313 + }, + { + "epoch": 0.2259765625, + "grad_norm": 0.29136282205581665, + "learning_rate": 0.0004508309364021595, + "loss": 4.6797, + "step": 2314 + }, + { + "epoch": 0.22607421875, + "grad_norm": 0.2782500684261322, + "learning_rate": 0.000450787217428625, + "loss": 4.668, + "step": 2315 + }, + { + "epoch": 0.226171875, + "grad_norm": 0.26324713230133057, + "learning_rate": 0.0004507434814137128, + "loss": 4.6445, + "step": 2316 + }, + { + "epoch": 0.22626953125, + "grad_norm": 0.2620137631893158, + "learning_rate": 0.00045069972836166286, + "loss": 4.6875, + "step": 2317 + }, + { + "epoch": 0.2263671875, + "grad_norm": 0.26914480328559875, + "learning_rate": 0.0004506559582767167, + "loss": 4.6406, + "step": 2318 + }, + { + "epoch": 0.22646484375, + "grad_norm": 0.25045204162597656, + "learning_rate": 0.00045061217116311767, + "loss": 4.6719, + "step": 2319 + }, + { + "epoch": 0.2265625, + "grad_norm": 1.1518856287002563, + "learning_rate": 0.0004505683670251104, + "loss": 4.6797, + "step": 2320 + }, + { + "epoch": 0.22666015625, + "grad_norm": 0.35632452368736267, + "learning_rate": 0.00045052454586694165, + "loss": 4.6406, + "step": 2321 + }, + { + "epoch": 0.2267578125, + "grad_norm": 0.35041317343711853, + "learning_rate": 0.00045048070769285935, + "loss": 4.6758, + "step": 2322 + }, + { + "epoch": 0.22685546875, + "grad_norm": 0.3052085041999817, + 
"learning_rate": 0.0004504368525071135, + "loss": 4.6484, + "step": 2323 + }, + { + "epoch": 0.226953125, + "grad_norm": 0.41141387820243835, + "learning_rate": 0.0004503929803139555, + "loss": 4.6289, + "step": 2324 + }, + { + "epoch": 0.22705078125, + "grad_norm": 0.4647180438041687, + "learning_rate": 0.0004503490911176384, + "loss": 4.6836, + "step": 2325 + }, + { + "epoch": 0.2271484375, + "grad_norm": 0.5217369794845581, + "learning_rate": 0.0004503051849224171, + "loss": 4.6523, + "step": 2326 + }, + { + "epoch": 0.22724609375, + "grad_norm": 0.5150980353355408, + "learning_rate": 0.00045026126173254783, + "loss": 4.6367, + "step": 2327 + }, + { + "epoch": 0.22734375, + "grad_norm": 0.44847163558006287, + "learning_rate": 0.0004502173215522888, + "loss": 4.6602, + "step": 2328 + }, + { + "epoch": 0.22744140625, + "grad_norm": 0.5755159854888916, + "learning_rate": 0.00045017336438589955, + "loss": 4.6406, + "step": 2329 + }, + { + "epoch": 0.2275390625, + "grad_norm": 0.6863377094268799, + "learning_rate": 0.00045012939023764164, + "loss": 4.6758, + "step": 2330 + }, + { + "epoch": 0.22763671875, + "grad_norm": 0.5246580839157104, + "learning_rate": 0.000450085399111778, + "loss": 4.6641, + "step": 2331 + }, + { + "epoch": 0.227734375, + "grad_norm": 0.3387252390384674, + "learning_rate": 0.00045004139101257305, + "loss": 4.6445, + "step": 2332 + }, + { + "epoch": 0.22783203125, + "grad_norm": 0.3342747390270233, + "learning_rate": 0.00044999736594429336, + "loss": 4.6484, + "step": 2333 + }, + { + "epoch": 0.2279296875, + "grad_norm": 0.29663100838661194, + "learning_rate": 0.00044995332391120673, + "loss": 4.6641, + "step": 2334 + }, + { + "epoch": 0.22802734375, + "grad_norm": 0.2716478109359741, + "learning_rate": 0.0004499092649175828, + "loss": 4.6602, + "step": 2335 + }, + { + "epoch": 0.228125, + "grad_norm": 0.30545899271965027, + "learning_rate": 0.0004498651889676927, + "loss": 4.6602, + "step": 2336 + }, + { + "epoch": 0.22822265625, + "grad_norm": 0.382825642824173, + "learning_rate": 0.0004498210960658093, + "loss": 4.6523, + "step": 2337 + }, + { + "epoch": 0.2283203125, + "grad_norm": 0.30765971541404724, + "learning_rate": 0.0004497769862162072, + "loss": 4.6367, + "step": 2338 + }, + { + "epoch": 0.22841796875, + "grad_norm": 0.2779020369052887, + "learning_rate": 0.00044973285942316257, + "loss": 4.668, + "step": 2339 + }, + { + "epoch": 0.228515625, + "grad_norm": 0.24760481715202332, + "learning_rate": 0.00044968871569095307, + "loss": 4.6875, + "step": 2340 + }, + { + "epoch": 0.22861328125, + "grad_norm": 0.25630250573158264, + "learning_rate": 0.00044964455502385817, + "loss": 4.6289, + "step": 2341 + }, + { + "epoch": 0.2287109375, + "grad_norm": 0.2933541238307953, + "learning_rate": 0.000449600377426159, + "loss": 4.6797, + "step": 2342 + }, + { + "epoch": 0.22880859375, + "grad_norm": 0.3524886667728424, + "learning_rate": 0.0004495561829021383, + "loss": 4.6406, + "step": 2343 + }, + { + "epoch": 0.22890625, + "grad_norm": 0.3613327443599701, + "learning_rate": 0.0004495119714560804, + "loss": 4.6562, + "step": 2344 + }, + { + "epoch": 0.22900390625, + "grad_norm": 0.31851640343666077, + "learning_rate": 0.00044946774309227115, + "loss": 4.6523, + "step": 2345 + }, + { + "epoch": 0.2291015625, + "grad_norm": 0.38189268112182617, + "learning_rate": 0.00044942349781499843, + "loss": 4.6445, + "step": 2346 + }, + { + "epoch": 0.22919921875, + "grad_norm": 0.3930104374885559, + "learning_rate": 0.00044937923562855136, + "loss": 4.6602, + "step": 2347 + }, + 
{ + "epoch": 0.229296875, + "grad_norm": 0.3644992411136627, + "learning_rate": 0.000449334956537221, + "loss": 4.6484, + "step": 2348 + }, + { + "epoch": 0.22939453125, + "grad_norm": 0.3021014332771301, + "learning_rate": 0.0004492906605452997, + "loss": 4.6406, + "step": 2349 + }, + { + "epoch": 0.2294921875, + "grad_norm": 0.321632981300354, + "learning_rate": 0.0004492463476570818, + "loss": 4.6523, + "step": 2350 + }, + { + "epoch": 0.22958984375, + "grad_norm": 0.30498725175857544, + "learning_rate": 0.00044920201787686313, + "loss": 4.6562, + "step": 2351 + }, + { + "epoch": 0.2296875, + "grad_norm": 0.31872788071632385, + "learning_rate": 0.0004491576712089412, + "loss": 4.6562, + "step": 2352 + }, + { + "epoch": 0.22978515625, + "grad_norm": 0.24788987636566162, + "learning_rate": 0.00044911330765761494, + "loss": 4.6445, + "step": 2353 + }, + { + "epoch": 0.2298828125, + "grad_norm": 0.26677754521369934, + "learning_rate": 0.0004490689272271853, + "loss": 4.6484, + "step": 2354 + }, + { + "epoch": 0.22998046875, + "grad_norm": 0.270648717880249, + "learning_rate": 0.0004490245299219546, + "loss": 4.6562, + "step": 2355 + }, + { + "epoch": 0.230078125, + "grad_norm": 0.2944333255290985, + "learning_rate": 0.00044898011574622676, + "loss": 4.7109, + "step": 2356 + }, + { + "epoch": 0.23017578125, + "grad_norm": 0.3071500062942505, + "learning_rate": 0.00044893568470430754, + "loss": 4.6523, + "step": 2357 + }, + { + "epoch": 0.2302734375, + "grad_norm": 0.30527162551879883, + "learning_rate": 0.00044889123680050415, + "loss": 4.6602, + "step": 2358 + }, + { + "epoch": 0.23037109375, + "grad_norm": 0.27043697237968445, + "learning_rate": 0.0004488467720391256, + "loss": 4.6484, + "step": 2359 + }, + { + "epoch": 0.23046875, + "grad_norm": 0.2288312166929245, + "learning_rate": 0.0004488022904244824, + "loss": 4.668, + "step": 2360 + }, + { + "epoch": 0.23056640625, + "grad_norm": 0.24104489386081696, + "learning_rate": 0.0004487577919608867, + "loss": 4.6641, + "step": 2361 + }, + { + "epoch": 0.2306640625, + "grad_norm": 0.31805941462516785, + "learning_rate": 0.00044871327665265244, + "loss": 4.6953, + "step": 2362 + }, + { + "epoch": 0.23076171875, + "grad_norm": 0.3708193600177765, + "learning_rate": 0.0004486687445040949, + "loss": 4.6172, + "step": 2363 + }, + { + "epoch": 0.230859375, + "grad_norm": 0.3930080533027649, + "learning_rate": 0.00044862419551953145, + "loss": 4.668, + "step": 2364 + }, + { + "epoch": 0.23095703125, + "grad_norm": 0.5028117895126343, + "learning_rate": 0.00044857962970328046, + "loss": 4.6562, + "step": 2365 + }, + { + "epoch": 0.2310546875, + "grad_norm": 0.5150055289268494, + "learning_rate": 0.00044853504705966255, + "loss": 4.6602, + "step": 2366 + }, + { + "epoch": 0.23115234375, + "grad_norm": 0.4029294550418854, + "learning_rate": 0.00044849044759299957, + "loss": 4.6523, + "step": 2367 + }, + { + "epoch": 0.23125, + "grad_norm": 0.3276505768299103, + "learning_rate": 0.0004484458313076152, + "loss": 4.625, + "step": 2368 + }, + { + "epoch": 0.23134765625, + "grad_norm": 0.3143558204174042, + "learning_rate": 0.00044840119820783466, + "loss": 4.6484, + "step": 2369 + }, + { + "epoch": 0.2314453125, + "grad_norm": 0.3423708975315094, + "learning_rate": 0.00044835654829798483, + "loss": 4.6562, + "step": 2370 + }, + { + "epoch": 0.23154296875, + "grad_norm": 0.3224690556526184, + "learning_rate": 0.00044831188158239423, + "loss": 4.6914, + "step": 2371 + }, + { + "epoch": 0.231640625, + "grad_norm": 0.25867632031440735, + "learning_rate": 
0.00044826719806539294, + "loss": 4.6523, + "step": 2372 + }, + { + "epoch": 0.23173828125, + "grad_norm": 0.2700400948524475, + "learning_rate": 0.0004482224977513128, + "loss": 4.6445, + "step": 2373 + }, + { + "epoch": 0.2318359375, + "grad_norm": 0.2851368486881256, + "learning_rate": 0.00044817778064448717, + "loss": 4.6445, + "step": 2374 + }, + { + "epoch": 0.23193359375, + "grad_norm": 0.25908491015434265, + "learning_rate": 0.00044813304674925104, + "loss": 4.6406, + "step": 2375 + }, + { + "epoch": 0.23203125, + "grad_norm": 0.25152525305747986, + "learning_rate": 0.0004480882960699411, + "loss": 4.668, + "step": 2376 + }, + { + "epoch": 0.23212890625, + "grad_norm": 0.23664075136184692, + "learning_rate": 0.0004480435286108956, + "loss": 4.6328, + "step": 2377 + }, + { + "epoch": 0.2322265625, + "grad_norm": 0.28412893414497375, + "learning_rate": 0.00044799874437645453, + "loss": 4.6367, + "step": 2378 + }, + { + "epoch": 0.23232421875, + "grad_norm": 0.3224864602088928, + "learning_rate": 0.0004479539433709592, + "loss": 4.6641, + "step": 2379 + }, + { + "epoch": 0.232421875, + "grad_norm": 0.35265177488327026, + "learning_rate": 0.000447909125598753, + "loss": 4.6836, + "step": 2380 + }, + { + "epoch": 0.23251953125, + "grad_norm": 0.3193668723106384, + "learning_rate": 0.00044786429106418064, + "loss": 4.6523, + "step": 2381 + }, + { + "epoch": 0.2326171875, + "grad_norm": 0.2693856954574585, + "learning_rate": 0.00044781943977158847, + "loss": 4.6602, + "step": 2382 + }, + { + "epoch": 0.23271484375, + "grad_norm": 0.27126598358154297, + "learning_rate": 0.0004477745717253245, + "loss": 4.6367, + "step": 2383 + }, + { + "epoch": 0.2328125, + "grad_norm": 0.2858413755893707, + "learning_rate": 0.00044772968692973836, + "loss": 4.6289, + "step": 2384 + }, + { + "epoch": 0.23291015625, + "grad_norm": 0.2958002984523773, + "learning_rate": 0.0004476847853891815, + "loss": 4.6367, + "step": 2385 + }, + { + "epoch": 0.2330078125, + "grad_norm": 0.3608322739601135, + "learning_rate": 0.0004476398671080067, + "loss": 4.625, + "step": 2386 + }, + { + "epoch": 0.23310546875, + "grad_norm": 0.40920838713645935, + "learning_rate": 0.0004475949320905685, + "loss": 4.6758, + "step": 2387 + }, + { + "epoch": 0.233203125, + "grad_norm": 0.3942396640777588, + "learning_rate": 0.000447549980341223, + "loss": 4.6797, + "step": 2388 + }, + { + "epoch": 0.23330078125, + "grad_norm": 0.3191213011741638, + "learning_rate": 0.00044750501186432805, + "loss": 4.6602, + "step": 2389 + }, + { + "epoch": 0.2333984375, + "grad_norm": 0.2689201831817627, + "learning_rate": 0.00044746002666424297, + "loss": 4.668, + "step": 2390 + }, + { + "epoch": 0.23349609375, + "grad_norm": 0.2625068128108978, + "learning_rate": 0.0004474150247453287, + "loss": 4.6602, + "step": 2391 + }, + { + "epoch": 0.23359375, + "grad_norm": 0.2948114275932312, + "learning_rate": 0.00044737000611194813, + "loss": 4.6562, + "step": 2392 + }, + { + "epoch": 0.23369140625, + "grad_norm": 0.3135228157043457, + "learning_rate": 0.0004473249707684652, + "loss": 4.6641, + "step": 2393 + }, + { + "epoch": 0.2337890625, + "grad_norm": 0.31921929121017456, + "learning_rate": 0.0004472799187192461, + "loss": 4.6406, + "step": 2394 + }, + { + "epoch": 0.23388671875, + "grad_norm": 0.27814632654190063, + "learning_rate": 0.00044723484996865803, + "loss": 4.668, + "step": 2395 + }, + { + "epoch": 0.233984375, + "grad_norm": 0.26085740327835083, + "learning_rate": 0.0004471897645210702, + "loss": 4.6562, + "step": 2396 + }, + { + "epoch": 
0.23408203125, + "grad_norm": 0.259788453578949, + "learning_rate": 0.0004471446623808534, + "loss": 4.6445, + "step": 2397 + }, + { + "epoch": 0.2341796875, + "grad_norm": 0.26260846853256226, + "learning_rate": 0.00044709954355238, + "loss": 4.7031, + "step": 2398 + }, + { + "epoch": 0.23427734375, + "grad_norm": 0.24550992250442505, + "learning_rate": 0.00044705440804002376, + "loss": 4.6562, + "step": 2399 + }, + { + "epoch": 0.234375, + "grad_norm": 0.2735583782196045, + "learning_rate": 0.00044700925584816053, + "loss": 4.6484, + "step": 2400 + }, + { + "epoch": 0.23447265625, + "grad_norm": 0.33223646879196167, + "learning_rate": 0.0004469640869811673, + "loss": 4.6602, + "step": 2401 + }, + { + "epoch": 0.2345703125, + "grad_norm": 0.3579213619232178, + "learning_rate": 0.000446918901443423, + "loss": 4.6406, + "step": 2402 + }, + { + "epoch": 0.23466796875, + "grad_norm": 0.3264033794403076, + "learning_rate": 0.0004468736992393079, + "loss": 4.6055, + "step": 2403 + }, + { + "epoch": 0.234765625, + "grad_norm": 0.257488489151001, + "learning_rate": 0.0004468284803732043, + "loss": 4.6328, + "step": 2404 + }, + { + "epoch": 0.23486328125, + "grad_norm": 0.2694514989852905, + "learning_rate": 0.0004467832448494957, + "loss": 4.6562, + "step": 2405 + }, + { + "epoch": 0.2349609375, + "grad_norm": 0.30083775520324707, + "learning_rate": 0.0004467379926725673, + "loss": 4.6406, + "step": 2406 + }, + { + "epoch": 0.23505859375, + "grad_norm": 0.32040271162986755, + "learning_rate": 0.00044669272384680633, + "loss": 4.6289, + "step": 2407 + }, + { + "epoch": 0.23515625, + "grad_norm": 0.36401382088661194, + "learning_rate": 0.0004466474383766008, + "loss": 4.6875, + "step": 2408 + }, + { + "epoch": 0.23525390625, + "grad_norm": 0.370027631521225, + "learning_rate": 0.0004466021362663413, + "loss": 4.6523, + "step": 2409 + }, + { + "epoch": 0.2353515625, + "grad_norm": 0.39714178442955017, + "learning_rate": 0.0004465568175204193, + "loss": 4.6445, + "step": 2410 + }, + { + "epoch": 0.23544921875, + "grad_norm": 0.38746803998947144, + "learning_rate": 0.0004465114821432282, + "loss": 4.6562, + "step": 2411 + }, + { + "epoch": 0.235546875, + "grad_norm": 0.3525507152080536, + "learning_rate": 0.00044646613013916286, + "loss": 4.6875, + "step": 2412 + }, + { + "epoch": 0.23564453125, + "grad_norm": 0.29401838779449463, + "learning_rate": 0.00044642076151262005, + "loss": 4.6641, + "step": 2413 + }, + { + "epoch": 0.2357421875, + "grad_norm": 0.27843591570854187, + "learning_rate": 0.00044637537626799776, + "loss": 4.6562, + "step": 2414 + }, + { + "epoch": 0.23583984375, + "grad_norm": 0.35331761837005615, + "learning_rate": 0.00044632997440969597, + "loss": 4.6484, + "step": 2415 + }, + { + "epoch": 0.2359375, + "grad_norm": 0.38665127754211426, + "learning_rate": 0.000446284555942116, + "loss": 4.6406, + "step": 2416 + }, + { + "epoch": 0.23603515625, + "grad_norm": 0.3681085407733917, + "learning_rate": 0.0004462391208696607, + "loss": 4.6523, + "step": 2417 + }, + { + "epoch": 0.2361328125, + "grad_norm": 0.2906060814857483, + "learning_rate": 0.0004461936691967349, + "loss": 4.6406, + "step": 2418 + }, + { + "epoch": 0.23623046875, + "grad_norm": 0.24668429791927338, + "learning_rate": 0.00044614820092774487, + "loss": 4.6523, + "step": 2419 + }, + { + "epoch": 0.236328125, + "grad_norm": 0.29406383633613586, + "learning_rate": 0.00044610271606709823, + "loss": 4.6328, + "step": 2420 + }, + { + "epoch": 0.23642578125, + "grad_norm": 0.30544033646583557, + "learning_rate": 
0.0004460572146192046, + "loss": 4.6523, + "step": 2421 + }, + { + "epoch": 0.2365234375, + "grad_norm": 0.2908661365509033, + "learning_rate": 0.00044601169658847495, + "loss": 4.6602, + "step": 2422 + }, + { + "epoch": 0.23662109375, + "grad_norm": 0.22852034866809845, + "learning_rate": 0.000445966161979322, + "loss": 4.6875, + "step": 2423 + }, + { + "epoch": 0.23671875, + "grad_norm": 0.23027698695659637, + "learning_rate": 0.00044592061079616, + "loss": 4.6641, + "step": 2424 + }, + { + "epoch": 0.23681640625, + "grad_norm": 0.23451147973537445, + "learning_rate": 0.00044587504304340476, + "loss": 4.6719, + "step": 2425 + }, + { + "epoch": 0.2369140625, + "grad_norm": 0.24141405522823334, + "learning_rate": 0.0004458294587254739, + "loss": 4.6602, + "step": 2426 + }, + { + "epoch": 0.23701171875, + "grad_norm": 0.2290583848953247, + "learning_rate": 0.00044578385784678644, + "loss": 4.6484, + "step": 2427 + }, + { + "epoch": 0.237109375, + "grad_norm": 0.26968109607696533, + "learning_rate": 0.00044573824041176303, + "loss": 4.6523, + "step": 2428 + }, + { + "epoch": 0.23720703125, + "grad_norm": 0.3119431734085083, + "learning_rate": 0.0004456926064248261, + "loss": 4.6055, + "step": 2429 + }, + { + "epoch": 0.2373046875, + "grad_norm": 0.3680206835269928, + "learning_rate": 0.0004456469558903994, + "loss": 4.6406, + "step": 2430 + }, + { + "epoch": 0.23740234375, + "grad_norm": 0.39070969820022583, + "learning_rate": 0.00044560128881290844, + "loss": 4.6641, + "step": 2431 + }, + { + "epoch": 0.2375, + "grad_norm": 0.34244853258132935, + "learning_rate": 0.00044555560519678053, + "loss": 4.6484, + "step": 2432 + }, + { + "epoch": 0.23759765625, + "grad_norm": 0.27080127596855164, + "learning_rate": 0.0004455099050464442, + "loss": 4.6445, + "step": 2433 + }, + { + "epoch": 0.2376953125, + "grad_norm": 0.24982893466949463, + "learning_rate": 0.00044546418836632993, + "loss": 4.6367, + "step": 2434 + }, + { + "epoch": 0.23779296875, + "grad_norm": 0.27964505553245544, + "learning_rate": 0.0004454184551608694, + "loss": 4.625, + "step": 2435 + }, + { + "epoch": 0.237890625, + "grad_norm": 0.30475467443466187, + "learning_rate": 0.00044537270543449633, + "loss": 4.6367, + "step": 2436 + }, + { + "epoch": 0.23798828125, + "grad_norm": 0.3220985233783722, + "learning_rate": 0.0004453269391916458, + "loss": 4.6523, + "step": 2437 + }, + { + "epoch": 0.2380859375, + "grad_norm": 0.3175990581512451, + "learning_rate": 0.0004452811564367545, + "loss": 4.6094, + "step": 2438 + }, + { + "epoch": 0.23818359375, + "grad_norm": 0.30360761284828186, + "learning_rate": 0.00044523535717426086, + "loss": 4.6641, + "step": 2439 + }, + { + "epoch": 0.23828125, + "grad_norm": 0.26058733463287354, + "learning_rate": 0.00044518954140860455, + "loss": 4.6211, + "step": 2440 + }, + { + "epoch": 0.23837890625, + "grad_norm": 0.24370011687278748, + "learning_rate": 0.00044514370914422745, + "loss": 4.6719, + "step": 2441 + }, + { + "epoch": 0.2384765625, + "grad_norm": 0.26911863684654236, + "learning_rate": 0.00044509786038557256, + "loss": 4.6328, + "step": 2442 + }, + { + "epoch": 0.23857421875, + "grad_norm": 0.2688811421394348, + "learning_rate": 0.00044505199513708446, + "loss": 4.6523, + "step": 2443 + }, + { + "epoch": 0.238671875, + "grad_norm": 0.27930912375450134, + "learning_rate": 0.0004450061134032096, + "loss": 4.6445, + "step": 2444 + }, + { + "epoch": 0.23876953125, + "grad_norm": 0.27425849437713623, + "learning_rate": 0.00044496021518839585, + "loss": 4.6484, + "step": 2445 + }, + { + 
"epoch": 0.2388671875, + "grad_norm": 0.29034027457237244, + "learning_rate": 0.0004449143004970928, + "loss": 4.6133, + "step": 2446 + }, + { + "epoch": 0.23896484375, + "grad_norm": 0.27038997411727905, + "learning_rate": 0.0004448683693337515, + "loss": 4.6758, + "step": 2447 + }, + { + "epoch": 0.2390625, + "grad_norm": 0.2953545153141022, + "learning_rate": 0.0004448224217028247, + "loss": 4.6172, + "step": 2448 + }, + { + "epoch": 0.23916015625, + "grad_norm": 0.3312900960445404, + "learning_rate": 0.0004447764576087667, + "loss": 4.6523, + "step": 2449 + }, + { + "epoch": 0.2392578125, + "grad_norm": 0.3634786307811737, + "learning_rate": 0.00044473047705603346, + "loss": 4.6445, + "step": 2450 + }, + { + "epoch": 0.23935546875, + "grad_norm": 0.4030366539955139, + "learning_rate": 0.0004446844800490824, + "loss": 4.625, + "step": 2451 + }, + { + "epoch": 0.239453125, + "grad_norm": 0.357688844203949, + "learning_rate": 0.00044463846659237267, + "loss": 4.6445, + "step": 2452 + }, + { + "epoch": 0.23955078125, + "grad_norm": 0.27814653515815735, + "learning_rate": 0.0004445924366903649, + "loss": 4.6602, + "step": 2453 + }, + { + "epoch": 0.2396484375, + "grad_norm": 0.26309749484062195, + "learning_rate": 0.00044454639034752143, + "loss": 4.6484, + "step": 2454 + }, + { + "epoch": 0.23974609375, + "grad_norm": 0.25811707973480225, + "learning_rate": 0.0004445003275683062, + "loss": 4.6562, + "step": 2455 + }, + { + "epoch": 0.23984375, + "grad_norm": 0.29156291484832764, + "learning_rate": 0.0004444542483571846, + "loss": 4.6719, + "step": 2456 + }, + { + "epoch": 0.23994140625, + "grad_norm": 0.2622072696685791, + "learning_rate": 0.0004444081527186236, + "loss": 4.6367, + "step": 2457 + }, + { + "epoch": 0.2400390625, + "grad_norm": 0.24939043819904327, + "learning_rate": 0.000444362040657092, + "loss": 4.6523, + "step": 2458 + }, + { + "epoch": 0.24013671875, + "grad_norm": 0.2628534436225891, + "learning_rate": 0.0004443159121770601, + "loss": 4.6289, + "step": 2459 + }, + { + "epoch": 0.240234375, + "grad_norm": 0.30028635263442993, + "learning_rate": 0.0004442697672829997, + "loss": 4.6211, + "step": 2460 + }, + { + "epoch": 0.24033203125, + "grad_norm": 0.35397228598594666, + "learning_rate": 0.0004442236059793841, + "loss": 4.6523, + "step": 2461 + }, + { + "epoch": 0.2404296875, + "grad_norm": 0.36456942558288574, + "learning_rate": 0.0004441774282706884, + "loss": 4.6289, + "step": 2462 + }, + { + "epoch": 0.24052734375, + "grad_norm": 0.3626701831817627, + "learning_rate": 0.0004441312341613893, + "loss": 4.6602, + "step": 2463 + }, + { + "epoch": 0.240625, + "grad_norm": 0.31185904145240784, + "learning_rate": 0.0004440850236559649, + "loss": 4.6523, + "step": 2464 + }, + { + "epoch": 0.24072265625, + "grad_norm": 0.28266313672065735, + "learning_rate": 0.0004440387967588951, + "loss": 4.6406, + "step": 2465 + }, + { + "epoch": 0.2408203125, + "grad_norm": 0.26754724979400635, + "learning_rate": 0.0004439925534746612, + "loss": 4.6328, + "step": 2466 + }, + { + "epoch": 0.24091796875, + "grad_norm": 0.2667434811592102, + "learning_rate": 0.0004439462938077462, + "loss": 4.6445, + "step": 2467 + }, + { + "epoch": 0.241015625, + "grad_norm": 0.2924223244190216, + "learning_rate": 0.0004439000177626347, + "loss": 4.6602, + "step": 2468 + }, + { + "epoch": 0.24111328125, + "grad_norm": 0.28878849744796753, + "learning_rate": 0.0004438537253438127, + "loss": 4.6523, + "step": 2469 + }, + { + "epoch": 0.2412109375, + "grad_norm": 0.26754310727119446, + "learning_rate": 
0.0004438074165557682, + "loss": 4.6523, + "step": 2470 + }, + { + "epoch": 0.24130859375, + "grad_norm": 0.2961125075817108, + "learning_rate": 0.0004437610914029902, + "loss": 4.6562, + "step": 2471 + }, + { + "epoch": 0.24140625, + "grad_norm": 0.24139150977134705, + "learning_rate": 0.00044371474988996984, + "loss": 4.6562, + "step": 2472 + }, + { + "epoch": 0.24150390625, + "grad_norm": 0.2556939721107483, + "learning_rate": 0.00044366839202119955, + "loss": 4.6719, + "step": 2473 + }, + { + "epoch": 0.2416015625, + "grad_norm": 0.25381579995155334, + "learning_rate": 0.0004436220178011734, + "loss": 4.6406, + "step": 2474 + }, + { + "epoch": 0.24169921875, + "grad_norm": 0.26499536633491516, + "learning_rate": 0.0004435756272343871, + "loss": 4.6328, + "step": 2475 + }, + { + "epoch": 0.241796875, + "grad_norm": 0.325953871011734, + "learning_rate": 0.0004435292203253378, + "loss": 4.6328, + "step": 2476 + }, + { + "epoch": 0.24189453125, + "grad_norm": 0.4252047836780548, + "learning_rate": 0.0004434827970785245, + "loss": 4.6523, + "step": 2477 + }, + { + "epoch": 0.2419921875, + "grad_norm": 0.4018876552581787, + "learning_rate": 0.00044343635749844747, + "loss": 4.6367, + "step": 2478 + }, + { + "epoch": 0.24208984375, + "grad_norm": 0.30718234181404114, + "learning_rate": 0.0004433899015896087, + "loss": 4.6641, + "step": 2479 + }, + { + "epoch": 0.2421875, + "grad_norm": 0.25455838441848755, + "learning_rate": 0.0004433434293565119, + "loss": 4.668, + "step": 2480 + }, + { + "epoch": 0.24228515625, + "grad_norm": 0.25474098324775696, + "learning_rate": 0.00044329694080366217, + "loss": 4.6523, + "step": 2481 + }, + { + "epoch": 0.2423828125, + "grad_norm": 0.26457756757736206, + "learning_rate": 0.0004432504359355663, + "loss": 4.6758, + "step": 2482 + }, + { + "epoch": 0.24248046875, + "grad_norm": 0.34141069650650024, + "learning_rate": 0.00044320391475673247, + "loss": 4.6523, + "step": 2483 + }, + { + "epoch": 0.242578125, + "grad_norm": 0.36278077960014343, + "learning_rate": 0.0004431573772716708, + "loss": 4.6328, + "step": 2484 + }, + { + "epoch": 0.24267578125, + "grad_norm": 0.3451724946498871, + "learning_rate": 0.00044311082348489267, + "loss": 4.6719, + "step": 2485 + }, + { + "epoch": 0.2427734375, + "grad_norm": 0.3399052321910858, + "learning_rate": 0.00044306425340091116, + "loss": 4.6523, + "step": 2486 + }, + { + "epoch": 0.24287109375, + "grad_norm": 0.2941957414150238, + "learning_rate": 0.00044301766702424094, + "loss": 4.6562, + "step": 2487 + }, + { + "epoch": 0.24296875, + "grad_norm": 0.23448634147644043, + "learning_rate": 0.00044297106435939825, + "loss": 4.6211, + "step": 2488 + }, + { + "epoch": 0.24306640625, + "grad_norm": 0.2744929790496826, + "learning_rate": 0.00044292444541090096, + "loss": 4.6445, + "step": 2489 + }, + { + "epoch": 0.2431640625, + "grad_norm": 0.285419762134552, + "learning_rate": 0.0004428778101832683, + "loss": 4.6523, + "step": 2490 + }, + { + "epoch": 0.24326171875, + "grad_norm": 0.3297009766101837, + "learning_rate": 0.00044283115868102137, + "loss": 4.6172, + "step": 2491 + }, + { + "epoch": 0.243359375, + "grad_norm": 0.3377384841442108, + "learning_rate": 0.0004427844909086827, + "loss": 4.6719, + "step": 2492 + }, + { + "epoch": 0.24345703125, + "grad_norm": 0.350247859954834, + "learning_rate": 0.00044273780687077637, + "loss": 4.6328, + "step": 2493 + }, + { + "epoch": 0.2435546875, + "grad_norm": 0.3441883325576782, + "learning_rate": 0.000442691106571828, + "loss": 4.6406, + "step": 2494 + }, + { + "epoch": 
0.24365234375, + "grad_norm": 0.27535873651504517, + "learning_rate": 0.00044264439001636514, + "loss": 4.6641, + "step": 2495 + }, + { + "epoch": 0.24375, + "grad_norm": 0.2442348152399063, + "learning_rate": 0.0004425976572089164, + "loss": 4.6602, + "step": 2496 + }, + { + "epoch": 0.24384765625, + "grad_norm": 0.2742549479007721, + "learning_rate": 0.0004425509081540123, + "loss": 4.6797, + "step": 2497 + }, + { + "epoch": 0.2439453125, + "grad_norm": 0.29776203632354736, + "learning_rate": 0.00044250414285618487, + "loss": 4.6406, + "step": 2498 + }, + { + "epoch": 0.24404296875, + "grad_norm": 0.3026568293571472, + "learning_rate": 0.0004424573613199675, + "loss": 4.6289, + "step": 2499 + }, + { + "epoch": 0.244140625, + "grad_norm": 0.29341962933540344, + "learning_rate": 0.00044241056354989557, + "loss": 4.6367, + "step": 2500 + }, + { + "epoch": 0.24423828125, + "grad_norm": 0.25451475381851196, + "learning_rate": 0.00044236374955050574, + "loss": 4.6367, + "step": 2501 + }, + { + "epoch": 0.2443359375, + "grad_norm": 0.26886335015296936, + "learning_rate": 0.0004423169193263363, + "loss": 4.6406, + "step": 2502 + }, + { + "epoch": 0.24443359375, + "grad_norm": 0.2695727050304413, + "learning_rate": 0.0004422700728819271, + "loss": 4.6367, + "step": 2503 + }, + { + "epoch": 0.24453125, + "grad_norm": 0.27542734146118164, + "learning_rate": 0.00044222321022181955, + "loss": 4.6289, + "step": 2504 + }, + { + "epoch": 0.24462890625, + "grad_norm": 0.291258305311203, + "learning_rate": 0.0004421763313505568, + "loss": 4.6484, + "step": 2505 + }, + { + "epoch": 0.2447265625, + "grad_norm": 0.282321959733963, + "learning_rate": 0.00044212943627268327, + "loss": 4.6445, + "step": 2506 + }, + { + "epoch": 0.24482421875, + "grad_norm": 0.27125120162963867, + "learning_rate": 0.00044208252499274524, + "loss": 4.6172, + "step": 2507 + }, + { + "epoch": 0.244921875, + "grad_norm": 0.27181708812713623, + "learning_rate": 0.0004420355975152904, + "loss": 4.6211, + "step": 2508 + }, + { + "epoch": 0.24501953125, + "grad_norm": 0.27521851658821106, + "learning_rate": 0.0004419886538448681, + "loss": 4.6797, + "step": 2509 + }, + { + "epoch": 0.2451171875, + "grad_norm": 0.2899063527584076, + "learning_rate": 0.00044194169398602913, + "loss": 4.625, + "step": 2510 + }, + { + "epoch": 0.24521484375, + "grad_norm": 0.31704601645469666, + "learning_rate": 0.0004418947179433259, + "loss": 4.6562, + "step": 2511 + }, + { + "epoch": 0.2453125, + "grad_norm": 0.37774011492729187, + "learning_rate": 0.0004418477257213126, + "loss": 4.625, + "step": 2512 + }, + { + "epoch": 0.24541015625, + "grad_norm": 0.3827389180660248, + "learning_rate": 0.0004418007173245447, + "loss": 4.6289, + "step": 2513 + }, + { + "epoch": 0.2455078125, + "grad_norm": 0.34198659658432007, + "learning_rate": 0.0004417536927575792, + "loss": 4.6289, + "step": 2514 + }, + { + "epoch": 0.24560546875, + "grad_norm": 0.38824227452278137, + "learning_rate": 0.000441706652024975, + "loss": 4.6914, + "step": 2515 + }, + { + "epoch": 0.245703125, + "grad_norm": 0.32041361927986145, + "learning_rate": 0.00044165959513129245, + "loss": 4.6367, + "step": 2516 + }, + { + "epoch": 0.24580078125, + "grad_norm": 0.27283477783203125, + "learning_rate": 0.0004416125220810932, + "loss": 4.6484, + "step": 2517 + }, + { + "epoch": 0.2458984375, + "grad_norm": 0.2879582643508911, + "learning_rate": 0.0004415654328789407, + "loss": 4.6289, + "step": 2518 + }, + { + "epoch": 0.24599609375, + "grad_norm": 0.3273562490940094, + "learning_rate": 
0.00044151832752939993, + "loss": 4.6562, + "step": 2519 + }, + { + "epoch": 0.24609375, + "grad_norm": 0.33376070857048035, + "learning_rate": 0.0004414712060370375, + "loss": 4.6289, + "step": 2520 + }, + { + "epoch": 0.24619140625, + "grad_norm": 0.3266454041004181, + "learning_rate": 0.00044142406840642147, + "loss": 4.6406, + "step": 2521 + }, + { + "epoch": 0.2462890625, + "grad_norm": 0.28334441781044006, + "learning_rate": 0.00044137691464212164, + "loss": 4.6523, + "step": 2522 + }, + { + "epoch": 0.24638671875, + "grad_norm": 0.2528420090675354, + "learning_rate": 0.000441329744748709, + "loss": 4.5977, + "step": 2523 + }, + { + "epoch": 0.246484375, + "grad_norm": 0.274658203125, + "learning_rate": 0.0004412825587307566, + "loss": 4.6445, + "step": 2524 + }, + { + "epoch": 0.24658203125, + "grad_norm": 0.27589505910873413, + "learning_rate": 0.0004412353565928387, + "loss": 4.6562, + "step": 2525 + }, + { + "epoch": 0.2466796875, + "grad_norm": 0.2908239960670471, + "learning_rate": 0.00044118813833953115, + "loss": 4.6484, + "step": 2526 + }, + { + "epoch": 0.24677734375, + "grad_norm": 0.3189093768596649, + "learning_rate": 0.00044114090397541153, + "loss": 4.6094, + "step": 2527 + }, + { + "epoch": 0.246875, + "grad_norm": 0.3125719428062439, + "learning_rate": 0.00044109365350505886, + "loss": 4.6055, + "step": 2528 + }, + { + "epoch": 0.24697265625, + "grad_norm": 0.29585713148117065, + "learning_rate": 0.00044104638693305375, + "loss": 4.668, + "step": 2529 + }, + { + "epoch": 0.2470703125, + "grad_norm": 0.2776036560535431, + "learning_rate": 0.00044099910426397844, + "loss": 4.668, + "step": 2530 + }, + { + "epoch": 0.24716796875, + "grad_norm": 0.24307331442832947, + "learning_rate": 0.0004409518055024166, + "loss": 4.6172, + "step": 2531 + }, + { + "epoch": 0.247265625, + "grad_norm": 0.33505502343177795, + "learning_rate": 0.00044090449065295353, + "loss": 4.6289, + "step": 2532 + }, + { + "epoch": 0.24736328125, + "grad_norm": 0.3090203106403351, + "learning_rate": 0.00044085715972017606, + "loss": 4.6328, + "step": 2533 + }, + { + "epoch": 0.2474609375, + "grad_norm": 0.3120708167552948, + "learning_rate": 0.0004408098127086726, + "loss": 4.625, + "step": 2534 + }, + { + "epoch": 0.24755859375, + "grad_norm": 0.27217045426368713, + "learning_rate": 0.00044076244962303323, + "loss": 4.6523, + "step": 2535 + }, + { + "epoch": 0.24765625, + "grad_norm": 0.2814929485321045, + "learning_rate": 0.0004407150704678494, + "loss": 4.6094, + "step": 2536 + }, + { + "epoch": 0.24775390625, + "grad_norm": 0.2786533236503601, + "learning_rate": 0.00044066767524771414, + "loss": 4.6406, + "step": 2537 + }, + { + "epoch": 0.2478515625, + "grad_norm": 0.26336488127708435, + "learning_rate": 0.0004406202639672222, + "loss": 4.6406, + "step": 2538 + }, + { + "epoch": 0.24794921875, + "grad_norm": 0.2611856460571289, + "learning_rate": 0.0004405728366309697, + "loss": 4.6641, + "step": 2539 + }, + { + "epoch": 0.248046875, + "grad_norm": 0.27430519461631775, + "learning_rate": 0.0004405253932435545, + "loss": 4.6602, + "step": 2540 + }, + { + "epoch": 0.24814453125, + "grad_norm": 0.2710666060447693, + "learning_rate": 0.00044047793380957577, + "loss": 4.6445, + "step": 2541 + }, + { + "epoch": 0.2482421875, + "grad_norm": 0.27449366450309753, + "learning_rate": 0.0004404304583336345, + "loss": 4.6133, + "step": 2542 + }, + { + "epoch": 0.24833984375, + "grad_norm": 0.3431309759616852, + "learning_rate": 0.00044038296682033306, + "loss": 4.6289, + "step": 2543 + }, + { + "epoch": 
0.2484375, + "grad_norm": 0.3754369020462036, + "learning_rate": 0.0004403354592742755, + "loss": 4.6211, + "step": 2544 + }, + { + "epoch": 0.24853515625, + "grad_norm": 0.399315744638443, + "learning_rate": 0.00044028793570006727, + "loss": 4.6445, + "step": 2545 + }, + { + "epoch": 0.2486328125, + "grad_norm": 0.3968663513660431, + "learning_rate": 0.00044024039610231544, + "loss": 4.6172, + "step": 2546 + }, + { + "epoch": 0.24873046875, + "grad_norm": 0.28154146671295166, + "learning_rate": 0.0004401928404856287, + "loss": 4.6641, + "step": 2547 + }, + { + "epoch": 0.248828125, + "grad_norm": 0.29224708676338196, + "learning_rate": 0.0004401452688546173, + "loss": 4.6641, + "step": 2548 + }, + { + "epoch": 0.24892578125, + "grad_norm": 0.30365678668022156, + "learning_rate": 0.0004400976812138929, + "loss": 4.6484, + "step": 2549 + }, + { + "epoch": 0.2490234375, + "grad_norm": 0.3276931643486023, + "learning_rate": 0.0004400500775680688, + "loss": 4.6406, + "step": 2550 + }, + { + "epoch": 0.24912109375, + "grad_norm": 0.3508548438549042, + "learning_rate": 0.00044000245792175997, + "loss": 4.6797, + "step": 2551 + }, + { + "epoch": 0.24921875, + "grad_norm": 0.3195402920246124, + "learning_rate": 0.00043995482227958264, + "loss": 4.6562, + "step": 2552 + }, + { + "epoch": 0.24931640625, + "grad_norm": 0.23737281560897827, + "learning_rate": 0.00043990717064615483, + "loss": 4.6797, + "step": 2553 + }, + { + "epoch": 0.2494140625, + "grad_norm": 0.26599809527397156, + "learning_rate": 0.00043985950302609606, + "loss": 4.6641, + "step": 2554 + }, + { + "epoch": 0.24951171875, + "grad_norm": 0.2774026393890381, + "learning_rate": 0.0004398118194240274, + "loss": 4.6602, + "step": 2555 + }, + { + "epoch": 0.249609375, + "grad_norm": 0.2594927251338959, + "learning_rate": 0.00043976411984457143, + "loss": 4.6172, + "step": 2556 + }, + { + "epoch": 0.24970703125, + "grad_norm": 0.2625410556793213, + "learning_rate": 0.00043971640429235224, + "loss": 4.6562, + "step": 2557 + }, + { + "epoch": 0.2498046875, + "grad_norm": 0.2773466408252716, + "learning_rate": 0.00043966867277199566, + "loss": 4.6367, + "step": 2558 + }, + { + "epoch": 0.24990234375, + "grad_norm": 0.2608027160167694, + "learning_rate": 0.0004396209252881289, + "loss": 4.6445, + "step": 2559 + }, + { + "epoch": 0.25, + "grad_norm": 0.2640449106693268, + "learning_rate": 0.0004395731618453806, + "loss": 4.6406, + "step": 2560 + }, + { + "epoch": 0.25009765625, + "grad_norm": 0.2891799211502075, + "learning_rate": 0.0004395253824483813, + "loss": 4.6641, + "step": 2561 + }, + { + "epoch": 0.2501953125, + "grad_norm": 0.27893730998039246, + "learning_rate": 0.0004394775871017628, + "loss": 4.6289, + "step": 2562 + }, + { + "epoch": 0.25029296875, + "grad_norm": 0.24193783104419708, + "learning_rate": 0.0004394297758101586, + "loss": 4.5977, + "step": 2563 + }, + { + "epoch": 0.250390625, + "grad_norm": 0.31036582589149475, + "learning_rate": 0.0004393819485782036, + "loss": 4.625, + "step": 2564 + }, + { + "epoch": 0.25048828125, + "grad_norm": 0.3697652518749237, + "learning_rate": 0.00043933410541053424, + "loss": 4.6797, + "step": 2565 + }, + { + "epoch": 0.2505859375, + "grad_norm": 0.3934558629989624, + "learning_rate": 0.0004392862463117888, + "loss": 4.6289, + "step": 2566 + }, + { + "epoch": 0.25068359375, + "grad_norm": 0.3703542649745941, + "learning_rate": 0.0004392383712866067, + "loss": 4.6211, + "step": 2567 + }, + { + "epoch": 0.25078125, + "grad_norm": 0.3403394818305969, + "learning_rate": 
0.0004391904803396293, + "loss": 4.6172, + "step": 2568 + }, + { + "epoch": 0.25087890625, + "grad_norm": 0.3069233000278473, + "learning_rate": 0.00043914257347549913, + "loss": 4.6367, + "step": 2569 + }, + { + "epoch": 0.2509765625, + "grad_norm": 0.26165324449539185, + "learning_rate": 0.0004390946506988605, + "loss": 4.6289, + "step": 2570 + }, + { + "epoch": 0.25107421875, + "grad_norm": 0.23558934032917023, + "learning_rate": 0.00043904671201435927, + "loss": 4.6328, + "step": 2571 + }, + { + "epoch": 0.251171875, + "grad_norm": 0.290761798620224, + "learning_rate": 0.0004389987574266426, + "loss": 4.6406, + "step": 2572 + }, + { + "epoch": 0.25126953125, + "grad_norm": 0.29168501496315, + "learning_rate": 0.0004389507869403595, + "loss": 4.6172, + "step": 2573 + }, + { + "epoch": 0.2513671875, + "grad_norm": 0.3213168978691101, + "learning_rate": 0.0004389028005601604, + "loss": 4.6406, + "step": 2574 + }, + { + "epoch": 0.25146484375, + "grad_norm": 0.3191249966621399, + "learning_rate": 0.0004388547982906971, + "loss": 4.625, + "step": 2575 + }, + { + "epoch": 0.2515625, + "grad_norm": 0.28562894463539124, + "learning_rate": 0.00043880678013662324, + "loss": 4.6797, + "step": 2576 + }, + { + "epoch": 0.25166015625, + "grad_norm": 0.2667846381664276, + "learning_rate": 0.0004387587461025938, + "loss": 4.582, + "step": 2577 + }, + { + "epoch": 0.2517578125, + "grad_norm": 0.2537870705127716, + "learning_rate": 0.00043871069619326545, + "loss": 4.6445, + "step": 2578 + }, + { + "epoch": 0.25185546875, + "grad_norm": 0.2587931156158447, + "learning_rate": 0.0004386626304132961, + "loss": 4.625, + "step": 2579 + }, + { + "epoch": 0.251953125, + "grad_norm": 0.23060335218906403, + "learning_rate": 0.0004386145487673455, + "loss": 4.6328, + "step": 2580 + }, + { + "epoch": 0.25205078125, + "grad_norm": 0.22982822358608246, + "learning_rate": 0.0004385664512600749, + "loss": 4.6523, + "step": 2581 + }, + { + "epoch": 0.2521484375, + "grad_norm": 0.2807593047618866, + "learning_rate": 0.000438518337896147, + "loss": 4.6367, + "step": 2582 + }, + { + "epoch": 0.25224609375, + "grad_norm": 0.2805570363998413, + "learning_rate": 0.000438470208680226, + "loss": 4.6289, + "step": 2583 + }, + { + "epoch": 0.25234375, + "grad_norm": 0.2497144192457199, + "learning_rate": 0.0004384220636169778, + "loss": 4.6367, + "step": 2584 + }, + { + "epoch": 0.25244140625, + "grad_norm": 0.24642769992351532, + "learning_rate": 0.0004383739027110695, + "loss": 4.6406, + "step": 2585 + }, + { + "epoch": 0.2525390625, + "grad_norm": 0.24216412007808685, + "learning_rate": 0.00043832572596717043, + "loss": 4.625, + "step": 2586 + }, + { + "epoch": 0.25263671875, + "grad_norm": 0.2918716371059418, + "learning_rate": 0.0004382775333899505, + "loss": 4.6523, + "step": 2587 + }, + { + "epoch": 0.252734375, + "grad_norm": 0.3243367075920105, + "learning_rate": 0.000438229324984082, + "loss": 4.6523, + "step": 2588 + }, + { + "epoch": 0.25283203125, + "grad_norm": 0.3255716562271118, + "learning_rate": 0.00043818110075423823, + "loss": 4.625, + "step": 2589 + }, + { + "epoch": 0.2529296875, + "grad_norm": 0.2977418601512909, + "learning_rate": 0.00043813286070509426, + "loss": 4.6602, + "step": 2590 + }, + { + "epoch": 0.25302734375, + "grad_norm": 0.2932508587837219, + "learning_rate": 0.0004380846048413267, + "loss": 4.6094, + "step": 2591 + }, + { + "epoch": 0.253125, + "grad_norm": 0.252905935049057, + "learning_rate": 0.0004380363331676135, + "loss": 4.6055, + "step": 2592 + }, + { + "epoch": 0.25322265625, + 
"grad_norm": 0.2580551505088806, + "learning_rate": 0.0004379880456886343, + "loss": 4.6211, + "step": 2593 + }, + { + "epoch": 0.2533203125, + "grad_norm": 0.2548312544822693, + "learning_rate": 0.0004379397424090703, + "loss": 4.625, + "step": 2594 + }, + { + "epoch": 0.25341796875, + "grad_norm": 0.2486112117767334, + "learning_rate": 0.00043789142333360416, + "loss": 4.6562, + "step": 2595 + }, + { + "epoch": 0.253515625, + "grad_norm": 0.2424216866493225, + "learning_rate": 0.0004378430884669201, + "loss": 4.6367, + "step": 2596 + }, + { + "epoch": 0.25361328125, + "grad_norm": 0.2615583837032318, + "learning_rate": 0.00043779473781370377, + "loss": 4.625, + "step": 2597 + }, + { + "epoch": 0.2537109375, + "grad_norm": 0.2625563442707062, + "learning_rate": 0.0004377463713786426, + "loss": 4.6445, + "step": 2598 + }, + { + "epoch": 0.25380859375, + "grad_norm": 0.3095974326133728, + "learning_rate": 0.00043769798916642517, + "loss": 4.6133, + "step": 2599 + }, + { + "epoch": 0.25390625, + "grad_norm": 0.33262208104133606, + "learning_rate": 0.000437649591181742, + "loss": 4.6523, + "step": 2600 + }, + { + "epoch": 0.25400390625, + "grad_norm": 0.40450748801231384, + "learning_rate": 0.0004376011774292848, + "loss": 4.6719, + "step": 2601 + }, + { + "epoch": 0.2541015625, + "grad_norm": 0.4640595018863678, + "learning_rate": 0.0004375527479137471, + "loss": 4.6211, + "step": 2602 + }, + { + "epoch": 0.25419921875, + "grad_norm": 0.47564366459846497, + "learning_rate": 0.0004375043026398237, + "loss": 4.6445, + "step": 2603 + }, + { + "epoch": 0.254296875, + "grad_norm": 0.35595545172691345, + "learning_rate": 0.0004374558416122111, + "loss": 4.6211, + "step": 2604 + }, + { + "epoch": 0.25439453125, + "grad_norm": 0.256295770406723, + "learning_rate": 0.0004374073648356072, + "loss": 4.6641, + "step": 2605 + }, + { + "epoch": 0.2544921875, + "grad_norm": 0.3992545008659363, + "learning_rate": 0.00043735887231471156, + "loss": 4.6172, + "step": 2606 + }, + { + "epoch": 0.25458984375, + "grad_norm": 0.36927610635757446, + "learning_rate": 0.00043731036405422524, + "loss": 4.6602, + "step": 2607 + }, + { + "epoch": 0.2546875, + "grad_norm": 0.2414986789226532, + "learning_rate": 0.00043726184005885065, + "loss": 4.6172, + "step": 2608 + }, + { + "epoch": 0.25478515625, + "grad_norm": 0.37241289019584656, + "learning_rate": 0.000437213300333292, + "loss": 4.6406, + "step": 2609 + }, + { + "epoch": 0.2548828125, + "grad_norm": 0.4023999571800232, + "learning_rate": 0.0004371647448822548, + "loss": 4.6562, + "step": 2610 + }, + { + "epoch": 0.25498046875, + "grad_norm": 0.2666376233100891, + "learning_rate": 0.0004371161737104463, + "loss": 4.6406, + "step": 2611 + }, + { + "epoch": 0.255078125, + "grad_norm": 0.2868267595767975, + "learning_rate": 0.0004370675868225749, + "loss": 4.6289, + "step": 2612 + }, + { + "epoch": 0.25517578125, + "grad_norm": 0.315245121717453, + "learning_rate": 0.000437018984223351, + "loss": 4.6172, + "step": 2613 + }, + { + "epoch": 0.2552734375, + "grad_norm": 0.27774590253829956, + "learning_rate": 0.0004369703659174861, + "loss": 4.6367, + "step": 2614 + }, + { + "epoch": 0.25537109375, + "grad_norm": 0.23025551438331604, + "learning_rate": 0.0004369217319096936, + "loss": 4.6406, + "step": 2615 + }, + { + "epoch": 0.25546875, + "grad_norm": 0.26377880573272705, + "learning_rate": 0.0004368730822046882, + "loss": 4.6602, + "step": 2616 + }, + { + "epoch": 0.25556640625, + "grad_norm": 0.28632158041000366, + "learning_rate": 0.00043682441680718603, + "loss": 
4.6172, + "step": 2617 + }, + { + "epoch": 0.2556640625, + "grad_norm": 0.23507046699523926, + "learning_rate": 0.000436775735721905, + "loss": 4.6289, + "step": 2618 + }, + { + "epoch": 0.25576171875, + "grad_norm": 0.23724232614040375, + "learning_rate": 0.00043672703895356437, + "loss": 4.6289, + "step": 2619 + }, + { + "epoch": 0.255859375, + "grad_norm": 0.2762061059474945, + "learning_rate": 0.0004366783265068849, + "loss": 4.6523, + "step": 2620 + }, + { + "epoch": 0.25595703125, + "grad_norm": 0.3277427852153778, + "learning_rate": 0.00043662959838658904, + "loss": 4.6094, + "step": 2621 + }, + { + "epoch": 0.2560546875, + "grad_norm": 0.30051320791244507, + "learning_rate": 0.0004365808545974006, + "loss": 4.6406, + "step": 2622 + }, + { + "epoch": 0.25615234375, + "grad_norm": 0.27253690361976624, + "learning_rate": 0.0004365320951440449, + "loss": 4.6016, + "step": 2623 + }, + { + "epoch": 0.25625, + "grad_norm": 0.2699415981769562, + "learning_rate": 0.000436483320031249, + "loss": 4.6211, + "step": 2624 + }, + { + "epoch": 0.25634765625, + "grad_norm": 0.2972502112388611, + "learning_rate": 0.00043643452926374115, + "loss": 4.6094, + "step": 2625 + }, + { + "epoch": 0.2564453125, + "grad_norm": 0.28808775544166565, + "learning_rate": 0.0004363857228462514, + "loss": 4.6523, + "step": 2626 + }, + { + "epoch": 0.25654296875, + "grad_norm": 0.2569367587566376, + "learning_rate": 0.00043633690078351105, + "loss": 4.6289, + "step": 2627 + }, + { + "epoch": 0.256640625, + "grad_norm": 0.23887008428573608, + "learning_rate": 0.0004362880630802533, + "loss": 4.6328, + "step": 2628 + }, + { + "epoch": 0.25673828125, + "grad_norm": 0.26324599981307983, + "learning_rate": 0.00043623920974121235, + "loss": 4.6602, + "step": 2629 + }, + { + "epoch": 0.2568359375, + "grad_norm": 0.30115535855293274, + "learning_rate": 0.00043619034077112446, + "loss": 4.6211, + "step": 2630 + }, + { + "epoch": 0.25693359375, + "grad_norm": 0.3043840229511261, + "learning_rate": 0.000436141456174727, + "loss": 4.6406, + "step": 2631 + }, + { + "epoch": 0.25703125, + "grad_norm": 0.32109421491622925, + "learning_rate": 0.000436092555956759, + "loss": 4.6367, + "step": 2632 + }, + { + "epoch": 0.25712890625, + "grad_norm": 0.3065946698188782, + "learning_rate": 0.00043604364012196113, + "loss": 4.6523, + "step": 2633 + }, + { + "epoch": 0.2572265625, + "grad_norm": 0.23051096498966217, + "learning_rate": 0.0004359947086750753, + "loss": 4.6484, + "step": 2634 + }, + { + "epoch": 0.25732421875, + "grad_norm": 0.2702968716621399, + "learning_rate": 0.00043594576162084514, + "loss": 4.6172, + "step": 2635 + }, + { + "epoch": 0.257421875, + "grad_norm": 0.29582664370536804, + "learning_rate": 0.0004358967989640157, + "loss": 4.6289, + "step": 2636 + }, + { + "epoch": 0.25751953125, + "grad_norm": 0.28188806772232056, + "learning_rate": 0.00043584782070933367, + "loss": 4.5977, + "step": 2637 + }, + { + "epoch": 0.2576171875, + "grad_norm": 0.26752015948295593, + "learning_rate": 0.000435798826861547, + "loss": 4.6328, + "step": 2638 + }, + { + "epoch": 0.25771484375, + "grad_norm": 0.258663147687912, + "learning_rate": 0.00043574981742540555, + "loss": 4.5898, + "step": 2639 + }, + { + "epoch": 0.2578125, + "grad_norm": 0.26688337326049805, + "learning_rate": 0.00043570079240566026, + "loss": 4.6172, + "step": 2640 + }, + { + "epoch": 0.25791015625, + "grad_norm": 0.31377387046813965, + "learning_rate": 0.0004356517518070637, + "loss": 4.6016, + "step": 2641 + }, + { + "epoch": 0.2580078125, + "grad_norm": 
0.38951390981674194, + "learning_rate": 0.00043560269563437025, + "loss": 4.6641, + "step": 2642 + }, + { + "epoch": 0.25810546875, + "grad_norm": 0.39420628547668457, + "learning_rate": 0.00043555362389233556, + "loss": 4.6445, + "step": 2643 + }, + { + "epoch": 0.258203125, + "grad_norm": 0.3550975024700165, + "learning_rate": 0.0004355045365857166, + "loss": 4.6367, + "step": 2644 + }, + { + "epoch": 0.25830078125, + "grad_norm": 0.29494383931159973, + "learning_rate": 0.0004354554337192722, + "loss": 4.6758, + "step": 2645 + }, + { + "epoch": 0.2583984375, + "grad_norm": 0.22620560228824615, + "learning_rate": 0.00043540631529776245, + "loss": 4.625, + "step": 2646 + }, + { + "epoch": 0.25849609375, + "grad_norm": 0.2794688940048218, + "learning_rate": 0.00043535718132594925, + "loss": 4.6289, + "step": 2647 + }, + { + "epoch": 0.25859375, + "grad_norm": 0.3116011619567871, + "learning_rate": 0.0004353080318085956, + "loss": 4.6211, + "step": 2648 + }, + { + "epoch": 0.25869140625, + "grad_norm": 0.3346978724002838, + "learning_rate": 0.00043525886675046627, + "loss": 4.6562, + "step": 2649 + }, + { + "epoch": 0.2587890625, + "grad_norm": 0.2688816785812378, + "learning_rate": 0.00043520968615632744, + "loss": 4.6133, + "step": 2650 + }, + { + "epoch": 0.25888671875, + "grad_norm": 0.2490513175725937, + "learning_rate": 0.000435160490030947, + "loss": 4.6211, + "step": 2651 + }, + { + "epoch": 0.258984375, + "grad_norm": 0.2807507812976837, + "learning_rate": 0.000435111278379094, + "loss": 4.6562, + "step": 2652 + }, + { + "epoch": 0.25908203125, + "grad_norm": 0.24647557735443115, + "learning_rate": 0.00043506205120553927, + "loss": 4.5938, + "step": 2653 + }, + { + "epoch": 0.2591796875, + "grad_norm": 0.25943833589553833, + "learning_rate": 0.00043501280851505496, + "loss": 4.5898, + "step": 2654 + }, + { + "epoch": 0.25927734375, + "grad_norm": 0.291103720664978, + "learning_rate": 0.0004349635503124149, + "loss": 4.6211, + "step": 2655 + }, + { + "epoch": 0.259375, + "grad_norm": 0.2734870910644531, + "learning_rate": 0.00043491427660239437, + "loss": 4.6328, + "step": 2656 + }, + { + "epoch": 0.25947265625, + "grad_norm": 0.27138301730155945, + "learning_rate": 0.00043486498738976997, + "loss": 4.5938, + "step": 2657 + }, + { + "epoch": 0.2595703125, + "grad_norm": 0.27812081575393677, + "learning_rate": 0.0004348156826793201, + "loss": 4.6406, + "step": 2658 + }, + { + "epoch": 0.25966796875, + "grad_norm": 0.255628764629364, + "learning_rate": 0.0004347663624758245, + "loss": 4.6484, + "step": 2659 + }, + { + "epoch": 0.259765625, + "grad_norm": 0.26133373379707336, + "learning_rate": 0.0004347170267840643, + "loss": 4.6602, + "step": 2660 + }, + { + "epoch": 0.25986328125, + "grad_norm": 0.24307551980018616, + "learning_rate": 0.00043466767560882243, + "loss": 4.6328, + "step": 2661 + }, + { + "epoch": 0.2599609375, + "grad_norm": 0.25641006231307983, + "learning_rate": 0.00043461830895488306, + "loss": 4.6016, + "step": 2662 + }, + { + "epoch": 0.26005859375, + "grad_norm": 0.28057554364204407, + "learning_rate": 0.00043456892682703193, + "loss": 4.5977, + "step": 2663 + }, + { + "epoch": 0.26015625, + "grad_norm": 0.3308285176753998, + "learning_rate": 0.0004345195292300564, + "loss": 4.6445, + "step": 2664 + }, + { + "epoch": 0.26025390625, + "grad_norm": 0.36961767077445984, + "learning_rate": 0.0004344701161687452, + "loss": 4.6289, + "step": 2665 + }, + { + "epoch": 0.2603515625, + "grad_norm": 0.36934393644332886, + "learning_rate": 0.0004344206876478884, + "loss": 
4.6289, + "step": 2666 + }, + { + "epoch": 0.26044921875, + "grad_norm": 0.31152284145355225, + "learning_rate": 0.00043437124367227807, + "loss": 4.6562, + "step": 2667 + }, + { + "epoch": 0.260546875, + "grad_norm": 0.2506508231163025, + "learning_rate": 0.0004343217842467074, + "loss": 4.6328, + "step": 2668 + }, + { + "epoch": 0.26064453125, + "grad_norm": 0.2702595293521881, + "learning_rate": 0.0004342723093759709, + "loss": 4.6328, + "step": 2669 + }, + { + "epoch": 0.2607421875, + "grad_norm": 0.2991844713687897, + "learning_rate": 0.00043422281906486504, + "loss": 4.5977, + "step": 2670 + }, + { + "epoch": 0.26083984375, + "grad_norm": 0.33450913429260254, + "learning_rate": 0.0004341733133181875, + "loss": 4.625, + "step": 2671 + }, + { + "epoch": 0.2609375, + "grad_norm": 0.3373546004295349, + "learning_rate": 0.0004341237921407377, + "loss": 4.6484, + "step": 2672 + }, + { + "epoch": 0.26103515625, + "grad_norm": 0.29305994510650635, + "learning_rate": 0.00043407425553731603, + "loss": 4.6211, + "step": 2673 + }, + { + "epoch": 0.2611328125, + "grad_norm": 0.23810382187366486, + "learning_rate": 0.000434024703512725, + "loss": 4.6445, + "step": 2674 + }, + { + "epoch": 0.26123046875, + "grad_norm": 0.25600048899650574, + "learning_rate": 0.0004339751360717683, + "loss": 4.6289, + "step": 2675 + }, + { + "epoch": 0.261328125, + "grad_norm": 0.2791506350040436, + "learning_rate": 0.0004339255532192511, + "loss": 4.6094, + "step": 2676 + }, + { + "epoch": 0.26142578125, + "grad_norm": 0.3102114796638489, + "learning_rate": 0.0004338759549599802, + "loss": 4.6094, + "step": 2677 + }, + { + "epoch": 0.2615234375, + "grad_norm": 0.2775516211986542, + "learning_rate": 0.0004338263412987636, + "loss": 4.6445, + "step": 2678 + }, + { + "epoch": 0.26162109375, + "grad_norm": 0.2559386193752289, + "learning_rate": 0.00043377671224041137, + "loss": 4.5977, + "step": 2679 + }, + { + "epoch": 0.26171875, + "grad_norm": 0.26493871212005615, + "learning_rate": 0.00043372706778973435, + "loss": 4.6211, + "step": 2680 + }, + { + "epoch": 0.26181640625, + "grad_norm": 0.2893526554107666, + "learning_rate": 0.00043367740795154547, + "loss": 4.6562, + "step": 2681 + }, + { + "epoch": 0.2619140625, + "grad_norm": 0.35771387815475464, + "learning_rate": 0.0004336277327306588, + "loss": 4.6055, + "step": 2682 + }, + { + "epoch": 0.26201171875, + "grad_norm": 0.3137047290802002, + "learning_rate": 0.0004335780421318901, + "loss": 4.6094, + "step": 2683 + }, + { + "epoch": 0.262109375, + "grad_norm": 0.2459126114845276, + "learning_rate": 0.00043352833616005647, + "loss": 4.6406, + "step": 2684 + }, + { + "epoch": 0.26220703125, + "grad_norm": 0.25466591119766235, + "learning_rate": 0.0004334786148199765, + "loss": 4.6602, + "step": 2685 + }, + { + "epoch": 0.2623046875, + "grad_norm": 0.2563462555408478, + "learning_rate": 0.0004334288781164705, + "loss": 4.6328, + "step": 2686 + }, + { + "epoch": 0.26240234375, + "grad_norm": 0.26143601536750793, + "learning_rate": 0.0004333791260543601, + "loss": 4.6328, + "step": 2687 + }, + { + "epoch": 0.2625, + "grad_norm": 0.25282952189445496, + "learning_rate": 0.00043332935863846825, + "loss": 4.6367, + "step": 2688 + }, + { + "epoch": 0.26259765625, + "grad_norm": 0.2638847827911377, + "learning_rate": 0.00043327957587361963, + "loss": 4.6172, + "step": 2689 + }, + { + "epoch": 0.2626953125, + "grad_norm": 0.24002081155776978, + "learning_rate": 0.00043322977776464043, + "loss": 4.6172, + "step": 2690 + }, + { + "epoch": 0.26279296875, + "grad_norm": 
0.26813840866088867, + "learning_rate": 0.0004331799643163582, + "loss": 4.6367, + "step": 2691 + }, + { + "epoch": 0.262890625, + "grad_norm": 0.24831174314022064, + "learning_rate": 0.000433130135533602, + "loss": 4.6133, + "step": 2692 + }, + { + "epoch": 0.26298828125, + "grad_norm": 0.2469542920589447, + "learning_rate": 0.0004330802914212024, + "loss": 4.625, + "step": 2693 + }, + { + "epoch": 0.2630859375, + "grad_norm": 0.25638291239738464, + "learning_rate": 0.0004330304319839914, + "loss": 4.6172, + "step": 2694 + }, + { + "epoch": 0.26318359375, + "grad_norm": 0.26377740502357483, + "learning_rate": 0.0004329805572268026, + "loss": 4.625, + "step": 2695 + }, + { + "epoch": 0.26328125, + "grad_norm": 0.2609005272388458, + "learning_rate": 0.000432930667154471, + "loss": 4.6328, + "step": 2696 + }, + { + "epoch": 0.26337890625, + "grad_norm": 0.2879921793937683, + "learning_rate": 0.0004328807617718331, + "loss": 4.6523, + "step": 2697 + }, + { + "epoch": 0.2634765625, + "grad_norm": 0.31698736548423767, + "learning_rate": 0.0004328308410837269, + "loss": 4.6172, + "step": 2698 + }, + { + "epoch": 0.26357421875, + "grad_norm": 0.3793221712112427, + "learning_rate": 0.00043278090509499175, + "loss": 4.6367, + "step": 2699 + }, + { + "epoch": 0.263671875, + "grad_norm": 0.40002351999282837, + "learning_rate": 0.0004327309538104688, + "loss": 4.6328, + "step": 2700 + }, + { + "epoch": 0.26376953125, + "grad_norm": 0.340290904045105, + "learning_rate": 0.00043268098723500036, + "loss": 4.6094, + "step": 2701 + }, + { + "epoch": 0.2638671875, + "grad_norm": 0.2754620313644409, + "learning_rate": 0.00043263100537343054, + "loss": 4.6445, + "step": 2702 + }, + { + "epoch": 0.26396484375, + "grad_norm": 0.3039878308773041, + "learning_rate": 0.00043258100823060443, + "loss": 4.6289, + "step": 2703 + }, + { + "epoch": 0.2640625, + "grad_norm": 0.2932458519935608, + "learning_rate": 0.0004325309958113691, + "loss": 4.6445, + "step": 2704 + }, + { + "epoch": 0.26416015625, + "grad_norm": 0.3293628692626953, + "learning_rate": 0.0004324809681205729, + "loss": 4.6289, + "step": 2705 + }, + { + "epoch": 0.2642578125, + "grad_norm": 0.29881125688552856, + "learning_rate": 0.00043243092516306576, + "loss": 4.625, + "step": 2706 + }, + { + "epoch": 0.26435546875, + "grad_norm": 0.24560591578483582, + "learning_rate": 0.00043238086694369875, + "loss": 4.6523, + "step": 2707 + }, + { + "epoch": 0.264453125, + "grad_norm": 0.2545822858810425, + "learning_rate": 0.00043233079346732497, + "loss": 4.6016, + "step": 2708 + }, + { + "epoch": 0.26455078125, + "grad_norm": 0.27566730976104736, + "learning_rate": 0.00043228070473879857, + "loss": 4.5859, + "step": 2709 + }, + { + "epoch": 0.2646484375, + "grad_norm": 0.27875474095344543, + "learning_rate": 0.00043223060076297526, + "loss": 4.6172, + "step": 2710 + }, + { + "epoch": 0.26474609375, + "grad_norm": 0.2811029255390167, + "learning_rate": 0.0004321804815447123, + "loss": 4.6016, + "step": 2711 + }, + { + "epoch": 0.26484375, + "grad_norm": 0.24105289578437805, + "learning_rate": 0.00043213034708886847, + "loss": 4.6406, + "step": 2712 + }, + { + "epoch": 0.26494140625, + "grad_norm": 0.2662856876850128, + "learning_rate": 0.000432080197400304, + "loss": 4.6445, + "step": 2713 + }, + { + "epoch": 0.2650390625, + "grad_norm": 0.25164878368377686, + "learning_rate": 0.00043203003248388046, + "loss": 4.625, + "step": 2714 + }, + { + "epoch": 0.26513671875, + "grad_norm": 0.27431565523147583, + "learning_rate": 0.00043197985234446094, + "loss": 4.625, 
+ "step": 2715 + }, + { + "epoch": 0.265234375, + "grad_norm": 0.2735578417778015, + "learning_rate": 0.0004319296569869103, + "loss": 4.6172, + "step": 2716 + }, + { + "epoch": 0.26533203125, + "grad_norm": 0.2583950161933899, + "learning_rate": 0.00043187944641609444, + "loss": 4.6094, + "step": 2717 + }, + { + "epoch": 0.2654296875, + "grad_norm": 0.2893451154232025, + "learning_rate": 0.000431829220636881, + "loss": 4.6445, + "step": 2718 + }, + { + "epoch": 0.26552734375, + "grad_norm": 0.28681814670562744, + "learning_rate": 0.000431778979654139, + "loss": 4.6172, + "step": 2719 + }, + { + "epoch": 0.265625, + "grad_norm": 0.23266421258449554, + "learning_rate": 0.000431728723472739, + "loss": 4.6289, + "step": 2720 + }, + { + "epoch": 0.26572265625, + "grad_norm": 0.23381997644901276, + "learning_rate": 0.0004316784520975531, + "loss": 4.625, + "step": 2721 + }, + { + "epoch": 0.2658203125, + "grad_norm": 0.2360221892595291, + "learning_rate": 0.00043162816553345444, + "loss": 4.6562, + "step": 2722 + }, + { + "epoch": 0.26591796875, + "grad_norm": 0.23772352933883667, + "learning_rate": 0.0004315778637853183, + "loss": 4.625, + "step": 2723 + }, + { + "epoch": 0.266015625, + "grad_norm": 0.23639456927776337, + "learning_rate": 0.0004315275468580209, + "loss": 4.625, + "step": 2724 + }, + { + "epoch": 0.26611328125, + "grad_norm": 0.2515277862548828, + "learning_rate": 0.00043147721475644027, + "loss": 4.6289, + "step": 2725 + }, + { + "epoch": 0.2662109375, + "grad_norm": 0.25055450201034546, + "learning_rate": 0.00043142686748545565, + "loss": 4.6016, + "step": 2726 + }, + { + "epoch": 0.26630859375, + "grad_norm": 0.3061833679676056, + "learning_rate": 0.00043137650504994785, + "loss": 4.625, + "step": 2727 + }, + { + "epoch": 0.26640625, + "grad_norm": 0.37695106863975525, + "learning_rate": 0.00043132612745479926, + "loss": 4.6289, + "step": 2728 + }, + { + "epoch": 0.26650390625, + "grad_norm": 0.4225936233997345, + "learning_rate": 0.00043127573470489356, + "loss": 4.6211, + "step": 2729 + }, + { + "epoch": 0.2666015625, + "grad_norm": 0.4192008674144745, + "learning_rate": 0.00043122532680511604, + "loss": 4.6172, + "step": 2730 + }, + { + "epoch": 0.26669921875, + "grad_norm": 0.31992414593696594, + "learning_rate": 0.0004311749037603534, + "loss": 4.6406, + "step": 2731 + }, + { + "epoch": 0.266796875, + "grad_norm": 0.233629509806633, + "learning_rate": 0.00043112446557549386, + "loss": 4.6367, + "step": 2732 + }, + { + "epoch": 0.26689453125, + "grad_norm": 0.30248719453811646, + "learning_rate": 0.0004310740122554269, + "loss": 4.6523, + "step": 2733 + }, + { + "epoch": 0.2669921875, + "grad_norm": 0.31114014983177185, + "learning_rate": 0.0004310235438050439, + "loss": 4.6523, + "step": 2734 + }, + { + "epoch": 0.26708984375, + "grad_norm": 0.2623535394668579, + "learning_rate": 0.0004309730602292371, + "loss": 4.6328, + "step": 2735 + }, + { + "epoch": 0.2671875, + "grad_norm": 0.22605332732200623, + "learning_rate": 0.00043092256153290067, + "loss": 4.6133, + "step": 2736 + }, + { + "epoch": 0.26728515625, + "grad_norm": 0.2686997652053833, + "learning_rate": 0.0004308720477209303, + "loss": 4.6211, + "step": 2737 + }, + { + "epoch": 0.2673828125, + "grad_norm": 0.2706778049468994, + "learning_rate": 0.0004308215187982227, + "loss": 4.6211, + "step": 2738 + }, + { + "epoch": 0.26748046875, + "grad_norm": 0.2650245130062103, + "learning_rate": 0.00043077097476967646, + "loss": 4.6016, + "step": 2739 + }, + { + "epoch": 0.267578125, + "grad_norm": 0.2558055818080902, + 
"learning_rate": 0.0004307204156401914, + "loss": 4.6172, + "step": 2740 + }, + { + "epoch": 0.26767578125, + "grad_norm": 0.2588762938976288, + "learning_rate": 0.00043066984141466896, + "loss": 4.6211, + "step": 2741 + }, + { + "epoch": 0.2677734375, + "grad_norm": 0.26006078720092773, + "learning_rate": 0.000430619252098012, + "loss": 4.668, + "step": 2742 + }, + { + "epoch": 0.26787109375, + "grad_norm": 0.3107823133468628, + "learning_rate": 0.0004305686476951246, + "loss": 4.6445, + "step": 2743 + }, + { + "epoch": 0.26796875, + "grad_norm": 0.2376752346754074, + "learning_rate": 0.00043051802821091273, + "loss": 4.6289, + "step": 2744 + }, + { + "epoch": 0.26806640625, + "grad_norm": 0.23580718040466309, + "learning_rate": 0.00043046739365028346, + "loss": 4.6055, + "step": 2745 + }, + { + "epoch": 0.2681640625, + "grad_norm": 0.23467917740345, + "learning_rate": 0.00043041674401814563, + "loss": 4.625, + "step": 2746 + }, + { + "epoch": 0.26826171875, + "grad_norm": 0.23805610835552216, + "learning_rate": 0.0004303660793194093, + "loss": 4.625, + "step": 2747 + }, + { + "epoch": 0.268359375, + "grad_norm": 0.25022003054618835, + "learning_rate": 0.0004303153995589859, + "loss": 4.6094, + "step": 2748 + }, + { + "epoch": 0.26845703125, + "grad_norm": 0.22743438184261322, + "learning_rate": 0.0004302647047417888, + "loss": 4.6094, + "step": 2749 + }, + { + "epoch": 0.2685546875, + "grad_norm": 0.24583496153354645, + "learning_rate": 0.00043021399487273225, + "loss": 4.6484, + "step": 2750 + }, + { + "epoch": 0.26865234375, + "grad_norm": 0.27424246072769165, + "learning_rate": 0.0004301632699567324, + "loss": 4.6289, + "step": 2751 + }, + { + "epoch": 0.26875, + "grad_norm": 0.2630585730075836, + "learning_rate": 0.0004301125299987066, + "loss": 4.6172, + "step": 2752 + }, + { + "epoch": 0.26884765625, + "grad_norm": 0.2677791714668274, + "learning_rate": 0.0004300617750035738, + "loss": 4.6367, + "step": 2753 + }, + { + "epoch": 0.2689453125, + "grad_norm": 0.23147054016590118, + "learning_rate": 0.00043001100497625427, + "loss": 4.6094, + "step": 2754 + }, + { + "epoch": 0.26904296875, + "grad_norm": 0.23818613588809967, + "learning_rate": 0.00042996021992166997, + "loss": 4.6172, + "step": 2755 + }, + { + "epoch": 0.269140625, + "grad_norm": 0.25780919194221497, + "learning_rate": 0.0004299094198447439, + "loss": 4.6211, + "step": 2756 + }, + { + "epoch": 0.26923828125, + "grad_norm": 0.2769230604171753, + "learning_rate": 0.00042985860475040105, + "loss": 4.6523, + "step": 2757 + }, + { + "epoch": 0.2693359375, + "grad_norm": 0.2465202808380127, + "learning_rate": 0.00042980777464356754, + "loss": 4.625, + "step": 2758 + }, + { + "epoch": 0.26943359375, + "grad_norm": 0.23685935139656067, + "learning_rate": 0.0004297569295291709, + "loss": 4.6328, + "step": 2759 + }, + { + "epoch": 0.26953125, + "grad_norm": 0.26017898321151733, + "learning_rate": 0.0004297060694121403, + "loss": 4.6016, + "step": 2760 + }, + { + "epoch": 0.26962890625, + "grad_norm": 0.32777175307273865, + "learning_rate": 0.0004296551942974062, + "loss": 4.6406, + "step": 2761 + }, + { + "epoch": 0.2697265625, + "grad_norm": 0.4204234778881073, + "learning_rate": 0.00042960430418990074, + "loss": 4.6797, + "step": 2762 + }, + { + "epoch": 0.26982421875, + "grad_norm": 0.5878008008003235, + "learning_rate": 0.0004295533990945573, + "loss": 4.668, + "step": 2763 + }, + { + "epoch": 0.269921875, + "grad_norm": 0.631855845451355, + "learning_rate": 0.0004295024790163108, + "loss": 4.6172, + "step": 2764 + }, + { + 
"epoch": 0.27001953125, + "grad_norm": 0.3981356918811798, + "learning_rate": 0.0004294515439600975, + "loss": 4.625, + "step": 2765 + }, + { + "epoch": 0.2701171875, + "grad_norm": 0.39184731245040894, + "learning_rate": 0.00042940059393085526, + "loss": 4.6055, + "step": 2766 + }, + { + "epoch": 0.27021484375, + "grad_norm": 0.44468772411346436, + "learning_rate": 0.0004293496289335234, + "loss": 4.6133, + "step": 2767 + }, + { + "epoch": 0.2703125, + "grad_norm": 0.3035581111907959, + "learning_rate": 0.0004292986489730426, + "loss": 4.6016, + "step": 2768 + }, + { + "epoch": 0.27041015625, + "grad_norm": 0.34981152415275574, + "learning_rate": 0.000429247654054355, + "loss": 4.6406, + "step": 2769 + }, + { + "epoch": 0.2705078125, + "grad_norm": 0.3811149597167969, + "learning_rate": 0.0004291966441824042, + "loss": 4.6055, + "step": 2770 + }, + { + "epoch": 0.27060546875, + "grad_norm": 0.3009447753429413, + "learning_rate": 0.00042914561936213534, + "loss": 4.6562, + "step": 2771 + }, + { + "epoch": 0.270703125, + "grad_norm": 0.2834171652793884, + "learning_rate": 0.0004290945795984948, + "loss": 4.6289, + "step": 2772 + }, + { + "epoch": 0.27080078125, + "grad_norm": 0.3370898962020874, + "learning_rate": 0.0004290435248964306, + "loss": 4.6328, + "step": 2773 + }, + { + "epoch": 0.2708984375, + "grad_norm": 0.23335519433021545, + "learning_rate": 0.00042899245526089225, + "loss": 4.6367, + "step": 2774 + }, + { + "epoch": 0.27099609375, + "grad_norm": 0.2687765061855316, + "learning_rate": 0.00042894137069683045, + "loss": 4.6172, + "step": 2775 + }, + { + "epoch": 0.27109375, + "grad_norm": 0.26708272099494934, + "learning_rate": 0.0004288902712091976, + "loss": 4.582, + "step": 2776 + }, + { + "epoch": 0.27119140625, + "grad_norm": 0.24172525107860565, + "learning_rate": 0.0004288391568029474, + "loss": 4.6172, + "step": 2777 + }, + { + "epoch": 0.2712890625, + "grad_norm": 0.25685614347457886, + "learning_rate": 0.000428788027483035, + "loss": 4.6172, + "step": 2778 + }, + { + "epoch": 0.27138671875, + "grad_norm": 0.26206153631210327, + "learning_rate": 0.0004287368832544172, + "loss": 4.6562, + "step": 2779 + }, + { + "epoch": 0.271484375, + "grad_norm": 0.2265661060810089, + "learning_rate": 0.00042868572412205186, + "loss": 4.625, + "step": 2780 + }, + { + "epoch": 0.27158203125, + "grad_norm": 0.26890629529953003, + "learning_rate": 0.0004286345500908987, + "loss": 4.625, + "step": 2781 + }, + { + "epoch": 0.2716796875, + "grad_norm": 0.2556878924369812, + "learning_rate": 0.00042858336116591864, + "loss": 4.625, + "step": 2782 + }, + { + "epoch": 0.27177734375, + "grad_norm": 0.23777686059474945, + "learning_rate": 0.00042853215735207405, + "loss": 4.6055, + "step": 2783 + }, + { + "epoch": 0.271875, + "grad_norm": 0.26949837803840637, + "learning_rate": 0.00042848093865432887, + "loss": 4.6172, + "step": 2784 + }, + { + "epoch": 0.27197265625, + "grad_norm": 0.24976986646652222, + "learning_rate": 0.00042842970507764837, + "loss": 4.6172, + "step": 2785 + }, + { + "epoch": 0.2720703125, + "grad_norm": 0.2393733710050583, + "learning_rate": 0.00042837845662699927, + "loss": 4.6055, + "step": 2786 + }, + { + "epoch": 0.27216796875, + "grad_norm": 0.2875217795372009, + "learning_rate": 0.00042832719330734983, + "loss": 4.6484, + "step": 2787 + }, + { + "epoch": 0.272265625, + "grad_norm": 0.262439489364624, + "learning_rate": 0.00042827591512366954, + "loss": 4.5977, + "step": 2788 + }, + { + "epoch": 0.27236328125, + "grad_norm": 0.2530885338783264, + "learning_rate": 
0.0004282246220809297, + "loss": 4.6016, + "step": 2789 + }, + { + "epoch": 0.2724609375, + "grad_norm": 0.256287544965744, + "learning_rate": 0.00042817331418410257, + "loss": 4.6406, + "step": 2790 + }, + { + "epoch": 0.27255859375, + "grad_norm": 0.2764102518558502, + "learning_rate": 0.0004281219914381622, + "loss": 4.5977, + "step": 2791 + }, + { + "epoch": 0.27265625, + "grad_norm": 0.24099981784820557, + "learning_rate": 0.0004280706538480841, + "loss": 4.6367, + "step": 2792 + }, + { + "epoch": 0.27275390625, + "grad_norm": 0.2735477089881897, + "learning_rate": 0.00042801930141884493, + "loss": 4.6133, + "step": 2793 + }, + { + "epoch": 0.2728515625, + "grad_norm": 0.27607008814811707, + "learning_rate": 0.00042796793415542306, + "loss": 4.6328, + "step": 2794 + }, + { + "epoch": 0.27294921875, + "grad_norm": 0.28723159432411194, + "learning_rate": 0.0004279165520627982, + "loss": 4.6094, + "step": 2795 + }, + { + "epoch": 0.273046875, + "grad_norm": 0.2532249093055725, + "learning_rate": 0.0004278651551459514, + "loss": 4.5977, + "step": 2796 + }, + { + "epoch": 0.27314453125, + "grad_norm": 0.2496422827243805, + "learning_rate": 0.0004278137434098654, + "loss": 4.6094, + "step": 2797 + }, + { + "epoch": 0.2732421875, + "grad_norm": 0.2458890676498413, + "learning_rate": 0.0004277623168595241, + "loss": 4.5977, + "step": 2798 + }, + { + "epoch": 0.27333984375, + "grad_norm": 0.2640562951564789, + "learning_rate": 0.0004277108754999129, + "loss": 4.5859, + "step": 2799 + }, + { + "epoch": 0.2734375, + "grad_norm": 0.33778688311576843, + "learning_rate": 0.00042765941933601886, + "loss": 4.6172, + "step": 2800 + }, + { + "epoch": 0.27353515625, + "grad_norm": 0.3696017861366272, + "learning_rate": 0.00042760794837283023, + "loss": 4.6016, + "step": 2801 + }, + { + "epoch": 0.2736328125, + "grad_norm": 0.33612778782844543, + "learning_rate": 0.00042755646261533666, + "loss": 4.625, + "step": 2802 + }, + { + "epoch": 0.27373046875, + "grad_norm": 0.25933703780174255, + "learning_rate": 0.0004275049620685295, + "loss": 4.625, + "step": 2803 + }, + { + "epoch": 0.273828125, + "grad_norm": 0.2520463168621063, + "learning_rate": 0.0004274534467374013, + "loss": 4.6172, + "step": 2804 + }, + { + "epoch": 0.27392578125, + "grad_norm": 0.29942765831947327, + "learning_rate": 0.00042740191662694616, + "loss": 4.6172, + "step": 2805 + }, + { + "epoch": 0.2740234375, + "grad_norm": 0.31573325395584106, + "learning_rate": 0.00042735037174215963, + "loss": 4.6055, + "step": 2806 + }, + { + "epoch": 0.27412109375, + "grad_norm": 0.3361320495605469, + "learning_rate": 0.00042729881208803847, + "loss": 4.6133, + "step": 2807 + }, + { + "epoch": 0.27421875, + "grad_norm": 0.3160136044025421, + "learning_rate": 0.00042724723766958113, + "loss": 4.582, + "step": 2808 + }, + { + "epoch": 0.27431640625, + "grad_norm": 0.2704054117202759, + "learning_rate": 0.00042719564849178746, + "loss": 4.6211, + "step": 2809 + }, + { + "epoch": 0.2744140625, + "grad_norm": 0.23133155703544617, + "learning_rate": 0.0004271440445596586, + "loss": 4.6172, + "step": 2810 + }, + { + "epoch": 0.27451171875, + "grad_norm": 0.2838352918624878, + "learning_rate": 0.0004270924258781972, + "loss": 4.6133, + "step": 2811 + }, + { + "epoch": 0.274609375, + "grad_norm": 0.3360143303871155, + "learning_rate": 0.0004270407924524074, + "loss": 4.6484, + "step": 2812 + }, + { + "epoch": 0.27470703125, + "grad_norm": 0.30174335837364197, + "learning_rate": 0.00042698914428729464, + "loss": 4.6523, + "step": 2813 + }, + { + "epoch": 
0.2748046875, + "grad_norm": 0.22768191993236542, + "learning_rate": 0.00042693748138786594, + "loss": 4.6172, + "step": 2814 + }, + { + "epoch": 0.27490234375, + "grad_norm": 0.28229424357414246, + "learning_rate": 0.0004268858037591296, + "loss": 4.6094, + "step": 2815 + }, + { + "epoch": 0.275, + "grad_norm": 0.3260074555873871, + "learning_rate": 0.00042683411140609546, + "loss": 4.6016, + "step": 2816 + }, + { + "epoch": 0.27509765625, + "grad_norm": 0.3027007579803467, + "learning_rate": 0.0004267824043337747, + "loss": 4.6094, + "step": 2817 + }, + { + "epoch": 0.2751953125, + "grad_norm": 0.2403685450553894, + "learning_rate": 0.00042673068254718, + "loss": 4.6797, + "step": 2818 + }, + { + "epoch": 0.27529296875, + "grad_norm": 0.23937687277793884, + "learning_rate": 0.00042667894605132533, + "loss": 4.6094, + "step": 2819 + }, + { + "epoch": 0.275390625, + "grad_norm": 0.27221542596817017, + "learning_rate": 0.0004266271948512264, + "loss": 4.625, + "step": 2820 + }, + { + "epoch": 0.27548828125, + "grad_norm": 0.3142409324645996, + "learning_rate": 0.0004265754289519, + "loss": 4.625, + "step": 2821 + }, + { + "epoch": 0.2755859375, + "grad_norm": 0.27097201347351074, + "learning_rate": 0.00042652364835836455, + "loss": 4.6094, + "step": 2822 + }, + { + "epoch": 0.27568359375, + "grad_norm": 0.2152489274740219, + "learning_rate": 0.00042647185307563974, + "loss": 4.6055, + "step": 2823 + }, + { + "epoch": 0.27578125, + "grad_norm": 0.22030247747898102, + "learning_rate": 0.0004264200431087468, + "loss": 4.6602, + "step": 2824 + }, + { + "epoch": 0.27587890625, + "grad_norm": 0.23404797911643982, + "learning_rate": 0.00042636821846270837, + "loss": 4.6523, + "step": 2825 + }, + { + "epoch": 0.2759765625, + "grad_norm": 0.23591125011444092, + "learning_rate": 0.00042631637914254856, + "loss": 4.5859, + "step": 2826 + }, + { + "epoch": 0.27607421875, + "grad_norm": 0.26286473870277405, + "learning_rate": 0.00042626452515329274, + "loss": 4.6289, + "step": 2827 + }, + { + "epoch": 0.276171875, + "grad_norm": 0.25863903760910034, + "learning_rate": 0.0004262126564999678, + "loss": 4.5938, + "step": 2828 + }, + { + "epoch": 0.27626953125, + "grad_norm": 0.2455669790506363, + "learning_rate": 0.0004261607731876021, + "loss": 4.6367, + "step": 2829 + }, + { + "epoch": 0.2763671875, + "grad_norm": 0.25501546263694763, + "learning_rate": 0.00042610887522122543, + "loss": 4.6172, + "step": 2830 + }, + { + "epoch": 0.27646484375, + "grad_norm": 0.2564796805381775, + "learning_rate": 0.0004260569626058689, + "loss": 4.6055, + "step": 2831 + }, + { + "epoch": 0.2765625, + "grad_norm": 0.26266518235206604, + "learning_rate": 0.00042600503534656506, + "loss": 4.6094, + "step": 2832 + }, + { + "epoch": 0.27666015625, + "grad_norm": 0.2536938786506653, + "learning_rate": 0.0004259530934483479, + "loss": 4.5977, + "step": 2833 + }, + { + "epoch": 0.2767578125, + "grad_norm": 0.23659798502922058, + "learning_rate": 0.0004259011369162528, + "loss": 4.6289, + "step": 2834 + }, + { + "epoch": 0.27685546875, + "grad_norm": 0.23762735724449158, + "learning_rate": 0.00042584916575531676, + "loss": 4.6055, + "step": 2835 + }, + { + "epoch": 0.276953125, + "grad_norm": 0.24754686653614044, + "learning_rate": 0.0004257971799705778, + "loss": 4.6055, + "step": 2836 + }, + { + "epoch": 0.27705078125, + "grad_norm": 0.24599385261535645, + "learning_rate": 0.0004257451795670758, + "loss": 4.6367, + "step": 2837 + }, + { + "epoch": 0.2771484375, + "grad_norm": 0.22704187035560608, + "learning_rate": 
0.0004256931645498517, + "loss": 4.625, + "step": 2838 + }, + { + "epoch": 0.27724609375, + "grad_norm": 0.2323259860277176, + "learning_rate": 0.00042564113492394803, + "loss": 4.6367, + "step": 2839 + }, + { + "epoch": 0.27734375, + "grad_norm": 0.26240164041519165, + "learning_rate": 0.0004255890906944087, + "loss": 4.6094, + "step": 2840 + }, + { + "epoch": 0.27744140625, + "grad_norm": 0.27826008200645447, + "learning_rate": 0.0004255370318662792, + "loss": 4.6094, + "step": 2841 + }, + { + "epoch": 0.2775390625, + "grad_norm": 0.30446431040763855, + "learning_rate": 0.0004254849584446061, + "loss": 4.6055, + "step": 2842 + }, + { + "epoch": 0.27763671875, + "grad_norm": 0.3530751168727875, + "learning_rate": 0.0004254328704344376, + "loss": 4.6094, + "step": 2843 + }, + { + "epoch": 0.277734375, + "grad_norm": 0.3839414417743683, + "learning_rate": 0.0004253807678408233, + "loss": 4.6289, + "step": 2844 + }, + { + "epoch": 0.27783203125, + "grad_norm": 0.3700999319553375, + "learning_rate": 0.0004253286506688143, + "loss": 4.5938, + "step": 2845 + }, + { + "epoch": 0.2779296875, + "grad_norm": 0.29115769267082214, + "learning_rate": 0.0004252765189234627, + "loss": 4.6289, + "step": 2846 + }, + { + "epoch": 0.27802734375, + "grad_norm": 0.27739208936691284, + "learning_rate": 0.00042522437260982264, + "loss": 4.6289, + "step": 2847 + }, + { + "epoch": 0.278125, + "grad_norm": 0.34550684690475464, + "learning_rate": 0.0004251722117329493, + "loss": 4.6641, + "step": 2848 + }, + { + "epoch": 0.27822265625, + "grad_norm": 0.3762299716472626, + "learning_rate": 0.0004251200362978991, + "loss": 4.6367, + "step": 2849 + }, + { + "epoch": 0.2783203125, + "grad_norm": 0.3403177559375763, + "learning_rate": 0.0004250678463097304, + "loss": 4.6328, + "step": 2850 + }, + { + "epoch": 0.27841796875, + "grad_norm": 0.26145315170288086, + "learning_rate": 0.00042501564177350245, + "loss": 4.5977, + "step": 2851 + }, + { + "epoch": 0.278515625, + "grad_norm": 0.2693513333797455, + "learning_rate": 0.0004249634226942762, + "loss": 4.6055, + "step": 2852 + }, + { + "epoch": 0.27861328125, + "grad_norm": 0.3143466114997864, + "learning_rate": 0.0004249111890771139, + "loss": 4.5977, + "step": 2853 + }, + { + "epoch": 0.2787109375, + "grad_norm": 0.2941592037677765, + "learning_rate": 0.00042485894092707927, + "loss": 4.6328, + "step": 2854 + }, + { + "epoch": 0.27880859375, + "grad_norm": 0.22874799370765686, + "learning_rate": 0.0004248066782492374, + "loss": 4.6367, + "step": 2855 + }, + { + "epoch": 0.27890625, + "grad_norm": 0.26890864968299866, + "learning_rate": 0.00042475440104865485, + "loss": 4.6055, + "step": 2856 + }, + { + "epoch": 0.27900390625, + "grad_norm": 0.32747411727905273, + "learning_rate": 0.00042470210933039955, + "loss": 4.5977, + "step": 2857 + }, + { + "epoch": 0.2791015625, + "grad_norm": 0.3187156915664673, + "learning_rate": 0.00042464980309954073, + "loss": 4.625, + "step": 2858 + }, + { + "epoch": 0.27919921875, + "grad_norm": 0.25527843832969666, + "learning_rate": 0.00042459748236114915, + "loss": 4.6211, + "step": 2859 + }, + { + "epoch": 0.279296875, + "grad_norm": 0.2707161009311676, + "learning_rate": 0.00042454514712029703, + "loss": 4.6367, + "step": 2860 + }, + { + "epoch": 0.27939453125, + "grad_norm": 0.3107612431049347, + "learning_rate": 0.0004244927973820578, + "loss": 4.6289, + "step": 2861 + }, + { + "epoch": 0.2794921875, + "grad_norm": 0.26684579253196716, + "learning_rate": 0.00042444043315150647, + "loss": 4.6016, + "step": 2862 + }, + { + "epoch": 
0.27958984375, + "grad_norm": 0.24667590856552124, + "learning_rate": 0.0004243880544337194, + "loss": 4.6523, + "step": 2863 + }, + { + "epoch": 0.2796875, + "grad_norm": 0.29917576909065247, + "learning_rate": 0.0004243356612337744, + "loss": 4.5898, + "step": 2864 + }, + { + "epoch": 0.27978515625, + "grad_norm": 0.3097784221172333, + "learning_rate": 0.00042428325355675065, + "loss": 4.6172, + "step": 2865 + }, + { + "epoch": 0.2798828125, + "grad_norm": 0.3432117998600006, + "learning_rate": 0.00042423083140772863, + "loss": 4.5859, + "step": 2866 + }, + { + "epoch": 0.27998046875, + "grad_norm": 0.2963063418865204, + "learning_rate": 0.00042417839479179023, + "loss": 4.6406, + "step": 2867 + }, + { + "epoch": 0.280078125, + "grad_norm": 0.2290186583995819, + "learning_rate": 0.00042412594371401895, + "loss": 4.6172, + "step": 2868 + }, + { + "epoch": 0.28017578125, + "grad_norm": 0.2738897204399109, + "learning_rate": 0.0004240734781794996, + "loss": 4.625, + "step": 2869 + }, + { + "epoch": 0.2802734375, + "grad_norm": 0.30701547861099243, + "learning_rate": 0.0004240209981933183, + "loss": 4.6055, + "step": 2870 + }, + { + "epoch": 0.28037109375, + "grad_norm": 0.3018535375595093, + "learning_rate": 0.0004239685037605626, + "loss": 4.6367, + "step": 2871 + }, + { + "epoch": 0.28046875, + "grad_norm": 0.25332704186439514, + "learning_rate": 0.0004239159948863216, + "loss": 4.5859, + "step": 2872 + }, + { + "epoch": 0.28056640625, + "grad_norm": 0.24248512089252472, + "learning_rate": 0.0004238634715756855, + "loss": 4.5859, + "step": 2873 + }, + { + "epoch": 0.2806640625, + "grad_norm": 0.2648017406463623, + "learning_rate": 0.00042381093383374617, + "loss": 4.6172, + "step": 2874 + }, + { + "epoch": 0.28076171875, + "grad_norm": 0.21473804116249084, + "learning_rate": 0.0004237583816655969, + "loss": 4.6055, + "step": 2875 + }, + { + "epoch": 0.280859375, + "grad_norm": 0.21407602727413177, + "learning_rate": 0.00042370581507633197, + "loss": 4.5938, + "step": 2876 + }, + { + "epoch": 0.28095703125, + "grad_norm": 0.2501271367073059, + "learning_rate": 0.00042365323407104766, + "loss": 4.6328, + "step": 2877 + }, + { + "epoch": 0.2810546875, + "grad_norm": 0.2666343152523041, + "learning_rate": 0.00042360063865484125, + "loss": 4.6289, + "step": 2878 + }, + { + "epoch": 0.28115234375, + "grad_norm": 0.2929534912109375, + "learning_rate": 0.0004235480288328114, + "loss": 4.5977, + "step": 2879 + }, + { + "epoch": 0.28125, + "grad_norm": 0.26992547512054443, + "learning_rate": 0.00042349540461005837, + "loss": 4.6133, + "step": 2880 + }, + { + "epoch": 0.28134765625, + "grad_norm": 0.2666148543357849, + "learning_rate": 0.00042344276599168377, + "loss": 4.625, + "step": 2881 + }, + { + "epoch": 0.2814453125, + "grad_norm": 0.25626686215400696, + "learning_rate": 0.00042339011298279044, + "loss": 4.5938, + "step": 2882 + }, + { + "epoch": 0.28154296875, + "grad_norm": 0.3142610192298889, + "learning_rate": 0.0004233374455884828, + "loss": 4.6133, + "step": 2883 + }, + { + "epoch": 0.281640625, + "grad_norm": 0.3389338254928589, + "learning_rate": 0.00042328476381386655, + "loss": 4.6133, + "step": 2884 + }, + { + "epoch": 0.28173828125, + "grad_norm": 0.3196072578430176, + "learning_rate": 0.00042323206766404885, + "loss": 4.6211, + "step": 2885 + }, + { + "epoch": 0.2818359375, + "grad_norm": 0.2588571310043335, + "learning_rate": 0.0004231793571441383, + "loss": 4.6055, + "step": 2886 + }, + { + "epoch": 0.28193359375, + "grad_norm": 0.23146070539951324, + "learning_rate": 
0.0004231266322592447, + "loss": 4.6016, + "step": 2887 + }, + { + "epoch": 0.28203125, + "grad_norm": 0.2704641819000244, + "learning_rate": 0.00042307389301447947, + "loss": 4.6484, + "step": 2888 + }, + { + "epoch": 0.28212890625, + "grad_norm": 0.2760467827320099, + "learning_rate": 0.0004230211394149553, + "loss": 4.6211, + "step": 2889 + }, + { + "epoch": 0.2822265625, + "grad_norm": 0.23640359938144684, + "learning_rate": 0.00042296837146578627, + "loss": 4.6133, + "step": 2890 + }, + { + "epoch": 0.28232421875, + "grad_norm": 0.24645601212978363, + "learning_rate": 0.00042291558917208796, + "loss": 4.6133, + "step": 2891 + }, + { + "epoch": 0.282421875, + "grad_norm": 0.3032163381576538, + "learning_rate": 0.0004228627925389771, + "loss": 4.6133, + "step": 2892 + }, + { + "epoch": 0.28251953125, + "grad_norm": 0.36205634474754333, + "learning_rate": 0.000422809981571572, + "loss": 4.6016, + "step": 2893 + }, + { + "epoch": 0.2826171875, + "grad_norm": 0.3425475060939789, + "learning_rate": 0.0004227571562749925, + "loss": 4.6406, + "step": 2894 + }, + { + "epoch": 0.28271484375, + "grad_norm": 0.2646482586860657, + "learning_rate": 0.00042270431665435956, + "loss": 4.6172, + "step": 2895 + }, + { + "epoch": 0.2828125, + "grad_norm": 0.23810359835624695, + "learning_rate": 0.0004226514627147954, + "loss": 4.5938, + "step": 2896 + }, + { + "epoch": 0.28291015625, + "grad_norm": 0.25868889689445496, + "learning_rate": 0.00042259859446142425, + "loss": 4.6133, + "step": 2897 + }, + { + "epoch": 0.2830078125, + "grad_norm": 0.2972021698951721, + "learning_rate": 0.000422545711899371, + "loss": 4.5664, + "step": 2898 + }, + { + "epoch": 0.28310546875, + "grad_norm": 0.25669243931770325, + "learning_rate": 0.00042249281503376244, + "loss": 4.5938, + "step": 2899 + }, + { + "epoch": 0.283203125, + "grad_norm": 0.24249203503131866, + "learning_rate": 0.00042243990386972645, + "loss": 4.6094, + "step": 2900 + }, + { + "epoch": 0.28330078125, + "grad_norm": 0.23716723918914795, + "learning_rate": 0.0004223869784123925, + "loss": 4.6289, + "step": 2901 + }, + { + "epoch": 0.2833984375, + "grad_norm": 0.2518148422241211, + "learning_rate": 0.0004223340386668913, + "loss": 4.6602, + "step": 2902 + }, + { + "epoch": 0.28349609375, + "grad_norm": 0.23172102868556976, + "learning_rate": 0.00042228108463835496, + "loss": 4.6133, + "step": 2903 + }, + { + "epoch": 0.28359375, + "grad_norm": 0.22969980537891388, + "learning_rate": 0.00042222811633191716, + "loss": 4.6055, + "step": 2904 + }, + { + "epoch": 0.28369140625, + "grad_norm": 0.22918497025966644, + "learning_rate": 0.0004221751337527127, + "loss": 4.582, + "step": 2905 + }, + { + "epoch": 0.2837890625, + "grad_norm": 0.22180728614330292, + "learning_rate": 0.00042212213690587784, + "loss": 4.6602, + "step": 2906 + }, + { + "epoch": 0.28388671875, + "grad_norm": 0.2498670220375061, + "learning_rate": 0.00042206912579655033, + "loss": 4.6133, + "step": 2907 + }, + { + "epoch": 0.283984375, + "grad_norm": 0.22155894339084625, + "learning_rate": 0.0004220161004298693, + "loss": 4.6406, + "step": 2908 + }, + { + "epoch": 0.28408203125, + "grad_norm": 0.23338356614112854, + "learning_rate": 0.0004219630608109751, + "loss": 4.6172, + "step": 2909 + }, + { + "epoch": 0.2841796875, + "grad_norm": 0.23743927478790283, + "learning_rate": 0.00042191000694500957, + "loss": 4.6055, + "step": 2910 + }, + { + "epoch": 0.28427734375, + "grad_norm": 0.23867908120155334, + "learning_rate": 0.00042185693883711603, + "loss": 4.6055, + "step": 2911 + }, + { + 
"epoch": 0.284375, + "grad_norm": 0.25908198952674866, + "learning_rate": 0.00042180385649243893, + "loss": 4.6289, + "step": 2912 + }, + { + "epoch": 0.28447265625, + "grad_norm": 0.2900303304195404, + "learning_rate": 0.0004217507599161242, + "loss": 4.6211, + "step": 2913 + }, + { + "epoch": 0.2845703125, + "grad_norm": 0.3324315845966339, + "learning_rate": 0.0004216976491133195, + "loss": 4.6289, + "step": 2914 + }, + { + "epoch": 0.28466796875, + "grad_norm": 0.3559291362762451, + "learning_rate": 0.00042164452408917325, + "loss": 4.6289, + "step": 2915 + }, + { + "epoch": 0.284765625, + "grad_norm": 0.34815239906311035, + "learning_rate": 0.00042159138484883566, + "loss": 4.6328, + "step": 2916 + }, + { + "epoch": 0.28486328125, + "grad_norm": 0.31247657537460327, + "learning_rate": 0.0004215382313974582, + "loss": 4.625, + "step": 2917 + }, + { + "epoch": 0.2849609375, + "grad_norm": 0.2490827888250351, + "learning_rate": 0.0004214850637401939, + "loss": 4.6133, + "step": 2918 + }, + { + "epoch": 0.28505859375, + "grad_norm": 0.22063325345516205, + "learning_rate": 0.0004214318818821967, + "loss": 4.6562, + "step": 2919 + }, + { + "epoch": 0.28515625, + "grad_norm": 0.26033711433410645, + "learning_rate": 0.00042137868582862255, + "loss": 4.625, + "step": 2920 + }, + { + "epoch": 0.28525390625, + "grad_norm": 0.28995585441589355, + "learning_rate": 0.0004213254755846281, + "loss": 4.5938, + "step": 2921 + }, + { + "epoch": 0.2853515625, + "grad_norm": 0.356697678565979, + "learning_rate": 0.00042127225115537204, + "loss": 4.6133, + "step": 2922 + }, + { + "epoch": 0.28544921875, + "grad_norm": 0.35850605368614197, + "learning_rate": 0.0004212190125460139, + "loss": 4.6289, + "step": 2923 + }, + { + "epoch": 0.285546875, + "grad_norm": 0.2844766080379486, + "learning_rate": 0.00042116575976171495, + "loss": 4.6055, + "step": 2924 + }, + { + "epoch": 0.28564453125, + "grad_norm": 0.219746395945549, + "learning_rate": 0.0004211124928076377, + "loss": 4.6016, + "step": 2925 + }, + { + "epoch": 0.2857421875, + "grad_norm": 0.25024691224098206, + "learning_rate": 0.0004210592116889458, + "loss": 4.5742, + "step": 2926 + }, + { + "epoch": 0.28583984375, + "grad_norm": 0.295181542634964, + "learning_rate": 0.0004210059164108047, + "loss": 4.6328, + "step": 2927 + }, + { + "epoch": 0.2859375, + "grad_norm": 0.28918543457984924, + "learning_rate": 0.0004209526069783809, + "loss": 4.6211, + "step": 2928 + }, + { + "epoch": 0.28603515625, + "grad_norm": 0.2450653612613678, + "learning_rate": 0.0004208992833968425, + "loss": 4.6367, + "step": 2929 + }, + { + "epoch": 0.2861328125, + "grad_norm": 0.23315003514289856, + "learning_rate": 0.00042084594567135875, + "loss": 4.6055, + "step": 2930 + }, + { + "epoch": 0.28623046875, + "grad_norm": 0.27337029576301575, + "learning_rate": 0.00042079259380710046, + "loss": 4.5898, + "step": 2931 + }, + { + "epoch": 0.286328125, + "grad_norm": 0.31190812587738037, + "learning_rate": 0.0004207392278092397, + "loss": 4.5898, + "step": 2932 + }, + { + "epoch": 0.28642578125, + "grad_norm": 0.2923392355442047, + "learning_rate": 0.0004206858476829499, + "loss": 4.5977, + "step": 2933 + }, + { + "epoch": 0.2865234375, + "grad_norm": 0.2682334780693054, + "learning_rate": 0.0004206324534334059, + "loss": 4.6055, + "step": 2934 + }, + { + "epoch": 0.28662109375, + "grad_norm": 0.26648280024528503, + "learning_rate": 0.000420579045065784, + "loss": 4.6172, + "step": 2935 + }, + { + "epoch": 0.28671875, + "grad_norm": 0.2577674686908722, + "learning_rate": 
0.00042052562258526176, + "loss": 4.6289, + "step": 2936 + }, + { + "epoch": 0.28681640625, + "grad_norm": 0.2761692404747009, + "learning_rate": 0.0004204721859970181, + "loss": 4.6055, + "step": 2937 + }, + { + "epoch": 0.2869140625, + "grad_norm": 0.28293171525001526, + "learning_rate": 0.00042041873530623326, + "loss": 4.6133, + "step": 2938 + }, + { + "epoch": 0.28701171875, + "grad_norm": 0.21054372191429138, + "learning_rate": 0.000420365270518089, + "loss": 4.5859, + "step": 2939 + }, + { + "epoch": 0.287109375, + "grad_norm": 0.22607965767383575, + "learning_rate": 0.00042031179163776845, + "loss": 4.6094, + "step": 2940 + }, + { + "epoch": 0.28720703125, + "grad_norm": 0.2554449141025543, + "learning_rate": 0.00042025829867045584, + "loss": 4.582, + "step": 2941 + }, + { + "epoch": 0.2873046875, + "grad_norm": 0.2518548369407654, + "learning_rate": 0.00042020479162133714, + "loss": 4.6211, + "step": 2942 + }, + { + "epoch": 0.28740234375, + "grad_norm": 0.2109464406967163, + "learning_rate": 0.0004201512704955993, + "loss": 4.6094, + "step": 2943 + }, + { + "epoch": 0.2875, + "grad_norm": 0.22352716326713562, + "learning_rate": 0.000420097735298431, + "loss": 4.6055, + "step": 2944 + }, + { + "epoch": 0.28759765625, + "grad_norm": 0.23316019773483276, + "learning_rate": 0.00042004418603502203, + "loss": 4.6211, + "step": 2945 + }, + { + "epoch": 0.2876953125, + "grad_norm": 0.2316441684961319, + "learning_rate": 0.00041999062271056364, + "loss": 4.582, + "step": 2946 + }, + { + "epoch": 0.28779296875, + "grad_norm": 0.24526676535606384, + "learning_rate": 0.0004199370453302484, + "loss": 4.6055, + "step": 2947 + }, + { + "epoch": 0.287890625, + "grad_norm": 0.23232483863830566, + "learning_rate": 0.0004198834538992703, + "loss": 4.6133, + "step": 2948 + }, + { + "epoch": 0.28798828125, + "grad_norm": 0.25168493390083313, + "learning_rate": 0.00041982984842282476, + "loss": 4.6055, + "step": 2949 + }, + { + "epoch": 0.2880859375, + "grad_norm": 0.22243353724479675, + "learning_rate": 0.0004197762289061083, + "loss": 4.6289, + "step": 2950 + }, + { + "epoch": 0.28818359375, + "grad_norm": 0.23435381054878235, + "learning_rate": 0.000419722595354319, + "loss": 4.6094, + "step": 2951 + }, + { + "epoch": 0.28828125, + "grad_norm": 0.24319778382778168, + "learning_rate": 0.00041966894777265636, + "loss": 4.6211, + "step": 2952 + }, + { + "epoch": 0.28837890625, + "grad_norm": 0.28030213713645935, + "learning_rate": 0.00041961528616632124, + "loss": 4.6094, + "step": 2953 + }, + { + "epoch": 0.2884765625, + "grad_norm": 0.3685244023799896, + "learning_rate": 0.00041956161054051543, + "loss": 4.5938, + "step": 2954 + }, + { + "epoch": 0.28857421875, + "grad_norm": 0.48517942428588867, + "learning_rate": 0.00041950792090044277, + "loss": 4.6133, + "step": 2955 + }, + { + "epoch": 0.288671875, + "grad_norm": 0.4975493550300598, + "learning_rate": 0.00041945421725130786, + "loss": 4.6172, + "step": 2956 + }, + { + "epoch": 0.28876953125, + "grad_norm": 0.3716197907924652, + "learning_rate": 0.00041940049959831704, + "loss": 4.6211, + "step": 2957 + }, + { + "epoch": 0.2888671875, + "grad_norm": 0.24583058059215546, + "learning_rate": 0.00041934676794667784, + "loss": 4.625, + "step": 2958 + }, + { + "epoch": 0.28896484375, + "grad_norm": 0.3422689139842987, + "learning_rate": 0.0004192930223015991, + "loss": 4.5938, + "step": 2959 + }, + { + "epoch": 0.2890625, + "grad_norm": 0.33649131655693054, + "learning_rate": 0.00041923926266829127, + "loss": 4.6289, + "step": 2960 + }, + { + "epoch": 
0.28916015625, + "grad_norm": 0.24181030690670013, + "learning_rate": 0.00041918548905196584, + "loss": 4.6289, + "step": 2961 + }, + { + "epoch": 0.2892578125, + "grad_norm": 0.2525104880332947, + "learning_rate": 0.0004191317014578359, + "loss": 4.6172, + "step": 2962 + }, + { + "epoch": 0.28935546875, + "grad_norm": 0.3044237196445465, + "learning_rate": 0.00041907789989111563, + "loss": 4.6055, + "step": 2963 + }, + { + "epoch": 0.289453125, + "grad_norm": 0.26010990142822266, + "learning_rate": 0.0004190240843570208, + "loss": 4.5938, + "step": 2964 + }, + { + "epoch": 0.28955078125, + "grad_norm": 0.22154372930526733, + "learning_rate": 0.0004189702548607685, + "loss": 4.6055, + "step": 2965 + }, + { + "epoch": 0.2896484375, + "grad_norm": 0.26834234595298767, + "learning_rate": 0.00041891641140757725, + "loss": 4.6016, + "step": 2966 + }, + { + "epoch": 0.28974609375, + "grad_norm": 0.2684724032878876, + "learning_rate": 0.0004188625540026666, + "loss": 4.6133, + "step": 2967 + }, + { + "epoch": 0.28984375, + "grad_norm": 0.2453262358903885, + "learning_rate": 0.0004188086826512577, + "loss": 4.6289, + "step": 2968 + }, + { + "epoch": 0.28994140625, + "grad_norm": 0.22310975193977356, + "learning_rate": 0.00041875479735857307, + "loss": 4.6406, + "step": 2969 + }, + { + "epoch": 0.2900390625, + "grad_norm": 0.2295217663049698, + "learning_rate": 0.00041870089812983655, + "loss": 4.5938, + "step": 2970 + }, + { + "epoch": 0.29013671875, + "grad_norm": 0.24471132457256317, + "learning_rate": 0.00041864698497027315, + "loss": 4.5859, + "step": 2971 + }, + { + "epoch": 0.290234375, + "grad_norm": 0.2298986166715622, + "learning_rate": 0.0004185930578851096, + "loss": 4.5859, + "step": 2972 + }, + { + "epoch": 0.29033203125, + "grad_norm": 0.22361309826374054, + "learning_rate": 0.0004185391168795736, + "loss": 4.6133, + "step": 2973 + }, + { + "epoch": 0.2904296875, + "grad_norm": 0.20974531769752502, + "learning_rate": 0.00041848516195889445, + "loss": 4.6211, + "step": 2974 + }, + { + "epoch": 0.29052734375, + "grad_norm": 0.2296302765607834, + "learning_rate": 0.0004184311931283026, + "loss": 4.6172, + "step": 2975 + }, + { + "epoch": 0.290625, + "grad_norm": 0.2510583996772766, + "learning_rate": 0.0004183772103930302, + "loss": 4.6094, + "step": 2976 + }, + { + "epoch": 0.29072265625, + "grad_norm": 0.223464697599411, + "learning_rate": 0.0004183232137583102, + "loss": 4.582, + "step": 2977 + }, + { + "epoch": 0.2908203125, + "grad_norm": 0.22248142957687378, + "learning_rate": 0.00041826920322937753, + "loss": 4.6172, + "step": 2978 + }, + { + "epoch": 0.29091796875, + "grad_norm": 0.23871302604675293, + "learning_rate": 0.00041821517881146785, + "loss": 4.582, + "step": 2979 + }, + { + "epoch": 0.291015625, + "grad_norm": 0.2379457950592041, + "learning_rate": 0.00041816114050981863, + "loss": 4.625, + "step": 2980 + }, + { + "epoch": 0.29111328125, + "grad_norm": 0.27365514636039734, + "learning_rate": 0.0004181070883296685, + "loss": 4.625, + "step": 2981 + }, + { + "epoch": 0.2912109375, + "grad_norm": 0.27904605865478516, + "learning_rate": 0.0004180530222762574, + "loss": 4.5938, + "step": 2982 + }, + { + "epoch": 0.29130859375, + "grad_norm": 0.2858056426048279, + "learning_rate": 0.00041799894235482665, + "loss": 4.6094, + "step": 2983 + }, + { + "epoch": 0.29140625, + "grad_norm": 0.28733915090560913, + "learning_rate": 0.0004179448485706191, + "loss": 4.5977, + "step": 2984 + }, + { + "epoch": 0.29150390625, + "grad_norm": 0.29109078645706177, + "learning_rate": 
0.0004178907409288785, + "loss": 4.6211, + "step": 2985 + }, + { + "epoch": 0.2916015625, + "grad_norm": 0.24826256930828094, + "learning_rate": 0.0004178366194348504, + "loss": 4.5898, + "step": 2986 + }, + { + "epoch": 0.29169921875, + "grad_norm": 0.25492191314697266, + "learning_rate": 0.0004177824840937816, + "loss": 4.6055, + "step": 2987 + }, + { + "epoch": 0.291796875, + "grad_norm": 0.24259042739868164, + "learning_rate": 0.00041772833491091993, + "loss": 4.6172, + "step": 2988 + }, + { + "epoch": 0.29189453125, + "grad_norm": 0.22923807799816132, + "learning_rate": 0.0004176741718915149, + "loss": 4.5898, + "step": 2989 + }, + { + "epoch": 0.2919921875, + "grad_norm": 0.24221837520599365, + "learning_rate": 0.00041761999504081725, + "loss": 4.6328, + "step": 2990 + }, + { + "epoch": 0.29208984375, + "grad_norm": 0.2258581668138504, + "learning_rate": 0.0004175658043640791, + "loss": 4.6133, + "step": 2991 + }, + { + "epoch": 0.2921875, + "grad_norm": 0.21304674446582794, + "learning_rate": 0.00041751159986655375, + "loss": 4.6133, + "step": 2992 + }, + { + "epoch": 0.29228515625, + "grad_norm": 0.22667059302330017, + "learning_rate": 0.000417457381553496, + "loss": 4.6094, + "step": 2993 + }, + { + "epoch": 0.2923828125, + "grad_norm": 0.21091768145561218, + "learning_rate": 0.000417403149430162, + "loss": 4.5898, + "step": 2994 + }, + { + "epoch": 0.29248046875, + "grad_norm": 0.216877743601799, + "learning_rate": 0.0004173489035018092, + "loss": 4.6172, + "step": 2995 + }, + { + "epoch": 0.292578125, + "grad_norm": 0.24574807286262512, + "learning_rate": 0.0004172946437736963, + "loss": 4.6172, + "step": 2996 + }, + { + "epoch": 0.29267578125, + "grad_norm": 0.28834792971611023, + "learning_rate": 0.0004172403702510834, + "loss": 4.6016, + "step": 2997 + }, + { + "epoch": 0.2927734375, + "grad_norm": 0.343079537153244, + "learning_rate": 0.00041718608293923203, + "loss": 4.6133, + "step": 2998 + }, + { + "epoch": 0.29287109375, + "grad_norm": 0.4108991026878357, + "learning_rate": 0.00041713178184340496, + "loss": 4.5859, + "step": 2999 + }, + { + "epoch": 0.29296875, + "grad_norm": 0.4240403175354004, + "learning_rate": 0.0004170774669688662, + "loss": 4.6172, + "step": 3000 + }, + { + "epoch": 0.29306640625, + "grad_norm": 0.31272393465042114, + "learning_rate": 0.0004170231383208814, + "loss": 4.6133, + "step": 3001 + }, + { + "epoch": 0.2931640625, + "grad_norm": 0.23991656303405762, + "learning_rate": 0.0004169687959047173, + "loss": 4.6055, + "step": 3002 + }, + { + "epoch": 0.29326171875, + "grad_norm": 0.35421618819236755, + "learning_rate": 0.0004169144397256418, + "loss": 4.582, + "step": 3003 + }, + { + "epoch": 0.293359375, + "grad_norm": 0.3898334801197052, + "learning_rate": 0.0004168600697889247, + "loss": 4.6016, + "step": 3004 + }, + { + "epoch": 0.29345703125, + "grad_norm": 0.2908141016960144, + "learning_rate": 0.0004168056860998366, + "loss": 4.5898, + "step": 3005 + }, + { + "epoch": 0.2935546875, + "grad_norm": 0.2281796634197235, + "learning_rate": 0.00041675128866364966, + "loss": 4.5625, + "step": 3006 + }, + { + "epoch": 0.29365234375, + "grad_norm": 0.3215753138065338, + "learning_rate": 0.0004166968774856373, + "loss": 4.6289, + "step": 3007 + }, + { + "epoch": 0.29375, + "grad_norm": 0.3463136553764343, + "learning_rate": 0.0004166424525710744, + "loss": 4.6016, + "step": 3008 + }, + { + "epoch": 0.29384765625, + "grad_norm": 0.25969406962394714, + "learning_rate": 0.0004165880139252371, + "loss": 4.582, + "step": 3009 + }, + { + "epoch": 
0.2939453125, + "grad_norm": 0.2566334903240204, + "learning_rate": 0.00041653356155340275, + "loss": 4.6055, + "step": 3010 + }, + { + "epoch": 0.29404296875, + "grad_norm": 0.31129300594329834, + "learning_rate": 0.0004164790954608502, + "loss": 4.6133, + "step": 3011 + }, + { + "epoch": 0.294140625, + "grad_norm": 0.24892330169677734, + "learning_rate": 0.0004164246156528597, + "loss": 4.6328, + "step": 3012 + }, + { + "epoch": 0.29423828125, + "grad_norm": 0.21632547676563263, + "learning_rate": 0.00041637012213471245, + "loss": 4.582, + "step": 3013 + }, + { + "epoch": 0.2943359375, + "grad_norm": 0.2577113211154938, + "learning_rate": 0.00041631561491169134, + "loss": 4.625, + "step": 3014 + }, + { + "epoch": 0.29443359375, + "grad_norm": 0.2430252879858017, + "learning_rate": 0.00041626109398908053, + "loss": 4.6055, + "step": 3015 + }, + { + "epoch": 0.29453125, + "grad_norm": 0.20984789729118347, + "learning_rate": 0.0004162065593721653, + "loss": 4.5898, + "step": 3016 + }, + { + "epoch": 0.29462890625, + "grad_norm": 0.23375052213668823, + "learning_rate": 0.00041615201106623255, + "loss": 4.625, + "step": 3017 + }, + { + "epoch": 0.2947265625, + "grad_norm": 0.27082470059394836, + "learning_rate": 0.0004160974490765704, + "loss": 4.6328, + "step": 3018 + }, + { + "epoch": 0.29482421875, + "grad_norm": 0.2469756156206131, + "learning_rate": 0.0004160428734084681, + "loss": 4.6094, + "step": 3019 + }, + { + "epoch": 0.294921875, + "grad_norm": 0.23764625191688538, + "learning_rate": 0.0004159882840672166, + "loss": 4.6094, + "step": 3020 + }, + { + "epoch": 0.29501953125, + "grad_norm": 0.22840583324432373, + "learning_rate": 0.00041593368105810775, + "loss": 4.5938, + "step": 3021 + }, + { + "epoch": 0.2951171875, + "grad_norm": 0.2172738015651703, + "learning_rate": 0.00041587906438643506, + "loss": 4.5938, + "step": 3022 + }, + { + "epoch": 0.29521484375, + "grad_norm": 0.23322579264640808, + "learning_rate": 0.0004158244340574932, + "loss": 4.6172, + "step": 3023 + }, + { + "epoch": 0.2953125, + "grad_norm": 0.2534894645214081, + "learning_rate": 0.0004157697900765783, + "loss": 4.5938, + "step": 3024 + }, + { + "epoch": 0.29541015625, + "grad_norm": 0.24084530770778656, + "learning_rate": 0.00041571513244898764, + "loss": 4.6133, + "step": 3025 + }, + { + "epoch": 0.2955078125, + "grad_norm": 0.24023793637752533, + "learning_rate": 0.00041566046118001994, + "loss": 4.6172, + "step": 3026 + }, + { + "epoch": 0.29560546875, + "grad_norm": 0.24409601092338562, + "learning_rate": 0.00041560577627497517, + "loss": 4.5898, + "step": 3027 + }, + { + "epoch": 0.295703125, + "grad_norm": 0.23981541395187378, + "learning_rate": 0.00041555107773915464, + "loss": 4.6367, + "step": 3028 + }, + { + "epoch": 0.29580078125, + "grad_norm": 0.2710396349430084, + "learning_rate": 0.000415496365577861, + "loss": 4.5977, + "step": 3029 + }, + { + "epoch": 0.2958984375, + "grad_norm": 0.3157922327518463, + "learning_rate": 0.0004154416397963985, + "loss": 4.6094, + "step": 3030 + }, + { + "epoch": 0.29599609375, + "grad_norm": 0.32610803842544556, + "learning_rate": 0.000415386900400072, + "loss": 4.6367, + "step": 3031 + }, + { + "epoch": 0.29609375, + "grad_norm": 0.2904401421546936, + "learning_rate": 0.00041533214739418835, + "loss": 4.5898, + "step": 3032 + }, + { + "epoch": 0.29619140625, + "grad_norm": 0.23196280002593994, + "learning_rate": 0.0004152773807840555, + "loss": 4.6133, + "step": 3033 + }, + { + "epoch": 0.2962890625, + "grad_norm": 0.2660154402256012, + "learning_rate": 
0.0004152226005749826, + "loss": 4.5898, + "step": 3034 + }, + { + "epoch": 0.29638671875, + "grad_norm": 0.28788262605667114, + "learning_rate": 0.00041516780677228025, + "loss": 4.6289, + "step": 3035 + }, + { + "epoch": 0.296484375, + "grad_norm": 0.27093082666397095, + "learning_rate": 0.00041511299938126045, + "loss": 4.6094, + "step": 3036 + }, + { + "epoch": 0.29658203125, + "grad_norm": 0.24944792687892914, + "learning_rate": 0.00041505817840723625, + "loss": 4.6094, + "step": 3037 + }, + { + "epoch": 0.2966796875, + "grad_norm": 0.2273198813199997, + "learning_rate": 0.00041500334385552223, + "loss": 4.6094, + "step": 3038 + }, + { + "epoch": 0.29677734375, + "grad_norm": 0.21648380160331726, + "learning_rate": 0.00041494849573143425, + "loss": 4.5898, + "step": 3039 + }, + { + "epoch": 0.296875, + "grad_norm": 0.24864619970321655, + "learning_rate": 0.0004148936340402894, + "loss": 4.5977, + "step": 3040 + }, + { + "epoch": 0.29697265625, + "grad_norm": 0.2995125651359558, + "learning_rate": 0.00041483875878740623, + "loss": 4.6211, + "step": 3041 + }, + { + "epoch": 0.2970703125, + "grad_norm": 0.29794636368751526, + "learning_rate": 0.0004147838699781045, + "loss": 4.5859, + "step": 3042 + }, + { + "epoch": 0.29716796875, + "grad_norm": 0.2821556329727173, + "learning_rate": 0.00041472896761770526, + "loss": 4.625, + "step": 3043 + }, + { + "epoch": 0.297265625, + "grad_norm": 0.24617761373519897, + "learning_rate": 0.00041467405171153097, + "loss": 4.5977, + "step": 3044 + }, + { + "epoch": 0.29736328125, + "grad_norm": 0.22228693962097168, + "learning_rate": 0.0004146191222649053, + "loss": 4.6484, + "step": 3045 + }, + { + "epoch": 0.2974609375, + "grad_norm": 0.24912919104099274, + "learning_rate": 0.00041456417928315334, + "loss": 4.5859, + "step": 3046 + }, + { + "epoch": 0.29755859375, + "grad_norm": 0.30771324038505554, + "learning_rate": 0.00041450922277160143, + "loss": 4.582, + "step": 3047 + }, + { + "epoch": 0.29765625, + "grad_norm": 0.31349432468414307, + "learning_rate": 0.0004144542527355772, + "loss": 4.6016, + "step": 3048 + }, + { + "epoch": 0.29775390625, + "grad_norm": 0.2630326747894287, + "learning_rate": 0.00041439926918040965, + "loss": 4.6172, + "step": 3049 + }, + { + "epoch": 0.2978515625, + "grad_norm": 0.253678560256958, + "learning_rate": 0.0004143442721114291, + "loss": 4.6328, + "step": 3050 + }, + { + "epoch": 0.29794921875, + "grad_norm": 0.2372310608625412, + "learning_rate": 0.00041428926153396704, + "loss": 4.5859, + "step": 3051 + }, + { + "epoch": 0.298046875, + "grad_norm": 0.23570166528224945, + "learning_rate": 0.00041423423745335654, + "loss": 4.5977, + "step": 3052 + }, + { + "epoch": 0.29814453125, + "grad_norm": 0.21423131227493286, + "learning_rate": 0.00041417919987493164, + "loss": 4.6211, + "step": 3053 + }, + { + "epoch": 0.2982421875, + "grad_norm": 0.2505544126033783, + "learning_rate": 0.00041412414880402795, + "loss": 4.5938, + "step": 3054 + }, + { + "epoch": 0.29833984375, + "grad_norm": 0.25505703687667847, + "learning_rate": 0.00041406908424598224, + "loss": 4.6172, + "step": 3055 + }, + { + "epoch": 0.2984375, + "grad_norm": 0.2795094847679138, + "learning_rate": 0.00041401400620613265, + "loss": 4.5938, + "step": 3056 + }, + { + "epoch": 0.29853515625, + "grad_norm": 0.2850538492202759, + "learning_rate": 0.00041395891468981865, + "loss": 4.6289, + "step": 3057 + }, + { + "epoch": 0.2986328125, + "grad_norm": 0.281779408454895, + "learning_rate": 0.00041390380970238107, + "loss": 4.625, + "step": 3058 + }, + { + 
"epoch": 0.29873046875, + "grad_norm": 0.2647131085395813, + "learning_rate": 0.0004138486912491618, + "loss": 4.6094, + "step": 3059 + }, + { + "epoch": 0.298828125, + "grad_norm": 0.22464655339717865, + "learning_rate": 0.0004137935593355043, + "loss": 4.5898, + "step": 3060 + }, + { + "epoch": 0.29892578125, + "grad_norm": 0.22312195599079132, + "learning_rate": 0.0004137384139667532, + "loss": 4.5898, + "step": 3061 + }, + { + "epoch": 0.2990234375, + "grad_norm": 0.20912885665893555, + "learning_rate": 0.0004136832551482545, + "loss": 4.6289, + "step": 3062 + }, + { + "epoch": 0.29912109375, + "grad_norm": 0.20881575345993042, + "learning_rate": 0.0004136280828853555, + "loss": 4.6133, + "step": 3063 + }, + { + "epoch": 0.29921875, + "grad_norm": 0.22350968420505524, + "learning_rate": 0.00041357289718340473, + "loss": 4.6094, + "step": 3064 + }, + { + "epoch": 0.29931640625, + "grad_norm": 0.2056812345981598, + "learning_rate": 0.00041351769804775205, + "loss": 4.5859, + "step": 3065 + }, + { + "epoch": 0.2994140625, + "grad_norm": 0.21826857328414917, + "learning_rate": 0.00041346248548374865, + "loss": 4.5859, + "step": 3066 + }, + { + "epoch": 0.29951171875, + "grad_norm": 0.22413699328899384, + "learning_rate": 0.00041340725949674714, + "loss": 4.6133, + "step": 3067 + }, + { + "epoch": 0.299609375, + "grad_norm": 0.2368941605091095, + "learning_rate": 0.0004133520200921011, + "loss": 4.6172, + "step": 3068 + }, + { + "epoch": 0.29970703125, + "grad_norm": 0.22185541689395905, + "learning_rate": 0.0004132967672751656, + "loss": 4.6172, + "step": 3069 + }, + { + "epoch": 0.2998046875, + "grad_norm": 0.23173803091049194, + "learning_rate": 0.0004132415010512973, + "loss": 4.6211, + "step": 3070 + }, + { + "epoch": 0.29990234375, + "grad_norm": 0.22552303969860077, + "learning_rate": 0.00041318622142585374, + "loss": 4.5977, + "step": 3071 + }, + { + "epoch": 0.3, + "grad_norm": 0.22088563442230225, + "learning_rate": 0.00041313092840419374, + "loss": 4.5742, + "step": 3072 + }, + { + "epoch": 0.30009765625, + "grad_norm": 0.2542416751384735, + "learning_rate": 0.0004130756219916779, + "loss": 4.5938, + "step": 3073 + }, + { + "epoch": 0.3001953125, + "grad_norm": 0.3776582181453705, + "learning_rate": 0.00041302030219366755, + "loss": 4.6133, + "step": 3074 + }, + { + "epoch": 0.30029296875, + "grad_norm": 0.47800445556640625, + "learning_rate": 0.00041296496901552563, + "loss": 4.6211, + "step": 3075 + }, + { + "epoch": 0.300390625, + "grad_norm": 0.44413772225379944, + "learning_rate": 0.0004129096224626163, + "loss": 4.5898, + "step": 3076 + }, + { + "epoch": 0.30048828125, + "grad_norm": 0.27604517340660095, + "learning_rate": 0.00041285426254030513, + "loss": 4.5781, + "step": 3077 + }, + { + "epoch": 0.3005859375, + "grad_norm": 0.30996596813201904, + "learning_rate": 0.00041279888925395875, + "loss": 4.6094, + "step": 3078 + }, + { + "epoch": 0.30068359375, + "grad_norm": 0.3814452886581421, + "learning_rate": 0.00041274350260894543, + "loss": 4.625, + "step": 3079 + }, + { + "epoch": 0.30078125, + "grad_norm": 0.27754315733909607, + "learning_rate": 0.0004126881026106343, + "loss": 4.6133, + "step": 3080 + }, + { + "epoch": 0.30087890625, + "grad_norm": 0.265481561422348, + "learning_rate": 0.0004126326892643961, + "loss": 4.6523, + "step": 3081 + }, + { + "epoch": 0.3009765625, + "grad_norm": 0.324713796377182, + "learning_rate": 0.00041257726257560293, + "loss": 4.5781, + "step": 3082 + }, + { + "epoch": 0.30107421875, + "grad_norm": 0.3005579710006714, + "learning_rate": 
0.0004125218225496277, + "loss": 4.6133, + "step": 3083 + }, + { + "epoch": 0.301171875, + "grad_norm": 0.20995526015758514, + "learning_rate": 0.0004124663691918453, + "loss": 4.6016, + "step": 3084 + }, + { + "epoch": 0.30126953125, + "grad_norm": 0.25372254848480225, + "learning_rate": 0.0004124109025076313, + "loss": 4.5977, + "step": 3085 + }, + { + "epoch": 0.3013671875, + "grad_norm": 0.25096240639686584, + "learning_rate": 0.00041235542250236295, + "loss": 4.5977, + "step": 3086 + }, + { + "epoch": 0.30146484375, + "grad_norm": 0.225476935505867, + "learning_rate": 0.00041229992918141863, + "loss": 4.6211, + "step": 3087 + }, + { + "epoch": 0.3015625, + "grad_norm": 0.22451844811439514, + "learning_rate": 0.000412244422550178, + "loss": 4.6016, + "step": 3088 + }, + { + "epoch": 0.30166015625, + "grad_norm": 0.260402113199234, + "learning_rate": 0.00041218890261402206, + "loss": 4.5977, + "step": 3089 + }, + { + "epoch": 0.3017578125, + "grad_norm": 0.2285398542881012, + "learning_rate": 0.00041213336937833317, + "loss": 4.625, + "step": 3090 + }, + { + "epoch": 0.30185546875, + "grad_norm": 0.24618355929851532, + "learning_rate": 0.00041207782284849475, + "loss": 4.5703, + "step": 3091 + }, + { + "epoch": 0.301953125, + "grad_norm": 0.2767787277698517, + "learning_rate": 0.00041202226302989193, + "loss": 4.582, + "step": 3092 + }, + { + "epoch": 0.30205078125, + "grad_norm": 0.2765655219554901, + "learning_rate": 0.00041196668992791064, + "loss": 4.5625, + "step": 3093 + }, + { + "epoch": 0.3021484375, + "grad_norm": 0.26128485798835754, + "learning_rate": 0.0004119111035479383, + "loss": 4.5938, + "step": 3094 + }, + { + "epoch": 0.30224609375, + "grad_norm": 0.23520393669605255, + "learning_rate": 0.0004118555038953636, + "loss": 4.5664, + "step": 3095 + }, + { + "epoch": 0.30234375, + "grad_norm": 0.25659066438674927, + "learning_rate": 0.00041179989097557676, + "loss": 4.5781, + "step": 3096 + }, + { + "epoch": 0.30244140625, + "grad_norm": 0.3043178617954254, + "learning_rate": 0.00041174426479396894, + "loss": 4.6094, + "step": 3097 + }, + { + "epoch": 0.3025390625, + "grad_norm": 0.28423061966896057, + "learning_rate": 0.00041168862535593273, + "loss": 4.6094, + "step": 3098 + }, + { + "epoch": 0.30263671875, + "grad_norm": 0.25208911299705505, + "learning_rate": 0.00041163297266686203, + "loss": 4.582, + "step": 3099 + }, + { + "epoch": 0.302734375, + "grad_norm": 0.2521250247955322, + "learning_rate": 0.0004115773067321518, + "loss": 4.6406, + "step": 3100 + }, + { + "epoch": 0.30283203125, + "grad_norm": 0.24810180068016052, + "learning_rate": 0.0004115216275571988, + "loss": 4.5898, + "step": 3101 + }, + { + "epoch": 0.3029296875, + "grad_norm": 0.2551381587982178, + "learning_rate": 0.00041146593514740056, + "loss": 4.5781, + "step": 3102 + }, + { + "epoch": 0.30302734375, + "grad_norm": 0.24140433967113495, + "learning_rate": 0.00041141022950815604, + "loss": 4.5938, + "step": 3103 + }, + { + "epoch": 0.303125, + "grad_norm": 0.25926247239112854, + "learning_rate": 0.0004113545106448657, + "loss": 4.6445, + "step": 3104 + }, + { + "epoch": 0.30322265625, + "grad_norm": 0.22866185009479523, + "learning_rate": 0.0004112987785629309, + "loss": 4.6211, + "step": 3105 + }, + { + "epoch": 0.3033203125, + "grad_norm": 0.21660327911376953, + "learning_rate": 0.00041124303326775465, + "loss": 4.5781, + "step": 3106 + }, + { + "epoch": 0.30341796875, + "grad_norm": 0.2487567663192749, + "learning_rate": 0.000411187274764741, + "loss": 4.5938, + "step": 3107 + }, + { + "epoch": 
0.303515625, + "grad_norm": 0.24039095640182495, + "learning_rate": 0.00041113150305929525, + "loss": 4.6016, + "step": 3108 + }, + { + "epoch": 0.30361328125, + "grad_norm": 0.23677201569080353, + "learning_rate": 0.00041107571815682427, + "loss": 4.5859, + "step": 3109 + }, + { + "epoch": 0.3037109375, + "grad_norm": 0.226942241191864, + "learning_rate": 0.00041101992006273606, + "loss": 4.582, + "step": 3110 + }, + { + "epoch": 0.30380859375, + "grad_norm": 0.23053967952728271, + "learning_rate": 0.0004109641087824396, + "loss": 4.6016, + "step": 3111 + }, + { + "epoch": 0.30390625, + "grad_norm": 0.2265138179063797, + "learning_rate": 0.00041090828432134574, + "loss": 4.582, + "step": 3112 + }, + { + "epoch": 0.30400390625, + "grad_norm": 0.2620256543159485, + "learning_rate": 0.00041085244668486603, + "loss": 4.6094, + "step": 3113 + }, + { + "epoch": 0.3041015625, + "grad_norm": 0.24052895605564117, + "learning_rate": 0.0004107965958784137, + "loss": 4.6094, + "step": 3114 + }, + { + "epoch": 0.30419921875, + "grad_norm": 0.2501332759857178, + "learning_rate": 0.00041074073190740305, + "loss": 4.6055, + "step": 3115 + }, + { + "epoch": 0.304296875, + "grad_norm": 0.22214585542678833, + "learning_rate": 0.0004106848547772497, + "loss": 4.6172, + "step": 3116 + }, + { + "epoch": 0.30439453125, + "grad_norm": 0.23473069071769714, + "learning_rate": 0.0004106289644933706, + "loss": 4.6211, + "step": 3117 + }, + { + "epoch": 0.3044921875, + "grad_norm": 0.2530396282672882, + "learning_rate": 0.0004105730610611839, + "loss": 4.5977, + "step": 3118 + }, + { + "epoch": 0.30458984375, + "grad_norm": 0.2762342393398285, + "learning_rate": 0.000410517144486109, + "loss": 4.6055, + "step": 3119 + }, + { + "epoch": 0.3046875, + "grad_norm": 0.34767040610313416, + "learning_rate": 0.0004104612147735668, + "loss": 4.6719, + "step": 3120 + }, + { + "epoch": 0.30478515625, + "grad_norm": 0.42801040410995483, + "learning_rate": 0.00041040527192897914, + "loss": 4.5898, + "step": 3121 + }, + { + "epoch": 0.3048828125, + "grad_norm": 0.4466807544231415, + "learning_rate": 0.00041034931595776934, + "loss": 4.6172, + "step": 3122 + }, + { + "epoch": 0.30498046875, + "grad_norm": 0.3470209836959839, + "learning_rate": 0.00041029334686536206, + "loss": 4.5898, + "step": 3123 + }, + { + "epoch": 0.305078125, + "grad_norm": 0.2505682706832886, + "learning_rate": 0.00041023736465718296, + "loss": 4.5977, + "step": 3124 + }, + { + "epoch": 0.30517578125, + "grad_norm": 0.2896220088005066, + "learning_rate": 0.0004101813693386593, + "loss": 4.6016, + "step": 3125 + }, + { + "epoch": 0.3052734375, + "grad_norm": 0.3011440336704254, + "learning_rate": 0.00041012536091521927, + "loss": 4.625, + "step": 3126 + }, + { + "epoch": 0.30537109375, + "grad_norm": 0.2665983736515045, + "learning_rate": 0.00041006933939229264, + "loss": 4.5859, + "step": 3127 + }, + { + "epoch": 0.30546875, + "grad_norm": 0.24819502234458923, + "learning_rate": 0.0004100133047753103, + "loss": 4.6406, + "step": 3128 + }, + { + "epoch": 0.30556640625, + "grad_norm": 0.23398233950138092, + "learning_rate": 0.00040995725706970436, + "loss": 4.6094, + "step": 3129 + }, + { + "epoch": 0.3056640625, + "grad_norm": 0.2516607642173767, + "learning_rate": 0.00040990119628090836, + "loss": 4.6055, + "step": 3130 + }, + { + "epoch": 0.30576171875, + "grad_norm": 0.2212022840976715, + "learning_rate": 0.00040984512241435696, + "loss": 4.6094, + "step": 3131 + }, + { + "epoch": 0.305859375, + "grad_norm": 0.2298537790775299, + "learning_rate": 
0.0004097890354754861, + "loss": 4.5781, + "step": 3132 + }, + { + "epoch": 0.30595703125, + "grad_norm": 0.2298981100320816, + "learning_rate": 0.00040973293546973303, + "loss": 4.6055, + "step": 3133 + }, + { + "epoch": 0.3060546875, + "grad_norm": 0.23705105483531952, + "learning_rate": 0.0004096768224025364, + "loss": 4.6289, + "step": 3134 + }, + { + "epoch": 0.30615234375, + "grad_norm": 0.2321608066558838, + "learning_rate": 0.0004096206962793358, + "loss": 4.625, + "step": 3135 + }, + { + "epoch": 0.30625, + "grad_norm": 0.21394218504428864, + "learning_rate": 0.00040956455710557235, + "loss": 4.5977, + "step": 3136 + }, + { + "epoch": 0.30634765625, + "grad_norm": 0.2466316521167755, + "learning_rate": 0.0004095084048866885, + "loss": 4.6094, + "step": 3137 + }, + { + "epoch": 0.3064453125, + "grad_norm": 0.22929345071315765, + "learning_rate": 0.00040945223962812754, + "loss": 4.5977, + "step": 3138 + }, + { + "epoch": 0.30654296875, + "grad_norm": 0.22851169109344482, + "learning_rate": 0.0004093960613353346, + "loss": 4.5938, + "step": 3139 + }, + { + "epoch": 0.306640625, + "grad_norm": 0.2288745492696762, + "learning_rate": 0.00040933987001375564, + "loss": 4.5859, + "step": 3140 + }, + { + "epoch": 0.30673828125, + "grad_norm": 0.2141110599040985, + "learning_rate": 0.00040928366566883806, + "loss": 4.6172, + "step": 3141 + }, + { + "epoch": 0.3068359375, + "grad_norm": 0.2379048466682434, + "learning_rate": 0.0004092274483060305, + "loss": 4.5742, + "step": 3142 + }, + { + "epoch": 0.30693359375, + "grad_norm": 0.2746284306049347, + "learning_rate": 0.0004091712179307828, + "loss": 4.6055, + "step": 3143 + }, + { + "epoch": 0.30703125, + "grad_norm": 0.22405464947223663, + "learning_rate": 0.0004091149745485461, + "loss": 4.5664, + "step": 3144 + }, + { + "epoch": 0.30712890625, + "grad_norm": 0.22005097568035126, + "learning_rate": 0.0004090587181647729, + "loss": 4.6094, + "step": 3145 + }, + { + "epoch": 0.3072265625, + "grad_norm": 0.21633227169513702, + "learning_rate": 0.00040900244878491683, + "loss": 4.6094, + "step": 3146 + }, + { + "epoch": 0.30732421875, + "grad_norm": 0.2605668902397156, + "learning_rate": 0.00040894616641443284, + "loss": 4.5859, + "step": 3147 + }, + { + "epoch": 0.307421875, + "grad_norm": 0.2578687071800232, + "learning_rate": 0.0004088898710587772, + "loss": 4.5859, + "step": 3148 + }, + { + "epoch": 0.30751953125, + "grad_norm": 0.29490926861763, + "learning_rate": 0.0004088335627234071, + "loss": 4.6367, + "step": 3149 + }, + { + "epoch": 0.3076171875, + "grad_norm": 0.3420464098453522, + "learning_rate": 0.00040877724141378147, + "loss": 4.6133, + "step": 3150 + }, + { + "epoch": 0.30771484375, + "grad_norm": 0.4831574261188507, + "learning_rate": 0.0004087209071353604, + "loss": 4.6602, + "step": 3151 + }, + { + "epoch": 0.3078125, + "grad_norm": 0.3242834508419037, + "learning_rate": 0.0004086645598936047, + "loss": 4.6172, + "step": 3152 + }, + { + "epoch": 0.30791015625, + "grad_norm": 0.2743362784385681, + "learning_rate": 0.00040860819969397726, + "loss": 4.6055, + "step": 3153 + }, + { + "epoch": 0.3080078125, + "grad_norm": 0.21650637686252594, + "learning_rate": 0.0004085518265419416, + "loss": 4.6211, + "step": 3154 + }, + { + "epoch": 0.30810546875, + "grad_norm": 0.2714868187904358, + "learning_rate": 0.0004084954404429629, + "loss": 4.6055, + "step": 3155 + }, + { + "epoch": 0.308203125, + "grad_norm": 0.3045331835746765, + "learning_rate": 0.0004084390414025071, + "loss": 4.5938, + "step": 3156 + }, + { + "epoch": 
0.30830078125, + "grad_norm": 0.2940587103366852, + "learning_rate": 0.00040838262942604193, + "loss": 4.6055, + "step": 3157 + }, + { + "epoch": 0.3083984375, + "grad_norm": 0.26624470949172974, + "learning_rate": 0.0004083262045190362, + "loss": 4.6055, + "step": 3158 + }, + { + "epoch": 0.30849609375, + "grad_norm": 0.21879519522190094, + "learning_rate": 0.0004082697666869598, + "loss": 4.6211, + "step": 3159 + }, + { + "epoch": 0.30859375, + "grad_norm": 0.2236805111169815, + "learning_rate": 0.00040821331593528395, + "loss": 4.5781, + "step": 3160 + }, + { + "epoch": 0.30869140625, + "grad_norm": 0.2388748675584793, + "learning_rate": 0.00040815685226948135, + "loss": 4.582, + "step": 3161 + }, + { + "epoch": 0.3087890625, + "grad_norm": 0.23700404167175293, + "learning_rate": 0.00040810037569502554, + "loss": 4.6055, + "step": 3162 + }, + { + "epoch": 0.30888671875, + "grad_norm": 0.23144055902957916, + "learning_rate": 0.00040804388621739176, + "loss": 4.5781, + "step": 3163 + }, + { + "epoch": 0.308984375, + "grad_norm": 0.2227616161108017, + "learning_rate": 0.0004079873838420562, + "loss": 4.6211, + "step": 3164 + }, + { + "epoch": 0.30908203125, + "grad_norm": 0.2652563452720642, + "learning_rate": 0.00040793086857449627, + "loss": 4.5938, + "step": 3165 + }, + { + "epoch": 0.3091796875, + "grad_norm": 0.28821277618408203, + "learning_rate": 0.00040787434042019086, + "loss": 4.6016, + "step": 3166 + }, + { + "epoch": 0.30927734375, + "grad_norm": 0.33470702171325684, + "learning_rate": 0.00040781779938462, + "loss": 4.5898, + "step": 3167 + }, + { + "epoch": 0.309375, + "grad_norm": 0.39411285519599915, + "learning_rate": 0.00040776124547326495, + "loss": 4.6133, + "step": 3168 + }, + { + "epoch": 0.30947265625, + "grad_norm": 0.32375186681747437, + "learning_rate": 0.0004077046786916081, + "loss": 4.6055, + "step": 3169 + }, + { + "epoch": 0.3095703125, + "grad_norm": 0.23588910698890686, + "learning_rate": 0.0004076480990451334, + "loss": 4.6055, + "step": 3170 + }, + { + "epoch": 0.30966796875, + "grad_norm": 0.22065633535385132, + "learning_rate": 0.00040759150653932574, + "loss": 4.5977, + "step": 3171 + }, + { + "epoch": 0.309765625, + "grad_norm": 0.24864408373832703, + "learning_rate": 0.0004075349011796715, + "loss": 4.5859, + "step": 3172 + }, + { + "epoch": 0.30986328125, + "grad_norm": 0.2823388874530792, + "learning_rate": 0.00040747828297165806, + "loss": 4.5664, + "step": 3173 + }, + { + "epoch": 0.3099609375, + "grad_norm": 0.23022131621837616, + "learning_rate": 0.0004074216519207742, + "loss": 4.5742, + "step": 3174 + }, + { + "epoch": 0.31005859375, + "grad_norm": 0.22173774242401123, + "learning_rate": 0.00040736500803250996, + "loss": 4.5742, + "step": 3175 + }, + { + "epoch": 0.31015625, + "grad_norm": 0.2631884515285492, + "learning_rate": 0.0004073083513123566, + "loss": 4.5898, + "step": 3176 + }, + { + "epoch": 0.31025390625, + "grad_norm": 0.3124387562274933, + "learning_rate": 0.0004072516817658065, + "loss": 4.5977, + "step": 3177 + }, + { + "epoch": 0.3103515625, + "grad_norm": 0.3834969699382782, + "learning_rate": 0.0004071949993983534, + "loss": 4.5859, + "step": 3178 + }, + { + "epoch": 0.31044921875, + "grad_norm": 0.3578173816204071, + "learning_rate": 0.00040713830421549235, + "loss": 4.5977, + "step": 3179 + }, + { + "epoch": 0.310546875, + "grad_norm": 0.30756324529647827, + "learning_rate": 0.00040708159622271963, + "loss": 4.5781, + "step": 3180 + }, + { + "epoch": 0.31064453125, + "grad_norm": 0.28106510639190674, + "learning_rate": 
0.00040702487542553244, + "loss": 4.6133, + "step": 3181 + }, + { + "epoch": 0.3107421875, + "grad_norm": 0.22386769950389862, + "learning_rate": 0.0004069681418294298, + "loss": 4.5625, + "step": 3182 + }, + { + "epoch": 0.31083984375, + "grad_norm": 0.2369086742401123, + "learning_rate": 0.00040691139543991143, + "loss": 4.6133, + "step": 3183 + }, + { + "epoch": 0.3109375, + "grad_norm": 0.22481054067611694, + "learning_rate": 0.0004068546362624784, + "loss": 4.582, + "step": 3184 + }, + { + "epoch": 0.31103515625, + "grad_norm": 0.23761145770549774, + "learning_rate": 0.0004067978643026335, + "loss": 4.582, + "step": 3185 + }, + { + "epoch": 0.3111328125, + "grad_norm": 0.2514781355857849, + "learning_rate": 0.00040674107956588005, + "loss": 4.6016, + "step": 3186 + }, + { + "epoch": 0.31123046875, + "grad_norm": 0.23927055299282074, + "learning_rate": 0.0004066842820577231, + "loss": 4.5977, + "step": 3187 + }, + { + "epoch": 0.311328125, + "grad_norm": 0.22244587540626526, + "learning_rate": 0.0004066274717836688, + "loss": 4.6133, + "step": 3188 + }, + { + "epoch": 0.31142578125, + "grad_norm": 0.23321017622947693, + "learning_rate": 0.0004065706487492244, + "loss": 4.5977, + "step": 3189 + }, + { + "epoch": 0.3115234375, + "grad_norm": 0.222861647605896, + "learning_rate": 0.00040651381295989857, + "loss": 4.6094, + "step": 3190 + }, + { + "epoch": 0.31162109375, + "grad_norm": 0.21026979386806488, + "learning_rate": 0.0004064569644212012, + "loss": 4.5859, + "step": 3191 + }, + { + "epoch": 0.31171875, + "grad_norm": 0.2545301020145416, + "learning_rate": 0.00040640010313864325, + "loss": 4.6094, + "step": 3192 + }, + { + "epoch": 0.31181640625, + "grad_norm": 0.3010014295578003, + "learning_rate": 0.00040634322911773724, + "loss": 4.5664, + "step": 3193 + }, + { + "epoch": 0.3119140625, + "grad_norm": 0.3009093403816223, + "learning_rate": 0.00040628634236399653, + "loss": 4.5977, + "step": 3194 + }, + { + "epoch": 0.31201171875, + "grad_norm": 0.30418580770492554, + "learning_rate": 0.000406229442882936, + "loss": 4.6289, + "step": 3195 + }, + { + "epoch": 0.312109375, + "grad_norm": 0.2831104099750519, + "learning_rate": 0.00040617253068007156, + "loss": 4.6133, + "step": 3196 + }, + { + "epoch": 0.31220703125, + "grad_norm": 0.27336424589157104, + "learning_rate": 0.0004061156057609206, + "loss": 4.5859, + "step": 3197 + }, + { + "epoch": 0.3123046875, + "grad_norm": 0.2301994264125824, + "learning_rate": 0.00040605866813100153, + "loss": 4.5898, + "step": 3198 + }, + { + "epoch": 0.31240234375, + "grad_norm": 0.22892244160175323, + "learning_rate": 0.0004060017177958341, + "loss": 4.6172, + "step": 3199 + }, + { + "epoch": 0.3125, + "grad_norm": 0.24103528261184692, + "learning_rate": 0.0004059447547609392, + "loss": 4.5938, + "step": 3200 + }, + { + "epoch": 0.31259765625, + "grad_norm": 0.2707345187664032, + "learning_rate": 0.0004058877790318391, + "loss": 4.6016, + "step": 3201 + }, + { + "epoch": 0.3126953125, + "grad_norm": 0.28221550583839417, + "learning_rate": 0.00040583079061405717, + "loss": 4.5742, + "step": 3202 + }, + { + "epoch": 0.31279296875, + "grad_norm": 0.29296278953552246, + "learning_rate": 0.000405773789513118, + "loss": 4.6016, + "step": 3203 + }, + { + "epoch": 0.312890625, + "grad_norm": 0.2644599676132202, + "learning_rate": 0.00040571677573454754, + "loss": 4.6016, + "step": 3204 + }, + { + "epoch": 0.31298828125, + "grad_norm": 0.24904920160770416, + "learning_rate": 0.00040565974928387285, + "loss": 4.6016, + "step": 3205 + }, + { + "epoch": 
0.3130859375, + "grad_norm": 0.2411424219608307, + "learning_rate": 0.0004056027101666222, + "loss": 4.6094, + "step": 3206 + }, + { + "epoch": 0.31318359375, + "grad_norm": 0.2801899313926697, + "learning_rate": 0.00040554565838832524, + "loss": 4.5703, + "step": 3207 + }, + { + "epoch": 0.31328125, + "grad_norm": 0.327980637550354, + "learning_rate": 0.0004054885939545127, + "loss": 4.6055, + "step": 3208 + }, + { + "epoch": 0.31337890625, + "grad_norm": 0.2677237391471863, + "learning_rate": 0.0004054315168707166, + "loss": 4.5898, + "step": 3209 + }, + { + "epoch": 0.3134765625, + "grad_norm": 0.21497760713100433, + "learning_rate": 0.00040537442714247017, + "loss": 4.6055, + "step": 3210 + }, + { + "epoch": 0.31357421875, + "grad_norm": 0.2246604710817337, + "learning_rate": 0.000405317324775308, + "loss": 4.5898, + "step": 3211 + }, + { + "epoch": 0.313671875, + "grad_norm": 0.2687215209007263, + "learning_rate": 0.00040526020977476554, + "loss": 4.6055, + "step": 3212 + }, + { + "epoch": 0.31376953125, + "grad_norm": 0.27656272053718567, + "learning_rate": 0.0004052030821463798, + "loss": 4.5977, + "step": 3213 + }, + { + "epoch": 0.3138671875, + "grad_norm": 0.2483285814523697, + "learning_rate": 0.00040514594189568903, + "loss": 4.5938, + "step": 3214 + }, + { + "epoch": 0.31396484375, + "grad_norm": 0.21956972777843475, + "learning_rate": 0.00040508878902823246, + "loss": 4.5664, + "step": 3215 + }, + { + "epoch": 0.3140625, + "grad_norm": 0.2637638747692108, + "learning_rate": 0.0004050316235495506, + "loss": 4.582, + "step": 3216 + }, + { + "epoch": 0.31416015625, + "grad_norm": 0.27183717489242554, + "learning_rate": 0.0004049744454651855, + "loss": 4.6172, + "step": 3217 + }, + { + "epoch": 0.3142578125, + "grad_norm": 0.2320816069841385, + "learning_rate": 0.00040491725478068003, + "loss": 4.5352, + "step": 3218 + }, + { + "epoch": 0.31435546875, + "grad_norm": 0.23274171352386475, + "learning_rate": 0.0004048600515015785, + "loss": 4.6055, + "step": 3219 + }, + { + "epoch": 0.314453125, + "grad_norm": 0.22542065382003784, + "learning_rate": 0.0004048028356334263, + "loss": 4.6055, + "step": 3220 + }, + { + "epoch": 0.31455078125, + "grad_norm": 0.23326903581619263, + "learning_rate": 0.0004047456071817701, + "loss": 4.6172, + "step": 3221 + }, + { + "epoch": 0.3146484375, + "grad_norm": 0.21394620835781097, + "learning_rate": 0.00040468836615215785, + "loss": 4.5781, + "step": 3222 + }, + { + "epoch": 0.31474609375, + "grad_norm": 0.21177315711975098, + "learning_rate": 0.00040463111255013883, + "loss": 4.5977, + "step": 3223 + }, + { + "epoch": 0.31484375, + "grad_norm": 0.23390544950962067, + "learning_rate": 0.00040457384638126323, + "loss": 4.625, + "step": 3224 + }, + { + "epoch": 0.31494140625, + "grad_norm": 0.2267731875181198, + "learning_rate": 0.0004045165676510826, + "loss": 4.5781, + "step": 3225 + }, + { + "epoch": 0.3150390625, + "grad_norm": 0.21257764101028442, + "learning_rate": 0.0004044592763651498, + "loss": 4.5898, + "step": 3226 + }, + { + "epoch": 0.31513671875, + "grad_norm": 0.24508829414844513, + "learning_rate": 0.0004044019725290187, + "loss": 4.6055, + "step": 3227 + }, + { + "epoch": 0.315234375, + "grad_norm": 0.3212210237979889, + "learning_rate": 0.0004043446561482448, + "loss": 4.6133, + "step": 3228 + }, + { + "epoch": 0.31533203125, + "grad_norm": 0.43434426188468933, + "learning_rate": 0.0004042873272283842, + "loss": 4.5938, + "step": 3229 + }, + { + "epoch": 0.3154296875, + "grad_norm": 0.45762398838996887, + "learning_rate": 
0.0004042299857749947, + "loss": 4.625, + "step": 3230 + }, + { + "epoch": 0.31552734375, + "grad_norm": 0.33518514037132263, + "learning_rate": 0.0004041726317936352, + "loss": 4.6211, + "step": 3231 + }, + { + "epoch": 0.315625, + "grad_norm": 0.2198718935251236, + "learning_rate": 0.0004041152652898658, + "loss": 4.5781, + "step": 3232 + }, + { + "epoch": 0.31572265625, + "grad_norm": 0.30684539675712585, + "learning_rate": 0.00040405788626924764, + "loss": 4.6211, + "step": 3233 + }, + { + "epoch": 0.3158203125, + "grad_norm": 0.31937775015830994, + "learning_rate": 0.0004040004947373434, + "loss": 4.5938, + "step": 3234 + }, + { + "epoch": 0.31591796875, + "grad_norm": 0.24825255572795868, + "learning_rate": 0.00040394309069971676, + "loss": 4.6016, + "step": 3235 + }, + { + "epoch": 0.316015625, + "grad_norm": 0.24035389721393585, + "learning_rate": 0.0004038856741619325, + "loss": 4.5938, + "step": 3236 + }, + { + "epoch": 0.31611328125, + "grad_norm": 0.28749486804008484, + "learning_rate": 0.00040382824512955696, + "loss": 4.5898, + "step": 3237 + }, + { + "epoch": 0.3162109375, + "grad_norm": 0.25033918023109436, + "learning_rate": 0.00040377080360815736, + "loss": 4.5703, + "step": 3238 + }, + { + "epoch": 0.31630859375, + "grad_norm": 0.21576076745986938, + "learning_rate": 0.0004037133496033024, + "loss": 4.5625, + "step": 3239 + }, + { + "epoch": 0.31640625, + "grad_norm": 0.24997229874134064, + "learning_rate": 0.0004036558831205617, + "loss": 4.5977, + "step": 3240 + }, + { + "epoch": 0.31650390625, + "grad_norm": 0.26537737250328064, + "learning_rate": 0.0004035984041655063, + "loss": 4.582, + "step": 3241 + }, + { + "epoch": 0.3166015625, + "grad_norm": 0.23726266622543335, + "learning_rate": 0.00040354091274370844, + "loss": 4.5781, + "step": 3242 + }, + { + "epoch": 0.31669921875, + "grad_norm": 0.22166027128696442, + "learning_rate": 0.0004034834088607416, + "loss": 4.6133, + "step": 3243 + }, + { + "epoch": 0.316796875, + "grad_norm": 0.22233177721500397, + "learning_rate": 0.0004034258925221802, + "loss": 4.6133, + "step": 3244 + }, + { + "epoch": 0.31689453125, + "grad_norm": 0.2225351333618164, + "learning_rate": 0.0004033683637336002, + "loss": 4.5781, + "step": 3245 + }, + { + "epoch": 0.3169921875, + "grad_norm": 0.21428877115249634, + "learning_rate": 0.0004033108225005785, + "loss": 4.5898, + "step": 3246 + }, + { + "epoch": 0.31708984375, + "grad_norm": 0.20464369654655457, + "learning_rate": 0.00040325326882869353, + "loss": 4.5898, + "step": 3247 + }, + { + "epoch": 0.3171875, + "grad_norm": 0.20156773924827576, + "learning_rate": 0.00040319570272352446, + "loss": 4.5703, + "step": 3248 + }, + { + "epoch": 0.31728515625, + "grad_norm": 0.21686074137687683, + "learning_rate": 0.0004031381241906521, + "loss": 4.5938, + "step": 3249 + }, + { + "epoch": 0.3173828125, + "grad_norm": 0.2190120667219162, + "learning_rate": 0.00040308053323565837, + "loss": 4.5977, + "step": 3250 + }, + { + "epoch": 0.31748046875, + "grad_norm": 0.22909528017044067, + "learning_rate": 0.00040302292986412613, + "loss": 4.5703, + "step": 3251 + }, + { + "epoch": 0.317578125, + "grad_norm": 0.2043781727552414, + "learning_rate": 0.0004029653140816398, + "loss": 4.5664, + "step": 3252 + }, + { + "epoch": 0.31767578125, + "grad_norm": 0.3439045250415802, + "learning_rate": 0.00040290768589378473, + "loss": 4.5898, + "step": 3253 + }, + { + "epoch": 0.3177734375, + "grad_norm": 0.21872563660144806, + "learning_rate": 0.0004028500453061477, + "loss": 4.6133, + "step": 3254 + }, + { + 
"epoch": 0.31787109375, + "grad_norm": 0.24827922880649567, + "learning_rate": 0.00040279239232431644, + "loss": 4.5742, + "step": 3255 + }, + { + "epoch": 0.31796875, + "grad_norm": 0.25230517983436584, + "learning_rate": 0.0004027347269538801, + "loss": 4.5977, + "step": 3256 + }, + { + "epoch": 0.31806640625, + "grad_norm": 0.2946770489215851, + "learning_rate": 0.0004026770492004289, + "loss": 4.5781, + "step": 3257 + }, + { + "epoch": 0.3181640625, + "grad_norm": 0.2762652337551117, + "learning_rate": 0.0004026193590695542, + "loss": 4.5977, + "step": 3258 + }, + { + "epoch": 0.31826171875, + "grad_norm": 0.28183403611183167, + "learning_rate": 0.00040256165656684897, + "loss": 4.5625, + "step": 3259 + }, + { + "epoch": 0.318359375, + "grad_norm": 0.2496155947446823, + "learning_rate": 0.0004025039416979069, + "loss": 4.6016, + "step": 3260 + }, + { + "epoch": 0.31845703125, + "grad_norm": 0.25827324390411377, + "learning_rate": 0.00040244621446832297, + "loss": 4.5781, + "step": 3261 + }, + { + "epoch": 0.3185546875, + "grad_norm": 0.24164064228534698, + "learning_rate": 0.0004023884748836935, + "loss": 4.5859, + "step": 3262 + }, + { + "epoch": 0.31865234375, + "grad_norm": 0.24419957399368286, + "learning_rate": 0.00040233072294961603, + "loss": 4.5977, + "step": 3263 + }, + { + "epoch": 0.31875, + "grad_norm": 0.3115817606449127, + "learning_rate": 0.0004022729586716892, + "loss": 4.6133, + "step": 3264 + }, + { + "epoch": 0.31884765625, + "grad_norm": 0.3717244267463684, + "learning_rate": 0.00040221518205551285, + "loss": 4.5742, + "step": 3265 + }, + { + "epoch": 0.3189453125, + "grad_norm": 0.3240039348602295, + "learning_rate": 0.0004021573931066879, + "loss": 4.6172, + "step": 3266 + }, + { + "epoch": 0.31904296875, + "grad_norm": 0.30340713262557983, + "learning_rate": 0.0004020995918308168, + "loss": 4.5938, + "step": 3267 + }, + { + "epoch": 0.319140625, + "grad_norm": 0.23251943290233612, + "learning_rate": 0.00040204177823350293, + "loss": 4.5703, + "step": 3268 + }, + { + "epoch": 0.31923828125, + "grad_norm": 0.21755124628543854, + "learning_rate": 0.0004019839523203508, + "loss": 4.5938, + "step": 3269 + }, + { + "epoch": 0.3193359375, + "grad_norm": 0.24982519447803497, + "learning_rate": 0.0004019261140969664, + "loss": 4.6172, + "step": 3270 + }, + { + "epoch": 0.31943359375, + "grad_norm": 0.23979239165782928, + "learning_rate": 0.0004018682635689566, + "loss": 4.6055, + "step": 3271 + }, + { + "epoch": 0.31953125, + "grad_norm": 0.24309977889060974, + "learning_rate": 0.00040181040074192977, + "loss": 4.5859, + "step": 3272 + }, + { + "epoch": 0.31962890625, + "grad_norm": 0.26829880475997925, + "learning_rate": 0.0004017525256214953, + "loss": 4.5898, + "step": 3273 + }, + { + "epoch": 0.3197265625, + "grad_norm": 0.2595829963684082, + "learning_rate": 0.0004016946382132636, + "loss": 4.5859, + "step": 3274 + }, + { + "epoch": 0.31982421875, + "grad_norm": 0.29532483220100403, + "learning_rate": 0.00040163673852284675, + "loss": 4.5977, + "step": 3275 + }, + { + "epoch": 0.319921875, + "grad_norm": 0.2376522272825241, + "learning_rate": 0.00040157882655585754, + "loss": 4.5859, + "step": 3276 + }, + { + "epoch": 0.32001953125, + "grad_norm": 0.24719232320785522, + "learning_rate": 0.0004015209023179102, + "loss": 4.5664, + "step": 3277 + }, + { + "epoch": 0.3201171875, + "grad_norm": 0.21669365465641022, + "learning_rate": 0.00040146296581462007, + "loss": 4.5977, + "step": 3278 + }, + { + "epoch": 0.32021484375, + "grad_norm": 0.2210424840450287, + 
"learning_rate": 0.00040140501705160364, + "loss": 4.5625, + "step": 3279 + }, + { + "epoch": 0.3203125, + "grad_norm": 0.25438299775123596, + "learning_rate": 0.00040134705603447874, + "loss": 4.5664, + "step": 3280 + }, + { + "epoch": 0.32041015625, + "grad_norm": 0.2639719545841217, + "learning_rate": 0.0004012890827688644, + "loss": 4.6016, + "step": 3281 + }, + { + "epoch": 0.3205078125, + "grad_norm": 0.25193682312965393, + "learning_rate": 0.00040123109726038046, + "loss": 4.5977, + "step": 3282 + }, + { + "epoch": 0.32060546875, + "grad_norm": 0.2712264060974121, + "learning_rate": 0.0004011730995146485, + "loss": 4.5781, + "step": 3283 + }, + { + "epoch": 0.320703125, + "grad_norm": 0.21525435149669647, + "learning_rate": 0.0004011150895372908, + "loss": 4.6211, + "step": 3284 + }, + { + "epoch": 0.32080078125, + "grad_norm": 0.22655273973941803, + "learning_rate": 0.0004010570673339311, + "loss": 4.6055, + "step": 3285 + }, + { + "epoch": 0.3208984375, + "grad_norm": 0.22592154145240784, + "learning_rate": 0.0004009990329101944, + "loss": 4.6055, + "step": 3286 + }, + { + "epoch": 0.32099609375, + "grad_norm": 0.21216058731079102, + "learning_rate": 0.0004009409862717066, + "loss": 4.5781, + "step": 3287 + }, + { + "epoch": 0.32109375, + "grad_norm": 0.23415783047676086, + "learning_rate": 0.00040088292742409474, + "loss": 4.5781, + "step": 3288 + }, + { + "epoch": 0.32119140625, + "grad_norm": 0.22338560223579407, + "learning_rate": 0.0004008248563729876, + "loss": 4.5625, + "step": 3289 + }, + { + "epoch": 0.3212890625, + "grad_norm": 0.2333599030971527, + "learning_rate": 0.0004007667731240146, + "loss": 4.582, + "step": 3290 + }, + { + "epoch": 0.32138671875, + "grad_norm": 0.2193213850259781, + "learning_rate": 0.0004007086776828065, + "loss": 4.6094, + "step": 3291 + }, + { + "epoch": 0.321484375, + "grad_norm": 0.24566741287708282, + "learning_rate": 0.00040065057005499535, + "loss": 4.5938, + "step": 3292 + }, + { + "epoch": 0.32158203125, + "grad_norm": 0.27417898178100586, + "learning_rate": 0.00040059245024621413, + "loss": 4.6094, + "step": 3293 + }, + { + "epoch": 0.3216796875, + "grad_norm": 0.2818538546562195, + "learning_rate": 0.00040053431826209736, + "loss": 4.5664, + "step": 3294 + }, + { + "epoch": 0.32177734375, + "grad_norm": 0.30079394578933716, + "learning_rate": 0.0004004761741082803, + "loss": 4.5781, + "step": 3295 + }, + { + "epoch": 0.321875, + "grad_norm": 0.3245161473751068, + "learning_rate": 0.0004004180177903999, + "loss": 4.6133, + "step": 3296 + }, + { + "epoch": 0.32197265625, + "grad_norm": 0.3228883743286133, + "learning_rate": 0.00040035984931409375, + "loss": 4.625, + "step": 3297 + }, + { + "epoch": 0.3220703125, + "grad_norm": 0.7138991951942444, + "learning_rate": 0.00040030166868500124, + "loss": 4.5898, + "step": 3298 + }, + { + "epoch": 0.32216796875, + "grad_norm": 0.2312609851360321, + "learning_rate": 0.00040024347590876216, + "loss": 4.6016, + "step": 3299 + }, + { + "epoch": 0.322265625, + "grad_norm": 0.27485769987106323, + "learning_rate": 0.00040018527099101826, + "loss": 4.582, + "step": 3300 + }, + { + "epoch": 0.32236328125, + "grad_norm": 0.27202874422073364, + "learning_rate": 0.00040012705393741195, + "loss": 4.5938, + "step": 3301 + }, + { + "epoch": 0.3224609375, + "grad_norm": 0.23511004447937012, + "learning_rate": 0.00040006882475358694, + "loss": 4.6016, + "step": 3302 + }, + { + "epoch": 0.32255859375, + "grad_norm": 0.27762940526008606, + "learning_rate": 0.0004000105834451883, + "loss": 4.5898, + "step": 3303 
+ }, + { + "epoch": 0.32265625, + "grad_norm": 0.29470527172088623, + "learning_rate": 0.0003999523300178619, + "loss": 4.5781, + "step": 3304 + }, + { + "epoch": 0.32275390625, + "grad_norm": 0.29957303404808044, + "learning_rate": 0.00039989406447725527, + "loss": 4.5742, + "step": 3305 + }, + { + "epoch": 0.3228515625, + "grad_norm": 0.2313697338104248, + "learning_rate": 0.00039983578682901673, + "loss": 4.6016, + "step": 3306 + }, + { + "epoch": 0.32294921875, + "grad_norm": 0.27548637986183167, + "learning_rate": 0.0003997774970787959, + "loss": 4.6133, + "step": 3307 + }, + { + "epoch": 0.323046875, + "grad_norm": 0.2540157735347748, + "learning_rate": 0.0003997191952322437, + "loss": 4.6016, + "step": 3308 + }, + { + "epoch": 0.32314453125, + "grad_norm": 0.2588164508342743, + "learning_rate": 0.0003996608812950119, + "loss": 4.5742, + "step": 3309 + }, + { + "epoch": 0.3232421875, + "grad_norm": 0.3114417791366577, + "learning_rate": 0.00039960255527275376, + "loss": 4.625, + "step": 3310 + }, + { + "epoch": 0.32333984375, + "grad_norm": 0.389337956905365, + "learning_rate": 0.00039954421717112353, + "loss": 4.5977, + "step": 3311 + }, + { + "epoch": 0.3234375, + "grad_norm": 0.4523245394229889, + "learning_rate": 0.00039948586699577675, + "loss": 4.5898, + "step": 3312 + }, + { + "epoch": 0.32353515625, + "grad_norm": 0.3804641664028168, + "learning_rate": 0.00039942750475237, + "loss": 4.5508, + "step": 3313 + }, + { + "epoch": 0.3236328125, + "grad_norm": 0.32172253727912903, + "learning_rate": 0.00039936913044656127, + "loss": 4.5898, + "step": 3314 + }, + { + "epoch": 0.32373046875, + "grad_norm": 0.3860645294189453, + "learning_rate": 0.0003993107440840094, + "loss": 4.5859, + "step": 3315 + }, + { + "epoch": 0.323828125, + "grad_norm": 0.3296073079109192, + "learning_rate": 0.0003992523456703746, + "loss": 4.5938, + "step": 3316 + }, + { + "epoch": 0.32392578125, + "grad_norm": 0.2583603262901306, + "learning_rate": 0.0003991939352113181, + "loss": 4.5625, + "step": 3317 + }, + { + "epoch": 0.3240234375, + "grad_norm": 0.2825816869735718, + "learning_rate": 0.00039913551271250256, + "loss": 4.5547, + "step": 3318 + }, + { + "epoch": 0.32412109375, + "grad_norm": 0.3467269241809845, + "learning_rate": 0.00039907707817959156, + "loss": 4.5781, + "step": 3319 + }, + { + "epoch": 0.32421875, + "grad_norm": 0.31903743743896484, + "learning_rate": 0.0003990186316182499, + "loss": 4.5898, + "step": 3320 + }, + { + "epoch": 0.32431640625, + "grad_norm": 0.24508501589298248, + "learning_rate": 0.0003989601730341437, + "loss": 4.582, + "step": 3321 + }, + { + "epoch": 0.3244140625, + "grad_norm": 0.2489801049232483, + "learning_rate": 0.00039890170243294, + "loss": 4.5898, + "step": 3322 + }, + { + "epoch": 0.32451171875, + "grad_norm": 0.2656095027923584, + "learning_rate": 0.0003988432198203072, + "loss": 4.5781, + "step": 3323 + }, + { + "epoch": 0.324609375, + "grad_norm": 0.2796880900859833, + "learning_rate": 0.00039878472520191475, + "loss": 4.5664, + "step": 3324 + }, + { + "epoch": 0.32470703125, + "grad_norm": 0.2980833649635315, + "learning_rate": 0.0003987262185834334, + "loss": 4.5664, + "step": 3325 + }, + { + "epoch": 0.3248046875, + "grad_norm": 0.29967814683914185, + "learning_rate": 0.0003986676999705348, + "loss": 4.6094, + "step": 3326 + }, + { + "epoch": 0.32490234375, + "grad_norm": 0.27575263381004333, + "learning_rate": 0.000398609169368892, + "loss": 4.5664, + "step": 3327 + }, + { + "epoch": 0.325, + "grad_norm": 0.28849631547927856, + "learning_rate": 
0.0003985506267841792, + "loss": 4.6133, + "step": 3328 + }, + { + "epoch": 0.32509765625, + "grad_norm": 0.23052667081356049, + "learning_rate": 0.0003984920722220716, + "loss": 4.5859, + "step": 3329 + }, + { + "epoch": 0.3251953125, + "grad_norm": 0.25383371114730835, + "learning_rate": 0.0003984335056882459, + "loss": 4.6172, + "step": 3330 + }, + { + "epoch": 0.32529296875, + "grad_norm": 0.27893173694610596, + "learning_rate": 0.00039837492718837946, + "loss": 4.5898, + "step": 3331 + }, + { + "epoch": 0.325390625, + "grad_norm": 0.2520153224468231, + "learning_rate": 0.0003983163367281512, + "loss": 4.5938, + "step": 3332 + }, + { + "epoch": 0.32548828125, + "grad_norm": 0.22899828851222992, + "learning_rate": 0.000398257734313241, + "loss": 4.6016, + "step": 3333 + }, + { + "epoch": 0.3255859375, + "grad_norm": 0.23806385695934296, + "learning_rate": 0.0003981991199493301, + "loss": 4.6055, + "step": 3334 + }, + { + "epoch": 0.32568359375, + "grad_norm": 0.23495785892009735, + "learning_rate": 0.0003981404936421006, + "loss": 4.5859, + "step": 3335 + }, + { + "epoch": 0.32578125, + "grad_norm": 0.22423014044761658, + "learning_rate": 0.0003980818553972361, + "loss": 4.5859, + "step": 3336 + }, + { + "epoch": 0.32587890625, + "grad_norm": 0.23717433214187622, + "learning_rate": 0.000398023205220421, + "loss": 4.5664, + "step": 3337 + }, + { + "epoch": 0.3259765625, + "grad_norm": 0.23725855350494385, + "learning_rate": 0.0003979645431173411, + "loss": 4.5977, + "step": 3338 + }, + { + "epoch": 0.32607421875, + "grad_norm": 0.21243347227573395, + "learning_rate": 0.00039790586909368335, + "loss": 4.5977, + "step": 3339 + }, + { + "epoch": 0.326171875, + "grad_norm": 0.21183107793331146, + "learning_rate": 0.0003978471831551358, + "loss": 4.6055, + "step": 3340 + }, + { + "epoch": 0.32626953125, + "grad_norm": 0.20117324590682983, + "learning_rate": 0.00039778848530738753, + "loss": 4.5977, + "step": 3341 + }, + { + "epoch": 0.3263671875, + "grad_norm": 0.20403042435646057, + "learning_rate": 0.000397729775556129, + "loss": 4.5742, + "step": 3342 + }, + { + "epoch": 0.32646484375, + "grad_norm": 0.20919233560562134, + "learning_rate": 0.00039767105390705167, + "loss": 4.6172, + "step": 3343 + }, + { + "epoch": 0.3265625, + "grad_norm": 0.20673570036888123, + "learning_rate": 0.00039761232036584826, + "loss": 4.5938, + "step": 3344 + }, + { + "epoch": 0.32666015625, + "grad_norm": 0.2213534116744995, + "learning_rate": 0.0003975535749382125, + "loss": 4.5898, + "step": 3345 + }, + { + "epoch": 0.3267578125, + "grad_norm": 0.2127733677625656, + "learning_rate": 0.00039749481762983957, + "loss": 4.5312, + "step": 3346 + }, + { + "epoch": 0.32685546875, + "grad_norm": 0.20783182978630066, + "learning_rate": 0.00039743604844642535, + "loss": 4.582, + "step": 3347 + }, + { + "epoch": 0.326953125, + "grad_norm": 0.2042011320590973, + "learning_rate": 0.0003973772673936672, + "loss": 4.5977, + "step": 3348 + }, + { + "epoch": 0.32705078125, + "grad_norm": 0.2511347532272339, + "learning_rate": 0.0003973184744772636, + "loss": 4.6016, + "step": 3349 + }, + { + "epoch": 0.3271484375, + "grad_norm": 0.21286721527576447, + "learning_rate": 0.000397259669702914, + "loss": 4.6016, + "step": 3350 + }, + { + "epoch": 0.32724609375, + "grad_norm": 0.22578848898410797, + "learning_rate": 0.00039720085307631933, + "loss": 4.5352, + "step": 3351 + }, + { + "epoch": 0.32734375, + "grad_norm": 0.2282680720090866, + "learning_rate": 0.0003971420246031812, + "loss": 4.6094, + "step": 3352 + }, + { + "epoch": 
0.32744140625, + "grad_norm": 0.222673237323761, + "learning_rate": 0.00039708318428920283, + "loss": 4.6016, + "step": 3353 + }, + { + "epoch": 0.3275390625, + "grad_norm": 0.2062997668981552, + "learning_rate": 0.0003970243321400883, + "loss": 4.582, + "step": 3354 + }, + { + "epoch": 0.32763671875, + "grad_norm": 0.20805339515209198, + "learning_rate": 0.000396965468161543, + "loss": 4.5781, + "step": 3355 + }, + { + "epoch": 0.327734375, + "grad_norm": 0.2295922040939331, + "learning_rate": 0.00039690659235927327, + "loss": 4.5781, + "step": 3356 + }, + { + "epoch": 0.32783203125, + "grad_norm": 0.3085879981517792, + "learning_rate": 0.00039684770473898687, + "loss": 4.5703, + "step": 3357 + }, + { + "epoch": 0.3279296875, + "grad_norm": 0.4872780740261078, + "learning_rate": 0.0003967888053063925, + "loss": 4.6406, + "step": 3358 + }, + { + "epoch": 0.32802734375, + "grad_norm": 0.5959935188293457, + "learning_rate": 0.00039672989406719996, + "loss": 4.6133, + "step": 3359 + }, + { + "epoch": 0.328125, + "grad_norm": 0.3721720576286316, + "learning_rate": 0.0003966709710271204, + "loss": 4.5703, + "step": 3360 + }, + { + "epoch": 0.32822265625, + "grad_norm": 0.28000620007514954, + "learning_rate": 0.000396612036191866, + "loss": 4.5938, + "step": 3361 + }, + { + "epoch": 0.3283203125, + "grad_norm": 0.46910911798477173, + "learning_rate": 0.00039655308956715004, + "loss": 4.6172, + "step": 3362 + }, + { + "epoch": 0.32841796875, + "grad_norm": 0.33885282278060913, + "learning_rate": 0.000396494131158687, + "loss": 4.5859, + "step": 3363 + }, + { + "epoch": 0.328515625, + "grad_norm": 0.27920645475387573, + "learning_rate": 0.00039643516097219255, + "loss": 4.6094, + "step": 3364 + }, + { + "epoch": 0.32861328125, + "grad_norm": 0.3290136456489563, + "learning_rate": 0.0003963761790133835, + "loss": 4.6406, + "step": 3365 + }, + { + "epoch": 0.3287109375, + "grad_norm": 0.25264936685562134, + "learning_rate": 0.00039631718528797754, + "loss": 4.5859, + "step": 3366 + }, + { + "epoch": 0.32880859375, + "grad_norm": 0.26338130235671997, + "learning_rate": 0.00039625817980169396, + "loss": 4.5742, + "step": 3367 + }, + { + "epoch": 0.32890625, + "grad_norm": 0.32073238492012024, + "learning_rate": 0.00039619916256025277, + "loss": 4.582, + "step": 3368 + }, + { + "epoch": 0.32900390625, + "grad_norm": 0.23730438947677612, + "learning_rate": 0.0003961401335693754, + "loss": 4.5938, + "step": 3369 + }, + { + "epoch": 0.3291015625, + "grad_norm": 0.2410012185573578, + "learning_rate": 0.00039608109283478435, + "loss": 4.6016, + "step": 3370 + }, + { + "epoch": 0.32919921875, + "grad_norm": 0.26411348581314087, + "learning_rate": 0.00039602204036220294, + "loss": 4.582, + "step": 3371 + }, + { + "epoch": 0.329296875, + "grad_norm": 0.3594893515110016, + "learning_rate": 0.0003959629761573562, + "loss": 4.5859, + "step": 3372 + }, + { + "epoch": 0.32939453125, + "grad_norm": 0.23418715596199036, + "learning_rate": 0.0003959039002259699, + "loss": 4.5742, + "step": 3373 + }, + { + "epoch": 0.3294921875, + "grad_norm": 0.25021129846572876, + "learning_rate": 0.00039584481257377103, + "loss": 4.5625, + "step": 3374 + }, + { + "epoch": 0.32958984375, + "grad_norm": 0.20875130593776703, + "learning_rate": 0.00039578571320648774, + "loss": 4.5898, + "step": 3375 + }, + { + "epoch": 0.3296875, + "grad_norm": 0.22032596170902252, + "learning_rate": 0.00039572660212984934, + "loss": 4.5508, + "step": 3376 + }, + { + "epoch": 0.32978515625, + "grad_norm": 0.24137865006923676, + "learning_rate": 
0.0003956674793495862, + "loss": 4.6055, + "step": 3377 + }, + { + "epoch": 0.3298828125, + "grad_norm": 0.2471792846918106, + "learning_rate": 0.00039560834487143006, + "loss": 4.6094, + "step": 3378 + }, + { + "epoch": 0.32998046875, + "grad_norm": 0.20678575336933136, + "learning_rate": 0.00039554919870111327, + "loss": 4.5898, + "step": 3379 + }, + { + "epoch": 0.330078125, + "grad_norm": 0.22744956612586975, + "learning_rate": 0.0003954900408443699, + "loss": 4.6133, + "step": 3380 + }, + { + "epoch": 0.33017578125, + "grad_norm": 0.2320060282945633, + "learning_rate": 0.0003954308713069349, + "loss": 4.5703, + "step": 3381 + }, + { + "epoch": 0.3302734375, + "grad_norm": 0.2318466305732727, + "learning_rate": 0.0003953716900945442, + "loss": 4.5664, + "step": 3382 + }, + { + "epoch": 0.33037109375, + "grad_norm": 0.24145260453224182, + "learning_rate": 0.0003953124972129352, + "loss": 4.6172, + "step": 3383 + }, + { + "epoch": 0.33046875, + "grad_norm": 0.23277801275253296, + "learning_rate": 0.0003952532926678461, + "loss": 4.5703, + "step": 3384 + }, + { + "epoch": 0.33056640625, + "grad_norm": 0.217460036277771, + "learning_rate": 0.00039519407646501647, + "loss": 4.5781, + "step": 3385 + }, + { + "epoch": 0.3306640625, + "grad_norm": 0.24385443329811096, + "learning_rate": 0.0003951348486101869, + "loss": 4.5742, + "step": 3386 + }, + { + "epoch": 0.33076171875, + "grad_norm": 0.22367963194847107, + "learning_rate": 0.0003950756091090991, + "loss": 4.5859, + "step": 3387 + }, + { + "epoch": 0.330859375, + "grad_norm": 0.21889397501945496, + "learning_rate": 0.0003950163579674959, + "loss": 4.6172, + "step": 3388 + }, + { + "epoch": 0.33095703125, + "grad_norm": 0.23627842962741852, + "learning_rate": 0.0003949570951911214, + "loss": 4.5859, + "step": 3389 + }, + { + "epoch": 0.3310546875, + "grad_norm": 0.20355363190174103, + "learning_rate": 0.0003948978207857206, + "loss": 4.6055, + "step": 3390 + }, + { + "epoch": 0.33115234375, + "grad_norm": 0.22467434406280518, + "learning_rate": 0.00039483853475704, + "loss": 4.6016, + "step": 3391 + }, + { + "epoch": 0.33125, + "grad_norm": 0.23810891807079315, + "learning_rate": 0.0003947792371108266, + "loss": 4.5938, + "step": 3392 + }, + { + "epoch": 0.33134765625, + "grad_norm": 0.22555001080036163, + "learning_rate": 0.0003947199278528292, + "loss": 4.5898, + "step": 3393 + }, + { + "epoch": 0.3314453125, + "grad_norm": 0.23411592841148376, + "learning_rate": 0.0003946606069887973, + "loss": 4.5859, + "step": 3394 + }, + { + "epoch": 0.33154296875, + "grad_norm": 0.23978155851364136, + "learning_rate": 0.00039460127452448166, + "loss": 4.5781, + "step": 3395 + }, + { + "epoch": 0.331640625, + "grad_norm": 0.21071137487888336, + "learning_rate": 0.0003945419304656343, + "loss": 4.5898, + "step": 3396 + }, + { + "epoch": 0.33173828125, + "grad_norm": 0.26667845249176025, + "learning_rate": 0.0003944825748180081, + "loss": 4.5703, + "step": 3397 + }, + { + "epoch": 0.3318359375, + "grad_norm": 0.26892712712287903, + "learning_rate": 0.00039442320758735716, + "loss": 4.6055, + "step": 3398 + }, + { + "epoch": 0.33193359375, + "grad_norm": 0.2445114254951477, + "learning_rate": 0.0003943638287794368, + "loss": 4.5977, + "step": 3399 + }, + { + "epoch": 0.33203125, + "grad_norm": 0.25445324182510376, + "learning_rate": 0.0003943044384000033, + "loss": 4.6094, + "step": 3400 + }, + { + "epoch": 0.33212890625, + "grad_norm": 0.23124822974205017, + "learning_rate": 0.0003942450364548143, + "loss": 4.5859, + "step": 3401 + }, + { + "epoch": 
0.3322265625, + "grad_norm": 0.24814872443675995, + "learning_rate": 0.00039418562294962826, + "loss": 4.5898, + "step": 3402 + }, + { + "epoch": 0.33232421875, + "grad_norm": 0.22599546611309052, + "learning_rate": 0.00039412619789020505, + "loss": 4.5859, + "step": 3403 + }, + { + "epoch": 0.332421875, + "grad_norm": 0.23347483575344086, + "learning_rate": 0.0003940667612823054, + "loss": 4.5859, + "step": 3404 + }, + { + "epoch": 0.33251953125, + "grad_norm": 0.2559767961502075, + "learning_rate": 0.0003940073131316914, + "loss": 4.6016, + "step": 3405 + }, + { + "epoch": 0.3326171875, + "grad_norm": 0.2848718762397766, + "learning_rate": 0.00039394785344412615, + "loss": 4.5664, + "step": 3406 + }, + { + "epoch": 0.33271484375, + "grad_norm": 0.2830337584018707, + "learning_rate": 0.00039388838222537375, + "loss": 4.582, + "step": 3407 + }, + { + "epoch": 0.3328125, + "grad_norm": 0.28271812200546265, + "learning_rate": 0.0003938288994811995, + "loss": 4.5703, + "step": 3408 + }, + { + "epoch": 0.33291015625, + "grad_norm": 0.3354898691177368, + "learning_rate": 0.0003937694052173701, + "loss": 4.5781, + "step": 3409 + }, + { + "epoch": 0.3330078125, + "grad_norm": 0.34435465931892395, + "learning_rate": 0.00039370989943965286, + "loss": 4.6055, + "step": 3410 + }, + { + "epoch": 0.33310546875, + "grad_norm": 0.29029008746147156, + "learning_rate": 0.0003936503821538166, + "loss": 4.582, + "step": 3411 + }, + { + "epoch": 0.333203125, + "grad_norm": 0.21510900557041168, + "learning_rate": 0.00039359085336563107, + "loss": 4.6016, + "step": 3412 + }, + { + "epoch": 0.33330078125, + "grad_norm": 0.20869968831539154, + "learning_rate": 0.00039353131308086714, + "loss": 4.6172, + "step": 3413 + }, + { + "epoch": 0.3333984375, + "grad_norm": 0.24522189795970917, + "learning_rate": 0.0003934717613052969, + "loss": 4.5938, + "step": 3414 + }, + { + "epoch": 0.33349609375, + "grad_norm": 0.26707345247268677, + "learning_rate": 0.0003934121980446935, + "loss": 4.5938, + "step": 3415 + }, + { + "epoch": 0.33359375, + "grad_norm": 0.2939470410346985, + "learning_rate": 0.0003933526233048311, + "loss": 4.5664, + "step": 3416 + }, + { + "epoch": 0.33369140625, + "grad_norm": 0.30631959438323975, + "learning_rate": 0.00039329303709148523, + "loss": 4.6133, + "step": 3417 + }, + { + "epoch": 0.3337890625, + "grad_norm": 0.25258976221084595, + "learning_rate": 0.0003932334394104322, + "loss": 4.5781, + "step": 3418 + }, + { + "epoch": 0.33388671875, + "grad_norm": 0.25660714507102966, + "learning_rate": 0.0003931738302674497, + "loss": 4.5898, + "step": 3419 + }, + { + "epoch": 0.333984375, + "grad_norm": 0.2531268894672394, + "learning_rate": 0.0003931142096683164, + "loss": 4.5898, + "step": 3420 + }, + { + "epoch": 0.33408203125, + "grad_norm": 0.287847638130188, + "learning_rate": 0.0003930545776188121, + "loss": 4.5781, + "step": 3421 + }, + { + "epoch": 0.3341796875, + "grad_norm": 0.271643728017807, + "learning_rate": 0.00039299493412471776, + "loss": 4.5664, + "step": 3422 + }, + { + "epoch": 0.33427734375, + "grad_norm": 0.247173473238945, + "learning_rate": 0.0003929352791918154, + "loss": 4.5703, + "step": 3423 + }, + { + "epoch": 0.334375, + "grad_norm": 0.2137906551361084, + "learning_rate": 0.00039287561282588807, + "loss": 4.5742, + "step": 3424 + }, + { + "epoch": 0.33447265625, + "grad_norm": 0.2528728246688843, + "learning_rate": 0.0003928159350327202, + "loss": 4.6055, + "step": 3425 + }, + { + "epoch": 0.3345703125, + "grad_norm": 0.3007262349128723, + "learning_rate": 
0.00039275624581809707, + "loss": 4.5898, + "step": 3426 + }, + { + "epoch": 0.33466796875, + "grad_norm": 0.3488275408744812, + "learning_rate": 0.000392696545187805, + "loss": 4.5742, + "step": 3427 + }, + { + "epoch": 0.334765625, + "grad_norm": 0.28687769174575806, + "learning_rate": 0.00039263683314763175, + "loss": 4.5703, + "step": 3428 + }, + { + "epoch": 0.33486328125, + "grad_norm": 0.22688698768615723, + "learning_rate": 0.000392577109703366, + "loss": 4.5664, + "step": 3429 + }, + { + "epoch": 0.3349609375, + "grad_norm": 0.2493756264448166, + "learning_rate": 0.00039251737486079735, + "loss": 4.5625, + "step": 3430 + }, + { + "epoch": 0.33505859375, + "grad_norm": 0.28942352533340454, + "learning_rate": 0.00039245762862571683, + "loss": 4.5859, + "step": 3431 + }, + { + "epoch": 0.33515625, + "grad_norm": 0.2988203763961792, + "learning_rate": 0.0003923978710039164, + "loss": 4.5742, + "step": 3432 + }, + { + "epoch": 0.33525390625, + "grad_norm": 0.2230750322341919, + "learning_rate": 0.00039233810200118923, + "loss": 4.5781, + "step": 3433 + }, + { + "epoch": 0.3353515625, + "grad_norm": 0.22613981366157532, + "learning_rate": 0.00039227832162332934, + "loss": 4.6055, + "step": 3434 + }, + { + "epoch": 0.33544921875, + "grad_norm": 0.2786661386489868, + "learning_rate": 0.00039221852987613223, + "loss": 4.6016, + "step": 3435 + }, + { + "epoch": 0.335546875, + "grad_norm": 0.26984351873397827, + "learning_rate": 0.0003921587267653941, + "loss": 4.582, + "step": 3436 + }, + { + "epoch": 0.33564453125, + "grad_norm": 0.198298379778862, + "learning_rate": 0.00039209891229691267, + "loss": 4.5547, + "step": 3437 + }, + { + "epoch": 0.3357421875, + "grad_norm": 0.2523723244667053, + "learning_rate": 0.00039203908647648646, + "loss": 4.5781, + "step": 3438 + }, + { + "epoch": 0.33583984375, + "grad_norm": 0.27255865931510925, + "learning_rate": 0.0003919792493099151, + "loss": 4.6094, + "step": 3439 + }, + { + "epoch": 0.3359375, + "grad_norm": 0.2252158522605896, + "learning_rate": 0.00039191940080299947, + "loss": 4.5625, + "step": 3440 + }, + { + "epoch": 0.33603515625, + "grad_norm": 0.22037279605865479, + "learning_rate": 0.00039185954096154145, + "loss": 4.5938, + "step": 3441 + }, + { + "epoch": 0.3361328125, + "grad_norm": 0.2069425731897354, + "learning_rate": 0.00039179966979134405, + "loss": 4.5508, + "step": 3442 + }, + { + "epoch": 0.33623046875, + "grad_norm": 0.21884022653102875, + "learning_rate": 0.00039173978729821143, + "loss": 4.5742, + "step": 3443 + }, + { + "epoch": 0.336328125, + "grad_norm": 0.21663275361061096, + "learning_rate": 0.00039167989348794875, + "loss": 4.5742, + "step": 3444 + }, + { + "epoch": 0.33642578125, + "grad_norm": 0.2364639937877655, + "learning_rate": 0.0003916199883663623, + "loss": 4.6055, + "step": 3445 + }, + { + "epoch": 0.3365234375, + "grad_norm": 0.3033164441585541, + "learning_rate": 0.00039156007193925945, + "loss": 4.5547, + "step": 3446 + }, + { + "epoch": 0.33662109375, + "grad_norm": 0.22516201436519623, + "learning_rate": 0.00039150014421244876, + "loss": 4.5977, + "step": 3447 + }, + { + "epoch": 0.33671875, + "grad_norm": 0.21647989749908447, + "learning_rate": 0.00039144020519173975, + "loss": 4.5977, + "step": 3448 + }, + { + "epoch": 0.33681640625, + "grad_norm": 0.23504209518432617, + "learning_rate": 0.00039138025488294313, + "loss": 4.5742, + "step": 3449 + }, + { + "epoch": 0.3369140625, + "grad_norm": 0.23530873656272888, + "learning_rate": 0.00039132029329187073, + "loss": 4.5469, + "step": 3450 + }, + { + 
"epoch": 0.33701171875, + "grad_norm": 0.2119770348072052, + "learning_rate": 0.00039126032042433533, + "loss": 4.6016, + "step": 3451 + }, + { + "epoch": 0.337109375, + "grad_norm": 0.2437066286802292, + "learning_rate": 0.00039120033628615086, + "loss": 4.5938, + "step": 3452 + }, + { + "epoch": 0.33720703125, + "grad_norm": 0.2603267729282379, + "learning_rate": 0.0003911403408831325, + "loss": 4.5977, + "step": 3453 + }, + { + "epoch": 0.3373046875, + "grad_norm": 0.32560423016548157, + "learning_rate": 0.0003910803342210964, + "loss": 4.6094, + "step": 3454 + }, + { + "epoch": 0.33740234375, + "grad_norm": 0.21630613505840302, + "learning_rate": 0.00039102031630585966, + "loss": 4.6055, + "step": 3455 + }, + { + "epoch": 0.3375, + "grad_norm": 0.213385671377182, + "learning_rate": 0.00039096028714324065, + "loss": 4.5547, + "step": 3456 + }, + { + "epoch": 0.33759765625, + "grad_norm": 0.2776811718940735, + "learning_rate": 0.0003909002467390589, + "loss": 4.5703, + "step": 3457 + }, + { + "epoch": 0.3376953125, + "grad_norm": 0.33209875226020813, + "learning_rate": 0.00039084019509913486, + "loss": 4.582, + "step": 3458 + }, + { + "epoch": 0.33779296875, + "grad_norm": 0.36082029342651367, + "learning_rate": 0.00039078013222929004, + "loss": 4.5938, + "step": 3459 + }, + { + "epoch": 0.337890625, + "grad_norm": 0.30585816502571106, + "learning_rate": 0.00039072005813534723, + "loss": 4.5977, + "step": 3460 + }, + { + "epoch": 0.33798828125, + "grad_norm": 0.2507856488227844, + "learning_rate": 0.00039065997282313017, + "loss": 4.5742, + "step": 3461 + }, + { + "epoch": 0.3380859375, + "grad_norm": 0.28282681107521057, + "learning_rate": 0.00039059987629846374, + "loss": 4.5859, + "step": 3462 + }, + { + "epoch": 0.33818359375, + "grad_norm": 0.2744799256324768, + "learning_rate": 0.0003905397685671739, + "loss": 4.5703, + "step": 3463 + }, + { + "epoch": 0.33828125, + "grad_norm": 0.21828201413154602, + "learning_rate": 0.00039047964963508764, + "loss": 4.5781, + "step": 3464 + }, + { + "epoch": 0.33837890625, + "grad_norm": 0.23777252435684204, + "learning_rate": 0.00039041951950803314, + "loss": 4.5977, + "step": 3465 + }, + { + "epoch": 0.3384765625, + "grad_norm": 0.2406795471906662, + "learning_rate": 0.00039035937819183945, + "loss": 4.5938, + "step": 3466 + }, + { + "epoch": 0.33857421875, + "grad_norm": 0.23512239754199982, + "learning_rate": 0.0003902992256923372, + "loss": 4.5664, + "step": 3467 + }, + { + "epoch": 0.338671875, + "grad_norm": 0.24088239669799805, + "learning_rate": 0.0003902390620153575, + "loss": 4.5938, + "step": 3468 + }, + { + "epoch": 0.33876953125, + "grad_norm": 0.22533607482910156, + "learning_rate": 0.0003901788871667328, + "loss": 4.5898, + "step": 3469 + }, + { + "epoch": 0.3388671875, + "grad_norm": 0.2299346923828125, + "learning_rate": 0.0003901187011522967, + "loss": 4.6016, + "step": 3470 + }, + { + "epoch": 0.33896484375, + "grad_norm": 0.20693060755729675, + "learning_rate": 0.0003900585039778839, + "loss": 4.5586, + "step": 3471 + }, + { + "epoch": 0.3390625, + "grad_norm": 0.23042234778404236, + "learning_rate": 0.00038999829564932996, + "loss": 4.5703, + "step": 3472 + }, + { + "epoch": 0.33916015625, + "grad_norm": 0.2646695375442505, + "learning_rate": 0.00038993807617247186, + "loss": 4.5547, + "step": 3473 + }, + { + "epoch": 0.3392578125, + "grad_norm": 0.2777637541294098, + "learning_rate": 0.0003898778455531473, + "loss": 4.6016, + "step": 3474 + }, + { + "epoch": 0.33935546875, + "grad_norm": 0.26099810004234314, + 
"learning_rate": 0.00038981760379719533, + "loss": 4.5781, + "step": 3475 + }, + { + "epoch": 0.339453125, + "grad_norm": 0.24548865854740143, + "learning_rate": 0.00038975735091045593, + "loss": 4.5781, + "step": 3476 + }, + { + "epoch": 0.33955078125, + "grad_norm": 0.24180658161640167, + "learning_rate": 0.0003896970868987702, + "loss": 4.5742, + "step": 3477 + }, + { + "epoch": 0.3396484375, + "grad_norm": 0.24547573924064636, + "learning_rate": 0.0003896368117679803, + "loss": 4.5898, + "step": 3478 + }, + { + "epoch": 0.33974609375, + "grad_norm": 0.2244662046432495, + "learning_rate": 0.0003895765255239297, + "loss": 4.5742, + "step": 3479 + }, + { + "epoch": 0.33984375, + "grad_norm": 0.21351224184036255, + "learning_rate": 0.00038951622817246244, + "loss": 4.5898, + "step": 3480 + }, + { + "epoch": 0.33994140625, + "grad_norm": 0.23468780517578125, + "learning_rate": 0.00038945591971942413, + "loss": 4.5781, + "step": 3481 + }, + { + "epoch": 0.3400390625, + "grad_norm": 0.2657860219478607, + "learning_rate": 0.0003893956001706612, + "loss": 4.5938, + "step": 3482 + }, + { + "epoch": 0.34013671875, + "grad_norm": 0.27142828702926636, + "learning_rate": 0.00038933526953202125, + "loss": 4.5586, + "step": 3483 + }, + { + "epoch": 0.340234375, + "grad_norm": 0.24665763974189758, + "learning_rate": 0.0003892749278093529, + "loss": 4.5742, + "step": 3484 + }, + { + "epoch": 0.34033203125, + "grad_norm": 0.22797048091888428, + "learning_rate": 0.00038921457500850596, + "loss": 4.5664, + "step": 3485 + }, + { + "epoch": 0.3404296875, + "grad_norm": 0.24307425320148468, + "learning_rate": 0.00038915421113533107, + "loss": 4.582, + "step": 3486 + }, + { + "epoch": 0.34052734375, + "grad_norm": 0.2106340527534485, + "learning_rate": 0.00038909383619568016, + "loss": 4.5703, + "step": 3487 + }, + { + "epoch": 0.340625, + "grad_norm": 0.21940727531909943, + "learning_rate": 0.0003890334501954062, + "loss": 4.5547, + "step": 3488 + }, + { + "epoch": 0.34072265625, + "grad_norm": 0.22043249011039734, + "learning_rate": 0.0003889730531403633, + "loss": 4.5703, + "step": 3489 + }, + { + "epoch": 0.3408203125, + "grad_norm": 0.21157008409500122, + "learning_rate": 0.0003889126450364063, + "loss": 4.5781, + "step": 3490 + }, + { + "epoch": 0.34091796875, + "grad_norm": 0.2148536890745163, + "learning_rate": 0.0003888522258893915, + "loss": 4.5977, + "step": 3491 + }, + { + "epoch": 0.341015625, + "grad_norm": 0.22569139301776886, + "learning_rate": 0.0003887917957051762, + "loss": 4.5703, + "step": 3492 + }, + { + "epoch": 0.34111328125, + "grad_norm": 0.235578715801239, + "learning_rate": 0.00038873135448961856, + "loss": 4.5898, + "step": 3493 + }, + { + "epoch": 0.3412109375, + "grad_norm": 0.23993854224681854, + "learning_rate": 0.000388670902248578, + "loss": 4.6016, + "step": 3494 + }, + { + "epoch": 0.34130859375, + "grad_norm": 0.2334795445203781, + "learning_rate": 0.000388610438987915, + "loss": 4.5859, + "step": 3495 + }, + { + "epoch": 0.34140625, + "grad_norm": 0.21744944155216217, + "learning_rate": 0.00038854996471349097, + "loss": 4.582, + "step": 3496 + }, + { + "epoch": 0.34150390625, + "grad_norm": 0.21493971347808838, + "learning_rate": 0.0003884894794311686, + "loss": 4.582, + "step": 3497 + }, + { + "epoch": 0.3416015625, + "grad_norm": 0.21299481391906738, + "learning_rate": 0.0003884289831468114, + "loss": 4.5781, + "step": 3498 + }, + { + "epoch": 0.34169921875, + "grad_norm": 0.20631639659404755, + "learning_rate": 0.00038836847586628403, + "loss": 4.5938, + "step": 3499 + 
}, + { + "epoch": 0.341796875, + "grad_norm": 0.24000613391399384, + "learning_rate": 0.00038830795759545243, + "loss": 4.5859, + "step": 3500 + }, + { + "epoch": 0.34189453125, + "grad_norm": 0.27369704842567444, + "learning_rate": 0.00038824742834018346, + "loss": 4.5586, + "step": 3501 + }, + { + "epoch": 0.3419921875, + "grad_norm": 0.33496975898742676, + "learning_rate": 0.0003881868881063448, + "loss": 4.6016, + "step": 3502 + }, + { + "epoch": 0.34208984375, + "grad_norm": 0.3325915038585663, + "learning_rate": 0.0003881263368998056, + "loss": 4.5547, + "step": 3503 + }, + { + "epoch": 0.3421875, + "grad_norm": 0.28877198696136475, + "learning_rate": 0.00038806577472643583, + "loss": 4.6094, + "step": 3504 + }, + { + "epoch": 0.34228515625, + "grad_norm": 0.22102631628513336, + "learning_rate": 0.0003880052015921066, + "loss": 4.5508, + "step": 3505 + }, + { + "epoch": 0.3423828125, + "grad_norm": 0.23434330523014069, + "learning_rate": 0.00038794461750269, + "loss": 4.5859, + "step": 3506 + }, + { + "epoch": 0.34248046875, + "grad_norm": 0.29204925894737244, + "learning_rate": 0.0003878840224640592, + "loss": 4.5898, + "step": 3507 + }, + { + "epoch": 0.342578125, + "grad_norm": 0.3155725598335266, + "learning_rate": 0.00038782341648208876, + "loss": 4.5898, + "step": 3508 + }, + { + "epoch": 0.34267578125, + "grad_norm": 0.28683701157569885, + "learning_rate": 0.0003877627995626537, + "loss": 4.5508, + "step": 3509 + }, + { + "epoch": 0.3427734375, + "grad_norm": 0.22580774128437042, + "learning_rate": 0.0003877021717116306, + "loss": 4.6133, + "step": 3510 + }, + { + "epoch": 0.34287109375, + "grad_norm": 0.24952606856822968, + "learning_rate": 0.0003876415329348968, + "loss": 4.5898, + "step": 3511 + }, + { + "epoch": 0.34296875, + "grad_norm": 0.3303544521331787, + "learning_rate": 0.000387580883238331, + "loss": 4.5859, + "step": 3512 + }, + { + "epoch": 0.34306640625, + "grad_norm": 0.32153865694999695, + "learning_rate": 0.0003875202226278126, + "loss": 4.5586, + "step": 3513 + }, + { + "epoch": 0.3431640625, + "grad_norm": 0.23124876618385315, + "learning_rate": 0.0003874595511092223, + "loss": 4.5781, + "step": 3514 + }, + { + "epoch": 0.34326171875, + "grad_norm": 0.23796994984149933, + "learning_rate": 0.0003873988686884417, + "loss": 4.5898, + "step": 3515 + }, + { + "epoch": 0.343359375, + "grad_norm": 0.27483710646629333, + "learning_rate": 0.00038733817537135376, + "loss": 4.5742, + "step": 3516 + }, + { + "epoch": 0.34345703125, + "grad_norm": 0.23060576617717743, + "learning_rate": 0.0003872774711638421, + "loss": 4.5938, + "step": 3517 + }, + { + "epoch": 0.3435546875, + "grad_norm": 0.2303927093744278, + "learning_rate": 0.0003872167560717916, + "loss": 4.5547, + "step": 3518 + }, + { + "epoch": 0.34365234375, + "grad_norm": 0.25281965732574463, + "learning_rate": 0.0003871560301010883, + "loss": 4.5352, + "step": 3519 + }, + { + "epoch": 0.34375, + "grad_norm": 0.24246461689472198, + "learning_rate": 0.000387095293257619, + "loss": 4.5859, + "step": 3520 + }, + { + "epoch": 0.34384765625, + "grad_norm": 0.21475031971931458, + "learning_rate": 0.0003870345455472719, + "loss": 4.5781, + "step": 3521 + }, + { + "epoch": 0.3439453125, + "grad_norm": 0.23205529153347015, + "learning_rate": 0.00038697378697593595, + "loss": 4.582, + "step": 3522 + }, + { + "epoch": 0.34404296875, + "grad_norm": 0.2671996057033539, + "learning_rate": 0.0003869130175495013, + "loss": 4.5781, + "step": 3523 + }, + { + "epoch": 0.344140625, + "grad_norm": 0.25403136014938354, + 
"learning_rate": 0.0003868522372738591, + "loss": 4.5938, + "step": 3524 + }, + { + "epoch": 0.34423828125, + "grad_norm": 0.2413015067577362, + "learning_rate": 0.0003867914461549018, + "loss": 4.582, + "step": 3525 + }, + { + "epoch": 0.3443359375, + "grad_norm": 0.22985169291496277, + "learning_rate": 0.00038673064419852244, + "loss": 4.5547, + "step": 3526 + }, + { + "epoch": 0.34443359375, + "grad_norm": 0.2390916794538498, + "learning_rate": 0.0003866698314106154, + "loss": 4.5742, + "step": 3527 + }, + { + "epoch": 0.34453125, + "grad_norm": 0.27179819345474243, + "learning_rate": 0.00038660900779707613, + "loss": 4.5703, + "step": 3528 + }, + { + "epoch": 0.34462890625, + "grad_norm": 0.24174676835536957, + "learning_rate": 0.0003865481733638011, + "loss": 4.6016, + "step": 3529 + }, + { + "epoch": 0.3447265625, + "grad_norm": 0.2269209921360016, + "learning_rate": 0.0003864873281166877, + "loss": 4.5898, + "step": 3530 + }, + { + "epoch": 0.34482421875, + "grad_norm": 0.25660082697868347, + "learning_rate": 0.00038642647206163456, + "loss": 4.5586, + "step": 3531 + }, + { + "epoch": 0.344921875, + "grad_norm": 0.2472751885652542, + "learning_rate": 0.00038636560520454116, + "loss": 4.5898, + "step": 3532 + }, + { + "epoch": 0.34501953125, + "grad_norm": 0.24784642457962036, + "learning_rate": 0.0003863047275513081, + "loss": 4.5664, + "step": 3533 + }, + { + "epoch": 0.3451171875, + "grad_norm": 0.2414056360721588, + "learning_rate": 0.00038624383910783723, + "loss": 4.5781, + "step": 3534 + }, + { + "epoch": 0.34521484375, + "grad_norm": 0.28914812207221985, + "learning_rate": 0.00038618293988003117, + "loss": 4.5586, + "step": 3535 + }, + { + "epoch": 0.3453125, + "grad_norm": 0.32863086462020874, + "learning_rate": 0.0003861220298737936, + "loss": 4.5859, + "step": 3536 + }, + { + "epoch": 0.34541015625, + "grad_norm": 0.25575563311576843, + "learning_rate": 0.00038606110909502955, + "loss": 4.6055, + "step": 3537 + }, + { + "epoch": 0.3455078125, + "grad_norm": 0.25360026955604553, + "learning_rate": 0.00038600017754964475, + "loss": 4.5664, + "step": 3538 + }, + { + "epoch": 0.34560546875, + "grad_norm": 0.2858823537826538, + "learning_rate": 0.00038593923524354596, + "loss": 4.5508, + "step": 3539 + }, + { + "epoch": 0.345703125, + "grad_norm": 0.2491176873445511, + "learning_rate": 0.00038587828218264133, + "loss": 4.5664, + "step": 3540 + }, + { + "epoch": 0.34580078125, + "grad_norm": 0.2505415081977844, + "learning_rate": 0.0003858173183728398, + "loss": 4.5742, + "step": 3541 + }, + { + "epoch": 0.3458984375, + "grad_norm": 0.22714461386203766, + "learning_rate": 0.0003857563438200514, + "loss": 4.5664, + "step": 3542 + }, + { + "epoch": 0.34599609375, + "grad_norm": 0.2949637174606323, + "learning_rate": 0.0003856953585301871, + "loss": 4.5703, + "step": 3543 + }, + { + "epoch": 0.34609375, + "grad_norm": 0.25250017642974854, + "learning_rate": 0.00038563436250915907, + "loss": 4.5547, + "step": 3544 + }, + { + "epoch": 0.34619140625, + "grad_norm": 0.2355991005897522, + "learning_rate": 0.00038557335576288053, + "loss": 4.5664, + "step": 3545 + }, + { + "epoch": 0.3462890625, + "grad_norm": 0.20867271721363068, + "learning_rate": 0.0003855123382972656, + "loss": 4.5625, + "step": 3546 + }, + { + "epoch": 0.34638671875, + "grad_norm": 0.2555949091911316, + "learning_rate": 0.0003854513101182295, + "loss": 4.5898, + "step": 3547 + }, + { + "epoch": 0.346484375, + "grad_norm": 0.26140639185905457, + "learning_rate": 0.0003853902712316886, + "loss": 4.5977, + "step": 3548 
+ }, + { + "epoch": 0.34658203125, + "grad_norm": 0.2133144587278366, + "learning_rate": 0.00038532922164356, + "loss": 4.6016, + "step": 3549 + }, + { + "epoch": 0.3466796875, + "grad_norm": 0.2101951241493225, + "learning_rate": 0.0003852681613597622, + "loss": 4.5859, + "step": 3550 + }, + { + "epoch": 0.34677734375, + "grad_norm": 0.22700613737106323, + "learning_rate": 0.00038520709038621467, + "loss": 4.5781, + "step": 3551 + }, + { + "epoch": 0.346875, + "grad_norm": 0.2154964953660965, + "learning_rate": 0.0003851460087288376, + "loss": 4.5938, + "step": 3552 + }, + { + "epoch": 0.34697265625, + "grad_norm": 0.20858052372932434, + "learning_rate": 0.0003850849163935525, + "loss": 4.5703, + "step": 3553 + }, + { + "epoch": 0.3470703125, + "grad_norm": 0.23127691447734833, + "learning_rate": 0.000385023813386282, + "loss": 4.5664, + "step": 3554 + }, + { + "epoch": 0.34716796875, + "grad_norm": 0.25448426604270935, + "learning_rate": 0.0003849626997129495, + "loss": 4.5742, + "step": 3555 + }, + { + "epoch": 0.347265625, + "grad_norm": 0.23418036103248596, + "learning_rate": 0.00038490157537947957, + "loss": 4.5703, + "step": 3556 + }, + { + "epoch": 0.34736328125, + "grad_norm": 0.21315982937812805, + "learning_rate": 0.0003848404403917978, + "loss": 4.5547, + "step": 3557 + }, + { + "epoch": 0.3474609375, + "grad_norm": 0.23820677399635315, + "learning_rate": 0.00038477929475583077, + "loss": 4.5742, + "step": 3558 + }, + { + "epoch": 0.34755859375, + "grad_norm": 0.1902332603931427, + "learning_rate": 0.0003847181384775063, + "loss": 4.5625, + "step": 3559 + }, + { + "epoch": 0.34765625, + "grad_norm": 0.21873919665813446, + "learning_rate": 0.000384656971562753, + "loss": 4.5938, + "step": 3560 + }, + { + "epoch": 0.34775390625, + "grad_norm": 0.22429879009723663, + "learning_rate": 0.0003845957940175004, + "loss": 4.5469, + "step": 3561 + }, + { + "epoch": 0.3478515625, + "grad_norm": 0.2322203516960144, + "learning_rate": 0.00038453460584767944, + "loss": 4.5898, + "step": 3562 + }, + { + "epoch": 0.34794921875, + "grad_norm": 0.20571021735668182, + "learning_rate": 0.0003844734070592219, + "loss": 4.5469, + "step": 3563 + }, + { + "epoch": 0.348046875, + "grad_norm": 0.6549842953681946, + "learning_rate": 0.0003844121976580606, + "loss": 4.5781, + "step": 3564 + }, + { + "epoch": 0.34814453125, + "grad_norm": 0.20868469774723053, + "learning_rate": 0.0003843509776501292, + "loss": 4.5781, + "step": 3565 + }, + { + "epoch": 0.3482421875, + "grad_norm": 0.2354210615158081, + "learning_rate": 0.0003842897470413627, + "loss": 4.582, + "step": 3566 + }, + { + "epoch": 0.34833984375, + "grad_norm": 0.22508767247200012, + "learning_rate": 0.00038422850583769705, + "loss": 4.6172, + "step": 3567 + }, + { + "epoch": 0.3484375, + "grad_norm": 0.23083415627479553, + "learning_rate": 0.00038416725404506904, + "loss": 4.5781, + "step": 3568 + }, + { + "epoch": 0.34853515625, + "grad_norm": 0.23082439601421356, + "learning_rate": 0.00038410599166941665, + "loss": 4.5859, + "step": 3569 + }, + { + "epoch": 0.3486328125, + "grad_norm": 0.25076547265052795, + "learning_rate": 0.0003840447187166789, + "loss": 4.5781, + "step": 3570 + }, + { + "epoch": 0.34873046875, + "grad_norm": 0.28675323724746704, + "learning_rate": 0.00038398343519279577, + "loss": 4.5664, + "step": 3571 + }, + { + "epoch": 0.348828125, + "grad_norm": 0.32859349250793457, + "learning_rate": 0.00038392214110370834, + "loss": 4.5625, + "step": 3572 + }, + { + "epoch": 0.34892578125, + "grad_norm": 0.3290226459503174, + 
"learning_rate": 0.0003838608364553585, + "loss": 4.543, + "step": 3573 + }, + { + "epoch": 0.3490234375, + "grad_norm": 0.2829148769378662, + "learning_rate": 0.0003837995212536894, + "loss": 4.5859, + "step": 3574 + }, + { + "epoch": 0.34912109375, + "grad_norm": 0.22749172151088715, + "learning_rate": 0.0003837381955046452, + "loss": 4.5664, + "step": 3575 + }, + { + "epoch": 0.34921875, + "grad_norm": 0.2500250041484833, + "learning_rate": 0.0003836768592141709, + "loss": 4.5625, + "step": 3576 + }, + { + "epoch": 0.34931640625, + "grad_norm": 0.2913195490837097, + "learning_rate": 0.00038361551238821266, + "loss": 4.6055, + "step": 3577 + }, + { + "epoch": 0.3494140625, + "grad_norm": 0.2783503234386444, + "learning_rate": 0.0003835541550327177, + "loss": 4.5742, + "step": 3578 + }, + { + "epoch": 0.34951171875, + "grad_norm": 0.21718256175518036, + "learning_rate": 0.00038349278715363425, + "loss": 4.5352, + "step": 3579 + }, + { + "epoch": 0.349609375, + "grad_norm": 0.2519012689590454, + "learning_rate": 0.0003834314087569113, + "loss": 4.5703, + "step": 3580 + }, + { + "epoch": 0.34970703125, + "grad_norm": 0.256191611289978, + "learning_rate": 0.00038337001984849927, + "loss": 4.5469, + "step": 3581 + }, + { + "epoch": 0.3498046875, + "grad_norm": 0.27345380187034607, + "learning_rate": 0.0003833086204343493, + "loss": 4.5781, + "step": 3582 + }, + { + "epoch": 0.34990234375, + "grad_norm": 0.22184456884860992, + "learning_rate": 0.00038324721052041374, + "loss": 4.5742, + "step": 3583 + }, + { + "epoch": 0.35, + "grad_norm": 0.23122559487819672, + "learning_rate": 0.0003831857901126457, + "loss": 4.5859, + "step": 3584 + }, + { + "epoch": 0.35009765625, + "grad_norm": 0.2597074806690216, + "learning_rate": 0.00038312435921699967, + "loss": 4.5703, + "step": 3585 + }, + { + "epoch": 0.3501953125, + "grad_norm": 0.24130751192569733, + "learning_rate": 0.00038306291783943075, + "loss": 4.5703, + "step": 3586 + }, + { + "epoch": 0.35029296875, + "grad_norm": 0.21051934361457825, + "learning_rate": 0.0003830014659858954, + "loss": 4.582, + "step": 3587 + }, + { + "epoch": 0.350390625, + "grad_norm": 0.25560328364372253, + "learning_rate": 0.00038294000366235093, + "loss": 4.5586, + "step": 3588 + }, + { + "epoch": 0.35048828125, + "grad_norm": 0.27199187874794006, + "learning_rate": 0.0003828785308747558, + "loss": 4.5703, + "step": 3589 + }, + { + "epoch": 0.3505859375, + "grad_norm": 0.22881963849067688, + "learning_rate": 0.0003828170476290692, + "loss": 4.5508, + "step": 3590 + }, + { + "epoch": 0.35068359375, + "grad_norm": 0.24479630589485168, + "learning_rate": 0.0003827555539312515, + "loss": 4.5664, + "step": 3591 + }, + { + "epoch": 0.35078125, + "grad_norm": 0.20475630462169647, + "learning_rate": 0.00038269404978726426, + "loss": 4.5742, + "step": 3592 + }, + { + "epoch": 0.35087890625, + "grad_norm": 0.23750877380371094, + "learning_rate": 0.00038263253520306984, + "loss": 4.5703, + "step": 3593 + }, + { + "epoch": 0.3509765625, + "grad_norm": 0.2701278328895569, + "learning_rate": 0.00038257101018463163, + "loss": 4.5898, + "step": 3594 + }, + { + "epoch": 0.35107421875, + "grad_norm": 0.2329765260219574, + "learning_rate": 0.000382509474737914, + "loss": 4.582, + "step": 3595 + }, + { + "epoch": 0.351171875, + "grad_norm": 0.19534918665885925, + "learning_rate": 0.00038244792886888253, + "loss": 4.5664, + "step": 3596 + }, + { + "epoch": 0.35126953125, + "grad_norm": 0.22755050659179688, + "learning_rate": 0.0003823863725835036, + "loss": 4.5977, + "step": 3597 + }, + 
{ + "epoch": 0.3513671875, + "grad_norm": 0.2614189684391022, + "learning_rate": 0.0003823248058877447, + "loss": 4.5781, + "step": 3598 + }, + { + "epoch": 0.35146484375, + "grad_norm": 0.2122533768415451, + "learning_rate": 0.00038226322878757426, + "loss": 4.5859, + "step": 3599 + }, + { + "epoch": 0.3515625, + "grad_norm": 0.23295137286186218, + "learning_rate": 0.00038220164128896175, + "loss": 4.6055, + "step": 3600 + }, + { + "epoch": 0.35166015625, + "grad_norm": 0.2661930322647095, + "learning_rate": 0.0003821400433978778, + "loss": 4.5703, + "step": 3601 + }, + { + "epoch": 0.3517578125, + "grad_norm": 0.2844511568546295, + "learning_rate": 0.00038207843512029374, + "loss": 4.5703, + "step": 3602 + }, + { + "epoch": 0.35185546875, + "grad_norm": 0.2329130619764328, + "learning_rate": 0.0003820168164621821, + "loss": 4.5703, + "step": 3603 + }, + { + "epoch": 0.351953125, + "grad_norm": 0.21032951772212982, + "learning_rate": 0.00038195518742951647, + "loss": 4.5742, + "step": 3604 + }, + { + "epoch": 0.35205078125, + "grad_norm": 0.24826619029045105, + "learning_rate": 0.0003818935480282713, + "loss": 4.5586, + "step": 3605 + }, + { + "epoch": 0.3521484375, + "grad_norm": 0.2593922019004822, + "learning_rate": 0.00038183189826442217, + "loss": 4.5898, + "step": 3606 + }, + { + "epoch": 0.35224609375, + "grad_norm": 0.24321337044239044, + "learning_rate": 0.00038177023814394556, + "loss": 4.5547, + "step": 3607 + }, + { + "epoch": 0.35234375, + "grad_norm": 0.25784748792648315, + "learning_rate": 0.000381708567672819, + "loss": 4.5625, + "step": 3608 + }, + { + "epoch": 0.35244140625, + "grad_norm": 0.24851174652576447, + "learning_rate": 0.00038164688685702106, + "loss": 4.5859, + "step": 3609 + }, + { + "epoch": 0.3525390625, + "grad_norm": 0.21535657346248627, + "learning_rate": 0.00038158519570253126, + "loss": 4.543, + "step": 3610 + }, + { + "epoch": 0.35263671875, + "grad_norm": 0.22795481979846954, + "learning_rate": 0.0003815234942153301, + "loss": 4.5586, + "step": 3611 + }, + { + "epoch": 0.352734375, + "grad_norm": 0.2578272819519043, + "learning_rate": 0.0003814617824013992, + "loss": 4.6016, + "step": 3612 + }, + { + "epoch": 0.35283203125, + "grad_norm": 0.2736850380897522, + "learning_rate": 0.00038140006026672103, + "loss": 4.5625, + "step": 3613 + }, + { + "epoch": 0.3529296875, + "grad_norm": 0.30361953377723694, + "learning_rate": 0.00038133832781727913, + "loss": 4.5703, + "step": 3614 + }, + { + "epoch": 0.35302734375, + "grad_norm": 0.28210848569869995, + "learning_rate": 0.0003812765850590581, + "loss": 4.582, + "step": 3615 + }, + { + "epoch": 0.353125, + "grad_norm": 0.2924845814704895, + "learning_rate": 0.0003812148319980435, + "loss": 4.5664, + "step": 3616 + }, + { + "epoch": 0.35322265625, + "grad_norm": 0.21510456502437592, + "learning_rate": 0.0003811530686402217, + "loss": 4.5781, + "step": 3617 + }, + { + "epoch": 0.3533203125, + "grad_norm": 0.21807216107845306, + "learning_rate": 0.00038109129499158044, + "loss": 4.5664, + "step": 3618 + }, + { + "epoch": 0.35341796875, + "grad_norm": 0.2759113907814026, + "learning_rate": 0.00038102951105810817, + "loss": 4.5625, + "step": 3619 + }, + { + "epoch": 0.353515625, + "grad_norm": 0.32082703709602356, + "learning_rate": 0.0003809677168457944, + "loss": 4.5742, + "step": 3620 + }, + { + "epoch": 0.35361328125, + "grad_norm": 0.2792191505432129, + "learning_rate": 0.00038090591236062966, + "loss": 4.5703, + "step": 3621 + }, + { + "epoch": 0.3537109375, + "grad_norm": 0.19946914911270142, + 
"learning_rate": 0.0003808440976086056, + "loss": 4.582, + "step": 3622 + }, + { + "epoch": 0.35380859375, + "grad_norm": 0.2088184952735901, + "learning_rate": 0.0003807822725957146, + "loss": 4.5703, + "step": 3623 + }, + { + "epoch": 0.35390625, + "grad_norm": 0.2543959617614746, + "learning_rate": 0.00038072043732795014, + "loss": 4.5859, + "step": 3624 + }, + { + "epoch": 0.35400390625, + "grad_norm": 0.2538624107837677, + "learning_rate": 0.00038065859181130686, + "loss": 4.5859, + "step": 3625 + }, + { + "epoch": 0.3541015625, + "grad_norm": 0.2285611480474472, + "learning_rate": 0.0003805967360517802, + "loss": 4.5938, + "step": 3626 + }, + { + "epoch": 0.35419921875, + "grad_norm": 0.21131369471549988, + "learning_rate": 0.00038053487005536675, + "loss": 4.5625, + "step": 3627 + }, + { + "epoch": 0.354296875, + "grad_norm": 0.2784635126590729, + "learning_rate": 0.0003804729938280638, + "loss": 4.5938, + "step": 3628 + }, + { + "epoch": 0.35439453125, + "grad_norm": 0.279033362865448, + "learning_rate": 0.00038041110737587, + "loss": 4.5547, + "step": 3629 + }, + { + "epoch": 0.3544921875, + "grad_norm": 0.24487648904323578, + "learning_rate": 0.00038034921070478477, + "loss": 4.5312, + "step": 3630 + }, + { + "epoch": 0.35458984375, + "grad_norm": 0.1987142711877823, + "learning_rate": 0.0003802873038208086, + "loss": 4.5664, + "step": 3631 + }, + { + "epoch": 0.3546875, + "grad_norm": 0.2294733226299286, + "learning_rate": 0.00038022538672994287, + "loss": 4.5586, + "step": 3632 + }, + { + "epoch": 0.35478515625, + "grad_norm": 0.2343548834323883, + "learning_rate": 0.00038016345943819, + "loss": 4.5625, + "step": 3633 + }, + { + "epoch": 0.3548828125, + "grad_norm": 0.24301011860370636, + "learning_rate": 0.0003801015219515536, + "loss": 4.5508, + "step": 3634 + }, + { + "epoch": 0.35498046875, + "grad_norm": 0.23225438594818115, + "learning_rate": 0.00038003957427603795, + "loss": 4.5859, + "step": 3635 + }, + { + "epoch": 0.355078125, + "grad_norm": 0.27709367871284485, + "learning_rate": 0.00037997761641764846, + "loss": 4.5898, + "step": 3636 + }, + { + "epoch": 0.35517578125, + "grad_norm": 0.279118150472641, + "learning_rate": 0.00037991564838239154, + "loss": 4.5859, + "step": 3637 + }, + { + "epoch": 0.3552734375, + "grad_norm": 0.22382718324661255, + "learning_rate": 0.00037985367017627454, + "loss": 4.5547, + "step": 3638 + }, + { + "epoch": 0.35537109375, + "grad_norm": 0.20952828228473663, + "learning_rate": 0.000379791681805306, + "loss": 4.5977, + "step": 3639 + }, + { + "epoch": 0.35546875, + "grad_norm": 0.2446649968624115, + "learning_rate": 0.000379729683275495, + "loss": 4.5586, + "step": 3640 + }, + { + "epoch": 0.35556640625, + "grad_norm": 0.2754228413105011, + "learning_rate": 0.00037966767459285194, + "loss": 4.5664, + "step": 3641 + }, + { + "epoch": 0.3556640625, + "grad_norm": 0.27979210019111633, + "learning_rate": 0.00037960565576338834, + "loss": 4.5781, + "step": 3642 + }, + { + "epoch": 0.35576171875, + "grad_norm": 0.2776656150817871, + "learning_rate": 0.0003795436267931163, + "loss": 4.5781, + "step": 3643 + }, + { + "epoch": 0.355859375, + "grad_norm": 0.1969219148159027, + "learning_rate": 0.0003794815876880493, + "loss": 4.5508, + "step": 3644 + }, + { + "epoch": 0.35595703125, + "grad_norm": 0.22536104917526245, + "learning_rate": 0.0003794195384542014, + "loss": 4.5547, + "step": 3645 + }, + { + "epoch": 0.3560546875, + "grad_norm": 0.2419748455286026, + "learning_rate": 0.0003793574790975879, + "loss": 4.5547, + "step": 3646 + }, + { + 
"epoch": 0.35615234375, + "grad_norm": 0.22893331944942474, + "learning_rate": 0.00037929540962422514, + "loss": 4.5625, + "step": 3647 + }, + { + "epoch": 0.35625, + "grad_norm": 0.22302180528640747, + "learning_rate": 0.00037923333004013035, + "loss": 4.5508, + "step": 3648 + }, + { + "epoch": 0.35634765625, + "grad_norm": 0.20919984579086304, + "learning_rate": 0.00037917124035132156, + "loss": 4.543, + "step": 3649 + }, + { + "epoch": 0.3564453125, + "grad_norm": 0.19446073472499847, + "learning_rate": 0.00037910914056381794, + "loss": 4.5586, + "step": 3650 + }, + { + "epoch": 0.35654296875, + "grad_norm": 0.20676352083683014, + "learning_rate": 0.00037904703068363996, + "loss": 4.5664, + "step": 3651 + }, + { + "epoch": 0.356640625, + "grad_norm": 0.21957850456237793, + "learning_rate": 0.00037898491071680835, + "loss": 4.5898, + "step": 3652 + }, + { + "epoch": 0.35673828125, + "grad_norm": 0.246882826089859, + "learning_rate": 0.0003789227806693454, + "loss": 4.5469, + "step": 3653 + }, + { + "epoch": 0.3568359375, + "grad_norm": 0.25299644470214844, + "learning_rate": 0.0003788606405472742, + "loss": 4.6094, + "step": 3654 + }, + { + "epoch": 0.35693359375, + "grad_norm": 0.21891558170318604, + "learning_rate": 0.0003787984903566188, + "loss": 4.5391, + "step": 3655 + }, + { + "epoch": 0.35703125, + "grad_norm": 0.2169588804244995, + "learning_rate": 0.00037873633010340437, + "loss": 4.5898, + "step": 3656 + }, + { + "epoch": 0.35712890625, + "grad_norm": 0.19887502491474152, + "learning_rate": 0.00037867415979365673, + "loss": 4.582, + "step": 3657 + }, + { + "epoch": 0.3572265625, + "grad_norm": 0.23183923959732056, + "learning_rate": 0.00037861197943340287, + "loss": 4.5898, + "step": 3658 + }, + { + "epoch": 0.35732421875, + "grad_norm": 0.2397049069404602, + "learning_rate": 0.00037854978902867085, + "loss": 4.5781, + "step": 3659 + }, + { + "epoch": 0.357421875, + "grad_norm": 0.2595348060131073, + "learning_rate": 0.00037848758858548953, + "loss": 4.5703, + "step": 3660 + }, + { + "epoch": 0.35751953125, + "grad_norm": 0.29750970005989075, + "learning_rate": 0.000378425378109889, + "loss": 4.5781, + "step": 3661 + }, + { + "epoch": 0.3576171875, + "grad_norm": 0.2562835216522217, + "learning_rate": 0.0003783631576078999, + "loss": 4.5938, + "step": 3662 + }, + { + "epoch": 0.35771484375, + "grad_norm": 0.23170508444309235, + "learning_rate": 0.0003783009270855542, + "loss": 4.5938, + "step": 3663 + }, + { + "epoch": 0.3578125, + "grad_norm": 0.20134687423706055, + "learning_rate": 0.0003782386865488848, + "loss": 4.5703, + "step": 3664 + }, + { + "epoch": 0.35791015625, + "grad_norm": 0.23882128298282623, + "learning_rate": 0.0003781764360039253, + "loss": 4.5859, + "step": 3665 + }, + { + "epoch": 0.3580078125, + "grad_norm": 0.29110950231552124, + "learning_rate": 0.0003781141754567107, + "loss": 4.5547, + "step": 3666 + }, + { + "epoch": 0.35810546875, + "grad_norm": 0.2600801885128021, + "learning_rate": 0.00037805190491327665, + "loss": 4.5469, + "step": 3667 + }, + { + "epoch": 0.358203125, + "grad_norm": 0.22020958364009857, + "learning_rate": 0.00037798962437965975, + "loss": 4.5547, + "step": 3668 + }, + { + "epoch": 0.35830078125, + "grad_norm": 0.23662874102592468, + "learning_rate": 0.00037792733386189783, + "loss": 4.5742, + "step": 3669 + }, + { + "epoch": 0.3583984375, + "grad_norm": 0.239448681473732, + "learning_rate": 0.0003778650333660294, + "loss": 4.5547, + "step": 3670 + }, + { + "epoch": 0.35849609375, + "grad_norm": 0.36843615770339966, + 
"learning_rate": 0.0003778027228980942, + "loss": 4.5586, + "step": 3671 + }, + { + "epoch": 0.35859375, + "grad_norm": 0.24178937077522278, + "learning_rate": 0.00037774040246413266, + "loss": 4.5859, + "step": 3672 + }, + { + "epoch": 0.35869140625, + "grad_norm": 0.2356576919555664, + "learning_rate": 0.00037767807207018646, + "loss": 4.5469, + "step": 3673 + }, + { + "epoch": 0.3587890625, + "grad_norm": 0.21717901527881622, + "learning_rate": 0.000377615731722298, + "loss": 4.5742, + "step": 3674 + }, + { + "epoch": 0.35888671875, + "grad_norm": 0.265219122171402, + "learning_rate": 0.0003775533814265108, + "loss": 4.5938, + "step": 3675 + }, + { + "epoch": 0.358984375, + "grad_norm": 0.2896675169467926, + "learning_rate": 0.00037749102118886943, + "loss": 4.5859, + "step": 3676 + }, + { + "epoch": 0.35908203125, + "grad_norm": 0.24136193096637726, + "learning_rate": 0.000377428651015419, + "loss": 4.5781, + "step": 3677 + }, + { + "epoch": 0.3591796875, + "grad_norm": 0.24482491612434387, + "learning_rate": 0.00037736627091220615, + "loss": 4.5703, + "step": 3678 + }, + { + "epoch": 0.35927734375, + "grad_norm": 0.25840532779693604, + "learning_rate": 0.000377303880885278, + "loss": 4.5703, + "step": 3679 + }, + { + "epoch": 0.359375, + "grad_norm": 0.21277597546577454, + "learning_rate": 0.00037724148094068295, + "loss": 4.5703, + "step": 3680 + }, + { + "epoch": 0.35947265625, + "grad_norm": 0.2434084266424179, + "learning_rate": 0.00037717907108447034, + "loss": 4.6016, + "step": 3681 + }, + { + "epoch": 0.3595703125, + "grad_norm": 0.2801218330860138, + "learning_rate": 0.0003771166513226902, + "loss": 4.5703, + "step": 3682 + }, + { + "epoch": 0.35966796875, + "grad_norm": 0.29672127962112427, + "learning_rate": 0.0003770542216613938, + "loss": 4.5234, + "step": 3683 + }, + { + "epoch": 0.359765625, + "grad_norm": 0.2951870560646057, + "learning_rate": 0.0003769917821066333, + "loss": 4.582, + "step": 3684 + }, + { + "epoch": 0.35986328125, + "grad_norm": 0.26624128222465515, + "learning_rate": 0.00037692933266446165, + "loss": 4.5938, + "step": 3685 + }, + { + "epoch": 0.3599609375, + "grad_norm": 0.23350806534290314, + "learning_rate": 0.00037686687334093305, + "loss": 4.5781, + "step": 3686 + }, + { + "epoch": 0.36005859375, + "grad_norm": 0.21884194016456604, + "learning_rate": 0.0003768044041421025, + "loss": 4.5352, + "step": 3687 + }, + { + "epoch": 0.36015625, + "grad_norm": 0.2768612205982208, + "learning_rate": 0.00037674192507402584, + "loss": 4.5781, + "step": 3688 + }, + { + "epoch": 0.36025390625, + "grad_norm": 0.27278292179107666, + "learning_rate": 0.0003766794361427601, + "loss": 4.5156, + "step": 3689 + }, + { + "epoch": 0.3603515625, + "grad_norm": 0.2551245391368866, + "learning_rate": 0.00037661693735436316, + "loss": 4.5664, + "step": 3690 + }, + { + "epoch": 0.36044921875, + "grad_norm": 0.22481122612953186, + "learning_rate": 0.0003765544287148937, + "loss": 4.5703, + "step": 3691 + }, + { + "epoch": 0.360546875, + "grad_norm": 0.2237016260623932, + "learning_rate": 0.0003764919102304117, + "loss": 4.5625, + "step": 3692 + }, + { + "epoch": 0.36064453125, + "grad_norm": 0.27385836839675903, + "learning_rate": 0.00037642938190697784, + "loss": 4.5742, + "step": 3693 + }, + { + "epoch": 0.3607421875, + "grad_norm": 0.30567967891693115, + "learning_rate": 0.0003763668437506538, + "loss": 4.5859, + "step": 3694 + }, + { + "epoch": 0.36083984375, + "grad_norm": 0.282015860080719, + "learning_rate": 0.0003763042957675023, + "loss": 4.5469, + "step": 3695 + }, 
+ { + "epoch": 0.3609375, + "grad_norm": 0.20504769682884216, + "learning_rate": 0.00037624173796358686, + "loss": 4.5586, + "step": 3696 + }, + { + "epoch": 0.36103515625, + "grad_norm": 0.2437128871679306, + "learning_rate": 0.00037617917034497194, + "loss": 4.582, + "step": 3697 + }, + { + "epoch": 0.3611328125, + "grad_norm": 0.2824758291244507, + "learning_rate": 0.0003761165929177233, + "loss": 4.5625, + "step": 3698 + }, + { + "epoch": 0.36123046875, + "grad_norm": 0.2728320360183716, + "learning_rate": 0.00037605400568790714, + "loss": 4.5703, + "step": 3699 + }, + { + "epoch": 0.361328125, + "grad_norm": 0.23510177433490753, + "learning_rate": 0.00037599140866159106, + "loss": 4.5859, + "step": 3700 + }, + { + "epoch": 0.36142578125, + "grad_norm": 0.26374226808547974, + "learning_rate": 0.0003759288018448433, + "loss": 4.5898, + "step": 3701 + }, + { + "epoch": 0.3615234375, + "grad_norm": 0.2697891891002655, + "learning_rate": 0.0003758661852437332, + "loss": 4.5664, + "step": 3702 + }, + { + "epoch": 0.36162109375, + "grad_norm": 0.26423609256744385, + "learning_rate": 0.00037580355886433114, + "loss": 4.5586, + "step": 3703 + }, + { + "epoch": 0.36171875, + "grad_norm": 0.2405981421470642, + "learning_rate": 0.00037574092271270806, + "loss": 4.5391, + "step": 3704 + }, + { + "epoch": 0.36181640625, + "grad_norm": 0.27411630749702454, + "learning_rate": 0.0003756782767949363, + "loss": 4.5742, + "step": 3705 + }, + { + "epoch": 0.3619140625, + "grad_norm": 0.31632712483406067, + "learning_rate": 0.0003756156211170889, + "loss": 4.6211, + "step": 3706 + }, + { + "epoch": 0.36201171875, + "grad_norm": 0.32672661542892456, + "learning_rate": 0.00037555295568523995, + "loss": 4.5898, + "step": 3707 + }, + { + "epoch": 0.362109375, + "grad_norm": 0.22755952179431915, + "learning_rate": 0.0003754902805054644, + "loss": 4.582, + "step": 3708 + }, + { + "epoch": 0.36220703125, + "grad_norm": 0.24091464281082153, + "learning_rate": 0.00037542759558383817, + "loss": 4.5898, + "step": 3709 + }, + { + "epoch": 0.3623046875, + "grad_norm": 0.31097692251205444, + "learning_rate": 0.0003753649009264381, + "loss": 4.5859, + "step": 3710 + }, + { + "epoch": 0.36240234375, + "grad_norm": 0.213272824883461, + "learning_rate": 0.0003753021965393421, + "loss": 4.5781, + "step": 3711 + }, + { + "epoch": 0.3625, + "grad_norm": 0.2237333357334137, + "learning_rate": 0.000375239482428629, + "loss": 4.5703, + "step": 3712 + }, + { + "epoch": 0.36259765625, + "grad_norm": 0.2189137488603592, + "learning_rate": 0.0003751767586003783, + "loss": 4.5586, + "step": 3713 + }, + { + "epoch": 0.3626953125, + "grad_norm": 0.22572632133960724, + "learning_rate": 0.00037511402506067073, + "loss": 4.5508, + "step": 3714 + }, + { + "epoch": 0.36279296875, + "grad_norm": 0.19613827764987946, + "learning_rate": 0.00037505128181558795, + "loss": 4.5703, + "step": 3715 + }, + { + "epoch": 0.362890625, + "grad_norm": 0.23606157302856445, + "learning_rate": 0.00037498852887121243, + "loss": 4.5547, + "step": 3716 + }, + { + "epoch": 0.36298828125, + "grad_norm": 0.23252497613430023, + "learning_rate": 0.00037492576623362773, + "loss": 4.5508, + "step": 3717 + }, + { + "epoch": 0.3630859375, + "grad_norm": 0.22681549191474915, + "learning_rate": 0.0003748629939089181, + "loss": 4.5898, + "step": 3718 + }, + { + "epoch": 0.36318359375, + "grad_norm": 0.2215224802494049, + "learning_rate": 0.00037480021190316906, + "loss": 4.5781, + "step": 3719 + }, + { + "epoch": 0.36328125, + "grad_norm": 0.20096111297607422, + 
"learning_rate": 0.0003747374202224668, + "loss": 4.5703, + "step": 3720 + }, + { + "epoch": 0.36337890625, + "grad_norm": 0.21084189414978027, + "learning_rate": 0.0003746746188728985, + "loss": 4.5547, + "step": 3721 + }, + { + "epoch": 0.3634765625, + "grad_norm": 0.24456515908241272, + "learning_rate": 0.00037461180786055244, + "loss": 4.543, + "step": 3722 + }, + { + "epoch": 0.36357421875, + "grad_norm": 0.21941445767879486, + "learning_rate": 0.00037454898719151765, + "loss": 4.5469, + "step": 3723 + }, + { + "epoch": 0.363671875, + "grad_norm": 0.23304468393325806, + "learning_rate": 0.00037448615687188424, + "loss": 4.5664, + "step": 3724 + }, + { + "epoch": 0.36376953125, + "grad_norm": 0.23572897911071777, + "learning_rate": 0.00037442331690774313, + "loss": 4.5781, + "step": 3725 + }, + { + "epoch": 0.3638671875, + "grad_norm": 0.22356674075126648, + "learning_rate": 0.0003743604673051862, + "loss": 4.6016, + "step": 3726 + }, + { + "epoch": 0.36396484375, + "grad_norm": 0.2362370789051056, + "learning_rate": 0.00037429760807030634, + "loss": 4.5625, + "step": 3727 + }, + { + "epoch": 0.3640625, + "grad_norm": 0.26695024967193604, + "learning_rate": 0.00037423473920919736, + "loss": 4.582, + "step": 3728 + }, + { + "epoch": 0.36416015625, + "grad_norm": 0.2637350261211395, + "learning_rate": 0.0003741718607279539, + "loss": 4.6172, + "step": 3729 + }, + { + "epoch": 0.3642578125, + "grad_norm": 0.22944873571395874, + "learning_rate": 0.0003741089726326716, + "loss": 4.5547, + "step": 3730 + }, + { + "epoch": 0.36435546875, + "grad_norm": 0.21376579999923706, + "learning_rate": 0.00037404607492944704, + "loss": 4.5391, + "step": 3731 + }, + { + "epoch": 0.364453125, + "grad_norm": 0.21306632459163666, + "learning_rate": 0.0003739831676243778, + "loss": 4.5781, + "step": 3732 + }, + { + "epoch": 0.36455078125, + "grad_norm": 0.24601516127586365, + "learning_rate": 0.0003739202507235622, + "loss": 4.5703, + "step": 3733 + }, + { + "epoch": 0.3646484375, + "grad_norm": 0.2682146728038788, + "learning_rate": 0.0003738573242330997, + "loss": 4.5703, + "step": 3734 + }, + { + "epoch": 0.36474609375, + "grad_norm": 0.2349218726158142, + "learning_rate": 0.0003737943881590906, + "loss": 4.5547, + "step": 3735 + }, + { + "epoch": 0.36484375, + "grad_norm": 0.216025248169899, + "learning_rate": 0.00037373144250763603, + "loss": 4.5508, + "step": 3736 + }, + { + "epoch": 0.36494140625, + "grad_norm": 0.20661693811416626, + "learning_rate": 0.0003736684872848382, + "loss": 4.6016, + "step": 3737 + }, + { + "epoch": 0.3650390625, + "grad_norm": 0.2149045169353485, + "learning_rate": 0.0003736055224968002, + "loss": 4.5664, + "step": 3738 + }, + { + "epoch": 0.36513671875, + "grad_norm": 0.22369985282421112, + "learning_rate": 0.00037354254814962604, + "loss": 4.5625, + "step": 3739 + }, + { + "epoch": 0.365234375, + "grad_norm": 0.22899076342582703, + "learning_rate": 0.0003734795642494206, + "loss": 4.5938, + "step": 3740 + }, + { + "epoch": 0.36533203125, + "grad_norm": 0.21144069731235504, + "learning_rate": 0.00037341657080228993, + "loss": 4.5508, + "step": 3741 + }, + { + "epoch": 0.3654296875, + "grad_norm": 0.22601349651813507, + "learning_rate": 0.00037335356781434056, + "loss": 4.5859, + "step": 3742 + }, + { + "epoch": 0.36552734375, + "grad_norm": 0.20910894870758057, + "learning_rate": 0.0003732905552916803, + "loss": 4.582, + "step": 3743 + }, + { + "epoch": 0.365625, + "grad_norm": 0.21281714737415314, + "learning_rate": 0.00037322753324041787, + "loss": 4.5547, + "step": 3744 
+ }, + { + "epoch": 0.36572265625, + "grad_norm": 0.2001722753047943, + "learning_rate": 0.00037316450166666274, + "loss": 4.6016, + "step": 3745 + }, + { + "epoch": 0.3658203125, + "grad_norm": 0.22400717437267303, + "learning_rate": 0.00037310146057652546, + "loss": 4.5703, + "step": 3746 + }, + { + "epoch": 0.36591796875, + "grad_norm": 0.25107571482658386, + "learning_rate": 0.00037303840997611725, + "loss": 4.5273, + "step": 3747 + }, + { + "epoch": 0.366015625, + "grad_norm": 0.2537175714969635, + "learning_rate": 0.0003729753498715507, + "loss": 4.5664, + "step": 3748 + }, + { + "epoch": 0.36611328125, + "grad_norm": 0.24743777513504028, + "learning_rate": 0.00037291228026893895, + "loss": 4.5664, + "step": 3749 + }, + { + "epoch": 0.3662109375, + "grad_norm": 0.197499617934227, + "learning_rate": 0.0003728492011743961, + "loss": 4.5898, + "step": 3750 + }, + { + "epoch": 0.36630859375, + "grad_norm": 0.2100924700498581, + "learning_rate": 0.0003727861125940374, + "loss": 4.5625, + "step": 3751 + }, + { + "epoch": 0.36640625, + "grad_norm": 0.2345946580171585, + "learning_rate": 0.00037272301453397866, + "loss": 4.5977, + "step": 3752 + }, + { + "epoch": 0.36650390625, + "grad_norm": 0.22621718049049377, + "learning_rate": 0.00037265990700033693, + "loss": 4.582, + "step": 3753 + }, + { + "epoch": 0.3666015625, + "grad_norm": 0.23994408547878265, + "learning_rate": 0.0003725967899992301, + "loss": 4.5508, + "step": 3754 + }, + { + "epoch": 0.36669921875, + "grad_norm": 0.2587624192237854, + "learning_rate": 0.0003725336635367768, + "loss": 4.543, + "step": 3755 + }, + { + "epoch": 0.366796875, + "grad_norm": 0.24301886558532715, + "learning_rate": 0.00037247052761909676, + "loss": 4.5781, + "step": 3756 + }, + { + "epoch": 0.36689453125, + "grad_norm": 0.24546672403812408, + "learning_rate": 0.0003724073822523107, + "loss": 4.5508, + "step": 3757 + }, + { + "epoch": 0.3669921875, + "grad_norm": 0.2665095031261444, + "learning_rate": 0.00037234422744253994, + "loss": 4.5508, + "step": 3758 + }, + { + "epoch": 0.36708984375, + "grad_norm": 0.22079843282699585, + "learning_rate": 0.0003722810631959071, + "loss": 4.5664, + "step": 3759 + }, + { + "epoch": 0.3671875, + "grad_norm": 0.20941261947155, + "learning_rate": 0.00037221788951853537, + "loss": 4.5586, + "step": 3760 + }, + { + "epoch": 0.36728515625, + "grad_norm": 0.2306739240884781, + "learning_rate": 0.00037215470641654904, + "loss": 4.5508, + "step": 3761 + }, + { + "epoch": 0.3673828125, + "grad_norm": 0.23311462998390198, + "learning_rate": 0.0003720915138960734, + "loss": 4.5547, + "step": 3762 + }, + { + "epoch": 0.36748046875, + "grad_norm": 0.2108597308397293, + "learning_rate": 0.0003720283119632343, + "loss": 4.5508, + "step": 3763 + }, + { + "epoch": 0.367578125, + "grad_norm": 0.22533003985881805, + "learning_rate": 0.00037196510062415905, + "loss": 4.5664, + "step": 3764 + }, + { + "epoch": 0.36767578125, + "grad_norm": 0.1996348798274994, + "learning_rate": 0.0003719018798849753, + "loss": 4.5586, + "step": 3765 + }, + { + "epoch": 0.3677734375, + "grad_norm": 0.2132413536310196, + "learning_rate": 0.0003718386497518121, + "loss": 4.5664, + "step": 3766 + }, + { + "epoch": 0.36787109375, + "grad_norm": 0.21500487625598907, + "learning_rate": 0.0003717754102307989, + "loss": 4.5625, + "step": 3767 + }, + { + "epoch": 0.36796875, + "grad_norm": 0.2140580117702484, + "learning_rate": 0.00037171216132806664, + "loss": 4.5508, + "step": 3768 + }, + { + "epoch": 0.36806640625, + "grad_norm": 0.2556339204311371, + 
"learning_rate": 0.00037164890304974656, + "loss": 4.5898, + "step": 3769 + }, + { + "epoch": 0.3681640625, + "grad_norm": 0.25823596119880676, + "learning_rate": 0.00037158563540197145, + "loss": 4.5547, + "step": 3770 + }, + { + "epoch": 0.36826171875, + "grad_norm": 0.2550797462463379, + "learning_rate": 0.00037152235839087443, + "loss": 4.543, + "step": 3771 + }, + { + "epoch": 0.368359375, + "grad_norm": 0.24696817994117737, + "learning_rate": 0.0003714590720225898, + "loss": 4.5781, + "step": 3772 + }, + { + "epoch": 0.36845703125, + "grad_norm": 0.24360255897045135, + "learning_rate": 0.00037139577630325286, + "loss": 4.5469, + "step": 3773 + }, + { + "epoch": 0.3685546875, + "grad_norm": 0.21116487681865692, + "learning_rate": 0.0003713324712389997, + "loss": 4.5469, + "step": 3774 + }, + { + "epoch": 0.36865234375, + "grad_norm": 0.21986894309520721, + "learning_rate": 0.0003712691568359673, + "loss": 4.5508, + "step": 3775 + }, + { + "epoch": 0.36875, + "grad_norm": 0.22325943410396576, + "learning_rate": 0.00037120583310029344, + "loss": 4.5859, + "step": 3776 + }, + { + "epoch": 0.36884765625, + "grad_norm": 0.24600698053836823, + "learning_rate": 0.00037114250003811696, + "loss": 4.5781, + "step": 3777 + }, + { + "epoch": 0.3689453125, + "grad_norm": 0.2490389049053192, + "learning_rate": 0.00037107915765557774, + "loss": 4.5508, + "step": 3778 + }, + { + "epoch": 0.36904296875, + "grad_norm": 0.26818838715553284, + "learning_rate": 0.0003710158059588162, + "loss": 4.5352, + "step": 3779 + }, + { + "epoch": 0.369140625, + "grad_norm": 0.23392382264137268, + "learning_rate": 0.00037095244495397397, + "loss": 4.5781, + "step": 3780 + }, + { + "epoch": 0.36923828125, + "grad_norm": 0.21591094136238098, + "learning_rate": 0.0003708890746471934, + "loss": 4.5078, + "step": 3781 + }, + { + "epoch": 0.3693359375, + "grad_norm": 0.21013550460338593, + "learning_rate": 0.00037082569504461786, + "loss": 4.5508, + "step": 3782 + }, + { + "epoch": 0.36943359375, + "grad_norm": 0.2186015546321869, + "learning_rate": 0.0003707623061523916, + "loss": 4.5469, + "step": 3783 + }, + { + "epoch": 0.36953125, + "grad_norm": 0.23341798782348633, + "learning_rate": 0.0003706989079766597, + "loss": 4.5586, + "step": 3784 + }, + { + "epoch": 0.36962890625, + "grad_norm": 0.25021326541900635, + "learning_rate": 0.00037063550052356814, + "loss": 4.5898, + "step": 3785 + }, + { + "epoch": 0.3697265625, + "grad_norm": 0.22992654144763947, + "learning_rate": 0.0003705720837992638, + "loss": 4.5156, + "step": 3786 + }, + { + "epoch": 0.36982421875, + "grad_norm": 0.21779493987560272, + "learning_rate": 0.00037050865780989474, + "loss": 4.5547, + "step": 3787 + }, + { + "epoch": 0.369921875, + "grad_norm": 0.21143049001693726, + "learning_rate": 0.00037044522256160945, + "loss": 4.5781, + "step": 3788 + }, + { + "epoch": 0.37001953125, + "grad_norm": 0.20537066459655762, + "learning_rate": 0.0003703817780605577, + "loss": 4.582, + "step": 3789 + }, + { + "epoch": 0.3701171875, + "grad_norm": 0.2354755699634552, + "learning_rate": 0.0003703183243128898, + "loss": 4.5898, + "step": 3790 + }, + { + "epoch": 0.37021484375, + "grad_norm": 0.22520548105239868, + "learning_rate": 0.0003702548613247573, + "loss": 4.5586, + "step": 3791 + }, + { + "epoch": 0.3703125, + "grad_norm": 0.22800710797309875, + "learning_rate": 0.0003701913891023126, + "loss": 4.5703, + "step": 3792 + }, + { + "epoch": 0.37041015625, + "grad_norm": 0.21046526730060577, + "learning_rate": 0.00037012790765170866, + "loss": 4.5508, + "step": 
3793 + }, + { + "epoch": 0.3705078125, + "grad_norm": 0.21847796440124512, + "learning_rate": 0.00037006441697909977, + "loss": 4.5625, + "step": 3794 + }, + { + "epoch": 0.37060546875, + "grad_norm": 0.2048698216676712, + "learning_rate": 0.0003700009170906409, + "loss": 4.5859, + "step": 3795 + }, + { + "epoch": 0.370703125, + "grad_norm": 0.2066568285226822, + "learning_rate": 0.00036993740799248786, + "loss": 4.5469, + "step": 3796 + }, + { + "epoch": 0.37080078125, + "grad_norm": 0.20170116424560547, + "learning_rate": 0.0003698738896907975, + "loss": 4.5391, + "step": 3797 + }, + { + "epoch": 0.3708984375, + "grad_norm": 0.2126680612564087, + "learning_rate": 0.00036981036219172733, + "loss": 4.5625, + "step": 3798 + }, + { + "epoch": 0.37099609375, + "grad_norm": 0.21726283431053162, + "learning_rate": 0.00036974682550143615, + "loss": 4.5391, + "step": 3799 + }, + { + "epoch": 0.37109375, + "grad_norm": 0.20872974395751953, + "learning_rate": 0.0003696832796260832, + "loss": 4.5547, + "step": 3800 + }, + { + "epoch": 0.37119140625, + "grad_norm": 0.2360558956861496, + "learning_rate": 0.00036961972457182893, + "loss": 4.5625, + "step": 3801 + }, + { + "epoch": 0.3712890625, + "grad_norm": 0.2368108183145523, + "learning_rate": 0.0003695561603448345, + "loss": 4.5781, + "step": 3802 + }, + { + "epoch": 0.37138671875, + "grad_norm": 0.2909439206123352, + "learning_rate": 0.00036949258695126214, + "loss": 4.5703, + "step": 3803 + }, + { + "epoch": 0.371484375, + "grad_norm": 0.3347856402397156, + "learning_rate": 0.00036942900439727476, + "loss": 4.5547, + "step": 3804 + }, + { + "epoch": 0.37158203125, + "grad_norm": 0.3354850113391876, + "learning_rate": 0.00036936541268903634, + "loss": 4.543, + "step": 3805 + }, + { + "epoch": 0.3716796875, + "grad_norm": 0.2693799138069153, + "learning_rate": 0.0003693018118327116, + "loss": 4.5625, + "step": 3806 + }, + { + "epoch": 0.37177734375, + "grad_norm": 0.22655123472213745, + "learning_rate": 0.00036923820183446627, + "loss": 4.5508, + "step": 3807 + }, + { + "epoch": 0.371875, + "grad_norm": 0.2627026438713074, + "learning_rate": 0.0003691745827004668, + "loss": 4.5664, + "step": 3808 + }, + { + "epoch": 0.37197265625, + "grad_norm": 0.28225845098495483, + "learning_rate": 0.00036911095443688073, + "loss": 4.5391, + "step": 3809 + }, + { + "epoch": 0.3720703125, + "grad_norm": 0.25091806054115295, + "learning_rate": 0.0003690473170498764, + "loss": 4.5391, + "step": 3810 + }, + { + "epoch": 0.37216796875, + "grad_norm": 0.21855543553829193, + "learning_rate": 0.0003689836705456229, + "loss": 4.5664, + "step": 3811 + }, + { + "epoch": 0.372265625, + "grad_norm": 0.22446179389953613, + "learning_rate": 0.00036892001493029047, + "loss": 4.5586, + "step": 3812 + }, + { + "epoch": 0.37236328125, + "grad_norm": 0.2548200488090515, + "learning_rate": 0.0003688563502100501, + "loss": 4.5391, + "step": 3813 + }, + { + "epoch": 0.3724609375, + "grad_norm": 0.21699781715869904, + "learning_rate": 0.00036879267639107347, + "loss": 4.5352, + "step": 3814 + }, + { + "epoch": 0.37255859375, + "grad_norm": 0.23146621882915497, + "learning_rate": 0.0003687289934795335, + "loss": 4.5547, + "step": 3815 + }, + { + "epoch": 0.37265625, + "grad_norm": 0.2578572928905487, + "learning_rate": 0.0003686653014816037, + "loss": 4.5352, + "step": 3816 + }, + { + "epoch": 0.37275390625, + "grad_norm": 0.22417913377285004, + "learning_rate": 0.00036860160040345864, + "loss": 4.5742, + "step": 3817 + }, + { + "epoch": 0.3728515625, + "grad_norm": 0.2203618884086609, 
+ "learning_rate": 0.00036853789025127363, + "loss": 4.6094, + "step": 3818 + }, + { + "epoch": 0.37294921875, + "grad_norm": 0.2247747927904129, + "learning_rate": 0.00036847417103122513, + "loss": 4.543, + "step": 3819 + }, + { + "epoch": 0.373046875, + "grad_norm": 0.22150303423404694, + "learning_rate": 0.00036841044274949007, + "loss": 4.5625, + "step": 3820 + }, + { + "epoch": 0.37314453125, + "grad_norm": 0.20416736602783203, + "learning_rate": 0.0003683467054122467, + "loss": 4.5508, + "step": 3821 + }, + { + "epoch": 0.3732421875, + "grad_norm": 0.21138995885849, + "learning_rate": 0.00036828295902567365, + "loss": 4.5586, + "step": 3822 + }, + { + "epoch": 0.37333984375, + "grad_norm": 0.2108135223388672, + "learning_rate": 0.0003682192035959509, + "loss": 4.5742, + "step": 3823 + }, + { + "epoch": 0.3734375, + "grad_norm": 0.2110549807548523, + "learning_rate": 0.000368155439129259, + "loss": 4.5312, + "step": 3824 + }, + { + "epoch": 0.37353515625, + "grad_norm": 0.20648688077926636, + "learning_rate": 0.00036809166563177954, + "loss": 4.5508, + "step": 3825 + }, + { + "epoch": 0.3736328125, + "grad_norm": 0.2102973610162735, + "learning_rate": 0.0003680278831096949, + "loss": 4.5078, + "step": 3826 + }, + { + "epoch": 0.37373046875, + "grad_norm": 0.2176796793937683, + "learning_rate": 0.0003679640915691884, + "loss": 4.5469, + "step": 3827 + }, + { + "epoch": 0.373828125, + "grad_norm": 0.196877583861351, + "learning_rate": 0.0003679002910164441, + "loss": 4.5234, + "step": 3828 + }, + { + "epoch": 0.37392578125, + "grad_norm": 0.2039942443370819, + "learning_rate": 0.00036783648145764726, + "loss": 4.5898, + "step": 3829 + }, + { + "epoch": 0.3740234375, + "grad_norm": 0.19996030628681183, + "learning_rate": 0.0003677726628989835, + "loss": 4.5664, + "step": 3830 + }, + { + "epoch": 0.37412109375, + "grad_norm": 0.20580768585205078, + "learning_rate": 0.00036770883534663974, + "loss": 4.5273, + "step": 3831 + }, + { + "epoch": 0.37421875, + "grad_norm": 0.19358858466148376, + "learning_rate": 0.00036764499880680363, + "loss": 4.5742, + "step": 3832 + }, + { + "epoch": 0.37431640625, + "grad_norm": 0.19693578779697418, + "learning_rate": 0.00036758115328566364, + "loss": 4.5664, + "step": 3833 + }, + { + "epoch": 0.3744140625, + "grad_norm": 0.20373231172561646, + "learning_rate": 0.00036751729878940927, + "loss": 4.5938, + "step": 3834 + }, + { + "epoch": 0.37451171875, + "grad_norm": 0.19048473238945007, + "learning_rate": 0.00036745343532423066, + "loss": 4.5586, + "step": 3835 + }, + { + "epoch": 0.374609375, + "grad_norm": 0.19251121580600739, + "learning_rate": 0.00036738956289631894, + "loss": 4.5742, + "step": 3836 + }, + { + "epoch": 0.37470703125, + "grad_norm": 0.2328319251537323, + "learning_rate": 0.0003673256815118662, + "loss": 4.5469, + "step": 3837 + }, + { + "epoch": 0.3748046875, + "grad_norm": 0.24332408607006073, + "learning_rate": 0.0003672617911770651, + "loss": 4.5508, + "step": 3838 + }, + { + "epoch": 0.37490234375, + "grad_norm": 0.24300016462802887, + "learning_rate": 0.0003671978918981097, + "loss": 4.5508, + "step": 3839 + }, + { + "epoch": 0.375, + "grad_norm": 0.2735011577606201, + "learning_rate": 0.0003671339836811944, + "loss": 4.5977, + "step": 3840 + }, + { + "epoch": 0.37509765625, + "grad_norm": 0.24669654667377472, + "learning_rate": 0.0003670700665325147, + "loss": 4.5273, + "step": 3841 + }, + { + "epoch": 0.3751953125, + "grad_norm": 0.22697386145591736, + "learning_rate": 0.00036700614045826683, + "loss": 4.5742, + "step": 3842 + }, 
+ { + "epoch": 0.37529296875, + "grad_norm": 0.2029130756855011, + "learning_rate": 0.00036694220546464826, + "loss": 4.582, + "step": 3843 + }, + { + "epoch": 0.375390625, + "grad_norm": 0.22685472667217255, + "learning_rate": 0.0003668782615578567, + "loss": 4.5508, + "step": 3844 + }, + { + "epoch": 0.37548828125, + "grad_norm": 0.2411038726568222, + "learning_rate": 0.0003668143087440914, + "loss": 4.5273, + "step": 3845 + }, + { + "epoch": 0.3755859375, + "grad_norm": 0.2529904544353485, + "learning_rate": 0.00036675034702955196, + "loss": 4.5586, + "step": 3846 + }, + { + "epoch": 0.37568359375, + "grad_norm": 0.24757161736488342, + "learning_rate": 0.00036668637642043905, + "loss": 4.5898, + "step": 3847 + }, + { + "epoch": 0.37578125, + "grad_norm": 0.247783362865448, + "learning_rate": 0.0003666223969229543, + "loss": 4.5508, + "step": 3848 + }, + { + "epoch": 0.37587890625, + "grad_norm": 0.2276039570569992, + "learning_rate": 0.0003665584085432999, + "loss": 4.543, + "step": 3849 + }, + { + "epoch": 0.3759765625, + "grad_norm": 0.19936223328113556, + "learning_rate": 0.00036649441128767935, + "loss": 4.5625, + "step": 3850 + }, + { + "epoch": 0.37607421875, + "grad_norm": 0.21078261733055115, + "learning_rate": 0.00036643040516229645, + "loss": 4.5508, + "step": 3851 + }, + { + "epoch": 0.376171875, + "grad_norm": 0.24155552685260773, + "learning_rate": 0.00036636639017335643, + "loss": 4.5469, + "step": 3852 + }, + { + "epoch": 0.37626953125, + "grad_norm": 0.2901647090911865, + "learning_rate": 0.0003663023663270649, + "loss": 4.5508, + "step": 3853 + }, + { + "epoch": 0.3763671875, + "grad_norm": 0.28302890062332153, + "learning_rate": 0.0003662383336296287, + "loss": 4.5234, + "step": 3854 + }, + { + "epoch": 0.37646484375, + "grad_norm": 0.24422873556613922, + "learning_rate": 0.0003661742920872553, + "loss": 4.543, + "step": 3855 + }, + { + "epoch": 0.3765625, + "grad_norm": 0.2080693244934082, + "learning_rate": 0.000366110241706153, + "loss": 4.5273, + "step": 3856 + }, + { + "epoch": 0.37666015625, + "grad_norm": 0.2549125552177429, + "learning_rate": 0.0003660461824925312, + "loss": 4.5703, + "step": 3857 + }, + { + "epoch": 0.3767578125, + "grad_norm": 0.2808130085468292, + "learning_rate": 0.00036598211445259995, + "loss": 4.5352, + "step": 3858 + }, + { + "epoch": 0.37685546875, + "grad_norm": 0.24988223612308502, + "learning_rate": 0.00036591803759257013, + "loss": 4.5664, + "step": 3859 + }, + { + "epoch": 0.376953125, + "grad_norm": 0.2058619260787964, + "learning_rate": 0.0003658539519186537, + "loss": 4.5508, + "step": 3860 + }, + { + "epoch": 0.37705078125, + "grad_norm": 0.27423015236854553, + "learning_rate": 0.0003657898574370632, + "loss": 4.5273, + "step": 3861 + }, + { + "epoch": 0.3771484375, + "grad_norm": 0.28741520643234253, + "learning_rate": 0.00036572575415401215, + "loss": 4.5703, + "step": 3862 + }, + { + "epoch": 0.37724609375, + "grad_norm": 0.22467289865016937, + "learning_rate": 0.00036566164207571517, + "loss": 4.5508, + "step": 3863 + }, + { + "epoch": 0.37734375, + "grad_norm": 0.20723630487918854, + "learning_rate": 0.00036559752120838716, + "loss": 4.582, + "step": 3864 + }, + { + "epoch": 0.37744140625, + "grad_norm": 0.25713592767715454, + "learning_rate": 0.00036553339155824446, + "loss": 4.5469, + "step": 3865 + }, + { + "epoch": 0.3775390625, + "grad_norm": 0.2720530927181244, + "learning_rate": 0.00036546925313150387, + "loss": 4.5898, + "step": 3866 + }, + { + "epoch": 0.37763671875, + "grad_norm": 0.22821737825870514, + 
"learning_rate": 0.0003654051059343832, + "loss": 4.5742, + "step": 3867 + }, + { + "epoch": 0.377734375, + "grad_norm": 0.19715988636016846, + "learning_rate": 0.0003653409499731011, + "loss": 4.5273, + "step": 3868 + }, + { + "epoch": 0.37783203125, + "grad_norm": 0.2301965206861496, + "learning_rate": 0.00036527678525387705, + "loss": 4.5508, + "step": 3869 + }, + { + "epoch": 0.3779296875, + "grad_norm": 0.2273874282836914, + "learning_rate": 0.0003652126117829313, + "loss": 4.5469, + "step": 3870 + }, + { + "epoch": 0.37802734375, + "grad_norm": 0.2492510825395584, + "learning_rate": 0.00036514842956648523, + "loss": 4.5508, + "step": 3871 + }, + { + "epoch": 0.378125, + "grad_norm": 0.21269769966602325, + "learning_rate": 0.00036508423861076066, + "loss": 4.5664, + "step": 3872 + }, + { + "epoch": 0.37822265625, + "grad_norm": 0.2165064662694931, + "learning_rate": 0.0003650200389219806, + "loss": 4.543, + "step": 3873 + }, + { + "epoch": 0.3783203125, + "grad_norm": 0.22024405002593994, + "learning_rate": 0.0003649558305063688, + "loss": 4.5352, + "step": 3874 + }, + { + "epoch": 0.37841796875, + "grad_norm": 0.24619032442569733, + "learning_rate": 0.00036489161337014965, + "loss": 4.5469, + "step": 3875 + }, + { + "epoch": 0.378515625, + "grad_norm": 0.20022892951965332, + "learning_rate": 0.00036482738751954875, + "loss": 4.5195, + "step": 3876 + }, + { + "epoch": 0.37861328125, + "grad_norm": 0.20485606789588928, + "learning_rate": 0.00036476315296079224, + "loss": 4.5742, + "step": 3877 + }, + { + "epoch": 0.3787109375, + "grad_norm": 0.22393593192100525, + "learning_rate": 0.0003646989097001073, + "loss": 4.582, + "step": 3878 + }, + { + "epoch": 0.37880859375, + "grad_norm": 0.2158479541540146, + "learning_rate": 0.0003646346577437219, + "loss": 4.5742, + "step": 3879 + }, + { + "epoch": 0.37890625, + "grad_norm": 0.22578606009483337, + "learning_rate": 0.0003645703970978647, + "loss": 4.5547, + "step": 3880 + }, + { + "epoch": 0.37900390625, + "grad_norm": 0.22951842844486237, + "learning_rate": 0.00036450612776876547, + "loss": 4.5664, + "step": 3881 + }, + { + "epoch": 0.3791015625, + "grad_norm": 0.23956668376922607, + "learning_rate": 0.00036444184976265457, + "loss": 4.5352, + "step": 3882 + }, + { + "epoch": 0.37919921875, + "grad_norm": 0.2269906997680664, + "learning_rate": 0.0003643775630857635, + "loss": 4.5469, + "step": 3883 + }, + { + "epoch": 0.379296875, + "grad_norm": 0.2263822704553604, + "learning_rate": 0.0003643132677443242, + "loss": 4.5234, + "step": 3884 + }, + { + "epoch": 0.37939453125, + "grad_norm": 0.2669488489627838, + "learning_rate": 0.0003642489637445698, + "loss": 4.5195, + "step": 3885 + }, + { + "epoch": 0.3794921875, + "grad_norm": 0.24844890832901, + "learning_rate": 0.00036418465109273407, + "loss": 4.5508, + "step": 3886 + }, + { + "epoch": 0.37958984375, + "grad_norm": 0.25439518690109253, + "learning_rate": 0.00036412032979505173, + "loss": 4.5391, + "step": 3887 + }, + { + "epoch": 0.3796875, + "grad_norm": 0.229208841919899, + "learning_rate": 0.0003640559998577583, + "loss": 4.5625, + "step": 3888 + }, + { + "epoch": 0.37978515625, + "grad_norm": 0.1949658989906311, + "learning_rate": 0.00036399166128709006, + "loss": 4.5664, + "step": 3889 + }, + { + "epoch": 0.3798828125, + "grad_norm": 0.24328242242336273, + "learning_rate": 0.00036392731408928426, + "loss": 4.582, + "step": 3890 + }, + { + "epoch": 0.37998046875, + "grad_norm": 0.24651774764060974, + "learning_rate": 0.0003638629582705789, + "loss": 4.5742, + "step": 3891 + }, + 
{ + "epoch": 0.380078125, + "grad_norm": 0.21276697516441345, + "learning_rate": 0.000363798593837213, + "loss": 4.5469, + "step": 3892 + }, + { + "epoch": 0.38017578125, + "grad_norm": 0.22126992046833038, + "learning_rate": 0.00036373422079542594, + "loss": 4.5781, + "step": 3893 + }, + { + "epoch": 0.3802734375, + "grad_norm": 0.20014148950576782, + "learning_rate": 0.0003636698391514584, + "loss": 4.5547, + "step": 3894 + }, + { + "epoch": 0.38037109375, + "grad_norm": 0.216833233833313, + "learning_rate": 0.00036360544891155184, + "loss": 4.5469, + "step": 3895 + }, + { + "epoch": 0.38046875, + "grad_norm": 0.24696378409862518, + "learning_rate": 0.00036354105008194846, + "loss": 4.5547, + "step": 3896 + }, + { + "epoch": 0.38056640625, + "grad_norm": 0.25087183713912964, + "learning_rate": 0.00036347664266889103, + "loss": 4.5547, + "step": 3897 + }, + { + "epoch": 0.3806640625, + "grad_norm": 0.21418248116970062, + "learning_rate": 0.0003634122266786236, + "loss": 4.543, + "step": 3898 + }, + { + "epoch": 0.38076171875, + "grad_norm": 0.22195738554000854, + "learning_rate": 0.0003633478021173909, + "loss": 4.5352, + "step": 3899 + }, + { + "epoch": 0.380859375, + "grad_norm": 0.2044471651315689, + "learning_rate": 0.00036328336899143837, + "loss": 4.5586, + "step": 3900 + }, + { + "epoch": 0.38095703125, + "grad_norm": 0.2172752022743225, + "learning_rate": 0.0003632189273070125, + "loss": 4.5469, + "step": 3901 + }, + { + "epoch": 0.3810546875, + "grad_norm": 0.22361449897289276, + "learning_rate": 0.0003631544770703603, + "loss": 4.543, + "step": 3902 + }, + { + "epoch": 0.38115234375, + "grad_norm": 0.2163439691066742, + "learning_rate": 0.00036309001828772983, + "loss": 4.5664, + "step": 3903 + }, + { + "epoch": 0.38125, + "grad_norm": 0.20652833580970764, + "learning_rate": 0.00036302555096537, + "loss": 4.543, + "step": 3904 + }, + { + "epoch": 0.38134765625, + "grad_norm": 0.1974000334739685, + "learning_rate": 0.00036296107510953044, + "loss": 4.5625, + "step": 3905 + }, + { + "epoch": 0.3814453125, + "grad_norm": 0.24653279781341553, + "learning_rate": 0.0003628965907264616, + "loss": 4.5352, + "step": 3906 + }, + { + "epoch": 0.38154296875, + "grad_norm": 0.2586394250392914, + "learning_rate": 0.0003628320978224148, + "loss": 4.543, + "step": 3907 + }, + { + "epoch": 0.381640625, + "grad_norm": 0.25924891233444214, + "learning_rate": 0.0003627675964036423, + "loss": 4.5547, + "step": 3908 + }, + { + "epoch": 0.38173828125, + "grad_norm": 0.2483292669057846, + "learning_rate": 0.00036270308647639703, + "loss": 4.5508, + "step": 3909 + }, + { + "epoch": 0.3818359375, + "grad_norm": 0.229950949549675, + "learning_rate": 0.00036263856804693277, + "loss": 4.5664, + "step": 3910 + }, + { + "epoch": 0.38193359375, + "grad_norm": 0.19701816141605377, + "learning_rate": 0.0003625740411215041, + "loss": 4.5586, + "step": 3911 + }, + { + "epoch": 0.38203125, + "grad_norm": 0.22239305078983307, + "learning_rate": 0.00036250950570636655, + "loss": 4.5625, + "step": 3912 + }, + { + "epoch": 0.38212890625, + "grad_norm": 0.25458335876464844, + "learning_rate": 0.00036244496180777634, + "loss": 4.5664, + "step": 3913 + }, + { + "epoch": 0.3822265625, + "grad_norm": 0.23247866332530975, + "learning_rate": 0.0003623804094319905, + "loss": 4.5508, + "step": 3914 + }, + { + "epoch": 0.38232421875, + "grad_norm": 0.2113930732011795, + "learning_rate": 0.0003623158485852671, + "loss": 4.5664, + "step": 3915 + }, + { + "epoch": 0.382421875, + "grad_norm": 0.21247810125350952, + "learning_rate": 
0.0003622512792738648, + "loss": 4.5312, + "step": 3916 + }, + { + "epoch": 0.38251953125, + "grad_norm": 0.20285052061080933, + "learning_rate": 0.00036218670150404313, + "loss": 4.5391, + "step": 3917 + }, + { + "epoch": 0.3826171875, + "grad_norm": 0.1933477818965912, + "learning_rate": 0.0003621221152820625, + "loss": 4.543, + "step": 3918 + }, + { + "epoch": 0.38271484375, + "grad_norm": 0.21490438282489777, + "learning_rate": 0.0003620575206141841, + "loss": 4.5586, + "step": 3919 + }, + { + "epoch": 0.3828125, + "grad_norm": 0.19563163816928864, + "learning_rate": 0.00036199291750666987, + "loss": 4.5469, + "step": 3920 + }, + { + "epoch": 0.38291015625, + "grad_norm": 0.19900527596473694, + "learning_rate": 0.0003619283059657827, + "loss": 4.5195, + "step": 3921 + }, + { + "epoch": 0.3830078125, + "grad_norm": 0.20335721969604492, + "learning_rate": 0.00036186368599778633, + "loss": 4.5742, + "step": 3922 + }, + { + "epoch": 0.38310546875, + "grad_norm": 0.19746747612953186, + "learning_rate": 0.0003617990576089451, + "loss": 4.5508, + "step": 3923 + }, + { + "epoch": 0.383203125, + "grad_norm": 0.22214552760124207, + "learning_rate": 0.0003617344208055244, + "loss": 4.5586, + "step": 3924 + }, + { + "epoch": 0.38330078125, + "grad_norm": 0.20545615255832672, + "learning_rate": 0.00036166977559379016, + "loss": 4.5117, + "step": 3925 + }, + { + "epoch": 0.3833984375, + "grad_norm": 0.19555038213729858, + "learning_rate": 0.0003616051219800095, + "loss": 4.582, + "step": 3926 + }, + { + "epoch": 0.38349609375, + "grad_norm": 0.21728569269180298, + "learning_rate": 0.0003615404599704501, + "loss": 4.5703, + "step": 3927 + }, + { + "epoch": 0.38359375, + "grad_norm": 0.20031973719596863, + "learning_rate": 0.00036147578957138033, + "loss": 4.5781, + "step": 3928 + }, + { + "epoch": 0.38369140625, + "grad_norm": 0.21254299581050873, + "learning_rate": 0.00036141111078906977, + "loss": 4.5508, + "step": 3929 + }, + { + "epoch": 0.3837890625, + "grad_norm": 0.20205654203891754, + "learning_rate": 0.00036134642362978844, + "loss": 4.543, + "step": 3930 + }, + { + "epoch": 0.38388671875, + "grad_norm": 0.21170862019062042, + "learning_rate": 0.0003612817280998074, + "loss": 4.5586, + "step": 3931 + }, + { + "epoch": 0.383984375, + "grad_norm": 0.23134401440620422, + "learning_rate": 0.0003612170242053984, + "loss": 4.5391, + "step": 3932 + }, + { + "epoch": 0.38408203125, + "grad_norm": 0.2333475649356842, + "learning_rate": 0.0003611523119528341, + "loss": 4.5391, + "step": 3933 + }, + { + "epoch": 0.3841796875, + "grad_norm": 0.24961499869823456, + "learning_rate": 0.0003610875913483878, + "loss": 4.582, + "step": 3934 + }, + { + "epoch": 0.38427734375, + "grad_norm": 0.2801797688007355, + "learning_rate": 0.00036102286239833386, + "loss": 4.5508, + "step": 3935 + }, + { + "epoch": 0.384375, + "grad_norm": 0.2625277042388916, + "learning_rate": 0.0003609581251089472, + "loss": 4.5547, + "step": 3936 + }, + { + "epoch": 0.38447265625, + "grad_norm": 0.21679918467998505, + "learning_rate": 0.0003608933794865038, + "loss": 4.5352, + "step": 3937 + }, + { + "epoch": 0.3845703125, + "grad_norm": 0.23063300549983978, + "learning_rate": 0.0003608286255372801, + "loss": 4.5391, + "step": 3938 + }, + { + "epoch": 0.38466796875, + "grad_norm": 0.26355069875717163, + "learning_rate": 0.00036076386326755374, + "loss": 4.5391, + "step": 3939 + }, + { + "epoch": 0.384765625, + "grad_norm": 0.3291335999965668, + "learning_rate": 0.0003606990926836029, + "loss": 4.5312, + "step": 3940 + }, + { + 
"epoch": 0.38486328125, + "grad_norm": 0.3185564875602722, + "learning_rate": 0.0003606343137917067, + "loss": 4.5625, + "step": 3941 + }, + { + "epoch": 0.3849609375, + "grad_norm": 0.24365772306919098, + "learning_rate": 0.00036056952659814496, + "loss": 4.5938, + "step": 3942 + }, + { + "epoch": 0.38505859375, + "grad_norm": 0.2500409781932831, + "learning_rate": 0.0003605047311091984, + "loss": 4.5664, + "step": 3943 + }, + { + "epoch": 0.38515625, + "grad_norm": 0.2757214605808258, + "learning_rate": 0.00036043992733114844, + "loss": 4.5273, + "step": 3944 + }, + { + "epoch": 0.38525390625, + "grad_norm": 0.2558170557022095, + "learning_rate": 0.0003603751152702774, + "loss": 4.5273, + "step": 3945 + }, + { + "epoch": 0.3853515625, + "grad_norm": 0.1944461613893509, + "learning_rate": 0.0003603102949328684, + "loss": 4.5195, + "step": 3946 + }, + { + "epoch": 0.38544921875, + "grad_norm": 0.26219162344932556, + "learning_rate": 0.00036024546632520537, + "loss": 4.582, + "step": 3947 + }, + { + "epoch": 0.385546875, + "grad_norm": 0.29298943281173706, + "learning_rate": 0.00036018062945357285, + "loss": 4.5547, + "step": 3948 + }, + { + "epoch": 0.38564453125, + "grad_norm": 0.2205764204263687, + "learning_rate": 0.00036011578432425647, + "loss": 4.5508, + "step": 3949 + }, + { + "epoch": 0.3857421875, + "grad_norm": 0.23982557654380798, + "learning_rate": 0.0003600509309435424, + "loss": 4.5547, + "step": 3950 + }, + { + "epoch": 0.38583984375, + "grad_norm": 0.2387574166059494, + "learning_rate": 0.000359986069317718, + "loss": 4.5352, + "step": 3951 + }, + { + "epoch": 0.3859375, + "grad_norm": 0.23822936415672302, + "learning_rate": 0.00035992119945307084, + "loss": 4.582, + "step": 3952 + }, + { + "epoch": 0.38603515625, + "grad_norm": 0.20306189358234406, + "learning_rate": 0.00035985632135588974, + "loss": 4.5117, + "step": 3953 + }, + { + "epoch": 0.3861328125, + "grad_norm": 0.23864056169986725, + "learning_rate": 0.0003597914350324643, + "loss": 4.5859, + "step": 3954 + }, + { + "epoch": 0.38623046875, + "grad_norm": 0.2204524576663971, + "learning_rate": 0.0003597265404890847, + "loss": 4.5664, + "step": 3955 + }, + { + "epoch": 0.386328125, + "grad_norm": 0.2264997959136963, + "learning_rate": 0.000359661637732042, + "loss": 4.5742, + "step": 3956 + }, + { + "epoch": 0.38642578125, + "grad_norm": 0.19526885449886322, + "learning_rate": 0.00035959672676762807, + "loss": 4.5508, + "step": 3957 + }, + { + "epoch": 0.3865234375, + "grad_norm": 0.19720374047756195, + "learning_rate": 0.00035953180760213573, + "loss": 4.5586, + "step": 3958 + }, + { + "epoch": 0.38662109375, + "grad_norm": 0.20400942862033844, + "learning_rate": 0.0003594668802418583, + "loss": 4.5742, + "step": 3959 + }, + { + "epoch": 0.38671875, + "grad_norm": 0.19639185070991516, + "learning_rate": 0.00035940194469309016, + "loss": 4.5391, + "step": 3960 + }, + { + "epoch": 0.38681640625, + "grad_norm": 0.2054133117198944, + "learning_rate": 0.0003593370009621263, + "loss": 4.5508, + "step": 3961 + }, + { + "epoch": 0.3869140625, + "grad_norm": 0.19577625393867493, + "learning_rate": 0.00035927204905526256, + "loss": 4.5156, + "step": 3962 + }, + { + "epoch": 0.38701171875, + "grad_norm": 0.2023460865020752, + "learning_rate": 0.00035920708897879564, + "loss": 4.5742, + "step": 3963 + }, + { + "epoch": 0.387109375, + "grad_norm": 0.25561678409576416, + "learning_rate": 0.00035914212073902307, + "loss": 4.5469, + "step": 3964 + }, + { + "epoch": 0.38720703125, + "grad_norm": 0.2535405158996582, + 
"learning_rate": 0.0003590771443422428, + "loss": 4.582, + "step": 3965 + }, + { + "epoch": 0.3873046875, + "grad_norm": 0.24688060581684113, + "learning_rate": 0.0003590121597947541, + "loss": 4.5469, + "step": 3966 + }, + { + "epoch": 0.38740234375, + "grad_norm": 0.2333875149488449, + "learning_rate": 0.0003589471671028567, + "loss": 4.5664, + "step": 3967 + }, + { + "epoch": 0.3875, + "grad_norm": 0.20582197606563568, + "learning_rate": 0.0003588821662728512, + "loss": 4.5312, + "step": 3968 + }, + { + "epoch": 0.38759765625, + "grad_norm": 0.21114470064640045, + "learning_rate": 0.00035881715731103897, + "loss": 4.5664, + "step": 3969 + }, + { + "epoch": 0.3876953125, + "grad_norm": 0.20465944707393646, + "learning_rate": 0.0003587521402237222, + "loss": 4.5352, + "step": 3970 + }, + { + "epoch": 0.38779296875, + "grad_norm": 0.19395574927330017, + "learning_rate": 0.00035868711501720393, + "loss": 4.5898, + "step": 3971 + }, + { + "epoch": 0.387890625, + "grad_norm": 0.23545682430267334, + "learning_rate": 0.0003586220816977878, + "loss": 4.5703, + "step": 3972 + }, + { + "epoch": 0.38798828125, + "grad_norm": 0.21559612452983856, + "learning_rate": 0.0003585570402717784, + "loss": 4.5586, + "step": 3973 + }, + { + "epoch": 0.3880859375, + "grad_norm": 0.19468650221824646, + "learning_rate": 0.000358491990745481, + "loss": 4.5508, + "step": 3974 + }, + { + "epoch": 0.38818359375, + "grad_norm": 0.191791832447052, + "learning_rate": 0.0003584269331252018, + "loss": 4.5625, + "step": 3975 + }, + { + "epoch": 0.38828125, + "grad_norm": 0.20436179637908936, + "learning_rate": 0.0003583618674172477, + "loss": 4.5781, + "step": 3976 + }, + { + "epoch": 0.38837890625, + "grad_norm": 0.2200692594051361, + "learning_rate": 0.00035829679362792626, + "loss": 4.5625, + "step": 3977 + }, + { + "epoch": 0.3884765625, + "grad_norm": 0.20331627130508423, + "learning_rate": 0.00035823171176354603, + "loss": 4.5469, + "step": 3978 + }, + { + "epoch": 0.38857421875, + "grad_norm": 3.3503615856170654, + "learning_rate": 0.00035816662183041625, + "loss": 4.5508, + "step": 3979 + }, + { + "epoch": 0.388671875, + "grad_norm": 0.28469765186309814, + "learning_rate": 0.0003581015238348469, + "loss": 4.5742, + "step": 3980 + }, + { + "epoch": 0.38876953125, + "grad_norm": 1.8064192533493042, + "learning_rate": 0.0003580364177831489, + "loss": 4.5781, + "step": 3981 + }, + { + "epoch": 0.3888671875, + "grad_norm": 0.2907196283340454, + "learning_rate": 0.0003579713036816337, + "loss": 4.5352, + "step": 3982 + }, + { + "epoch": 0.38896484375, + "grad_norm": 0.46062764525413513, + "learning_rate": 0.00035790618153661377, + "loss": 4.5703, + "step": 3983 + }, + { + "epoch": 0.3890625, + "grad_norm": 0.5125043392181396, + "learning_rate": 0.0003578410513544022, + "loss": 4.5469, + "step": 3984 + }, + { + "epoch": 0.38916015625, + "grad_norm": 0.37048476934432983, + "learning_rate": 0.000357775913141313, + "loss": 4.543, + "step": 3985 + }, + { + "epoch": 0.3892578125, + "grad_norm": 0.73615962266922, + "learning_rate": 0.0003577107669036608, + "loss": 4.5781, + "step": 3986 + }, + { + "epoch": 0.38935546875, + "grad_norm": 0.460716187953949, + "learning_rate": 0.00035764561264776105, + "loss": 4.543, + "step": 3987 + }, + { + "epoch": 0.389453125, + "grad_norm": 0.4506913423538208, + "learning_rate": 0.00035758045037993016, + "loss": 4.5781, + "step": 3988 + }, + { + "epoch": 0.38955078125, + "grad_norm": 0.7387946248054504, + "learning_rate": 0.000357515280106485, + "loss": 4.5195, + "step": 3989 + }, + { + 
"epoch": 0.3896484375, + "grad_norm": 0.38603469729423523, + "learning_rate": 0.0003574501018337435, + "loss": 4.5859, + "step": 3990 + }, + { + "epoch": 0.38974609375, + "grad_norm": 0.3279763162136078, + "learning_rate": 0.00035738491556802426, + "loss": 4.543, + "step": 3991 + }, + { + "epoch": 0.38984375, + "grad_norm": 0.31028038263320923, + "learning_rate": 0.0003573197213156466, + "loss": 4.5508, + "step": 3992 + }, + { + "epoch": 0.38994140625, + "grad_norm": 0.29573673009872437, + "learning_rate": 0.00035725451908293066, + "loss": 4.5391, + "step": 3993 + }, + { + "epoch": 0.3900390625, + "grad_norm": 0.7172201871871948, + "learning_rate": 0.0003571893088761973, + "loss": 4.5664, + "step": 3994 + }, + { + "epoch": 0.39013671875, + "grad_norm": 0.34305018186569214, + "learning_rate": 0.00035712409070176826, + "loss": 4.543, + "step": 3995 + }, + { + "epoch": 0.390234375, + "grad_norm": 0.2646881639957428, + "learning_rate": 0.000357058864565966, + "loss": 4.5508, + "step": 3996 + }, + { + "epoch": 0.39033203125, + "grad_norm": 0.22321848571300507, + "learning_rate": 0.00035699363047511385, + "loss": 4.5391, + "step": 3997 + }, + { + "epoch": 0.3904296875, + "grad_norm": 0.2468116581439972, + "learning_rate": 0.0003569283884355357, + "loss": 4.5469, + "step": 3998 + }, + { + "epoch": 0.39052734375, + "grad_norm": 0.23610632121562958, + "learning_rate": 0.0003568631384535563, + "loss": 4.543, + "step": 3999 + }, + { + "epoch": 0.390625, + "grad_norm": 0.23311467468738556, + "learning_rate": 0.00035679788053550124, + "loss": 4.5781, + "step": 4000 + }, + { + "epoch": 0.39072265625, + "grad_norm": 1.0616041421890259, + "learning_rate": 0.00035673261468769675, + "loss": 4.582, + "step": 4001 + }, + { + "epoch": 0.3908203125, + "grad_norm": 0.43703344464302063, + "learning_rate": 0.00035666734091647015, + "loss": 4.5664, + "step": 4002 + }, + { + "epoch": 0.39091796875, + "grad_norm": 0.32485100626945496, + "learning_rate": 0.00035660205922814905, + "loss": 4.5742, + "step": 4003 + }, + { + "epoch": 0.391015625, + "grad_norm": 0.46926847100257874, + "learning_rate": 0.00035653676962906205, + "loss": 4.5703, + "step": 4004 + }, + { + "epoch": 0.39111328125, + "grad_norm": 0.2892122268676758, + "learning_rate": 0.00035647147212553867, + "loss": 4.5352, + "step": 4005 + }, + { + "epoch": 0.3912109375, + "grad_norm": 0.20957447588443756, + "learning_rate": 0.0003564061667239091, + "loss": 4.543, + "step": 4006 + }, + { + "epoch": 0.39130859375, + "grad_norm": 0.2625845968723297, + "learning_rate": 0.0003563408534305041, + "loss": 4.5547, + "step": 4007 + }, + { + "epoch": 0.39140625, + "grad_norm": 0.23379504680633545, + "learning_rate": 0.00035627553225165543, + "loss": 4.5625, + "step": 4008 + }, + { + "epoch": 0.39150390625, + "grad_norm": 0.24798503518104553, + "learning_rate": 0.0003562102031936955, + "loss": 4.5898, + "step": 4009 + }, + { + "epoch": 0.3916015625, + "grad_norm": 0.21947365999221802, + "learning_rate": 0.00035614486626295766, + "loss": 4.5156, + "step": 4010 + }, + { + "epoch": 0.39169921875, + "grad_norm": 0.23135443031787872, + "learning_rate": 0.0003560795214657757, + "loss": 4.5742, + "step": 4011 + }, + { + "epoch": 0.391796875, + "grad_norm": 0.23809033632278442, + "learning_rate": 0.0003560141688084844, + "loss": 4.5586, + "step": 4012 + }, + { + "epoch": 0.39189453125, + "grad_norm": 0.20813113451004028, + "learning_rate": 0.00035594880829741926, + "loss": 4.5508, + "step": 4013 + }, + { + "epoch": 0.3919921875, + "grad_norm": 0.21514393389225006, + 
"learning_rate": 0.00035588343993891666, + "loss": 4.5469, + "step": 4014 + }, + { + "epoch": 0.39208984375, + "grad_norm": 0.22037969529628754, + "learning_rate": 0.0003558180637393134, + "loss": 4.5469, + "step": 4015 + }, + { + "epoch": 0.3921875, + "grad_norm": 0.21196837723255157, + "learning_rate": 0.0003557526797049474, + "loss": 4.5547, + "step": 4016 + }, + { + "epoch": 0.39228515625, + "grad_norm": 0.2154344916343689, + "learning_rate": 0.00035568728784215727, + "loss": 4.5664, + "step": 4017 + }, + { + "epoch": 0.3923828125, + "grad_norm": 0.5858936905860901, + "learning_rate": 0.0003556218881572821, + "loss": 4.5977, + "step": 4018 + }, + { + "epoch": 0.39248046875, + "grad_norm": 0.2930135428905487, + "learning_rate": 0.0003555564806566621, + "loss": 4.5508, + "step": 4019 + }, + { + "epoch": 0.392578125, + "grad_norm": 0.23462602496147156, + "learning_rate": 0.00035549106534663803, + "loss": 4.582, + "step": 4020 + }, + { + "epoch": 0.39267578125, + "grad_norm": 0.2292822152376175, + "learning_rate": 0.0003554256422335515, + "loss": 4.5547, + "step": 4021 + }, + { + "epoch": 0.3927734375, + "grad_norm": 0.26094597578048706, + "learning_rate": 0.0003553602113237447, + "loss": 4.5312, + "step": 4022 + }, + { + "epoch": 0.39287109375, + "grad_norm": 0.25417307019233704, + "learning_rate": 0.0003552947726235609, + "loss": 4.5508, + "step": 4023 + }, + { + "epoch": 0.39296875, + "grad_norm": 0.21321046352386475, + "learning_rate": 0.0003552293261393438, + "loss": 4.5938, + "step": 4024 + }, + { + "epoch": 0.39306640625, + "grad_norm": 0.2270149439573288, + "learning_rate": 0.00035516387187743817, + "loss": 4.5703, + "step": 4025 + }, + { + "epoch": 0.3931640625, + "grad_norm": 0.2587560713291168, + "learning_rate": 0.00035509840984418917, + "loss": 4.5625, + "step": 4026 + }, + { + "epoch": 0.39326171875, + "grad_norm": 0.26145586371421814, + "learning_rate": 0.00035503294004594297, + "loss": 4.5469, + "step": 4027 + }, + { + "epoch": 0.393359375, + "grad_norm": 0.21044021844863892, + "learning_rate": 0.0003549674624890464, + "loss": 4.5234, + "step": 4028 + }, + { + "epoch": 0.39345703125, + "grad_norm": 0.23064593970775604, + "learning_rate": 0.00035490197717984704, + "loss": 4.543, + "step": 4029 + }, + { + "epoch": 0.3935546875, + "grad_norm": 0.21752195060253143, + "learning_rate": 0.0003548364841246934, + "loss": 4.5586, + "step": 4030 + }, + { + "epoch": 0.39365234375, + "grad_norm": 0.2615599036216736, + "learning_rate": 0.0003547709833299344, + "loss": 4.5195, + "step": 4031 + }, + { + "epoch": 0.39375, + "grad_norm": 0.21568220853805542, + "learning_rate": 0.0003547054748019199, + "loss": 4.5469, + "step": 4032 + }, + { + "epoch": 0.39384765625, + "grad_norm": 0.20464248955249786, + "learning_rate": 0.0003546399585470007, + "loss": 4.5547, + "step": 4033 + }, + { + "epoch": 0.3939453125, + "grad_norm": 0.22874481976032257, + "learning_rate": 0.00035457443457152804, + "loss": 4.5508, + "step": 4034 + }, + { + "epoch": 0.39404296875, + "grad_norm": 0.1869070678949356, + "learning_rate": 0.000354508902881854, + "loss": 4.5469, + "step": 4035 + }, + { + "epoch": 0.394140625, + "grad_norm": 0.24708020687103271, + "learning_rate": 0.0003544433634843314, + "loss": 4.5625, + "step": 4036 + }, + { + "epoch": 0.39423828125, + "grad_norm": 0.24348294734954834, + "learning_rate": 0.00035437781638531396, + "loss": 4.5742, + "step": 4037 + }, + { + "epoch": 0.3943359375, + "grad_norm": 0.20718319714069366, + "learning_rate": 0.00035431226159115593, + "loss": 4.5586, + "step": 4038 + 
}, + { + "epoch": 0.39443359375, + "grad_norm": 0.2001379281282425, + "learning_rate": 0.0003542466991082124, + "loss": 4.5547, + "step": 4039 + }, + { + "epoch": 0.39453125, + "grad_norm": 0.19845274090766907, + "learning_rate": 0.0003541811289428394, + "loss": 4.5469, + "step": 4040 + }, + { + "epoch": 0.39462890625, + "grad_norm": 0.20408929884433746, + "learning_rate": 0.00035411555110139315, + "loss": 4.5391, + "step": 4041 + }, + { + "epoch": 0.3947265625, + "grad_norm": 0.18628466129302979, + "learning_rate": 0.0003540499655902313, + "loss": 4.543, + "step": 4042 + }, + { + "epoch": 0.39482421875, + "grad_norm": 0.21607506275177002, + "learning_rate": 0.0003539843724157117, + "loss": 4.543, + "step": 4043 + }, + { + "epoch": 0.394921875, + "grad_norm": 0.21097971498966217, + "learning_rate": 0.0003539187715841934, + "loss": 4.5664, + "step": 4044 + }, + { + "epoch": 0.39501953125, + "grad_norm": 0.22395700216293335, + "learning_rate": 0.0003538531631020357, + "loss": 4.5508, + "step": 4045 + }, + { + "epoch": 0.3951171875, + "grad_norm": 0.21000897884368896, + "learning_rate": 0.00035378754697559903, + "loss": 4.5469, + "step": 4046 + }, + { + "epoch": 0.39521484375, + "grad_norm": 0.2068769931793213, + "learning_rate": 0.0003537219232112444, + "loss": 4.5625, + "step": 4047 + }, + { + "epoch": 0.3953125, + "grad_norm": 0.20337116718292236, + "learning_rate": 0.0003536562918153337, + "loss": 4.5234, + "step": 4048 + }, + { + "epoch": 0.39541015625, + "grad_norm": 0.22240625321865082, + "learning_rate": 0.0003535906527942293, + "loss": 4.5586, + "step": 4049 + }, + { + "epoch": 0.3955078125, + "grad_norm": 0.20726953446865082, + "learning_rate": 0.00035352500615429445, + "loss": 4.5781, + "step": 4050 + }, + { + "epoch": 0.39560546875, + "grad_norm": 0.191116601228714, + "learning_rate": 0.0003534593519018933, + "loss": 4.5625, + "step": 4051 + }, + { + "epoch": 0.395703125, + "grad_norm": 0.2275056540966034, + "learning_rate": 0.00035339369004339045, + "loss": 4.5469, + "step": 4052 + }, + { + "epoch": 0.39580078125, + "grad_norm": 0.22213804721832275, + "learning_rate": 0.0003533280205851514, + "loss": 4.5273, + "step": 4053 + }, + { + "epoch": 0.3958984375, + "grad_norm": 0.24688883125782013, + "learning_rate": 0.00035326234353354235, + "loss": 4.5664, + "step": 4054 + }, + { + "epoch": 0.39599609375, + "grad_norm": 0.20745114982128143, + "learning_rate": 0.0003531966588949302, + "loss": 4.5391, + "step": 4055 + }, + { + "epoch": 0.39609375, + "grad_norm": 0.19586782157421112, + "learning_rate": 0.0003531309666756828, + "loss": 4.543, + "step": 4056 + }, + { + "epoch": 0.39619140625, + "grad_norm": 0.2050609141588211, + "learning_rate": 0.0003530652668821685, + "loss": 4.5469, + "step": 4057 + }, + { + "epoch": 0.3962890625, + "grad_norm": 0.20276108384132385, + "learning_rate": 0.00035299955952075633, + "loss": 4.5664, + "step": 4058 + }, + { + "epoch": 0.39638671875, + "grad_norm": 0.23067191243171692, + "learning_rate": 0.00035293384459781626, + "loss": 4.5664, + "step": 4059 + }, + { + "epoch": 0.396484375, + "grad_norm": 0.2555325925350189, + "learning_rate": 0.00035286812211971885, + "loss": 4.5547, + "step": 4060 + }, + { + "epoch": 0.39658203125, + "grad_norm": 0.26767903566360474, + "learning_rate": 0.0003528023920928355, + "loss": 4.5547, + "step": 4061 + }, + { + "epoch": 0.3966796875, + "grad_norm": 0.2167680859565735, + "learning_rate": 0.0003527366545235384, + "loss": 4.5586, + "step": 4062 + }, + { + "epoch": 0.39677734375, + "grad_norm": 0.21388041973114014, + 
"learning_rate": 0.0003526709094182001, + "loss": 4.5469, + "step": 4063 + }, + { + "epoch": 0.396875, + "grad_norm": 0.21163912117481232, + "learning_rate": 0.00035260515678319437, + "loss": 4.5156, + "step": 4064 + }, + { + "epoch": 0.39697265625, + "grad_norm": 0.25626978278160095, + "learning_rate": 0.0003525393966248954, + "loss": 4.5469, + "step": 4065 + }, + { + "epoch": 0.3970703125, + "grad_norm": 0.24820320308208466, + "learning_rate": 0.00035247362894967816, + "loss": 4.5391, + "step": 4066 + }, + { + "epoch": 0.39716796875, + "grad_norm": 0.228718101978302, + "learning_rate": 0.00035240785376391845, + "loss": 4.5195, + "step": 4067 + }, + { + "epoch": 0.397265625, + "grad_norm": 0.21625976264476776, + "learning_rate": 0.00035234207107399257, + "loss": 4.5547, + "step": 4068 + }, + { + "epoch": 0.39736328125, + "grad_norm": 0.22045624256134033, + "learning_rate": 0.0003522762808862779, + "loss": 4.5781, + "step": 4069 + }, + { + "epoch": 0.3974609375, + "grad_norm": 0.2454875111579895, + "learning_rate": 0.0003522104832071523, + "loss": 4.5703, + "step": 4070 + }, + { + "epoch": 0.39755859375, + "grad_norm": 0.2646869122982025, + "learning_rate": 0.00035214467804299435, + "loss": 4.5625, + "step": 4071 + }, + { + "epoch": 0.39765625, + "grad_norm": 0.24505573511123657, + "learning_rate": 0.00035207886540018345, + "loss": 4.5273, + "step": 4072 + }, + { + "epoch": 0.39775390625, + "grad_norm": 0.2043951451778412, + "learning_rate": 0.0003520130452850997, + "loss": 4.5703, + "step": 4073 + }, + { + "epoch": 0.3978515625, + "grad_norm": 0.2503882646560669, + "learning_rate": 0.00035194721770412385, + "loss": 4.5938, + "step": 4074 + }, + { + "epoch": 0.39794921875, + "grad_norm": 0.20169861614704132, + "learning_rate": 0.00035188138266363755, + "loss": 4.5391, + "step": 4075 + }, + { + "epoch": 0.398046875, + "grad_norm": 0.2331235706806183, + "learning_rate": 0.0003518155401700229, + "loss": 4.5273, + "step": 4076 + }, + { + "epoch": 0.39814453125, + "grad_norm": 0.2196357697248459, + "learning_rate": 0.00035174969022966313, + "loss": 4.5586, + "step": 4077 + }, + { + "epoch": 0.3982421875, + "grad_norm": 0.2227904200553894, + "learning_rate": 0.00035168383284894165, + "loss": 4.5352, + "step": 4078 + }, + { + "epoch": 0.39833984375, + "grad_norm": 0.2352883666753769, + "learning_rate": 0.00035161796803424313, + "loss": 4.5586, + "step": 4079 + }, + { + "epoch": 0.3984375, + "grad_norm": 0.2257026582956314, + "learning_rate": 0.0003515520957919526, + "loss": 4.5195, + "step": 4080 + }, + { + "epoch": 0.39853515625, + "grad_norm": 0.21808560192584991, + "learning_rate": 0.00035148621612845593, + "loss": 4.5508, + "step": 4081 + }, + { + "epoch": 0.3986328125, + "grad_norm": 0.21684084832668304, + "learning_rate": 0.0003514203290501397, + "loss": 4.5234, + "step": 4082 + }, + { + "epoch": 0.39873046875, + "grad_norm": 0.19863474369049072, + "learning_rate": 0.0003513544345633912, + "loss": 4.5742, + "step": 4083 + }, + { + "epoch": 0.398828125, + "grad_norm": 0.21993204951286316, + "learning_rate": 0.00035128853267459855, + "loss": 4.5273, + "step": 4084 + }, + { + "epoch": 0.39892578125, + "grad_norm": 0.2087717354297638, + "learning_rate": 0.00035122262339015037, + "loss": 4.5586, + "step": 4085 + }, + { + "epoch": 0.3990234375, + "grad_norm": 0.2386511117219925, + "learning_rate": 0.0003511567067164363, + "loss": 4.5, + "step": 4086 + }, + { + "epoch": 0.39912109375, + "grad_norm": 0.23670727014541626, + "learning_rate": 0.00035109078265984636, + "loss": 4.5508, + "step": 4087 + 
}, + { + "epoch": 0.39921875, + "grad_norm": 0.2541656494140625, + "learning_rate": 0.0003510248512267714, + "loss": 4.5742, + "step": 4088 + }, + { + "epoch": 0.39931640625, + "grad_norm": 0.26631975173950195, + "learning_rate": 0.0003509589124236031, + "loss": 4.5312, + "step": 4089 + }, + { + "epoch": 0.3994140625, + "grad_norm": 0.23987272381782532, + "learning_rate": 0.00035089296625673386, + "loss": 4.543, + "step": 4090 + }, + { + "epoch": 0.39951171875, + "grad_norm": 0.22598996758460999, + "learning_rate": 0.00035082701273255644, + "loss": 4.5742, + "step": 4091 + }, + { + "epoch": 0.399609375, + "grad_norm": 0.20120254158973694, + "learning_rate": 0.00035076105185746493, + "loss": 4.5391, + "step": 4092 + }, + { + "epoch": 0.39970703125, + "grad_norm": 0.2025131732225418, + "learning_rate": 0.0003506950836378536, + "loss": 4.5508, + "step": 4093 + }, + { + "epoch": 0.3998046875, + "grad_norm": 0.21933786571025848, + "learning_rate": 0.0003506291080801177, + "loss": 4.5039, + "step": 4094 + }, + { + "epoch": 0.39990234375, + "grad_norm": 0.21877196431159973, + "learning_rate": 0.00035056312519065295, + "loss": 4.5586, + "step": 4095 + }, + { + "epoch": 0.4, + "grad_norm": 0.2014586329460144, + "learning_rate": 0.0003504971349758562, + "loss": 4.5469, + "step": 4096 + }, + { + "epoch": 0.40009765625, + "grad_norm": 0.2222808599472046, + "learning_rate": 0.0003504311374421246, + "loss": 4.5508, + "step": 4097 + }, + { + "epoch": 0.4001953125, + "grad_norm": 0.21528300642967224, + "learning_rate": 0.00035036513259585606, + "loss": 4.5195, + "step": 4098 + }, + { + "epoch": 0.40029296875, + "grad_norm": 0.21945050358772278, + "learning_rate": 0.0003502991204434495, + "loss": 4.5117, + "step": 4099 + }, + { + "epoch": 0.400390625, + "grad_norm": 0.21083275973796844, + "learning_rate": 0.0003502331009913042, + "loss": 4.5391, + "step": 4100 + }, + { + "epoch": 0.40048828125, + "grad_norm": 0.21027570962905884, + "learning_rate": 0.00035016707424582045, + "loss": 4.5703, + "step": 4101 + }, + { + "epoch": 0.4005859375, + "grad_norm": 0.18751072883605957, + "learning_rate": 0.000350101040213399, + "loss": 4.5273, + "step": 4102 + }, + { + "epoch": 0.40068359375, + "grad_norm": 0.20952056348323822, + "learning_rate": 0.0003500349989004414, + "loss": 4.5664, + "step": 4103 + }, + { + "epoch": 0.40078125, + "grad_norm": 0.21066226065158844, + "learning_rate": 0.00034996895031334997, + "loss": 4.5625, + "step": 4104 + }, + { + "epoch": 0.40087890625, + "grad_norm": 0.21606898307800293, + "learning_rate": 0.0003499028944585275, + "loss": 4.5469, + "step": 4105 + }, + { + "epoch": 0.4009765625, + "grad_norm": 0.20159971714019775, + "learning_rate": 0.00034983683134237793, + "loss": 4.5664, + "step": 4106 + }, + { + "epoch": 0.40107421875, + "grad_norm": 0.20797099173069, + "learning_rate": 0.0003497707609713054, + "loss": 4.5742, + "step": 4107 + }, + { + "epoch": 0.401171875, + "grad_norm": 0.1736592799425125, + "learning_rate": 0.00034970468335171506, + "loss": 4.5352, + "step": 4108 + }, + { + "epoch": 0.40126953125, + "grad_norm": 0.20780213177204132, + "learning_rate": 0.0003496385984900127, + "loss": 4.5508, + "step": 4109 + }, + { + "epoch": 0.4013671875, + "grad_norm": 0.22404348850250244, + "learning_rate": 0.0003495725063926048, + "loss": 4.5117, + "step": 4110 + }, + { + "epoch": 0.40146484375, + "grad_norm": 0.2228526920080185, + "learning_rate": 0.0003495064070658985, + "loss": 4.5391, + "step": 4111 + }, + { + "epoch": 0.4015625, + "grad_norm": 0.24266505241394043, + 
"learning_rate": 0.0003494403005163017, + "loss": 4.5781, + "step": 4112 + }, + { + "epoch": 0.40166015625, + "grad_norm": 0.2166912853717804, + "learning_rate": 0.00034937418675022304, + "loss": 4.5352, + "step": 4113 + }, + { + "epoch": 0.4017578125, + "grad_norm": 0.21442696452140808, + "learning_rate": 0.00034930806577407173, + "loss": 4.5508, + "step": 4114 + }, + { + "epoch": 0.40185546875, + "grad_norm": 0.20160861313343048, + "learning_rate": 0.0003492419375942578, + "loss": 4.5508, + "step": 4115 + }, + { + "epoch": 0.401953125, + "grad_norm": 0.1921132355928421, + "learning_rate": 0.00034917580221719194, + "loss": 4.5664, + "step": 4116 + }, + { + "epoch": 0.40205078125, + "grad_norm": 0.18674300611019135, + "learning_rate": 0.0003491096596492854, + "loss": 4.5469, + "step": 4117 + }, + { + "epoch": 0.4021484375, + "grad_norm": 0.2147088646888733, + "learning_rate": 0.00034904350989695034, + "loss": 4.5625, + "step": 4118 + }, + { + "epoch": 0.40224609375, + "grad_norm": 0.22489793598651886, + "learning_rate": 0.0003489773529665996, + "loss": 4.5586, + "step": 4119 + }, + { + "epoch": 0.40234375, + "grad_norm": 0.2136559933423996, + "learning_rate": 0.0003489111888646465, + "loss": 4.5664, + "step": 4120 + }, + { + "epoch": 0.40244140625, + "grad_norm": 0.22236715257167816, + "learning_rate": 0.0003488450175975053, + "loss": 4.5312, + "step": 4121 + }, + { + "epoch": 0.4025390625, + "grad_norm": 0.2164093405008316, + "learning_rate": 0.0003487788391715909, + "loss": 4.5117, + "step": 4122 + }, + { + "epoch": 0.40263671875, + "grad_norm": 0.22150009870529175, + "learning_rate": 0.00034871265359331867, + "loss": 4.5625, + "step": 4123 + }, + { + "epoch": 0.402734375, + "grad_norm": 0.20279577374458313, + "learning_rate": 0.00034864646086910507, + "loss": 4.5312, + "step": 4124 + }, + { + "epoch": 0.40283203125, + "grad_norm": 0.1962021440267563, + "learning_rate": 0.0003485802610053668, + "loss": 4.5547, + "step": 4125 + }, + { + "epoch": 0.4029296875, + "grad_norm": 0.2173384726047516, + "learning_rate": 0.00034851405400852174, + "loss": 4.5273, + "step": 4126 + }, + { + "epoch": 0.40302734375, + "grad_norm": 0.25943270325660706, + "learning_rate": 0.00034844783988498805, + "loss": 4.5391, + "step": 4127 + }, + { + "epoch": 0.403125, + "grad_norm": 0.3212743103504181, + "learning_rate": 0.00034838161864118476, + "loss": 4.5312, + "step": 4128 + }, + { + "epoch": 0.40322265625, + "grad_norm": 0.30954211950302124, + "learning_rate": 0.00034831539028353165, + "loss": 4.5547, + "step": 4129 + }, + { + "epoch": 0.4033203125, + "grad_norm": 0.37531328201293945, + "learning_rate": 0.000348249154818449, + "loss": 4.5586, + "step": 4130 + }, + { + "epoch": 0.40341796875, + "grad_norm": 0.22629401087760925, + "learning_rate": 0.00034818291225235785, + "loss": 4.5312, + "step": 4131 + }, + { + "epoch": 0.403515625, + "grad_norm": 0.2598307132720947, + "learning_rate": 0.0003481166625916802, + "loss": 4.543, + "step": 4132 + }, + { + "epoch": 0.40361328125, + "grad_norm": 0.22142809629440308, + "learning_rate": 0.00034805040584283826, + "loss": 4.5625, + "step": 4133 + }, + { + "epoch": 0.4037109375, + "grad_norm": 0.2177702784538269, + "learning_rate": 0.0003479841420122553, + "loss": 4.5469, + "step": 4134 + }, + { + "epoch": 0.40380859375, + "grad_norm": 0.18729080259799957, + "learning_rate": 0.000347917871106355, + "loss": 4.543, + "step": 4135 + }, + { + "epoch": 0.40390625, + "grad_norm": 0.19227266311645508, + "learning_rate": 0.0003478515931315622, + "loss": 4.5391, + "step": 4136 + 
}, + { + "epoch": 0.40400390625, + "grad_norm": 0.1920367181301117, + "learning_rate": 0.00034778530809430173, + "loss": 4.5703, + "step": 4137 + }, + { + "epoch": 0.4041015625, + "grad_norm": 0.20580989122390747, + "learning_rate": 0.0003477190160009997, + "loss": 4.5625, + "step": 4138 + }, + { + "epoch": 0.40419921875, + "grad_norm": 0.21145454049110413, + "learning_rate": 0.0003476527168580826, + "loss": 4.5625, + "step": 4139 + }, + { + "epoch": 0.404296875, + "grad_norm": 0.20169563591480255, + "learning_rate": 0.00034758641067197764, + "loss": 4.5547, + "step": 4140 + }, + { + "epoch": 0.40439453125, + "grad_norm": 0.20844419300556183, + "learning_rate": 0.00034752009744911294, + "loss": 4.5156, + "step": 4141 + }, + { + "epoch": 0.4044921875, + "grad_norm": 0.480696439743042, + "learning_rate": 0.0003474537771959169, + "loss": 4.5234, + "step": 4142 + }, + { + "epoch": 0.40458984375, + "grad_norm": 0.2771954834461212, + "learning_rate": 0.00034738744991881894, + "loss": 4.5352, + "step": 4143 + }, + { + "epoch": 0.4046875, + "grad_norm": 0.23765914142131805, + "learning_rate": 0.00034732111562424894, + "loss": 4.5625, + "step": 4144 + }, + { + "epoch": 0.40478515625, + "grad_norm": 0.22186824679374695, + "learning_rate": 0.0003472547743186377, + "loss": 4.5391, + "step": 4145 + }, + { + "epoch": 0.4048828125, + "grad_norm": 0.2652459144592285, + "learning_rate": 0.0003471884260084165, + "loss": 4.5547, + "step": 4146 + }, + { + "epoch": 0.40498046875, + "grad_norm": 0.2594757676124573, + "learning_rate": 0.00034712207070001735, + "loss": 4.5352, + "step": 4147 + }, + { + "epoch": 0.405078125, + "grad_norm": 0.2902342677116394, + "learning_rate": 0.00034705570839987276, + "loss": 4.5938, + "step": 4148 + }, + { + "epoch": 0.40517578125, + "grad_norm": 0.26344621181488037, + "learning_rate": 0.0003469893391144165, + "loss": 4.5352, + "step": 4149 + }, + { + "epoch": 0.4052734375, + "grad_norm": 0.2741298973560333, + "learning_rate": 0.0003469229628500823, + "loss": 4.5312, + "step": 4150 + }, + { + "epoch": 0.40537109375, + "grad_norm": 0.19027294218540192, + "learning_rate": 0.00034685657961330504, + "loss": 4.5391, + "step": 4151 + }, + { + "epoch": 0.40546875, + "grad_norm": 0.2420579344034195, + "learning_rate": 0.00034679018941052, + "loss": 4.5391, + "step": 4152 + }, + { + "epoch": 0.40556640625, + "grad_norm": 0.2890380024909973, + "learning_rate": 0.00034672379224816346, + "loss": 4.5508, + "step": 4153 + }, + { + "epoch": 0.4056640625, + "grad_norm": 0.24412448704242706, + "learning_rate": 0.00034665738813267194, + "loss": 4.5352, + "step": 4154 + }, + { + "epoch": 0.40576171875, + "grad_norm": 0.21122469007968903, + "learning_rate": 0.00034659097707048303, + "loss": 4.5508, + "step": 4155 + }, + { + "epoch": 0.405859375, + "grad_norm": 0.23695144057273865, + "learning_rate": 0.00034652455906803483, + "loss": 4.5859, + "step": 4156 + }, + { + "epoch": 0.40595703125, + "grad_norm": 0.27835455536842346, + "learning_rate": 0.00034645813413176603, + "loss": 4.5703, + "step": 4157 + }, + { + "epoch": 0.4060546875, + "grad_norm": 0.24840117990970612, + "learning_rate": 0.00034639170226811604, + "loss": 4.5742, + "step": 4158 + }, + { + "epoch": 0.40615234375, + "grad_norm": 0.22549547255039215, + "learning_rate": 0.0003463252634835252, + "loss": 4.5273, + "step": 4159 + }, + { + "epoch": 0.40625, + "grad_norm": 0.20937369763851166, + "learning_rate": 0.000346258817784434, + "loss": 4.5312, + "step": 4160 + }, + { + "epoch": 0.40634765625, + "grad_norm": 0.19097130000591278, + 
"learning_rate": 0.0003461923651772841, + "loss": 4.5156, + "step": 4161 + }, + { + "epoch": 0.4064453125, + "grad_norm": 0.20975615084171295, + "learning_rate": 0.0003461259056685176, + "loss": 4.5195, + "step": 4162 + }, + { + "epoch": 0.40654296875, + "grad_norm": 0.192159041762352, + "learning_rate": 0.0003460594392645773, + "loss": 4.5508, + "step": 4163 + }, + { + "epoch": 0.406640625, + "grad_norm": 0.22325633466243744, + "learning_rate": 0.0003459929659719066, + "loss": 4.5742, + "step": 4164 + }, + { + "epoch": 0.40673828125, + "grad_norm": 0.22643642127513885, + "learning_rate": 0.0003459264857969497, + "loss": 4.543, + "step": 4165 + }, + { + "epoch": 0.4068359375, + "grad_norm": 0.20877677202224731, + "learning_rate": 0.0003458599987461514, + "loss": 4.5195, + "step": 4166 + }, + { + "epoch": 0.40693359375, + "grad_norm": 0.20415176451206207, + "learning_rate": 0.00034579350482595713, + "loss": 4.5273, + "step": 4167 + }, + { + "epoch": 0.40703125, + "grad_norm": 0.19149133563041687, + "learning_rate": 0.000345727004042813, + "loss": 4.5352, + "step": 4168 + }, + { + "epoch": 0.40712890625, + "grad_norm": 0.22354748845100403, + "learning_rate": 0.0003456604964031659, + "loss": 4.5156, + "step": 4169 + }, + { + "epoch": 0.4072265625, + "grad_norm": 0.2413509637117386, + "learning_rate": 0.0003455939819134633, + "loss": 4.5391, + "step": 4170 + }, + { + "epoch": 0.40732421875, + "grad_norm": 0.2143695205450058, + "learning_rate": 0.00034552746058015316, + "loss": 4.5273, + "step": 4171 + }, + { + "epoch": 0.407421875, + "grad_norm": 0.20104680955410004, + "learning_rate": 0.00034546093240968447, + "loss": 4.5312, + "step": 4172 + }, + { + "epoch": 0.40751953125, + "grad_norm": 0.20091788470745087, + "learning_rate": 0.00034539439740850655, + "loss": 4.5312, + "step": 4173 + }, + { + "epoch": 0.4076171875, + "grad_norm": 0.24635468423366547, + "learning_rate": 0.0003453278555830696, + "loss": 4.5508, + "step": 4174 + }, + { + "epoch": 0.40771484375, + "grad_norm": 0.2426307201385498, + "learning_rate": 0.00034526130693982444, + "loss": 4.5391, + "step": 4175 + }, + { + "epoch": 0.4078125, + "grad_norm": 0.24194090068340302, + "learning_rate": 0.00034519475148522236, + "loss": 4.5312, + "step": 4176 + }, + { + "epoch": 0.40791015625, + "grad_norm": 0.22783949971199036, + "learning_rate": 0.0003451281892257155, + "loss": 4.5195, + "step": 4177 + }, + { + "epoch": 0.4080078125, + "grad_norm": 0.2178022712469101, + "learning_rate": 0.0003450616201677568, + "loss": 4.5547, + "step": 4178 + }, + { + "epoch": 0.40810546875, + "grad_norm": 0.238102525472641, + "learning_rate": 0.0003449950443177994, + "loss": 4.5195, + "step": 4179 + }, + { + "epoch": 0.408203125, + "grad_norm": 0.24297474324703217, + "learning_rate": 0.00034492846168229747, + "loss": 4.5312, + "step": 4180 + }, + { + "epoch": 0.40830078125, + "grad_norm": 0.22959333658218384, + "learning_rate": 0.0003448618722677059, + "loss": 4.5547, + "step": 4181 + }, + { + "epoch": 0.4083984375, + "grad_norm": 0.21266379952430725, + "learning_rate": 0.0003447952760804799, + "loss": 4.5469, + "step": 4182 + }, + { + "epoch": 0.40849609375, + "grad_norm": 0.19458381831645966, + "learning_rate": 0.00034472867312707565, + "loss": 4.5625, + "step": 4183 + }, + { + "epoch": 0.40859375, + "grad_norm": 0.21064218878746033, + "learning_rate": 0.00034466206341394974, + "loss": 4.4961, + "step": 4184 + }, + { + "epoch": 0.40869140625, + "grad_norm": 0.21794824302196503, + "learning_rate": 0.0003445954469475596, + "loss": 4.5586, + "step": 4185 
+ }, + { + "epoch": 0.4087890625, + "grad_norm": 0.21801288425922394, + "learning_rate": 0.0003445288237343632, + "loss": 4.5469, + "step": 4186 + }, + { + "epoch": 0.40888671875, + "grad_norm": 0.20109376311302185, + "learning_rate": 0.00034446219378081927, + "loss": 4.543, + "step": 4187 + }, + { + "epoch": 0.408984375, + "grad_norm": 0.26989951729774475, + "learning_rate": 0.00034439555709338704, + "loss": 4.5625, + "step": 4188 + }, + { + "epoch": 0.40908203125, + "grad_norm": 0.2726520597934723, + "learning_rate": 0.0003443289136785266, + "loss": 4.5312, + "step": 4189 + }, + { + "epoch": 0.4091796875, + "grad_norm": 0.22225505113601685, + "learning_rate": 0.0003442622635426985, + "loss": 4.5625, + "step": 4190 + }, + { + "epoch": 0.40927734375, + "grad_norm": 0.2109188586473465, + "learning_rate": 0.000344195606692364, + "loss": 4.5781, + "step": 4191 + }, + { + "epoch": 0.409375, + "grad_norm": 0.21650481224060059, + "learning_rate": 0.00034412894313398505, + "loss": 4.5742, + "step": 4192 + }, + { + "epoch": 0.40947265625, + "grad_norm": 0.2211488038301468, + "learning_rate": 0.00034406227287402433, + "loss": 4.5625, + "step": 4193 + }, + { + "epoch": 0.4095703125, + "grad_norm": 0.20261113345623016, + "learning_rate": 0.00034399559591894493, + "loss": 4.5547, + "step": 4194 + }, + { + "epoch": 0.40966796875, + "grad_norm": 0.19615773856639862, + "learning_rate": 0.00034392891227521074, + "loss": 4.5273, + "step": 4195 + }, + { + "epoch": 0.409765625, + "grad_norm": 0.1970633864402771, + "learning_rate": 0.0003438622219492863, + "loss": 4.5469, + "step": 4196 + }, + { + "epoch": 0.40986328125, + "grad_norm": 0.21661871671676636, + "learning_rate": 0.00034379552494763694, + "loss": 4.5664, + "step": 4197 + }, + { + "epoch": 0.4099609375, + "grad_norm": 0.19350437819957733, + "learning_rate": 0.00034372882127672833, + "loss": 4.5273, + "step": 4198 + }, + { + "epoch": 0.41005859375, + "grad_norm": 0.19990530610084534, + "learning_rate": 0.0003436621109430269, + "loss": 4.5508, + "step": 4199 + }, + { + "epoch": 0.41015625, + "grad_norm": 0.21591399610042572, + "learning_rate": 0.00034359539395299987, + "loss": 4.5273, + "step": 4200 + }, + { + "epoch": 0.41025390625, + "grad_norm": 0.19848501682281494, + "learning_rate": 0.0003435286703131149, + "loss": 4.5547, + "step": 4201 + }, + { + "epoch": 0.4103515625, + "grad_norm": 0.21190103888511658, + "learning_rate": 0.00034346194002984057, + "loss": 4.5664, + "step": 4202 + }, + { + "epoch": 0.41044921875, + "grad_norm": 0.22887387871742249, + "learning_rate": 0.0003433952031096456, + "loss": 4.5234, + "step": 4203 + }, + { + "epoch": 0.410546875, + "grad_norm": 0.21692945063114166, + "learning_rate": 0.00034332845955900007, + "loss": 4.5195, + "step": 4204 + }, + { + "epoch": 0.41064453125, + "grad_norm": 0.19616368412971497, + "learning_rate": 0.0003432617093843741, + "loss": 4.5469, + "step": 4205 + }, + { + "epoch": 0.4107421875, + "grad_norm": 0.219509094953537, + "learning_rate": 0.0003431949525922386, + "loss": 4.5664, + "step": 4206 + }, + { + "epoch": 0.41083984375, + "grad_norm": 0.19620618224143982, + "learning_rate": 0.0003431281891890654, + "loss": 4.5195, + "step": 4207 + }, + { + "epoch": 0.4109375, + "grad_norm": 0.22064127027988434, + "learning_rate": 0.0003430614191813266, + "loss": 4.5352, + "step": 4208 + }, + { + "epoch": 0.41103515625, + "grad_norm": 0.21091030538082123, + "learning_rate": 0.0003429946425754951, + "loss": 4.5195, + "step": 4209 + }, + { + "epoch": 0.4111328125, + "grad_norm": 0.20604848861694336, + 
"learning_rate": 0.00034292785937804456, + "loss": 4.5156, + "step": 4210 + }, + { + "epoch": 0.41123046875, + "grad_norm": 0.19509103894233704, + "learning_rate": 0.000342861069595449, + "loss": 4.5312, + "step": 4211 + }, + { + "epoch": 0.411328125, + "grad_norm": 0.27536261081695557, + "learning_rate": 0.00034279427323418343, + "loss": 4.5547, + "step": 4212 + }, + { + "epoch": 0.41142578125, + "grad_norm": 0.20961979031562805, + "learning_rate": 0.0003427274703007231, + "loss": 4.5742, + "step": 4213 + }, + { + "epoch": 0.4115234375, + "grad_norm": 0.21433280408382416, + "learning_rate": 0.0003426606608015442, + "loss": 4.5156, + "step": 4214 + }, + { + "epoch": 0.41162109375, + "grad_norm": 0.20710158348083496, + "learning_rate": 0.00034259384474312346, + "loss": 4.5352, + "step": 4215 + }, + { + "epoch": 0.41171875, + "grad_norm": 0.2086396962404251, + "learning_rate": 0.0003425270221319383, + "loss": 4.5391, + "step": 4216 + }, + { + "epoch": 0.41181640625, + "grad_norm": 0.208729088306427, + "learning_rate": 0.0003424601929744666, + "loss": 4.5117, + "step": 4217 + }, + { + "epoch": 0.4119140625, + "grad_norm": 0.19285652041435242, + "learning_rate": 0.00034239335727718703, + "loss": 4.5234, + "step": 4218 + }, + { + "epoch": 0.41201171875, + "grad_norm": 0.20926572382450104, + "learning_rate": 0.0003423265150465788, + "loss": 4.5742, + "step": 4219 + }, + { + "epoch": 0.412109375, + "grad_norm": 0.21528217196464539, + "learning_rate": 0.000342259666289122, + "loss": 4.5, + "step": 4220 + }, + { + "epoch": 0.41220703125, + "grad_norm": 0.20822221040725708, + "learning_rate": 0.000342192811011297, + "loss": 4.5664, + "step": 4221 + }, + { + "epoch": 0.4123046875, + "grad_norm": 0.19343708455562592, + "learning_rate": 0.0003421259492195851, + "loss": 4.5547, + "step": 4222 + }, + { + "epoch": 0.41240234375, + "grad_norm": 0.20270302891731262, + "learning_rate": 0.00034205908092046784, + "loss": 4.582, + "step": 4223 + }, + { + "epoch": 0.4125, + "grad_norm": 0.18494850397109985, + "learning_rate": 0.0003419922061204279, + "loss": 4.5156, + "step": 4224 + }, + { + "epoch": 0.41259765625, + "grad_norm": 0.20782315731048584, + "learning_rate": 0.0003419253248259483, + "loss": 4.5117, + "step": 4225 + }, + { + "epoch": 0.4126953125, + "grad_norm": 0.20236442983150482, + "learning_rate": 0.00034185843704351266, + "loss": 4.5469, + "step": 4226 + }, + { + "epoch": 0.41279296875, + "grad_norm": 0.21104386448860168, + "learning_rate": 0.00034179154277960526, + "loss": 4.5078, + "step": 4227 + }, + { + "epoch": 0.412890625, + "grad_norm": 0.22649024426937103, + "learning_rate": 0.00034172464204071113, + "loss": 4.5547, + "step": 4228 + }, + { + "epoch": 0.41298828125, + "grad_norm": 0.24051690101623535, + "learning_rate": 0.00034165773483331587, + "loss": 4.5664, + "step": 4229 + }, + { + "epoch": 0.4130859375, + "grad_norm": 0.22844736278057098, + "learning_rate": 0.00034159082116390555, + "loss": 4.5508, + "step": 4230 + }, + { + "epoch": 0.41318359375, + "grad_norm": 0.22087186574935913, + "learning_rate": 0.00034152390103896706, + "loss": 4.5312, + "step": 4231 + }, + { + "epoch": 0.41328125, + "grad_norm": 0.2391183227300644, + "learning_rate": 0.0003414569744649879, + "loss": 4.5117, + "step": 4232 + }, + { + "epoch": 0.41337890625, + "grad_norm": 0.21873103082180023, + "learning_rate": 0.0003413900414484562, + "loss": 4.543, + "step": 4233 + }, + { + "epoch": 0.4134765625, + "grad_norm": 0.26047226786613464, + "learning_rate": 0.0003413231019958605, + "loss": 4.5586, + "step": 4234 + 
}, + { + "epoch": 0.41357421875, + "grad_norm": 0.2003972828388214, + "learning_rate": 0.0003412561561136902, + "loss": 4.5391, + "step": 4235 + }, + { + "epoch": 0.413671875, + "grad_norm": 0.22209486365318298, + "learning_rate": 0.00034118920380843523, + "loss": 4.5469, + "step": 4236 + }, + { + "epoch": 0.41376953125, + "grad_norm": 0.2570430636405945, + "learning_rate": 0.0003411222450865862, + "loss": 4.5234, + "step": 4237 + }, + { + "epoch": 0.4138671875, + "grad_norm": 0.3031357228755951, + "learning_rate": 0.0003410552799546342, + "loss": 4.5352, + "step": 4238 + }, + { + "epoch": 0.41396484375, + "grad_norm": 0.2697660028934479, + "learning_rate": 0.0003409883084190712, + "loss": 4.5234, + "step": 4239 + }, + { + "epoch": 0.4140625, + "grad_norm": 0.17671824991703033, + "learning_rate": 0.00034092133048638955, + "loss": 4.5547, + "step": 4240 + }, + { + "epoch": 0.41416015625, + "grad_norm": 0.23667357861995697, + "learning_rate": 0.00034085434616308233, + "loss": 4.5469, + "step": 4241 + }, + { + "epoch": 0.4142578125, + "grad_norm": 0.24496574699878693, + "learning_rate": 0.0003407873554556432, + "loss": 4.5586, + "step": 4242 + }, + { + "epoch": 0.41435546875, + "grad_norm": 0.22179105877876282, + "learning_rate": 0.0003407203583705665, + "loss": 4.5469, + "step": 4243 + }, + { + "epoch": 0.414453125, + "grad_norm": 0.21274659037590027, + "learning_rate": 0.000340653354914347, + "loss": 4.5156, + "step": 4244 + }, + { + "epoch": 0.41455078125, + "grad_norm": 0.243591770529747, + "learning_rate": 0.0003405863450934804, + "loss": 4.5234, + "step": 4245 + }, + { + "epoch": 0.4146484375, + "grad_norm": 0.2619163990020752, + "learning_rate": 0.0003405193289144628, + "loss": 4.5195, + "step": 4246 + }, + { + "epoch": 0.41474609375, + "grad_norm": 0.24049386382102966, + "learning_rate": 0.0003404523063837909, + "loss": 4.5391, + "step": 4247 + }, + { + "epoch": 0.41484375, + "grad_norm": 0.20232141017913818, + "learning_rate": 0.0003403852775079621, + "loss": 4.5508, + "step": 4248 + }, + { + "epoch": 0.41494140625, + "grad_norm": 0.2355966418981552, + "learning_rate": 0.00034031824229347444, + "loss": 4.5312, + "step": 4249 + }, + { + "epoch": 0.4150390625, + "grad_norm": 0.27130910754203796, + "learning_rate": 0.00034025120074682657, + "loss": 4.5352, + "step": 4250 + }, + { + "epoch": 0.41513671875, + "grad_norm": 0.24365438520908356, + "learning_rate": 0.0003401841528745176, + "loss": 4.5312, + "step": 4251 + }, + { + "epoch": 0.415234375, + "grad_norm": 0.19915810227394104, + "learning_rate": 0.0003401170986830474, + "loss": 4.5312, + "step": 4252 + }, + { + "epoch": 0.41533203125, + "grad_norm": 0.22827087342739105, + "learning_rate": 0.00034005003817891646, + "loss": 4.5469, + "step": 4253 + }, + { + "epoch": 0.4154296875, + "grad_norm": 0.2281893640756607, + "learning_rate": 0.0003399829713686258, + "loss": 4.5195, + "step": 4254 + }, + { + "epoch": 0.41552734375, + "grad_norm": 0.20261000096797943, + "learning_rate": 0.00033991589825867706, + "loss": 4.5508, + "step": 4255 + }, + { + "epoch": 0.415625, + "grad_norm": 0.20133930444717407, + "learning_rate": 0.0003398488188555726, + "loss": 4.5469, + "step": 4256 + }, + { + "epoch": 0.41572265625, + "grad_norm": 0.23709116876125336, + "learning_rate": 0.00033978173316581527, + "loss": 4.5391, + "step": 4257 + }, + { + "epoch": 0.4158203125, + "grad_norm": 0.1949966996908188, + "learning_rate": 0.0003397146411959086, + "loss": 4.5586, + "step": 4258 + }, + { + "epoch": 0.41591796875, + "grad_norm": 0.2117062658071518, + 
"learning_rate": 0.00033964754295235674, + "loss": 4.5469, + "step": 4259 + }, + { + "epoch": 0.416015625, + "grad_norm": 0.24832065403461456, + "learning_rate": 0.0003395804384416643, + "loss": 4.5391, + "step": 4260 + }, + { + "epoch": 0.41611328125, + "grad_norm": 0.25508108735084534, + "learning_rate": 0.0003395133276703367, + "loss": 4.5391, + "step": 4261 + }, + { + "epoch": 0.4162109375, + "grad_norm": 0.20013858377933502, + "learning_rate": 0.00033944621064487977, + "loss": 4.5195, + "step": 4262 + }, + { + "epoch": 0.41630859375, + "grad_norm": 0.22289419174194336, + "learning_rate": 0.0003393790873718002, + "loss": 4.5547, + "step": 4263 + }, + { + "epoch": 0.41640625, + "grad_norm": 0.21877996623516083, + "learning_rate": 0.000339311957857605, + "loss": 4.5469, + "step": 4264 + }, + { + "epoch": 0.41650390625, + "grad_norm": 0.20020975172519684, + "learning_rate": 0.000339244822108802, + "loss": 4.5391, + "step": 4265 + }, + { + "epoch": 0.4166015625, + "grad_norm": 0.19105389714241028, + "learning_rate": 0.0003391776801318995, + "loss": 4.5586, + "step": 4266 + }, + { + "epoch": 0.41669921875, + "grad_norm": 0.20086093246936798, + "learning_rate": 0.00033911053193340645, + "loss": 4.5586, + "step": 4267 + }, + { + "epoch": 0.416796875, + "grad_norm": 0.21063904464244843, + "learning_rate": 0.0003390433775198325, + "loss": 4.5234, + "step": 4268 + }, + { + "epoch": 0.41689453125, + "grad_norm": 0.1937112957239151, + "learning_rate": 0.00033897621689768775, + "loss": 4.5078, + "step": 4269 + }, + { + "epoch": 0.4169921875, + "grad_norm": 0.21942195296287537, + "learning_rate": 0.000338909050073483, + "loss": 4.543, + "step": 4270 + }, + { + "epoch": 0.41708984375, + "grad_norm": 0.23989495635032654, + "learning_rate": 0.0003388418770537296, + "loss": 4.5508, + "step": 4271 + }, + { + "epoch": 0.4171875, + "grad_norm": 0.2318723201751709, + "learning_rate": 0.00033877469784493946, + "loss": 4.5391, + "step": 4272 + }, + { + "epoch": 0.41728515625, + "grad_norm": 0.198454350233078, + "learning_rate": 0.00033870751245362526, + "loss": 4.5352, + "step": 4273 + }, + { + "epoch": 0.4173828125, + "grad_norm": 0.188365638256073, + "learning_rate": 0.0003386403208863001, + "loss": 4.543, + "step": 4274 + }, + { + "epoch": 0.41748046875, + "grad_norm": 0.20459558069705963, + "learning_rate": 0.00033857312314947776, + "loss": 4.543, + "step": 4275 + }, + { + "epoch": 0.417578125, + "grad_norm": 0.2199014276266098, + "learning_rate": 0.00033850591924967254, + "loss": 4.5234, + "step": 4276 + }, + { + "epoch": 0.41767578125, + "grad_norm": 0.21258705854415894, + "learning_rate": 0.0003384387091933995, + "loss": 4.5312, + "step": 4277 + }, + { + "epoch": 0.4177734375, + "grad_norm": 0.18151478469371796, + "learning_rate": 0.0003383714929871742, + "loss": 4.5117, + "step": 4278 + }, + { + "epoch": 0.41787109375, + "grad_norm": 0.2078961730003357, + "learning_rate": 0.0003383042706375127, + "loss": 4.5039, + "step": 4279 + }, + { + "epoch": 0.41796875, + "grad_norm": 0.19561214745044708, + "learning_rate": 0.00033823704215093196, + "loss": 4.5312, + "step": 4280 + }, + { + "epoch": 0.41806640625, + "grad_norm": 0.2092207670211792, + "learning_rate": 0.00033816980753394903, + "loss": 4.5078, + "step": 4281 + }, + { + "epoch": 0.4181640625, + "grad_norm": 0.21523243188858032, + "learning_rate": 0.000338102566793082, + "loss": 4.5508, + "step": 4282 + }, + { + "epoch": 0.41826171875, + "grad_norm": 0.1868700087070465, + "learning_rate": 0.00033803531993484943, + "loss": 4.5352, + "step": 4283 + }, 
+ { + "epoch": 0.418359375, + "grad_norm": 0.2173801064491272, + "learning_rate": 0.0003379680669657704, + "loss": 4.5703, + "step": 4284 + }, + { + "epoch": 0.41845703125, + "grad_norm": 0.234948992729187, + "learning_rate": 0.0003379008078923647, + "loss": 4.5469, + "step": 4285 + }, + { + "epoch": 0.4185546875, + "grad_norm": 0.24424946308135986, + "learning_rate": 0.0003378335427211525, + "loss": 4.543, + "step": 4286 + }, + { + "epoch": 0.41865234375, + "grad_norm": 0.22337597608566284, + "learning_rate": 0.00033776627145865485, + "loss": 4.5469, + "step": 4287 + }, + { + "epoch": 0.41875, + "grad_norm": 0.2037271410226822, + "learning_rate": 0.0003376989941113932, + "loss": 4.5234, + "step": 4288 + }, + { + "epoch": 0.41884765625, + "grad_norm": 0.1945188045501709, + "learning_rate": 0.00033763171068588955, + "loss": 4.5117, + "step": 4289 + }, + { + "epoch": 0.4189453125, + "grad_norm": 0.21202221512794495, + "learning_rate": 0.00033756442118866657, + "loss": 4.5273, + "step": 4290 + }, + { + "epoch": 0.41904296875, + "grad_norm": 0.2313307523727417, + "learning_rate": 0.00033749712562624766, + "loss": 4.5195, + "step": 4291 + }, + { + "epoch": 0.419140625, + "grad_norm": 0.23277506232261658, + "learning_rate": 0.0003374298240051566, + "loss": 4.5586, + "step": 4292 + }, + { + "epoch": 0.41923828125, + "grad_norm": 0.2052488774061203, + "learning_rate": 0.0003373625163319178, + "loss": 4.5508, + "step": 4293 + }, + { + "epoch": 0.4193359375, + "grad_norm": 0.1992032825946808, + "learning_rate": 0.0003372952026130563, + "loss": 4.5781, + "step": 4294 + }, + { + "epoch": 0.41943359375, + "grad_norm": 0.19684641063213348, + "learning_rate": 0.0003372278828550977, + "loss": 4.5312, + "step": 4295 + }, + { + "epoch": 0.41953125, + "grad_norm": 0.19456447660923004, + "learning_rate": 0.0003371605570645682, + "loss": 4.5312, + "step": 4296 + }, + { + "epoch": 0.41962890625, + "grad_norm": 0.1943928748369217, + "learning_rate": 0.00033709322524799463, + "loss": 4.5391, + "step": 4297 + }, + { + "epoch": 0.4197265625, + "grad_norm": 0.1950366199016571, + "learning_rate": 0.00033702588741190423, + "loss": 4.5195, + "step": 4298 + }, + { + "epoch": 0.41982421875, + "grad_norm": 0.1896734982728958, + "learning_rate": 0.00033695854356282495, + "loss": 4.5664, + "step": 4299 + }, + { + "epoch": 0.419921875, + "grad_norm": 0.18850304186344147, + "learning_rate": 0.0003368911937072855, + "loss": 4.5039, + "step": 4300 + }, + { + "epoch": 0.42001953125, + "grad_norm": 0.2176278978586197, + "learning_rate": 0.00033682383785181493, + "loss": 4.5703, + "step": 4301 + }, + { + "epoch": 0.4201171875, + "grad_norm": 0.2238473892211914, + "learning_rate": 0.0003367564760029428, + "loss": 4.5391, + "step": 4302 + }, + { + "epoch": 0.42021484375, + "grad_norm": 0.24081338942050934, + "learning_rate": 0.00033668910816719936, + "loss": 4.5156, + "step": 4303 + }, + { + "epoch": 0.4203125, + "grad_norm": 0.20266522467136383, + "learning_rate": 0.0003366217343511158, + "loss": 4.5, + "step": 4304 + }, + { + "epoch": 0.42041015625, + "grad_norm": 0.1970774084329605, + "learning_rate": 0.0003365543545612232, + "loss": 4.5625, + "step": 4305 + }, + { + "epoch": 0.4205078125, + "grad_norm": 0.22202615439891815, + "learning_rate": 0.0003364869688040536, + "loss": 4.5312, + "step": 4306 + }, + { + "epoch": 0.42060546875, + "grad_norm": 0.20657528936862946, + "learning_rate": 0.0003364195770861398, + "loss": 4.5469, + "step": 4307 + }, + { + "epoch": 0.420703125, + "grad_norm": 0.21875396370887756, + "learning_rate": 
0.0003363521794140148, + "loss": 4.5156, + "step": 4308 + }, + { + "epoch": 0.42080078125, + "grad_norm": 0.20766115188598633, + "learning_rate": 0.00033628477579421246, + "loss": 4.5273, + "step": 4309 + }, + { + "epoch": 0.4208984375, + "grad_norm": 0.1977689415216446, + "learning_rate": 0.00033621736623326706, + "loss": 4.5625, + "step": 4310 + }, + { + "epoch": 0.42099609375, + "grad_norm": 0.20930136740207672, + "learning_rate": 0.00033614995073771343, + "loss": 4.5352, + "step": 4311 + }, + { + "epoch": 0.42109375, + "grad_norm": 0.20182935893535614, + "learning_rate": 0.0003360825293140871, + "loss": 4.5312, + "step": 4312 + }, + { + "epoch": 0.42119140625, + "grad_norm": 0.20800578594207764, + "learning_rate": 0.0003360151019689242, + "loss": 4.5547, + "step": 4313 + }, + { + "epoch": 0.4212890625, + "grad_norm": 0.21498483419418335, + "learning_rate": 0.0003359476687087612, + "loss": 4.5117, + "step": 4314 + }, + { + "epoch": 0.42138671875, + "grad_norm": 0.21331526339054108, + "learning_rate": 0.00033588022954013537, + "loss": 4.5469, + "step": 4315 + }, + { + "epoch": 0.421484375, + "grad_norm": 0.19197538495063782, + "learning_rate": 0.0003358127844695845, + "loss": 4.5586, + "step": 4316 + }, + { + "epoch": 0.42158203125, + "grad_norm": 0.20029297471046448, + "learning_rate": 0.000335745333503647, + "loss": 4.5391, + "step": 4317 + }, + { + "epoch": 0.4216796875, + "grad_norm": 0.20516625046730042, + "learning_rate": 0.0003356778766488615, + "loss": 4.5352, + "step": 4318 + }, + { + "epoch": 0.42177734375, + "grad_norm": 0.22141019999980927, + "learning_rate": 0.00033561041391176784, + "loss": 4.5703, + "step": 4319 + }, + { + "epoch": 0.421875, + "grad_norm": 0.2075464278459549, + "learning_rate": 0.00033554294529890586, + "loss": 4.5703, + "step": 4320 + }, + { + "epoch": 0.42197265625, + "grad_norm": 0.19836363196372986, + "learning_rate": 0.0003354754708168162, + "loss": 4.5117, + "step": 4321 + }, + { + "epoch": 0.4220703125, + "grad_norm": 0.20519572496414185, + "learning_rate": 0.00033540799047204024, + "loss": 4.5469, + "step": 4322 + }, + { + "epoch": 0.42216796875, + "grad_norm": 0.22865843772888184, + "learning_rate": 0.0003353405042711195, + "loss": 4.4961, + "step": 4323 + }, + { + "epoch": 0.422265625, + "grad_norm": 0.2891612946987152, + "learning_rate": 0.00033527301222059636, + "loss": 4.5508, + "step": 4324 + }, + { + "epoch": 0.42236328125, + "grad_norm": 0.2784929871559143, + "learning_rate": 0.0003352055143270138, + "loss": 4.5195, + "step": 4325 + }, + { + "epoch": 0.4224609375, + "grad_norm": 0.2218463271856308, + "learning_rate": 0.00033513801059691523, + "loss": 4.5508, + "step": 4326 + }, + { + "epoch": 0.42255859375, + "grad_norm": 0.19428327679634094, + "learning_rate": 0.00033507050103684475, + "loss": 4.5234, + "step": 4327 + }, + { + "epoch": 0.42265625, + "grad_norm": 0.2366311252117157, + "learning_rate": 0.0003350029856533468, + "loss": 4.5234, + "step": 4328 + }, + { + "epoch": 0.42275390625, + "grad_norm": 0.2606178820133209, + "learning_rate": 0.0003349354644529666, + "loss": 4.5469, + "step": 4329 + }, + { + "epoch": 0.4228515625, + "grad_norm": 0.23178665339946747, + "learning_rate": 0.00033486793744225005, + "loss": 4.5, + "step": 4330 + }, + { + "epoch": 0.42294921875, + "grad_norm": 0.19918088614940643, + "learning_rate": 0.0003348004046277432, + "loss": 4.5156, + "step": 4331 + }, + { + "epoch": 0.423046875, + "grad_norm": 0.2165551483631134, + "learning_rate": 0.00033473286601599304, + "loss": 4.5352, + "step": 4332 + }, + { + 
"epoch": 0.42314453125, + "grad_norm": 0.24429765343666077, + "learning_rate": 0.0003346653216135469, + "loss": 4.5469, + "step": 4333 + }, + { + "epoch": 0.4232421875, + "grad_norm": 0.21038371324539185, + "learning_rate": 0.00033459777142695276, + "loss": 4.5352, + "step": 4334 + }, + { + "epoch": 0.42333984375, + "grad_norm": 0.1983853131532669, + "learning_rate": 0.0003345302154627592, + "loss": 4.5664, + "step": 4335 + }, + { + "epoch": 0.4234375, + "grad_norm": 0.24756203591823578, + "learning_rate": 0.00033446265372751526, + "loss": 4.5586, + "step": 4336 + }, + { + "epoch": 0.42353515625, + "grad_norm": 0.25165197253227234, + "learning_rate": 0.00033439508622777066, + "loss": 4.5273, + "step": 4337 + }, + { + "epoch": 0.4236328125, + "grad_norm": 0.21070025861263275, + "learning_rate": 0.00033432751297007553, + "loss": 4.5391, + "step": 4338 + }, + { + "epoch": 0.42373046875, + "grad_norm": 0.19713866710662842, + "learning_rate": 0.0003342599339609807, + "loss": 4.5352, + "step": 4339 + }, + { + "epoch": 0.423828125, + "grad_norm": 0.20653079450130463, + "learning_rate": 0.0003341923492070374, + "loss": 4.5391, + "step": 4340 + }, + { + "epoch": 0.42392578125, + "grad_norm": 0.20679520070552826, + "learning_rate": 0.00033412475871479763, + "loss": 4.5312, + "step": 4341 + }, + { + "epoch": 0.4240234375, + "grad_norm": 0.20256011188030243, + "learning_rate": 0.0003340571624908138, + "loss": 4.543, + "step": 4342 + }, + { + "epoch": 0.42412109375, + "grad_norm": 0.18537397682666779, + "learning_rate": 0.00033398956054163885, + "loss": 4.5195, + "step": 4343 + }, + { + "epoch": 0.42421875, + "grad_norm": 0.22018860280513763, + "learning_rate": 0.00033392195287382644, + "loss": 4.5586, + "step": 4344 + }, + { + "epoch": 0.42431640625, + "grad_norm": 0.24714703857898712, + "learning_rate": 0.00033385433949393055, + "loss": 4.5586, + "step": 4345 + }, + { + "epoch": 0.4244140625, + "grad_norm": 0.23995360732078552, + "learning_rate": 0.0003337867204085059, + "loss": 4.5234, + "step": 4346 + }, + { + "epoch": 0.42451171875, + "grad_norm": 0.21916784346103668, + "learning_rate": 0.00033371909562410776, + "loss": 4.5469, + "step": 4347 + }, + { + "epoch": 0.424609375, + "grad_norm": 0.23397497832775116, + "learning_rate": 0.0003336514651472917, + "loss": 4.5508, + "step": 4348 + }, + { + "epoch": 0.42470703125, + "grad_norm": 0.23373901844024658, + "learning_rate": 0.0003335838289846142, + "loss": 4.543, + "step": 4349 + }, + { + "epoch": 0.4248046875, + "grad_norm": 0.2716101408004761, + "learning_rate": 0.0003335161871426321, + "loss": 4.5195, + "step": 4350 + }, + { + "epoch": 0.42490234375, + "grad_norm": 0.27141520380973816, + "learning_rate": 0.0003334485396279029, + "loss": 4.5273, + "step": 4351 + }, + { + "epoch": 0.425, + "grad_norm": 0.19498299062252045, + "learning_rate": 0.0003333808864469843, + "loss": 4.5312, + "step": 4352 + }, + { + "epoch": 0.42509765625, + "grad_norm": 0.21742255985736847, + "learning_rate": 0.0003333132276064351, + "loss": 4.5547, + "step": 4353 + }, + { + "epoch": 0.4251953125, + "grad_norm": 0.22053396701812744, + "learning_rate": 0.00033324556311281427, + "loss": 4.5664, + "step": 4354 + }, + { + "epoch": 0.42529296875, + "grad_norm": 0.17961496114730835, + "learning_rate": 0.00033317789297268135, + "loss": 4.5078, + "step": 4355 + }, + { + "epoch": 0.425390625, + "grad_norm": 0.20866578817367554, + "learning_rate": 0.0003331102171925966, + "loss": 4.5508, + "step": 4356 + }, + { + "epoch": 0.42548828125, + "grad_norm": 0.2137892097234726, + 
"learning_rate": 0.0003330425357791207, + "loss": 4.543, + "step": 4357 + }, + { + "epoch": 0.4255859375, + "grad_norm": 0.20170187950134277, + "learning_rate": 0.00033297484873881484, + "loss": 4.5312, + "step": 4358 + }, + { + "epoch": 0.42568359375, + "grad_norm": 0.2101389765739441, + "learning_rate": 0.0003329071560782409, + "loss": 4.5156, + "step": 4359 + }, + { + "epoch": 0.42578125, + "grad_norm": 0.20657703280448914, + "learning_rate": 0.0003328394578039612, + "loss": 4.5273, + "step": 4360 + }, + { + "epoch": 0.42587890625, + "grad_norm": 0.22372931241989136, + "learning_rate": 0.00033277175392253865, + "loss": 4.5664, + "step": 4361 + }, + { + "epoch": 0.4259765625, + "grad_norm": 0.23013439774513245, + "learning_rate": 0.0003327040444405366, + "loss": 4.5195, + "step": 4362 + }, + { + "epoch": 0.42607421875, + "grad_norm": 0.21487532556056976, + "learning_rate": 0.0003326363293645191, + "loss": 4.543, + "step": 4363 + }, + { + "epoch": 0.426171875, + "grad_norm": 0.22529487311840057, + "learning_rate": 0.0003325686087010507, + "loss": 4.5312, + "step": 4364 + }, + { + "epoch": 0.42626953125, + "grad_norm": 0.2539598345756531, + "learning_rate": 0.00033250088245669637, + "loss": 4.5742, + "step": 4365 + }, + { + "epoch": 0.4263671875, + "grad_norm": 0.2587740421295166, + "learning_rate": 0.00033243315063802173, + "loss": 4.5391, + "step": 4366 + }, + { + "epoch": 0.42646484375, + "grad_norm": 0.21654079854488373, + "learning_rate": 0.00033236541325159293, + "loss": 4.5586, + "step": 4367 + }, + { + "epoch": 0.4265625, + "grad_norm": 0.20469528436660767, + "learning_rate": 0.00033229767030397666, + "loss": 4.5352, + "step": 4368 + }, + { + "epoch": 0.42666015625, + "grad_norm": 0.21571515500545502, + "learning_rate": 0.00033222992180174016, + "loss": 4.543, + "step": 4369 + }, + { + "epoch": 0.4267578125, + "grad_norm": 0.21472634375095367, + "learning_rate": 0.0003321621677514511, + "loss": 4.5117, + "step": 4370 + }, + { + "epoch": 0.42685546875, + "grad_norm": 0.2040044367313385, + "learning_rate": 0.0003320944081596779, + "loss": 4.5078, + "step": 4371 + }, + { + "epoch": 0.426953125, + "grad_norm": 0.19609779119491577, + "learning_rate": 0.00033202664303298935, + "loss": 4.543, + "step": 4372 + }, + { + "epoch": 0.42705078125, + "grad_norm": 0.2218821793794632, + "learning_rate": 0.00033195887237795474, + "loss": 4.5469, + "step": 4373 + }, + { + "epoch": 0.4271484375, + "grad_norm": 0.20647819340229034, + "learning_rate": 0.00033189109620114407, + "loss": 4.5117, + "step": 4374 + }, + { + "epoch": 0.42724609375, + "grad_norm": 0.21101722121238708, + "learning_rate": 0.00033182331450912775, + "loss": 4.5352, + "step": 4375 + }, + { + "epoch": 0.42734375, + "grad_norm": 0.18750539422035217, + "learning_rate": 0.0003317555273084767, + "loss": 4.543, + "step": 4376 + }, + { + "epoch": 0.42744140625, + "grad_norm": 0.2190512716770172, + "learning_rate": 0.0003316877346057625, + "loss": 4.5352, + "step": 4377 + }, + { + "epoch": 0.4275390625, + "grad_norm": 0.21920260787010193, + "learning_rate": 0.0003316199364075572, + "loss": 4.5039, + "step": 4378 + }, + { + "epoch": 0.42763671875, + "grad_norm": 0.2301151007413864, + "learning_rate": 0.00033155213272043326, + "loss": 4.5703, + "step": 4379 + }, + { + "epoch": 0.427734375, + "grad_norm": 0.20789887011051178, + "learning_rate": 0.00033148432355096396, + "loss": 4.543, + "step": 4380 + }, + { + "epoch": 0.42783203125, + "grad_norm": 0.17820198833942413, + "learning_rate": 0.00033141650890572286, + "loss": 4.5117, + "step": 
4381 + }, + { + "epoch": 0.4279296875, + "grad_norm": 0.21502824127674103, + "learning_rate": 0.00033134868879128407, + "loss": 4.5352, + "step": 4382 + }, + { + "epoch": 0.42802734375, + "grad_norm": 0.232644721865654, + "learning_rate": 0.00033128086321422236, + "loss": 4.5, + "step": 4383 + }, + { + "epoch": 0.428125, + "grad_norm": 0.21255506575107574, + "learning_rate": 0.00033121303218111293, + "loss": 4.5625, + "step": 4384 + }, + { + "epoch": 0.42822265625, + "grad_norm": 0.18026532232761383, + "learning_rate": 0.0003311451956985316, + "loss": 4.5312, + "step": 4385 + }, + { + "epoch": 0.4283203125, + "grad_norm": 0.19507016241550446, + "learning_rate": 0.00033107735377305457, + "loss": 4.5312, + "step": 4386 + }, + { + "epoch": 0.42841796875, + "grad_norm": 0.20661699771881104, + "learning_rate": 0.00033100950641125863, + "loss": 4.5312, + "step": 4387 + }, + { + "epoch": 0.428515625, + "grad_norm": 0.20132943987846375, + "learning_rate": 0.00033094165361972124, + "loss": 4.543, + "step": 4388 + }, + { + "epoch": 0.42861328125, + "grad_norm": 0.1977328509092331, + "learning_rate": 0.00033087379540502025, + "loss": 4.5039, + "step": 4389 + }, + { + "epoch": 0.4287109375, + "grad_norm": 0.21218714118003845, + "learning_rate": 0.00033080593177373395, + "loss": 4.5195, + "step": 4390 + }, + { + "epoch": 0.42880859375, + "grad_norm": 0.2273605465888977, + "learning_rate": 0.00033073806273244133, + "loss": 4.5234, + "step": 4391 + }, + { + "epoch": 0.42890625, + "grad_norm": 0.2143947035074234, + "learning_rate": 0.0003306701882877218, + "loss": 4.5312, + "step": 4392 + }, + { + "epoch": 0.42900390625, + "grad_norm": 0.20679841935634613, + "learning_rate": 0.0003306023084461555, + "loss": 4.5391, + "step": 4393 + }, + { + "epoch": 0.4291015625, + "grad_norm": 0.18199476599693298, + "learning_rate": 0.0003305344232143226, + "loss": 4.5273, + "step": 4394 + }, + { + "epoch": 0.42919921875, + "grad_norm": 0.22433419525623322, + "learning_rate": 0.0003304665325988043, + "loss": 4.543, + "step": 4395 + }, + { + "epoch": 0.429296875, + "grad_norm": 0.23837102949619293, + "learning_rate": 0.00033039863660618213, + "loss": 4.5234, + "step": 4396 + }, + { + "epoch": 0.42939453125, + "grad_norm": 0.23315425217151642, + "learning_rate": 0.00033033073524303826, + "loss": 4.5312, + "step": 4397 + }, + { + "epoch": 0.4294921875, + "grad_norm": 0.19370082020759583, + "learning_rate": 0.00033026282851595496, + "loss": 4.5625, + "step": 4398 + }, + { + "epoch": 0.42958984375, + "grad_norm": 0.18983274698257446, + "learning_rate": 0.0003301949164315156, + "loss": 4.5391, + "step": 4399 + }, + { + "epoch": 0.4296875, + "grad_norm": 0.2007448822259903, + "learning_rate": 0.0003301269989963037, + "loss": 4.543, + "step": 4400 + }, + { + "epoch": 0.42978515625, + "grad_norm": 0.18236641585826874, + "learning_rate": 0.00033005907621690335, + "loss": 4.5469, + "step": 4401 + }, + { + "epoch": 0.4298828125, + "grad_norm": 0.2007562220096588, + "learning_rate": 0.0003299911480998993, + "loss": 4.5508, + "step": 4402 + }, + { + "epoch": 0.42998046875, + "grad_norm": 0.20128126442432404, + "learning_rate": 0.00032992321465187666, + "loss": 4.5312, + "step": 4403 + }, + { + "epoch": 0.430078125, + "grad_norm": 0.1989973932504654, + "learning_rate": 0.000329855275879421, + "loss": 4.5, + "step": 4404 + }, + { + "epoch": 0.43017578125, + "grad_norm": 0.19425077736377716, + "learning_rate": 0.0003297873317891188, + "loss": 4.5195, + "step": 4405 + }, + { + "epoch": 0.4302734375, + "grad_norm": 0.2193920612335205, + 
"learning_rate": 0.0003297193823875566, + "loss": 4.5508, + "step": 4406 + }, + { + "epoch": 0.43037109375, + "grad_norm": 0.22691011428833008, + "learning_rate": 0.0003296514276813216, + "loss": 4.5234, + "step": 4407 + }, + { + "epoch": 0.43046875, + "grad_norm": 0.22228696942329407, + "learning_rate": 0.00032958346767700166, + "loss": 4.5312, + "step": 4408 + }, + { + "epoch": 0.43056640625, + "grad_norm": 0.25818145275115967, + "learning_rate": 0.000329515502381185, + "loss": 4.5508, + "step": 4409 + }, + { + "epoch": 0.4306640625, + "grad_norm": 0.27093949913978577, + "learning_rate": 0.00032944753180046035, + "loss": 4.5195, + "step": 4410 + }, + { + "epoch": 0.43076171875, + "grad_norm": 0.23827184736728668, + "learning_rate": 0.00032937955594141707, + "loss": 4.5234, + "step": 4411 + }, + { + "epoch": 0.430859375, + "grad_norm": 0.23867827653884888, + "learning_rate": 0.0003293115748106448, + "loss": 4.5312, + "step": 4412 + }, + { + "epoch": 0.43095703125, + "grad_norm": 0.1934238225221634, + "learning_rate": 0.00032924358841473404, + "loss": 4.5, + "step": 4413 + }, + { + "epoch": 0.4310546875, + "grad_norm": 0.20785847306251526, + "learning_rate": 0.0003291755967602756, + "loss": 4.5312, + "step": 4414 + }, + { + "epoch": 0.43115234375, + "grad_norm": 0.2102385312318802, + "learning_rate": 0.0003291075998538607, + "loss": 4.5273, + "step": 4415 + }, + { + "epoch": 0.43125, + "grad_norm": 0.2270030677318573, + "learning_rate": 0.0003290395977020812, + "loss": 4.5, + "step": 4416 + }, + { + "epoch": 0.43134765625, + "grad_norm": 0.2323227822780609, + "learning_rate": 0.0003289715903115294, + "loss": 4.5117, + "step": 4417 + }, + { + "epoch": 0.4314453125, + "grad_norm": 0.20858164131641388, + "learning_rate": 0.0003289035776887983, + "loss": 4.5391, + "step": 4418 + }, + { + "epoch": 0.43154296875, + "grad_norm": 0.22349855303764343, + "learning_rate": 0.00032883555984048124, + "loss": 4.5352, + "step": 4419 + }, + { + "epoch": 0.431640625, + "grad_norm": 0.23453128337860107, + "learning_rate": 0.000328767536773172, + "loss": 4.5234, + "step": 4420 + }, + { + "epoch": 0.43173828125, + "grad_norm": 0.24017755687236786, + "learning_rate": 0.00032869950849346495, + "loss": 4.5156, + "step": 4421 + }, + { + "epoch": 0.4318359375, + "grad_norm": 0.20869173109531403, + "learning_rate": 0.000328631475007955, + "loss": 4.5234, + "step": 4422 + }, + { + "epoch": 0.43193359375, + "grad_norm": 0.20544619858264923, + "learning_rate": 0.00032856343632323766, + "loss": 4.5312, + "step": 4423 + }, + { + "epoch": 0.43203125, + "grad_norm": 0.20834827423095703, + "learning_rate": 0.00032849539244590866, + "loss": 4.5391, + "step": 4424 + }, + { + "epoch": 0.43212890625, + "grad_norm": 0.2419765591621399, + "learning_rate": 0.00032842734338256436, + "loss": 4.5273, + "step": 4425 + }, + { + "epoch": 0.4322265625, + "grad_norm": 0.2258419692516327, + "learning_rate": 0.0003283592891398018, + "loss": 4.5508, + "step": 4426 + }, + { + "epoch": 0.43232421875, + "grad_norm": 0.1944732815027237, + "learning_rate": 0.0003282912297242183, + "loss": 4.5391, + "step": 4427 + }, + { + "epoch": 0.432421875, + "grad_norm": 0.18640844523906708, + "learning_rate": 0.0003282231651424117, + "loss": 4.5508, + "step": 4428 + }, + { + "epoch": 0.43251953125, + "grad_norm": 0.19308388233184814, + "learning_rate": 0.0003281550954009806, + "loss": 4.5273, + "step": 4429 + }, + { + "epoch": 0.4326171875, + "grad_norm": 0.22948555648326874, + "learning_rate": 0.0003280870205065237, + "loss": 4.5312, + "step": 4430 + }, + { 
+ "epoch": 0.43271484375, + "grad_norm": 0.2091873437166214, + "learning_rate": 0.0003280189404656404, + "loss": 4.5469, + "step": 4431 + }, + { + "epoch": 0.4328125, + "grad_norm": 0.1874324530363083, + "learning_rate": 0.00032795085528493074, + "loss": 4.5273, + "step": 4432 + }, + { + "epoch": 0.43291015625, + "grad_norm": 0.198861226439476, + "learning_rate": 0.000327882764970995, + "loss": 4.5391, + "step": 4433 + }, + { + "epoch": 0.4330078125, + "grad_norm": 0.21194633841514587, + "learning_rate": 0.0003278146695304341, + "loss": 4.5312, + "step": 4434 + }, + { + "epoch": 0.43310546875, + "grad_norm": 0.23030957579612732, + "learning_rate": 0.0003277465689698495, + "loss": 4.5391, + "step": 4435 + }, + { + "epoch": 0.433203125, + "grad_norm": 0.21302200853824615, + "learning_rate": 0.000327678463295843, + "loss": 4.5156, + "step": 4436 + }, + { + "epoch": 0.43330078125, + "grad_norm": 0.18900921940803528, + "learning_rate": 0.000327610352515017, + "loss": 4.5078, + "step": 4437 + }, + { + "epoch": 0.4333984375, + "grad_norm": 0.2014285922050476, + "learning_rate": 0.0003275422366339744, + "loss": 4.5703, + "step": 4438 + }, + { + "epoch": 0.43349609375, + "grad_norm": 0.19716453552246094, + "learning_rate": 0.00032747411565931856, + "loss": 4.5234, + "step": 4439 + }, + { + "epoch": 0.43359375, + "grad_norm": 0.20398662984371185, + "learning_rate": 0.00032740598959765346, + "loss": 4.5469, + "step": 4440 + }, + { + "epoch": 0.43369140625, + "grad_norm": 0.2114112824201584, + "learning_rate": 0.0003273378584555832, + "loss": 4.5195, + "step": 4441 + }, + { + "epoch": 0.4337890625, + "grad_norm": 0.1810259371995926, + "learning_rate": 0.0003272697222397128, + "loss": 4.5078, + "step": 4442 + }, + { + "epoch": 0.43388671875, + "grad_norm": 0.20544156432151794, + "learning_rate": 0.0003272015809566476, + "loss": 4.5195, + "step": 4443 + }, + { + "epoch": 0.433984375, + "grad_norm": 0.1933601200580597, + "learning_rate": 0.00032713343461299345, + "loss": 4.5312, + "step": 4444 + }, + { + "epoch": 0.43408203125, + "grad_norm": 0.20459522306919098, + "learning_rate": 0.00032706528321535663, + "loss": 4.5391, + "step": 4445 + }, + { + "epoch": 0.4341796875, + "grad_norm": 0.23548495769500732, + "learning_rate": 0.00032699712677034394, + "loss": 4.5273, + "step": 4446 + }, + { + "epoch": 0.43427734375, + "grad_norm": 0.222953200340271, + "learning_rate": 0.0003269289652845627, + "loss": 4.5234, + "step": 4447 + }, + { + "epoch": 0.434375, + "grad_norm": 0.2363830804824829, + "learning_rate": 0.00032686079876462085, + "loss": 4.5391, + "step": 4448 + }, + { + "epoch": 0.43447265625, + "grad_norm": 0.19586940109729767, + "learning_rate": 0.00032679262721712645, + "loss": 4.5078, + "step": 4449 + }, + { + "epoch": 0.4345703125, + "grad_norm": 0.19043128192424774, + "learning_rate": 0.0003267244506486883, + "loss": 4.5352, + "step": 4450 + }, + { + "epoch": 0.43466796875, + "grad_norm": 0.23796534538269043, + "learning_rate": 0.0003266562690659158, + "loss": 4.543, + "step": 4451 + }, + { + "epoch": 0.434765625, + "grad_norm": 0.2062700390815735, + "learning_rate": 0.00032658808247541864, + "loss": 4.5156, + "step": 4452 + }, + { + "epoch": 0.43486328125, + "grad_norm": 0.19936172664165497, + "learning_rate": 0.0003265198908838069, + "loss": 4.5352, + "step": 4453 + }, + { + "epoch": 0.4349609375, + "grad_norm": 0.19740867614746094, + "learning_rate": 0.0003264516942976915, + "loss": 4.5391, + "step": 4454 + }, + { + "epoch": 0.43505859375, + "grad_norm": 0.22618481516838074, + "learning_rate": 
0.00032638349272368344, + "loss": 4.5469, + "step": 4455 + }, + { + "epoch": 0.43515625, + "grad_norm": 0.20213083922863007, + "learning_rate": 0.0003263152861683945, + "loss": 4.5, + "step": 4456 + }, + { + "epoch": 0.43525390625, + "grad_norm": 0.19809061288833618, + "learning_rate": 0.00032624707463843703, + "loss": 4.5156, + "step": 4457 + }, + { + "epoch": 0.4353515625, + "grad_norm": 0.18381917476654053, + "learning_rate": 0.0003261788581404233, + "loss": 4.5, + "step": 4458 + }, + { + "epoch": 0.43544921875, + "grad_norm": 0.21003282070159912, + "learning_rate": 0.0003261106366809667, + "loss": 4.5195, + "step": 4459 + }, + { + "epoch": 0.435546875, + "grad_norm": 0.22188995778560638, + "learning_rate": 0.00032604241026668073, + "loss": 4.5742, + "step": 4460 + }, + { + "epoch": 0.43564453125, + "grad_norm": 0.2398741990327835, + "learning_rate": 0.00032597417890417954, + "loss": 4.5547, + "step": 4461 + }, + { + "epoch": 0.4357421875, + "grad_norm": 0.2111586481332779, + "learning_rate": 0.0003259059426000777, + "loss": 4.5508, + "step": 4462 + }, + { + "epoch": 0.43583984375, + "grad_norm": 0.20607000589370728, + "learning_rate": 0.00032583770136099016, + "loss": 4.5391, + "step": 4463 + }, + { + "epoch": 0.4359375, + "grad_norm": 0.1953241527080536, + "learning_rate": 0.0003257694551935325, + "loss": 4.5352, + "step": 4464 + }, + { + "epoch": 0.43603515625, + "grad_norm": 0.1940557062625885, + "learning_rate": 0.00032570120410432076, + "loss": 4.543, + "step": 4465 + }, + { + "epoch": 0.4361328125, + "grad_norm": 0.2345438301563263, + "learning_rate": 0.00032563294809997143, + "loss": 4.5547, + "step": 4466 + }, + { + "epoch": 0.43623046875, + "grad_norm": 0.22518609464168549, + "learning_rate": 0.00032556468718710147, + "loss": 4.543, + "step": 4467 + }, + { + "epoch": 0.436328125, + "grad_norm": 0.20131060481071472, + "learning_rate": 0.00032549642137232813, + "loss": 4.5391, + "step": 4468 + }, + { + "epoch": 0.43642578125, + "grad_norm": 0.18604958057403564, + "learning_rate": 0.00032542815066226955, + "loss": 4.5156, + "step": 4469 + }, + { + "epoch": 0.4365234375, + "grad_norm": 0.21055445075035095, + "learning_rate": 0.000325359875063544, + "loss": 4.5078, + "step": 4470 + }, + { + "epoch": 0.43662109375, + "grad_norm": 0.2313688099384308, + "learning_rate": 0.0003252915945827703, + "loss": 4.5078, + "step": 4471 + }, + { + "epoch": 0.43671875, + "grad_norm": 0.253934383392334, + "learning_rate": 0.00032522330922656794, + "loss": 4.5039, + "step": 4472 + }, + { + "epoch": 0.43681640625, + "grad_norm": 0.19999609887599945, + "learning_rate": 0.00032515501900155655, + "loss": 4.5195, + "step": 4473 + }, + { + "epoch": 0.4369140625, + "grad_norm": 0.19893260300159454, + "learning_rate": 0.00032508672391435645, + "loss": 4.5469, + "step": 4474 + }, + { + "epoch": 0.43701171875, + "grad_norm": 0.2637045085430145, + "learning_rate": 0.0003250184239715884, + "loss": 4.5625, + "step": 4475 + }, + { + "epoch": 0.437109375, + "grad_norm": 0.2480446696281433, + "learning_rate": 0.0003249501191798737, + "loss": 4.5312, + "step": 4476 + }, + { + "epoch": 0.43720703125, + "grad_norm": 0.20837350189685822, + "learning_rate": 0.00032488180954583397, + "loss": 4.5039, + "step": 4477 + }, + { + "epoch": 0.4373046875, + "grad_norm": 0.19845077395439148, + "learning_rate": 0.00032481349507609126, + "loss": 4.5391, + "step": 4478 + }, + { + "epoch": 0.43740234375, + "grad_norm": 0.2357725352048874, + "learning_rate": 0.0003247451757772684, + "loss": 4.5352, + "step": 4479 + }, + { + "epoch": 
0.4375, + "grad_norm": 0.23393143713474274, + "learning_rate": 0.0003246768516559883, + "loss": 4.5469, + "step": 4480 + }, + { + "epoch": 0.43759765625, + "grad_norm": 0.20326542854309082, + "learning_rate": 0.0003246085227188745, + "loss": 4.5312, + "step": 4481 + }, + { + "epoch": 0.4376953125, + "grad_norm": 0.20865848660469055, + "learning_rate": 0.00032454018897255126, + "loss": 4.5625, + "step": 4482 + }, + { + "epoch": 0.43779296875, + "grad_norm": 0.2391895204782486, + "learning_rate": 0.00032447185042364286, + "loss": 4.5391, + "step": 4483 + }, + { + "epoch": 0.437890625, + "grad_norm": 0.21765485405921936, + "learning_rate": 0.00032440350707877433, + "loss": 4.5508, + "step": 4484 + }, + { + "epoch": 0.43798828125, + "grad_norm": 0.19464147090911865, + "learning_rate": 0.00032433515894457113, + "loss": 4.5547, + "step": 4485 + }, + { + "epoch": 0.4380859375, + "grad_norm": 0.23677833378314972, + "learning_rate": 0.00032426680602765914, + "loss": 4.5469, + "step": 4486 + }, + { + "epoch": 0.43818359375, + "grad_norm": 0.23736287653446198, + "learning_rate": 0.00032419844833466463, + "loss": 4.543, + "step": 4487 + }, + { + "epoch": 0.43828125, + "grad_norm": 0.22315044701099396, + "learning_rate": 0.00032413008587221445, + "loss": 4.5273, + "step": 4488 + }, + { + "epoch": 0.43837890625, + "grad_norm": 0.18796318769454956, + "learning_rate": 0.00032406171864693585, + "loss": 4.5195, + "step": 4489 + }, + { + "epoch": 0.4384765625, + "grad_norm": 0.19455398619174957, + "learning_rate": 0.0003239933466654567, + "loss": 4.5195, + "step": 4490 + }, + { + "epoch": 0.43857421875, + "grad_norm": 0.19659997522830963, + "learning_rate": 0.00032392496993440505, + "loss": 4.4805, + "step": 4491 + }, + { + "epoch": 0.438671875, + "grad_norm": 0.18947161734104156, + "learning_rate": 0.0003238565884604096, + "loss": 4.5312, + "step": 4492 + }, + { + "epoch": 0.43876953125, + "grad_norm": 0.2050585001707077, + "learning_rate": 0.0003237882022500995, + "loss": 4.5547, + "step": 4493 + }, + { + "epoch": 0.4388671875, + "grad_norm": 0.18132343888282776, + "learning_rate": 0.0003237198113101045, + "loss": 4.5391, + "step": 4494 + }, + { + "epoch": 0.43896484375, + "grad_norm": 0.1953376829624176, + "learning_rate": 0.00032365141564705426, + "loss": 4.5195, + "step": 4495 + }, + { + "epoch": 0.4390625, + "grad_norm": 0.19781985878944397, + "learning_rate": 0.00032358301526757947, + "loss": 4.5117, + "step": 4496 + }, + { + "epoch": 0.43916015625, + "grad_norm": 0.20766140520572662, + "learning_rate": 0.0003235146101783112, + "loss": 4.5352, + "step": 4497 + }, + { + "epoch": 0.4392578125, + "grad_norm": 0.19674928486347198, + "learning_rate": 0.00032344620038588065, + "loss": 4.5273, + "step": 4498 + }, + { + "epoch": 0.43935546875, + "grad_norm": 0.1870025247335434, + "learning_rate": 0.0003233777858969198, + "loss": 4.5273, + "step": 4499 + }, + { + "epoch": 0.439453125, + "grad_norm": 0.19033251702785492, + "learning_rate": 0.00032330936671806095, + "loss": 4.5195, + "step": 4500 + }, + { + "epoch": 0.43955078125, + "grad_norm": 0.21173185110092163, + "learning_rate": 0.00032324094285593685, + "loss": 4.5156, + "step": 4501 + }, + { + "epoch": 0.4396484375, + "grad_norm": 0.21463431417942047, + "learning_rate": 0.0003231725143171808, + "loss": 4.5352, + "step": 4502 + }, + { + "epoch": 0.43974609375, + "grad_norm": 0.1908867061138153, + "learning_rate": 0.00032310408110842645, + "loss": 4.5, + "step": 4503 + }, + { + "epoch": 0.43984375, + "grad_norm": 0.2041921466588974, + "learning_rate": 
0.00032303564323630786, + "loss": 4.5, + "step": 4504 + }, + { + "epoch": 0.43994140625, + "grad_norm": 0.2068241536617279, + "learning_rate": 0.00032296720070745967, + "loss": 4.5039, + "step": 4505 + }, + { + "epoch": 0.4400390625, + "grad_norm": 0.22090201079845428, + "learning_rate": 0.00032289875352851694, + "loss": 4.4766, + "step": 4506 + }, + { + "epoch": 0.44013671875, + "grad_norm": 0.2069287896156311, + "learning_rate": 0.0003228303017061152, + "loss": 4.5391, + "step": 4507 + }, + { + "epoch": 0.440234375, + "grad_norm": 0.19182436168193817, + "learning_rate": 0.0003227618452468903, + "loss": 4.5195, + "step": 4508 + }, + { + "epoch": 0.44033203125, + "grad_norm": 0.18364793062210083, + "learning_rate": 0.0003226933841574786, + "loss": 4.5312, + "step": 4509 + }, + { + "epoch": 0.4404296875, + "grad_norm": 0.20465484261512756, + "learning_rate": 0.0003226249184445171, + "loss": 4.5195, + "step": 4510 + }, + { + "epoch": 0.44052734375, + "grad_norm": 0.21333731710910797, + "learning_rate": 0.00032255644811464304, + "loss": 4.5352, + "step": 4511 + }, + { + "epoch": 0.440625, + "grad_norm": 0.21067865192890167, + "learning_rate": 0.00032248797317449394, + "loss": 4.5234, + "step": 4512 + }, + { + "epoch": 0.44072265625, + "grad_norm": 0.21718868613243103, + "learning_rate": 0.0003224194936307082, + "loss": 4.5078, + "step": 4513 + }, + { + "epoch": 0.4408203125, + "grad_norm": 0.20343723893165588, + "learning_rate": 0.0003223510094899244, + "loss": 4.5469, + "step": 4514 + }, + { + "epoch": 0.44091796875, + "grad_norm": 0.21048948168754578, + "learning_rate": 0.0003222825207587816, + "loss": 4.5586, + "step": 4515 + }, + { + "epoch": 0.441015625, + "grad_norm": 0.19594675302505493, + "learning_rate": 0.00032221402744391934, + "loss": 4.5156, + "step": 4516 + }, + { + "epoch": 0.44111328125, + "grad_norm": 0.2070220708847046, + "learning_rate": 0.0003221455295519775, + "loss": 4.5195, + "step": 4517 + }, + { + "epoch": 0.4412109375, + "grad_norm": 0.19522973895072937, + "learning_rate": 0.0003220770270895966, + "loss": 4.5234, + "step": 4518 + }, + { + "epoch": 0.44130859375, + "grad_norm": 0.19688962399959564, + "learning_rate": 0.00032200852006341744, + "loss": 4.5547, + "step": 4519 + }, + { + "epoch": 0.44140625, + "grad_norm": 0.19884318113327026, + "learning_rate": 0.0003219400084800814, + "loss": 4.5234, + "step": 4520 + }, + { + "epoch": 0.44150390625, + "grad_norm": 0.18464922904968262, + "learning_rate": 0.0003218714923462299, + "loss": 4.543, + "step": 4521 + }, + { + "epoch": 0.4416015625, + "grad_norm": 0.21401017904281616, + "learning_rate": 0.00032180297166850547, + "loss": 4.5586, + "step": 4522 + }, + { + "epoch": 0.44169921875, + "grad_norm": 0.19261690974235535, + "learning_rate": 0.0003217344464535506, + "loss": 4.5078, + "step": 4523 + }, + { + "epoch": 0.441796875, + "grad_norm": 0.20733964443206787, + "learning_rate": 0.00032166591670800834, + "loss": 4.5352, + "step": 4524 + }, + { + "epoch": 0.44189453125, + "grad_norm": 0.2197677046060562, + "learning_rate": 0.0003215973824385221, + "loss": 4.5234, + "step": 4525 + }, + { + "epoch": 0.4419921875, + "grad_norm": 0.2248896360397339, + "learning_rate": 0.00032152884365173594, + "loss": 4.5234, + "step": 4526 + }, + { + "epoch": 0.44208984375, + "grad_norm": 0.2024918645620346, + "learning_rate": 0.00032146030035429415, + "loss": 4.5547, + "step": 4527 + }, + { + "epoch": 0.4421875, + "grad_norm": 0.1870448887348175, + "learning_rate": 0.0003213917525528416, + "loss": 4.4805, + "step": 4528 + }, + { + 
"epoch": 0.44228515625, + "grad_norm": 0.17727215588092804, + "learning_rate": 0.00032132320025402353, + "loss": 4.5352, + "step": 4529 + }, + { + "epoch": 0.4423828125, + "grad_norm": 0.18537001311779022, + "learning_rate": 0.00032125464346448556, + "loss": 4.4844, + "step": 4530 + }, + { + "epoch": 0.44248046875, + "grad_norm": 0.1875099539756775, + "learning_rate": 0.00032118608219087373, + "loss": 4.5195, + "step": 4531 + }, + { + "epoch": 0.442578125, + "grad_norm": 0.2120281606912613, + "learning_rate": 0.0003211175164398349, + "loss": 4.5117, + "step": 4532 + }, + { + "epoch": 0.44267578125, + "grad_norm": 0.20298795402050018, + "learning_rate": 0.00032104894621801577, + "loss": 4.5, + "step": 4533 + }, + { + "epoch": 0.4427734375, + "grad_norm": 0.19692641496658325, + "learning_rate": 0.0003209803715320638, + "loss": 4.5273, + "step": 4534 + }, + { + "epoch": 0.44287109375, + "grad_norm": 0.18256649374961853, + "learning_rate": 0.0003209117923886269, + "loss": 4.5078, + "step": 4535 + }, + { + "epoch": 0.44296875, + "grad_norm": 0.20503602921962738, + "learning_rate": 0.0003208432087943533, + "loss": 4.5078, + "step": 4536 + }, + { + "epoch": 0.44306640625, + "grad_norm": 0.1961802840232849, + "learning_rate": 0.0003207746207558918, + "loss": 4.5234, + "step": 4537 + }, + { + "epoch": 0.4431640625, + "grad_norm": 0.22225888073444366, + "learning_rate": 0.0003207060282798915, + "loss": 4.5234, + "step": 4538 + }, + { + "epoch": 0.44326171875, + "grad_norm": 0.19966357946395874, + "learning_rate": 0.0003206374313730019, + "loss": 4.5469, + "step": 4539 + }, + { + "epoch": 0.443359375, + "grad_norm": 0.201384499669075, + "learning_rate": 0.0003205688300418731, + "loss": 4.5391, + "step": 4540 + }, + { + "epoch": 0.44345703125, + "grad_norm": 0.22023342549800873, + "learning_rate": 0.00032050022429315554, + "loss": 4.5273, + "step": 4541 + }, + { + "epoch": 0.4435546875, + "grad_norm": 0.1954025775194168, + "learning_rate": 0.0003204316141335, + "loss": 4.5234, + "step": 4542 + }, + { + "epoch": 0.44365234375, + "grad_norm": 0.20487873256206512, + "learning_rate": 0.00032036299956955786, + "loss": 4.5469, + "step": 4543 + }, + { + "epoch": 0.44375, + "grad_norm": 0.2010820060968399, + "learning_rate": 0.0003202943806079807, + "loss": 4.5117, + "step": 4544 + }, + { + "epoch": 0.44384765625, + "grad_norm": 0.21332380175590515, + "learning_rate": 0.0003202257572554209, + "loss": 4.5117, + "step": 4545 + }, + { + "epoch": 0.4439453125, + "grad_norm": 0.2095111757516861, + "learning_rate": 0.0003201571295185308, + "loss": 4.5234, + "step": 4546 + }, + { + "epoch": 0.44404296875, + "grad_norm": 0.21185234189033508, + "learning_rate": 0.00032008849740396345, + "loss": 4.4883, + "step": 4547 + }, + { + "epoch": 0.444140625, + "grad_norm": 0.2375704050064087, + "learning_rate": 0.00032001986091837224, + "loss": 4.5273, + "step": 4548 + }, + { + "epoch": 0.44423828125, + "grad_norm": 0.22713026404380798, + "learning_rate": 0.0003199512200684112, + "loss": 4.5273, + "step": 4549 + }, + { + "epoch": 0.4443359375, + "grad_norm": 0.2159368246793747, + "learning_rate": 0.0003198825748607344, + "loss": 4.5391, + "step": 4550 + }, + { + "epoch": 0.44443359375, + "grad_norm": 0.19690531492233276, + "learning_rate": 0.0003198139253019966, + "loss": 4.5195, + "step": 4551 + }, + { + "epoch": 0.44453125, + "grad_norm": 0.1991814821958542, + "learning_rate": 0.00031974527139885285, + "loss": 4.5234, + "step": 4552 + }, + { + "epoch": 0.44462890625, + "grad_norm": 0.23383638262748718, + "learning_rate": 
0.00031967661315795883, + "loss": 4.5195, + "step": 4553 + }, + { + "epoch": 0.4447265625, + "grad_norm": 0.2404201179742813, + "learning_rate": 0.0003196079505859703, + "loss": 4.5352, + "step": 4554 + }, + { + "epoch": 0.44482421875, + "grad_norm": 0.22039729356765747, + "learning_rate": 0.0003195392836895437, + "loss": 4.5352, + "step": 4555 + }, + { + "epoch": 0.444921875, + "grad_norm": 0.198837012052536, + "learning_rate": 0.0003194706124753359, + "loss": 4.5312, + "step": 4556 + }, + { + "epoch": 0.44501953125, + "grad_norm": 0.19770367443561554, + "learning_rate": 0.000319401936950004, + "loss": 4.5234, + "step": 4557 + }, + { + "epoch": 0.4451171875, + "grad_norm": 0.19984470307826996, + "learning_rate": 0.0003193332571202057, + "loss": 4.5039, + "step": 4558 + }, + { + "epoch": 0.44521484375, + "grad_norm": 0.19468826055526733, + "learning_rate": 0.00031926457299259894, + "loss": 4.4922, + "step": 4559 + }, + { + "epoch": 0.4453125, + "grad_norm": 0.18635861575603485, + "learning_rate": 0.0003191958845738423, + "loss": 4.5625, + "step": 4560 + }, + { + "epoch": 0.44541015625, + "grad_norm": 0.17495174705982208, + "learning_rate": 0.00031912719187059466, + "loss": 4.5312, + "step": 4561 + }, + { + "epoch": 0.4455078125, + "grad_norm": 0.2033688873052597, + "learning_rate": 0.00031905849488951516, + "loss": 4.5195, + "step": 4562 + }, + { + "epoch": 0.44560546875, + "grad_norm": 0.1858840137720108, + "learning_rate": 0.00031898979363726365, + "loss": 4.5156, + "step": 4563 + }, + { + "epoch": 0.445703125, + "grad_norm": 0.19045676290988922, + "learning_rate": 0.00031892108812050027, + "loss": 4.5508, + "step": 4564 + }, + { + "epoch": 0.44580078125, + "grad_norm": 0.18696536123752594, + "learning_rate": 0.0003188523783458854, + "loss": 4.5312, + "step": 4565 + }, + { + "epoch": 0.4458984375, + "grad_norm": 0.1982533037662506, + "learning_rate": 0.0003187836643200802, + "loss": 4.5195, + "step": 4566 + }, + { + "epoch": 0.44599609375, + "grad_norm": 0.20984594523906708, + "learning_rate": 0.00031871494604974576, + "loss": 4.5195, + "step": 4567 + }, + { + "epoch": 0.44609375, + "grad_norm": 0.24393682181835175, + "learning_rate": 0.00031864622354154407, + "loss": 4.4961, + "step": 4568 + }, + { + "epoch": 0.44619140625, + "grad_norm": 0.23847603797912598, + "learning_rate": 0.00031857749680213714, + "loss": 4.5508, + "step": 4569 + }, + { + "epoch": 0.4462890625, + "grad_norm": 0.2244279533624649, + "learning_rate": 0.0003185087658381878, + "loss": 4.543, + "step": 4570 + }, + { + "epoch": 0.44638671875, + "grad_norm": 0.19633477926254272, + "learning_rate": 0.00031844003065635877, + "loss": 4.5352, + "step": 4571 + }, + { + "epoch": 0.446484375, + "grad_norm": 0.21556459367275238, + "learning_rate": 0.0003183712912633137, + "loss": 4.5391, + "step": 4572 + }, + { + "epoch": 0.44658203125, + "grad_norm": 0.23900896310806274, + "learning_rate": 0.0003183025476657163, + "loss": 4.5352, + "step": 4573 + }, + { + "epoch": 0.4466796875, + "grad_norm": 0.2370019555091858, + "learning_rate": 0.0003182337998702308, + "loss": 4.5312, + "step": 4574 + }, + { + "epoch": 0.44677734375, + "grad_norm": 0.21700753271579742, + "learning_rate": 0.0003181650478835217, + "loss": 4.5352, + "step": 4575 + }, + { + "epoch": 0.446875, + "grad_norm": 0.1858913153409958, + "learning_rate": 0.0003180962917122543, + "loss": 4.543, + "step": 4576 + }, + { + "epoch": 0.44697265625, + "grad_norm": 0.20568719506263733, + "learning_rate": 0.00031802753136309385, + "loss": 4.5078, + "step": 4577 + }, + { + "epoch": 
0.4470703125, + "grad_norm": 0.21230654418468475, + "learning_rate": 0.0003179587668427064, + "loss": 4.5, + "step": 4578 + }, + { + "epoch": 0.44716796875, + "grad_norm": 0.19326019287109375, + "learning_rate": 0.0003178899981577579, + "loss": 4.5508, + "step": 4579 + }, + { + "epoch": 0.447265625, + "grad_norm": 0.19291195273399353, + "learning_rate": 0.0003178212253149153, + "loss": 4.4961, + "step": 4580 + }, + { + "epoch": 0.44736328125, + "grad_norm": 0.21458159387111664, + "learning_rate": 0.00031775244832084543, + "loss": 4.5273, + "step": 4581 + }, + { + "epoch": 0.4474609375, + "grad_norm": 0.21241629123687744, + "learning_rate": 0.00031768366718221586, + "loss": 4.5078, + "step": 4582 + }, + { + "epoch": 0.44755859375, + "grad_norm": 0.19614863395690918, + "learning_rate": 0.0003176148819056946, + "loss": 4.5156, + "step": 4583 + }, + { + "epoch": 0.44765625, + "grad_norm": 0.20164260268211365, + "learning_rate": 0.0003175460924979496, + "loss": 4.5117, + "step": 4584 + }, + { + "epoch": 0.44775390625, + "grad_norm": 0.19649086892604828, + "learning_rate": 0.00031747729896564975, + "loss": 4.5117, + "step": 4585 + }, + { + "epoch": 0.4478515625, + "grad_norm": 0.20615875720977783, + "learning_rate": 0.00031740850131546406, + "loss": 4.5391, + "step": 4586 + }, + { + "epoch": 0.44794921875, + "grad_norm": 0.2120678573846817, + "learning_rate": 0.000317339699554062, + "loss": 4.5, + "step": 4587 + }, + { + "epoch": 0.448046875, + "grad_norm": 0.1972879320383072, + "learning_rate": 0.0003172708936881134, + "loss": 4.5352, + "step": 4588 + }, + { + "epoch": 0.44814453125, + "grad_norm": 0.2132818102836609, + "learning_rate": 0.0003172020837242886, + "loss": 4.543, + "step": 4589 + }, + { + "epoch": 0.4482421875, + "grad_norm": 0.1882414072751999, + "learning_rate": 0.00031713326966925816, + "loss": 4.5312, + "step": 4590 + }, + { + "epoch": 0.44833984375, + "grad_norm": 0.19941607117652893, + "learning_rate": 0.00031706445152969323, + "loss": 4.4766, + "step": 4591 + }, + { + "epoch": 0.4484375, + "grad_norm": 0.190363347530365, + "learning_rate": 0.00031699562931226515, + "loss": 4.5156, + "step": 4592 + }, + { + "epoch": 0.44853515625, + "grad_norm": 0.22457027435302734, + "learning_rate": 0.0003169268030236459, + "loss": 4.5352, + "step": 4593 + }, + { + "epoch": 0.4486328125, + "grad_norm": 0.218889057636261, + "learning_rate": 0.0003168579726705077, + "loss": 4.5469, + "step": 4594 + }, + { + "epoch": 0.44873046875, + "grad_norm": 0.19007110595703125, + "learning_rate": 0.00031678913825952307, + "loss": 4.5352, + "step": 4595 + }, + { + "epoch": 0.448828125, + "grad_norm": 0.19044940173625946, + "learning_rate": 0.000316720299797365, + "loss": 4.5117, + "step": 4596 + }, + { + "epoch": 0.44892578125, + "grad_norm": 0.1830681562423706, + "learning_rate": 0.0003166514572907072, + "loss": 4.5547, + "step": 4597 + }, + { + "epoch": 0.4490234375, + "grad_norm": 0.20148320496082306, + "learning_rate": 0.0003165826107462232, + "loss": 4.5312, + "step": 4598 + }, + { + "epoch": 0.44912109375, + "grad_norm": 0.20861521363258362, + "learning_rate": 0.0003165137601705874, + "loss": 4.5273, + "step": 4599 + }, + { + "epoch": 0.44921875, + "grad_norm": 0.17926272749900818, + "learning_rate": 0.0003164449055704742, + "loss": 4.5195, + "step": 4600 + }, + { + "epoch": 0.44931640625, + "grad_norm": 0.18481853604316711, + "learning_rate": 0.0003163760469525588, + "loss": 4.5508, + "step": 4601 + }, + { + "epoch": 0.4494140625, + "grad_norm": 0.1803535521030426, + "learning_rate": 
0.0003163071843235164, + "loss": 4.5078, + "step": 4602 + }, + { + "epoch": 0.44951171875, + "grad_norm": 0.18805640935897827, + "learning_rate": 0.0003162383176900229, + "loss": 4.543, + "step": 4603 + }, + { + "epoch": 0.449609375, + "grad_norm": 0.184268519282341, + "learning_rate": 0.00031616944705875426, + "loss": 4.4883, + "step": 4604 + }, + { + "epoch": 0.44970703125, + "grad_norm": 0.19602136313915253, + "learning_rate": 0.00031610057243638723, + "loss": 4.5547, + "step": 4605 + }, + { + "epoch": 0.4498046875, + "grad_norm": 0.18924519419670105, + "learning_rate": 0.00031603169382959863, + "loss": 4.5156, + "step": 4606 + }, + { + "epoch": 0.44990234375, + "grad_norm": 0.2040221095085144, + "learning_rate": 0.00031596281124506584, + "loss": 4.5078, + "step": 4607 + }, + { + "epoch": 0.45, + "grad_norm": 0.188638374209404, + "learning_rate": 0.0003158939246894665, + "loss": 4.5078, + "step": 4608 + }, + { + "epoch": 0.45009765625, + "grad_norm": 0.18929627537727356, + "learning_rate": 0.00031582503416947865, + "loss": 4.5117, + "step": 4609 + }, + { + "epoch": 0.4501953125, + "grad_norm": 0.18807269632816315, + "learning_rate": 0.00031575613969178087, + "loss": 4.5039, + "step": 4610 + }, + { + "epoch": 0.45029296875, + "grad_norm": 0.19660279154777527, + "learning_rate": 0.00031568724126305195, + "loss": 4.5273, + "step": 4611 + }, + { + "epoch": 0.450390625, + "grad_norm": 0.1940307766199112, + "learning_rate": 0.00031561833888997114, + "loss": 4.5352, + "step": 4612 + }, + { + "epoch": 0.45048828125, + "grad_norm": 0.19396167993545532, + "learning_rate": 0.00031554943257921804, + "loss": 4.5273, + "step": 4613 + }, + { + "epoch": 0.4505859375, + "grad_norm": 0.19005364179611206, + "learning_rate": 0.00031548052233747274, + "loss": 4.5195, + "step": 4614 + }, + { + "epoch": 0.45068359375, + "grad_norm": 0.183942511677742, + "learning_rate": 0.0003154116081714154, + "loss": 4.5156, + "step": 4615 + }, + { + "epoch": 0.45078125, + "grad_norm": 0.1882038563489914, + "learning_rate": 0.000315342690087727, + "loss": 4.5195, + "step": 4616 + }, + { + "epoch": 0.45087890625, + "grad_norm": 0.18742604553699493, + "learning_rate": 0.0003152737680930886, + "loss": 4.5234, + "step": 4617 + }, + { + "epoch": 0.4509765625, + "grad_norm": 0.2272927612066269, + "learning_rate": 0.0003152048421941817, + "loss": 4.5273, + "step": 4618 + }, + { + "epoch": 0.45107421875, + "grad_norm": 0.26417961716651917, + "learning_rate": 0.00031513591239768824, + "loss": 4.5117, + "step": 4619 + }, + { + "epoch": 0.451171875, + "grad_norm": 0.2620939612388611, + "learning_rate": 0.00031506697871029046, + "loss": 4.5273, + "step": 4620 + }, + { + "epoch": 0.45126953125, + "grad_norm": 0.1987721025943756, + "learning_rate": 0.00031499804113867106, + "loss": 4.5234, + "step": 4621 + }, + { + "epoch": 0.4513671875, + "grad_norm": 0.19930468499660492, + "learning_rate": 0.0003149290996895131, + "loss": 4.5469, + "step": 4622 + }, + { + "epoch": 0.45146484375, + "grad_norm": 0.27694329619407654, + "learning_rate": 0.0003148601543694998, + "loss": 4.543, + "step": 4623 + }, + { + "epoch": 0.4515625, + "grad_norm": 0.24923160672187805, + "learning_rate": 0.00031479120518531506, + "loss": 4.5312, + "step": 4624 + }, + { + "epoch": 0.45166015625, + "grad_norm": 0.22678142786026, + "learning_rate": 0.0003147222521436431, + "loss": 4.5117, + "step": 4625 + }, + { + "epoch": 0.4517578125, + "grad_norm": 0.23605704307556152, + "learning_rate": 0.00031465329525116843, + "loss": 4.5039, + "step": 4626 + }, + { + "epoch": 
0.45185546875, + "grad_norm": 0.28338128328323364, + "learning_rate": 0.00031458433451457584, + "loss": 4.5273, + "step": 4627 + }, + { + "epoch": 0.451953125, + "grad_norm": 0.2398362159729004, + "learning_rate": 0.0003145153699405507, + "loss": 4.5, + "step": 4628 + }, + { + "epoch": 0.45205078125, + "grad_norm": 0.20816409587860107, + "learning_rate": 0.00031444640153577866, + "loss": 4.5391, + "step": 4629 + }, + { + "epoch": 0.4521484375, + "grad_norm": 0.24035698175430298, + "learning_rate": 0.00031437742930694575, + "loss": 4.5352, + "step": 4630 + }, + { + "epoch": 0.45224609375, + "grad_norm": 0.22831131517887115, + "learning_rate": 0.0003143084532607382, + "loss": 4.5352, + "step": 4631 + }, + { + "epoch": 0.45234375, + "grad_norm": 0.20174892246723175, + "learning_rate": 0.00031423947340384294, + "loss": 4.5391, + "step": 4632 + }, + { + "epoch": 0.45244140625, + "grad_norm": 0.2099580019712448, + "learning_rate": 0.00031417048974294716, + "loss": 4.4922, + "step": 4633 + }, + { + "epoch": 0.4525390625, + "grad_norm": 0.2119961678981781, + "learning_rate": 0.0003141015022847381, + "loss": 4.5312, + "step": 4634 + }, + { + "epoch": 0.45263671875, + "grad_norm": 0.21319544315338135, + "learning_rate": 0.00031403251103590387, + "loss": 4.5078, + "step": 4635 + }, + { + "epoch": 0.452734375, + "grad_norm": 0.1969742774963379, + "learning_rate": 0.0003139635160031326, + "loss": 4.5586, + "step": 4636 + }, + { + "epoch": 0.45283203125, + "grad_norm": 0.23173289000988007, + "learning_rate": 0.0003138945171931128, + "loss": 4.5234, + "step": 4637 + }, + { + "epoch": 0.4529296875, + "grad_norm": 0.20970720052719116, + "learning_rate": 0.0003138255146125337, + "loss": 4.4961, + "step": 4638 + }, + { + "epoch": 0.45302734375, + "grad_norm": 0.19655244052410126, + "learning_rate": 0.0003137565082680844, + "loss": 4.5391, + "step": 4639 + }, + { + "epoch": 0.453125, + "grad_norm": 0.19924841821193695, + "learning_rate": 0.00031368749816645464, + "loss": 4.5039, + "step": 4640 + }, + { + "epoch": 0.45322265625, + "grad_norm": 0.19230122864246368, + "learning_rate": 0.0003136184843143345, + "loss": 4.5078, + "step": 4641 + }, + { + "epoch": 0.4533203125, + "grad_norm": 0.18629860877990723, + "learning_rate": 0.00031354946671841445, + "loss": 4.5156, + "step": 4642 + }, + { + "epoch": 0.45341796875, + "grad_norm": 0.20049424469470978, + "learning_rate": 0.00031348044538538524, + "loss": 4.5273, + "step": 4643 + }, + { + "epoch": 0.453515625, + "grad_norm": 0.21210506558418274, + "learning_rate": 0.00031341142032193794, + "loss": 4.5391, + "step": 4644 + }, + { + "epoch": 0.45361328125, + "grad_norm": 0.19526821374893188, + "learning_rate": 0.0003133423915347642, + "loss": 4.5469, + "step": 4645 + }, + { + "epoch": 0.4537109375, + "grad_norm": 0.19712293148040771, + "learning_rate": 0.0003132733590305558, + "loss": 4.543, + "step": 4646 + }, + { + "epoch": 0.45380859375, + "grad_norm": 0.2007729858160019, + "learning_rate": 0.0003132043228160051, + "loss": 4.4883, + "step": 4647 + }, + { + "epoch": 0.45390625, + "grad_norm": 0.2587430477142334, + "learning_rate": 0.00031313528289780444, + "loss": 4.5586, + "step": 4648 + }, + { + "epoch": 0.45400390625, + "grad_norm": 0.24549271166324615, + "learning_rate": 0.00031306623928264706, + "loss": 4.5508, + "step": 4649 + }, + { + "epoch": 0.4541015625, + "grad_norm": 0.19929195940494537, + "learning_rate": 0.0003129971919772261, + "loss": 4.5156, + "step": 4650 + }, + { + "epoch": 0.45419921875, + "grad_norm": 0.21530620753765106, + "learning_rate": 
0.00031292814098823527, + "loss": 4.5312, + "step": 4651 + }, + { + "epoch": 0.454296875, + "grad_norm": 0.2592374384403229, + "learning_rate": 0.00031285908632236855, + "loss": 4.5078, + "step": 4652 + }, + { + "epoch": 0.45439453125, + "grad_norm": 0.21420057117938995, + "learning_rate": 0.0003127900279863203, + "loss": 4.5273, + "step": 4653 + }, + { + "epoch": 0.4544921875, + "grad_norm": 0.2113385647535324, + "learning_rate": 0.00031272096598678546, + "loss": 4.5703, + "step": 4654 + }, + { + "epoch": 0.45458984375, + "grad_norm": 0.23203358054161072, + "learning_rate": 0.00031265190033045895, + "loss": 4.5234, + "step": 4655 + }, + { + "epoch": 0.4546875, + "grad_norm": 0.2084053009748459, + "learning_rate": 0.00031258283102403615, + "loss": 4.5195, + "step": 4656 + }, + { + "epoch": 0.45478515625, + "grad_norm": 0.21320444345474243, + "learning_rate": 0.00031251375807421304, + "loss": 4.4961, + "step": 4657 + }, + { + "epoch": 0.4548828125, + "grad_norm": 0.18526767194271088, + "learning_rate": 0.00031244468148768567, + "loss": 4.5312, + "step": 4658 + }, + { + "epoch": 0.45498046875, + "grad_norm": 0.23145592212677002, + "learning_rate": 0.00031237560127115064, + "loss": 4.5156, + "step": 4659 + }, + { + "epoch": 0.455078125, + "grad_norm": 0.19319960474967957, + "learning_rate": 0.0003123065174313047, + "loss": 4.5508, + "step": 4660 + }, + { + "epoch": 0.45517578125, + "grad_norm": 0.18739789724349976, + "learning_rate": 0.000312237429974845, + "loss": 4.5156, + "step": 4661 + }, + { + "epoch": 0.4552734375, + "grad_norm": 0.19655229151248932, + "learning_rate": 0.0003121683389084693, + "loss": 4.5117, + "step": 4662 + }, + { + "epoch": 0.45537109375, + "grad_norm": 0.21095634996891022, + "learning_rate": 0.0003120992442388754, + "loss": 4.5117, + "step": 4663 + }, + { + "epoch": 0.45546875, + "grad_norm": 0.19926396012306213, + "learning_rate": 0.0003120301459727615, + "loss": 4.4922, + "step": 4664 + }, + { + "epoch": 0.45556640625, + "grad_norm": 0.22949261963367462, + "learning_rate": 0.0003119610441168264, + "loss": 4.5195, + "step": 4665 + }, + { + "epoch": 0.4556640625, + "grad_norm": 0.1899200826883316, + "learning_rate": 0.0003118919386777687, + "loss": 4.5, + "step": 4666 + }, + { + "epoch": 0.45576171875, + "grad_norm": 0.19490887224674225, + "learning_rate": 0.00031182282966228815, + "loss": 4.5195, + "step": 4667 + }, + { + "epoch": 0.455859375, + "grad_norm": 0.20040741562843323, + "learning_rate": 0.00031175371707708413, + "loss": 4.543, + "step": 4668 + }, + { + "epoch": 0.45595703125, + "grad_norm": 0.18758058547973633, + "learning_rate": 0.0003116846009288566, + "loss": 4.5117, + "step": 4669 + }, + { + "epoch": 0.4560546875, + "grad_norm": 0.20843686163425446, + "learning_rate": 0.000311615481224306, + "loss": 4.5234, + "step": 4670 + }, + { + "epoch": 0.45615234375, + "grad_norm": 0.1775505542755127, + "learning_rate": 0.0003115463579701331, + "loss": 4.5234, + "step": 4671 + }, + { + "epoch": 0.45625, + "grad_norm": 0.2046179622411728, + "learning_rate": 0.0003114772311730388, + "loss": 4.5078, + "step": 4672 + }, + { + "epoch": 0.45634765625, + "grad_norm": 0.19120004773139954, + "learning_rate": 0.0003114081008397245, + "loss": 4.5117, + "step": 4673 + }, + { + "epoch": 0.4564453125, + "grad_norm": 0.2021452933549881, + "learning_rate": 0.0003113389669768918, + "loss": 4.5234, + "step": 4674 + }, + { + "epoch": 0.45654296875, + "grad_norm": 0.18696098029613495, + "learning_rate": 0.0003112698295912431, + "loss": 4.5234, + "step": 4675 + }, + { + "epoch": 
0.456640625, + "grad_norm": 0.19159789383411407, + "learning_rate": 0.00031120068868948047, + "loss": 4.5234, + "step": 4676 + }, + { + "epoch": 0.45673828125, + "grad_norm": 0.18893691897392273, + "learning_rate": 0.0003111315442783068, + "loss": 4.5039, + "step": 4677 + }, + { + "epoch": 0.4568359375, + "grad_norm": 0.20185202360153198, + "learning_rate": 0.00031106239636442514, + "loss": 4.5234, + "step": 4678 + }, + { + "epoch": 0.45693359375, + "grad_norm": 0.19668243825435638, + "learning_rate": 0.0003109932449545388, + "loss": 4.5312, + "step": 4679 + }, + { + "epoch": 0.45703125, + "grad_norm": 0.1866859644651413, + "learning_rate": 0.00031092409005535187, + "loss": 4.4961, + "step": 4680 + }, + { + "epoch": 0.45712890625, + "grad_norm": 0.19219732284545898, + "learning_rate": 0.0003108549316735681, + "loss": 4.5352, + "step": 4681 + }, + { + "epoch": 0.4572265625, + "grad_norm": 0.17668581008911133, + "learning_rate": 0.000310785769815892, + "loss": 4.5039, + "step": 4682 + }, + { + "epoch": 0.45732421875, + "grad_norm": 0.19153918325901031, + "learning_rate": 0.00031071660448902844, + "loss": 4.4922, + "step": 4683 + }, + { + "epoch": 0.457421875, + "grad_norm": 0.18667173385620117, + "learning_rate": 0.00031064743569968255, + "loss": 4.5273, + "step": 4684 + }, + { + "epoch": 0.45751953125, + "grad_norm": 0.19702638685703278, + "learning_rate": 0.00031057826345455965, + "loss": 4.5195, + "step": 4685 + }, + { + "epoch": 0.4576171875, + "grad_norm": 0.19739848375320435, + "learning_rate": 0.00031050908776036553, + "loss": 4.4766, + "step": 4686 + }, + { + "epoch": 0.45771484375, + "grad_norm": 0.1767302006483078, + "learning_rate": 0.0003104399086238064, + "loss": 4.5078, + "step": 4687 + }, + { + "epoch": 0.4578125, + "grad_norm": 0.20505668222904205, + "learning_rate": 0.0003103707260515887, + "loss": 4.5508, + "step": 4688 + }, + { + "epoch": 0.45791015625, + "grad_norm": 0.216728076338768, + "learning_rate": 0.0003103015400504191, + "loss": 4.5234, + "step": 4689 + }, + { + "epoch": 0.4580078125, + "grad_norm": 0.22385595738887787, + "learning_rate": 0.00031023235062700483, + "loss": 4.5078, + "step": 4690 + }, + { + "epoch": 0.45810546875, + "grad_norm": 0.1950686126947403, + "learning_rate": 0.00031016315778805326, + "loss": 4.5234, + "step": 4691 + }, + { + "epoch": 0.458203125, + "grad_norm": 0.19517037272453308, + "learning_rate": 0.00031009396154027226, + "loss": 4.5312, + "step": 4692 + }, + { + "epoch": 0.45830078125, + "grad_norm": 0.20731741189956665, + "learning_rate": 0.00031002476189036975, + "loss": 4.5273, + "step": 4693 + }, + { + "epoch": 0.4583984375, + "grad_norm": 0.22578230500221252, + "learning_rate": 0.00030995555884505443, + "loss": 4.5195, + "step": 4694 + }, + { + "epoch": 0.45849609375, + "grad_norm": 0.19322265684604645, + "learning_rate": 0.0003098863524110348, + "loss": 4.5156, + "step": 4695 + }, + { + "epoch": 0.45859375, + "grad_norm": 0.18762673437595367, + "learning_rate": 0.0003098171425950202, + "loss": 4.5117, + "step": 4696 + }, + { + "epoch": 0.45869140625, + "grad_norm": 0.19441059231758118, + "learning_rate": 0.0003097479294037198, + "loss": 4.5, + "step": 4697 + }, + { + "epoch": 0.4587890625, + "grad_norm": 0.19766007363796234, + "learning_rate": 0.0003096787128438436, + "loss": 4.5078, + "step": 4698 + }, + { + "epoch": 0.45888671875, + "grad_norm": 0.19299165904521942, + "learning_rate": 0.00030960949292210143, + "loss": 4.5039, + "step": 4699 + }, + { + "epoch": 0.458984375, + "grad_norm": 0.1940307766199112, + "learning_rate": 
0.00030954026964520386, + "loss": 4.5117, + "step": 4700 + }, + { + "epoch": 0.45908203125, + "grad_norm": 0.20033495128154755, + "learning_rate": 0.00030947104301986167, + "loss": 4.5117, + "step": 4701 + }, + { + "epoch": 0.4591796875, + "grad_norm": 0.19672922790050507, + "learning_rate": 0.00030940181305278574, + "loss": 4.5078, + "step": 4702 + }, + { + "epoch": 0.45927734375, + "grad_norm": 0.19895492494106293, + "learning_rate": 0.0003093325797506875, + "loss": 4.5117, + "step": 4703 + }, + { + "epoch": 0.459375, + "grad_norm": 0.19131700694561005, + "learning_rate": 0.0003092633431202787, + "loss": 4.5195, + "step": 4704 + }, + { + "epoch": 0.45947265625, + "grad_norm": 0.20749962329864502, + "learning_rate": 0.00030919410316827134, + "loss": 4.5312, + "step": 4705 + }, + { + "epoch": 0.4595703125, + "grad_norm": 0.2110370248556137, + "learning_rate": 0.00030912485990137773, + "loss": 4.543, + "step": 4706 + }, + { + "epoch": 0.45966796875, + "grad_norm": 0.21175163984298706, + "learning_rate": 0.00030905561332631056, + "loss": 4.5312, + "step": 4707 + }, + { + "epoch": 0.459765625, + "grad_norm": 0.21531248092651367, + "learning_rate": 0.00030898636344978283, + "loss": 4.4922, + "step": 4708 + }, + { + "epoch": 0.45986328125, + "grad_norm": 0.2163790613412857, + "learning_rate": 0.0003089171102785078, + "loss": 4.5039, + "step": 4709 + }, + { + "epoch": 0.4599609375, + "grad_norm": 0.23695312440395355, + "learning_rate": 0.00030884785381919916, + "loss": 4.4883, + "step": 4710 + }, + { + "epoch": 0.46005859375, + "grad_norm": 0.2382650375366211, + "learning_rate": 0.0003087785940785708, + "loss": 4.5508, + "step": 4711 + }, + { + "epoch": 0.46015625, + "grad_norm": 0.2358185201883316, + "learning_rate": 0.000308709331063337, + "loss": 4.5195, + "step": 4712 + }, + { + "epoch": 0.46025390625, + "grad_norm": 0.19798113405704498, + "learning_rate": 0.00030864006478021234, + "loss": 4.5742, + "step": 4713 + }, + { + "epoch": 0.4603515625, + "grad_norm": 0.18412651121616364, + "learning_rate": 0.00030857079523591177, + "loss": 4.5117, + "step": 4714 + }, + { + "epoch": 0.46044921875, + "grad_norm": 0.19464971125125885, + "learning_rate": 0.0003085015224371503, + "loss": 4.5547, + "step": 4715 + }, + { + "epoch": 0.460546875, + "grad_norm": 0.1900629699230194, + "learning_rate": 0.0003084322463906437, + "loss": 4.5391, + "step": 4716 + }, + { + "epoch": 0.46064453125, + "grad_norm": 0.18945296108722687, + "learning_rate": 0.00030836296710310773, + "loss": 4.5273, + "step": 4717 + }, + { + "epoch": 0.4607421875, + "grad_norm": 0.19259829819202423, + "learning_rate": 0.00030829368458125855, + "loss": 4.5312, + "step": 4718 + }, + { + "epoch": 0.46083984375, + "grad_norm": 0.2025236040353775, + "learning_rate": 0.00030822439883181264, + "loss": 4.5039, + "step": 4719 + }, + { + "epoch": 0.4609375, + "grad_norm": 0.1841021180152893, + "learning_rate": 0.00030815510986148667, + "loss": 4.5273, + "step": 4720 + }, + { + "epoch": 0.46103515625, + "grad_norm": 0.1785983443260193, + "learning_rate": 0.0003080858176769978, + "loss": 4.5273, + "step": 4721 + }, + { + "epoch": 0.4611328125, + "grad_norm": 0.19601239264011383, + "learning_rate": 0.0003080165222850635, + "loss": 4.4961, + "step": 4722 + }, + { + "epoch": 0.46123046875, + "grad_norm": 0.21515241265296936, + "learning_rate": 0.0003079472236924015, + "loss": 4.5234, + "step": 4723 + }, + { + "epoch": 0.461328125, + "grad_norm": 0.23147067427635193, + "learning_rate": 0.0003078779219057298, + "loss": 4.5273, + "step": 4724 + }, + { + 
"epoch": 0.46142578125, + "grad_norm": 0.21675175428390503, + "learning_rate": 0.0003078086169317666, + "loss": 4.5547, + "step": 4725 + }, + { + "epoch": 0.4615234375, + "grad_norm": 0.1964460164308548, + "learning_rate": 0.00030773930877723074, + "loss": 4.5195, + "step": 4726 + }, + { + "epoch": 0.46162109375, + "grad_norm": 0.18291743099689484, + "learning_rate": 0.000307669997448841, + "loss": 4.5078, + "step": 4727 + }, + { + "epoch": 0.46171875, + "grad_norm": 0.198462575674057, + "learning_rate": 0.0003076006829533169, + "loss": 4.4961, + "step": 4728 + }, + { + "epoch": 0.46181640625, + "grad_norm": 0.19275623559951782, + "learning_rate": 0.00030753136529737774, + "loss": 4.5234, + "step": 4729 + }, + { + "epoch": 0.4619140625, + "grad_norm": 0.17455407977104187, + "learning_rate": 0.0003074620444877435, + "loss": 4.5273, + "step": 4730 + }, + { + "epoch": 0.46201171875, + "grad_norm": 0.18436522781848907, + "learning_rate": 0.00030739272053113435, + "loss": 4.4766, + "step": 4731 + }, + { + "epoch": 0.462109375, + "grad_norm": 0.20330676436424255, + "learning_rate": 0.0003073233934342709, + "loss": 4.4844, + "step": 4732 + }, + { + "epoch": 0.46220703125, + "grad_norm": 0.17955738306045532, + "learning_rate": 0.00030725406320387385, + "loss": 4.5156, + "step": 4733 + }, + { + "epoch": 0.4623046875, + "grad_norm": 0.17494937777519226, + "learning_rate": 0.0003071847298466641, + "loss": 4.5078, + "step": 4734 + }, + { + "epoch": 0.46240234375, + "grad_norm": 0.1957375556230545, + "learning_rate": 0.0003071153933693634, + "loss": 4.5156, + "step": 4735 + }, + { + "epoch": 0.4625, + "grad_norm": 0.20222704112529755, + "learning_rate": 0.0003070460537786932, + "loss": 4.5312, + "step": 4736 + }, + { + "epoch": 0.46259765625, + "grad_norm": 0.19780443608760834, + "learning_rate": 0.0003069767110813756, + "loss": 4.5078, + "step": 4737 + }, + { + "epoch": 0.4626953125, + "grad_norm": 0.19791901111602783, + "learning_rate": 0.0003069073652841329, + "loss": 4.5, + "step": 4738 + }, + { + "epoch": 0.46279296875, + "grad_norm": 0.20233005285263062, + "learning_rate": 0.00030683801639368766, + "loss": 4.5117, + "step": 4739 + }, + { + "epoch": 0.462890625, + "grad_norm": 0.1741165667772293, + "learning_rate": 0.00030676866441676283, + "loss": 4.4766, + "step": 4740 + }, + { + "epoch": 0.46298828125, + "grad_norm": 0.18705081939697266, + "learning_rate": 0.0003066993093600816, + "loss": 4.5273, + "step": 4741 + }, + { + "epoch": 0.4630859375, + "grad_norm": 0.1998337358236313, + "learning_rate": 0.0003066299512303675, + "loss": 4.5039, + "step": 4742 + }, + { + "epoch": 0.46318359375, + "grad_norm": 0.1985979676246643, + "learning_rate": 0.00030656059003434435, + "loss": 4.5391, + "step": 4743 + }, + { + "epoch": 0.46328125, + "grad_norm": 0.18911968171596527, + "learning_rate": 0.0003064912257787361, + "loss": 4.5078, + "step": 4744 + }, + { + "epoch": 0.46337890625, + "grad_norm": 0.1956348568201065, + "learning_rate": 0.00030642185847026723, + "loss": 4.4805, + "step": 4745 + }, + { + "epoch": 0.4634765625, + "grad_norm": 0.19758553802967072, + "learning_rate": 0.00030635248811566244, + "loss": 4.5391, + "step": 4746 + }, + { + "epoch": 0.46357421875, + "grad_norm": 0.19955140352249146, + "learning_rate": 0.00030628311472164675, + "loss": 4.5195, + "step": 4747 + }, + { + "epoch": 0.463671875, + "grad_norm": 0.1939479112625122, + "learning_rate": 0.0003062137382949455, + "loss": 4.5195, + "step": 4748 + }, + { + "epoch": 0.46376953125, + "grad_norm": 0.20367209613323212, + "learning_rate": 
0.000306144358842284, + "loss": 4.5039, + "step": 4749 + }, + { + "epoch": 0.4638671875, + "grad_norm": 0.19261784851551056, + "learning_rate": 0.0003060749763703884, + "loss": 4.5273, + "step": 4750 + }, + { + "epoch": 0.46396484375, + "grad_norm": 0.1813916563987732, + "learning_rate": 0.00030600559088598476, + "loss": 4.5391, + "step": 4751 + }, + { + "epoch": 0.4640625, + "grad_norm": 0.1927923858165741, + "learning_rate": 0.0003059362023957994, + "loss": 4.5039, + "step": 4752 + }, + { + "epoch": 0.46416015625, + "grad_norm": 0.18640850484371185, + "learning_rate": 0.0003058668109065593, + "loss": 4.5039, + "step": 4753 + }, + { + "epoch": 0.4642578125, + "grad_norm": 0.19412115216255188, + "learning_rate": 0.00030579741642499135, + "loss": 4.5078, + "step": 4754 + }, + { + "epoch": 0.46435546875, + "grad_norm": 0.2154243290424347, + "learning_rate": 0.00030572801895782295, + "loss": 4.4844, + "step": 4755 + }, + { + "epoch": 0.464453125, + "grad_norm": 0.19964979588985443, + "learning_rate": 0.00030565861851178155, + "loss": 4.5391, + "step": 4756 + }, + { + "epoch": 0.46455078125, + "grad_norm": 0.21787723898887634, + "learning_rate": 0.0003055892150935952, + "loss": 4.5117, + "step": 4757 + }, + { + "epoch": 0.4646484375, + "grad_norm": 0.2491428703069687, + "learning_rate": 0.00030551980870999207, + "loss": 4.5117, + "step": 4758 + }, + { + "epoch": 0.46474609375, + "grad_norm": 0.22427338361740112, + "learning_rate": 0.00030545039936770066, + "loss": 4.5234, + "step": 4759 + }, + { + "epoch": 0.46484375, + "grad_norm": 0.22033701837062836, + "learning_rate": 0.0003053809870734497, + "loss": 4.4844, + "step": 4760 + }, + { + "epoch": 0.46494140625, + "grad_norm": 0.21117527782917023, + "learning_rate": 0.00030531157183396826, + "loss": 4.5508, + "step": 4761 + }, + { + "epoch": 0.4650390625, + "grad_norm": 0.19107000529766083, + "learning_rate": 0.0003052421536559855, + "loss": 4.4844, + "step": 4762 + }, + { + "epoch": 0.46513671875, + "grad_norm": 0.2275216430425644, + "learning_rate": 0.0003051727325462313, + "loss": 4.4922, + "step": 4763 + }, + { + "epoch": 0.465234375, + "grad_norm": 0.25748100876808167, + "learning_rate": 0.0003051033085114355, + "loss": 4.5156, + "step": 4764 + }, + { + "epoch": 0.46533203125, + "grad_norm": 0.22953814268112183, + "learning_rate": 0.0003050338815583282, + "loss": 4.5156, + "step": 4765 + }, + { + "epoch": 0.4654296875, + "grad_norm": 0.1911054253578186, + "learning_rate": 0.00030496445169363997, + "loss": 4.4805, + "step": 4766 + }, + { + "epoch": 0.46552734375, + "grad_norm": 0.21987749636173248, + "learning_rate": 0.00030489501892410145, + "loss": 4.5273, + "step": 4767 + }, + { + "epoch": 0.465625, + "grad_norm": 0.2672731578350067, + "learning_rate": 0.0003048255832564438, + "loss": 4.5469, + "step": 4768 + }, + { + "epoch": 0.46572265625, + "grad_norm": 0.21560978889465332, + "learning_rate": 0.00030475614469739826, + "loss": 4.5469, + "step": 4769 + }, + { + "epoch": 0.4658203125, + "grad_norm": 0.19551494717597961, + "learning_rate": 0.0003046867032536964, + "loss": 4.5234, + "step": 4770 + }, + { + "epoch": 0.46591796875, + "grad_norm": 0.23425531387329102, + "learning_rate": 0.0003046172589320702, + "loss": 4.5234, + "step": 4771 + }, + { + "epoch": 0.466015625, + "grad_norm": 0.22461086511611938, + "learning_rate": 0.0003045478117392518, + "loss": 4.5195, + "step": 4772 + }, + { + "epoch": 0.46611328125, + "grad_norm": 0.1848098337650299, + "learning_rate": 0.00030447836168197355, + "loss": 4.5117, + "step": 4773 + }, + { + 
"epoch": 0.4662109375, + "grad_norm": 0.21256153285503387, + "learning_rate": 0.0003044089087669682, + "loss": 4.4844, + "step": 4774 + }, + { + "epoch": 0.46630859375, + "grad_norm": 0.22910505533218384, + "learning_rate": 0.0003043394530009687, + "loss": 4.5312, + "step": 4775 + }, + { + "epoch": 0.46640625, + "grad_norm": 0.19095651805400848, + "learning_rate": 0.00030426999439070847, + "loss": 4.5273, + "step": 4776 + }, + { + "epoch": 0.46650390625, + "grad_norm": 0.19251839816570282, + "learning_rate": 0.00030420053294292083, + "loss": 4.5312, + "step": 4777 + }, + { + "epoch": 0.4666015625, + "grad_norm": 0.20083418488502502, + "learning_rate": 0.00030413106866433975, + "loss": 4.5234, + "step": 4778 + }, + { + "epoch": 0.46669921875, + "grad_norm": 0.1859997659921646, + "learning_rate": 0.00030406160156169927, + "loss": 4.5078, + "step": 4779 + }, + { + "epoch": 0.466796875, + "grad_norm": 0.2038940191268921, + "learning_rate": 0.0003039921316417337, + "loss": 4.5078, + "step": 4780 + }, + { + "epoch": 0.46689453125, + "grad_norm": 0.22971504926681519, + "learning_rate": 0.0003039226589111779, + "loss": 4.4844, + "step": 4781 + }, + { + "epoch": 0.4669921875, + "grad_norm": 0.22064350545406342, + "learning_rate": 0.0003038531833767665, + "loss": 4.5312, + "step": 4782 + }, + { + "epoch": 0.46708984375, + "grad_norm": 0.18288478255271912, + "learning_rate": 0.00030378370504523483, + "loss": 4.543, + "step": 4783 + }, + { + "epoch": 0.4671875, + "grad_norm": 0.18336179852485657, + "learning_rate": 0.00030371422392331827, + "loss": 4.5, + "step": 4784 + }, + { + "epoch": 0.46728515625, + "grad_norm": 0.20549464225769043, + "learning_rate": 0.0003036447400177526, + "loss": 4.5391, + "step": 4785 + }, + { + "epoch": 0.4673828125, + "grad_norm": 0.1856648176908493, + "learning_rate": 0.00030357525333527387, + "loss": 4.4922, + "step": 4786 + }, + { + "epoch": 0.46748046875, + "grad_norm": 0.20456022024154663, + "learning_rate": 0.00030350576388261816, + "loss": 4.5547, + "step": 4787 + }, + { + "epoch": 0.467578125, + "grad_norm": 0.18917760252952576, + "learning_rate": 0.00030343627166652215, + "loss": 4.5039, + "step": 4788 + }, + { + "epoch": 0.46767578125, + "grad_norm": 0.1960541158914566, + "learning_rate": 0.00030336677669372264, + "loss": 4.5352, + "step": 4789 + }, + { + "epoch": 0.4677734375, + "grad_norm": 0.18250985443592072, + "learning_rate": 0.00030329727897095664, + "loss": 4.5469, + "step": 4790 + }, + { + "epoch": 0.46787109375, + "grad_norm": 0.2017473429441452, + "learning_rate": 0.0003032277785049615, + "loss": 4.5312, + "step": 4791 + }, + { + "epoch": 0.46796875, + "grad_norm": 0.18474480509757996, + "learning_rate": 0.00030315827530247476, + "loss": 4.5234, + "step": 4792 + }, + { + "epoch": 0.46806640625, + "grad_norm": 0.20611371099948883, + "learning_rate": 0.00030308876937023443, + "loss": 4.543, + "step": 4793 + }, + { + "epoch": 0.4681640625, + "grad_norm": 0.1845095157623291, + "learning_rate": 0.0003030192607149785, + "loss": 4.4844, + "step": 4794 + }, + { + "epoch": 0.46826171875, + "grad_norm": 0.1869344711303711, + "learning_rate": 0.00030294974934344537, + "loss": 4.5352, + "step": 4795 + }, + { + "epoch": 0.468359375, + "grad_norm": 0.1770923137664795, + "learning_rate": 0.0003028802352623738, + "loss": 4.5, + "step": 4796 + }, + { + "epoch": 0.46845703125, + "grad_norm": 0.19603200256824493, + "learning_rate": 0.00030281071847850253, + "loss": 4.4961, + "step": 4797 + }, + { + "epoch": 0.4685546875, + "grad_norm": 0.1799277663230896, + 
"learning_rate": 0.0003027411989985711, + "loss": 4.4727, + "step": 4798 + }, + { + "epoch": 0.46865234375, + "grad_norm": 0.1827368140220642, + "learning_rate": 0.0003026716768293185, + "loss": 4.4922, + "step": 4799 + }, + { + "epoch": 0.46875, + "grad_norm": 0.18589933216571808, + "learning_rate": 0.0003026021519774847, + "loss": 4.5547, + "step": 4800 + }, + { + "epoch": 0.46884765625, + "grad_norm": 0.18215101957321167, + "learning_rate": 0.00030253262444980957, + "loss": 4.5156, + "step": 4801 + }, + { + "epoch": 0.4689453125, + "grad_norm": 0.1884995847940445, + "learning_rate": 0.00030246309425303343, + "loss": 4.4961, + "step": 4802 + }, + { + "epoch": 0.46904296875, + "grad_norm": 0.20671796798706055, + "learning_rate": 0.0003023935613938966, + "loss": 4.5078, + "step": 4803 + }, + { + "epoch": 0.469140625, + "grad_norm": 0.21057972311973572, + "learning_rate": 0.00030232402587914, + "loss": 4.5039, + "step": 4804 + }, + { + "epoch": 0.46923828125, + "grad_norm": 0.20597267150878906, + "learning_rate": 0.00030225448771550444, + "loss": 4.5039, + "step": 4805 + }, + { + "epoch": 0.4693359375, + "grad_norm": 0.19343122839927673, + "learning_rate": 0.0003021849469097315, + "loss": 4.4961, + "step": 4806 + }, + { + "epoch": 0.46943359375, + "grad_norm": 0.19192254543304443, + "learning_rate": 0.00030211540346856226, + "loss": 4.5117, + "step": 4807 + }, + { + "epoch": 0.46953125, + "grad_norm": 0.1820961982011795, + "learning_rate": 0.0003020458573987387, + "loss": 4.5078, + "step": 4808 + }, + { + "epoch": 0.46962890625, + "grad_norm": 0.18244512379169464, + "learning_rate": 0.0003019763087070028, + "loss": 4.5156, + "step": 4809 + }, + { + "epoch": 0.4697265625, + "grad_norm": 0.1741866171360016, + "learning_rate": 0.00030190675740009696, + "loss": 4.5078, + "step": 4810 + }, + { + "epoch": 0.46982421875, + "grad_norm": 0.18711112439632416, + "learning_rate": 0.0003018372034847636, + "loss": 4.4922, + "step": 4811 + }, + { + "epoch": 0.469921875, + "grad_norm": 0.18695634603500366, + "learning_rate": 0.0003017676469677454, + "loss": 4.5117, + "step": 4812 + }, + { + "epoch": 0.47001953125, + "grad_norm": 0.2119150012731552, + "learning_rate": 0.0003016980878557856, + "loss": 4.5078, + "step": 4813 + }, + { + "epoch": 0.4701171875, + "grad_norm": 0.23212242126464844, + "learning_rate": 0.0003016285261556274, + "loss": 4.5234, + "step": 4814 + }, + { + "epoch": 0.47021484375, + "grad_norm": 0.22071973979473114, + "learning_rate": 0.00030155896187401425, + "loss": 4.5469, + "step": 4815 + }, + { + "epoch": 0.4703125, + "grad_norm": 0.18164436519145966, + "learning_rate": 0.00030148939501768994, + "loss": 4.5039, + "step": 4816 + }, + { + "epoch": 0.47041015625, + "grad_norm": 0.18277615308761597, + "learning_rate": 0.0003014198255933987, + "loss": 4.5234, + "step": 4817 + }, + { + "epoch": 0.4705078125, + "grad_norm": 0.18201792240142822, + "learning_rate": 0.0003013502536078846, + "loss": 4.4766, + "step": 4818 + }, + { + "epoch": 0.47060546875, + "grad_norm": 0.18771183490753174, + "learning_rate": 0.0003012806790678923, + "loss": 4.5078, + "step": 4819 + }, + { + "epoch": 0.470703125, + "grad_norm": 0.18341046571731567, + "learning_rate": 0.00030121110198016657, + "loss": 4.5, + "step": 4820 + }, + { + "epoch": 0.47080078125, + "grad_norm": 0.17520374059677124, + "learning_rate": 0.0003011415223514523, + "loss": 4.5273, + "step": 4821 + }, + { + "epoch": 0.4708984375, + "grad_norm": 0.19353324174880981, + "learning_rate": 0.00030107194018849485, + "loss": 4.5078, + "step": 4822 + 
}, + { + "epoch": 0.47099609375, + "grad_norm": 0.1752232164144516, + "learning_rate": 0.0003010023554980399, + "loss": 4.5195, + "step": 4823 + }, + { + "epoch": 0.47109375, + "grad_norm": 0.17881426215171814, + "learning_rate": 0.00030093276828683293, + "loss": 4.5078, + "step": 4824 + }, + { + "epoch": 0.47119140625, + "grad_norm": 0.18315884470939636, + "learning_rate": 0.0003008631785616201, + "loss": 4.4961, + "step": 4825 + }, + { + "epoch": 0.4712890625, + "grad_norm": 0.19338764250278473, + "learning_rate": 0.00030079358632914766, + "loss": 4.5078, + "step": 4826 + }, + { + "epoch": 0.47138671875, + "grad_norm": 0.19729405641555786, + "learning_rate": 0.00030072399159616214, + "loss": 4.5078, + "step": 4827 + }, + { + "epoch": 0.471484375, + "grad_norm": 0.19854365289211273, + "learning_rate": 0.0003006543943694101, + "loss": 4.5156, + "step": 4828 + }, + { + "epoch": 0.47158203125, + "grad_norm": 0.19028058648109436, + "learning_rate": 0.00030058479465563877, + "loss": 4.5195, + "step": 4829 + }, + { + "epoch": 0.4716796875, + "grad_norm": 0.19432297348976135, + "learning_rate": 0.00030051519246159517, + "loss": 4.5039, + "step": 4830 + }, + { + "epoch": 0.47177734375, + "grad_norm": 0.1898324340581894, + "learning_rate": 0.00030044558779402687, + "loss": 4.5156, + "step": 4831 + }, + { + "epoch": 0.471875, + "grad_norm": 0.23504133522510529, + "learning_rate": 0.00030037598065968147, + "loss": 4.5234, + "step": 4832 + }, + { + "epoch": 0.47197265625, + "grad_norm": 0.27290987968444824, + "learning_rate": 0.0003003063710653071, + "loss": 4.4844, + "step": 4833 + }, + { + "epoch": 0.4720703125, + "grad_norm": 0.2575030028820038, + "learning_rate": 0.00030023675901765163, + "loss": 4.5273, + "step": 4834 + }, + { + "epoch": 0.47216796875, + "grad_norm": 0.20062363147735596, + "learning_rate": 0.00030016714452346384, + "loss": 4.4844, + "step": 4835 + }, + { + "epoch": 0.472265625, + "grad_norm": 0.20653186738491058, + "learning_rate": 0.00030009752758949204, + "loss": 4.543, + "step": 4836 + }, + { + "epoch": 0.47236328125, + "grad_norm": 0.2473936676979065, + "learning_rate": 0.00030002790822248536, + "loss": 4.4766, + "step": 4837 + }, + { + "epoch": 0.4724609375, + "grad_norm": 0.23700372874736786, + "learning_rate": 0.0002999582864291928, + "loss": 4.5156, + "step": 4838 + }, + { + "epoch": 0.47255859375, + "grad_norm": 0.1909337192773819, + "learning_rate": 0.0002998886622163638, + "loss": 4.5039, + "step": 4839 + }, + { + "epoch": 0.47265625, + "grad_norm": 0.23799315094947815, + "learning_rate": 0.0002998190355907479, + "loss": 4.5273, + "step": 4840 + }, + { + "epoch": 0.47275390625, + "grad_norm": 0.2602790892124176, + "learning_rate": 0.000299749406559095, + "loss": 4.5117, + "step": 4841 + }, + { + "epoch": 0.4728515625, + "grad_norm": 0.19429989159107208, + "learning_rate": 0.00029967977512815504, + "loss": 4.5039, + "step": 4842 + }, + { + "epoch": 0.47294921875, + "grad_norm": 0.21890947222709656, + "learning_rate": 0.0002996101413046785, + "loss": 4.5234, + "step": 4843 + }, + { + "epoch": 0.473046875, + "grad_norm": 0.22737354040145874, + "learning_rate": 0.00029954050509541563, + "loss": 4.5195, + "step": 4844 + }, + { + "epoch": 0.47314453125, + "grad_norm": 0.18165530264377594, + "learning_rate": 0.0002994708665071174, + "loss": 4.5117, + "step": 4845 + }, + { + "epoch": 0.4732421875, + "grad_norm": 0.20094479620456696, + "learning_rate": 0.00029940122554653477, + "loss": 4.5039, + "step": 4846 + }, + { + "epoch": 0.47333984375, + "grad_norm": 
0.22545143961906433, + "learning_rate": 0.0002993315822204189, + "loss": 4.5117, + "step": 4847 + }, + { + "epoch": 0.4734375, + "grad_norm": 0.18449166417121887, + "learning_rate": 0.0002992619365355214, + "loss": 4.5117, + "step": 4848 + }, + { + "epoch": 0.47353515625, + "grad_norm": 0.20629574358463287, + "learning_rate": 0.00029919228849859366, + "loss": 4.4922, + "step": 4849 + }, + { + "epoch": 0.4736328125, + "grad_norm": 0.25417885184288025, + "learning_rate": 0.0002991226381163878, + "loss": 4.5352, + "step": 4850 + }, + { + "epoch": 0.47373046875, + "grad_norm": 0.21857663989067078, + "learning_rate": 0.00029905298539565587, + "loss": 4.5039, + "step": 4851 + }, + { + "epoch": 0.473828125, + "grad_norm": 0.18640853464603424, + "learning_rate": 0.0002989833303431503, + "loss": 4.5195, + "step": 4852 + }, + { + "epoch": 0.47392578125, + "grad_norm": 0.24365463852882385, + "learning_rate": 0.0002989136729656236, + "loss": 4.4922, + "step": 4853 + }, + { + "epoch": 0.4740234375, + "grad_norm": 0.19696946442127228, + "learning_rate": 0.00029884401326982864, + "loss": 4.5117, + "step": 4854 + }, + { + "epoch": 0.47412109375, + "grad_norm": 0.20019882917404175, + "learning_rate": 0.0002987743512625184, + "loss": 4.5, + "step": 4855 + }, + { + "epoch": 0.47421875, + "grad_norm": 0.2133832573890686, + "learning_rate": 0.0002987046869504462, + "loss": 4.5039, + "step": 4856 + }, + { + "epoch": 0.47431640625, + "grad_norm": 0.18329374492168427, + "learning_rate": 0.0002986350203403654, + "loss": 4.5195, + "step": 4857 + }, + { + "epoch": 0.4744140625, + "grad_norm": 0.20473390817642212, + "learning_rate": 0.0002985653514390298, + "loss": 4.5508, + "step": 4858 + }, + { + "epoch": 0.47451171875, + "grad_norm": 0.22099512815475464, + "learning_rate": 0.0002984956802531935, + "loss": 4.4961, + "step": 4859 + }, + { + "epoch": 0.474609375, + "grad_norm": 0.18871904909610748, + "learning_rate": 0.00029842600678961023, + "loss": 4.4883, + "step": 4860 + }, + { + "epoch": 0.47470703125, + "grad_norm": 0.217277392745018, + "learning_rate": 0.0002983563310550347, + "loss": 4.4727, + "step": 4861 + }, + { + "epoch": 0.4748046875, + "grad_norm": 0.21063847839832306, + "learning_rate": 0.00029828665305622133, + "loss": 4.5391, + "step": 4862 + }, + { + "epoch": 0.47490234375, + "grad_norm": 0.1926523596048355, + "learning_rate": 0.0002982169727999251, + "loss": 4.5156, + "step": 4863 + }, + { + "epoch": 0.475, + "grad_norm": 0.2093006819486618, + "learning_rate": 0.00029814729029290085, + "loss": 4.543, + "step": 4864 + }, + { + "epoch": 0.47509765625, + "grad_norm": 0.20262162387371063, + "learning_rate": 0.00029807760554190395, + "loss": 4.4961, + "step": 4865 + }, + { + "epoch": 0.4751953125, + "grad_norm": 0.17795535922050476, + "learning_rate": 0.00029800791855368974, + "loss": 4.5195, + "step": 4866 + }, + { + "epoch": 0.47529296875, + "grad_norm": 0.20611195266246796, + "learning_rate": 0.000297938229335014, + "loss": 4.5352, + "step": 4867 + }, + { + "epoch": 0.475390625, + "grad_norm": 0.19708003103733063, + "learning_rate": 0.00029786853789263274, + "loss": 4.5117, + "step": 4868 + }, + { + "epoch": 0.47548828125, + "grad_norm": 0.18446294963359833, + "learning_rate": 0.00029779884423330186, + "loss": 4.4922, + "step": 4869 + }, + { + "epoch": 0.4755859375, + "grad_norm": 0.19320671260356903, + "learning_rate": 0.0002977291483637777, + "loss": 4.4883, + "step": 4870 + }, + { + "epoch": 0.47568359375, + "grad_norm": 0.21193119883537292, + "learning_rate": 0.000297659450290817, + "loss": 
4.5156, + "step": 4871 + }, + { + "epoch": 0.47578125, + "grad_norm": 0.17841850221157074, + "learning_rate": 0.0002975897500211762, + "loss": 4.5195, + "step": 4872 + }, + { + "epoch": 0.47587890625, + "grad_norm": 0.19898945093154907, + "learning_rate": 0.00029752004756161265, + "loss": 4.5391, + "step": 4873 + }, + { + "epoch": 0.4759765625, + "grad_norm": 0.19967150688171387, + "learning_rate": 0.0002974503429188832, + "loss": 4.5156, + "step": 4874 + }, + { + "epoch": 0.47607421875, + "grad_norm": 0.1989896297454834, + "learning_rate": 0.00029738063609974544, + "loss": 4.5, + "step": 4875 + }, + { + "epoch": 0.476171875, + "grad_norm": 0.18516696989536285, + "learning_rate": 0.0002973109271109569, + "loss": 4.4688, + "step": 4876 + }, + { + "epoch": 0.47626953125, + "grad_norm": 0.18554262816905975, + "learning_rate": 0.0002972412159592753, + "loss": 4.4727, + "step": 4877 + }, + { + "epoch": 0.4763671875, + "grad_norm": 0.18377220630645752, + "learning_rate": 0.00029717150265145897, + "loss": 4.4805, + "step": 4878 + }, + { + "epoch": 0.47646484375, + "grad_norm": 0.17921282351016998, + "learning_rate": 0.0002971017871942658, + "loss": 4.5234, + "step": 4879 + }, + { + "epoch": 0.4765625, + "grad_norm": 0.19601233303546906, + "learning_rate": 0.0002970320695944544, + "loss": 4.5312, + "step": 4880 + }, + { + "epoch": 0.47666015625, + "grad_norm": 0.1772652566432953, + "learning_rate": 0.00029696234985878334, + "loss": 4.4883, + "step": 4881 + }, + { + "epoch": 0.4767578125, + "grad_norm": 0.1815660297870636, + "learning_rate": 0.00029689262799401155, + "loss": 4.4922, + "step": 4882 + }, + { + "epoch": 0.47685546875, + "grad_norm": 0.2045193314552307, + "learning_rate": 0.0002968229040068982, + "loss": 4.4922, + "step": 4883 + }, + { + "epoch": 0.476953125, + "grad_norm": 0.19000308215618134, + "learning_rate": 0.0002967531779042022, + "loss": 4.4844, + "step": 4884 + }, + { + "epoch": 0.47705078125, + "grad_norm": 0.19714689254760742, + "learning_rate": 0.00029668344969268327, + "loss": 4.5703, + "step": 4885 + }, + { + "epoch": 0.4771484375, + "grad_norm": 0.20505721867084503, + "learning_rate": 0.0002966137193791012, + "loss": 4.4961, + "step": 4886 + }, + { + "epoch": 0.47724609375, + "grad_norm": 0.19378484785556793, + "learning_rate": 0.0002965439869702156, + "loss": 4.5039, + "step": 4887 + }, + { + "epoch": 0.47734375, + "grad_norm": 0.1906423270702362, + "learning_rate": 0.00029647425247278673, + "loss": 4.4805, + "step": 4888 + }, + { + "epoch": 0.47744140625, + "grad_norm": 0.18491792678833008, + "learning_rate": 0.0002964045158935748, + "loss": 4.5039, + "step": 4889 + }, + { + "epoch": 0.4775390625, + "grad_norm": 0.19311603903770447, + "learning_rate": 0.0002963347772393404, + "loss": 4.4805, + "step": 4890 + }, + { + "epoch": 0.47763671875, + "grad_norm": 0.2009570300579071, + "learning_rate": 0.0002962650365168441, + "loss": 4.5039, + "step": 4891 + }, + { + "epoch": 0.477734375, + "grad_norm": 0.23975831270217896, + "learning_rate": 0.0002961952937328468, + "loss": 4.5117, + "step": 4892 + }, + { + "epoch": 0.47783203125, + "grad_norm": 0.20882104337215424, + "learning_rate": 0.0002961255488941097, + "loss": 4.4883, + "step": 4893 + }, + { + "epoch": 0.4779296875, + "grad_norm": 0.18047364056110382, + "learning_rate": 0.000296055802007394, + "loss": 4.5039, + "step": 4894 + }, + { + "epoch": 0.47802734375, + "grad_norm": 0.1970759928226471, + "learning_rate": 0.00029598605307946116, + "loss": 4.5078, + "step": 4895 + }, + { + "epoch": 0.478125, + "grad_norm": 
0.22318950295448303, + "learning_rate": 0.0002959163021170729, + "loss": 4.5312, + "step": 4896 + }, + { + "epoch": 0.47822265625, + "grad_norm": 0.22871297597885132, + "learning_rate": 0.0002958465491269911, + "loss": 4.5234, + "step": 4897 + }, + { + "epoch": 0.4783203125, + "grad_norm": 0.1868111789226532, + "learning_rate": 0.0002957767941159779, + "loss": 4.5156, + "step": 4898 + }, + { + "epoch": 0.47841796875, + "grad_norm": 0.190316841006279, + "learning_rate": 0.0002957070370907955, + "loss": 4.4883, + "step": 4899 + }, + { + "epoch": 0.478515625, + "grad_norm": 0.20926153659820557, + "learning_rate": 0.00029563727805820647, + "loss": 4.4883, + "step": 4900 + }, + { + "epoch": 0.47861328125, + "grad_norm": 0.1881253868341446, + "learning_rate": 0.00029556751702497323, + "loss": 4.5039, + "step": 4901 + }, + { + "epoch": 0.4787109375, + "grad_norm": 0.1670251041650772, + "learning_rate": 0.0002954977539978589, + "loss": 4.5195, + "step": 4902 + }, + { + "epoch": 0.47880859375, + "grad_norm": 0.1877918541431427, + "learning_rate": 0.0002954279889836265, + "loss": 4.5195, + "step": 4903 + }, + { + "epoch": 0.47890625, + "grad_norm": 0.18343430757522583, + "learning_rate": 0.00029535822198903914, + "loss": 4.4844, + "step": 4904 + }, + { + "epoch": 0.47900390625, + "grad_norm": 0.1749592125415802, + "learning_rate": 0.0002952884530208604, + "loss": 4.4727, + "step": 4905 + }, + { + "epoch": 0.4791015625, + "grad_norm": 0.18059606850147247, + "learning_rate": 0.0002952186820858537, + "loss": 4.4961, + "step": 4906 + }, + { + "epoch": 0.47919921875, + "grad_norm": 0.17389936745166779, + "learning_rate": 0.0002951489091907831, + "loss": 4.5039, + "step": 4907 + }, + { + "epoch": 0.479296875, + "grad_norm": 0.18824158608913422, + "learning_rate": 0.00029507913434241253, + "loss": 4.5078, + "step": 4908 + }, + { + "epoch": 0.47939453125, + "grad_norm": 0.19429364800453186, + "learning_rate": 0.0002950093575475061, + "loss": 4.5078, + "step": 4909 + }, + { + "epoch": 0.4794921875, + "grad_norm": 0.18458998203277588, + "learning_rate": 0.0002949395788128282, + "loss": 4.5273, + "step": 4910 + }, + { + "epoch": 0.47958984375, + "grad_norm": 0.18696632981300354, + "learning_rate": 0.0002948697981451436, + "loss": 4.5312, + "step": 4911 + }, + { + "epoch": 0.4796875, + "grad_norm": 0.18765518069267273, + "learning_rate": 0.00029480001555121685, + "loss": 4.4805, + "step": 4912 + }, + { + "epoch": 0.47978515625, + "grad_norm": 0.2094191163778305, + "learning_rate": 0.000294730231037813, + "loss": 4.4727, + "step": 4913 + }, + { + "epoch": 0.4798828125, + "grad_norm": 0.18133801221847534, + "learning_rate": 0.0002946604446116971, + "loss": 4.5117, + "step": 4914 + }, + { + "epoch": 0.47998046875, + "grad_norm": 0.19348259270191193, + "learning_rate": 0.00029459065627963465, + "loss": 4.5391, + "step": 4915 + }, + { + "epoch": 0.480078125, + "grad_norm": 0.21499043703079224, + "learning_rate": 0.000294520866048391, + "loss": 4.5039, + "step": 4916 + }, + { + "epoch": 0.48017578125, + "grad_norm": 0.1931108981370926, + "learning_rate": 0.00029445107392473183, + "loss": 4.5, + "step": 4917 + }, + { + "epoch": 0.4802734375, + "grad_norm": 0.19296056032180786, + "learning_rate": 0.000294381279915423, + "loss": 4.4805, + "step": 4918 + }, + { + "epoch": 0.48037109375, + "grad_norm": 0.1830490380525589, + "learning_rate": 0.00029431148402723074, + "loss": 4.5117, + "step": 4919 + }, + { + "epoch": 0.48046875, + "grad_norm": 0.18935756385326385, + "learning_rate": 0.00029424168626692116, + "loss": 
4.4844, + "step": 4920 + }, + { + "epoch": 0.48056640625, + "grad_norm": 0.20308810472488403, + "learning_rate": 0.0002941718866412606, + "loss": 4.5, + "step": 4921 + }, + { + "epoch": 0.4806640625, + "grad_norm": 0.20616480708122253, + "learning_rate": 0.0002941020851570158, + "loss": 4.5117, + "step": 4922 + }, + { + "epoch": 0.48076171875, + "grad_norm": 0.18312186002731323, + "learning_rate": 0.00029403228182095346, + "loss": 4.4961, + "step": 4923 + }, + { + "epoch": 0.480859375, + "grad_norm": 0.1923656165599823, + "learning_rate": 0.00029396247663984065, + "loss": 4.5273, + "step": 4924 + }, + { + "epoch": 0.48095703125, + "grad_norm": 0.18236143887043, + "learning_rate": 0.0002938926696204444, + "loss": 4.5195, + "step": 4925 + }, + { + "epoch": 0.4810546875, + "grad_norm": 0.21130920946598053, + "learning_rate": 0.000293822860769532, + "loss": 4.5273, + "step": 4926 + }, + { + "epoch": 0.48115234375, + "grad_norm": 0.21677526831626892, + "learning_rate": 0.00029375305009387106, + "loss": 4.4922, + "step": 4927 + }, + { + "epoch": 0.48125, + "grad_norm": 0.18352484703063965, + "learning_rate": 0.0002936832376002292, + "loss": 4.4844, + "step": 4928 + }, + { + "epoch": 0.48134765625, + "grad_norm": 0.18204787373542786, + "learning_rate": 0.0002936134232953742, + "loss": 4.5078, + "step": 4929 + }, + { + "epoch": 0.4814453125, + "grad_norm": 0.17793574929237366, + "learning_rate": 0.00029354360718607416, + "loss": 4.4961, + "step": 4930 + }, + { + "epoch": 0.48154296875, + "grad_norm": 0.19242450594902039, + "learning_rate": 0.00029347378927909725, + "loss": 4.5547, + "step": 4931 + }, + { + "epoch": 0.481640625, + "grad_norm": 0.18779818713665009, + "learning_rate": 0.00029340396958121195, + "loss": 4.4805, + "step": 4932 + }, + { + "epoch": 0.48173828125, + "grad_norm": 0.17121903598308563, + "learning_rate": 0.0002933341480991866, + "loss": 4.4961, + "step": 4933 + }, + { + "epoch": 0.4818359375, + "grad_norm": 0.19215990602970123, + "learning_rate": 0.00029326432483979005, + "loss": 4.4922, + "step": 4934 + }, + { + "epoch": 0.48193359375, + "grad_norm": 0.2055845707654953, + "learning_rate": 0.0002931944998097912, + "loss": 4.5273, + "step": 4935 + }, + { + "epoch": 0.48203125, + "grad_norm": 0.20974083244800568, + "learning_rate": 0.000293124673015959, + "loss": 4.4766, + "step": 4936 + }, + { + "epoch": 0.48212890625, + "grad_norm": 0.2047787457704544, + "learning_rate": 0.0002930548444650629, + "loss": 4.5273, + "step": 4937 + }, + { + "epoch": 0.4822265625, + "grad_norm": 0.18928056955337524, + "learning_rate": 0.000292985014163872, + "loss": 4.5195, + "step": 4938 + }, + { + "epoch": 0.48232421875, + "grad_norm": 0.1792883723974228, + "learning_rate": 0.0002929151821191561, + "loss": 4.4844, + "step": 4939 + }, + { + "epoch": 0.482421875, + "grad_norm": 0.20913580060005188, + "learning_rate": 0.0002928453483376849, + "loss": 4.5078, + "step": 4940 + }, + { + "epoch": 0.48251953125, + "grad_norm": 0.19443626701831818, + "learning_rate": 0.0002927755128262283, + "loss": 4.4766, + "step": 4941 + }, + { + "epoch": 0.4826171875, + "grad_norm": 0.199191614985466, + "learning_rate": 0.0002927056755915564, + "loss": 4.5273, + "step": 4942 + }, + { + "epoch": 0.48271484375, + "grad_norm": 0.19665677845478058, + "learning_rate": 0.0002926358366404394, + "loss": 4.5078, + "step": 4943 + }, + { + "epoch": 0.4828125, + "grad_norm": 0.212947815656662, + "learning_rate": 0.0002925659959796477, + "loss": 4.5078, + "step": 4944 + }, + { + "epoch": 0.48291015625, + "grad_norm": 
0.23902808129787445, + "learning_rate": 0.000292496153615952, + "loss": 4.4805, + "step": 4945 + }, + { + "epoch": 0.4830078125, + "grad_norm": 0.20815140008926392, + "learning_rate": 0.0002924263095561229, + "loss": 4.5039, + "step": 4946 + }, + { + "epoch": 0.48310546875, + "grad_norm": 0.18370166420936584, + "learning_rate": 0.0002923564638069313, + "loss": 4.5195, + "step": 4947 + }, + { + "epoch": 0.483203125, + "grad_norm": 0.19981400668621063, + "learning_rate": 0.0002922866163751484, + "loss": 4.5039, + "step": 4948 + }, + { + "epoch": 0.48330078125, + "grad_norm": 0.1828223019838333, + "learning_rate": 0.0002922167672675454, + "loss": 4.5039, + "step": 4949 + }, + { + "epoch": 0.4833984375, + "grad_norm": 0.18053089082241058, + "learning_rate": 0.0002921469164908936, + "loss": 4.5352, + "step": 4950 + }, + { + "epoch": 0.48349609375, + "grad_norm": 0.18078504502773285, + "learning_rate": 0.00029207706405196465, + "loss": 4.5391, + "step": 4951 + }, + { + "epoch": 0.48359375, + "grad_norm": 0.19809898734092712, + "learning_rate": 0.0002920072099575302, + "loss": 4.4883, + "step": 4952 + }, + { + "epoch": 0.48369140625, + "grad_norm": 0.18954119086265564, + "learning_rate": 0.00029193735421436225, + "loss": 4.4766, + "step": 4953 + }, + { + "epoch": 0.4837890625, + "grad_norm": 0.18608875572681427, + "learning_rate": 0.0002918674968292327, + "loss": 4.5195, + "step": 4954 + }, + { + "epoch": 0.48388671875, + "grad_norm": 0.19539164006710052, + "learning_rate": 0.0002917976378089138, + "loss": 4.5312, + "step": 4955 + }, + { + "epoch": 0.483984375, + "grad_norm": 0.18641431629657745, + "learning_rate": 0.000291727777160178, + "loss": 4.5195, + "step": 4956 + }, + { + "epoch": 0.48408203125, + "grad_norm": 0.19816046953201294, + "learning_rate": 0.00029165791488979767, + "loss": 4.5195, + "step": 4957 + }, + { + "epoch": 0.4841796875, + "grad_norm": 0.1764269769191742, + "learning_rate": 0.0002915880510045456, + "loss": 4.4844, + "step": 4958 + }, + { + "epoch": 0.48427734375, + "grad_norm": 0.19150441884994507, + "learning_rate": 0.0002915181855111945, + "loss": 4.5078, + "step": 4959 + }, + { + "epoch": 0.484375, + "grad_norm": 0.18321380019187927, + "learning_rate": 0.00029144831841651747, + "loss": 4.5117, + "step": 4960 + }, + { + "epoch": 0.48447265625, + "grad_norm": 0.17719793319702148, + "learning_rate": 0.0002913784497272876, + "loss": 4.5352, + "step": 4961 + }, + { + "epoch": 0.4845703125, + "grad_norm": 0.1779632270336151, + "learning_rate": 0.00029130857945027823, + "loss": 4.5078, + "step": 4962 + }, + { + "epoch": 0.48466796875, + "grad_norm": 0.1783190667629242, + "learning_rate": 0.00029123870759226276, + "loss": 4.4805, + "step": 4963 + }, + { + "epoch": 0.484765625, + "grad_norm": 0.17575164139270782, + "learning_rate": 0.0002911688341600147, + "loss": 4.5156, + "step": 4964 + }, + { + "epoch": 0.48486328125, + "grad_norm": 0.18184641003608704, + "learning_rate": 0.0002910989591603079, + "loss": 4.5039, + "step": 4965 + }, + { + "epoch": 0.4849609375, + "grad_norm": 0.20890910923480988, + "learning_rate": 0.0002910290825999164, + "loss": 4.5234, + "step": 4966 + }, + { + "epoch": 0.48505859375, + "grad_norm": 0.22401532530784607, + "learning_rate": 0.0002909592044856141, + "loss": 4.5273, + "step": 4967 + }, + { + "epoch": 0.48515625, + "grad_norm": 0.21770545840263367, + "learning_rate": 0.00029088932482417517, + "loss": 4.4961, + "step": 4968 + }, + { + "epoch": 0.48525390625, + "grad_norm": 0.17497552931308746, + "learning_rate": 0.00029081944362237413, + 
"loss": 4.4883, + "step": 4969 + }, + { + "epoch": 0.4853515625, + "grad_norm": 0.18110641837120056, + "learning_rate": 0.0002907495608869854, + "loss": 4.5117, + "step": 4970 + }, + { + "epoch": 0.48544921875, + "grad_norm": 0.21781732141971588, + "learning_rate": 0.0002906796766247835, + "loss": 4.4805, + "step": 4971 + }, + { + "epoch": 0.485546875, + "grad_norm": 0.22249147295951843, + "learning_rate": 0.0002906097908425435, + "loss": 4.5312, + "step": 4972 + }, + { + "epoch": 0.48564453125, + "grad_norm": 0.18605491518974304, + "learning_rate": 0.0002905399035470401, + "loss": 4.5039, + "step": 4973 + }, + { + "epoch": 0.4857421875, + "grad_norm": 0.19825677573680878, + "learning_rate": 0.0002904700147450487, + "loss": 4.4961, + "step": 4974 + }, + { + "epoch": 0.48583984375, + "grad_norm": 0.23580402135849, + "learning_rate": 0.00029040012444334426, + "loss": 4.4727, + "step": 4975 + }, + { + "epoch": 0.4859375, + "grad_norm": 0.2366577386856079, + "learning_rate": 0.00029033023264870227, + "loss": 4.4766, + "step": 4976 + }, + { + "epoch": 0.48603515625, + "grad_norm": 0.17717225849628448, + "learning_rate": 0.00029026033936789823, + "loss": 4.5117, + "step": 4977 + }, + { + "epoch": 0.4861328125, + "grad_norm": 0.2250281721353531, + "learning_rate": 0.0002901904446077079, + "loss": 4.5039, + "step": 4978 + }, + { + "epoch": 0.48623046875, + "grad_norm": 0.27039432525634766, + "learning_rate": 0.00029012054837490716, + "loss": 4.4883, + "step": 4979 + }, + { + "epoch": 0.486328125, + "grad_norm": 0.2066558599472046, + "learning_rate": 0.00029005065067627174, + "loss": 4.5469, + "step": 4980 + }, + { + "epoch": 0.48642578125, + "grad_norm": 0.19609202444553375, + "learning_rate": 0.00028998075151857796, + "loss": 4.543, + "step": 4981 + }, + { + "epoch": 0.4865234375, + "grad_norm": 0.2268548160791397, + "learning_rate": 0.000289910850908602, + "loss": 4.4727, + "step": 4982 + }, + { + "epoch": 0.48662109375, + "grad_norm": 0.1844746619462967, + "learning_rate": 0.00028984094885312024, + "loss": 4.4766, + "step": 4983 + }, + { + "epoch": 0.48671875, + "grad_norm": 0.1933065503835678, + "learning_rate": 0.0002897710453589092, + "loss": 4.4961, + "step": 4984 + }, + { + "epoch": 0.48681640625, + "grad_norm": 0.21170346438884735, + "learning_rate": 0.0002897011404327455, + "loss": 4.4883, + "step": 4985 + }, + { + "epoch": 0.4869140625, + "grad_norm": 0.17873001098632812, + "learning_rate": 0.0002896312340814061, + "loss": 4.5195, + "step": 4986 + }, + { + "epoch": 0.48701171875, + "grad_norm": 0.19476519525051117, + "learning_rate": 0.0002895613263116678, + "loss": 4.5273, + "step": 4987 + }, + { + "epoch": 0.487109375, + "grad_norm": 0.19193507730960846, + "learning_rate": 0.0002894914171303078, + "loss": 4.4922, + "step": 4988 + }, + { + "epoch": 0.48720703125, + "grad_norm": 0.18205752968788147, + "learning_rate": 0.0002894215065441032, + "loss": 4.5039, + "step": 4989 + }, + { + "epoch": 0.4873046875, + "grad_norm": 0.18413770198822021, + "learning_rate": 0.00028935159455983146, + "loss": 4.5039, + "step": 4990 + }, + { + "epoch": 0.48740234375, + "grad_norm": 0.19664976000785828, + "learning_rate": 0.0002892816811842701, + "loss": 4.5039, + "step": 4991 + }, + { + "epoch": 0.4875, + "grad_norm": 0.17460240423679352, + "learning_rate": 0.00028921176642419656, + "loss": 4.5, + "step": 4992 + }, + { + "epoch": 0.48759765625, + "grad_norm": 0.1874685287475586, + "learning_rate": 0.0002891418502863888, + "loss": 4.5, + "step": 4993 + }, + { + "epoch": 0.4876953125, + "grad_norm": 
0.18278323113918304, + "learning_rate": 0.0002890719327776246, + "loss": 4.5273, + "step": 4994 + }, + { + "epoch": 0.48779296875, + "grad_norm": 0.18636064231395721, + "learning_rate": 0.00028900201390468194, + "loss": 4.4766, + "step": 4995 + }, + { + "epoch": 0.487890625, + "grad_norm": 0.19439184665679932, + "learning_rate": 0.0002889320936743392, + "loss": 4.5039, + "step": 4996 + }, + { + "epoch": 0.48798828125, + "grad_norm": 0.1704280972480774, + "learning_rate": 0.00028886217209337446, + "loss": 4.4883, + "step": 4997 + }, + { + "epoch": 0.4880859375, + "grad_norm": 0.19042551517486572, + "learning_rate": 0.00028879224916856627, + "loss": 4.5117, + "step": 4998 + }, + { + "epoch": 0.48818359375, + "grad_norm": 0.18333657085895538, + "learning_rate": 0.0002887223249066931, + "loss": 4.4844, + "step": 4999 + }, + { + "epoch": 0.48828125, + "grad_norm": 0.18821942806243896, + "learning_rate": 0.00028865239931453367, + "loss": 4.5234, + "step": 5000 + }, + { + "epoch": 0.48837890625, + "grad_norm": 0.1875939667224884, + "learning_rate": 0.00028858247239886683, + "loss": 4.5078, + "step": 5001 + }, + { + "epoch": 0.4884765625, + "grad_norm": 0.18211457133293152, + "learning_rate": 0.0002885125441664713, + "loss": 4.5273, + "step": 5002 + }, + { + "epoch": 0.48857421875, + "grad_norm": 0.18460799753665924, + "learning_rate": 0.0002884426146241265, + "loss": 4.4922, + "step": 5003 + }, + { + "epoch": 0.488671875, + "grad_norm": 0.19913485646247864, + "learning_rate": 0.00028837268377861143, + "loss": 4.4766, + "step": 5004 + }, + { + "epoch": 0.48876953125, + "grad_norm": 0.2219424843788147, + "learning_rate": 0.0002883027516367054, + "loss": 4.5234, + "step": 5005 + }, + { + "epoch": 0.4888671875, + "grad_norm": 0.19520394504070282, + "learning_rate": 0.0002882328182051879, + "loss": 4.5195, + "step": 5006 + }, + { + "epoch": 0.48896484375, + "grad_norm": 0.20169228315353394, + "learning_rate": 0.00028816288349083844, + "loss": 4.5039, + "step": 5007 + }, + { + "epoch": 0.4890625, + "grad_norm": 0.1953778862953186, + "learning_rate": 0.00028809294750043677, + "loss": 4.5195, + "step": 5008 + }, + { + "epoch": 0.48916015625, + "grad_norm": 0.21013332903385162, + "learning_rate": 0.00028802301024076276, + "loss": 4.4883, + "step": 5009 + }, + { + "epoch": 0.4892578125, + "grad_norm": 0.19454868137836456, + "learning_rate": 0.00028795307171859626, + "loss": 4.5195, + "step": 5010 + }, + { + "epoch": 0.48935546875, + "grad_norm": 0.19453375041484833, + "learning_rate": 0.00028788313194071737, + "loss": 4.5078, + "step": 5011 + }, + { + "epoch": 0.489453125, + "grad_norm": 0.19482453167438507, + "learning_rate": 0.0002878131909139063, + "loss": 4.4805, + "step": 5012 + }, + { + "epoch": 0.48955078125, + "grad_norm": 0.20300403237342834, + "learning_rate": 0.00028774324864494326, + "loss": 4.5195, + "step": 5013 + }, + { + "epoch": 0.4896484375, + "grad_norm": 0.20427776873111725, + "learning_rate": 0.00028767330514060886, + "loss": 4.5, + "step": 5014 + }, + { + "epoch": 0.48974609375, + "grad_norm": 0.19173915684223175, + "learning_rate": 0.0002876033604076834, + "loss": 4.5312, + "step": 5015 + }, + { + "epoch": 0.48984375, + "grad_norm": 0.2051384001970291, + "learning_rate": 0.0002875334144529478, + "loss": 4.4883, + "step": 5016 + }, + { + "epoch": 0.48994140625, + "grad_norm": 0.20055606961250305, + "learning_rate": 0.00028746346728318275, + "loss": 4.543, + "step": 5017 + }, + { + "epoch": 0.4900390625, + "grad_norm": 0.1746288686990738, + "learning_rate": 0.0002873935189051691, + 
"loss": 4.5469, + "step": 5018 + }, + { + "epoch": 0.49013671875, + "grad_norm": 0.16243818402290344, + "learning_rate": 0.00028732356932568786, + "loss": 4.5117, + "step": 5019 + }, + { + "epoch": 0.490234375, + "grad_norm": 0.186869814991951, + "learning_rate": 0.0002872536185515203, + "loss": 4.5273, + "step": 5020 + }, + { + "epoch": 0.49033203125, + "grad_norm": 0.20324669778347015, + "learning_rate": 0.00028718366658944753, + "loss": 4.5547, + "step": 5021 + }, + { + "epoch": 0.4904296875, + "grad_norm": 0.20204879343509674, + "learning_rate": 0.000287113713446251, + "loss": 4.4883, + "step": 5022 + }, + { + "epoch": 0.49052734375, + "grad_norm": 0.19301429390907288, + "learning_rate": 0.0002870437591287121, + "loss": 4.4727, + "step": 5023 + }, + { + "epoch": 0.490625, + "grad_norm": 0.18100139498710632, + "learning_rate": 0.00028697380364361253, + "loss": 4.4961, + "step": 5024 + }, + { + "epoch": 0.49072265625, + "grad_norm": 0.2038314938545227, + "learning_rate": 0.000286903846997734, + "loss": 4.5234, + "step": 5025 + }, + { + "epoch": 0.4908203125, + "grad_norm": 0.1910288780927658, + "learning_rate": 0.0002868338891978583, + "loss": 4.4883, + "step": 5026 + }, + { + "epoch": 0.49091796875, + "grad_norm": 0.18653604388237, + "learning_rate": 0.0002867639302507673, + "loss": 4.4961, + "step": 5027 + }, + { + "epoch": 0.491015625, + "grad_norm": 0.18722912669181824, + "learning_rate": 0.0002866939701632431, + "loss": 4.5391, + "step": 5028 + }, + { + "epoch": 0.49111328125, + "grad_norm": 0.19467434287071228, + "learning_rate": 0.000286624008942068, + "loss": 4.5312, + "step": 5029 + }, + { + "epoch": 0.4912109375, + "grad_norm": 0.20004309713840485, + "learning_rate": 0.00028655404659402395, + "loss": 4.5195, + "step": 5030 + }, + { + "epoch": 0.49130859375, + "grad_norm": 0.1868520826101303, + "learning_rate": 0.0002864840831258936, + "loss": 4.5156, + "step": 5031 + }, + { + "epoch": 0.49140625, + "grad_norm": 0.20459884405136108, + "learning_rate": 0.0002864141185444593, + "loss": 4.4961, + "step": 5032 + }, + { + "epoch": 0.49150390625, + "grad_norm": 0.18055082857608795, + "learning_rate": 0.00028634415285650367, + "loss": 4.5195, + "step": 5033 + }, + { + "epoch": 0.4916015625, + "grad_norm": 0.19507567584514618, + "learning_rate": 0.00028627418606880944, + "loss": 4.5195, + "step": 5034 + }, + { + "epoch": 0.49169921875, + "grad_norm": 0.20986425876617432, + "learning_rate": 0.00028620421818815935, + "loss": 4.5156, + "step": 5035 + }, + { + "epoch": 0.491796875, + "grad_norm": 0.18932271003723145, + "learning_rate": 0.0002861342492213364, + "loss": 4.4961, + "step": 5036 + }, + { + "epoch": 0.49189453125, + "grad_norm": 0.21241046488285065, + "learning_rate": 0.00028606427917512344, + "loss": 4.5156, + "step": 5037 + }, + { + "epoch": 0.4919921875, + "grad_norm": 0.18575364351272583, + "learning_rate": 0.0002859943080563039, + "loss": 4.5117, + "step": 5038 + }, + { + "epoch": 0.49208984375, + "grad_norm": 0.18719451129436493, + "learning_rate": 0.0002859243358716607, + "loss": 4.4844, + "step": 5039 + }, + { + "epoch": 0.4921875, + "grad_norm": 0.19003239274024963, + "learning_rate": 0.0002858543626279773, + "loss": 4.4961, + "step": 5040 + }, + { + "epoch": 0.49228515625, + "grad_norm": 0.17817120254039764, + "learning_rate": 0.00028578438833203707, + "loss": 4.5, + "step": 5041 + }, + { + "epoch": 0.4923828125, + "grad_norm": 0.18309123814105988, + "learning_rate": 0.0002857144129906237, + "loss": 4.5273, + "step": 5042 + }, + { + "epoch": 0.49248046875, + 
"grad_norm": 0.1807662397623062, + "learning_rate": 0.0002856444366105205, + "loss": 4.5078, + "step": 5043 + }, + { + "epoch": 0.492578125, + "grad_norm": 0.19463421404361725, + "learning_rate": 0.0002855744591985116, + "loss": 4.5195, + "step": 5044 + }, + { + "epoch": 0.49267578125, + "grad_norm": 0.18441832065582275, + "learning_rate": 0.0002855044807613806, + "loss": 4.5078, + "step": 5045 + }, + { + "epoch": 0.4927734375, + "grad_norm": 0.1828906238079071, + "learning_rate": 0.00028543450130591154, + "loss": 4.4922, + "step": 5046 + }, + { + "epoch": 0.49287109375, + "grad_norm": 0.1764693707227707, + "learning_rate": 0.0002853645208388883, + "loss": 4.4805, + "step": 5047 + }, + { + "epoch": 0.49296875, + "grad_norm": 0.17617064714431763, + "learning_rate": 0.0002852945393670951, + "loss": 4.5234, + "step": 5048 + }, + { + "epoch": 0.49306640625, + "grad_norm": 0.1779182255268097, + "learning_rate": 0.0002852245568973162, + "loss": 4.4688, + "step": 5049 + }, + { + "epoch": 0.4931640625, + "grad_norm": 0.18217980861663818, + "learning_rate": 0.00028515457343633593, + "loss": 4.5195, + "step": 5050 + }, + { + "epoch": 0.49326171875, + "grad_norm": 0.1873096525669098, + "learning_rate": 0.0002850845889909387, + "loss": 4.5234, + "step": 5051 + }, + { + "epoch": 0.493359375, + "grad_norm": 0.1839875876903534, + "learning_rate": 0.00028501460356790897, + "loss": 4.4883, + "step": 5052 + }, + { + "epoch": 0.49345703125, + "grad_norm": 0.1858394593000412, + "learning_rate": 0.0002849446171740314, + "loss": 4.4961, + "step": 5053 + }, + { + "epoch": 0.4935546875, + "grad_norm": 0.18596035242080688, + "learning_rate": 0.0002848746298160908, + "loss": 4.5, + "step": 5054 + }, + { + "epoch": 0.49365234375, + "grad_norm": 0.17963097989559174, + "learning_rate": 0.00028480464150087173, + "loss": 4.5078, + "step": 5055 + }, + { + "epoch": 0.49375, + "grad_norm": 0.1937597095966339, + "learning_rate": 0.0002847346522351592, + "loss": 4.5078, + "step": 5056 + }, + { + "epoch": 0.49384765625, + "grad_norm": 0.18518294394016266, + "learning_rate": 0.0002846646620257383, + "loss": 4.5039, + "step": 5057 + }, + { + "epoch": 0.4939453125, + "grad_norm": 0.1847790777683258, + "learning_rate": 0.0002845946708793939, + "loss": 4.5234, + "step": 5058 + }, + { + "epoch": 0.49404296875, + "grad_norm": 0.1719365119934082, + "learning_rate": 0.00028452467880291137, + "loss": 4.4844, + "step": 5059 + }, + { + "epoch": 0.494140625, + "grad_norm": 0.17659255862236023, + "learning_rate": 0.0002844546858030758, + "loss": 4.5117, + "step": 5060 + }, + { + "epoch": 0.49423828125, + "grad_norm": 0.18106552958488464, + "learning_rate": 0.0002843846918866727, + "loss": 4.4922, + "step": 5061 + }, + { + "epoch": 0.4943359375, + "grad_norm": 0.1849226951599121, + "learning_rate": 0.00028431469706048727, + "loss": 4.5117, + "step": 5062 + }, + { + "epoch": 0.49443359375, + "grad_norm": 0.19923287630081177, + "learning_rate": 0.0002842447013313054, + "loss": 4.5039, + "step": 5063 + }, + { + "epoch": 0.49453125, + "grad_norm": 0.19704851508140564, + "learning_rate": 0.00028417470470591227, + "loss": 4.4727, + "step": 5064 + }, + { + "epoch": 0.49462890625, + "grad_norm": 0.19552062451839447, + "learning_rate": 0.00028410470719109385, + "loss": 4.5234, + "step": 5065 + }, + { + "epoch": 0.4947265625, + "grad_norm": 0.19976045191287994, + "learning_rate": 0.0002840347087936359, + "loss": 4.4648, + "step": 5066 + }, + { + "epoch": 0.49482421875, + "grad_norm": 0.1800488382577896, + "learning_rate": 0.00028396470952032427, + 
"loss": 4.4922, + "step": 5067 + }, + { + "epoch": 0.494921875, + "grad_norm": 0.20043230056762695, + "learning_rate": 0.00028389470937794487, + "loss": 4.5, + "step": 5068 + }, + { + "epoch": 0.49501953125, + "grad_norm": 0.2082027941942215, + "learning_rate": 0.0002838247083732837, + "loss": 4.5234, + "step": 5069 + }, + { + "epoch": 0.4951171875, + "grad_norm": 0.20470726490020752, + "learning_rate": 0.000283754706513127, + "loss": 4.5, + "step": 5070 + }, + { + "epoch": 0.49521484375, + "grad_norm": 0.17109589278697968, + "learning_rate": 0.000283684703804261, + "loss": 4.5, + "step": 5071 + }, + { + "epoch": 0.4953125, + "grad_norm": 0.1688377559185028, + "learning_rate": 0.0002836147002534718, + "loss": 4.4961, + "step": 5072 + }, + { + "epoch": 0.49541015625, + "grad_norm": 0.21252302825450897, + "learning_rate": 0.0002835446958675459, + "loss": 4.4805, + "step": 5073 + }, + { + "epoch": 0.4955078125, + "grad_norm": 0.22529341280460358, + "learning_rate": 0.00028347469065326974, + "loss": 4.4805, + "step": 5074 + }, + { + "epoch": 0.49560546875, + "grad_norm": 0.1969635784626007, + "learning_rate": 0.00028340468461742987, + "loss": 4.4922, + "step": 5075 + }, + { + "epoch": 0.495703125, + "grad_norm": 0.18279263377189636, + "learning_rate": 0.00028333467776681283, + "loss": 4.4688, + "step": 5076 + }, + { + "epoch": 0.49580078125, + "grad_norm": 0.21120624244213104, + "learning_rate": 0.0002832646701082054, + "loss": 4.5312, + "step": 5077 + }, + { + "epoch": 0.4958984375, + "grad_norm": 0.20648019015789032, + "learning_rate": 0.0002831946616483942, + "loss": 4.4609, + "step": 5078 + }, + { + "epoch": 0.49599609375, + "grad_norm": 0.19025729596614838, + "learning_rate": 0.00028312465239416625, + "loss": 4.5078, + "step": 5079 + }, + { + "epoch": 0.49609375, + "grad_norm": 0.183236762881279, + "learning_rate": 0.00028305464235230837, + "loss": 4.4883, + "step": 5080 + }, + { + "epoch": 0.49619140625, + "grad_norm": 0.19843505322933197, + "learning_rate": 0.0002829846315296076, + "loss": 4.4883, + "step": 5081 + }, + { + "epoch": 0.4962890625, + "grad_norm": 0.1940833479166031, + "learning_rate": 0.00028291461993285087, + "loss": 4.5, + "step": 5082 + }, + { + "epoch": 0.49638671875, + "grad_norm": 0.17861264944076538, + "learning_rate": 0.0002828446075688256, + "loss": 4.5, + "step": 5083 + }, + { + "epoch": 0.496484375, + "grad_norm": 0.20620723068714142, + "learning_rate": 0.00028277459444431887, + "loss": 4.4766, + "step": 5084 + }, + { + "epoch": 0.49658203125, + "grad_norm": 0.2087031900882721, + "learning_rate": 0.0002827045805661179, + "loss": 4.4844, + "step": 5085 + }, + { + "epoch": 0.4966796875, + "grad_norm": 0.18129120767116547, + "learning_rate": 0.0002826345659410102, + "loss": 4.5, + "step": 5086 + }, + { + "epoch": 0.49677734375, + "grad_norm": 0.18435616791248322, + "learning_rate": 0.00028256455057578306, + "loss": 4.5117, + "step": 5087 + }, + { + "epoch": 0.496875, + "grad_norm": 0.18913733959197998, + "learning_rate": 0.0002824945344772242, + "loss": 4.4844, + "step": 5088 + }, + { + "epoch": 0.49697265625, + "grad_norm": 0.17566293478012085, + "learning_rate": 0.00028242451765212096, + "loss": 4.5352, + "step": 5089 + }, + { + "epoch": 0.4970703125, + "grad_norm": 0.17829683423042297, + "learning_rate": 0.0002823545001072612, + "loss": 4.4805, + "step": 5090 + }, + { + "epoch": 0.49716796875, + "grad_norm": 0.1754644513130188, + "learning_rate": 0.00028228448184943253, + "loss": 4.5039, + "step": 5091 + }, + { + "epoch": 0.497265625, + "grad_norm": 
0.20612354576587677, + "learning_rate": 0.00028221446288542284, + "loss": 4.4844, + "step": 5092 + }, + { + "epoch": 0.49736328125, + "grad_norm": 0.18080143630504608, + "learning_rate": 0.00028214444322201986, + "loss": 4.5117, + "step": 5093 + }, + { + "epoch": 0.4974609375, + "grad_norm": 0.18130500614643097, + "learning_rate": 0.0002820744228660117, + "loss": 4.5234, + "step": 5094 + }, + { + "epoch": 0.49755859375, + "grad_norm": 0.18605540692806244, + "learning_rate": 0.0002820044018241862, + "loss": 4.4922, + "step": 5095 + }, + { + "epoch": 0.49765625, + "grad_norm": 0.1778370589017868, + "learning_rate": 0.0002819343801033314, + "loss": 4.4727, + "step": 5096 + }, + { + "epoch": 0.49775390625, + "grad_norm": 0.19978025555610657, + "learning_rate": 0.0002818643577102357, + "loss": 4.4961, + "step": 5097 + }, + { + "epoch": 0.4978515625, + "grad_norm": 0.17528221011161804, + "learning_rate": 0.000281794334651687, + "loss": 4.4766, + "step": 5098 + }, + { + "epoch": 0.49794921875, + "grad_norm": 0.17407627403736115, + "learning_rate": 0.00028172431093447366, + "loss": 4.4883, + "step": 5099 + }, + { + "epoch": 0.498046875, + "grad_norm": 0.1712258756160736, + "learning_rate": 0.000281654286565384, + "loss": 4.5039, + "step": 5100 + }, + { + "epoch": 0.49814453125, + "grad_norm": 0.17751474678516388, + "learning_rate": 0.00028158426155120645, + "loss": 4.4922, + "step": 5101 + }, + { + "epoch": 0.4982421875, + "grad_norm": 0.16980567574501038, + "learning_rate": 0.0002815142358987295, + "loss": 4.5078, + "step": 5102 + }, + { + "epoch": 0.49833984375, + "grad_norm": 0.17254739999771118, + "learning_rate": 0.00028144420961474147, + "loss": 4.4961, + "step": 5103 + }, + { + "epoch": 0.4984375, + "grad_norm": 0.18270719051361084, + "learning_rate": 0.0002813741827060311, + "loss": 4.5117, + "step": 5104 + }, + { + "epoch": 0.49853515625, + "grad_norm": 0.1776650846004486, + "learning_rate": 0.000281304155179387, + "loss": 4.5156, + "step": 5105 + }, + { + "epoch": 0.4986328125, + "grad_norm": 0.17031177878379822, + "learning_rate": 0.00028123412704159783, + "loss": 4.5039, + "step": 5106 + }, + { + "epoch": 0.49873046875, + "grad_norm": 0.1641244739294052, + "learning_rate": 0.0002811640982994523, + "loss": 4.5039, + "step": 5107 + }, + { + "epoch": 0.498828125, + "grad_norm": 0.1737366020679474, + "learning_rate": 0.00028109406895973936, + "loss": 4.5156, + "step": 5108 + }, + { + "epoch": 0.49892578125, + "grad_norm": 0.17943845689296722, + "learning_rate": 0.00028102403902924777, + "loss": 4.5, + "step": 5109 + }, + { + "epoch": 0.4990234375, + "grad_norm": 0.18026100099086761, + "learning_rate": 0.0002809540085147665, + "loss": 4.4805, + "step": 5110 + }, + { + "epoch": 0.49912109375, + "grad_norm": 0.19167636334896088, + "learning_rate": 0.0002808839774230845, + "loss": 4.4883, + "step": 5111 + }, + { + "epoch": 0.49921875, + "grad_norm": 0.16971394419670105, + "learning_rate": 0.0002808139457609907, + "loss": 4.4766, + "step": 5112 + }, + { + "epoch": 0.49931640625, + "grad_norm": 0.18336015939712524, + "learning_rate": 0.00028074391353527457, + "loss": 4.4609, + "step": 5113 + }, + { + "epoch": 0.4994140625, + "grad_norm": 0.1822541505098343, + "learning_rate": 0.0002806738807527248, + "loss": 4.4883, + "step": 5114 + }, + { + "epoch": 0.49951171875, + "grad_norm": 0.18493016064167023, + "learning_rate": 0.00028060384742013085, + "loss": 4.5195, + "step": 5115 + }, + { + "epoch": 0.499609375, + "grad_norm": 0.18027563393115997, + "learning_rate": 0.000280533813544282, + "loss": 
4.5156, + "step": 5116 + }, + { + "epoch": 0.49970703125, + "grad_norm": 0.18407124280929565, + "learning_rate": 0.00028046377913196733, + "loss": 4.4883, + "step": 5117 + }, + { + "epoch": 0.4998046875, + "grad_norm": 0.1872275024652481, + "learning_rate": 0.00028039374418997655, + "loss": 4.4688, + "step": 5118 + }, + { + "epoch": 0.49990234375, + "grad_norm": 0.18981114029884338, + "learning_rate": 0.00028032370872509876, + "loss": 4.5, + "step": 5119 + }, + { + "epoch": 0.5, + "grad_norm": 0.1960623562335968, + "learning_rate": 0.00028025367274412354, + "loss": 4.4766, + "step": 5120 + }, + { + "epoch": 0.50009765625, + "grad_norm": 0.1847204715013504, + "learning_rate": 0.0002801836362538404, + "loss": 4.4805, + "step": 5121 + }, + { + "epoch": 0.5001953125, + "grad_norm": 0.1973002403974533, + "learning_rate": 0.0002801135992610389, + "loss": 4.5234, + "step": 5122 + }, + { + "epoch": 0.50029296875, + "grad_norm": 0.17842809855937958, + "learning_rate": 0.0002800435617725088, + "loss": 4.5039, + "step": 5123 + }, + { + "epoch": 0.500390625, + "grad_norm": 0.18592432141304016, + "learning_rate": 0.0002799735237950395, + "loss": 4.4961, + "step": 5124 + }, + { + "epoch": 0.50048828125, + "grad_norm": 0.18839935958385468, + "learning_rate": 0.0002799034853354208, + "loss": 4.4922, + "step": 5125 + }, + { + "epoch": 0.5005859375, + "grad_norm": 0.21128085255622864, + "learning_rate": 0.0002798334464004425, + "loss": 4.5156, + "step": 5126 + }, + { + "epoch": 0.50068359375, + "grad_norm": 0.22892269492149353, + "learning_rate": 0.0002797634069968944, + "loss": 4.4805, + "step": 5127 + }, + { + "epoch": 0.50078125, + "grad_norm": 0.22451509535312653, + "learning_rate": 0.00027969336713156627, + "loss": 4.5117, + "step": 5128 + }, + { + "epoch": 0.50087890625, + "grad_norm": 0.21445539593696594, + "learning_rate": 0.0002796233268112481, + "loss": 4.4961, + "step": 5129 + }, + { + "epoch": 0.5009765625, + "grad_norm": 0.175946444272995, + "learning_rate": 0.0002795532860427298, + "loss": 4.5039, + "step": 5130 + }, + { + "epoch": 0.50107421875, + "grad_norm": 0.186112180352211, + "learning_rate": 0.00027948324483280124, + "loss": 4.5, + "step": 5131 + }, + { + "epoch": 0.501171875, + "grad_norm": 0.1986958384513855, + "learning_rate": 0.0002794132031882525, + "loss": 4.5312, + "step": 5132 + }, + { + "epoch": 0.50126953125, + "grad_norm": 0.2069779932498932, + "learning_rate": 0.0002793431611158738, + "loss": 4.5156, + "step": 5133 + }, + { + "epoch": 0.5013671875, + "grad_norm": 0.1950129270553589, + "learning_rate": 0.00027927311862245503, + "loss": 4.4648, + "step": 5134 + }, + { + "epoch": 0.50146484375, + "grad_norm": 0.18365147709846497, + "learning_rate": 0.00027920307571478644, + "loss": 4.4531, + "step": 5135 + }, + { + "epoch": 0.5015625, + "grad_norm": 0.17558416724205017, + "learning_rate": 0.0002791330323996581, + "loss": 4.5117, + "step": 5136 + }, + { + "epoch": 0.50166015625, + "grad_norm": 0.18780530989170074, + "learning_rate": 0.0002790629886838604, + "loss": 4.5273, + "step": 5137 + }, + { + "epoch": 0.5017578125, + "grad_norm": 0.1864551305770874, + "learning_rate": 0.0002789929445741835, + "loss": 4.4688, + "step": 5138 + }, + { + "epoch": 0.50185546875, + "grad_norm": 0.17951829731464386, + "learning_rate": 0.00027892290007741773, + "loss": 4.4609, + "step": 5139 + }, + { + "epoch": 0.501953125, + "grad_norm": 0.1813666969537735, + "learning_rate": 0.0002788528552003534, + "loss": 4.4922, + "step": 5140 + }, + { + "epoch": 0.50205078125, + "grad_norm": 
0.1822909116744995, + "learning_rate": 0.0002787828099497809, + "loss": 4.4922, + "step": 5141 + }, + { + "epoch": 0.5021484375, + "grad_norm": 0.1760866492986679, + "learning_rate": 0.0002787127643324907, + "loss": 4.4766, + "step": 5142 + }, + { + "epoch": 0.50224609375, + "grad_norm": 0.17687956988811493, + "learning_rate": 0.0002786427183552732, + "loss": 4.4961, + "step": 5143 + }, + { + "epoch": 0.50234375, + "grad_norm": 0.17573870718479156, + "learning_rate": 0.0002785726720249189, + "loss": 4.5, + "step": 5144 + }, + { + "epoch": 0.50244140625, + "grad_norm": 0.16743861138820648, + "learning_rate": 0.00027850262534821817, + "loss": 4.5078, + "step": 5145 + }, + { + "epoch": 0.5025390625, + "grad_norm": 0.17420105636119843, + "learning_rate": 0.00027843257833196175, + "loss": 4.5078, + "step": 5146 + }, + { + "epoch": 0.50263671875, + "grad_norm": 0.18306446075439453, + "learning_rate": 0.0002783625309829402, + "loss": 4.4922, + "step": 5147 + }, + { + "epoch": 0.502734375, + "grad_norm": 0.18245439231395721, + "learning_rate": 0.00027829248330794404, + "loss": 4.4492, + "step": 5148 + }, + { + "epoch": 0.50283203125, + "grad_norm": 0.18248721957206726, + "learning_rate": 0.000278222435313764, + "loss": 4.4844, + "step": 5149 + }, + { + "epoch": 0.5029296875, + "grad_norm": 0.19107308983802795, + "learning_rate": 0.0002781523870071907, + "loss": 4.4922, + "step": 5150 + }, + { + "epoch": 0.50302734375, + "grad_norm": 0.17651866376399994, + "learning_rate": 0.00027808233839501494, + "loss": 4.5078, + "step": 5151 + }, + { + "epoch": 0.503125, + "grad_norm": 0.18230322003364563, + "learning_rate": 0.0002780122894840273, + "loss": 4.4688, + "step": 5152 + }, + { + "epoch": 0.50322265625, + "grad_norm": 0.18481852114200592, + "learning_rate": 0.00027794224028101865, + "loss": 4.4961, + "step": 5153 + }, + { + "epoch": 0.5033203125, + "grad_norm": 0.17212416231632233, + "learning_rate": 0.0002778721907927799, + "loss": 4.4961, + "step": 5154 + }, + { + "epoch": 0.50341796875, + "grad_norm": 0.1837822049856186, + "learning_rate": 0.0002778021410261016, + "loss": 4.4805, + "step": 5155 + }, + { + "epoch": 0.503515625, + "grad_norm": 0.21511246263980865, + "learning_rate": 0.00027773209098777487, + "loss": 4.5117, + "step": 5156 + }, + { + "epoch": 0.50361328125, + "grad_norm": 0.24533307552337646, + "learning_rate": 0.0002776620406845904, + "loss": 4.5117, + "step": 5157 + }, + { + "epoch": 0.5037109375, + "grad_norm": 0.22093795239925385, + "learning_rate": 0.0002775919901233391, + "loss": 4.4961, + "step": 5158 + }, + { + "epoch": 0.50380859375, + "grad_norm": 0.19299525022506714, + "learning_rate": 0.000277521939310812, + "loss": 4.5117, + "step": 5159 + }, + { + "epoch": 0.50390625, + "grad_norm": 0.2091270387172699, + "learning_rate": 0.0002774518882538, + "loss": 4.4883, + "step": 5160 + }, + { + "epoch": 0.50400390625, + "grad_norm": 0.23386839032173157, + "learning_rate": 0.0002773818369590941, + "loss": 4.4922, + "step": 5161 + }, + { + "epoch": 0.5041015625, + "grad_norm": 0.19288483262062073, + "learning_rate": 0.00027731178543348515, + "loss": 4.5156, + "step": 5162 + }, + { + "epoch": 0.50419921875, + "grad_norm": 0.17989596724510193, + "learning_rate": 0.00027724173368376436, + "loss": 4.5195, + "step": 5163 + }, + { + "epoch": 0.504296875, + "grad_norm": 0.22145189344882965, + "learning_rate": 0.0002771716817167228, + "loss": 4.4766, + "step": 5164 + }, + { + "epoch": 0.50439453125, + "grad_norm": 0.2184356153011322, + "learning_rate": 0.00027710162953915136, + "loss": 
4.4805, + "step": 5165 + }, + { + "epoch": 0.5044921875, + "grad_norm": 0.18257763981819153, + "learning_rate": 0.0002770315771578412, + "loss": 4.4883, + "step": 5166 + }, + { + "epoch": 0.50458984375, + "grad_norm": 0.19441840052604675, + "learning_rate": 0.0002769615245795834, + "loss": 4.5, + "step": 5167 + }, + { + "epoch": 0.5046875, + "grad_norm": 0.19846747815608978, + "learning_rate": 0.0002768914718111692, + "loss": 4.5312, + "step": 5168 + }, + { + "epoch": 0.50478515625, + "grad_norm": 0.18788984417915344, + "learning_rate": 0.00027682141885938957, + "loss": 4.4922, + "step": 5169 + }, + { + "epoch": 0.5048828125, + "grad_norm": 0.17766325175762177, + "learning_rate": 0.00027675136573103573, + "loss": 4.5, + "step": 5170 + }, + { + "epoch": 0.50498046875, + "grad_norm": 0.21000684797763824, + "learning_rate": 0.0002766813124328989, + "loss": 4.5078, + "step": 5171 + }, + { + "epoch": 0.505078125, + "grad_norm": 0.19261157512664795, + "learning_rate": 0.00027661125897177026, + "loss": 4.4922, + "step": 5172 + }, + { + "epoch": 0.50517578125, + "grad_norm": 0.18371053040027618, + "learning_rate": 0.00027654120535444097, + "loss": 4.4688, + "step": 5173 + }, + { + "epoch": 0.5052734375, + "grad_norm": 0.18978209793567657, + "learning_rate": 0.0002764711515877023, + "loss": 4.4961, + "step": 5174 + }, + { + "epoch": 0.50537109375, + "grad_norm": 0.1926388144493103, + "learning_rate": 0.00027640109767834535, + "loss": 4.4883, + "step": 5175 + }, + { + "epoch": 0.50546875, + "grad_norm": 0.1666542887687683, + "learning_rate": 0.00027633104363316164, + "loss": 4.4648, + "step": 5176 + }, + { + "epoch": 0.50556640625, + "grad_norm": 0.1851154863834381, + "learning_rate": 0.00027626098945894226, + "loss": 4.5273, + "step": 5177 + }, + { + "epoch": 0.5056640625, + "grad_norm": 0.1888445019721985, + "learning_rate": 0.0002761909351624784, + "loss": 4.4805, + "step": 5178 + }, + { + "epoch": 0.50576171875, + "grad_norm": 0.19601383805274963, + "learning_rate": 0.0002761208807505616, + "loss": 4.4922, + "step": 5179 + }, + { + "epoch": 0.505859375, + "grad_norm": 0.18130053579807281, + "learning_rate": 0.00027605082622998294, + "loss": 4.4648, + "step": 5180 + }, + { + "epoch": 0.50595703125, + "grad_norm": 0.1844625025987625, + "learning_rate": 0.0002759807716075339, + "loss": 4.4961, + "step": 5181 + }, + { + "epoch": 0.5060546875, + "grad_norm": 0.19223234057426453, + "learning_rate": 0.00027591071689000556, + "loss": 4.4883, + "step": 5182 + }, + { + "epoch": 0.50615234375, + "grad_norm": 0.17916558682918549, + "learning_rate": 0.0002758406620841895, + "loss": 4.4805, + "step": 5183 + }, + { + "epoch": 0.50625, + "grad_norm": 0.20715126395225525, + "learning_rate": 0.0002757706071968769, + "loss": 4.5, + "step": 5184 + }, + { + "epoch": 0.50634765625, + "grad_norm": 0.21150320768356323, + "learning_rate": 0.0002757005522348592, + "loss": 4.5039, + "step": 5185 + }, + { + "epoch": 0.5064453125, + "grad_norm": 0.1874847263097763, + "learning_rate": 0.00027563049720492774, + "loss": 4.4766, + "step": 5186 + }, + { + "epoch": 0.50654296875, + "grad_norm": 0.20165324211120605, + "learning_rate": 0.00027556044211387383, + "loss": 4.4844, + "step": 5187 + }, + { + "epoch": 0.506640625, + "grad_norm": 0.22891569137573242, + "learning_rate": 0.0002754903869684888, + "loss": 4.4922, + "step": 5188 + }, + { + "epoch": 0.50673828125, + "grad_norm": 0.20502299070358276, + "learning_rate": 0.0002754203317755642, + "loss": 4.4805, + "step": 5189 + }, + { + "epoch": 0.5068359375, + "grad_norm": 
0.17352667450904846, + "learning_rate": 0.0002753502765418912, + "loss": 4.4883, + "step": 5190 + }, + { + "epoch": 0.50693359375, + "grad_norm": 0.18285579979419708, + "learning_rate": 0.0002752802212742613, + "loss": 4.5156, + "step": 5191 + }, + { + "epoch": 0.50703125, + "grad_norm": 0.20583832263946533, + "learning_rate": 0.0002752101659794658, + "loss": 4.4844, + "step": 5192 + }, + { + "epoch": 0.50712890625, + "grad_norm": 0.19565951824188232, + "learning_rate": 0.0002751401106642963, + "loss": 4.4961, + "step": 5193 + }, + { + "epoch": 0.5072265625, + "grad_norm": 0.1777915209531784, + "learning_rate": 0.0002750700553355438, + "loss": 4.5156, + "step": 5194 + }, + { + "epoch": 0.50732421875, + "grad_norm": 0.1722606122493744, + "learning_rate": 0.000275, + "loss": 4.5078, + "step": 5195 + }, + { + "epoch": 0.507421875, + "grad_norm": 0.17648378014564514, + "learning_rate": 0.00027492994466445623, + "loss": 4.4844, + "step": 5196 + }, + { + "epoch": 0.50751953125, + "grad_norm": 0.17650534212589264, + "learning_rate": 0.00027485988933570384, + "loss": 4.4961, + "step": 5197 + }, + { + "epoch": 0.5076171875, + "grad_norm": 0.19620700180530548, + "learning_rate": 0.00027478983402053417, + "loss": 4.5234, + "step": 5198 + }, + { + "epoch": 0.50771484375, + "grad_norm": 0.17936953902244568, + "learning_rate": 0.0002747197787257387, + "loss": 4.5, + "step": 5199 + }, + { + "epoch": 0.5078125, + "grad_norm": 0.18679946660995483, + "learning_rate": 0.0002746497234581089, + "loss": 4.4688, + "step": 5200 + }, + { + "epoch": 0.50791015625, + "grad_norm": 0.17426206171512604, + "learning_rate": 0.0002745796682244359, + "loss": 4.5117, + "step": 5201 + }, + { + "epoch": 0.5080078125, + "grad_norm": 0.19284945726394653, + "learning_rate": 0.0002745096130315112, + "loss": 4.5234, + "step": 5202 + }, + { + "epoch": 0.50810546875, + "grad_norm": 0.18751260638237, + "learning_rate": 0.00027443955788612626, + "loss": 4.4883, + "step": 5203 + }, + { + "epoch": 0.508203125, + "grad_norm": 0.24371646344661713, + "learning_rate": 0.00027436950279507234, + "loss": 4.5156, + "step": 5204 + }, + { + "epoch": 0.50830078125, + "grad_norm": 0.19178014993667603, + "learning_rate": 0.0002742994477651408, + "loss": 4.5078, + "step": 5205 + }, + { + "epoch": 0.5083984375, + "grad_norm": 0.1928720623254776, + "learning_rate": 0.0002742293928031231, + "loss": 4.5195, + "step": 5206 + }, + { + "epoch": 0.50849609375, + "grad_norm": 0.18713925778865814, + "learning_rate": 0.00027415933791581057, + "loss": 4.5078, + "step": 5207 + }, + { + "epoch": 0.50859375, + "grad_norm": 0.1960446536540985, + "learning_rate": 0.0002740892831099945, + "loss": 4.4844, + "step": 5208 + }, + { + "epoch": 0.50869140625, + "grad_norm": 0.18820494413375854, + "learning_rate": 0.00027401922839246626, + "loss": 4.4648, + "step": 5209 + }, + { + "epoch": 0.5087890625, + "grad_norm": 0.18190160393714905, + "learning_rate": 0.0002739491737700171, + "loss": 4.4961, + "step": 5210 + }, + { + "epoch": 0.50888671875, + "grad_norm": 0.2039182037115097, + "learning_rate": 0.0002738791192494385, + "loss": 4.4805, + "step": 5211 + }, + { + "epoch": 0.508984375, + "grad_norm": 0.2023007720708847, + "learning_rate": 0.0002738090648375216, + "loss": 4.5039, + "step": 5212 + }, + { + "epoch": 0.50908203125, + "grad_norm": 0.18021808564662933, + "learning_rate": 0.00027373901054105783, + "loss": 4.4805, + "step": 5213 + }, + { + "epoch": 0.5091796875, + "grad_norm": 0.19917525351047516, + "learning_rate": 0.0002736689563668384, + "loss": 4.4883, + 
"step": 5214 + }, + { + "epoch": 0.50927734375, + "grad_norm": 0.18239237368106842, + "learning_rate": 0.00027359890232165463, + "loss": 4.5117, + "step": 5215 + }, + { + "epoch": 0.509375, + "grad_norm": 0.20748265087604523, + "learning_rate": 0.0002735288484122978, + "loss": 4.4805, + "step": 5216 + }, + { + "epoch": 0.50947265625, + "grad_norm": 0.18883591890335083, + "learning_rate": 0.0002734587946455592, + "loss": 4.4961, + "step": 5217 + }, + { + "epoch": 0.5095703125, + "grad_norm": 0.17599162459373474, + "learning_rate": 0.0002733887410282299, + "loss": 4.4961, + "step": 5218 + }, + { + "epoch": 0.50966796875, + "grad_norm": 0.17728304862976074, + "learning_rate": 0.0002733186875671012, + "loss": 4.4805, + "step": 5219 + }, + { + "epoch": 0.509765625, + "grad_norm": 0.18131519854068756, + "learning_rate": 0.00027324863426896436, + "loss": 4.5117, + "step": 5220 + }, + { + "epoch": 0.50986328125, + "grad_norm": 0.18682228028774261, + "learning_rate": 0.0002731785811406105, + "loss": 4.4883, + "step": 5221 + }, + { + "epoch": 0.5099609375, + "grad_norm": 0.18427328765392303, + "learning_rate": 0.0002731085281888309, + "loss": 4.4883, + "step": 5222 + }, + { + "epoch": 0.51005859375, + "grad_norm": 0.17454998195171356, + "learning_rate": 0.0002730384754204166, + "loss": 4.4492, + "step": 5223 + }, + { + "epoch": 0.51015625, + "grad_norm": 0.1735469102859497, + "learning_rate": 0.00027296842284215875, + "loss": 4.5156, + "step": 5224 + }, + { + "epoch": 0.51025390625, + "grad_norm": 0.18684585392475128, + "learning_rate": 0.0002728983704608488, + "loss": 4.4805, + "step": 5225 + }, + { + "epoch": 0.5103515625, + "grad_norm": 0.19979646801948547, + "learning_rate": 0.00027282831828327725, + "loss": 4.5078, + "step": 5226 + }, + { + "epoch": 0.51044921875, + "grad_norm": 0.18240046501159668, + "learning_rate": 0.0002727582663162356, + "loss": 4.4531, + "step": 5227 + }, + { + "epoch": 0.510546875, + "grad_norm": 0.16831064224243164, + "learning_rate": 0.0002726882145665149, + "loss": 4.5273, + "step": 5228 + }, + { + "epoch": 0.51064453125, + "grad_norm": 0.19102627038955688, + "learning_rate": 0.000272618163040906, + "loss": 4.5078, + "step": 5229 + }, + { + "epoch": 0.5107421875, + "grad_norm": 0.20268750190734863, + "learning_rate": 0.00027254811174620003, + "loss": 4.5117, + "step": 5230 + }, + { + "epoch": 0.51083984375, + "grad_norm": 0.2005862444639206, + "learning_rate": 0.0002724780606891881, + "loss": 4.4961, + "step": 5231 + }, + { + "epoch": 0.5109375, + "grad_norm": 0.1856452226638794, + "learning_rate": 0.00027240800987666093, + "loss": 4.4883, + "step": 5232 + }, + { + "epoch": 0.51103515625, + "grad_norm": 0.17918166518211365, + "learning_rate": 0.00027233795931540976, + "loss": 4.4805, + "step": 5233 + }, + { + "epoch": 0.5111328125, + "grad_norm": 0.18040117621421814, + "learning_rate": 0.0002722679090122253, + "loss": 4.5039, + "step": 5234 + }, + { + "epoch": 0.51123046875, + "grad_norm": 0.17153236269950867, + "learning_rate": 0.0002721978589738985, + "loss": 4.5195, + "step": 5235 + }, + { + "epoch": 0.511328125, + "grad_norm": 0.1933881938457489, + "learning_rate": 0.0002721278092072202, + "loss": 4.4805, + "step": 5236 + }, + { + "epoch": 0.51142578125, + "grad_norm": 0.17588213086128235, + "learning_rate": 0.00027205775971898133, + "loss": 4.5234, + "step": 5237 + }, + { + "epoch": 0.5115234375, + "grad_norm": 0.18287554383277893, + "learning_rate": 0.00027198771051597273, + "loss": 4.4961, + "step": 5238 + }, + { + "epoch": 0.51162109375, + "grad_norm": 
0.17986971139907837, + "learning_rate": 0.0002719176616049851, + "loss": 4.4961, + "step": 5239 + }, + { + "epoch": 0.51171875, + "grad_norm": 0.1862819939851761, + "learning_rate": 0.0002718476129928093, + "loss": 4.5117, + "step": 5240 + }, + { + "epoch": 0.51181640625, + "grad_norm": 0.17365388572216034, + "learning_rate": 0.000271777564686236, + "loss": 4.4727, + "step": 5241 + }, + { + "epoch": 0.5119140625, + "grad_norm": 0.18187285959720612, + "learning_rate": 0.000271707516692056, + "loss": 4.4805, + "step": 5242 + }, + { + "epoch": 0.51201171875, + "grad_norm": 0.17607340216636658, + "learning_rate": 0.00027163746901705986, + "loss": 4.5078, + "step": 5243 + }, + { + "epoch": 0.512109375, + "grad_norm": 0.18226300179958344, + "learning_rate": 0.0002715674216680383, + "loss": 4.5039, + "step": 5244 + }, + { + "epoch": 0.51220703125, + "grad_norm": 0.200672909617424, + "learning_rate": 0.00027149737465178187, + "loss": 4.5, + "step": 5245 + }, + { + "epoch": 0.5123046875, + "grad_norm": 0.19778978824615479, + "learning_rate": 0.0002714273279750812, + "loss": 4.4805, + "step": 5246 + }, + { + "epoch": 0.51240234375, + "grad_norm": 0.1803443282842636, + "learning_rate": 0.00027135728164472687, + "loss": 4.4922, + "step": 5247 + }, + { + "epoch": 0.5125, + "grad_norm": 0.17599613964557648, + "learning_rate": 0.00027128723566750937, + "loss": 4.457, + "step": 5248 + }, + { + "epoch": 0.51259765625, + "grad_norm": 0.1824713498353958, + "learning_rate": 0.000271217190050219, + "loss": 4.4727, + "step": 5249 + }, + { + "epoch": 0.5126953125, + "grad_norm": 0.17454794049263, + "learning_rate": 0.0002711471447996466, + "loss": 4.4766, + "step": 5250 + }, + { + "epoch": 0.51279296875, + "grad_norm": 0.19181416928768158, + "learning_rate": 0.00027107709992258235, + "loss": 4.4883, + "step": 5251 + }, + { + "epoch": 0.512890625, + "grad_norm": 0.1915457546710968, + "learning_rate": 0.0002710070554258165, + "loss": 4.457, + "step": 5252 + }, + { + "epoch": 0.51298828125, + "grad_norm": 0.17270402610301971, + "learning_rate": 0.00027093701131613966, + "loss": 4.4961, + "step": 5253 + }, + { + "epoch": 0.5130859375, + "grad_norm": 0.1673598736524582, + "learning_rate": 0.00027086696760034195, + "loss": 4.5039, + "step": 5254 + }, + { + "epoch": 0.51318359375, + "grad_norm": 0.17049787938594818, + "learning_rate": 0.0002707969242852137, + "loss": 4.4922, + "step": 5255 + }, + { + "epoch": 0.51328125, + "grad_norm": 0.17347605526447296, + "learning_rate": 0.00027072688137754505, + "loss": 4.4844, + "step": 5256 + }, + { + "epoch": 0.51337890625, + "grad_norm": 0.18219561874866486, + "learning_rate": 0.00027065683888412626, + "loss": 4.5, + "step": 5257 + }, + { + "epoch": 0.5134765625, + "grad_norm": 0.201579749584198, + "learning_rate": 0.00027058679681174746, + "loss": 4.4688, + "step": 5258 + }, + { + "epoch": 0.51357421875, + "grad_norm": 0.22213754057884216, + "learning_rate": 0.0002705167551671988, + "loss": 4.4922, + "step": 5259 + }, + { + "epoch": 0.513671875, + "grad_norm": 0.2015470713376999, + "learning_rate": 0.00027044671395727034, + "loss": 4.5, + "step": 5260 + }, + { + "epoch": 0.51376953125, + "grad_norm": 0.18569591641426086, + "learning_rate": 0.00027037667318875194, + "loss": 4.4727, + "step": 5261 + }, + { + "epoch": 0.5138671875, + "grad_norm": 0.17342926561832428, + "learning_rate": 0.00027030663286843376, + "loss": 4.5, + "step": 5262 + }, + { + "epoch": 0.51396484375, + "grad_norm": 0.18790604174137115, + "learning_rate": 0.0002702365930031057, + "loss": 4.4805, + "step": 
5263 + }, + { + "epoch": 0.5140625, + "grad_norm": 0.20070628821849823, + "learning_rate": 0.00027016655359955754, + "loss": 4.4688, + "step": 5264 + }, + { + "epoch": 0.51416015625, + "grad_norm": 0.19139105081558228, + "learning_rate": 0.00027009651466457923, + "loss": 4.4961, + "step": 5265 + }, + { + "epoch": 0.5142578125, + "grad_norm": 0.1749289184808731, + "learning_rate": 0.0002700264762049606, + "loss": 4.5391, + "step": 5266 + }, + { + "epoch": 0.51435546875, + "grad_norm": 0.2187204658985138, + "learning_rate": 0.0002699564382274913, + "loss": 4.5078, + "step": 5267 + }, + { + "epoch": 0.514453125, + "grad_norm": 0.2159593105316162, + "learning_rate": 0.0002698864007389611, + "loss": 4.5078, + "step": 5268 + }, + { + "epoch": 0.51455078125, + "grad_norm": 0.18555203080177307, + "learning_rate": 0.00026981636374615964, + "loss": 4.4961, + "step": 5269 + }, + { + "epoch": 0.5146484375, + "grad_norm": 0.18940569460391998, + "learning_rate": 0.0002697463272558765, + "loss": 4.4609, + "step": 5270 + }, + { + "epoch": 0.51474609375, + "grad_norm": 0.23195992410182953, + "learning_rate": 0.0002696762912749013, + "loss": 4.4805, + "step": 5271 + }, + { + "epoch": 0.51484375, + "grad_norm": 0.19556456804275513, + "learning_rate": 0.00026960625581002353, + "loss": 4.4766, + "step": 5272 + }, + { + "epoch": 0.51494140625, + "grad_norm": 0.190942645072937, + "learning_rate": 0.0002695362208680327, + "loss": 4.5078, + "step": 5273 + }, + { + "epoch": 0.5150390625, + "grad_norm": 0.2398018091917038, + "learning_rate": 0.00026946618645571805, + "loss": 4.4844, + "step": 5274 + }, + { + "epoch": 0.51513671875, + "grad_norm": 0.20809948444366455, + "learning_rate": 0.0002693961525798692, + "loss": 4.5195, + "step": 5275 + }, + { + "epoch": 0.515234375, + "grad_norm": 0.17319625616073608, + "learning_rate": 0.00026932611924727527, + "loss": 4.5039, + "step": 5276 + }, + { + "epoch": 0.51533203125, + "grad_norm": 0.2188679724931717, + "learning_rate": 0.00026925608646472557, + "loss": 4.5039, + "step": 5277 + }, + { + "epoch": 0.5154296875, + "grad_norm": 0.17676736414432526, + "learning_rate": 0.00026918605423900926, + "loss": 4.4844, + "step": 5278 + }, + { + "epoch": 0.51552734375, + "grad_norm": 0.1954302340745926, + "learning_rate": 0.00026911602257691556, + "loss": 4.5117, + "step": 5279 + }, + { + "epoch": 0.515625, + "grad_norm": 0.2011948674917221, + "learning_rate": 0.0002690459914852336, + "loss": 4.4883, + "step": 5280 + }, + { + "epoch": 0.51572265625, + "grad_norm": 0.16826267540454865, + "learning_rate": 0.00026897596097075227, + "loss": 4.4805, + "step": 5281 + }, + { + "epoch": 0.5158203125, + "grad_norm": 0.182972714304924, + "learning_rate": 0.0002689059310402606, + "loss": 4.5234, + "step": 5282 + }, + { + "epoch": 0.51591796875, + "grad_norm": 0.19413644075393677, + "learning_rate": 0.00026883590170054765, + "loss": 4.4805, + "step": 5283 + }, + { + "epoch": 0.516015625, + "grad_norm": 0.16497351229190826, + "learning_rate": 0.00026876587295840225, + "loss": 4.4961, + "step": 5284 + }, + { + "epoch": 0.51611328125, + "grad_norm": 0.18416735529899597, + "learning_rate": 0.0002686958448206131, + "loss": 4.5195, + "step": 5285 + }, + { + "epoch": 0.5162109375, + "grad_norm": 0.1827724725008011, + "learning_rate": 0.000268625817293969, + "loss": 4.4961, + "step": 5286 + }, + { + "epoch": 0.51630859375, + "grad_norm": 0.16597582399845123, + "learning_rate": 0.00026855579038525856, + "loss": 4.5039, + "step": 5287 + }, + { + "epoch": 0.51640625, + "grad_norm": 0.19028228521347046, + 
"learning_rate": 0.00026848576410127065, + "loss": 4.4883, + "step": 5288 + }, + { + "epoch": 0.51650390625, + "grad_norm": 0.16532675921916962, + "learning_rate": 0.0002684157384487936, + "loss": 4.5, + "step": 5289 + }, + { + "epoch": 0.5166015625, + "grad_norm": 0.18111036717891693, + "learning_rate": 0.000268345713434616, + "loss": 4.4883, + "step": 5290 + }, + { + "epoch": 0.51669921875, + "grad_norm": 0.1823853701353073, + "learning_rate": 0.0002682756890655264, + "loss": 4.5039, + "step": 5291 + }, + { + "epoch": 0.516796875, + "grad_norm": 0.1765025556087494, + "learning_rate": 0.00026820566534831316, + "loss": 4.4883, + "step": 5292 + }, + { + "epoch": 0.51689453125, + "grad_norm": 0.18323221802711487, + "learning_rate": 0.00026813564228976447, + "loss": 4.4805, + "step": 5293 + }, + { + "epoch": 0.5169921875, + "grad_norm": 0.1626303791999817, + "learning_rate": 0.0002680656198966686, + "loss": 4.4844, + "step": 5294 + }, + { + "epoch": 0.51708984375, + "grad_norm": 0.17424529790878296, + "learning_rate": 0.0002679955981758139, + "loss": 4.4375, + "step": 5295 + }, + { + "epoch": 0.5171875, + "grad_norm": 0.17967253923416138, + "learning_rate": 0.00026792557713398835, + "loss": 4.5, + "step": 5296 + }, + { + "epoch": 0.51728515625, + "grad_norm": 0.17166011035442352, + "learning_rate": 0.0002678555567779801, + "loss": 4.4922, + "step": 5297 + }, + { + "epoch": 0.5173828125, + "grad_norm": 0.1844809651374817, + "learning_rate": 0.0002677855371145772, + "loss": 4.5078, + "step": 5298 + }, + { + "epoch": 0.51748046875, + "grad_norm": 0.17497271299362183, + "learning_rate": 0.0002677155181505675, + "loss": 4.5, + "step": 5299 + }, + { + "epoch": 0.517578125, + "grad_norm": 0.20287856459617615, + "learning_rate": 0.00026764549989273886, + "loss": 4.5078, + "step": 5300 + }, + { + "epoch": 0.51767578125, + "grad_norm": 0.1792289763689041, + "learning_rate": 0.0002675754823478791, + "loss": 4.5234, + "step": 5301 + }, + { + "epoch": 0.5177734375, + "grad_norm": 0.21071858704090118, + "learning_rate": 0.00026750546552277595, + "loss": 4.5156, + "step": 5302 + }, + { + "epoch": 0.51787109375, + "grad_norm": 0.21535851061344147, + "learning_rate": 0.000267435449424217, + "loss": 4.5078, + "step": 5303 + }, + { + "epoch": 0.51796875, + "grad_norm": 0.19862309098243713, + "learning_rate": 0.0002673654340589899, + "loss": 4.4766, + "step": 5304 + }, + { + "epoch": 0.51806640625, + "grad_norm": 0.1771143227815628, + "learning_rate": 0.0002672954194338822, + "loss": 4.5078, + "step": 5305 + }, + { + "epoch": 0.5181640625, + "grad_norm": 0.18325309455394745, + "learning_rate": 0.0002672254055556812, + "loss": 4.5156, + "step": 5306 + }, + { + "epoch": 0.51826171875, + "grad_norm": 0.1873893439769745, + "learning_rate": 0.0002671553924311744, + "loss": 4.4648, + "step": 5307 + }, + { + "epoch": 0.518359375, + "grad_norm": 0.1757688820362091, + "learning_rate": 0.0002670853800671491, + "loss": 4.5, + "step": 5308 + }, + { + "epoch": 0.51845703125, + "grad_norm": 0.1792537122964859, + "learning_rate": 0.00026701536847039255, + "loss": 4.5195, + "step": 5309 + }, + { + "epoch": 0.5185546875, + "grad_norm": 0.17456789314746857, + "learning_rate": 0.0002669453576476917, + "loss": 4.4844, + "step": 5310 + }, + { + "epoch": 0.51865234375, + "grad_norm": 0.18291442096233368, + "learning_rate": 0.0002668753476058339, + "loss": 4.5117, + "step": 5311 + }, + { + "epoch": 0.51875, + "grad_norm": 0.17110617458820343, + "learning_rate": 0.00026680533835160585, + "loss": 4.5, + "step": 5312 + }, + { + "epoch": 
0.51884765625, + "grad_norm": 0.1834949404001236, + "learning_rate": 0.0002667353298917947, + "loss": 4.4609, + "step": 5313 + }, + { + "epoch": 0.5189453125, + "grad_norm": 0.19689109921455383, + "learning_rate": 0.00026666532223318726, + "loss": 4.4648, + "step": 5314 + }, + { + "epoch": 0.51904296875, + "grad_norm": 0.21841907501220703, + "learning_rate": 0.00026659531538257016, + "loss": 4.5, + "step": 5315 + }, + { + "epoch": 0.519140625, + "grad_norm": 0.17851410806179047, + "learning_rate": 0.00026652530934673024, + "loss": 4.4727, + "step": 5316 + }, + { + "epoch": 0.51923828125, + "grad_norm": 0.1806097775697708, + "learning_rate": 0.00026645530413245405, + "loss": 4.4688, + "step": 5317 + }, + { + "epoch": 0.5193359375, + "grad_norm": 0.18065892159938812, + "learning_rate": 0.0002663852997465283, + "loss": 4.4648, + "step": 5318 + }, + { + "epoch": 0.51943359375, + "grad_norm": 0.19222699105739594, + "learning_rate": 0.00026631529619573915, + "loss": 4.4844, + "step": 5319 + }, + { + "epoch": 0.51953125, + "grad_norm": 0.18287140130996704, + "learning_rate": 0.00026624529348687307, + "loss": 4.5117, + "step": 5320 + }, + { + "epoch": 0.51962890625, + "grad_norm": 0.18558919429779053, + "learning_rate": 0.0002661752916267163, + "loss": 4.4688, + "step": 5321 + }, + { + "epoch": 0.5197265625, + "grad_norm": 0.18747256696224213, + "learning_rate": 0.00026610529062205516, + "loss": 4.4922, + "step": 5322 + }, + { + "epoch": 0.51982421875, + "grad_norm": 0.18445996940135956, + "learning_rate": 0.00026603529047967576, + "loss": 4.5195, + "step": 5323 + }, + { + "epoch": 0.519921875, + "grad_norm": 0.18557733297348022, + "learning_rate": 0.0002659652912063641, + "loss": 4.5156, + "step": 5324 + }, + { + "epoch": 0.52001953125, + "grad_norm": 0.18035298585891724, + "learning_rate": 0.0002658952928089061, + "loss": 4.5156, + "step": 5325 + }, + { + "epoch": 0.5201171875, + "grad_norm": 0.17564265429973602, + "learning_rate": 0.0002658252952940878, + "loss": 4.4688, + "step": 5326 + }, + { + "epoch": 0.52021484375, + "grad_norm": 0.18456104397773743, + "learning_rate": 0.00026575529866869476, + "loss": 4.5039, + "step": 5327 + }, + { + "epoch": 0.5203125, + "grad_norm": 0.17634578049182892, + "learning_rate": 0.00026568530293951276, + "loss": 4.5078, + "step": 5328 + }, + { + "epoch": 0.52041015625, + "grad_norm": 0.18599417805671692, + "learning_rate": 0.0002656153081133274, + "loss": 4.4844, + "step": 5329 + }, + { + "epoch": 0.5205078125, + "grad_norm": 0.18964865803718567, + "learning_rate": 0.00026554531419692424, + "loss": 4.4922, + "step": 5330 + }, + { + "epoch": 0.52060546875, + "grad_norm": 0.19986894726753235, + "learning_rate": 0.00026547532119708866, + "loss": 4.4766, + "step": 5331 + }, + { + "epoch": 0.520703125, + "grad_norm": 0.17711614072322845, + "learning_rate": 0.0002654053291206061, + "loss": 4.4844, + "step": 5332 + }, + { + "epoch": 0.52080078125, + "grad_norm": 0.1809254139661789, + "learning_rate": 0.0002653353379742618, + "loss": 4.4766, + "step": 5333 + }, + { + "epoch": 0.5208984375, + "grad_norm": 0.17076444625854492, + "learning_rate": 0.0002652653477648409, + "loss": 4.4766, + "step": 5334 + }, + { + "epoch": 0.52099609375, + "grad_norm": 0.17385873198509216, + "learning_rate": 0.0002651953584991284, + "loss": 4.4961, + "step": 5335 + }, + { + "epoch": 0.52109375, + "grad_norm": 0.18247109651565552, + "learning_rate": 0.00026512537018390936, + "loss": 4.4766, + "step": 5336 + }, + { + "epoch": 0.52119140625, + "grad_norm": 0.17090724408626556, + 
"learning_rate": 0.0002650553828259686, + "loss": 4.4844, + "step": 5337 + }, + { + "epoch": 0.5212890625, + "grad_norm": 0.17713536322116852, + "learning_rate": 0.00026498539643209106, + "loss": 4.4922, + "step": 5338 + }, + { + "epoch": 0.52138671875, + "grad_norm": 0.1817668229341507, + "learning_rate": 0.0002649154110090614, + "loss": 4.5, + "step": 5339 + }, + { + "epoch": 0.521484375, + "grad_norm": 0.17346107959747314, + "learning_rate": 0.00026484542656366405, + "loss": 4.4805, + "step": 5340 + }, + { + "epoch": 0.52158203125, + "grad_norm": 0.17734886705875397, + "learning_rate": 0.0002647754431026838, + "loss": 4.4688, + "step": 5341 + }, + { + "epoch": 0.5216796875, + "grad_norm": 0.1751069873571396, + "learning_rate": 0.00026470546063290493, + "loss": 4.4688, + "step": 5342 + }, + { + "epoch": 0.52177734375, + "grad_norm": 0.206977978348732, + "learning_rate": 0.0002646354791611119, + "loss": 4.4961, + "step": 5343 + }, + { + "epoch": 0.521875, + "grad_norm": 0.20093224942684174, + "learning_rate": 0.00026456549869408865, + "loss": 4.4688, + "step": 5344 + }, + { + "epoch": 0.52197265625, + "grad_norm": 0.17910388112068176, + "learning_rate": 0.0002644955192386195, + "loss": 4.4805, + "step": 5345 + }, + { + "epoch": 0.5220703125, + "grad_norm": 0.19420960545539856, + "learning_rate": 0.00026442554080148843, + "loss": 4.4727, + "step": 5346 + }, + { + "epoch": 0.52216796875, + "grad_norm": 0.22932326793670654, + "learning_rate": 0.00026435556338947946, + "loss": 4.4961, + "step": 5347 + }, + { + "epoch": 0.522265625, + "grad_norm": 0.2049339860677719, + "learning_rate": 0.00026428558700937645, + "loss": 4.4961, + "step": 5348 + }, + { + "epoch": 0.52236328125, + "grad_norm": 0.1912604719400406, + "learning_rate": 0.0002642156116679629, + "loss": 4.4883, + "step": 5349 + }, + { + "epoch": 0.5224609375, + "grad_norm": 0.22431732714176178, + "learning_rate": 0.00026414563737202275, + "loss": 4.5195, + "step": 5350 + }, + { + "epoch": 0.52255859375, + "grad_norm": 0.18836233019828796, + "learning_rate": 0.0002640756641283394, + "loss": 4.4805, + "step": 5351 + }, + { + "epoch": 0.52265625, + "grad_norm": 0.18770906329154968, + "learning_rate": 0.00026400569194369623, + "loss": 4.4805, + "step": 5352 + }, + { + "epoch": 0.52275390625, + "grad_norm": 0.21265771985054016, + "learning_rate": 0.00026393572082487654, + "loss": 4.4609, + "step": 5353 + }, + { + "epoch": 0.5228515625, + "grad_norm": 0.19437076151371002, + "learning_rate": 0.00026386575077866364, + "loss": 4.4453, + "step": 5354 + }, + { + "epoch": 0.52294921875, + "grad_norm": 0.17993590235710144, + "learning_rate": 0.0002637957818118407, + "loss": 4.4883, + "step": 5355 + }, + { + "epoch": 0.523046875, + "grad_norm": 0.19138431549072266, + "learning_rate": 0.0002637258139311906, + "loss": 4.4883, + "step": 5356 + }, + { + "epoch": 0.52314453125, + "grad_norm": 0.17315620183944702, + "learning_rate": 0.0002636558471434963, + "loss": 4.5039, + "step": 5357 + }, + { + "epoch": 0.5232421875, + "grad_norm": 0.1817198395729065, + "learning_rate": 0.00026358588145554076, + "loss": 4.4648, + "step": 5358 + }, + { + "epoch": 0.52333984375, + "grad_norm": 0.17923352122306824, + "learning_rate": 0.0002635159168741064, + "loss": 4.5, + "step": 5359 + }, + { + "epoch": 0.5234375, + "grad_norm": 0.18708573281764984, + "learning_rate": 0.0002634459534059761, + "loss": 4.4727, + "step": 5360 + }, + { + "epoch": 0.52353515625, + "grad_norm": 0.16895882785320282, + "learning_rate": 0.00026337599105793216, + "loss": 4.4922, + "step": 5361 + 
}, + { + "epoch": 0.5236328125, + "grad_norm": 0.19359655678272247, + "learning_rate": 0.00026330602983675697, + "loss": 4.4844, + "step": 5362 + }, + { + "epoch": 0.52373046875, + "grad_norm": 0.20167149603366852, + "learning_rate": 0.00026323606974923273, + "loss": 4.4961, + "step": 5363 + }, + { + "epoch": 0.523828125, + "grad_norm": 0.1867368221282959, + "learning_rate": 0.00026316611080214183, + "loss": 4.4844, + "step": 5364 + }, + { + "epoch": 0.52392578125, + "grad_norm": 0.18111465871334076, + "learning_rate": 0.000263096153002266, + "loss": 4.4766, + "step": 5365 + }, + { + "epoch": 0.5240234375, + "grad_norm": 0.19725432991981506, + "learning_rate": 0.00026302619635638745, + "loss": 4.5039, + "step": 5366 + }, + { + "epoch": 0.52412109375, + "grad_norm": 0.20007207989692688, + "learning_rate": 0.0002629562408712879, + "loss": 4.5078, + "step": 5367 + }, + { + "epoch": 0.52421875, + "grad_norm": 0.17357154190540314, + "learning_rate": 0.0002628862865537491, + "loss": 4.4688, + "step": 5368 + }, + { + "epoch": 0.52431640625, + "grad_norm": 0.19791804254055023, + "learning_rate": 0.0002628163334105526, + "loss": 4.4961, + "step": 5369 + }, + { + "epoch": 0.5244140625, + "grad_norm": 0.1882006973028183, + "learning_rate": 0.00026274638144847985, + "loss": 4.5156, + "step": 5370 + }, + { + "epoch": 0.52451171875, + "grad_norm": 0.1833990514278412, + "learning_rate": 0.0002626764306743122, + "loss": 4.4727, + "step": 5371 + }, + { + "epoch": 0.524609375, + "grad_norm": 0.20819170773029327, + "learning_rate": 0.000262606481094831, + "loss": 4.4531, + "step": 5372 + }, + { + "epoch": 0.52470703125, + "grad_norm": 0.20363543927669525, + "learning_rate": 0.00026253653271681734, + "loss": 4.4531, + "step": 5373 + }, + { + "epoch": 0.5248046875, + "grad_norm": 0.17883625626564026, + "learning_rate": 0.00026246658554705224, + "loss": 4.4922, + "step": 5374 + }, + { + "epoch": 0.52490234375, + "grad_norm": 0.1925768256187439, + "learning_rate": 0.0002623966395923166, + "loss": 4.4805, + "step": 5375 + }, + { + "epoch": 0.525, + "grad_norm": 0.1939682513475418, + "learning_rate": 0.00026232669485939117, + "loss": 4.5234, + "step": 5376 + }, + { + "epoch": 0.52509765625, + "grad_norm": 0.1816798895597458, + "learning_rate": 0.0002622567513550568, + "loss": 4.4766, + "step": 5377 + }, + { + "epoch": 0.5251953125, + "grad_norm": 0.18060623109340668, + "learning_rate": 0.00026218680908609383, + "loss": 4.4648, + "step": 5378 + }, + { + "epoch": 0.52529296875, + "grad_norm": 0.1918782740831375, + "learning_rate": 0.0002621168680592827, + "loss": 4.5039, + "step": 5379 + }, + { + "epoch": 0.525390625, + "grad_norm": 0.18466810882091522, + "learning_rate": 0.00026204692828140383, + "loss": 4.4805, + "step": 5380 + }, + { + "epoch": 0.52548828125, + "grad_norm": 0.18022669851779938, + "learning_rate": 0.00026197698975923727, + "loss": 4.4922, + "step": 5381 + }, + { + "epoch": 0.5255859375, + "grad_norm": 0.20277869701385498, + "learning_rate": 0.00026190705249956326, + "loss": 4.4844, + "step": 5382 + }, + { + "epoch": 0.52568359375, + "grad_norm": 0.17842768132686615, + "learning_rate": 0.0002618371165091616, + "loss": 4.4844, + "step": 5383 + }, + { + "epoch": 0.52578125, + "grad_norm": 0.1775490641593933, + "learning_rate": 0.0002617671817948121, + "loss": 4.4688, + "step": 5384 + }, + { + "epoch": 0.52587890625, + "grad_norm": 0.1648997664451599, + "learning_rate": 0.0002616972483632947, + "loss": 4.457, + "step": 5385 + }, + { + "epoch": 0.5259765625, + "grad_norm": 0.18560518324375153, + 
"learning_rate": 0.00026162731622138865, + "loss": 4.5039, + "step": 5386 + }, + { + "epoch": 0.52607421875, + "grad_norm": 0.16757196187973022, + "learning_rate": 0.0002615573853758735, + "loss": 4.5156, + "step": 5387 + }, + { + "epoch": 0.526171875, + "grad_norm": 0.17971962690353394, + "learning_rate": 0.00026148745583352866, + "loss": 4.4961, + "step": 5388 + }, + { + "epoch": 0.52626953125, + "grad_norm": 0.1870625913143158, + "learning_rate": 0.0002614175276011333, + "loss": 4.4844, + "step": 5389 + }, + { + "epoch": 0.5263671875, + "grad_norm": 0.16256222128868103, + "learning_rate": 0.00026134760068546636, + "loss": 4.543, + "step": 5390 + }, + { + "epoch": 0.52646484375, + "grad_norm": 0.18502026796340942, + "learning_rate": 0.00026127767509330697, + "loss": 4.4922, + "step": 5391 + }, + { + "epoch": 0.5265625, + "grad_norm": 0.18503457307815552, + "learning_rate": 0.00026120775083143376, + "loss": 4.4688, + "step": 5392 + }, + { + "epoch": 0.52666015625, + "grad_norm": 0.19337385892868042, + "learning_rate": 0.0002611378279066256, + "loss": 4.4766, + "step": 5393 + }, + { + "epoch": 0.5267578125, + "grad_norm": 0.17879340052604675, + "learning_rate": 0.00026106790632566087, + "loss": 4.4883, + "step": 5394 + }, + { + "epoch": 0.52685546875, + "grad_norm": 0.1794479340314865, + "learning_rate": 0.00026099798609531804, + "loss": 4.4805, + "step": 5395 + }, + { + "epoch": 0.526953125, + "grad_norm": 0.18700258433818817, + "learning_rate": 0.0002609280672223755, + "loss": 4.5, + "step": 5396 + }, + { + "epoch": 0.52705078125, + "grad_norm": 0.18340791761875153, + "learning_rate": 0.0002608581497136113, + "loss": 4.5, + "step": 5397 + }, + { + "epoch": 0.5271484375, + "grad_norm": 0.17617708444595337, + "learning_rate": 0.0002607882335758036, + "loss": 4.4883, + "step": 5398 + }, + { + "epoch": 0.52724609375, + "grad_norm": 0.1972651332616806, + "learning_rate": 0.00026071831881573, + "loss": 4.4727, + "step": 5399 + }, + { + "epoch": 0.52734375, + "grad_norm": 0.2142610102891922, + "learning_rate": 0.0002606484054401686, + "loss": 4.4609, + "step": 5400 + }, + { + "epoch": 0.52744140625, + "grad_norm": 0.18760408461093903, + "learning_rate": 0.0002605784934558968, + "loss": 4.4609, + "step": 5401 + }, + { + "epoch": 0.5275390625, + "grad_norm": 0.1836080551147461, + "learning_rate": 0.00026050858286969224, + "loss": 4.4688, + "step": 5402 + }, + { + "epoch": 0.52763671875, + "grad_norm": 0.17963381111621857, + "learning_rate": 0.0002604386736883322, + "loss": 4.4609, + "step": 5403 + }, + { + "epoch": 0.527734375, + "grad_norm": 0.18159978091716766, + "learning_rate": 0.000260368765918594, + "loss": 4.5, + "step": 5404 + }, + { + "epoch": 0.52783203125, + "grad_norm": 0.17452657222747803, + "learning_rate": 0.0002602988595672545, + "loss": 4.4688, + "step": 5405 + }, + { + "epoch": 0.5279296875, + "grad_norm": 0.186039999127388, + "learning_rate": 0.0002602289546410909, + "loss": 4.5, + "step": 5406 + }, + { + "epoch": 0.52802734375, + "grad_norm": 0.17764322459697723, + "learning_rate": 0.0002601590511468798, + "loss": 4.4883, + "step": 5407 + }, + { + "epoch": 0.528125, + "grad_norm": 0.1751566231250763, + "learning_rate": 0.0002600891490913981, + "loss": 4.4961, + "step": 5408 + }, + { + "epoch": 0.52822265625, + "grad_norm": 0.18665091693401337, + "learning_rate": 0.0002600192484814221, + "loss": 4.4961, + "step": 5409 + }, + { + "epoch": 0.5283203125, + "grad_norm": 0.16943365335464478, + "learning_rate": 0.0002599493493237283, + "loss": 4.5039, + "step": 5410 + }, + { + 
"epoch": 0.52841796875, + "grad_norm": 0.1814589947462082, + "learning_rate": 0.000259879451625093, + "loss": 4.4648, + "step": 5411 + }, + { + "epoch": 0.528515625, + "grad_norm": 0.16321393847465515, + "learning_rate": 0.0002598095553922921, + "loss": 4.457, + "step": 5412 + }, + { + "epoch": 0.52861328125, + "grad_norm": 0.1815808117389679, + "learning_rate": 0.0002597396606321018, + "loss": 4.4922, + "step": 5413 + }, + { + "epoch": 0.5287109375, + "grad_norm": 0.186603844165802, + "learning_rate": 0.0002596697673512978, + "loss": 4.5234, + "step": 5414 + }, + { + "epoch": 0.52880859375, + "grad_norm": 0.1950417011976242, + "learning_rate": 0.00025959987555665583, + "loss": 4.5039, + "step": 5415 + }, + { + "epoch": 0.52890625, + "grad_norm": 0.1846604347229004, + "learning_rate": 0.00025952998525495134, + "loss": 4.457, + "step": 5416 + }, + { + "epoch": 0.52900390625, + "grad_norm": 0.16703364253044128, + "learning_rate": 0.00025946009645295986, + "loss": 4.5039, + "step": 5417 + }, + { + "epoch": 0.5291015625, + "grad_norm": 0.18195843696594238, + "learning_rate": 0.00025939020915745657, + "loss": 4.5039, + "step": 5418 + }, + { + "epoch": 0.52919921875, + "grad_norm": 0.1875489503145218, + "learning_rate": 0.00025932032337521653, + "loss": 4.4844, + "step": 5419 + }, + { + "epoch": 0.529296875, + "grad_norm": 0.1738373041152954, + "learning_rate": 0.00025925043911301476, + "loss": 4.4844, + "step": 5420 + }, + { + "epoch": 0.52939453125, + "grad_norm": 0.17221756279468536, + "learning_rate": 0.00025918055637762596, + "loss": 4.4844, + "step": 5421 + }, + { + "epoch": 0.5294921875, + "grad_norm": 0.18053296208381653, + "learning_rate": 0.00025911067517582487, + "loss": 4.4844, + "step": 5422 + }, + { + "epoch": 0.52958984375, + "grad_norm": 0.1783989518880844, + "learning_rate": 0.00025904079551438596, + "loss": 4.4805, + "step": 5423 + }, + { + "epoch": 0.5296875, + "grad_norm": 0.17181448638439178, + "learning_rate": 0.00025897091740008366, + "loss": 4.4688, + "step": 5424 + }, + { + "epoch": 0.52978515625, + "grad_norm": 0.17350250482559204, + "learning_rate": 0.00025890104083969205, + "loss": 4.4609, + "step": 5425 + }, + { + "epoch": 0.5298828125, + "grad_norm": 0.1867428869009018, + "learning_rate": 0.00025883116583998536, + "loss": 4.4961, + "step": 5426 + }, + { + "epoch": 0.52998046875, + "grad_norm": 0.17393869161605835, + "learning_rate": 0.00025876129240773744, + "loss": 4.5078, + "step": 5427 + }, + { + "epoch": 0.530078125, + "grad_norm": 0.17831715941429138, + "learning_rate": 0.0002586914205497219, + "loss": 4.4883, + "step": 5428 + }, + { + "epoch": 0.53017578125, + "grad_norm": 0.1924014389514923, + "learning_rate": 0.00025862155027271244, + "loss": 4.4922, + "step": 5429 + }, + { + "epoch": 0.5302734375, + "grad_norm": 0.19070589542388916, + "learning_rate": 0.00025855168158348256, + "loss": 4.5117, + "step": 5430 + }, + { + "epoch": 0.53037109375, + "grad_norm": 0.17372608184814453, + "learning_rate": 0.0002584818144888055, + "loss": 4.5156, + "step": 5431 + }, + { + "epoch": 0.53046875, + "grad_norm": 0.17236748337745667, + "learning_rate": 0.00025841194899545443, + "loss": 4.5, + "step": 5432 + }, + { + "epoch": 0.53056640625, + "grad_norm": 0.19340214133262634, + "learning_rate": 0.00025834208511020236, + "loss": 4.4609, + "step": 5433 + }, + { + "epoch": 0.5306640625, + "grad_norm": 0.20313924551010132, + "learning_rate": 0.00025827222283982204, + "loss": 4.4688, + "step": 5434 + }, + { + "epoch": 0.53076171875, + "grad_norm": 0.1949266791343689, + 
"learning_rate": 0.00025820236219108617, + "loss": 4.5, + "step": 5435 + }, + { + "epoch": 0.530859375, + "grad_norm": 0.18678082525730133, + "learning_rate": 0.0002581325031707674, + "loss": 4.4844, + "step": 5436 + }, + { + "epoch": 0.53095703125, + "grad_norm": 0.17471595108509064, + "learning_rate": 0.0002580626457856379, + "loss": 4.457, + "step": 5437 + }, + { + "epoch": 0.5310546875, + "grad_norm": 0.19691260159015656, + "learning_rate": 0.00025799279004246983, + "loss": 4.4961, + "step": 5438 + }, + { + "epoch": 0.53115234375, + "grad_norm": 0.17909899353981018, + "learning_rate": 0.0002579229359480354, + "loss": 4.4727, + "step": 5439 + }, + { + "epoch": 0.53125, + "grad_norm": 0.17090481519699097, + "learning_rate": 0.0002578530835091065, + "loss": 4.4805, + "step": 5440 + }, + { + "epoch": 0.53134765625, + "grad_norm": 0.18815244734287262, + "learning_rate": 0.00025778323273245465, + "loss": 4.4922, + "step": 5441 + }, + { + "epoch": 0.5314453125, + "grad_norm": 0.20675766468048096, + "learning_rate": 0.0002577133836248516, + "loss": 4.4492, + "step": 5442 + }, + { + "epoch": 0.53154296875, + "grad_norm": 0.20198340713977814, + "learning_rate": 0.0002576435361930687, + "loss": 4.4844, + "step": 5443 + }, + { + "epoch": 0.531640625, + "grad_norm": 0.17861050367355347, + "learning_rate": 0.0002575736904438772, + "loss": 4.5117, + "step": 5444 + }, + { + "epoch": 0.53173828125, + "grad_norm": 0.18939025700092316, + "learning_rate": 0.0002575038463840481, + "loss": 4.4883, + "step": 5445 + }, + { + "epoch": 0.5318359375, + "grad_norm": 0.18879660964012146, + "learning_rate": 0.0002574340040203523, + "loss": 4.4766, + "step": 5446 + }, + { + "epoch": 0.53193359375, + "grad_norm": 0.17825376987457275, + "learning_rate": 0.00025736416335956063, + "loss": 4.5156, + "step": 5447 + }, + { + "epoch": 0.53203125, + "grad_norm": 0.1775849610567093, + "learning_rate": 0.0002572943244084436, + "loss": 4.4766, + "step": 5448 + }, + { + "epoch": 0.53212890625, + "grad_norm": 0.19117985665798187, + "learning_rate": 0.00025722448717377166, + "loss": 4.4883, + "step": 5449 + }, + { + "epoch": 0.5322265625, + "grad_norm": 0.17805302143096924, + "learning_rate": 0.0002571546516623151, + "loss": 4.4922, + "step": 5450 + }, + { + "epoch": 0.53232421875, + "grad_norm": 0.1735052913427353, + "learning_rate": 0.00025708481788084387, + "loss": 4.4883, + "step": 5451 + }, + { + "epoch": 0.532421875, + "grad_norm": 0.18671561777591705, + "learning_rate": 0.0002570149858361281, + "loss": 4.5156, + "step": 5452 + }, + { + "epoch": 0.53251953125, + "grad_norm": 0.1867712140083313, + "learning_rate": 0.00025694515553493727, + "loss": 4.4805, + "step": 5453 + }, + { + "epoch": 0.5326171875, + "grad_norm": 0.17163370549678802, + "learning_rate": 0.0002568753269840411, + "loss": 4.4922, + "step": 5454 + }, + { + "epoch": 0.53271484375, + "grad_norm": 0.17465277016162872, + "learning_rate": 0.0002568055001902089, + "loss": 4.4336, + "step": 5455 + }, + { + "epoch": 0.5328125, + "grad_norm": 0.19107219576835632, + "learning_rate": 0.00025673567516021003, + "loss": 4.5195, + "step": 5456 + }, + { + "epoch": 0.53291015625, + "grad_norm": 0.1949634552001953, + "learning_rate": 0.00025666585190081345, + "loss": 4.4961, + "step": 5457 + }, + { + "epoch": 0.5330078125, + "grad_norm": 0.1729367971420288, + "learning_rate": 0.0002565960304187881, + "loss": 4.4805, + "step": 5458 + }, + { + "epoch": 0.53310546875, + "grad_norm": 0.1832340806722641, + "learning_rate": 0.0002565262107209027, + "loss": 4.5, + "step": 5459 + }, + 
{ + "epoch": 0.533203125, + "grad_norm": 0.20075735449790955, + "learning_rate": 0.0002564563928139258, + "loss": 4.4375, + "step": 5460 + }, + { + "epoch": 0.53330078125, + "grad_norm": 0.19012129306793213, + "learning_rate": 0.0002563865767046259, + "loss": 4.4805, + "step": 5461 + }, + { + "epoch": 0.5333984375, + "grad_norm": 0.17604577541351318, + "learning_rate": 0.0002563167623997709, + "loss": 4.4883, + "step": 5462 + }, + { + "epoch": 0.53349609375, + "grad_norm": 0.1735958456993103, + "learning_rate": 0.00025624694990612903, + "loss": 4.5234, + "step": 5463 + }, + { + "epoch": 0.53359375, + "grad_norm": 0.183329775929451, + "learning_rate": 0.00025617713923046804, + "loss": 4.5, + "step": 5464 + }, + { + "epoch": 0.53369140625, + "grad_norm": 0.17801111936569214, + "learning_rate": 0.00025610733037955563, + "loss": 4.4688, + "step": 5465 + }, + { + "epoch": 0.5337890625, + "grad_norm": 0.18499505519866943, + "learning_rate": 0.00025603752336015933, + "loss": 4.4766, + "step": 5466 + }, + { + "epoch": 0.53388671875, + "grad_norm": 0.19643664360046387, + "learning_rate": 0.0002559677181790465, + "loss": 4.4883, + "step": 5467 + }, + { + "epoch": 0.533984375, + "grad_norm": 0.1826391965150833, + "learning_rate": 0.0002558979148429842, + "loss": 4.4922, + "step": 5468 + }, + { + "epoch": 0.53408203125, + "grad_norm": 0.19470785558223724, + "learning_rate": 0.00025582811335873943, + "loss": 4.5078, + "step": 5469 + }, + { + "epoch": 0.5341796875, + "grad_norm": 0.19914329051971436, + "learning_rate": 0.00025575831373307893, + "loss": 4.5, + "step": 5470 + }, + { + "epoch": 0.53427734375, + "grad_norm": 0.231279656291008, + "learning_rate": 0.0002556885159727694, + "loss": 4.5117, + "step": 5471 + }, + { + "epoch": 0.534375, + "grad_norm": 0.19260622560977936, + "learning_rate": 0.000255618720084577, + "loss": 4.4805, + "step": 5472 + }, + { + "epoch": 0.53447265625, + "grad_norm": 0.18889853358268738, + "learning_rate": 0.00025554892607526826, + "loss": 4.4727, + "step": 5473 + }, + { + "epoch": 0.5345703125, + "grad_norm": 0.20960290729999542, + "learning_rate": 0.0002554791339516091, + "loss": 4.5039, + "step": 5474 + }, + { + "epoch": 0.53466796875, + "grad_norm": 0.195246621966362, + "learning_rate": 0.0002554093437203654, + "loss": 4.4922, + "step": 5475 + }, + { + "epoch": 0.534765625, + "grad_norm": 0.18173642456531525, + "learning_rate": 0.0002553395553883029, + "loss": 4.4727, + "step": 5476 + }, + { + "epoch": 0.53486328125, + "grad_norm": 0.18584193289279938, + "learning_rate": 0.00025526976896218703, + "loss": 4.5312, + "step": 5477 + }, + { + "epoch": 0.5349609375, + "grad_norm": 0.19247718155384064, + "learning_rate": 0.00025519998444878324, + "loss": 4.4805, + "step": 5478 + }, + { + "epoch": 0.53505859375, + "grad_norm": 0.18580877780914307, + "learning_rate": 0.00025513020185485646, + "loss": 4.4609, + "step": 5479 + }, + { + "epoch": 0.53515625, + "grad_norm": 0.19038981199264526, + "learning_rate": 0.0002550604211871718, + "loss": 4.4766, + "step": 5480 + }, + { + "epoch": 0.53525390625, + "grad_norm": 0.19222956895828247, + "learning_rate": 0.000254990642452494, + "loss": 4.4688, + "step": 5481 + }, + { + "epoch": 0.5353515625, + "grad_norm": 0.18087762594223022, + "learning_rate": 0.0002549208656575876, + "loss": 4.4688, + "step": 5482 + }, + { + "epoch": 0.53544921875, + "grad_norm": 0.18637341260910034, + "learning_rate": 0.0002548510908092169, + "loss": 4.4961, + "step": 5483 + }, + { + "epoch": 0.535546875, + "grad_norm": 0.1941346526145935, + "learning_rate": 
0.00025478131791414633, + "loss": 4.4805, + "step": 5484 + }, + { + "epoch": 0.53564453125, + "grad_norm": 0.16831877827644348, + "learning_rate": 0.0002547115469791397, + "loss": 4.4961, + "step": 5485 + }, + { + "epoch": 0.5357421875, + "grad_norm": 0.1838119775056839, + "learning_rate": 0.00025464177801096094, + "loss": 4.4883, + "step": 5486 + }, + { + "epoch": 0.53583984375, + "grad_norm": 0.18500350415706635, + "learning_rate": 0.00025457201101637363, + "loss": 4.4492, + "step": 5487 + }, + { + "epoch": 0.5359375, + "grad_norm": 0.17208628356456757, + "learning_rate": 0.0002545022460021411, + "loss": 4.4961, + "step": 5488 + }, + { + "epoch": 0.53603515625, + "grad_norm": 0.17202883958816528, + "learning_rate": 0.0002544324829750268, + "loss": 4.4766, + "step": 5489 + }, + { + "epoch": 0.5361328125, + "grad_norm": 0.1692601889371872, + "learning_rate": 0.0002543627219417936, + "loss": 4.5078, + "step": 5490 + }, + { + "epoch": 0.53623046875, + "grad_norm": 0.18366889655590057, + "learning_rate": 0.0002542929629092045, + "loss": 4.4766, + "step": 5491 + }, + { + "epoch": 0.536328125, + "grad_norm": 0.1841311752796173, + "learning_rate": 0.00025422320588402216, + "loss": 4.4805, + "step": 5492 + }, + { + "epoch": 0.53642578125, + "grad_norm": 0.17571155726909637, + "learning_rate": 0.0002541534508730089, + "loss": 4.4883, + "step": 5493 + }, + { + "epoch": 0.5365234375, + "grad_norm": 0.17711897194385529, + "learning_rate": 0.0002540836978829272, + "loss": 4.5156, + "step": 5494 + }, + { + "epoch": 0.53662109375, + "grad_norm": 0.19101794064044952, + "learning_rate": 0.0002540139469205389, + "loss": 4.4609, + "step": 5495 + }, + { + "epoch": 0.53671875, + "grad_norm": 0.23679803311824799, + "learning_rate": 0.0002539441979926061, + "loss": 4.5078, + "step": 5496 + }, + { + "epoch": 0.53681640625, + "grad_norm": 0.18012429773807526, + "learning_rate": 0.0002538744511058904, + "loss": 4.4844, + "step": 5497 + }, + { + "epoch": 0.5369140625, + "grad_norm": 0.1912495195865631, + "learning_rate": 0.00025380470626715327, + "loss": 4.4844, + "step": 5498 + }, + { + "epoch": 0.53701171875, + "grad_norm": 0.1936570554971695, + "learning_rate": 0.0002537349634831559, + "loss": 4.4805, + "step": 5499 + }, + { + "epoch": 0.537109375, + "grad_norm": 0.20833687484264374, + "learning_rate": 0.00025366522276065967, + "loss": 4.4727, + "step": 5500 + }, + { + "epoch": 0.53720703125, + "grad_norm": 0.18197932839393616, + "learning_rate": 0.0002535954841064252, + "loss": 4.5078, + "step": 5501 + }, + { + "epoch": 0.5373046875, + "grad_norm": 0.1995457112789154, + "learning_rate": 0.00025352574752721325, + "loss": 4.5508, + "step": 5502 + }, + { + "epoch": 0.53740234375, + "grad_norm": 0.18541818857192993, + "learning_rate": 0.00025345601302978444, + "loss": 4.4922, + "step": 5503 + }, + { + "epoch": 0.5375, + "grad_norm": 0.17902421951293945, + "learning_rate": 0.0002533862806208989, + "loss": 4.5039, + "step": 5504 + }, + { + "epoch": 0.53759765625, + "grad_norm": 0.18393373489379883, + "learning_rate": 0.00025331655030731676, + "loss": 4.4336, + "step": 5505 + }, + { + "epoch": 0.5376953125, + "grad_norm": 0.17354533076286316, + "learning_rate": 0.00025324682209579784, + "loss": 4.4961, + "step": 5506 + }, + { + "epoch": 0.53779296875, + "grad_norm": 0.1756397783756256, + "learning_rate": 0.00025317709599310197, + "loss": 4.5039, + "step": 5507 + }, + { + "epoch": 0.537890625, + "grad_norm": 0.19439180195331573, + "learning_rate": 0.00025310737200598843, + "loss": 4.5234, + "step": 5508 + }, + { + 
"epoch": 0.53798828125, + "grad_norm": 0.18072621524333954, + "learning_rate": 0.00025303765014121664, + "loss": 4.4922, + "step": 5509 + }, + { + "epoch": 0.5380859375, + "grad_norm": 0.18339931964874268, + "learning_rate": 0.0002529679304055456, + "loss": 4.4805, + "step": 5510 + }, + { + "epoch": 0.53818359375, + "grad_norm": 0.19344839453697205, + "learning_rate": 0.00025289821280573423, + "loss": 4.5078, + "step": 5511 + }, + { + "epoch": 0.53828125, + "grad_norm": 0.2086607664823532, + "learning_rate": 0.00025282849734854107, + "loss": 4.4922, + "step": 5512 + }, + { + "epoch": 0.53837890625, + "grad_norm": 0.1716528683900833, + "learning_rate": 0.00025275878404072466, + "loss": 4.4922, + "step": 5513 + }, + { + "epoch": 0.5384765625, + "grad_norm": 0.1828726828098297, + "learning_rate": 0.00025268907288904317, + "loss": 4.4766, + "step": 5514 + }, + { + "epoch": 0.53857421875, + "grad_norm": 0.19288578629493713, + "learning_rate": 0.0002526193639002546, + "loss": 4.5078, + "step": 5515 + }, + { + "epoch": 0.538671875, + "grad_norm": 0.18476760387420654, + "learning_rate": 0.0002525496570811168, + "loss": 4.4688, + "step": 5516 + }, + { + "epoch": 0.53876953125, + "grad_norm": 0.17774814367294312, + "learning_rate": 0.0002524799524383874, + "loss": 4.4531, + "step": 5517 + }, + { + "epoch": 0.5388671875, + "grad_norm": 0.1954430192708969, + "learning_rate": 0.0002524102499788238, + "loss": 4.4688, + "step": 5518 + }, + { + "epoch": 0.53896484375, + "grad_norm": 0.1901288777589798, + "learning_rate": 0.00025234054970918305, + "loss": 4.5117, + "step": 5519 + }, + { + "epoch": 0.5390625, + "grad_norm": 0.17280998826026917, + "learning_rate": 0.00025227085163622234, + "loss": 4.5, + "step": 5520 + }, + { + "epoch": 0.53916015625, + "grad_norm": 0.190078005194664, + "learning_rate": 0.0002522011557666982, + "loss": 4.4805, + "step": 5521 + }, + { + "epoch": 0.5392578125, + "grad_norm": 0.18211908638477325, + "learning_rate": 0.0002521314621073673, + "loss": 4.4844, + "step": 5522 + }, + { + "epoch": 0.53935546875, + "grad_norm": 0.18096239864826202, + "learning_rate": 0.000252061770664986, + "loss": 4.5, + "step": 5523 + }, + { + "epoch": 0.539453125, + "grad_norm": 0.16703933477401733, + "learning_rate": 0.0002519920814463103, + "loss": 4.5117, + "step": 5524 + }, + { + "epoch": 0.53955078125, + "grad_norm": 0.17412669956684113, + "learning_rate": 0.00025192239445809614, + "loss": 4.4922, + "step": 5525 + }, + { + "epoch": 0.5396484375, + "grad_norm": 0.17399005591869354, + "learning_rate": 0.00025185270970709924, + "loss": 4.5117, + "step": 5526 + }, + { + "epoch": 0.53974609375, + "grad_norm": 0.16459961235523224, + "learning_rate": 0.00025178302720007504, + "loss": 4.4688, + "step": 5527 + }, + { + "epoch": 0.53984375, + "grad_norm": 0.17545437812805176, + "learning_rate": 0.00025171334694377875, + "loss": 4.457, + "step": 5528 + }, + { + "epoch": 0.53994140625, + "grad_norm": 0.19680188596248627, + "learning_rate": 0.00025164366894496536, + "loss": 4.4727, + "step": 5529 + }, + { + "epoch": 0.5400390625, + "grad_norm": 0.18153060972690582, + "learning_rate": 0.00025157399321038985, + "loss": 4.4883, + "step": 5530 + }, + { + "epoch": 0.54013671875, + "grad_norm": 0.1638968586921692, + "learning_rate": 0.00025150431974680666, + "loss": 4.4805, + "step": 5531 + }, + { + "epoch": 0.540234375, + "grad_norm": 0.184491828083992, + "learning_rate": 0.0002514346485609702, + "loss": 4.4766, + "step": 5532 + }, + { + "epoch": 0.54033203125, + "grad_norm": 0.176998108625412, + "learning_rate": 
0.0002513649796596347, + "loss": 4.4727, + "step": 5533 + }, + { + "epoch": 0.5404296875, + "grad_norm": 0.17204459011554718, + "learning_rate": 0.0002512953130495539, + "loss": 4.5195, + "step": 5534 + }, + { + "epoch": 0.54052734375, + "grad_norm": 0.1721116006374359, + "learning_rate": 0.00025122564873748164, + "loss": 4.4844, + "step": 5535 + }, + { + "epoch": 0.540625, + "grad_norm": 0.19692742824554443, + "learning_rate": 0.00025115598673017145, + "loss": 4.4844, + "step": 5536 + }, + { + "epoch": 0.54072265625, + "grad_norm": 0.17269155383110046, + "learning_rate": 0.00025108632703437644, + "loss": 4.4727, + "step": 5537 + }, + { + "epoch": 0.5408203125, + "grad_norm": 0.1791267693042755, + "learning_rate": 0.0002510166696568498, + "loss": 4.4805, + "step": 5538 + }, + { + "epoch": 0.54091796875, + "grad_norm": 0.2122359275817871, + "learning_rate": 0.00025094701460434416, + "loss": 4.5234, + "step": 5539 + }, + { + "epoch": 0.541015625, + "grad_norm": 0.18318620324134827, + "learning_rate": 0.00025087736188361226, + "loss": 4.5156, + "step": 5540 + }, + { + "epoch": 0.54111328125, + "grad_norm": 0.18372513353824615, + "learning_rate": 0.00025080771150140643, + "loss": 4.4805, + "step": 5541 + }, + { + "epoch": 0.5412109375, + "grad_norm": 0.1987018585205078, + "learning_rate": 0.0002507380634644787, + "loss": 4.4961, + "step": 5542 + }, + { + "epoch": 0.54130859375, + "grad_norm": 0.18807817995548248, + "learning_rate": 0.00025066841777958117, + "loss": 4.4727, + "step": 5543 + }, + { + "epoch": 0.54140625, + "grad_norm": 0.16611264646053314, + "learning_rate": 0.0002505987744534653, + "loss": 4.4922, + "step": 5544 + }, + { + "epoch": 0.54150390625, + "grad_norm": 0.19445329904556274, + "learning_rate": 0.0002505291334928827, + "loss": 4.4609, + "step": 5545 + }, + { + "epoch": 0.5416015625, + "grad_norm": 0.1702830046415329, + "learning_rate": 0.00025045949490458446, + "loss": 4.4727, + "step": 5546 + }, + { + "epoch": 0.54169921875, + "grad_norm": 0.19100013375282288, + "learning_rate": 0.00025038985869532167, + "loss": 4.4805, + "step": 5547 + }, + { + "epoch": 0.541796875, + "grad_norm": 0.17713676393032074, + "learning_rate": 0.000250320224871845, + "loss": 4.5078, + "step": 5548 + }, + { + "epoch": 0.54189453125, + "grad_norm": 0.18444709479808807, + "learning_rate": 0.00025025059344090507, + "loss": 4.4883, + "step": 5549 + }, + { + "epoch": 0.5419921875, + "grad_norm": 0.16927555203437805, + "learning_rate": 0.0002501809644092521, + "loss": 4.4805, + "step": 5550 + }, + { + "epoch": 0.54208984375, + "grad_norm": 0.17560121417045593, + "learning_rate": 0.0002501113377836362, + "loss": 4.4609, + "step": 5551 + }, + { + "epoch": 0.5421875, + "grad_norm": 0.1909835934638977, + "learning_rate": 0.0002500417135708072, + "loss": 4.4883, + "step": 5552 + }, + { + "epoch": 0.54228515625, + "grad_norm": 0.17962850630283356, + "learning_rate": 0.00024997209177751473, + "loss": 4.4609, + "step": 5553 + }, + { + "epoch": 0.5423828125, + "grad_norm": 0.18652521073818207, + "learning_rate": 0.000249902472410508, + "loss": 4.4609, + "step": 5554 + }, + { + "epoch": 0.54248046875, + "grad_norm": 0.2038511484861374, + "learning_rate": 0.0002498328554765363, + "loss": 4.4648, + "step": 5555 + }, + { + "epoch": 0.542578125, + "grad_norm": 0.18224450945854187, + "learning_rate": 0.00024976324098234835, + "loss": 4.4922, + "step": 5556 + }, + { + "epoch": 0.54267578125, + "grad_norm": 0.18461814522743225, + "learning_rate": 0.000249693628934693, + "loss": 4.5078, + "step": 5557 + }, + { + 
"epoch": 0.5427734375, + "grad_norm": 0.18162570893764496, + "learning_rate": 0.0002496240193403185, + "loss": 4.4492, + "step": 5558 + }, + { + "epoch": 0.54287109375, + "grad_norm": 0.2086452841758728, + "learning_rate": 0.0002495544122059732, + "loss": 4.4688, + "step": 5559 + }, + { + "epoch": 0.54296875, + "grad_norm": 0.180499866604805, + "learning_rate": 0.00024948480753840486, + "loss": 4.4844, + "step": 5560 + }, + { + "epoch": 0.54306640625, + "grad_norm": 0.19053250551223755, + "learning_rate": 0.00024941520534436126, + "loss": 4.4688, + "step": 5561 + }, + { + "epoch": 0.5431640625, + "grad_norm": 0.1879425197839737, + "learning_rate": 0.00024934560563058994, + "loss": 4.457, + "step": 5562 + }, + { + "epoch": 0.54326171875, + "grad_norm": 0.17621012032032013, + "learning_rate": 0.00024927600840383795, + "loss": 4.4805, + "step": 5563 + }, + { + "epoch": 0.543359375, + "grad_norm": 0.17588970065116882, + "learning_rate": 0.0002492064136708524, + "loss": 4.4922, + "step": 5564 + }, + { + "epoch": 0.54345703125, + "grad_norm": 0.1734452247619629, + "learning_rate": 0.0002491368214383799, + "loss": 4.5, + "step": 5565 + }, + { + "epoch": 0.5435546875, + "grad_norm": 0.17054376006126404, + "learning_rate": 0.0002490672317131671, + "loss": 4.457, + "step": 5566 + }, + { + "epoch": 0.54365234375, + "grad_norm": 0.16873033344745636, + "learning_rate": 0.00024899764450196014, + "loss": 4.4414, + "step": 5567 + }, + { + "epoch": 0.54375, + "grad_norm": 0.17004366219043732, + "learning_rate": 0.00024892805981150507, + "loss": 4.5, + "step": 5568 + }, + { + "epoch": 0.54384765625, + "grad_norm": 0.1837015300989151, + "learning_rate": 0.0002488584776485477, + "loss": 4.4727, + "step": 5569 + }, + { + "epoch": 0.5439453125, + "grad_norm": 0.1775113344192505, + "learning_rate": 0.0002487888980198336, + "loss": 4.4805, + "step": 5570 + }, + { + "epoch": 0.54404296875, + "grad_norm": 0.17413252592086792, + "learning_rate": 0.00024871932093210777, + "loss": 4.4766, + "step": 5571 + }, + { + "epoch": 0.544140625, + "grad_norm": 0.16979488730430603, + "learning_rate": 0.0002486497463921154, + "loss": 4.4609, + "step": 5572 + }, + { + "epoch": 0.54423828125, + "grad_norm": 0.17691245675086975, + "learning_rate": 0.0002485801744066013, + "loss": 4.4648, + "step": 5573 + }, + { + "epoch": 0.5443359375, + "grad_norm": 0.1620752513408661, + "learning_rate": 0.00024851060498231003, + "loss": 4.5117, + "step": 5574 + }, + { + "epoch": 0.54443359375, + "grad_norm": 0.1737234741449356, + "learning_rate": 0.0002484410381259858, + "loss": 4.4922, + "step": 5575 + }, + { + "epoch": 0.54453125, + "grad_norm": 0.17740893363952637, + "learning_rate": 0.00024837147384437265, + "loss": 4.5, + "step": 5576 + }, + { + "epoch": 0.54462890625, + "grad_norm": 0.16735251247882843, + "learning_rate": 0.00024830191214421436, + "loss": 4.4453, + "step": 5577 + }, + { + "epoch": 0.5447265625, + "grad_norm": 0.16231083869934082, + "learning_rate": 0.00024823235303225456, + "loss": 4.4766, + "step": 5578 + }, + { + "epoch": 0.54482421875, + "grad_norm": 0.18157412111759186, + "learning_rate": 0.00024816279651523654, + "loss": 4.4531, + "step": 5579 + }, + { + "epoch": 0.544921875, + "grad_norm": 0.18494315445423126, + "learning_rate": 0.0002480932425999031, + "loss": 4.4883, + "step": 5580 + }, + { + "epoch": 0.54501953125, + "grad_norm": 0.17149503529071808, + "learning_rate": 0.00024802369129299723, + "loss": 4.4688, + "step": 5581 + }, + { + "epoch": 0.5451171875, + "grad_norm": 0.17268569767475128, + "learning_rate": 
0.00024795414260126136, + "loss": 4.4805, + "step": 5582 + }, + { + "epoch": 0.54521484375, + "grad_norm": 0.1774284541606903, + "learning_rate": 0.0002478845965314378, + "loss": 4.4727, + "step": 5583 + }, + { + "epoch": 0.5453125, + "grad_norm": 0.20209404826164246, + "learning_rate": 0.0002478150530902686, + "loss": 4.5117, + "step": 5584 + }, + { + "epoch": 0.54541015625, + "grad_norm": 0.1795349270105362, + "learning_rate": 0.0002477455122844955, + "loss": 4.4531, + "step": 5585 + }, + { + "epoch": 0.5455078125, + "grad_norm": 0.16481177508831024, + "learning_rate": 0.00024767597412086, + "loss": 4.5, + "step": 5586 + }, + { + "epoch": 0.54560546875, + "grad_norm": 0.18211039900779724, + "learning_rate": 0.0002476064386061034, + "loss": 4.4961, + "step": 5587 + }, + { + "epoch": 0.545703125, + "grad_norm": 0.19378839433193207, + "learning_rate": 0.0002475369057469667, + "loss": 4.4883, + "step": 5588 + }, + { + "epoch": 0.54580078125, + "grad_norm": 0.18498960137367249, + "learning_rate": 0.0002474673755501905, + "loss": 4.4453, + "step": 5589 + }, + { + "epoch": 0.5458984375, + "grad_norm": 0.16753093898296356, + "learning_rate": 0.0002473978480225154, + "loss": 4.4609, + "step": 5590 + }, + { + "epoch": 0.54599609375, + "grad_norm": 0.16684801876544952, + "learning_rate": 0.00024732832317068157, + "loss": 4.4688, + "step": 5591 + }, + { + "epoch": 0.54609375, + "grad_norm": 0.17933504283428192, + "learning_rate": 0.000247258801001429, + "loss": 4.4766, + "step": 5592 + }, + { + "epoch": 0.54619140625, + "grad_norm": 0.16665203869342804, + "learning_rate": 0.0002471892815214974, + "loss": 4.4766, + "step": 5593 + }, + { + "epoch": 0.5462890625, + "grad_norm": 0.1866944581270218, + "learning_rate": 0.0002471197647376262, + "loss": 4.4648, + "step": 5594 + }, + { + "epoch": 0.54638671875, + "grad_norm": 0.178785502910614, + "learning_rate": 0.0002470502506565546, + "loss": 4.4766, + "step": 5595 + }, + { + "epoch": 0.546484375, + "grad_norm": 0.18401885032653809, + "learning_rate": 0.00024698073928502165, + "loss": 4.4648, + "step": 5596 + }, + { + "epoch": 0.54658203125, + "grad_norm": 0.17393521964550018, + "learning_rate": 0.00024691123062976566, + "loss": 4.4922, + "step": 5597 + }, + { + "epoch": 0.5466796875, + "grad_norm": 0.17728553712368011, + "learning_rate": 0.00024684172469752533, + "loss": 4.4883, + "step": 5598 + }, + { + "epoch": 0.54677734375, + "grad_norm": 0.1846318542957306, + "learning_rate": 0.0002467722214950386, + "loss": 4.4844, + "step": 5599 + }, + { + "epoch": 0.546875, + "grad_norm": 0.17459425330162048, + "learning_rate": 0.0002467027210290434, + "loss": 4.5078, + "step": 5600 + }, + { + "epoch": 0.54697265625, + "grad_norm": 0.19373269379138947, + "learning_rate": 0.00024663322330627734, + "loss": 4.4492, + "step": 5601 + }, + { + "epoch": 0.5470703125, + "grad_norm": 0.16646447777748108, + "learning_rate": 0.00024656372833347783, + "loss": 4.457, + "step": 5602 + }, + { + "epoch": 0.54716796875, + "grad_norm": 0.19072182476520538, + "learning_rate": 0.0002464942361173818, + "loss": 4.4844, + "step": 5603 + }, + { + "epoch": 0.547265625, + "grad_norm": 0.17966419458389282, + "learning_rate": 0.0002464247466647263, + "loss": 4.4805, + "step": 5604 + }, + { + "epoch": 0.54736328125, + "grad_norm": 0.1704295426607132, + "learning_rate": 0.00024635525998224745, + "loss": 4.4766, + "step": 5605 + }, + { + "epoch": 0.5474609375, + "grad_norm": 0.17493313550949097, + "learning_rate": 0.00024628577607668176, + "loss": 4.4844, + "step": 5606 + }, + { + "epoch": 
0.54755859375, + "grad_norm": 0.1642441749572754, + "learning_rate": 0.00024621629495476526, + "loss": 4.4883, + "step": 5607 + }, + { + "epoch": 0.54765625, + "grad_norm": 0.1701955646276474, + "learning_rate": 0.00024614681662323354, + "loss": 4.4648, + "step": 5608 + }, + { + "epoch": 0.54775390625, + "grad_norm": 0.17105169594287872, + "learning_rate": 0.0002460773410888222, + "loss": 4.4688, + "step": 5609 + }, + { + "epoch": 0.5478515625, + "grad_norm": 0.17652620375156403, + "learning_rate": 0.0002460078683582662, + "loss": 4.4414, + "step": 5610 + }, + { + "epoch": 0.54794921875, + "grad_norm": 0.16819946467876434, + "learning_rate": 0.00024593839843830076, + "loss": 4.5, + "step": 5611 + }, + { + "epoch": 0.548046875, + "grad_norm": 0.17281466722488403, + "learning_rate": 0.00024586893133566034, + "loss": 4.4688, + "step": 5612 + }, + { + "epoch": 0.54814453125, + "grad_norm": 0.1695176064968109, + "learning_rate": 0.0002457994670570792, + "loss": 4.5, + "step": 5613 + }, + { + "epoch": 0.5482421875, + "grad_norm": 0.1779690682888031, + "learning_rate": 0.00024573000560929167, + "loss": 4.4531, + "step": 5614 + }, + { + "epoch": 0.54833984375, + "grad_norm": 0.16788144409656525, + "learning_rate": 0.00024566054699903133, + "loss": 4.4805, + "step": 5615 + }, + { + "epoch": 0.5484375, + "grad_norm": 0.16321374475955963, + "learning_rate": 0.00024559109123303184, + "loss": 4.4844, + "step": 5616 + }, + { + "epoch": 0.54853515625, + "grad_norm": 0.17697018384933472, + "learning_rate": 0.0002455216383180265, + "loss": 4.4805, + "step": 5617 + }, + { + "epoch": 0.5486328125, + "grad_norm": 0.18679171800613403, + "learning_rate": 0.0002454521882607482, + "loss": 4.5039, + "step": 5618 + }, + { + "epoch": 0.54873046875, + "grad_norm": 0.18467684090137482, + "learning_rate": 0.0002453827410679298, + "loss": 4.4688, + "step": 5619 + }, + { + "epoch": 0.548828125, + "grad_norm": 0.16987988352775574, + "learning_rate": 0.0002453132967463035, + "loss": 4.4961, + "step": 5620 + }, + { + "epoch": 0.54892578125, + "grad_norm": 0.1873723566532135, + "learning_rate": 0.0002452438553026018, + "loss": 4.4727, + "step": 5621 + }, + { + "epoch": 0.5490234375, + "grad_norm": 0.21020932495594025, + "learning_rate": 0.0002451744167435563, + "loss": 4.4766, + "step": 5622 + }, + { + "epoch": 0.54912109375, + "grad_norm": 0.21463894844055176, + "learning_rate": 0.0002451049810758986, + "loss": 4.5039, + "step": 5623 + }, + { + "epoch": 0.54921875, + "grad_norm": 0.1611166149377823, + "learning_rate": 0.0002450355483063601, + "loss": 4.4805, + "step": 5624 + }, + { + "epoch": 0.54931640625, + "grad_norm": 0.20972920954227448, + "learning_rate": 0.00024496611844167183, + "loss": 4.4844, + "step": 5625 + }, + { + "epoch": 0.5494140625, + "grad_norm": 0.21136574447155, + "learning_rate": 0.0002448966914885645, + "loss": 4.4531, + "step": 5626 + }, + { + "epoch": 0.54951171875, + "grad_norm": 0.16390611231327057, + "learning_rate": 0.0002448272674537687, + "loss": 4.457, + "step": 5627 + }, + { + "epoch": 0.549609375, + "grad_norm": 0.19498378038406372, + "learning_rate": 0.00024475784634401447, + "loss": 4.4844, + "step": 5628 + }, + { + "epoch": 0.54970703125, + "grad_norm": 0.20845724642276764, + "learning_rate": 0.00024468842816603194, + "loss": 4.4883, + "step": 5629 + }, + { + "epoch": 0.5498046875, + "grad_norm": 0.1635984480381012, + "learning_rate": 0.0002446190129265504, + "loss": 4.4727, + "step": 5630 + }, + { + "epoch": 0.54990234375, + "grad_norm": 0.204924538731575, + "learning_rate": 
0.0002445496006322994, + "loss": 4.4688, + "step": 5631 + }, + { + "epoch": 0.55, + "grad_norm": 0.2058529108762741, + "learning_rate": 0.0002444801912900079, + "loss": 4.5, + "step": 5632 + }, + { + "epoch": 0.55009765625, + "grad_norm": 0.1691880077123642, + "learning_rate": 0.0002444107849064048, + "loss": 4.4609, + "step": 5633 + }, + { + "epoch": 0.5501953125, + "grad_norm": 0.20831380784511566, + "learning_rate": 0.0002443413814882184, + "loss": 4.4688, + "step": 5634 + }, + { + "epoch": 0.55029296875, + "grad_norm": 0.21838435530662537, + "learning_rate": 0.00024427198104217713, + "loss": 4.5039, + "step": 5635 + }, + { + "epoch": 0.550390625, + "grad_norm": 0.16680355370044708, + "learning_rate": 0.00024420258357500863, + "loss": 4.4883, + "step": 5636 + }, + { + "epoch": 0.55048828125, + "grad_norm": 0.2131723165512085, + "learning_rate": 0.00024413318909344067, + "loss": 4.5078, + "step": 5637 + }, + { + "epoch": 0.5505859375, + "grad_norm": 0.18839526176452637, + "learning_rate": 0.00024406379760420055, + "loss": 4.4453, + "step": 5638 + }, + { + "epoch": 0.55068359375, + "grad_norm": 0.18841411173343658, + "learning_rate": 0.00024399440911401532, + "loss": 4.5, + "step": 5639 + }, + { + "epoch": 0.55078125, + "grad_norm": 0.19388891756534576, + "learning_rate": 0.00024392502362961167, + "loss": 4.4961, + "step": 5640 + }, + { + "epoch": 0.55087890625, + "grad_norm": 0.16674251854419708, + "learning_rate": 0.00024385564115771602, + "loss": 4.4805, + "step": 5641 + }, + { + "epoch": 0.5509765625, + "grad_norm": 0.1778622716665268, + "learning_rate": 0.00024378626170505459, + "loss": 4.4883, + "step": 5642 + }, + { + "epoch": 0.55107421875, + "grad_norm": 0.16293755173683167, + "learning_rate": 0.00024371688527835322, + "loss": 4.4844, + "step": 5643 + }, + { + "epoch": 0.551171875, + "grad_norm": 0.17059065401554108, + "learning_rate": 0.00024364751188433754, + "loss": 4.457, + "step": 5644 + }, + { + "epoch": 0.55126953125, + "grad_norm": 0.181987464427948, + "learning_rate": 0.00024357814152973278, + "loss": 4.4688, + "step": 5645 + }, + { + "epoch": 0.5513671875, + "grad_norm": 0.18539194762706757, + "learning_rate": 0.000243508774221264, + "loss": 4.4688, + "step": 5646 + }, + { + "epoch": 0.55146484375, + "grad_norm": 0.17324107885360718, + "learning_rate": 0.00024343940996565577, + "loss": 4.4688, + "step": 5647 + }, + { + "epoch": 0.5515625, + "grad_norm": 0.1824297457933426, + "learning_rate": 0.00024337004876963253, + "loss": 4.4688, + "step": 5648 + }, + { + "epoch": 0.55166015625, + "grad_norm": 0.17598804831504822, + "learning_rate": 0.00024330069063991844, + "loss": 4.4609, + "step": 5649 + }, + { + "epoch": 0.5517578125, + "grad_norm": 0.1839090883731842, + "learning_rate": 0.00024323133558323723, + "loss": 4.5117, + "step": 5650 + }, + { + "epoch": 0.55185546875, + "grad_norm": 0.17058517038822174, + "learning_rate": 0.00024316198360631238, + "loss": 4.4766, + "step": 5651 + }, + { + "epoch": 0.551953125, + "grad_norm": 0.16618813574314117, + "learning_rate": 0.00024309263471586711, + "loss": 4.4961, + "step": 5652 + }, + { + "epoch": 0.55205078125, + "grad_norm": 0.176447331905365, + "learning_rate": 0.0002430232889186244, + "loss": 4.4805, + "step": 5653 + }, + { + "epoch": 0.5521484375, + "grad_norm": 0.1747579574584961, + "learning_rate": 0.00024295394622130678, + "loss": 4.4609, + "step": 5654 + }, + { + "epoch": 0.55224609375, + "grad_norm": 0.16835850477218628, + "learning_rate": 0.00024288460663063666, + "loss": 4.4961, + "step": 5655 + }, + { + "epoch": 
0.55234375, + "grad_norm": 0.16997027397155762, + "learning_rate": 0.00024281527015333588, + "loss": 4.4688, + "step": 5656 + }, + { + "epoch": 0.55244140625, + "grad_norm": 0.1790413111448288, + "learning_rate": 0.0002427459367961263, + "loss": 4.4492, + "step": 5657 + }, + { + "epoch": 0.5525390625, + "grad_norm": 0.16376307606697083, + "learning_rate": 0.00024267660656572914, + "loss": 4.4492, + "step": 5658 + }, + { + "epoch": 0.55263671875, + "grad_norm": 0.18457864224910736, + "learning_rate": 0.00024260727946886563, + "loss": 4.4609, + "step": 5659 + }, + { + "epoch": 0.552734375, + "grad_norm": 0.16611461341381073, + "learning_rate": 0.0002425379555122565, + "loss": 4.4805, + "step": 5660 + }, + { + "epoch": 0.55283203125, + "grad_norm": 0.18410734832286835, + "learning_rate": 0.00024246863470262232, + "loss": 4.4531, + "step": 5661 + }, + { + "epoch": 0.5529296875, + "grad_norm": 0.17703936994075775, + "learning_rate": 0.00024239931704668317, + "loss": 4.4688, + "step": 5662 + }, + { + "epoch": 0.55302734375, + "grad_norm": 0.18987682461738586, + "learning_rate": 0.000242330002551159, + "loss": 4.4648, + "step": 5663 + }, + { + "epoch": 0.553125, + "grad_norm": 0.17967554926872253, + "learning_rate": 0.00024226069122276934, + "loss": 4.4805, + "step": 5664 + }, + { + "epoch": 0.55322265625, + "grad_norm": 0.1868099719285965, + "learning_rate": 0.00024219138306823346, + "loss": 4.4531, + "step": 5665 + }, + { + "epoch": 0.5533203125, + "grad_norm": 0.18378861248493195, + "learning_rate": 0.00024212207809427028, + "loss": 4.4609, + "step": 5666 + }, + { + "epoch": 0.55341796875, + "grad_norm": 0.1781270056962967, + "learning_rate": 0.00024205277630759853, + "loss": 4.4766, + "step": 5667 + }, + { + "epoch": 0.553515625, + "grad_norm": 0.1760854423046112, + "learning_rate": 0.00024198347771493651, + "loss": 4.4727, + "step": 5668 + }, + { + "epoch": 0.55361328125, + "grad_norm": 0.1694212406873703, + "learning_rate": 0.0002419141823230022, + "loss": 4.4648, + "step": 5669 + }, + { + "epoch": 0.5537109375, + "grad_norm": 0.1740177720785141, + "learning_rate": 0.00024184489013851342, + "loss": 4.4688, + "step": 5670 + }, + { + "epoch": 0.55380859375, + "grad_norm": 0.16800962388515472, + "learning_rate": 0.00024177560116818753, + "loss": 4.5039, + "step": 5671 + }, + { + "epoch": 0.55390625, + "grad_norm": 0.18739859759807587, + "learning_rate": 0.0002417063154187415, + "loss": 4.4648, + "step": 5672 + }, + { + "epoch": 0.55400390625, + "grad_norm": 0.16809095442295074, + "learning_rate": 0.00024163703289689233, + "loss": 4.4609, + "step": 5673 + }, + { + "epoch": 0.5541015625, + "grad_norm": 0.184182807803154, + "learning_rate": 0.00024156775360935636, + "loss": 4.4961, + "step": 5674 + }, + { + "epoch": 0.55419921875, + "grad_norm": 0.17791728675365448, + "learning_rate": 0.0002414984775628497, + "loss": 4.4648, + "step": 5675 + }, + { + "epoch": 0.554296875, + "grad_norm": 0.17162130773067474, + "learning_rate": 0.00024142920476408832, + "loss": 4.4648, + "step": 5676 + }, + { + "epoch": 0.55439453125, + "grad_norm": 0.18037356436252594, + "learning_rate": 0.00024135993521978767, + "loss": 4.5156, + "step": 5677 + }, + { + "epoch": 0.5544921875, + "grad_norm": 0.20485949516296387, + "learning_rate": 0.00024129066893666308, + "loss": 4.4648, + "step": 5678 + }, + { + "epoch": 0.55458984375, + "grad_norm": 0.1898423731327057, + "learning_rate": 0.00024122140592142927, + "loss": 4.5, + "step": 5679 + }, + { + "epoch": 0.5546875, + "grad_norm": 0.17810586094856262, + "learning_rate": 
0.00024115214618080096, + "loss": 4.5117, + "step": 5680 + }, + { + "epoch": 0.55478515625, + "grad_norm": 0.22253607213497162, + "learning_rate": 0.00024108288972149228, + "loss": 4.4844, + "step": 5681 + }, + { + "epoch": 0.5548828125, + "grad_norm": 0.19311989843845367, + "learning_rate": 0.00024101363655021723, + "loss": 4.4609, + "step": 5682 + }, + { + "epoch": 0.55498046875, + "grad_norm": 0.18672816455364227, + "learning_rate": 0.0002409443866736895, + "loss": 4.4727, + "step": 5683 + }, + { + "epoch": 0.555078125, + "grad_norm": 0.20529106259346008, + "learning_rate": 0.00024087514009862233, + "loss": 4.4961, + "step": 5684 + }, + { + "epoch": 0.55517578125, + "grad_norm": 0.17741455137729645, + "learning_rate": 0.00024080589683172872, + "loss": 4.4766, + "step": 5685 + }, + { + "epoch": 0.5552734375, + "grad_norm": 0.18614007532596588, + "learning_rate": 0.0002407366568797213, + "loss": 4.4766, + "step": 5686 + }, + { + "epoch": 0.55537109375, + "grad_norm": 0.1974625289440155, + "learning_rate": 0.00024066742024931247, + "loss": 4.457, + "step": 5687 + }, + { + "epoch": 0.55546875, + "grad_norm": 0.18403594195842743, + "learning_rate": 0.00024059818694721437, + "loss": 4.4258, + "step": 5688 + }, + { + "epoch": 0.55556640625, + "grad_norm": 0.18801254034042358, + "learning_rate": 0.00024052895698013844, + "loss": 4.4844, + "step": 5689 + }, + { + "epoch": 0.5556640625, + "grad_norm": 0.1997949779033661, + "learning_rate": 0.00024045973035479617, + "loss": 4.4727, + "step": 5690 + }, + { + "epoch": 0.55576171875, + "grad_norm": 0.17451290786266327, + "learning_rate": 0.00024039050707789863, + "loss": 4.4375, + "step": 5691 + }, + { + "epoch": 0.555859375, + "grad_norm": 0.17943252623081207, + "learning_rate": 0.00024032128715615648, + "loss": 4.4492, + "step": 5692 + }, + { + "epoch": 0.55595703125, + "grad_norm": 0.18045178055763245, + "learning_rate": 0.00024025207059628025, + "loss": 4.5, + "step": 5693 + }, + { + "epoch": 0.5560546875, + "grad_norm": 0.16472052037715912, + "learning_rate": 0.00024018285740497986, + "loss": 4.4648, + "step": 5694 + }, + { + "epoch": 0.55615234375, + "grad_norm": 0.17328886687755585, + "learning_rate": 0.0002401136475889652, + "loss": 4.4688, + "step": 5695 + }, + { + "epoch": 0.55625, + "grad_norm": 0.17597556114196777, + "learning_rate": 0.0002400444411549456, + "loss": 4.4883, + "step": 5696 + }, + { + "epoch": 0.55634765625, + "grad_norm": 0.17057742178440094, + "learning_rate": 0.00023997523810963028, + "loss": 4.4844, + "step": 5697 + }, + { + "epoch": 0.5564453125, + "grad_norm": 0.1725737750530243, + "learning_rate": 0.00023990603845972785, + "loss": 4.5039, + "step": 5698 + }, + { + "epoch": 0.55654296875, + "grad_norm": 0.16241511702537537, + "learning_rate": 0.0002398368422119468, + "loss": 4.4688, + "step": 5699 + }, + { + "epoch": 0.556640625, + "grad_norm": 0.16851846873760223, + "learning_rate": 0.00023976764937299523, + "loss": 4.4688, + "step": 5700 + }, + { + "epoch": 0.55673828125, + "grad_norm": 0.1764947921037674, + "learning_rate": 0.00023969845994958094, + "loss": 4.4922, + "step": 5701 + }, + { + "epoch": 0.5568359375, + "grad_norm": 0.1617913544178009, + "learning_rate": 0.0002396292739484114, + "loss": 4.4727, + "step": 5702 + }, + { + "epoch": 0.55693359375, + "grad_norm": 0.15415474772453308, + "learning_rate": 0.0002395600913761936, + "loss": 4.4688, + "step": 5703 + }, + { + "epoch": 0.55703125, + "grad_norm": 0.1749630570411682, + "learning_rate": 0.00023949091223963447, + "loss": 4.5117, + "step": 5704 + }, + { + 
"epoch": 0.55712890625, + "grad_norm": 0.16059736907482147, + "learning_rate": 0.00023942173654544047, + "loss": 4.4883, + "step": 5705 + }, + { + "epoch": 0.5572265625, + "grad_norm": 0.17650265991687775, + "learning_rate": 0.00023935256430031754, + "loss": 4.4961, + "step": 5706 + }, + { + "epoch": 0.55732421875, + "grad_norm": 0.17186607420444489, + "learning_rate": 0.00023928339551097162, + "loss": 4.5, + "step": 5707 + }, + { + "epoch": 0.557421875, + "grad_norm": 0.16778558492660522, + "learning_rate": 0.00023921423018410804, + "loss": 4.4609, + "step": 5708 + }, + { + "epoch": 0.55751953125, + "grad_norm": 0.18072718381881714, + "learning_rate": 0.000239145068326432, + "loss": 4.4805, + "step": 5709 + }, + { + "epoch": 0.5576171875, + "grad_norm": 0.17522750794887543, + "learning_rate": 0.00023907590994464822, + "loss": 4.4648, + "step": 5710 + }, + { + "epoch": 0.55771484375, + "grad_norm": 0.17971138656139374, + "learning_rate": 0.0002390067550454611, + "loss": 4.4453, + "step": 5711 + }, + { + "epoch": 0.5578125, + "grad_norm": 0.1915704309940338, + "learning_rate": 0.00023893760363557487, + "loss": 4.5039, + "step": 5712 + }, + { + "epoch": 0.55791015625, + "grad_norm": 0.1805531084537506, + "learning_rate": 0.0002388684557216933, + "loss": 4.4727, + "step": 5713 + }, + { + "epoch": 0.5580078125, + "grad_norm": 0.17784251272678375, + "learning_rate": 0.00023879931131051956, + "loss": 4.4844, + "step": 5714 + }, + { + "epoch": 0.55810546875, + "grad_norm": 0.18931163847446442, + "learning_rate": 0.00023873017040875694, + "loss": 4.5, + "step": 5715 + }, + { + "epoch": 0.558203125, + "grad_norm": 0.19941790401935577, + "learning_rate": 0.00023866103302310816, + "loss": 4.4648, + "step": 5716 + }, + { + "epoch": 0.55830078125, + "grad_norm": 0.18364651501178741, + "learning_rate": 0.00023859189916027558, + "loss": 4.4961, + "step": 5717 + }, + { + "epoch": 0.5583984375, + "grad_norm": 0.16483086347579956, + "learning_rate": 0.0002385227688269613, + "loss": 4.4258, + "step": 5718 + }, + { + "epoch": 0.55849609375, + "grad_norm": 0.20431862771511078, + "learning_rate": 0.00023845364202986692, + "loss": 4.5039, + "step": 5719 + }, + { + "epoch": 0.55859375, + "grad_norm": 0.19481027126312256, + "learning_rate": 0.00023838451877569394, + "loss": 4.4727, + "step": 5720 + }, + { + "epoch": 0.55869140625, + "grad_norm": 0.17635312676429749, + "learning_rate": 0.0002383153990711434, + "loss": 4.4805, + "step": 5721 + }, + { + "epoch": 0.5587890625, + "grad_norm": 0.1664312332868576, + "learning_rate": 0.000238246282922916, + "loss": 4.4766, + "step": 5722 + }, + { + "epoch": 0.55888671875, + "grad_norm": 0.18807411193847656, + "learning_rate": 0.00023817717033771196, + "loss": 4.4492, + "step": 5723 + }, + { + "epoch": 0.558984375, + "grad_norm": 0.16118144989013672, + "learning_rate": 0.0002381080613222313, + "loss": 4.4844, + "step": 5724 + }, + { + "epoch": 0.55908203125, + "grad_norm": 0.18989871442317963, + "learning_rate": 0.00023803895588317376, + "loss": 4.4492, + "step": 5725 + }, + { + "epoch": 0.5591796875, + "grad_norm": 0.1777978092432022, + "learning_rate": 0.00023796985402723853, + "loss": 4.4805, + "step": 5726 + }, + { + "epoch": 0.55927734375, + "grad_norm": 0.17598319053649902, + "learning_rate": 0.00023790075576112464, + "loss": 4.4414, + "step": 5727 + }, + { + "epoch": 0.559375, + "grad_norm": 0.1765703707933426, + "learning_rate": 0.0002378316610915307, + "loss": 4.4922, + "step": 5728 + }, + { + "epoch": 0.55947265625, + "grad_norm": 0.19025199115276337, + 
"learning_rate": 0.00023776257002515496, + "loss": 4.4961, + "step": 5729 + }, + { + "epoch": 0.5595703125, + "grad_norm": 0.1683504730463028, + "learning_rate": 0.00023769348256869543, + "loss": 4.5195, + "step": 5730 + }, + { + "epoch": 0.55966796875, + "grad_norm": 0.18569634854793549, + "learning_rate": 0.00023762439872884944, + "loss": 4.4531, + "step": 5731 + }, + { + "epoch": 0.559765625, + "grad_norm": 0.17594197392463684, + "learning_rate": 0.0002375553185123143, + "loss": 4.4648, + "step": 5732 + }, + { + "epoch": 0.55986328125, + "grad_norm": 0.17913998663425446, + "learning_rate": 0.00023748624192578694, + "loss": 4.4961, + "step": 5733 + }, + { + "epoch": 0.5599609375, + "grad_norm": 0.16606515645980835, + "learning_rate": 0.00023741716897596383, + "loss": 4.4922, + "step": 5734 + }, + { + "epoch": 0.56005859375, + "grad_norm": 0.17378219962120056, + "learning_rate": 0.00023734809966954109, + "loss": 4.4414, + "step": 5735 + }, + { + "epoch": 0.56015625, + "grad_norm": 0.172393336892128, + "learning_rate": 0.00023727903401321454, + "loss": 4.4531, + "step": 5736 + }, + { + "epoch": 0.56025390625, + "grad_norm": 0.17055387794971466, + "learning_rate": 0.0002372099720136796, + "loss": 4.4727, + "step": 5737 + }, + { + "epoch": 0.5603515625, + "grad_norm": 0.17384661734104156, + "learning_rate": 0.00023714091367763146, + "loss": 4.4336, + "step": 5738 + }, + { + "epoch": 0.56044921875, + "grad_norm": 0.18198265135288239, + "learning_rate": 0.00023707185901176487, + "loss": 4.4688, + "step": 5739 + }, + { + "epoch": 0.560546875, + "grad_norm": 0.16112607717514038, + "learning_rate": 0.000237002808022774, + "loss": 4.4727, + "step": 5740 + }, + { + "epoch": 0.56064453125, + "grad_norm": 0.18192680180072784, + "learning_rate": 0.000236933760717353, + "loss": 4.4648, + "step": 5741 + }, + { + "epoch": 0.5607421875, + "grad_norm": 0.17933033406734467, + "learning_rate": 0.0002368647171021956, + "loss": 4.5117, + "step": 5742 + }, + { + "epoch": 0.56083984375, + "grad_norm": 0.17833828926086426, + "learning_rate": 0.00023679567718399504, + "loss": 4.4766, + "step": 5743 + }, + { + "epoch": 0.5609375, + "grad_norm": 0.16487637162208557, + "learning_rate": 0.0002367266409694442, + "loss": 4.4805, + "step": 5744 + }, + { + "epoch": 0.56103515625, + "grad_norm": 0.16137410700321198, + "learning_rate": 0.00023665760846523583, + "loss": 4.4922, + "step": 5745 + }, + { + "epoch": 0.5611328125, + "grad_norm": 0.17175886034965515, + "learning_rate": 0.00023658857967806207, + "loss": 4.4648, + "step": 5746 + }, + { + "epoch": 0.56123046875, + "grad_norm": 0.16976909339427948, + "learning_rate": 0.0002365195546146149, + "loss": 4.4688, + "step": 5747 + }, + { + "epoch": 0.561328125, + "grad_norm": 0.1771152764558792, + "learning_rate": 0.00023645053328158567, + "loss": 4.5195, + "step": 5748 + }, + { + "epoch": 0.56142578125, + "grad_norm": 0.1592680811882019, + "learning_rate": 0.00023638151568566558, + "loss": 4.4844, + "step": 5749 + }, + { + "epoch": 0.5615234375, + "grad_norm": 0.17741017043590546, + "learning_rate": 0.00023631250183354542, + "loss": 4.5117, + "step": 5750 + }, + { + "epoch": 0.56162109375, + "grad_norm": 0.16992183029651642, + "learning_rate": 0.00023624349173191568, + "loss": 4.5078, + "step": 5751 + }, + { + "epoch": 0.56171875, + "grad_norm": 0.1869528591632843, + "learning_rate": 0.00023617448538746632, + "loss": 4.457, + "step": 5752 + }, + { + "epoch": 0.56181640625, + "grad_norm": 0.18033339083194733, + "learning_rate": 0.00023610548280688715, + "loss": 4.4727, + 
"step": 5753 + }, + { + "epoch": 0.5619140625, + "grad_norm": 0.1764552891254425, + "learning_rate": 0.00023603648399686745, + "loss": 4.5, + "step": 5754 + }, + { + "epoch": 0.56201171875, + "grad_norm": 0.17967437207698822, + "learning_rate": 0.00023596748896409609, + "loss": 4.4414, + "step": 5755 + }, + { + "epoch": 0.562109375, + "grad_norm": 0.17904351651668549, + "learning_rate": 0.00023589849771526195, + "loss": 4.5, + "step": 5756 + }, + { + "epoch": 0.56220703125, + "grad_norm": 0.17215780913829803, + "learning_rate": 0.00023582951025705295, + "loss": 4.4883, + "step": 5757 + }, + { + "epoch": 0.5623046875, + "grad_norm": 0.17417898774147034, + "learning_rate": 0.00023576052659615704, + "loss": 4.4883, + "step": 5758 + }, + { + "epoch": 0.56240234375, + "grad_norm": 0.18052132427692413, + "learning_rate": 0.0002356915467392618, + "loss": 4.4688, + "step": 5759 + }, + { + "epoch": 0.5625, + "grad_norm": 0.1800679862499237, + "learning_rate": 0.00023562257069305437, + "loss": 4.4844, + "step": 5760 + }, + { + "epoch": 0.56259765625, + "grad_norm": 0.17604555189609528, + "learning_rate": 0.00023555359846422138, + "loss": 4.4648, + "step": 5761 + }, + { + "epoch": 0.5626953125, + "grad_norm": 0.1682446002960205, + "learning_rate": 0.0002354846300594493, + "loss": 4.4805, + "step": 5762 + }, + { + "epoch": 0.56279296875, + "grad_norm": 0.1799929291009903, + "learning_rate": 0.00023541566548542414, + "loss": 4.4648, + "step": 5763 + }, + { + "epoch": 0.562890625, + "grad_norm": 0.17862984538078308, + "learning_rate": 0.00023534670474883169, + "loss": 4.4961, + "step": 5764 + }, + { + "epoch": 0.56298828125, + "grad_norm": 0.1767793893814087, + "learning_rate": 0.0002352777478563569, + "loss": 4.5117, + "step": 5765 + }, + { + "epoch": 0.5630859375, + "grad_norm": 0.1714426875114441, + "learning_rate": 0.00023520879481468492, + "loss": 4.4414, + "step": 5766 + }, + { + "epoch": 0.56318359375, + "grad_norm": 0.17284773290157318, + "learning_rate": 0.0002351398456305003, + "loss": 4.4805, + "step": 5767 + }, + { + "epoch": 0.56328125, + "grad_norm": 0.17241334915161133, + "learning_rate": 0.000235070900310487, + "loss": 4.4805, + "step": 5768 + }, + { + "epoch": 0.56337890625, + "grad_norm": 0.18486388027668, + "learning_rate": 0.00023500195886132897, + "loss": 4.4727, + "step": 5769 + }, + { + "epoch": 0.5634765625, + "grad_norm": 0.16871479153633118, + "learning_rate": 0.00023493302128970955, + "loss": 4.4883, + "step": 5770 + }, + { + "epoch": 0.56357421875, + "grad_norm": 0.17736349999904633, + "learning_rate": 0.00023486408760231176, + "loss": 4.4883, + "step": 5771 + }, + { + "epoch": 0.563671875, + "grad_norm": 0.17560578882694244, + "learning_rate": 0.00023479515780581834, + "loss": 4.4883, + "step": 5772 + }, + { + "epoch": 0.56376953125, + "grad_norm": 0.17637088894844055, + "learning_rate": 0.00023472623190691144, + "loss": 4.4688, + "step": 5773 + }, + { + "epoch": 0.5638671875, + "grad_norm": 0.22039522230625153, + "learning_rate": 0.00023465730991227308, + "loss": 4.457, + "step": 5774 + }, + { + "epoch": 0.56396484375, + "grad_norm": 0.1792699247598648, + "learning_rate": 0.00023458839182858464, + "loss": 4.4922, + "step": 5775 + }, + { + "epoch": 0.5640625, + "grad_norm": 0.1782669723033905, + "learning_rate": 0.0002345194776625274, + "loss": 4.4727, + "step": 5776 + }, + { + "epoch": 0.56416015625, + "grad_norm": 0.2060362845659256, + "learning_rate": 0.00023445056742078197, + "loss": 4.4922, + "step": 5777 + }, + { + "epoch": 0.5642578125, + "grad_norm": 
0.17617753148078918, + "learning_rate": 0.00023438166111002884, + "loss": 4.4844, + "step": 5778 + }, + { + "epoch": 0.56435546875, + "grad_norm": 0.2517484128475189, + "learning_rate": 0.00023431275873694808, + "loss": 4.4492, + "step": 5779 + }, + { + "epoch": 0.564453125, + "grad_norm": 0.1880187839269638, + "learning_rate": 0.00023424386030821914, + "loss": 4.4883, + "step": 5780 + }, + { + "epoch": 0.56455078125, + "grad_norm": 0.19575010240077972, + "learning_rate": 0.00023417496583052138, + "loss": 4.4531, + "step": 5781 + }, + { + "epoch": 0.5646484375, + "grad_norm": 0.1992238163948059, + "learning_rate": 0.00023410607531053356, + "loss": 4.4727, + "step": 5782 + }, + { + "epoch": 0.56474609375, + "grad_norm": 0.18956176936626434, + "learning_rate": 0.0002340371887549342, + "loss": 4.4531, + "step": 5783 + }, + { + "epoch": 0.56484375, + "grad_norm": 0.1753232777118683, + "learning_rate": 0.00023396830617040143, + "loss": 4.4844, + "step": 5784 + }, + { + "epoch": 0.56494140625, + "grad_norm": 0.18767178058624268, + "learning_rate": 0.00023389942756361283, + "loss": 4.4766, + "step": 5785 + }, + { + "epoch": 0.5650390625, + "grad_norm": 0.18085968494415283, + "learning_rate": 0.00023383055294124577, + "loss": 4.4688, + "step": 5786 + }, + { + "epoch": 0.56513671875, + "grad_norm": 0.1698661744594574, + "learning_rate": 0.00023376168230997718, + "loss": 4.4844, + "step": 5787 + }, + { + "epoch": 0.565234375, + "grad_norm": 0.19494877755641937, + "learning_rate": 0.00023369281567648366, + "loss": 4.4922, + "step": 5788 + }, + { + "epoch": 0.56533203125, + "grad_norm": 0.21226131916046143, + "learning_rate": 0.00023362395304744128, + "loss": 4.5078, + "step": 5789 + }, + { + "epoch": 0.5654296875, + "grad_norm": 0.19375987350940704, + "learning_rate": 0.00023355509442952584, + "loss": 4.457, + "step": 5790 + }, + { + "epoch": 0.56552734375, + "grad_norm": 0.17733027040958405, + "learning_rate": 0.00023348623982941263, + "loss": 4.4766, + "step": 5791 + }, + { + "epoch": 0.565625, + "grad_norm": 0.1731042116880417, + "learning_rate": 0.0002334173892537768, + "loss": 4.4805, + "step": 5792 + }, + { + "epoch": 0.56572265625, + "grad_norm": 0.17678417265415192, + "learning_rate": 0.00023334854270929285, + "loss": 4.457, + "step": 5793 + }, + { + "epoch": 0.5658203125, + "grad_norm": 0.1932516247034073, + "learning_rate": 0.000233279700202635, + "loss": 4.4688, + "step": 5794 + }, + { + "epoch": 0.56591796875, + "grad_norm": 0.17402932047843933, + "learning_rate": 0.00023321086174047707, + "loss": 4.4727, + "step": 5795 + }, + { + "epoch": 0.566015625, + "grad_norm": 0.17764249444007874, + "learning_rate": 0.0002331420273294924, + "loss": 4.4766, + "step": 5796 + }, + { + "epoch": 0.56611328125, + "grad_norm": 0.1944137066602707, + "learning_rate": 0.0002330731969763541, + "loss": 4.4609, + "step": 5797 + }, + { + "epoch": 0.5662109375, + "grad_norm": 0.19821797311306, + "learning_rate": 0.00023300437068773488, + "loss": 4.4844, + "step": 5798 + }, + { + "epoch": 0.56630859375, + "grad_norm": 0.16421355307102203, + "learning_rate": 0.00023293554847030686, + "loss": 4.4727, + "step": 5799 + }, + { + "epoch": 0.56640625, + "grad_norm": 0.19646060466766357, + "learning_rate": 0.00023286673033074185, + "loss": 4.4961, + "step": 5800 + }, + { + "epoch": 0.56650390625, + "grad_norm": 0.1922343373298645, + "learning_rate": 0.00023279791627571145, + "loss": 4.4688, + "step": 5801 + }, + { + "epoch": 0.5666015625, + "grad_norm": 0.16607369482517242, + "learning_rate": 0.0002327291063118866, + 
"loss": 4.4805, + "step": 5802 + }, + { + "epoch": 0.56669921875, + "grad_norm": 0.17748425900936127, + "learning_rate": 0.0002326603004459381, + "loss": 4.4648, + "step": 5803 + }, + { + "epoch": 0.566796875, + "grad_norm": 0.1784178763628006, + "learning_rate": 0.000232591498684536, + "loss": 4.4883, + "step": 5804 + }, + { + "epoch": 0.56689453125, + "grad_norm": 0.16142183542251587, + "learning_rate": 0.00023252270103435031, + "loss": 4.4922, + "step": 5805 + }, + { + "epoch": 0.5669921875, + "grad_norm": 0.18812493979930878, + "learning_rate": 0.00023245390750205048, + "loss": 4.4414, + "step": 5806 + }, + { + "epoch": 0.56708984375, + "grad_norm": 0.16620944440364838, + "learning_rate": 0.00023238511809430557, + "loss": 4.4648, + "step": 5807 + }, + { + "epoch": 0.5671875, + "grad_norm": 0.1705884039402008, + "learning_rate": 0.00023231633281778415, + "loss": 4.4805, + "step": 5808 + }, + { + "epoch": 0.56728515625, + "grad_norm": 0.16813518106937408, + "learning_rate": 0.00023224755167915463, + "loss": 4.4766, + "step": 5809 + }, + { + "epoch": 0.5673828125, + "grad_norm": 0.17473673820495605, + "learning_rate": 0.0002321787746850848, + "loss": 4.4648, + "step": 5810 + }, + { + "epoch": 0.56748046875, + "grad_norm": 0.16412313282489777, + "learning_rate": 0.00023211000184224212, + "loss": 4.4336, + "step": 5811 + }, + { + "epoch": 0.567578125, + "grad_norm": 0.16269248723983765, + "learning_rate": 0.00023204123315729365, + "loss": 4.4844, + "step": 5812 + }, + { + "epoch": 0.56767578125, + "grad_norm": 0.16873663663864136, + "learning_rate": 0.00023197246863690613, + "loss": 4.4922, + "step": 5813 + }, + { + "epoch": 0.5677734375, + "grad_norm": 0.1726755052804947, + "learning_rate": 0.00023190370828774572, + "loss": 4.4648, + "step": 5814 + }, + { + "epoch": 0.56787109375, + "grad_norm": 0.1768028438091278, + "learning_rate": 0.0002318349521164783, + "loss": 4.4688, + "step": 5815 + }, + { + "epoch": 0.56796875, + "grad_norm": 0.16477498412132263, + "learning_rate": 0.0002317662001297693, + "loss": 4.4766, + "step": 5816 + }, + { + "epoch": 0.56806640625, + "grad_norm": 0.1684807538986206, + "learning_rate": 0.0002316974523342838, + "loss": 4.4805, + "step": 5817 + }, + { + "epoch": 0.5681640625, + "grad_norm": 0.16434282064437866, + "learning_rate": 0.00023162870873668634, + "loss": 4.4727, + "step": 5818 + }, + { + "epoch": 0.56826171875, + "grad_norm": 0.17103558778762817, + "learning_rate": 0.0002315599693436412, + "loss": 4.4805, + "step": 5819 + }, + { + "epoch": 0.568359375, + "grad_norm": 0.1640734225511551, + "learning_rate": 0.00023149123416181224, + "loss": 4.4492, + "step": 5820 + }, + { + "epoch": 0.56845703125, + "grad_norm": 0.1621914654970169, + "learning_rate": 0.00023142250319786283, + "loss": 4.4648, + "step": 5821 + }, + { + "epoch": 0.5685546875, + "grad_norm": 0.16547708213329315, + "learning_rate": 0.00023135377645845597, + "loss": 4.4453, + "step": 5822 + }, + { + "epoch": 0.56865234375, + "grad_norm": 0.16044028103351593, + "learning_rate": 0.00023128505395025433, + "loss": 4.418, + "step": 5823 + }, + { + "epoch": 0.56875, + "grad_norm": 0.1615370810031891, + "learning_rate": 0.00023121633567991997, + "loss": 4.457, + "step": 5824 + }, + { + "epoch": 0.56884765625, + "grad_norm": 0.1700940877199173, + "learning_rate": 0.00023114762165411462, + "loss": 4.5, + "step": 5825 + }, + { + "epoch": 0.5689453125, + "grad_norm": 0.16869111359119415, + "learning_rate": 0.00023107891187949982, + "loss": 4.4805, + "step": 5826 + }, + { + "epoch": 0.56904296875, + 
"grad_norm": 0.1756461262702942, + "learning_rate": 0.00023101020636273633, + "loss": 4.4883, + "step": 5827 + }, + { + "epoch": 0.569140625, + "grad_norm": 0.16969148814678192, + "learning_rate": 0.00023094150511048485, + "loss": 4.4727, + "step": 5828 + }, + { + "epoch": 0.56923828125, + "grad_norm": 0.1735909879207611, + "learning_rate": 0.00023087280812940538, + "loss": 4.4766, + "step": 5829 + }, + { + "epoch": 0.5693359375, + "grad_norm": 0.17264723777770996, + "learning_rate": 0.0002308041154261577, + "loss": 4.5039, + "step": 5830 + }, + { + "epoch": 0.56943359375, + "grad_norm": 0.1644759178161621, + "learning_rate": 0.00023073542700740112, + "loss": 4.4531, + "step": 5831 + }, + { + "epoch": 0.56953125, + "grad_norm": 0.1620153933763504, + "learning_rate": 0.00023066674287979444, + "loss": 4.4453, + "step": 5832 + }, + { + "epoch": 0.56962890625, + "grad_norm": 0.16062015295028687, + "learning_rate": 0.0002305980630499961, + "loss": 4.4375, + "step": 5833 + }, + { + "epoch": 0.5697265625, + "grad_norm": 0.16667994856834412, + "learning_rate": 0.00023052938752466417, + "loss": 4.4648, + "step": 5834 + }, + { + "epoch": 0.56982421875, + "grad_norm": 0.1714445799589157, + "learning_rate": 0.00023046071631045633, + "loss": 4.4805, + "step": 5835 + }, + { + "epoch": 0.569921875, + "grad_norm": 0.17716123163700104, + "learning_rate": 0.00023039204941402975, + "loss": 4.4648, + "step": 5836 + }, + { + "epoch": 0.57001953125, + "grad_norm": 0.17185921967029572, + "learning_rate": 0.00023032338684204123, + "loss": 4.4688, + "step": 5837 + }, + { + "epoch": 0.5701171875, + "grad_norm": 0.17201413214206696, + "learning_rate": 0.0002302547286011471, + "loss": 4.4492, + "step": 5838 + }, + { + "epoch": 0.57021484375, + "grad_norm": 0.1638117879629135, + "learning_rate": 0.0002301860746980034, + "loss": 4.5039, + "step": 5839 + }, + { + "epoch": 0.5703125, + "grad_norm": 0.1655907928943634, + "learning_rate": 0.0002301174251392657, + "loss": 4.4844, + "step": 5840 + }, + { + "epoch": 0.57041015625, + "grad_norm": 0.179342582821846, + "learning_rate": 0.00023004877993158885, + "loss": 4.4375, + "step": 5841 + }, + { + "epoch": 0.5705078125, + "grad_norm": 0.18044227361679077, + "learning_rate": 0.00022998013908162774, + "loss": 4.4961, + "step": 5842 + }, + { + "epoch": 0.57060546875, + "grad_norm": 0.16676118969917297, + "learning_rate": 0.0002299115025960366, + "loss": 4.4531, + "step": 5843 + }, + { + "epoch": 0.570703125, + "grad_norm": 0.17396773397922516, + "learning_rate": 0.00022984287048146928, + "loss": 4.5234, + "step": 5844 + }, + { + "epoch": 0.57080078125, + "grad_norm": 0.1768062710762024, + "learning_rate": 0.0002297742427445792, + "loss": 4.5039, + "step": 5845 + }, + { + "epoch": 0.5708984375, + "grad_norm": 0.17463748157024384, + "learning_rate": 0.00022970561939201928, + "loss": 4.4609, + "step": 5846 + }, + { + "epoch": 0.57099609375, + "grad_norm": 0.15966449677944183, + "learning_rate": 0.00022963700043044218, + "loss": 4.4609, + "step": 5847 + }, + { + "epoch": 0.57109375, + "grad_norm": 0.17531105875968933, + "learning_rate": 0.00022956838586650008, + "loss": 4.4688, + "step": 5848 + }, + { + "epoch": 0.57119140625, + "grad_norm": 0.17210374772548676, + "learning_rate": 0.00022949977570684455, + "loss": 4.4531, + "step": 5849 + }, + { + "epoch": 0.5712890625, + "grad_norm": 0.16826319694519043, + "learning_rate": 0.00022943116995812696, + "loss": 4.457, + "step": 5850 + }, + { + "epoch": 0.57138671875, + "grad_norm": 0.17481109499931335, + "learning_rate": 
0.00022936256862699818, + "loss": 4.5, + "step": 5851 + }, + { + "epoch": 0.571484375, + "grad_norm": 0.1752835512161255, + "learning_rate": 0.00022929397172010858, + "loss": 4.4492, + "step": 5852 + }, + { + "epoch": 0.57158203125, + "grad_norm": 0.17356222867965698, + "learning_rate": 0.00022922537924410823, + "loss": 4.4688, + "step": 5853 + }, + { + "epoch": 0.5716796875, + "grad_norm": 0.17662110924720764, + "learning_rate": 0.0002291567912056467, + "loss": 4.4648, + "step": 5854 + }, + { + "epoch": 0.57177734375, + "grad_norm": 0.17313194274902344, + "learning_rate": 0.00022908820761137312, + "loss": 4.4922, + "step": 5855 + }, + { + "epoch": 0.571875, + "grad_norm": 0.18128396570682526, + "learning_rate": 0.00022901962846793623, + "loss": 4.4727, + "step": 5856 + }, + { + "epoch": 0.57197265625, + "grad_norm": 0.1817648708820343, + "learning_rate": 0.0002289510537819844, + "loss": 4.4727, + "step": 5857 + }, + { + "epoch": 0.5720703125, + "grad_norm": 0.19050338864326477, + "learning_rate": 0.0002288824835601652, + "loss": 4.4492, + "step": 5858 + }, + { + "epoch": 0.57216796875, + "grad_norm": 0.17917639017105103, + "learning_rate": 0.00022881391780912625, + "loss": 4.5117, + "step": 5859 + }, + { + "epoch": 0.572265625, + "grad_norm": 0.18184831738471985, + "learning_rate": 0.00022874535653551447, + "loss": 4.4609, + "step": 5860 + }, + { + "epoch": 0.57236328125, + "grad_norm": 0.18453124165534973, + "learning_rate": 0.00022867679974597656, + "loss": 4.4688, + "step": 5861 + }, + { + "epoch": 0.5724609375, + "grad_norm": 0.18587401509284973, + "learning_rate": 0.0002286082474471584, + "loss": 4.4609, + "step": 5862 + }, + { + "epoch": 0.57255859375, + "grad_norm": 0.17471228539943695, + "learning_rate": 0.0002285396996457058, + "loss": 4.4805, + "step": 5863 + }, + { + "epoch": 0.57265625, + "grad_norm": 0.18671071529388428, + "learning_rate": 0.00022847115634826404, + "loss": 4.4492, + "step": 5864 + }, + { + "epoch": 0.57275390625, + "grad_norm": 0.1595827043056488, + "learning_rate": 0.00022840261756147795, + "loss": 4.4727, + "step": 5865 + }, + { + "epoch": 0.5728515625, + "grad_norm": 0.18790985643863678, + "learning_rate": 0.00022833408329199178, + "loss": 4.5117, + "step": 5866 + }, + { + "epoch": 0.57294921875, + "grad_norm": 0.1582627296447754, + "learning_rate": 0.00022826555354644947, + "loss": 4.4805, + "step": 5867 + }, + { + "epoch": 0.573046875, + "grad_norm": 0.17943088710308075, + "learning_rate": 0.00022819702833149454, + "loss": 4.4688, + "step": 5868 + }, + { + "epoch": 0.57314453125, + "grad_norm": 0.1938745528459549, + "learning_rate": 0.00022812850765377014, + "loss": 4.4492, + "step": 5869 + }, + { + "epoch": 0.5732421875, + "grad_norm": 0.171878382563591, + "learning_rate": 0.00022805999151991873, + "loss": 4.4766, + "step": 5870 + }, + { + "epoch": 0.57333984375, + "grad_norm": 0.1648281216621399, + "learning_rate": 0.00022799147993658253, + "loss": 4.5078, + "step": 5871 + }, + { + "epoch": 0.5734375, + "grad_norm": 0.186711847782135, + "learning_rate": 0.00022792297291040337, + "loss": 4.4609, + "step": 5872 + }, + { + "epoch": 0.57353515625, + "grad_norm": 0.19002099335193634, + "learning_rate": 0.0002278544704480225, + "loss": 4.4727, + "step": 5873 + }, + { + "epoch": 0.5736328125, + "grad_norm": 0.17940692603588104, + "learning_rate": 0.00022778597255608074, + "loss": 4.4727, + "step": 5874 + }, + { + "epoch": 0.57373046875, + "grad_norm": 0.1720336377620697, + "learning_rate": 0.00022771747924121843, + "loss": 4.4648, + "step": 5875 + }, + { + 
"epoch": 0.573828125, + "grad_norm": 0.17023596167564392, + "learning_rate": 0.00022764899051007565, + "loss": 4.4492, + "step": 5876 + }, + { + "epoch": 0.57392578125, + "grad_norm": 0.1725986897945404, + "learning_rate": 0.00022758050636929183, + "loss": 4.4492, + "step": 5877 + }, + { + "epoch": 0.5740234375, + "grad_norm": 0.16694720089435577, + "learning_rate": 0.00022751202682550614, + "loss": 4.4922, + "step": 5878 + }, + { + "epoch": 0.57412109375, + "grad_norm": 0.1774391233921051, + "learning_rate": 0.0002274435518853571, + "loss": 4.4805, + "step": 5879 + }, + { + "epoch": 0.57421875, + "grad_norm": 0.19406390190124512, + "learning_rate": 0.00022737508155548292, + "loss": 4.4609, + "step": 5880 + }, + { + "epoch": 0.57431640625, + "grad_norm": 0.157316654920578, + "learning_rate": 0.00022730661584252138, + "loss": 4.4453, + "step": 5881 + }, + { + "epoch": 0.5744140625, + "grad_norm": 0.18765366077423096, + "learning_rate": 0.00022723815475310976, + "loss": 4.4453, + "step": 5882 + }, + { + "epoch": 0.57451171875, + "grad_norm": 0.19185833632946014, + "learning_rate": 0.0002271696982938849, + "loss": 4.4844, + "step": 5883 + }, + { + "epoch": 0.574609375, + "grad_norm": 0.18179713189601898, + "learning_rate": 0.00022710124647148312, + "loss": 4.4766, + "step": 5884 + }, + { + "epoch": 0.57470703125, + "grad_norm": 0.17680597305297852, + "learning_rate": 0.0002270327992925404, + "loss": 4.4375, + "step": 5885 + }, + { + "epoch": 0.5748046875, + "grad_norm": 0.20981214940547943, + "learning_rate": 0.0002269643567636922, + "loss": 4.4648, + "step": 5886 + }, + { + "epoch": 0.57490234375, + "grad_norm": 0.1778404712677002, + "learning_rate": 0.00022689591889157363, + "loss": 4.4844, + "step": 5887 + }, + { + "epoch": 0.575, + "grad_norm": 0.18084728717803955, + "learning_rate": 0.00022682748568281924, + "loss": 4.4766, + "step": 5888 + }, + { + "epoch": 0.57509765625, + "grad_norm": 0.2143409252166748, + "learning_rate": 0.00022675905714406315, + "loss": 4.4531, + "step": 5889 + }, + { + "epoch": 0.5751953125, + "grad_norm": 0.17704346776008606, + "learning_rate": 0.0002266906332819391, + "loss": 4.4805, + "step": 5890 + }, + { + "epoch": 0.57529296875, + "grad_norm": 0.18668977916240692, + "learning_rate": 0.00022662221410308032, + "loss": 4.4492, + "step": 5891 + }, + { + "epoch": 0.575390625, + "grad_norm": 0.20881247520446777, + "learning_rate": 0.00022655379961411944, + "loss": 4.4688, + "step": 5892 + }, + { + "epoch": 0.57548828125, + "grad_norm": 0.1762191504240036, + "learning_rate": 0.00022648538982168888, + "loss": 4.4648, + "step": 5893 + }, + { + "epoch": 0.5755859375, + "grad_norm": 0.17779706418514252, + "learning_rate": 0.0002264169847324205, + "loss": 4.5117, + "step": 5894 + }, + { + "epoch": 0.57568359375, + "grad_norm": 0.18577338755130768, + "learning_rate": 0.0002263485843529458, + "loss": 4.4531, + "step": 5895 + }, + { + "epoch": 0.57578125, + "grad_norm": 0.1661708801984787, + "learning_rate": 0.00022628018868989559, + "loss": 4.4609, + "step": 5896 + }, + { + "epoch": 0.57587890625, + "grad_norm": 0.19425155222415924, + "learning_rate": 0.0002262117977499004, + "loss": 4.457, + "step": 5897 + }, + { + "epoch": 0.5759765625, + "grad_norm": 0.17550887167453766, + "learning_rate": 0.00022614341153959035, + "loss": 4.4766, + "step": 5898 + }, + { + "epoch": 0.57607421875, + "grad_norm": 0.16902583837509155, + "learning_rate": 0.00022607503006559504, + "loss": 4.4766, + "step": 5899 + }, + { + "epoch": 0.576171875, + "grad_norm": 0.17665301263332367, + 
"learning_rate": 0.0002260066533345434, + "loss": 4.4688, + "step": 5900 + }, + { + "epoch": 0.57626953125, + "grad_norm": 0.16951461136341095, + "learning_rate": 0.0002259382813530642, + "loss": 4.457, + "step": 5901 + }, + { + "epoch": 0.5763671875, + "grad_norm": 0.17041145265102386, + "learning_rate": 0.00022586991412778564, + "loss": 4.4727, + "step": 5902 + }, + { + "epoch": 0.57646484375, + "grad_norm": 0.17128659784793854, + "learning_rate": 0.00022580155166533546, + "loss": 4.4805, + "step": 5903 + }, + { + "epoch": 0.5765625, + "grad_norm": 0.15765394270420074, + "learning_rate": 0.00022573319397234094, + "loss": 4.4453, + "step": 5904 + }, + { + "epoch": 0.57666015625, + "grad_norm": 0.1794353723526001, + "learning_rate": 0.0002256648410554289, + "loss": 4.4336, + "step": 5905 + }, + { + "epoch": 0.5767578125, + "grad_norm": 0.16672471165657043, + "learning_rate": 0.00022559649292122565, + "loss": 4.4531, + "step": 5906 + }, + { + "epoch": 0.57685546875, + "grad_norm": 0.16902686655521393, + "learning_rate": 0.00022552814957635715, + "loss": 4.4648, + "step": 5907 + }, + { + "epoch": 0.576953125, + "grad_norm": 0.16261841356754303, + "learning_rate": 0.00022545981102744877, + "loss": 4.4961, + "step": 5908 + }, + { + "epoch": 0.57705078125, + "grad_norm": 0.1666291058063507, + "learning_rate": 0.00022539147728112553, + "loss": 4.4844, + "step": 5909 + }, + { + "epoch": 0.5771484375, + "grad_norm": 0.16545498371124268, + "learning_rate": 0.0002253231483440118, + "loss": 4.4961, + "step": 5910 + }, + { + "epoch": 0.57724609375, + "grad_norm": 0.1680792272090912, + "learning_rate": 0.0002252548242227317, + "loss": 4.4727, + "step": 5911 + }, + { + "epoch": 0.57734375, + "grad_norm": 0.16002556681632996, + "learning_rate": 0.00022518650492390875, + "loss": 4.4727, + "step": 5912 + }, + { + "epoch": 0.57744140625, + "grad_norm": 0.16940781474113464, + "learning_rate": 0.0002251181904541661, + "loss": 4.4805, + "step": 5913 + }, + { + "epoch": 0.5775390625, + "grad_norm": 0.17309051752090454, + "learning_rate": 0.0002250498808201263, + "loss": 4.4766, + "step": 5914 + }, + { + "epoch": 0.57763671875, + "grad_norm": 0.16495341062545776, + "learning_rate": 0.0002249815760284115, + "loss": 4.4961, + "step": 5915 + }, + { + "epoch": 0.577734375, + "grad_norm": 0.17159703373908997, + "learning_rate": 0.00022491327608564356, + "loss": 4.4531, + "step": 5916 + }, + { + "epoch": 0.57783203125, + "grad_norm": 0.1813633143901825, + "learning_rate": 0.00022484498099844348, + "loss": 4.4648, + "step": 5917 + }, + { + "epoch": 0.5779296875, + "grad_norm": 0.17338010668754578, + "learning_rate": 0.0002247766907734321, + "loss": 4.4648, + "step": 5918 + }, + { + "epoch": 0.57802734375, + "grad_norm": 0.17467136681079865, + "learning_rate": 0.00022470840541722977, + "loss": 4.4648, + "step": 5919 + }, + { + "epoch": 0.578125, + "grad_norm": 0.17265649139881134, + "learning_rate": 0.00022464012493645608, + "loss": 4.4492, + "step": 5920 + }, + { + "epoch": 0.57822265625, + "grad_norm": 0.18474704027175903, + "learning_rate": 0.0002245718493377305, + "loss": 4.4727, + "step": 5921 + }, + { + "epoch": 0.5783203125, + "grad_norm": 0.1748138666152954, + "learning_rate": 0.0002245035786276719, + "loss": 4.4453, + "step": 5922 + }, + { + "epoch": 0.57841796875, + "grad_norm": 0.15945565700531006, + "learning_rate": 0.00022443531281289865, + "loss": 4.4453, + "step": 5923 + }, + { + "epoch": 0.578515625, + "grad_norm": 0.16552047431468964, + "learning_rate": 0.0002243670519000286, + "loss": 4.4531, + "step": 
5924 + }, + { + "epoch": 0.57861328125, + "grad_norm": 0.16406361758708954, + "learning_rate": 0.00022429879589567925, + "loss": 4.4844, + "step": 5925 + }, + { + "epoch": 0.5787109375, + "grad_norm": 0.16452904045581818, + "learning_rate": 0.00022423054480646748, + "loss": 4.457, + "step": 5926 + }, + { + "epoch": 0.57880859375, + "grad_norm": 0.16634447872638702, + "learning_rate": 0.0002241622986390099, + "loss": 4.4609, + "step": 5927 + }, + { + "epoch": 0.57890625, + "grad_norm": 0.16825851798057556, + "learning_rate": 0.0002240940573999224, + "loss": 4.5039, + "step": 5928 + }, + { + "epoch": 0.57900390625, + "grad_norm": 0.15938861668109894, + "learning_rate": 0.00022402582109582047, + "loss": 4.4609, + "step": 5929 + }, + { + "epoch": 0.5791015625, + "grad_norm": 0.16223864257335663, + "learning_rate": 0.0002239575897333193, + "loss": 4.4375, + "step": 5930 + }, + { + "epoch": 0.57919921875, + "grad_norm": 0.16061240434646606, + "learning_rate": 0.00022388936331903332, + "loss": 4.4531, + "step": 5931 + }, + { + "epoch": 0.579296875, + "grad_norm": 0.17046387493610382, + "learning_rate": 0.00022382114185957674, + "loss": 4.4375, + "step": 5932 + }, + { + "epoch": 0.57939453125, + "grad_norm": 0.16466355323791504, + "learning_rate": 0.00022375292536156311, + "loss": 4.4414, + "step": 5933 + }, + { + "epoch": 0.5794921875, + "grad_norm": 0.16366951167583466, + "learning_rate": 0.0002236847138316055, + "loss": 4.4727, + "step": 5934 + }, + { + "epoch": 0.57958984375, + "grad_norm": 0.18273594975471497, + "learning_rate": 0.0002236165072763166, + "loss": 4.4688, + "step": 5935 + }, + { + "epoch": 0.5796875, + "grad_norm": 0.18539240956306458, + "learning_rate": 0.0002235483057023086, + "loss": 4.4844, + "step": 5936 + }, + { + "epoch": 0.57978515625, + "grad_norm": 0.16910502314567566, + "learning_rate": 0.00022348010911619315, + "loss": 4.4688, + "step": 5937 + }, + { + "epoch": 0.5798828125, + "grad_norm": 0.17266632616519928, + "learning_rate": 0.00022341191752458145, + "loss": 4.4922, + "step": 5938 + }, + { + "epoch": 0.57998046875, + "grad_norm": 0.17467671632766724, + "learning_rate": 0.0002233437309340843, + "loss": 4.5078, + "step": 5939 + }, + { + "epoch": 0.580078125, + "grad_norm": 0.17123636603355408, + "learning_rate": 0.00022327554935131172, + "loss": 4.4844, + "step": 5940 + }, + { + "epoch": 0.58017578125, + "grad_norm": 0.17289063334465027, + "learning_rate": 0.00022320737278287367, + "loss": 4.4531, + "step": 5941 + }, + { + "epoch": 0.5802734375, + "grad_norm": 0.17855721712112427, + "learning_rate": 0.00022313920123537924, + "loss": 4.4688, + "step": 5942 + }, + { + "epoch": 0.58037109375, + "grad_norm": 0.18201619386672974, + "learning_rate": 0.00022307103471543726, + "loss": 4.4375, + "step": 5943 + }, + { + "epoch": 0.58046875, + "grad_norm": 0.1699538677930832, + "learning_rate": 0.00022300287322965612, + "loss": 4.4414, + "step": 5944 + }, + { + "epoch": 0.58056640625, + "grad_norm": 0.1737283319234848, + "learning_rate": 0.00022293471678464338, + "loss": 4.4336, + "step": 5945 + }, + { + "epoch": 0.5806640625, + "grad_norm": 0.16638928651809692, + "learning_rate": 0.00022286656538700656, + "loss": 4.4688, + "step": 5946 + }, + { + "epoch": 0.58076171875, + "grad_norm": 0.1680837720632553, + "learning_rate": 0.00022279841904335242, + "loss": 4.4414, + "step": 5947 + }, + { + "epoch": 0.580859375, + "grad_norm": 0.17480576038360596, + "learning_rate": 0.00022273027776028727, + "loss": 4.5039, + "step": 5948 + }, + { + "epoch": 0.58095703125, + "grad_norm": 
0.15597988665103912, + "learning_rate": 0.00022266214154441694, + "loss": 4.4609, + "step": 5949 + }, + { + "epoch": 0.5810546875, + "grad_norm": 0.16647648811340332, + "learning_rate": 0.0002225940104023467, + "loss": 4.4922, + "step": 5950 + }, + { + "epoch": 0.58115234375, + "grad_norm": 0.17179712653160095, + "learning_rate": 0.0002225258843406815, + "loss": 4.4609, + "step": 5951 + }, + { + "epoch": 0.58125, + "grad_norm": 0.17112061381340027, + "learning_rate": 0.00022245776336602563, + "loss": 4.418, + "step": 5952 + }, + { + "epoch": 0.58134765625, + "grad_norm": 0.17650674283504486, + "learning_rate": 0.00022238964748498303, + "loss": 4.4766, + "step": 5953 + }, + { + "epoch": 0.5814453125, + "grad_norm": 0.1691657304763794, + "learning_rate": 0.00022232153670415704, + "loss": 4.4531, + "step": 5954 + }, + { + "epoch": 0.58154296875, + "grad_norm": 0.16264432668685913, + "learning_rate": 0.0002222534310301505, + "loss": 4.4883, + "step": 5955 + }, + { + "epoch": 0.581640625, + "grad_norm": 0.17650838196277618, + "learning_rate": 0.00022218533046956584, + "loss": 4.4648, + "step": 5956 + }, + { + "epoch": 0.58173828125, + "grad_norm": 0.17542505264282227, + "learning_rate": 0.000222117235029005, + "loss": 4.4922, + "step": 5957 + }, + { + "epoch": 0.5818359375, + "grad_norm": 0.17794623970985413, + "learning_rate": 0.00022204914471506932, + "loss": 4.4648, + "step": 5958 + }, + { + "epoch": 0.58193359375, + "grad_norm": 0.18520241975784302, + "learning_rate": 0.00022198105953435965, + "loss": 4.4883, + "step": 5959 + }, + { + "epoch": 0.58203125, + "grad_norm": 0.18467532098293304, + "learning_rate": 0.0002219129794934764, + "loss": 4.4688, + "step": 5960 + }, + { + "epoch": 0.58212890625, + "grad_norm": 0.17904268205165863, + "learning_rate": 0.00022184490459901946, + "loss": 4.4727, + "step": 5961 + }, + { + "epoch": 0.5822265625, + "grad_norm": 0.17612536251544952, + "learning_rate": 0.00022177683485758825, + "loss": 4.5234, + "step": 5962 + }, + { + "epoch": 0.58232421875, + "grad_norm": 0.1803075224161148, + "learning_rate": 0.00022170877027578173, + "loss": 4.4727, + "step": 5963 + }, + { + "epoch": 0.582421875, + "grad_norm": 0.17087189853191376, + "learning_rate": 0.0002216407108601982, + "loss": 4.4453, + "step": 5964 + }, + { + "epoch": 0.58251953125, + "grad_norm": 0.1563486009836197, + "learning_rate": 0.0002215726566174356, + "loss": 4.4531, + "step": 5965 + }, + { + "epoch": 0.5826171875, + "grad_norm": 0.1669105738401413, + "learning_rate": 0.00022150460755409145, + "loss": 4.4688, + "step": 5966 + }, + { + "epoch": 0.58271484375, + "grad_norm": 0.17060726881027222, + "learning_rate": 0.0002214365636767624, + "loss": 4.4375, + "step": 5967 + }, + { + "epoch": 0.5828125, + "grad_norm": 0.16282570362091064, + "learning_rate": 0.00022136852499204495, + "loss": 4.4844, + "step": 5968 + }, + { + "epoch": 0.58291015625, + "grad_norm": 0.16245891153812408, + "learning_rate": 0.00022130049150653508, + "loss": 4.4961, + "step": 5969 + }, + { + "epoch": 0.5830078125, + "grad_norm": 0.16428908705711365, + "learning_rate": 0.00022123246322682804, + "loss": 4.4531, + "step": 5970 + }, + { + "epoch": 0.58310546875, + "grad_norm": 0.18445713818073273, + "learning_rate": 0.00022116444015951876, + "loss": 4.4688, + "step": 5971 + }, + { + "epoch": 0.583203125, + "grad_norm": 0.16346825659275055, + "learning_rate": 0.00022109642231120163, + "loss": 4.4844, + "step": 5972 + }, + { + "epoch": 0.58330078125, + "grad_norm": 0.18612679839134216, + "learning_rate": 0.00022102840968847055, + 
"loss": 4.4844, + "step": 5973 + }, + { + "epoch": 0.5833984375, + "grad_norm": 0.16440781950950623, + "learning_rate": 0.0002209604022979188, + "loss": 4.4805, + "step": 5974 + }, + { + "epoch": 0.58349609375, + "grad_norm": 0.16965323686599731, + "learning_rate": 0.00022089240014613943, + "loss": 4.4688, + "step": 5975 + }, + { + "epoch": 0.58359375, + "grad_norm": 0.1756609082221985, + "learning_rate": 0.00022082440323972446, + "loss": 4.4688, + "step": 5976 + }, + { + "epoch": 0.58369140625, + "grad_norm": 0.17025907337665558, + "learning_rate": 0.000220756411585266, + "loss": 4.4844, + "step": 5977 + }, + { + "epoch": 0.5837890625, + "grad_norm": 0.16990303993225098, + "learning_rate": 0.0002206884251893552, + "loss": 4.4883, + "step": 5978 + }, + { + "epoch": 0.58388671875, + "grad_norm": 0.1853402554988861, + "learning_rate": 0.000220620444058583, + "loss": 4.4688, + "step": 5979 + }, + { + "epoch": 0.583984375, + "grad_norm": 0.16973742842674255, + "learning_rate": 0.00022055246819953966, + "loss": 4.4336, + "step": 5980 + }, + { + "epoch": 0.58408203125, + "grad_norm": 0.18282903730869293, + "learning_rate": 0.00022048449761881506, + "loss": 4.457, + "step": 5981 + }, + { + "epoch": 0.5841796875, + "grad_norm": 0.1703486144542694, + "learning_rate": 0.0002204165323229983, + "loss": 4.4922, + "step": 5982 + }, + { + "epoch": 0.58427734375, + "grad_norm": 0.18503889441490173, + "learning_rate": 0.00022034857231867844, + "loss": 4.4688, + "step": 5983 + }, + { + "epoch": 0.584375, + "grad_norm": 0.18599070608615875, + "learning_rate": 0.0002202806176124435, + "loss": 4.4961, + "step": 5984 + }, + { + "epoch": 0.58447265625, + "grad_norm": 0.17802740633487701, + "learning_rate": 0.00022021266821088125, + "loss": 4.4883, + "step": 5985 + }, + { + "epoch": 0.5845703125, + "grad_norm": 0.17208197712898254, + "learning_rate": 0.000220144724120579, + "loss": 4.4492, + "step": 5986 + }, + { + "epoch": 0.58466796875, + "grad_norm": 0.17599013447761536, + "learning_rate": 0.00022007678534812343, + "loss": 4.4805, + "step": 5987 + }, + { + "epoch": 0.584765625, + "grad_norm": 0.17113761603832245, + "learning_rate": 0.00022000885190010078, + "loss": 4.4531, + "step": 5988 + }, + { + "epoch": 0.58486328125, + "grad_norm": 0.1802828013896942, + "learning_rate": 0.00021994092378309666, + "loss": 4.4688, + "step": 5989 + }, + { + "epoch": 0.5849609375, + "grad_norm": 0.16846942901611328, + "learning_rate": 0.00021987300100369633, + "loss": 4.5078, + "step": 5990 + }, + { + "epoch": 0.58505859375, + "grad_norm": 0.16279855370521545, + "learning_rate": 0.0002198050835684845, + "loss": 4.4648, + "step": 5991 + }, + { + "epoch": 0.58515625, + "grad_norm": 0.1737007051706314, + "learning_rate": 0.0002197371714840451, + "loss": 4.4531, + "step": 5992 + }, + { + "epoch": 0.58525390625, + "grad_norm": 0.15666770935058594, + "learning_rate": 0.00021966926475696185, + "loss": 4.4922, + "step": 5993 + }, + { + "epoch": 0.5853515625, + "grad_norm": 0.16603171825408936, + "learning_rate": 0.00021960136339381787, + "loss": 4.4648, + "step": 5994 + }, + { + "epoch": 0.58544921875, + "grad_norm": 0.1661268174648285, + "learning_rate": 0.00021953346740119574, + "loss": 4.4297, + "step": 5995 + }, + { + "epoch": 0.585546875, + "grad_norm": 0.15862566232681274, + "learning_rate": 0.00021946557678567747, + "loss": 4.457, + "step": 5996 + }, + { + "epoch": 0.58564453125, + "grad_norm": 0.16884049773216248, + "learning_rate": 0.0002193976915538446, + "loss": 4.5039, + "step": 5997 + }, + { + "epoch": 0.5857421875, + 
"grad_norm": 0.15759369730949402, + "learning_rate": 0.00021932981171227816, + "loss": 4.4492, + "step": 5998 + }, + { + "epoch": 0.58583984375, + "grad_norm": 0.1749560832977295, + "learning_rate": 0.00021926193726755868, + "loss": 4.4336, + "step": 5999 + }, + { + "epoch": 0.5859375, + "grad_norm": 0.17260321974754333, + "learning_rate": 0.00021919406822626614, + "loss": 4.4688, + "step": 6000 + }, + { + "epoch": 0.58603515625, + "grad_norm": 0.1657172590494156, + "learning_rate": 0.00021912620459497984, + "loss": 4.4961, + "step": 6001 + }, + { + "epoch": 0.5861328125, + "grad_norm": 0.1787807196378708, + "learning_rate": 0.00021905834638027876, + "loss": 4.4414, + "step": 6002 + }, + { + "epoch": 0.58623046875, + "grad_norm": 0.15790975093841553, + "learning_rate": 0.0002189904935887414, + "loss": 4.4961, + "step": 6003 + }, + { + "epoch": 0.586328125, + "grad_norm": 0.16679991781711578, + "learning_rate": 0.00021892264622694552, + "loss": 4.457, + "step": 6004 + }, + { + "epoch": 0.58642578125, + "grad_norm": 0.16489839553833008, + "learning_rate": 0.00021885480430146843, + "loss": 4.4727, + "step": 6005 + }, + { + "epoch": 0.5865234375, + "grad_norm": 0.17479082942008972, + "learning_rate": 0.00021878696781888707, + "loss": 4.4805, + "step": 6006 + }, + { + "epoch": 0.58662109375, + "grad_norm": 0.16876289248466492, + "learning_rate": 0.00021871913678577764, + "loss": 4.4766, + "step": 6007 + }, + { + "epoch": 0.58671875, + "grad_norm": 0.1635620892047882, + "learning_rate": 0.00021865131120871601, + "loss": 4.4531, + "step": 6008 + }, + { + "epoch": 0.58681640625, + "grad_norm": 0.1688223034143448, + "learning_rate": 0.00021858349109427723, + "loss": 4.4609, + "step": 6009 + }, + { + "epoch": 0.5869140625, + "grad_norm": 0.17924508452415466, + "learning_rate": 0.00021851567644903607, + "loss": 4.4766, + "step": 6010 + }, + { + "epoch": 0.58701171875, + "grad_norm": 0.16160383820533752, + "learning_rate": 0.00021844786727956672, + "loss": 4.4688, + "step": 6011 + }, + { + "epoch": 0.587109375, + "grad_norm": 0.16216948628425598, + "learning_rate": 0.00021838006359244284, + "loss": 4.4766, + "step": 6012 + }, + { + "epoch": 0.58720703125, + "grad_norm": 0.16864261031150818, + "learning_rate": 0.00021831226539423753, + "loss": 4.4375, + "step": 6013 + }, + { + "epoch": 0.5873046875, + "grad_norm": 0.1605362892150879, + "learning_rate": 0.0002182444726915233, + "loss": 4.4922, + "step": 6014 + }, + { + "epoch": 0.58740234375, + "grad_norm": 0.16483724117279053, + "learning_rate": 0.00021817668549087228, + "loss": 4.4766, + "step": 6015 + }, + { + "epoch": 0.5875, + "grad_norm": 0.18295735120773315, + "learning_rate": 0.00021810890379885594, + "loss": 4.4414, + "step": 6016 + }, + { + "epoch": 0.58759765625, + "grad_norm": 0.17458802461624146, + "learning_rate": 0.00021804112762204537, + "loss": 4.5312, + "step": 6017 + }, + { + "epoch": 0.5876953125, + "grad_norm": 0.1711786836385727, + "learning_rate": 0.00021797335696701077, + "loss": 4.4688, + "step": 6018 + }, + { + "epoch": 0.58779296875, + "grad_norm": 0.1771831512451172, + "learning_rate": 0.0002179055918403221, + "loss": 4.4805, + "step": 6019 + }, + { + "epoch": 0.587890625, + "grad_norm": 0.17338082194328308, + "learning_rate": 0.0002178378322485489, + "loss": 4.4531, + "step": 6020 + }, + { + "epoch": 0.58798828125, + "grad_norm": 0.17704331874847412, + "learning_rate": 0.0002177700781982599, + "loss": 4.4102, + "step": 6021 + }, + { + "epoch": 0.5880859375, + "grad_norm": 0.17447318136692047, + "learning_rate": 
0.00021770232969602331, + "loss": 4.4648, + "step": 6022 + }, + { + "epoch": 0.58818359375, + "grad_norm": 0.15844739973545074, + "learning_rate": 0.00021763458674840707, + "loss": 4.4297, + "step": 6023 + }, + { + "epoch": 0.58828125, + "grad_norm": 0.17200222611427307, + "learning_rate": 0.0002175668493619783, + "loss": 4.5, + "step": 6024 + }, + { + "epoch": 0.58837890625, + "grad_norm": 0.16760316491127014, + "learning_rate": 0.00021749911754330377, + "loss": 4.4688, + "step": 6025 + }, + { + "epoch": 0.5884765625, + "grad_norm": 0.16436642408370972, + "learning_rate": 0.0002174313912989494, + "loss": 4.4727, + "step": 6026 + }, + { + "epoch": 0.58857421875, + "grad_norm": 0.16465498507022858, + "learning_rate": 0.0002173636706354809, + "loss": 4.4688, + "step": 6027 + }, + { + "epoch": 0.588671875, + "grad_norm": 0.17990742623806, + "learning_rate": 0.0002172959555594634, + "loss": 4.4922, + "step": 6028 + }, + { + "epoch": 0.58876953125, + "grad_norm": 0.16385574638843536, + "learning_rate": 0.0002172282460774614, + "loss": 4.5156, + "step": 6029 + }, + { + "epoch": 0.5888671875, + "grad_norm": 0.1739395558834076, + "learning_rate": 0.0002171605421960388, + "loss": 4.4531, + "step": 6030 + }, + { + "epoch": 0.58896484375, + "grad_norm": 0.16644105315208435, + "learning_rate": 0.0002170928439217591, + "loss": 4.4609, + "step": 6031 + }, + { + "epoch": 0.5890625, + "grad_norm": 0.18316331505775452, + "learning_rate": 0.0002170251512611851, + "loss": 4.457, + "step": 6032 + }, + { + "epoch": 0.58916015625, + "grad_norm": 0.1701793372631073, + "learning_rate": 0.0002169574642208793, + "loss": 4.4805, + "step": 6033 + }, + { + "epoch": 0.5892578125, + "grad_norm": 0.16374225914478302, + "learning_rate": 0.00021688978280740346, + "loss": 4.4844, + "step": 6034 + }, + { + "epoch": 0.58935546875, + "grad_norm": 0.17357562482357025, + "learning_rate": 0.00021682210702731868, + "loss": 4.4805, + "step": 6035 + }, + { + "epoch": 0.589453125, + "grad_norm": 0.1941138505935669, + "learning_rate": 0.00021675443688718582, + "loss": 4.4414, + "step": 6036 + }, + { + "epoch": 0.58955078125, + "grad_norm": 0.16435092687606812, + "learning_rate": 0.00021668677239356494, + "loss": 4.4688, + "step": 6037 + }, + { + "epoch": 0.5896484375, + "grad_norm": 0.16798441112041473, + "learning_rate": 0.0002166191135530157, + "loss": 4.4883, + "step": 6038 + }, + { + "epoch": 0.58974609375, + "grad_norm": 0.19235841929912567, + "learning_rate": 0.00021655146037209715, + "loss": 4.4492, + "step": 6039 + }, + { + "epoch": 0.58984375, + "grad_norm": 0.1658581793308258, + "learning_rate": 0.0002164838128573679, + "loss": 4.4453, + "step": 6040 + }, + { + "epoch": 0.58994140625, + "grad_norm": 0.18853452801704407, + "learning_rate": 0.0002164161710153858, + "loss": 4.4531, + "step": 6041 + }, + { + "epoch": 0.5900390625, + "grad_norm": 0.1757703721523285, + "learning_rate": 0.00021634853485270834, + "loss": 4.4375, + "step": 6042 + }, + { + "epoch": 0.59013671875, + "grad_norm": 0.17950578033924103, + "learning_rate": 0.00021628090437589233, + "loss": 4.4492, + "step": 6043 + }, + { + "epoch": 0.590234375, + "grad_norm": 0.17119823396205902, + "learning_rate": 0.00021621327959149417, + "loss": 4.4805, + "step": 6044 + }, + { + "epoch": 0.59033203125, + "grad_norm": 0.1847761869430542, + "learning_rate": 0.00021614566050606954, + "loss": 4.4414, + "step": 6045 + }, + { + "epoch": 0.5904296875, + "grad_norm": 0.17088237404823303, + "learning_rate": 0.00021607804712617362, + "loss": 4.4766, + "step": 6046 + }, + { + 
"epoch": 0.59052734375, + "grad_norm": 0.16939900815486908, + "learning_rate": 0.00021601043945836113, + "loss": 4.4375, + "step": 6047 + }, + { + "epoch": 0.590625, + "grad_norm": 0.18712075054645538, + "learning_rate": 0.00021594283750918624, + "loss": 4.418, + "step": 6048 + }, + { + "epoch": 0.59072265625, + "grad_norm": 0.188264861702919, + "learning_rate": 0.00021587524128520235, + "loss": 4.4961, + "step": 6049 + }, + { + "epoch": 0.5908203125, + "grad_norm": 0.16464588046073914, + "learning_rate": 0.00021580765079296267, + "loss": 4.4922, + "step": 6050 + }, + { + "epoch": 0.59091796875, + "grad_norm": 0.1868586391210556, + "learning_rate": 0.00021574006603901935, + "loss": 4.5234, + "step": 6051 + }, + { + "epoch": 0.591015625, + "grad_norm": 0.18516358733177185, + "learning_rate": 0.00021567248702992453, + "loss": 4.4727, + "step": 6052 + }, + { + "epoch": 0.59111328125, + "grad_norm": 0.16441181302070618, + "learning_rate": 0.00021560491377222934, + "loss": 4.457, + "step": 6053 + }, + { + "epoch": 0.5912109375, + "grad_norm": 0.18200244009494781, + "learning_rate": 0.00021553734627248477, + "loss": 4.4609, + "step": 6054 + }, + { + "epoch": 0.59130859375, + "grad_norm": 0.17161308228969574, + "learning_rate": 0.00021546978453724087, + "loss": 4.4883, + "step": 6055 + }, + { + "epoch": 0.59140625, + "grad_norm": 0.17162133753299713, + "learning_rate": 0.00021540222857304724, + "loss": 4.4883, + "step": 6056 + }, + { + "epoch": 0.59150390625, + "grad_norm": 0.16235749423503876, + "learning_rate": 0.00021533467838645315, + "loss": 4.4414, + "step": 6057 + }, + { + "epoch": 0.5916015625, + "grad_norm": 0.16741082072257996, + "learning_rate": 0.000215267133984007, + "loss": 4.5039, + "step": 6058 + }, + { + "epoch": 0.59169921875, + "grad_norm": 0.1628093421459198, + "learning_rate": 0.0002151995953722568, + "loss": 4.4336, + "step": 6059 + }, + { + "epoch": 0.591796875, + "grad_norm": 0.16798509657382965, + "learning_rate": 0.00021513206255774998, + "loss": 4.5078, + "step": 6060 + }, + { + "epoch": 0.59189453125, + "grad_norm": 0.17066973447799683, + "learning_rate": 0.00021506453554703337, + "loss": 4.4453, + "step": 6061 + }, + { + "epoch": 0.5919921875, + "grad_norm": 0.16875766217708588, + "learning_rate": 0.0002149970143466532, + "loss": 4.4844, + "step": 6062 + }, + { + "epoch": 0.59208984375, + "grad_norm": 0.164557084441185, + "learning_rate": 0.0002149294989631553, + "loss": 4.4531, + "step": 6063 + }, + { + "epoch": 0.5921875, + "grad_norm": 0.17001132667064667, + "learning_rate": 0.00021486198940308483, + "loss": 4.5, + "step": 6064 + }, + { + "epoch": 0.59228515625, + "grad_norm": 0.16033408045768738, + "learning_rate": 0.00021479448567298626, + "loss": 4.4727, + "step": 6065 + }, + { + "epoch": 0.5923828125, + "grad_norm": 0.17154373228549957, + "learning_rate": 0.0002147269877794037, + "loss": 4.4492, + "step": 6066 + }, + { + "epoch": 0.59248046875, + "grad_norm": 0.16112470626831055, + "learning_rate": 0.00021465949572888065, + "loss": 4.4766, + "step": 6067 + }, + { + "epoch": 0.592578125, + "grad_norm": 0.16175147891044617, + "learning_rate": 0.00021459200952795988, + "loss": 4.4922, + "step": 6068 + }, + { + "epoch": 0.59267578125, + "grad_norm": 0.1662796288728714, + "learning_rate": 0.00021452452918318383, + "loss": 4.4844, + "step": 6069 + }, + { + "epoch": 0.5927734375, + "grad_norm": 0.1834796518087387, + "learning_rate": 0.00021445705470109417, + "loss": 4.4297, + "step": 6070 + }, + { + "epoch": 0.59287109375, + "grad_norm": 0.18102869391441345, + 
"learning_rate": 0.00021438958608823217, + "loss": 4.4531, + "step": 6071 + }, + { + "epoch": 0.59296875, + "grad_norm": 0.15888193249702454, + "learning_rate": 0.0002143221233511385, + "loss": 4.4922, + "step": 6072 + }, + { + "epoch": 0.59306640625, + "grad_norm": 0.18861067295074463, + "learning_rate": 0.0002142546664963531, + "loss": 4.457, + "step": 6073 + }, + { + "epoch": 0.5931640625, + "grad_norm": 0.19542868435382843, + "learning_rate": 0.00021418721553041552, + "loss": 4.4375, + "step": 6074 + }, + { + "epoch": 0.59326171875, + "grad_norm": 0.15801483392715454, + "learning_rate": 0.00021411977045986464, + "loss": 4.4844, + "step": 6075 + }, + { + "epoch": 0.593359375, + "grad_norm": 0.18958023190498352, + "learning_rate": 0.0002140523312912389, + "loss": 4.457, + "step": 6076 + }, + { + "epoch": 0.59345703125, + "grad_norm": 0.1691666841506958, + "learning_rate": 0.0002139848980310759, + "loss": 4.4648, + "step": 6077 + }, + { + "epoch": 0.5935546875, + "grad_norm": 0.1624249815940857, + "learning_rate": 0.00021391747068591295, + "loss": 4.457, + "step": 6078 + }, + { + "epoch": 0.59365234375, + "grad_norm": 0.17147798836231232, + "learning_rate": 0.00021385004926228663, + "loss": 4.4727, + "step": 6079 + }, + { + "epoch": 0.59375, + "grad_norm": 0.1731937676668167, + "learning_rate": 0.000213782633766733, + "loss": 4.4297, + "step": 6080 + }, + { + "epoch": 0.59384765625, + "grad_norm": 0.16151471436023712, + "learning_rate": 0.00021371522420578754, + "loss": 4.457, + "step": 6081 + }, + { + "epoch": 0.5939453125, + "grad_norm": 0.16301268339157104, + "learning_rate": 0.0002136478205859852, + "loss": 4.4688, + "step": 6082 + }, + { + "epoch": 0.59404296875, + "grad_norm": 0.16609904170036316, + "learning_rate": 0.0002135804229138602, + "loss": 4.4453, + "step": 6083 + }, + { + "epoch": 0.594140625, + "grad_norm": 0.15802131593227386, + "learning_rate": 0.00021351303119594646, + "loss": 4.4688, + "step": 6084 + }, + { + "epoch": 0.59423828125, + "grad_norm": 0.16936029493808746, + "learning_rate": 0.00021344564543877698, + "loss": 4.4414, + "step": 6085 + }, + { + "epoch": 0.5943359375, + "grad_norm": 0.1725950837135315, + "learning_rate": 0.00021337826564888436, + "loss": 4.4727, + "step": 6086 + }, + { + "epoch": 0.59443359375, + "grad_norm": 0.17209291458129883, + "learning_rate": 0.00021331089183280062, + "loss": 4.4453, + "step": 6087 + }, + { + "epoch": 0.59453125, + "grad_norm": 0.16176651418209076, + "learning_rate": 0.0002132435239970573, + "loss": 4.4961, + "step": 6088 + }, + { + "epoch": 0.59462890625, + "grad_norm": 0.16319221258163452, + "learning_rate": 0.00021317616214818513, + "loss": 4.4805, + "step": 6089 + }, + { + "epoch": 0.5947265625, + "grad_norm": 0.16560330986976624, + "learning_rate": 0.00021310880629271445, + "loss": 4.4453, + "step": 6090 + }, + { + "epoch": 0.59482421875, + "grad_norm": 0.15733397006988525, + "learning_rate": 0.000213041456437175, + "loss": 4.4609, + "step": 6091 + }, + { + "epoch": 0.594921875, + "grad_norm": 0.1577156037092209, + "learning_rate": 0.0002129741125880959, + "loss": 4.4688, + "step": 6092 + }, + { + "epoch": 0.59501953125, + "grad_norm": 0.1718216836452484, + "learning_rate": 0.00021290677475200548, + "loss": 4.4492, + "step": 6093 + }, + { + "epoch": 0.5951171875, + "grad_norm": 0.1655651330947876, + "learning_rate": 0.00021283944293543184, + "loss": 4.4648, + "step": 6094 + }, + { + "epoch": 0.59521484375, + "grad_norm": 0.17209173738956451, + "learning_rate": 0.00021277211714490236, + "loss": 4.4531, + "step": 6095 
+ }, + { + "epoch": 0.5953125, + "grad_norm": 0.1657503843307495, + "learning_rate": 0.00021270479738694375, + "loss": 4.4688, + "step": 6096 + }, + { + "epoch": 0.59541015625, + "grad_norm": 0.1672469824552536, + "learning_rate": 0.00021263748366808223, + "loss": 4.4727, + "step": 6097 + }, + { + "epoch": 0.5955078125, + "grad_norm": 0.1616370677947998, + "learning_rate": 0.0002125701759948434, + "loss": 4.4492, + "step": 6098 + }, + { + "epoch": 0.59560546875, + "grad_norm": 0.16275089979171753, + "learning_rate": 0.00021250287437375232, + "loss": 4.4531, + "step": 6099 + }, + { + "epoch": 0.595703125, + "grad_norm": 0.1699889749288559, + "learning_rate": 0.0002124355788113334, + "loss": 4.4766, + "step": 6100 + }, + { + "epoch": 0.59580078125, + "grad_norm": 0.16710476577281952, + "learning_rate": 0.00021236828931411056, + "loss": 4.4727, + "step": 6101 + }, + { + "epoch": 0.5958984375, + "grad_norm": 0.1748429834842682, + "learning_rate": 0.00021230100588860696, + "loss": 4.4609, + "step": 6102 + }, + { + "epoch": 0.59599609375, + "grad_norm": 0.16576264798641205, + "learning_rate": 0.0002122337285413452, + "loss": 4.4688, + "step": 6103 + }, + { + "epoch": 0.59609375, + "grad_norm": 0.1652030199766159, + "learning_rate": 0.00021216645727884753, + "loss": 4.4297, + "step": 6104 + }, + { + "epoch": 0.59619140625, + "grad_norm": 0.16703729331493378, + "learning_rate": 0.00021209919210763534, + "loss": 4.4297, + "step": 6105 + }, + { + "epoch": 0.5962890625, + "grad_norm": 0.16275185346603394, + "learning_rate": 0.00021203193303422958, + "loss": 4.4531, + "step": 6106 + }, + { + "epoch": 0.59638671875, + "grad_norm": 0.18402735888957977, + "learning_rate": 0.00021196468006515057, + "loss": 4.4688, + "step": 6107 + }, + { + "epoch": 0.596484375, + "grad_norm": 0.16889944672584534, + "learning_rate": 0.00021189743320691796, + "loss": 4.4727, + "step": 6108 + }, + { + "epoch": 0.59658203125, + "grad_norm": 0.1634773463010788, + "learning_rate": 0.00021183019246605105, + "loss": 4.4609, + "step": 6109 + }, + { + "epoch": 0.5966796875, + "grad_norm": 0.1665189564228058, + "learning_rate": 0.00021176295784906818, + "loss": 4.4336, + "step": 6110 + }, + { + "epoch": 0.59677734375, + "grad_norm": 0.17617984116077423, + "learning_rate": 0.00021169572936248726, + "loss": 4.4766, + "step": 6111 + }, + { + "epoch": 0.596875, + "grad_norm": 0.17939960956573486, + "learning_rate": 0.00021162850701282582, + "loss": 4.4648, + "step": 6112 + }, + { + "epoch": 0.59697265625, + "grad_norm": 0.15935669839382172, + "learning_rate": 0.00021156129080660048, + "loss": 4.4492, + "step": 6113 + }, + { + "epoch": 0.5970703125, + "grad_norm": 0.1710626184940338, + "learning_rate": 0.00021149408075032744, + "loss": 4.4766, + "step": 6114 + }, + { + "epoch": 0.59716796875, + "grad_norm": 0.1645573079586029, + "learning_rate": 0.0002114268768505223, + "loss": 4.4961, + "step": 6115 + }, + { + "epoch": 0.597265625, + "grad_norm": 0.17194676399230957, + "learning_rate": 0.00021135967911369992, + "loss": 4.4922, + "step": 6116 + }, + { + "epoch": 0.59736328125, + "grad_norm": 0.15555021166801453, + "learning_rate": 0.00021129248754637477, + "loss": 4.4297, + "step": 6117 + }, + { + "epoch": 0.5974609375, + "grad_norm": 0.1812765747308731, + "learning_rate": 0.00021122530215506057, + "loss": 4.4492, + "step": 6118 + }, + { + "epoch": 0.59755859375, + "grad_norm": 0.1821271926164627, + "learning_rate": 0.00021115812294627051, + "loss": 4.4492, + "step": 6119 + }, + { + "epoch": 0.59765625, + "grad_norm": 
0.16375309228897095, + "learning_rate": 0.00021109094992651707, + "loss": 4.4805, + "step": 6120 + }, + { + "epoch": 0.59775390625, + "grad_norm": 0.1805366724729538, + "learning_rate": 0.0002110237831023123, + "loss": 4.4727, + "step": 6121 + }, + { + "epoch": 0.5978515625, + "grad_norm": 0.17317835986614227, + "learning_rate": 0.0002109566224801675, + "loss": 4.4414, + "step": 6122 + }, + { + "epoch": 0.59794921875, + "grad_norm": 0.16137301921844482, + "learning_rate": 0.00021088946806659353, + "loss": 4.457, + "step": 6123 + }, + { + "epoch": 0.598046875, + "grad_norm": 0.17272189259529114, + "learning_rate": 0.0002108223198681005, + "loss": 4.4648, + "step": 6124 + }, + { + "epoch": 0.59814453125, + "grad_norm": 0.16762514412403107, + "learning_rate": 0.00021075517789119803, + "loss": 4.4688, + "step": 6125 + }, + { + "epoch": 0.5982421875, + "grad_norm": 0.1663873940706253, + "learning_rate": 0.00021068804214239507, + "loss": 4.4805, + "step": 6126 + }, + { + "epoch": 0.59833984375, + "grad_norm": 0.15348415076732635, + "learning_rate": 0.00021062091262819989, + "loss": 4.457, + "step": 6127 + }, + { + "epoch": 0.5984375, + "grad_norm": 0.16662238538265228, + "learning_rate": 0.00021055378935512026, + "loss": 4.4492, + "step": 6128 + }, + { + "epoch": 0.59853515625, + "grad_norm": 0.1586202085018158, + "learning_rate": 0.0002104866723296634, + "loss": 4.5, + "step": 6129 + }, + { + "epoch": 0.5986328125, + "grad_norm": 0.16838586330413818, + "learning_rate": 0.00021041956155833574, + "loss": 4.457, + "step": 6130 + }, + { + "epoch": 0.59873046875, + "grad_norm": 0.16724559664726257, + "learning_rate": 0.00021035245704764327, + "loss": 4.4688, + "step": 6131 + }, + { + "epoch": 0.598828125, + "grad_norm": 0.16583766043186188, + "learning_rate": 0.00021028535880409133, + "loss": 4.457, + "step": 6132 + }, + { + "epoch": 0.59892578125, + "grad_norm": 0.16642655432224274, + "learning_rate": 0.0002102182668341847, + "loss": 4.4531, + "step": 6133 + }, + { + "epoch": 0.5990234375, + "grad_norm": 0.17084574699401855, + "learning_rate": 0.0002101511811444274, + "loss": 4.4883, + "step": 6134 + }, + { + "epoch": 0.59912109375, + "grad_norm": 0.17458689212799072, + "learning_rate": 0.00021008410174132302, + "loss": 4.4648, + "step": 6135 + }, + { + "epoch": 0.59921875, + "grad_norm": 0.16326703131198883, + "learning_rate": 0.00021001702863137435, + "loss": 4.4453, + "step": 6136 + }, + { + "epoch": 0.59931640625, + "grad_norm": 0.1634257435798645, + "learning_rate": 0.00020994996182108363, + "loss": 4.457, + "step": 6137 + }, + { + "epoch": 0.5994140625, + "grad_norm": 0.18135221302509308, + "learning_rate": 0.00020988290131695265, + "loss": 4.4727, + "step": 6138 + }, + { + "epoch": 0.59951171875, + "grad_norm": 0.16986924409866333, + "learning_rate": 0.00020981584712548247, + "loss": 4.4492, + "step": 6139 + }, + { + "epoch": 0.599609375, + "grad_norm": 0.17081202566623688, + "learning_rate": 0.00020974879925317346, + "loss": 4.4766, + "step": 6140 + }, + { + "epoch": 0.59970703125, + "grad_norm": 0.1596297174692154, + "learning_rate": 0.00020968175770652553, + "loss": 4.4453, + "step": 6141 + }, + { + "epoch": 0.5998046875, + "grad_norm": 0.1847783327102661, + "learning_rate": 0.00020961472249203788, + "loss": 4.4844, + "step": 6142 + }, + { + "epoch": 0.59990234375, + "grad_norm": 0.16805490851402283, + "learning_rate": 0.00020954769361620918, + "loss": 4.4883, + "step": 6143 + }, + { + "epoch": 0.6, + "grad_norm": 0.17612192034721375, + "learning_rate": 0.0002094806710855373, + "loss": 
4.4648, + "step": 6144 + }, + { + "epoch": 0.60009765625, + "grad_norm": 0.16691969335079193, + "learning_rate": 0.00020941365490651965, + "loss": 4.4805, + "step": 6145 + }, + { + "epoch": 0.6001953125, + "grad_norm": 0.180605947971344, + "learning_rate": 0.00020934664508565304, + "loss": 4.457, + "step": 6146 + }, + { + "epoch": 0.60029296875, + "grad_norm": 0.17183630168437958, + "learning_rate": 0.00020927964162943357, + "loss": 4.4609, + "step": 6147 + }, + { + "epoch": 0.600390625, + "grad_norm": 0.17107990384101868, + "learning_rate": 0.00020921264454435684, + "loss": 4.457, + "step": 6148 + }, + { + "epoch": 0.60048828125, + "grad_norm": 0.16995064914226532, + "learning_rate": 0.0002091456538369177, + "loss": 4.4414, + "step": 6149 + }, + { + "epoch": 0.6005859375, + "grad_norm": 0.16711072623729706, + "learning_rate": 0.00020907866951361042, + "loss": 4.5156, + "step": 6150 + }, + { + "epoch": 0.60068359375, + "grad_norm": 0.15726974606513977, + "learning_rate": 0.00020901169158092886, + "loss": 4.4688, + "step": 6151 + }, + { + "epoch": 0.60078125, + "grad_norm": 0.16412372887134552, + "learning_rate": 0.00020894472004536586, + "loss": 4.4375, + "step": 6152 + }, + { + "epoch": 0.60087890625, + "grad_norm": 0.16840389370918274, + "learning_rate": 0.00020887775491341393, + "loss": 4.4805, + "step": 6153 + }, + { + "epoch": 0.6009765625, + "grad_norm": 0.15960060060024261, + "learning_rate": 0.00020881079619156486, + "loss": 4.4414, + "step": 6154 + }, + { + "epoch": 0.60107421875, + "grad_norm": 0.15825201570987701, + "learning_rate": 0.0002087438438863099, + "loss": 4.4492, + "step": 6155 + }, + { + "epoch": 0.601171875, + "grad_norm": 0.17074979841709137, + "learning_rate": 0.0002086768980041396, + "loss": 4.4727, + "step": 6156 + }, + { + "epoch": 0.60126953125, + "grad_norm": 0.18095234036445618, + "learning_rate": 0.00020860995855154387, + "loss": 4.4883, + "step": 6157 + }, + { + "epoch": 0.6013671875, + "grad_norm": 0.17452101409435272, + "learning_rate": 0.00020854302553501204, + "loss": 4.4531, + "step": 6158 + }, + { + "epoch": 0.60146484375, + "grad_norm": 0.1735273003578186, + "learning_rate": 0.00020847609896103286, + "loss": 4.4688, + "step": 6159 + }, + { + "epoch": 0.6015625, + "grad_norm": 0.17879250645637512, + "learning_rate": 0.00020840917883609456, + "loss": 4.4258, + "step": 6160 + }, + { + "epoch": 0.60166015625, + "grad_norm": 0.18994781374931335, + "learning_rate": 0.00020834226516668424, + "loss": 4.4531, + "step": 6161 + }, + { + "epoch": 0.6017578125, + "grad_norm": 0.17196448147296906, + "learning_rate": 0.00020827535795928893, + "loss": 4.4336, + "step": 6162 + }, + { + "epoch": 0.60185546875, + "grad_norm": 0.16320551931858063, + "learning_rate": 0.00020820845722039477, + "loss": 4.4375, + "step": 6163 + }, + { + "epoch": 0.601953125, + "grad_norm": 0.1928258240222931, + "learning_rate": 0.00020814156295648746, + "loss": 4.4375, + "step": 6164 + }, + { + "epoch": 0.60205078125, + "grad_norm": 0.17017067968845367, + "learning_rate": 0.00020807467517405172, + "loss": 4.4648, + "step": 6165 + }, + { + "epoch": 0.6021484375, + "grad_norm": 0.1610180139541626, + "learning_rate": 0.0002080077938795721, + "loss": 4.4648, + "step": 6166 + }, + { + "epoch": 0.60224609375, + "grad_norm": 0.18825265765190125, + "learning_rate": 0.00020794091907953217, + "loss": 4.4648, + "step": 6167 + }, + { + "epoch": 0.60234375, + "grad_norm": 0.17876577377319336, + "learning_rate": 0.00020787405078041504, + "loss": 4.4258, + "step": 6168 + }, + { + "epoch": 0.60244140625, + 
"grad_norm": 0.16607734560966492, + "learning_rate": 0.00020780718898870303, + "loss": 4.4531, + "step": 6169 + }, + { + "epoch": 0.6025390625, + "grad_norm": 0.18053480982780457, + "learning_rate": 0.0002077403337108781, + "loss": 4.4648, + "step": 6170 + }, + { + "epoch": 0.60263671875, + "grad_norm": 0.18132391571998596, + "learning_rate": 0.00020767348495342119, + "loss": 4.4297, + "step": 6171 + }, + { + "epoch": 0.602734375, + "grad_norm": 0.1560417264699936, + "learning_rate": 0.00020760664272281303, + "loss": 4.4648, + "step": 6172 + }, + { + "epoch": 0.60283203125, + "grad_norm": 0.173537477850914, + "learning_rate": 0.0002075398070255335, + "loss": 4.5, + "step": 6173 + }, + { + "epoch": 0.6029296875, + "grad_norm": 0.18042334914207458, + "learning_rate": 0.00020747297786806175, + "loss": 4.4414, + "step": 6174 + }, + { + "epoch": 0.60302734375, + "grad_norm": 0.16864252090454102, + "learning_rate": 0.00020740615525687657, + "loss": 4.457, + "step": 6175 + }, + { + "epoch": 0.603125, + "grad_norm": 0.16340425610542297, + "learning_rate": 0.00020733933919845577, + "loss": 4.4453, + "step": 6176 + }, + { + "epoch": 0.60322265625, + "grad_norm": 0.1681157946586609, + "learning_rate": 0.00020727252969927694, + "loss": 4.4336, + "step": 6177 + }, + { + "epoch": 0.6033203125, + "grad_norm": 0.15953974425792694, + "learning_rate": 0.00020720572676581668, + "loss": 4.5, + "step": 6178 + }, + { + "epoch": 0.60341796875, + "grad_norm": 0.16766898334026337, + "learning_rate": 0.000207138930404551, + "loss": 4.4609, + "step": 6179 + }, + { + "epoch": 0.603515625, + "grad_norm": 0.1672762930393219, + "learning_rate": 0.0002070721406219555, + "loss": 4.4492, + "step": 6180 + }, + { + "epoch": 0.60361328125, + "grad_norm": 0.16619235277175903, + "learning_rate": 0.00020700535742450494, + "loss": 4.5078, + "step": 6181 + }, + { + "epoch": 0.6037109375, + "grad_norm": 0.1564810425043106, + "learning_rate": 0.00020693858081867345, + "loss": 4.4805, + "step": 6182 + }, + { + "epoch": 0.60380859375, + "grad_norm": 0.17707891762256622, + "learning_rate": 0.00020687181081093463, + "loss": 4.4648, + "step": 6183 + }, + { + "epoch": 0.60390625, + "grad_norm": 0.1705404371023178, + "learning_rate": 0.0002068050474077614, + "loss": 4.4414, + "step": 6184 + }, + { + "epoch": 0.60400390625, + "grad_norm": 0.16286535561084747, + "learning_rate": 0.00020673829061562606, + "loss": 4.4414, + "step": 6185 + }, + { + "epoch": 0.6041015625, + "grad_norm": 0.1684788465499878, + "learning_rate": 0.00020667154044100002, + "loss": 4.4375, + "step": 6186 + }, + { + "epoch": 0.60419921875, + "grad_norm": 0.1693364530801773, + "learning_rate": 0.0002066047968903544, + "loss": 4.4297, + "step": 6187 + }, + { + "epoch": 0.604296875, + "grad_norm": 0.181385338306427, + "learning_rate": 0.00020653805997015951, + "loss": 4.4727, + "step": 6188 + }, + { + "epoch": 0.60439453125, + "grad_norm": 0.16511517763137817, + "learning_rate": 0.00020647132968688514, + "loss": 4.4766, + "step": 6189 + }, + { + "epoch": 0.6044921875, + "grad_norm": 0.16661076247692108, + "learning_rate": 0.00020640460604700017, + "loss": 4.4844, + "step": 6190 + }, + { + "epoch": 0.60458984375, + "grad_norm": 0.17398701608181, + "learning_rate": 0.0002063378890569731, + "loss": 4.5117, + "step": 6191 + }, + { + "epoch": 0.6046875, + "grad_norm": 0.16927535831928253, + "learning_rate": 0.00020627117872327173, + "loss": 4.4648, + "step": 6192 + }, + { + "epoch": 0.60478515625, + "grad_norm": 0.17010191082954407, + "learning_rate": 0.00020620447505236307, + 
"loss": 4.4922, + "step": 6193 + }, + { + "epoch": 0.6048828125, + "grad_norm": 0.16355521976947784, + "learning_rate": 0.00020613777805071365, + "loss": 4.4688, + "step": 6194 + }, + { + "epoch": 0.60498046875, + "grad_norm": 0.17101524770259857, + "learning_rate": 0.00020607108772478926, + "loss": 4.4727, + "step": 6195 + }, + { + "epoch": 0.605078125, + "grad_norm": 0.16197606921195984, + "learning_rate": 0.00020600440408105513, + "loss": 4.4375, + "step": 6196 + }, + { + "epoch": 0.60517578125, + "grad_norm": 0.17348071932792664, + "learning_rate": 0.00020593772712597575, + "loss": 4.4336, + "step": 6197 + }, + { + "epoch": 0.6052734375, + "grad_norm": 0.19841420650482178, + "learning_rate": 0.00020587105686601493, + "loss": 4.457, + "step": 6198 + }, + { + "epoch": 0.60537109375, + "grad_norm": 0.18421894311904907, + "learning_rate": 0.00020580439330763605, + "loss": 4.4688, + "step": 6199 + }, + { + "epoch": 0.60546875, + "grad_norm": 0.17881663143634796, + "learning_rate": 0.00020573773645730158, + "loss": 4.4609, + "step": 6200 + }, + { + "epoch": 0.60556640625, + "grad_norm": 0.1832701712846756, + "learning_rate": 0.00020567108632147348, + "loss": 4.457, + "step": 6201 + }, + { + "epoch": 0.6056640625, + "grad_norm": 0.17962871491909027, + "learning_rate": 0.00020560444290661302, + "loss": 4.4727, + "step": 6202 + }, + { + "epoch": 0.60576171875, + "grad_norm": 0.1810973733663559, + "learning_rate": 0.00020553780621918085, + "loss": 4.4453, + "step": 6203 + }, + { + "epoch": 0.605859375, + "grad_norm": 0.19277611374855042, + "learning_rate": 0.0002054711762656369, + "loss": 4.4883, + "step": 6204 + }, + { + "epoch": 0.60595703125, + "grad_norm": 0.17301614582538605, + "learning_rate": 0.00020540455305244045, + "loss": 4.457, + "step": 6205 + }, + { + "epoch": 0.6060546875, + "grad_norm": 0.16824309527873993, + "learning_rate": 0.00020533793658605032, + "loss": 4.4688, + "step": 6206 + }, + { + "epoch": 0.60615234375, + "grad_norm": 0.17464327812194824, + "learning_rate": 0.00020527132687292443, + "loss": 4.4609, + "step": 6207 + }, + { + "epoch": 0.60625, + "grad_norm": 0.17454014718532562, + "learning_rate": 0.0002052047239195201, + "loss": 4.4883, + "step": 6208 + }, + { + "epoch": 0.60634765625, + "grad_norm": 0.16032913327217102, + "learning_rate": 0.00020513812773229417, + "loss": 4.4453, + "step": 6209 + }, + { + "epoch": 0.6064453125, + "grad_norm": 0.17497821152210236, + "learning_rate": 0.0002050715383177026, + "loss": 4.4531, + "step": 6210 + }, + { + "epoch": 0.60654296875, + "grad_norm": 0.16876186430454254, + "learning_rate": 0.00020500495568220073, + "loss": 4.4727, + "step": 6211 + }, + { + "epoch": 0.606640625, + "grad_norm": 0.16404227912425995, + "learning_rate": 0.00020493837983224334, + "loss": 4.4336, + "step": 6212 + }, + { + "epoch": 0.60673828125, + "grad_norm": 0.1988711655139923, + "learning_rate": 0.0002048718107742845, + "loss": 4.4766, + "step": 6213 + }, + { + "epoch": 0.6068359375, + "grad_norm": 0.17882251739501953, + "learning_rate": 0.00020480524851477773, + "loss": 4.4375, + "step": 6214 + }, + { + "epoch": 0.60693359375, + "grad_norm": 0.1823958158493042, + "learning_rate": 0.00020473869306017562, + "loss": 4.4883, + "step": 6215 + }, + { + "epoch": 0.60703125, + "grad_norm": 0.20239198207855225, + "learning_rate": 0.00020467214441693038, + "loss": 4.4492, + "step": 6216 + }, + { + "epoch": 0.60712890625, + "grad_norm": 0.1717233657836914, + "learning_rate": 0.00020460560259149342, + "loss": 4.4727, + "step": 6217 + }, + { + "epoch": 
0.6072265625, + "grad_norm": 0.18138280510902405, + "learning_rate": 0.00020453906759031553, + "loss": 4.4648, + "step": 6218 + }, + { + "epoch": 0.60732421875, + "grad_norm": 0.19760681688785553, + "learning_rate": 0.0002044725394198469, + "loss": 4.4531, + "step": 6219 + }, + { + "epoch": 0.607421875, + "grad_norm": 0.17155314981937408, + "learning_rate": 0.0002044060180865368, + "loss": 4.4141, + "step": 6220 + }, + { + "epoch": 0.60751953125, + "grad_norm": 0.20171360671520233, + "learning_rate": 0.00020433950359683412, + "loss": 4.457, + "step": 6221 + }, + { + "epoch": 0.6076171875, + "grad_norm": 0.16709370911121368, + "learning_rate": 0.000204272995957187, + "loss": 4.4727, + "step": 6222 + }, + { + "epoch": 0.60771484375, + "grad_norm": 0.1762688159942627, + "learning_rate": 0.00020420649517404293, + "loss": 4.4805, + "step": 6223 + }, + { + "epoch": 0.6078125, + "grad_norm": 0.19373264908790588, + "learning_rate": 0.00020414000125384862, + "loss": 4.4375, + "step": 6224 + }, + { + "epoch": 0.60791015625, + "grad_norm": 0.1611872762441635, + "learning_rate": 0.00020407351420305032, + "loss": 4.4648, + "step": 6225 + }, + { + "epoch": 0.6080078125, + "grad_norm": 0.1941380500793457, + "learning_rate": 0.00020400703402809339, + "loss": 4.457, + "step": 6226 + }, + { + "epoch": 0.60810546875, + "grad_norm": 0.1693430095911026, + "learning_rate": 0.00020394056073542283, + "loss": 4.4648, + "step": 6227 + }, + { + "epoch": 0.608203125, + "grad_norm": 0.17890560626983643, + "learning_rate": 0.00020387409433148245, + "loss": 4.4648, + "step": 6228 + }, + { + "epoch": 0.60830078125, + "grad_norm": 0.17490576207637787, + "learning_rate": 0.0002038076348227159, + "loss": 4.457, + "step": 6229 + }, + { + "epoch": 0.6083984375, + "grad_norm": 0.16792932152748108, + "learning_rate": 0.00020374118221556603, + "loss": 4.418, + "step": 6230 + }, + { + "epoch": 0.60849609375, + "grad_norm": 0.18173307180404663, + "learning_rate": 0.00020367473651647489, + "loss": 4.4766, + "step": 6231 + }, + { + "epoch": 0.60859375, + "grad_norm": 0.16911771893501282, + "learning_rate": 0.000203608297731884, + "loss": 4.4414, + "step": 6232 + }, + { + "epoch": 0.60869140625, + "grad_norm": 0.1717779040336609, + "learning_rate": 0.00020354186586823403, + "loss": 4.4727, + "step": 6233 + }, + { + "epoch": 0.6087890625, + "grad_norm": 0.17259126901626587, + "learning_rate": 0.00020347544093196517, + "loss": 4.4727, + "step": 6234 + }, + { + "epoch": 0.60888671875, + "grad_norm": 0.17518708109855652, + "learning_rate": 0.00020340902292951697, + "loss": 4.457, + "step": 6235 + }, + { + "epoch": 0.608984375, + "grad_norm": 0.16222551465034485, + "learning_rate": 0.00020334261186732812, + "loss": 4.457, + "step": 6236 + }, + { + "epoch": 0.60908203125, + "grad_norm": 0.16353349387645721, + "learning_rate": 0.00020327620775183663, + "loss": 4.4453, + "step": 6237 + }, + { + "epoch": 0.6091796875, + "grad_norm": 0.16708695888519287, + "learning_rate": 0.00020320981058948002, + "loss": 4.4492, + "step": 6238 + }, + { + "epoch": 0.60927734375, + "grad_norm": 0.16716161370277405, + "learning_rate": 0.00020314342038669502, + "loss": 4.4727, + "step": 6239 + }, + { + "epoch": 0.609375, + "grad_norm": 0.17165876924991608, + "learning_rate": 0.0002030770371499177, + "loss": 4.4688, + "step": 6240 + }, + { + "epoch": 0.60947265625, + "grad_norm": 0.15469489991664886, + "learning_rate": 0.0002030106608855835, + "loss": 4.4492, + "step": 6241 + }, + { + "epoch": 0.6095703125, + "grad_norm": 0.16265402734279633, + "learning_rate": 
0.00020294429160012717, + "loss": 4.4805, + "step": 6242 + }, + { + "epoch": 0.60966796875, + "grad_norm": 0.16873861849308014, + "learning_rate": 0.0002028779292999827, + "loss": 4.4844, + "step": 6243 + }, + { + "epoch": 0.609765625, + "grad_norm": 0.16130714118480682, + "learning_rate": 0.00020281157399158363, + "loss": 4.4492, + "step": 6244 + }, + { + "epoch": 0.60986328125, + "grad_norm": 0.1696554571390152, + "learning_rate": 0.00020274522568136232, + "loss": 4.4531, + "step": 6245 + }, + { + "epoch": 0.6099609375, + "grad_norm": 0.1668081432580948, + "learning_rate": 0.00020267888437575104, + "loss": 4.4453, + "step": 6246 + }, + { + "epoch": 0.61005859375, + "grad_norm": 0.15825341641902924, + "learning_rate": 0.00020261255008118112, + "loss": 4.4766, + "step": 6247 + }, + { + "epoch": 0.61015625, + "grad_norm": 0.1709161251783371, + "learning_rate": 0.00020254622280408313, + "loss": 4.4609, + "step": 6248 + }, + { + "epoch": 0.61025390625, + "grad_norm": 0.18395133316516876, + "learning_rate": 0.00020247990255088712, + "loss": 4.4648, + "step": 6249 + }, + { + "epoch": 0.6103515625, + "grad_norm": 0.16986167430877686, + "learning_rate": 0.00020241358932802234, + "loss": 4.4414, + "step": 6250 + }, + { + "epoch": 0.61044921875, + "grad_norm": 0.16565008461475372, + "learning_rate": 0.0002023472831419174, + "loss": 4.4492, + "step": 6251 + }, + { + "epoch": 0.610546875, + "grad_norm": 0.17853176593780518, + "learning_rate": 0.0002022809839990003, + "loss": 4.4883, + "step": 6252 + }, + { + "epoch": 0.61064453125, + "grad_norm": 0.16454291343688965, + "learning_rate": 0.00020221469190569836, + "loss": 4.4688, + "step": 6253 + }, + { + "epoch": 0.6107421875, + "grad_norm": 0.17603155970573425, + "learning_rate": 0.0002021484068684379, + "loss": 4.4453, + "step": 6254 + }, + { + "epoch": 0.61083984375, + "grad_norm": 0.16967524588108063, + "learning_rate": 0.00020208212889364496, + "loss": 4.457, + "step": 6255 + }, + { + "epoch": 0.6109375, + "grad_norm": 0.17207075655460358, + "learning_rate": 0.0002020158579877448, + "loss": 4.4375, + "step": 6256 + }, + { + "epoch": 0.61103515625, + "grad_norm": 0.18356798589229584, + "learning_rate": 0.0002019495941571618, + "loss": 4.4297, + "step": 6257 + }, + { + "epoch": 0.6111328125, + "grad_norm": 0.16512830555438995, + "learning_rate": 0.00020188333740831988, + "loss": 4.4883, + "step": 6258 + }, + { + "epoch": 0.61123046875, + "grad_norm": 0.18678268790245056, + "learning_rate": 0.00020181708774764213, + "loss": 4.457, + "step": 6259 + }, + { + "epoch": 0.611328125, + "grad_norm": 0.17845956981182098, + "learning_rate": 0.00020175084518155107, + "loss": 4.4648, + "step": 6260 + }, + { + "epoch": 0.61142578125, + "grad_norm": 0.16446761786937714, + "learning_rate": 0.00020168460971646846, + "loss": 4.4375, + "step": 6261 + }, + { + "epoch": 0.6115234375, + "grad_norm": 0.1740410327911377, + "learning_rate": 0.00020161838135881527, + "loss": 4.4805, + "step": 6262 + }, + { + "epoch": 0.61162109375, + "grad_norm": 0.1738165020942688, + "learning_rate": 0.00020155216011501199, + "loss": 4.5, + "step": 6263 + }, + { + "epoch": 0.61171875, + "grad_norm": 0.16801945865154266, + "learning_rate": 0.0002014859459914783, + "loss": 4.4414, + "step": 6264 + }, + { + "epoch": 0.61181640625, + "grad_norm": 0.17998738586902618, + "learning_rate": 0.00020141973899463316, + "loss": 4.4727, + "step": 6265 + }, + { + "epoch": 0.6119140625, + "grad_norm": 0.16055531799793243, + "learning_rate": 0.000201353539130895, + "loss": 4.4727, + "step": 6266 + }, + { + 
"epoch": 0.61201171875, + "grad_norm": 0.18508942425251007, + "learning_rate": 0.00020128734640668128, + "loss": 4.4609, + "step": 6267 + }, + { + "epoch": 0.612109375, + "grad_norm": 0.1656854897737503, + "learning_rate": 0.00020122116082840912, + "loss": 4.4609, + "step": 6268 + }, + { + "epoch": 0.61220703125, + "grad_norm": 0.17553170025348663, + "learning_rate": 0.00020115498240249474, + "loss": 4.4375, + "step": 6269 + }, + { + "epoch": 0.6123046875, + "grad_norm": 0.1739964783191681, + "learning_rate": 0.00020108881113535356, + "loss": 4.4258, + "step": 6270 + }, + { + "epoch": 0.61240234375, + "grad_norm": 0.18464410305023193, + "learning_rate": 0.00020102264703340052, + "loss": 4.4258, + "step": 6271 + }, + { + "epoch": 0.6125, + "grad_norm": 0.16353543102741241, + "learning_rate": 0.00020095649010304966, + "loss": 4.4609, + "step": 6272 + }, + { + "epoch": 0.61259765625, + "grad_norm": 0.17749351263046265, + "learning_rate": 0.0002008903403507147, + "loss": 4.4062, + "step": 6273 + }, + { + "epoch": 0.6126953125, + "grad_norm": 0.1757742166519165, + "learning_rate": 0.00020082419778280814, + "loss": 4.4141, + "step": 6274 + }, + { + "epoch": 0.61279296875, + "grad_norm": 0.17478249967098236, + "learning_rate": 0.00020075806240574224, + "loss": 4.4336, + "step": 6275 + }, + { + "epoch": 0.612890625, + "grad_norm": 0.18863564729690552, + "learning_rate": 0.00020069193422592825, + "loss": 4.4336, + "step": 6276 + }, + { + "epoch": 0.61298828125, + "grad_norm": 0.17492233216762543, + "learning_rate": 0.00020062581324977697, + "loss": 4.4414, + "step": 6277 + }, + { + "epoch": 0.6130859375, + "grad_norm": 0.19457592070102692, + "learning_rate": 0.00020055969948369834, + "loss": 4.4727, + "step": 6278 + }, + { + "epoch": 0.61318359375, + "grad_norm": 0.19492247700691223, + "learning_rate": 0.00020049359293410158, + "loss": 4.457, + "step": 6279 + }, + { + "epoch": 0.61328125, + "grad_norm": 0.17878210544586182, + "learning_rate": 0.00020042749360739526, + "loss": 4.457, + "step": 6280 + }, + { + "epoch": 0.61337890625, + "grad_norm": 0.19128930568695068, + "learning_rate": 0.00020036140150998734, + "loss": 4.4531, + "step": 6281 + }, + { + "epoch": 0.6134765625, + "grad_norm": 0.186677023768425, + "learning_rate": 0.00020029531664828498, + "loss": 4.457, + "step": 6282 + }, + { + "epoch": 0.61357421875, + "grad_norm": 0.18112359941005707, + "learning_rate": 0.00020022923902869466, + "loss": 4.4492, + "step": 6283 + }, + { + "epoch": 0.613671875, + "grad_norm": 0.19334138929843903, + "learning_rate": 0.00020016316865762213, + "loss": 4.4531, + "step": 6284 + }, + { + "epoch": 0.61376953125, + "grad_norm": 0.1776667684316635, + "learning_rate": 0.00020009710554147247, + "loss": 4.4531, + "step": 6285 + }, + { + "epoch": 0.6138671875, + "grad_norm": 0.1814638078212738, + "learning_rate": 0.00020003104968665015, + "loss": 4.4531, + "step": 6286 + }, + { + "epoch": 0.61396484375, + "grad_norm": 0.18237778544425964, + "learning_rate": 0.00019996500109955868, + "loss": 4.4766, + "step": 6287 + }, + { + "epoch": 0.6140625, + "grad_norm": 0.16367049515247345, + "learning_rate": 0.00019989895978660106, + "loss": 4.4453, + "step": 6288 + }, + { + "epoch": 0.61416015625, + "grad_norm": 0.19509637355804443, + "learning_rate": 0.0001998329257541796, + "loss": 4.4727, + "step": 6289 + }, + { + "epoch": 0.6142578125, + "grad_norm": 0.16098210215568542, + "learning_rate": 0.00019976689900869576, + "loss": 4.4453, + "step": 6290 + }, + { + "epoch": 0.61435546875, + "grad_norm": 0.18081891536712646, + 
"learning_rate": 0.00019970087955655053, + "loss": 4.4609, + "step": 6291 + }, + { + "epoch": 0.614453125, + "grad_norm": 0.16715647280216217, + "learning_rate": 0.00019963486740414395, + "loss": 4.4453, + "step": 6292 + }, + { + "epoch": 0.61455078125, + "grad_norm": 0.1728346049785614, + "learning_rate": 0.00019956886255787548, + "loss": 4.4414, + "step": 6293 + }, + { + "epoch": 0.6146484375, + "grad_norm": 0.18008874356746674, + "learning_rate": 0.00019950286502414384, + "loss": 4.4453, + "step": 6294 + }, + { + "epoch": 0.61474609375, + "grad_norm": 0.15987923741340637, + "learning_rate": 0.00019943687480934708, + "loss": 4.4648, + "step": 6295 + }, + { + "epoch": 0.61484375, + "grad_norm": 0.17690515518188477, + "learning_rate": 0.0001993708919198824, + "loss": 4.4453, + "step": 6296 + }, + { + "epoch": 0.61494140625, + "grad_norm": 0.17070119082927704, + "learning_rate": 0.00019930491636214643, + "loss": 4.4805, + "step": 6297 + }, + { + "epoch": 0.6150390625, + "grad_norm": 0.16152724623680115, + "learning_rate": 0.00019923894814253513, + "loss": 4.4414, + "step": 6298 + }, + { + "epoch": 0.61513671875, + "grad_norm": 0.17681096494197845, + "learning_rate": 0.00019917298726744353, + "loss": 4.4883, + "step": 6299 + }, + { + "epoch": 0.615234375, + "grad_norm": 0.17336954176425934, + "learning_rate": 0.00019910703374326628, + "loss": 4.4844, + "step": 6300 + }, + { + "epoch": 0.61533203125, + "grad_norm": 0.16773177683353424, + "learning_rate": 0.00019904108757639693, + "loss": 4.4453, + "step": 6301 + }, + { + "epoch": 0.6154296875, + "grad_norm": 0.16403433680534363, + "learning_rate": 0.00019897514877322865, + "loss": 4.4648, + "step": 6302 + }, + { + "epoch": 0.61552734375, + "grad_norm": 0.16544990241527557, + "learning_rate": 0.00019890921734015376, + "loss": 4.4688, + "step": 6303 + }, + { + "epoch": 0.615625, + "grad_norm": 0.16999949514865875, + "learning_rate": 0.00019884329328356376, + "loss": 4.4531, + "step": 6304 + }, + { + "epoch": 0.61572265625, + "grad_norm": 0.16409456729888916, + "learning_rate": 0.0001987773766098496, + "loss": 4.4336, + "step": 6305 + }, + { + "epoch": 0.6158203125, + "grad_norm": 0.1654205173254013, + "learning_rate": 0.00019871146732540146, + "loss": 4.457, + "step": 6306 + }, + { + "epoch": 0.61591796875, + "grad_norm": 0.1609824001789093, + "learning_rate": 0.00019864556543660878, + "loss": 4.4805, + "step": 6307 + }, + { + "epoch": 0.616015625, + "grad_norm": 0.16511499881744385, + "learning_rate": 0.00019857967094986034, + "loss": 4.4688, + "step": 6308 + }, + { + "epoch": 0.61611328125, + "grad_norm": 0.164848193526268, + "learning_rate": 0.0001985137838715441, + "loss": 4.4336, + "step": 6309 + }, + { + "epoch": 0.6162109375, + "grad_norm": 0.16588719189167023, + "learning_rate": 0.00019844790420804742, + "loss": 4.4492, + "step": 6310 + }, + { + "epoch": 0.61630859375, + "grad_norm": 0.15287548303604126, + "learning_rate": 0.00019838203196575693, + "loss": 4.4648, + "step": 6311 + }, + { + "epoch": 0.61640625, + "grad_norm": 0.17183901369571686, + "learning_rate": 0.00019831616715105832, + "loss": 4.4609, + "step": 6312 + }, + { + "epoch": 0.61650390625, + "grad_norm": 0.17453348636627197, + "learning_rate": 0.00019825030977033693, + "loss": 4.4609, + "step": 6313 + }, + { + "epoch": 0.6166015625, + "grad_norm": 0.16221313178539276, + "learning_rate": 0.00019818445982997706, + "loss": 4.4492, + "step": 6314 + }, + { + "epoch": 0.61669921875, + "grad_norm": 0.17529599368572235, + "learning_rate": 0.0001981186173363625, + "loss": 4.4688, 
+ "step": 6315 + }, + { + "epoch": 0.616796875, + "grad_norm": 0.16497579216957092, + "learning_rate": 0.0001980527822958762, + "loss": 4.4609, + "step": 6316 + }, + { + "epoch": 0.61689453125, + "grad_norm": 0.1524345427751541, + "learning_rate": 0.00019798695471490036, + "loss": 4.4609, + "step": 6317 + }, + { + "epoch": 0.6169921875, + "grad_norm": 0.16550998389720917, + "learning_rate": 0.00019792113459981658, + "loss": 4.4453, + "step": 6318 + }, + { + "epoch": 0.61708984375, + "grad_norm": 0.15164117515087128, + "learning_rate": 0.00019785532195700566, + "loss": 4.4609, + "step": 6319 + }, + { + "epoch": 0.6171875, + "grad_norm": 0.15291564166545868, + "learning_rate": 0.00019778951679284775, + "loss": 4.4609, + "step": 6320 + }, + { + "epoch": 0.61728515625, + "grad_norm": 0.1590595841407776, + "learning_rate": 0.00019772371911372212, + "loss": 4.4688, + "step": 6321 + }, + { + "epoch": 0.6173828125, + "grad_norm": 0.1561022251844406, + "learning_rate": 0.0001976579289260074, + "loss": 4.4492, + "step": 6322 + }, + { + "epoch": 0.61748046875, + "grad_norm": 0.15972618758678436, + "learning_rate": 0.00019759214623608164, + "loss": 4.4844, + "step": 6323 + }, + { + "epoch": 0.617578125, + "grad_norm": 0.17125512659549713, + "learning_rate": 0.00019752637105032196, + "loss": 4.4727, + "step": 6324 + }, + { + "epoch": 0.61767578125, + "grad_norm": 0.15944816172122955, + "learning_rate": 0.00019746060337510462, + "loss": 4.4531, + "step": 6325 + }, + { + "epoch": 0.6177734375, + "grad_norm": 0.16557066142559052, + "learning_rate": 0.00019739484321680567, + "loss": 4.4805, + "step": 6326 + }, + { + "epoch": 0.61787109375, + "grad_norm": 0.16275012493133545, + "learning_rate": 0.00019732909058179987, + "loss": 4.4609, + "step": 6327 + }, + { + "epoch": 0.61796875, + "grad_norm": 0.1575167179107666, + "learning_rate": 0.00019726334547646169, + "loss": 4.457, + "step": 6328 + }, + { + "epoch": 0.61806640625, + "grad_norm": 0.16612960398197174, + "learning_rate": 0.00019719760790716452, + "loss": 4.4297, + "step": 6329 + }, + { + "epoch": 0.6181640625, + "grad_norm": 0.16061918437480927, + "learning_rate": 0.00019713187788028119, + "loss": 4.457, + "step": 6330 + }, + { + "epoch": 0.61826171875, + "grad_norm": 0.17116186022758484, + "learning_rate": 0.00019706615540218382, + "loss": 4.4414, + "step": 6331 + }, + { + "epoch": 0.618359375, + "grad_norm": 0.1633685678243637, + "learning_rate": 0.0001970004404792437, + "loss": 4.5, + "step": 6332 + }, + { + "epoch": 0.61845703125, + "grad_norm": 0.15936973690986633, + "learning_rate": 0.00019693473311783156, + "loss": 4.457, + "step": 6333 + }, + { + "epoch": 0.6185546875, + "grad_norm": 0.17258305847644806, + "learning_rate": 0.00019686903332431723, + "loss": 4.4258, + "step": 6334 + }, + { + "epoch": 0.61865234375, + "grad_norm": 0.1873469203710556, + "learning_rate": 0.00019680334110506976, + "loss": 4.4336, + "step": 6335 + }, + { + "epoch": 0.61875, + "grad_norm": 0.1706395298242569, + "learning_rate": 0.0001967376564664577, + "loss": 4.4922, + "step": 6336 + }, + { + "epoch": 0.61884765625, + "grad_norm": 0.16502270102500916, + "learning_rate": 0.00019667197941484872, + "loss": 4.4648, + "step": 6337 + }, + { + "epoch": 0.6189453125, + "grad_norm": 0.1642698347568512, + "learning_rate": 0.00019660630995660966, + "loss": 4.4492, + "step": 6338 + }, + { + "epoch": 0.61904296875, + "grad_norm": 0.16926982998847961, + "learning_rate": 0.0001965406480981068, + "loss": 4.4609, + "step": 6339 + }, + { + "epoch": 0.619140625, + "grad_norm": 
0.1734558343887329, + "learning_rate": 0.00019647499384570555, + "loss": 4.4648, + "step": 6340 + }, + { + "epoch": 0.61923828125, + "grad_norm": 0.1599050611257553, + "learning_rate": 0.00019640934720577076, + "loss": 4.4531, + "step": 6341 + }, + { + "epoch": 0.6193359375, + "grad_norm": 0.17397071421146393, + "learning_rate": 0.00019634370818466635, + "loss": 4.4297, + "step": 6342 + }, + { + "epoch": 0.61943359375, + "grad_norm": 0.174725741147995, + "learning_rate": 0.0001962780767887556, + "loss": 4.4492, + "step": 6343 + }, + { + "epoch": 0.61953125, + "grad_norm": 0.1664077341556549, + "learning_rate": 0.000196212453024401, + "loss": 4.4609, + "step": 6344 + }, + { + "epoch": 0.61962890625, + "grad_norm": 0.16969117522239685, + "learning_rate": 0.00019614683689796436, + "loss": 4.4609, + "step": 6345 + }, + { + "epoch": 0.6197265625, + "grad_norm": 0.16538839042186737, + "learning_rate": 0.00019608122841580672, + "loss": 4.4062, + "step": 6346 + }, + { + "epoch": 0.61982421875, + "grad_norm": 0.16119083762168884, + "learning_rate": 0.00019601562758428832, + "loss": 4.4453, + "step": 6347 + }, + { + "epoch": 0.619921875, + "grad_norm": 0.16037534177303314, + "learning_rate": 0.00019595003440976878, + "loss": 4.4844, + "step": 6348 + }, + { + "epoch": 0.62001953125, + "grad_norm": 0.167230486869812, + "learning_rate": 0.0001958844488986069, + "loss": 4.4805, + "step": 6349 + }, + { + "epoch": 0.6201171875, + "grad_norm": 0.16355426609516144, + "learning_rate": 0.00019581887105716073, + "loss": 4.457, + "step": 6350 + }, + { + "epoch": 0.62021484375, + "grad_norm": 0.1665310114622116, + "learning_rate": 0.00019575330089178757, + "loss": 4.4414, + "step": 6351 + }, + { + "epoch": 0.6203125, + "grad_norm": 0.16715189814567566, + "learning_rate": 0.00019568773840884408, + "loss": 4.4844, + "step": 6352 + }, + { + "epoch": 0.62041015625, + "grad_norm": 0.1612021028995514, + "learning_rate": 0.00019562218361468604, + "loss": 4.4453, + "step": 6353 + }, + { + "epoch": 0.6205078125, + "grad_norm": 0.1724766492843628, + "learning_rate": 0.00019555663651566867, + "loss": 4.4375, + "step": 6354 + }, + { + "epoch": 0.62060546875, + "grad_norm": 0.16919340193271637, + "learning_rate": 0.0001954910971181461, + "loss": 4.4414, + "step": 6355 + }, + { + "epoch": 0.620703125, + "grad_norm": 0.17596349120140076, + "learning_rate": 0.000195425565428472, + "loss": 4.4453, + "step": 6356 + }, + { + "epoch": 0.62080078125, + "grad_norm": 0.15458478033542633, + "learning_rate": 0.0001953600414529993, + "loss": 4.4688, + "step": 6357 + }, + { + "epoch": 0.6208984375, + "grad_norm": 0.1649215668439865, + "learning_rate": 0.00019529452519808006, + "loss": 4.4492, + "step": 6358 + }, + { + "epoch": 0.62099609375, + "grad_norm": 0.17640581727027893, + "learning_rate": 0.00019522901667006566, + "loss": 4.457, + "step": 6359 + }, + { + "epoch": 0.62109375, + "grad_norm": 0.17940351366996765, + "learning_rate": 0.00019516351587530663, + "loss": 4.4375, + "step": 6360 + }, + { + "epoch": 0.62119140625, + "grad_norm": 0.15621840953826904, + "learning_rate": 0.00019509802282015293, + "loss": 4.4336, + "step": 6361 + }, + { + "epoch": 0.6212890625, + "grad_norm": 0.1862347424030304, + "learning_rate": 0.00019503253751095367, + "loss": 4.4805, + "step": 6362 + }, + { + "epoch": 0.62138671875, + "grad_norm": 0.16980606317520142, + "learning_rate": 0.00019496705995405712, + "loss": 4.4531, + "step": 6363 + }, + { + "epoch": 0.621484375, + "grad_norm": 0.16181378066539764, + "learning_rate": 0.0001949015901558109, + 
"loss": 4.4531, + "step": 6364 + }, + { + "epoch": 0.62158203125, + "grad_norm": 0.17067290842533112, + "learning_rate": 0.00019483612812256192, + "loss": 4.4258, + "step": 6365 + }, + { + "epoch": 0.6216796875, + "grad_norm": 0.17313115298748016, + "learning_rate": 0.00019477067386065617, + "loss": 4.457, + "step": 6366 + }, + { + "epoch": 0.62177734375, + "grad_norm": 0.1586454212665558, + "learning_rate": 0.0001947052273764391, + "loss": 4.457, + "step": 6367 + }, + { + "epoch": 0.621875, + "grad_norm": 0.17618179321289062, + "learning_rate": 0.00019463978867625525, + "loss": 4.4609, + "step": 6368 + }, + { + "epoch": 0.62197265625, + "grad_norm": 0.15881410241127014, + "learning_rate": 0.00019457435776644856, + "loss": 4.4219, + "step": 6369 + }, + { + "epoch": 0.6220703125, + "grad_norm": 0.17371733486652374, + "learning_rate": 0.00019450893465336206, + "loss": 4.4531, + "step": 6370 + }, + { + "epoch": 0.62216796875, + "grad_norm": 0.17927826941013336, + "learning_rate": 0.00019444351934333798, + "loss": 4.457, + "step": 6371 + }, + { + "epoch": 0.622265625, + "grad_norm": 0.17528174817562103, + "learning_rate": 0.00019437811184271796, + "loss": 4.4453, + "step": 6372 + }, + { + "epoch": 0.62236328125, + "grad_norm": 0.1608734279870987, + "learning_rate": 0.00019431271215784285, + "loss": 4.4258, + "step": 6373 + }, + { + "epoch": 0.6224609375, + "grad_norm": 0.17026564478874207, + "learning_rate": 0.0001942473202950526, + "loss": 4.4648, + "step": 6374 + }, + { + "epoch": 0.62255859375, + "grad_norm": 0.16263100504875183, + "learning_rate": 0.00019418193626068658, + "loss": 4.457, + "step": 6375 + }, + { + "epoch": 0.62265625, + "grad_norm": 0.17093858122825623, + "learning_rate": 0.00019411656006108335, + "loss": 4.4844, + "step": 6376 + }, + { + "epoch": 0.62275390625, + "grad_norm": 0.16630679368972778, + "learning_rate": 0.00019405119170258072, + "loss": 4.4336, + "step": 6377 + }, + { + "epoch": 0.6228515625, + "grad_norm": 0.1683543473482132, + "learning_rate": 0.00019398583119151558, + "loss": 4.4922, + "step": 6378 + }, + { + "epoch": 0.62294921875, + "grad_norm": 0.17075610160827637, + "learning_rate": 0.00019392047853422439, + "loss": 4.4609, + "step": 6379 + }, + { + "epoch": 0.623046875, + "grad_norm": 0.17935459315776825, + "learning_rate": 0.00019385513373704246, + "loss": 4.4609, + "step": 6380 + }, + { + "epoch": 0.62314453125, + "grad_norm": 0.15268921852111816, + "learning_rate": 0.0001937897968063045, + "loss": 4.4453, + "step": 6381 + }, + { + "epoch": 0.6232421875, + "grad_norm": 0.18680717051029205, + "learning_rate": 0.00019372446774834458, + "loss": 4.4414, + "step": 6382 + }, + { + "epoch": 0.62333984375, + "grad_norm": 0.17440582811832428, + "learning_rate": 0.0001936591465694959, + "loss": 4.4766, + "step": 6383 + }, + { + "epoch": 0.6234375, + "grad_norm": 0.1770864725112915, + "learning_rate": 0.00019359383327609094, + "loss": 4.4375, + "step": 6384 + }, + { + "epoch": 0.62353515625, + "grad_norm": 0.1921233981847763, + "learning_rate": 0.00019352852787446129, + "loss": 4.4375, + "step": 6385 + }, + { + "epoch": 0.6236328125, + "grad_norm": 0.16275791823863983, + "learning_rate": 0.00019346323037093793, + "loss": 4.457, + "step": 6386 + }, + { + "epoch": 0.62373046875, + "grad_norm": 0.18081603944301605, + "learning_rate": 0.00019339794077185115, + "loss": 4.4492, + "step": 6387 + }, + { + "epoch": 0.623828125, + "grad_norm": 0.17742586135864258, + "learning_rate": 0.00019333265908353, + "loss": 4.4375, + "step": 6388 + }, + { + "epoch": 0.62392578125, + 
"grad_norm": 0.1661609411239624, + "learning_rate": 0.0001932673853123033, + "loss": 4.4766, + "step": 6389 + }, + { + "epoch": 0.6240234375, + "grad_norm": 0.1788572072982788, + "learning_rate": 0.00019320211946449882, + "loss": 4.4492, + "step": 6390 + }, + { + "epoch": 0.62412109375, + "grad_norm": 0.1568785458803177, + "learning_rate": 0.00019313686154644378, + "loss": 4.4336, + "step": 6391 + }, + { + "epoch": 0.62421875, + "grad_norm": 0.18507027626037598, + "learning_rate": 0.00019307161156446433, + "loss": 4.4492, + "step": 6392 + }, + { + "epoch": 0.62431640625, + "grad_norm": 0.1733994036912918, + "learning_rate": 0.00019300636952488616, + "loss": 4.4609, + "step": 6393 + }, + { + "epoch": 0.6244140625, + "grad_norm": 0.176666721701622, + "learning_rate": 0.00019294113543403396, + "loss": 4.4336, + "step": 6394 + }, + { + "epoch": 0.62451171875, + "grad_norm": 0.185407817363739, + "learning_rate": 0.00019287590929823172, + "loss": 4.4922, + "step": 6395 + }, + { + "epoch": 0.624609375, + "grad_norm": 0.16937702894210815, + "learning_rate": 0.0001928106911238028, + "loss": 4.4453, + "step": 6396 + }, + { + "epoch": 0.62470703125, + "grad_norm": 0.20024588704109192, + "learning_rate": 0.00019274548091706946, + "loss": 4.4453, + "step": 6397 + }, + { + "epoch": 0.6248046875, + "grad_norm": 0.17126721143722534, + "learning_rate": 0.0001926802786843535, + "loss": 4.4688, + "step": 6398 + }, + { + "epoch": 0.62490234375, + "grad_norm": 0.18673935532569885, + "learning_rate": 0.00019261508443197577, + "loss": 4.4688, + "step": 6399 + }, + { + "epoch": 0.625, + "grad_norm": 0.17891007661819458, + "learning_rate": 0.0001925498981662565, + "loss": 4.4531, + "step": 6400 + }, + { + "epoch": 0.62509765625, + "grad_norm": 0.1557353436946869, + "learning_rate": 0.000192484719893515, + "loss": 4.4375, + "step": 6401 + }, + { + "epoch": 0.6251953125, + "grad_norm": 0.17874588072299957, + "learning_rate": 0.00019241954962006985, + "loss": 4.4375, + "step": 6402 + }, + { + "epoch": 0.62529296875, + "grad_norm": 0.15710271894931793, + "learning_rate": 0.00019235438735223893, + "loss": 4.4492, + "step": 6403 + }, + { + "epoch": 0.625390625, + "grad_norm": 0.15764179825782776, + "learning_rate": 0.00019228923309633927, + "loss": 4.4414, + "step": 6404 + }, + { + "epoch": 0.62548828125, + "grad_norm": 0.15846046805381775, + "learning_rate": 0.00019222408685868708, + "loss": 4.457, + "step": 6405 + }, + { + "epoch": 0.6255859375, + "grad_norm": 0.17478260397911072, + "learning_rate": 0.00019215894864559787, + "loss": 4.4297, + "step": 6406 + }, + { + "epoch": 0.62568359375, + "grad_norm": 0.15854157507419586, + "learning_rate": 0.00019209381846338624, + "loss": 4.4492, + "step": 6407 + }, + { + "epoch": 0.62578125, + "grad_norm": 0.16451141238212585, + "learning_rate": 0.00019202869631836634, + "loss": 4.4375, + "step": 6408 + }, + { + "epoch": 0.62587890625, + "grad_norm": 0.16961060464382172, + "learning_rate": 0.0001919635822168511, + "loss": 4.4453, + "step": 6409 + }, + { + "epoch": 0.6259765625, + "grad_norm": 0.16836921870708466, + "learning_rate": 0.0001918984761651531, + "loss": 4.4492, + "step": 6410 + }, + { + "epoch": 0.62607421875, + "grad_norm": 0.16919243335723877, + "learning_rate": 0.0001918333781695838, + "loss": 4.4297, + "step": 6411 + }, + { + "epoch": 0.626171875, + "grad_norm": 0.15828818082809448, + "learning_rate": 0.00019176828823645398, + "loss": 4.4414, + "step": 6412 + }, + { + "epoch": 0.62626953125, + "grad_norm": 0.17232654988765717, + "learning_rate": 
0.00019170320637207383, + "loss": 4.4531, + "step": 6413 + }, + { + "epoch": 0.6263671875, + "grad_norm": 0.17221489548683167, + "learning_rate": 0.00019163813258275242, + "loss": 4.4648, + "step": 6414 + }, + { + "epoch": 0.62646484375, + "grad_norm": 0.16415508091449738, + "learning_rate": 0.00019157306687479826, + "loss": 4.4531, + "step": 6415 + }, + { + "epoch": 0.6265625, + "grad_norm": 0.1723625659942627, + "learning_rate": 0.00019150800925451906, + "loss": 4.4336, + "step": 6416 + }, + { + "epoch": 0.62666015625, + "grad_norm": 0.17711523175239563, + "learning_rate": 0.00019144295972822167, + "loss": 4.4531, + "step": 6417 + }, + { + "epoch": 0.6267578125, + "grad_norm": 0.17333319783210754, + "learning_rate": 0.00019137791830221225, + "loss": 4.4453, + "step": 6418 + }, + { + "epoch": 0.62685546875, + "grad_norm": 0.16572439670562744, + "learning_rate": 0.0001913128849827961, + "loss": 4.4219, + "step": 6419 + }, + { + "epoch": 0.626953125, + "grad_norm": 0.16659653186798096, + "learning_rate": 0.00019124785977627778, + "loss": 4.4531, + "step": 6420 + }, + { + "epoch": 0.62705078125, + "grad_norm": 0.1762450635433197, + "learning_rate": 0.00019118284268896115, + "loss": 4.4688, + "step": 6421 + }, + { + "epoch": 0.6271484375, + "grad_norm": 0.15105518698692322, + "learning_rate": 0.00019111783372714885, + "loss": 4.4297, + "step": 6422 + }, + { + "epoch": 0.62724609375, + "grad_norm": 0.17176447808742523, + "learning_rate": 0.00019105283289714336, + "loss": 4.5117, + "step": 6423 + }, + { + "epoch": 0.62734375, + "grad_norm": 0.16947565972805023, + "learning_rate": 0.00019098784020524595, + "loss": 4.4688, + "step": 6424 + }, + { + "epoch": 0.62744140625, + "grad_norm": 0.17340928316116333, + "learning_rate": 0.00019092285565775719, + "loss": 4.4336, + "step": 6425 + }, + { + "epoch": 0.6275390625, + "grad_norm": 0.17045269906520844, + "learning_rate": 0.00019085787926097702, + "loss": 4.4531, + "step": 6426 + }, + { + "epoch": 0.62763671875, + "grad_norm": 0.17269465327262878, + "learning_rate": 0.00019079291102120434, + "loss": 4.4648, + "step": 6427 + }, + { + "epoch": 0.627734375, + "grad_norm": 0.16609519720077515, + "learning_rate": 0.0001907279509447374, + "loss": 4.4258, + "step": 6428 + }, + { + "epoch": 0.62783203125, + "grad_norm": 0.17214594781398773, + "learning_rate": 0.00019066299903787372, + "loss": 4.4375, + "step": 6429 + }, + { + "epoch": 0.6279296875, + "grad_norm": 0.16394805908203125, + "learning_rate": 0.00019059805530690992, + "loss": 4.4023, + "step": 6430 + }, + { + "epoch": 0.62802734375, + "grad_norm": 0.1648997664451599, + "learning_rate": 0.00019053311975814174, + "loss": 4.4688, + "step": 6431 + }, + { + "epoch": 0.628125, + "grad_norm": 0.1625446230173111, + "learning_rate": 0.00019046819239786433, + "loss": 4.4531, + "step": 6432 + }, + { + "epoch": 0.62822265625, + "grad_norm": 0.1606859266757965, + "learning_rate": 0.00019040327323237194, + "loss": 4.4492, + "step": 6433 + }, + { + "epoch": 0.6283203125, + "grad_norm": 0.17204581201076508, + "learning_rate": 0.00019033836226795808, + "loss": 4.4414, + "step": 6434 + }, + { + "epoch": 0.62841796875, + "grad_norm": 0.15299896895885468, + "learning_rate": 0.00019027345951091535, + "loss": 4.4805, + "step": 6435 + }, + { + "epoch": 0.628515625, + "grad_norm": 0.16125158965587616, + "learning_rate": 0.00019020856496753576, + "loss": 4.4844, + "step": 6436 + }, + { + "epoch": 0.62861328125, + "grad_norm": 0.21978813409805298, + "learning_rate": 0.00019014367864411024, + "loss": 4.4688, + "step": 6437 
+ }, + { + "epoch": 0.6287109375, + "grad_norm": 0.17008396983146667, + "learning_rate": 0.00019007880054692922, + "loss": 4.4609, + "step": 6438 + }, + { + "epoch": 0.62880859375, + "grad_norm": 0.15826457738876343, + "learning_rate": 0.00019001393068228213, + "loss": 4.4805, + "step": 6439 + }, + { + "epoch": 0.62890625, + "grad_norm": 0.15785221755504608, + "learning_rate": 0.0001899490690564576, + "loss": 4.4297, + "step": 6440 + }, + { + "epoch": 0.62900390625, + "grad_norm": 0.15955887734889984, + "learning_rate": 0.0001898842156757436, + "loss": 4.4492, + "step": 6441 + }, + { + "epoch": 0.6291015625, + "grad_norm": 0.16427135467529297, + "learning_rate": 0.00018981937054642718, + "loss": 4.4375, + "step": 6442 + }, + { + "epoch": 0.62919921875, + "grad_norm": 0.1610272228717804, + "learning_rate": 0.00018975453367479472, + "loss": 4.4531, + "step": 6443 + }, + { + "epoch": 0.629296875, + "grad_norm": 0.16381457448005676, + "learning_rate": 0.00018968970506713162, + "loss": 4.4883, + "step": 6444 + }, + { + "epoch": 0.62939453125, + "grad_norm": 0.17208035290241241, + "learning_rate": 0.00018962488472972256, + "loss": 4.4609, + "step": 6445 + }, + { + "epoch": 0.6294921875, + "grad_norm": 0.15719209611415863, + "learning_rate": 0.00018956007266885162, + "loss": 4.4141, + "step": 6446 + }, + { + "epoch": 0.62958984375, + "grad_norm": 0.1703023910522461, + "learning_rate": 0.00018949526889080166, + "loss": 4.4102, + "step": 6447 + }, + { + "epoch": 0.6296875, + "grad_norm": 0.16275115311145782, + "learning_rate": 0.00018943047340185505, + "loss": 4.4414, + "step": 6448 + }, + { + "epoch": 0.62978515625, + "grad_norm": 0.16429588198661804, + "learning_rate": 0.00018936568620829335, + "loss": 4.4336, + "step": 6449 + }, + { + "epoch": 0.6298828125, + "grad_norm": 0.16511715948581696, + "learning_rate": 0.00018930090731639716, + "loss": 4.4766, + "step": 6450 + }, + { + "epoch": 0.62998046875, + "grad_norm": 0.15884996950626373, + "learning_rate": 0.00018923613673244626, + "loss": 4.4414, + "step": 6451 + }, + { + "epoch": 0.630078125, + "grad_norm": 0.15610261261463165, + "learning_rate": 0.00018917137446271991, + "loss": 4.4258, + "step": 6452 + }, + { + "epoch": 0.63017578125, + "grad_norm": 0.16798223555088043, + "learning_rate": 0.00018910662051349625, + "loss": 4.4258, + "step": 6453 + }, + { + "epoch": 0.6302734375, + "grad_norm": 0.1593465358018875, + "learning_rate": 0.0001890418748910528, + "loss": 4.4648, + "step": 6454 + }, + { + "epoch": 0.63037109375, + "grad_norm": 0.1708420366048813, + "learning_rate": 0.00018897713760166618, + "loss": 4.4648, + "step": 6455 + }, + { + "epoch": 0.63046875, + "grad_norm": 0.1636761873960495, + "learning_rate": 0.00018891240865161223, + "loss": 4.4688, + "step": 6456 + }, + { + "epoch": 0.63056640625, + "grad_norm": 0.16302184760570526, + "learning_rate": 0.00018884768804716594, + "loss": 4.4531, + "step": 6457 + }, + { + "epoch": 0.6306640625, + "grad_norm": 0.16798050701618195, + "learning_rate": 0.00018878297579460158, + "loss": 4.4648, + "step": 6458 + }, + { + "epoch": 0.63076171875, + "grad_norm": 0.1651872992515564, + "learning_rate": 0.0001887182719001927, + "loss": 4.4805, + "step": 6459 + }, + { + "epoch": 0.630859375, + "grad_norm": 0.1571522057056427, + "learning_rate": 0.00018865357637021157, + "loss": 4.457, + "step": 6460 + }, + { + "epoch": 0.63095703125, + "grad_norm": 0.1687481552362442, + "learning_rate": 0.00018858888921093032, + "loss": 4.4727, + "step": 6461 + }, + { + "epoch": 0.6310546875, + "grad_norm": 
0.16056686639785767, + "learning_rate": 0.00018852421042861968, + "loss": 4.4844, + "step": 6462 + }, + { + "epoch": 0.63115234375, + "grad_norm": 0.1654931753873825, + "learning_rate": 0.00018845954002955, + "loss": 4.4531, + "step": 6463 + }, + { + "epoch": 0.63125, + "grad_norm": 0.17789769172668457, + "learning_rate": 0.0001883948780199905, + "loss": 4.4531, + "step": 6464 + }, + { + "epoch": 0.63134765625, + "grad_norm": 0.16416265070438385, + "learning_rate": 0.00018833022440620985, + "loss": 4.418, + "step": 6465 + }, + { + "epoch": 0.6314453125, + "grad_norm": 0.16038498282432556, + "learning_rate": 0.00018826557919447568, + "loss": 4.4531, + "step": 6466 + }, + { + "epoch": 0.63154296875, + "grad_norm": 0.16586191952228546, + "learning_rate": 0.0001882009423910549, + "loss": 4.4102, + "step": 6467 + }, + { + "epoch": 0.631640625, + "grad_norm": 0.16170482337474823, + "learning_rate": 0.0001881363140022137, + "loss": 4.4609, + "step": 6468 + }, + { + "epoch": 0.63173828125, + "grad_norm": 0.16126009821891785, + "learning_rate": 0.0001880716940342173, + "loss": 4.4688, + "step": 6469 + }, + { + "epoch": 0.6318359375, + "grad_norm": 0.15617012977600098, + "learning_rate": 0.00018800708249333022, + "loss": 4.457, + "step": 6470 + }, + { + "epoch": 0.63193359375, + "grad_norm": 0.16785036027431488, + "learning_rate": 0.00018794247938581603, + "loss": 4.4727, + "step": 6471 + }, + { + "epoch": 0.63203125, + "grad_norm": 0.15244078636169434, + "learning_rate": 0.00018787788471793759, + "loss": 4.4805, + "step": 6472 + }, + { + "epoch": 0.63212890625, + "grad_norm": 0.1675260365009308, + "learning_rate": 0.00018781329849595695, + "loss": 4.4688, + "step": 6473 + }, + { + "epoch": 0.6322265625, + "grad_norm": 0.16929610073566437, + "learning_rate": 0.00018774872072613525, + "loss": 4.4609, + "step": 6474 + }, + { + "epoch": 0.63232421875, + "grad_norm": 0.1670127958059311, + "learning_rate": 0.00018768415141473294, + "loss": 4.4727, + "step": 6475 + }, + { + "epoch": 0.632421875, + "grad_norm": 0.16060471534729004, + "learning_rate": 0.0001876195905680095, + "loss": 4.457, + "step": 6476 + }, + { + "epoch": 0.63251953125, + "grad_norm": 0.16893787682056427, + "learning_rate": 0.0001875550381922237, + "loss": 4.4609, + "step": 6477 + }, + { + "epoch": 0.6326171875, + "grad_norm": 0.15559615194797516, + "learning_rate": 0.00018749049429363346, + "loss": 4.4492, + "step": 6478 + }, + { + "epoch": 0.63271484375, + "grad_norm": 0.1669638752937317, + "learning_rate": 0.00018742595887849596, + "loss": 4.4492, + "step": 6479 + }, + { + "epoch": 0.6328125, + "grad_norm": 0.17469759285449982, + "learning_rate": 0.00018736143195306732, + "loss": 4.4141, + "step": 6480 + }, + { + "epoch": 0.63291015625, + "grad_norm": 0.16157011687755585, + "learning_rate": 0.00018729691352360305, + "loss": 4.4688, + "step": 6481 + }, + { + "epoch": 0.6330078125, + "grad_norm": 0.16092905402183533, + "learning_rate": 0.00018723240359635774, + "loss": 4.4336, + "step": 6482 + }, + { + "epoch": 0.63310546875, + "grad_norm": 0.16795872151851654, + "learning_rate": 0.00018716790217758523, + "loss": 4.4727, + "step": 6483 + }, + { + "epoch": 0.633203125, + "grad_norm": 0.15562553703784943, + "learning_rate": 0.00018710340927353847, + "loss": 4.4648, + "step": 6484 + }, + { + "epoch": 0.63330078125, + "grad_norm": 0.17018313705921173, + "learning_rate": 0.00018703892489046965, + "loss": 4.4258, + "step": 6485 + }, + { + "epoch": 0.6333984375, + "grad_norm": 0.16108331084251404, + "learning_rate": 0.00018697444903463008, + 
"loss": 4.4375, + "step": 6486 + }, + { + "epoch": 0.63349609375, + "grad_norm": 0.17012012004852295, + "learning_rate": 0.0001869099817122702, + "loss": 4.457, + "step": 6487 + }, + { + "epoch": 0.63359375, + "grad_norm": 0.15820275247097015, + "learning_rate": 0.0001868455229296398, + "loss": 4.4531, + "step": 6488 + }, + { + "epoch": 0.63369140625, + "grad_norm": 0.1695975661277771, + "learning_rate": 0.0001867810726929876, + "loss": 4.4805, + "step": 6489 + }, + { + "epoch": 0.6337890625, + "grad_norm": 0.15659813582897186, + "learning_rate": 0.0001867166310085616, + "loss": 4.4375, + "step": 6490 + }, + { + "epoch": 0.63388671875, + "grad_norm": 0.15882310271263123, + "learning_rate": 0.00018665219788260913, + "loss": 4.4375, + "step": 6491 + }, + { + "epoch": 0.633984375, + "grad_norm": 0.16618555784225464, + "learning_rate": 0.00018658777332137644, + "loss": 4.4453, + "step": 6492 + }, + { + "epoch": 0.63408203125, + "grad_norm": 0.1633576899766922, + "learning_rate": 0.00018652335733110903, + "loss": 4.4492, + "step": 6493 + }, + { + "epoch": 0.6341796875, + "grad_norm": 0.1720981001853943, + "learning_rate": 0.00018645894991805168, + "loss": 4.4375, + "step": 6494 + }, + { + "epoch": 0.63427734375, + "grad_norm": 0.16622857749462128, + "learning_rate": 0.00018639455108844817, + "loss": 4.4336, + "step": 6495 + }, + { + "epoch": 0.634375, + "grad_norm": 0.16473932564258575, + "learning_rate": 0.00018633016084854153, + "loss": 4.4609, + "step": 6496 + }, + { + "epoch": 0.63447265625, + "grad_norm": 0.17152060568332672, + "learning_rate": 0.00018626577920457415, + "loss": 4.4648, + "step": 6497 + }, + { + "epoch": 0.6345703125, + "grad_norm": 0.1720244437456131, + "learning_rate": 0.00018620140616278715, + "loss": 4.4883, + "step": 6498 + }, + { + "epoch": 0.63466796875, + "grad_norm": 0.1613079458475113, + "learning_rate": 0.0001861370417294211, + "loss": 4.457, + "step": 6499 + }, + { + "epoch": 0.634765625, + "grad_norm": 0.17076289653778076, + "learning_rate": 0.00018607268591071575, + "loss": 4.4258, + "step": 6500 + }, + { + "epoch": 0.63486328125, + "grad_norm": 0.16988541185855865, + "learning_rate": 0.00018600833871290995, + "loss": 4.4375, + "step": 6501 + }, + { + "epoch": 0.6349609375, + "grad_norm": 0.16242623329162598, + "learning_rate": 0.00018594400014224172, + "loss": 4.4375, + "step": 6502 + }, + { + "epoch": 0.63505859375, + "grad_norm": 0.1617782860994339, + "learning_rate": 0.00018587967020494827, + "loss": 4.4375, + "step": 6503 + }, + { + "epoch": 0.63515625, + "grad_norm": 0.16058026254177094, + "learning_rate": 0.00018581534890726594, + "loss": 4.4375, + "step": 6504 + }, + { + "epoch": 0.63525390625, + "grad_norm": 0.15801385045051575, + "learning_rate": 0.00018575103625543033, + "loss": 4.4531, + "step": 6505 + }, + { + "epoch": 0.6353515625, + "grad_norm": 0.16664476692676544, + "learning_rate": 0.0001856867322556759, + "loss": 4.4297, + "step": 6506 + }, + { + "epoch": 0.63544921875, + "grad_norm": 0.17576661705970764, + "learning_rate": 0.0001856224369142366, + "loss": 4.4492, + "step": 6507 + }, + { + "epoch": 0.635546875, + "grad_norm": 0.1834227442741394, + "learning_rate": 0.00018555815023734546, + "loss": 4.4453, + "step": 6508 + }, + { + "epoch": 0.63564453125, + "grad_norm": 0.17555052042007446, + "learning_rate": 0.0001854938722312346, + "loss": 4.4531, + "step": 6509 + }, + { + "epoch": 0.6357421875, + "grad_norm": 0.17056727409362793, + "learning_rate": 0.00018542960290213528, + "loss": 4.4375, + "step": 6510 + }, + { + "epoch": 0.63583984375, 
+ "grad_norm": 0.17461469769477844, + "learning_rate": 0.00018536534225627817, + "loss": 4.4922, + "step": 6511 + }, + { + "epoch": 0.6359375, + "grad_norm": 0.1747380644083023, + "learning_rate": 0.00018530109029989267, + "loss": 4.4727, + "step": 6512 + }, + { + "epoch": 0.63603515625, + "grad_norm": 0.1791106015443802, + "learning_rate": 0.00018523684703920774, + "loss": 4.4688, + "step": 6513 + }, + { + "epoch": 0.6361328125, + "grad_norm": 0.1648419052362442, + "learning_rate": 0.00018517261248045136, + "loss": 4.4609, + "step": 6514 + }, + { + "epoch": 0.63623046875, + "grad_norm": 0.1776411384344101, + "learning_rate": 0.0001851083866298504, + "loss": 4.4219, + "step": 6515 + }, + { + "epoch": 0.636328125, + "grad_norm": 0.17203916609287262, + "learning_rate": 0.00018504416949363127, + "loss": 4.457, + "step": 6516 + }, + { + "epoch": 0.63642578125, + "grad_norm": 0.15331022441387177, + "learning_rate": 0.0001849799610780194, + "loss": 4.4766, + "step": 6517 + }, + { + "epoch": 0.6365234375, + "grad_norm": 0.172994926571846, + "learning_rate": 0.00018491576138923938, + "loss": 4.4531, + "step": 6518 + }, + { + "epoch": 0.63662109375, + "grad_norm": 0.17790454626083374, + "learning_rate": 0.0001848515704335148, + "loss": 4.4609, + "step": 6519 + }, + { + "epoch": 0.63671875, + "grad_norm": 0.15660132467746735, + "learning_rate": 0.00018478738821706869, + "loss": 4.4336, + "step": 6520 + }, + { + "epoch": 0.63681640625, + "grad_norm": 0.1735367625951767, + "learning_rate": 0.00018472321474612296, + "loss": 4.4258, + "step": 6521 + }, + { + "epoch": 0.6369140625, + "grad_norm": 0.17013752460479736, + "learning_rate": 0.000184659050026899, + "loss": 4.4766, + "step": 6522 + }, + { + "epoch": 0.63701171875, + "grad_norm": 0.17001599073410034, + "learning_rate": 0.00018459489406561687, + "loss": 4.4414, + "step": 6523 + }, + { + "epoch": 0.637109375, + "grad_norm": 0.1695450395345688, + "learning_rate": 0.0001845307468684962, + "loss": 4.4258, + "step": 6524 + }, + { + "epoch": 0.63720703125, + "grad_norm": 0.16070932149887085, + "learning_rate": 0.00018446660844175555, + "loss": 4.4336, + "step": 6525 + }, + { + "epoch": 0.6373046875, + "grad_norm": 0.17669641971588135, + "learning_rate": 0.00018440247879161282, + "loss": 4.4531, + "step": 6526 + }, + { + "epoch": 0.63740234375, + "grad_norm": 0.18499018251895905, + "learning_rate": 0.00018433835792428484, + "loss": 4.4961, + "step": 6527 + }, + { + "epoch": 0.6375, + "grad_norm": 0.157243549823761, + "learning_rate": 0.00018427424584598778, + "loss": 4.4531, + "step": 6528 + }, + { + "epoch": 0.63759765625, + "grad_norm": 0.16400781273841858, + "learning_rate": 0.00018421014256293682, + "loss": 4.4492, + "step": 6529 + }, + { + "epoch": 0.6376953125, + "grad_norm": 0.17751149833202362, + "learning_rate": 0.00018414604808134644, + "loss": 4.4336, + "step": 6530 + }, + { + "epoch": 0.63779296875, + "grad_norm": 0.168592169880867, + "learning_rate": 0.00018408196240742998, + "loss": 4.4258, + "step": 6531 + }, + { + "epoch": 0.637890625, + "grad_norm": 0.16882309317588806, + "learning_rate": 0.0001840178855474001, + "loss": 4.4414, + "step": 6532 + }, + { + "epoch": 0.63798828125, + "grad_norm": 0.16622164845466614, + "learning_rate": 0.00018395381750746886, + "loss": 4.4531, + "step": 6533 + }, + { + "epoch": 0.6380859375, + "grad_norm": 0.15582023561000824, + "learning_rate": 0.00018388975829384703, + "loss": 4.4531, + "step": 6534 + }, + { + "epoch": 0.63818359375, + "grad_norm": 0.16839727759361267, + "learning_rate": 
0.0001838257079127448, + "loss": 4.4609, + "step": 6535 + }, + { + "epoch": 0.63828125, + "grad_norm": 0.17008529603481293, + "learning_rate": 0.00018376166637037135, + "loss": 4.4375, + "step": 6536 + }, + { + "epoch": 0.63837890625, + "grad_norm": 0.16317474842071533, + "learning_rate": 0.00018369763367293506, + "loss": 4.4805, + "step": 6537 + }, + { + "epoch": 0.6384765625, + "grad_norm": 0.1704435497522354, + "learning_rate": 0.00018363360982664358, + "loss": 4.4453, + "step": 6538 + }, + { + "epoch": 0.63857421875, + "grad_norm": 0.17157818377017975, + "learning_rate": 0.00018356959483770358, + "loss": 4.3984, + "step": 6539 + }, + { + "epoch": 0.638671875, + "grad_norm": 0.16271375119686127, + "learning_rate": 0.00018350558871232077, + "loss": 4.4219, + "step": 6540 + }, + { + "epoch": 0.63876953125, + "grad_norm": 0.19104553759098053, + "learning_rate": 0.00018344159145670014, + "loss": 4.418, + "step": 6541 + }, + { + "epoch": 0.6388671875, + "grad_norm": 0.17354100942611694, + "learning_rate": 0.00018337760307704582, + "loss": 4.457, + "step": 6542 + }, + { + "epoch": 0.63896484375, + "grad_norm": 0.16875891387462616, + "learning_rate": 0.000183313623579561, + "loss": 4.4805, + "step": 6543 + }, + { + "epoch": 0.6390625, + "grad_norm": 0.18595393002033234, + "learning_rate": 0.0001832496529704481, + "loss": 4.4336, + "step": 6544 + }, + { + "epoch": 0.63916015625, + "grad_norm": 0.16237540543079376, + "learning_rate": 0.00018318569125590868, + "loss": 4.4727, + "step": 6545 + }, + { + "epoch": 0.6392578125, + "grad_norm": 0.17790831625461578, + "learning_rate": 0.0001831217384421433, + "loss": 4.4648, + "step": 6546 + }, + { + "epoch": 0.63935546875, + "grad_norm": 0.1662386804819107, + "learning_rate": 0.00018305779453535193, + "loss": 4.4141, + "step": 6547 + }, + { + "epoch": 0.639453125, + "grad_norm": 0.1707170158624649, + "learning_rate": 0.0001829938595417332, + "loss": 4.4375, + "step": 6548 + }, + { + "epoch": 0.63955078125, + "grad_norm": 0.17450222373008728, + "learning_rate": 0.00018292993346748543, + "loss": 4.4609, + "step": 6549 + }, + { + "epoch": 0.6396484375, + "grad_norm": 0.16170261800289154, + "learning_rate": 0.00018286601631880566, + "loss": 4.4297, + "step": 6550 + }, + { + "epoch": 0.63974609375, + "grad_norm": 0.1706267148256302, + "learning_rate": 0.00018280210810189034, + "loss": 4.4609, + "step": 6551 + }, + { + "epoch": 0.63984375, + "grad_norm": 0.16388553380966187, + "learning_rate": 0.0001827382088229349, + "loss": 4.4492, + "step": 6552 + }, + { + "epoch": 0.63994140625, + "grad_norm": 0.16952264308929443, + "learning_rate": 0.00018267431848813386, + "loss": 4.4805, + "step": 6553 + }, + { + "epoch": 0.6400390625, + "grad_norm": 0.15934255719184875, + "learning_rate": 0.00018261043710368115, + "loss": 4.4375, + "step": 6554 + }, + { + "epoch": 0.64013671875, + "grad_norm": 0.16067388653755188, + "learning_rate": 0.00018254656467576942, + "loss": 4.4609, + "step": 6555 + }, + { + "epoch": 0.640234375, + "grad_norm": 0.16260722279548645, + "learning_rate": 0.00018248270121059085, + "loss": 4.4492, + "step": 6556 + }, + { + "epoch": 0.64033203125, + "grad_norm": 0.17711585760116577, + "learning_rate": 0.0001824188467143364, + "loss": 4.4453, + "step": 6557 + }, + { + "epoch": 0.6404296875, + "grad_norm": 0.16524793207645416, + "learning_rate": 0.00018235500119319643, + "loss": 4.4492, + "step": 6558 + }, + { + "epoch": 0.64052734375, + "grad_norm": 0.16832879185676575, + "learning_rate": 0.00018229116465336026, + "loss": 4.4414, + "step": 6559 + }, 
+ { + "epoch": 0.640625, + "grad_norm": 0.16603146493434906, + "learning_rate": 0.0001822273371010165, + "loss": 4.4219, + "step": 6560 + }, + { + "epoch": 0.64072265625, + "grad_norm": 0.15960907936096191, + "learning_rate": 0.0001821635185423528, + "loss": 4.4414, + "step": 6561 + }, + { + "epoch": 0.6408203125, + "grad_norm": 0.17235903441905975, + "learning_rate": 0.00018209970898355583, + "loss": 4.4336, + "step": 6562 + }, + { + "epoch": 0.64091796875, + "grad_norm": 0.16817769408226013, + "learning_rate": 0.00018203590843081157, + "loss": 4.4648, + "step": 6563 + }, + { + "epoch": 0.641015625, + "grad_norm": 0.16076479852199554, + "learning_rate": 0.00018197211689030513, + "loss": 4.4336, + "step": 6564 + }, + { + "epoch": 0.64111328125, + "grad_norm": 0.16619136929512024, + "learning_rate": 0.00018190833436822052, + "loss": 4.4609, + "step": 6565 + }, + { + "epoch": 0.6412109375, + "grad_norm": 0.1595350056886673, + "learning_rate": 0.0001818445608707411, + "loss": 4.457, + "step": 6566 + }, + { + "epoch": 0.64130859375, + "grad_norm": 0.17311188578605652, + "learning_rate": 0.00018178079640404916, + "loss": 4.4531, + "step": 6567 + }, + { + "epoch": 0.64140625, + "grad_norm": 0.15876077115535736, + "learning_rate": 0.00018171704097432638, + "loss": 4.4492, + "step": 6568 + }, + { + "epoch": 0.64150390625, + "grad_norm": 0.16911451518535614, + "learning_rate": 0.00018165329458775338, + "loss": 4.4414, + "step": 6569 + }, + { + "epoch": 0.6416015625, + "grad_norm": 0.1607685089111328, + "learning_rate": 0.0001815895572505099, + "loss": 4.4492, + "step": 6570 + }, + { + "epoch": 0.64169921875, + "grad_norm": 0.15751799941062927, + "learning_rate": 0.0001815258289687749, + "loss": 4.4648, + "step": 6571 + }, + { + "epoch": 0.641796875, + "grad_norm": 0.15604527294635773, + "learning_rate": 0.0001814621097487263, + "loss": 4.4609, + "step": 6572 + }, + { + "epoch": 0.64189453125, + "grad_norm": 0.16265171766281128, + "learning_rate": 0.00018139839959654142, + "loss": 4.457, + "step": 6573 + }, + { + "epoch": 0.6419921875, + "grad_norm": 0.16178679466247559, + "learning_rate": 0.0001813346985183964, + "loss": 4.4453, + "step": 6574 + }, + { + "epoch": 0.64208984375, + "grad_norm": 0.1565990000963211, + "learning_rate": 0.0001812710065204666, + "loss": 4.4219, + "step": 6575 + }, + { + "epoch": 0.6421875, + "grad_norm": 0.16272732615470886, + "learning_rate": 0.00018120732360892662, + "loss": 4.4336, + "step": 6576 + }, + { + "epoch": 0.64228515625, + "grad_norm": 0.1571868360042572, + "learning_rate": 0.00018114364978995002, + "loss": 4.4453, + "step": 6577 + }, + { + "epoch": 0.6423828125, + "grad_norm": 0.16201165318489075, + "learning_rate": 0.0001810799850697095, + "loss": 4.4336, + "step": 6578 + }, + { + "epoch": 0.64248046875, + "grad_norm": 0.16662152111530304, + "learning_rate": 0.00018101632945437707, + "loss": 4.4492, + "step": 6579 + }, + { + "epoch": 0.642578125, + "grad_norm": 0.16453780233860016, + "learning_rate": 0.00018095268295012358, + "loss": 4.3984, + "step": 6580 + }, + { + "epoch": 0.64267578125, + "grad_norm": 0.16192692518234253, + "learning_rate": 0.0001808890455631193, + "loss": 4.4805, + "step": 6581 + }, + { + "epoch": 0.6427734375, + "grad_norm": 0.15904860198497772, + "learning_rate": 0.00018082541729953327, + "loss": 4.4727, + "step": 6582 + }, + { + "epoch": 0.64287109375, + "grad_norm": 0.1652742177248001, + "learning_rate": 0.0001807617981655338, + "loss": 4.4336, + "step": 6583 + }, + { + "epoch": 0.64296875, + "grad_norm": 0.166152223944664, + 
"learning_rate": 0.00018069818816728844, + "loss": 4.4297, + "step": 6584 + }, + { + "epoch": 0.64306640625, + "grad_norm": 0.16925160586833954, + "learning_rate": 0.00018063458731096372, + "loss": 4.4492, + "step": 6585 + }, + { + "epoch": 0.6431640625, + "grad_norm": 0.1634615659713745, + "learning_rate": 0.00018057099560272528, + "loss": 4.4492, + "step": 6586 + }, + { + "epoch": 0.64326171875, + "grad_norm": 0.1739477515220642, + "learning_rate": 0.00018050741304873792, + "loss": 4.4297, + "step": 6587 + }, + { + "epoch": 0.643359375, + "grad_norm": 0.15738916397094727, + "learning_rate": 0.00018044383965516549, + "loss": 4.4414, + "step": 6588 + }, + { + "epoch": 0.64345703125, + "grad_norm": 0.1686093807220459, + "learning_rate": 0.00018038027542817121, + "loss": 4.4219, + "step": 6589 + }, + { + "epoch": 0.6435546875, + "grad_norm": 0.16329875588417053, + "learning_rate": 0.00018031672037391693, + "loss": 4.4297, + "step": 6590 + }, + { + "epoch": 0.64365234375, + "grad_norm": 0.16856150329113007, + "learning_rate": 0.000180253174498564, + "loss": 4.4336, + "step": 6591 + }, + { + "epoch": 0.64375, + "grad_norm": 0.17202115058898926, + "learning_rate": 0.00018018963780827275, + "loss": 4.4883, + "step": 6592 + }, + { + "epoch": 0.64384765625, + "grad_norm": 0.16755880415439606, + "learning_rate": 0.00018012611030920268, + "loss": 4.4922, + "step": 6593 + }, + { + "epoch": 0.6439453125, + "grad_norm": 0.16761143505573273, + "learning_rate": 0.00018006259200751225, + "loss": 4.4727, + "step": 6594 + }, + { + "epoch": 0.64404296875, + "grad_norm": 0.1726960688829422, + "learning_rate": 0.00017999908290935917, + "loss": 4.4453, + "step": 6595 + }, + { + "epoch": 0.644140625, + "grad_norm": 0.18153513967990875, + "learning_rate": 0.00017993558302090024, + "loss": 4.4766, + "step": 6596 + }, + { + "epoch": 0.64423828125, + "grad_norm": 0.15652601420879364, + "learning_rate": 0.00017987209234829131, + "loss": 4.4727, + "step": 6597 + }, + { + "epoch": 0.6443359375, + "grad_norm": 0.176737979054451, + "learning_rate": 0.00017980861089768752, + "loss": 4.4531, + "step": 6598 + }, + { + "epoch": 0.64443359375, + "grad_norm": 0.16430142521858215, + "learning_rate": 0.0001797451386752428, + "loss": 4.4336, + "step": 6599 + }, + { + "epoch": 0.64453125, + "grad_norm": 0.17142462730407715, + "learning_rate": 0.0001796816756871103, + "loss": 4.4688, + "step": 6600 + }, + { + "epoch": 0.64462890625, + "grad_norm": 0.15906395018100739, + "learning_rate": 0.00017961822193944245, + "loss": 4.457, + "step": 6601 + }, + { + "epoch": 0.6447265625, + "grad_norm": 0.16055071353912354, + "learning_rate": 0.0001795547774383906, + "loss": 4.4531, + "step": 6602 + }, + { + "epoch": 0.64482421875, + "grad_norm": 0.168269082903862, + "learning_rate": 0.00017949134219010532, + "loss": 4.4336, + "step": 6603 + }, + { + "epoch": 0.644921875, + "grad_norm": 0.16909301280975342, + "learning_rate": 0.00017942791620073617, + "loss": 4.4531, + "step": 6604 + }, + { + "epoch": 0.64501953125, + "grad_norm": 0.15148700773715973, + "learning_rate": 0.00017936449947643197, + "loss": 4.4492, + "step": 6605 + }, + { + "epoch": 0.6451171875, + "grad_norm": 0.1678888350725174, + "learning_rate": 0.00017930109202334043, + "loss": 4.4297, + "step": 6606 + }, + { + "epoch": 0.64521484375, + "grad_norm": 0.21180656552314758, + "learning_rate": 0.00017923769384760852, + "loss": 4.4219, + "step": 6607 + }, + { + "epoch": 0.6453125, + "grad_norm": 0.16709917783737183, + "learning_rate": 0.00017917430495538217, + "loss": 4.4375, + 
"step": 6608 + }, + { + "epoch": 0.64541015625, + "grad_norm": 0.1580944061279297, + "learning_rate": 0.00017911092535280665, + "loss": 4.4336, + "step": 6609 + }, + { + "epoch": 0.6455078125, + "grad_norm": 0.1674104928970337, + "learning_rate": 0.0001790475550460261, + "loss": 4.4336, + "step": 6610 + }, + { + "epoch": 0.64560546875, + "grad_norm": 0.15678825974464417, + "learning_rate": 0.00017898419404118387, + "loss": 4.4336, + "step": 6611 + }, + { + "epoch": 0.645703125, + "grad_norm": 0.15718041360378265, + "learning_rate": 0.00017892084234442235, + "loss": 4.4531, + "step": 6612 + }, + { + "epoch": 0.64580078125, + "grad_norm": 0.16736149787902832, + "learning_rate": 0.00017885749996188305, + "loss": 4.4414, + "step": 6613 + }, + { + "epoch": 0.6458984375, + "grad_norm": 0.1605442613363266, + "learning_rate": 0.00017879416689970662, + "loss": 4.4258, + "step": 6614 + }, + { + "epoch": 0.64599609375, + "grad_norm": 0.1538110077381134, + "learning_rate": 0.0001787308431640329, + "loss": 4.4062, + "step": 6615 + }, + { + "epoch": 0.64609375, + "grad_norm": 0.17174288630485535, + "learning_rate": 0.0001786675287610004, + "loss": 4.4531, + "step": 6616 + }, + { + "epoch": 0.64619140625, + "grad_norm": 0.1586245745420456, + "learning_rate": 0.00017860422369674717, + "loss": 4.4492, + "step": 6617 + }, + { + "epoch": 0.6462890625, + "grad_norm": 0.1667192131280899, + "learning_rate": 0.00017854092797741023, + "loss": 4.4688, + "step": 6618 + }, + { + "epoch": 0.64638671875, + "grad_norm": 0.17362408339977264, + "learning_rate": 0.0001784776416091257, + "loss": 4.4062, + "step": 6619 + }, + { + "epoch": 0.646484375, + "grad_norm": 0.17451386153697968, + "learning_rate": 0.00017841436459802866, + "loss": 4.4727, + "step": 6620 + }, + { + "epoch": 0.64658203125, + "grad_norm": 0.16502250730991364, + "learning_rate": 0.00017835109695025348, + "loss": 4.4297, + "step": 6621 + }, + { + "epoch": 0.6466796875, + "grad_norm": 0.15981489419937134, + "learning_rate": 0.00017828783867193348, + "loss": 4.4727, + "step": 6622 + }, + { + "epoch": 0.64677734375, + "grad_norm": 0.16341428458690643, + "learning_rate": 0.0001782245897692012, + "loss": 4.4609, + "step": 6623 + }, + { + "epoch": 0.646875, + "grad_norm": 0.15949034690856934, + "learning_rate": 0.00017816135024818801, + "loss": 4.4609, + "step": 6624 + }, + { + "epoch": 0.64697265625, + "grad_norm": 0.16353049874305725, + "learning_rate": 0.00017809812011502474, + "loss": 4.4492, + "step": 6625 + }, + { + "epoch": 0.6470703125, + "grad_norm": 0.1520174890756607, + "learning_rate": 0.000178034899375841, + "loss": 4.457, + "step": 6626 + }, + { + "epoch": 0.64716796875, + "grad_norm": 0.16313369572162628, + "learning_rate": 0.0001779716880367657, + "loss": 4.4453, + "step": 6627 + }, + { + "epoch": 0.647265625, + "grad_norm": 0.17339791357517242, + "learning_rate": 0.00017790848610392667, + "loss": 4.4609, + "step": 6628 + }, + { + "epoch": 0.64736328125, + "grad_norm": 0.16614224016666412, + "learning_rate": 0.00017784529358345102, + "loss": 4.4688, + "step": 6629 + }, + { + "epoch": 0.6474609375, + "grad_norm": 0.17432484030723572, + "learning_rate": 0.0001777821104814647, + "loss": 4.4492, + "step": 6630 + }, + { + "epoch": 0.64755859375, + "grad_norm": 0.19280411303043365, + "learning_rate": 0.000177718936804093, + "loss": 4.4375, + "step": 6631 + }, + { + "epoch": 0.64765625, + "grad_norm": 0.15832000970840454, + "learning_rate": 0.00017765577255746018, + "loss": 4.4453, + "step": 6632 + }, + { + "epoch": 0.64775390625, + "grad_norm": 
0.1799992322921753, + "learning_rate": 0.00017759261774768942, + "loss": 4.4609, + "step": 6633 + }, + { + "epoch": 0.6478515625, + "grad_norm": 0.16497880220413208, + "learning_rate": 0.00017752947238090333, + "loss": 4.4375, + "step": 6634 + }, + { + "epoch": 0.64794921875, + "grad_norm": 0.15873882174491882, + "learning_rate": 0.0001774663364632233, + "loss": 4.4453, + "step": 6635 + }, + { + "epoch": 0.648046875, + "grad_norm": 0.1678294837474823, + "learning_rate": 0.00017740321000076997, + "loss": 4.4883, + "step": 6636 + }, + { + "epoch": 0.64814453125, + "grad_norm": 0.15754452347755432, + "learning_rate": 0.00017734009299966313, + "loss": 4.4648, + "step": 6637 + }, + { + "epoch": 0.6482421875, + "grad_norm": 0.16525287926197052, + "learning_rate": 0.0001772769854660214, + "loss": 4.4258, + "step": 6638 + }, + { + "epoch": 0.64833984375, + "grad_norm": 0.15811565518379211, + "learning_rate": 0.00017721388740596266, + "loss": 4.457, + "step": 6639 + }, + { + "epoch": 0.6484375, + "grad_norm": 0.16690553724765778, + "learning_rate": 0.00017715079882560391, + "loss": 4.4375, + "step": 6640 + }, + { + "epoch": 0.64853515625, + "grad_norm": 0.16214993596076965, + "learning_rate": 0.00017708771973106108, + "loss": 4.4531, + "step": 6641 + }, + { + "epoch": 0.6486328125, + "grad_norm": 0.16396909952163696, + "learning_rate": 0.00017702465012844938, + "loss": 4.4375, + "step": 6642 + }, + { + "epoch": 0.64873046875, + "grad_norm": 0.15671557188034058, + "learning_rate": 0.00017696159002388278, + "loss": 4.4727, + "step": 6643 + }, + { + "epoch": 0.648828125, + "grad_norm": 0.1711798906326294, + "learning_rate": 0.00017689853942347468, + "loss": 4.4258, + "step": 6644 + }, + { + "epoch": 0.64892578125, + "grad_norm": 0.16250351071357727, + "learning_rate": 0.00017683549833333735, + "loss": 4.4531, + "step": 6645 + }, + { + "epoch": 0.6490234375, + "grad_norm": 0.1607770323753357, + "learning_rate": 0.0001767724667595822, + "loss": 4.4609, + "step": 6646 + }, + { + "epoch": 0.64912109375, + "grad_norm": 0.1582522690296173, + "learning_rate": 0.00017670944470831972, + "loss": 4.4609, + "step": 6647 + }, + { + "epoch": 0.64921875, + "grad_norm": 0.16942626237869263, + "learning_rate": 0.00017664643218565952, + "loss": 4.418, + "step": 6648 + }, + { + "epoch": 0.64931640625, + "grad_norm": 0.16460303962230682, + "learning_rate": 0.00017658342919771015, + "loss": 4.4492, + "step": 6649 + }, + { + "epoch": 0.6494140625, + "grad_norm": 0.15273018181324005, + "learning_rate": 0.00017652043575057936, + "loss": 4.457, + "step": 6650 + }, + { + "epoch": 0.64951171875, + "grad_norm": 0.1686968356370926, + "learning_rate": 0.000176457451850374, + "loss": 4.4414, + "step": 6651 + }, + { + "epoch": 0.649609375, + "grad_norm": 0.1569916158914566, + "learning_rate": 0.00017639447750319987, + "loss": 4.4414, + "step": 6652 + }, + { + "epoch": 0.64970703125, + "grad_norm": 0.162063330411911, + "learning_rate": 0.00017633151271516183, + "loss": 4.4648, + "step": 6653 + }, + { + "epoch": 0.6498046875, + "grad_norm": 0.15597319602966309, + "learning_rate": 0.00017626855749236403, + "loss": 4.4922, + "step": 6654 + }, + { + "epoch": 0.64990234375, + "grad_norm": 0.16321271657943726, + "learning_rate": 0.0001762056118409095, + "loss": 4.4336, + "step": 6655 + }, + { + "epoch": 0.65, + "grad_norm": 0.1582133173942566, + "learning_rate": 0.00017614267576690035, + "loss": 4.4375, + "step": 6656 + }, + { + "epoch": 0.65009765625, + "grad_norm": 0.15471340715885162, + "learning_rate": 0.00017607974927643782, + "loss": 
4.4375, + "step": 6657 + }, + { + "epoch": 0.6501953125, + "grad_norm": 0.16564828157424927, + "learning_rate": 0.00017601683237562226, + "loss": 4.4492, + "step": 6658 + }, + { + "epoch": 0.65029296875, + "grad_norm": 0.15719667077064514, + "learning_rate": 0.00017595392507055297, + "loss": 4.4492, + "step": 6659 + }, + { + "epoch": 0.650390625, + "grad_norm": 0.16007624566555023, + "learning_rate": 0.00017589102736732842, + "loss": 4.4766, + "step": 6660 + }, + { + "epoch": 0.65048828125, + "grad_norm": 0.16439081728458405, + "learning_rate": 0.0001758281392720461, + "loss": 4.4688, + "step": 6661 + }, + { + "epoch": 0.6505859375, + "grad_norm": 0.1626226007938385, + "learning_rate": 0.00017576526079080267, + "loss": 4.4492, + "step": 6662 + }, + { + "epoch": 0.65068359375, + "grad_norm": 0.1597587764263153, + "learning_rate": 0.00017570239192969366, + "loss": 4.4141, + "step": 6663 + }, + { + "epoch": 0.65078125, + "grad_norm": 0.1559351682662964, + "learning_rate": 0.0001756395326948138, + "loss": 4.4688, + "step": 6664 + }, + { + "epoch": 0.65087890625, + "grad_norm": 0.1569567620754242, + "learning_rate": 0.00017557668309225687, + "loss": 4.4375, + "step": 6665 + }, + { + "epoch": 0.6509765625, + "grad_norm": 0.1634717732667923, + "learning_rate": 0.00017551384312811574, + "loss": 4.4297, + "step": 6666 + }, + { + "epoch": 0.65107421875, + "grad_norm": 0.15941818058490753, + "learning_rate": 0.00017545101280848235, + "loss": 4.4219, + "step": 6667 + }, + { + "epoch": 0.651171875, + "grad_norm": 0.1654597669839859, + "learning_rate": 0.00017538819213944756, + "loss": 4.4141, + "step": 6668 + }, + { + "epoch": 0.65126953125, + "grad_norm": 0.16707180440425873, + "learning_rate": 0.0001753253811271015, + "loss": 4.4336, + "step": 6669 + }, + { + "epoch": 0.6513671875, + "grad_norm": 0.1687074452638626, + "learning_rate": 0.00017526257977753325, + "loss": 4.4375, + "step": 6670 + }, + { + "epoch": 0.65146484375, + "grad_norm": 0.15304097533226013, + "learning_rate": 0.00017519978809683095, + "loss": 4.4414, + "step": 6671 + }, + { + "epoch": 0.6515625, + "grad_norm": 0.16140057146549225, + "learning_rate": 0.00017513700609108197, + "loss": 4.4414, + "step": 6672 + }, + { + "epoch": 0.65166015625, + "grad_norm": 0.15828992426395416, + "learning_rate": 0.0001750742337663724, + "loss": 4.4883, + "step": 6673 + }, + { + "epoch": 0.6517578125, + "grad_norm": 0.15330414474010468, + "learning_rate": 0.00017501147112878758, + "loss": 4.4609, + "step": 6674 + }, + { + "epoch": 0.65185546875, + "grad_norm": 0.15542635321617126, + "learning_rate": 0.00017494871818441205, + "loss": 4.4414, + "step": 6675 + }, + { + "epoch": 0.651953125, + "grad_norm": 0.1604926735162735, + "learning_rate": 0.00017488597493932928, + "loss": 4.4336, + "step": 6676 + }, + { + "epoch": 0.65205078125, + "grad_norm": 0.14803647994995117, + "learning_rate": 0.00017482324139962176, + "loss": 4.4492, + "step": 6677 + }, + { + "epoch": 0.6521484375, + "grad_norm": 0.15389837324619293, + "learning_rate": 0.00017476051757137106, + "loss": 4.4531, + "step": 6678 + }, + { + "epoch": 0.65224609375, + "grad_norm": 0.1597268432378769, + "learning_rate": 0.00017469780346065784, + "loss": 4.4062, + "step": 6679 + }, + { + "epoch": 0.65234375, + "grad_norm": 0.15047875046730042, + "learning_rate": 0.00017463509907356185, + "loss": 4.4531, + "step": 6680 + }, + { + "epoch": 0.65244140625, + "grad_norm": 0.1655719131231308, + "learning_rate": 0.0001745724044161618, + "loss": 4.4766, + "step": 6681 + }, + { + "epoch": 0.6525390625, + 
"grad_norm": 0.16378532350063324, + "learning_rate": 0.00017450971949453564, + "loss": 4.418, + "step": 6682 + }, + { + "epoch": 0.65263671875, + "grad_norm": 0.15148542821407318, + "learning_rate": 0.00017444704431476005, + "loss": 4.4141, + "step": 6683 + }, + { + "epoch": 0.652734375, + "grad_norm": 0.16064228117465973, + "learning_rate": 0.00017438437888291108, + "loss": 4.4375, + "step": 6684 + }, + { + "epoch": 0.65283203125, + "grad_norm": 0.15877410769462585, + "learning_rate": 0.0001743217232050637, + "loss": 4.4297, + "step": 6685 + }, + { + "epoch": 0.6529296875, + "grad_norm": 0.16316398978233337, + "learning_rate": 0.00017425907728729192, + "loss": 4.4336, + "step": 6686 + }, + { + "epoch": 0.65302734375, + "grad_norm": 0.15632984042167664, + "learning_rate": 0.0001741964411356689, + "loss": 4.4688, + "step": 6687 + }, + { + "epoch": 0.653125, + "grad_norm": 0.16845403611660004, + "learning_rate": 0.00017413381475626672, + "loss": 4.4297, + "step": 6688 + }, + { + "epoch": 0.65322265625, + "grad_norm": 0.17895914614200592, + "learning_rate": 0.00017407119815515665, + "loss": 4.4492, + "step": 6689 + }, + { + "epoch": 0.6533203125, + "grad_norm": 0.16231557726860046, + "learning_rate": 0.00017400859133840895, + "loss": 4.4219, + "step": 6690 + }, + { + "epoch": 0.65341796875, + "grad_norm": 0.1626700609922409, + "learning_rate": 0.00017394599431209284, + "loss": 4.4609, + "step": 6691 + }, + { + "epoch": 0.653515625, + "grad_norm": 0.16822363436222076, + "learning_rate": 0.00017388340708227674, + "loss": 4.4141, + "step": 6692 + }, + { + "epoch": 0.65361328125, + "grad_norm": 0.1619095504283905, + "learning_rate": 0.00017382082965502804, + "loss": 4.4453, + "step": 6693 + }, + { + "epoch": 0.6537109375, + "grad_norm": 0.16256427764892578, + "learning_rate": 0.0001737582620364132, + "loss": 4.4609, + "step": 6694 + }, + { + "epoch": 0.65380859375, + "grad_norm": 0.16650788486003876, + "learning_rate": 0.0001736957042324977, + "loss": 4.4609, + "step": 6695 + }, + { + "epoch": 0.65390625, + "grad_norm": 0.16485077142715454, + "learning_rate": 0.00017363315624934614, + "loss": 4.4297, + "step": 6696 + }, + { + "epoch": 0.65400390625, + "grad_norm": 0.16584989428520203, + "learning_rate": 0.0001735706180930221, + "loss": 4.3867, + "step": 6697 + }, + { + "epoch": 0.6541015625, + "grad_norm": 0.16434325277805328, + "learning_rate": 0.0001735080897695882, + "loss": 4.4492, + "step": 6698 + }, + { + "epoch": 0.65419921875, + "grad_norm": 0.1584497094154358, + "learning_rate": 0.0001734455712851063, + "loss": 4.4414, + "step": 6699 + }, + { + "epoch": 0.654296875, + "grad_norm": 0.15746267139911652, + "learning_rate": 0.0001733830626456369, + "loss": 4.4375, + "step": 6700 + }, + { + "epoch": 0.65439453125, + "grad_norm": 0.16314509510993958, + "learning_rate": 0.00017332056385723993, + "loss": 4.4375, + "step": 6701 + }, + { + "epoch": 0.6544921875, + "grad_norm": 0.1514531522989273, + "learning_rate": 0.00017325807492597417, + "loss": 4.4648, + "step": 6702 + }, + { + "epoch": 0.65458984375, + "grad_norm": 0.16346943378448486, + "learning_rate": 0.0001731955958578975, + "loss": 4.4648, + "step": 6703 + }, + { + "epoch": 0.6546875, + "grad_norm": 0.16126970946788788, + "learning_rate": 0.00017313312665906693, + "loss": 4.4219, + "step": 6704 + }, + { + "epoch": 0.65478515625, + "grad_norm": 0.1664515882730484, + "learning_rate": 0.00017307066733553828, + "loss": 4.4414, + "step": 6705 + }, + { + "epoch": 0.6548828125, + "grad_norm": 0.16017796099185944, + "learning_rate": 
0.0001730082178933667, + "loss": 4.4492, + "step": 6706 + }, + { + "epoch": 0.65498046875, + "grad_norm": 0.16916856169700623, + "learning_rate": 0.0001729457783386062, + "loss": 4.4688, + "step": 6707 + }, + { + "epoch": 0.655078125, + "grad_norm": 0.16550259292125702, + "learning_rate": 0.00017288334867730982, + "loss": 4.4492, + "step": 6708 + }, + { + "epoch": 0.65517578125, + "grad_norm": 0.1593656688928604, + "learning_rate": 0.0001728209289155297, + "loss": 4.4258, + "step": 6709 + }, + { + "epoch": 0.6552734375, + "grad_norm": 0.167970210313797, + "learning_rate": 0.000172758519059317, + "loss": 4.4766, + "step": 6710 + }, + { + "epoch": 0.65537109375, + "grad_norm": 0.16045409440994263, + "learning_rate": 0.000172696119114722, + "loss": 4.4648, + "step": 6711 + }, + { + "epoch": 0.65546875, + "grad_norm": 0.16425341367721558, + "learning_rate": 0.0001726337290877939, + "loss": 4.4453, + "step": 6712 + }, + { + "epoch": 0.65556640625, + "grad_norm": 0.1672198325395584, + "learning_rate": 0.00017257134898458098, + "loss": 4.4414, + "step": 6713 + }, + { + "epoch": 0.6556640625, + "grad_norm": 0.18597888946533203, + "learning_rate": 0.00017250897881113063, + "loss": 4.4336, + "step": 6714 + }, + { + "epoch": 0.65576171875, + "grad_norm": 0.17330707609653473, + "learning_rate": 0.00017244661857348915, + "loss": 4.4609, + "step": 6715 + }, + { + "epoch": 0.655859375, + "grad_norm": 0.16889043152332306, + "learning_rate": 0.00017238426827770206, + "loss": 4.4531, + "step": 6716 + }, + { + "epoch": 0.65595703125, + "grad_norm": 0.16311021149158478, + "learning_rate": 0.0001723219279298136, + "loss": 4.4414, + "step": 6717 + }, + { + "epoch": 0.6560546875, + "grad_norm": 0.1544819325208664, + "learning_rate": 0.0001722595975358674, + "loss": 4.4297, + "step": 6718 + }, + { + "epoch": 0.65615234375, + "grad_norm": 0.17125704884529114, + "learning_rate": 0.00017219727710190587, + "loss": 4.4258, + "step": 6719 + }, + { + "epoch": 0.65625, + "grad_norm": 0.15789251029491425, + "learning_rate": 0.0001721349666339706, + "loss": 4.4531, + "step": 6720 + }, + { + "epoch": 0.65634765625, + "grad_norm": 0.17396411299705505, + "learning_rate": 0.00017207266613810217, + "loss": 4.4336, + "step": 6721 + }, + { + "epoch": 0.6564453125, + "grad_norm": 0.15998302400112152, + "learning_rate": 0.00017201037562034023, + "loss": 4.4258, + "step": 6722 + }, + { + "epoch": 0.65654296875, + "grad_norm": 0.16299881041049957, + "learning_rate": 0.00017194809508672335, + "loss": 4.4219, + "step": 6723 + }, + { + "epoch": 0.656640625, + "grad_norm": 0.15735310316085815, + "learning_rate": 0.00017188582454328932, + "loss": 4.4492, + "step": 6724 + }, + { + "epoch": 0.65673828125, + "grad_norm": 0.16271305084228516, + "learning_rate": 0.00017182356399607468, + "loss": 4.418, + "step": 6725 + }, + { + "epoch": 0.6568359375, + "grad_norm": 0.17006705701351166, + "learning_rate": 0.00017176131345111523, + "loss": 4.4141, + "step": 6726 + }, + { + "epoch": 0.65693359375, + "grad_norm": 0.16069914400577545, + "learning_rate": 0.0001716990729144458, + "loss": 4.4492, + "step": 6727 + }, + { + "epoch": 0.65703125, + "grad_norm": 0.16992822289466858, + "learning_rate": 0.00017163684239210013, + "loss": 4.4336, + "step": 6728 + }, + { + "epoch": 0.65712890625, + "grad_norm": 0.16353504359722137, + "learning_rate": 0.00017157462189011104, + "loss": 4.4258, + "step": 6729 + }, + { + "epoch": 0.6572265625, + "grad_norm": 0.1681196242570877, + "learning_rate": 0.00017151241141451044, + "loss": 4.4414, + "step": 6730 + }, + { + 
"epoch": 0.65732421875, + "grad_norm": 0.1642204076051712, + "learning_rate": 0.00017145021097132916, + "loss": 4.4375, + "step": 6731 + }, + { + "epoch": 0.657421875, + "grad_norm": 0.1611696481704712, + "learning_rate": 0.00017138802056659714, + "loss": 4.4141, + "step": 6732 + }, + { + "epoch": 0.65751953125, + "grad_norm": 0.17946702241897583, + "learning_rate": 0.0001713258402063434, + "loss": 4.4414, + "step": 6733 + }, + { + "epoch": 0.6576171875, + "grad_norm": 0.1600063145160675, + "learning_rate": 0.0001712636698965957, + "loss": 4.4062, + "step": 6734 + }, + { + "epoch": 0.65771484375, + "grad_norm": 0.18010611832141876, + "learning_rate": 0.00017120150964338116, + "loss": 4.457, + "step": 6735 + }, + { + "epoch": 0.6578125, + "grad_norm": 0.17762474715709686, + "learning_rate": 0.0001711393594527258, + "loss": 4.4453, + "step": 6736 + }, + { + "epoch": 0.65791015625, + "grad_norm": 0.16762281954288483, + "learning_rate": 0.00017107721933065463, + "loss": 4.4492, + "step": 6737 + }, + { + "epoch": 0.6580078125, + "grad_norm": 0.16565579175949097, + "learning_rate": 0.00017101508928319168, + "loss": 4.4336, + "step": 6738 + }, + { + "epoch": 0.65810546875, + "grad_norm": 0.1672474443912506, + "learning_rate": 0.00017095296931636013, + "loss": 4.4219, + "step": 6739 + }, + { + "epoch": 0.658203125, + "grad_norm": 0.1632225066423416, + "learning_rate": 0.00017089085943618198, + "loss": 4.4375, + "step": 6740 + }, + { + "epoch": 0.65830078125, + "grad_norm": 0.16824348270893097, + "learning_rate": 0.00017082875964867855, + "loss": 4.4648, + "step": 6741 + }, + { + "epoch": 0.6583984375, + "grad_norm": 0.17884430289268494, + "learning_rate": 0.00017076666995986974, + "loss": 4.457, + "step": 6742 + }, + { + "epoch": 0.65849609375, + "grad_norm": 0.15679393708705902, + "learning_rate": 0.00017070459037577484, + "loss": 4.4492, + "step": 6743 + }, + { + "epoch": 0.65859375, + "grad_norm": 0.17152366042137146, + "learning_rate": 0.00017064252090241212, + "loss": 4.457, + "step": 6744 + }, + { + "epoch": 0.65869140625, + "grad_norm": 0.1556115299463272, + "learning_rate": 0.00017058046154579866, + "loss": 4.4453, + "step": 6745 + }, + { + "epoch": 0.6587890625, + "grad_norm": 0.16867060959339142, + "learning_rate": 0.00017051841231195076, + "loss": 4.4648, + "step": 6746 + }, + { + "epoch": 0.65888671875, + "grad_norm": 0.1647936999797821, + "learning_rate": 0.00017045637320688367, + "loss": 4.4336, + "step": 6747 + }, + { + "epoch": 0.658984375, + "grad_norm": 0.17135462164878845, + "learning_rate": 0.00017039434423661166, + "loss": 4.4531, + "step": 6748 + }, + { + "epoch": 0.65908203125, + "grad_norm": 0.15647050738334656, + "learning_rate": 0.0001703323254071481, + "loss": 4.4336, + "step": 6749 + }, + { + "epoch": 0.6591796875, + "grad_norm": 0.16781200468540192, + "learning_rate": 0.00017027031672450514, + "loss": 4.4453, + "step": 6750 + }, + { + "epoch": 0.65927734375, + "grad_norm": 0.16917045414447784, + "learning_rate": 0.00017020831819469413, + "loss": 4.4609, + "step": 6751 + }, + { + "epoch": 0.659375, + "grad_norm": 0.16586163640022278, + "learning_rate": 0.00017014632982372547, + "loss": 4.4219, + "step": 6752 + }, + { + "epoch": 0.65947265625, + "grad_norm": 0.1690550595521927, + "learning_rate": 0.00017008435161760852, + "loss": 4.418, + "step": 6753 + }, + { + "epoch": 0.6595703125, + "grad_norm": 0.16142988204956055, + "learning_rate": 0.0001700223835823516, + "loss": 4.4453, + "step": 6754 + }, + { + "epoch": 0.65966796875, + "grad_norm": 0.15429723262786865, + 
"learning_rate": 0.00016996042572396208, + "loss": 4.4219, + "step": 6755 + }, + { + "epoch": 0.659765625, + "grad_norm": 0.16788004338741302, + "learning_rate": 0.00016989847804844637, + "loss": 4.4297, + "step": 6756 + }, + { + "epoch": 0.65986328125, + "grad_norm": 0.16189299523830414, + "learning_rate": 0.00016983654056180992, + "loss": 4.457, + "step": 6757 + }, + { + "epoch": 0.6599609375, + "grad_norm": 0.16124944388866425, + "learning_rate": 0.00016977461327005722, + "loss": 4.418, + "step": 6758 + }, + { + "epoch": 0.66005859375, + "grad_norm": 0.15592515468597412, + "learning_rate": 0.0001697126961791915, + "loss": 4.4062, + "step": 6759 + }, + { + "epoch": 0.66015625, + "grad_norm": 0.15906524658203125, + "learning_rate": 0.00016965078929521526, + "loss": 4.4375, + "step": 6760 + }, + { + "epoch": 0.66025390625, + "grad_norm": 0.15637515485286713, + "learning_rate": 0.00016958889262413002, + "loss": 4.4258, + "step": 6761 + }, + { + "epoch": 0.6603515625, + "grad_norm": 0.15919892489910126, + "learning_rate": 0.0001695270061719362, + "loss": 4.4727, + "step": 6762 + }, + { + "epoch": 0.66044921875, + "grad_norm": 0.15928472578525543, + "learning_rate": 0.0001694651299446333, + "loss": 4.4531, + "step": 6763 + }, + { + "epoch": 0.660546875, + "grad_norm": 0.1557203084230423, + "learning_rate": 0.0001694032639482198, + "loss": 4.4258, + "step": 6764 + }, + { + "epoch": 0.66064453125, + "grad_norm": 0.1614193618297577, + "learning_rate": 0.00016934140818869315, + "loss": 4.4375, + "step": 6765 + }, + { + "epoch": 0.6607421875, + "grad_norm": 0.15392449498176575, + "learning_rate": 0.00016927956267204992, + "loss": 4.3945, + "step": 6766 + }, + { + "epoch": 0.66083984375, + "grad_norm": 0.16300617158412933, + "learning_rate": 0.00016921772740428553, + "loss": 4.4453, + "step": 6767 + }, + { + "epoch": 0.6609375, + "grad_norm": 0.16175493597984314, + "learning_rate": 0.0001691559023913945, + "loss": 4.418, + "step": 6768 + }, + { + "epoch": 0.66103515625, + "grad_norm": 0.15387171506881714, + "learning_rate": 0.0001690940876393704, + "loss": 4.4531, + "step": 6769 + }, + { + "epoch": 0.6611328125, + "grad_norm": 0.17083501815795898, + "learning_rate": 0.00016903228315420565, + "loss": 4.4492, + "step": 6770 + }, + { + "epoch": 0.66123046875, + "grad_norm": 0.15688204765319824, + "learning_rate": 0.00016897048894189191, + "loss": 4.457, + "step": 6771 + }, + { + "epoch": 0.661328125, + "grad_norm": 0.15842069685459137, + "learning_rate": 0.00016890870500841962, + "loss": 4.4688, + "step": 6772 + }, + { + "epoch": 0.66142578125, + "grad_norm": 0.17439860105514526, + "learning_rate": 0.0001688469313597783, + "loss": 4.4609, + "step": 6773 + }, + { + "epoch": 0.6615234375, + "grad_norm": 0.1636752486228943, + "learning_rate": 0.00016878516800195658, + "loss": 4.4688, + "step": 6774 + }, + { + "epoch": 0.66162109375, + "grad_norm": 0.15780527889728546, + "learning_rate": 0.00016872341494094195, + "loss": 4.418, + "step": 6775 + }, + { + "epoch": 0.66171875, + "grad_norm": 0.18037913739681244, + "learning_rate": 0.00016866167218272093, + "loss": 4.457, + "step": 6776 + }, + { + "epoch": 0.66181640625, + "grad_norm": 0.16653694212436676, + "learning_rate": 0.00016859993973327903, + "loss": 4.4648, + "step": 6777 + }, + { + "epoch": 0.6619140625, + "grad_norm": 0.1573764532804489, + "learning_rate": 0.00016853821759860085, + "loss": 4.4609, + "step": 6778 + }, + { + "epoch": 0.66201171875, + "grad_norm": 0.175594761967659, + "learning_rate": 0.00016847650578466993, + "loss": 4.4453, + 
"step": 6779 + }, + { + "epoch": 0.662109375, + "grad_norm": 0.16191734373569489, + "learning_rate": 0.00016841480429746875, + "loss": 4.4609, + "step": 6780 + }, + { + "epoch": 0.66220703125, + "grad_norm": 0.17826972901821136, + "learning_rate": 0.00016835311314297897, + "loss": 4.4531, + "step": 6781 + }, + { + "epoch": 0.6623046875, + "grad_norm": 0.17231667041778564, + "learning_rate": 0.00016829143232718096, + "loss": 4.4453, + "step": 6782 + }, + { + "epoch": 0.66240234375, + "grad_norm": 0.16429497301578522, + "learning_rate": 0.00016822976185605453, + "loss": 4.4414, + "step": 6783 + }, + { + "epoch": 0.6625, + "grad_norm": 0.16008645296096802, + "learning_rate": 0.0001681681017355779, + "loss": 4.4414, + "step": 6784 + }, + { + "epoch": 0.66259765625, + "grad_norm": 0.17184315621852875, + "learning_rate": 0.00016810645197172874, + "loss": 4.4336, + "step": 6785 + }, + { + "epoch": 0.6626953125, + "grad_norm": 0.16322170197963715, + "learning_rate": 0.0001680448125704836, + "loss": 4.4844, + "step": 6786 + }, + { + "epoch": 0.66279296875, + "grad_norm": 0.16971109807491302, + "learning_rate": 0.00016798318353781794, + "loss": 4.4258, + "step": 6787 + }, + { + "epoch": 0.662890625, + "grad_norm": 0.18317794799804688, + "learning_rate": 0.00016792156487970635, + "loss": 4.4336, + "step": 6788 + }, + { + "epoch": 0.66298828125, + "grad_norm": 0.16676779091358185, + "learning_rate": 0.00016785995660212227, + "loss": 4.4219, + "step": 6789 + }, + { + "epoch": 0.6630859375, + "grad_norm": 0.1712525188922882, + "learning_rate": 0.0001677983587110382, + "loss": 4.4531, + "step": 6790 + }, + { + "epoch": 0.66318359375, + "grad_norm": 0.1724303960800171, + "learning_rate": 0.00016773677121242574, + "loss": 4.4297, + "step": 6791 + }, + { + "epoch": 0.66328125, + "grad_norm": 0.16387148201465607, + "learning_rate": 0.0001676751941122554, + "loss": 4.4062, + "step": 6792 + }, + { + "epoch": 0.66337890625, + "grad_norm": 0.1887136548757553, + "learning_rate": 0.00016761362741649644, + "loss": 4.4023, + "step": 6793 + }, + { + "epoch": 0.6634765625, + "grad_norm": 0.16771678626537323, + "learning_rate": 0.0001675520711311175, + "loss": 4.4141, + "step": 6794 + }, + { + "epoch": 0.66357421875, + "grad_norm": 0.1763731837272644, + "learning_rate": 0.000167490525262086, + "loss": 4.4492, + "step": 6795 + }, + { + "epoch": 0.663671875, + "grad_norm": 0.16985757648944855, + "learning_rate": 0.00016742898981536843, + "loss": 4.457, + "step": 6796 + }, + { + "epoch": 0.66376953125, + "grad_norm": 0.16576483845710754, + "learning_rate": 0.0001673674647969302, + "loss": 4.4102, + "step": 6797 + }, + { + "epoch": 0.6638671875, + "grad_norm": 0.1718452274799347, + "learning_rate": 0.00016730595021273572, + "loss": 4.4336, + "step": 6798 + }, + { + "epoch": 0.66396484375, + "grad_norm": 0.1649199277162552, + "learning_rate": 0.0001672444460687485, + "loss": 4.4102, + "step": 6799 + }, + { + "epoch": 0.6640625, + "grad_norm": 0.16928128898143768, + "learning_rate": 0.00016718295237093094, + "loss": 4.457, + "step": 6800 + }, + { + "epoch": 0.66416015625, + "grad_norm": 0.16083301603794098, + "learning_rate": 0.00016712146912524432, + "loss": 4.4336, + "step": 6801 + }, + { + "epoch": 0.6642578125, + "grad_norm": 0.16315534710884094, + "learning_rate": 0.00016705999633764908, + "loss": 4.4688, + "step": 6802 + }, + { + "epoch": 0.66435546875, + "grad_norm": 0.15670445561408997, + "learning_rate": 0.00016699853401410465, + "loss": 4.4336, + "step": 6803 + }, + { + "epoch": 0.664453125, + "grad_norm": 
0.15951308608055115, + "learning_rate": 0.0001669370821605693, + "loss": 4.5, + "step": 6804 + }, + { + "epoch": 0.66455078125, + "grad_norm": 0.15847595036029816, + "learning_rate": 0.00016687564078300042, + "loss": 4.4414, + "step": 6805 + }, + { + "epoch": 0.6646484375, + "grad_norm": 0.1725224256515503, + "learning_rate": 0.0001668142098873543, + "loss": 4.4414, + "step": 6806 + }, + { + "epoch": 0.66474609375, + "grad_norm": 0.16106191277503967, + "learning_rate": 0.00016675278947958627, + "loss": 4.4375, + "step": 6807 + }, + { + "epoch": 0.66484375, + "grad_norm": 0.16809219121932983, + "learning_rate": 0.00016669137956565075, + "loss": 4.4336, + "step": 6808 + }, + { + "epoch": 0.66494140625, + "grad_norm": 0.1598915308713913, + "learning_rate": 0.0001666299801515008, + "loss": 4.4531, + "step": 6809 + }, + { + "epoch": 0.6650390625, + "grad_norm": 0.16365166008472443, + "learning_rate": 0.00016656859124308872, + "loss": 4.4414, + "step": 6810 + }, + { + "epoch": 0.66513671875, + "grad_norm": 0.15969382226467133, + "learning_rate": 0.00016650721284636584, + "loss": 4.4336, + "step": 6811 + }, + { + "epoch": 0.665234375, + "grad_norm": 0.1662243753671646, + "learning_rate": 0.00016644584496728232, + "loss": 4.4453, + "step": 6812 + }, + { + "epoch": 0.66533203125, + "grad_norm": 0.16214874386787415, + "learning_rate": 0.00016638448761178734, + "loss": 4.4297, + "step": 6813 + }, + { + "epoch": 0.6654296875, + "grad_norm": 0.1604100912809372, + "learning_rate": 0.00016632314078582916, + "loss": 4.4297, + "step": 6814 + }, + { + "epoch": 0.66552734375, + "grad_norm": 0.16083599627017975, + "learning_rate": 0.00016626180449535487, + "loss": 4.4609, + "step": 6815 + }, + { + "epoch": 0.665625, + "grad_norm": 0.15639621019363403, + "learning_rate": 0.0001662004787463106, + "loss": 4.418, + "step": 6816 + }, + { + "epoch": 0.66572265625, + "grad_norm": 0.1566103845834732, + "learning_rate": 0.0001661391635446416, + "loss": 4.4688, + "step": 6817 + }, + { + "epoch": 0.6658203125, + "grad_norm": 0.16249489784240723, + "learning_rate": 0.00016607785889629172, + "loss": 4.418, + "step": 6818 + }, + { + "epoch": 0.66591796875, + "grad_norm": 0.16123254597187042, + "learning_rate": 0.00016601656480720424, + "loss": 4.4648, + "step": 6819 + }, + { + "epoch": 0.666015625, + "grad_norm": 0.15287061035633087, + "learning_rate": 0.0001659552812833211, + "loss": 4.4492, + "step": 6820 + }, + { + "epoch": 0.66611328125, + "grad_norm": 0.17211276292800903, + "learning_rate": 0.00016589400833058333, + "loss": 4.4609, + "step": 6821 + }, + { + "epoch": 0.6662109375, + "grad_norm": 0.16340336203575134, + "learning_rate": 0.00016583274595493097, + "loss": 4.4531, + "step": 6822 + }, + { + "epoch": 0.66630859375, + "grad_norm": 0.15912170708179474, + "learning_rate": 0.00016577149416230298, + "loss": 4.418, + "step": 6823 + }, + { + "epoch": 0.66640625, + "grad_norm": 0.1614779382944107, + "learning_rate": 0.00016571025295863728, + "loss": 4.4141, + "step": 6824 + }, + { + "epoch": 0.66650390625, + "grad_norm": 0.15901592373847961, + "learning_rate": 0.0001656490223498709, + "loss": 4.418, + "step": 6825 + }, + { + "epoch": 0.6666015625, + "grad_norm": 0.1628388911485672, + "learning_rate": 0.00016558780234193955, + "loss": 4.4648, + "step": 6826 + }, + { + "epoch": 0.66669921875, + "grad_norm": 0.16640827059745789, + "learning_rate": 0.00016552659294077812, + "loss": 4.4453, + "step": 6827 + }, + { + "epoch": 0.666796875, + "grad_norm": 0.15849199891090393, + "learning_rate": 0.0001654653941523206, + 
"loss": 4.4297, + "step": 6828 + }, + { + "epoch": 0.66689453125, + "grad_norm": 0.164942666888237, + "learning_rate": 0.0001654042059824996, + "loss": 4.4492, + "step": 6829 + }, + { + "epoch": 0.6669921875, + "grad_norm": 0.15916621685028076, + "learning_rate": 0.0001653430284372471, + "loss": 4.4258, + "step": 6830 + }, + { + "epoch": 0.66708984375, + "grad_norm": 0.1620337963104248, + "learning_rate": 0.0001652818615224937, + "loss": 4.418, + "step": 6831 + }, + { + "epoch": 0.6671875, + "grad_norm": 0.16376128792762756, + "learning_rate": 0.00016522070524416918, + "loss": 4.457, + "step": 6832 + }, + { + "epoch": 0.66728515625, + "grad_norm": 0.15288561582565308, + "learning_rate": 0.00016515955960820223, + "loss": 4.4375, + "step": 6833 + }, + { + "epoch": 0.6673828125, + "grad_norm": 0.1749425232410431, + "learning_rate": 0.00016509842462052055, + "loss": 4.4141, + "step": 6834 + }, + { + "epoch": 0.66748046875, + "grad_norm": 0.15831801295280457, + "learning_rate": 0.00016503730028705055, + "loss": 4.4297, + "step": 6835 + }, + { + "epoch": 0.667578125, + "grad_norm": 0.16037020087242126, + "learning_rate": 0.00016497618661371808, + "loss": 4.4727, + "step": 6836 + }, + { + "epoch": 0.66767578125, + "grad_norm": 0.1630600541830063, + "learning_rate": 0.00016491508360644752, + "loss": 4.4531, + "step": 6837 + }, + { + "epoch": 0.6677734375, + "grad_norm": 0.15258893370628357, + "learning_rate": 0.00016485399127116247, + "loss": 4.4336, + "step": 6838 + }, + { + "epoch": 0.66787109375, + "grad_norm": 0.17008721828460693, + "learning_rate": 0.0001647929096137854, + "loss": 4.4648, + "step": 6839 + }, + { + "epoch": 0.66796875, + "grad_norm": 0.17131967842578888, + "learning_rate": 0.00016473183864023779, + "loss": 4.4375, + "step": 6840 + }, + { + "epoch": 0.66806640625, + "grad_norm": 0.16434550285339355, + "learning_rate": 0.00016467077835644, + "loss": 4.4844, + "step": 6841 + }, + { + "epoch": 0.6681640625, + "grad_norm": 0.1551322042942047, + "learning_rate": 0.00016460972876831154, + "loss": 4.4258, + "step": 6842 + }, + { + "epoch": 0.66826171875, + "grad_norm": 0.16217729449272156, + "learning_rate": 0.00016454868988177054, + "loss": 4.4297, + "step": 6843 + }, + { + "epoch": 0.668359375, + "grad_norm": 0.1546533852815628, + "learning_rate": 0.00016448766170273446, + "loss": 4.418, + "step": 6844 + }, + { + "epoch": 0.66845703125, + "grad_norm": 0.16041815280914307, + "learning_rate": 0.0001644266442371195, + "loss": 4.4219, + "step": 6845 + }, + { + "epoch": 0.6685546875, + "grad_norm": 0.15747377276420593, + "learning_rate": 0.00016436563749084093, + "loss": 4.4141, + "step": 6846 + }, + { + "epoch": 0.66865234375, + "grad_norm": 0.15786083042621613, + "learning_rate": 0.00016430464146981294, + "loss": 4.418, + "step": 6847 + }, + { + "epoch": 0.66875, + "grad_norm": 0.1662689447402954, + "learning_rate": 0.00016424365617994865, + "loss": 4.4102, + "step": 6848 + }, + { + "epoch": 0.66884765625, + "grad_norm": 0.16440945863723755, + "learning_rate": 0.0001641826816271602, + "loss": 4.4336, + "step": 6849 + }, + { + "epoch": 0.6689453125, + "grad_norm": 0.1681075394153595, + "learning_rate": 0.00016412171781735873, + "loss": 4.4336, + "step": 6850 + }, + { + "epoch": 0.66904296875, + "grad_norm": 0.27236178517341614, + "learning_rate": 0.0001640607647564541, + "loss": 4.4453, + "step": 6851 + }, + { + "epoch": 0.669140625, + "grad_norm": 0.1609262228012085, + "learning_rate": 0.00016399982245035542, + "loss": 4.4453, + "step": 6852 + }, + { + "epoch": 0.66923828125, + 
"grad_norm": 0.16728800535202026, + "learning_rate": 0.00016393889090497054, + "loss": 4.4531, + "step": 6853 + }, + { + "epoch": 0.6693359375, + "grad_norm": 0.15499664843082428, + "learning_rate": 0.0001638779701262064, + "loss": 4.4258, + "step": 6854 + }, + { + "epoch": 0.66943359375, + "grad_norm": 0.1704072654247284, + "learning_rate": 0.0001638170601199689, + "loss": 4.4648, + "step": 6855 + }, + { + "epoch": 0.66953125, + "grad_norm": 0.1507396399974823, + "learning_rate": 0.00016375616089216283, + "loss": 4.4375, + "step": 6856 + }, + { + "epoch": 0.66962890625, + "grad_norm": 0.1657029539346695, + "learning_rate": 0.0001636952724486919, + "loss": 4.4414, + "step": 6857 + }, + { + "epoch": 0.6697265625, + "grad_norm": 0.15872691571712494, + "learning_rate": 0.00016363439479545892, + "loss": 4.457, + "step": 6858 + }, + { + "epoch": 0.66982421875, + "grad_norm": 0.16106653213500977, + "learning_rate": 0.0001635735279383656, + "loss": 4.4297, + "step": 6859 + }, + { + "epoch": 0.669921875, + "grad_norm": 0.16414965689182281, + "learning_rate": 0.00016351267188331243, + "loss": 4.4531, + "step": 6860 + }, + { + "epoch": 0.67001953125, + "grad_norm": 0.15559937059879303, + "learning_rate": 0.00016345182663619898, + "loss": 4.4023, + "step": 6861 + }, + { + "epoch": 0.6701171875, + "grad_norm": 0.16651324927806854, + "learning_rate": 0.00016339099220292393, + "loss": 4.4258, + "step": 6862 + }, + { + "epoch": 0.67021484375, + "grad_norm": 0.16572238504886627, + "learning_rate": 0.00016333016858938466, + "loss": 4.418, + "step": 6863 + }, + { + "epoch": 0.6703125, + "grad_norm": 0.15797880291938782, + "learning_rate": 0.00016326935580147767, + "loss": 4.4688, + "step": 6864 + }, + { + "epoch": 0.67041015625, + "grad_norm": 0.17275284230709076, + "learning_rate": 0.00016320855384509828, + "loss": 4.4648, + "step": 6865 + }, + { + "epoch": 0.6705078125, + "grad_norm": 0.1504029482603073, + "learning_rate": 0.00016314776272614091, + "loss": 4.418, + "step": 6866 + }, + { + "epoch": 0.67060546875, + "grad_norm": 0.17793810367584229, + "learning_rate": 0.00016308698245049886, + "loss": 4.4375, + "step": 6867 + }, + { + "epoch": 0.670703125, + "grad_norm": 0.16743409633636475, + "learning_rate": 0.0001630262130240642, + "loss": 4.4414, + "step": 6868 + }, + { + "epoch": 0.67080078125, + "grad_norm": 0.16052141785621643, + "learning_rate": 0.00016296545445272824, + "loss": 4.4609, + "step": 6869 + }, + { + "epoch": 0.6708984375, + "grad_norm": 0.16387279331684113, + "learning_rate": 0.00016290470674238105, + "loss": 4.4375, + "step": 6870 + }, + { + "epoch": 0.67099609375, + "grad_norm": 0.16108916699886322, + "learning_rate": 0.0001628439698989118, + "loss": 4.4336, + "step": 6871 + }, + { + "epoch": 0.67109375, + "grad_norm": 0.1559152603149414, + "learning_rate": 0.00016278324392820845, + "loss": 4.4453, + "step": 6872 + }, + { + "epoch": 0.67119140625, + "grad_norm": 0.1686200648546219, + "learning_rate": 0.000162722528836158, + "loss": 4.4219, + "step": 6873 + }, + { + "epoch": 0.6712890625, + "grad_norm": 0.15844444930553436, + "learning_rate": 0.00016266182462864633, + "loss": 4.4258, + "step": 6874 + }, + { + "epoch": 0.67138671875, + "grad_norm": 0.1633799523115158, + "learning_rate": 0.0001626011313115583, + "loss": 4.4531, + "step": 6875 + }, + { + "epoch": 0.671484375, + "grad_norm": 0.258285790681839, + "learning_rate": 0.00016254044889077787, + "loss": 4.4375, + "step": 6876 + }, + { + "epoch": 0.67158203125, + "grad_norm": 0.16985295712947845, + "learning_rate": 
0.00016247977737218755, + "loss": 4.4414, + "step": 6877 + }, + { + "epoch": 0.6716796875, + "grad_norm": 0.15573932230472565, + "learning_rate": 0.0001624191167616691, + "loss": 4.4648, + "step": 6878 + }, + { + "epoch": 0.67177734375, + "grad_norm": 0.16294783353805542, + "learning_rate": 0.0001623584670651032, + "loss": 4.4219, + "step": 6879 + }, + { + "epoch": 0.671875, + "grad_norm": 0.16869620978832245, + "learning_rate": 0.00016229782828836947, + "loss": 4.4375, + "step": 6880 + }, + { + "epoch": 0.67197265625, + "grad_norm": 0.15246453881263733, + "learning_rate": 0.00016223720043734633, + "loss": 4.4102, + "step": 6881 + }, + { + "epoch": 0.6720703125, + "grad_norm": 0.16849453747272491, + "learning_rate": 0.0001621765835179113, + "loss": 4.4258, + "step": 6882 + }, + { + "epoch": 0.67216796875, + "grad_norm": 0.15538188815116882, + "learning_rate": 0.00016211597753594076, + "loss": 4.4102, + "step": 6883 + }, + { + "epoch": 0.672265625, + "grad_norm": 0.15586352348327637, + "learning_rate": 0.00016205538249731014, + "loss": 4.4219, + "step": 6884 + }, + { + "epoch": 0.67236328125, + "grad_norm": 0.1562061905860901, + "learning_rate": 0.00016199479840789356, + "loss": 4.4258, + "step": 6885 + }, + { + "epoch": 0.6724609375, + "grad_norm": 0.15599246323108673, + "learning_rate": 0.00016193422527356426, + "loss": 4.4336, + "step": 6886 + }, + { + "epoch": 0.67255859375, + "grad_norm": 0.1503261923789978, + "learning_rate": 0.00016187366310019448, + "loss": 4.3906, + "step": 6887 + }, + { + "epoch": 0.67265625, + "grad_norm": 0.15196335315704346, + "learning_rate": 0.00016181311189365527, + "loss": 4.3984, + "step": 6888 + }, + { + "epoch": 0.67275390625, + "grad_norm": 0.155692920088768, + "learning_rate": 0.00016175257165981663, + "loss": 4.4219, + "step": 6889 + }, + { + "epoch": 0.6728515625, + "grad_norm": 0.1555703729391098, + "learning_rate": 0.0001616920424045476, + "loss": 4.4492, + "step": 6890 + }, + { + "epoch": 0.67294921875, + "grad_norm": 0.1591021567583084, + "learning_rate": 0.000161631524133716, + "loss": 4.4414, + "step": 6891 + }, + { + "epoch": 0.673046875, + "grad_norm": 0.149369016289711, + "learning_rate": 0.00016157101685318871, + "loss": 4.4297, + "step": 6892 + }, + { + "epoch": 0.67314453125, + "grad_norm": 0.1497383713722229, + "learning_rate": 0.00016151052056883158, + "loss": 4.4453, + "step": 6893 + }, + { + "epoch": 0.6732421875, + "grad_norm": 0.14868004620075226, + "learning_rate": 0.00016145003528650914, + "loss": 4.418, + "step": 6894 + }, + { + "epoch": 0.67333984375, + "grad_norm": 0.158140629529953, + "learning_rate": 0.00016138956101208513, + "loss": 4.4414, + "step": 6895 + }, + { + "epoch": 0.6734375, + "grad_norm": 0.16552284359931946, + "learning_rate": 0.00016132909775142207, + "loss": 4.3984, + "step": 6896 + }, + { + "epoch": 0.67353515625, + "grad_norm": 0.15239585936069489, + "learning_rate": 0.0001612686455103815, + "loss": 4.4219, + "step": 6897 + }, + { + "epoch": 0.6736328125, + "grad_norm": 0.16008862853050232, + "learning_rate": 0.00016120820429482385, + "loss": 4.4492, + "step": 6898 + }, + { + "epoch": 0.67373046875, + "grad_norm": 0.16183675825595856, + "learning_rate": 0.00016114777411060853, + "loss": 4.4297, + "step": 6899 + }, + { + "epoch": 0.673828125, + "grad_norm": 0.15333010256290436, + "learning_rate": 0.00016108735496359373, + "loss": 4.4492, + "step": 6900 + }, + { + "epoch": 0.67392578125, + "grad_norm": 0.1567636877298355, + "learning_rate": 0.0001610269468596368, + "loss": 4.4414, + "step": 6901 + }, + { + 
"epoch": 0.6740234375, + "grad_norm": 0.15972357988357544, + "learning_rate": 0.00016096654980459388, + "loss": 4.4375, + "step": 6902 + }, + { + "epoch": 0.67412109375, + "grad_norm": 0.15277595818042755, + "learning_rate": 0.00016090616380431993, + "loss": 4.4336, + "step": 6903 + }, + { + "epoch": 0.67421875, + "grad_norm": 0.1572718322277069, + "learning_rate": 0.00016084578886466905, + "loss": 4.4453, + "step": 6904 + }, + { + "epoch": 0.67431640625, + "grad_norm": 0.16586941480636597, + "learning_rate": 0.00016078542499149419, + "loss": 4.4492, + "step": 6905 + }, + { + "epoch": 0.6744140625, + "grad_norm": 0.16380220651626587, + "learning_rate": 0.00016072507219064714, + "loss": 4.4414, + "step": 6906 + }, + { + "epoch": 0.67451171875, + "grad_norm": 0.15906952321529388, + "learning_rate": 0.0001606647304679788, + "loss": 4.4336, + "step": 6907 + }, + { + "epoch": 0.674609375, + "grad_norm": 0.1548093557357788, + "learning_rate": 0.00016060439982933884, + "loss": 4.4219, + "step": 6908 + }, + { + "epoch": 0.67470703125, + "grad_norm": 0.1634160876274109, + "learning_rate": 0.00016054408028057593, + "loss": 4.4805, + "step": 6909 + }, + { + "epoch": 0.6748046875, + "grad_norm": 0.15590447187423706, + "learning_rate": 0.0001604837718275376, + "loss": 4.4219, + "step": 6910 + }, + { + "epoch": 0.67490234375, + "grad_norm": 0.16823464632034302, + "learning_rate": 0.00016042347447607036, + "loss": 4.4492, + "step": 6911 + }, + { + "epoch": 0.675, + "grad_norm": 0.1652384102344513, + "learning_rate": 0.00016036318823201974, + "loss": 4.4453, + "step": 6912 + }, + { + "epoch": 0.67509765625, + "grad_norm": 0.16569873690605164, + "learning_rate": 0.00016030291310122988, + "loss": 4.4414, + "step": 6913 + }, + { + "epoch": 0.6751953125, + "grad_norm": 0.16117067635059357, + "learning_rate": 0.00016024264908954413, + "loss": 4.418, + "step": 6914 + }, + { + "epoch": 0.67529296875, + "grad_norm": 0.1636311560869217, + "learning_rate": 0.00016018239620280473, + "loss": 4.4414, + "step": 6915 + }, + { + "epoch": 0.675390625, + "grad_norm": 0.16264864802360535, + "learning_rate": 0.00016012215444685275, + "loss": 4.4492, + "step": 6916 + }, + { + "epoch": 0.67548828125, + "grad_norm": 0.16706931591033936, + "learning_rate": 0.00016006192382752815, + "loss": 4.4258, + "step": 6917 + }, + { + "epoch": 0.6755859375, + "grad_norm": 0.15800555050373077, + "learning_rate": 0.00016000170435067002, + "loss": 4.457, + "step": 6918 + }, + { + "epoch": 0.67568359375, + "grad_norm": 0.16805338859558105, + "learning_rate": 0.0001599414960221161, + "loss": 4.4375, + "step": 6919 + }, + { + "epoch": 0.67578125, + "grad_norm": 0.16577310860157013, + "learning_rate": 0.00015988129884770327, + "loss": 4.4336, + "step": 6920 + }, + { + "epoch": 0.67587890625, + "grad_norm": 0.15864504873752594, + "learning_rate": 0.00015982111283326718, + "loss": 4.4375, + "step": 6921 + }, + { + "epoch": 0.6759765625, + "grad_norm": 0.1601056009531021, + "learning_rate": 0.0001597609379846426, + "loss": 4.4062, + "step": 6922 + }, + { + "epoch": 0.67607421875, + "grad_norm": 0.16227127611637115, + "learning_rate": 0.00015970077430766286, + "loss": 4.4648, + "step": 6923 + }, + { + "epoch": 0.676171875, + "grad_norm": 0.1682976931333542, + "learning_rate": 0.00015964062180816053, + "loss": 4.4531, + "step": 6924 + }, + { + "epoch": 0.67626953125, + "grad_norm": 0.17128302156925201, + "learning_rate": 0.00015958048049196695, + "loss": 4.4062, + "step": 6925 + }, + { + "epoch": 0.6763671875, + "grad_norm": 0.15799380838871002, + 
"learning_rate": 0.00015952035036491242, + "loss": 4.4258, + "step": 6926 + }, + { + "epoch": 0.67646484375, + "grad_norm": 0.16626757383346558, + "learning_rate": 0.00015946023143282613, + "loss": 4.4375, + "step": 6927 + }, + { + "epoch": 0.6765625, + "grad_norm": 0.16181285679340363, + "learning_rate": 0.0001594001237015363, + "loss": 4.4492, + "step": 6928 + }, + { + "epoch": 0.67666015625, + "grad_norm": 0.16065916419029236, + "learning_rate": 0.00015934002717686986, + "loss": 4.4297, + "step": 6929 + }, + { + "epoch": 0.6767578125, + "grad_norm": 0.165615051984787, + "learning_rate": 0.00015927994186465278, + "loss": 4.4141, + "step": 6930 + }, + { + "epoch": 0.67685546875, + "grad_norm": 0.1558147668838501, + "learning_rate": 0.00015921986777071, + "loss": 4.4375, + "step": 6931 + }, + { + "epoch": 0.676953125, + "grad_norm": 0.16617564857006073, + "learning_rate": 0.00015915980490086525, + "loss": 4.4258, + "step": 6932 + }, + { + "epoch": 0.67705078125, + "grad_norm": 0.15764722228050232, + "learning_rate": 0.00015909975326094117, + "loss": 4.4531, + "step": 6933 + }, + { + "epoch": 0.6771484375, + "grad_norm": 0.16472077369689941, + "learning_rate": 0.0001590397128567594, + "loss": 4.4375, + "step": 6934 + }, + { + "epoch": 0.67724609375, + "grad_norm": 0.15654291212558746, + "learning_rate": 0.0001589796836941404, + "loss": 4.4648, + "step": 6935 + }, + { + "epoch": 0.67734375, + "grad_norm": 0.1572616547346115, + "learning_rate": 0.00015891966577890365, + "loss": 4.4648, + "step": 6936 + }, + { + "epoch": 0.67744140625, + "grad_norm": 0.1535930633544922, + "learning_rate": 0.0001588596591168675, + "loss": 4.4336, + "step": 6937 + }, + { + "epoch": 0.6775390625, + "grad_norm": 0.16014009714126587, + "learning_rate": 0.00015879966371384912, + "loss": 4.4375, + "step": 6938 + }, + { + "epoch": 0.67763671875, + "grad_norm": 0.1635516732931137, + "learning_rate": 0.0001587396795756647, + "loss": 4.4375, + "step": 6939 + }, + { + "epoch": 0.677734375, + "grad_norm": 0.15151771903038025, + "learning_rate": 0.00015867970670812925, + "loss": 4.4297, + "step": 6940 + }, + { + "epoch": 0.67783203125, + "grad_norm": 0.16250060498714447, + "learning_rate": 0.00015861974511705682, + "loss": 4.4375, + "step": 6941 + }, + { + "epoch": 0.6779296875, + "grad_norm": 0.14689551293849945, + "learning_rate": 0.00015855979480826026, + "loss": 4.4492, + "step": 6942 + }, + { + "epoch": 0.67802734375, + "grad_norm": 0.1616639494895935, + "learning_rate": 0.00015849985578755127, + "loss": 4.3984, + "step": 6943 + }, + { + "epoch": 0.678125, + "grad_norm": 0.15799716114997864, + "learning_rate": 0.00015843992806074053, + "loss": 4.4102, + "step": 6944 + }, + { + "epoch": 0.67822265625, + "grad_norm": 0.1605096012353897, + "learning_rate": 0.00015838001163363775, + "loss": 4.4531, + "step": 6945 + }, + { + "epoch": 0.6783203125, + "grad_norm": 0.15434253215789795, + "learning_rate": 0.00015832010651205126, + "loss": 4.4609, + "step": 6946 + }, + { + "epoch": 0.67841796875, + "grad_norm": 0.17039066553115845, + "learning_rate": 0.00015826021270178852, + "loss": 4.4258, + "step": 6947 + }, + { + "epoch": 0.678515625, + "grad_norm": 0.15312908589839935, + "learning_rate": 0.00015820033020865593, + "loss": 4.4492, + "step": 6948 + }, + { + "epoch": 0.67861328125, + "grad_norm": 0.16028819978237152, + "learning_rate": 0.00015814045903845853, + "loss": 4.4648, + "step": 6949 + }, + { + "epoch": 0.6787109375, + "grad_norm": 0.1515302211046219, + "learning_rate": 0.0001580805991970005, + "loss": 4.4375, + "step": 
6950 + }, + { + "epoch": 0.67880859375, + "grad_norm": 0.17058920860290527, + "learning_rate": 0.00015802075069008486, + "loss": 4.4492, + "step": 6951 + }, + { + "epoch": 0.67890625, + "grad_norm": 0.15240895748138428, + "learning_rate": 0.00015796091352351363, + "loss": 4.4258, + "step": 6952 + }, + { + "epoch": 0.67900390625, + "grad_norm": 0.16838188469409943, + "learning_rate": 0.00015790108770308736, + "loss": 4.4414, + "step": 6953 + }, + { + "epoch": 0.6791015625, + "grad_norm": 0.16070429980754852, + "learning_rate": 0.0001578412732346059, + "loss": 4.4648, + "step": 6954 + }, + { + "epoch": 0.67919921875, + "grad_norm": 0.16527429223060608, + "learning_rate": 0.0001577814701238678, + "loss": 4.4258, + "step": 6955 + }, + { + "epoch": 0.679296875, + "grad_norm": 0.1678277999162674, + "learning_rate": 0.0001577216783766706, + "loss": 4.4492, + "step": 6956 + }, + { + "epoch": 0.67939453125, + "grad_norm": 0.15758788585662842, + "learning_rate": 0.0001576618979988108, + "loss": 4.4258, + "step": 6957 + }, + { + "epoch": 0.6794921875, + "grad_norm": 0.16702519357204437, + "learning_rate": 0.00015760212899608355, + "loss": 4.4531, + "step": 6958 + }, + { + "epoch": 0.67958984375, + "grad_norm": 0.16100944578647614, + "learning_rate": 0.00015754237137428312, + "loss": 4.4219, + "step": 6959 + }, + { + "epoch": 0.6796875, + "grad_norm": 0.15500803291797638, + "learning_rate": 0.00015748262513920269, + "loss": 4.4258, + "step": 6960 + }, + { + "epoch": 0.67978515625, + "grad_norm": 0.1579052060842514, + "learning_rate": 0.0001574228902966341, + "loss": 4.4609, + "step": 6961 + }, + { + "epoch": 0.6798828125, + "grad_norm": 0.15906643867492676, + "learning_rate": 0.00015736316685236823, + "loss": 4.4258, + "step": 6962 + }, + { + "epoch": 0.67998046875, + "grad_norm": 0.1595408171415329, + "learning_rate": 0.00015730345481219498, + "loss": 4.4258, + "step": 6963 + }, + { + "epoch": 0.680078125, + "grad_norm": 0.14761081337928772, + "learning_rate": 0.00015724375418190296, + "loss": 4.4258, + "step": 6964 + }, + { + "epoch": 0.68017578125, + "grad_norm": 0.15418265759944916, + "learning_rate": 0.00015718406496727978, + "loss": 4.3789, + "step": 6965 + }, + { + "epoch": 0.6802734375, + "grad_norm": 0.16396468877792358, + "learning_rate": 0.00015712438717411186, + "loss": 4.3945, + "step": 6966 + }, + { + "epoch": 0.68037109375, + "grad_norm": 0.16081777215003967, + "learning_rate": 0.00015706472080818457, + "loss": 4.4414, + "step": 6967 + }, + { + "epoch": 0.68046875, + "grad_norm": 0.16181306540966034, + "learning_rate": 0.0001570050658752823, + "loss": 4.4414, + "step": 6968 + }, + { + "epoch": 0.68056640625, + "grad_norm": 0.1604819893836975, + "learning_rate": 0.0001569454223811879, + "loss": 4.4414, + "step": 6969 + }, + { + "epoch": 0.6806640625, + "grad_norm": 0.17226724326610565, + "learning_rate": 0.0001568857903316836, + "loss": 4.4375, + "step": 6970 + }, + { + "epoch": 0.68076171875, + "grad_norm": 0.16339553892612457, + "learning_rate": 0.00015682616973255033, + "loss": 4.4258, + "step": 6971 + }, + { + "epoch": 0.680859375, + "grad_norm": 0.16520000994205475, + "learning_rate": 0.0001567665605895678, + "loss": 4.4453, + "step": 6972 + }, + { + "epoch": 0.68095703125, + "grad_norm": 0.15435071289539337, + "learning_rate": 0.00015670696290851478, + "loss": 4.4336, + "step": 6973 + }, + { + "epoch": 0.6810546875, + "grad_norm": 0.16177372634410858, + "learning_rate": 0.00015664737669516884, + "loss": 4.4531, + "step": 6974 + }, + { + "epoch": 0.68115234375, + "grad_norm": 
0.16279937326908112, + "learning_rate": 0.00015658780195530647, + "loss": 4.4141, + "step": 6975 + }, + { + "epoch": 0.68125, + "grad_norm": 0.15206021070480347, + "learning_rate": 0.00015652823869470307, + "loss": 4.4375, + "step": 6976 + }, + { + "epoch": 0.68134765625, + "grad_norm": 0.1643427312374115, + "learning_rate": 0.0001564686869191329, + "loss": 4.4453, + "step": 6977 + }, + { + "epoch": 0.6814453125, + "grad_norm": 0.1563086360692978, + "learning_rate": 0.000156409146634369, + "loss": 4.4531, + "step": 6978 + }, + { + "epoch": 0.68154296875, + "grad_norm": 0.15671886503696442, + "learning_rate": 0.00015634961784618347, + "loss": 4.4297, + "step": 6979 + }, + { + "epoch": 0.681640625, + "grad_norm": 0.151637464761734, + "learning_rate": 0.00015629010056034714, + "loss": 4.4375, + "step": 6980 + }, + { + "epoch": 0.68173828125, + "grad_norm": 0.15398380160331726, + "learning_rate": 0.0001562305947826299, + "loss": 4.4336, + "step": 6981 + }, + { + "epoch": 0.6818359375, + "grad_norm": 0.1576603204011917, + "learning_rate": 0.00015617110051880046, + "loss": 4.4141, + "step": 6982 + }, + { + "epoch": 0.68193359375, + "grad_norm": 0.15473267436027527, + "learning_rate": 0.00015611161777462628, + "loss": 4.4297, + "step": 6983 + }, + { + "epoch": 0.68203125, + "grad_norm": 0.16003772616386414, + "learning_rate": 0.00015605214655587386, + "loss": 4.4102, + "step": 6984 + }, + { + "epoch": 0.68212890625, + "grad_norm": 0.16121114790439606, + "learning_rate": 0.00015599268686830863, + "loss": 4.4414, + "step": 6985 + }, + { + "epoch": 0.6822265625, + "grad_norm": 0.1638038456439972, + "learning_rate": 0.00015593323871769465, + "loss": 4.418, + "step": 6986 + }, + { + "epoch": 0.68232421875, + "grad_norm": 0.1749398410320282, + "learning_rate": 0.000155873802109795, + "loss": 4.4297, + "step": 6987 + }, + { + "epoch": 0.682421875, + "grad_norm": 0.15238840878009796, + "learning_rate": 0.00015581437705037177, + "loss": 4.4531, + "step": 6988 + }, + { + "epoch": 0.68251953125, + "grad_norm": 0.16978104412555695, + "learning_rate": 0.00015575496354518575, + "loss": 4.3945, + "step": 6989 + }, + { + "epoch": 0.6826171875, + "grad_norm": 0.16358016431331635, + "learning_rate": 0.0001556955615999967, + "loss": 4.4219, + "step": 6990 + }, + { + "epoch": 0.68271484375, + "grad_norm": 0.154835507273674, + "learning_rate": 0.00015563617122056324, + "loss": 4.4414, + "step": 6991 + }, + { + "epoch": 0.6828125, + "grad_norm": 0.17992012202739716, + "learning_rate": 0.00015557679241264284, + "loss": 4.4414, + "step": 6992 + }, + { + "epoch": 0.68291015625, + "grad_norm": 0.15468630194664001, + "learning_rate": 0.00015551742518199192, + "loss": 4.3867, + "step": 6993 + }, + { + "epoch": 0.6830078125, + "grad_norm": 0.160928875207901, + "learning_rate": 0.00015545806953436576, + "loss": 4.4609, + "step": 6994 + }, + { + "epoch": 0.68310546875, + "grad_norm": 0.15980033576488495, + "learning_rate": 0.00015539872547551831, + "loss": 4.4336, + "step": 6995 + }, + { + "epoch": 0.683203125, + "grad_norm": 0.15898260474205017, + "learning_rate": 0.00015533939301120276, + "loss": 4.4453, + "step": 6996 + }, + { + "epoch": 0.68330078125, + "grad_norm": 0.1545393168926239, + "learning_rate": 0.00015528007214717085, + "loss": 4.4375, + "step": 6997 + }, + { + "epoch": 0.6833984375, + "grad_norm": 0.16391296684741974, + "learning_rate": 0.0001552207628891734, + "loss": 4.4297, + "step": 6998 + }, + { + "epoch": 0.68349609375, + "grad_norm": 0.1532118320465088, + "learning_rate": 0.0001551614652429601, + "loss": 
4.4453, + "step": 6999 + }, + { + "epoch": 0.68359375, + "grad_norm": 0.15457819402217865, + "learning_rate": 0.00015510217921427938, + "loss": 4.4531, + "step": 7000 + }, + { + "epoch": 0.68369140625, + "grad_norm": 0.1598549783229828, + "learning_rate": 0.0001550429048088786, + "loss": 4.4258, + "step": 7001 + }, + { + "epoch": 0.6837890625, + "grad_norm": 0.15585532784461975, + "learning_rate": 0.0001549836420325042, + "loss": 4.4453, + "step": 7002 + }, + { + "epoch": 0.68388671875, + "grad_norm": 0.15430401265621185, + "learning_rate": 0.000154924390890901, + "loss": 4.4492, + "step": 7003 + }, + { + "epoch": 0.683984375, + "grad_norm": 0.16044233739376068, + "learning_rate": 0.0001548651513898132, + "loss": 4.457, + "step": 7004 + }, + { + "epoch": 0.68408203125, + "grad_norm": 0.15861983597278595, + "learning_rate": 0.00015480592353498356, + "loss": 4.4258, + "step": 7005 + }, + { + "epoch": 0.6841796875, + "grad_norm": 0.15539132058620453, + "learning_rate": 0.0001547467073321539, + "loss": 4.4336, + "step": 7006 + }, + { + "epoch": 0.68427734375, + "grad_norm": 0.1501215398311615, + "learning_rate": 0.0001546875027870648, + "loss": 4.4531, + "step": 7007 + }, + { + "epoch": 0.684375, + "grad_norm": 0.1634976863861084, + "learning_rate": 0.0001546283099054558, + "loss": 4.4531, + "step": 7008 + }, + { + "epoch": 0.68447265625, + "grad_norm": 0.16023896634578705, + "learning_rate": 0.00015456912869306512, + "loss": 4.4766, + "step": 7009 + }, + { + "epoch": 0.6845703125, + "grad_norm": 0.16603170335292816, + "learning_rate": 0.00015450995915563005, + "loss": 4.4375, + "step": 7010 + }, + { + "epoch": 0.68466796875, + "grad_norm": 0.15289606153964996, + "learning_rate": 0.00015445080129888677, + "loss": 4.4414, + "step": 7011 + }, + { + "epoch": 0.684765625, + "grad_norm": 0.1609164923429489, + "learning_rate": 0.00015439165512857005, + "loss": 4.4219, + "step": 7012 + }, + { + "epoch": 0.68486328125, + "grad_norm": 0.16611474752426147, + "learning_rate": 0.00015433252065041379, + "loss": 4.4258, + "step": 7013 + }, + { + "epoch": 0.6849609375, + "grad_norm": 0.15772394835948944, + "learning_rate": 0.00015427339787015072, + "loss": 4.4297, + "step": 7014 + }, + { + "epoch": 0.68505859375, + "grad_norm": 0.17391541600227356, + "learning_rate": 0.0001542142867935123, + "loss": 4.4414, + "step": 7015 + }, + { + "epoch": 0.68515625, + "grad_norm": 0.1675868034362793, + "learning_rate": 0.00015415518742622903, + "loss": 4.4336, + "step": 7016 + }, + { + "epoch": 0.68525390625, + "grad_norm": 0.16025812923908234, + "learning_rate": 0.0001540960997740301, + "loss": 4.4219, + "step": 7017 + }, + { + "epoch": 0.6853515625, + "grad_norm": 0.17404451966285706, + "learning_rate": 0.00015403702384264378, + "loss": 4.418, + "step": 7018 + }, + { + "epoch": 0.68544921875, + "grad_norm": 0.15267981588840485, + "learning_rate": 0.0001539779596377971, + "loss": 4.4609, + "step": 7019 + }, + { + "epoch": 0.685546875, + "grad_norm": 0.16741126775741577, + "learning_rate": 0.0001539189071652158, + "loss": 4.4375, + "step": 7020 + }, + { + "epoch": 0.68564453125, + "grad_norm": 0.14610929787158966, + "learning_rate": 0.0001538598664306246, + "loss": 4.4219, + "step": 7021 + }, + { + "epoch": 0.6857421875, + "grad_norm": 0.17302922904491425, + "learning_rate": 0.0001538008374397472, + "loss": 4.4141, + "step": 7022 + }, + { + "epoch": 0.68583984375, + "grad_norm": 0.16122311353683472, + "learning_rate": 0.00015374182019830607, + "loss": 4.4453, + "step": 7023 + }, + { + "epoch": 0.6859375, + "grad_norm": 
0.15955093502998352, + "learning_rate": 0.0001536828147120224, + "loss": 4.4453, + "step": 7024 + }, + { + "epoch": 0.68603515625, + "grad_norm": 0.16778364777565002, + "learning_rate": 0.00015362382098661653, + "loss": 4.4141, + "step": 7025 + }, + { + "epoch": 0.6861328125, + "grad_norm": 0.16438336670398712, + "learning_rate": 0.00015356483902780743, + "loss": 4.4375, + "step": 7026 + }, + { + "epoch": 0.68623046875, + "grad_norm": 0.1637345850467682, + "learning_rate": 0.00015350586884131307, + "loss": 4.4219, + "step": 7027 + }, + { + "epoch": 0.686328125, + "grad_norm": 0.15820477902889252, + "learning_rate": 0.00015344691043285005, + "loss": 4.4414, + "step": 7028 + }, + { + "epoch": 0.68642578125, + "grad_norm": 0.16374491155147552, + "learning_rate": 0.0001533879638081341, + "loss": 4.4414, + "step": 7029 + }, + { + "epoch": 0.6865234375, + "grad_norm": 0.15741823613643646, + "learning_rate": 0.0001533290289728797, + "loss": 4.4219, + "step": 7030 + }, + { + "epoch": 0.68662109375, + "grad_norm": 0.15712347626686096, + "learning_rate": 0.0001532701059328001, + "loss": 4.4492, + "step": 7031 + }, + { + "epoch": 0.68671875, + "grad_norm": 0.15022200345993042, + "learning_rate": 0.00015321119469360756, + "loss": 4.4492, + "step": 7032 + }, + { + "epoch": 0.68681640625, + "grad_norm": 0.15827958285808563, + "learning_rate": 0.00015315229526101316, + "loss": 4.4219, + "step": 7033 + }, + { + "epoch": 0.6869140625, + "grad_norm": 0.16022320091724396, + "learning_rate": 0.00015309340764072668, + "loss": 4.4375, + "step": 7034 + }, + { + "epoch": 0.68701171875, + "grad_norm": 0.15234516561031342, + "learning_rate": 0.000153034531838457, + "loss": 4.3867, + "step": 7035 + }, + { + "epoch": 0.687109375, + "grad_norm": 0.16296015679836273, + "learning_rate": 0.00015297566785991173, + "loss": 4.4219, + "step": 7036 + }, + { + "epoch": 0.68720703125, + "grad_norm": 0.14686110615730286, + "learning_rate": 0.00015291681571079718, + "loss": 4.418, + "step": 7037 + }, + { + "epoch": 0.6873046875, + "grad_norm": 0.15875661373138428, + "learning_rate": 0.00015285797539681885, + "loss": 4.4492, + "step": 7038 + }, + { + "epoch": 0.68740234375, + "grad_norm": 0.16582246124744415, + "learning_rate": 0.00015279914692368075, + "loss": 4.4102, + "step": 7039 + }, + { + "epoch": 0.6875, + "grad_norm": 0.16077248752117157, + "learning_rate": 0.000152740330297086, + "loss": 4.4336, + "step": 7040 + }, + { + "epoch": 0.68759765625, + "grad_norm": 0.16548088192939758, + "learning_rate": 0.00015268152552273646, + "loss": 4.418, + "step": 7041 + }, + { + "epoch": 0.6876953125, + "grad_norm": 0.15918448567390442, + "learning_rate": 0.0001526227326063328, + "loss": 4.4336, + "step": 7042 + }, + { + "epoch": 0.68779296875, + "grad_norm": 0.15753185749053955, + "learning_rate": 0.00015256395155357466, + "loss": 4.4062, + "step": 7043 + }, + { + "epoch": 0.687890625, + "grad_norm": 0.1597943753004074, + "learning_rate": 0.00015250518237016052, + "loss": 4.4297, + "step": 7044 + }, + { + "epoch": 0.68798828125, + "grad_norm": 0.1532880663871765, + "learning_rate": 0.0001524464250617875, + "loss": 4.4102, + "step": 7045 + }, + { + "epoch": 0.6880859375, + "grad_norm": 0.15117032825946808, + "learning_rate": 0.0001523876796341518, + "loss": 4.4336, + "step": 7046 + }, + { + "epoch": 0.68818359375, + "grad_norm": 0.15277153253555298, + "learning_rate": 0.0001523289460929484, + "loss": 4.4141, + "step": 7047 + }, + { + "epoch": 0.68828125, + "grad_norm": 0.15270403027534485, + "learning_rate": 0.00015227022444387107, + 
"loss": 4.4648, + "step": 7048 + }, + { + "epoch": 0.68837890625, + "grad_norm": 0.15782606601715088, + "learning_rate": 0.00015221151469261253, + "loss": 4.418, + "step": 7049 + }, + { + "epoch": 0.6884765625, + "grad_norm": 0.15392521023750305, + "learning_rate": 0.00015215281684486425, + "loss": 4.4258, + "step": 7050 + }, + { + "epoch": 0.68857421875, + "grad_norm": 0.15908081829547882, + "learning_rate": 0.00015209413090631663, + "loss": 4.4062, + "step": 7051 + }, + { + "epoch": 0.688671875, + "grad_norm": 0.16001223027706146, + "learning_rate": 0.00015203545688265887, + "loss": 4.4336, + "step": 7052 + }, + { + "epoch": 0.68876953125, + "grad_norm": 0.16236372292041779, + "learning_rate": 0.0001519767947795791, + "loss": 4.3945, + "step": 7053 + }, + { + "epoch": 0.6888671875, + "grad_norm": 0.1612752079963684, + "learning_rate": 0.000151918144602764, + "loss": 4.418, + "step": 7054 + }, + { + "epoch": 0.68896484375, + "grad_norm": 0.16015547513961792, + "learning_rate": 0.00015185950635789941, + "loss": 4.457, + "step": 7055 + }, + { + "epoch": 0.6890625, + "grad_norm": 0.15501748025417328, + "learning_rate": 0.00015180088005066996, + "loss": 4.4648, + "step": 7056 + }, + { + "epoch": 0.68916015625, + "grad_norm": 0.1583217829465866, + "learning_rate": 0.00015174226568675898, + "loss": 4.4727, + "step": 7057 + }, + { + "epoch": 0.6892578125, + "grad_norm": 0.1707146018743515, + "learning_rate": 0.00015168366327184885, + "loss": 4.4219, + "step": 7058 + }, + { + "epoch": 0.68935546875, + "grad_norm": 0.162746861577034, + "learning_rate": 0.00015162507281162057, + "loss": 4.457, + "step": 7059 + }, + { + "epoch": 0.689453125, + "grad_norm": 0.1522258222103119, + "learning_rate": 0.00015156649431175413, + "loss": 4.418, + "step": 7060 + }, + { + "epoch": 0.68955078125, + "grad_norm": 0.15916073322296143, + "learning_rate": 0.00015150792777792843, + "loss": 4.4414, + "step": 7061 + }, + { + "epoch": 0.6896484375, + "grad_norm": 0.15725359320640564, + "learning_rate": 0.0001514493732158209, + "loss": 4.4219, + "step": 7062 + }, + { + "epoch": 0.68974609375, + "grad_norm": 0.1624823808670044, + "learning_rate": 0.00015139083063110808, + "loss": 4.418, + "step": 7063 + }, + { + "epoch": 0.68984375, + "grad_norm": 0.1681053638458252, + "learning_rate": 0.0001513323000294653, + "loss": 4.4453, + "step": 7064 + }, + { + "epoch": 0.68994140625, + "grad_norm": 0.1550324261188507, + "learning_rate": 0.0001512737814165667, + "loss": 4.4375, + "step": 7065 + }, + { + "epoch": 0.6900390625, + "grad_norm": 0.14830714464187622, + "learning_rate": 0.00015121527479808528, + "loss": 4.4453, + "step": 7066 + }, + { + "epoch": 0.69013671875, + "grad_norm": 0.15241822600364685, + "learning_rate": 0.00015115678017969282, + "loss": 4.4414, + "step": 7067 + }, + { + "epoch": 0.690234375, + "grad_norm": 0.1520991176366806, + "learning_rate": 0.00015109829756706, + "loss": 4.4102, + "step": 7068 + }, + { + "epoch": 0.69033203125, + "grad_norm": 0.1653759926557541, + "learning_rate": 0.0001510398269658564, + "loss": 4.418, + "step": 7069 + }, + { + "epoch": 0.6904296875, + "grad_norm": 0.17033636569976807, + "learning_rate": 0.00015098136838175014, + "loss": 4.4492, + "step": 7070 + }, + { + "epoch": 0.69052734375, + "grad_norm": 0.1691121608018875, + "learning_rate": 0.0001509229218204085, + "loss": 4.4219, + "step": 7071 + }, + { + "epoch": 0.690625, + "grad_norm": 0.16420342028141022, + "learning_rate": 0.0001508644872874975, + "loss": 4.3984, + "step": 7072 + }, + { + "epoch": 0.69072265625, + "grad_norm": 
0.16921532154083252, + "learning_rate": 0.00015080606478868193, + "loss": 4.4062, + "step": 7073 + }, + { + "epoch": 0.6908203125, + "grad_norm": 0.1548907607793808, + "learning_rate": 0.0001507476543296255, + "loss": 4.457, + "step": 7074 + }, + { + "epoch": 0.69091796875, + "grad_norm": 0.16696563363075256, + "learning_rate": 0.00015068925591599065, + "loss": 4.4531, + "step": 7075 + }, + { + "epoch": 0.691015625, + "grad_norm": 0.1578383892774582, + "learning_rate": 0.00015063086955343873, + "loss": 4.4258, + "step": 7076 + }, + { + "epoch": 0.69111328125, + "grad_norm": 0.1651468127965927, + "learning_rate": 0.00015057249524762996, + "loss": 4.4375, + "step": 7077 + }, + { + "epoch": 0.6912109375, + "grad_norm": 0.15036940574645996, + "learning_rate": 0.00015051413300422333, + "loss": 4.4062, + "step": 7078 + }, + { + "epoch": 0.69130859375, + "grad_norm": 0.16034796833992004, + "learning_rate": 0.00015045578282887656, + "loss": 4.4414, + "step": 7079 + }, + { + "epoch": 0.69140625, + "grad_norm": 0.14991596341133118, + "learning_rate": 0.00015039744472724635, + "loss": 4.4102, + "step": 7080 + }, + { + "epoch": 0.69150390625, + "grad_norm": 0.156624436378479, + "learning_rate": 0.00015033911870498818, + "loss": 4.4414, + "step": 7081 + }, + { + "epoch": 0.6916015625, + "grad_norm": 0.1668628454208374, + "learning_rate": 0.00015028080476775637, + "loss": 4.4297, + "step": 7082 + }, + { + "epoch": 0.69169921875, + "grad_norm": 0.15509940683841705, + "learning_rate": 0.00015022250292120407, + "loss": 4.4414, + "step": 7083 + }, + { + "epoch": 0.691796875, + "grad_norm": 0.15824024379253387, + "learning_rate": 0.00015016421317098327, + "loss": 4.4258, + "step": 7084 + }, + { + "epoch": 0.69189453125, + "grad_norm": 0.157178595662117, + "learning_rate": 0.0001501059355227447, + "loss": 4.4375, + "step": 7085 + }, + { + "epoch": 0.6919921875, + "grad_norm": 0.1650533527135849, + "learning_rate": 0.00015004766998213815, + "loss": 4.4609, + "step": 7086 + }, + { + "epoch": 0.69208984375, + "grad_norm": 0.1542055606842041, + "learning_rate": 0.00014998941655481183, + "loss": 4.3984, + "step": 7087 + }, + { + "epoch": 0.6921875, + "grad_norm": 0.16022159159183502, + "learning_rate": 0.00014993117524641315, + "loss": 4.4492, + "step": 7088 + }, + { + "epoch": 0.69228515625, + "grad_norm": 0.1588653326034546, + "learning_rate": 0.00014987294606258816, + "loss": 4.3867, + "step": 7089 + }, + { + "epoch": 0.6923828125, + "grad_norm": 0.1628570705652237, + "learning_rate": 0.0001498147290089818, + "loss": 4.4062, + "step": 7090 + }, + { + "epoch": 0.69248046875, + "grad_norm": 0.15695366263389587, + "learning_rate": 0.00014975652409123785, + "loss": 4.4336, + "step": 7091 + }, + { + "epoch": 0.692578125, + "grad_norm": 0.16258077323436737, + "learning_rate": 0.00014969833131499885, + "loss": 4.4023, + "step": 7092 + }, + { + "epoch": 0.69267578125, + "grad_norm": 0.16879941523075104, + "learning_rate": 0.00014964015068590623, + "loss": 4.4453, + "step": 7093 + }, + { + "epoch": 0.6927734375, + "grad_norm": 0.1535227745771408, + "learning_rate": 0.00014958198220960011, + "loss": 4.418, + "step": 7094 + }, + { + "epoch": 0.69287109375, + "grad_norm": 0.16817918419837952, + "learning_rate": 0.00014952382589171974, + "loss": 4.4297, + "step": 7095 + }, + { + "epoch": 0.69296875, + "grad_norm": 0.15898838639259338, + "learning_rate": 0.00014946568173790275, + "loss": 4.4219, + "step": 7096 + }, + { + "epoch": 0.69306640625, + "grad_norm": 0.16280797123908997, + "learning_rate": 0.00014940754975378595, + 
"loss": 4.4375, + "step": 7097 + }, + { + "epoch": 0.6931640625, + "grad_norm": 0.16791841387748718, + "learning_rate": 0.00014934942994500477, + "loss": 4.4492, + "step": 7098 + }, + { + "epoch": 0.69326171875, + "grad_norm": 0.1550721675157547, + "learning_rate": 0.00014929132231719357, + "loss": 4.4492, + "step": 7099 + }, + { + "epoch": 0.693359375, + "grad_norm": 0.16627748310565948, + "learning_rate": 0.00014923322687598545, + "loss": 4.418, + "step": 7100 + }, + { + "epoch": 0.69345703125, + "grad_norm": 0.1626335233449936, + "learning_rate": 0.0001491751436270124, + "loss": 4.4414, + "step": 7101 + }, + { + "epoch": 0.6935546875, + "grad_norm": 0.15744799375534058, + "learning_rate": 0.00014911707257590524, + "loss": 4.4414, + "step": 7102 + }, + { + "epoch": 0.69365234375, + "grad_norm": 0.15828455984592438, + "learning_rate": 0.0001490590137282936, + "loss": 4.4141, + "step": 7103 + }, + { + "epoch": 0.69375, + "grad_norm": 0.15398378670215607, + "learning_rate": 0.00014900096708980571, + "loss": 4.3633, + "step": 7104 + }, + { + "epoch": 0.69384765625, + "grad_norm": 0.1660323143005371, + "learning_rate": 0.00014894293266606889, + "loss": 4.4609, + "step": 7105 + }, + { + "epoch": 0.6939453125, + "grad_norm": 0.1580164134502411, + "learning_rate": 0.00014888491046270925, + "loss": 4.4141, + "step": 7106 + }, + { + "epoch": 0.69404296875, + "grad_norm": 0.17077553272247314, + "learning_rate": 0.00014882690048535158, + "loss": 4.4258, + "step": 7107 + }, + { + "epoch": 0.694140625, + "grad_norm": 0.15530851483345032, + "learning_rate": 0.00014876890273961952, + "loss": 4.4336, + "step": 7108 + }, + { + "epoch": 0.69423828125, + "grad_norm": 0.16664662957191467, + "learning_rate": 0.00014871091723113567, + "loss": 4.4375, + "step": 7109 + }, + { + "epoch": 0.6943359375, + "grad_norm": 0.1668640822172165, + "learning_rate": 0.00014865294396552127, + "loss": 4.418, + "step": 7110 + }, + { + "epoch": 0.69443359375, + "grad_norm": 0.15961594879627228, + "learning_rate": 0.00014859498294839636, + "loss": 4.4531, + "step": 7111 + }, + { + "epoch": 0.69453125, + "grad_norm": 0.15915991365909576, + "learning_rate": 0.00014853703418538005, + "loss": 4.4375, + "step": 7112 + }, + { + "epoch": 0.69462890625, + "grad_norm": 0.15999309718608856, + "learning_rate": 0.00014847909768208993, + "loss": 4.4258, + "step": 7113 + }, + { + "epoch": 0.6947265625, + "grad_norm": 0.1624843031167984, + "learning_rate": 0.00014842117344414255, + "loss": 4.4609, + "step": 7114 + }, + { + "epoch": 0.69482421875, + "grad_norm": 0.15743541717529297, + "learning_rate": 0.00014836326147715334, + "loss": 4.4258, + "step": 7115 + }, + { + "epoch": 0.694921875, + "grad_norm": 0.1757933497428894, + "learning_rate": 0.00014830536178673642, + "loss": 4.4492, + "step": 7116 + }, + { + "epoch": 0.69501953125, + "grad_norm": 0.1578277200460434, + "learning_rate": 0.00014824747437850477, + "loss": 4.4688, + "step": 7117 + }, + { + "epoch": 0.6951171875, + "grad_norm": 0.16266845166683197, + "learning_rate": 0.00014818959925807024, + "loss": 4.4492, + "step": 7118 + }, + { + "epoch": 0.69521484375, + "grad_norm": 0.15812158584594727, + "learning_rate": 0.00014813173643104345, + "loss": 4.4375, + "step": 7119 + }, + { + "epoch": 0.6953125, + "grad_norm": 0.16462133824825287, + "learning_rate": 0.00014807388590303374, + "loss": 4.4414, + "step": 7120 + }, + { + "epoch": 0.69541015625, + "grad_norm": 0.1623114049434662, + "learning_rate": 0.0001480160476796493, + "loss": 4.4492, + "step": 7121 + }, + { + "epoch": 0.6955078125, 
+ "grad_norm": 0.15950371325016022, + "learning_rate": 0.0001479582217664972, + "loss": 4.4531, + "step": 7122 + }, + { + "epoch": 0.69560546875, + "grad_norm": 0.15229952335357666, + "learning_rate": 0.00014790040816918326, + "loss": 4.4141, + "step": 7123 + }, + { + "epoch": 0.695703125, + "grad_norm": 0.16012394428253174, + "learning_rate": 0.00014784260689331213, + "loss": 4.418, + "step": 7124 + }, + { + "epoch": 0.69580078125, + "grad_norm": 0.158954456448555, + "learning_rate": 0.0001477848179444872, + "loss": 4.4297, + "step": 7125 + }, + { + "epoch": 0.6958984375, + "grad_norm": 0.14972394704818726, + "learning_rate": 0.00014772704132831084, + "loss": 4.4336, + "step": 7126 + }, + { + "epoch": 0.69599609375, + "grad_norm": 0.15261879563331604, + "learning_rate": 0.00014766927705038398, + "loss": 4.4062, + "step": 7127 + }, + { + "epoch": 0.69609375, + "grad_norm": 0.15138253569602966, + "learning_rate": 0.00014761152511630658, + "loss": 4.4375, + "step": 7128 + }, + { + "epoch": 0.69619140625, + "grad_norm": 0.15589524805545807, + "learning_rate": 0.00014755378553167714, + "loss": 4.4141, + "step": 7129 + }, + { + "epoch": 0.6962890625, + "grad_norm": 0.15444034337997437, + "learning_rate": 0.00014749605830209324, + "loss": 4.4102, + "step": 7130 + }, + { + "epoch": 0.69638671875, + "grad_norm": 0.15743236243724823, + "learning_rate": 0.0001474383434331511, + "loss": 4.4258, + "step": 7131 + }, + { + "epoch": 0.696484375, + "grad_norm": 0.161315456032753, + "learning_rate": 0.0001473806409304458, + "loss": 4.4336, + "step": 7132 + }, + { + "epoch": 0.69658203125, + "grad_norm": 0.15091194212436676, + "learning_rate": 0.00014732295079957123, + "loss": 4.4102, + "step": 7133 + }, + { + "epoch": 0.6966796875, + "grad_norm": 0.15905718505382538, + "learning_rate": 0.00014726527304612002, + "loss": 4.4219, + "step": 7134 + }, + { + "epoch": 0.69677734375, + "grad_norm": 0.15515902638435364, + "learning_rate": 0.00014720760767568365, + "loss": 4.4258, + "step": 7135 + }, + { + "epoch": 0.696875, + "grad_norm": 0.15570823848247528, + "learning_rate": 0.00014714995469385233, + "loss": 4.3906, + "step": 7136 + }, + { + "epoch": 0.69697265625, + "grad_norm": 0.1516818404197693, + "learning_rate": 0.00014709231410621536, + "loss": 4.4375, + "step": 7137 + }, + { + "epoch": 0.6970703125, + "grad_norm": 0.15668882429599762, + "learning_rate": 0.00014703468591836027, + "loss": 4.4258, + "step": 7138 + }, + { + "epoch": 0.69716796875, + "grad_norm": 0.14918340742588043, + "learning_rate": 0.0001469770701358739, + "loss": 4.4062, + "step": 7139 + }, + { + "epoch": 0.697265625, + "grad_norm": 0.15295155346393585, + "learning_rate": 0.00014691946676434172, + "loss": 4.4023, + "step": 7140 + }, + { + "epoch": 0.69736328125, + "grad_norm": 0.1500699371099472, + "learning_rate": 0.0001468618758093479, + "loss": 4.4258, + "step": 7141 + }, + { + "epoch": 0.6974609375, + "grad_norm": 0.15699872374534607, + "learning_rate": 0.0001468042972764756, + "loss": 4.4336, + "step": 7142 + }, + { + "epoch": 0.69755859375, + "grad_norm": 0.16041336953639984, + "learning_rate": 0.00014674673117130659, + "loss": 4.4492, + "step": 7143 + }, + { + "epoch": 0.69765625, + "grad_norm": 0.16148078441619873, + "learning_rate": 0.00014668917749942152, + "loss": 4.4336, + "step": 7144 + }, + { + "epoch": 0.69775390625, + "grad_norm": 0.1596602201461792, + "learning_rate": 0.0001466316362663999, + "loss": 4.4531, + "step": 7145 + }, + { + "epoch": 0.6978515625, + "grad_norm": 0.15939664840698242, + "learning_rate": 
0.00014657410747781987, + "loss": 4.4258, + "step": 7146 + }, + { + "epoch": 0.69794921875, + "grad_norm": 0.15930192172527313, + "learning_rate": 0.00014651659113925847, + "loss": 4.4141, + "step": 7147 + }, + { + "epoch": 0.698046875, + "grad_norm": 0.16293859481811523, + "learning_rate": 0.00014645908725629154, + "loss": 4.4219, + "step": 7148 + }, + { + "epoch": 0.69814453125, + "grad_norm": 0.15065400302410126, + "learning_rate": 0.00014640159583449372, + "loss": 4.4219, + "step": 7149 + }, + { + "epoch": 0.6982421875, + "grad_norm": 0.15116456151008606, + "learning_rate": 0.00014634411687943836, + "loss": 4.4414, + "step": 7150 + }, + { + "epoch": 0.69833984375, + "grad_norm": 0.1555454134941101, + "learning_rate": 0.00014628665039669768, + "loss": 4.4258, + "step": 7151 + }, + { + "epoch": 0.6984375, + "grad_norm": 0.14741970598697662, + "learning_rate": 0.00014622919639184267, + "loss": 4.3945, + "step": 7152 + }, + { + "epoch": 0.69853515625, + "grad_norm": 0.1530485302209854, + "learning_rate": 0.0001461717548704431, + "loss": 4.3984, + "step": 7153 + }, + { + "epoch": 0.6986328125, + "grad_norm": 0.15511195361614227, + "learning_rate": 0.0001461143258380676, + "loss": 4.4414, + "step": 7154 + }, + { + "epoch": 0.69873046875, + "grad_norm": 0.16008834540843964, + "learning_rate": 0.00014605690930028336, + "loss": 4.4141, + "step": 7155 + }, + { + "epoch": 0.698828125, + "grad_norm": 0.14948861300945282, + "learning_rate": 0.00014599950526265665, + "loss": 4.4023, + "step": 7156 + }, + { + "epoch": 0.69892578125, + "grad_norm": 0.15626132488250732, + "learning_rate": 0.0001459421137307524, + "loss": 4.4414, + "step": 7157 + }, + { + "epoch": 0.6990234375, + "grad_norm": 0.15378649532794952, + "learning_rate": 0.00014588473471013427, + "loss": 4.3906, + "step": 7158 + }, + { + "epoch": 0.69912109375, + "grad_norm": 0.15489734709262848, + "learning_rate": 0.00014582736820636482, + "loss": 4.4258, + "step": 7159 + }, + { + "epoch": 0.69921875, + "grad_norm": 0.161583811044693, + "learning_rate": 0.0001457700142250053, + "loss": 4.4375, + "step": 7160 + }, + { + "epoch": 0.69931640625, + "grad_norm": 0.17039674520492554, + "learning_rate": 0.00014571267277161586, + "loss": 4.4141, + "step": 7161 + }, + { + "epoch": 0.6994140625, + "grad_norm": 0.1568434238433838, + "learning_rate": 0.00014565534385175527, + "loss": 4.4297, + "step": 7162 + }, + { + "epoch": 0.69951171875, + "grad_norm": 0.16415683925151825, + "learning_rate": 0.00014559802747098137, + "loss": 4.4453, + "step": 7163 + }, + { + "epoch": 0.699609375, + "grad_norm": 0.16969524323940277, + "learning_rate": 0.00014554072363485033, + "loss": 4.3906, + "step": 7164 + }, + { + "epoch": 0.69970703125, + "grad_norm": 0.1541953682899475, + "learning_rate": 0.00014548343234891748, + "loss": 4.4336, + "step": 7165 + }, + { + "epoch": 0.6998046875, + "grad_norm": 0.1615845263004303, + "learning_rate": 0.00014542615361873686, + "loss": 4.4336, + "step": 7166 + }, + { + "epoch": 0.69990234375, + "grad_norm": 0.147820383310318, + "learning_rate": 0.0001453688874498612, + "loss": 4.4453, + "step": 7167 + }, + { + "epoch": 0.7, + "grad_norm": 0.16583451628684998, + "learning_rate": 0.00014531163384784212, + "loss": 4.4414, + "step": 7168 + }, + { + "epoch": 0.70009765625, + "grad_norm": 0.1591467261314392, + "learning_rate": 0.00014525439281822995, + "loss": 4.418, + "step": 7169 + }, + { + "epoch": 0.7001953125, + "grad_norm": 0.1624739170074463, + "learning_rate": 0.00014519716436657378, + "loss": 4.4102, + "step": 7170 + }, + { + 
"epoch": 0.70029296875, + "grad_norm": 0.1675368696451187, + "learning_rate": 0.00014513994849842154, + "loss": 4.4336, + "step": 7171 + }, + { + "epoch": 0.700390625, + "grad_norm": 0.15761008858680725, + "learning_rate": 0.00014508274521931997, + "loss": 4.4219, + "step": 7172 + }, + { + "epoch": 0.70048828125, + "grad_norm": 0.16826730966567993, + "learning_rate": 0.00014502555453481457, + "loss": 4.4062, + "step": 7173 + }, + { + "epoch": 0.7005859375, + "grad_norm": 0.15602412819862366, + "learning_rate": 0.0001449683764504494, + "loss": 4.3906, + "step": 7174 + }, + { + "epoch": 0.70068359375, + "grad_norm": 0.1600164771080017, + "learning_rate": 0.00014491121097176763, + "loss": 4.4258, + "step": 7175 + }, + { + "epoch": 0.70078125, + "grad_norm": 0.15835776925086975, + "learning_rate": 0.00014485405810431108, + "loss": 4.4258, + "step": 7176 + }, + { + "epoch": 0.70087890625, + "grad_norm": 0.15765324234962463, + "learning_rate": 0.0001447969178536202, + "loss": 4.4297, + "step": 7177 + }, + { + "epoch": 0.7009765625, + "grad_norm": 0.1564856469631195, + "learning_rate": 0.00014473979022523454, + "loss": 4.4023, + "step": 7178 + }, + { + "epoch": 0.70107421875, + "grad_norm": 0.16416983306407928, + "learning_rate": 0.00014468267522469209, + "loss": 4.4023, + "step": 7179 + }, + { + "epoch": 0.701171875, + "grad_norm": 0.15971767902374268, + "learning_rate": 0.0001446255728575298, + "loss": 4.3906, + "step": 7180 + }, + { + "epoch": 0.70126953125, + "grad_norm": 0.16209867596626282, + "learning_rate": 0.0001445684831292834, + "loss": 4.4414, + "step": 7181 + }, + { + "epoch": 0.7013671875, + "grad_norm": 0.16140832006931305, + "learning_rate": 0.00014451140604548733, + "loss": 4.4258, + "step": 7182 + }, + { + "epoch": 0.70146484375, + "grad_norm": 0.19359277188777924, + "learning_rate": 0.00014445434161167487, + "loss": 4.3984, + "step": 7183 + }, + { + "epoch": 0.7015625, + "grad_norm": 0.1626926064491272, + "learning_rate": 0.0001443972898333779, + "loss": 4.4766, + "step": 7184 + }, + { + "epoch": 0.70166015625, + "grad_norm": 0.16258037090301514, + "learning_rate": 0.00014434025071612724, + "loss": 4.418, + "step": 7185 + }, + { + "epoch": 0.7017578125, + "grad_norm": 0.16024860739707947, + "learning_rate": 0.00014428322426545255, + "loss": 4.4219, + "step": 7186 + }, + { + "epoch": 0.70185546875, + "grad_norm": 0.17342662811279297, + "learning_rate": 0.00014422621048688206, + "loss": 4.4492, + "step": 7187 + }, + { + "epoch": 0.701953125, + "grad_norm": 0.15433688461780548, + "learning_rate": 0.0001441692093859429, + "loss": 4.418, + "step": 7188 + }, + { + "epoch": 0.70205078125, + "grad_norm": 0.16194204986095428, + "learning_rate": 0.0001441122209681609, + "loss": 4.4531, + "step": 7189 + }, + { + "epoch": 0.7021484375, + "grad_norm": 0.17633184790611267, + "learning_rate": 0.00014405524523906078, + "loss": 4.4219, + "step": 7190 + }, + { + "epoch": 0.70224609375, + "grad_norm": 0.1496172994375229, + "learning_rate": 0.0001439982822041659, + "loss": 4.4609, + "step": 7191 + }, + { + "epoch": 0.70234375, + "grad_norm": 0.17096740007400513, + "learning_rate": 0.00014394133186899856, + "loss": 4.3984, + "step": 7192 + }, + { + "epoch": 0.70244140625, + "grad_norm": 0.15361976623535156, + "learning_rate": 0.00014388439423907947, + "loss": 4.4375, + "step": 7193 + }, + { + "epoch": 0.7025390625, + "grad_norm": 0.16132569313049316, + "learning_rate": 0.0001438274693199285, + "loss": 4.4375, + "step": 7194 + }, + { + "epoch": 0.70263671875, + "grad_norm": 0.16228090226650238, + 
"learning_rate": 0.0001437705571170641, + "loss": 4.4297, + "step": 7195 + }, + { + "epoch": 0.702734375, + "grad_norm": 0.15851810574531555, + "learning_rate": 0.00014371365763600353, + "loss": 4.4258, + "step": 7196 + }, + { + "epoch": 0.70283203125, + "grad_norm": 0.16261446475982666, + "learning_rate": 0.00014365677088226284, + "loss": 4.418, + "step": 7197 + }, + { + "epoch": 0.7029296875, + "grad_norm": 0.1561499387025833, + "learning_rate": 0.00014359989686135675, + "loss": 4.4297, + "step": 7198 + }, + { + "epoch": 0.70302734375, + "grad_norm": 0.16164547204971313, + "learning_rate": 0.00014354303557879882, + "loss": 4.4375, + "step": 7199 + }, + { + "epoch": 0.703125, + "grad_norm": 0.14888451993465424, + "learning_rate": 0.00014348618704010147, + "loss": 4.4023, + "step": 7200 + }, + { + "epoch": 0.70322265625, + "grad_norm": 0.16178536415100098, + "learning_rate": 0.0001434293512507756, + "loss": 4.4062, + "step": 7201 + }, + { + "epoch": 0.7033203125, + "grad_norm": 0.1545884609222412, + "learning_rate": 0.00014337252821633128, + "loss": 4.4297, + "step": 7202 + }, + { + "epoch": 0.70341796875, + "grad_norm": 0.1521773338317871, + "learning_rate": 0.00014331571794227692, + "loss": 4.4062, + "step": 7203 + }, + { + "epoch": 0.703515625, + "grad_norm": 0.15502972900867462, + "learning_rate": 0.00014325892043411998, + "loss": 4.4297, + "step": 7204 + }, + { + "epoch": 0.70361328125, + "grad_norm": 0.15970365703105927, + "learning_rate": 0.00014320213569736655, + "loss": 4.457, + "step": 7205 + }, + { + "epoch": 0.7037109375, + "grad_norm": 0.15494892001152039, + "learning_rate": 0.0001431453637375215, + "loss": 4.4023, + "step": 7206 + }, + { + "epoch": 0.70380859375, + "grad_norm": 0.1545119732618332, + "learning_rate": 0.00014308860456008863, + "loss": 4.4688, + "step": 7207 + }, + { + "epoch": 0.70390625, + "grad_norm": 0.14810767769813538, + "learning_rate": 0.0001430318581705702, + "loss": 4.4336, + "step": 7208 + }, + { + "epoch": 0.70400390625, + "grad_norm": 0.1526859849691391, + "learning_rate": 0.0001429751245744675, + "loss": 4.4219, + "step": 7209 + }, + { + "epoch": 0.7041015625, + "grad_norm": 0.15609005093574524, + "learning_rate": 0.00014291840377728037, + "loss": 4.4062, + "step": 7210 + }, + { + "epoch": 0.70419921875, + "grad_norm": 0.15693864226341248, + "learning_rate": 0.00014286169578450758, + "loss": 4.418, + "step": 7211 + }, + { + "epoch": 0.704296875, + "grad_norm": 0.15837083756923676, + "learning_rate": 0.0001428050006016466, + "loss": 4.4141, + "step": 7212 + }, + { + "epoch": 0.70439453125, + "grad_norm": 0.1528652012348175, + "learning_rate": 0.0001427483182341936, + "loss": 4.4414, + "step": 7213 + }, + { + "epoch": 0.7044921875, + "grad_norm": 0.15194836258888245, + "learning_rate": 0.00014269164868764347, + "loss": 4.4375, + "step": 7214 + }, + { + "epoch": 0.70458984375, + "grad_norm": 0.1530403345823288, + "learning_rate": 0.00014263499196749005, + "loss": 4.4102, + "step": 7215 + }, + { + "epoch": 0.7046875, + "grad_norm": 0.15276922285556793, + "learning_rate": 0.00014257834807922582, + "loss": 4.4414, + "step": 7216 + }, + { + "epoch": 0.70478515625, + "grad_norm": 0.1557009220123291, + "learning_rate": 0.00014252171702834195, + "loss": 4.4648, + "step": 7217 + }, + { + "epoch": 0.7048828125, + "grad_norm": 0.14898359775543213, + "learning_rate": 0.00014246509882032848, + "loss": 4.4219, + "step": 7218 + }, + { + "epoch": 0.70498046875, + "grad_norm": 0.15885336697101593, + "learning_rate": 0.0001424084934606742, + "loss": 4.4023, + "step": 
7219 + }, + { + "epoch": 0.705078125, + "grad_norm": 0.1558695137500763, + "learning_rate": 0.00014235190095486654, + "loss": 4.4531, + "step": 7220 + }, + { + "epoch": 0.70517578125, + "grad_norm": 0.16148442029953003, + "learning_rate": 0.00014229532130839194, + "loss": 4.4023, + "step": 7221 + }, + { + "epoch": 0.7052734375, + "grad_norm": 0.15476500988006592, + "learning_rate": 0.00014223875452673513, + "loss": 4.3906, + "step": 7222 + }, + { + "epoch": 0.70537109375, + "grad_norm": 0.15611153841018677, + "learning_rate": 0.00014218220061538008, + "loss": 4.4258, + "step": 7223 + }, + { + "epoch": 0.70546875, + "grad_norm": 0.1574060171842575, + "learning_rate": 0.00014212565957980917, + "loss": 4.4102, + "step": 7224 + }, + { + "epoch": 0.70556640625, + "grad_norm": 0.14745770394802094, + "learning_rate": 0.00014206913142550377, + "loss": 4.4336, + "step": 7225 + }, + { + "epoch": 0.7056640625, + "grad_norm": 0.15775856375694275, + "learning_rate": 0.00014201261615794388, + "loss": 4.4219, + "step": 7226 + }, + { + "epoch": 0.70576171875, + "grad_norm": 0.15592820942401886, + "learning_rate": 0.00014195611378260825, + "loss": 4.418, + "step": 7227 + }, + { + "epoch": 0.705859375, + "grad_norm": 0.1509212702512741, + "learning_rate": 0.00014189962430497443, + "loss": 4.3789, + "step": 7228 + }, + { + "epoch": 0.70595703125, + "grad_norm": 0.14517392218112946, + "learning_rate": 0.00014184314773051866, + "loss": 4.4336, + "step": 7229 + }, + { + "epoch": 0.7060546875, + "grad_norm": 0.15017017722129822, + "learning_rate": 0.0001417866840647161, + "loss": 4.4141, + "step": 7230 + }, + { + "epoch": 0.70615234375, + "grad_norm": 0.15875324606895447, + "learning_rate": 0.0001417302333130403, + "loss": 4.4297, + "step": 7231 + }, + { + "epoch": 0.70625, + "grad_norm": 0.15239247679710388, + "learning_rate": 0.00014167379548096388, + "loss": 4.4297, + "step": 7232 + }, + { + "epoch": 0.70634765625, + "grad_norm": 0.16057813167572021, + "learning_rate": 0.00014161737057395807, + "loss": 4.4297, + "step": 7233 + }, + { + "epoch": 0.7064453125, + "grad_norm": 0.16287972033023834, + "learning_rate": 0.0001415609585974929, + "loss": 4.4336, + "step": 7234 + }, + { + "epoch": 0.70654296875, + "grad_norm": 0.15345053374767303, + "learning_rate": 0.00014150455955703716, + "loss": 4.3906, + "step": 7235 + }, + { + "epoch": 0.706640625, + "grad_norm": 0.1577313244342804, + "learning_rate": 0.00014144817345805837, + "loss": 4.4062, + "step": 7236 + }, + { + "epoch": 0.70673828125, + "grad_norm": 0.1514955461025238, + "learning_rate": 0.00014139180030602272, + "loss": 4.4453, + "step": 7237 + }, + { + "epoch": 0.7068359375, + "grad_norm": 0.15570253133773804, + "learning_rate": 0.0001413354401063953, + "loss": 4.4141, + "step": 7238 + }, + { + "epoch": 0.70693359375, + "grad_norm": 0.1524611860513687, + "learning_rate": 0.00014127909286463974, + "loss": 4.4258, + "step": 7239 + }, + { + "epoch": 0.70703125, + "grad_norm": 0.16601137816905975, + "learning_rate": 0.00014122275858621854, + "loss": 4.4531, + "step": 7240 + }, + { + "epoch": 0.70712890625, + "grad_norm": 0.1511063277721405, + "learning_rate": 0.00014116643727659292, + "loss": 4.418, + "step": 7241 + }, + { + "epoch": 0.7072265625, + "grad_norm": 0.15581750869750977, + "learning_rate": 0.0001411101289412229, + "loss": 4.4375, + "step": 7242 + }, + { + "epoch": 0.70732421875, + "grad_norm": 0.16213339567184448, + "learning_rate": 0.00014105383358556714, + "loss": 4.4414, + "step": 7243 + }, + { + "epoch": 0.707421875, + "grad_norm": 
0.15410439670085907, + "learning_rate": 0.00014099755121508312, + "loss": 4.4844, + "step": 7244 + }, + { + "epoch": 0.70751953125, + "grad_norm": 0.1575489193201065, + "learning_rate": 0.00014094128183522706, + "loss": 4.4219, + "step": 7245 + }, + { + "epoch": 0.7076171875, + "grad_norm": 0.1459220051765442, + "learning_rate": 0.00014088502545145392, + "loss": 4.4219, + "step": 7246 + }, + { + "epoch": 0.70771484375, + "grad_norm": 0.17067553102970123, + "learning_rate": 0.00014082878206921726, + "loss": 4.4414, + "step": 7247 + }, + { + "epoch": 0.7078125, + "grad_norm": 0.15520378947257996, + "learning_rate": 0.0001407725516939696, + "loss": 4.3945, + "step": 7248 + }, + { + "epoch": 0.70791015625, + "grad_norm": 0.16349118947982788, + "learning_rate": 0.00014071633433116194, + "loss": 4.4414, + "step": 7249 + }, + { + "epoch": 0.7080078125, + "grad_norm": 0.15722480416297913, + "learning_rate": 0.00014066012998624437, + "loss": 4.4219, + "step": 7250 + }, + { + "epoch": 0.70810546875, + "grad_norm": 0.15850003063678741, + "learning_rate": 0.00014060393866466539, + "loss": 4.4375, + "step": 7251 + }, + { + "epoch": 0.708203125, + "grad_norm": 0.16081088781356812, + "learning_rate": 0.0001405477603718724, + "loss": 4.418, + "step": 7252 + }, + { + "epoch": 0.70830078125, + "grad_norm": 0.153306245803833, + "learning_rate": 0.00014049159511331154, + "loss": 4.4141, + "step": 7253 + }, + { + "epoch": 0.7083984375, + "grad_norm": 0.16744515299797058, + "learning_rate": 0.0001404354428944276, + "loss": 4.4297, + "step": 7254 + }, + { + "epoch": 0.70849609375, + "grad_norm": 0.16861383616924286, + "learning_rate": 0.00014037930372066425, + "loss": 4.4219, + "step": 7255 + }, + { + "epoch": 0.70859375, + "grad_norm": 0.1599789261817932, + "learning_rate": 0.00014032317759746368, + "loss": 4.4258, + "step": 7256 + }, + { + "epoch": 0.70869140625, + "grad_norm": 0.16351105272769928, + "learning_rate": 0.000140267064530267, + "loss": 4.4414, + "step": 7257 + }, + { + "epoch": 0.7087890625, + "grad_norm": 0.1463087499141693, + "learning_rate": 0.00014021096452451393, + "loss": 4.4023, + "step": 7258 + }, + { + "epoch": 0.70888671875, + "grad_norm": 0.1568661779165268, + "learning_rate": 0.00014015487758564307, + "loss": 4.4453, + "step": 7259 + }, + { + "epoch": 0.708984375, + "grad_norm": 0.14923690259456635, + "learning_rate": 0.00014009880371909162, + "loss": 4.4219, + "step": 7260 + }, + { + "epoch": 0.70908203125, + "grad_norm": 0.15795668959617615, + "learning_rate": 0.0001400427429302956, + "loss": 4.4336, + "step": 7261 + }, + { + "epoch": 0.7091796875, + "grad_norm": 0.15663562715053558, + "learning_rate": 0.00013998669522468965, + "loss": 4.4531, + "step": 7262 + }, + { + "epoch": 0.70927734375, + "grad_norm": 0.15833643078804016, + "learning_rate": 0.00013993066060770737, + "loss": 4.4102, + "step": 7263 + }, + { + "epoch": 0.709375, + "grad_norm": 0.15769493579864502, + "learning_rate": 0.00013987463908478074, + "loss": 4.457, + "step": 7264 + }, + { + "epoch": 0.70947265625, + "grad_norm": 0.15233762562274933, + "learning_rate": 0.00013981863066134077, + "loss": 4.418, + "step": 7265 + }, + { + "epoch": 0.7095703125, + "grad_norm": 0.15748734772205353, + "learning_rate": 0.00013976263534281702, + "loss": 4.4336, + "step": 7266 + }, + { + "epoch": 0.70966796875, + "grad_norm": 0.1589486002922058, + "learning_rate": 0.00013970665313463795, + "loss": 4.4492, + "step": 7267 + }, + { + "epoch": 0.709765625, + "grad_norm": 0.155470609664917, + "learning_rate": 0.00013965068404223063, + 
"loss": 4.4336, + "step": 7268 + }, + { + "epoch": 0.70986328125, + "grad_norm": 0.15450285375118256, + "learning_rate": 0.00013959472807102086, + "loss": 4.4141, + "step": 7269 + }, + { + "epoch": 0.7099609375, + "grad_norm": 0.15973083674907684, + "learning_rate": 0.00013953878522643322, + "loss": 4.3984, + "step": 7270 + }, + { + "epoch": 0.71005859375, + "grad_norm": 0.16186638176441193, + "learning_rate": 0.00013948285551389098, + "loss": 4.4141, + "step": 7271 + }, + { + "epoch": 0.71015625, + "grad_norm": 0.16200870275497437, + "learning_rate": 0.00013942693893881619, + "loss": 4.4062, + "step": 7272 + }, + { + "epoch": 0.71025390625, + "grad_norm": 0.15892942249774933, + "learning_rate": 0.0001393710355066295, + "loss": 4.418, + "step": 7273 + }, + { + "epoch": 0.7103515625, + "grad_norm": 0.15878503024578094, + "learning_rate": 0.00013931514522275035, + "loss": 4.4297, + "step": 7274 + }, + { + "epoch": 0.71044921875, + "grad_norm": 0.15193960070610046, + "learning_rate": 0.00013925926809259698, + "loss": 4.4844, + "step": 7275 + }, + { + "epoch": 0.710546875, + "grad_norm": 0.17159293591976166, + "learning_rate": 0.0001392034041215863, + "loss": 4.4336, + "step": 7276 + }, + { + "epoch": 0.71064453125, + "grad_norm": 0.16009080410003662, + "learning_rate": 0.00013914755331513395, + "loss": 4.4414, + "step": 7277 + }, + { + "epoch": 0.7107421875, + "grad_norm": 0.17159907519817352, + "learning_rate": 0.00013909171567865424, + "loss": 4.3984, + "step": 7278 + }, + { + "epoch": 0.71083984375, + "grad_norm": 0.15817402303218842, + "learning_rate": 0.00013903589121756033, + "loss": 4.4297, + "step": 7279 + }, + { + "epoch": 0.7109375, + "grad_norm": 0.15928654372692108, + "learning_rate": 0.00013898007993726403, + "loss": 4.418, + "step": 7280 + }, + { + "epoch": 0.71103515625, + "grad_norm": 0.17344164848327637, + "learning_rate": 0.00013892428184317574, + "loss": 4.4492, + "step": 7281 + }, + { + "epoch": 0.7111328125, + "grad_norm": 0.15164701640605927, + "learning_rate": 0.00013886849694070479, + "loss": 4.4219, + "step": 7282 + }, + { + "epoch": 0.71123046875, + "grad_norm": 0.15826153755187988, + "learning_rate": 0.0001388127252352591, + "loss": 4.4453, + "step": 7283 + }, + { + "epoch": 0.711328125, + "grad_norm": 0.15460731089115143, + "learning_rate": 0.00013875696673224541, + "loss": 4.418, + "step": 7284 + }, + { + "epoch": 0.71142578125, + "grad_norm": 0.1633424013853073, + "learning_rate": 0.0001387012214370691, + "loss": 4.4219, + "step": 7285 + }, + { + "epoch": 0.7115234375, + "grad_norm": 0.15724588930606842, + "learning_rate": 0.00013864548935513433, + "loss": 4.418, + "step": 7286 + }, + { + "epoch": 0.71162109375, + "grad_norm": 0.15673983097076416, + "learning_rate": 0.0001385897704918439, + "loss": 4.3828, + "step": 7287 + }, + { + "epoch": 0.71171875, + "grad_norm": 0.16090460121631622, + "learning_rate": 0.0001385340648525995, + "loss": 4.4531, + "step": 7288 + }, + { + "epoch": 0.71181640625, + "grad_norm": 0.15166760981082916, + "learning_rate": 0.00013847837244280122, + "loss": 4.4375, + "step": 7289 + }, + { + "epoch": 0.7119140625, + "grad_norm": 0.1654677540063858, + "learning_rate": 0.00013842269326784818, + "loss": 4.4375, + "step": 7290 + }, + { + "epoch": 0.71201171875, + "grad_norm": 0.16517089307308197, + "learning_rate": 0.00013836702733313806, + "loss": 4.418, + "step": 7291 + }, + { + "epoch": 0.712109375, + "grad_norm": 0.1646585762500763, + "learning_rate": 0.0001383113746440673, + "loss": 4.4062, + "step": 7292 + }, + { + "epoch": 
0.71220703125, + "grad_norm": 0.16393478214740753, + "learning_rate": 0.00013825573520603106, + "loss": 4.4297, + "step": 7293 + }, + { + "epoch": 0.7123046875, + "grad_norm": 0.16259527206420898, + "learning_rate": 0.00013820010902442325, + "loss": 4.4375, + "step": 7294 + }, + { + "epoch": 0.71240234375, + "grad_norm": 0.15716566145420074, + "learning_rate": 0.00013814449610463636, + "loss": 4.4375, + "step": 7295 + }, + { + "epoch": 0.7125, + "grad_norm": 0.16418571770191193, + "learning_rate": 0.00013808889645206175, + "loss": 4.418, + "step": 7296 + }, + { + "epoch": 0.71259765625, + "grad_norm": 0.1659562587738037, + "learning_rate": 0.0001380333100720895, + "loss": 4.4453, + "step": 7297 + }, + { + "epoch": 0.7126953125, + "grad_norm": 0.16096441447734833, + "learning_rate": 0.00013797773697010813, + "loss": 4.4414, + "step": 7298 + }, + { + "epoch": 0.71279296875, + "grad_norm": 0.15282319486141205, + "learning_rate": 0.0001379221771515052, + "loss": 4.4531, + "step": 7299 + }, + { + "epoch": 0.712890625, + "grad_norm": 0.1548435389995575, + "learning_rate": 0.0001378666306216669, + "loss": 4.4297, + "step": 7300 + }, + { + "epoch": 0.71298828125, + "grad_norm": 0.16177646815776825, + "learning_rate": 0.00013781109738597794, + "loss": 4.418, + "step": 7301 + }, + { + "epoch": 0.7130859375, + "grad_norm": 0.15351787209510803, + "learning_rate": 0.00013775557744982202, + "loss": 4.3984, + "step": 7302 + }, + { + "epoch": 0.71318359375, + "grad_norm": 0.16058188676834106, + "learning_rate": 0.0001377000708185814, + "loss": 4.4023, + "step": 7303 + }, + { + "epoch": 0.71328125, + "grad_norm": 0.15306513011455536, + "learning_rate": 0.00013764457749763708, + "loss": 4.4023, + "step": 7304 + }, + { + "epoch": 0.71337890625, + "grad_norm": 0.2315640151500702, + "learning_rate": 0.00013758909749236878, + "loss": 4.4414, + "step": 7305 + }, + { + "epoch": 0.7134765625, + "grad_norm": 0.15353073179721832, + "learning_rate": 0.00013753363080815477, + "loss": 4.4297, + "step": 7306 + }, + { + "epoch": 0.71357421875, + "grad_norm": 0.15660029649734497, + "learning_rate": 0.00013747817745037234, + "loss": 4.4102, + "step": 7307 + }, + { + "epoch": 0.713671875, + "grad_norm": 0.16443176567554474, + "learning_rate": 0.00013742273742439718, + "loss": 4.4453, + "step": 7308 + }, + { + "epoch": 0.71376953125, + "grad_norm": 0.15710391104221344, + "learning_rate": 0.0001373673107356039, + "loss": 4.4414, + "step": 7309 + }, + { + "epoch": 0.7138671875, + "grad_norm": 0.16212981939315796, + "learning_rate": 0.0001373118973893657, + "loss": 4.4414, + "step": 7310 + }, + { + "epoch": 0.71396484375, + "grad_norm": 0.15570731461048126, + "learning_rate": 0.0001372564973910546, + "loss": 4.4023, + "step": 7311 + }, + { + "epoch": 0.7140625, + "grad_norm": 0.15833935141563416, + "learning_rate": 0.0001372011107460412, + "loss": 4.418, + "step": 7312 + }, + { + "epoch": 0.71416015625, + "grad_norm": 0.16637708246707916, + "learning_rate": 0.00013714573745969488, + "loss": 4.457, + "step": 7313 + }, + { + "epoch": 0.7142578125, + "grad_norm": 0.1582215428352356, + "learning_rate": 0.00013709037753738376, + "loss": 4.3867, + "step": 7314 + }, + { + "epoch": 0.71435546875, + "grad_norm": 0.16885793209075928, + "learning_rate": 0.00013703503098447446, + "loss": 4.4023, + "step": 7315 + }, + { + "epoch": 0.714453125, + "grad_norm": 0.16637873649597168, + "learning_rate": 0.00013697969780633254, + "loss": 4.4414, + "step": 7316 + }, + { + "epoch": 0.71455078125, + "grad_norm": 0.15658873319625854, + 
"learning_rate": 0.0001369243780083222, + "loss": 4.3828, + "step": 7317 + }, + { + "epoch": 0.7146484375, + "grad_norm": 0.17076973617076874, + "learning_rate": 0.00013686907159580626, + "loss": 4.4492, + "step": 7318 + }, + { + "epoch": 0.71474609375, + "grad_norm": 0.15600799024105072, + "learning_rate": 0.00013681377857414635, + "loss": 4.4102, + "step": 7319 + }, + { + "epoch": 0.71484375, + "grad_norm": 0.15388211607933044, + "learning_rate": 0.00013675849894870267, + "loss": 4.418, + "step": 7320 + }, + { + "epoch": 0.71494140625, + "grad_norm": 0.15554691851139069, + "learning_rate": 0.0001367032327248343, + "loss": 4.4102, + "step": 7321 + }, + { + "epoch": 0.7150390625, + "grad_norm": 0.15711455047130585, + "learning_rate": 0.000136647979907899, + "loss": 4.3945, + "step": 7322 + }, + { + "epoch": 0.71513671875, + "grad_norm": 0.15208475291728973, + "learning_rate": 0.00013659274050325295, + "loss": 4.4219, + "step": 7323 + }, + { + "epoch": 0.715234375, + "grad_norm": 0.15961384773254395, + "learning_rate": 0.00013653751451625135, + "loss": 4.4062, + "step": 7324 + }, + { + "epoch": 0.71533203125, + "grad_norm": 0.16171330213546753, + "learning_rate": 0.00013648230195224798, + "loss": 4.4297, + "step": 7325 + }, + { + "epoch": 0.7154296875, + "grad_norm": 0.1513027548789978, + "learning_rate": 0.00013642710281659533, + "loss": 4.4336, + "step": 7326 + }, + { + "epoch": 0.71552734375, + "grad_norm": 0.1602858603000641, + "learning_rate": 0.0001363719171146445, + "loss": 4.4258, + "step": 7327 + }, + { + "epoch": 0.715625, + "grad_norm": 0.14997848868370056, + "learning_rate": 0.00013631674485174548, + "loss": 4.4219, + "step": 7328 + }, + { + "epoch": 0.71572265625, + "grad_norm": 0.15403881669044495, + "learning_rate": 0.0001362615860332468, + "loss": 4.4219, + "step": 7329 + }, + { + "epoch": 0.7158203125, + "grad_norm": 0.14747914671897888, + "learning_rate": 0.00013620644066449566, + "loss": 4.4258, + "step": 7330 + }, + { + "epoch": 0.71591796875, + "grad_norm": 0.15804143249988556, + "learning_rate": 0.00013615130875083826, + "loss": 4.3867, + "step": 7331 + }, + { + "epoch": 0.716015625, + "grad_norm": 0.15425533056259155, + "learning_rate": 0.00013609619029761905, + "loss": 4.4062, + "step": 7332 + }, + { + "epoch": 0.71611328125, + "grad_norm": 0.15474757552146912, + "learning_rate": 0.00013604108531018138, + "loss": 4.4258, + "step": 7333 + }, + { + "epoch": 0.7162109375, + "grad_norm": 0.15743085741996765, + "learning_rate": 0.0001359859937938674, + "loss": 4.4336, + "step": 7334 + }, + { + "epoch": 0.71630859375, + "grad_norm": 0.15384383499622345, + "learning_rate": 0.00013593091575401784, + "loss": 4.4492, + "step": 7335 + }, + { + "epoch": 0.71640625, + "grad_norm": 0.1558152735233307, + "learning_rate": 0.00013587585119597211, + "loss": 4.4414, + "step": 7336 + }, + { + "epoch": 0.71650390625, + "grad_norm": 0.15255264937877655, + "learning_rate": 0.00013582080012506837, + "loss": 4.4258, + "step": 7337 + }, + { + "epoch": 0.7166015625, + "grad_norm": 0.15031905472278595, + "learning_rate": 0.00013576576254664346, + "loss": 4.457, + "step": 7338 + }, + { + "epoch": 0.71669921875, + "grad_norm": 0.14672301709651947, + "learning_rate": 0.000135710738466033, + "loss": 4.418, + "step": 7339 + }, + { + "epoch": 0.716796875, + "grad_norm": 0.1507348120212555, + "learning_rate": 0.00013565572788857097, + "loss": 4.4297, + "step": 7340 + }, + { + "epoch": 0.71689453125, + "grad_norm": 0.15010710060596466, + "learning_rate": 0.0001356007308195904, + "loss": 4.4219, + 
"step": 7341 + }, + { + "epoch": 0.7169921875, + "grad_norm": 0.15229754149913788, + "learning_rate": 0.00013554574726442283, + "loss": 4.4141, + "step": 7342 + }, + { + "epoch": 0.71708984375, + "grad_norm": 0.15038275718688965, + "learning_rate": 0.0001354907772283986, + "loss": 4.4375, + "step": 7343 + }, + { + "epoch": 0.7171875, + "grad_norm": 0.15523529052734375, + "learning_rate": 0.0001354358207168467, + "loss": 4.4219, + "step": 7344 + }, + { + "epoch": 0.71728515625, + "grad_norm": 0.14941778779029846, + "learning_rate": 0.00013538087773509472, + "loss": 4.4258, + "step": 7345 + }, + { + "epoch": 0.7173828125, + "grad_norm": 0.1563187688589096, + "learning_rate": 0.0001353259482884691, + "loss": 4.4023, + "step": 7346 + }, + { + "epoch": 0.71748046875, + "grad_norm": 0.1568332016468048, + "learning_rate": 0.00013527103238229486, + "loss": 4.4141, + "step": 7347 + }, + { + "epoch": 0.717578125, + "grad_norm": 0.15299305319786072, + "learning_rate": 0.00013521613002189558, + "loss": 4.4414, + "step": 7348 + }, + { + "epoch": 0.71767578125, + "grad_norm": 0.15198005735874176, + "learning_rate": 0.00013516124121259383, + "loss": 4.4609, + "step": 7349 + }, + { + "epoch": 0.7177734375, + "grad_norm": 0.16429629921913147, + "learning_rate": 0.00013510636595971066, + "loss": 4.4336, + "step": 7350 + }, + { + "epoch": 0.71787109375, + "grad_norm": 0.15064530074596405, + "learning_rate": 0.0001350515042685658, + "loss": 4.4102, + "step": 7351 + }, + { + "epoch": 0.71796875, + "grad_norm": 0.1530359536409378, + "learning_rate": 0.0001349966561444778, + "loss": 4.4375, + "step": 7352 + }, + { + "epoch": 0.71806640625, + "grad_norm": 0.14262932538986206, + "learning_rate": 0.00013494182159276376, + "loss": 4.4258, + "step": 7353 + }, + { + "epoch": 0.7181640625, + "grad_norm": 0.15429289638996124, + "learning_rate": 0.00013488700061873958, + "loss": 4.4102, + "step": 7354 + }, + { + "epoch": 0.71826171875, + "grad_norm": 0.15964457392692566, + "learning_rate": 0.00013483219322771973, + "loss": 4.4062, + "step": 7355 + }, + { + "epoch": 0.718359375, + "grad_norm": 0.15956899523735046, + "learning_rate": 0.00013477739942501746, + "loss": 4.4609, + "step": 7356 + }, + { + "epoch": 0.71845703125, + "grad_norm": 0.1581142246723175, + "learning_rate": 0.0001347226192159446, + "loss": 4.4062, + "step": 7357 + }, + { + "epoch": 0.7185546875, + "grad_norm": 0.15143775939941406, + "learning_rate": 0.0001346678526058117, + "loss": 4.4141, + "step": 7358 + }, + { + "epoch": 0.71865234375, + "grad_norm": 0.15328902006149292, + "learning_rate": 0.00013461309959992806, + "loss": 4.4258, + "step": 7359 + }, + { + "epoch": 0.71875, + "grad_norm": 0.1668652594089508, + "learning_rate": 0.0001345583602036016, + "loss": 4.4141, + "step": 7360 + }, + { + "epoch": 0.71884765625, + "grad_norm": 0.16026641428470612, + "learning_rate": 0.00013450363442213894, + "loss": 4.4375, + "step": 7361 + }, + { + "epoch": 0.7189453125, + "grad_norm": 0.15846294164657593, + "learning_rate": 0.0001344489222608454, + "loss": 4.4062, + "step": 7362 + }, + { + "epoch": 0.71904296875, + "grad_norm": 0.14986512064933777, + "learning_rate": 0.00013439422372502492, + "loss": 4.4062, + "step": 7363 + }, + { + "epoch": 0.719140625, + "grad_norm": 0.16010792553424835, + "learning_rate": 0.0001343395388199802, + "loss": 4.4258, + "step": 7364 + }, + { + "epoch": 0.71923828125, + "grad_norm": 0.15443438291549683, + "learning_rate": 0.00013428486755101245, + "loss": 4.4375, + "step": 7365 + }, + { + "epoch": 0.7193359375, + "grad_norm": 
0.16244156658649445, + "learning_rate": 0.00013423020992342177, + "loss": 4.4609, + "step": 7366 + }, + { + "epoch": 0.71943359375, + "grad_norm": 0.15674875676631927, + "learning_rate": 0.00013417556594250686, + "loss": 4.4141, + "step": 7367 + }, + { + "epoch": 0.71953125, + "grad_norm": 0.1687503606081009, + "learning_rate": 0.000134120935613565, + "loss": 4.418, + "step": 7368 + }, + { + "epoch": 0.71962890625, + "grad_norm": 0.15094424784183502, + "learning_rate": 0.00013406631894189233, + "loss": 4.4258, + "step": 7369 + }, + { + "epoch": 0.7197265625, + "grad_norm": 0.1763504594564438, + "learning_rate": 0.0001340117159327835, + "loss": 4.4297, + "step": 7370 + }, + { + "epoch": 0.71982421875, + "grad_norm": 0.164609894156456, + "learning_rate": 0.0001339571265915319, + "loss": 4.4297, + "step": 7371 + }, + { + "epoch": 0.719921875, + "grad_norm": 0.16353648900985718, + "learning_rate": 0.00013390255092342963, + "loss": 4.4336, + "step": 7372 + }, + { + "epoch": 0.72001953125, + "grad_norm": 0.16545608639717102, + "learning_rate": 0.00013384798893376748, + "loss": 4.418, + "step": 7373 + }, + { + "epoch": 0.7201171875, + "grad_norm": 0.16186077892780304, + "learning_rate": 0.00013379344062783478, + "loss": 4.4531, + "step": 7374 + }, + { + "epoch": 0.72021484375, + "grad_norm": 0.15740644931793213, + "learning_rate": 0.00013373890601091955, + "loss": 4.4453, + "step": 7375 + }, + { + "epoch": 0.7203125, + "grad_norm": 0.15783922374248505, + "learning_rate": 0.00013368438508830872, + "loss": 4.4297, + "step": 7376 + }, + { + "epoch": 0.72041015625, + "grad_norm": 0.15968593955039978, + "learning_rate": 0.00013362987786528761, + "loss": 4.4336, + "step": 7377 + }, + { + "epoch": 0.7205078125, + "grad_norm": 0.1517067700624466, + "learning_rate": 0.00013357538434714038, + "loss": 4.4375, + "step": 7378 + }, + { + "epoch": 0.72060546875, + "grad_norm": 0.15611544251441956, + "learning_rate": 0.00013352090453914978, + "loss": 4.4141, + "step": 7379 + }, + { + "epoch": 0.720703125, + "grad_norm": 0.15504461526870728, + "learning_rate": 0.00013346643844659722, + "loss": 4.4375, + "step": 7380 + }, + { + "epoch": 0.72080078125, + "grad_norm": 0.1582416445016861, + "learning_rate": 0.000133411986074763, + "loss": 4.4375, + "step": 7381 + }, + { + "epoch": 0.7208984375, + "grad_norm": 0.16449101269245148, + "learning_rate": 0.00013335754742892564, + "loss": 4.4062, + "step": 7382 + }, + { + "epoch": 0.72099609375, + "grad_norm": 0.1597408503293991, + "learning_rate": 0.00013330312251436278, + "loss": 4.4492, + "step": 7383 + }, + { + "epoch": 0.72109375, + "grad_norm": 0.15227282047271729, + "learning_rate": 0.00013324871133635045, + "loss": 4.4258, + "step": 7384 + }, + { + "epoch": 0.72119140625, + "grad_norm": 0.15393191576004028, + "learning_rate": 0.00013319431390016348, + "loss": 4.4219, + "step": 7385 + }, + { + "epoch": 0.7212890625, + "grad_norm": 0.16736654937267303, + "learning_rate": 0.00013313993021107538, + "loss": 4.4414, + "step": 7386 + }, + { + "epoch": 0.72138671875, + "grad_norm": 0.15382325649261475, + "learning_rate": 0.0001330855602743582, + "loss": 4.3984, + "step": 7387 + }, + { + "epoch": 0.721484375, + "grad_norm": 0.1680581271648407, + "learning_rate": 0.0001330312040952828, + "loss": 4.4219, + "step": 7388 + }, + { + "epoch": 0.72158203125, + "grad_norm": 0.15622518956661224, + "learning_rate": 0.0001329768616791186, + "loss": 4.4219, + "step": 7389 + }, + { + "epoch": 0.7216796875, + "grad_norm": 0.1584346443414688, + "learning_rate": 0.00013292253303113382, + 
"loss": 4.4453, + "step": 7390 + }, + { + "epoch": 0.72177734375, + "grad_norm": 0.15779300034046173, + "learning_rate": 0.00013286821815659513, + "loss": 4.4102, + "step": 7391 + }, + { + "epoch": 0.721875, + "grad_norm": 0.15593859553337097, + "learning_rate": 0.00013281391706076803, + "loss": 4.4219, + "step": 7392 + }, + { + "epoch": 0.72197265625, + "grad_norm": 0.16628959774971008, + "learning_rate": 0.00013275962974891665, + "loss": 4.3906, + "step": 7393 + }, + { + "epoch": 0.7220703125, + "grad_norm": 0.14737896621227264, + "learning_rate": 0.00013270535622630374, + "loss": 4.4453, + "step": 7394 + }, + { + "epoch": 0.72216796875, + "grad_norm": 0.16434019804000854, + "learning_rate": 0.00013265109649819085, + "loss": 4.4219, + "step": 7395 + }, + { + "epoch": 0.722265625, + "grad_norm": 0.15081071853637695, + "learning_rate": 0.000132596850569838, + "loss": 4.4102, + "step": 7396 + }, + { + "epoch": 0.72236328125, + "grad_norm": 0.15670959651470184, + "learning_rate": 0.000132542618446504, + "loss": 4.4102, + "step": 7397 + }, + { + "epoch": 0.7224609375, + "grad_norm": 0.15079860389232635, + "learning_rate": 0.0001324884001334463, + "loss": 4.4453, + "step": 7398 + }, + { + "epoch": 0.72255859375, + "grad_norm": 0.14780817925930023, + "learning_rate": 0.000132434195635921, + "loss": 4.4258, + "step": 7399 + }, + { + "epoch": 0.72265625, + "grad_norm": 0.15228857100009918, + "learning_rate": 0.00013238000495918278, + "loss": 4.4062, + "step": 7400 + }, + { + "epoch": 0.72275390625, + "grad_norm": 0.15700843930244446, + "learning_rate": 0.00013232582810848514, + "loss": 4.4375, + "step": 7401 + }, + { + "epoch": 0.7228515625, + "grad_norm": 0.15100954473018646, + "learning_rate": 0.00013227166508908013, + "loss": 4.4258, + "step": 7402 + }, + { + "epoch": 0.72294921875, + "grad_norm": 0.15761396288871765, + "learning_rate": 0.00013221751590621845, + "loss": 4.4141, + "step": 7403 + }, + { + "epoch": 0.723046875, + "grad_norm": 0.16001614928245544, + "learning_rate": 0.0001321633805651496, + "loss": 4.4062, + "step": 7404 + }, + { + "epoch": 0.72314453125, + "grad_norm": 0.148284450173378, + "learning_rate": 0.0001321092590711215, + "loss": 4.4062, + "step": 7405 + }, + { + "epoch": 0.7232421875, + "grad_norm": 0.15888842940330505, + "learning_rate": 0.000132055151429381, + "loss": 4.3828, + "step": 7406 + }, + { + "epoch": 0.72333984375, + "grad_norm": 0.16330517828464508, + "learning_rate": 0.00013200105764517338, + "loss": 4.4336, + "step": 7407 + }, + { + "epoch": 0.7234375, + "grad_norm": 0.160234734416008, + "learning_rate": 0.00013194697772374268, + "loss": 4.4375, + "step": 7408 + }, + { + "epoch": 0.72353515625, + "grad_norm": 0.15380513668060303, + "learning_rate": 0.00013189291167033158, + "loss": 4.4453, + "step": 7409 + }, + { + "epoch": 0.7236328125, + "grad_norm": 0.15520133078098297, + "learning_rate": 0.0001318388594901814, + "loss": 4.4727, + "step": 7410 + }, + { + "epoch": 0.72373046875, + "grad_norm": 0.14864704012870789, + "learning_rate": 0.00013178482118853218, + "loss": 4.4297, + "step": 7411 + }, + { + "epoch": 0.723828125, + "grad_norm": 0.15043708682060242, + "learning_rate": 0.00013173079677062253, + "loss": 4.4141, + "step": 7412 + }, + { + "epoch": 0.72392578125, + "grad_norm": 0.15654103457927704, + "learning_rate": 0.00013167678624168976, + "loss": 4.4023, + "step": 7413 + }, + { + "epoch": 0.7240234375, + "grad_norm": 0.15767861902713776, + "learning_rate": 0.00013162278960696984, + "loss": 4.4375, + "step": 7414 + }, + { + "epoch": 0.72412109375, 
+ "grad_norm": 0.15727505087852478, + "learning_rate": 0.00013156880687169742, + "loss": 4.4414, + "step": 7415 + }, + { + "epoch": 0.72421875, + "grad_norm": 0.15632598102092743, + "learning_rate": 0.00013151483804110564, + "loss": 4.4141, + "step": 7416 + }, + { + "epoch": 0.72431640625, + "grad_norm": 0.16026270389556885, + "learning_rate": 0.00013146088312042647, + "loss": 4.4336, + "step": 7417 + }, + { + "epoch": 0.7244140625, + "grad_norm": 0.14890852570533752, + "learning_rate": 0.00013140694211489045, + "loss": 4.4023, + "step": 7418 + }, + { + "epoch": 0.72451171875, + "grad_norm": 0.15546904504299164, + "learning_rate": 0.00013135301502972688, + "loss": 4.4023, + "step": 7419 + }, + { + "epoch": 0.724609375, + "grad_norm": 0.14677095413208008, + "learning_rate": 0.00013129910187016354, + "loss": 4.418, + "step": 7420 + }, + { + "epoch": 0.72470703125, + "grad_norm": 0.15838514268398285, + "learning_rate": 0.00013124520264142696, + "loss": 4.4414, + "step": 7421 + }, + { + "epoch": 0.7248046875, + "grad_norm": 0.15500670671463013, + "learning_rate": 0.00013119131734874235, + "loss": 4.4219, + "step": 7422 + }, + { + "epoch": 0.72490234375, + "grad_norm": 0.14931590855121613, + "learning_rate": 0.00013113744599733352, + "loss": 4.4062, + "step": 7423 + }, + { + "epoch": 0.725, + "grad_norm": 0.154063418507576, + "learning_rate": 0.00013108358859242287, + "loss": 4.4102, + "step": 7424 + }, + { + "epoch": 0.72509765625, + "grad_norm": 0.1630350947380066, + "learning_rate": 0.0001310297451392315, + "loss": 4.4219, + "step": 7425 + }, + { + "epoch": 0.7251953125, + "grad_norm": 0.15256865322589874, + "learning_rate": 0.00013097591564297924, + "loss": 4.4375, + "step": 7426 + }, + { + "epoch": 0.72529296875, + "grad_norm": 0.15024515986442566, + "learning_rate": 0.00013092210010888448, + "loss": 4.3906, + "step": 7427 + }, + { + "epoch": 0.725390625, + "grad_norm": 0.14610403776168823, + "learning_rate": 0.00013086829854216424, + "loss": 4.4258, + "step": 7428 + }, + { + "epoch": 0.72548828125, + "grad_norm": 0.15125788748264313, + "learning_rate": 0.0001308145109480342, + "loss": 4.4141, + "step": 7429 + }, + { + "epoch": 0.7255859375, + "grad_norm": 0.15942911803722382, + "learning_rate": 0.0001307607373317088, + "loss": 4.457, + "step": 7430 + }, + { + "epoch": 0.72568359375, + "grad_norm": 0.15231305360794067, + "learning_rate": 0.0001307069776984009, + "loss": 4.3984, + "step": 7431 + }, + { + "epoch": 0.72578125, + "grad_norm": 0.1540101170539856, + "learning_rate": 0.00013065323205332222, + "loss": 4.4336, + "step": 7432 + }, + { + "epoch": 0.72587890625, + "grad_norm": 0.15149644017219543, + "learning_rate": 0.00013059950040168305, + "loss": 4.3984, + "step": 7433 + }, + { + "epoch": 0.7259765625, + "grad_norm": 0.14395546913146973, + "learning_rate": 0.00013054578274869223, + "loss": 4.4492, + "step": 7434 + }, + { + "epoch": 0.72607421875, + "grad_norm": 0.1618860512971878, + "learning_rate": 0.00013049207909955735, + "loss": 4.4258, + "step": 7435 + }, + { + "epoch": 0.726171875, + "grad_norm": 0.16168297827243805, + "learning_rate": 0.0001304383894594846, + "loss": 4.4258, + "step": 7436 + }, + { + "epoch": 0.72626953125, + "grad_norm": 0.15821342170238495, + "learning_rate": 0.0001303847138336789, + "loss": 4.4297, + "step": 7437 + }, + { + "epoch": 0.7263671875, + "grad_norm": 0.15154117345809937, + "learning_rate": 0.00013033105222734365, + "loss": 4.3789, + "step": 7438 + }, + { + "epoch": 0.72646484375, + "grad_norm": 0.1537981480360031, + "learning_rate": 
0.000130277404645681, + "loss": 4.4336, + "step": 7439 + }, + { + "epoch": 0.7265625, + "grad_norm": 0.15337447822093964, + "learning_rate": 0.00013022377109389179, + "loss": 4.4336, + "step": 7440 + }, + { + "epoch": 0.72666015625, + "grad_norm": 0.15082216262817383, + "learning_rate": 0.0001301701515771753, + "loss": 4.4336, + "step": 7441 + }, + { + "epoch": 0.7267578125, + "grad_norm": 0.1567872315645218, + "learning_rate": 0.0001301165461007297, + "loss": 4.4297, + "step": 7442 + }, + { + "epoch": 0.72685546875, + "grad_norm": 0.1583169549703598, + "learning_rate": 0.00013006295466975171, + "loss": 4.3984, + "step": 7443 + }, + { + "epoch": 0.726953125, + "grad_norm": 0.15436609089374542, + "learning_rate": 0.00013000937728943645, + "loss": 4.4102, + "step": 7444 + }, + { + "epoch": 0.72705078125, + "grad_norm": 0.16267691552639008, + "learning_rate": 0.00012995581396497806, + "loss": 4.4141, + "step": 7445 + }, + { + "epoch": 0.7271484375, + "grad_norm": 0.14685796201229095, + "learning_rate": 0.00012990226470156909, + "loss": 4.4062, + "step": 7446 + }, + { + "epoch": 0.72724609375, + "grad_norm": 0.14918221533298492, + "learning_rate": 0.00012984872950440074, + "loss": 4.3984, + "step": 7447 + }, + { + "epoch": 0.72734375, + "grad_norm": 0.15221072733402252, + "learning_rate": 0.00012979520837866294, + "loss": 4.418, + "step": 7448 + }, + { + "epoch": 0.72744140625, + "grad_norm": 0.16070111095905304, + "learning_rate": 0.00012974170132954417, + "loss": 4.4258, + "step": 7449 + }, + { + "epoch": 0.7275390625, + "grad_norm": 0.1635676771402359, + "learning_rate": 0.00012968820836223156, + "loss": 4.4414, + "step": 7450 + }, + { + "epoch": 0.72763671875, + "grad_norm": 0.1487974226474762, + "learning_rate": 0.00012963472948191097, + "loss": 4.4375, + "step": 7451 + }, + { + "epoch": 0.727734375, + "grad_norm": 0.16396839916706085, + "learning_rate": 0.00012958126469376675, + "loss": 4.3672, + "step": 7452 + }, + { + "epoch": 0.72783203125, + "grad_norm": 0.150769904255867, + "learning_rate": 0.00012952781400298198, + "loss": 4.418, + "step": 7453 + }, + { + "epoch": 0.7279296875, + "grad_norm": 0.16643275320529938, + "learning_rate": 0.00012947437741473827, + "loss": 4.4453, + "step": 7454 + }, + { + "epoch": 0.72802734375, + "grad_norm": 0.14956697821617126, + "learning_rate": 0.00012942095493421603, + "loss": 4.4609, + "step": 7455 + }, + { + "epoch": 0.728125, + "grad_norm": 0.16435708105564117, + "learning_rate": 0.00012936754656659415, + "loss": 4.4688, + "step": 7456 + }, + { + "epoch": 0.72822265625, + "grad_norm": 0.16113829612731934, + "learning_rate": 0.00012931415231705017, + "loss": 4.3828, + "step": 7457 + }, + { + "epoch": 0.7283203125, + "grad_norm": 0.15606369078159332, + "learning_rate": 0.00012926077219076038, + "loss": 4.3789, + "step": 7458 + }, + { + "epoch": 0.72841796875, + "grad_norm": 0.1702394187450409, + "learning_rate": 0.00012920740619289957, + "loss": 4.4141, + "step": 7459 + }, + { + "epoch": 0.728515625, + "grad_norm": 0.15538500249385834, + "learning_rate": 0.00012915405432864125, + "loss": 4.4219, + "step": 7460 + }, + { + "epoch": 0.72861328125, + "grad_norm": 0.15201862156391144, + "learning_rate": 0.00012910071660315753, + "loss": 4.4023, + "step": 7461 + }, + { + "epoch": 0.7287109375, + "grad_norm": 0.15377171337604523, + "learning_rate": 0.00012904739302161907, + "loss": 4.3945, + "step": 7462 + }, + { + "epoch": 0.72880859375, + "grad_norm": 0.15379291772842407, + "learning_rate": 0.00012899408358919536, + "loss": 4.4141, + "step": 7463 + }, + 
{ + "epoch": 0.72890625, + "grad_norm": 0.15855009853839874, + "learning_rate": 0.00012894078831105426, + "loss": 4.4492, + "step": 7464 + }, + { + "epoch": 0.72900390625, + "grad_norm": 0.16196788847446442, + "learning_rate": 0.00012888750719236238, + "loss": 4.3906, + "step": 7465 + }, + { + "epoch": 0.7291015625, + "grad_norm": 0.1481006145477295, + "learning_rate": 0.00012883424023828506, + "loss": 4.4492, + "step": 7466 + }, + { + "epoch": 0.72919921875, + "grad_norm": 0.16652144491672516, + "learning_rate": 0.00012878098745398606, + "loss": 4.4023, + "step": 7467 + }, + { + "epoch": 0.729296875, + "grad_norm": 0.1612030565738678, + "learning_rate": 0.00012872774884462797, + "loss": 4.418, + "step": 7468 + }, + { + "epoch": 0.72939453125, + "grad_norm": 0.15509814023971558, + "learning_rate": 0.00012867452441537187, + "loss": 4.3828, + "step": 7469 + }, + { + "epoch": 0.7294921875, + "grad_norm": 0.15513379871845245, + "learning_rate": 0.0001286213141713775, + "loss": 4.4531, + "step": 7470 + }, + { + "epoch": 0.72958984375, + "grad_norm": 0.15221793949604034, + "learning_rate": 0.0001285681181178033, + "loss": 4.4102, + "step": 7471 + }, + { + "epoch": 0.7296875, + "grad_norm": 0.14902381598949432, + "learning_rate": 0.00012851493625980616, + "loss": 4.4258, + "step": 7472 + }, + { + "epoch": 0.72978515625, + "grad_norm": 0.15477603673934937, + "learning_rate": 0.00012846176860254182, + "loss": 4.418, + "step": 7473 + }, + { + "epoch": 0.7298828125, + "grad_norm": 0.15169410407543182, + "learning_rate": 0.0001284086151511644, + "loss": 4.4141, + "step": 7474 + }, + { + "epoch": 0.72998046875, + "grad_norm": 0.1581718474626541, + "learning_rate": 0.0001283554759108268, + "loss": 4.4297, + "step": 7475 + }, + { + "epoch": 0.730078125, + "grad_norm": 0.14823158085346222, + "learning_rate": 0.00012830235088668058, + "loss": 4.418, + "step": 7476 + }, + { + "epoch": 0.73017578125, + "grad_norm": 0.15780621767044067, + "learning_rate": 0.00012824924008387572, + "loss": 4.4297, + "step": 7477 + }, + { + "epoch": 0.7302734375, + "grad_norm": 0.15449823439121246, + "learning_rate": 0.00012819614350756116, + "loss": 4.4258, + "step": 7478 + }, + { + "epoch": 0.73037109375, + "grad_norm": 0.15220820903778076, + "learning_rate": 0.00012814306116288405, + "loss": 4.4297, + "step": 7479 + }, + { + "epoch": 0.73046875, + "grad_norm": 0.1546747088432312, + "learning_rate": 0.0001280899930549904, + "loss": 4.457, + "step": 7480 + }, + { + "epoch": 0.73056640625, + "grad_norm": 0.1491554230451584, + "learning_rate": 0.0001280369391890249, + "loss": 4.3945, + "step": 7481 + }, + { + "epoch": 0.7306640625, + "grad_norm": 0.1559380143880844, + "learning_rate": 0.00012798389957013076, + "loss": 4.4297, + "step": 7482 + }, + { + "epoch": 0.73076171875, + "grad_norm": 0.14733648300170898, + "learning_rate": 0.00012793087420344968, + "loss": 4.3984, + "step": 7483 + }, + { + "epoch": 0.730859375, + "grad_norm": 0.1558484435081482, + "learning_rate": 0.0001278778630941222, + "loss": 4.3984, + "step": 7484 + }, + { + "epoch": 0.73095703125, + "grad_norm": 0.15226523578166962, + "learning_rate": 0.00012782486624728735, + "loss": 4.4102, + "step": 7485 + }, + { + "epoch": 0.7310546875, + "grad_norm": 0.14782686531543732, + "learning_rate": 0.00012777188366808287, + "loss": 4.418, + "step": 7486 + }, + { + "epoch": 0.73115234375, + "grad_norm": 0.15709908306598663, + "learning_rate": 0.000127718915361645, + "loss": 4.418, + "step": 7487 + }, + { + "epoch": 0.73125, + "grad_norm": 0.14999830722808838, + 
"learning_rate": 0.00012766596133310866, + "loss": 4.4219, + "step": 7488 + }, + { + "epoch": 0.73134765625, + "grad_norm": 0.16267339885234833, + "learning_rate": 0.00012761302158760746, + "loss": 4.4102, + "step": 7489 + }, + { + "epoch": 0.7314453125, + "grad_norm": 0.15309545397758484, + "learning_rate": 0.00012756009613027348, + "loss": 4.4688, + "step": 7490 + }, + { + "epoch": 0.73154296875, + "grad_norm": 0.1476503312587738, + "learning_rate": 0.0001275071849662376, + "loss": 4.4453, + "step": 7491 + }, + { + "epoch": 0.731640625, + "grad_norm": 0.1653166562318802, + "learning_rate": 0.000127454288100629, + "loss": 4.4297, + "step": 7492 + }, + { + "epoch": 0.73173828125, + "grad_norm": 0.14850600063800812, + "learning_rate": 0.00012740140553857575, + "loss": 4.418, + "step": 7493 + }, + { + "epoch": 0.7318359375, + "grad_norm": 0.17374801635742188, + "learning_rate": 0.00012734853728520453, + "loss": 4.4336, + "step": 7494 + }, + { + "epoch": 0.73193359375, + "grad_norm": 0.15782494843006134, + "learning_rate": 0.0001272956833456405, + "loss": 4.4336, + "step": 7495 + }, + { + "epoch": 0.73203125, + "grad_norm": 0.15229032933712006, + "learning_rate": 0.0001272428437250075, + "loss": 4.4297, + "step": 7496 + }, + { + "epoch": 0.73212890625, + "grad_norm": 0.15806634724140167, + "learning_rate": 0.00012719001842842793, + "loss": 4.4375, + "step": 7497 + }, + { + "epoch": 0.7322265625, + "grad_norm": 0.15328891575336456, + "learning_rate": 0.0001271372074610229, + "loss": 4.4375, + "step": 7498 + }, + { + "epoch": 0.73232421875, + "grad_norm": 0.16478852927684784, + "learning_rate": 0.0001270844108279121, + "loss": 4.4414, + "step": 7499 + }, + { + "epoch": 0.732421875, + "grad_norm": 0.15683677792549133, + "learning_rate": 0.00012703162853421374, + "loss": 4.3984, + "step": 7500 + }, + { + "epoch": 0.73251953125, + "grad_norm": 0.16330599784851074, + "learning_rate": 0.00012697886058504474, + "loss": 4.4531, + "step": 7501 + }, + { + "epoch": 0.7326171875, + "grad_norm": 0.15203440189361572, + "learning_rate": 0.00012692610698552056, + "loss": 4.4258, + "step": 7502 + }, + { + "epoch": 0.73271484375, + "grad_norm": 0.15931281447410583, + "learning_rate": 0.0001268733677407553, + "loss": 4.4219, + "step": 7503 + }, + { + "epoch": 0.7328125, + "grad_norm": 0.1529395431280136, + "learning_rate": 0.00012682064285586176, + "loss": 4.4141, + "step": 7504 + }, + { + "epoch": 0.73291015625, + "grad_norm": 0.1594206839799881, + "learning_rate": 0.00012676793233595113, + "loss": 4.4023, + "step": 7505 + }, + { + "epoch": 0.7330078125, + "grad_norm": 0.15525861084461212, + "learning_rate": 0.00012671523618613346, + "loss": 4.3906, + "step": 7506 + }, + { + "epoch": 0.73310546875, + "grad_norm": 0.15295933187007904, + "learning_rate": 0.00012666255441151728, + "loss": 4.4023, + "step": 7507 + }, + { + "epoch": 0.733203125, + "grad_norm": 0.16119632124900818, + "learning_rate": 0.00012660988701720962, + "loss": 4.4102, + "step": 7508 + }, + { + "epoch": 0.73330078125, + "grad_norm": 0.16188406944274902, + "learning_rate": 0.00012655723400831627, + "loss": 4.3867, + "step": 7509 + }, + { + "epoch": 0.7333984375, + "grad_norm": 0.1621178686618805, + "learning_rate": 0.00012650459538994163, + "loss": 4.4219, + "step": 7510 + }, + { + "epoch": 0.73349609375, + "grad_norm": 0.15490268170833588, + "learning_rate": 0.0001264519711671886, + "loss": 4.4375, + "step": 7511 + }, + { + "epoch": 0.73359375, + "grad_norm": 0.16095499694347382, + "learning_rate": 0.00012639936134515876, + "loss": 4.4258, + 
"step": 7512 + }, + { + "epoch": 0.73369140625, + "grad_norm": 0.15624520182609558, + "learning_rate": 0.00012634676592895231, + "loss": 4.4336, + "step": 7513 + }, + { + "epoch": 0.7337890625, + "grad_norm": 0.1698998659849167, + "learning_rate": 0.00012629418492366795, + "loss": 4.418, + "step": 7514 + }, + { + "epoch": 0.73388671875, + "grad_norm": 0.16680948436260223, + "learning_rate": 0.00012624161833440313, + "loss": 4.4102, + "step": 7515 + }, + { + "epoch": 0.733984375, + "grad_norm": 0.15718711912631989, + "learning_rate": 0.00012618906616625384, + "loss": 4.4023, + "step": 7516 + }, + { + "epoch": 0.73408203125, + "grad_norm": 0.17512069642543793, + "learning_rate": 0.0001261365284243145, + "loss": 4.3828, + "step": 7517 + }, + { + "epoch": 0.7341796875, + "grad_norm": 0.15175138413906097, + "learning_rate": 0.00012608400511367846, + "loss": 4.4219, + "step": 7518 + }, + { + "epoch": 0.73427734375, + "grad_norm": 0.15610924363136292, + "learning_rate": 0.00012603149623943737, + "loss": 4.4023, + "step": 7519 + }, + { + "epoch": 0.734375, + "grad_norm": 0.16253934800624847, + "learning_rate": 0.0001259790018066817, + "loss": 4.4492, + "step": 7520 + }, + { + "epoch": 0.73447265625, + "grad_norm": 0.15393897891044617, + "learning_rate": 0.0001259265218205004, + "loss": 4.4375, + "step": 7521 + }, + { + "epoch": 0.7345703125, + "grad_norm": 0.14990298449993134, + "learning_rate": 0.00012587405628598102, + "loss": 4.3984, + "step": 7522 + }, + { + "epoch": 0.73466796875, + "grad_norm": 0.1689717024564743, + "learning_rate": 0.0001258216052082098, + "loss": 4.4297, + "step": 7523 + }, + { + "epoch": 0.734765625, + "grad_norm": 0.15780244767665863, + "learning_rate": 0.00012576916859227149, + "loss": 4.4062, + "step": 7524 + }, + { + "epoch": 0.73486328125, + "grad_norm": 0.15969839692115784, + "learning_rate": 0.00012571674644324944, + "loss": 4.4414, + "step": 7525 + }, + { + "epoch": 0.7349609375, + "grad_norm": 0.15832704305648804, + "learning_rate": 0.00012566433876622557, + "loss": 4.4414, + "step": 7526 + }, + { + "epoch": 0.73505859375, + "grad_norm": 0.15280450880527496, + "learning_rate": 0.00012561194556628058, + "loss": 4.4062, + "step": 7527 + }, + { + "epoch": 0.73515625, + "grad_norm": 0.17155613005161285, + "learning_rate": 0.0001255595668484935, + "loss": 4.4219, + "step": 7528 + }, + { + "epoch": 0.73525390625, + "grad_norm": 0.16319015622138977, + "learning_rate": 0.00012550720261794222, + "loss": 4.418, + "step": 7529 + }, + { + "epoch": 0.7353515625, + "grad_norm": 0.170371413230896, + "learning_rate": 0.00012545485287970297, + "loss": 4.3984, + "step": 7530 + }, + { + "epoch": 0.73544921875, + "grad_norm": 0.1613108217716217, + "learning_rate": 0.00012540251763885085, + "loss": 4.4102, + "step": 7531 + }, + { + "epoch": 0.735546875, + "grad_norm": 0.16210253536701202, + "learning_rate": 0.0001253501969004593, + "loss": 4.4492, + "step": 7532 + }, + { + "epoch": 0.73564453125, + "grad_norm": 0.15581655502319336, + "learning_rate": 0.00012529789066960054, + "loss": 4.3945, + "step": 7533 + }, + { + "epoch": 0.7357421875, + "grad_norm": 0.17129500210285187, + "learning_rate": 0.00012524559895134518, + "loss": 4.4297, + "step": 7534 + }, + { + "epoch": 0.73583984375, + "grad_norm": 0.1642560064792633, + "learning_rate": 0.0001251933217507626, + "loss": 4.4062, + "step": 7535 + }, + { + "epoch": 0.7359375, + "grad_norm": 0.17345158755779266, + "learning_rate": 0.00012514105907292074, + "loss": 4.4727, + "step": 7536 + }, + { + "epoch": 0.73603515625, + "grad_norm": 
0.15783734619617462, + "learning_rate": 0.0001250888109228861, + "loss": 4.418, + "step": 7537 + }, + { + "epoch": 0.7361328125, + "grad_norm": 0.1812889128923416, + "learning_rate": 0.00012503657730572382, + "loss": 4.4492, + "step": 7538 + }, + { + "epoch": 0.73623046875, + "grad_norm": 0.15036939084529877, + "learning_rate": 0.00012498435822649756, + "loss": 4.4414, + "step": 7539 + }, + { + "epoch": 0.736328125, + "grad_norm": 0.1680668294429779, + "learning_rate": 0.0001249321536902696, + "loss": 4.3867, + "step": 7540 + }, + { + "epoch": 0.73642578125, + "grad_norm": 0.16024844348430634, + "learning_rate": 0.0001248799637021009, + "loss": 4.4219, + "step": 7541 + }, + { + "epoch": 0.7365234375, + "grad_norm": 0.15455125272274017, + "learning_rate": 0.00012482778826705077, + "loss": 4.4375, + "step": 7542 + }, + { + "epoch": 0.73662109375, + "grad_norm": 0.1603914499282837, + "learning_rate": 0.00012477562739017736, + "loss": 4.418, + "step": 7543 + }, + { + "epoch": 0.73671875, + "grad_norm": 0.15817278623580933, + "learning_rate": 0.00012472348107653728, + "loss": 4.4453, + "step": 7544 + }, + { + "epoch": 0.73681640625, + "grad_norm": 0.15304195880889893, + "learning_rate": 0.0001246713493311858, + "loss": 4.4062, + "step": 7545 + }, + { + "epoch": 0.7369140625, + "grad_norm": 0.15008258819580078, + "learning_rate": 0.0001246192321591767, + "loss": 4.4414, + "step": 7546 + }, + { + "epoch": 0.73701171875, + "grad_norm": 0.1584523469209671, + "learning_rate": 0.0001245671295655624, + "loss": 4.4258, + "step": 7547 + }, + { + "epoch": 0.737109375, + "grad_norm": 0.15383468568325043, + "learning_rate": 0.00012451504155539393, + "loss": 4.4141, + "step": 7548 + }, + { + "epoch": 0.73720703125, + "grad_norm": 0.14985670149326324, + "learning_rate": 0.00012446296813372083, + "loss": 4.4258, + "step": 7549 + }, + { + "epoch": 0.7373046875, + "grad_norm": 0.16687902808189392, + "learning_rate": 0.0001244109093055913, + "loss": 4.4453, + "step": 7550 + }, + { + "epoch": 0.73740234375, + "grad_norm": 0.16249427199363708, + "learning_rate": 0.00012435886507605206, + "loss": 4.4336, + "step": 7551 + }, + { + "epoch": 0.7375, + "grad_norm": 0.1499052345752716, + "learning_rate": 0.0001243068354501484, + "loss": 4.3828, + "step": 7552 + }, + { + "epoch": 0.73759765625, + "grad_norm": 0.16798412799835205, + "learning_rate": 0.0001242548204329243, + "loss": 4.4375, + "step": 7553 + }, + { + "epoch": 0.7376953125, + "grad_norm": 0.14942780137062073, + "learning_rate": 0.00012420282002942223, + "loss": 4.4414, + "step": 7554 + }, + { + "epoch": 0.73779296875, + "grad_norm": 0.15841558575630188, + "learning_rate": 0.0001241508342446833, + "loss": 4.3828, + "step": 7555 + }, + { + "epoch": 0.737890625, + "grad_norm": 0.1492685228586197, + "learning_rate": 0.0001240988630837472, + "loss": 4.4297, + "step": 7556 + }, + { + "epoch": 0.73798828125, + "grad_norm": 0.15494926273822784, + "learning_rate": 0.00012404690655165213, + "loss": 4.4102, + "step": 7557 + }, + { + "epoch": 0.7380859375, + "grad_norm": 0.14820846915245056, + "learning_rate": 0.00012399496465343503, + "loss": 4.4141, + "step": 7558 + }, + { + "epoch": 0.73818359375, + "grad_norm": 0.14880762994289398, + "learning_rate": 0.00012394303739413114, + "loss": 4.4297, + "step": 7559 + }, + { + "epoch": 0.73828125, + "grad_norm": 0.1566554456949234, + "learning_rate": 0.00012389112477877458, + "loss": 4.4258, + "step": 7560 + }, + { + "epoch": 0.73837890625, + "grad_norm": 0.14976048469543457, + "learning_rate": 0.0001238392268123979, + 
"loss": 4.4258, + "step": 7561 + }, + { + "epoch": 0.7384765625, + "grad_norm": 0.14974820613861084, + "learning_rate": 0.0001237873435000322, + "loss": 4.4375, + "step": 7562 + }, + { + "epoch": 0.73857421875, + "grad_norm": 0.1599554866552353, + "learning_rate": 0.0001237354748467073, + "loss": 4.4023, + "step": 7563 + }, + { + "epoch": 0.738671875, + "grad_norm": 0.1461082249879837, + "learning_rate": 0.00012368362085745147, + "loss": 4.418, + "step": 7564 + }, + { + "epoch": 0.73876953125, + "grad_norm": 0.1649254560470581, + "learning_rate": 0.00012363178153729164, + "loss": 4.4141, + "step": 7565 + }, + { + "epoch": 0.7388671875, + "grad_norm": 0.15947631001472473, + "learning_rate": 0.0001235799568912533, + "loss": 4.4102, + "step": 7566 + }, + { + "epoch": 0.73896484375, + "grad_norm": 0.14603644609451294, + "learning_rate": 0.00012352814692436035, + "loss": 4.4219, + "step": 7567 + }, + { + "epoch": 0.7390625, + "grad_norm": 0.16181069612503052, + "learning_rate": 0.00012347635164163554, + "loss": 4.3945, + "step": 7568 + }, + { + "epoch": 0.73916015625, + "grad_norm": 0.1609387993812561, + "learning_rate": 0.00012342457104810002, + "loss": 4.4258, + "step": 7569 + }, + { + "epoch": 0.7392578125, + "grad_norm": 0.15482045710086823, + "learning_rate": 0.00012337280514877362, + "loss": 4.418, + "step": 7570 + }, + { + "epoch": 0.73935546875, + "grad_norm": 0.1657499223947525, + "learning_rate": 0.00012332105394867465, + "loss": 4.4492, + "step": 7571 + }, + { + "epoch": 0.739453125, + "grad_norm": 0.15112797915935516, + "learning_rate": 0.00012326931745282004, + "loss": 4.4102, + "step": 7572 + }, + { + "epoch": 0.73955078125, + "grad_norm": 0.16433358192443848, + "learning_rate": 0.00012321759566622534, + "loss": 4.4102, + "step": 7573 + }, + { + "epoch": 0.7396484375, + "grad_norm": 0.16014279425144196, + "learning_rate": 0.00012316588859390457, + "loss": 4.3945, + "step": 7574 + }, + { + "epoch": 0.73974609375, + "grad_norm": 0.15190166234970093, + "learning_rate": 0.00012311419624087046, + "loss": 4.4375, + "step": 7575 + }, + { + "epoch": 0.73984375, + "grad_norm": 0.16003470122814178, + "learning_rate": 0.00012306251861213412, + "loss": 4.4414, + "step": 7576 + }, + { + "epoch": 0.73994140625, + "grad_norm": 0.15121148526668549, + "learning_rate": 0.0001230108557127054, + "loss": 4.3945, + "step": 7577 + }, + { + "epoch": 0.7400390625, + "grad_norm": 0.1583106368780136, + "learning_rate": 0.00012295920754759262, + "loss": 4.4023, + "step": 7578 + }, + { + "epoch": 0.74013671875, + "grad_norm": 0.15677836537361145, + "learning_rate": 0.00012290757412180281, + "loss": 4.4258, + "step": 7579 + }, + { + "epoch": 0.740234375, + "grad_norm": 0.14685946702957153, + "learning_rate": 0.00012285595544034143, + "loss": 4.3906, + "step": 7580 + }, + { + "epoch": 0.74033203125, + "grad_norm": 0.1509077548980713, + "learning_rate": 0.00012280435150821255, + "loss": 4.4062, + "step": 7581 + }, + { + "epoch": 0.7404296875, + "grad_norm": 0.1460164487361908, + "learning_rate": 0.00012275276233041885, + "loss": 4.3984, + "step": 7582 + }, + { + "epoch": 0.74052734375, + "grad_norm": 0.14282438158988953, + "learning_rate": 0.00012270118791196162, + "loss": 4.4062, + "step": 7583 + }, + { + "epoch": 0.740625, + "grad_norm": 0.15622282028198242, + "learning_rate": 0.00012264962825784046, + "loss": 4.4453, + "step": 7584 + }, + { + "epoch": 0.74072265625, + "grad_norm": 0.14823557436466217, + "learning_rate": 0.00012259808337305384, + "loss": 4.4375, + "step": 7585 + }, + { + "epoch": 0.7408203125, 
+ "grad_norm": 0.1587832123041153, + "learning_rate": 0.0001225465532625987, + "loss": 4.4102, + "step": 7586 + }, + { + "epoch": 0.74091796875, + "grad_norm": 0.1442839801311493, + "learning_rate": 0.0001224950379314705, + "loss": 4.4023, + "step": 7587 + }, + { + "epoch": 0.741015625, + "grad_norm": 0.15884824097156525, + "learning_rate": 0.00012244353738466335, + "loss": 4.4492, + "step": 7588 + }, + { + "epoch": 0.74111328125, + "grad_norm": 0.14947634935379028, + "learning_rate": 0.00012239205162716983, + "loss": 4.4023, + "step": 7589 + }, + { + "epoch": 0.7412109375, + "grad_norm": 0.14423982799053192, + "learning_rate": 0.00012234058066398114, + "loss": 4.418, + "step": 7590 + }, + { + "epoch": 0.74130859375, + "grad_norm": 0.14842036366462708, + "learning_rate": 0.00012228912450008706, + "loss": 4.4336, + "step": 7591 + }, + { + "epoch": 0.74140625, + "grad_norm": 0.155101016163826, + "learning_rate": 0.000122237683140476, + "loss": 4.3867, + "step": 7592 + }, + { + "epoch": 0.74150390625, + "grad_norm": 0.1501379907131195, + "learning_rate": 0.00012218625659013465, + "loss": 4.4297, + "step": 7593 + }, + { + "epoch": 0.7416015625, + "grad_norm": 0.15750110149383545, + "learning_rate": 0.0001221348448540486, + "loss": 4.4141, + "step": 7594 + }, + { + "epoch": 0.74169921875, + "grad_norm": 0.15614420175552368, + "learning_rate": 0.0001220834479372018, + "loss": 4.418, + "step": 7595 + }, + { + "epoch": 0.741796875, + "grad_norm": 0.15961110591888428, + "learning_rate": 0.00012203206584457692, + "loss": 4.4531, + "step": 7596 + }, + { + "epoch": 0.74189453125, + "grad_norm": 0.15128639340400696, + "learning_rate": 0.00012198069858115508, + "loss": 4.375, + "step": 7597 + }, + { + "epoch": 0.7419921875, + "grad_norm": 0.1538468450307846, + "learning_rate": 0.0001219293461519159, + "loss": 4.3906, + "step": 7598 + }, + { + "epoch": 0.74208984375, + "grad_norm": 0.15233196318149567, + "learning_rate": 0.00012187800856183775, + "loss": 4.4375, + "step": 7599 + }, + { + "epoch": 0.7421875, + "grad_norm": 0.15765570104122162, + "learning_rate": 0.00012182668581589752, + "loss": 4.3984, + "step": 7600 + }, + { + "epoch": 0.74228515625, + "grad_norm": 0.15436989068984985, + "learning_rate": 0.0001217753779190704, + "loss": 4.4492, + "step": 7601 + }, + { + "epoch": 0.7423828125, + "grad_norm": 0.1517174392938614, + "learning_rate": 0.00012172408487633051, + "loss": 4.3906, + "step": 7602 + }, + { + "epoch": 0.74248046875, + "grad_norm": 0.15264186263084412, + "learning_rate": 0.00012167280669265026, + "loss": 4.4336, + "step": 7603 + }, + { + "epoch": 0.742578125, + "grad_norm": 0.14743226766586304, + "learning_rate": 0.00012162154337300076, + "loss": 4.4141, + "step": 7604 + }, + { + "epoch": 0.74267578125, + "grad_norm": 0.15598736703395844, + "learning_rate": 0.00012157029492235167, + "loss": 4.4141, + "step": 7605 + }, + { + "epoch": 0.7427734375, + "grad_norm": 0.15700574219226837, + "learning_rate": 0.00012151906134567114, + "loss": 4.4414, + "step": 7606 + }, + { + "epoch": 0.74287109375, + "grad_norm": 0.15545766055583954, + "learning_rate": 0.00012146784264792594, + "loss": 4.4453, + "step": 7607 + }, + { + "epoch": 0.74296875, + "grad_norm": 0.15554918348789215, + "learning_rate": 0.00012141663883408138, + "loss": 4.4102, + "step": 7608 + }, + { + "epoch": 0.74306640625, + "grad_norm": 0.15714141726493835, + "learning_rate": 0.00012136544990910135, + "loss": 4.4258, + "step": 7609 + }, + { + "epoch": 0.7431640625, + "grad_norm": 0.15175826847553253, + "learning_rate": 
0.00012131427587794818, + "loss": 4.4297, + "step": 7610 + }, + { + "epoch": 0.74326171875, + "grad_norm": 0.15478746592998505, + "learning_rate": 0.00012126311674558292, + "loss": 4.4375, + "step": 7611 + }, + { + "epoch": 0.743359375, + "grad_norm": 0.15218821167945862, + "learning_rate": 0.00012121197251696506, + "loss": 4.4297, + "step": 7612 + }, + { + "epoch": 0.74345703125, + "grad_norm": 0.1505499631166458, + "learning_rate": 0.0001211608431970527, + "loss": 4.3867, + "step": 7613 + }, + { + "epoch": 0.7435546875, + "grad_norm": 0.1520676612854004, + "learning_rate": 0.00012110972879080247, + "loss": 4.3867, + "step": 7614 + }, + { + "epoch": 0.74365234375, + "grad_norm": 0.15298300981521606, + "learning_rate": 0.00012105862930316957, + "loss": 4.4336, + "step": 7615 + }, + { + "epoch": 0.74375, + "grad_norm": 0.1559426337480545, + "learning_rate": 0.00012100754473910779, + "loss": 4.3984, + "step": 7616 + }, + { + "epoch": 0.74384765625, + "grad_norm": 0.1547628939151764, + "learning_rate": 0.00012095647510356944, + "loss": 4.4141, + "step": 7617 + }, + { + "epoch": 0.7439453125, + "grad_norm": 0.15586091578006744, + "learning_rate": 0.00012090542040150527, + "loss": 4.4531, + "step": 7618 + }, + { + "epoch": 0.74404296875, + "grad_norm": 0.1495155543088913, + "learning_rate": 0.00012085438063786477, + "loss": 4.418, + "step": 7619 + }, + { + "epoch": 0.744140625, + "grad_norm": 0.1518457978963852, + "learning_rate": 0.00012080335581759585, + "loss": 4.418, + "step": 7620 + }, + { + "epoch": 0.74423828125, + "grad_norm": 0.15681132674217224, + "learning_rate": 0.00012075234594564505, + "loss": 4.3945, + "step": 7621 + }, + { + "epoch": 0.7443359375, + "grad_norm": 0.14995244145393372, + "learning_rate": 0.00012070135102695744, + "loss": 4.4023, + "step": 7622 + }, + { + "epoch": 0.74443359375, + "grad_norm": 0.1429816633462906, + "learning_rate": 0.00012065037106647661, + "loss": 4.418, + "step": 7623 + }, + { + "epoch": 0.74453125, + "grad_norm": 0.15039031207561493, + "learning_rate": 0.00012059940606914474, + "loss": 4.3984, + "step": 7624 + }, + { + "epoch": 0.74462890625, + "grad_norm": 0.14444826543331146, + "learning_rate": 0.00012054845603990259, + "loss": 4.3945, + "step": 7625 + }, + { + "epoch": 0.7447265625, + "grad_norm": 0.14723604917526245, + "learning_rate": 0.0001204975209836893, + "loss": 4.4141, + "step": 7626 + }, + { + "epoch": 0.74482421875, + "grad_norm": 0.14853955805301666, + "learning_rate": 0.00012044660090544273, + "loss": 4.3945, + "step": 7627 + }, + { + "epoch": 0.744921875, + "grad_norm": 0.1482938528060913, + "learning_rate": 0.00012039569581009926, + "loss": 4.3945, + "step": 7628 + }, + { + "epoch": 0.74501953125, + "grad_norm": 0.1541387140750885, + "learning_rate": 0.00012034480570259378, + "loss": 4.4492, + "step": 7629 + }, + { + "epoch": 0.7451171875, + "grad_norm": 0.1473454236984253, + "learning_rate": 0.00012029393058785972, + "loss": 4.418, + "step": 7630 + }, + { + "epoch": 0.74521484375, + "grad_norm": 0.1487545222043991, + "learning_rate": 0.00012024307047082912, + "loss": 4.4062, + "step": 7631 + }, + { + "epoch": 0.7453125, + "grad_norm": 0.15459798276424408, + "learning_rate": 0.00012019222535643248, + "loss": 4.3906, + "step": 7632 + }, + { + "epoch": 0.74541015625, + "grad_norm": 0.15332043170928955, + "learning_rate": 0.0001201413952495989, + "loss": 4.4258, + "step": 7633 + }, + { + "epoch": 0.7455078125, + "grad_norm": 0.1491042673587799, + "learning_rate": 0.00012009058015525614, + "loss": 4.418, + "step": 7634 + }, + { + 
"epoch": 0.74560546875, + "grad_norm": 0.14598482847213745, + "learning_rate": 0.00012003978007833015, + "loss": 4.4102, + "step": 7635 + }, + { + "epoch": 0.745703125, + "grad_norm": 0.15112504363059998, + "learning_rate": 0.00011998899502374576, + "loss": 4.4297, + "step": 7636 + }, + { + "epoch": 0.74580078125, + "grad_norm": 0.150597482919693, + "learning_rate": 0.00011993822499642625, + "loss": 4.3711, + "step": 7637 + }, + { + "epoch": 0.7458984375, + "grad_norm": 0.15552370250225067, + "learning_rate": 0.00011988747000129342, + "loss": 4.4023, + "step": 7638 + }, + { + "epoch": 0.74599609375, + "grad_norm": 0.15348924696445465, + "learning_rate": 0.00011983673004326761, + "loss": 4.4062, + "step": 7639 + }, + { + "epoch": 0.74609375, + "grad_norm": 0.15508022904396057, + "learning_rate": 0.00011978600512726775, + "loss": 4.4414, + "step": 7640 + }, + { + "epoch": 0.74619140625, + "grad_norm": 0.15223389863967896, + "learning_rate": 0.00011973529525821122, + "loss": 4.4297, + "step": 7641 + }, + { + "epoch": 0.7462890625, + "grad_norm": 0.1561390906572342, + "learning_rate": 0.00011968460044101414, + "loss": 4.4531, + "step": 7642 + }, + { + "epoch": 0.74638671875, + "grad_norm": 0.1481391191482544, + "learning_rate": 0.00011963392068059082, + "loss": 4.4102, + "step": 7643 + }, + { + "epoch": 0.746484375, + "grad_norm": 0.14852924644947052, + "learning_rate": 0.00011958325598185443, + "loss": 4.418, + "step": 7644 + }, + { + "epoch": 0.74658203125, + "grad_norm": 0.15662671625614166, + "learning_rate": 0.00011953260634971655, + "loss": 4.4141, + "step": 7645 + }, + { + "epoch": 0.7466796875, + "grad_norm": 0.14440929889678955, + "learning_rate": 0.00011948197178908733, + "loss": 4.4102, + "step": 7646 + }, + { + "epoch": 0.74677734375, + "grad_norm": 0.14912916719913483, + "learning_rate": 0.00011943135230487542, + "loss": 4.4414, + "step": 7647 + }, + { + "epoch": 0.746875, + "grad_norm": 0.15084296464920044, + "learning_rate": 0.00011938074790198807, + "loss": 4.4258, + "step": 7648 + }, + { + "epoch": 0.74697265625, + "grad_norm": 0.15082819759845734, + "learning_rate": 0.00011933015858533102, + "loss": 4.3867, + "step": 7649 + }, + { + "epoch": 0.7470703125, + "grad_norm": 0.16039209067821503, + "learning_rate": 0.00011927958435980857, + "loss": 4.4062, + "step": 7650 + }, + { + "epoch": 0.74716796875, + "grad_norm": 0.15243710577487946, + "learning_rate": 0.00011922902523032361, + "loss": 4.4297, + "step": 7651 + }, + { + "epoch": 0.747265625, + "grad_norm": 0.14883774518966675, + "learning_rate": 0.00011917848120177736, + "loss": 4.418, + "step": 7652 + }, + { + "epoch": 0.74736328125, + "grad_norm": 0.15677592158317566, + "learning_rate": 0.00011912795227906978, + "loss": 4.418, + "step": 7653 + }, + { + "epoch": 0.7474609375, + "grad_norm": 0.15634267032146454, + "learning_rate": 0.00011907743846709934, + "loss": 4.4258, + "step": 7654 + }, + { + "epoch": 0.74755859375, + "grad_norm": 0.14909769594669342, + "learning_rate": 0.00011902693977076298, + "loss": 4.4219, + "step": 7655 + }, + { + "epoch": 0.74765625, + "grad_norm": 0.15294477343559265, + "learning_rate": 0.0001189764561949562, + "loss": 4.4062, + "step": 7656 + }, + { + "epoch": 0.74775390625, + "grad_norm": 0.14555934071540833, + "learning_rate": 0.0001189259877445731, + "loss": 4.4102, + "step": 7657 + }, + { + "epoch": 0.7478515625, + "grad_norm": 0.15002655982971191, + "learning_rate": 0.00011887553442450618, + "loss": 4.4297, + "step": 7658 + }, + { + "epoch": 0.74794921875, + "grad_norm": 
0.15603992342948914, + "learning_rate": 0.00011882509623964665, + "loss": 4.3867, + "step": 7659 + }, + { + "epoch": 0.748046875, + "grad_norm": 0.1526353657245636, + "learning_rate": 0.000118774673194884, + "loss": 4.4102, + "step": 7660 + }, + { + "epoch": 0.74814453125, + "grad_norm": 0.14934147894382477, + "learning_rate": 0.0001187242652951065, + "loss": 4.4258, + "step": 7661 + }, + { + "epoch": 0.7482421875, + "grad_norm": 0.15116658806800842, + "learning_rate": 0.00011867387254520083, + "loss": 4.4023, + "step": 7662 + }, + { + "epoch": 0.74833984375, + "grad_norm": 0.15265294909477234, + "learning_rate": 0.00011862349495005221, + "loss": 4.3984, + "step": 7663 + }, + { + "epoch": 0.7484375, + "grad_norm": 0.15858712792396545, + "learning_rate": 0.00011857313251454443, + "loss": 4.418, + "step": 7664 + }, + { + "epoch": 0.74853515625, + "grad_norm": 0.14755168557167053, + "learning_rate": 0.00011852278524355977, + "loss": 4.4258, + "step": 7665 + }, + { + "epoch": 0.7486328125, + "grad_norm": 0.15303902328014374, + "learning_rate": 0.00011847245314197907, + "loss": 4.4297, + "step": 7666 + }, + { + "epoch": 0.74873046875, + "grad_norm": 0.1516467183828354, + "learning_rate": 0.0001184221362146818, + "loss": 4.4023, + "step": 7667 + }, + { + "epoch": 0.748828125, + "grad_norm": 0.16038216650485992, + "learning_rate": 0.00011837183446654562, + "loss": 4.4102, + "step": 7668 + }, + { + "epoch": 0.74892578125, + "grad_norm": 0.15167488157749176, + "learning_rate": 0.00011832154790244704, + "loss": 4.3594, + "step": 7669 + }, + { + "epoch": 0.7490234375, + "grad_norm": 0.15490226447582245, + "learning_rate": 0.00011827127652726102, + "loss": 4.4492, + "step": 7670 + }, + { + "epoch": 0.74912109375, + "grad_norm": 0.15413643419742584, + "learning_rate": 0.00011822102034586101, + "loss": 4.3867, + "step": 7671 + }, + { + "epoch": 0.74921875, + "grad_norm": 0.1489761620759964, + "learning_rate": 0.00011817077936311903, + "loss": 4.4141, + "step": 7672 + }, + { + "epoch": 0.74931640625, + "grad_norm": 0.15851104259490967, + "learning_rate": 0.00011812055358390556, + "loss": 4.418, + "step": 7673 + }, + { + "epoch": 0.7494140625, + "grad_norm": 0.15494221448898315, + "learning_rate": 0.0001180703430130897, + "loss": 4.4141, + "step": 7674 + }, + { + "epoch": 0.74951171875, + "grad_norm": 0.16104602813720703, + "learning_rate": 0.00011802014765553903, + "loss": 4.3984, + "step": 7675 + }, + { + "epoch": 0.749609375, + "grad_norm": 0.1619718223810196, + "learning_rate": 0.00011796996751611965, + "loss": 4.418, + "step": 7676 + }, + { + "epoch": 0.74970703125, + "grad_norm": 0.1464228630065918, + "learning_rate": 0.00011791980259969609, + "loss": 4.4336, + "step": 7677 + }, + { + "epoch": 0.7498046875, + "grad_norm": 0.1586873084306717, + "learning_rate": 0.00011786965291113157, + "loss": 4.4023, + "step": 7678 + }, + { + "epoch": 0.74990234375, + "grad_norm": 0.154151052236557, + "learning_rate": 0.00011781951845528774, + "loss": 4.4258, + "step": 7679 + }, + { + "epoch": 0.75, + "grad_norm": 0.1583954244852066, + "learning_rate": 0.00011776939923702482, + "loss": 4.4297, + "step": 7680 + }, + { + "epoch": 0.75009765625, + "grad_norm": 0.15361805260181427, + "learning_rate": 0.00011771929526120152, + "loss": 4.4414, + "step": 7681 + }, + { + "epoch": 0.7501953125, + "grad_norm": 0.15085305273532867, + "learning_rate": 0.00011766920653267506, + "loss": 4.3945, + "step": 7682 + }, + { + "epoch": 0.75029296875, + "grad_norm": 0.1560216099023819, + "learning_rate": 0.00011761913305630123, + 
"loss": 4.4023, + "step": 7683 + }, + { + "epoch": 0.750390625, + "grad_norm": 0.15529562532901764, + "learning_rate": 0.00011756907483693436, + "loss": 4.3945, + "step": 7684 + }, + { + "epoch": 0.75048828125, + "grad_norm": 0.15947850048542023, + "learning_rate": 0.00011751903187942714, + "loss": 4.4453, + "step": 7685 + }, + { + "epoch": 0.7505859375, + "grad_norm": 0.15139102935791016, + "learning_rate": 0.00011746900418863097, + "loss": 4.4141, + "step": 7686 + }, + { + "epoch": 0.75068359375, + "grad_norm": 0.15356221795082092, + "learning_rate": 0.00011741899176939566, + "loss": 4.418, + "step": 7687 + }, + { + "epoch": 0.75078125, + "grad_norm": 0.1536005586385727, + "learning_rate": 0.00011736899462656957, + "loss": 4.4258, + "step": 7688 + }, + { + "epoch": 0.75087890625, + "grad_norm": 0.16014035046100616, + "learning_rate": 0.00011731901276499963, + "loss": 4.4102, + "step": 7689 + }, + { + "epoch": 0.7509765625, + "grad_norm": 0.15174296498298645, + "learning_rate": 0.00011726904618953122, + "loss": 4.4023, + "step": 7690 + }, + { + "epoch": 0.75107421875, + "grad_norm": 0.15922939777374268, + "learning_rate": 0.00011721909490500828, + "loss": 4.3867, + "step": 7691 + }, + { + "epoch": 0.751171875, + "grad_norm": 0.15044005215168, + "learning_rate": 0.0001171691589162732, + "loss": 4.4258, + "step": 7692 + }, + { + "epoch": 0.75126953125, + "grad_norm": 0.15720559656620026, + "learning_rate": 0.00011711923822816704, + "loss": 4.418, + "step": 7693 + }, + { + "epoch": 0.7513671875, + "grad_norm": 0.16483958065509796, + "learning_rate": 0.0001170693328455291, + "loss": 4.4492, + "step": 7694 + }, + { + "epoch": 0.75146484375, + "grad_norm": 0.1514645367860794, + "learning_rate": 0.0001170194427731975, + "loss": 4.3867, + "step": 7695 + }, + { + "epoch": 0.7515625, + "grad_norm": 0.15118782222270966, + "learning_rate": 0.00011696956801600867, + "loss": 4.418, + "step": 7696 + }, + { + "epoch": 0.75166015625, + "grad_norm": 0.1562124788761139, + "learning_rate": 0.00011691970857879767, + "loss": 4.4297, + "step": 7697 + }, + { + "epoch": 0.7517578125, + "grad_norm": 0.1496713012456894, + "learning_rate": 0.00011686986446639805, + "loss": 4.4414, + "step": 7698 + }, + { + "epoch": 0.75185546875, + "grad_norm": 0.14912216365337372, + "learning_rate": 0.00011682003568364183, + "loss": 4.3906, + "step": 7699 + }, + { + "epoch": 0.751953125, + "grad_norm": 0.15376384556293488, + "learning_rate": 0.00011677022223535957, + "loss": 4.4336, + "step": 7700 + }, + { + "epoch": 0.75205078125, + "grad_norm": 0.15400327742099762, + "learning_rate": 0.00011672042412638037, + "loss": 4.3906, + "step": 7701 + }, + { + "epoch": 0.7521484375, + "grad_norm": 0.15102256834506989, + "learning_rate": 0.00011667064136153181, + "loss": 4.3945, + "step": 7702 + }, + { + "epoch": 0.75224609375, + "grad_norm": 0.1481059193611145, + "learning_rate": 0.00011662087394564002, + "loss": 4.4375, + "step": 7703 + }, + { + "epoch": 0.75234375, + "grad_norm": 0.14933830499649048, + "learning_rate": 0.00011657112188352955, + "loss": 4.4141, + "step": 7704 + }, + { + "epoch": 0.75244140625, + "grad_norm": 0.14884594082832336, + "learning_rate": 0.00011652138518002353, + "loss": 4.4141, + "step": 7705 + }, + { + "epoch": 0.7525390625, + "grad_norm": 0.1450316458940506, + "learning_rate": 0.0001164716638399436, + "loss": 4.4102, + "step": 7706 + }, + { + "epoch": 0.75263671875, + "grad_norm": 0.1490420401096344, + "learning_rate": 0.00011642195786810997, + "loss": 4.4531, + "step": 7707 + }, + { + "epoch": 0.752734375, + 
"grad_norm": 0.14149333536624908, + "learning_rate": 0.00011637226726934124, + "loss": 4.4453, + "step": 7708 + }, + { + "epoch": 0.75283203125, + "grad_norm": 0.14931902289390564, + "learning_rate": 0.00011632259204845458, + "loss": 4.4023, + "step": 7709 + }, + { + "epoch": 0.7529296875, + "grad_norm": 0.15479913353919983, + "learning_rate": 0.00011627293221026567, + "loss": 4.3828, + "step": 7710 + }, + { + "epoch": 0.75302734375, + "grad_norm": 0.15400630235671997, + "learning_rate": 0.00011622328775958872, + "loss": 4.3984, + "step": 7711 + }, + { + "epoch": 0.753125, + "grad_norm": 0.16265511512756348, + "learning_rate": 0.00011617365870123638, + "loss": 4.4219, + "step": 7712 + }, + { + "epoch": 0.75322265625, + "grad_norm": 0.15772366523742676, + "learning_rate": 0.00011612404504001993, + "loss": 4.4453, + "step": 7713 + }, + { + "epoch": 0.7533203125, + "grad_norm": 0.15334032475948334, + "learning_rate": 0.00011607444678074897, + "loss": 4.4062, + "step": 7714 + }, + { + "epoch": 0.75341796875, + "grad_norm": 0.15696145594120026, + "learning_rate": 0.00011602486392823177, + "loss": 4.418, + "step": 7715 + }, + { + "epoch": 0.753515625, + "grad_norm": 0.14874203503131866, + "learning_rate": 0.00011597529648727503, + "loss": 4.4414, + "step": 7716 + }, + { + "epoch": 0.75361328125, + "grad_norm": 0.1538916379213333, + "learning_rate": 0.00011592574446268399, + "loss": 4.3984, + "step": 7717 + }, + { + "epoch": 0.7537109375, + "grad_norm": 0.15491442382335663, + "learning_rate": 0.00011587620785926237, + "loss": 4.4375, + "step": 7718 + }, + { + "epoch": 0.75380859375, + "grad_norm": 0.15426000952720642, + "learning_rate": 0.00011582668668181246, + "loss": 4.3828, + "step": 7719 + }, + { + "epoch": 0.75390625, + "grad_norm": 0.1598297655582428, + "learning_rate": 0.00011577718093513496, + "loss": 4.3867, + "step": 7720 + }, + { + "epoch": 0.75400390625, + "grad_norm": 0.15339218080043793, + "learning_rate": 0.00011572769062402909, + "loss": 4.4531, + "step": 7721 + }, + { + "epoch": 0.7541015625, + "grad_norm": 0.16284044086933136, + "learning_rate": 0.00011567821575329263, + "loss": 4.4219, + "step": 7722 + }, + { + "epoch": 0.75419921875, + "grad_norm": 0.16056625545024872, + "learning_rate": 0.00011562875632772193, + "loss": 4.4297, + "step": 7723 + }, + { + "epoch": 0.754296875, + "grad_norm": 0.14642269909381866, + "learning_rate": 0.00011557931235211155, + "loss": 4.3789, + "step": 7724 + }, + { + "epoch": 0.75439453125, + "grad_norm": 0.15117672085762024, + "learning_rate": 0.00011552988383125486, + "loss": 4.3867, + "step": 7725 + }, + { + "epoch": 0.7544921875, + "grad_norm": 0.15217478573322296, + "learning_rate": 0.0001154804707699436, + "loss": 4.3984, + "step": 7726 + }, + { + "epoch": 0.75458984375, + "grad_norm": 0.14905865490436554, + "learning_rate": 0.00011543107317296806, + "loss": 4.4102, + "step": 7727 + }, + { + "epoch": 0.7546875, + "grad_norm": 0.15292255580425262, + "learning_rate": 0.00011538169104511695, + "loss": 4.4336, + "step": 7728 + }, + { + "epoch": 0.75478515625, + "grad_norm": 0.15601669251918793, + "learning_rate": 0.00011533232439117757, + "loss": 4.418, + "step": 7729 + }, + { + "epoch": 0.7548828125, + "grad_norm": 0.15403839945793152, + "learning_rate": 0.00011528297321593568, + "loss": 4.4297, + "step": 7730 + }, + { + "epoch": 0.75498046875, + "grad_norm": 0.1519792675971985, + "learning_rate": 0.00011523363752417553, + "loss": 4.4531, + "step": 7731 + }, + { + "epoch": 0.755078125, + "grad_norm": 0.15321366488933563, + "learning_rate": 
0.00011518431732067989, + "loss": 4.4141, + "step": 7732 + }, + { + "epoch": 0.75517578125, + "grad_norm": 0.1468295454978943, + "learning_rate": 0.00011513501261023007, + "loss": 4.3867, + "step": 7733 + }, + { + "epoch": 0.7552734375, + "grad_norm": 0.15129777789115906, + "learning_rate": 0.0001150857233976057, + "loss": 4.4062, + "step": 7734 + }, + { + "epoch": 0.75537109375, + "grad_norm": 0.15185008943080902, + "learning_rate": 0.0001150364496875851, + "loss": 4.3984, + "step": 7735 + }, + { + "epoch": 0.75546875, + "grad_norm": 0.15787598490715027, + "learning_rate": 0.00011498719148494505, + "loss": 4.3789, + "step": 7736 + }, + { + "epoch": 0.75556640625, + "grad_norm": 0.14661623537540436, + "learning_rate": 0.00011493794879446076, + "loss": 4.3984, + "step": 7737 + }, + { + "epoch": 0.7556640625, + "grad_norm": 0.1511438488960266, + "learning_rate": 0.00011488872162090599, + "loss": 4.3672, + "step": 7738 + }, + { + "epoch": 0.75576171875, + "grad_norm": 0.14960454404354095, + "learning_rate": 0.00011483950996905299, + "loss": 4.3906, + "step": 7739 + }, + { + "epoch": 0.755859375, + "grad_norm": 0.1513562798500061, + "learning_rate": 0.00011479031384367248, + "loss": 4.3945, + "step": 7740 + }, + { + "epoch": 0.75595703125, + "grad_norm": 0.14876914024353027, + "learning_rate": 0.00011474113324953371, + "loss": 4.3789, + "step": 7741 + }, + { + "epoch": 0.7560546875, + "grad_norm": 0.14915242791175842, + "learning_rate": 0.00011469196819140437, + "loss": 4.4023, + "step": 7742 + }, + { + "epoch": 0.75615234375, + "grad_norm": 0.15721851587295532, + "learning_rate": 0.00011464281867405078, + "loss": 4.3984, + "step": 7743 + }, + { + "epoch": 0.75625, + "grad_norm": 0.14980484545230865, + "learning_rate": 0.0001145936847022375, + "loss": 4.3828, + "step": 7744 + }, + { + "epoch": 0.75634765625, + "grad_norm": 0.1525813341140747, + "learning_rate": 0.00011454456628072779, + "loss": 4.4297, + "step": 7745 + }, + { + "epoch": 0.7564453125, + "grad_norm": 0.15022605657577515, + "learning_rate": 0.0001144954634142834, + "loss": 4.4414, + "step": 7746 + }, + { + "epoch": 0.75654296875, + "grad_norm": 0.15064971148967743, + "learning_rate": 0.00011444637610766446, + "loss": 4.4375, + "step": 7747 + }, + { + "epoch": 0.756640625, + "grad_norm": 0.15191814303398132, + "learning_rate": 0.00011439730436562967, + "loss": 4.4062, + "step": 7748 + }, + { + "epoch": 0.75673828125, + "grad_norm": 0.15857632458209991, + "learning_rate": 0.00011434824819293622, + "loss": 4.3906, + "step": 7749 + }, + { + "epoch": 0.7568359375, + "grad_norm": 0.14877262711524963, + "learning_rate": 0.00011429920759433977, + "loss": 4.4453, + "step": 7750 + }, + { + "epoch": 0.75693359375, + "grad_norm": 0.15941143035888672, + "learning_rate": 0.00011425018257459445, + "loss": 4.4336, + "step": 7751 + }, + { + "epoch": 0.75703125, + "grad_norm": 0.14817331731319427, + "learning_rate": 0.000114201173138453, + "loss": 4.3867, + "step": 7752 + }, + { + "epoch": 0.75712890625, + "grad_norm": 0.16098734736442566, + "learning_rate": 0.00011415217929066638, + "loss": 4.4375, + "step": 7753 + }, + { + "epoch": 0.7572265625, + "grad_norm": 0.15675540268421173, + "learning_rate": 0.00011410320103598433, + "loss": 4.4141, + "step": 7754 + }, + { + "epoch": 0.75732421875, + "grad_norm": 0.15594036877155304, + "learning_rate": 0.00011405423837915492, + "loss": 4.457, + "step": 7755 + }, + { + "epoch": 0.757421875, + "grad_norm": 0.15575385093688965, + "learning_rate": 0.00011400529132492473, + "loss": 4.4336, + "step": 7756 + }, 
+ { + "epoch": 0.75751953125, + "grad_norm": 0.15043160319328308, + "learning_rate": 0.0001139563598780389, + "loss": 4.4023, + "step": 7757 + }, + { + "epoch": 0.7576171875, + "grad_norm": 0.15328778326511383, + "learning_rate": 0.00011390744404324097, + "loss": 4.4141, + "step": 7758 + }, + { + "epoch": 0.75771484375, + "grad_norm": 0.1560472846031189, + "learning_rate": 0.00011385854382527299, + "loss": 4.4414, + "step": 7759 + }, + { + "epoch": 0.7578125, + "grad_norm": 0.14814290404319763, + "learning_rate": 0.00011380965922887558, + "loss": 4.4336, + "step": 7760 + }, + { + "epoch": 0.75791015625, + "grad_norm": 0.15976668894290924, + "learning_rate": 0.00011376079025878766, + "loss": 4.4062, + "step": 7761 + }, + { + "epoch": 0.7580078125, + "grad_norm": 0.16271643340587616, + "learning_rate": 0.00011371193691974676, + "loss": 4.4297, + "step": 7762 + }, + { + "epoch": 0.75810546875, + "grad_norm": 0.15674586594104767, + "learning_rate": 0.00011366309921648893, + "loss": 4.4492, + "step": 7763 + }, + { + "epoch": 0.758203125, + "grad_norm": 0.16139164566993713, + "learning_rate": 0.00011361427715374864, + "loss": 4.4219, + "step": 7764 + }, + { + "epoch": 0.75830078125, + "grad_norm": 0.1472456455230713, + "learning_rate": 0.00011356547073625884, + "loss": 4.375, + "step": 7765 + }, + { + "epoch": 0.7583984375, + "grad_norm": 0.1630847156047821, + "learning_rate": 0.00011351667996875101, + "loss": 4.4023, + "step": 7766 + }, + { + "epoch": 0.75849609375, + "grad_norm": 0.1479683220386505, + "learning_rate": 0.00011346790485595505, + "loss": 4.3984, + "step": 7767 + }, + { + "epoch": 0.75859375, + "grad_norm": 0.16432972252368927, + "learning_rate": 0.00011341914540259938, + "loss": 4.4414, + "step": 7768 + }, + { + "epoch": 0.75869140625, + "grad_norm": 0.16671431064605713, + "learning_rate": 0.00011337040161341099, + "loss": 4.4336, + "step": 7769 + }, + { + "epoch": 0.7587890625, + "grad_norm": 0.16193194687366486, + "learning_rate": 0.00011332167349311511, + "loss": 4.4141, + "step": 7770 + }, + { + "epoch": 0.75888671875, + "grad_norm": 0.17365039885044098, + "learning_rate": 0.00011327296104643568, + "loss": 4.375, + "step": 7771 + }, + { + "epoch": 0.758984375, + "grad_norm": 0.15110251307487488, + "learning_rate": 0.00011322426427809504, + "loss": 4.4023, + "step": 7772 + }, + { + "epoch": 0.75908203125, + "grad_norm": 0.16494342684745789, + "learning_rate": 0.000113175583192814, + "loss": 4.3945, + "step": 7773 + }, + { + "epoch": 0.7591796875, + "grad_norm": 0.15644890069961548, + "learning_rate": 0.00011312691779531184, + "loss": 4.4258, + "step": 7774 + }, + { + "epoch": 0.75927734375, + "grad_norm": 0.15525849163532257, + "learning_rate": 0.00011307826809030639, + "loss": 4.4062, + "step": 7775 + }, + { + "epoch": 0.759375, + "grad_norm": 0.15707316994667053, + "learning_rate": 0.00011302963408251388, + "loss": 4.4297, + "step": 7776 + }, + { + "epoch": 0.75947265625, + "grad_norm": 0.15604551136493683, + "learning_rate": 0.00011298101577664909, + "loss": 4.4492, + "step": 7777 + }, + { + "epoch": 0.7595703125, + "grad_norm": 0.14613108336925507, + "learning_rate": 0.00011293241317742517, + "loss": 4.3906, + "step": 7778 + }, + { + "epoch": 0.75966796875, + "grad_norm": 0.15320517122745514, + "learning_rate": 0.00011288382628955381, + "loss": 4.4492, + "step": 7779 + }, + { + "epoch": 0.759765625, + "grad_norm": 0.155707448720932, + "learning_rate": 0.00011283525511774518, + "loss": 4.418, + "step": 7780 + }, + { + "epoch": 0.75986328125, + "grad_norm": 
0.15629521012306213, + "learning_rate": 0.00011278669966670798, + "loss": 4.3984, + "step": 7781 + }, + { + "epoch": 0.7599609375, + "grad_norm": 0.16102848947048187, + "learning_rate": 0.00011273815994114932, + "loss": 4.3945, + "step": 7782 + }, + { + "epoch": 0.76005859375, + "grad_norm": 0.1570339947938919, + "learning_rate": 0.00011268963594577476, + "loss": 4.4219, + "step": 7783 + }, + { + "epoch": 0.76015625, + "grad_norm": 0.15114130079746246, + "learning_rate": 0.00011264112768528838, + "loss": 4.4336, + "step": 7784 + }, + { + "epoch": 0.76025390625, + "grad_norm": 0.15755100548267365, + "learning_rate": 0.00011259263516439282, + "loss": 4.3906, + "step": 7785 + }, + { + "epoch": 0.7603515625, + "grad_norm": 0.15270864963531494, + "learning_rate": 0.00011254415838778897, + "loss": 4.418, + "step": 7786 + }, + { + "epoch": 0.76044921875, + "grad_norm": 0.14402668178081512, + "learning_rate": 0.00011249569736017635, + "loss": 4.4219, + "step": 7787 + }, + { + "epoch": 0.760546875, + "grad_norm": 0.15120914578437805, + "learning_rate": 0.00011244725208625293, + "loss": 4.4375, + "step": 7788 + }, + { + "epoch": 0.76064453125, + "grad_norm": 0.14426374435424805, + "learning_rate": 0.00011239882257071521, + "loss": 4.3867, + "step": 7789 + }, + { + "epoch": 0.7607421875, + "grad_norm": 0.15256813168525696, + "learning_rate": 0.00011235040881825805, + "loss": 4.418, + "step": 7790 + }, + { + "epoch": 0.76083984375, + "grad_norm": 0.14867115020751953, + "learning_rate": 0.00011230201083357482, + "loss": 4.4023, + "step": 7791 + }, + { + "epoch": 0.7609375, + "grad_norm": 0.15806671977043152, + "learning_rate": 0.00011225362862135747, + "loss": 4.4141, + "step": 7792 + }, + { + "epoch": 0.76103515625, + "grad_norm": 0.1459573358297348, + "learning_rate": 0.00011220526218629621, + "loss": 4.4258, + "step": 7793 + }, + { + "epoch": 0.7611328125, + "grad_norm": 0.1485978066921234, + "learning_rate": 0.00011215691153307998, + "loss": 4.4141, + "step": 7794 + }, + { + "epoch": 0.76123046875, + "grad_norm": 0.15408718585968018, + "learning_rate": 0.00011210857666639587, + "loss": 4.4336, + "step": 7795 + }, + { + "epoch": 0.761328125, + "grad_norm": 0.148426815867424, + "learning_rate": 0.00011206025759092971, + "loss": 4.3984, + "step": 7796 + }, + { + "epoch": 0.76142578125, + "grad_norm": 0.14146128296852112, + "learning_rate": 0.00011201195431136571, + "loss": 4.4219, + "step": 7797 + }, + { + "epoch": 0.7615234375, + "grad_norm": 0.14466623961925507, + "learning_rate": 0.00011196366683238654, + "loss": 4.3984, + "step": 7798 + }, + { + "epoch": 0.76162109375, + "grad_norm": 0.1502840220928192, + "learning_rate": 0.00011191539515867332, + "loss": 4.4219, + "step": 7799 + }, + { + "epoch": 0.76171875, + "grad_norm": 0.14737124741077423, + "learning_rate": 0.00011186713929490569, + "loss": 4.418, + "step": 7800 + }, + { + "epoch": 0.76181640625, + "grad_norm": 0.15488815307617188, + "learning_rate": 0.00011181889924576175, + "loss": 4.4375, + "step": 7801 + }, + { + "epoch": 0.7619140625, + "grad_norm": 0.15053331851959229, + "learning_rate": 0.00011177067501591803, + "loss": 4.4141, + "step": 7802 + }, + { + "epoch": 0.76201171875, + "grad_norm": 0.14859457314014435, + "learning_rate": 0.00011172246661004951, + "loss": 4.4141, + "step": 7803 + }, + { + "epoch": 0.762109375, + "grad_norm": 0.14972902834415436, + "learning_rate": 0.00011167427403282965, + "loss": 4.4492, + "step": 7804 + }, + { + "epoch": 0.76220703125, + "grad_norm": 0.16078399121761322, + "learning_rate": 
0.00011162609728893044, + "loss": 4.4414, + "step": 7805 + }, + { + "epoch": 0.7623046875, + "grad_norm": 0.14503246545791626, + "learning_rate": 0.00011157793638302227, + "loss": 4.4141, + "step": 7806 + }, + { + "epoch": 0.76240234375, + "grad_norm": 0.16284635663032532, + "learning_rate": 0.00011152979131977403, + "loss": 4.3945, + "step": 7807 + }, + { + "epoch": 0.7625, + "grad_norm": 0.15371201932430267, + "learning_rate": 0.00011148166210385303, + "loss": 4.4414, + "step": 7808 + }, + { + "epoch": 0.76259765625, + "grad_norm": 0.1519307941198349, + "learning_rate": 0.00011143354873992508, + "loss": 4.4062, + "step": 7809 + }, + { + "epoch": 0.7626953125, + "grad_norm": 0.1544850766658783, + "learning_rate": 0.00011138545123265449, + "loss": 4.4062, + "step": 7810 + }, + { + "epoch": 0.76279296875, + "grad_norm": 0.14631684124469757, + "learning_rate": 0.00011133736958670398, + "loss": 4.4102, + "step": 7811 + }, + { + "epoch": 0.762890625, + "grad_norm": 0.15142768621444702, + "learning_rate": 0.00011128930380673464, + "loss": 4.418, + "step": 7812 + }, + { + "epoch": 0.76298828125, + "grad_norm": 0.15210208296775818, + "learning_rate": 0.0001112412538974062, + "loss": 4.418, + "step": 7813 + }, + { + "epoch": 0.7630859375, + "grad_norm": 0.1433621495962143, + "learning_rate": 0.00011119321986337678, + "loss": 4.4336, + "step": 7814 + }, + { + "epoch": 0.76318359375, + "grad_norm": 0.15080025792121887, + "learning_rate": 0.00011114520170930291, + "loss": 4.4023, + "step": 7815 + }, + { + "epoch": 0.76328125, + "grad_norm": 0.15594029426574707, + "learning_rate": 0.00011109719943983964, + "loss": 4.4258, + "step": 7816 + }, + { + "epoch": 0.76337890625, + "grad_norm": 0.15444600582122803, + "learning_rate": 0.00011104921305964047, + "loss": 4.4141, + "step": 7817 + }, + { + "epoch": 0.7634765625, + "grad_norm": 0.14385788142681122, + "learning_rate": 0.00011100124257335739, + "loss": 4.4023, + "step": 7818 + }, + { + "epoch": 0.76357421875, + "grad_norm": 0.15716509521007538, + "learning_rate": 0.00011095328798564083, + "loss": 4.4297, + "step": 7819 + }, + { + "epoch": 0.763671875, + "grad_norm": 0.15395821630954742, + "learning_rate": 0.00011090534930113955, + "loss": 4.4023, + "step": 7820 + }, + { + "epoch": 0.76376953125, + "grad_norm": 0.14872874319553375, + "learning_rate": 0.00011085742652450093, + "loss": 4.4102, + "step": 7821 + }, + { + "epoch": 0.7638671875, + "grad_norm": 0.1506529599428177, + "learning_rate": 0.00011080951966037075, + "loss": 4.418, + "step": 7822 + }, + { + "epoch": 0.76396484375, + "grad_norm": 0.15076130628585815, + "learning_rate": 0.00011076162871339331, + "loss": 4.4102, + "step": 7823 + }, + { + "epoch": 0.7640625, + "grad_norm": 0.14846675097942352, + "learning_rate": 0.00011071375368821127, + "loss": 4.3906, + "step": 7824 + }, + { + "epoch": 0.76416015625, + "grad_norm": 0.14685922861099243, + "learning_rate": 0.00011066589458946579, + "loss": 4.4453, + "step": 7825 + }, + { + "epoch": 0.7642578125, + "grad_norm": 0.14688707888126373, + "learning_rate": 0.00011061805142179649, + "loss": 4.4062, + "step": 7826 + }, + { + "epoch": 0.76435546875, + "grad_norm": 0.14835844933986664, + "learning_rate": 0.0001105702241898415, + "loss": 4.4336, + "step": 7827 + }, + { + "epoch": 0.764453125, + "grad_norm": 0.1547151505947113, + "learning_rate": 0.00011052241289823724, + "loss": 4.4102, + "step": 7828 + }, + { + "epoch": 0.76455078125, + "grad_norm": 0.15598632395267487, + "learning_rate": 0.00011047461755161873, + "loss": 4.4375, + "step": 7829 + }, 
+ { + "epoch": 0.7646484375, + "grad_norm": 0.15280264616012573, + "learning_rate": 0.0001104268381546194, + "loss": 4.4414, + "step": 7830 + }, + { + "epoch": 0.76474609375, + "grad_norm": 0.154519185423851, + "learning_rate": 0.00011037907471187114, + "loss": 4.4141, + "step": 7831 + }, + { + "epoch": 0.76484375, + "grad_norm": 0.15622346103191376, + "learning_rate": 0.00011033132722800433, + "loss": 4.3984, + "step": 7832 + }, + { + "epoch": 0.76494140625, + "grad_norm": 0.15338046848773956, + "learning_rate": 0.00011028359570764774, + "loss": 4.4023, + "step": 7833 + }, + { + "epoch": 0.7650390625, + "grad_norm": 0.15551722049713135, + "learning_rate": 0.00011023588015542857, + "loss": 4.3945, + "step": 7834 + }, + { + "epoch": 0.76513671875, + "grad_norm": 0.1500433087348938, + "learning_rate": 0.0001101881805759726, + "loss": 4.4453, + "step": 7835 + }, + { + "epoch": 0.765234375, + "grad_norm": 0.15247154235839844, + "learning_rate": 0.00011014049697390399, + "loss": 4.4102, + "step": 7836 + }, + { + "epoch": 0.76533203125, + "grad_norm": 0.1543939858675003, + "learning_rate": 0.00011009282935384523, + "loss": 4.4219, + "step": 7837 + }, + { + "epoch": 0.7654296875, + "grad_norm": 0.1505936086177826, + "learning_rate": 0.00011004517772041744, + "loss": 4.3906, + "step": 7838 + }, + { + "epoch": 0.76552734375, + "grad_norm": 0.15735210478305817, + "learning_rate": 0.00010999754207824011, + "loss": 4.3867, + "step": 7839 + }, + { + "epoch": 0.765625, + "grad_norm": 0.15314485132694244, + "learning_rate": 0.00010994992243193123, + "loss": 4.4102, + "step": 7840 + }, + { + "epoch": 0.76572265625, + "grad_norm": 0.16316133737564087, + "learning_rate": 0.00010990231878610714, + "loss": 4.4062, + "step": 7841 + }, + { + "epoch": 0.7658203125, + "grad_norm": 0.15276256203651428, + "learning_rate": 0.00010985473114538272, + "loss": 4.4336, + "step": 7842 + }, + { + "epoch": 0.76591796875, + "grad_norm": 0.15129053592681885, + "learning_rate": 0.00010980715951437129, + "loss": 4.4453, + "step": 7843 + }, + { + "epoch": 0.766015625, + "grad_norm": 0.14984939992427826, + "learning_rate": 0.00010975960389768464, + "loss": 4.4062, + "step": 7844 + }, + { + "epoch": 0.76611328125, + "grad_norm": 0.1562565416097641, + "learning_rate": 0.00010971206429993283, + "loss": 4.3984, + "step": 7845 + }, + { + "epoch": 0.7662109375, + "grad_norm": 0.15165916085243225, + "learning_rate": 0.00010966454072572458, + "loss": 4.418, + "step": 7846 + }, + { + "epoch": 0.76630859375, + "grad_norm": 0.15228058397769928, + "learning_rate": 0.00010961703317966699, + "loss": 4.4023, + "step": 7847 + }, + { + "epoch": 0.76640625, + "grad_norm": 0.15105362236499786, + "learning_rate": 0.00010956954166636552, + "loss": 4.4375, + "step": 7848 + }, + { + "epoch": 0.76650390625, + "grad_norm": 0.15260683000087738, + "learning_rate": 0.00010952206619042425, + "loss": 4.3906, + "step": 7849 + }, + { + "epoch": 0.7666015625, + "grad_norm": 0.144510418176651, + "learning_rate": 0.00010947460675644555, + "loss": 4.4062, + "step": 7850 + }, + { + "epoch": 0.76669921875, + "grad_norm": 0.1524255871772766, + "learning_rate": 0.0001094271633690303, + "loss": 4.4219, + "step": 7851 + }, + { + "epoch": 0.766796875, + "grad_norm": 0.15061767399311066, + "learning_rate": 0.00010937973603277781, + "loss": 4.4023, + "step": 7852 + }, + { + "epoch": 0.76689453125, + "grad_norm": 0.16052626073360443, + "learning_rate": 0.00010933232475228592, + "loss": 4.4141, + "step": 7853 + }, + { + "epoch": 0.7669921875, + "grad_norm": 
0.15028849244117737, + "learning_rate": 0.00010928492953215069, + "loss": 4.3984, + "step": 7854 + }, + { + "epoch": 0.76708984375, + "grad_norm": 0.15467461943626404, + "learning_rate": 0.0001092375503769668, + "loss": 4.4062, + "step": 7855 + }, + { + "epoch": 0.7671875, + "grad_norm": 0.14994756877422333, + "learning_rate": 0.0001091901872913274, + "loss": 4.4062, + "step": 7856 + }, + { + "epoch": 0.76728515625, + "grad_norm": 0.15013669431209564, + "learning_rate": 0.00010914284027982399, + "loss": 4.3906, + "step": 7857 + }, + { + "epoch": 0.7673828125, + "grad_norm": 0.15305913984775543, + "learning_rate": 0.00010909550934704653, + "loss": 4.3672, + "step": 7858 + }, + { + "epoch": 0.76748046875, + "grad_norm": 0.1478315144777298, + "learning_rate": 0.00010904819449758343, + "loss": 4.3789, + "step": 7859 + }, + { + "epoch": 0.767578125, + "grad_norm": 0.15649138391017914, + "learning_rate": 0.00010900089573602159, + "loss": 4.4102, + "step": 7860 + }, + { + "epoch": 0.76767578125, + "grad_norm": 0.1518089473247528, + "learning_rate": 0.00010895361306694631, + "loss": 4.4258, + "step": 7861 + }, + { + "epoch": 0.7677734375, + "grad_norm": 0.1548473984003067, + "learning_rate": 0.00010890634649494122, + "loss": 4.4258, + "step": 7862 + }, + { + "epoch": 0.76787109375, + "grad_norm": 0.15157786011695862, + "learning_rate": 0.00010885909602458854, + "loss": 4.418, + "step": 7863 + }, + { + "epoch": 0.76796875, + "grad_norm": 0.14980442821979523, + "learning_rate": 0.00010881186166046891, + "loss": 4.4336, + "step": 7864 + }, + { + "epoch": 0.76806640625, + "grad_norm": 0.1479744166135788, + "learning_rate": 0.00010876464340716139, + "loss": 4.418, + "step": 7865 + }, + { + "epoch": 0.7681640625, + "grad_norm": 0.1648143231868744, + "learning_rate": 0.00010871744126924343, + "loss": 4.418, + "step": 7866 + }, + { + "epoch": 0.76826171875, + "grad_norm": 0.1530577540397644, + "learning_rate": 0.00010867025525129098, + "loss": 4.4141, + "step": 7867 + }, + { + "epoch": 0.768359375, + "grad_norm": 0.16015180945396423, + "learning_rate": 0.0001086230853578784, + "loss": 4.3945, + "step": 7868 + }, + { + "epoch": 0.76845703125, + "grad_norm": 0.1536371409893036, + "learning_rate": 0.00010857593159357848, + "loss": 4.3672, + "step": 7869 + }, + { + "epoch": 0.7685546875, + "grad_norm": 0.15430103242397308, + "learning_rate": 0.00010852879396296254, + "loss": 4.4258, + "step": 7870 + }, + { + "epoch": 0.76865234375, + "grad_norm": 0.15518680214881897, + "learning_rate": 0.00010848167247060014, + "loss": 4.418, + "step": 7871 + }, + { + "epoch": 0.76875, + "grad_norm": 0.1579345464706421, + "learning_rate": 0.0001084345671210594, + "loss": 4.4141, + "step": 7872 + }, + { + "epoch": 0.76884765625, + "grad_norm": 0.15723150968551636, + "learning_rate": 0.00010838747791890689, + "loss": 4.4023, + "step": 7873 + }, + { + "epoch": 0.7689453125, + "grad_norm": 0.1571406126022339, + "learning_rate": 0.00010834040486870761, + "loss": 4.4141, + "step": 7874 + }, + { + "epoch": 0.76904296875, + "grad_norm": 0.1627904176712036, + "learning_rate": 0.00010829334797502496, + "loss": 4.4414, + "step": 7875 + }, + { + "epoch": 0.769140625, + "grad_norm": 0.1564416140317917, + "learning_rate": 0.00010824630724242076, + "loss": 4.418, + "step": 7876 + }, + { + "epoch": 0.76923828125, + "grad_norm": 0.14608342945575714, + "learning_rate": 0.00010819928267545535, + "loss": 4.4102, + "step": 7877 + }, + { + "epoch": 0.7693359375, + "grad_norm": 0.15790441632270813, + "learning_rate": 0.00010815227427868743, + 
"loss": 4.4258, + "step": 7878 + }, + { + "epoch": 0.76943359375, + "grad_norm": 0.15160879492759705, + "learning_rate": 0.00010810528205667409, + "loss": 4.4375, + "step": 7879 + }, + { + "epoch": 0.76953125, + "grad_norm": 0.15393513441085815, + "learning_rate": 0.00010805830601397093, + "loss": 4.4336, + "step": 7880 + }, + { + "epoch": 0.76962890625, + "grad_norm": 0.15257467329502106, + "learning_rate": 0.00010801134615513195, + "loss": 4.4219, + "step": 7881 + }, + { + "epoch": 0.7697265625, + "grad_norm": 0.1517518013715744, + "learning_rate": 0.0001079644024847096, + "loss": 4.4336, + "step": 7882 + }, + { + "epoch": 0.76982421875, + "grad_norm": 0.14938884973526, + "learning_rate": 0.00010791747500725477, + "loss": 4.375, + "step": 7883 + }, + { + "epoch": 0.769921875, + "grad_norm": 0.15497249364852905, + "learning_rate": 0.00010787056372731673, + "loss": 4.4062, + "step": 7884 + }, + { + "epoch": 0.77001953125, + "grad_norm": 0.15080688893795013, + "learning_rate": 0.00010782366864944323, + "loss": 4.3828, + "step": 7885 + }, + { + "epoch": 0.7701171875, + "grad_norm": 0.1574479341506958, + "learning_rate": 0.00010777678977818051, + "loss": 4.4375, + "step": 7886 + }, + { + "epoch": 0.77021484375, + "grad_norm": 0.15152472257614136, + "learning_rate": 0.000107729927118073, + "loss": 4.4258, + "step": 7887 + }, + { + "epoch": 0.7703125, + "grad_norm": 0.15665918588638306, + "learning_rate": 0.00010768308067366379, + "loss": 4.3906, + "step": 7888 + }, + { + "epoch": 0.77041015625, + "grad_norm": 0.1520407795906067, + "learning_rate": 0.00010763625044949432, + "loss": 4.4336, + "step": 7889 + }, + { + "epoch": 0.7705078125, + "grad_norm": 0.15367025136947632, + "learning_rate": 0.00010758943645010447, + "loss": 4.4258, + "step": 7890 + }, + { + "epoch": 0.77060546875, + "grad_norm": 0.1544661819934845, + "learning_rate": 0.00010754263868003253, + "loss": 4.4062, + "step": 7891 + }, + { + "epoch": 0.770703125, + "grad_norm": 0.16274404525756836, + "learning_rate": 0.00010749585714381523, + "loss": 4.3945, + "step": 7892 + }, + { + "epoch": 0.77080078125, + "grad_norm": 0.14945141971111298, + "learning_rate": 0.00010744909184598772, + "loss": 4.4062, + "step": 7893 + }, + { + "epoch": 0.7708984375, + "grad_norm": 0.149311363697052, + "learning_rate": 0.00010740234279108363, + "loss": 4.3867, + "step": 7894 + }, + { + "epoch": 0.77099609375, + "grad_norm": 0.16241337358951569, + "learning_rate": 0.00010735560998363495, + "loss": 4.3984, + "step": 7895 + }, + { + "epoch": 0.77109375, + "grad_norm": 0.14538919925689697, + "learning_rate": 0.00010730889342817202, + "loss": 4.418, + "step": 7896 + }, + { + "epoch": 0.77119140625, + "grad_norm": 0.15652205049991608, + "learning_rate": 0.00010726219312922372, + "loss": 4.4258, + "step": 7897 + }, + { + "epoch": 0.7712890625, + "grad_norm": 0.15291301906108856, + "learning_rate": 0.00010721550909131738, + "loss": 4.4375, + "step": 7898 + }, + { + "epoch": 0.77138671875, + "grad_norm": 0.14782248437404633, + "learning_rate": 0.00010716884131897869, + "loss": 4.4219, + "step": 7899 + }, + { + "epoch": 0.771484375, + "grad_norm": 0.15983131527900696, + "learning_rate": 0.00010712218981673175, + "loss": 4.418, + "step": 7900 + }, + { + "epoch": 0.77158203125, + "grad_norm": 0.15554626286029816, + "learning_rate": 0.0001070755545890991, + "loss": 4.4062, + "step": 7901 + }, + { + "epoch": 0.7716796875, + "grad_norm": 0.15242211520671844, + "learning_rate": 0.00010702893564060176, + "loss": 4.4258, + "step": 7902 + }, + { + "epoch": 
0.77177734375, + "grad_norm": 0.14184489846229553, + "learning_rate": 0.00010698233297575913, + "loss": 4.3945, + "step": 7903 + }, + { + "epoch": 0.771875, + "grad_norm": 0.15113645792007446, + "learning_rate": 0.00010693574659908889, + "loss": 4.4062, + "step": 7904 + }, + { + "epoch": 0.77197265625, + "grad_norm": 0.1488281637430191, + "learning_rate": 0.00010688917651510741, + "loss": 4.3828, + "step": 7905 + }, + { + "epoch": 0.7720703125, + "grad_norm": 0.15390661358833313, + "learning_rate": 0.00010684262272832926, + "loss": 4.4219, + "step": 7906 + }, + { + "epoch": 0.77216796875, + "grad_norm": 0.14896152913570404, + "learning_rate": 0.00010679608524326756, + "loss": 4.3945, + "step": 7907 + }, + { + "epoch": 0.772265625, + "grad_norm": 0.15429389476776123, + "learning_rate": 0.00010674956406443378, + "loss": 4.4062, + "step": 7908 + }, + { + "epoch": 0.77236328125, + "grad_norm": 0.16481146216392517, + "learning_rate": 0.00010670305919633788, + "loss": 4.4102, + "step": 7909 + }, + { + "epoch": 0.7724609375, + "grad_norm": 0.14792270958423615, + "learning_rate": 0.00010665657064348813, + "loss": 4.4414, + "step": 7910 + }, + { + "epoch": 0.77255859375, + "grad_norm": 0.14153775572776794, + "learning_rate": 0.0001066100984103913, + "loss": 4.4219, + "step": 7911 + }, + { + "epoch": 0.77265625, + "grad_norm": 0.14986124634742737, + "learning_rate": 0.00010656364250155264, + "loss": 4.4219, + "step": 7912 + }, + { + "epoch": 0.77275390625, + "grad_norm": 0.1588495671749115, + "learning_rate": 0.0001065172029214756, + "loss": 4.4141, + "step": 7913 + }, + { + "epoch": 0.7728515625, + "grad_norm": 0.15765255689620972, + "learning_rate": 0.00010647077967466224, + "loss": 4.4258, + "step": 7914 + }, + { + "epoch": 0.77294921875, + "grad_norm": 0.15322215855121613, + "learning_rate": 0.00010642437276561298, + "loss": 4.3945, + "step": 7915 + }, + { + "epoch": 0.773046875, + "grad_norm": 0.1535881906747818, + "learning_rate": 0.00010637798219882664, + "loss": 4.418, + "step": 7916 + }, + { + "epoch": 0.77314453125, + "grad_norm": 0.15049166977405548, + "learning_rate": 0.00010633160797880048, + "loss": 4.3789, + "step": 7917 + }, + { + "epoch": 0.7732421875, + "grad_norm": 0.1515020728111267, + "learning_rate": 0.00010628525011003018, + "loss": 4.4062, + "step": 7918 + }, + { + "epoch": 0.77333984375, + "grad_norm": 0.1454581469297409, + "learning_rate": 0.0001062389085970098, + "loss": 4.3906, + "step": 7919 + }, + { + "epoch": 0.7734375, + "grad_norm": 0.15167170763015747, + "learning_rate": 0.00010619258344423194, + "loss": 4.4258, + "step": 7920 + }, + { + "epoch": 0.77353515625, + "grad_norm": 0.15352103114128113, + "learning_rate": 0.00010614627465618734, + "loss": 4.4531, + "step": 7921 + }, + { + "epoch": 0.7736328125, + "grad_norm": 0.15087415277957916, + "learning_rate": 0.00010609998223736539, + "loss": 4.4141, + "step": 7922 + }, + { + "epoch": 0.77373046875, + "grad_norm": 0.15437325835227966, + "learning_rate": 0.00010605370619225384, + "loss": 4.3945, + "step": 7923 + }, + { + "epoch": 0.773828125, + "grad_norm": 0.1550566554069519, + "learning_rate": 0.0001060074465253388, + "loss": 4.3711, + "step": 7924 + }, + { + "epoch": 0.77392578125, + "grad_norm": 0.1523357629776001, + "learning_rate": 0.0001059612032411049, + "loss": 4.3789, + "step": 7925 + }, + { + "epoch": 0.7740234375, + "grad_norm": 0.15385495126247406, + "learning_rate": 0.0001059149763440351, + "loss": 4.3828, + "step": 7926 + }, + { + "epoch": 0.77412109375, + "grad_norm": 0.15129749476909637, + 
"learning_rate": 0.00010586876583861073, + "loss": 4.3945, + "step": 7927 + }, + { + "epoch": 0.77421875, + "grad_norm": 0.14768925309181213, + "learning_rate": 0.00010582257172931159, + "loss": 4.3984, + "step": 7928 + }, + { + "epoch": 0.77431640625, + "grad_norm": 0.15236707031726837, + "learning_rate": 0.000105776394020616, + "loss": 4.4102, + "step": 7929 + }, + { + "epoch": 0.7744140625, + "grad_norm": 0.14552630484104156, + "learning_rate": 0.00010573023271700043, + "loss": 4.4062, + "step": 7930 + }, + { + "epoch": 0.77451171875, + "grad_norm": 0.15469224750995636, + "learning_rate": 0.00010568408782293996, + "loss": 4.4062, + "step": 7931 + }, + { + "epoch": 0.774609375, + "grad_norm": 0.15138240158557892, + "learning_rate": 0.000105637959342908, + "loss": 4.3945, + "step": 7932 + }, + { + "epoch": 0.77470703125, + "grad_norm": 0.15085656940937042, + "learning_rate": 0.00010559184728137645, + "loss": 4.4102, + "step": 7933 + }, + { + "epoch": 0.7748046875, + "grad_norm": 0.1579393446445465, + "learning_rate": 0.00010554575164281551, + "loss": 4.4023, + "step": 7934 + }, + { + "epoch": 0.77490234375, + "grad_norm": 0.1538194864988327, + "learning_rate": 0.00010549967243169386, + "loss": 4.3672, + "step": 7935 + }, + { + "epoch": 0.775, + "grad_norm": 0.14510883390903473, + "learning_rate": 0.00010545360965247857, + "loss": 4.4102, + "step": 7936 + }, + { + "epoch": 0.77509765625, + "grad_norm": 0.1600211262702942, + "learning_rate": 0.00010540756330963516, + "loss": 4.3945, + "step": 7937 + }, + { + "epoch": 0.7751953125, + "grad_norm": 0.15205062925815582, + "learning_rate": 0.00010536153340762742, + "loss": 4.4102, + "step": 7938 + }, + { + "epoch": 0.77529296875, + "grad_norm": 0.15917819738388062, + "learning_rate": 0.00010531551995091768, + "loss": 4.4414, + "step": 7939 + }, + { + "epoch": 0.775390625, + "grad_norm": 0.1518438160419464, + "learning_rate": 0.00010526952294396661, + "loss": 4.4375, + "step": 7940 + }, + { + "epoch": 0.77548828125, + "grad_norm": 0.149562269449234, + "learning_rate": 0.00010522354239123333, + "loss": 4.4141, + "step": 7941 + }, + { + "epoch": 0.7755859375, + "grad_norm": 0.15230610966682434, + "learning_rate": 0.00010517757829717531, + "loss": 4.3984, + "step": 7942 + }, + { + "epoch": 0.77568359375, + "grad_norm": 0.1568562537431717, + "learning_rate": 0.00010513163066624852, + "loss": 4.3906, + "step": 7943 + }, + { + "epoch": 0.77578125, + "grad_norm": 0.14640609920024872, + "learning_rate": 0.00010508569950290725, + "loss": 4.3906, + "step": 7944 + }, + { + "epoch": 0.77587890625, + "grad_norm": 0.1554626077413559, + "learning_rate": 0.00010503978481160425, + "loss": 4.4336, + "step": 7945 + }, + { + "epoch": 0.7759765625, + "grad_norm": 0.14640885591506958, + "learning_rate": 0.00010499388659679049, + "loss": 4.4062, + "step": 7946 + }, + { + "epoch": 0.77607421875, + "grad_norm": 0.1561599224805832, + "learning_rate": 0.00010494800486291564, + "loss": 4.4141, + "step": 7947 + }, + { + "epoch": 0.776171875, + "grad_norm": 0.1532311588525772, + "learning_rate": 0.00010490213961442757, + "loss": 4.4141, + "step": 7948 + }, + { + "epoch": 0.77626953125, + "grad_norm": 0.15718282759189606, + "learning_rate": 0.00010485629085577259, + "loss": 4.4102, + "step": 7949 + }, + { + "epoch": 0.7763671875, + "grad_norm": 0.14491264522075653, + "learning_rate": 0.00010481045859139543, + "loss": 4.4336, + "step": 7950 + }, + { + "epoch": 0.77646484375, + "grad_norm": 0.14690840244293213, + "learning_rate": 0.00010476464282573921, + "loss": 4.4375, + 
"step": 7951 + }, + { + "epoch": 0.7765625, + "grad_norm": 0.1669311672449112, + "learning_rate": 0.00010471884356324554, + "loss": 4.3906, + "step": 7952 + }, + { + "epoch": 0.77666015625, + "grad_norm": 0.1512814313173294, + "learning_rate": 0.00010467306080835425, + "loss": 4.4336, + "step": 7953 + }, + { + "epoch": 0.7767578125, + "grad_norm": 0.1496923714876175, + "learning_rate": 0.00010462729456550378, + "loss": 4.4023, + "step": 7954 + }, + { + "epoch": 0.77685546875, + "grad_norm": 0.1559845209121704, + "learning_rate": 0.00010458154483913071, + "loss": 4.418, + "step": 7955 + }, + { + "epoch": 0.776953125, + "grad_norm": 0.16076593101024628, + "learning_rate": 0.00010453581163367021, + "loss": 4.3633, + "step": 7956 + }, + { + "epoch": 0.77705078125, + "grad_norm": 0.15339908003807068, + "learning_rate": 0.00010449009495355585, + "loss": 4.4023, + "step": 7957 + }, + { + "epoch": 0.7771484375, + "grad_norm": 0.14783354103565216, + "learning_rate": 0.00010444439480321953, + "loss": 4.3789, + "step": 7958 + }, + { + "epoch": 0.77724609375, + "grad_norm": 0.14780405163764954, + "learning_rate": 0.00010439871118709156, + "loss": 4.4062, + "step": 7959 + }, + { + "epoch": 0.77734375, + "grad_norm": 0.14802132546901703, + "learning_rate": 0.00010435304410960069, + "loss": 4.4414, + "step": 7960 + }, + { + "epoch": 0.77744140625, + "grad_norm": 0.1543699949979782, + "learning_rate": 0.00010430739357517399, + "loss": 4.3945, + "step": 7961 + }, + { + "epoch": 0.7775390625, + "grad_norm": 0.15108270943164825, + "learning_rate": 0.00010426175958823696, + "loss": 4.4375, + "step": 7962 + }, + { + "epoch": 0.77763671875, + "grad_norm": 0.16081677377223969, + "learning_rate": 0.00010421614215321365, + "loss": 4.3945, + "step": 7963 + }, + { + "epoch": 0.777734375, + "grad_norm": 0.14908064901828766, + "learning_rate": 0.00010417054127452616, + "loss": 4.4023, + "step": 7964 + }, + { + "epoch": 0.77783203125, + "grad_norm": 0.14840863645076752, + "learning_rate": 0.00010412495695659529, + "loss": 4.3984, + "step": 7965 + }, + { + "epoch": 0.7779296875, + "grad_norm": 0.1466943472623825, + "learning_rate": 0.00010407938920384009, + "loss": 4.4102, + "step": 7966 + }, + { + "epoch": 0.77802734375, + "grad_norm": 0.14635570347309113, + "learning_rate": 0.00010403383802067805, + "loss": 4.4258, + "step": 7967 + }, + { + "epoch": 0.778125, + "grad_norm": 0.14545638859272003, + "learning_rate": 0.00010398830341152507, + "loss": 4.418, + "step": 7968 + }, + { + "epoch": 0.77822265625, + "grad_norm": 0.15191972255706787, + "learning_rate": 0.00010394278538079542, + "loss": 4.4141, + "step": 7969 + }, + { + "epoch": 0.7783203125, + "grad_norm": 0.14461171627044678, + "learning_rate": 0.00010389728393290177, + "loss": 4.4375, + "step": 7970 + }, + { + "epoch": 0.77841796875, + "grad_norm": 0.1448666751384735, + "learning_rate": 0.00010385179907225517, + "loss": 4.3984, + "step": 7971 + }, + { + "epoch": 0.778515625, + "grad_norm": 0.14332813024520874, + "learning_rate": 0.00010380633080326505, + "loss": 4.4062, + "step": 7972 + }, + { + "epoch": 0.77861328125, + "grad_norm": 0.1486864686012268, + "learning_rate": 0.00010376087913033932, + "loss": 4.4062, + "step": 7973 + }, + { + "epoch": 0.7787109375, + "grad_norm": 0.1537400782108307, + "learning_rate": 0.00010371544405788411, + "loss": 4.4023, + "step": 7974 + }, + { + "epoch": 0.77880859375, + "grad_norm": 0.14455735683441162, + "learning_rate": 0.00010367002559030406, + "loss": 4.3867, + "step": 7975 + }, + { + "epoch": 0.77890625, + "grad_norm": 
0.15289349853992462, + "learning_rate": 0.00010362462373200224, + "loss": 4.3711, + "step": 7976 + }, + { + "epoch": 0.77900390625, + "grad_norm": 0.15260332822799683, + "learning_rate": 0.00010357923848738, + "loss": 4.4062, + "step": 7977 + }, + { + "epoch": 0.7791015625, + "grad_norm": 0.14756973087787628, + "learning_rate": 0.00010353386986083716, + "loss": 4.4023, + "step": 7978 + }, + { + "epoch": 0.77919921875, + "grad_norm": 0.1454693228006363, + "learning_rate": 0.00010348851785677188, + "loss": 4.4141, + "step": 7979 + }, + { + "epoch": 0.779296875, + "grad_norm": 0.1537448912858963, + "learning_rate": 0.00010344318247958078, + "loss": 4.4102, + "step": 7980 + }, + { + "epoch": 0.77939453125, + "grad_norm": 0.15259911119937897, + "learning_rate": 0.00010339786373365876, + "loss": 4.4062, + "step": 7981 + }, + { + "epoch": 0.7794921875, + "grad_norm": 0.15229813754558563, + "learning_rate": 0.00010335256162339915, + "loss": 4.3945, + "step": 7982 + }, + { + "epoch": 0.77958984375, + "grad_norm": 0.15489859879016876, + "learning_rate": 0.0001033072761531938, + "loss": 4.4336, + "step": 7983 + }, + { + "epoch": 0.7796875, + "grad_norm": 0.15020382404327393, + "learning_rate": 0.00010326200732743272, + "loss": 4.4141, + "step": 7984 + }, + { + "epoch": 0.77978515625, + "grad_norm": 0.1597193330526352, + "learning_rate": 0.00010321675515050438, + "loss": 4.375, + "step": 7985 + }, + { + "epoch": 0.7798828125, + "grad_norm": 0.14284950494766235, + "learning_rate": 0.00010317151962679575, + "loss": 4.4102, + "step": 7986 + }, + { + "epoch": 0.77998046875, + "grad_norm": 0.15179817378520966, + "learning_rate": 0.0001031263007606921, + "loss": 4.4023, + "step": 7987 + }, + { + "epoch": 0.780078125, + "grad_norm": 0.15429659187793732, + "learning_rate": 0.00010308109855657707, + "loss": 4.418, + "step": 7988 + }, + { + "epoch": 0.78017578125, + "grad_norm": 0.15448278188705444, + "learning_rate": 0.00010303591301883274, + "loss": 4.3945, + "step": 7989 + }, + { + "epoch": 0.7802734375, + "grad_norm": 0.15420667827129364, + "learning_rate": 0.00010299074415183952, + "loss": 4.4023, + "step": 7990 + }, + { + "epoch": 0.78037109375, + "grad_norm": 0.15149520337581635, + "learning_rate": 0.0001029455919599762, + "loss": 4.3945, + "step": 7991 + }, + { + "epoch": 0.78046875, + "grad_norm": 0.16243340075016022, + "learning_rate": 0.00010290045644762002, + "loss": 4.4492, + "step": 7992 + }, + { + "epoch": 0.78056640625, + "grad_norm": 0.15751424431800842, + "learning_rate": 0.00010285533761914659, + "loss": 4.4297, + "step": 7993 + }, + { + "epoch": 0.7806640625, + "grad_norm": 0.15373574197292328, + "learning_rate": 0.00010281023547892979, + "loss": 4.4023, + "step": 7994 + }, + { + "epoch": 0.78076171875, + "grad_norm": 0.16295623779296875, + "learning_rate": 0.00010276515003134199, + "loss": 4.4297, + "step": 7995 + }, + { + "epoch": 0.780859375, + "grad_norm": 0.15023791790008545, + "learning_rate": 0.00010272008128075394, + "loss": 4.3828, + "step": 7996 + }, + { + "epoch": 0.78095703125, + "grad_norm": 0.14861923456192017, + "learning_rate": 0.00010267502923153474, + "loss": 4.3867, + "step": 7997 + }, + { + "epoch": 0.7810546875, + "grad_norm": 0.15781669318675995, + "learning_rate": 0.00010262999388805188, + "loss": 4.3789, + "step": 7998 + }, + { + "epoch": 0.78115234375, + "grad_norm": 0.15433287620544434, + "learning_rate": 0.00010258497525467123, + "loss": 4.3984, + "step": 7999 + }, + { + "epoch": 0.78125, + "grad_norm": 0.16641157865524292, + "learning_rate": 
0.00010253997333575705, + "loss": 4.4492, + "step": 8000 + }, + { + "epoch": 0.78134765625, + "grad_norm": 0.14971871674060822, + "learning_rate": 0.00010249498813567194, + "loss": 4.4062, + "step": 8001 + }, + { + "epoch": 0.7814453125, + "grad_norm": 0.15993599593639374, + "learning_rate": 0.00010245001965877698, + "loss": 4.3906, + "step": 8002 + }, + { + "epoch": 0.78154296875, + "grad_norm": 0.14609889686107635, + "learning_rate": 0.00010240506790943154, + "loss": 4.3984, + "step": 8003 + }, + { + "epoch": 0.781640625, + "grad_norm": 0.15671391785144806, + "learning_rate": 0.00010236013289199331, + "loss": 4.4102, + "step": 8004 + }, + { + "epoch": 0.78173828125, + "grad_norm": 0.1552707850933075, + "learning_rate": 0.0001023152146108185, + "loss": 4.4297, + "step": 8005 + }, + { + "epoch": 0.7818359375, + "grad_norm": 0.8937666416168213, + "learning_rate": 0.0001022703130702616, + "loss": 4.4531, + "step": 8006 + }, + { + "epoch": 0.78193359375, + "grad_norm": 0.155028834939003, + "learning_rate": 0.00010222542827467555, + "loss": 4.4297, + "step": 8007 + }, + { + "epoch": 0.78203125, + "grad_norm": 0.16029059886932373, + "learning_rate": 0.00010218056022841157, + "loss": 4.4102, + "step": 8008 + }, + { + "epoch": 0.78212890625, + "grad_norm": 0.16885076463222504, + "learning_rate": 0.00010213570893581937, + "loss": 4.4492, + "step": 8009 + }, + { + "epoch": 0.7822265625, + "grad_norm": 0.16435237228870392, + "learning_rate": 0.00010209087440124697, + "loss": 4.4062, + "step": 8010 + }, + { + "epoch": 0.78232421875, + "grad_norm": 0.1647331863641739, + "learning_rate": 0.00010204605662904074, + "loss": 4.4297, + "step": 8011 + }, + { + "epoch": 0.782421875, + "grad_norm": 0.168497234582901, + "learning_rate": 0.00010200125562354546, + "loss": 4.418, + "step": 8012 + }, + { + "epoch": 0.78251953125, + "grad_norm": 0.16665160655975342, + "learning_rate": 0.00010195647138910438, + "loss": 4.4023, + "step": 8013 + }, + { + "epoch": 0.7826171875, + "grad_norm": 0.15061305463314056, + "learning_rate": 0.0001019117039300589, + "loss": 4.4102, + "step": 8014 + }, + { + "epoch": 0.78271484375, + "grad_norm": 0.15036433935165405, + "learning_rate": 0.00010186695325074894, + "loss": 4.3594, + "step": 8015 + }, + { + "epoch": 0.7828125, + "grad_norm": 0.15365125238895416, + "learning_rate": 0.00010182221935551284, + "loss": 4.4336, + "step": 8016 + }, + { + "epoch": 0.78291015625, + "grad_norm": 0.14880138635635376, + "learning_rate": 0.0001017775022486872, + "loss": 4.4219, + "step": 8017 + }, + { + "epoch": 0.7830078125, + "grad_norm": 0.1543377786874771, + "learning_rate": 0.00010173280193460704, + "loss": 4.3789, + "step": 8018 + }, + { + "epoch": 0.78310546875, + "grad_norm": 0.14707376062870026, + "learning_rate": 0.00010168811841760576, + "loss": 4.4102, + "step": 8019 + }, + { + "epoch": 0.783203125, + "grad_norm": 0.14793191850185394, + "learning_rate": 0.00010164345170201516, + "loss": 4.375, + "step": 8020 + }, + { + "epoch": 0.78330078125, + "grad_norm": 0.14800989627838135, + "learning_rate": 0.00010159880179216539, + "loss": 4.418, + "step": 8021 + }, + { + "epoch": 0.7833984375, + "grad_norm": 0.1582210808992386, + "learning_rate": 0.00010155416869238482, + "loss": 4.3867, + "step": 8022 + }, + { + "epoch": 0.78349609375, + "grad_norm": 0.14393094182014465, + "learning_rate": 0.00010150955240700047, + "loss": 4.4102, + "step": 8023 + }, + { + "epoch": 0.78359375, + "grad_norm": 0.15178582072257996, + "learning_rate": 0.0001014649529403375, + "loss": 4.3984, + "step": 8024 + }, + { 
+ "epoch": 0.78369140625, + "grad_norm": 0.14859360456466675, + "learning_rate": 0.00010142037029671954, + "loss": 4.4062, + "step": 8025 + }, + { + "epoch": 0.7837890625, + "grad_norm": 0.22755299508571625, + "learning_rate": 0.00010137580448046861, + "loss": 4.418, + "step": 8026 + }, + { + "epoch": 0.78388671875, + "grad_norm": 0.1503019630908966, + "learning_rate": 0.00010133125549590504, + "loss": 4.3789, + "step": 8027 + }, + { + "epoch": 0.783984375, + "grad_norm": 0.150207981467247, + "learning_rate": 0.00010128672334734756, + "loss": 4.4023, + "step": 8028 + }, + { + "epoch": 0.78408203125, + "grad_norm": 0.14693683385849, + "learning_rate": 0.00010124220803911326, + "loss": 4.3945, + "step": 8029 + }, + { + "epoch": 0.7841796875, + "grad_norm": 0.15031445026397705, + "learning_rate": 0.00010119770957551765, + "loss": 4.3672, + "step": 8030 + }, + { + "epoch": 0.78427734375, + "grad_norm": 0.1471838504076004, + "learning_rate": 0.00010115322796087442, + "loss": 4.4102, + "step": 8031 + }, + { + "epoch": 0.784375, + "grad_norm": 0.14523105323314667, + "learning_rate": 0.00010110876319949584, + "loss": 4.4023, + "step": 8032 + }, + { + "epoch": 0.78447265625, + "grad_norm": 0.15540798008441925, + "learning_rate": 0.00010106431529569249, + "loss": 4.4102, + "step": 8033 + }, + { + "epoch": 0.7845703125, + "grad_norm": 0.1465175449848175, + "learning_rate": 0.00010101988425377326, + "loss": 4.375, + "step": 8034 + }, + { + "epoch": 0.78466796875, + "grad_norm": 0.14395655691623688, + "learning_rate": 0.00010097547007804545, + "loss": 4.375, + "step": 8035 + }, + { + "epoch": 0.784765625, + "grad_norm": 0.15243244171142578, + "learning_rate": 0.00010093107277281468, + "loss": 4.3789, + "step": 8036 + }, + { + "epoch": 0.78486328125, + "grad_norm": 0.148013174533844, + "learning_rate": 0.00010088669234238501, + "loss": 4.3984, + "step": 8037 + }, + { + "epoch": 0.7849609375, + "grad_norm": 0.15119220316410065, + "learning_rate": 0.00010084232879105887, + "loss": 4.4258, + "step": 8038 + }, + { + "epoch": 0.78505859375, + "grad_norm": 0.15107117593288422, + "learning_rate": 0.00010079798212313688, + "loss": 4.4062, + "step": 8039 + }, + { + "epoch": 0.78515625, + "grad_norm": 0.1494452953338623, + "learning_rate": 0.0001007536523429182, + "loss": 4.4102, + "step": 8040 + }, + { + "epoch": 0.78525390625, + "grad_norm": 0.15409442782402039, + "learning_rate": 0.00010070933945470031, + "loss": 4.3984, + "step": 8041 + }, + { + "epoch": 0.7853515625, + "grad_norm": 0.15757612884044647, + "learning_rate": 0.0001006650434627791, + "loss": 4.4141, + "step": 8042 + }, + { + "epoch": 0.78544921875, + "grad_norm": 0.14749404788017273, + "learning_rate": 0.00010062076437144866, + "loss": 4.4023, + "step": 8043 + }, + { + "epoch": 0.785546875, + "grad_norm": 0.14654500782489777, + "learning_rate": 0.00010057650218500158, + "loss": 4.4297, + "step": 8044 + }, + { + "epoch": 0.78564453125, + "grad_norm": 0.15383663773536682, + "learning_rate": 0.00010053225690772882, + "loss": 4.3984, + "step": 8045 + }, + { + "epoch": 0.7857421875, + "grad_norm": 0.1487773358821869, + "learning_rate": 0.00010048802854391973, + "loss": 4.3711, + "step": 8046 + }, + { + "epoch": 0.78583984375, + "grad_norm": 0.1499847173690796, + "learning_rate": 0.00010044381709786176, + "loss": 4.4258, + "step": 8047 + }, + { + "epoch": 0.7859375, + "grad_norm": 0.15081682801246643, + "learning_rate": 0.00010039962257384103, + "loss": 4.4141, + "step": 8048 + }, + { + "epoch": 0.78603515625, + "grad_norm": 0.14849853515625, + 
"learning_rate": 0.00010035544497614184, + "loss": 4.4062, + "step": 8049 + }, + { + "epoch": 0.7861328125, + "grad_norm": 0.15132670104503632, + "learning_rate": 0.00010031128430904698, + "loss": 4.418, + "step": 8050 + }, + { + "epoch": 0.78623046875, + "grad_norm": 0.1449001133441925, + "learning_rate": 0.00010026714057683748, + "loss": 4.3945, + "step": 8051 + }, + { + "epoch": 0.786328125, + "grad_norm": 0.1499253213405609, + "learning_rate": 0.00010022301378379278, + "loss": 4.4102, + "step": 8052 + }, + { + "epoch": 0.78642578125, + "grad_norm": 0.1491183340549469, + "learning_rate": 0.00010017890393419067, + "loss": 4.418, + "step": 8053 + }, + { + "epoch": 0.7865234375, + "grad_norm": 0.1507994532585144, + "learning_rate": 0.00010013481103230729, + "loss": 4.418, + "step": 8054 + }, + { + "epoch": 0.78662109375, + "grad_norm": 0.15299591422080994, + "learning_rate": 0.00010009073508241728, + "loss": 4.4062, + "step": 8055 + }, + { + "epoch": 0.78671875, + "grad_norm": 0.1448696255683899, + "learning_rate": 0.0001000466760887933, + "loss": 4.418, + "step": 8056 + }, + { + "epoch": 0.78681640625, + "grad_norm": 0.15338866412639618, + "learning_rate": 0.00010000263405570666, + "loss": 4.4141, + "step": 8057 + }, + { + "epoch": 0.7869140625, + "grad_norm": 0.15309876203536987, + "learning_rate": 9.995860898742694e-05, + "loss": 4.4219, + "step": 8058 + }, + { + "epoch": 0.78701171875, + "grad_norm": 0.15767745673656464, + "learning_rate": 9.991460088822208e-05, + "loss": 4.3945, + "step": 8059 + }, + { + "epoch": 0.787109375, + "grad_norm": 0.14878781139850616, + "learning_rate": 9.987060976235835e-05, + "loss": 4.4102, + "step": 8060 + }, + { + "epoch": 0.78720703125, + "grad_norm": 0.14648319780826569, + "learning_rate": 9.982663561410038e-05, + "loss": 4.418, + "step": 8061 + }, + { + "epoch": 0.7873046875, + "grad_norm": 0.1510055512189865, + "learning_rate": 9.978267844771122e-05, + "loss": 4.4219, + "step": 8062 + }, + { + "epoch": 0.78740234375, + "grad_norm": 0.14996320009231567, + "learning_rate": 9.97387382674522e-05, + "loss": 4.4062, + "step": 8063 + }, + { + "epoch": 0.7875, + "grad_norm": 0.14858584105968475, + "learning_rate": 9.969481507758298e-05, + "loss": 4.3828, + "step": 8064 + }, + { + "epoch": 0.78759765625, + "grad_norm": 0.15176379680633545, + "learning_rate": 9.965090888236161e-05, + "loss": 4.4023, + "step": 8065 + }, + { + "epoch": 0.7876953125, + "grad_norm": 0.1482410877943039, + "learning_rate": 9.960701968604455e-05, + "loss": 4.4062, + "step": 8066 + }, + { + "epoch": 0.78779296875, + "grad_norm": 0.14765876531600952, + "learning_rate": 9.956314749288651e-05, + "loss": 4.3984, + "step": 8067 + }, + { + "epoch": 0.787890625, + "grad_norm": 0.14793841540813446, + "learning_rate": 9.951929230714064e-05, + "loss": 4.4062, + "step": 8068 + }, + { + "epoch": 0.78798828125, + "grad_norm": 0.15230858325958252, + "learning_rate": 9.94754541330584e-05, + "loss": 4.4023, + "step": 8069 + }, + { + "epoch": 0.7880859375, + "grad_norm": 0.15044331550598145, + "learning_rate": 9.943163297488957e-05, + "loss": 4.3984, + "step": 8070 + }, + { + "epoch": 0.78818359375, + "grad_norm": 0.1514436900615692, + "learning_rate": 9.938782883688235e-05, + "loss": 4.3984, + "step": 8071 + }, + { + "epoch": 0.78828125, + "grad_norm": 0.15431882441043854, + "learning_rate": 9.93440417232833e-05, + "loss": 4.4141, + "step": 8072 + }, + { + "epoch": 0.78837890625, + "grad_norm": 0.14588013291358948, + "learning_rate": 9.930027163833716e-05, + "loss": 4.3906, + "step": 8073 + }, + { + 
"epoch": 0.7884765625, + "grad_norm": 0.14586317539215088, + "learning_rate": 9.925651858628721e-05, + "loss": 4.3828, + "step": 8074 + }, + { + "epoch": 0.78857421875, + "grad_norm": 0.16529974341392517, + "learning_rate": 9.921278257137501e-05, + "loss": 4.4297, + "step": 8075 + }, + { + "epoch": 0.788671875, + "grad_norm": 0.14426936209201813, + "learning_rate": 9.916906359784048e-05, + "loss": 4.4062, + "step": 8076 + }, + { + "epoch": 0.78876953125, + "grad_norm": 0.1529703438282013, + "learning_rate": 9.912536166992185e-05, + "loss": 4.3945, + "step": 8077 + }, + { + "epoch": 0.7888671875, + "grad_norm": 0.1530573070049286, + "learning_rate": 9.908167679185574e-05, + "loss": 4.3984, + "step": 8078 + }, + { + "epoch": 0.78896484375, + "grad_norm": 0.14802533388137817, + "learning_rate": 9.903800896787711e-05, + "loss": 4.4297, + "step": 8079 + }, + { + "epoch": 0.7890625, + "grad_norm": 0.15279777348041534, + "learning_rate": 9.899435820221932e-05, + "loss": 4.418, + "step": 8080 + }, + { + "epoch": 0.78916015625, + "grad_norm": 0.1454186886548996, + "learning_rate": 9.895072449911388e-05, + "loss": 4.4336, + "step": 8081 + }, + { + "epoch": 0.7892578125, + "grad_norm": 0.15188796818256378, + "learning_rate": 9.890710786279086e-05, + "loss": 4.418, + "step": 8082 + }, + { + "epoch": 0.78935546875, + "grad_norm": 0.15474165976047516, + "learning_rate": 9.886350829747859e-05, + "loss": 4.4023, + "step": 8083 + }, + { + "epoch": 0.789453125, + "grad_norm": 0.14758148789405823, + "learning_rate": 9.881992580740373e-05, + "loss": 4.418, + "step": 8084 + }, + { + "epoch": 0.78955078125, + "grad_norm": 0.15949267148971558, + "learning_rate": 9.87763603967913e-05, + "loss": 4.4102, + "step": 8085 + }, + { + "epoch": 0.7896484375, + "grad_norm": 0.15260356664657593, + "learning_rate": 9.873281206986474e-05, + "loss": 4.3906, + "step": 8086 + }, + { + "epoch": 0.78974609375, + "grad_norm": 0.15244895219802856, + "learning_rate": 9.868928083084572e-05, + "loss": 4.3867, + "step": 8087 + }, + { + "epoch": 0.78984375, + "grad_norm": 0.1506461799144745, + "learning_rate": 9.864576668395427e-05, + "loss": 4.3828, + "step": 8088 + }, + { + "epoch": 0.78994140625, + "grad_norm": 0.1567615270614624, + "learning_rate": 9.86022696334089e-05, + "loss": 4.3828, + "step": 8089 + }, + { + "epoch": 0.7900390625, + "grad_norm": 0.1460544914007187, + "learning_rate": 9.855878968342621e-05, + "loss": 4.3984, + "step": 8090 + }, + { + "epoch": 0.79013671875, + "grad_norm": 0.15219971537590027, + "learning_rate": 9.851532683822135e-05, + "loss": 4.3945, + "step": 8091 + }, + { + "epoch": 0.790234375, + "grad_norm": 0.1532355695962906, + "learning_rate": 9.847188110200775e-05, + "loss": 4.4258, + "step": 8092 + }, + { + "epoch": 0.79033203125, + "grad_norm": 0.1513243317604065, + "learning_rate": 9.84284524789972e-05, + "loss": 4.457, + "step": 8093 + }, + { + "epoch": 0.7904296875, + "grad_norm": 0.15156985819339752, + "learning_rate": 9.838504097339974e-05, + "loss": 4.3789, + "step": 8094 + }, + { + "epoch": 0.79052734375, + "grad_norm": 0.14640237390995026, + "learning_rate": 9.83416465894239e-05, + "loss": 4.3984, + "step": 8095 + }, + { + "epoch": 0.790625, + "grad_norm": 0.14727123081684113, + "learning_rate": 9.829826933127642e-05, + "loss": 4.375, + "step": 8096 + }, + { + "epoch": 0.79072265625, + "grad_norm": 0.15116991102695465, + "learning_rate": 9.825490920316254e-05, + "loss": 4.4258, + "step": 8097 + }, + { + "epoch": 0.7908203125, + "grad_norm": 0.1497589349746704, + "learning_rate": 
9.821156620928557e-05, + "loss": 4.4062, + "step": 8098 + }, + { + "epoch": 0.79091796875, + "grad_norm": 0.14963972568511963, + "learning_rate": 9.816824035384737e-05, + "loss": 4.3906, + "step": 8099 + }, + { + "epoch": 0.791015625, + "grad_norm": 0.14216068387031555, + "learning_rate": 9.81249316410481e-05, + "loss": 4.3906, + "step": 8100 + }, + { + "epoch": 0.79111328125, + "grad_norm": 0.1488150954246521, + "learning_rate": 9.808164007508627e-05, + "loss": 4.4062, + "step": 8101 + }, + { + "epoch": 0.7912109375, + "grad_norm": 0.14483614265918732, + "learning_rate": 9.803836566015873e-05, + "loss": 4.418, + "step": 8102 + }, + { + "epoch": 0.79130859375, + "grad_norm": 0.15005750954151154, + "learning_rate": 9.799510840046054e-05, + "loss": 4.418, + "step": 8103 + }, + { + "epoch": 0.79140625, + "grad_norm": 0.14986738562583923, + "learning_rate": 9.795186830018524e-05, + "loss": 4.4102, + "step": 8104 + }, + { + "epoch": 0.79150390625, + "grad_norm": 0.14477576315402985, + "learning_rate": 9.790864536352479e-05, + "loss": 4.418, + "step": 8105 + }, + { + "epoch": 0.7916015625, + "grad_norm": 0.14759336411952972, + "learning_rate": 9.786543959466918e-05, + "loss": 4.3906, + "step": 8106 + }, + { + "epoch": 0.79169921875, + "grad_norm": 0.15129052102565765, + "learning_rate": 9.782225099780697e-05, + "loss": 4.4258, + "step": 8107 + }, + { + "epoch": 0.791796875, + "grad_norm": 0.14619988203048706, + "learning_rate": 9.777907957712501e-05, + "loss": 4.3711, + "step": 8108 + }, + { + "epoch": 0.79189453125, + "grad_norm": 0.14823751151561737, + "learning_rate": 9.773592533680848e-05, + "loss": 4.375, + "step": 8109 + }, + { + "epoch": 0.7919921875, + "grad_norm": 0.14661657810211182, + "learning_rate": 9.769278828104092e-05, + "loss": 4.3945, + "step": 8110 + }, + { + "epoch": 0.79208984375, + "grad_norm": 0.14483331143856049, + "learning_rate": 9.764966841400413e-05, + "loss": 4.4141, + "step": 8111 + }, + { + "epoch": 0.7921875, + "grad_norm": 0.14512322843074799, + "learning_rate": 9.760656573987831e-05, + "loss": 4.4102, + "step": 8112 + }, + { + "epoch": 0.79228515625, + "grad_norm": 0.15073947608470917, + "learning_rate": 9.756348026284197e-05, + "loss": 4.3984, + "step": 8113 + }, + { + "epoch": 0.7923828125, + "grad_norm": 0.14395765960216522, + "learning_rate": 9.752041198707202e-05, + "loss": 4.4141, + "step": 8114 + }, + { + "epoch": 0.79248046875, + "grad_norm": 0.1465233415365219, + "learning_rate": 9.74773609167435e-05, + "loss": 4.4062, + "step": 8115 + }, + { + "epoch": 0.792578125, + "grad_norm": 0.15155115723609924, + "learning_rate": 9.743432705602999e-05, + "loss": 4.418, + "step": 8116 + }, + { + "epoch": 0.79267578125, + "grad_norm": 0.14504624903202057, + "learning_rate": 9.739131040910335e-05, + "loss": 4.4023, + "step": 8117 + }, + { + "epoch": 0.7927734375, + "grad_norm": 0.15074515342712402, + "learning_rate": 9.734831098013372e-05, + "loss": 4.3945, + "step": 8118 + }, + { + "epoch": 0.79287109375, + "grad_norm": 0.14708679914474487, + "learning_rate": 9.73053287732896e-05, + "loss": 4.4062, + "step": 8119 + }, + { + "epoch": 0.79296875, + "grad_norm": 0.14798524975776672, + "learning_rate": 9.726236379273784e-05, + "loss": 4.4258, + "step": 8120 + }, + { + "epoch": 0.79306640625, + "grad_norm": 0.14291197061538696, + "learning_rate": 9.721941604264362e-05, + "loss": 4.3867, + "step": 8121 + }, + { + "epoch": 0.7931640625, + "grad_norm": 0.15191549062728882, + "learning_rate": 9.717648552717045e-05, + "loss": 4.3945, + "step": 8122 + }, + { + "epoch": 
0.79326171875, + "grad_norm": 0.14701008796691895, + "learning_rate": 9.713357225048008e-05, + "loss": 4.3984, + "step": 8123 + }, + { + "epoch": 0.793359375, + "grad_norm": 0.1487017273902893, + "learning_rate": 9.709067621673268e-05, + "loss": 4.3594, + "step": 8124 + }, + { + "epoch": 0.79345703125, + "grad_norm": 0.15230239927768707, + "learning_rate": 9.704779743008676e-05, + "loss": 4.3984, + "step": 8125 + }, + { + "epoch": 0.7935546875, + "grad_norm": 0.14708808064460754, + "learning_rate": 9.700493589469911e-05, + "loss": 4.3789, + "step": 8126 + }, + { + "epoch": 0.79365234375, + "grad_norm": 0.15470895171165466, + "learning_rate": 9.696209161472485e-05, + "loss": 4.4258, + "step": 8127 + }, + { + "epoch": 0.79375, + "grad_norm": 0.14859077334403992, + "learning_rate": 9.69192645943175e-05, + "loss": 4.3945, + "step": 8128 + }, + { + "epoch": 0.79384765625, + "grad_norm": 0.15224269032478333, + "learning_rate": 9.687645483762877e-05, + "loss": 4.4102, + "step": 8129 + }, + { + "epoch": 0.7939453125, + "grad_norm": 0.15604005753993988, + "learning_rate": 9.683366234880883e-05, + "loss": 4.4141, + "step": 8130 + }, + { + "epoch": 0.79404296875, + "grad_norm": 0.14996275305747986, + "learning_rate": 9.679088713200617e-05, + "loss": 4.4492, + "step": 8131 + }, + { + "epoch": 0.794140625, + "grad_norm": 0.15470263361930847, + "learning_rate": 9.674812919136747e-05, + "loss": 4.4102, + "step": 8132 + }, + { + "epoch": 0.79423828125, + "grad_norm": 0.15141981840133667, + "learning_rate": 9.67053885310378e-05, + "loss": 4.3672, + "step": 8133 + }, + { + "epoch": 0.7943359375, + "grad_norm": 0.16605165600776672, + "learning_rate": 9.666266515516062e-05, + "loss": 4.3984, + "step": 8134 + }, + { + "epoch": 0.79443359375, + "grad_norm": 0.1561633050441742, + "learning_rate": 9.66199590678777e-05, + "loss": 4.4258, + "step": 8135 + }, + { + "epoch": 0.79453125, + "grad_norm": 0.14804957807064056, + "learning_rate": 9.657727027332905e-05, + "loss": 4.4102, + "step": 8136 + }, + { + "epoch": 0.79462890625, + "grad_norm": 0.16479580104351044, + "learning_rate": 9.653459877565312e-05, + "loss": 4.418, + "step": 8137 + }, + { + "epoch": 0.7947265625, + "grad_norm": 0.15236738324165344, + "learning_rate": 9.649194457898658e-05, + "loss": 4.3945, + "step": 8138 + }, + { + "epoch": 0.79482421875, + "grad_norm": 0.15380410850048065, + "learning_rate": 9.644930768746451e-05, + "loss": 4.3789, + "step": 8139 + }, + { + "epoch": 0.794921875, + "grad_norm": 0.15953725576400757, + "learning_rate": 9.640668810522018e-05, + "loss": 4.3945, + "step": 8140 + }, + { + "epoch": 0.79501953125, + "grad_norm": 0.15151526033878326, + "learning_rate": 9.636408583638536e-05, + "loss": 4.4453, + "step": 8141 + }, + { + "epoch": 0.7951171875, + "grad_norm": 0.16161569952964783, + "learning_rate": 9.632150088508998e-05, + "loss": 4.4141, + "step": 8142 + }, + { + "epoch": 0.79521484375, + "grad_norm": 0.15325479209423065, + "learning_rate": 9.627893325546241e-05, + "loss": 4.457, + "step": 8143 + }, + { + "epoch": 0.7953125, + "grad_norm": 0.14512822031974792, + "learning_rate": 9.623638295162926e-05, + "loss": 4.4375, + "step": 8144 + }, + { + "epoch": 0.79541015625, + "grad_norm": 0.14783808588981628, + "learning_rate": 9.619384997771552e-05, + "loss": 4.3906, + "step": 8145 + }, + { + "epoch": 0.7955078125, + "grad_norm": 0.1600733995437622, + "learning_rate": 9.615133433784446e-05, + "loss": 4.4062, + "step": 8146 + }, + { + "epoch": 0.79560546875, + "grad_norm": 0.15448695421218872, + "learning_rate": 
9.610883603613768e-05, + "loss": 4.4141, + "step": 8147 + }, + { + "epoch": 0.795703125, + "grad_norm": 0.14858804643154144, + "learning_rate": 9.606635507671518e-05, + "loss": 4.3945, + "step": 8148 + }, + { + "epoch": 0.79580078125, + "grad_norm": 0.14974188804626465, + "learning_rate": 9.602389146369506e-05, + "loss": 4.4141, + "step": 8149 + }, + { + "epoch": 0.7958984375, + "grad_norm": 0.15657289326190948, + "learning_rate": 9.598144520119396e-05, + "loss": 4.4023, + "step": 8150 + }, + { + "epoch": 0.79599609375, + "grad_norm": 0.14662820100784302, + "learning_rate": 9.593901629332675e-05, + "loss": 4.4375, + "step": 8151 + }, + { + "epoch": 0.79609375, + "grad_norm": 0.145959734916687, + "learning_rate": 9.589660474420661e-05, + "loss": 4.4141, + "step": 8152 + }, + { + "epoch": 0.79619140625, + "grad_norm": 0.14810477197170258, + "learning_rate": 9.585421055794507e-05, + "loss": 4.3984, + "step": 8153 + }, + { + "epoch": 0.7962890625, + "grad_norm": 0.14886851608753204, + "learning_rate": 9.581183373865194e-05, + "loss": 4.4258, + "step": 8154 + }, + { + "epoch": 0.79638671875, + "grad_norm": 0.14740145206451416, + "learning_rate": 9.576947429043536e-05, + "loss": 4.3945, + "step": 8155 + }, + { + "epoch": 0.796484375, + "grad_norm": 0.14410634338855743, + "learning_rate": 9.572713221740189e-05, + "loss": 4.3828, + "step": 8156 + }, + { + "epoch": 0.79658203125, + "grad_norm": 0.15342997014522552, + "learning_rate": 9.568480752365617e-05, + "loss": 4.4219, + "step": 8157 + }, + { + "epoch": 0.7966796875, + "grad_norm": 0.1424105018377304, + "learning_rate": 9.564250021330134e-05, + "loss": 4.4062, + "step": 8158 + }, + { + "epoch": 0.79677734375, + "grad_norm": 0.1457749307155609, + "learning_rate": 9.56002102904388e-05, + "loss": 4.3867, + "step": 8159 + }, + { + "epoch": 0.796875, + "grad_norm": 0.1461271345615387, + "learning_rate": 9.555793775916827e-05, + "loss": 4.3867, + "step": 8160 + }, + { + "epoch": 0.79697265625, + "grad_norm": 0.15157252550125122, + "learning_rate": 9.551568262358782e-05, + "loss": 4.4336, + "step": 8161 + }, + { + "epoch": 0.7970703125, + "grad_norm": 0.14968854188919067, + "learning_rate": 9.547344488779374e-05, + "loss": 4.4023, + "step": 8162 + }, + { + "epoch": 0.79716796875, + "grad_norm": 0.14238597452640533, + "learning_rate": 9.54312245558808e-05, + "loss": 4.4102, + "step": 8163 + }, + { + "epoch": 0.797265625, + "grad_norm": 0.15246444940567017, + "learning_rate": 9.53890216319419e-05, + "loss": 4.4062, + "step": 8164 + }, + { + "epoch": 0.79736328125, + "grad_norm": 0.15092189610004425, + "learning_rate": 9.53468361200683e-05, + "loss": 4.3828, + "step": 8165 + }, + { + "epoch": 0.7974609375, + "grad_norm": 0.14555586874485016, + "learning_rate": 9.53046680243496e-05, + "loss": 4.4414, + "step": 8166 + }, + { + "epoch": 0.79755859375, + "grad_norm": 0.15674617886543274, + "learning_rate": 9.526251734887375e-05, + "loss": 4.4062, + "step": 8167 + }, + { + "epoch": 0.79765625, + "grad_norm": 0.15244081616401672, + "learning_rate": 9.522038409772696e-05, + "loss": 4.3711, + "step": 8168 + }, + { + "epoch": 0.79775390625, + "grad_norm": 0.1523437201976776, + "learning_rate": 9.517826827499379e-05, + "loss": 4.3789, + "step": 8169 + }, + { + "epoch": 0.7978515625, + "grad_norm": 0.1435731202363968, + "learning_rate": 9.513616988475702e-05, + "loss": 4.4062, + "step": 8170 + }, + { + "epoch": 0.79794921875, + "grad_norm": 0.15172524750232697, + "learning_rate": 9.509408893109787e-05, + "loss": 4.3828, + "step": 8171 + }, + { + "epoch": 
0.798046875, + "grad_norm": 0.14724871516227722, + "learning_rate": 9.505202541809577e-05, + "loss": 4.4023, + "step": 8172 + }, + { + "epoch": 0.79814453125, + "grad_norm": 0.15330496430397034, + "learning_rate": 9.500997934982854e-05, + "loss": 4.3984, + "step": 8173 + }, + { + "epoch": 0.7982421875, + "grad_norm": 0.15791289508342743, + "learning_rate": 9.496795073037214e-05, + "loss": 4.3945, + "step": 8174 + }, + { + "epoch": 0.79833984375, + "grad_norm": 0.14895431697368622, + "learning_rate": 9.492593956380106e-05, + "loss": 4.3633, + "step": 8175 + }, + { + "epoch": 0.7984375, + "grad_norm": 0.14419487118721008, + "learning_rate": 9.488394585418797e-05, + "loss": 4.4258, + "step": 8176 + }, + { + "epoch": 0.79853515625, + "grad_norm": 0.15027163922786713, + "learning_rate": 9.484196960560389e-05, + "loss": 4.4375, + "step": 8177 + }, + { + "epoch": 0.7986328125, + "grad_norm": 0.15082797408103943, + "learning_rate": 9.480001082211812e-05, + "loss": 4.3945, + "step": 8178 + }, + { + "epoch": 0.79873046875, + "grad_norm": 0.14982688426971436, + "learning_rate": 9.475806950779826e-05, + "loss": 4.418, + "step": 8179 + }, + { + "epoch": 0.798828125, + "grad_norm": 0.14923392236232758, + "learning_rate": 9.471614566671024e-05, + "loss": 4.4141, + "step": 8180 + }, + { + "epoch": 0.79892578125, + "grad_norm": 0.15080471336841583, + "learning_rate": 9.46742393029184e-05, + "loss": 4.4219, + "step": 8181 + }, + { + "epoch": 0.7990234375, + "grad_norm": 0.1566399335861206, + "learning_rate": 9.463235042048513e-05, + "loss": 4.3945, + "step": 8182 + }, + { + "epoch": 0.79912109375, + "grad_norm": 0.1460387408733368, + "learning_rate": 9.459047902347132e-05, + "loss": 4.4062, + "step": 8183 + }, + { + "epoch": 0.79921875, + "grad_norm": 0.15332332253456116, + "learning_rate": 9.454862511593612e-05, + "loss": 4.4141, + "step": 8184 + }, + { + "epoch": 0.79931640625, + "grad_norm": 0.1488557904958725, + "learning_rate": 9.450678870193699e-05, + "loss": 4.3711, + "step": 8185 + }, + { + "epoch": 0.7994140625, + "grad_norm": 0.1518842577934265, + "learning_rate": 9.446496978552968e-05, + "loss": 4.3945, + "step": 8186 + }, + { + "epoch": 0.79951171875, + "grad_norm": 0.14985167980194092, + "learning_rate": 9.442316837076827e-05, + "loss": 4.4102, + "step": 8187 + }, + { + "epoch": 0.799609375, + "grad_norm": 0.14451755583286285, + "learning_rate": 9.438138446170508e-05, + "loss": 4.3789, + "step": 8188 + }, + { + "epoch": 0.79970703125, + "grad_norm": 0.15662163496017456, + "learning_rate": 9.433961806239084e-05, + "loss": 4.3711, + "step": 8189 + }, + { + "epoch": 0.7998046875, + "grad_norm": 0.1536259949207306, + "learning_rate": 9.429786917687451e-05, + "loss": 4.4102, + "step": 8190 + }, + { + "epoch": 0.79990234375, + "grad_norm": 0.15289007127285004, + "learning_rate": 9.425613780920327e-05, + "loss": 4.4023, + "step": 8191 + }, + { + "epoch": 0.8, + "grad_norm": 0.15646637976169586, + "learning_rate": 9.421442396342278e-05, + "loss": 4.3633, + "step": 8192 + }, + { + "epoch": 0.80009765625, + "grad_norm": 0.15135012567043304, + "learning_rate": 9.417272764357688e-05, + "loss": 4.3945, + "step": 8193 + }, + { + "epoch": 0.8001953125, + "grad_norm": 0.16167257726192474, + "learning_rate": 9.413104885370777e-05, + "loss": 4.3789, + "step": 8194 + }, + { + "epoch": 0.80029296875, + "grad_norm": 0.14945366978645325, + "learning_rate": 9.40893875978559e-05, + "loss": 4.4062, + "step": 8195 + }, + { + "epoch": 0.800390625, + "grad_norm": 0.14540830254554749, + "learning_rate": 
9.404774388006008e-05, + "loss": 4.4414, + "step": 8196 + }, + { + "epoch": 0.80048828125, + "grad_norm": 0.15251514315605164, + "learning_rate": 9.400611770435734e-05, + "loss": 4.3945, + "step": 8197 + }, + { + "epoch": 0.8005859375, + "grad_norm": 0.146982803940773, + "learning_rate": 9.396450907478313e-05, + "loss": 4.418, + "step": 8198 + }, + { + "epoch": 0.80068359375, + "grad_norm": 0.1429162472486496, + "learning_rate": 9.392291799537103e-05, + "loss": 4.3945, + "step": 8199 + }, + { + "epoch": 0.80078125, + "grad_norm": 0.14630700647830963, + "learning_rate": 9.388134447015306e-05, + "loss": 4.3672, + "step": 8200 + }, + { + "epoch": 0.80087890625, + "grad_norm": 0.15005335211753845, + "learning_rate": 9.383978850315947e-05, + "loss": 4.4102, + "step": 8201 + }, + { + "epoch": 0.8009765625, + "grad_norm": 0.14867247641086578, + "learning_rate": 9.379825009841886e-05, + "loss": 4.4062, + "step": 8202 + }, + { + "epoch": 0.80107421875, + "grad_norm": 0.14997807145118713, + "learning_rate": 9.37567292599581e-05, + "loss": 4.418, + "step": 8203 + }, + { + "epoch": 0.801171875, + "grad_norm": 0.1453149914741516, + "learning_rate": 9.371522599180232e-05, + "loss": 4.3984, + "step": 8204 + }, + { + "epoch": 0.80126953125, + "grad_norm": 0.14900121092796326, + "learning_rate": 9.367374029797499e-05, + "loss": 4.4062, + "step": 8205 + }, + { + "epoch": 0.8013671875, + "grad_norm": 0.14823034405708313, + "learning_rate": 9.363227218249795e-05, + "loss": 4.3945, + "step": 8206 + }, + { + "epoch": 0.80146484375, + "grad_norm": 0.16115358471870422, + "learning_rate": 9.35908216493911e-05, + "loss": 4.3984, + "step": 8207 + }, + { + "epoch": 0.8015625, + "grad_norm": 0.1482418328523636, + "learning_rate": 9.354938870267285e-05, + "loss": 4.375, + "step": 8208 + }, + { + "epoch": 0.80166015625, + "grad_norm": 0.1534464806318283, + "learning_rate": 9.350797334635989e-05, + "loss": 4.3945, + "step": 8209 + }, + { + "epoch": 0.8017578125, + "grad_norm": 0.15614360570907593, + "learning_rate": 9.346657558446711e-05, + "loss": 4.4414, + "step": 8210 + }, + { + "epoch": 0.80185546875, + "grad_norm": 0.1525532603263855, + "learning_rate": 9.342519542100775e-05, + "loss": 4.3828, + "step": 8211 + }, + { + "epoch": 0.801953125, + "grad_norm": 0.16506695747375488, + "learning_rate": 9.338383285999334e-05, + "loss": 4.4023, + "step": 8212 + }, + { + "epoch": 0.80205078125, + "grad_norm": 0.14936751127243042, + "learning_rate": 9.334248790543371e-05, + "loss": 4.4023, + "step": 8213 + }, + { + "epoch": 0.8021484375, + "grad_norm": 0.15421545505523682, + "learning_rate": 9.330116056133696e-05, + "loss": 4.3828, + "step": 8214 + }, + { + "epoch": 0.80224609375, + "grad_norm": 0.15343186259269714, + "learning_rate": 9.325985083170957e-05, + "loss": 4.4219, + "step": 8215 + }, + { + "epoch": 0.80234375, + "grad_norm": 0.15013398230075836, + "learning_rate": 9.321855872055611e-05, + "loss": 4.418, + "step": 8216 + }, + { + "epoch": 0.80244140625, + "grad_norm": 0.158683642745018, + "learning_rate": 9.31772842318796e-05, + "loss": 4.375, + "step": 8217 + }, + { + "epoch": 0.8025390625, + "grad_norm": 0.14342577755451202, + "learning_rate": 9.313602736968137e-05, + "loss": 4.4258, + "step": 8218 + }, + { + "epoch": 0.80263671875, + "grad_norm": 0.14482325315475464, + "learning_rate": 9.309478813796097e-05, + "loss": 4.4219, + "step": 8219 + }, + { + "epoch": 0.802734375, + "grad_norm": 0.14147360622882843, + "learning_rate": 9.305356654071627e-05, + "loss": 4.4609, + "step": 8220 + }, + { + "epoch": 
0.80283203125, + "grad_norm": 0.14862249791622162, + "learning_rate": 9.30123625819434e-05, + "loss": 4.3789, + "step": 8221 + }, + { + "epoch": 0.8029296875, + "grad_norm": 0.14717541635036469, + "learning_rate": 9.297117626563687e-05, + "loss": 4.4023, + "step": 8222 + }, + { + "epoch": 0.80302734375, + "grad_norm": 0.16012568771839142, + "learning_rate": 9.293000759578932e-05, + "loss": 4.375, + "step": 8223 + }, + { + "epoch": 0.803125, + "grad_norm": 0.14990036189556122, + "learning_rate": 9.28888565763919e-05, + "loss": 4.4336, + "step": 8224 + }, + { + "epoch": 0.80322265625, + "grad_norm": 0.15246792137622833, + "learning_rate": 9.284772321143377e-05, + "loss": 4.418, + "step": 8225 + }, + { + "epoch": 0.8033203125, + "grad_norm": 0.14738552272319794, + "learning_rate": 9.280660750490262e-05, + "loss": 4.4102, + "step": 8226 + }, + { + "epoch": 0.80341796875, + "grad_norm": 0.15376627445220947, + "learning_rate": 9.276550946078429e-05, + "loss": 4.4258, + "step": 8227 + }, + { + "epoch": 0.803515625, + "grad_norm": 0.14887046813964844, + "learning_rate": 9.272442908306303e-05, + "loss": 4.3984, + "step": 8228 + }, + { + "epoch": 0.80361328125, + "grad_norm": 0.1557839959859848, + "learning_rate": 9.268336637572125e-05, + "loss": 4.4297, + "step": 8229 + }, + { + "epoch": 0.8037109375, + "grad_norm": 0.1511612832546234, + "learning_rate": 9.26423213427397e-05, + "loss": 4.418, + "step": 8230 + }, + { + "epoch": 0.80380859375, + "grad_norm": 0.15325793623924255, + "learning_rate": 9.260129398809747e-05, + "loss": 4.4062, + "step": 8231 + }, + { + "epoch": 0.80390625, + "grad_norm": 0.15801209211349487, + "learning_rate": 9.25602843157718e-05, + "loss": 4.4023, + "step": 8232 + }, + { + "epoch": 0.80400390625, + "grad_norm": 0.1513536423444748, + "learning_rate": 9.251929232973835e-05, + "loss": 4.418, + "step": 8233 + }, + { + "epoch": 0.8041015625, + "grad_norm": 0.15719591081142426, + "learning_rate": 9.247831803397107e-05, + "loss": 4.3594, + "step": 8234 + }, + { + "epoch": 0.80419921875, + "grad_norm": 0.15143990516662598, + "learning_rate": 9.243736143244205e-05, + "loss": 4.375, + "step": 8235 + }, + { + "epoch": 0.804296875, + "grad_norm": 0.15537792444229126, + "learning_rate": 9.239642252912174e-05, + "loss": 4.4023, + "step": 8236 + }, + { + "epoch": 0.80439453125, + "grad_norm": 0.14983530342578888, + "learning_rate": 9.235550132797893e-05, + "loss": 4.3984, + "step": 8237 + }, + { + "epoch": 0.8044921875, + "grad_norm": 0.15521769225597382, + "learning_rate": 9.231459783298068e-05, + "loss": 4.4062, + "step": 8238 + }, + { + "epoch": 0.80458984375, + "grad_norm": 0.14644908905029297, + "learning_rate": 9.227371204809228e-05, + "loss": 4.3828, + "step": 8239 + }, + { + "epoch": 0.8046875, + "grad_norm": 0.15553000569343567, + "learning_rate": 9.223284397727734e-05, + "loss": 4.4023, + "step": 8240 + }, + { + "epoch": 0.80478515625, + "grad_norm": 0.14550773799419403, + "learning_rate": 9.21919936244977e-05, + "loss": 4.4141, + "step": 8241 + }, + { + "epoch": 0.8048828125, + "grad_norm": 0.15259134769439697, + "learning_rate": 9.215116099371356e-05, + "loss": 4.4219, + "step": 8242 + }, + { + "epoch": 0.80498046875, + "grad_norm": 0.14942120015621185, + "learning_rate": 9.211034608888339e-05, + "loss": 4.3828, + "step": 8243 + }, + { + "epoch": 0.805078125, + "grad_norm": 0.14321723580360413, + "learning_rate": 9.206954891396394e-05, + "loss": 4.3867, + "step": 8244 + }, + { + "epoch": 0.80517578125, + "grad_norm": 0.14902234077453613, + "learning_rate": 
9.202876947291009e-05, + "loss": 4.4023, + "step": 8245 + }, + { + "epoch": 0.8052734375, + "grad_norm": 0.15358346700668335, + "learning_rate": 9.198800776967523e-05, + "loss": 4.4023, + "step": 8246 + }, + { + "epoch": 0.80537109375, + "grad_norm": 0.15090768039226532, + "learning_rate": 9.19472638082109e-05, + "loss": 4.3828, + "step": 8247 + }, + { + "epoch": 0.80546875, + "grad_norm": 0.14648999273777008, + "learning_rate": 9.190653759246697e-05, + "loss": 4.418, + "step": 8248 + }, + { + "epoch": 0.80556640625, + "grad_norm": 0.15316633880138397, + "learning_rate": 9.186582912639156e-05, + "loss": 4.4023, + "step": 8249 + }, + { + "epoch": 0.8056640625, + "grad_norm": 0.146706223487854, + "learning_rate": 9.182513841393108e-05, + "loss": 4.4258, + "step": 8250 + }, + { + "epoch": 0.80576171875, + "grad_norm": 0.14189518988132477, + "learning_rate": 9.178446545903023e-05, + "loss": 4.3945, + "step": 8251 + }, + { + "epoch": 0.805859375, + "grad_norm": 0.14696989953517914, + "learning_rate": 9.174381026563195e-05, + "loss": 4.4023, + "step": 8252 + }, + { + "epoch": 0.80595703125, + "grad_norm": 0.14737001061439514, + "learning_rate": 9.170317283767757e-05, + "loss": 4.4062, + "step": 8253 + }, + { + "epoch": 0.8060546875, + "grad_norm": 0.14822599291801453, + "learning_rate": 9.166255317910649e-05, + "loss": 4.4375, + "step": 8254 + }, + { + "epoch": 0.80615234375, + "grad_norm": 0.1431070864200592, + "learning_rate": 9.162195129385656e-05, + "loss": 4.3828, + "step": 8255 + }, + { + "epoch": 0.80625, + "grad_norm": 0.1473986804485321, + "learning_rate": 9.158136718586386e-05, + "loss": 4.4102, + "step": 8256 + }, + { + "epoch": 0.80634765625, + "grad_norm": 0.14578111469745636, + "learning_rate": 9.154080085906272e-05, + "loss": 4.4023, + "step": 8257 + }, + { + "epoch": 0.8064453125, + "grad_norm": 0.15276332199573517, + "learning_rate": 9.150025231738583e-05, + "loss": 4.3867, + "step": 8258 + }, + { + "epoch": 0.80654296875, + "grad_norm": 0.14893554151058197, + "learning_rate": 9.145972156476404e-05, + "loss": 4.4141, + "step": 8259 + }, + { + "epoch": 0.806640625, + "grad_norm": 0.1458856165409088, + "learning_rate": 9.141920860512657e-05, + "loss": 4.3828, + "step": 8260 + }, + { + "epoch": 0.80673828125, + "grad_norm": 0.15039587020874023, + "learning_rate": 9.137871344240085e-05, + "loss": 4.4375, + "step": 8261 + }, + { + "epoch": 0.8068359375, + "grad_norm": 0.16264492273330688, + "learning_rate": 9.133823608051261e-05, + "loss": 4.4023, + "step": 8262 + }, + { + "epoch": 0.80693359375, + "grad_norm": 0.15285234153270721, + "learning_rate": 9.129777652338595e-05, + "loss": 4.4219, + "step": 8263 + }, + { + "epoch": 0.80703125, + "grad_norm": 0.14978288114070892, + "learning_rate": 9.125733477494297e-05, + "loss": 4.3945, + "step": 8264 + }, + { + "epoch": 0.80712890625, + "grad_norm": 0.147576704621315, + "learning_rate": 9.121691083910435e-05, + "loss": 4.3984, + "step": 8265 + }, + { + "epoch": 0.8072265625, + "grad_norm": 0.14701777696609497, + "learning_rate": 9.11765047197889e-05, + "loss": 4.3828, + "step": 8266 + }, + { + "epoch": 0.80732421875, + "grad_norm": 0.14658203721046448, + "learning_rate": 9.113611642091366e-05, + "loss": 4.4141, + "step": 8267 + }, + { + "epoch": 0.807421875, + "grad_norm": 0.15220299363136292, + "learning_rate": 9.109574594639409e-05, + "loss": 4.3945, + "step": 8268 + }, + { + "epoch": 0.80751953125, + "grad_norm": 0.14865906536579132, + "learning_rate": 9.105539330014373e-05, + "loss": 4.4062, + "step": 8269 + }, + { + "epoch": 
0.8076171875, + "grad_norm": 0.1500426083803177, + "learning_rate": 9.101505848607461e-05, + "loss": 4.4023, + "step": 8270 + }, + { + "epoch": 0.80771484375, + "grad_norm": 0.14768216013908386, + "learning_rate": 9.097474150809681e-05, + "loss": 4.3984, + "step": 8271 + }, + { + "epoch": 0.8078125, + "grad_norm": 0.15344710648059845, + "learning_rate": 9.093444237011887e-05, + "loss": 4.418, + "step": 8272 + }, + { + "epoch": 0.80791015625, + "grad_norm": 0.15341851115226746, + "learning_rate": 9.089416107604753e-05, + "loss": 4.3828, + "step": 8273 + }, + { + "epoch": 0.8080078125, + "grad_norm": 0.14543966948986053, + "learning_rate": 9.085389762978771e-05, + "loss": 4.4102, + "step": 8274 + }, + { + "epoch": 0.80810546875, + "grad_norm": 0.1523716002702713, + "learning_rate": 9.081365203524269e-05, + "loss": 4.3906, + "step": 8275 + }, + { + "epoch": 0.808203125, + "grad_norm": 0.15151368081569672, + "learning_rate": 9.077342429631405e-05, + "loss": 4.4141, + "step": 8276 + }, + { + "epoch": 0.80830078125, + "grad_norm": 0.15652413666248322, + "learning_rate": 9.073321441690152e-05, + "loss": 4.4141, + "step": 8277 + }, + { + "epoch": 0.8083984375, + "grad_norm": 0.14288227260112762, + "learning_rate": 9.06930224009033e-05, + "loss": 4.4453, + "step": 8278 + }, + { + "epoch": 0.80849609375, + "grad_norm": 0.14093376696109772, + "learning_rate": 9.06528482522156e-05, + "loss": 4.3945, + "step": 8279 + }, + { + "epoch": 0.80859375, + "grad_norm": 0.1492370218038559, + "learning_rate": 9.061269197473313e-05, + "loss": 4.4141, + "step": 8280 + }, + { + "epoch": 0.80869140625, + "grad_norm": 0.15208327770233154, + "learning_rate": 9.057255357234875e-05, + "loss": 4.3828, + "step": 8281 + }, + { + "epoch": 0.8087890625, + "grad_norm": 0.14795367419719696, + "learning_rate": 9.053243304895361e-05, + "loss": 4.4141, + "step": 8282 + }, + { + "epoch": 0.80888671875, + "grad_norm": 0.15039007365703583, + "learning_rate": 9.049233040843705e-05, + "loss": 4.4062, + "step": 8283 + }, + { + "epoch": 0.808984375, + "grad_norm": 0.1506948322057724, + "learning_rate": 9.045224565468683e-05, + "loss": 4.3711, + "step": 8284 + }, + { + "epoch": 0.80908203125, + "grad_norm": 0.14771060645580292, + "learning_rate": 9.041217879158883e-05, + "loss": 4.4141, + "step": 8285 + }, + { + "epoch": 0.8091796875, + "grad_norm": 0.13891419768333435, + "learning_rate": 9.037212982302731e-05, + "loss": 4.4062, + "step": 8286 + }, + { + "epoch": 0.80927734375, + "grad_norm": 0.14503629505634308, + "learning_rate": 9.03320987528847e-05, + "loss": 4.3906, + "step": 8287 + }, + { + "epoch": 0.809375, + "grad_norm": 0.14504505693912506, + "learning_rate": 9.029208558504183e-05, + "loss": 4.3867, + "step": 8288 + }, + { + "epoch": 0.80947265625, + "grad_norm": 0.14616578817367554, + "learning_rate": 9.025209032337758e-05, + "loss": 4.418, + "step": 8289 + }, + { + "epoch": 0.8095703125, + "grad_norm": 0.14838464558124542, + "learning_rate": 9.021211297176935e-05, + "loss": 4.4102, + "step": 8290 + }, + { + "epoch": 0.80966796875, + "grad_norm": 0.1458343267440796, + "learning_rate": 9.017215353409264e-05, + "loss": 4.3828, + "step": 8291 + }, + { + "epoch": 0.809765625, + "grad_norm": 0.1481715887784958, + "learning_rate": 9.013221201422114e-05, + "loss": 4.3984, + "step": 8292 + }, + { + "epoch": 0.80986328125, + "grad_norm": 0.14794127643108368, + "learning_rate": 9.009228841602701e-05, + "loss": 4.418, + "step": 8293 + }, + { + "epoch": 0.8099609375, + "grad_norm": 0.145847350358963, + "learning_rate": 
9.005238274338051e-05, + "loss": 4.4258, + "step": 8294 + }, + { + "epoch": 0.81005859375, + "grad_norm": 0.14743076264858246, + "learning_rate": 9.001249500015029e-05, + "loss": 4.4102, + "step": 8295 + }, + { + "epoch": 0.81015625, + "grad_norm": 0.14497458934783936, + "learning_rate": 8.997262519020317e-05, + "loss": 4.3984, + "step": 8296 + }, + { + "epoch": 0.81025390625, + "grad_norm": 0.143561452627182, + "learning_rate": 8.993277331740423e-05, + "loss": 4.3711, + "step": 8297 + }, + { + "epoch": 0.8103515625, + "grad_norm": 0.15508577227592468, + "learning_rate": 8.989293938561687e-05, + "loss": 4.3789, + "step": 8298 + }, + { + "epoch": 0.81044921875, + "grad_norm": 0.14545108377933502, + "learning_rate": 8.985312339870278e-05, + "loss": 4.4219, + "step": 8299 + }, + { + "epoch": 0.810546875, + "grad_norm": 0.14535903930664062, + "learning_rate": 8.981332536052172e-05, + "loss": 4.3828, + "step": 8300 + }, + { + "epoch": 0.81064453125, + "grad_norm": 0.1479174643754959, + "learning_rate": 8.977354527493188e-05, + "loss": 4.4375, + "step": 8301 + }, + { + "epoch": 0.8107421875, + "grad_norm": 0.1410520076751709, + "learning_rate": 8.97337831457897e-05, + "loss": 4.4336, + "step": 8302 + }, + { + "epoch": 0.81083984375, + "grad_norm": 0.14501158893108368, + "learning_rate": 8.969403897694985e-05, + "loss": 4.3711, + "step": 8303 + }, + { + "epoch": 0.8109375, + "grad_norm": 0.14512470364570618, + "learning_rate": 8.965431277226523e-05, + "loss": 4.3633, + "step": 8304 + }, + { + "epoch": 0.81103515625, + "grad_norm": 0.14290976524353027, + "learning_rate": 8.961460453558705e-05, + "loss": 4.375, + "step": 8305 + }, + { + "epoch": 0.8111328125, + "grad_norm": 0.1524972915649414, + "learning_rate": 8.957491427076475e-05, + "loss": 4.3906, + "step": 8306 + }, + { + "epoch": 0.81123046875, + "grad_norm": 0.15383608639240265, + "learning_rate": 8.953524198164603e-05, + "loss": 4.4062, + "step": 8307 + }, + { + "epoch": 0.811328125, + "grad_norm": 0.14725059270858765, + "learning_rate": 8.94955876720769e-05, + "loss": 4.3867, + "step": 8308 + }, + { + "epoch": 0.81142578125, + "grad_norm": 0.1517457216978073, + "learning_rate": 8.945595134590146e-05, + "loss": 4.3867, + "step": 8309 + }, + { + "epoch": 0.8115234375, + "grad_norm": 0.14962676167488098, + "learning_rate": 8.941633300696224e-05, + "loss": 4.4062, + "step": 8310 + }, + { + "epoch": 0.81162109375, + "grad_norm": 0.14709237217903137, + "learning_rate": 8.937673265909998e-05, + "loss": 4.3867, + "step": 8311 + }, + { + "epoch": 0.81171875, + "grad_norm": 0.15215986967086792, + "learning_rate": 8.933715030615366e-05, + "loss": 4.418, + "step": 8312 + }, + { + "epoch": 0.81181640625, + "grad_norm": 0.1479717344045639, + "learning_rate": 8.929758595196052e-05, + "loss": 4.4023, + "step": 8313 + }, + { + "epoch": 0.8119140625, + "grad_norm": 0.1494310349225998, + "learning_rate": 8.925803960035608e-05, + "loss": 4.3828, + "step": 8314 + }, + { + "epoch": 0.81201171875, + "grad_norm": 0.15135520696640015, + "learning_rate": 8.921851125517405e-05, + "loss": 4.4297, + "step": 8315 + }, + { + "epoch": 0.812109375, + "grad_norm": 0.15152046084403992, + "learning_rate": 8.91790009202465e-05, + "loss": 4.418, + "step": 8316 + }, + { + "epoch": 0.81220703125, + "grad_norm": 0.14529362320899963, + "learning_rate": 8.913950859940359e-05, + "loss": 4.4023, + "step": 8317 + }, + { + "epoch": 0.8123046875, + "grad_norm": 0.15247203409671783, + "learning_rate": 8.910003429647387e-05, + "loss": 4.3984, + "step": 8318 + }, + { + "epoch": 
0.81240234375, + "grad_norm": 0.14713054895401, + "learning_rate": 8.906057801528413e-05, + "loss": 4.4023, + "step": 8319 + }, + { + "epoch": 0.8125, + "grad_norm": 0.306414932012558, + "learning_rate": 8.90211397596594e-05, + "loss": 4.3984, + "step": 8320 + }, + { + "epoch": 0.81259765625, + "grad_norm": 0.14373335242271423, + "learning_rate": 8.898171953342291e-05, + "loss": 4.4219, + "step": 8321 + }, + { + "epoch": 0.8126953125, + "grad_norm": 0.15412870049476624, + "learning_rate": 8.894231734039617e-05, + "loss": 4.4258, + "step": 8322 + }, + { + "epoch": 0.81279296875, + "grad_norm": 0.14918556809425354, + "learning_rate": 8.890293318439901e-05, + "loss": 4.4023, + "step": 8323 + }, + { + "epoch": 0.812890625, + "grad_norm": 0.15479934215545654, + "learning_rate": 8.88635670692495e-05, + "loss": 4.375, + "step": 8324 + }, + { + "epoch": 0.81298828125, + "grad_norm": 0.1465662121772766, + "learning_rate": 8.882421899876377e-05, + "loss": 4.375, + "step": 8325 + }, + { + "epoch": 0.8130859375, + "grad_norm": 0.1562531590461731, + "learning_rate": 8.878488897675645e-05, + "loss": 4.3906, + "step": 8326 + }, + { + "epoch": 0.81318359375, + "grad_norm": 0.1449177861213684, + "learning_rate": 8.874557700704031e-05, + "loss": 4.4102, + "step": 8327 + }, + { + "epoch": 0.81328125, + "grad_norm": 0.14659418165683746, + "learning_rate": 8.870628309342632e-05, + "loss": 4.4102, + "step": 8328 + }, + { + "epoch": 0.81337890625, + "grad_norm": 0.1403774470090866, + "learning_rate": 8.866700723972385e-05, + "loss": 4.3828, + "step": 8329 + }, + { + "epoch": 0.8134765625, + "grad_norm": 0.154759481549263, + "learning_rate": 8.862774944974038e-05, + "loss": 4.4141, + "step": 8330 + }, + { + "epoch": 0.81357421875, + "grad_norm": 0.1479886919260025, + "learning_rate": 8.858850972728166e-05, + "loss": 4.4141, + "step": 8331 + }, + { + "epoch": 0.813671875, + "grad_norm": 0.14516858756542206, + "learning_rate": 8.854928807615176e-05, + "loss": 4.3906, + "step": 8332 + }, + { + "epoch": 0.81376953125, + "grad_norm": 0.14586392045021057, + "learning_rate": 8.851008450015299e-05, + "loss": 4.3945, + "step": 8333 + }, + { + "epoch": 0.8138671875, + "grad_norm": 0.15308429300785065, + "learning_rate": 8.847089900308575e-05, + "loss": 4.4062, + "step": 8334 + }, + { + "epoch": 0.81396484375, + "grad_norm": 0.14909076690673828, + "learning_rate": 8.84317315887489e-05, + "loss": 4.3984, + "step": 8335 + }, + { + "epoch": 0.8140625, + "grad_norm": 0.15079016983509064, + "learning_rate": 8.83925822609394e-05, + "loss": 4.4023, + "step": 8336 + }, + { + "epoch": 0.81416015625, + "grad_norm": 0.14916661381721497, + "learning_rate": 8.835345102345258e-05, + "loss": 4.3984, + "step": 8337 + }, + { + "epoch": 0.8142578125, + "grad_norm": 0.1451139897108078, + "learning_rate": 8.831433788008186e-05, + "loss": 4.3828, + "step": 8338 + }, + { + "epoch": 0.81435546875, + "grad_norm": 0.1560429334640503, + "learning_rate": 8.82752428346191e-05, + "loss": 4.3906, + "step": 8339 + }, + { + "epoch": 0.814453125, + "grad_norm": 0.14784082770347595, + "learning_rate": 8.823616589085421e-05, + "loss": 4.418, + "step": 8340 + }, + { + "epoch": 0.81455078125, + "grad_norm": 0.1507742553949356, + "learning_rate": 8.819710705257553e-05, + "loss": 4.3984, + "step": 8341 + }, + { + "epoch": 0.8146484375, + "grad_norm": 0.1496204137802124, + "learning_rate": 8.815806632356943e-05, + "loss": 4.4102, + "step": 8342 + }, + { + "epoch": 0.81474609375, + "grad_norm": 0.14804889261722565, + "learning_rate": 8.811904370762068e-05, + 
"loss": 4.4062, + "step": 8343 + }, + { + "epoch": 0.81484375, + "grad_norm": 0.15219514071941376, + "learning_rate": 8.808003920851229e-05, + "loss": 4.4023, + "step": 8344 + }, + { + "epoch": 0.81494140625, + "grad_norm": 0.14785046875476837, + "learning_rate": 8.804105283002548e-05, + "loss": 4.4023, + "step": 8345 + }, + { + "epoch": 0.8150390625, + "grad_norm": 0.14543917775154114, + "learning_rate": 8.800208457593969e-05, + "loss": 4.4219, + "step": 8346 + }, + { + "epoch": 0.81513671875, + "grad_norm": 0.14742140471935272, + "learning_rate": 8.796313445003265e-05, + "loss": 4.4141, + "step": 8347 + }, + { + "epoch": 0.815234375, + "grad_norm": 0.15298697352409363, + "learning_rate": 8.79242024560803e-05, + "loss": 4.4336, + "step": 8348 + }, + { + "epoch": 0.81533203125, + "grad_norm": 0.14873236417770386, + "learning_rate": 8.788528859785682e-05, + "loss": 4.3633, + "step": 8349 + }, + { + "epoch": 0.8154296875, + "grad_norm": 0.14457812905311584, + "learning_rate": 8.784639287913473e-05, + "loss": 4.3867, + "step": 8350 + }, + { + "epoch": 0.81552734375, + "grad_norm": 0.142641082406044, + "learning_rate": 8.780751530368458e-05, + "loss": 4.4336, + "step": 8351 + }, + { + "epoch": 0.815625, + "grad_norm": 0.14739105105400085, + "learning_rate": 8.776865587527536e-05, + "loss": 4.4336, + "step": 8352 + }, + { + "epoch": 0.81572265625, + "grad_norm": 0.14988969266414642, + "learning_rate": 8.772981459767417e-05, + "loss": 4.4297, + "step": 8353 + }, + { + "epoch": 0.8158203125, + "grad_norm": 0.1504499763250351, + "learning_rate": 8.769099147464649e-05, + "loss": 4.4062, + "step": 8354 + }, + { + "epoch": 0.81591796875, + "grad_norm": 0.14907068014144897, + "learning_rate": 8.765218650995591e-05, + "loss": 4.3789, + "step": 8355 + }, + { + "epoch": 0.816015625, + "grad_norm": 0.142002135515213, + "learning_rate": 8.761339970736426e-05, + "loss": 4.4023, + "step": 8356 + }, + { + "epoch": 0.81611328125, + "grad_norm": 0.1477532833814621, + "learning_rate": 8.75746310706318e-05, + "loss": 4.3906, + "step": 8357 + }, + { + "epoch": 0.8162109375, + "grad_norm": 0.15283441543579102, + "learning_rate": 8.753588060351678e-05, + "loss": 4.4219, + "step": 8358 + }, + { + "epoch": 0.81630859375, + "grad_norm": 0.14436016976833344, + "learning_rate": 8.749714830977579e-05, + "loss": 4.3984, + "step": 8359 + }, + { + "epoch": 0.81640625, + "grad_norm": 0.14846009016036987, + "learning_rate": 8.745843419316367e-05, + "loss": 4.3906, + "step": 8360 + }, + { + "epoch": 0.81650390625, + "grad_norm": 0.15106524527072906, + "learning_rate": 8.741973825743351e-05, + "loss": 4.4102, + "step": 8361 + }, + { + "epoch": 0.8166015625, + "grad_norm": 0.15190380811691284, + "learning_rate": 8.738106050633662e-05, + "loss": 4.4141, + "step": 8362 + }, + { + "epoch": 0.81669921875, + "grad_norm": 0.1482548713684082, + "learning_rate": 8.734240094362253e-05, + "loss": 4.3789, + "step": 8363 + }, + { + "epoch": 0.816796875, + "grad_norm": 0.14853043854236603, + "learning_rate": 8.730375957303904e-05, + "loss": 4.3945, + "step": 8364 + }, + { + "epoch": 0.81689453125, + "grad_norm": 0.15512976050376892, + "learning_rate": 8.726513639833219e-05, + "loss": 4.4102, + "step": 8365 + }, + { + "epoch": 0.8169921875, + "grad_norm": 0.1410241276025772, + "learning_rate": 8.722653142324616e-05, + "loss": 4.3672, + "step": 8366 + }, + { + "epoch": 0.81708984375, + "grad_norm": 0.1452711969614029, + "learning_rate": 8.718794465152358e-05, + "loss": 4.4375, + "step": 8367 + }, + { + "epoch": 0.8171875, + "grad_norm": 
0.14862869679927826, + "learning_rate": 8.714937608690499e-05, + "loss": 4.4062, + "step": 8368 + }, + { + "epoch": 0.81728515625, + "grad_norm": 0.14542156457901, + "learning_rate": 8.711082573312947e-05, + "loss": 4.3672, + "step": 8369 + }, + { + "epoch": 0.8173828125, + "grad_norm": 0.142251119017601, + "learning_rate": 8.707229359393418e-05, + "loss": 4.4219, + "step": 8370 + }, + { + "epoch": 0.81748046875, + "grad_norm": 0.15031582117080688, + "learning_rate": 8.703377967305456e-05, + "loss": 4.4023, + "step": 8371 + }, + { + "epoch": 0.817578125, + "grad_norm": 0.14423628151416779, + "learning_rate": 8.699528397422424e-05, + "loss": 4.4023, + "step": 8372 + }, + { + "epoch": 0.81767578125, + "grad_norm": 0.14724406599998474, + "learning_rate": 8.69568065011752e-05, + "loss": 4.375, + "step": 8373 + }, + { + "epoch": 0.8177734375, + "grad_norm": 0.1523314118385315, + "learning_rate": 8.691834725763748e-05, + "loss": 4.4062, + "step": 8374 + }, + { + "epoch": 0.81787109375, + "grad_norm": 0.14653252065181732, + "learning_rate": 8.687990624733955e-05, + "loss": 4.4102, + "step": 8375 + }, + { + "epoch": 0.81796875, + "grad_norm": 0.13658571243286133, + "learning_rate": 8.684148347400786e-05, + "loss": 4.3867, + "step": 8376 + }, + { + "epoch": 0.81806640625, + "grad_norm": 0.1457984745502472, + "learning_rate": 8.680307894136732e-05, + "loss": 4.4062, + "step": 8377 + }, + { + "epoch": 0.8181640625, + "grad_norm": 0.14724700152873993, + "learning_rate": 8.676469265314099e-05, + "loss": 4.4219, + "step": 8378 + }, + { + "epoch": 0.81826171875, + "grad_norm": 0.1458573043346405, + "learning_rate": 8.672632461305014e-05, + "loss": 4.4375, + "step": 8379 + }, + { + "epoch": 0.818359375, + "grad_norm": 0.1481630951166153, + "learning_rate": 8.66879748248143e-05, + "loss": 4.4219, + "step": 8380 + }, + { + "epoch": 0.81845703125, + "grad_norm": 0.14647211134433746, + "learning_rate": 8.664964329215125e-05, + "loss": 4.4102, + "step": 8381 + }, + { + "epoch": 0.8185546875, + "grad_norm": 0.15382546186447144, + "learning_rate": 8.66113300187769e-05, + "loss": 4.418, + "step": 8382 + }, + { + "epoch": 0.81865234375, + "grad_norm": 0.15214848518371582, + "learning_rate": 8.65730350084056e-05, + "loss": 4.418, + "step": 8383 + }, + { + "epoch": 0.81875, + "grad_norm": 0.15080559253692627, + "learning_rate": 8.653475826474964e-05, + "loss": 4.4102, + "step": 8384 + }, + { + "epoch": 0.81884765625, + "grad_norm": 0.1520901918411255, + "learning_rate": 8.649649979151974e-05, + "loss": 4.4375, + "step": 8385 + }, + { + "epoch": 0.8189453125, + "grad_norm": 0.14866188168525696, + "learning_rate": 8.645825959242479e-05, + "loss": 4.3867, + "step": 8386 + }, + { + "epoch": 0.81904296875, + "grad_norm": 0.14664621651172638, + "learning_rate": 8.642003767117196e-05, + "loss": 4.375, + "step": 8387 + }, + { + "epoch": 0.819140625, + "grad_norm": 0.15419548749923706, + "learning_rate": 8.638183403146657e-05, + "loss": 4.3711, + "step": 8388 + }, + { + "epoch": 0.81923828125, + "grad_norm": 0.14892062544822693, + "learning_rate": 8.634364867701222e-05, + "loss": 4.3789, + "step": 8389 + }, + { + "epoch": 0.8193359375, + "grad_norm": 0.15176373720169067, + "learning_rate": 8.63054816115107e-05, + "loss": 4.3867, + "step": 8390 + }, + { + "epoch": 0.81943359375, + "grad_norm": 0.14479543268680573, + "learning_rate": 8.626733283866209e-05, + "loss": 4.3789, + "step": 8391 + }, + { + "epoch": 0.81953125, + "grad_norm": 0.14334683120250702, + "learning_rate": 8.622920236216467e-05, + "loss": 4.3867, + "step": 
8392 + }, + { + "epoch": 0.81962890625, + "grad_norm": 0.15585671365261078, + "learning_rate": 8.619109018571483e-05, + "loss": 4.4141, + "step": 8393 + }, + { + "epoch": 0.8197265625, + "grad_norm": 0.14535905420780182, + "learning_rate": 8.615299631300738e-05, + "loss": 4.4062, + "step": 8394 + }, + { + "epoch": 0.81982421875, + "grad_norm": 0.1437869518995285, + "learning_rate": 8.611492074773519e-05, + "loss": 4.3789, + "step": 8395 + }, + { + "epoch": 0.819921875, + "grad_norm": 0.14454060792922974, + "learning_rate": 8.607686349358949e-05, + "loss": 4.4336, + "step": 8396 + }, + { + "epoch": 0.82001953125, + "grad_norm": 0.14415159821510315, + "learning_rate": 8.603882455425962e-05, + "loss": 4.3945, + "step": 8397 + }, + { + "epoch": 0.8201171875, + "grad_norm": 0.1455949991941452, + "learning_rate": 8.600080393343327e-05, + "loss": 4.3789, + "step": 8398 + }, + { + "epoch": 0.82021484375, + "grad_norm": 0.1499527245759964, + "learning_rate": 8.596280163479621e-05, + "loss": 4.3672, + "step": 8399 + }, + { + "epoch": 0.8203125, + "grad_norm": 0.1479802429676056, + "learning_rate": 8.592481766203256e-05, + "loss": 4.3555, + "step": 8400 + }, + { + "epoch": 0.82041015625, + "grad_norm": 0.14630453288555145, + "learning_rate": 8.588685201882458e-05, + "loss": 4.3984, + "step": 8401 + }, + { + "epoch": 0.8205078125, + "grad_norm": 0.14667737483978271, + "learning_rate": 8.584890470885276e-05, + "loss": 4.3945, + "step": 8402 + }, + { + "epoch": 0.82060546875, + "grad_norm": 0.14361552894115448, + "learning_rate": 8.581097573579585e-05, + "loss": 4.418, + "step": 8403 + }, + { + "epoch": 0.820703125, + "grad_norm": 0.14567016065120697, + "learning_rate": 8.577306510333083e-05, + "loss": 4.4258, + "step": 8404 + }, + { + "epoch": 0.82080078125, + "grad_norm": 0.14326070249080658, + "learning_rate": 8.573517281513283e-05, + "loss": 4.3867, + "step": 8405 + }, + { + "epoch": 0.8208984375, + "grad_norm": 0.1462428718805313, + "learning_rate": 8.569729887487529e-05, + "loss": 4.4141, + "step": 8406 + }, + { + "epoch": 0.82099609375, + "grad_norm": 0.14323654770851135, + "learning_rate": 8.565944328622985e-05, + "loss": 4.3828, + "step": 8407 + }, + { + "epoch": 0.82109375, + "grad_norm": 0.14923126995563507, + "learning_rate": 8.562160605286628e-05, + "loss": 4.3672, + "step": 8408 + }, + { + "epoch": 0.82119140625, + "grad_norm": 0.1468546986579895, + "learning_rate": 8.55837871784528e-05, + "loss": 4.4375, + "step": 8409 + }, + { + "epoch": 0.8212890625, + "grad_norm": 0.1528107076883316, + "learning_rate": 8.554598666665548e-05, + "loss": 4.3633, + "step": 8410 + }, + { + "epoch": 0.82138671875, + "grad_norm": 0.1496308445930481, + "learning_rate": 8.550820452113897e-05, + "loss": 4.3945, + "step": 8411 + }, + { + "epoch": 0.821484375, + "grad_norm": 0.14624497294425964, + "learning_rate": 8.547044074556589e-05, + "loss": 4.4062, + "step": 8412 + }, + { + "epoch": 0.82158203125, + "grad_norm": 0.14897938072681427, + "learning_rate": 8.543269534359727e-05, + "loss": 4.3945, + "step": 8413 + }, + { + "epoch": 0.8216796875, + "grad_norm": 0.1496768593788147, + "learning_rate": 8.539496831889224e-05, + "loss": 4.3906, + "step": 8414 + }, + { + "epoch": 0.82177734375, + "grad_norm": 0.14504413306713104, + "learning_rate": 8.535725967510819e-05, + "loss": 4.4023, + "step": 8415 + }, + { + "epoch": 0.821875, + "grad_norm": 0.15636447072029114, + "learning_rate": 8.531956941590069e-05, + "loss": 4.3672, + "step": 8416 + }, + { + "epoch": 0.82197265625, + "grad_norm": 0.15391775965690613, + 
"learning_rate": 8.528189754492361e-05, + "loss": 4.418, + "step": 8417 + }, + { + "epoch": 0.8220703125, + "grad_norm": 0.14721357822418213, + "learning_rate": 8.52442440658289e-05, + "loss": 4.3906, + "step": 8418 + }, + { + "epoch": 0.82216796875, + "grad_norm": 0.14343221485614777, + "learning_rate": 8.520660898226685e-05, + "loss": 4.3945, + "step": 8419 + }, + { + "epoch": 0.822265625, + "grad_norm": 0.15703362226486206, + "learning_rate": 8.516899229788594e-05, + "loss": 4.4102, + "step": 8420 + }, + { + "epoch": 0.82236328125, + "grad_norm": 0.15323278307914734, + "learning_rate": 8.513139401633282e-05, + "loss": 4.375, + "step": 8421 + }, + { + "epoch": 0.8224609375, + "grad_norm": 0.14867658913135529, + "learning_rate": 8.509381414125243e-05, + "loss": 4.4062, + "step": 8422 + }, + { + "epoch": 0.82255859375, + "grad_norm": 0.1472143679857254, + "learning_rate": 8.505625267628783e-05, + "loss": 4.3945, + "step": 8423 + }, + { + "epoch": 0.82265625, + "grad_norm": 0.1462089717388153, + "learning_rate": 8.501870962508044e-05, + "loss": 4.3828, + "step": 8424 + }, + { + "epoch": 0.82275390625, + "grad_norm": 0.15283389389514923, + "learning_rate": 8.498118499126973e-05, + "loss": 4.3984, + "step": 8425 + }, + { + "epoch": 0.8228515625, + "grad_norm": 0.1459270715713501, + "learning_rate": 8.494367877849346e-05, + "loss": 4.3477, + "step": 8426 + }, + { + "epoch": 0.82294921875, + "grad_norm": 0.1534598171710968, + "learning_rate": 8.490619099038763e-05, + "loss": 4.3555, + "step": 8427 + }, + { + "epoch": 0.823046875, + "grad_norm": 0.14976857602596283, + "learning_rate": 8.486872163058637e-05, + "loss": 4.3984, + "step": 8428 + }, + { + "epoch": 0.82314453125, + "grad_norm": 0.1552300602197647, + "learning_rate": 8.483127070272215e-05, + "loss": 4.4023, + "step": 8429 + }, + { + "epoch": 0.8232421875, + "grad_norm": 0.13991516828536987, + "learning_rate": 8.479383821042555e-05, + "loss": 4.4102, + "step": 8430 + }, + { + "epoch": 0.82333984375, + "grad_norm": 0.15724730491638184, + "learning_rate": 8.47564241573254e-05, + "loss": 4.4219, + "step": 8431 + }, + { + "epoch": 0.8234375, + "grad_norm": 0.14747129380702972, + "learning_rate": 8.471902854704874e-05, + "loss": 4.3906, + "step": 8432 + }, + { + "epoch": 0.82353515625, + "grad_norm": 0.15061767399311066, + "learning_rate": 8.468165138322082e-05, + "loss": 4.4336, + "step": 8433 + }, + { + "epoch": 0.8236328125, + "grad_norm": 0.14903011918067932, + "learning_rate": 8.464429266946516e-05, + "loss": 4.4102, + "step": 8434 + }, + { + "epoch": 0.82373046875, + "grad_norm": 0.15653152763843536, + "learning_rate": 8.460695240940333e-05, + "loss": 4.3984, + "step": 8435 + }, + { + "epoch": 0.823828125, + "grad_norm": 0.1488579362630844, + "learning_rate": 8.456963060665528e-05, + "loss": 4.4102, + "step": 8436 + }, + { + "epoch": 0.82392578125, + "grad_norm": 0.14923426508903503, + "learning_rate": 8.453232726483903e-05, + "loss": 4.4062, + "step": 8437 + }, + { + "epoch": 0.8240234375, + "grad_norm": 0.14702695608139038, + "learning_rate": 8.4495042387571e-05, + "loss": 4.4062, + "step": 8438 + }, + { + "epoch": 0.82412109375, + "grad_norm": 0.14728863537311554, + "learning_rate": 8.445777597846563e-05, + "loss": 4.3711, + "step": 8439 + }, + { + "epoch": 0.82421875, + "grad_norm": 0.15192754566669464, + "learning_rate": 8.442052804113567e-05, + "loss": 4.4141, + "step": 8440 + }, + { + "epoch": 0.82431640625, + "grad_norm": 0.14320500195026398, + "learning_rate": 8.438329857919202e-05, + "loss": 4.4141, + "step": 8441 + }, + { + 
"epoch": 0.8244140625, + "grad_norm": 0.15218737721443176, + "learning_rate": 8.434608759624396e-05, + "loss": 4.4414, + "step": 8442 + }, + { + "epoch": 0.82451171875, + "grad_norm": 0.1482577919960022, + "learning_rate": 8.430889509589865e-05, + "loss": 4.4102, + "step": 8443 + }, + { + "epoch": 0.824609375, + "grad_norm": 0.1506595015525818, + "learning_rate": 8.427172108176173e-05, + "loss": 4.3906, + "step": 8444 + }, + { + "epoch": 0.82470703125, + "grad_norm": 0.14133042097091675, + "learning_rate": 8.4234565557437e-05, + "loss": 4.4258, + "step": 8445 + }, + { + "epoch": 0.8248046875, + "grad_norm": 0.14318174123764038, + "learning_rate": 8.419742852652636e-05, + "loss": 4.4258, + "step": 8446 + }, + { + "epoch": 0.82490234375, + "grad_norm": 0.1452440768480301, + "learning_rate": 8.416030999263008e-05, + "loss": 4.3828, + "step": 8447 + }, + { + "epoch": 0.825, + "grad_norm": 0.14919087290763855, + "learning_rate": 8.412320995934653e-05, + "loss": 4.4062, + "step": 8448 + }, + { + "epoch": 0.82509765625, + "grad_norm": 0.15016934275627136, + "learning_rate": 8.408612843027227e-05, + "loss": 4.4258, + "step": 8449 + }, + { + "epoch": 0.8251953125, + "grad_norm": 0.14896786212921143, + "learning_rate": 8.404906540900212e-05, + "loss": 4.3906, + "step": 8450 + }, + { + "epoch": 0.82529296875, + "grad_norm": 0.15203703939914703, + "learning_rate": 8.401202089912916e-05, + "loss": 4.4023, + "step": 8451 + }, + { + "epoch": 0.825390625, + "grad_norm": 0.14883556962013245, + "learning_rate": 8.397499490424447e-05, + "loss": 4.3984, + "step": 8452 + }, + { + "epoch": 0.82548828125, + "grad_norm": 0.1515895277261734, + "learning_rate": 8.393798742793753e-05, + "loss": 4.3828, + "step": 8453 + }, + { + "epoch": 0.8255859375, + "grad_norm": 0.15109430253505707, + "learning_rate": 8.390099847379596e-05, + "loss": 4.4062, + "step": 8454 + }, + { + "epoch": 0.82568359375, + "grad_norm": 0.1467689871788025, + "learning_rate": 8.38640280454056e-05, + "loss": 4.3789, + "step": 8455 + }, + { + "epoch": 0.82578125, + "grad_norm": 0.15915092825889587, + "learning_rate": 8.382707614635049e-05, + "loss": 4.3945, + "step": 8456 + }, + { + "epoch": 0.82587890625, + "grad_norm": 0.1516105979681015, + "learning_rate": 8.379014278021282e-05, + "loss": 4.3867, + "step": 8457 + }, + { + "epoch": 0.8259765625, + "grad_norm": 0.14641280472278595, + "learning_rate": 8.375322795057311e-05, + "loss": 4.3633, + "step": 8458 + }, + { + "epoch": 0.82607421875, + "grad_norm": 0.15036682784557343, + "learning_rate": 8.371633166100997e-05, + "loss": 4.3984, + "step": 8459 + }, + { + "epoch": 0.826171875, + "grad_norm": 0.1541857123374939, + "learning_rate": 8.36794539151002e-05, + "loss": 4.4023, + "step": 8460 + }, + { + "epoch": 0.82626953125, + "grad_norm": 0.14400732517242432, + "learning_rate": 8.364259471641886e-05, + "loss": 4.3984, + "step": 8461 + }, + { + "epoch": 0.8263671875, + "grad_norm": 0.15277965366840363, + "learning_rate": 8.360575406853923e-05, + "loss": 4.4023, + "step": 8462 + }, + { + "epoch": 0.82646484375, + "grad_norm": 0.1519594043493271, + "learning_rate": 8.356893197503273e-05, + "loss": 4.4141, + "step": 8463 + }, + { + "epoch": 0.8265625, + "grad_norm": 0.14246155321598053, + "learning_rate": 8.353212843946905e-05, + "loss": 4.3789, + "step": 8464 + }, + { + "epoch": 0.82666015625, + "grad_norm": 0.15056808292865753, + "learning_rate": 8.349534346541599e-05, + "loss": 4.3633, + "step": 8465 + }, + { + "epoch": 0.8267578125, + "grad_norm": 0.1505192369222641, + "learning_rate": 
8.345857705643965e-05, + "loss": 4.4102, + "step": 8466 + }, + { + "epoch": 0.82685546875, + "grad_norm": 0.14451128244400024, + "learning_rate": 8.342182921610428e-05, + "loss": 4.4062, + "step": 8467 + }, + { + "epoch": 0.826953125, + "grad_norm": 0.14722764492034912, + "learning_rate": 8.338509994797236e-05, + "loss": 4.3828, + "step": 8468 + }, + { + "epoch": 0.82705078125, + "grad_norm": 0.14576074481010437, + "learning_rate": 8.334838925560448e-05, + "loss": 4.3984, + "step": 8469 + }, + { + "epoch": 0.8271484375, + "grad_norm": 0.1460888832807541, + "learning_rate": 8.331169714255949e-05, + "loss": 4.4023, + "step": 8470 + }, + { + "epoch": 0.82724609375, + "grad_norm": 0.14844797551631927, + "learning_rate": 8.327502361239449e-05, + "loss": 4.3906, + "step": 8471 + }, + { + "epoch": 0.82734375, + "grad_norm": 0.1506887674331665, + "learning_rate": 8.323836866866471e-05, + "loss": 4.3789, + "step": 8472 + }, + { + "epoch": 0.82744140625, + "grad_norm": 0.1436951607465744, + "learning_rate": 8.320173231492356e-05, + "loss": 4.375, + "step": 8473 + }, + { + "epoch": 0.8275390625, + "grad_norm": 0.14757156372070312, + "learning_rate": 8.316511455472276e-05, + "loss": 4.3984, + "step": 8474 + }, + { + "epoch": 0.82763671875, + "grad_norm": 0.14769572019577026, + "learning_rate": 8.312851539161209e-05, + "loss": 4.3867, + "step": 8475 + }, + { + "epoch": 0.827734375, + "grad_norm": 0.14559946954250336, + "learning_rate": 8.309193482913971e-05, + "loss": 4.3711, + "step": 8476 + }, + { + "epoch": 0.82783203125, + "grad_norm": 0.15453752875328064, + "learning_rate": 8.305537287085168e-05, + "loss": 4.418, + "step": 8477 + }, + { + "epoch": 0.8279296875, + "grad_norm": 0.15838053822517395, + "learning_rate": 8.301882952029253e-05, + "loss": 4.4102, + "step": 8478 + }, + { + "epoch": 0.82802734375, + "grad_norm": 0.14847728610038757, + "learning_rate": 8.298230478100485e-05, + "loss": 4.4141, + "step": 8479 + }, + { + "epoch": 0.828125, + "grad_norm": 0.14959578216075897, + "learning_rate": 8.294579865652954e-05, + "loss": 4.3789, + "step": 8480 + }, + { + "epoch": 0.82822265625, + "grad_norm": 0.15474185347557068, + "learning_rate": 8.290931115040553e-05, + "loss": 4.3477, + "step": 8481 + }, + { + "epoch": 0.8283203125, + "grad_norm": 0.14934295415878296, + "learning_rate": 8.287284226617011e-05, + "loss": 4.4141, + "step": 8482 + }, + { + "epoch": 0.82841796875, + "grad_norm": 0.1541387289762497, + "learning_rate": 8.283639200735867e-05, + "loss": 4.3867, + "step": 8483 + }, + { + "epoch": 0.828515625, + "grad_norm": 0.15473029017448425, + "learning_rate": 8.279996037750481e-05, + "loss": 4.3828, + "step": 8484 + }, + { + "epoch": 0.82861328125, + "grad_norm": 0.15064312517642975, + "learning_rate": 8.276354738014033e-05, + "loss": 4.3945, + "step": 8485 + }, + { + "epoch": 0.8287109375, + "grad_norm": 0.14623694121837616, + "learning_rate": 8.272715301879518e-05, + "loss": 4.3828, + "step": 8486 + }, + { + "epoch": 0.82880859375, + "grad_norm": 0.1727069467306137, + "learning_rate": 8.26907772969976e-05, + "loss": 4.3633, + "step": 8487 + }, + { + "epoch": 0.82890625, + "grad_norm": 0.1530914157629013, + "learning_rate": 8.265442021827398e-05, + "loss": 4.4062, + "step": 8488 + }, + { + "epoch": 0.82900390625, + "grad_norm": 0.15141765773296356, + "learning_rate": 8.261808178614882e-05, + "loss": 4.3945, + "step": 8489 + }, + { + "epoch": 0.8291015625, + "grad_norm": 0.15107151865959167, + "learning_rate": 8.258176200414495e-05, + "loss": 4.4141, + "step": 8490 + }, + { + "epoch": 
0.82919921875, + "grad_norm": 0.1504492610692978, + "learning_rate": 8.254546087578333e-05, + "loss": 4.418, + "step": 8491 + }, + { + "epoch": 0.829296875, + "grad_norm": 0.14272019267082214, + "learning_rate": 8.250917840458305e-05, + "loss": 4.4023, + "step": 8492 + }, + { + "epoch": 0.82939453125, + "grad_norm": 0.15067419409751892, + "learning_rate": 8.247291459406149e-05, + "loss": 4.3828, + "step": 8493 + }, + { + "epoch": 0.8294921875, + "grad_norm": 0.17001837491989136, + "learning_rate": 8.243666944773425e-05, + "loss": 4.4141, + "step": 8494 + }, + { + "epoch": 0.82958984375, + "grad_norm": 0.15283799171447754, + "learning_rate": 8.240044296911489e-05, + "loss": 4.3945, + "step": 8495 + }, + { + "epoch": 0.8296875, + "grad_norm": 0.15003462135791779, + "learning_rate": 8.236423516171546e-05, + "loss": 4.3906, + "step": 8496 + }, + { + "epoch": 0.82978515625, + "grad_norm": 0.15087684988975525, + "learning_rate": 8.232804602904596e-05, + "loss": 4.4453, + "step": 8497 + }, + { + "epoch": 0.8298828125, + "grad_norm": 0.15625736117362976, + "learning_rate": 8.229187557461474e-05, + "loss": 4.4141, + "step": 8498 + }, + { + "epoch": 0.82998046875, + "grad_norm": 0.15333178639411926, + "learning_rate": 8.225572380192828e-05, + "loss": 4.4336, + "step": 8499 + }, + { + "epoch": 0.830078125, + "grad_norm": 0.14861641824245453, + "learning_rate": 8.221959071449123e-05, + "loss": 4.4023, + "step": 8500 + }, + { + "epoch": 0.83017578125, + "grad_norm": 0.14887414872646332, + "learning_rate": 8.218347631580645e-05, + "loss": 4.3828, + "step": 8501 + }, + { + "epoch": 0.8302734375, + "grad_norm": 0.1478920876979828, + "learning_rate": 8.214738060937499e-05, + "loss": 4.4023, + "step": 8502 + }, + { + "epoch": 0.83037109375, + "grad_norm": 0.15426205098628998, + "learning_rate": 8.21113035986961e-05, + "loss": 4.3906, + "step": 8503 + }, + { + "epoch": 0.83046875, + "grad_norm": 0.14978408813476562, + "learning_rate": 8.207524528726723e-05, + "loss": 4.3828, + "step": 8504 + }, + { + "epoch": 0.83056640625, + "grad_norm": 0.14897862076759338, + "learning_rate": 8.203920567858391e-05, + "loss": 4.3594, + "step": 8505 + }, + { + "epoch": 0.8306640625, + "grad_norm": 0.14825929701328278, + "learning_rate": 8.200318477613994e-05, + "loss": 4.3945, + "step": 8506 + }, + { + "epoch": 0.83076171875, + "grad_norm": 0.1436927169561386, + "learning_rate": 8.196718258342735e-05, + "loss": 4.4023, + "step": 8507 + }, + { + "epoch": 0.830859375, + "grad_norm": 0.14366796612739563, + "learning_rate": 8.193119910393629e-05, + "loss": 4.3906, + "step": 8508 + }, + { + "epoch": 0.83095703125, + "grad_norm": 0.15228915214538574, + "learning_rate": 8.18952343411551e-05, + "loss": 4.3867, + "step": 8509 + }, + { + "epoch": 0.8310546875, + "grad_norm": 0.1509367823600769, + "learning_rate": 8.185928829857035e-05, + "loss": 4.3828, + "step": 8510 + }, + { + "epoch": 0.83115234375, + "grad_norm": 0.14700588583946228, + "learning_rate": 8.182336097966675e-05, + "loss": 4.375, + "step": 8511 + }, + { + "epoch": 0.83125, + "grad_norm": 0.14659729599952698, + "learning_rate": 8.17874523879272e-05, + "loss": 4.3984, + "step": 8512 + }, + { + "epoch": 0.83134765625, + "grad_norm": 0.14903570711612701, + "learning_rate": 8.175156252683282e-05, + "loss": 4.4414, + "step": 8513 + }, + { + "epoch": 0.8314453125, + "grad_norm": 0.14319346845149994, + "learning_rate": 8.171569139986293e-05, + "loss": 4.4141, + "step": 8514 + }, + { + "epoch": 0.83154296875, + "grad_norm": 0.150045245885849, + "learning_rate": 
8.167983901049486e-05, + "loss": 4.4062, + "step": 8515 + }, + { + "epoch": 0.831640625, + "grad_norm": 0.15064272284507751, + "learning_rate": 8.164400536220434e-05, + "loss": 4.3789, + "step": 8516 + }, + { + "epoch": 0.83173828125, + "grad_norm": 0.15415436029434204, + "learning_rate": 8.160819045846518e-05, + "loss": 4.4062, + "step": 8517 + }, + { + "epoch": 0.8318359375, + "grad_norm": 0.1513679027557373, + "learning_rate": 8.15723943027494e-05, + "loss": 4.3867, + "step": 8518 + }, + { + "epoch": 0.83193359375, + "grad_norm": 0.1530097872018814, + "learning_rate": 8.15366168985272e-05, + "loss": 4.3945, + "step": 8519 + }, + { + "epoch": 0.83203125, + "grad_norm": 0.1504172384738922, + "learning_rate": 8.150085824926696e-05, + "loss": 4.3984, + "step": 8520 + }, + { + "epoch": 0.83212890625, + "grad_norm": 0.15814021229743958, + "learning_rate": 8.146511835843523e-05, + "loss": 4.3711, + "step": 8521 + }, + { + "epoch": 0.8322265625, + "grad_norm": 0.14908240735530853, + "learning_rate": 8.142939722949674e-05, + "loss": 4.4062, + "step": 8522 + }, + { + "epoch": 0.83232421875, + "grad_norm": 0.14859454333782196, + "learning_rate": 8.139369486591444e-05, + "loss": 4.3945, + "step": 8523 + }, + { + "epoch": 0.832421875, + "grad_norm": 0.14795337617397308, + "learning_rate": 8.135801127114945e-05, + "loss": 4.4297, + "step": 8524 + }, + { + "epoch": 0.83251953125, + "grad_norm": 0.14547914266586304, + "learning_rate": 8.132234644866096e-05, + "loss": 4.3516, + "step": 8525 + }, + { + "epoch": 0.8326171875, + "grad_norm": 0.14672920107841492, + "learning_rate": 8.128670040190647e-05, + "loss": 4.4219, + "step": 8526 + }, + { + "epoch": 0.83271484375, + "grad_norm": 0.15000534057617188, + "learning_rate": 8.125107313434168e-05, + "loss": 4.3984, + "step": 8527 + }, + { + "epoch": 0.8328125, + "grad_norm": 0.15047216415405273, + "learning_rate": 8.121546464942035e-05, + "loss": 4.4219, + "step": 8528 + }, + { + "epoch": 0.83291015625, + "grad_norm": 0.1505347341299057, + "learning_rate": 8.117987495059451e-05, + "loss": 4.418, + "step": 8529 + }, + { + "epoch": 0.8330078125, + "grad_norm": 0.14585089683532715, + "learning_rate": 8.114430404131432e-05, + "loss": 4.3906, + "step": 8530 + }, + { + "epoch": 0.83310546875, + "grad_norm": 0.14598260819911957, + "learning_rate": 8.110875192502817e-05, + "loss": 4.3906, + "step": 8531 + }, + { + "epoch": 0.833203125, + "grad_norm": 0.14975914359092712, + "learning_rate": 8.107321860518255e-05, + "loss": 4.4141, + "step": 8532 + }, + { + "epoch": 0.83330078125, + "grad_norm": 0.14339154958724976, + "learning_rate": 8.103770408522222e-05, + "loss": 4.418, + "step": 8533 + }, + { + "epoch": 0.8333984375, + "grad_norm": 0.1483711451292038, + "learning_rate": 8.100220836859009e-05, + "loss": 4.4141, + "step": 8534 + }, + { + "epoch": 0.83349609375, + "grad_norm": 0.15038619935512543, + "learning_rate": 8.096673145872718e-05, + "loss": 4.4141, + "step": 8535 + }, + { + "epoch": 0.83359375, + "grad_norm": 0.14813782274723053, + "learning_rate": 8.09312733590727e-05, + "loss": 4.3945, + "step": 8536 + }, + { + "epoch": 0.83369140625, + "grad_norm": 0.14619852602481842, + "learning_rate": 8.089583407306415e-05, + "loss": 4.3789, + "step": 8537 + }, + { + "epoch": 0.8337890625, + "grad_norm": 0.14575888216495514, + "learning_rate": 8.086041360413709e-05, + "loss": 4.3945, + "step": 8538 + }, + { + "epoch": 0.83388671875, + "grad_norm": 0.14337213337421417, + "learning_rate": 8.08250119557253e-05, + "loss": 4.4062, + "step": 8539 + }, + { + "epoch": 
0.833984375, + "grad_norm": 0.13914547860622406, + "learning_rate": 8.078962913126072e-05, + "loss": 4.4297, + "step": 8540 + }, + { + "epoch": 0.83408203125, + "grad_norm": 0.14970038831233978, + "learning_rate": 8.075426513417348e-05, + "loss": 4.3789, + "step": 8541 + }, + { + "epoch": 0.8341796875, + "grad_norm": 0.13848310708999634, + "learning_rate": 8.071891996789193e-05, + "loss": 4.3789, + "step": 8542 + }, + { + "epoch": 0.83427734375, + "grad_norm": 0.13985010981559753, + "learning_rate": 8.068359363584252e-05, + "loss": 4.4141, + "step": 8543 + }, + { + "epoch": 0.834375, + "grad_norm": 0.14505670964717865, + "learning_rate": 8.064828614144981e-05, + "loss": 4.3789, + "step": 8544 + }, + { + "epoch": 0.83447265625, + "grad_norm": 0.1521347612142563, + "learning_rate": 8.06129974881367e-05, + "loss": 4.4219, + "step": 8545 + }, + { + "epoch": 0.8345703125, + "grad_norm": 0.14274001121520996, + "learning_rate": 8.057772767932418e-05, + "loss": 4.3867, + "step": 8546 + }, + { + "epoch": 0.83466796875, + "grad_norm": 0.14671821892261505, + "learning_rate": 8.054247671843143e-05, + "loss": 4.3945, + "step": 8547 + }, + { + "epoch": 0.834765625, + "grad_norm": 0.14894503355026245, + "learning_rate": 8.050724460887575e-05, + "loss": 4.4102, + "step": 8548 + }, + { + "epoch": 0.83486328125, + "grad_norm": 0.14845959842205048, + "learning_rate": 8.047203135407268e-05, + "loss": 4.4062, + "step": 8549 + }, + { + "epoch": 0.8349609375, + "grad_norm": 0.14460456371307373, + "learning_rate": 8.043683695743593e-05, + "loss": 4.4141, + "step": 8550 + }, + { + "epoch": 0.83505859375, + "grad_norm": 0.1471911072731018, + "learning_rate": 8.040166142237731e-05, + "loss": 4.3594, + "step": 8551 + }, + { + "epoch": 0.83515625, + "grad_norm": 0.1484675109386444, + "learning_rate": 8.036650475230692e-05, + "loss": 4.3945, + "step": 8552 + }, + { + "epoch": 0.83525390625, + "grad_norm": 0.15478724241256714, + "learning_rate": 8.03313669506329e-05, + "loss": 4.4141, + "step": 8553 + }, + { + "epoch": 0.8353515625, + "grad_norm": 0.143818199634552, + "learning_rate": 8.02962480207616e-05, + "loss": 4.3867, + "step": 8554 + }, + { + "epoch": 0.83544921875, + "grad_norm": 0.15131616592407227, + "learning_rate": 8.026114796609758e-05, + "loss": 4.3984, + "step": 8555 + }, + { + "epoch": 0.835546875, + "grad_norm": 0.14899776875972748, + "learning_rate": 8.02260667900436e-05, + "loss": 4.4102, + "step": 8556 + }, + { + "epoch": 0.83564453125, + "grad_norm": 0.14506785571575165, + "learning_rate": 8.01910044960005e-05, + "loss": 4.375, + "step": 8557 + }, + { + "epoch": 0.8357421875, + "grad_norm": 0.14481522142887115, + "learning_rate": 8.015596108736736e-05, + "loss": 4.3906, + "step": 8558 + }, + { + "epoch": 0.83583984375, + "grad_norm": 0.1433260440826416, + "learning_rate": 8.012093656754135e-05, + "loss": 4.4062, + "step": 8559 + }, + { + "epoch": 0.8359375, + "grad_norm": 0.1499479115009308, + "learning_rate": 8.008593093991797e-05, + "loss": 4.4023, + "step": 8560 + }, + { + "epoch": 0.83603515625, + "grad_norm": 0.1437564194202423, + "learning_rate": 8.005094420789062e-05, + "loss": 4.4102, + "step": 8561 + }, + { + "epoch": 0.8361328125, + "grad_norm": 0.15128661692142487, + "learning_rate": 8.001597637485111e-05, + "loss": 4.3945, + "step": 8562 + }, + { + "epoch": 0.83623046875, + "grad_norm": 0.14023280143737793, + "learning_rate": 7.998102744418937e-05, + "loss": 4.4141, + "step": 8563 + }, + { + "epoch": 0.836328125, + "grad_norm": 0.14726528525352478, + "learning_rate": 
7.994609741929337e-05, + "loss": 4.3867, + "step": 8564 + }, + { + "epoch": 0.83642578125, + "grad_norm": 0.14803801476955414, + "learning_rate": 7.991118630354944e-05, + "loss": 4.4102, + "step": 8565 + }, + { + "epoch": 0.8365234375, + "grad_norm": 0.1498871147632599, + "learning_rate": 7.987629410034189e-05, + "loss": 4.4219, + "step": 8566 + }, + { + "epoch": 0.83662109375, + "grad_norm": 0.14673075079917908, + "learning_rate": 7.984142081305332e-05, + "loss": 4.3789, + "step": 8567 + }, + { + "epoch": 0.83671875, + "grad_norm": 0.15336556732654572, + "learning_rate": 7.980656644506445e-05, + "loss": 4.3828, + "step": 8568 + }, + { + "epoch": 0.83681640625, + "grad_norm": 0.14878025650978088, + "learning_rate": 7.977173099975425e-05, + "loss": 4.3789, + "step": 8569 + }, + { + "epoch": 0.8369140625, + "grad_norm": 0.14910738170146942, + "learning_rate": 7.973691448049966e-05, + "loss": 4.3867, + "step": 8570 + }, + { + "epoch": 0.83701171875, + "grad_norm": 0.14056402444839478, + "learning_rate": 7.970211689067595e-05, + "loss": 4.4219, + "step": 8571 + }, + { + "epoch": 0.837109375, + "grad_norm": 0.14049795269966125, + "learning_rate": 7.966733823365652e-05, + "loss": 4.4141, + "step": 8572 + }, + { + "epoch": 0.83720703125, + "grad_norm": 0.1445598602294922, + "learning_rate": 7.963257851281294e-05, + "loss": 4.3711, + "step": 8573 + }, + { + "epoch": 0.8373046875, + "grad_norm": 0.14700119197368622, + "learning_rate": 7.959783773151489e-05, + "loss": 4.3633, + "step": 8574 + }, + { + "epoch": 0.83740234375, + "grad_norm": 0.14704354107379913, + "learning_rate": 7.956311589313028e-05, + "loss": 4.3906, + "step": 8575 + }, + { + "epoch": 0.8375, + "grad_norm": 0.15091827511787415, + "learning_rate": 7.952841300102513e-05, + "loss": 4.4141, + "step": 8576 + }, + { + "epoch": 0.83759765625, + "grad_norm": 0.14588803052902222, + "learning_rate": 7.949372905856376e-05, + "loss": 4.4023, + "step": 8577 + }, + { + "epoch": 0.8376953125, + "grad_norm": 0.14818157255649567, + "learning_rate": 7.945906406910838e-05, + "loss": 4.3633, + "step": 8578 + }, + { + "epoch": 0.83779296875, + "grad_norm": 0.15088725090026855, + "learning_rate": 7.94244180360196e-05, + "loss": 4.3828, + "step": 8579 + }, + { + "epoch": 0.837890625, + "grad_norm": 0.15612034499645233, + "learning_rate": 7.938979096265608e-05, + "loss": 4.4219, + "step": 8580 + }, + { + "epoch": 0.83798828125, + "grad_norm": 0.1515110582113266, + "learning_rate": 7.935518285237476e-05, + "loss": 4.3906, + "step": 8581 + }, + { + "epoch": 0.8380859375, + "grad_norm": 0.1440490037202835, + "learning_rate": 7.93205937085306e-05, + "loss": 4.4062, + "step": 8582 + }, + { + "epoch": 0.83818359375, + "grad_norm": 0.144277885556221, + "learning_rate": 7.92860235344768e-05, + "loss": 4.3984, + "step": 8583 + }, + { + "epoch": 0.83828125, + "grad_norm": 0.15136243402957916, + "learning_rate": 7.925147233356472e-05, + "loss": 4.4023, + "step": 8584 + }, + { + "epoch": 0.83837890625, + "grad_norm": 0.15414690971374512, + "learning_rate": 7.921694010914385e-05, + "loss": 4.3867, + "step": 8585 + }, + { + "epoch": 0.8384765625, + "grad_norm": 0.14804546535015106, + "learning_rate": 7.918242686456189e-05, + "loss": 4.4062, + "step": 8586 + }, + { + "epoch": 0.83857421875, + "grad_norm": 0.14857029914855957, + "learning_rate": 7.914793260316458e-05, + "loss": 4.3945, + "step": 8587 + }, + { + "epoch": 0.838671875, + "grad_norm": 0.14822237193584442, + "learning_rate": 7.911345732829595e-05, + "loss": 4.3945, + "step": 8588 + }, + { + "epoch": 
0.83876953125, + "grad_norm": 0.1456032246351242, + "learning_rate": 7.907900104329813e-05, + "loss": 4.4141, + "step": 8589 + }, + { + "epoch": 0.8388671875, + "grad_norm": 0.14805404841899872, + "learning_rate": 7.904456375151148e-05, + "loss": 4.3867, + "step": 8590 + }, + { + "epoch": 0.83896484375, + "grad_norm": 0.15413834154605865, + "learning_rate": 7.901014545627439e-05, + "loss": 4.3711, + "step": 8591 + }, + { + "epoch": 0.8390625, + "grad_norm": 0.14247456192970276, + "learning_rate": 7.89757461609235e-05, + "loss": 4.4219, + "step": 8592 + }, + { + "epoch": 0.83916015625, + "grad_norm": 0.1449350267648697, + "learning_rate": 7.894136586879362e-05, + "loss": 4.3789, + "step": 8593 + }, + { + "epoch": 0.8392578125, + "grad_norm": 0.14833520352840424, + "learning_rate": 7.890700458321771e-05, + "loss": 4.418, + "step": 8594 + }, + { + "epoch": 0.83935546875, + "grad_norm": 0.14987240731716156, + "learning_rate": 7.887266230752677e-05, + "loss": 4.4414, + "step": 8595 + }, + { + "epoch": 0.839453125, + "grad_norm": 0.17268246412277222, + "learning_rate": 7.883833904505011e-05, + "loss": 4.4062, + "step": 8596 + }, + { + "epoch": 0.83955078125, + "grad_norm": 0.14990727603435516, + "learning_rate": 7.880403479911511e-05, + "loss": 4.3867, + "step": 8597 + }, + { + "epoch": 0.8396484375, + "grad_norm": 0.15497811138629913, + "learning_rate": 7.876974957304737e-05, + "loss": 4.3945, + "step": 8598 + }, + { + "epoch": 0.83974609375, + "grad_norm": 0.14399217069149017, + "learning_rate": 7.873548337017057e-05, + "loss": 4.3945, + "step": 8599 + }, + { + "epoch": 0.83984375, + "grad_norm": 0.14331316947937012, + "learning_rate": 7.87012361938066e-05, + "loss": 4.3945, + "step": 8600 + }, + { + "epoch": 0.83994140625, + "grad_norm": 0.1617630124092102, + "learning_rate": 7.86670080472755e-05, + "loss": 4.3828, + "step": 8601 + }, + { + "epoch": 0.8400390625, + "grad_norm": 0.1450270563364029, + "learning_rate": 7.863279893389552e-05, + "loss": 4.3945, + "step": 8602 + }, + { + "epoch": 0.84013671875, + "grad_norm": 0.14291134476661682, + "learning_rate": 7.859860885698289e-05, + "loss": 4.3867, + "step": 8603 + }, + { + "epoch": 0.840234375, + "grad_norm": 0.1568872183561325, + "learning_rate": 7.856443781985214e-05, + "loss": 4.3828, + "step": 8604 + }, + { + "epoch": 0.84033203125, + "grad_norm": 0.14872413873672485, + "learning_rate": 7.853028582581592e-05, + "loss": 4.4141, + "step": 8605 + }, + { + "epoch": 0.8404296875, + "grad_norm": 0.1475353240966797, + "learning_rate": 7.849615287818505e-05, + "loss": 4.4297, + "step": 8606 + }, + { + "epoch": 0.84052734375, + "grad_norm": 0.14345940947532654, + "learning_rate": 7.84620389802685e-05, + "loss": 4.375, + "step": 8607 + }, + { + "epoch": 0.840625, + "grad_norm": 0.14693418145179749, + "learning_rate": 7.842794413537334e-05, + "loss": 4.4023, + "step": 8608 + }, + { + "epoch": 0.84072265625, + "grad_norm": 0.15302631258964539, + "learning_rate": 7.839386834680487e-05, + "loss": 4.3906, + "step": 8609 + }, + { + "epoch": 0.8408203125, + "grad_norm": 0.14876464009284973, + "learning_rate": 7.835981161786651e-05, + "loss": 4.3984, + "step": 8610 + }, + { + "epoch": 0.84091796875, + "grad_norm": 0.1507531851530075, + "learning_rate": 7.832577395185984e-05, + "loss": 4.3984, + "step": 8611 + }, + { + "epoch": 0.841015625, + "grad_norm": 0.15567095577716827, + "learning_rate": 7.82917553520845e-05, + "loss": 4.3867, + "step": 8612 + }, + { + "epoch": 0.84111328125, + "grad_norm": 0.1431986689567566, + "learning_rate": 
7.825775582183845e-05, + "loss": 4.4062, + "step": 8613 + }, + { + "epoch": 0.8412109375, + "grad_norm": 0.14383389055728912, + "learning_rate": 7.822377536441765e-05, + "loss": 4.4102, + "step": 8614 + }, + { + "epoch": 0.84130859375, + "grad_norm": 0.14537501335144043, + "learning_rate": 7.818981398311628e-05, + "loss": 4.3867, + "step": 8615 + }, + { + "epoch": 0.84140625, + "grad_norm": 0.14590910077095032, + "learning_rate": 7.815587168122672e-05, + "loss": 4.4102, + "step": 8616 + }, + { + "epoch": 0.84150390625, + "grad_norm": 0.14618858695030212, + "learning_rate": 7.812194846203944e-05, + "loss": 4.4219, + "step": 8617 + }, + { + "epoch": 0.8416015625, + "grad_norm": 0.1427505761384964, + "learning_rate": 7.808804432884301e-05, + "loss": 4.3906, + "step": 8618 + }, + { + "epoch": 0.84169921875, + "grad_norm": 0.14011891186237335, + "learning_rate": 7.805415928492429e-05, + "loss": 4.3672, + "step": 8619 + }, + { + "epoch": 0.841796875, + "grad_norm": 0.14410746097564697, + "learning_rate": 7.80202933335681e-05, + "loss": 4.3906, + "step": 8620 + }, + { + "epoch": 0.84189453125, + "grad_norm": 0.14411260187625885, + "learning_rate": 7.798644647805756e-05, + "loss": 4.418, + "step": 8621 + }, + { + "epoch": 0.8419921875, + "grad_norm": 0.14432325959205627, + "learning_rate": 7.795261872167391e-05, + "loss": 4.3984, + "step": 8622 + }, + { + "epoch": 0.84208984375, + "grad_norm": 0.1479870080947876, + "learning_rate": 7.791881006769652e-05, + "loss": 4.4023, + "step": 8623 + }, + { + "epoch": 0.8421875, + "grad_norm": 0.14906136691570282, + "learning_rate": 7.788502051940289e-05, + "loss": 4.418, + "step": 8624 + }, + { + "epoch": 0.84228515625, + "grad_norm": 0.1494377851486206, + "learning_rate": 7.785125008006868e-05, + "loss": 4.4102, + "step": 8625 + }, + { + "epoch": 0.8423828125, + "grad_norm": 0.14384035766124725, + "learning_rate": 7.781749875296776e-05, + "loss": 4.3945, + "step": 8626 + }, + { + "epoch": 0.84248046875, + "grad_norm": 0.1405549794435501, + "learning_rate": 7.778376654137201e-05, + "loss": 4.3984, + "step": 8627 + }, + { + "epoch": 0.842578125, + "grad_norm": 0.14900435507297516, + "learning_rate": 7.775005344855166e-05, + "loss": 4.3945, + "step": 8628 + }, + { + "epoch": 0.84267578125, + "grad_norm": 0.14112931489944458, + "learning_rate": 7.771635947777484e-05, + "loss": 4.4141, + "step": 8629 + }, + { + "epoch": 0.8427734375, + "grad_norm": 0.14811304211616516, + "learning_rate": 7.7682684632308e-05, + "loss": 4.3984, + "step": 8630 + }, + { + "epoch": 0.84287109375, + "grad_norm": 0.142348051071167, + "learning_rate": 7.764902891541568e-05, + "loss": 4.4102, + "step": 8631 + }, + { + "epoch": 0.84296875, + "grad_norm": 0.14091207087039948, + "learning_rate": 7.761539233036058e-05, + "loss": 4.3789, + "step": 8632 + }, + { + "epoch": 0.84306640625, + "grad_norm": 0.14531436562538147, + "learning_rate": 7.758177488040355e-05, + "loss": 4.3789, + "step": 8633 + }, + { + "epoch": 0.8431640625, + "grad_norm": 0.14418597519397736, + "learning_rate": 7.754817656880357e-05, + "loss": 4.375, + "step": 8634 + }, + { + "epoch": 0.84326171875, + "grad_norm": 0.13842584192752838, + "learning_rate": 7.751459739881774e-05, + "loss": 4.3984, + "step": 8635 + }, + { + "epoch": 0.843359375, + "grad_norm": 0.14167840778827667, + "learning_rate": 7.748103737370142e-05, + "loss": 4.375, + "step": 8636 + }, + { + "epoch": 0.84345703125, + "grad_norm": 0.14216433465480804, + "learning_rate": 7.74474964967079e-05, + "loss": 4.4023, + "step": 8637 + }, + { + "epoch": 
0.8435546875, + "grad_norm": 0.14735674858093262, + "learning_rate": 7.74139747710888e-05, + "loss": 4.4102, + "step": 8638 + }, + { + "epoch": 0.84365234375, + "grad_norm": 0.1459888070821762, + "learning_rate": 7.738047220009385e-05, + "loss": 4.4023, + "step": 8639 + }, + { + "epoch": 0.84375, + "grad_norm": 0.14232724905014038, + "learning_rate": 7.734698878697085e-05, + "loss": 4.4336, + "step": 8640 + }, + { + "epoch": 0.84384765625, + "grad_norm": 0.14106474816799164, + "learning_rate": 7.731352453496585e-05, + "loss": 4.3945, + "step": 8641 + }, + { + "epoch": 0.8439453125, + "grad_norm": 0.14263860881328583, + "learning_rate": 7.728007944732289e-05, + "loss": 4.4219, + "step": 8642 + }, + { + "epoch": 0.84404296875, + "grad_norm": 0.14411525428295135, + "learning_rate": 7.724665352728433e-05, + "loss": 4.3633, + "step": 8643 + }, + { + "epoch": 0.844140625, + "grad_norm": 0.1430474817752838, + "learning_rate": 7.721324677809059e-05, + "loss": 4.3789, + "step": 8644 + }, + { + "epoch": 0.84423828125, + "grad_norm": 0.15586380660533905, + "learning_rate": 7.717985920298014e-05, + "loss": 4.3867, + "step": 8645 + }, + { + "epoch": 0.8443359375, + "grad_norm": 0.14881962537765503, + "learning_rate": 7.714649080518975e-05, + "loss": 4.3711, + "step": 8646 + }, + { + "epoch": 0.84443359375, + "grad_norm": 0.15096259117126465, + "learning_rate": 7.711314158795424e-05, + "loss": 4.3711, + "step": 8647 + }, + { + "epoch": 0.84453125, + "grad_norm": 0.14546455442905426, + "learning_rate": 7.707981155450657e-05, + "loss": 4.3906, + "step": 8648 + }, + { + "epoch": 0.84462890625, + "grad_norm": 0.14773735404014587, + "learning_rate": 7.704650070807791e-05, + "loss": 4.3906, + "step": 8649 + }, + { + "epoch": 0.8447265625, + "grad_norm": 0.14362503588199615, + "learning_rate": 7.701320905189746e-05, + "loss": 4.4023, + "step": 8650 + }, + { + "epoch": 0.84482421875, + "grad_norm": 0.1432264745235443, + "learning_rate": 7.697993658919267e-05, + "loss": 4.375, + "step": 8651 + }, + { + "epoch": 0.844921875, + "grad_norm": 0.14732682704925537, + "learning_rate": 7.694668332318907e-05, + "loss": 4.4023, + "step": 8652 + }, + { + "epoch": 0.84501953125, + "grad_norm": 0.1432601809501648, + "learning_rate": 7.691344925711034e-05, + "loss": 4.3516, + "step": 8653 + }, + { + "epoch": 0.8451171875, + "grad_norm": 0.1492948979139328, + "learning_rate": 7.688023439417828e-05, + "loss": 4.4102, + "step": 8654 + }, + { + "epoch": 0.84521484375, + "grad_norm": 0.1479274481534958, + "learning_rate": 7.684703873761284e-05, + "loss": 4.418, + "step": 8655 + }, + { + "epoch": 0.8453125, + "grad_norm": 0.15002548694610596, + "learning_rate": 7.681386229063212e-05, + "loss": 4.418, + "step": 8656 + }, + { + "epoch": 0.84541015625, + "grad_norm": 0.14656122028827667, + "learning_rate": 7.678070505645237e-05, + "loss": 4.4023, + "step": 8657 + }, + { + "epoch": 0.8455078125, + "grad_norm": 0.1526923030614853, + "learning_rate": 7.674756703828795e-05, + "loss": 4.4102, + "step": 8658 + }, + { + "epoch": 0.84560546875, + "grad_norm": 0.14848774671554565, + "learning_rate": 7.671444823935134e-05, + "loss": 4.4141, + "step": 8659 + }, + { + "epoch": 0.845703125, + "grad_norm": 0.14671997725963593, + "learning_rate": 7.668134866285321e-05, + "loss": 4.4141, + "step": 8660 + }, + { + "epoch": 0.84580078125, + "grad_norm": 0.1482830047607422, + "learning_rate": 7.664826831200239e-05, + "loss": 4.3594, + "step": 8661 + }, + { + "epoch": 0.8458984375, + "grad_norm": 0.15284346044063568, + "learning_rate": 
7.661520719000568e-05, + "loss": 4.418, + "step": 8662 + }, + { + "epoch": 0.84599609375, + "grad_norm": 0.14998923242092133, + "learning_rate": 7.658216530006824e-05, + "loss": 4.4102, + "step": 8663 + }, + { + "epoch": 0.84609375, + "grad_norm": 0.14584991335868835, + "learning_rate": 7.654914264539314e-05, + "loss": 4.3828, + "step": 8664 + }, + { + "epoch": 0.84619140625, + "grad_norm": 0.14587782323360443, + "learning_rate": 7.65161392291818e-05, + "loss": 4.3828, + "step": 8665 + }, + { + "epoch": 0.8462890625, + "grad_norm": 0.14445647597312927, + "learning_rate": 7.648315505463365e-05, + "loss": 4.4297, + "step": 8666 + }, + { + "epoch": 0.84638671875, + "grad_norm": 0.14640620350837708, + "learning_rate": 7.645019012494628e-05, + "loss": 4.3516, + "step": 8667 + }, + { + "epoch": 0.846484375, + "grad_norm": 0.1430039256811142, + "learning_rate": 7.641724444331543e-05, + "loss": 4.4141, + "step": 8668 + }, + { + "epoch": 0.84658203125, + "grad_norm": 0.1453983336687088, + "learning_rate": 7.63843180129349e-05, + "loss": 4.375, + "step": 8669 + }, + { + "epoch": 0.8466796875, + "grad_norm": 0.1422090232372284, + "learning_rate": 7.635141083699682e-05, + "loss": 4.3945, + "step": 8670 + }, + { + "epoch": 0.84677734375, + "grad_norm": 0.14368700981140137, + "learning_rate": 7.631852291869115e-05, + "loss": 4.4141, + "step": 8671 + }, + { + "epoch": 0.846875, + "grad_norm": 0.14385755360126495, + "learning_rate": 7.628565426120627e-05, + "loss": 4.4062, + "step": 8672 + }, + { + "epoch": 0.84697265625, + "grad_norm": 0.14443638920783997, + "learning_rate": 7.62528048677285e-05, + "loss": 4.4258, + "step": 8673 + }, + { + "epoch": 0.8470703125, + "grad_norm": 0.1506260335445404, + "learning_rate": 7.62199747414424e-05, + "loss": 4.418, + "step": 8674 + }, + { + "epoch": 0.84716796875, + "grad_norm": 0.14740067720413208, + "learning_rate": 7.618716388553063e-05, + "loss": 4.375, + "step": 8675 + }, + { + "epoch": 0.847265625, + "grad_norm": 0.14063668251037598, + "learning_rate": 7.615437230317397e-05, + "loss": 4.3906, + "step": 8676 + }, + { + "epoch": 0.84736328125, + "grad_norm": 0.14621557295322418, + "learning_rate": 7.612159999755133e-05, + "loss": 4.4102, + "step": 8677 + }, + { + "epoch": 0.8474609375, + "grad_norm": 0.148193821310997, + "learning_rate": 7.608884697183987e-05, + "loss": 4.3828, + "step": 8678 + }, + { + "epoch": 0.84755859375, + "grad_norm": 0.14675161242485046, + "learning_rate": 7.60561132292146e-05, + "loss": 4.4336, + "step": 8679 + }, + { + "epoch": 0.84765625, + "grad_norm": 0.14798946678638458, + "learning_rate": 7.602339877284892e-05, + "loss": 4.3906, + "step": 8680 + }, + { + "epoch": 0.84775390625, + "grad_norm": 0.14861108362674713, + "learning_rate": 7.599070360591429e-05, + "loss": 4.3672, + "step": 8681 + }, + { + "epoch": 0.8478515625, + "grad_norm": 0.14864976704120636, + "learning_rate": 7.595802773158022e-05, + "loss": 4.3906, + "step": 8682 + }, + { + "epoch": 0.84794921875, + "grad_norm": 0.1515401154756546, + "learning_rate": 7.592537115301451e-05, + "loss": 4.3516, + "step": 8683 + }, + { + "epoch": 0.848046875, + "grad_norm": 0.14264018833637238, + "learning_rate": 7.589273387338293e-05, + "loss": 4.3711, + "step": 8684 + }, + { + "epoch": 0.84814453125, + "grad_norm": 0.1457412838935852, + "learning_rate": 7.586011589584944e-05, + "loss": 4.3867, + "step": 8685 + }, + { + "epoch": 0.8482421875, + "grad_norm": 0.14882080256938934, + "learning_rate": 7.582751722357616e-05, + "loss": 4.3906, + "step": 8686 + }, + { + "epoch": 0.84833984375, 
+ "grad_norm": 0.14466865360736847, + "learning_rate": 7.579493785972333e-05, + "loss": 4.4062, + "step": 8687 + }, + { + "epoch": 0.8484375, + "grad_norm": 0.1465359479188919, + "learning_rate": 7.576237780744924e-05, + "loss": 4.4141, + "step": 8688 + }, + { + "epoch": 0.84853515625, + "grad_norm": 0.14612193405628204, + "learning_rate": 7.572983706991038e-05, + "loss": 4.4102, + "step": 8689 + }, + { + "epoch": 0.8486328125, + "grad_norm": 0.1489410698413849, + "learning_rate": 7.569731565026136e-05, + "loss": 4.4141, + "step": 8690 + }, + { + "epoch": 0.84873046875, + "grad_norm": 0.14539386332035065, + "learning_rate": 7.566481355165494e-05, + "loss": 4.4023, + "step": 8691 + }, + { + "epoch": 0.848828125, + "grad_norm": 0.148810476064682, + "learning_rate": 7.563233077724194e-05, + "loss": 4.3906, + "step": 8692 + }, + { + "epoch": 0.84892578125, + "grad_norm": 0.1477649211883545, + "learning_rate": 7.559986733017135e-05, + "loss": 4.3789, + "step": 8693 + }, + { + "epoch": 0.8490234375, + "grad_norm": 0.14763906598091125, + "learning_rate": 7.556742321359027e-05, + "loss": 4.3945, + "step": 8694 + }, + { + "epoch": 0.84912109375, + "grad_norm": 0.1546267718076706, + "learning_rate": 7.553499843064405e-05, + "loss": 4.3828, + "step": 8695 + }, + { + "epoch": 0.84921875, + "grad_norm": 0.1506766974925995, + "learning_rate": 7.550259298447589e-05, + "loss": 4.3828, + "step": 8696 + }, + { + "epoch": 0.84931640625, + "grad_norm": 0.14216677844524384, + "learning_rate": 7.547020687822735e-05, + "loss": 4.3984, + "step": 8697 + }, + { + "epoch": 0.8494140625, + "grad_norm": 0.14940088987350464, + "learning_rate": 7.543784011503805e-05, + "loss": 4.4062, + "step": 8698 + }, + { + "epoch": 0.84951171875, + "grad_norm": 0.15109632909297943, + "learning_rate": 7.54054926980457e-05, + "loss": 4.3789, + "step": 8699 + }, + { + "epoch": 0.849609375, + "grad_norm": 0.14802992343902588, + "learning_rate": 7.537316463038619e-05, + "loss": 4.3555, + "step": 8700 + }, + { + "epoch": 0.84970703125, + "grad_norm": 0.15316686034202576, + "learning_rate": 7.534085591519349e-05, + "loss": 4.4336, + "step": 8701 + }, + { + "epoch": 0.8498046875, + "grad_norm": 0.14751166105270386, + "learning_rate": 7.530856655559972e-05, + "loss": 4.3672, + "step": 8702 + }, + { + "epoch": 0.84990234375, + "grad_norm": 0.14932459592819214, + "learning_rate": 7.527629655473517e-05, + "loss": 4.3516, + "step": 8703 + }, + { + "epoch": 0.85, + "grad_norm": 0.14913220703601837, + "learning_rate": 7.524404591572809e-05, + "loss": 4.3594, + "step": 8704 + }, + { + "epoch": 0.85009765625, + "grad_norm": 0.1559034138917923, + "learning_rate": 7.5211814641705e-05, + "loss": 4.3711, + "step": 8705 + }, + { + "epoch": 0.8501953125, + "grad_norm": 0.1459207683801651, + "learning_rate": 7.517960273579052e-05, + "loss": 4.3789, + "step": 8706 + }, + { + "epoch": 0.85029296875, + "grad_norm": 0.15003354847431183, + "learning_rate": 7.514741020110738e-05, + "loss": 4.3672, + "step": 8707 + }, + { + "epoch": 0.850390625, + "grad_norm": 0.15233157575130463, + "learning_rate": 7.51152370407764e-05, + "loss": 4.3789, + "step": 8708 + }, + { + "epoch": 0.85048828125, + "grad_norm": 0.14528900384902954, + "learning_rate": 7.50830832579166e-05, + "loss": 4.3984, + "step": 8709 + }, + { + "epoch": 0.8505859375, + "grad_norm": 0.1516219675540924, + "learning_rate": 7.505094885564501e-05, + "loss": 4.4141, + "step": 8710 + }, + { + "epoch": 0.85068359375, + "grad_norm": 0.14666321873664856, + "learning_rate": 7.501883383707688e-05, + "loss": 
4.4375, + "step": 8711 + }, + { + "epoch": 0.85078125, + "grad_norm": 0.14948290586471558, + "learning_rate": 7.498673820532557e-05, + "loss": 4.3945, + "step": 8712 + }, + { + "epoch": 0.85087890625, + "grad_norm": 0.15031005442142487, + "learning_rate": 7.495466196350245e-05, + "loss": 4.3984, + "step": 8713 + }, + { + "epoch": 0.8509765625, + "grad_norm": 0.14553794264793396, + "learning_rate": 7.492260511471715e-05, + "loss": 4.3984, + "step": 8714 + }, + { + "epoch": 0.85107421875, + "grad_norm": 0.14872746169567108, + "learning_rate": 7.489056766207735e-05, + "loss": 4.3906, + "step": 8715 + }, + { + "epoch": 0.851171875, + "grad_norm": 0.15741322934627533, + "learning_rate": 7.485854960868885e-05, + "loss": 4.3789, + "step": 8716 + }, + { + "epoch": 0.85126953125, + "grad_norm": 0.13833874464035034, + "learning_rate": 7.482655095765563e-05, + "loss": 4.4219, + "step": 8717 + }, + { + "epoch": 0.8513671875, + "grad_norm": 0.14975270628929138, + "learning_rate": 7.47945717120797e-05, + "loss": 4.4062, + "step": 8718 + }, + { + "epoch": 0.85146484375, + "grad_norm": 0.14450576901435852, + "learning_rate": 7.476261187506125e-05, + "loss": 4.3984, + "step": 8719 + }, + { + "epoch": 0.8515625, + "grad_norm": 0.1461220234632492, + "learning_rate": 7.473067144969861e-05, + "loss": 4.375, + "step": 8720 + }, + { + "epoch": 0.85166015625, + "grad_norm": 0.14098912477493286, + "learning_rate": 7.469875043908808e-05, + "loss": 4.4023, + "step": 8721 + }, + { + "epoch": 0.8517578125, + "grad_norm": 0.14648127555847168, + "learning_rate": 7.466684884632425e-05, + "loss": 4.3867, + "step": 8722 + }, + { + "epoch": 0.85185546875, + "grad_norm": 0.14874044060707092, + "learning_rate": 7.463496667449978e-05, + "loss": 4.4102, + "step": 8723 + }, + { + "epoch": 0.851953125, + "grad_norm": 0.14687255024909973, + "learning_rate": 7.460310392670541e-05, + "loss": 4.3633, + "step": 8724 + }, + { + "epoch": 0.85205078125, + "grad_norm": 0.14980892837047577, + "learning_rate": 7.457126060602998e-05, + "loss": 4.4258, + "step": 8725 + }, + { + "epoch": 0.8521484375, + "grad_norm": 0.14839224517345428, + "learning_rate": 7.453943671556056e-05, + "loss": 4.4023, + "step": 8726 + }, + { + "epoch": 0.85224609375, + "grad_norm": 0.14279909431934357, + "learning_rate": 7.450763225838222e-05, + "loss": 4.3906, + "step": 8727 + }, + { + "epoch": 0.85234375, + "grad_norm": 0.1505080610513687, + "learning_rate": 7.447584723757814e-05, + "loss": 4.3984, + "step": 8728 + }, + { + "epoch": 0.85244140625, + "grad_norm": 0.15479566156864166, + "learning_rate": 7.44440816562298e-05, + "loss": 4.3594, + "step": 8729 + }, + { + "epoch": 0.8525390625, + "grad_norm": 0.15123537182807922, + "learning_rate": 7.441233551741651e-05, + "loss": 4.4297, + "step": 8730 + }, + { + "epoch": 0.85263671875, + "grad_norm": 0.14483173191547394, + "learning_rate": 7.43806088242159e-05, + "loss": 4.3984, + "step": 8731 + }, + { + "epoch": 0.852734375, + "grad_norm": 0.14982518553733826, + "learning_rate": 7.434890157970365e-05, + "loss": 4.4141, + "step": 8732 + }, + { + "epoch": 0.85283203125, + "grad_norm": 0.1500513255596161, + "learning_rate": 7.431721378695355e-05, + "loss": 4.3984, + "step": 8733 + }, + { + "epoch": 0.8529296875, + "grad_norm": 0.15189136564731598, + "learning_rate": 7.428554544903756e-05, + "loss": 4.4062, + "step": 8734 + }, + { + "epoch": 0.85302734375, + "grad_norm": 0.15129588544368744, + "learning_rate": 7.42538965690257e-05, + "loss": 4.3906, + "step": 8735 + }, + { + "epoch": 0.853125, + "grad_norm": 
0.1513906717300415, + "learning_rate": 7.422226714998607e-05, + "loss": 4.4102, + "step": 8736 + }, + { + "epoch": 0.85322265625, + "grad_norm": 0.15780386328697205, + "learning_rate": 7.4190657194985e-05, + "loss": 4.3828, + "step": 8737 + }, + { + "epoch": 0.8533203125, + "grad_norm": 0.15277199447155, + "learning_rate": 7.41590667070868e-05, + "loss": 4.3672, + "step": 8738 + }, + { + "epoch": 0.85341796875, + "grad_norm": 0.14367109537124634, + "learning_rate": 7.412749568935395e-05, + "loss": 4.3984, + "step": 8739 + }, + { + "epoch": 0.853515625, + "grad_norm": 0.14866866171360016, + "learning_rate": 7.409594414484709e-05, + "loss": 4.3945, + "step": 8740 + }, + { + "epoch": 0.85361328125, + "grad_norm": 0.14927756786346436, + "learning_rate": 7.406441207662487e-05, + "loss": 4.4023, + "step": 8741 + }, + { + "epoch": 0.8537109375, + "grad_norm": 0.15310212969779968, + "learning_rate": 7.403289948774417e-05, + "loss": 4.4023, + "step": 8742 + }, + { + "epoch": 0.85380859375, + "grad_norm": 0.14823776483535767, + "learning_rate": 7.40014063812599e-05, + "loss": 4.3945, + "step": 8743 + }, + { + "epoch": 0.85390625, + "grad_norm": 0.1468910127878189, + "learning_rate": 7.396993276022509e-05, + "loss": 4.3828, + "step": 8744 + }, + { + "epoch": 0.85400390625, + "grad_norm": 0.1433703601360321, + "learning_rate": 7.39384786276909e-05, + "loss": 4.4062, + "step": 8745 + }, + { + "epoch": 0.8541015625, + "grad_norm": 0.15347065031528473, + "learning_rate": 7.390704398670665e-05, + "loss": 4.4336, + "step": 8746 + }, + { + "epoch": 0.85419921875, + "grad_norm": 0.14637461304664612, + "learning_rate": 7.387562884031964e-05, + "loss": 4.3789, + "step": 8747 + }, + { + "epoch": 0.854296875, + "grad_norm": 0.15023504197597504, + "learning_rate": 7.384423319157534e-05, + "loss": 4.3984, + "step": 8748 + }, + { + "epoch": 0.85439453125, + "grad_norm": 0.14817431569099426, + "learning_rate": 7.38128570435174e-05, + "loss": 4.375, + "step": 8749 + }, + { + "epoch": 0.8544921875, + "grad_norm": 0.14755330979824066, + "learning_rate": 7.378150039918752e-05, + "loss": 4.3906, + "step": 8750 + }, + { + "epoch": 0.85458984375, + "grad_norm": 0.1455625593662262, + "learning_rate": 7.375016326162547e-05, + "loss": 4.3945, + "step": 8751 + }, + { + "epoch": 0.8546875, + "grad_norm": 0.14388085901737213, + "learning_rate": 7.371884563386925e-05, + "loss": 4.3945, + "step": 8752 + }, + { + "epoch": 0.85478515625, + "grad_norm": 0.14407770335674286, + "learning_rate": 7.36875475189548e-05, + "loss": 4.3672, + "step": 8753 + }, + { + "epoch": 0.8548828125, + "grad_norm": 0.15168391168117523, + "learning_rate": 7.365626891991639e-05, + "loss": 4.375, + "step": 8754 + }, + { + "epoch": 0.85498046875, + "grad_norm": 0.1420057713985443, + "learning_rate": 7.36250098397861e-05, + "loss": 4.3867, + "step": 8755 + }, + { + "epoch": 0.855078125, + "grad_norm": 0.147312730550766, + "learning_rate": 7.359377028159441e-05, + "loss": 4.3711, + "step": 8756 + }, + { + "epoch": 0.85517578125, + "grad_norm": 0.15266118943691254, + "learning_rate": 7.356255024836973e-05, + "loss": 4.4023, + "step": 8757 + }, + { + "epoch": 0.8552734375, + "grad_norm": 0.15117394924163818, + "learning_rate": 7.353134974313864e-05, + "loss": 4.3945, + "step": 8758 + }, + { + "epoch": 0.85537109375, + "grad_norm": 0.14924903213977814, + "learning_rate": 7.350016876892582e-05, + "loss": 4.4023, + "step": 8759 + }, + { + "epoch": 0.85546875, + "grad_norm": 0.1493339240550995, + "learning_rate": 7.346900732875403e-05, + "loss": 4.4062, + "step": 
8760 + }, + { + "epoch": 0.85556640625, + "grad_norm": 0.1553177386522293, + "learning_rate": 7.343786542564421e-05, + "loss": 4.3828, + "step": 8761 + }, + { + "epoch": 0.8556640625, + "grad_norm": 0.14592377841472626, + "learning_rate": 7.340674306261535e-05, + "loss": 4.4062, + "step": 8762 + }, + { + "epoch": 0.85576171875, + "grad_norm": 0.14035877585411072, + "learning_rate": 7.33756402426845e-05, + "loss": 4.4062, + "step": 8763 + }, + { + "epoch": 0.855859375, + "grad_norm": 0.15333248674869537, + "learning_rate": 7.334455696886696e-05, + "loss": 4.4102, + "step": 8764 + }, + { + "epoch": 0.85595703125, + "grad_norm": 0.1457693725824356, + "learning_rate": 7.331349324417594e-05, + "loss": 4.3906, + "step": 8765 + }, + { + "epoch": 0.8560546875, + "grad_norm": 0.1505059003829956, + "learning_rate": 7.328244907162289e-05, + "loss": 4.4062, + "step": 8766 + }, + { + "epoch": 0.85615234375, + "grad_norm": 0.152518630027771, + "learning_rate": 7.325142445421735e-05, + "loss": 4.3906, + "step": 8767 + }, + { + "epoch": 0.85625, + "grad_norm": 0.14839142560958862, + "learning_rate": 7.322041939496694e-05, + "loss": 4.3945, + "step": 8768 + }, + { + "epoch": 0.85634765625, + "grad_norm": 0.1495244801044464, + "learning_rate": 7.318943389687738e-05, + "loss": 4.3867, + "step": 8769 + }, + { + "epoch": 0.8564453125, + "grad_norm": 0.15259191393852234, + "learning_rate": 7.315846796295251e-05, + "loss": 4.4102, + "step": 8770 + }, + { + "epoch": 0.85654296875, + "grad_norm": 0.14457902312278748, + "learning_rate": 7.312752159619425e-05, + "loss": 4.4219, + "step": 8771 + }, + { + "epoch": 0.856640625, + "grad_norm": 0.1485748589038849, + "learning_rate": 7.309659479960268e-05, + "loss": 4.3711, + "step": 8772 + }, + { + "epoch": 0.85673828125, + "grad_norm": 0.15057824552059174, + "learning_rate": 7.306568757617593e-05, + "loss": 4.3633, + "step": 8773 + }, + { + "epoch": 0.8568359375, + "grad_norm": 0.15240198373794556, + "learning_rate": 7.303479992891027e-05, + "loss": 4.3672, + "step": 8774 + }, + { + "epoch": 0.85693359375, + "grad_norm": 0.14536118507385254, + "learning_rate": 7.300393186079997e-05, + "loss": 4.3867, + "step": 8775 + }, + { + "epoch": 0.85703125, + "grad_norm": 0.14813369512557983, + "learning_rate": 7.297308337483753e-05, + "loss": 4.3867, + "step": 8776 + }, + { + "epoch": 0.85712890625, + "grad_norm": 0.14428137242794037, + "learning_rate": 7.294225447401348e-05, + "loss": 4.3828, + "step": 8777 + }, + { + "epoch": 0.8572265625, + "grad_norm": 0.14271315932273865, + "learning_rate": 7.291144516131651e-05, + "loss": 4.3906, + "step": 8778 + }, + { + "epoch": 0.85732421875, + "grad_norm": 0.14320975542068481, + "learning_rate": 7.288065543973335e-05, + "loss": 4.4102, + "step": 8779 + }, + { + "epoch": 0.857421875, + "grad_norm": 0.14813977479934692, + "learning_rate": 7.284988531224884e-05, + "loss": 4.3945, + "step": 8780 + }, + { + "epoch": 0.85751953125, + "grad_norm": 0.14144477248191833, + "learning_rate": 7.281913478184597e-05, + "loss": 4.375, + "step": 8781 + }, + { + "epoch": 0.8576171875, + "grad_norm": 0.14660249650478363, + "learning_rate": 7.278840385150577e-05, + "loss": 4.3789, + "step": 8782 + }, + { + "epoch": 0.85771484375, + "grad_norm": 0.14951027929782867, + "learning_rate": 7.275769252420738e-05, + "loss": 4.3906, + "step": 8783 + }, + { + "epoch": 0.8578125, + "grad_norm": 0.1401640921831131, + "learning_rate": 7.272700080292816e-05, + "loss": 4.3555, + "step": 8784 + }, + { + "epoch": 0.85791015625, + "grad_norm": 0.14819757640361786, + 
"learning_rate": 7.269632869064334e-05, + "loss": 4.3906, + "step": 8785 + }, + { + "epoch": 0.8580078125, + "grad_norm": 0.14799878001213074, + "learning_rate": 7.26656761903264e-05, + "loss": 4.4141, + "step": 8786 + }, + { + "epoch": 0.85810546875, + "grad_norm": 0.15004488825798035, + "learning_rate": 7.263504330494889e-05, + "loss": 4.3828, + "step": 8787 + }, + { + "epoch": 0.858203125, + "grad_norm": 0.14823928475379944, + "learning_rate": 7.260443003748049e-05, + "loss": 4.3906, + "step": 8788 + }, + { + "epoch": 0.85830078125, + "grad_norm": 0.14428777992725372, + "learning_rate": 7.257383639088892e-05, + "loss": 4.375, + "step": 8789 + }, + { + "epoch": 0.8583984375, + "grad_norm": 0.14878924190998077, + "learning_rate": 7.254326236814007e-05, + "loss": 4.4023, + "step": 8790 + }, + { + "epoch": 0.85849609375, + "grad_norm": 0.15710829198360443, + "learning_rate": 7.251270797219782e-05, + "loss": 4.3711, + "step": 8791 + }, + { + "epoch": 0.85859375, + "grad_norm": 0.1539025455713272, + "learning_rate": 7.248217320602429e-05, + "loss": 4.4023, + "step": 8792 + }, + { + "epoch": 0.85869140625, + "grad_norm": 0.14636893570423126, + "learning_rate": 7.245165807257953e-05, + "loss": 4.3984, + "step": 8793 + }, + { + "epoch": 0.8587890625, + "grad_norm": 0.1521969735622406, + "learning_rate": 7.242116257482188e-05, + "loss": 4.4414, + "step": 8794 + }, + { + "epoch": 0.85888671875, + "grad_norm": 0.1490299552679062, + "learning_rate": 7.239068671570758e-05, + "loss": 4.3555, + "step": 8795 + }, + { + "epoch": 0.858984375, + "grad_norm": 0.1457424908876419, + "learning_rate": 7.23602304981911e-05, + "loss": 4.3828, + "step": 8796 + }, + { + "epoch": 0.85908203125, + "grad_norm": 0.14579744637012482, + "learning_rate": 7.232979392522491e-05, + "loss": 4.3711, + "step": 8797 + }, + { + "epoch": 0.8591796875, + "grad_norm": 0.15403148531913757, + "learning_rate": 7.229937699975972e-05, + "loss": 4.375, + "step": 8798 + }, + { + "epoch": 0.85927734375, + "grad_norm": 0.14852744340896606, + "learning_rate": 7.226897972474417e-05, + "loss": 4.3867, + "step": 8799 + }, + { + "epoch": 0.859375, + "grad_norm": 0.14713339507579803, + "learning_rate": 7.22386021031251e-05, + "loss": 4.4102, + "step": 8800 + }, + { + "epoch": 0.85947265625, + "grad_norm": 0.15375390648841858, + "learning_rate": 7.220824413784741e-05, + "loss": 4.3945, + "step": 8801 + }, + { + "epoch": 0.8595703125, + "grad_norm": 0.14209844172000885, + "learning_rate": 7.21779058318541e-05, + "loss": 4.3867, + "step": 8802 + }, + { + "epoch": 0.85966796875, + "grad_norm": 0.14725449681282043, + "learning_rate": 7.214758718808627e-05, + "loss": 4.418, + "step": 8803 + }, + { + "epoch": 0.859765625, + "grad_norm": 0.1446736454963684, + "learning_rate": 7.211728820948315e-05, + "loss": 4.3945, + "step": 8804 + }, + { + "epoch": 0.85986328125, + "grad_norm": 0.14359335601329803, + "learning_rate": 7.20870088989819e-05, + "loss": 4.3789, + "step": 8805 + }, + { + "epoch": 0.8599609375, + "grad_norm": 0.14856547117233276, + "learning_rate": 7.205674925951797e-05, + "loss": 4.4336, + "step": 8806 + }, + { + "epoch": 0.86005859375, + "grad_norm": 0.14472715556621552, + "learning_rate": 7.202650929402482e-05, + "loss": 4.3633, + "step": 8807 + }, + { + "epoch": 0.86015625, + "grad_norm": 0.14728708565235138, + "learning_rate": 7.199628900543401e-05, + "loss": 4.4141, + "step": 8808 + }, + { + "epoch": 0.86025390625, + "grad_norm": 0.14874403178691864, + "learning_rate": 7.196608839667517e-05, + "loss": 4.3789, + "step": 8809 + }, + { + 
"epoch": 0.8603515625, + "grad_norm": 0.1416388899087906, + "learning_rate": 7.19359074706761e-05, + "loss": 4.3906, + "step": 8810 + }, + { + "epoch": 0.86044921875, + "grad_norm": 0.14743781089782715, + "learning_rate": 7.19057462303626e-05, + "loss": 4.3516, + "step": 8811 + }, + { + "epoch": 0.860546875, + "grad_norm": 0.14421522617340088, + "learning_rate": 7.187560467865857e-05, + "loss": 4.3906, + "step": 8812 + }, + { + "epoch": 0.86064453125, + "grad_norm": 0.14367064833641052, + "learning_rate": 7.184548281848613e-05, + "loss": 4.3594, + "step": 8813 + }, + { + "epoch": 0.8607421875, + "grad_norm": 0.14541365206241608, + "learning_rate": 7.181538065276524e-05, + "loss": 4.3945, + "step": 8814 + }, + { + "epoch": 0.86083984375, + "grad_norm": 0.1492708921432495, + "learning_rate": 7.17852981844142e-05, + "loss": 4.3867, + "step": 8815 + }, + { + "epoch": 0.8609375, + "grad_norm": 0.1464526206254959, + "learning_rate": 7.175523541634928e-05, + "loss": 4.3672, + "step": 8816 + }, + { + "epoch": 0.86103515625, + "grad_norm": 0.14747361838817596, + "learning_rate": 7.172519235148486e-05, + "loss": 4.4102, + "step": 8817 + }, + { + "epoch": 0.8611328125, + "grad_norm": 0.14186392724514008, + "learning_rate": 7.16951689927334e-05, + "loss": 4.3789, + "step": 8818 + }, + { + "epoch": 0.86123046875, + "grad_norm": 0.16160956025123596, + "learning_rate": 7.16651653430055e-05, + "loss": 4.4023, + "step": 8819 + }, + { + "epoch": 0.861328125, + "grad_norm": 0.14545810222625732, + "learning_rate": 7.163518140520973e-05, + "loss": 4.4102, + "step": 8820 + }, + { + "epoch": 0.86142578125, + "grad_norm": 0.1475006490945816, + "learning_rate": 7.160521718225297e-05, + "loss": 4.3633, + "step": 8821 + }, + { + "epoch": 0.8615234375, + "grad_norm": 0.14298062026500702, + "learning_rate": 7.15752726770399e-05, + "loss": 4.4062, + "step": 8822 + }, + { + "epoch": 0.86162109375, + "grad_norm": 0.14660833775997162, + "learning_rate": 7.154534789247349e-05, + "loss": 4.4062, + "step": 8823 + }, + { + "epoch": 0.86171875, + "grad_norm": 0.14302952587604523, + "learning_rate": 7.151544283145478e-05, + "loss": 4.3945, + "step": 8824 + }, + { + "epoch": 0.86181640625, + "grad_norm": 0.14271900057792664, + "learning_rate": 7.14855574968828e-05, + "loss": 4.4102, + "step": 8825 + }, + { + "epoch": 0.8619140625, + "grad_norm": 0.14243385195732117, + "learning_rate": 7.145569189165477e-05, + "loss": 4.3516, + "step": 8826 + }, + { + "epoch": 0.86201171875, + "grad_norm": 0.1424732804298401, + "learning_rate": 7.142584601866597e-05, + "loss": 4.3672, + "step": 8827 + }, + { + "epoch": 0.862109375, + "grad_norm": 0.14745967090129852, + "learning_rate": 7.13960198808097e-05, + "loss": 4.4062, + "step": 8828 + }, + { + "epoch": 0.86220703125, + "grad_norm": 0.15073296427726746, + "learning_rate": 7.136621348097749e-05, + "loss": 4.3984, + "step": 8829 + }, + { + "epoch": 0.8623046875, + "grad_norm": 0.14312444627285004, + "learning_rate": 7.133642682205883e-05, + "loss": 4.418, + "step": 8830 + }, + { + "epoch": 0.86240234375, + "grad_norm": 0.14277106523513794, + "learning_rate": 7.130665990694129e-05, + "loss": 4.3633, + "step": 8831 + }, + { + "epoch": 0.8625, + "grad_norm": 0.1506606787443161, + "learning_rate": 7.127691273851057e-05, + "loss": 4.3711, + "step": 8832 + }, + { + "epoch": 0.86259765625, + "grad_norm": 0.14954020082950592, + "learning_rate": 7.124718531965051e-05, + "loss": 4.3516, + "step": 8833 + }, + { + "epoch": 0.8626953125, + "grad_norm": 0.13891777396202087, + "learning_rate": 
7.121747765324297e-05, + "loss": 4.4023, + "step": 8834 + }, + { + "epoch": 0.86279296875, + "grad_norm": 0.1479310244321823, + "learning_rate": 7.118778974216788e-05, + "loss": 4.3828, + "step": 8835 + }, + { + "epoch": 0.862890625, + "grad_norm": 0.15418404340744019, + "learning_rate": 7.115812158930331e-05, + "loss": 4.3789, + "step": 8836 + }, + { + "epoch": 0.86298828125, + "grad_norm": 0.1498394012451172, + "learning_rate": 7.112847319752534e-05, + "loss": 4.3867, + "step": 8837 + }, + { + "epoch": 0.8630859375, + "grad_norm": 0.14585170149803162, + "learning_rate": 7.109884456970831e-05, + "loss": 4.3867, + "step": 8838 + }, + { + "epoch": 0.86318359375, + "grad_norm": 0.14814281463623047, + "learning_rate": 7.106923570872434e-05, + "loss": 4.4062, + "step": 8839 + }, + { + "epoch": 0.86328125, + "grad_norm": 0.14402790367603302, + "learning_rate": 7.103964661744388e-05, + "loss": 4.4141, + "step": 8840 + }, + { + "epoch": 0.86337890625, + "grad_norm": 0.1519707292318344, + "learning_rate": 7.101007729873539e-05, + "loss": 4.3984, + "step": 8841 + }, + { + "epoch": 0.8634765625, + "grad_norm": 0.14270925521850586, + "learning_rate": 7.098052775546546e-05, + "loss": 4.3906, + "step": 8842 + }, + { + "epoch": 0.86357421875, + "grad_norm": 0.14621807634830475, + "learning_rate": 7.095099799049866e-05, + "loss": 4.3594, + "step": 8843 + }, + { + "epoch": 0.863671875, + "grad_norm": 0.1509707123041153, + "learning_rate": 7.092148800669771e-05, + "loss": 4.4414, + "step": 8844 + }, + { + "epoch": 0.86376953125, + "grad_norm": 0.146833598613739, + "learning_rate": 7.089199780692341e-05, + "loss": 4.3711, + "step": 8845 + }, + { + "epoch": 0.8638671875, + "grad_norm": 0.14456643164157867, + "learning_rate": 7.086252739403465e-05, + "loss": 4.4102, + "step": 8846 + }, + { + "epoch": 0.86396484375, + "grad_norm": 0.14919167757034302, + "learning_rate": 7.083307677088842e-05, + "loss": 4.3672, + "step": 8847 + }, + { + "epoch": 0.8640625, + "grad_norm": 0.14973995089530945, + "learning_rate": 7.080364594033966e-05, + "loss": 4.3906, + "step": 8848 + }, + { + "epoch": 0.86416015625, + "grad_norm": 0.1464468091726303, + "learning_rate": 7.077423490524156e-05, + "loss": 4.3867, + "step": 8849 + }, + { + "epoch": 0.8642578125, + "grad_norm": 0.15680775046348572, + "learning_rate": 7.074484366844528e-05, + "loss": 4.4062, + "step": 8850 + }, + { + "epoch": 0.86435546875, + "grad_norm": 0.14978449046611786, + "learning_rate": 7.071547223280015e-05, + "loss": 4.375, + "step": 8851 + }, + { + "epoch": 0.864453125, + "grad_norm": 0.15302665531635284, + "learning_rate": 7.068612060115346e-05, + "loss": 4.3711, + "step": 8852 + }, + { + "epoch": 0.86455078125, + "grad_norm": 0.14933711290359497, + "learning_rate": 7.065678877635075e-05, + "loss": 4.3672, + "step": 8853 + }, + { + "epoch": 0.8646484375, + "grad_norm": 0.1505487859249115, + "learning_rate": 7.062747676123543e-05, + "loss": 4.3906, + "step": 8854 + }, + { + "epoch": 0.86474609375, + "grad_norm": 0.14989010989665985, + "learning_rate": 7.059818455864925e-05, + "loss": 4.3906, + "step": 8855 + }, + { + "epoch": 0.86484375, + "grad_norm": 0.14541885256767273, + "learning_rate": 7.056891217143171e-05, + "loss": 4.3906, + "step": 8856 + }, + { + "epoch": 0.86494140625, + "grad_norm": 0.14363187551498413, + "learning_rate": 7.053965960242071e-05, + "loss": 4.4023, + "step": 8857 + }, + { + "epoch": 0.8650390625, + "grad_norm": 0.14489904046058655, + "learning_rate": 7.051042685445199e-05, + "loss": 4.3945, + "step": 8858 + }, + { + "epoch": 
0.86513671875, + "grad_norm": 0.1469685286283493, + "learning_rate": 7.048121393035952e-05, + "loss": 4.3672, + "step": 8859 + }, + { + "epoch": 0.865234375, + "grad_norm": 0.14394962787628174, + "learning_rate": 7.045202083297529e-05, + "loss": 4.4102, + "step": 8860 + }, + { + "epoch": 0.86533203125, + "grad_norm": 0.1452508121728897, + "learning_rate": 7.042284756512939e-05, + "loss": 4.4023, + "step": 8861 + }, + { + "epoch": 0.8654296875, + "grad_norm": 0.14508238434791565, + "learning_rate": 7.039369412964992e-05, + "loss": 4.4141, + "step": 8862 + }, + { + "epoch": 0.86552734375, + "grad_norm": 0.1464054137468338, + "learning_rate": 7.036456052936318e-05, + "loss": 4.3906, + "step": 8863 + }, + { + "epoch": 0.865625, + "grad_norm": 0.1462278962135315, + "learning_rate": 7.03354467670934e-05, + "loss": 4.4141, + "step": 8864 + }, + { + "epoch": 0.86572265625, + "grad_norm": 0.14469964802265167, + "learning_rate": 7.030635284566301e-05, + "loss": 4.3828, + "step": 8865 + }, + { + "epoch": 0.8658203125, + "grad_norm": 0.14363709092140198, + "learning_rate": 7.027727876789241e-05, + "loss": 4.4062, + "step": 8866 + }, + { + "epoch": 0.86591796875, + "grad_norm": 0.1480044722557068, + "learning_rate": 7.024822453660019e-05, + "loss": 4.3828, + "step": 8867 + }, + { + "epoch": 0.866015625, + "grad_norm": 0.13767226040363312, + "learning_rate": 7.021919015460299e-05, + "loss": 4.4023, + "step": 8868 + }, + { + "epoch": 0.86611328125, + "grad_norm": 0.14655128121376038, + "learning_rate": 7.01901756247154e-05, + "loss": 4.3711, + "step": 8869 + }, + { + "epoch": 0.8662109375, + "grad_norm": 0.14799323678016663, + "learning_rate": 7.016118094975024e-05, + "loss": 4.3828, + "step": 8870 + }, + { + "epoch": 0.86630859375, + "grad_norm": 0.14796414971351624, + "learning_rate": 7.013220613251837e-05, + "loss": 4.3906, + "step": 8871 + }, + { + "epoch": 0.86640625, + "grad_norm": 0.1504342406988144, + "learning_rate": 7.010325117582869e-05, + "loss": 4.4141, + "step": 8872 + }, + { + "epoch": 0.86650390625, + "grad_norm": 0.14436116814613342, + "learning_rate": 7.007431608248815e-05, + "loss": 4.3906, + "step": 8873 + }, + { + "epoch": 0.8666015625, + "grad_norm": 0.14391006529331207, + "learning_rate": 7.004540085530184e-05, + "loss": 4.3867, + "step": 8874 + }, + { + "epoch": 0.86669921875, + "grad_norm": 0.1500828117132187, + "learning_rate": 7.001650549707283e-05, + "loss": 4.4375, + "step": 8875 + }, + { + "epoch": 0.866796875, + "grad_norm": 0.14786095917224884, + "learning_rate": 6.998763001060244e-05, + "loss": 4.3789, + "step": 8876 + }, + { + "epoch": 0.86689453125, + "grad_norm": 0.15559735894203186, + "learning_rate": 6.995877439868989e-05, + "loss": 4.4023, + "step": 8877 + }, + { + "epoch": 0.8669921875, + "grad_norm": 0.14653314650058746, + "learning_rate": 6.99299386641325e-05, + "loss": 4.3984, + "step": 8878 + }, + { + "epoch": 0.86708984375, + "grad_norm": 0.14751982688903809, + "learning_rate": 6.99011228097258e-05, + "loss": 4.3984, + "step": 8879 + }, + { + "epoch": 0.8671875, + "grad_norm": 0.1542923003435135, + "learning_rate": 6.987232683826325e-05, + "loss": 4.3633, + "step": 8880 + }, + { + "epoch": 0.86728515625, + "grad_norm": 0.14119315147399902, + "learning_rate": 6.984355075253635e-05, + "loss": 4.4023, + "step": 8881 + }, + { + "epoch": 0.8673828125, + "grad_norm": 0.1518603414297104, + "learning_rate": 6.981479455533485e-05, + "loss": 4.3867, + "step": 8882 + }, + { + "epoch": 0.86748046875, + "grad_norm": 0.15691013634204865, + "learning_rate": 
6.978605824944636e-05, + "loss": 4.3633, + "step": 8883 + }, + { + "epoch": 0.867578125, + "grad_norm": 0.15573550760746002, + "learning_rate": 6.975734183765678e-05, + "loss": 4.4023, + "step": 8884 + }, + { + "epoch": 0.86767578125, + "grad_norm": 0.1466553509235382, + "learning_rate": 6.972864532274989e-05, + "loss": 4.3867, + "step": 8885 + }, + { + "epoch": 0.8677734375, + "grad_norm": 0.1553698480129242, + "learning_rate": 6.969996870750767e-05, + "loss": 4.3828, + "step": 8886 + }, + { + "epoch": 0.86787109375, + "grad_norm": 0.1650436818599701, + "learning_rate": 6.967131199471011e-05, + "loss": 4.3906, + "step": 8887 + }, + { + "epoch": 0.86796875, + "grad_norm": 0.14378397166728973, + "learning_rate": 6.96426751871353e-05, + "loss": 4.3789, + "step": 8888 + }, + { + "epoch": 0.86806640625, + "grad_norm": 0.15929026901721954, + "learning_rate": 6.961405828755939e-05, + "loss": 4.3672, + "step": 8889 + }, + { + "epoch": 0.8681640625, + "grad_norm": 0.1478239893913269, + "learning_rate": 6.958546129875651e-05, + "loss": 4.3828, + "step": 8890 + }, + { + "epoch": 0.86826171875, + "grad_norm": 0.14460691809654236, + "learning_rate": 6.955688422349901e-05, + "loss": 4.418, + "step": 8891 + }, + { + "epoch": 0.868359375, + "grad_norm": 0.14372768998146057, + "learning_rate": 6.952832706455726e-05, + "loss": 4.3945, + "step": 8892 + }, + { + "epoch": 0.86845703125, + "grad_norm": 0.15215498208999634, + "learning_rate": 6.949978982469965e-05, + "loss": 4.3828, + "step": 8893 + }, + { + "epoch": 0.8685546875, + "grad_norm": 0.14913409948349, + "learning_rate": 6.947127250669268e-05, + "loss": 4.3945, + "step": 8894 + }, + { + "epoch": 0.86865234375, + "grad_norm": 0.14291760325431824, + "learning_rate": 6.944277511330093e-05, + "loss": 4.3867, + "step": 8895 + }, + { + "epoch": 0.86875, + "grad_norm": 0.16068217158317566, + "learning_rate": 6.9414297647287e-05, + "loss": 4.3984, + "step": 8896 + }, + { + "epoch": 0.86884765625, + "grad_norm": 0.1480381190776825, + "learning_rate": 6.93858401114116e-05, + "loss": 4.3828, + "step": 8897 + }, + { + "epoch": 0.8689453125, + "grad_norm": 0.14909791946411133, + "learning_rate": 6.93574025084335e-05, + "loss": 4.375, + "step": 8898 + }, + { + "epoch": 0.86904296875, + "grad_norm": 0.152455136179924, + "learning_rate": 6.932898484110951e-05, + "loss": 4.4219, + "step": 8899 + }, + { + "epoch": 0.869140625, + "grad_norm": 0.1536921262741089, + "learning_rate": 6.930058711219456e-05, + "loss": 4.3945, + "step": 8900 + }, + { + "epoch": 0.86923828125, + "grad_norm": 0.14522571861743927, + "learning_rate": 6.927220932444159e-05, + "loss": 4.3789, + "step": 8901 + }, + { + "epoch": 0.8693359375, + "grad_norm": 0.1423751711845398, + "learning_rate": 6.924385148060167e-05, + "loss": 4.3867, + "step": 8902 + }, + { + "epoch": 0.86943359375, + "grad_norm": 0.15197959542274475, + "learning_rate": 6.921551358342384e-05, + "loss": 4.3945, + "step": 8903 + }, + { + "epoch": 0.86953125, + "grad_norm": 0.1506188064813614, + "learning_rate": 6.918719563565533e-05, + "loss": 4.3945, + "step": 8904 + }, + { + "epoch": 0.86962890625, + "grad_norm": 0.14490409195423126, + "learning_rate": 6.915889764004136e-05, + "loss": 4.3945, + "step": 8905 + }, + { + "epoch": 0.8697265625, + "grad_norm": 0.15326227247714996, + "learning_rate": 6.913061959932524e-05, + "loss": 4.3984, + "step": 8906 + }, + { + "epoch": 0.86982421875, + "grad_norm": 0.1494387984275818, + "learning_rate": 6.910236151624826e-05, + "loss": 4.3906, + "step": 8907 + }, + { + "epoch": 0.869921875, + 
"grad_norm": 0.14650191366672516, + "learning_rate": 6.907412339354992e-05, + "loss": 4.3945, + "step": 8908 + }, + { + "epoch": 0.87001953125, + "grad_norm": 0.15047316253185272, + "learning_rate": 6.904590523396769e-05, + "loss": 4.3906, + "step": 8909 + }, + { + "epoch": 0.8701171875, + "grad_norm": 0.14876294136047363, + "learning_rate": 6.90177070402371e-05, + "loss": 4.3828, + "step": 8910 + }, + { + "epoch": 0.87021484375, + "grad_norm": 0.14703673124313354, + "learning_rate": 6.898952881509185e-05, + "loss": 4.4102, + "step": 8911 + }, + { + "epoch": 0.8703125, + "grad_norm": 0.14696727693080902, + "learning_rate": 6.896137056126355e-05, + "loss": 4.3438, + "step": 8912 + }, + { + "epoch": 0.87041015625, + "grad_norm": 0.15083788335323334, + "learning_rate": 6.893323228148201e-05, + "loss": 4.4023, + "step": 8913 + }, + { + "epoch": 0.8705078125, + "grad_norm": 0.14293745160102844, + "learning_rate": 6.890511397847505e-05, + "loss": 4.4258, + "step": 8914 + }, + { + "epoch": 0.87060546875, + "grad_norm": 0.14710396528244019, + "learning_rate": 6.887701565496848e-05, + "loss": 4.3945, + "step": 8915 + }, + { + "epoch": 0.870703125, + "grad_norm": 0.1459885984659195, + "learning_rate": 6.884893731368628e-05, + "loss": 4.4141, + "step": 8916 + }, + { + "epoch": 0.87080078125, + "grad_norm": 0.14741556346416473, + "learning_rate": 6.882087895735045e-05, + "loss": 4.3789, + "step": 8917 + }, + { + "epoch": 0.8708984375, + "grad_norm": 0.14475291967391968, + "learning_rate": 6.879284058868107e-05, + "loss": 4.3945, + "step": 8918 + }, + { + "epoch": 0.87099609375, + "grad_norm": 0.15186148881912231, + "learning_rate": 6.876482221039628e-05, + "loss": 4.3945, + "step": 8919 + }, + { + "epoch": 0.87109375, + "grad_norm": 0.14632610976696014, + "learning_rate": 6.873682382521225e-05, + "loss": 4.3945, + "step": 8920 + }, + { + "epoch": 0.87119140625, + "grad_norm": 0.1436140388250351, + "learning_rate": 6.870884543584322e-05, + "loss": 4.3906, + "step": 8921 + }, + { + "epoch": 0.8712890625, + "grad_norm": 0.14582093060016632, + "learning_rate": 6.868088704500155e-05, + "loss": 4.3984, + "step": 8922 + }, + { + "epoch": 0.87138671875, + "grad_norm": 0.15047577023506165, + "learning_rate": 6.86529486553976e-05, + "loss": 4.3945, + "step": 8923 + }, + { + "epoch": 0.871484375, + "grad_norm": 0.14851802587509155, + "learning_rate": 6.862503026973976e-05, + "loss": 4.3984, + "step": 8924 + }, + { + "epoch": 0.87158203125, + "grad_norm": 0.15050090849399567, + "learning_rate": 6.859713189073457e-05, + "loss": 4.3867, + "step": 8925 + }, + { + "epoch": 0.8716796875, + "grad_norm": 0.14493553340435028, + "learning_rate": 6.856925352108657e-05, + "loss": 4.3789, + "step": 8926 + }, + { + "epoch": 0.87177734375, + "grad_norm": 0.14828041195869446, + "learning_rate": 6.854139516349842e-05, + "loss": 4.4102, + "step": 8927 + }, + { + "epoch": 0.871875, + "grad_norm": 0.14975591003894806, + "learning_rate": 6.851355682067073e-05, + "loss": 4.4023, + "step": 8928 + }, + { + "epoch": 0.87197265625, + "grad_norm": 0.14585615694522858, + "learning_rate": 6.848573849530232e-05, + "loss": 4.418, + "step": 8929 + }, + { + "epoch": 0.8720703125, + "grad_norm": 0.15159842371940613, + "learning_rate": 6.845794019008992e-05, + "loss": 4.3711, + "step": 8930 + }, + { + "epoch": 0.87216796875, + "grad_norm": 0.1508469581604004, + "learning_rate": 6.843016190772847e-05, + "loss": 4.3789, + "step": 8931 + }, + { + "epoch": 0.872265625, + "grad_norm": 0.14573785662651062, + "learning_rate": 6.840240365091076e-05, + 
"loss": 4.3555, + "step": 8932 + }, + { + "epoch": 0.87236328125, + "grad_norm": 0.1509004682302475, + "learning_rate": 6.837466542232787e-05, + "loss": 4.3555, + "step": 8933 + }, + { + "epoch": 0.8724609375, + "grad_norm": 0.1479395627975464, + "learning_rate": 6.834694722466879e-05, + "loss": 4.3906, + "step": 8934 + }, + { + "epoch": 0.87255859375, + "grad_norm": 0.15501372516155243, + "learning_rate": 6.831924906062062e-05, + "loss": 4.3828, + "step": 8935 + }, + { + "epoch": 0.87265625, + "grad_norm": 0.14134545624256134, + "learning_rate": 6.829157093286849e-05, + "loss": 4.418, + "step": 8936 + }, + { + "epoch": 0.87275390625, + "grad_norm": 0.1503482162952423, + "learning_rate": 6.826391284409564e-05, + "loss": 4.3555, + "step": 8937 + }, + { + "epoch": 0.8728515625, + "grad_norm": 0.15063899755477905, + "learning_rate": 6.823627479698328e-05, + "loss": 4.3672, + "step": 8938 + }, + { + "epoch": 0.87294921875, + "grad_norm": 0.14417144656181335, + "learning_rate": 6.820865679421086e-05, + "loss": 4.375, + "step": 8939 + }, + { + "epoch": 0.873046875, + "grad_norm": 0.14891891181468964, + "learning_rate": 6.818105883845558e-05, + "loss": 4.3984, + "step": 8940 + }, + { + "epoch": 0.87314453125, + "grad_norm": 0.14799056947231293, + "learning_rate": 6.815348093239301e-05, + "loss": 4.3438, + "step": 8941 + }, + { + "epoch": 0.8732421875, + "grad_norm": 0.15142256021499634, + "learning_rate": 6.812592307869657e-05, + "loss": 4.4102, + "step": 8942 + }, + { + "epoch": 0.87333984375, + "grad_norm": 0.1425715684890747, + "learning_rate": 6.809838528003781e-05, + "loss": 4.4062, + "step": 8943 + }, + { + "epoch": 0.8734375, + "grad_norm": 0.14246028661727905, + "learning_rate": 6.807086753908636e-05, + "loss": 4.3555, + "step": 8944 + }, + { + "epoch": 0.87353515625, + "grad_norm": 0.15302224457263947, + "learning_rate": 6.804336985850989e-05, + "loss": 4.3945, + "step": 8945 + }, + { + "epoch": 0.8736328125, + "grad_norm": 0.14499178528785706, + "learning_rate": 6.80158922409741e-05, + "loss": 4.3906, + "step": 8946 + }, + { + "epoch": 0.87373046875, + "grad_norm": 0.15099434554576874, + "learning_rate": 6.798843468914273e-05, + "loss": 4.4102, + "step": 8947 + }, + { + "epoch": 0.873828125, + "grad_norm": 0.14186333119869232, + "learning_rate": 6.796099720567767e-05, + "loss": 4.3672, + "step": 8948 + }, + { + "epoch": 0.87392578125, + "grad_norm": 0.1488317996263504, + "learning_rate": 6.79335797932387e-05, + "loss": 4.3555, + "step": 8949 + }, + { + "epoch": 0.8740234375, + "grad_norm": 0.14730657637119293, + "learning_rate": 6.790618245448382e-05, + "loss": 4.3594, + "step": 8950 + }, + { + "epoch": 0.87412109375, + "grad_norm": 0.14646779000759125, + "learning_rate": 6.787880519206899e-05, + "loss": 4.4219, + "step": 8951 + }, + { + "epoch": 0.87421875, + "grad_norm": 0.1430400311946869, + "learning_rate": 6.785144800864827e-05, + "loss": 4.3711, + "step": 8952 + }, + { + "epoch": 0.87431640625, + "grad_norm": 0.15296170115470886, + "learning_rate": 6.782411090687375e-05, + "loss": 4.3945, + "step": 8953 + }, + { + "epoch": 0.8744140625, + "grad_norm": 0.14377039670944214, + "learning_rate": 6.779679388939555e-05, + "loss": 4.3672, + "step": 8954 + }, + { + "epoch": 0.87451171875, + "grad_norm": 0.13961009681224823, + "learning_rate": 6.776949695886191e-05, + "loss": 4.3867, + "step": 8955 + }, + { + "epoch": 0.874609375, + "grad_norm": 0.1415305882692337, + "learning_rate": 6.77422201179191e-05, + "loss": 4.3945, + "step": 8956 + }, + { + "epoch": 0.87470703125, + "grad_norm": 
0.14652162790298462, + "learning_rate": 6.771496336921134e-05, + "loss": 4.3945, + "step": 8957 + }, + { + "epoch": 0.8748046875, + "grad_norm": 0.14379429817199707, + "learning_rate": 6.768772671538103e-05, + "loss": 4.3828, + "step": 8958 + }, + { + "epoch": 0.87490234375, + "grad_norm": 0.14595907926559448, + "learning_rate": 6.76605101590686e-05, + "loss": 4.3867, + "step": 8959 + }, + { + "epoch": 0.875, + "grad_norm": 0.1491325944662094, + "learning_rate": 6.763331370291248e-05, + "loss": 4.3711, + "step": 8960 + }, + { + "epoch": 0.87509765625, + "grad_norm": 0.15024317800998688, + "learning_rate": 6.760613734954919e-05, + "loss": 4.4023, + "step": 8961 + }, + { + "epoch": 0.8751953125, + "grad_norm": 0.1458400934934616, + "learning_rate": 6.757898110161332e-05, + "loss": 4.4219, + "step": 8962 + }, + { + "epoch": 0.87529296875, + "grad_norm": 0.1480124592781067, + "learning_rate": 6.755184496173742e-05, + "loss": 4.3906, + "step": 8963 + }, + { + "epoch": 0.875390625, + "grad_norm": 0.14789597690105438, + "learning_rate": 6.752472893255224e-05, + "loss": 4.4141, + "step": 8964 + }, + { + "epoch": 0.87548828125, + "grad_norm": 0.14877234399318695, + "learning_rate": 6.749763301668647e-05, + "loss": 4.3906, + "step": 8965 + }, + { + "epoch": 0.8755859375, + "grad_norm": 0.14373165369033813, + "learning_rate": 6.747055721676683e-05, + "loss": 4.3711, + "step": 8966 + }, + { + "epoch": 0.87568359375, + "grad_norm": 0.15176448225975037, + "learning_rate": 6.744350153541815e-05, + "loss": 4.4062, + "step": 8967 + }, + { + "epoch": 0.87578125, + "grad_norm": 0.1431221067905426, + "learning_rate": 6.741646597526329e-05, + "loss": 4.3984, + "step": 8968 + }, + { + "epoch": 0.87587890625, + "grad_norm": 0.1452053189277649, + "learning_rate": 6.73894505389232e-05, + "loss": 4.3828, + "step": 8969 + }, + { + "epoch": 0.8759765625, + "grad_norm": 0.1430642157793045, + "learning_rate": 6.736245522901682e-05, + "loss": 4.3828, + "step": 8970 + }, + { + "epoch": 0.87607421875, + "grad_norm": 0.14831577241420746, + "learning_rate": 6.733548004816117e-05, + "loss": 4.3477, + "step": 8971 + }, + { + "epoch": 0.876171875, + "grad_norm": 0.1443537473678589, + "learning_rate": 6.73085249989713e-05, + "loss": 4.4062, + "step": 8972 + }, + { + "epoch": 0.87626953125, + "grad_norm": 0.14486804604530334, + "learning_rate": 6.728159008406037e-05, + "loss": 4.4062, + "step": 8973 + }, + { + "epoch": 0.8763671875, + "grad_norm": 0.14425858855247498, + "learning_rate": 6.725467530603944e-05, + "loss": 4.3555, + "step": 8974 + }, + { + "epoch": 0.87646484375, + "grad_norm": 0.14716212451457977, + "learning_rate": 6.722778066751778e-05, + "loss": 4.3633, + "step": 8975 + }, + { + "epoch": 0.8765625, + "grad_norm": 0.15022966265678406, + "learning_rate": 6.720090617110264e-05, + "loss": 4.3555, + "step": 8976 + }, + { + "epoch": 0.87666015625, + "grad_norm": 0.1456318199634552, + "learning_rate": 6.717405181939928e-05, + "loss": 4.4062, + "step": 8977 + }, + { + "epoch": 0.8767578125, + "grad_norm": 0.14914971590042114, + "learning_rate": 6.71472176150111e-05, + "loss": 4.4102, + "step": 8978 + }, + { + "epoch": 0.87685546875, + "grad_norm": 0.14559727907180786, + "learning_rate": 6.712040356053946e-05, + "loss": 4.3906, + "step": 8979 + }, + { + "epoch": 0.876953125, + "grad_norm": 0.1488996148109436, + "learning_rate": 6.70936096585838e-05, + "loss": 4.3945, + "step": 8980 + }, + { + "epoch": 0.87705078125, + "grad_norm": 0.15061749517917633, + "learning_rate": 6.706683591174168e-05, + "loss": 4.3984, + "step": 
8981 + }, + { + "epoch": 0.8771484375, + "grad_norm": 0.14564159512519836, + "learning_rate": 6.704008232260852e-05, + "loss": 4.3867, + "step": 8982 + }, + { + "epoch": 0.87724609375, + "grad_norm": 0.3011604845523834, + "learning_rate": 6.701334889377797e-05, + "loss": 4.3906, + "step": 8983 + }, + { + "epoch": 0.87734375, + "grad_norm": 0.14480391144752502, + "learning_rate": 6.69866356278416e-05, + "loss": 4.4062, + "step": 8984 + }, + { + "epoch": 0.87744140625, + "grad_norm": 0.14742915332317352, + "learning_rate": 6.695994252738915e-05, + "loss": 4.3984, + "step": 8985 + }, + { + "epoch": 0.8775390625, + "grad_norm": 0.14873699843883514, + "learning_rate": 6.69332695950083e-05, + "loss": 4.3594, + "step": 8986 + }, + { + "epoch": 0.87763671875, + "grad_norm": 0.15016338229179382, + "learning_rate": 6.690661683328478e-05, + "loss": 4.3984, + "step": 8987 + }, + { + "epoch": 0.877734375, + "grad_norm": 0.15255272388458252, + "learning_rate": 6.687998424480246e-05, + "loss": 4.4102, + "step": 8988 + }, + { + "epoch": 0.87783203125, + "grad_norm": 0.15759055316448212, + "learning_rate": 6.685337183214312e-05, + "loss": 4.3594, + "step": 8989 + }, + { + "epoch": 0.8779296875, + "grad_norm": 0.15167075395584106, + "learning_rate": 6.682677959788675e-05, + "loss": 4.375, + "step": 8990 + }, + { + "epoch": 0.87802734375, + "grad_norm": 0.14515595138072968, + "learning_rate": 6.680020754461115e-05, + "loss": 4.3633, + "step": 8991 + }, + { + "epoch": 0.878125, + "grad_norm": 0.14797750115394592, + "learning_rate": 6.677365567489241e-05, + "loss": 4.3789, + "step": 8992 + }, + { + "epoch": 0.87822265625, + "grad_norm": 0.1482226699590683, + "learning_rate": 6.674712399130448e-05, + "loss": 4.375, + "step": 8993 + }, + { + "epoch": 0.8783203125, + "grad_norm": 0.14677990972995758, + "learning_rate": 6.67206124964195e-05, + "loss": 4.3945, + "step": 8994 + }, + { + "epoch": 0.87841796875, + "grad_norm": 0.15139663219451904, + "learning_rate": 6.669412119280752e-05, + "loss": 4.4023, + "step": 8995 + }, + { + "epoch": 0.878515625, + "grad_norm": 0.1487555205821991, + "learning_rate": 6.666765008303671e-05, + "loss": 4.3789, + "step": 8996 + }, + { + "epoch": 0.87861328125, + "grad_norm": 0.14890432357788086, + "learning_rate": 6.664119916967325e-05, + "loss": 4.3594, + "step": 8997 + }, + { + "epoch": 0.8787109375, + "grad_norm": 0.15141025185585022, + "learning_rate": 6.66147684552815e-05, + "loss": 4.4023, + "step": 8998 + }, + { + "epoch": 0.87880859375, + "grad_norm": 0.14836078882217407, + "learning_rate": 6.658835794242354e-05, + "loss": 4.4023, + "step": 8999 + }, + { + "epoch": 0.87890625, + "grad_norm": 0.15147854387760162, + "learning_rate": 6.65619676336598e-05, + "loss": 4.3867, + "step": 9000 + }, + { + "epoch": 0.87900390625, + "grad_norm": 0.15168160200119019, + "learning_rate": 6.653559753154863e-05, + "loss": 4.3828, + "step": 9001 + }, + { + "epoch": 0.8791015625, + "grad_norm": 0.1460506170988083, + "learning_rate": 6.650924763864642e-05, + "loss": 4.375, + "step": 9002 + }, + { + "epoch": 0.87919921875, + "grad_norm": 0.14768317341804504, + "learning_rate": 6.64829179575076e-05, + "loss": 4.3633, + "step": 9003 + }, + { + "epoch": 0.879296875, + "grad_norm": 0.1453259438276291, + "learning_rate": 6.645660849068472e-05, + "loss": 4.3711, + "step": 9004 + }, + { + "epoch": 0.87939453125, + "grad_norm": 0.14434634149074554, + "learning_rate": 6.643031924072823e-05, + "loss": 4.4023, + "step": 9005 + }, + { + "epoch": 0.8794921875, + "grad_norm": 0.1529405415058136, + 
"learning_rate": 6.640405021018673e-05, + "loss": 4.4102, + "step": 9006 + }, + { + "epoch": 0.87958984375, + "grad_norm": 0.1471836119890213, + "learning_rate": 6.637780140160684e-05, + "loss": 4.3867, + "step": 9007 + }, + { + "epoch": 0.8796875, + "grad_norm": 0.14870373904705048, + "learning_rate": 6.635157281753314e-05, + "loss": 4.418, + "step": 9008 + }, + { + "epoch": 0.87978515625, + "grad_norm": 0.14342452585697174, + "learning_rate": 6.632536446050838e-05, + "loss": 4.4023, + "step": 9009 + }, + { + "epoch": 0.8798828125, + "grad_norm": 0.15037570893764496, + "learning_rate": 6.629917633307323e-05, + "loss": 4.375, + "step": 9010 + }, + { + "epoch": 0.87998046875, + "grad_norm": 0.14742928743362427, + "learning_rate": 6.627300843776651e-05, + "loss": 4.4375, + "step": 9011 + }, + { + "epoch": 0.880078125, + "grad_norm": 0.14005182683467865, + "learning_rate": 6.624686077712497e-05, + "loss": 4.3516, + "step": 9012 + }, + { + "epoch": 0.88017578125, + "grad_norm": 0.14960086345672607, + "learning_rate": 6.622073335368344e-05, + "loss": 4.4219, + "step": 9013 + }, + { + "epoch": 0.8802734375, + "grad_norm": 0.14186294376850128, + "learning_rate": 6.619462616997488e-05, + "loss": 4.3711, + "step": 9014 + }, + { + "epoch": 0.88037109375, + "grad_norm": 0.14298512041568756, + "learning_rate": 6.616853922853013e-05, + "loss": 4.3711, + "step": 9015 + }, + { + "epoch": 0.88046875, + "grad_norm": 0.14295744895935059, + "learning_rate": 6.614247253187816e-05, + "loss": 4.3555, + "step": 9016 + }, + { + "epoch": 0.88056640625, + "grad_norm": 0.14928875863552094, + "learning_rate": 6.611642608254593e-05, + "loss": 4.4023, + "step": 9017 + }, + { + "epoch": 0.8806640625, + "grad_norm": 0.14415262639522552, + "learning_rate": 6.609039988305851e-05, + "loss": 4.3672, + "step": 9018 + }, + { + "epoch": 0.88076171875, + "grad_norm": 0.14613351225852966, + "learning_rate": 6.606439393593895e-05, + "loss": 4.3633, + "step": 9019 + }, + { + "epoch": 0.880859375, + "grad_norm": 0.14568105340003967, + "learning_rate": 6.603840824370835e-05, + "loss": 4.3906, + "step": 9020 + }, + { + "epoch": 0.88095703125, + "grad_norm": 0.14112257957458496, + "learning_rate": 6.601244280888582e-05, + "loss": 4.3594, + "step": 9021 + }, + { + "epoch": 0.8810546875, + "grad_norm": 0.14277367293834686, + "learning_rate": 6.59864976339886e-05, + "loss": 4.3711, + "step": 9022 + }, + { + "epoch": 0.88115234375, + "grad_norm": 0.14463940262794495, + "learning_rate": 6.596057272153182e-05, + "loss": 4.375, + "step": 9023 + }, + { + "epoch": 0.88125, + "grad_norm": 0.14160259068012238, + "learning_rate": 6.593466807402874e-05, + "loss": 4.3828, + "step": 9024 + }, + { + "epoch": 0.88134765625, + "grad_norm": 0.14475932717323303, + "learning_rate": 6.590878369399073e-05, + "loss": 4.4102, + "step": 9025 + }, + { + "epoch": 0.8814453125, + "grad_norm": 0.13947176933288574, + "learning_rate": 6.588291958392702e-05, + "loss": 4.4336, + "step": 9026 + }, + { + "epoch": 0.88154296875, + "grad_norm": 0.14751550555229187, + "learning_rate": 6.585707574634492e-05, + "loss": 4.3984, + "step": 9027 + }, + { + "epoch": 0.881640625, + "grad_norm": 0.14679856598377228, + "learning_rate": 6.583125218374992e-05, + "loss": 4.418, + "step": 9028 + }, + { + "epoch": 0.88173828125, + "grad_norm": 0.1422235071659088, + "learning_rate": 6.580544889864536e-05, + "loss": 4.4219, + "step": 9029 + }, + { + "epoch": 0.8818359375, + "grad_norm": 0.14435319602489471, + "learning_rate": 6.577966589353272e-05, + "loss": 4.3984, + "step": 9030 + }, + { 
+ "epoch": 0.88193359375, + "grad_norm": 0.14942294359207153, + "learning_rate": 6.575390317091149e-05, + "loss": 4.418, + "step": 9031 + }, + { + "epoch": 0.88203125, + "grad_norm": 0.13773180544376373, + "learning_rate": 6.572816073327918e-05, + "loss": 4.3984, + "step": 9032 + }, + { + "epoch": 0.88212890625, + "grad_norm": 0.15514390170574188, + "learning_rate": 6.570243858313139e-05, + "loss": 4.3672, + "step": 9033 + }, + { + "epoch": 0.8822265625, + "grad_norm": 0.14861541986465454, + "learning_rate": 6.567673672296167e-05, + "loss": 4.375, + "step": 9034 + }, + { + "epoch": 0.88232421875, + "grad_norm": 0.1453334540128708, + "learning_rate": 6.565105515526162e-05, + "loss": 4.3945, + "step": 9035 + }, + { + "epoch": 0.882421875, + "grad_norm": 0.14477375149726868, + "learning_rate": 6.562539388252093e-05, + "loss": 4.375, + "step": 9036 + }, + { + "epoch": 0.88251953125, + "grad_norm": 0.14247383177280426, + "learning_rate": 6.559975290722727e-05, + "loss": 4.3867, + "step": 9037 + }, + { + "epoch": 0.8826171875, + "grad_norm": 0.14814023673534393, + "learning_rate": 6.557413223186635e-05, + "loss": 4.3828, + "step": 9038 + }, + { + "epoch": 0.88271484375, + "grad_norm": 0.1399742215871811, + "learning_rate": 6.554853185892194e-05, + "loss": 4.4297, + "step": 9039 + }, + { + "epoch": 0.8828125, + "grad_norm": 0.1427522599697113, + "learning_rate": 6.552295179087581e-05, + "loss": 4.4102, + "step": 9040 + }, + { + "epoch": 0.88291015625, + "grad_norm": 0.14012984931468964, + "learning_rate": 6.549739203020782e-05, + "loss": 4.4023, + "step": 9041 + }, + { + "epoch": 0.8830078125, + "grad_norm": 0.15187619626522064, + "learning_rate": 6.547185257939572e-05, + "loss": 4.4219, + "step": 9042 + }, + { + "epoch": 0.88310546875, + "grad_norm": 0.1436062604188919, + "learning_rate": 6.544633344091546e-05, + "loss": 4.3555, + "step": 9043 + }, + { + "epoch": 0.883203125, + "grad_norm": 0.14954087138175964, + "learning_rate": 6.542083461724095e-05, + "loss": 4.3711, + "step": 9044 + }, + { + "epoch": 0.88330078125, + "grad_norm": 0.14394350349903107, + "learning_rate": 6.539535611084407e-05, + "loss": 4.4023, + "step": 9045 + }, + { + "epoch": 0.8833984375, + "grad_norm": 0.1499234288930893, + "learning_rate": 6.536989792419484e-05, + "loss": 4.418, + "step": 9046 + }, + { + "epoch": 0.88349609375, + "grad_norm": 0.1474909782409668, + "learning_rate": 6.534446005976125e-05, + "loss": 4.3672, + "step": 9047 + }, + { + "epoch": 0.88359375, + "grad_norm": 0.14788895845413208, + "learning_rate": 6.531904252000931e-05, + "loss": 4.4141, + "step": 9048 + }, + { + "epoch": 0.88369140625, + "grad_norm": 0.14391854405403137, + "learning_rate": 6.52936453074031e-05, + "loss": 4.3867, + "step": 9049 + }, + { + "epoch": 0.8837890625, + "grad_norm": 0.14436475932598114, + "learning_rate": 6.526826842440468e-05, + "loss": 4.4062, + "step": 9050 + }, + { + "epoch": 0.88388671875, + "grad_norm": 0.14479714632034302, + "learning_rate": 6.52429118734742e-05, + "loss": 4.3477, + "step": 9051 + }, + { + "epoch": 0.883984375, + "grad_norm": 0.1448213756084442, + "learning_rate": 6.521757565706976e-05, + "loss": 4.3945, + "step": 9052 + }, + { + "epoch": 0.88408203125, + "grad_norm": 0.147447869181633, + "learning_rate": 6.519225977764758e-05, + "loss": 4.4375, + "step": 9053 + }, + { + "epoch": 0.8841796875, + "grad_norm": 0.14715354144573212, + "learning_rate": 6.516696423766189e-05, + "loss": 4.4023, + "step": 9054 + }, + { + "epoch": 0.88427734375, + "grad_norm": 0.14778724312782288, + "learning_rate": 
6.51416890395648e-05, + "loss": 4.4062, + "step": 9055 + }, + { + "epoch": 0.884375, + "grad_norm": 0.14861759543418884, + "learning_rate": 6.51164341858067e-05, + "loss": 4.3398, + "step": 9056 + }, + { + "epoch": 0.88447265625, + "grad_norm": 0.14571991562843323, + "learning_rate": 6.509119967883578e-05, + "loss": 4.4219, + "step": 9057 + }, + { + "epoch": 0.8845703125, + "grad_norm": 0.14648374915122986, + "learning_rate": 6.50659855210984e-05, + "loss": 4.3867, + "step": 9058 + }, + { + "epoch": 0.88466796875, + "grad_norm": 0.14548373222351074, + "learning_rate": 6.504079171503892e-05, + "loss": 4.375, + "step": 9059 + }, + { + "epoch": 0.884765625, + "grad_norm": 0.15104582905769348, + "learning_rate": 6.501561826309969e-05, + "loss": 4.3867, + "step": 9060 + }, + { + "epoch": 0.88486328125, + "grad_norm": 0.1512313038110733, + "learning_rate": 6.499046516772107e-05, + "loss": 4.3789, + "step": 9061 + }, + { + "epoch": 0.8849609375, + "grad_norm": 0.15206752717494965, + "learning_rate": 6.496533243134151e-05, + "loss": 4.375, + "step": 9062 + }, + { + "epoch": 0.88505859375, + "grad_norm": 0.15196780860424042, + "learning_rate": 6.49402200563975e-05, + "loss": 4.3711, + "step": 9063 + }, + { + "epoch": 0.88515625, + "grad_norm": 0.14690403640270233, + "learning_rate": 6.491512804532349e-05, + "loss": 4.3906, + "step": 9064 + }, + { + "epoch": 0.88525390625, + "grad_norm": 0.15021398663520813, + "learning_rate": 6.489005640055192e-05, + "loss": 4.3672, + "step": 9065 + }, + { + "epoch": 0.8853515625, + "grad_norm": 0.14769993722438812, + "learning_rate": 6.48650051245134e-05, + "loss": 4.4102, + "step": 9066 + }, + { + "epoch": 0.88544921875, + "grad_norm": 0.1500520259141922, + "learning_rate": 6.483997421963641e-05, + "loss": 4.3516, + "step": 9067 + }, + { + "epoch": 0.885546875, + "grad_norm": 0.14806228876113892, + "learning_rate": 6.481496368834755e-05, + "loss": 4.3672, + "step": 9068 + }, + { + "epoch": 0.88564453125, + "grad_norm": 0.1508478969335556, + "learning_rate": 6.478997353307145e-05, + "loss": 4.3906, + "step": 9069 + }, + { + "epoch": 0.8857421875, + "grad_norm": 0.144372820854187, + "learning_rate": 6.47650037562307e-05, + "loss": 4.3906, + "step": 9070 + }, + { + "epoch": 0.88583984375, + "grad_norm": 0.14191342890262604, + "learning_rate": 6.474005436024601e-05, + "loss": 4.3789, + "step": 9071 + }, + { + "epoch": 0.8859375, + "grad_norm": 0.14269863069057465, + "learning_rate": 6.471512534753596e-05, + "loss": 4.4102, + "step": 9072 + }, + { + "epoch": 0.88603515625, + "grad_norm": 0.15396232903003693, + "learning_rate": 6.469021672051732e-05, + "loss": 4.3945, + "step": 9073 + }, + { + "epoch": 0.8861328125, + "grad_norm": 0.14117403328418732, + "learning_rate": 6.466532848160485e-05, + "loss": 4.4023, + "step": 9074 + }, + { + "epoch": 0.88623046875, + "grad_norm": 0.14192523062229156, + "learning_rate": 6.464046063321116e-05, + "loss": 4.3594, + "step": 9075 + }, + { + "epoch": 0.886328125, + "grad_norm": 0.14930447936058044, + "learning_rate": 6.461561317774712e-05, + "loss": 4.4375, + "step": 9076 + }, + { + "epoch": 0.88642578125, + "grad_norm": 0.15287673473358154, + "learning_rate": 6.459078611762148e-05, + "loss": 4.3828, + "step": 9077 + }, + { + "epoch": 0.8865234375, + "grad_norm": 0.1444351077079773, + "learning_rate": 6.45659794552411e-05, + "loss": 4.3789, + "step": 9078 + }, + { + "epoch": 0.88662109375, + "grad_norm": 0.15636518597602844, + "learning_rate": 6.454119319301079e-05, + "loss": 4.3789, + "step": 9079 + }, + { + "epoch": 0.88671875, + 
"grad_norm": 0.14854060113430023, + "learning_rate": 6.45164273333334e-05, + "loss": 4.3828, + "step": 9080 + }, + { + "epoch": 0.88681640625, + "grad_norm": 0.1421976387500763, + "learning_rate": 6.449168187860984e-05, + "loss": 4.3906, + "step": 9081 + }, + { + "epoch": 0.8869140625, + "grad_norm": 0.1513184756040573, + "learning_rate": 6.446695683123901e-05, + "loss": 4.3984, + "step": 9082 + }, + { + "epoch": 0.88701171875, + "grad_norm": 0.15103955566883087, + "learning_rate": 6.444225219361781e-05, + "loss": 4.4297, + "step": 9083 + }, + { + "epoch": 0.887109375, + "grad_norm": 0.14477276802062988, + "learning_rate": 6.441756796814118e-05, + "loss": 4.3906, + "step": 9084 + }, + { + "epoch": 0.88720703125, + "grad_norm": 0.1480569988489151, + "learning_rate": 6.439290415720213e-05, + "loss": 4.3984, + "step": 9085 + }, + { + "epoch": 0.8873046875, + "grad_norm": 0.14636801183223724, + "learning_rate": 6.436826076319159e-05, + "loss": 4.4023, + "step": 9086 + }, + { + "epoch": 0.88740234375, + "grad_norm": 0.15187689661979675, + "learning_rate": 6.434363778849864e-05, + "loss": 4.4258, + "step": 9087 + }, + { + "epoch": 0.8875, + "grad_norm": 0.14853641390800476, + "learning_rate": 6.431903523551028e-05, + "loss": 4.3984, + "step": 9088 + }, + { + "epoch": 0.88759765625, + "grad_norm": 0.14223462343215942, + "learning_rate": 6.429445310661152e-05, + "loss": 4.3984, + "step": 9089 + }, + { + "epoch": 0.8876953125, + "grad_norm": 0.14879189431667328, + "learning_rate": 6.42698914041855e-05, + "loss": 4.3711, + "step": 9090 + }, + { + "epoch": 0.88779296875, + "grad_norm": 0.14459435641765594, + "learning_rate": 6.424535013061331e-05, + "loss": 4.3555, + "step": 9091 + }, + { + "epoch": 0.887890625, + "grad_norm": 0.15028531849384308, + "learning_rate": 6.422082928827399e-05, + "loss": 4.3789, + "step": 9092 + }, + { + "epoch": 0.88798828125, + "grad_norm": 0.14909176528453827, + "learning_rate": 6.419632887954473e-05, + "loss": 4.3984, + "step": 9093 + }, + { + "epoch": 0.8880859375, + "grad_norm": 0.14658069610595703, + "learning_rate": 6.417184890680063e-05, + "loss": 4.3789, + "step": 9094 + }, + { + "epoch": 0.88818359375, + "grad_norm": 0.14979128539562225, + "learning_rate": 6.414738937241489e-05, + "loss": 4.3789, + "step": 9095 + }, + { + "epoch": 0.88828125, + "grad_norm": 0.14564932882785797, + "learning_rate": 6.412295027875868e-05, + "loss": 4.3828, + "step": 9096 + }, + { + "epoch": 0.88837890625, + "grad_norm": 0.14366215467453003, + "learning_rate": 6.409853162820123e-05, + "loss": 4.3906, + "step": 9097 + }, + { + "epoch": 0.8884765625, + "grad_norm": 0.14720959961414337, + "learning_rate": 6.407413342310973e-05, + "loss": 4.3438, + "step": 9098 + }, + { + "epoch": 0.88857421875, + "grad_norm": 0.14598305523395538, + "learning_rate": 6.404975566584948e-05, + "loss": 4.4141, + "step": 9099 + }, + { + "epoch": 0.888671875, + "grad_norm": 0.1431853026151657, + "learning_rate": 6.402539835878367e-05, + "loss": 4.3984, + "step": 9100 + }, + { + "epoch": 0.88876953125, + "grad_norm": 0.15002523362636566, + "learning_rate": 6.40010615042736e-05, + "loss": 4.4102, + "step": 9101 + }, + { + "epoch": 0.8888671875, + "grad_norm": 0.14679428935050964, + "learning_rate": 6.397674510467854e-05, + "loss": 4.418, + "step": 9102 + }, + { + "epoch": 0.88896484375, + "grad_norm": 0.14645428955554962, + "learning_rate": 6.395244916235583e-05, + "loss": 4.3867, + "step": 9103 + }, + { + "epoch": 0.8890625, + "grad_norm": 0.14729158580303192, + "learning_rate": 6.392817367966081e-05, + 
"loss": 4.4102, + "step": 9104 + }, + { + "epoch": 0.88916015625, + "grad_norm": 0.14763854444026947, + "learning_rate": 6.39039186589468e-05, + "loss": 4.3867, + "step": 9105 + }, + { + "epoch": 0.8892578125, + "grad_norm": 0.14966431260108948, + "learning_rate": 6.387968410256514e-05, + "loss": 4.3711, + "step": 9106 + }, + { + "epoch": 0.88935546875, + "grad_norm": 0.1490459144115448, + "learning_rate": 6.385547001286525e-05, + "loss": 4.3789, + "step": 9107 + }, + { + "epoch": 0.889453125, + "grad_norm": 0.1471119225025177, + "learning_rate": 6.383127639219452e-05, + "loss": 4.3633, + "step": 9108 + }, + { + "epoch": 0.88955078125, + "grad_norm": 0.15470710396766663, + "learning_rate": 6.38071032428983e-05, + "loss": 4.375, + "step": 9109 + }, + { + "epoch": 0.8896484375, + "grad_norm": 0.15030550956726074, + "learning_rate": 6.378295056732007e-05, + "loss": 4.3828, + "step": 9110 + }, + { + "epoch": 0.88974609375, + "grad_norm": 0.14592988789081573, + "learning_rate": 6.375881836780123e-05, + "loss": 4.3711, + "step": 9111 + }, + { + "epoch": 0.88984375, + "grad_norm": 0.14628124237060547, + "learning_rate": 6.373470664668124e-05, + "loss": 4.3789, + "step": 9112 + }, + { + "epoch": 0.88994140625, + "grad_norm": 0.14794589579105377, + "learning_rate": 6.371061540629762e-05, + "loss": 4.3672, + "step": 9113 + }, + { + "epoch": 0.8900390625, + "grad_norm": 0.14097513258457184, + "learning_rate": 6.368654464898577e-05, + "loss": 4.3867, + "step": 9114 + }, + { + "epoch": 0.89013671875, + "grad_norm": 0.14538359642028809, + "learning_rate": 6.366249437707926e-05, + "loss": 4.3867, + "step": 9115 + }, + { + "epoch": 0.890234375, + "grad_norm": 0.13950839638710022, + "learning_rate": 6.363846459290958e-05, + "loss": 4.3672, + "step": 9116 + }, + { + "epoch": 0.89033203125, + "grad_norm": 0.1439312994480133, + "learning_rate": 6.361445529880622e-05, + "loss": 4.3477, + "step": 9117 + }, + { + "epoch": 0.8904296875, + "grad_norm": 0.14955458045005798, + "learning_rate": 6.359046649709674e-05, + "loss": 4.3867, + "step": 9118 + }, + { + "epoch": 0.89052734375, + "grad_norm": 0.14362987875938416, + "learning_rate": 6.356649819010666e-05, + "loss": 4.3828, + "step": 9119 + }, + { + "epoch": 0.890625, + "grad_norm": 0.1451408565044403, + "learning_rate": 6.354255038015962e-05, + "loss": 4.375, + "step": 9120 + }, + { + "epoch": 0.89072265625, + "grad_norm": 0.14604885876178741, + "learning_rate": 6.351862306957714e-05, + "loss": 4.4062, + "step": 9121 + }, + { + "epoch": 0.8908203125, + "grad_norm": 0.14918336272239685, + "learning_rate": 6.349471626067882e-05, + "loss": 4.4023, + "step": 9122 + }, + { + "epoch": 0.89091796875, + "grad_norm": 0.14553457498550415, + "learning_rate": 6.34708299557823e-05, + "loss": 4.3828, + "step": 9123 + }, + { + "epoch": 0.891015625, + "grad_norm": 0.1475497931241989, + "learning_rate": 6.344696415720311e-05, + "loss": 4.3984, + "step": 9124 + }, + { + "epoch": 0.89111328125, + "grad_norm": 0.14803455770015717, + "learning_rate": 6.342311886725501e-05, + "loss": 4.3945, + "step": 9125 + }, + { + "epoch": 0.8912109375, + "grad_norm": 0.1392158716917038, + "learning_rate": 6.33992940882495e-05, + "loss": 4.3711, + "step": 9126 + }, + { + "epoch": 0.89130859375, + "grad_norm": 0.14390845596790314, + "learning_rate": 6.337548982249629e-05, + "loss": 4.3594, + "step": 9127 + }, + { + "epoch": 0.89140625, + "grad_norm": 0.14489732682704926, + "learning_rate": 6.335170607230304e-05, + "loss": 4.4297, + "step": 9128 + }, + { + "epoch": 0.89150390625, + "grad_norm": 
0.14377643167972565, + "learning_rate": 6.332794283997544e-05, + "loss": 4.3711, + "step": 9129 + }, + { + "epoch": 0.8916015625, + "grad_norm": 0.15443040430545807, + "learning_rate": 6.330420012781715e-05, + "loss": 4.418, + "step": 9130 + }, + { + "epoch": 0.89169921875, + "grad_norm": 0.14307239651679993, + "learning_rate": 6.328047793812987e-05, + "loss": 4.3555, + "step": 9131 + }, + { + "epoch": 0.891796875, + "grad_norm": 0.14928865432739258, + "learning_rate": 6.325677627321333e-05, + "loss": 4.3555, + "step": 9132 + }, + { + "epoch": 0.89189453125, + "grad_norm": 0.14704877138137817, + "learning_rate": 6.323309513536523e-05, + "loss": 4.4141, + "step": 9133 + }, + { + "epoch": 0.8919921875, + "grad_norm": 0.14227375388145447, + "learning_rate": 6.320943452688128e-05, + "loss": 4.3906, + "step": 9134 + }, + { + "epoch": 0.89208984375, + "grad_norm": 0.1425650417804718, + "learning_rate": 6.318579445005522e-05, + "loss": 4.3906, + "step": 9135 + }, + { + "epoch": 0.8921875, + "grad_norm": 0.1479882001876831, + "learning_rate": 6.316217490717879e-05, + "loss": 4.3711, + "step": 9136 + }, + { + "epoch": 0.89228515625, + "grad_norm": 0.14116953313350677, + "learning_rate": 6.313857590054174e-05, + "loss": 4.3438, + "step": 9137 + }, + { + "epoch": 0.8923828125, + "grad_norm": 0.14671190083026886, + "learning_rate": 6.311499743243189e-05, + "loss": 4.4102, + "step": 9138 + }, + { + "epoch": 0.89248046875, + "grad_norm": 0.14298003911972046, + "learning_rate": 6.309143950513495e-05, + "loss": 4.3828, + "step": 9139 + }, + { + "epoch": 0.892578125, + "grad_norm": 0.1456432044506073, + "learning_rate": 6.306790212093473e-05, + "loss": 4.3438, + "step": 9140 + }, + { + "epoch": 0.89267578125, + "grad_norm": 0.14493873715400696, + "learning_rate": 6.304438528211303e-05, + "loss": 4.3906, + "step": 9141 + }, + { + "epoch": 0.8927734375, + "grad_norm": 0.14158549904823303, + "learning_rate": 6.30208889909496e-05, + "loss": 4.418, + "step": 9142 + }, + { + "epoch": 0.89287109375, + "grad_norm": 0.15148070454597473, + "learning_rate": 6.299741324972229e-05, + "loss": 4.3906, + "step": 9143 + }, + { + "epoch": 0.89296875, + "grad_norm": 0.14706172049045563, + "learning_rate": 6.297395806070688e-05, + "loss": 4.4023, + "step": 9144 + }, + { + "epoch": 0.89306640625, + "grad_norm": 0.14917954802513123, + "learning_rate": 6.295052342617724e-05, + "loss": 4.375, + "step": 9145 + }, + { + "epoch": 0.8931640625, + "grad_norm": 0.14831791818141937, + "learning_rate": 6.292710934840513e-05, + "loss": 4.3867, + "step": 9146 + }, + { + "epoch": 0.89326171875, + "grad_norm": 0.14104337990283966, + "learning_rate": 6.290371582966046e-05, + "loss": 4.4102, + "step": 9147 + }, + { + "epoch": 0.893359375, + "grad_norm": 0.14458054304122925, + "learning_rate": 6.288034287221102e-05, + "loss": 4.4102, + "step": 9148 + }, + { + "epoch": 0.89345703125, + "grad_norm": 0.1478259414434433, + "learning_rate": 6.285699047832267e-05, + "loss": 4.4062, + "step": 9149 + }, + { + "epoch": 0.8935546875, + "grad_norm": 0.14377036690711975, + "learning_rate": 6.283365865025932e-05, + "loss": 4.3711, + "step": 9150 + }, + { + "epoch": 0.89365234375, + "grad_norm": 0.14250801503658295, + "learning_rate": 6.281034739028275e-05, + "loss": 4.418, + "step": 9151 + }, + { + "epoch": 0.89375, + "grad_norm": 0.15397106111049652, + "learning_rate": 6.278705670065289e-05, + "loss": 4.3477, + "step": 9152 + }, + { + "epoch": 0.89384765625, + "grad_norm": 0.14178471267223358, + "learning_rate": 6.276378658362756e-05, + "loss": 4.3711, + 
"step": 9153 + }, + { + "epoch": 0.8939453125, + "grad_norm": 0.15278401970863342, + "learning_rate": 6.274053704146268e-05, + "loss": 4.3984, + "step": 9154 + }, + { + "epoch": 0.89404296875, + "grad_norm": 0.1407594233751297, + "learning_rate": 6.271730807641214e-05, + "loss": 4.3945, + "step": 9155 + }, + { + "epoch": 0.894140625, + "grad_norm": 0.14520221948623657, + "learning_rate": 6.269409969072778e-05, + "loss": 4.3984, + "step": 9156 + }, + { + "epoch": 0.89423828125, + "grad_norm": 0.14374132454395294, + "learning_rate": 6.267091188665956e-05, + "loss": 4.3867, + "step": 9157 + }, + { + "epoch": 0.8943359375, + "grad_norm": 0.14384301006793976, + "learning_rate": 6.264774466645537e-05, + "loss": 4.3594, + "step": 9158 + }, + { + "epoch": 0.89443359375, + "grad_norm": 0.150284081697464, + "learning_rate": 6.262459803236105e-05, + "loss": 4.3672, + "step": 9159 + }, + { + "epoch": 0.89453125, + "grad_norm": 0.14852263033390045, + "learning_rate": 6.260147198662058e-05, + "loss": 4.4023, + "step": 9160 + }, + { + "epoch": 0.89462890625, + "grad_norm": 0.14347340166568756, + "learning_rate": 6.257836653147585e-05, + "loss": 4.3828, + "step": 9161 + }, + { + "epoch": 0.8947265625, + "grad_norm": 0.14695727825164795, + "learning_rate": 6.255528166916674e-05, + "loss": 4.3711, + "step": 9162 + }, + { + "epoch": 0.89482421875, + "grad_norm": 0.15450428426265717, + "learning_rate": 6.253221740193123e-05, + "loss": 4.3984, + "step": 9163 + }, + { + "epoch": 0.894921875, + "grad_norm": 0.1573951095342636, + "learning_rate": 6.250917373200522e-05, + "loss": 4.3789, + "step": 9164 + }, + { + "epoch": 0.89501953125, + "grad_norm": 0.15574415028095245, + "learning_rate": 6.248615066162262e-05, + "loss": 4.4062, + "step": 9165 + }, + { + "epoch": 0.8951171875, + "grad_norm": 0.14739078283309937, + "learning_rate": 6.24631481930154e-05, + "loss": 4.3438, + "step": 9166 + }, + { + "epoch": 0.89521484375, + "grad_norm": 0.1449914574623108, + "learning_rate": 6.244016632841352e-05, + "loss": 4.3555, + "step": 9167 + }, + { + "epoch": 0.8953125, + "grad_norm": 0.14445146918296814, + "learning_rate": 6.241720507004479e-05, + "loss": 4.3984, + "step": 9168 + }, + { + "epoch": 0.89541015625, + "grad_norm": 0.1404712051153183, + "learning_rate": 6.239426442013526e-05, + "loss": 4.3828, + "step": 9169 + }, + { + "epoch": 0.8955078125, + "grad_norm": 0.14678223431110382, + "learning_rate": 6.23713443809088e-05, + "loss": 4.418, + "step": 9170 + }, + { + "epoch": 0.89560546875, + "grad_norm": 0.15178053081035614, + "learning_rate": 6.234844495458741e-05, + "loss": 4.3867, + "step": 9171 + }, + { + "epoch": 0.895703125, + "grad_norm": 0.1450154334306717, + "learning_rate": 6.232556614339101e-05, + "loss": 4.3594, + "step": 9172 + }, + { + "epoch": 0.89580078125, + "grad_norm": 0.14555981755256653, + "learning_rate": 6.230270794953754e-05, + "loss": 4.4102, + "step": 9173 + }, + { + "epoch": 0.8958984375, + "grad_norm": 0.1492014080286026, + "learning_rate": 6.227987037524297e-05, + "loss": 4.3867, + "step": 9174 + }, + { + "epoch": 0.89599609375, + "grad_norm": 0.14487388730049133, + "learning_rate": 6.225705342272124e-05, + "loss": 4.3477, + "step": 9175 + }, + { + "epoch": 0.89609375, + "grad_norm": 0.15214993059635162, + "learning_rate": 6.223425709418425e-05, + "loss": 4.3945, + "step": 9176 + }, + { + "epoch": 0.89619140625, + "grad_norm": 0.14894436299800873, + "learning_rate": 6.221148139184203e-05, + "loss": 4.3789, + "step": 9177 + }, + { + "epoch": 0.8962890625, + "grad_norm": 0.15442483127117157, 
+ "learning_rate": 6.218872631790246e-05, + "loss": 4.3984, + "step": 9178 + }, + { + "epoch": 0.89638671875, + "grad_norm": 0.15302924811840057, + "learning_rate": 6.216599187457152e-05, + "loss": 4.3789, + "step": 9179 + }, + { + "epoch": 0.896484375, + "grad_norm": 0.14540962874889374, + "learning_rate": 6.214327806405318e-05, + "loss": 4.3945, + "step": 9180 + }, + { + "epoch": 0.89658203125, + "grad_norm": 0.14044633507728577, + "learning_rate": 6.212058488854934e-05, + "loss": 4.3711, + "step": 9181 + }, + { + "epoch": 0.8966796875, + "grad_norm": 0.14281176030635834, + "learning_rate": 6.209791235025998e-05, + "loss": 4.3711, + "step": 9182 + }, + { + "epoch": 0.89677734375, + "grad_norm": 0.14326317608356476, + "learning_rate": 6.207526045138311e-05, + "loss": 4.3789, + "step": 9183 + }, + { + "epoch": 0.896875, + "grad_norm": 0.14751791954040527, + "learning_rate": 6.205262919411456e-05, + "loss": 4.4023, + "step": 9184 + }, + { + "epoch": 0.89697265625, + "grad_norm": 0.13878469169139862, + "learning_rate": 6.203001858064832e-05, + "loss": 4.3984, + "step": 9185 + }, + { + "epoch": 0.8970703125, + "grad_norm": 0.14342913031578064, + "learning_rate": 6.200742861317638e-05, + "loss": 4.375, + "step": 9186 + }, + { + "epoch": 0.89716796875, + "grad_norm": 0.14088186621665955, + "learning_rate": 6.198485929388862e-05, + "loss": 4.3906, + "step": 9187 + }, + { + "epoch": 0.897265625, + "grad_norm": 0.1486337035894394, + "learning_rate": 6.196231062497302e-05, + "loss": 4.4023, + "step": 9188 + }, + { + "epoch": 0.89736328125, + "grad_norm": 0.14459311962127686, + "learning_rate": 6.193978260861554e-05, + "loss": 4.375, + "step": 9189 + }, + { + "epoch": 0.8974609375, + "grad_norm": 0.1386542171239853, + "learning_rate": 6.191727524700005e-05, + "loss": 4.4062, + "step": 9190 + }, + { + "epoch": 0.89755859375, + "grad_norm": 0.14142288267612457, + "learning_rate": 6.189478854230856e-05, + "loss": 4.3711, + "step": 9191 + }, + { + "epoch": 0.89765625, + "grad_norm": 0.14793041348457336, + "learning_rate": 6.1872322496721e-05, + "loss": 4.4062, + "step": 9192 + }, + { + "epoch": 0.89775390625, + "grad_norm": 0.14470869302749634, + "learning_rate": 6.184987711241525e-05, + "loss": 4.4062, + "step": 9193 + }, + { + "epoch": 0.8978515625, + "grad_norm": 0.14679516851902008, + "learning_rate": 6.182745239156723e-05, + "loss": 4.3711, + "step": 9194 + }, + { + "epoch": 0.89794921875, + "grad_norm": 0.14778900146484375, + "learning_rate": 6.18050483363509e-05, + "loss": 4.3516, + "step": 9195 + }, + { + "epoch": 0.898046875, + "grad_norm": 0.1441812664270401, + "learning_rate": 6.178266494893818e-05, + "loss": 4.4023, + "step": 9196 + }, + { + "epoch": 0.89814453125, + "grad_norm": 0.14101967215538025, + "learning_rate": 6.176030223149899e-05, + "loss": 4.4102, + "step": 9197 + }, + { + "epoch": 0.8982421875, + "grad_norm": 0.1443154662847519, + "learning_rate": 6.173796018620123e-05, + "loss": 4.3789, + "step": 9198 + }, + { + "epoch": 0.89833984375, + "grad_norm": 0.1445460021495819, + "learning_rate": 6.171563881521081e-05, + "loss": 4.3867, + "step": 9199 + }, + { + "epoch": 0.8984375, + "grad_norm": 0.14899206161499023, + "learning_rate": 6.169333812069169e-05, + "loss": 4.3711, + "step": 9200 + } + ], + "logging_steps": 1.0, + "max_steps": 10240, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 200, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + 
"should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 5.632705469601546e+19, + "train_batch_size": 64, + "trial_name": null, + "trial_params": null +}