|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.05859375,
  "eval_steps": 500,
  "global_step": 600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 9.765625e-05, "grad_norm": 13.125093460083008, "learning_rate": 1.6666666666666669e-06, "loss": 6.0473, "step": 1 },
    { "epoch": 0.0001953125, "grad_norm": 13.109691619873047, "learning_rate": 3.3333333333333337e-06, "loss": 6.0151, "step": 2 },
    { "epoch": 0.00029296875, "grad_norm": 12.975897789001465, "learning_rate": 5e-06, "loss": 6.0374, "step": 3 },
    { "epoch": 0.000390625, "grad_norm": 12.861470222473145, "learning_rate": 6.6666666666666675e-06, "loss": 5.9534, "step": 4 },
    { "epoch": 0.00048828125, "grad_norm": 11.551366806030273, "learning_rate": 8.333333333333334e-06, "loss": 5.7805, "step": 5 },
    { "epoch": 0.0005859375, "grad_norm": 7.968364238739014, "learning_rate": 1e-05, "loss": 5.5664, "step": 6 },
    { "epoch": 0.00068359375, "grad_norm": 6.854646682739258, "learning_rate": 1.1666666666666668e-05, "loss": 5.4982, "step": 7 },
    { "epoch": 0.00078125, "grad_norm": 3.759798049926758, "learning_rate": 1.3333333333333335e-05, "loss": 5.1824, "step": 8 },
    { "epoch": 0.00087890625, "grad_norm": 3.48822283744812, "learning_rate": 1.5e-05, "loss": 5.0667, "step": 9 },
    { "epoch": 0.0009765625, "grad_norm": 3.1011886596679688, "learning_rate": 1.6666666666666667e-05, "loss": 5.0028, "step": 10 },
    { "epoch": 0.00107421875, "grad_norm": 2.5868849754333496, "learning_rate": 1.8333333333333333e-05, "loss": 4.7787, "step": 11 },
    { "epoch": 0.001171875, "grad_norm": 2.378497838973999, "learning_rate": 2e-05, "loss": 4.6855, "step": 12 },
    { "epoch": 0.00126953125, "grad_norm": 2.2101738452911377, "learning_rate": 2.1666666666666667e-05, "loss": 4.6042, "step": 13 },
    { "epoch": 0.0013671875, "grad_norm": 1.9938961267471313, "learning_rate": 2.3333333333333336e-05, "loss": 4.3843, "step": 14 },
    { "epoch": 0.00146484375, "grad_norm": 1.9345463514328003, "learning_rate": 2.5e-05, "loss": 4.2548, "step": 15 },
    { "epoch": 0.0015625, "grad_norm": 1.7730776071548462, "learning_rate": 2.666666666666667e-05, "loss": 4.1363, "step": 16 },
    { "epoch": 0.00166015625, "grad_norm": 1.58334481716156, "learning_rate": 2.8333333333333332e-05, "loss": 3.9704, "step": 17 },
    { "epoch": 0.0017578125, "grad_norm": 1.4827001094818115, "learning_rate": 3e-05, "loss": 3.81, "step": 18 },
    { "epoch": 0.00185546875, "grad_norm": 1.4140430688858032, "learning_rate": 3.166666666666667e-05, "loss": 3.7305, "step": 19 },
    { "epoch": 0.001953125, "grad_norm": 1.3934496641159058, "learning_rate": 3.3333333333333335e-05, "loss": 3.5375, "step": 20 },
    { "epoch": 0.00205078125, "grad_norm": 1.3384771347045898, "learning_rate": 3.5000000000000004e-05, "loss": 3.5217, "step": 21 },
    { "epoch": 0.0021484375, "grad_norm": 1.2228978872299194, "learning_rate": 3.6666666666666666e-05, "loss": 3.3742, "step": 22 },
    { "epoch": 0.00224609375, "grad_norm": 1.1775028705596924, "learning_rate": 3.8333333333333334e-05, "loss": 3.2626, "step": 23 },
    { "epoch": 0.00234375, "grad_norm": 1.0886842012405396, "learning_rate": 4e-05, "loss": 3.1864, "step": 24 },
    { "epoch": 0.00244140625, "grad_norm": 1.0981535911560059, "learning_rate": 4.1666666666666665e-05, "loss": 3.0962, "step": 25 },
    { "epoch": 0.0025390625, "grad_norm": 1.009294867515564, "learning_rate": 4.3333333333333334e-05, "loss": 3.0507, "step": 26 },
    { "epoch": 0.00263671875, "grad_norm": 0.9390103816986084, "learning_rate": 4.4999999999999996e-05, "loss": 2.9579, "step": 27 },
    { "epoch": 0.002734375, "grad_norm": 0.8647847175598145, "learning_rate": 4.666666666666667e-05, "loss": 2.8166, "step": 28 },
    { "epoch": 0.00283203125, "grad_norm": 0.8606237769126892, "learning_rate": 4.8333333333333334e-05, "loss": 2.7919, "step": 29 },
    { "epoch": 0.0029296875, "grad_norm": 0.8069576025009155, "learning_rate": 5e-05, "loss": 2.7426, "step": 30 },
    { "epoch": 0.00302734375, "grad_norm": 0.7007808685302734, "learning_rate": 5.1666666666666664e-05, "loss": 2.6926, "step": 31 },
    { "epoch": 0.003125, "grad_norm": 0.6825646162033081, "learning_rate": 5.333333333333334e-05, "loss": 2.6783, "step": 32 },
    { "epoch": 0.00322265625, "grad_norm": 0.7054751515388489, "learning_rate": 5.5e-05, "loss": 2.6239, "step": 33 },
    { "epoch": 0.0033203125, "grad_norm": 0.7101700305938721, "learning_rate": 5.6666666666666664e-05, "loss": 2.5993, "step": 34 },
    { "epoch": 0.00341796875, "grad_norm": 0.56828773021698, "learning_rate": 5.833333333333333e-05, "loss": 2.5258, "step": 35 },
    { "epoch": 0.003515625, "grad_norm": 0.5513983368873596, "learning_rate": 6e-05, "loss": 2.5018, "step": 36 },
    { "epoch": 0.00361328125, "grad_norm": 0.573391318321228, "learning_rate": 6.166666666666667e-05, "loss": 2.4881, "step": 37 },
    { "epoch": 0.0037109375, "grad_norm": 0.4520920515060425, "learning_rate": 6.333333333333335e-05, "loss": 2.4489, "step": 38 },
    { "epoch": 0.00380859375, "grad_norm": 0.43038809299468994, "learning_rate": 6.500000000000001e-05, "loss": 2.4387, "step": 39 },
    { "epoch": 0.00390625, "grad_norm": 0.4071808457374573, "learning_rate": 6.666666666666667e-05, "loss": 2.4261, "step": 40 },
    { "epoch": 0.00400390625, "grad_norm": 0.3631410300731659, "learning_rate": 6.833333333333333e-05, "loss": 2.3656, "step": 41 },
    { "epoch": 0.0041015625, "grad_norm": 0.33457818627357483, "learning_rate": 7.000000000000001e-05, "loss": 2.4011, "step": 42 },
    { "epoch": 0.00419921875, "grad_norm": 0.3582305610179901, "learning_rate": 7.166666666666667e-05, "loss": 2.3662, "step": 43 },
    { "epoch": 0.004296875, "grad_norm": 0.4090467691421509, "learning_rate": 7.333333333333333e-05, "loss": 2.3282, "step": 44 },
    { "epoch": 0.00439453125, "grad_norm": 0.34438556432724, "learning_rate": 7.5e-05, "loss": 2.2991, "step": 45 },
    { "epoch": 0.0044921875, "grad_norm": 0.29381102323532104, "learning_rate": 7.666666666666667e-05, "loss": 2.2506, "step": 46 },
    { "epoch": 0.00458984375, "grad_norm": 0.4019562304019928, "learning_rate": 7.833333333333334e-05, "loss": 2.2494, "step": 47 },
    { "epoch": 0.0046875, "grad_norm": 0.3183911144733429, "learning_rate": 8e-05, "loss": 2.2853, "step": 48 },
    { "epoch": 0.00478515625, "grad_norm": 0.33041393756866455, "learning_rate": 8.166666666666667e-05, "loss": 2.2251, "step": 49 },
    { "epoch": 0.0048828125, "grad_norm": 0.28936123847961426, "learning_rate": 8.333333333333333e-05, "loss": 2.2308, "step": 50 },
    { "epoch": 0.00498046875, "grad_norm": 0.36125779151916504, "learning_rate": 8.5e-05, "loss": 2.2473, "step": 51 },
    { "epoch": 0.005078125, "grad_norm": 0.2832348942756653, "learning_rate": 8.666666666666667e-05, "loss": 2.201, "step": 52 },
    { "epoch": 0.00517578125, "grad_norm": 0.3250133693218231, "learning_rate": 8.833333333333333e-05, "loss": 2.1596, "step": 53 },
    { "epoch": 0.0052734375, "grad_norm": 0.31502828001976013, "learning_rate": 8.999999999999999e-05, "loss": 2.2183, "step": 54 },
    { "epoch": 0.00537109375, "grad_norm": 0.33068645000457764, "learning_rate": 9.166666666666667e-05, "loss": 2.2254, "step": 55 },
    { "epoch": 0.00546875, "grad_norm": 0.30684661865234375, "learning_rate": 9.333333333333334e-05, "loss": 2.2572, "step": 56 },
    { "epoch": 0.00556640625, "grad_norm": 0.31540846824645996, "learning_rate": 9.5e-05, "loss": 2.1594, "step": 57 },
    { "epoch": 0.0056640625, "grad_norm": 0.23259752988815308, "learning_rate": 9.666666666666667e-05, "loss": 2.172, "step": 58 },
    { "epoch": 0.00576171875, "grad_norm": 0.26666638255119324, "learning_rate": 9.833333333333333e-05, "loss": 2.1961, "step": 59 },
    { "epoch": 0.005859375, "grad_norm": 0.26295527815818787, "learning_rate": 0.0001, "loss": 2.1811, "step": 60 },
    { "epoch": 0.00595703125, "grad_norm": 0.2302207201719284, "learning_rate": 0.00010166666666666667, "loss": 2.1431, "step": 61 },
    { "epoch": 0.0060546875, "grad_norm": 0.468537837266922, "learning_rate": 0.00010333333333333333, "loss": 2.133, "step": 62 },
    { "epoch": 0.00615234375, "grad_norm": 0.27946797013282776, "learning_rate": 0.000105, "loss": 2.1917, "step": 63 },
    { "epoch": 0.00625, "grad_norm": 0.26789286732673645, "learning_rate": 0.00010666666666666668, "loss": 2.1417, "step": 64 },
    { "epoch": 0.00634765625, "grad_norm": 0.22977805137634277, "learning_rate": 0.00010833333333333334, "loss": 2.1371, "step": 65 },
    { "epoch": 0.0064453125, "grad_norm": 0.2590682804584503, "learning_rate": 0.00011, "loss": 2.113, "step": 66 },
    { "epoch": 0.00654296875, "grad_norm": 0.2660805583000183, "learning_rate": 0.00011166666666666667, "loss": 2.1346, "step": 67 },
    { "epoch": 0.006640625, "grad_norm": 0.24538874626159668, "learning_rate": 0.00011333333333333333, "loss": 2.1078, "step": 68 },
    { "epoch": 0.00673828125, "grad_norm": 0.2986001968383789, "learning_rate": 0.000115, "loss": 2.1048, "step": 69 },
    { "epoch": 0.0068359375, "grad_norm": 0.30193910002708435, "learning_rate": 0.00011666666666666667, "loss": 2.135, "step": 70 },
    { "epoch": 0.00693359375, "grad_norm": 0.25153177976608276, "learning_rate": 0.00011833333333333334, "loss": 2.1101, "step": 71 },
    { "epoch": 0.00703125, "grad_norm": 0.7685809135437012, "learning_rate": 0.00012, "loss": 2.0833, "step": 72 },
    { "epoch": 0.00712890625, "grad_norm": 0.24468126893043518, "learning_rate": 0.00012166666666666668, "loss": 2.1409, "step": 73 },
    { "epoch": 0.0072265625, "grad_norm": 0.3070752024650574, "learning_rate": 0.00012333333333333334, "loss": 2.1402, "step": 74 },
    { "epoch": 0.00732421875, "grad_norm": 0.352273553609848, "learning_rate": 0.000125, "loss": 2.171, "step": 75 },
    { "epoch": 0.007421875, "grad_norm": 0.34806111454963684, "learning_rate": 0.0001266666666666667, "loss": 2.1155, "step": 76 },
    { "epoch": 0.00751953125, "grad_norm": 0.317414253950119, "learning_rate": 0.00012833333333333333, "loss": 2.0812, "step": 77 },
    { "epoch": 0.0076171875, "grad_norm": 0.3320539891719818, "learning_rate": 0.00013000000000000002, "loss": 2.0987, "step": 78 },
    { "epoch": 0.00771484375, "grad_norm": 0.3315901756286621, "learning_rate": 0.00013166666666666665, "loss": 2.1025, "step": 79 },
    { "epoch": 0.0078125, "grad_norm": 0.29021286964416504, "learning_rate": 0.00013333333333333334, "loss": 2.1474, "step": 80 },
    { "epoch": 0.00791015625, "grad_norm": 0.2593044936656952, "learning_rate": 0.000135, "loss": 2.0667, "step": 81 },
    { "epoch": 0.0080078125, "grad_norm": 0.3192054033279419, "learning_rate": 0.00013666666666666666, "loss": 2.0764, "step": 82 },
    { "epoch": 0.00810546875, "grad_norm": 0.26053664088249207, "learning_rate": 0.00013833333333333333, "loss": 2.0698, "step": 83 },
    { "epoch": 0.008203125, "grad_norm": 0.24790963530540466, "learning_rate": 0.00014000000000000001, "loss": 2.124, "step": 84 },
    { "epoch": 0.00830078125, "grad_norm": 0.2546316087245941, "learning_rate": 0.00014166666666666668, "loss": 2.1025, "step": 85 },
    { "epoch": 0.0083984375, "grad_norm": 0.22275258600711823, "learning_rate": 0.00014333333333333334, "loss": 2.0778, "step": 86 },
    { "epoch": 0.00849609375, "grad_norm": 0.22855599224567413, "learning_rate": 0.000145, "loss": 2.1113, "step": 87 },
    { "epoch": 0.00859375, "grad_norm": 0.2456902116537094, "learning_rate": 0.00014666666666666666, "loss": 2.1399, "step": 88 },
    { "epoch": 0.00869140625, "grad_norm": 0.22144420444965363, "learning_rate": 0.00014833333333333335, "loss": 2.0211, "step": 89 },
    { "epoch": 0.0087890625, "grad_norm": 0.254894495010376, "learning_rate": 0.00015, "loss": 2.1382, "step": 90 },
    { "epoch": 0.00888671875, "grad_norm": 0.2729082703590393, "learning_rate": 0.00015166666666666668, "loss": 2.1271, "step": 91 },
    { "epoch": 0.008984375, "grad_norm": 0.2564642131328583, "learning_rate": 0.00015333333333333334, "loss": 2.0423, "step": 92 },
    { "epoch": 0.00908203125, "grad_norm": 0.24927419424057007, "learning_rate": 0.000155, "loss": 2.0603, "step": 93 },
    { "epoch": 0.0091796875, "grad_norm": 0.2591923475265503, "learning_rate": 0.0001566666666666667, "loss": 2.0522, "step": 94 },
    { "epoch": 0.00927734375, "grad_norm": 0.3350547254085541, "learning_rate": 0.00015833333333333332, "loss": 2.1095, "step": 95 },
    { "epoch": 0.009375, "grad_norm": 0.30227839946746826, "learning_rate": 0.00016, "loss": 2.121, "step": 96 },
    { "epoch": 0.00947265625, "grad_norm": 0.3027198314666748, "learning_rate": 0.00016166666666666665, "loss": 2.0288, "step": 97 },
    { "epoch": 0.0095703125, "grad_norm": 0.29791951179504395, "learning_rate": 0.00016333333333333334, "loss": 2.0471, "step": 98 },
    { "epoch": 0.00966796875, "grad_norm": 0.30918803811073303, "learning_rate": 0.000165, "loss": 2.1036, "step": 99 },
    { "epoch": 0.009765625, "grad_norm": 0.2510216534137726, "learning_rate": 0.00016666666666666666, "loss": 2.0217, "step": 100 },
    { "epoch": 0.00986328125, "grad_norm": 0.288231760263443, "learning_rate": 0.00016833333333333335, "loss": 2.0178, "step": 101 },
    { "epoch": 0.0099609375, "grad_norm": 0.3326691687107086, "learning_rate": 0.00017, "loss": 2.0947, "step": 102 },
    { "epoch": 0.01005859375, "grad_norm": 0.3552946150302887, "learning_rate": 0.00017166666666666667, "loss": 1.9953, "step": 103 },
    { "epoch": 0.01015625, "grad_norm": 0.34882935881614685, "learning_rate": 0.00017333333333333334, "loss": 2.1223, "step": 104 },
    { "epoch": 0.01025390625, "grad_norm": 0.35487979650497437, "learning_rate": 0.000175, "loss": 2.0599, "step": 105 },
    { "epoch": 0.0103515625, "grad_norm": 0.33561578392982483, "learning_rate": 0.00017666666666666666, "loss": 2.0398, "step": 106 },
    { "epoch": 0.01044921875, "grad_norm": 0.27415409684181213, "learning_rate": 0.00017833333333333335, "loss": 2.0982, "step": 107 },
    { "epoch": 0.010546875, "grad_norm": 0.28592920303344727, "learning_rate": 0.00017999999999999998, "loss": 2.0571, "step": 108 },
    { "epoch": 0.01064453125, "grad_norm": 0.3228552043437958, "learning_rate": 0.00018166666666666667, "loss": 1.9923, "step": 109 },
    { "epoch": 0.0107421875, "grad_norm": 0.29005661606788635, "learning_rate": 0.00018333333333333334, "loss": 2.0394, "step": 110 },
    { "epoch": 0.01083984375, "grad_norm": 0.36677825450897217, "learning_rate": 0.000185, "loss": 2.0153, "step": 111 },
    { "epoch": 0.0109375, "grad_norm": 0.2562806308269501, "learning_rate": 0.0001866666666666667, "loss": 2.05, "step": 112 },
    { "epoch": 0.01103515625, "grad_norm": 0.2748093008995056, "learning_rate": 0.00018833333333333332, "loss": 2.0449, "step": 113 },
    { "epoch": 0.0111328125, "grad_norm": 0.3924459218978882, "learning_rate": 0.00019, "loss": 2.0409, "step": 114 },
    { "epoch": 0.01123046875, "grad_norm": 0.4368191063404083, "learning_rate": 0.00019166666666666667, "loss": 2.0178, "step": 115 },
    { "epoch": 0.011328125, "grad_norm": 0.4236035943031311, "learning_rate": 0.00019333333333333333, "loss": 2.0518, "step": 116 },
    { "epoch": 0.01142578125, "grad_norm": 0.28912851214408875, "learning_rate": 0.00019500000000000002, "loss": 2.0058, "step": 117 },
    { "epoch": 0.0115234375, "grad_norm": 0.38847798109054565, "learning_rate": 0.00019666666666666666, "loss": 2.0818, "step": 118 },
    { "epoch": 0.01162109375, "grad_norm": 0.29429811239242554, "learning_rate": 0.00019833333333333335, "loss": 2.004, "step": 119 },
    { "epoch": 0.01171875, "grad_norm": 0.2969271242618561, "learning_rate": 0.0002, "loss": 2.046, "step": 120 },
    { "epoch": 0.01181640625, "grad_norm": 0.297894150018692, "learning_rate": 0.00020166666666666667, "loss": 2.0204, "step": 121 },
    { "epoch": 0.0119140625, "grad_norm": 0.28794237971305847, "learning_rate": 0.00020333333333333333, "loss": 2.0663, "step": 122 },
    { "epoch": 0.01201171875, "grad_norm": 0.2766033411026001, "learning_rate": 0.000205, "loss": 2.0399, "step": 123 },
    { "epoch": 0.012109375, "grad_norm": 0.3516612648963928, "learning_rate": 0.00020666666666666666, "loss": 2.0389, "step": 124 },
    { "epoch": 0.01220703125, "grad_norm": 0.28204381465911865, "learning_rate": 0.00020833333333333335, "loss": 2.0084, "step": 125 },
    { "epoch": 0.0123046875, "grad_norm": 0.3306240737438202, "learning_rate": 0.00021, "loss": 2.013, "step": 126 },
    { "epoch": 0.01240234375, "grad_norm": 0.3221111595630646, "learning_rate": 0.00021166666666666667, "loss": 2.0312, "step": 127 },
    { "epoch": 0.0125, "grad_norm": 0.2742249071598053, "learning_rate": 0.00021333333333333336, "loss": 2.0375, "step": 128 },
    { "epoch": 0.01259765625, "grad_norm": 0.2785228490829468, "learning_rate": 0.000215, "loss": 2.0449, "step": 129 },
    { "epoch": 0.0126953125, "grad_norm": 0.2666397988796234, "learning_rate": 0.00021666666666666668, "loss": 2.0355, "step": 130 },
    { "epoch": 0.01279296875, "grad_norm": 0.22522135078907013, "learning_rate": 0.00021833333333333332, "loss": 1.9693, "step": 131 },
    { "epoch": 0.012890625, "grad_norm": 0.2724483013153076, "learning_rate": 0.00022, "loss": 2.0671, "step": 132 },
    { "epoch": 0.01298828125, "grad_norm": 0.2655040919780731, "learning_rate": 0.00022166666666666667, "loss": 1.9805, "step": 133 },
    { "epoch": 0.0130859375, "grad_norm": 0.3194504678249359, "learning_rate": 0.00022333333333333333, "loss": 2.067, "step": 134 },
    { "epoch": 0.01318359375, "grad_norm": 0.25511813163757324, "learning_rate": 0.00022500000000000002, "loss": 2.0141, "step": 135 },
    { "epoch": 0.01328125, "grad_norm": 0.3995087444782257, "learning_rate": 0.00022666666666666666, "loss": 2.0278, "step": 136 },
    { "epoch": 0.01337890625, "grad_norm": 0.2623380422592163, "learning_rate": 0.00022833333333333334, "loss": 1.9751, "step": 137 },
    { "epoch": 0.0134765625, "grad_norm": 0.23814889788627625, "learning_rate": 0.00023, "loss": 1.969, "step": 138 },
    { "epoch": 0.01357421875, "grad_norm": 0.23566491901874542, "learning_rate": 0.00023166666666666667, "loss": 2.0135, "step": 139 },
    { "epoch": 0.013671875, "grad_norm": 0.2437373697757721, "learning_rate": 0.00023333333333333333, "loss": 2.0044, "step": 140 },
    { "epoch": 0.01376953125, "grad_norm": 0.2861543595790863, "learning_rate": 0.000235, "loss": 2.0378, "step": 141 },
    { "epoch": 0.0138671875, "grad_norm": 0.318050354719162, "learning_rate": 0.00023666666666666668, "loss": 2.0857, "step": 142 },
    { "epoch": 0.01396484375, "grad_norm": 0.39669227600097656, "learning_rate": 0.00023833333333333334, "loss": 2.0535, "step": 143 },
    { "epoch": 0.0140625, "grad_norm": 0.4359401762485504, "learning_rate": 0.00024, "loss": 2.0432, "step": 144 },
    { "epoch": 0.01416015625, "grad_norm": 0.4532039165496826, "learning_rate": 0.00024166666666666667, "loss": 1.9909, "step": 145 },
    { "epoch": 0.0142578125, "grad_norm": 0.4570695161819458, "learning_rate": 0.00024333333333333336, "loss": 2.0123, "step": 146 },
    { "epoch": 0.01435546875, "grad_norm": 0.36623403429985046, "learning_rate": 0.000245, "loss": 2.0179, "step": 147 },
    { "epoch": 0.014453125, "grad_norm": 0.3069714307785034, "learning_rate": 0.0002466666666666667, "loss": 2.0014, "step": 148 },
    { "epoch": 0.01455078125, "grad_norm": 0.3980304002761841, "learning_rate": 0.0002483333333333333, "loss": 2.0489, "step": 149 },
    { "epoch": 0.0146484375, "grad_norm": 0.31907564401626587, "learning_rate": 0.00025, "loss": 2.013, "step": 150 },
    { "epoch": 0.01474609375, "grad_norm": 0.2952549159526825, "learning_rate": 0.00025166666666666664, "loss": 2.0709, "step": 151 },
    { "epoch": 0.01484375, "grad_norm": 0.29451197385787964, "learning_rate": 0.0002533333333333334, "loss": 1.9613, "step": 152 },
    { "epoch": 0.01494140625, "grad_norm": 0.2893507778644562, "learning_rate": 0.000255, "loss": 2.0048, "step": 153 },
    { "epoch": 0.0150390625, "grad_norm": 0.24850639700889587, "learning_rate": 0.00025666666666666665, "loss": 2.0198, "step": 154 },
    { "epoch": 0.01513671875, "grad_norm": 0.24297639727592468, "learning_rate": 0.00025833333333333334, "loss": 2.0561, "step": 155 },
    { "epoch": 0.015234375, "grad_norm": 0.2777438461780548, "learning_rate": 0.00026000000000000003, "loss": 2.0206, "step": 156 },
    { "epoch": 0.01533203125, "grad_norm": 0.28714093565940857, "learning_rate": 0.00026166666666666667, "loss": 1.9891, "step": 157 },
    { "epoch": 0.0154296875, "grad_norm": 0.25796255469322205, "learning_rate": 0.0002633333333333333, "loss": 2.0369, "step": 158 },
    { "epoch": 0.01552734375, "grad_norm": 0.2391008883714676, "learning_rate": 0.00026500000000000004, "loss": 2.0015, "step": 159 },
    { "epoch": 0.015625, "grad_norm": 0.3203892409801483, "learning_rate": 0.0002666666666666667, "loss": 2.0213, "step": 160 },
    { "epoch": 0.01572265625, "grad_norm": 0.3396870791912079, "learning_rate": 0.0002683333333333333, "loss": 2.0333, "step": 161 },
    { "epoch": 0.0158203125, "grad_norm": 0.315060555934906, "learning_rate": 0.00027, "loss": 2.0195, "step": 162 },
    { "epoch": 0.01591796875, "grad_norm": 0.2672436535358429, "learning_rate": 0.0002716666666666667, "loss": 1.9946, "step": 163 },
    { "epoch": 0.016015625, "grad_norm": 0.2996402382850647, "learning_rate": 0.00027333333333333333, "loss": 2.0112, "step": 164 },
    { "epoch": 0.01611328125, "grad_norm": 0.2894189953804016, "learning_rate": 0.000275, "loss": 2.0157, "step": 165 },
    { "epoch": 0.0162109375, "grad_norm": 0.26241254806518555, "learning_rate": 0.00027666666666666665, "loss": 2.0177, "step": 166 },
    { "epoch": 0.01630859375, "grad_norm": 0.22900305688381195, "learning_rate": 0.00027833333333333334, "loss": 1.9834, "step": 167 },
    { "epoch": 0.01640625, "grad_norm": 0.2373427301645279, "learning_rate": 0.00028000000000000003, "loss": 1.9792, "step": 168 },
    { "epoch": 0.01650390625, "grad_norm": 0.2663004696369171, "learning_rate": 0.00028166666666666666, "loss": 2.0203, "step": 169 },
    { "epoch": 0.0166015625, "grad_norm": 0.31653544306755066, "learning_rate": 0.00028333333333333335, "loss": 2.0216, "step": 170 },
    { "epoch": 0.01669921875, "grad_norm": 0.3077234923839569, "learning_rate": 0.000285, "loss": 2.0361, "step": 171 },
    { "epoch": 0.016796875, "grad_norm": 0.25555703043937683, "learning_rate": 0.0002866666666666667, "loss": 2.0102, "step": 172 },
    { "epoch": 0.01689453125, "grad_norm": 0.29817435145378113, "learning_rate": 0.0002883333333333333, "loss": 1.972, "step": 173 },
    { "epoch": 0.0169921875, "grad_norm": 0.3075692355632782, "learning_rate": 0.00029, "loss": 2.0195, "step": 174 },
    { "epoch": 0.01708984375, "grad_norm": 0.29917964339256287, "learning_rate": 0.0002916666666666667, "loss": 1.9972, "step": 175 },
    { "epoch": 0.0171875, "grad_norm": 0.32018229365348816, "learning_rate": 0.0002933333333333333, "loss": 1.9895, "step": 176 },
    { "epoch": 0.01728515625, "grad_norm": 0.2907097339630127, "learning_rate": 0.000295, "loss": 1.9777, "step": 177 },
    { "epoch": 0.0173828125, "grad_norm": 0.22390642762184143, "learning_rate": 0.0002966666666666667, "loss": 1.9961, "step": 178 },
    { "epoch": 0.01748046875, "grad_norm": 0.25350186228752136, "learning_rate": 0.00029833333333333334, "loss": 1.9875, "step": 179 },
    { "epoch": 0.017578125, "grad_norm": 0.22856706380844116, "learning_rate": 0.0003, "loss": 2.0169, "step": 180 },
    { "epoch": 0.01767578125, "grad_norm": 0.2288493812084198, "learning_rate": 0.0003016666666666667, "loss": 2.0238, "step": 181 },
    { "epoch": 0.0177734375, "grad_norm": 0.27326855063438416, "learning_rate": 0.00030333333333333335, "loss": 2.0134, "step": 182 },
    { "epoch": 0.01787109375, "grad_norm": 0.2447524517774582, "learning_rate": 0.000305, "loss": 1.9852, "step": 183 },
    { "epoch": 0.01796875, "grad_norm": 0.4363366663455963, "learning_rate": 0.0003066666666666667, "loss": 1.9921, "step": 184 },
    { "epoch": 0.01806640625, "grad_norm": 0.4569666385650635, "learning_rate": 0.00030833333333333337, "loss": 1.9997, "step": 185 },
    { "epoch": 0.0181640625, "grad_norm": 0.43348655104637146, "learning_rate": 0.00031, "loss": 2.0584, "step": 186 },
    { "epoch": 0.01826171875, "grad_norm": 0.3844921588897705, "learning_rate": 0.00031166666666666663, "loss": 2.0035, "step": 187 },
    { "epoch": 0.018359375, "grad_norm": 0.3427641987800598, "learning_rate": 0.0003133333333333334, "loss": 1.9536, "step": 188 },
    { "epoch": 0.01845703125, "grad_norm": 0.33557865023612976, "learning_rate": 0.000315, "loss": 1.972, "step": 189 },
    { "epoch": 0.0185546875, "grad_norm": 0.4006612300872803, "learning_rate": 0.00031666666666666665, "loss": 2.0652, "step": 190 },
    { "epoch": 0.01865234375, "grad_norm": 0.3158099055290222, "learning_rate": 0.00031833333333333334, "loss": 2.0516, "step": 191 },
    { "epoch": 0.01875, "grad_norm": 0.3799190819263458, "learning_rate": 0.00032, "loss": 2.011, "step": 192 },
    { "epoch": 0.01884765625, "grad_norm": 0.2948876619338989, "learning_rate": 0.00032166666666666666, "loss": 2.0109, "step": 193 },
    { "epoch": 0.0189453125, "grad_norm": 0.24561335146427155, "learning_rate": 0.0003233333333333333, "loss": 2.0264, "step": 194 },
    { "epoch": 0.01904296875, "grad_norm": 0.24896866083145142, "learning_rate": 0.00032500000000000004, "loss": 1.9726, "step": 195 },
    { "epoch": 0.019140625, "grad_norm": 0.26887547969818115, "learning_rate": 0.0003266666666666667, "loss": 2.0036, "step": 196 },
    { "epoch": 0.01923828125, "grad_norm": 0.3186735212802887, "learning_rate": 0.0003283333333333333, "loss": 2.0174, "step": 197 },
    { "epoch": 0.0193359375, "grad_norm": 0.3317165672779083, "learning_rate": 0.00033, "loss": 2.0047, "step": 198 },
    { "epoch": 0.01943359375, "grad_norm": 0.3068574070930481, "learning_rate": 0.0003316666666666667, "loss": 2.0032, "step": 199 },
    { "epoch": 0.01953125, "grad_norm": 0.29292526841163635, "learning_rate": 0.0003333333333333333, "loss": 2.0533, "step": 200 },
    { "epoch": 0.01962890625, "grad_norm": 0.2519834041595459, "learning_rate": 0.000335, "loss": 2.0113, "step": 201 },
    { "epoch": 0.0197265625, "grad_norm": 0.25766584277153015, "learning_rate": 0.0003366666666666667, "loss": 2.0278, "step": 202 },
    { "epoch": 0.01982421875, "grad_norm": 0.2704983055591583, "learning_rate": 0.00033833333333333334, "loss": 1.9725, "step": 203 },
    { "epoch": 0.019921875, "grad_norm": 0.2882053256034851, "learning_rate": 0.00034, "loss": 2.0706, "step": 204 },
    { "epoch": 0.02001953125, "grad_norm": 0.34524375200271606, "learning_rate": 0.00034166666666666666, "loss": 1.9868, "step": 205 },
    { "epoch": 0.0201171875, "grad_norm": 0.3718552887439728, "learning_rate": 0.00034333333333333335, "loss": 1.9596, "step": 206 },
    { "epoch": 0.02021484375, "grad_norm": 0.2747247815132141, "learning_rate": 0.000345, "loss": 2.0125, "step": 207 },
    { "epoch": 0.0203125, "grad_norm": 0.3062858283519745, "learning_rate": 0.00034666666666666667, "loss": 2.0224, "step": 208 },
    { "epoch": 0.02041015625, "grad_norm": 0.32505863904953003, "learning_rate": 0.00034833333333333336, "loss": 2.0376, "step": 209 },
    { "epoch": 0.0205078125, "grad_norm": 0.35048386454582214, "learning_rate": 0.00035, "loss": 2.0268, "step": 210 },
    { "epoch": 0.02060546875, "grad_norm": 0.31204426288604736, "learning_rate": 0.0003516666666666667, "loss": 2.0198, "step": 211 },
    { "epoch": 0.020703125, "grad_norm": 0.24253524839878082, "learning_rate": 0.0003533333333333333, "loss": 2.028, "step": 212 },
    { "epoch": 0.02080078125, "grad_norm": 0.286915123462677, "learning_rate": 0.000355, "loss": 1.9358, "step": 213 },
    { "epoch": 0.0208984375, "grad_norm": 0.2800680994987488, "learning_rate": 0.0003566666666666667, "loss": 1.9905, "step": 214 },
    { "epoch": 0.02099609375, "grad_norm": 0.2718358635902405, "learning_rate": 0.00035833333333333333, "loss": 1.9902, "step": 215 },
    { "epoch": 0.02109375, "grad_norm": 0.28583604097366333, "learning_rate": 0.00035999999999999997, "loss": 1.991, "step": 216 },
    { "epoch": 0.02119140625, "grad_norm": 0.2911478281021118, "learning_rate": 0.0003616666666666667, "loss": 1.973, "step": 217 },
    { "epoch": 0.0212890625, "grad_norm": 0.3601188361644745, "learning_rate": 0.00036333333333333335, "loss": 1.9727, "step": 218 },
    { "epoch": 0.02138671875, "grad_norm": 0.2888337969779968, "learning_rate": 0.000365, "loss": 1.988, "step": 219 },
    { "epoch": 0.021484375, "grad_norm": 0.25628700852394104, "learning_rate": 0.00036666666666666667, "loss": 1.9984, "step": 220 },
    { "epoch": 0.02158203125, "grad_norm": 0.2637641429901123, "learning_rate": 0.00036833333333333336, "loss": 2.0229, "step": 221 },
    { "epoch": 0.0216796875, "grad_norm": 0.23845899105072021, "learning_rate": 0.00037, "loss": 1.9985, "step": 222 },
    { "epoch": 0.02177734375, "grad_norm": 0.28519535064697266, "learning_rate": 0.00037166666666666663, "loss": 2.0061, "step": 223 },
    { "epoch": 0.021875, "grad_norm": 0.31845173239707947, "learning_rate": 0.0003733333333333334, "loss": 2.0081, "step": 224 },
    { "epoch": 0.02197265625, "grad_norm": 0.3725838363170624, "learning_rate": 0.000375, "loss": 2.0032, "step": 225 },
    { "epoch": 0.0220703125, "grad_norm": 0.49783870577812195, "learning_rate": 0.00037666666666666664, "loss": 2.0404, "step": 226 },
    { "epoch": 0.02216796875, "grad_norm": 0.5059479475021362, "learning_rate": 0.0003783333333333334, "loss": 2.0498, "step": 227 },
    { "epoch": 0.022265625, "grad_norm": 0.461291640996933, "learning_rate": 0.00038, "loss": 2.0078, "step": 228 },
    { "epoch": 0.02236328125, "grad_norm": 0.3970203399658203, "learning_rate": 0.00038166666666666666, "loss": 1.9966, "step": 229 },
    { "epoch": 0.0224609375, "grad_norm": 0.3155679404735565, "learning_rate": 0.00038333333333333334, "loss": 2.0263, "step": 230 },
    { "epoch": 0.02255859375, "grad_norm": 0.32979920506477356, "learning_rate": 0.00038500000000000003, "loss": 1.9967, "step": 231 },
    { "epoch": 0.02265625, "grad_norm": 0.27470117807388306, "learning_rate": 0.00038666666666666667, "loss": 2.0461, "step": 232 },
    { "epoch": 0.02275390625, "grad_norm": 0.2981088161468506, "learning_rate": 0.0003883333333333333, "loss": 1.9944, "step": 233 },
    { "epoch": 0.0228515625, "grad_norm": 0.3496599495410919, "learning_rate": 0.00039000000000000005, "loss": 1.9907, "step": 234 },
    { "epoch": 0.02294921875, "grad_norm": 0.3318106532096863, "learning_rate": 0.0003916666666666667, "loss": 2.0576, "step": 235 },
    { "epoch": 0.023046875, "grad_norm": 0.29498377442359924, "learning_rate": 0.0003933333333333333, "loss": 2.0242, "step": 236 },
    { "epoch": 0.02314453125, "grad_norm": 0.2970214784145355, "learning_rate": 0.000395, "loss": 2.0087, "step": 237 },
    { "epoch": 0.0232421875, "grad_norm": 0.37431418895721436, "learning_rate": 0.0003966666666666667, "loss": 2.0657, "step": 238 },
    { "epoch": 0.02333984375, "grad_norm": 0.30095174908638, "learning_rate": 0.00039833333333333333, "loss": 2.0217, "step": 239 },
    { "epoch": 0.0234375, "grad_norm": 0.24695053696632385, "learning_rate": 0.0004, "loss": 1.9833, "step": 240 },
    { "epoch": 0.02353515625, "grad_norm": 0.2923540771007538, "learning_rate": 0.00040166666666666665, "loss": 2.0272, "step": 241 },
    { "epoch": 0.0236328125, "grad_norm": 0.2788209915161133, "learning_rate": 0.00040333333333333334, "loss": 2.0104, "step": 242 },
    { "epoch": 0.02373046875, "grad_norm": 0.2529614567756653, "learning_rate": 0.00040500000000000003, "loss": 2.003, "step": 243 },
    { "epoch": 0.023828125, "grad_norm": 0.2551966905593872, "learning_rate": 0.00040666666666666667, "loss": 2.001, "step": 244 },
    { "epoch": 0.02392578125, "grad_norm": 0.2613292634487152, "learning_rate": 0.00040833333333333336, "loss": 1.9822, "step": 245 },
    { "epoch": 0.0240234375, "grad_norm": 0.3060430884361267, "learning_rate": 0.00041, "loss": 2.0024, "step": 246 },
    { "epoch": 0.02412109375, "grad_norm": 0.33755916357040405, "learning_rate": 0.0004116666666666667, "loss": 2.0023, "step": 247 },
    { "epoch": 0.02421875, "grad_norm": 0.33021774888038635, "learning_rate": 0.0004133333333333333, "loss": 1.9086, "step": 248 },
    { "epoch": 0.02431640625, "grad_norm": 0.26662060618400574, "learning_rate": 0.000415, "loss": 2.009, "step": 249 },
    { "epoch": 0.0244140625, "grad_norm": 0.27698251605033875, "learning_rate": 0.0004166666666666667, "loss": 2.0183, "step": 250 },
    { "epoch": 0.02451171875, "grad_norm": 0.2582184970378876, "learning_rate": 0.00041833333333333333, "loss": 1.9932, "step": 251 },
    { "epoch": 0.024609375, "grad_norm": 0.28684699535369873, "learning_rate": 0.00042, "loss": 2.0021, "step": 252 },
    { "epoch": 0.02470703125, "grad_norm": 0.33535540103912354, "learning_rate": 0.0004216666666666667, "loss": 2.0387, "step": 253 },
    { "epoch": 0.0248046875, "grad_norm": 0.3330588638782501, "learning_rate": 0.00042333333333333334, "loss": 1.9776, "step": 254 },
    { "epoch": 0.02490234375, "grad_norm": 0.27919256687164307, "learning_rate": 0.000425, "loss": 2.0111, "step": 255 },
    { "epoch": 0.025, "grad_norm": 0.25296416878700256, "learning_rate": 0.0004266666666666667, "loss": 1.9755, "step": 256 },
    { "epoch": 0.02509765625, "grad_norm": 0.31288138031959534, "learning_rate": 0.00042833333333333335, "loss": 2.032, "step": 257 },
    { "epoch": 0.0251953125, "grad_norm": 0.3360923230648041, "learning_rate": 0.00043, "loss": 2.0837, "step": 258 },
    { "epoch": 0.02529296875, "grad_norm": 0.36317816376686096, "learning_rate": 0.0004316666666666667, "loss": 1.9696, "step": 259 },
    { "epoch": 0.025390625, "grad_norm": 0.366953581571579, "learning_rate": 0.00043333333333333337, "loss": 2.031, "step": 260 },
    { "epoch": 0.02548828125, "grad_norm": 0.34289368987083435, "learning_rate": 0.000435, "loss": 1.9968, "step": 261 },
    { "epoch": 0.0255859375, "grad_norm": 0.35170793533325195, "learning_rate": 0.00043666666666666664, "loss": 1.9963, "step": 262 },
    { "epoch": 0.02568359375, "grad_norm": 0.28625521063804626, "learning_rate": 0.0004383333333333334, "loss": 1.9932, "step": 263 },
    { "epoch": 0.02578125, "grad_norm": 0.2861610949039459, "learning_rate": 0.00044, "loss": 2.0297, "step": 264 },
    { "epoch": 0.02587890625, "grad_norm": 0.30467647314071655, "learning_rate": 0.00044166666666666665, "loss": 1.992, "step": 265 },
    { "epoch": 0.0259765625, "grad_norm": 0.31711357831954956, "learning_rate": 0.00044333333333333334, "loss": 2.0135, "step": 266 },
    { "epoch": 0.02607421875, "grad_norm": 0.341530978679657, "learning_rate": 0.00044500000000000003, "loss": 1.983, "step": 267 },
    { "epoch": 0.026171875, "grad_norm": 0.373901903629303, "learning_rate": 0.00044666666666666666, "loss": 2.0048, "step": 268 },
    { "epoch": 0.02626953125, "grad_norm": 0.3105134665966034, "learning_rate": 0.0004483333333333333, "loss": 2.0368, "step": 269 },
    { "epoch": 0.0263671875, "grad_norm": 0.29363134503364563, "learning_rate": 0.00045000000000000004, "loss": 1.978, "step": 270 },
    { "epoch": 0.02646484375, "grad_norm": 0.3060167133808136, "learning_rate": 0.0004516666666666667, "loss": 1.9479, "step": 271 },
    { "epoch": 0.0265625, "grad_norm": 0.30803290009498596, "learning_rate": 0.0004533333333333333, "loss": 1.9662, "step": 272 },
    { "epoch": 0.02666015625, "grad_norm": 0.3324045240879059, "learning_rate": 0.000455, "loss": 2.0299, "step": 273 },
    { "epoch": 0.0267578125, "grad_norm": 0.39051148295402527, "learning_rate": 0.0004566666666666667, "loss": 1.9856, "step": 274 },
    { "epoch": 0.02685546875, "grad_norm": 0.4288715124130249, "learning_rate": 0.0004583333333333333, "loss": 2.0264, "step": 275 },
    { "epoch": 0.026953125, "grad_norm": 0.34478962421417236, "learning_rate": 0.00046, "loss": 1.9824, "step": 276 },
    { "epoch": 0.02705078125, "grad_norm": 0.2766290009021759, "learning_rate": 0.0004616666666666667, "loss": 2.0066, "step": 277 },
    { "epoch": 0.0271484375, "grad_norm": 0.2508682608604431, "learning_rate": 0.00046333333333333334, "loss": 1.9663, "step": 278 },
    { "epoch": 0.02724609375, "grad_norm": 0.26924827694892883, "learning_rate": 0.000465, "loss": 1.9903, "step": 279 },
    { "epoch": 0.02734375, "grad_norm": 0.27668496966362, "learning_rate": 0.00046666666666666666, "loss": 2.0097, "step": 280 },
    { "epoch": 0.02744140625, "grad_norm": 0.25026220083236694, "learning_rate": 0.00046833333333333335, "loss": 2.0583, "step": 281 },
    { "epoch": 0.0275390625, "grad_norm": 0.2158055454492569, "learning_rate": 0.00047, "loss": 2.0137, "step": 282 },
    { "epoch": 0.02763671875, "grad_norm": 0.22540244460105896, "learning_rate": 0.0004716666666666667, "loss": 1.994, "step": 283 },
    { "epoch": 0.027734375, "grad_norm": 0.26405519247055054, "learning_rate": 0.00047333333333333336, "loss": 2.0221, "step": 284 },
    { "epoch": 0.02783203125, "grad_norm": 0.2979099452495575, "learning_rate": 0.000475, "loss": 2.0047, "step": 285 },
    { "epoch": 0.0279296875, "grad_norm": 0.34131935238838196, "learning_rate": 0.0004766666666666667, "loss": 1.9907, "step": 286 },
    { "epoch": 0.02802734375, "grad_norm": 0.37178686261177063, "learning_rate": 0.0004783333333333333, "loss": 1.9806, "step": 287 },
    { "epoch": 0.028125, "grad_norm": 0.36835598945617676, "learning_rate": 0.00048, "loss": 2.0134, "step": 288 },
    { "epoch": 0.02822265625, "grad_norm": 0.29690125584602356, "learning_rate": 0.0004816666666666667, "loss": 2.0261, "step": 289 },
    { "epoch": 0.0283203125, "grad_norm": 0.2690771818161011, "learning_rate": 0.00048333333333333334, "loss": 1.9718, "step": 290 },
    { "epoch": 0.02841796875, "grad_norm": 0.3377201557159424, "learning_rate": 0.00048499999999999997, "loss": 1.99, "step": 291 },
    { "epoch": 0.028515625, "grad_norm": 0.34973010420799255, "learning_rate": 0.0004866666666666667, "loss": 1.9721, "step": 292 },
    { "epoch": 0.02861328125, "grad_norm": 0.3172457218170166, "learning_rate": 0.0004883333333333333, "loss": 1.9928, "step": 293 },
    { "epoch": 0.0287109375, "grad_norm": 0.34357598423957825, "learning_rate": 0.00049, "loss": 1.9995, "step": 294 },
    { "epoch": 0.02880859375, "grad_norm": 0.3824540376663208, "learning_rate": 0.0004916666666666666, "loss": 1.9772, "step": 295 },
    { "epoch": 0.02890625, "grad_norm": 0.3704535663127899, "learning_rate": 0.0004933333333333334, "loss": 1.9829, "step": 296 },
    { "epoch": 0.02900390625, "grad_norm": 0.2571757733821869, "learning_rate": 0.000495, "loss": 1.9966, "step": 297 },
    { "epoch": 0.0291015625, "grad_norm": 0.3970927894115448, "learning_rate": 0.0004966666666666666, "loss": 1.9841, "step": 298 },
    { "epoch": 0.02919921875, "grad_norm": 0.3420144319534302, "learning_rate": 0.0004983333333333334, "loss": 1.9747, "step": 299 },
    { "epoch": 0.029296875, "grad_norm": 0.30147823691368103, "learning_rate": 0.0005, "loss": 1.9999, "step": 300 },
    { "epoch": 0.02939453125, "grad_norm": 0.33727970719337463, "learning_rate": 0.0004999999887622467, "loss": 2.0084, "step": 301 },
    { "epoch": 0.0294921875, "grad_norm": 0.29407384991645813, "learning_rate": 0.0004999999550489878, "loss": 2.0144, "step": 302 },
    { "epoch": 0.02958984375, "grad_norm": 0.3489755094051361, "learning_rate": 0.0004999998988602267, "loss": 2.0058, "step": 303 },
    { "epoch": 0.0296875, "grad_norm": 0.3327770233154297, "learning_rate": 0.0004999998201959691, "loss": 2.0166, "step": 304 },
    { "epoch": 0.02978515625, "grad_norm": 0.2923370599746704, "learning_rate": 0.0004999997190562227, "loss": 2.0148, "step": 305 },
    { "epoch": 0.0298828125, "grad_norm": 0.31616437435150146, "learning_rate": 0.0004999995954409976, "loss": 1.9772, "step": 306 },
    { "epoch": 0.02998046875, "grad_norm": 0.22982288897037506, "learning_rate": 0.0004999994493503064, "loss": 2.0584, "step": 307 },
    { "epoch": 0.030078125, "grad_norm": 0.2886744737625122, "learning_rate": 0.0004999992807841634, "loss": 2.0114, "step": 308 },
    { "epoch": 0.03017578125, "grad_norm": 0.3027271032333374, "learning_rate": 0.0004999990897425856, "loss": 2.011, "step": 309 },
    { "epoch": 0.0302734375, "grad_norm": 0.3191162645816803, "learning_rate": 0.0004999988762255922, "loss": 1.9962, "step": 310 },
    { "epoch": 0.03037109375, "grad_norm": 0.34986981749534607, "learning_rate": 0.0004999986402332042, "loss": 1.9612, "step": 311 },
    { "epoch": 0.03046875, "grad_norm": 0.36431390047073364, "learning_rate": 0.0004999983817654454, "loss": 2.045, "step": 312 },
    { "epoch": 0.03056640625, "grad_norm": 0.4198042154312134, "learning_rate": 0.0004999981008223416, "loss": 2.0132, "step": 313 },
    { "epoch": 0.0306640625, "grad_norm": 0.43374890089035034, "learning_rate": 0.0004999977974039207, "loss": 2.0578, "step": 314 },
    { "epoch": 0.03076171875, "grad_norm": 0.3654812276363373, "learning_rate": 0.0004999974715102132, "loss": 1.9721, "step": 315 },
    { "epoch": 0.030859375, "grad_norm": 0.29420921206474304, "learning_rate": 0.0004999971231412517, "loss": 2.0296, "step": 316 },
    { "epoch": 0.03095703125, "grad_norm": 0.31979072093963623, "learning_rate": 0.0004999967522970708, "loss": 1.9623, "step": 317 },
    { "epoch": 0.0310546875, "grad_norm": 0.3570129871368408, "learning_rate": 0.0004999963589777076, "loss": 1.9966, "step": 318 },
    { "epoch": 0.03115234375, "grad_norm": 0.29743143916130066, "learning_rate": 0.0004999959431832016, "loss": 1.9535, "step": 319 },
    { "epoch": 0.03125, "grad_norm": 0.23641493916511536, "learning_rate": 0.000499995504913594, "loss": 2.0304, "step": 320 },
    { "epoch": 0.03134765625, "grad_norm": 0.3556622564792633, "learning_rate": 0.0004999950441689288, "loss": 1.971, "step": 321 },
    { "epoch": 0.0314453125, "grad_norm": 0.323939710855484, "learning_rate": 0.0004999945609492519, "loss": 2.0153, "step": 322 },
    { "epoch": 0.03154296875, "grad_norm": 0.22797244787216187, "learning_rate": 0.0004999940552546118, "loss": 1.9807, "step": 323 },
    { "epoch": 0.031640625, "grad_norm": 0.2641647458076477, "learning_rate": 0.0004999935270850587, "loss": 1.9988, "step": 324 },
    { "epoch": 0.03173828125, "grad_norm": 0.25289344787597656, "learning_rate": 0.0004999929764406455, "loss": 1.9748, "step": 325 },
    { "epoch": 0.0318359375, "grad_norm": 0.2377796769142151, "learning_rate": 0.0004999924033214274, "loss": 1.9983, "step": 326 },
    { "epoch": 0.03193359375, "grad_norm": 0.2711915671825409, "learning_rate": 0.0004999918077274612, "loss": 1.9643, "step": 327 },
    { "epoch": 0.03203125, "grad_norm": 0.2866462767124176, "learning_rate": 0.0004999911896588068, "loss": 2.0196, "step": 328 },
    { "epoch": 0.03212890625, "grad_norm": 0.25075578689575195, "learning_rate": 0.0004999905491155257, "loss": 2.0426, "step": 329 },
    { "epoch": 0.0322265625, "grad_norm": 0.266648530960083, "learning_rate": 0.000499989886097682, "loss": 2.0318, "step": 330 },
    { "epoch": 0.03232421875, "grad_norm": 0.22494247555732727, "learning_rate": 0.0004999892006053421, "loss": 1.9839, "step": 331 },
    { "epoch": 0.032421875, "grad_norm": 0.3179854452610016, "learning_rate": 0.0004999884926385741, "loss": 1.9981, "step": 332 },
    { "epoch": 0.03251953125, "grad_norm": 0.2754990756511688, "learning_rate": 0.000499987762197449, "loss": 1.9879, "step": 333 },
    { "epoch": 0.0326171875, "grad_norm": 0.2669137418270111, "learning_rate": 0.0004999870092820395, "loss": 2.0083, "step": 334 },
    { "epoch": 0.03271484375, "grad_norm": 0.31376007199287415, "learning_rate": 0.0004999862338924212, "loss": 2.0166, "step": 335 },
    { "epoch": 0.0328125, "grad_norm": 0.32645899057388306, "learning_rate": 0.0004999854360286712, "loss": 2.0019, "step": 336 },
    { "epoch": 0.03291015625, "grad_norm": 0.3550071120262146, "learning_rate": 0.0004999846156908692, "loss": 1.9744, "step": 337 },
    { "epoch": 0.0330078125, "grad_norm": 0.30171769857406616, "learning_rate": 0.0004999837728790975, "loss": 2.0231, "step": 338 },
    { "epoch": 0.03310546875, "grad_norm": 0.23128142952919006, "learning_rate": 0.00049998290759344, "loss": 2.001, "step": 339 },
    { "epoch": 0.033203125, "grad_norm": 0.2433364987373352, "learning_rate": 0.0004999820198339832, "loss": 1.9945, "step": 340 },
    { "epoch": 0.03330078125, "grad_norm": 0.30562201142311096, "learning_rate": 0.0004999811096008159, "loss": 2.0228, "step": 341 },
    { "epoch": 0.0333984375, "grad_norm": 0.33020487427711487, "learning_rate": 0.0004999801768940287, "loss": 1.9691, "step": 342 },
    { "epoch": 0.03349609375, "grad_norm": 0.3125375807285309, "learning_rate": 0.0004999792217137151, "loss": 1.9728, "step": 343 },
    { "epoch": 0.03359375, "grad_norm": 0.26769348978996277, "learning_rate": 0.0004999782440599702, "loss": 2.0066, "step": 344 },
    { "epoch": 0.03369140625, "grad_norm": 0.3596431314945221, "learning_rate": 0.0004999772439328921, "loss": 2.0426, "step": 345 },
    { "epoch": 0.0337890625, "grad_norm": 0.34117281436920166, "learning_rate": 0.0004999762213325803, "loss": 1.9985, "step": 346 },
    { "epoch": 0.03388671875, "grad_norm": 0.2922564148902893, "learning_rate": 0.0004999751762591371, "loss": 2.0346, "step": 347 },
    { "epoch": 0.033984375, "grad_norm": 0.2813419997692108, "learning_rate": 0.0004999741087126669, "loss": 2.0573, "step": 348 },
    { "epoch": 0.03408203125, "grad_norm": 0.27346089482307434, "learning_rate": 0.0004999730186932764, "loss": 1.9655, "step": 349 },
    { "epoch": 0.0341796875, "grad_norm": 0.24704065918922424, "learning_rate": 0.0004999719062010745, "loss": 1.9542, "step": 350 },
    { "epoch": 0.03427734375, "grad_norm": 0.24761976301670074, "learning_rate": 0.000499970771236172, "loss": 1.9526, "step": 351 },
    { "epoch": 0.034375, "grad_norm": 0.2508860230445862, "learning_rate": 0.0004999696137986826, "loss": 2.0119, "step": 352 },
    { "epoch": 0.03447265625, "grad_norm": 0.24160990118980408, "learning_rate": 0.0004999684338887219, "loss": 2.0148, "step": 353 },
    { "epoch": 0.0345703125, "grad_norm": 0.2879098355770111, "learning_rate": 0.0004999672315064076, "loss": 2.0206, "step": 354 },
    { "epoch": 0.03466796875, "grad_norm": 0.28366580605506897, "learning_rate": 0.0004999660066518601, "loss": 2.0737, "step": 355 },
    { "epoch": 0.034765625, "grad_norm": 0.3401612937450409, "learning_rate": 0.0004999647593252013, "loss": 1.9868, "step": 356 },
    { "epoch": 0.03486328125, "grad_norm": 0.4696269631385803, "learning_rate": 0.0004999634895265562, "loss": 2.0079, "step": 357 },
    { "epoch": 0.0349609375, "grad_norm": 0.596168041229248, "learning_rate": 0.0004999621972560515, "loss": 2.0084, "step": 358 },
    { "epoch": 0.03505859375, "grad_norm": 0.5319205522537231, "learning_rate": 0.0004999608825138162, "loss": 2.0084, "step": 359 },
    { "epoch": 0.03515625, "grad_norm": 0.24298223853111267, "learning_rate": 0.0004999595452999818, "loss": 1.9183, "step": 360 },
    { "epoch": 0.03525390625, "grad_norm": 0.39650923013687134, "learning_rate": 0.0004999581856146817, "loss": 1.9886, "step": 361 },
    { "epoch": 0.0353515625, "grad_norm": 0.3819667100906372, "learning_rate": 0.0004999568034580518, "loss": 2.0047, "step": 362 },
    { "epoch": 0.03544921875, "grad_norm": 0.3298279345035553, "learning_rate": 0.0004999553988302303, "loss": 1.9466, "step": 363 },
    { "epoch": 0.035546875, "grad_norm": 0.2343115359544754, "learning_rate": 0.0004999539717313573, "loss": 1.9828, "step": 364 },
    { "epoch": 0.03564453125, "grad_norm": 0.31238943338394165, "learning_rate": 0.0004999525221615755, "loss": 2.0721, "step": 365 },
    { "epoch": 0.0357421875, "grad_norm": 0.27481910586357117, "learning_rate": 0.0004999510501210295, "loss": 2.0106, "step": 366 },
    { "epoch": 0.03583984375, "grad_norm": 0.24870915710926056, "learning_rate": 0.0004999495556098666, "loss": 1.9846, "step": 367 },
    { "epoch": 0.0359375, "grad_norm": 0.2666539251804352, "learning_rate": 0.0004999480386282359, "loss": 1.9988, "step": 368 },
    { "epoch": 0.03603515625, "grad_norm": 0.23231451213359833, "learning_rate": 0.000499946499176289, "loss": 2.0235, "step": 369 },
    { "epoch": 0.0361328125, "grad_norm": 0.19276577234268188, "learning_rate": 0.0004999449372541798, "loss": 1.9832, "step": 370 },
    { "epoch": 0.03623046875, "grad_norm": 0.23545822501182556, "learning_rate": 0.000499943352862064, "loss": 1.9879, "step": 371 },
    { "epoch": 0.036328125, "grad_norm": 0.2608807384967804, "learning_rate": 0.0004999417460001002, "loss": 2.047, "step": 372 },
    { "epoch": 0.03642578125, "grad_norm": 0.27395492792129517, "learning_rate": 0.0004999401166684487, "loss": 1.9496, "step": 373 },
    { "epoch": 0.0365234375, "grad_norm": 0.24661333858966827, "learning_rate": 0.0004999384648672724, "loss": 1.9924, "step": 374 },
    { "epoch": 0.03662109375, "grad_norm": 0.23559828102588654, "learning_rate": 0.0004999367905967362, "loss": 1.979, "step": 375 },
    { "epoch": 0.03671875, "grad_norm": 0.2556357681751251, "learning_rate": 0.0004999350938570074, "loss": 1.9793, "step": 376 },
    { "epoch": 0.03681640625, "grad_norm": 0.29081088304519653, "learning_rate": 0.0004999333746482555, "loss": 1.962, "step": 377 },
    { "epoch": 0.0369140625, "grad_norm": 0.2583800256252289, "learning_rate": 0.0004999316329706521, "loss": 1.9765, "step": 378 },
    { "epoch": 0.03701171875, "grad_norm": 0.2506580054759979, "learning_rate": 0.0004999298688243714, "loss": 2.0302, "step": 379 },
    { "epoch": 0.037109375, "grad_norm": 0.289530485868454, "learning_rate": 0.0004999280822095895, "loss": 1.9514, "step": 380 },
    { "epoch": 0.03720703125, "grad_norm": 0.24479152262210846, "learning_rate": 0.0004999262731264848, "loss": 1.9742, "step": 381 },
    { "epoch": 0.0373046875, "grad_norm": 0.23676550388336182, "learning_rate": 0.0004999244415752381, "loss": 2.0161, "step": 382 },
    { "epoch": 0.03740234375, "grad_norm": 0.2447502315044403, "learning_rate": 0.0004999225875560323, "loss": 2.0114, "step": 383 },
    { "epoch": 0.0375, "grad_norm": 0.20993874967098236, "learning_rate": 0.0004999207110690528, "loss": 1.9786, "step": 384 },
    { "epoch": 0.03759765625, "grad_norm": 0.23405557870864868, "learning_rate": 0.0004999188121144867, "loss": 2.0004, "step": 385 },
    { "epoch": 0.0376953125, "grad_norm": 0.2310025990009308, "learning_rate": 0.0004999168906925238, "loss": 1.924, "step": 386 },
    { "epoch": 0.03779296875, "grad_norm": 0.2507460117340088, "learning_rate": 0.0004999149468033564, "loss": 2.0301, "step": 387 },
    { "epoch": 0.037890625, "grad_norm": 0.30916762351989746, "learning_rate": 0.0004999129804471782, "loss": 1.9948, "step": 388 },
    { "epoch": 0.03798828125, "grad_norm": 0.42094507813453674, "learning_rate": 0.0004999109916241858, "loss": 2.0128, "step": 389 },
    { "epoch": 0.0380859375, "grad_norm": 0.5398088693618774, "learning_rate": 0.0004999089803345779,
|
"loss": 1.9866, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.03818359375, |
|
"grad_norm": 0.4601620137691498, |
|
"learning_rate": 0.0004999069465785554, |
|
"loss": 1.9807, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.03828125, |
|
"grad_norm": 0.24873095750808716, |
|
"learning_rate": 0.0004999048903563213, |
|
"loss": 1.9225, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.03837890625, |
|
"grad_norm": 0.37363573908805847, |
|
"learning_rate": 0.0004999028116680814, |
|
"loss": 2.0212, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.0384765625, |
|
"grad_norm": 0.27399107813835144, |
|
"learning_rate": 0.0004999007105140428, |
|
"loss": 1.9797, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.03857421875, |
|
"grad_norm": 0.2717953622341156, |
|
"learning_rate": 0.0004998985868944158, |
|
"loss": 2.0119, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.038671875, |
|
"grad_norm": 0.2766648530960083, |
|
"learning_rate": 0.0004998964408094124, |
|
"loss": 1.9936, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.03876953125, |
|
"grad_norm": 0.2989843487739563, |
|
"learning_rate": 0.0004998942722592469, |
|
"loss": 2.0261, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.0388671875, |
|
"grad_norm": 0.31954097747802734, |
|
"learning_rate": 0.000499892081244136, |
|
"loss": 1.9713, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.03896484375, |
|
"grad_norm": 0.31199777126312256, |
|
"learning_rate": 0.0004998898677642987, |
|
"loss": 1.9548, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.0390625, |
|
"grad_norm": 0.2475418746471405, |
|
"learning_rate": 0.0004998876318199557, |
|
"loss": 1.9878, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.03916015625, |
|
"grad_norm": 0.25001809000968933, |
|
"learning_rate": 0.0004998853734113308, |
|
"loss": 1.991, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.0392578125, |
|
"grad_norm": 0.2468329817056656, |
|
"learning_rate": 0.0004998830925386492, |
|
"loss": 1.9988, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.03935546875, |
|
"grad_norm": 0.23273305594921112, |
|
"learning_rate": 0.000499880789202139, |
|
"loss": 2.0138, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.039453125, |
|
"grad_norm": 0.21512626111507416, |
|
"learning_rate": 0.0004998784634020303, |
|
"loss": 1.9326, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.03955078125, |
|
"grad_norm": 0.2123369723558426, |
|
"learning_rate": 0.0004998761151385554, |
|
"loss": 1.9954, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.0396484375, |
|
"grad_norm": 0.20985403656959534, |
|
"learning_rate": 0.0004998737444119488, |
|
"loss": 1.996, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.03974609375, |
|
"grad_norm": 0.19709332287311554, |
|
"learning_rate": 0.0004998713512224473, |
|
"loss": 1.9642, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.03984375, |
|
"grad_norm": 0.21997478604316711, |
|
"learning_rate": 0.00049986893557029, |
|
"loss": 1.9925, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.03994140625, |
|
"grad_norm": 0.2419801503419876, |
|
"learning_rate": 0.0004998664974557182, |
|
"loss": 2.0262, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.0400390625, |
|
"grad_norm": 0.2527197003364563, |
|
"learning_rate": 0.0004998640368789754, |
|
"loss": 2.0011, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.04013671875, |
|
"grad_norm": 0.2539777159690857, |
|
"learning_rate": 0.0004998615538403074, |
|
"loss": 1.9799, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.040234375, |
|
"grad_norm": 0.2419251948595047, |
|
"learning_rate": 0.0004998590483399623, |
|
"loss": 2.0035, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.04033203125, |
|
"grad_norm": 0.26659199595451355, |
|
"learning_rate": 0.0004998565203781904, |
|
"loss": 1.9743, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.0404296875, |
|
"grad_norm": 0.3216726779937744, |
|
"learning_rate": 0.0004998539699552441, |
|
"loss": 1.979, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.04052734375, |
|
"grad_norm": 0.2780512571334839, |
|
"learning_rate": 0.0004998513970713783, |
|
"loss": 1.9892, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.040625, |
|
"grad_norm": 0.2947809100151062, |
|
"learning_rate": 0.00049984880172685, |
|
"loss": 1.9836, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.04072265625, |
|
"grad_norm": 0.33937209844589233, |
|
"learning_rate": 0.0004998461839219182, |
|
"loss": 2.0007, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.0408203125, |
|
"grad_norm": 0.3530365228652954, |
|
"learning_rate": 0.0004998435436568446, |
|
"loss": 1.9462, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.04091796875, |
|
"grad_norm": 0.34433260560035706, |
|
"learning_rate": 0.000499840880931893, |
|
"loss": 1.9733, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.041015625, |
|
"grad_norm": 0.323081910610199, |
|
"learning_rate": 0.0004998381957473293, |
|
"loss": 1.9522, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.04111328125, |
|
"grad_norm": 0.22895868122577667, |
|
"learning_rate": 0.0004998354881034217, |
|
"loss": 1.9672, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.0412109375, |
|
"grad_norm": 0.2543105185031891, |
|
"learning_rate": 0.0004998327580004408, |
|
"loss": 1.9429, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.04130859375, |
|
"grad_norm": 0.2795581519603729, |
|
"learning_rate": 0.0004998300054386591, |
|
"loss": 1.9902, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.04140625, |
|
"grad_norm": 0.2201048880815506, |
|
"learning_rate": 0.0004998272304183517, |
|
"loss": 1.9365, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.04150390625, |
|
"grad_norm": 0.26653197407722473, |
|
"learning_rate": 0.0004998244329397958, |
|
"loss": 2.0178, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.0416015625, |
|
"grad_norm": 0.26959821581840515, |
|
"learning_rate": 0.0004998216130032708, |
|
"loss": 1.9625, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.04169921875, |
|
"grad_norm": 0.2673629820346832, |
|
"learning_rate": 0.0004998187706090584, |
|
"loss": 1.995, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.041796875, |
|
"grad_norm": 0.3190925121307373, |
|
"learning_rate": 0.0004998159057574426, |
|
"loss": 2.0094, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.04189453125, |
|
"grad_norm": 0.3376927673816681, |
|
"learning_rate": 0.0004998130184487094, |
|
"loss": 2.0535, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.0419921875, |
|
"grad_norm": 0.35333067178726196, |
|
"learning_rate": 0.0004998101086831474, |
|
"loss": 2.0256, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.04208984375, |
|
"grad_norm": 0.3051585853099823, |
|
"learning_rate": 0.0004998071764610471, |
|
"loss": 1.9798, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.0421875, |
|
"grad_norm": 0.21881523728370667, |
|
"learning_rate": 0.0004998042217827015, |
|
"loss": 1.9726, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.04228515625, |
|
"grad_norm": 0.2296576052904129, |
|
"learning_rate": 0.0004998012446484057, |
|
"loss": 1.9675, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.0423828125, |
|
"grad_norm": 0.24055050313472748, |
|
"learning_rate": 0.0004997982450584572, |
|
"loss": 1.9488, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.04248046875, |
|
"grad_norm": 0.22267191112041473, |
|
"learning_rate": 0.0004997952230131555, |
|
"loss": 1.9617, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.042578125, |
|
"grad_norm": 0.2590855658054352, |
|
"learning_rate": 0.0004997921785128026, |
|
"loss": 1.9745, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.04267578125, |
|
"grad_norm": 0.2995806336402893, |
|
"learning_rate": 0.0004997891115577025, |
|
"loss": 1.9667, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.0427734375, |
|
"grad_norm": 0.3083277642726898, |
|
"learning_rate": 0.0004997860221481616, |
|
"loss": 1.9661, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.04287109375, |
|
"grad_norm": 0.31743094325065613, |
|
"learning_rate": 0.0004997829102844885, |
|
"loss": 2.0172, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.04296875, |
|
"grad_norm": 0.37507855892181396, |
|
"learning_rate": 0.0004997797759669941, |
|
"loss": 2.0211, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.04306640625, |
|
"grad_norm": 0.35785287618637085, |
|
"learning_rate": 0.0004997766191959914, |
|
"loss": 2.038, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.0431640625, |
|
"grad_norm": 0.3099903166294098, |
|
"learning_rate": 0.0004997734399717958, |
|
"loss": 1.9879, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.04326171875, |
|
"grad_norm": 0.32211577892303467, |
|
"learning_rate": 0.0004997702382947248, |
|
"loss": 1.987, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.043359375, |
|
"grad_norm": 0.24999700486660004, |
|
"learning_rate": 0.0004997670141650984, |
|
"loss": 1.9765, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.04345703125, |
|
"grad_norm": 0.23301391303539276, |
|
"learning_rate": 0.0004997637675832386, |
|
"loss": 1.9592, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.0435546875, |
|
"grad_norm": 0.22780932486057281, |
|
"learning_rate": 0.0004997604985494694, |
|
"loss": 1.9762, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.04365234375, |
|
"grad_norm": 0.2347191423177719, |
|
"learning_rate": 0.0004997572070641178, |
|
"loss": 1.9791, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.04375, |
|
"grad_norm": 0.3288608193397522, |
|
"learning_rate": 0.0004997538931275123, |
|
"loss": 1.9664, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.04384765625, |
|
"grad_norm": 0.29897212982177734, |
|
"learning_rate": 0.000499750556739984, |
|
"loss": 1.9876, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.0439453125, |
|
"grad_norm": 0.24389693140983582, |
|
"learning_rate": 0.0004997471979018663, |
|
"loss": 1.9742, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.04404296875, |
|
"grad_norm": 0.3172384798526764, |
|
"learning_rate": 0.0004997438166134945, |
|
"loss": 1.969, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.044140625, |
|
"grad_norm": 0.22951611876487732, |
|
"learning_rate": 0.0004997404128752065, |
|
"loss": 1.9832, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.04423828125, |
|
"grad_norm": 0.2356499582529068, |
|
"learning_rate": 0.0004997369866873423, |
|
"loss": 2.0236, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.0443359375, |
|
"grad_norm": 0.2687929570674896, |
|
"learning_rate": 0.000499733538050244, |
|
"loss": 1.9521, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.04443359375, |
|
"grad_norm": 0.2512679398059845, |
|
"learning_rate": 0.0004997300669642564, |
|
"loss": 1.9754, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.04453125, |
|
"grad_norm": 0.22110989689826965, |
|
"learning_rate": 0.0004997265734297259, |
|
"loss": 1.9931, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.04462890625, |
|
"grad_norm": 0.2548464834690094, |
|
"learning_rate": 0.0004997230574470017, |
|
"loss": 2.001, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.0447265625, |
|
"grad_norm": 0.3107970356941223, |
|
"learning_rate": 0.0004997195190164349, |
|
"loss": 1.9855, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.04482421875, |
|
"grad_norm": 0.2989678680896759, |
|
"learning_rate": 0.0004997159581383789, |
|
"loss": 1.9646, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.044921875, |
|
"grad_norm": 0.3651764690876007, |
|
"learning_rate": 0.0004997123748131896, |
|
"loss": 1.9865, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.04501953125, |
|
"grad_norm": 0.41693389415740967, |
|
"learning_rate": 0.0004997087690412248, |
|
"loss": 1.9756, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.0451171875, |
|
"grad_norm": 0.425959974527359, |
|
"learning_rate": 0.0004997051408228447, |
|
"loss": 1.9887, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.04521484375, |
|
"grad_norm": 0.38966843485832214, |
|
"learning_rate": 0.0004997014901584118, |
|
"loss": 1.9532, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.0453125, |
|
"grad_norm": 0.29931432008743286, |
|
"learning_rate": 0.0004996978170482906, |
|
"loss": 1.9806, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.04541015625, |
|
"grad_norm": 0.23077397048473358, |
|
"learning_rate": 0.0004996941214928482, |
|
"loss": 2.0258, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.0455078125, |
|
"grad_norm": 0.32101985812187195, |
|
"learning_rate": 0.0004996904034924536, |
|
"loss": 2.0023, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.04560546875, |
|
"grad_norm": 0.3225870430469513, |
|
"learning_rate": 0.0004996866630474783, |
|
"loss": 1.9385, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.045703125, |
|
"grad_norm": 0.2592551112174988, |
|
"learning_rate": 0.0004996829001582959, |
|
"loss": 2.0176, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.04580078125, |
|
"grad_norm": 0.24367393553256989, |
|
"learning_rate": 0.0004996791148252824, |
|
"loss": 1.9844, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.0458984375, |
|
"grad_norm": 0.26423218846321106, |
|
"learning_rate": 0.0004996753070488156, |
|
"loss": 1.9675, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.04599609375, |
|
"grad_norm": 0.27801692485809326, |
|
"learning_rate": 0.0004996714768292762, |
|
"loss": 1.9265, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.04609375, |
|
"grad_norm": 0.3210003077983856, |
|
"learning_rate": 0.0004996676241670467, |
|
"loss": 1.9937, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.04619140625, |
|
"grad_norm": 0.3082759380340576, |
|
"learning_rate": 0.0004996637490625118, |
|
"loss": 1.9997, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.0462890625, |
|
"grad_norm": 0.2486785650253296, |
|
"learning_rate": 0.0004996598515160589, |
|
"loss": 2.0687, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.04638671875, |
|
"grad_norm": 0.31927499175071716, |
|
"learning_rate": 0.000499655931528077, |
|
"loss": 1.9853, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.046484375, |
|
"grad_norm": 0.2736678123474121, |
|
"learning_rate": 0.0004996519890989578, |
|
"loss": 1.9916, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.04658203125, |
|
"grad_norm": 0.29522353410720825, |
|
"learning_rate": 0.0004996480242290952, |
|
"loss": 1.9554, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.0466796875, |
|
"grad_norm": 0.26841938495635986, |
|
"learning_rate": 0.0004996440369188851, |
|
"loss": 1.9275, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.04677734375, |
|
"grad_norm": 0.2695488929748535, |
|
"learning_rate": 0.0004996400271687259, |
|
"loss": 1.9898, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.046875, |
|
"grad_norm": 0.36634454131126404, |
|
"learning_rate": 0.0004996359949790181, |
|
"loss": 1.9799, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.04697265625, |
|
"grad_norm": 0.37612974643707275, |
|
"learning_rate": 0.0004996319403501647, |
|
"loss": 1.9645, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.0470703125, |
|
"grad_norm": 0.29007261991500854, |
|
"learning_rate": 0.0004996278632825703, |
|
"loss": 1.9486, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.04716796875, |
|
"grad_norm": 0.30373314023017883, |
|
"learning_rate": 0.0004996237637766424, |
|
"loss": 1.9913, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.047265625, |
|
"grad_norm": 0.2949075400829315, |
|
"learning_rate": 0.0004996196418327906, |
|
"loss": 1.9894, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.04736328125, |
|
"grad_norm": 0.30601680278778076, |
|
"learning_rate": 0.0004996154974514264, |
|
"loss": 1.9835, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.0474609375, |
|
"grad_norm": 0.2896345853805542, |
|
"learning_rate": 0.000499611330632964, |
|
"loss": 2.0043, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.04755859375, |
|
"grad_norm": 0.3836851418018341, |
|
"learning_rate": 0.0004996071413778195, |
|
"loss": 1.9782, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.04765625, |
|
"grad_norm": 0.4167245626449585, |
|
"learning_rate": 0.0004996029296864114, |
|
"loss": 1.9792, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.04775390625, |
|
"grad_norm": 0.3247157335281372, |
|
"learning_rate": 0.0004995986955591606, |
|
"loss": 1.9548, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.0478515625, |
|
"grad_norm": 0.36726462841033936, |
|
"learning_rate": 0.0004995944389964897, |
|
"loss": 1.9844, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.04794921875, |
|
"grad_norm": 0.27440258860588074, |
|
"learning_rate": 0.0004995901599988241, |
|
"loss": 1.9608, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.048046875, |
|
"grad_norm": 0.271771103143692, |
|
"learning_rate": 0.0004995858585665912, |
|
"loss": 2.022, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.04814453125, |
|
"grad_norm": 0.3079121708869934, |
|
"learning_rate": 0.0004995815347002208, |
|
"loss": 1.9804, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.0482421875, |
|
"grad_norm": 0.2976233661174774, |
|
"learning_rate": 0.0004995771884001445, |
|
"loss": 1.9679, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.04833984375, |
|
"grad_norm": 0.31318438053131104, |
|
"learning_rate": 0.0004995728196667969, |
|
"loss": 2.0342, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.0484375, |
|
"grad_norm": 0.2397848516702652, |
|
"learning_rate": 0.0004995684285006139, |
|
"loss": 1.9801, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.04853515625, |
|
"grad_norm": 0.2205589860677719, |
|
"learning_rate": 0.0004995640149020346, |
|
"loss": 2.0088, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.0486328125, |
|
"grad_norm": 0.23892101645469666, |
|
"learning_rate": 0.0004995595788714995, |
|
"loss": 1.9888, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.04873046875, |
|
"grad_norm": 0.2999497354030609, |
|
"learning_rate": 0.000499555120409452, |
|
"loss": 1.9896, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.048828125, |
|
"grad_norm": 0.3309103548526764, |
|
"learning_rate": 0.0004995506395163372, |
|
"loss": 1.9394, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.04892578125, |
|
"grad_norm": 0.36243122816085815, |
|
"learning_rate": 0.000499546136192603, |
|
"loss": 1.9604, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.0490234375, |
|
"grad_norm": 0.3052852153778076, |
|
"learning_rate": 0.0004995416104386991, |
|
"loss": 1.9887, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.04912109375, |
|
"grad_norm": 0.2957259714603424, |
|
"learning_rate": 0.0004995370622550775, |
|
"loss": 1.9876, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.04921875, |
|
"grad_norm": 0.3991003632545471, |
|
"learning_rate": 0.0004995324916421926, |
|
"loss": 1.9932, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.04931640625, |
|
"grad_norm": 0.27617356181144714, |
|
"learning_rate": 0.000499527898600501, |
|
"loss": 1.9387, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.0494140625, |
|
"grad_norm": 0.2820669412612915, |
|
"learning_rate": 0.0004995232831304614, |
|
"loss": 2.012, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.04951171875, |
|
"grad_norm": 0.3026665449142456, |
|
"learning_rate": 0.0004995186452325351, |
|
"loss": 1.9709, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.049609375, |
|
"grad_norm": 0.2331731766462326, |
|
"learning_rate": 0.000499513984907185, |
|
"loss": 1.9617, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.04970703125, |
|
"grad_norm": 0.2791639268398285, |
|
"learning_rate": 0.0004995093021548768, |
|
"loss": 1.9635, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.0498046875, |
|
"grad_norm": 0.257519006729126, |
|
"learning_rate": 0.0004995045969760785, |
|
"loss": 1.8908, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.04990234375, |
|
"grad_norm": 0.26278433203697205, |
|
"learning_rate": 0.0004994998693712598, |
|
"loss": 1.9418, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.2534024715423584, |
|
"learning_rate": 0.0004994951193408929, |
|
"loss": 1.9821, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.05009765625, |
|
"grad_norm": 0.272348552942276, |
|
"learning_rate": 0.0004994903468854527, |
|
"loss": 1.982, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.0501953125, |
|
"grad_norm": 0.23883765935897827, |
|
"learning_rate": 0.0004994855520054154, |
|
"loss": 2.006, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.05029296875, |
|
"grad_norm": 0.25627830624580383, |
|
"learning_rate": 0.0004994807347012603, |
|
"loss": 2.0008, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.050390625, |
|
"grad_norm": 0.33997049927711487, |
|
"learning_rate": 0.0004994758949734686, |
|
"loss": 2.0339, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.05048828125, |
|
"grad_norm": 0.4115971028804779, |
|
"learning_rate": 0.0004994710328225236, |
|
"loss": 1.9982, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.0505859375, |
|
"grad_norm": 0.4417625069618225, |
|
"learning_rate": 0.000499466148248911, |
|
"loss": 1.9742, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.05068359375, |
|
"grad_norm": 0.4123833477497101, |
|
"learning_rate": 0.0004994612412531189, |
|
"loss": 2.0148, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.05078125, |
|
"grad_norm": 0.2234133630990982, |
|
"learning_rate": 0.0004994563118356373, |
|
"loss": 1.9679, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.05087890625, |
|
"grad_norm": 0.35439473390579224, |
|
"learning_rate": 0.0004994513599969586, |
|
"loss": 1.9602, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.0509765625, |
|
"grad_norm": 0.32346996665000916, |
|
"learning_rate": 0.0004994463857375776, |
|
"loss": 2.0106, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.05107421875, |
|
"grad_norm": 0.24506594240665436, |
|
"learning_rate": 0.000499441389057991, |
|
"loss": 1.993, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.051171875, |
|
"grad_norm": 0.26808494329452515, |
|
"learning_rate": 0.000499436369958698, |
|
"loss": 1.9764, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.05126953125, |
|
"grad_norm": 0.21936193108558655, |
|
"learning_rate": 0.0004994313284401999, |
|
"loss": 1.9619, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.0513671875, |
|
"grad_norm": 0.2545083165168762, |
|
"learning_rate": 0.0004994262645030005, |
|
"loss": 1.9825, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.05146484375, |
|
"grad_norm": 0.26330727338790894, |
|
"learning_rate": 0.0004994211781476055, |
|
"loss": 1.9512, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.0515625, |
|
"grad_norm": 0.2708059549331665, |
|
"learning_rate": 0.0004994160693745229, |
|
"loss": 1.9761, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.05166015625, |
|
"grad_norm": 0.2882954478263855, |
|
"learning_rate": 0.0004994109381842632, |
|
"loss": 2.0223, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.0517578125, |
|
"grad_norm": 0.25243133306503296, |
|
"learning_rate": 0.0004994057845773389, |
|
"loss": 1.976, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.05185546875, |
|
"grad_norm": 0.23666390776634216, |
|
"learning_rate": 0.0004994006085542648, |
|
"loss": 1.9729, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.051953125, |
|
"grad_norm": 0.24242763221263885, |
|
"learning_rate": 0.0004993954101155578, |
|
"loss": 1.9452, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.05205078125, |
|
"grad_norm": 0.259750634431839, |
|
"learning_rate": 0.0004993901892617373, |
|
"loss": 1.9582, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.0521484375, |
|
"grad_norm": 0.28516751527786255, |
|
"learning_rate": 0.0004993849459933249, |
|
"loss": 1.9367, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.05224609375, |
|
"grad_norm": 0.26534441113471985, |
|
"learning_rate": 0.0004993796803108442, |
|
"loss": 2.0137, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.05234375, |
|
"grad_norm": 0.28771716356277466, |
|
"learning_rate": 0.0004993743922148213, |
|
"loss": 1.927, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.05244140625, |
|
"grad_norm": 0.2747785449028015, |
|
"learning_rate": 0.0004993690817057844, |
|
"loss": 1.9729, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.0525390625, |
|
"grad_norm": 0.29758942127227783, |
|
"learning_rate": 0.0004993637487842639, |
|
"loss": 1.9713, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.05263671875, |
|
"grad_norm": 0.3059535026550293, |
|
"learning_rate": 0.0004993583934507927, |
|
"loss": 2.0313, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.052734375, |
|
"grad_norm": 0.36820656061172485, |
|
"learning_rate": 0.0004993530157059056, |
|
"loss": 1.9476, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.05283203125, |
|
"grad_norm": 0.39971432089805603, |
|
"learning_rate": 0.0004993476155501396, |
|
"loss": 2.0238, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.0529296875, |
|
"grad_norm": 0.3369678854942322, |
|
"learning_rate": 0.0004993421929840346, |
|
"loss": 1.9502, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.05302734375, |
|
"grad_norm": 0.3733840882778168, |
|
"learning_rate": 0.000499336748008132, |
|
"loss": 2.0114, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.053125, |
|
"grad_norm": 0.3251579999923706, |
|
"learning_rate": 0.0004993312806229757, |
|
"loss": 1.964, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.05322265625, |
|
"grad_norm": 0.4093327522277832, |
|
"learning_rate": 0.0004993257908291117, |
|
"loss": 1.9853, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.0533203125, |
|
"grad_norm": 0.26201343536376953, |
|
"learning_rate": 0.0004993202786270888, |
|
"loss": 2.0297, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.05341796875, |
|
"grad_norm": 0.27842357754707336, |
|
"learning_rate": 0.0004993147440174572, |
|
"loss": 2.003, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.053515625, |
|
"grad_norm": 0.2930687367916107, |
|
"learning_rate": 0.00049930918700077, |
|
"loss": 1.9634, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.05361328125, |
|
"grad_norm": 0.2799973785877228, |
|
"learning_rate": 0.0004993036075775821, |
|
"loss": 2.0023, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.0537109375, |
|
"grad_norm": 0.35084354877471924, |
|
"learning_rate": 0.000499298005748451, |
|
"loss": 2.0508, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.05380859375, |
|
"grad_norm": 0.2895161211490631, |
|
"learning_rate": 0.0004992923815139362, |
|
"loss": 2.0074, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.05390625, |
|
"grad_norm": 0.2637045979499817, |
|
"learning_rate": 0.0004992867348745997, |
|
"loss": 1.9559, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.05400390625, |
|
"grad_norm": 0.2837629020214081, |
|
"learning_rate": 0.0004992810658310052, |
|
"loss": 1.9603, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.0541015625, |
|
"grad_norm": 0.2644243836402893, |
|
"learning_rate": 0.0004992753743837193, |
|
"loss": 1.9767, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.05419921875, |
|
"grad_norm": 0.21815134584903717, |
|
"learning_rate": 0.0004992696605333103, |
|
"loss": 1.9684, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.054296875, |
|
"grad_norm": 0.28167852759361267, |
|
"learning_rate": 0.0004992639242803492, |
|
"loss": 1.97, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.05439453125, |
|
"grad_norm": 0.26942357420921326, |
|
"learning_rate": 0.0004992581656254087, |
|
"loss": 1.9825, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.0544921875, |
|
"grad_norm": 0.25316697359085083, |
|
"learning_rate": 0.0004992523845690644, |
|
"loss": 1.9698, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.05458984375, |
|
"grad_norm": 0.29587239027023315, |
|
"learning_rate": 0.0004992465811118934, |
|
"loss": 2.0063, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.0546875, |
|
"grad_norm": 0.3209396004676819, |
|
"learning_rate": 0.0004992407552544757, |
|
"loss": 2.0002, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.05478515625, |
|
"grad_norm": 0.2541654109954834, |
|
"learning_rate": 0.0004992349069973931, |
|
"loss": 2.0039, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.0548828125, |
|
"grad_norm": 0.34102505445480347, |
|
"learning_rate": 0.0004992290363412298, |
|
"loss": 1.9181, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.05498046875, |
|
"grad_norm": 0.36123788356781006, |
|
"learning_rate": 0.0004992231432865723, |
|
"loss": 2.042, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.055078125, |
|
"grad_norm": 0.27344051003456116, |
|
"learning_rate": 0.0004992172278340093, |
|
"loss": 2.0236, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.05517578125, |
|
"grad_norm": 0.34781453013420105, |
|
"learning_rate": 0.0004992112899841315, |
|
"loss": 2.0017, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.0552734375, |
|
"grad_norm": 0.2822319567203522, |
|
"learning_rate": 0.0004992053297375322, |
|
"loss": 1.9607, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.05537109375, |
|
"grad_norm": 0.27945882081985474, |
|
"learning_rate": 0.0004991993470948066, |
|
"loss": 1.9707, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.05546875, |
|
"grad_norm": 0.31535595655441284, |
|
"learning_rate": 0.0004991933420565527, |
|
"loss": 1.9708, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.05556640625, |
|
"grad_norm": 0.2658466398715973, |
|
"learning_rate": 0.00049918731462337, |
|
"loss": 1.9763, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.0556640625, |
|
"grad_norm": 0.3618657886981964, |
|
"learning_rate": 0.0004991812647958607, |
|
"loss": 2.0154, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.05576171875, |
|
"grad_norm": 0.34552058577537537, |
|
"learning_rate": 0.000499175192574629, |
|
"loss": 1.978, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.055859375, |
|
"grad_norm": 0.32903313636779785, |
|
"learning_rate": 0.0004991690979602817, |
|
"loss": 1.9762, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.05595703125, |
|
"grad_norm": 0.30814382433891296, |
|
"learning_rate": 0.0004991629809534275, |
|
"loss": 1.9944, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.0560546875, |
|
"grad_norm": 0.21156492829322815, |
|
"learning_rate": 0.0004991568415546775, |
|
"loss": 1.9919, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.05615234375, |
|
"grad_norm": 0.32283446192741394, |
|
"learning_rate": 0.0004991506797646446, |
|
"loss": 1.9697, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.05625, |
|
"grad_norm": 0.2915102541446686, |
|
"learning_rate": 0.0004991444955839447, |
|
"loss": 1.9736, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.05634765625, |
|
"grad_norm": 0.28996148705482483, |
|
"learning_rate": 0.0004991382890131955, |
|
"loss": 2.002, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.0564453125, |
|
"grad_norm": 0.3859401345252991, |
|
"learning_rate": 0.0004991320600530168, |
|
"loss": 1.9753, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.05654296875, |
|
"grad_norm": 0.28169432282447815, |
|
"learning_rate": 0.0004991258087040312, |
|
"loss": 1.9798, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.056640625, |
|
"grad_norm": 0.30669984221458435, |
|
"learning_rate": 0.0004991195349668626, |
|
"loss": 2.0007, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.05673828125, |
|
"grad_norm": 0.26832297444343567, |
|
"learning_rate": 0.000499113238842138, |
|
"loss": 2.0146, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.0568359375, |
|
"grad_norm": 0.25695785880088806, |
|
"learning_rate": 0.0004991069203304865, |
|
"loss": 1.9814, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.05693359375, |
|
"grad_norm": 0.25700533390045166, |
|
"learning_rate": 0.0004991005794325389, |
|
"loss": 1.9623, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.05703125, |
|
"grad_norm": 0.20786869525909424, |
|
"learning_rate": 0.0004990942161489288, |
|
"loss": 1.9728, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.05712890625, |
|
"grad_norm": 0.23988646268844604, |
|
"learning_rate": 0.0004990878304802918, |
|
"loss": 1.9749, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.0572265625, |
|
"grad_norm": 0.2418268769979477, |
|
"learning_rate": 0.0004990814224272658, |
|
"loss": 1.928, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.05732421875, |
|
"grad_norm": 0.27739882469177246, |
|
"learning_rate": 0.0004990749919904909, |
|
"loss": 1.964, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.057421875, |
|
"grad_norm": 0.2773842513561249, |
|
"learning_rate": 0.0004990685391706094, |
|
"loss": 1.9589, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.05751953125, |
|
"grad_norm": 0.24483682215213776, |
|
"learning_rate": 0.0004990620639682659, |
|
"loss": 1.9965, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.0576171875, |
|
"grad_norm": 0.27345889806747437, |
|
"learning_rate": 0.0004990555663841071, |
|
"loss": 1.9444, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.05771484375, |
|
"grad_norm": 0.26006460189819336, |
|
"learning_rate": 0.0004990490464187824, |
|
"loss": 1.9391, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.0578125, |
|
"grad_norm": 0.1935306191444397, |
|
"learning_rate": 0.0004990425040729427, |
|
"loss": 1.9679, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.05791015625, |
|
"grad_norm": 0.26823100447654724, |
|
"learning_rate": 0.0004990359393472418, |
|
"loss": 2.0055, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.0580078125, |
|
"grad_norm": 0.2706959545612335, |
|
"learning_rate": 0.0004990293522423352, |
|
"loss": 1.9853, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.05810546875, |
|
"grad_norm": 0.2099694162607193, |
|
"learning_rate": 0.0004990227427588811, |
|
"loss": 1.959, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.058203125, |
|
"grad_norm": 0.22345881164073944, |
|
"learning_rate": 0.0004990161108975398, |
|
"loss": 1.9867, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.05830078125, |
|
"grad_norm": 0.34050315618515015, |
|
"learning_rate": 0.0004990094566589734, |
|
"loss": 1.9555, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.0583984375, |
|
"grad_norm": 0.5298358798027039, |
|
"learning_rate": 0.0004990027800438468, |
|
"loss": 1.9824, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.05849609375, |
|
"grad_norm": 0.6408056616783142, |
|
"learning_rate": 0.0004989960810528271, |
|
"loss": 1.9647, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.05859375, |
|
"grad_norm": 0.4058885872364044, |
|
"learning_rate": 0.0004989893596865833, |
|
"loss": 1.9564, |
|
"step": 600 |
|
} |
|
  ],
  "logging_steps": 1.0,
  "max_steps": 10240,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.673503567131443e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}