{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.051152203381160645,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.00010230440676232128, "grad_norm": 0.6171875, "learning_rate": 1.0000000000000002e-06, "loss": 1.751, "step": 1 },
    { "epoch": 0.00020460881352464257, "grad_norm": 0.6484375, "learning_rate": 2.0000000000000003e-06, "loss": 1.8722, "step": 2 },
    { "epoch": 0.0003069132202869639, "grad_norm": 0.57421875, "learning_rate": 3e-06, "loss": 1.5552, "step": 3 },
    { "epoch": 0.00040921762704928513, "grad_norm": 0.64453125, "learning_rate": 4.000000000000001e-06, "loss": 1.8481, "step": 4 },
    { "epoch": 0.0005115220338116064, "grad_norm": 0.65625, "learning_rate": 5e-06, "loss": 1.6085, "step": 5 },
    { "epoch": 0.0006138264405739278, "grad_norm": 0.6484375, "learning_rate": 6e-06, "loss": 1.8854, "step": 6 },
    { "epoch": 0.0007161308473362491, "grad_norm": 0.56640625, "learning_rate": 7.000000000000001e-06, "loss": 1.5885, "step": 7 },
    { "epoch": 0.0008184352540985703, "grad_norm": 0.65234375, "learning_rate": 8.000000000000001e-06, "loss": 1.6188, "step": 8 },
    { "epoch": 0.0009207396608608916, "grad_norm": 0.58984375, "learning_rate": 9e-06, "loss": 1.7513, "step": 9 },
    { "epoch": 0.0010230440676232129, "grad_norm": 0.640625, "learning_rate": 1e-05, "loss": 1.847, "step": 10 },
    { "epoch": 0.001125348474385534, "grad_norm": 0.56640625, "learning_rate": 1.1000000000000001e-05, "loss": 1.5958, "step": 11 },
    { "epoch": 0.0012276528811478555, "grad_norm": 0.55078125, "learning_rate": 1.2e-05, "loss": 1.7284, "step": 12 },
    { "epoch": 0.0013299572879101767, "grad_norm": 0.58203125, "learning_rate": 1.3000000000000001e-05, "loss": 1.6829, "step": 13 },
    { "epoch": 0.0014322616946724981, "grad_norm": 0.6484375, "learning_rate": 1.4000000000000001e-05, "loss": 1.8158, "step": 14 },
    { "epoch": 0.0015345661014348193, "grad_norm": 0.671875, "learning_rate": 1.5e-05, "loss": 1.9778, "step": 15 },
    { "epoch": 0.0016368705081971405, "grad_norm": 0.609375, "learning_rate": 1.6000000000000003e-05, "loss": 1.7056, "step": 16 },
    { "epoch": 0.001739174914959462, "grad_norm": 0.65625, "learning_rate": 1.7000000000000003e-05, "loss": 1.8864, "step": 17 },
    { "epoch": 0.0018414793217217831, "grad_norm": 0.6484375, "learning_rate": 1.8e-05, "loss": 1.7853, "step": 18 },
    { "epoch": 0.0019437837284841043, "grad_norm": 0.5859375, "learning_rate": 1.9e-05, "loss": 1.7343, "step": 19 },
    { "epoch": 0.0020460881352464258, "grad_norm": 0.6796875, "learning_rate": 2e-05, "loss": 1.9046, "step": 20 },
    { "epoch": 0.002148392542008747, "grad_norm": 0.609375, "learning_rate": 2.1e-05, "loss": 1.8652, "step": 21 },
    { "epoch": 0.002250696948771068, "grad_norm": 0.6171875, "learning_rate": 2.2000000000000003e-05, "loss": 1.916, "step": 22 },
    { "epoch": 0.0023530013555333896, "grad_norm": 0.63671875, "learning_rate": 2.3000000000000003e-05, "loss": 1.6101, "step": 23 },
    { "epoch": 0.002455305762295711, "grad_norm": 0.59375, "learning_rate": 2.4e-05, "loss": 1.7165, "step": 24 },
    { "epoch": 0.002557610169058032, "grad_norm": 0.6171875, "learning_rate": 2.5e-05, "loss": 1.9596, "step": 25 },
    { "epoch": 0.0026599145758203534, "grad_norm": 0.7109375, "learning_rate": 2.6000000000000002e-05, "loss": 2.0135, "step": 26 },
    { "epoch": 0.002762218982582675, "grad_norm": 0.59375, "learning_rate": 2.7000000000000002e-05, "loss": 1.8613, "step": 27 },
    { "epoch": 0.0028645233893449962, "grad_norm": 0.58203125, "learning_rate": 2.8000000000000003e-05, "loss": 1.6937, "step": 28 },
    { "epoch": 0.0029668277961073172, "grad_norm": 0.6953125, "learning_rate": 2.9e-05, "loss": 1.8989, "step": 29 },
    { "epoch": 0.0030691322028696386, "grad_norm": 0.625, "learning_rate": 3e-05, "loss": 1.9366, "step": 30 },
    { "epoch": 0.00317143660963196, "grad_norm": 0.62109375, "learning_rate": 3.1e-05, "loss": 1.8176, "step": 31 },
    { "epoch": 0.003273741016394281, "grad_norm": 0.7109375, "learning_rate": 3.2000000000000005e-05, "loss": 1.965, "step": 32 },
    { "epoch": 0.0033760454231566025, "grad_norm": 0.6484375, "learning_rate": 3.3e-05, "loss": 1.8058, "step": 33 },
    { "epoch": 0.003478349829918924, "grad_norm": 0.5859375, "learning_rate": 3.4000000000000007e-05, "loss": 1.8037, "step": 34 },
    { "epoch": 0.003580654236681245, "grad_norm": 0.6796875, "learning_rate": 3.5e-05, "loss": 1.9695, "step": 35 },
    { "epoch": 0.0036829586434435663, "grad_norm": 0.609375, "learning_rate": 3.6e-05, "loss": 1.7983, "step": 36 },
    { "epoch": 0.0037852630502058877, "grad_norm": 0.61328125, "learning_rate": 3.7e-05, "loss": 1.8376, "step": 37 },
    { "epoch": 0.0038875674569682087, "grad_norm": 0.56640625, "learning_rate": 3.8e-05, "loss": 1.6842, "step": 38 },
    { "epoch": 0.0039898718637305305, "grad_norm": 0.546875, "learning_rate": 3.9000000000000006e-05, "loss": 1.57, "step": 39 },
    { "epoch": 0.0040921762704928515, "grad_norm": 0.56640625, "learning_rate": 4e-05, "loss": 1.8307, "step": 40 },
    { "epoch": 0.0041944806772551725, "grad_norm": 0.5234375, "learning_rate": 4.1e-05, "loss": 1.6629, "step": 41 },
    { "epoch": 0.004296785084017494, "grad_norm": 0.6015625, "learning_rate": 4.2e-05, "loss": 1.801, "step": 42 },
    { "epoch": 0.004399089490779815, "grad_norm": 0.546875, "learning_rate": 4.3e-05, "loss": 1.7171, "step": 43 },
    { "epoch": 0.004501393897542136, "grad_norm": 0.61328125, "learning_rate": 4.4000000000000006e-05, "loss": 1.7683, "step": 44 },
    { "epoch": 0.004603698304304458, "grad_norm": 0.5390625, "learning_rate": 4.5e-05, "loss": 1.6179, "step": 45 },
    { "epoch": 0.004706002711066779, "grad_norm": 0.58984375, "learning_rate": 4.600000000000001e-05, "loss": 1.8107, "step": 46 },
    { "epoch": 0.0048083071178291, "grad_norm": 0.55859375, "learning_rate": 4.7e-05, "loss": 1.9287, "step": 47 },
    { "epoch": 0.004910611524591422, "grad_norm": 0.56640625, "learning_rate": 4.8e-05, "loss": 1.8884, "step": 48 },
    { "epoch": 0.005012915931353743, "grad_norm": 0.5625, "learning_rate": 4.9e-05, "loss": 1.6786, "step": 49 },
    { "epoch": 0.005115220338116064, "grad_norm": 0.5703125, "learning_rate": 5e-05, "loss": 1.848, "step": 50 },
    { "epoch": 0.005217524744878386, "grad_norm": 0.55078125, "learning_rate": 5.1000000000000006e-05, "loss": 1.6701, "step": 51 },
    { "epoch": 0.005319829151640707, "grad_norm": 0.57421875, "learning_rate": 5.2000000000000004e-05, "loss": 1.7677, "step": 52 },
    { "epoch": 0.005422133558403028, "grad_norm": 0.51171875, "learning_rate": 5.300000000000001e-05, "loss": 1.5958, "step": 53 },
    { "epoch": 0.00552443796516535, "grad_norm": 0.59765625, "learning_rate": 5.4000000000000005e-05, "loss": 1.794, "step": 54 },
    { "epoch": 0.005626742371927671, "grad_norm": 0.50390625, "learning_rate": 5.500000000000001e-05, "loss": 1.7429, "step": 55 },
    { "epoch": 0.0057290467786899925, "grad_norm": 0.61328125, "learning_rate": 5.6000000000000006e-05, "loss": 1.898, "step": 56 },
    { "epoch": 0.0058313511854523135, "grad_norm": 0.578125, "learning_rate": 5.6999999999999996e-05, "loss": 1.9033, "step": 57 },
    { "epoch": 0.0059336555922146345, "grad_norm": 0.54296875, "learning_rate": 5.8e-05, "loss": 1.7433, "step": 58 },
    { "epoch": 0.006035959998976956, "grad_norm": 0.51171875, "learning_rate": 5.9e-05, "loss": 1.7133, "step": 59 },
    { "epoch": 0.006138264405739277, "grad_norm": 0.546875, "learning_rate": 6e-05, "loss": 1.8123, "step": 60 },
    { "epoch": 0.006240568812501598, "grad_norm": 0.4921875, "learning_rate": 6.1e-05, "loss": 1.604, "step": 61 },
    { "epoch": 0.00634287321926392, "grad_norm": 0.55078125, "learning_rate": 6.2e-05, "loss": 1.7869, "step": 62 },
    { "epoch": 0.006445177626026241, "grad_norm": 0.439453125, "learning_rate": 6.3e-05, "loss": 1.559, "step": 63 },
    { "epoch": 0.006547482032788562, "grad_norm": 0.466796875, "learning_rate": 6.400000000000001e-05, "loss": 1.7037, "step": 64 },
    { "epoch": 0.006649786439550884, "grad_norm": 0.6328125, "learning_rate": 6.500000000000001e-05, "loss": 1.7831, "step": 65 },
    { "epoch": 0.006752090846313205, "grad_norm": 0.51953125, "learning_rate": 6.6e-05, "loss": 1.6768, "step": 66 },
    { "epoch": 0.006854395253075526, "grad_norm": 0.5390625, "learning_rate": 6.7e-05, "loss": 1.8762, "step": 67 },
    { "epoch": 0.006956699659837848, "grad_norm": 0.55859375, "learning_rate": 6.800000000000001e-05, "loss": 1.8037, "step": 68 },
    { "epoch": 0.007059004066600169, "grad_norm": 0.52734375, "learning_rate": 6.9e-05, "loss": 1.9943, "step": 69 },
    { "epoch": 0.00716130847336249, "grad_norm": 0.4453125, "learning_rate": 7e-05, "loss": 1.6346, "step": 70 },
    { "epoch": 0.007263612880124812, "grad_norm": 0.52734375, "learning_rate": 7.1e-05, "loss": 1.7755, "step": 71 },
    { "epoch": 0.007365917286887133, "grad_norm": 0.494140625, "learning_rate": 7.2e-05, "loss": 1.9162, "step": 72 },
    { "epoch": 0.0074682216936494536, "grad_norm": 0.51171875, "learning_rate": 7.3e-05, "loss": 1.6629, "step": 73 },
    { "epoch": 0.007570526100411775, "grad_norm": 0.51171875, "learning_rate": 7.4e-05, "loss": 1.7918, "step": 74 },
    { "epoch": 0.007672830507174096, "grad_norm": 0.50390625, "learning_rate": 7.500000000000001e-05, "loss": 1.8483, "step": 75 },
    { "epoch": 0.007775134913936417, "grad_norm": 0.458984375, "learning_rate": 7.6e-05, "loss": 1.5213, "step": 76 },
    { "epoch": 0.007877439320698738, "grad_norm": 0.50390625, "learning_rate": 7.7e-05, "loss": 1.7234, "step": 77 },
    { "epoch": 0.007979743727461061, "grad_norm": 0.490234375, "learning_rate": 7.800000000000001e-05, "loss": 1.5943, "step": 78 },
    { "epoch": 0.008082048134223382, "grad_norm": 0.51171875, "learning_rate": 7.900000000000001e-05, "loss": 1.7327, "step": 79 },
    { "epoch": 0.008184352540985703, "grad_norm": 0.431640625, "learning_rate": 8e-05, "loss": 1.6263, "step": 80 },
    { "epoch": 0.008286656947748024, "grad_norm": 0.4765625, "learning_rate": 8.1e-05, "loss": 1.6922, "step": 81 },
    { "epoch": 0.008388961354510345, "grad_norm": 0.48828125, "learning_rate": 8.2e-05, "loss": 1.6937, "step": 82 },
    { "epoch": 0.008491265761272666, "grad_norm": 0.4375, "learning_rate": 8.3e-05, "loss": 1.5954, "step": 83 },
    { "epoch": 0.008593570168034989, "grad_norm": 0.46875, "learning_rate": 8.4e-05, "loss": 1.7655, "step": 84 },
    { "epoch": 0.00869587457479731, "grad_norm": 0.48828125, "learning_rate": 8.5e-05, "loss": 1.8177, "step": 85 },
    { "epoch": 0.00879817898155963, "grad_norm": 0.466796875, "learning_rate": 8.6e-05, "loss": 1.6066, "step": 86 },
    { "epoch": 0.008900483388321952, "grad_norm": 0.4453125, "learning_rate": 8.7e-05, "loss": 1.7382, "step": 87 },
    { "epoch": 0.009002787795084273, "grad_norm": 0.4140625, "learning_rate": 8.800000000000001e-05, "loss": 1.5273, "step": 88 },
    { "epoch": 0.009105092201846595, "grad_norm": 0.419921875, "learning_rate": 8.900000000000001e-05, "loss": 1.633, "step": 89 },
    { "epoch": 0.009207396608608916, "grad_norm": 0.431640625, "learning_rate": 9e-05, "loss": 1.4586, "step": 90 },
    { "epoch": 0.009309701015371237, "grad_norm": 0.412109375, "learning_rate": 9.1e-05, "loss": 1.6444, "step": 91 },
    { "epoch": 0.009412005422133558, "grad_norm": 0.427734375, "learning_rate": 9.200000000000001e-05, "loss": 1.6237, "step": 92 },
    { "epoch": 0.00951430982889588, "grad_norm": 0.498046875, "learning_rate": 9.300000000000001e-05, "loss": 1.5545, "step": 93 },
    { "epoch": 0.0096166142356582, "grad_norm": 0.427734375, "learning_rate": 9.4e-05, "loss": 1.6549, "step": 94 },
    { "epoch": 0.009718918642420523, "grad_norm": 0.423828125, "learning_rate": 9.5e-05, "loss": 1.5817, "step": 95 },
    { "epoch": 0.009821223049182844, "grad_norm": 0.42578125, "learning_rate": 9.6e-05, "loss": 1.617, "step": 96 },
    { "epoch": 0.009923527455945165, "grad_norm": 0.474609375, "learning_rate": 9.7e-05, "loss": 1.7116, "step": 97 },
    { "epoch": 0.010025831862707486, "grad_norm": 0.40234375, "learning_rate": 9.8e-05, "loss": 1.6373, "step": 98 },
    { "epoch": 0.010128136269469807, "grad_norm": 0.37109375, "learning_rate": 9.900000000000001e-05, "loss": 1.5059, "step": 99 },
    { "epoch": 0.010230440676232128, "grad_norm": 0.375, "learning_rate": 0.0001, "loss": 1.4517, "step": 100 },
    { "epoch": 0.01033274508299445, "grad_norm": 0.3828125, "learning_rate": 9.999497487437187e-05, "loss": 1.5179, "step": 101 },
    { "epoch": 0.010435049489756772, "grad_norm": 0.400390625, "learning_rate": 9.998994974874373e-05, "loss": 1.4234, "step": 102 },
    { "epoch": 0.010537353896519093, "grad_norm": 0.416015625, "learning_rate": 9.998492462311558e-05, "loss": 1.7412, "step": 103 },
    { "epoch": 0.010639658303281414, "grad_norm": 1.6953125, "learning_rate": 9.997989949748744e-05, "loss": 1.7474, "step": 104 },
    { "epoch": 0.010741962710043735, "grad_norm": 0.44140625, "learning_rate": 9.99748743718593e-05, "loss": 1.7978, "step": 105 },
    { "epoch": 0.010844267116806056, "grad_norm": 0.423828125, "learning_rate": 9.996984924623116e-05, "loss": 1.5856, "step": 106 },
    { "epoch": 0.010946571523568378, "grad_norm": 0.455078125, "learning_rate": 9.996482412060301e-05, "loss": 1.6459, "step": 107 },
    { "epoch": 0.0110488759303307, "grad_norm": 0.34765625, "learning_rate": 9.995979899497487e-05, "loss": 2.6578, "step": 108 },
    { "epoch": 0.01115118033709302, "grad_norm": 0.41796875, "learning_rate": 9.995477386934674e-05, "loss": 1.5426, "step": 109 },
    { "epoch": 0.011253484743855341, "grad_norm": 0.35546875, "learning_rate": 9.99497487437186e-05, "loss": 1.5997, "step": 110 },
    { "epoch": 0.011355789150617662, "grad_norm": 0.453125, "learning_rate": 9.994472361809045e-05, "loss": 1.6825, "step": 111 },
    { "epoch": 0.011458093557379985, "grad_norm": 0.4140625, "learning_rate": 9.993969849246232e-05, "loss": 1.5822, "step": 112 },
    { "epoch": 0.011560397964142306, "grad_norm": 0.419921875, "learning_rate": 9.993467336683417e-05, "loss": 1.6862, "step": 113 },
    { "epoch": 0.011662702370904627, "grad_norm": 0.41015625, "learning_rate": 9.992964824120603e-05, "loss": 1.4022, "step": 114 },
    { "epoch": 0.011765006777666948, "grad_norm": 0.4609375, "learning_rate": 9.99246231155779e-05, "loss": 1.5491, "step": 115 },
    { "epoch": 0.011867311184429269, "grad_norm": 0.359375, "learning_rate": 9.991959798994976e-05, "loss": 1.5846, "step": 116 },
    { "epoch": 0.01196961559119159, "grad_norm": 0.357421875, "learning_rate": 9.99145728643216e-05, "loss": 1.5171, "step": 117 },
    { "epoch": 0.012071919997953913, "grad_norm": 0.37890625, "learning_rate": 9.990954773869348e-05, "loss": 1.506, "step": 118 },
    { "epoch": 0.012174224404716234, "grad_norm": 0.392578125, "learning_rate": 9.990452261306533e-05, "loss": 1.7183, "step": 119 },
    { "epoch": 0.012276528811478555, "grad_norm": 0.373046875, "learning_rate": 9.989949748743719e-05, "loss": 1.6749, "step": 120 },
    { "epoch": 0.012378833218240876, "grad_norm": 0.333984375, "learning_rate": 9.989447236180905e-05, "loss": 1.6602, "step": 121 },
    { "epoch": 0.012481137625003197, "grad_norm": 0.37109375, "learning_rate": 9.988944723618092e-05, "loss": 1.6384, "step": 122 },
    { "epoch": 0.012583442031765518, "grad_norm": 0.375, "learning_rate": 9.988442211055276e-05, "loss": 1.6169, "step": 123 },
    { "epoch": 0.01268574643852784, "grad_norm": 0.380859375, "learning_rate": 9.987939698492463e-05, "loss": 1.5463, "step": 124 },
    { "epoch": 0.012788050845290161, "grad_norm": 0.392578125, "learning_rate": 9.987437185929649e-05, "loss": 1.6794, "step": 125 },
    { "epoch": 0.012890355252052482, "grad_norm": 0.40625, "learning_rate": 9.986934673366835e-05, "loss": 1.5976, "step": 126 },
    { "epoch": 0.012992659658814803, "grad_norm": 0.388671875, "learning_rate": 9.98643216080402e-05, "loss": 1.5735, "step": 127 },
    { "epoch": 0.013094964065577124, "grad_norm": 0.435546875, "learning_rate": 9.985929648241207e-05, "loss": 1.4446, "step": 128 },
    { "epoch": 0.013197268472339445, "grad_norm": 0.369140625, "learning_rate": 9.985427135678392e-05, "loss": 1.5231, "step": 129 },
    { "epoch": 0.013299572879101768, "grad_norm": 0.3515625, "learning_rate": 9.984924623115578e-05, "loss": 1.6299, "step": 130 },
    { "epoch": 0.013401877285864089, "grad_norm": 0.384765625, "learning_rate": 9.984422110552765e-05, "loss": 1.6611, "step": 131 },
    { "epoch": 0.01350418169262641, "grad_norm": 0.36328125, "learning_rate": 9.983919597989951e-05, "loss": 1.6093, "step": 132 },
    { "epoch": 0.01360648609938873, "grad_norm": 0.435546875, "learning_rate": 9.983417085427136e-05, "loss": 1.5908, "step": 133 },
    { "epoch": 0.013708790506151052, "grad_norm": 0.3671875, "learning_rate": 9.982914572864322e-05, "loss": 1.5976, "step": 134 },
    { "epoch": 0.013811094912913375, "grad_norm": 0.34765625, "learning_rate": 9.982412060301508e-05, "loss": 1.4695, "step": 135 },
    { "epoch": 0.013913399319675696, "grad_norm": 0.345703125, "learning_rate": 9.981909547738694e-05, "loss": 1.4971, "step": 136 },
    { "epoch": 0.014015703726438017, "grad_norm": 0.37109375, "learning_rate": 9.98140703517588e-05, "loss": 1.6059, "step": 137 },
    { "epoch": 0.014118008133200338, "grad_norm": 0.333984375, "learning_rate": 9.980904522613065e-05, "loss": 1.5415, "step": 138 },
    { "epoch": 0.014220312539962658, "grad_norm": 0.404296875, "learning_rate": 9.980402010050252e-05, "loss": 1.6073, "step": 139 },
    { "epoch": 0.01432261694672498, "grad_norm": 0.359375, "learning_rate": 9.979899497487438e-05, "loss": 1.4738, "step": 140 },
    { "epoch": 0.014424921353487302, "grad_norm": 0.3515625, "learning_rate": 9.979396984924624e-05, "loss": 1.6089, "step": 141 },
    { "epoch": 0.014527225760249623, "grad_norm": 0.34765625, "learning_rate": 9.978894472361809e-05, "loss": 1.5486, "step": 142 },
    { "epoch": 0.014629530167011944, "grad_norm": 0.31640625, "learning_rate": 9.978391959798995e-05, "loss": 1.5261, "step": 143 },
    { "epoch": 0.014731834573774265, "grad_norm": 0.3984375, "learning_rate": 9.977889447236181e-05, "loss": 1.726, "step": 144 },
    { "epoch": 0.014834138980536586, "grad_norm": 0.380859375, "learning_rate": 9.977386934673367e-05, "loss": 1.7616, "step": 145 },
    { "epoch": 0.014936443387298907, "grad_norm": 0.3671875, "learning_rate": 9.976884422110552e-05, "loss": 1.8157, "step": 146 },
    { "epoch": 0.01503874779406123, "grad_norm": 0.37109375, "learning_rate": 9.97638190954774e-05, "loss": 1.5536, "step": 147 },
    { "epoch": 0.01514105220082355, "grad_norm": 0.373046875, "learning_rate": 9.975879396984925e-05, "loss": 1.6118, "step": 148 },
    { "epoch": 0.015243356607585872, "grad_norm": 0.357421875, "learning_rate": 9.975376884422111e-05, "loss": 1.7143, "step": 149 },
    { "epoch": 0.015345661014348193, "grad_norm": 0.345703125, "learning_rate": 9.974874371859297e-05, "loss": 1.4682, "step": 150 },
    { "epoch": 0.015447965421110514, "grad_norm": 0.33203125, "learning_rate": 9.974371859296483e-05, "loss": 1.5663, "step": 151 },
    { "epoch": 0.015550269827872835, "grad_norm": 0.353515625, "learning_rate": 9.973869346733668e-05, "loss": 1.6771, "step": 152 },
    { "epoch": 0.015652574234635157, "grad_norm": 0.359375, "learning_rate": 9.973366834170856e-05, "loss": 1.6855, "step": 153 },
    { "epoch": 0.015754878641397477, "grad_norm": 0.47265625, "learning_rate": 9.97286432160804e-05, "loss": 1.7027, "step": 154 },
    { "epoch": 0.0158571830481598, "grad_norm": 0.3359375, "learning_rate": 9.972361809045227e-05, "loss": 1.4748, "step": 155 },
    { "epoch": 0.015959487454922122, "grad_norm": 0.349609375, "learning_rate": 9.971859296482412e-05, "loss": 1.5864, "step": 156 },
    { "epoch": 0.01606179186168444, "grad_norm": 0.33984375, "learning_rate": 9.971356783919599e-05, "loss": 1.5928, "step": 157 },
    { "epoch": 0.016164096268446764, "grad_norm": 0.341796875, "learning_rate": 9.970854271356784e-05, "loss": 1.7351, "step": 158 },
    { "epoch": 0.016266400675209083, "grad_norm": 0.37890625, "learning_rate": 9.97035175879397e-05, "loss": 1.7008, "step": 159 },
    { "epoch": 0.016368705081971406, "grad_norm": 0.34375, "learning_rate": 9.969849246231156e-05, "loss": 1.4992, "step": 160 },
    { "epoch": 0.01647100948873373, "grad_norm": 0.380859375, "learning_rate": 9.969346733668343e-05, "loss": 1.635, "step": 161 },
    { "epoch": 0.016573313895496048, "grad_norm": 0.353515625, "learning_rate": 9.968844221105527e-05, "loss": 1.5781, "step": 162 },
    { "epoch": 0.01667561830225837, "grad_norm": 0.3515625, "learning_rate": 9.968341708542715e-05, "loss": 1.4841, "step": 163 },
    { "epoch": 0.01677792270902069, "grad_norm": 0.3203125, "learning_rate": 9.9678391959799e-05, "loss": 1.3593, "step": 164 },
    { "epoch": 0.016880227115783013, "grad_norm": 0.359375, "learning_rate": 9.967336683417086e-05, "loss": 1.5774, "step": 165 },
    { "epoch": 0.016982531522545332, "grad_norm": 0.326171875, "learning_rate": 9.966834170854272e-05, "loss": 1.5977, "step": 166 },
    { "epoch": 0.017084835929307655, "grad_norm": 0.3359375, "learning_rate": 9.966331658291458e-05, "loss": 1.4401, "step": 167 },
    { "epoch": 0.017187140336069977, "grad_norm": 0.45703125, "learning_rate": 9.965829145728643e-05, "loss": 1.4486, "step": 168 },
    { "epoch": 0.017289444742832297, "grad_norm": 0.3828125, "learning_rate": 9.96532663316583e-05, "loss": 1.5923, "step": 169 },
    { "epoch": 0.01739174914959462, "grad_norm": 0.330078125, "learning_rate": 9.964824120603016e-05, "loss": 1.6353, "step": 170 },
    { "epoch": 0.01749405355635694, "grad_norm": 0.33203125, "learning_rate": 9.964321608040202e-05, "loss": 1.4871, "step": 171 },
    { "epoch": 0.01759635796311926, "grad_norm": 0.36328125, "learning_rate": 9.963819095477387e-05, "loss": 1.6157, "step": 172 },
    { "epoch": 0.017698662369881584, "grad_norm": 0.32421875, "learning_rate": 9.963316582914573e-05, "loss": 1.5152, "step": 173 },
    { "epoch": 0.017800966776643903, "grad_norm": 0.333984375, "learning_rate": 9.962814070351759e-05, "loss": 1.5647, "step": 174 },
    { "epoch": 0.017903271183406226, "grad_norm": 0.359375, "learning_rate": 9.962311557788945e-05, "loss": 1.6144, "step": 175 },
    { "epoch": 0.018005575590168545, "grad_norm": 0.326171875, "learning_rate": 9.961809045226132e-05, "loss": 1.5505, "step": 176 },
    { "epoch": 0.018107879996930868, "grad_norm": 0.318359375, "learning_rate": 9.961306532663316e-05, "loss": 1.4726, "step": 177 },
    { "epoch": 0.01821018440369319, "grad_norm": 0.330078125, "learning_rate": 9.960804020100503e-05, "loss": 1.484, "step": 178 },
    { "epoch": 0.01831248881045551, "grad_norm": 0.34765625, "learning_rate": 9.960301507537689e-05, "loss": 1.3879, "step": 179 },
    { "epoch": 0.018414793217217833, "grad_norm": 0.369140625, "learning_rate": 9.959798994974875e-05, "loss": 1.5664, "step": 180 },
    { "epoch": 0.018517097623980152, "grad_norm": 0.3359375, "learning_rate": 9.95929648241206e-05, "loss": 1.5777, "step": 181 },
    { "epoch": 0.018619402030742475, "grad_norm": 0.353515625, "learning_rate": 9.958793969849247e-05, "loss": 1.4148, "step": 182 },
    { "epoch": 0.018721706437504794, "grad_norm": 0.3671875, "learning_rate": 9.958291457286432e-05, "loss": 1.5065, "step": 183 },
    { "epoch": 0.018824010844267117, "grad_norm": 0.328125, "learning_rate": 9.957788944723619e-05, "loss": 1.5637, "step": 184 },
    { "epoch": 0.01892631525102944, "grad_norm": 0.314453125, "learning_rate": 9.957286432160805e-05, "loss": 1.4061, "step": 185 },
    { "epoch": 0.01902861965779176, "grad_norm": 0.333984375, "learning_rate": 9.956783919597991e-05, "loss": 1.5302, "step": 186 },
    { "epoch": 0.01913092406455408, "grad_norm": 0.330078125, "learning_rate": 9.956281407035176e-05, "loss": 1.5825, "step": 187 },
    { "epoch": 0.0192332284713164, "grad_norm": 0.2890625, "learning_rate": 9.955778894472362e-05, "loss": 1.4345, "step": 188 },
    { "epoch": 0.019335532878078723, "grad_norm": 0.32421875, "learning_rate": 9.955276381909548e-05, "loss": 1.4496, "step": 189 },
    { "epoch": 0.019437837284841046, "grad_norm": 0.326171875, "learning_rate": 9.954773869346734e-05, "loss": 1.5227, "step": 190 },
    { "epoch": 0.019540141691603365, "grad_norm": 0.3203125, "learning_rate": 9.954271356783919e-05, "loss": 1.5828, "step": 191 },
    { "epoch": 0.019642446098365688, "grad_norm": 0.32421875, "learning_rate": 9.953768844221107e-05, "loss": 1.4511, "step": 192 },
    { "epoch": 0.019744750505128007, "grad_norm": 0.34375, "learning_rate": 9.953266331658292e-05, "loss": 1.4965, "step": 193 },
    { "epoch": 0.01984705491189033, "grad_norm": 0.341796875, "learning_rate": 9.952763819095478e-05, "loss": 1.6729, "step": 194 },
    { "epoch": 0.01994935931865265, "grad_norm": 0.33984375, "learning_rate": 9.952261306532664e-05, "loss": 1.7777, "step": 195 },
    { "epoch": 0.020051663725414972, "grad_norm": 0.330078125, "learning_rate": 9.95175879396985e-05, "loss": 1.4149, "step": 196 },
    { "epoch": 0.020153968132177295, "grad_norm": 0.337890625, "learning_rate": 9.951256281407035e-05, "loss": 1.7133, "step": 197 },
    { "epoch": 0.020256272538939614, "grad_norm": 0.3515625, "learning_rate": 9.950753768844223e-05, "loss": 1.5531, "step": 198 },
    { "epoch": 0.020358576945701937, "grad_norm": 0.3671875, "learning_rate": 9.950251256281408e-05, "loss": 1.6573, "step": 199 },
    { "epoch": 0.020460881352464256, "grad_norm": 0.33203125, "learning_rate": 9.949748743718594e-05, "loss": 1.3231, "step": 200 },
    { "epoch": 0.02056318575922658, "grad_norm": 0.337890625, "learning_rate": 9.94924623115578e-05, "loss": 1.7199, "step": 201 },
    { "epoch": 0.0206654901659889, "grad_norm": 0.326171875, "learning_rate": 9.948743718592966e-05, "loss": 1.4348, "step": 202 },
    { "epoch": 0.02076779457275122, "grad_norm": 0.3203125, "learning_rate": 9.948241206030151e-05, "loss": 1.51, "step": 203 },
    { "epoch": 0.020870098979513543, "grad_norm": 0.3125, "learning_rate": 9.947738693467337e-05, "loss": 1.4171, "step": 204 },
    { "epoch": 0.020972403386275863, "grad_norm": 0.326171875, "learning_rate": 9.947236180904523e-05, "loss": 1.5366, "step": 205 },
    { "epoch": 0.021074707793038185, "grad_norm": 0.310546875, "learning_rate": 9.94673366834171e-05, "loss": 1.5415, "step": 206 },
    { "epoch": 0.021177012199800508, "grad_norm": 0.31640625, "learning_rate": 9.946231155778894e-05, "loss": 1.5641, "step": 207 },
    { "epoch": 0.021279316606562827, "grad_norm": 0.31640625, "learning_rate": 9.94572864321608e-05, "loss": 1.5054, "step": 208 },
    { "epoch": 0.02138162101332515, "grad_norm": 0.34375, "learning_rate": 9.945226130653267e-05, "loss": 1.5638, "step": 209 },
    { "epoch": 0.02148392542008747, "grad_norm": 0.318359375, "learning_rate": 9.944723618090453e-05, "loss": 1.4641, "step": 210 },
    { "epoch": 0.021586229826849792, "grad_norm": 0.306640625, "learning_rate": 9.944221105527639e-05, "loss": 1.5075, "step": 211 },
    { "epoch": 0.02168853423361211, "grad_norm": 0.318359375, "learning_rate": 9.943718592964824e-05, "loss": 1.3774, "step": 212 },
    { "epoch": 0.021790838640374434, "grad_norm": 0.326171875, "learning_rate": 9.94321608040201e-05, "loss": 1.5137, "step": 213 },
    { "epoch": 0.021893143047136757, "grad_norm": 0.3203125, "learning_rate": 9.942713567839197e-05, "loss": 1.463, "step": 214 },
    { "epoch": 0.021995447453899076, "grad_norm": 0.3359375, "learning_rate": 9.942211055276383e-05, "loss": 1.5722, "step": 215 },
    { "epoch": 0.0220977518606614, "grad_norm": 0.3203125, "learning_rate": 9.941708542713568e-05, "loss": 1.5695, "step": 216 },
    { "epoch": 0.022200056267423718, "grad_norm": 0.306640625, "learning_rate": 9.941206030150754e-05, "loss": 1.4522, "step": 217 },
    { "epoch": 0.02230236067418604, "grad_norm": 0.314453125, "learning_rate": 9.94070351758794e-05, "loss": 1.5649, "step": 218 },
    { "epoch": 0.022404665080948363, "grad_norm": 0.30078125, "learning_rate": 9.940201005025126e-05, "loss": 1.3819, "step": 219 },
    { "epoch": 0.022506969487710683, "grad_norm": 0.31640625, "learning_rate": 9.939698492462311e-05, "loss": 1.4209, "step": 220 },
    { "epoch": 0.022609273894473005, "grad_norm": 0.3125, "learning_rate": 9.939195979899499e-05, "loss": 1.5313, "step": 221 },
    { "epoch": 0.022711578301235325, "grad_norm": 0.294921875, "learning_rate": 9.938693467336683e-05, "loss": 1.6329, "step": 222 },
    { "epoch": 0.022813882707997647, "grad_norm": 0.294921875, "learning_rate": 9.93819095477387e-05, "loss": 1.5602, "step": 223 },
    { "epoch": 0.02291618711475997, "grad_norm": 0.30859375, "learning_rate": 9.937688442211056e-05, "loss": 1.4092, "step": 224 },
    { "epoch": 0.02301849152152229, "grad_norm": 0.306640625, "learning_rate": 9.937185929648242e-05, "loss": 1.507, "step": 225 },
    { "epoch": 0.023120795928284612, "grad_norm": 0.3125, "learning_rate": 9.936683417085427e-05, "loss": 1.3996, "step": 226 },
    { "epoch": 0.02322310033504693, "grad_norm": 0.294921875, "learning_rate": 9.936180904522614e-05, "loss": 1.3146, "step": 227 },
    { "epoch": 0.023325404741809254, "grad_norm": 0.30859375, "learning_rate": 9.935678391959799e-05, "loss": 1.4642, "step": 228 },
    { "epoch": 0.023427709148571573, "grad_norm": 0.29296875, "learning_rate": 9.935175879396985e-05, "loss": 1.5534, "step": 229 },
    { "epoch": 0.023530013555333896, "grad_norm": 0.294921875, "learning_rate": 9.934673366834172e-05, "loss": 1.5299, "step": 230 },
    { "epoch": 0.02363231796209622, "grad_norm": 0.31640625, "learning_rate": 9.934170854271358e-05, "loss": 1.6027, "step": 231 },
    { "epoch": 0.023734622368858538, "grad_norm": 0.3046875, "learning_rate": 9.933668341708543e-05, "loss": 1.3886, "step": 232 },
    { "epoch": 0.02383692677562086, "grad_norm": 0.337890625, "learning_rate": 9.933165829145729e-05, "loss": 1.513, "step": 233 },
    { "epoch": 0.02393923118238318, "grad_norm": 0.3203125, "learning_rate": 9.932663316582915e-05, "loss": 1.5952, "step": 234 },
    { "epoch": 0.024041535589145503, "grad_norm": 0.330078125, "learning_rate": 9.932160804020101e-05, "loss": 1.4902, "step": 235 },
    { "epoch": 0.024143839995907825, "grad_norm": 0.314453125, "learning_rate": 9.931658291457286e-05, "loss": 1.5195, "step": 236 },
    { "epoch": 0.024246144402670144, "grad_norm": 0.376953125, "learning_rate": 9.931155778894474e-05, "loss": 1.5003, "step": 237 },
    { "epoch": 0.024348448809432467, "grad_norm": 0.306640625, "learning_rate": 9.930653266331659e-05, "loss": 1.6125, "step": 238 },
    { "epoch": 0.024450753216194786, "grad_norm": 0.3359375, "learning_rate": 9.930150753768845e-05, "loss": 1.4048, "step": 239 },
    { "epoch": 0.02455305762295711, "grad_norm": 0.3359375, "learning_rate": 9.929648241206031e-05, "loss": 1.404, "step": 240 },
    { "epoch": 0.024655362029719432, "grad_norm": 0.30859375, "learning_rate": 9.929145728643217e-05, "loss": 1.5336, "step": 241 },
    { "epoch": 0.02475766643648175, "grad_norm": 0.296875, "learning_rate": 9.928643216080402e-05, "loss": 1.5893, "step": 242 },
    { "epoch": 0.024859970843244074, "grad_norm": 0.29296875, "learning_rate": 9.928140703517588e-05, "loss": 1.4552, "step": 243 },
    { "epoch": 0.024962275250006393, "grad_norm": 0.31640625, "learning_rate": 9.927638190954774e-05, "loss": 1.5381, "step": 244 },
    { "epoch": 0.025064579656768716, "grad_norm": 0.283203125, "learning_rate": 9.927135678391961e-05, "loss": 1.4568, "step": 245 },
    { "epoch": 0.025166884063531035, "grad_norm": 0.275390625, "learning_rate": 9.926633165829147e-05, "loss": 1.3433, "step": 246 },
    { "epoch": 0.025269188470293358, "grad_norm": 0.30078125, "learning_rate": 9.926130653266332e-05, "loss": 1.4223, "step": 247 },
    { "epoch": 0.02537149287705568, "grad_norm": 0.310546875, "learning_rate": 9.925628140703518e-05, "loss": 1.6224, "step": 248 },
    { "epoch": 0.025473797283818, "grad_norm": 0.298828125, "learning_rate": 9.925125628140703e-05, "loss": 1.4886, "step": 249 },
    { "epoch": 0.025576101690580322, "grad_norm": 0.32421875, "learning_rate": 9.92462311557789e-05, "loss": 1.3823, "step": 250 },
    { "epoch": 0.02567840609734264, "grad_norm": 0.322265625, "learning_rate": 9.924120603015075e-05, "loss": 1.6174, "step": 251 },
    { "epoch": 0.025780710504104964, "grad_norm": 0.31640625, "learning_rate": 9.923618090452261e-05, "loss": 1.6667, "step": 252 },
    { "epoch": 0.025883014910867287, "grad_norm": 0.30859375, "learning_rate": 9.923115577889448e-05, "loss": 1.5771, "step": 253 },
    { "epoch": 0.025985319317629606, "grad_norm": 0.2890625, "learning_rate": 9.922613065326634e-05, "loss": 1.3376, "step": 254 },
    { "epoch": 0.02608762372439193, "grad_norm": 0.349609375, "learning_rate": 9.922110552763819e-05, "loss": 1.621, "step": 255 },
    { "epoch": 0.02618992813115425, "grad_norm": 0.27734375, "learning_rate": 9.921608040201006e-05, "loss": 1.413, "step": 256 },
    { "epoch": 0.02629223253791657, "grad_norm": 0.294921875, "learning_rate": 9.921105527638191e-05, "loss": 1.6237, "step": 257 },
    { "epoch": 0.02639453694467889, "grad_norm": 0.349609375, "learning_rate": 9.920603015075377e-05, "loss": 1.459, "step": 258 },
    { "epoch": 0.026496841351441213, "grad_norm": 0.306640625, "learning_rate": 9.920100502512563e-05, "loss": 1.5026, "step": 259 },
    { "epoch": 0.026599145758203536, "grad_norm": 0.314453125, "learning_rate": 9.91959798994975e-05, "loss": 1.517, "step": 260 },
    { "epoch": 0.026701450164965855, "grad_norm": 0.373046875, "learning_rate": 9.919095477386935e-05, "loss": 1.6779, "step": 261 },
    { "epoch": 0.026803754571728178, "grad_norm": 0.322265625, "learning_rate": 9.918592964824122e-05, "loss": 1.4558, "step": 262 },
    { "epoch": 0.026906058978490497, "grad_norm": 0.287109375, "learning_rate": 9.918090452261307e-05, "loss": 1.451, "step": 263 },
    { "epoch": 0.02700836338525282, "grad_norm": 0.28515625, "learning_rate": 9.917587939698493e-05, "loss": 1.538, "step": 264 },
    { "epoch": 0.027110667792015142, "grad_norm": 0.294921875, "learning_rate": 9.917085427135678e-05, "loss": 1.4029, "step": 265 },
    { "epoch": 0.02721297219877746, "grad_norm": 0.302734375, "learning_rate": 9.916582914572866e-05, "loss": 1.6634, "step": 266 },
    { "epoch": 0.027315276605539784, "grad_norm": 0.31640625, "learning_rate": 9.91608040201005e-05, "loss": 1.4518, "step": 267 },
    { "epoch": 0.027417581012302104, "grad_norm": 0.322265625, "learning_rate": 9.915577889447237e-05, "loss": 1.5737, "step": 268 },
    { "epoch": 0.027519885419064426, "grad_norm": 0.310546875, "learning_rate": 9.915075376884423e-05, "loss": 1.4141, "step": 269 },
    { "epoch": 0.02762218982582675, "grad_norm": 0.3046875, "learning_rate": 9.914572864321609e-05, "loss": 1.6944, "step": 270 },
    { "epoch": 0.02772449423258907, "grad_norm": 0.40234375, "learning_rate": 9.914070351758794e-05, "loss": 1.6906, "step": 271 },
    { "epoch": 0.02782679863935139, "grad_norm": 0.322265625, "learning_rate": 9.913567839195981e-05, "loss": 1.7094, "step": 272 },
    { "epoch": 0.02792910304611371, "grad_norm": 0.3125, "learning_rate": 9.913065326633166e-05, "loss": 1.5747, "step": 273 },
    { "epoch": 0.028031407452876033, "grad_norm": 0.326171875, "learning_rate": 9.912562814070352e-05, "loss": 1.508, "step": 274 },
    { "epoch": 0.028133711859638352, "grad_norm": 0.287109375, "learning_rate": 9.912060301507539e-05, "loss": 1.4204, "step": 275 },
    { "epoch": 0.028236016266400675, "grad_norm": 0.3046875, "learning_rate": 9.911557788944725e-05, "loss": 1.3767, "step": 276 },
    { "epoch": 0.028338320673162998, "grad_norm": 0.296875, "learning_rate": 9.91105527638191e-05, "loss": 1.5665, "step": 277 },
    { "epoch": 0.028440625079925317, "grad_norm": 0.287109375, "learning_rate": 9.910552763819096e-05, "loss": 1.4584, "step": 278 },
    { "epoch": 0.02854292948668764, "grad_norm": 0.275390625, "learning_rate": 9.910050251256282e-05, "loss": 1.5575, "step": 279 },
    { "epoch": 0.02864523389344996, "grad_norm": 0.29296875, "learning_rate": 9.909547738693468e-05, "loss": 1.6704, "step": 280 },
    { "epoch": 0.02874753830021228, "grad_norm": 0.310546875, "learning_rate": 9.909045226130653e-05, "loss": 1.6166, "step": 281 },
    { "epoch": 0.028849842706974604, "grad_norm": 0.341796875, "learning_rate": 9.90854271356784e-05, "loss": 1.5372, "step": 282 },
    { "epoch": 0.028952147113736924, "grad_norm": 0.314453125, "learning_rate": 9.908040201005026e-05, "loss": 1.5092, "step": 283 },
    { "epoch": 0.029054451520499246, "grad_norm": 0.302734375, "learning_rate": 9.90753768844221e-05, "loss": 1.5686, "step": 284 },
    { "epoch": 0.029156755927261566, "grad_norm": 0.30859375, "learning_rate": 9.907035175879398e-05, "loss": 1.4031, "step": 285 },
    { "epoch": 0.02925906033402389, "grad_norm": 0.294921875, "learning_rate": 9.906532663316583e-05, "loss": 1.4016, "step": 286 },
    { "epoch": 0.02936136474078621, "grad_norm": 0.294921875, "learning_rate": 9.906030150753769e-05, "loss": 1.4951, "step": 287 },
    { "epoch": 0.02946366914754853, "grad_norm": 0.3203125, "learning_rate": 9.905527638190955e-05, "loss": 1.4838, "step": 288 },
    { "epoch": 0.029565973554310853, "grad_norm": 0.3359375, "learning_rate": 9.905025125628141e-05, "loss": 1.6449, "step": 289 },
    { "epoch": 0.029668277961073172, "grad_norm": 0.302734375, "learning_rate": 9.904522613065326e-05, "loss": 1.6668, "step": 290 },
    { "epoch": 0.029770582367835495, "grad_norm": 0.283203125, "learning_rate": 9.904020100502514e-05, "loss": 1.4133, "step": 291 },
    { "epoch": 0.029872886774597814, "grad_norm": 0.296875, "learning_rate": 9.903517587939699e-05, "loss": 1.5274, "step": 292 },
    { "epoch": 0.029975191181360137, "grad_norm": 0.28515625, "learning_rate": 9.903015075376885e-05, "loss": 1.4883, "step": 293 },
    { "epoch": 0.03007749558812246, "grad_norm": 0.32421875, "learning_rate": 9.902512562814071e-05, "loss": 1.5292, "step": 294 },
    { "epoch": 0.03017979999488478, "grad_norm": 0.390625, "learning_rate": 9.902010050251257e-05, "loss": 1.5329, "step": 295 },
    { "epoch": 0.0302821044016471, "grad_norm": 0.29296875, "learning_rate": 9.901507537688442e-05, "loss": 1.5716, "step": 296 },
    { "epoch": 0.03038440880840942, "grad_norm": 0.310546875, "learning_rate": 9.901005025125628e-05, "loss": 1.6152, "step": 297 },
    { "epoch": 0.030486713215171744, "grad_norm": 0.287109375, "learning_rate": 9.900502512562815e-05, "loss": 1.5215, "step": 298 },
    { "epoch": 0.030589017621934066, "grad_norm": 0.26171875, "learning_rate": 9.900000000000001e-05, "loss": 1.4974, "step": 299 },
    { "epoch": 0.030691322028696386, "grad_norm": 0.294921875, "learning_rate": 9.899497487437186e-05, "loss": 1.429, "step": 300 },
    { "epoch": 0.03079362643545871, "grad_norm": 0.31640625, "learning_rate": 9.898994974874373e-05, "loss": 1.6325, "step": 301 },
    { "epoch": 0.030895930842221028, "grad_norm": 0.26171875, "learning_rate": 9.898492462311558e-05, "loss": 1.3455, "step": 302 },
    { "epoch": 0.03099823524898335, "grad_norm": 0.267578125, "learning_rate": 9.897989949748744e-05, "loss": 1.3542, "step": 303 },
    { "epoch": 0.03110053965574567, "grad_norm": 0.32421875, "learning_rate": 9.89748743718593e-05, "loss": 1.6288, "step": 304 },
    { "epoch": 0.031202844062507992, "grad_norm": 0.287109375, "learning_rate": 9.896984924623117e-05, "loss": 1.567, "step": 305 },
    { "epoch": 0.031305148469270315, "grad_norm": 0.287109375, "learning_rate": 9.896482412060301e-05, "loss": 1.4221, "step": 306 },
    { "epoch": 0.03140745287603264, "grad_norm": 0.3125, "learning_rate": 9.895979899497489e-05, "loss": 1.6211, "step": 307 },
    { "epoch": 0.031509757282794953, "grad_norm": 0.279296875, "learning_rate": 9.895477386934674e-05, "loss": 1.5028, "step": 308 },
    { "epoch": 0.031612061689557276, "grad_norm": 0.353515625, "learning_rate": 9.89497487437186e-05, "loss": 1.4722, "step": 309 },
    { "epoch": 0.0317143660963196, "grad_norm": 0.29296875, "learning_rate": 9.894472361809046e-05, "loss": 1.4819, "step": 310 },
    { "epoch": 0.03181667050308192, "grad_norm": 0.427734375, "learning_rate": 9.893969849246232e-05, "loss": 1.4361, "step": 311 },
    { "epoch": 0.031918974909844244, "grad_norm": 0.27734375, "learning_rate": 9.893467336683417e-05, "loss": 1.3847, "step": 312 },
    { "epoch": 0.03202127931660656, "grad_norm": 0.30859375, "learning_rate": 9.892964824120604e-05, "loss": 1.6572, "step": 313 },
    { "epoch": 0.03212358372336888, "grad_norm": 0.27734375, "learning_rate": 9.89246231155779e-05, "loss": 1.4203, "step": 314 },
    { "epoch": 0.032225888130131206, "grad_norm": 0.291015625, "learning_rate": 9.891959798994975e-05, "loss": 1.5494, "step": 315 },
    { "epoch": 0.03232819253689353, "grad_norm": 0.27734375, "learning_rate": 9.891457286432161e-05, "loss": 1.535, "step": 316 },
    { "epoch": 0.03243049694365585, "grad_norm": 0.275390625, "learning_rate": 9.890954773869347e-05, "loss": 1.4293, "step": 317 },
    { "epoch": 0.03253280135041817, "grad_norm": 0.296875, "learning_rate": 9.890452261306533e-05, "loss": 1.5293, "step": 318 },
    { "epoch": 0.03263510575718049, "grad_norm": 0.259765625, "learning_rate": 9.889949748743718e-05, "loss": 1.321, "step": 319 },
    { "epoch": 0.03273741016394281, "grad_norm": 0.2890625, "learning_rate": 9.889447236180906e-05, "loss": 1.3901, "step": 320 },
    { "epoch": 0.032839714570705135, "grad_norm": 0.3046875, "learning_rate": 9.88894472361809e-05, "loss": 1.6575, "step": 321 },
    { "epoch": 0.03294201897746746, "grad_norm": 0.287109375, "learning_rate": 9.888442211055277e-05, "loss": 1.3796, "step": 322 },
    { "epoch": 0.03304432338422977, "grad_norm": 0.330078125, "learning_rate": 9.887939698492463e-05, "loss": 1.5548, "step": 323 },
    { "epoch": 0.033146627790992096, "grad_norm": 0.291015625, "learning_rate": 9.887437185929649e-05, "loss": 1.385, "step": 324 },
    { "epoch": 0.03324893219775442, "grad_norm": 0.283203125, "learning_rate": 9.886934673366834e-05, "loss": 1.5527, "step": 325 },
    { "epoch": 0.03335123660451674, "grad_norm": 0.302734375, "learning_rate": 9.886432160804021e-05, "loss": 1.546, "step": 326 },
    { "epoch": 0.03345354101127906, "grad_norm": 0.28515625, "learning_rate": 9.885929648241206e-05, "loss": 1.4091, "step": 327 },
    { "epoch": 0.03355584541804138, "grad_norm": 0.2890625, "learning_rate": 9.885427135678393e-05, "loss": 1.4189, "step": 328 },
    { "epoch": 0.0336581498248037, "grad_norm": 0.28515625, "learning_rate": 9.884924623115577e-05, "loss": 1.5757, "step": 329 },
    { "epoch": 0.033760454231566026, "grad_norm": 0.302734375, "learning_rate": 9.884422110552765e-05, "loss": 1.4861, "step": 330 },
    { "epoch": 0.03386275863832835, "grad_norm": 0.283203125, "learning_rate": 9.88391959798995e-05, "loss": 1.4099, "step": 331 },
    { "epoch": 0.033965063045090664, "grad_norm": 0.275390625, "learning_rate": 9.883417085427136e-05, "loss": 1.4004, "step": 332 },
    { "epoch": 0.03406736745185299, "grad_norm": 0.314453125, "learning_rate": 9.882914572864322e-05, "loss": 1.5382, "step": 333 },
    { "epoch": 0.03416967185861531, "grad_norm": 0.34765625, "learning_rate": 9.882412060301508e-05, "loss": 1.5474, "step": 334 },
    { "epoch": 0.03427197626537763, "grad_norm": 0.259765625, "learning_rate": 9.881909547738693e-05, "loss": 2.0222, "step": 335 },
    { "epoch": 0.034374280672139955, "grad_norm": 0.287109375, "learning_rate": 9.881407035175881e-05, "loss": 1.506, "step": 336 },
    { "epoch": 0.03447658507890227, "grad_norm": 0.283203125, "learning_rate": 9.880904522613066e-05, "loss": 1.5479, "step": 337 },
    { "epoch": 0.03457888948566459, "grad_norm": 0.27734375, "learning_rate": 9.880402010050252e-05, "loss": 1.4794, "step": 338 },
    { "epoch": 0.034681193892426916, "grad_norm": 0.28125, "learning_rate": 9.879899497487438e-05, "loss": 1.4704, "step": 339 },
    { "epoch": 0.03478349829918924, "grad_norm": 0.3359375, "learning_rate": 9.879396984924624e-05, "loss": 1.72, "step": 340 },
    { "epoch": 0.03488580270595156, "grad_norm": 0.259765625, "learning_rate": 9.878894472361809e-05, "loss": 1.3659, "step": 341 },
    { "epoch": 0.03498810711271388, "grad_norm": 0.28125, "learning_rate": 9.878391959798995e-05, "loss": 1.5252, "step": 342 },
    { "epoch": 0.0350904115194762, "grad_norm": 0.34765625, "learning_rate": 9.877889447236182e-05, "loss": 1.6494, "step": 343 },
    { "epoch": 0.03519271592623852, "grad_norm": 0.279296875, "learning_rate": 9.877386934673368e-05, "loss": 1.4853, "step": 344 },
    { "epoch": 0.035295020333000846, "grad_norm": 0.283203125, "learning_rate": 9.876884422110553e-05, "loss": 1.4591, "step": 345 },
    { "epoch": 0.03539732473976317, "grad_norm": 0.26953125, "learning_rate": 9.87638190954774e-05, "loss": 1.462, "step": 346 },
    { "epoch": 0.035499629146525484, "grad_norm": 0.30078125, "learning_rate": 9.875879396984925e-05, "loss": 1.5346, "step": 347 },
    { "epoch": 0.03560193355328781, "grad_norm": 0.27734375, "learning_rate": 9.875376884422111e-05, "loss": 1.4419, "step": 348 },
    { "epoch": 0.03570423796005013, "grad_norm": 0.306640625, "learning_rate": 9.874874371859297e-05, "loss": 1.5451, "step": 349 },
    { "epoch": 0.03580654236681245, "grad_norm": 0.34375, "learning_rate": 9.874371859296482e-05, "loss": 1.5113, "step": 350 },
    { "epoch": 0.035908846773574775, "grad_norm": 0.3046875, "learning_rate": 9.873869346733668e-05, "loss": 1.4416, "step": 351 },
    { "epoch": 0.03601115118033709, "grad_norm": 0.306640625, "learning_rate": 9.873366834170855e-05, "loss": 1.6185, "step": 352 },
    { "epoch": 0.03611345558709941, "grad_norm": 0.287109375, "learning_rate": 9.872864321608041e-05, "loss": 1.4294, "step": 353 },
    { "epoch": 0.036215759993861736, "grad_norm": 0.3203125, "learning_rate": 9.872361809045226e-05, "loss": 1.6404, "step": 354 },
    { "epoch": 0.03631806440062406, "grad_norm": 0.25390625, "learning_rate": 9.871859296482413e-05, "loss": 1.3696, "step": 355 },
    { "epoch": 0.03642036880738638, "grad_norm": 0.28515625, "learning_rate": 9.871356783919598e-05, "loss": 1.411, "step": 356 },
    { "epoch": 0.0365226732141487, "grad_norm": 0.337890625, "learning_rate": 9.870854271356784e-05, "loss": 1.4582, "step": 357 },
    { "epoch": 0.03662497762091102, "grad_norm": 0.41015625, "learning_rate": 9.870351758793969e-05, "loss": 1.7792, "step": 358 },
    { "epoch": 0.03672728202767334, "grad_norm": 0.33984375, "learning_rate": 9.869849246231157e-05, "loss": 1.517, "step": 359 },
    { "epoch": 0.036829586434435665, "grad_norm": 0.28125, "learning_rate": 9.869346733668342e-05, "loss": 1.3991, "step": 360 },
    { "epoch": 0.03693189084119798, "grad_norm": 0.306640625, "learning_rate": 9.868844221105528e-05, "loss": 1.5767, "step": 361 },
    { "epoch": 0.037034195247960304, "grad_norm": 0.275390625, "learning_rate": 9.868341708542714e-05, "loss": 1.4882, "step": 362 },
    { "epoch": 0.03713649965472263, "grad_norm": 0.291015625, "learning_rate": 9.8678391959799e-05, "loss": 1.5225, "step": 363 },
    { "epoch": 0.03723880406148495, "grad_norm": 0.3359375, "learning_rate": 9.867336683417085e-05, "loss": 1.4675, "step": 364 },
    { "epoch": 0.03734110846824727, "grad_norm": 0.3125, "learning_rate": 9.866834170854273e-05, "loss": 1.5146, "step": 365 },
    { "epoch": 0.03744341287500959, "grad_norm": 0.287109375, "learning_rate": 9.866331658291457e-05, "loss": 1.4159, "step": 366 },
    { "epoch": 0.03754571728177191, "grad_norm": 0.3046875, "learning_rate": 9.865829145728644e-05, "loss": 1.4227, "step": 367 },
    { "epoch": 0.03764802168853423, "grad_norm": 0.279296875, "learning_rate": 9.86532663316583e-05, "loss": 1.548, "step": 368 },
    { "epoch": 0.037750326095296556, "grad_norm": 0.27734375, "learning_rate": 9.864824120603016e-05, "loss": 1.5389, "step": 369 },
    { "epoch": 0.03785263050205888, "grad_norm": 0.283203125, "learning_rate": 9.864321608040201e-05, "loss": 1.4892, "step": 370 },
    { "epoch": 0.037954934908821195, "grad_norm": 0.30078125, "learning_rate": 9.863819095477388e-05, "loss": 1.4655, "step": 371 },
    { "epoch": 0.03805723931558352, "grad_norm": 0.2890625, "learning_rate": 9.863316582914573e-05, "loss": 1.4212, "step": 372 },
    { "epoch": 0.03815954372234584, "grad_norm": 0.298828125, "learning_rate": 9.86281407035176e-05, "loss": 1.5425, "step": 373 },
    { "epoch": 0.03826184812910816, "grad_norm": 0.2734375, "learning_rate": 9.862311557788944e-05, "loss": 1.4689, "step": 374 },
    { "epoch": 0.038364152535870485, "grad_norm": 0.279296875, "learning_rate": 9.861809045226132e-05, "loss": 1.432, "step": 375 },
    { "epoch": 0.0384664569426328, "grad_norm": 0.310546875, "learning_rate": 9.861306532663317e-05, "loss": 1.5188, "step": 376 },
    { "epoch": 0.038568761349395124, "grad_norm": 0.283203125, "learning_rate": 9.860804020100503e-05, "loss": 1.3553, "step": 377 },
    { "epoch": 0.03867106575615745, "grad_norm": 0.28515625, "learning_rate": 9.860301507537689e-05, "loss": 1.33, "step": 378 },
    { "epoch": 0.03877337016291977, "grad_norm": 0.291015625, "learning_rate": 9.859798994974875e-05, "loss": 1.5601, "step": 379 },
    { "epoch": 0.03887567456968209, "grad_norm": 0.267578125, "learning_rate": 9.85929648241206e-05, "loss": 1.4254, "step": 380 },
    { "epoch": 0.03897797897644441, "grad_norm": 0.29296875, "learning_rate": 9.858793969849246e-05, "loss": 1.5918, "step": 381 },
    { "epoch": 0.03908028338320673, "grad_norm": 0.28125, "learning_rate": 9.858291457286433e-05, "loss": 1.4162, "step": 382 },
    { "epoch": 0.03918258778996905, "grad_norm": 0.318359375, "learning_rate": 9.857788944723619e-05, "loss": 1.5642, "step": 383 },
    { "epoch": 0.039284892196731376, "grad_norm": 0.267578125, "learning_rate": 9.857286432160805e-05, "loss": 1.4412, "step": 384 },
    { "epoch": 0.0393871966034937, "grad_norm": 0.25390625, "learning_rate": 9.85678391959799e-05, "loss": 1.3841, "step": 385 },
    { "epoch": 0.039489501010256015, "grad_norm": 0.287109375, "learning_rate": 9.856281407035176e-05, "loss": 1.498, "step": 386 },
    { "epoch": 0.03959180541701834, "grad_norm": 0.27734375, "learning_rate": 9.855778894472362e-05, "loss": 1.4214, "step": 387 },
    { "epoch": 0.03969410982378066, "grad_norm": 0.294921875, "learning_rate": 9.855276381909548e-05, "loss": 1.4923, "step": 388 },
    { "epoch": 0.03979641423054298, "grad_norm": 0.3046875, "learning_rate": 9.854773869346733e-05, "loss": 1.4241, "step": 389 },
    { "epoch": 0.0398987186373053, "grad_norm": 0.302734375, "learning_rate": 9.85427135678392e-05, "loss": 1.7142, "step": 390 },
    { "epoch": 0.04000102304406762, "grad_norm": 0.3203125, "learning_rate": 9.853768844221106e-05, "loss": 1.6048, "step": 391 },
    { "epoch": 0.040103327450829944,
|
"grad_norm": 0.27734375, |
|
"learning_rate": 9.853266331658292e-05, |
|
"loss": 1.2976, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.04020563185759227, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 9.852763819095477e-05, |
|
"loss": 1.4336, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.04030793626435459, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 9.852261306532664e-05, |
|
"loss": 1.526, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.040410240671116905, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 9.851758793969849e-05, |
|
"loss": 1.5614, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.04051254507787923, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.851256281407035e-05, |
|
"loss": 1.5674, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.04061484948464155, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 9.850753768844222e-05, |
|
"loss": 1.4084, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.04071715389140387, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.850251256281408e-05, |
|
"loss": 1.4501, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.040819458298166196, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 9.849748743718593e-05, |
|
"loss": 1.5337, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.04092176270492851, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 9.84924623115578e-05, |
|
"loss": 1.4503, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.041024067111690835, |
|
"grad_norm": 0.2578125, |
|
"learning_rate": 9.848743718592965e-05, |
|
"loss": 1.44, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.04112637151845316, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 9.848241206030151e-05, |
|
"loss": 1.5709, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.04122867592521548, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 9.847738693467337e-05, |
|
"loss": 1.5478, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.0413309803319778, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 9.847236180904524e-05, |
|
"loss": 1.2712, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.04143328473874012, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 9.846733668341709e-05, |
|
"loss": 1.5433, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.04153558914550244, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 9.846231155778895e-05, |
|
"loss": 1.4632, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.041637893552264764, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 9.845728643216081e-05, |
|
"loss": 1.5674, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.04174019795902709, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 9.845226130653267e-05, |
|
"loss": 1.3531, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.04184250236578941, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 9.844723618090452e-05, |
|
"loss": 1.6524, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.041944806772551725, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.84422110552764e-05, |
|
"loss": 1.4676, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.04204711117931405, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 9.843718592964824e-05, |
|
"loss": 1.4209, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.04214941558607637, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.84321608040201e-05, |
|
"loss": 1.5188, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.04225171999283869, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 9.842713567839197e-05, |
|
"loss": 1.4647, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.042354024399601016, |
|
"grad_norm": 0.271484375, |
|
"learning_rate": 9.842211055276383e-05, |
|
"loss": 1.3745, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.04245632880636333, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 9.841708542713568e-05, |
|
"loss": 1.4112, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.042558633213125655, |
|
"grad_norm": 0.2470703125, |
|
"learning_rate": 9.841206030150754e-05, |
|
"loss": 1.2667, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.04266093761988798, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.84070351758794e-05, |
|
"loss": 1.5598, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.0427632420266503, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 9.840201005025126e-05, |
|
"loss": 1.4135, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.04286554643341262, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 9.839698492462313e-05, |
|
"loss": 1.507, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.04296785084017494, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 9.839195979899497e-05, |
|
"loss": 1.4718, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.04307015524693726, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 9.838693467336684e-05, |
|
"loss": 1.4224, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.043172459653699584, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.83819095477387e-05, |
|
"loss": 1.4815, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.04327476406046191, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 9.837688442211056e-05, |
|
"loss": 1.5333, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.04337706846722422, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 9.837185929648241e-05, |
|
"loss": 1.4247, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.043479372873986545, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 9.836683417085427e-05, |
|
"loss": 1.5218, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.04358167728074887, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 9.836180904522613e-05, |
|
"loss": 1.6057, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.04368398168751119, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 9.8356783919598e-05, |
|
"loss": 1.6459, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.04378628609427351, |
|
"grad_norm": 0.265625, |
|
"learning_rate": 9.835175879396984e-05, |
|
"loss": 1.3835, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.04388859050103583, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.834673366834172e-05, |
|
"loss": 1.4054, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.04399089490779815, |
|
"grad_norm": 0.251953125, |
|
"learning_rate": 9.834170854271357e-05, |
|
"loss": 1.4183, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.044093199314560474, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 9.833668341708543e-05, |
|
"loss": 1.509, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.0441955037213228, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 9.833165829145729e-05, |
|
"loss": 1.5587, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.04429780812808512, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 9.832663316582915e-05, |
|
"loss": 1.6455, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.044400112534847436, |
|
"grad_norm": 0.2578125, |
|
"learning_rate": 9.8321608040201e-05, |
|
"loss": 1.3427, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.04450241694160976, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 9.831658291457288e-05, |
|
"loss": 1.382, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.04460472134837208, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 9.831155778894473e-05, |
|
"loss": 1.42, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.044707025755134404, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 9.830653266331659e-05, |
|
"loss": 1.4817, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.04480933016189673, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 9.830150753768844e-05, |
|
"loss": 1.5843, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.04491163456865904, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 9.829648241206031e-05, |
|
"loss": 1.4559, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.045013938975421365, |
|
"grad_norm": 0.25390625, |
|
"learning_rate": 9.829145728643216e-05, |
|
"loss": 1.4828, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.04511624338218369, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 9.828643216080402e-05, |
|
"loss": 1.7695, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.04521854778894601, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 9.828140703517589e-05, |
|
"loss": 1.3626, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.04532085219570833, |
|
"grad_norm": 0.267578125, |
|
"learning_rate": 9.827638190954775e-05, |
|
"loss": 1.3116, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.04542315660247065, |
|
"grad_norm": 0.263671875, |
|
"learning_rate": 9.82713567839196e-05, |
|
"loss": 1.3595, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.04552546100923297, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 9.826633165829147e-05, |
|
"loss": 1.5105, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.045627765415995294, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 9.826130653266332e-05, |
|
"loss": 1.5128, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.04573006982275762, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 9.825628140703518e-05, |
|
"loss": 1.3877, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.04583237422951994, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 9.825125628140704e-05, |
|
"loss": 1.5253, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.045934678636282256, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.82462311557789e-05, |
|
"loss": 1.3744, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.04603698304304458, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.824120603015075e-05, |
|
"loss": 1.4214, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.0461392874498069, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 9.823618090452262e-05, |
|
"loss": 1.3398, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.046241591856569224, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.823115577889448e-05, |
|
"loss": 1.5119, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.04634389626333154, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 9.822613065326634e-05, |
|
"loss": 1.5934, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.04644620067009386, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 9.822110552763819e-05, |
|
"loss": 1.4602, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.046548505076856185, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 9.821608040201005e-05, |
|
"loss": 1.5107, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.04665080948361851, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 9.821105527638191e-05, |
|
"loss": 1.6103, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.04675311389038083, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 9.820603015075378e-05, |
|
"loss": 1.5265, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.046855418297143146, |
|
"grad_norm": 0.255859375, |
|
"learning_rate": 9.820100502512564e-05, |
|
"loss": 1.3611, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.04695772270390547, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 9.819597989949749e-05, |
|
"loss": 1.6182, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.04706002711066779, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 9.819095477386935e-05, |
|
"loss": 1.3604, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.047162331517430114, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 9.818592964824121e-05, |
|
"loss": 1.5613, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.04726463592419244, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 9.818090452261307e-05, |
|
"loss": 1.5404, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.04736694033095475, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 9.817587939698492e-05, |
|
"loss": 1.5799, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.047469244737717076, |
|
"grad_norm": 0.255859375, |
|
"learning_rate": 9.81708542713568e-05, |
|
"loss": 1.2581, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.0475715491444794, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 9.816582914572864e-05, |
|
"loss": 1.4294, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.04767385355124172, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 9.81608040201005e-05, |
|
"loss": 1.4916, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.047776157958004044, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 9.815577889447236e-05, |
|
"loss": 1.3478, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.04787846236476636, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 9.815075376884423e-05, |
|
"loss": 1.5497, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.04798076677152868, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 9.814572864321608e-05, |
|
"loss": 1.5409, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.048083071178291005, |
|
"grad_norm": 0.271484375, |
|
"learning_rate": 9.814070351758794e-05, |
|
"loss": 1.3988, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.04818537558505333, |
|
"grad_norm": 0.26171875, |
|
"learning_rate": 9.81356783919598e-05, |
|
"loss": 2.4223, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.04828767999181565, |
|
"grad_norm": 0.255859375, |
|
"learning_rate": 9.813065326633167e-05, |
|
"loss": 1.3415, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.048389984398577966, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 9.812562814070351e-05, |
|
"loss": 1.3554, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.04849228880534029, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 9.812060301507539e-05, |
|
"loss": 1.5009, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.04859459321210261, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 9.811557788944724e-05, |
|
"loss": 1.6927, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.048696897618864934, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 9.81105527638191e-05, |
|
"loss": 1.6283, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.04879920202562726, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 9.810552763819096e-05, |
|
"loss": 1.434, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.04890150643238957, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.810050251256282e-05, |
|
"loss": 1.522, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.049003810839151896, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 9.809547738693467e-05, |
|
"loss": 1.4241, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.04910611524591422, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 9.809045226130655e-05, |
|
"loss": 1.4464, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.04920841965267654, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 9.80854271356784e-05, |
|
"loss": 1.5481, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.049310724059438864, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.808040201005026e-05, |
|
"loss": 1.6201, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.04941302846620118, |
|
"grad_norm": 0.267578125, |
|
"learning_rate": 9.807537688442211e-05, |
|
"loss": 1.4617, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.0495153328729635, |
|
"grad_norm": 0.26171875, |
|
"learning_rate": 9.807035175879398e-05, |
|
"loss": 1.5015, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.049617637279725825, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 9.806532663316583e-05, |
|
"loss": 1.5654, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.04971994168648815, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 9.806030150753769e-05, |
|
"loss": 1.4838, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.049822246093250463, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 9.805527638190956e-05, |
|
"loss": 1.4796, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.049924550500012786, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 9.805025125628142e-05, |
|
"loss": 1.5272, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.05002685490677511, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 9.804522613065327e-05, |
|
"loss": 1.6112, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.05012915931353743, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 9.804020100502513e-05, |
|
"loss": 1.4579, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.050231463720299754, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 9.803517587939699e-05, |
|
"loss": 1.4171, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.05033376812706207, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 9.803015075376885e-05, |
|
"loss": 1.6056, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.05043607253382439, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 9.802512562814071e-05, |
|
"loss": 1.5128, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.050538376940586716, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 9.802010050251256e-05, |
|
"loss": 1.55, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.05064068134734904, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 9.801507537688442e-05, |
|
"loss": 1.5617, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.05074298575411136, |
|
"grad_norm": 0.263671875, |
|
"learning_rate": 9.801005025125629e-05, |
|
"loss": 1.4943, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.05084529016087368, |
|
"grad_norm": 0.271484375, |
|
"learning_rate": 9.800502512562815e-05, |
|
"loss": 1.5396, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.050947594567636, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 9.8e-05, |
|
"loss": 1.5213, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.05104989897439832, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 9.799497487437186e-05, |
|
"loss": 1.3655, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.051152203381160645, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 9.798994974874372e-05, |
|
"loss": 1.4502, |
|
"step": 500 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 20000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.29157249892352e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}