{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9992422328870927,
  "eval_steps": 495,
  "global_step": 989,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 53.622260681945534,
      "learning_rate": 5.000000000000001e-07,
      "loss": 1.1058,
      "step": 1
    },
    {
      "epoch": 0.0,
      "eval_loss": 1.1559669971466064,
      "eval_runtime": 134.2853,
      "eval_samples_per_second": 9.733,
      "eval_steps_per_second": 0.276,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 40.68249059631914,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.1361,
      "step": 2
    },
    {
      "epoch": 0.0,
      "grad_norm": 43.27766914416912,
      "learning_rate": 1.5e-06,
      "loss": 1.1577,
      "step": 3
    },
    {
      "epoch": 0.0,
      "grad_norm": 64.72729420725338,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.1292,
      "step": 4
    },
    {
      "epoch": 0.01,
      "grad_norm": 56.372636612148945,
      "learning_rate": 2.5e-06,
      "loss": 1.1337,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 50.14861511180959,
      "learning_rate": 3e-06,
      "loss": 1.1778,
      "step": 6
    },
    {
      "epoch": 0.01,
      "grad_norm": 62.95626490942665,
      "learning_rate": 3.5e-06,
      "loss": 1.1317,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 25.825764536536617,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.0504,
      "step": 8
    },
    {
      "epoch": 0.01,
      "grad_norm": 30.983005109311666,
      "learning_rate": 4.5e-06,
      "loss": 1.0636,
      "step": 9
    },
    {
      "epoch": 0.01,
      "grad_norm": 42.69870581147392,
      "learning_rate": 5e-06,
      "loss": 1.1061,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 30.4102208779435,
      "learning_rate": 4.9999968146329895e-06,
      "loss": 1.0577,
      "step": 11
    },
    {
      "epoch": 0.01,
      "grad_norm": 19.477219524924408,
      "learning_rate": 4.999987258540075e-06,
      "loss": 1.0341,
      "step": 12
    },
    {
      "epoch": 0.01,
      "grad_norm": 16.065115351967986,
      "learning_rate": 4.9999713317456065e-06,
      "loss": 1.0212,
      "step": 13
    },
    {
      "epoch": 0.01,
      "grad_norm": 20.305999270879685,
      "learning_rate": 4.999949034290173e-06,
      "loss": 0.9934,
      "step": 14
    },
    {
      "epoch": 0.02,
      "grad_norm": 15.296134261870147,
      "learning_rate": 4.999920366230593e-06,
      "loss": 1.0452,
      "step": 15
    },
    {
      "epoch": 0.02,
      "grad_norm": 21.688573186878063,
      "learning_rate": 4.9998853276399215e-06,
      "loss": 1.0699,
      "step": 16
    },
    {
      "epoch": 0.02,
      "grad_norm": 14.335847546461085,
      "learning_rate": 4.999843918607447e-06,
      "loss": 1.0098,
      "step": 17
    },
    {
      "epoch": 0.02,
      "grad_norm": 22.3952228510413,
      "learning_rate": 4.999796139238694e-06,
      "loss": 1.0408,
      "step": 18
    },
    {
      "epoch": 0.02,
      "grad_norm": 9.166221867386932,
      "learning_rate": 4.999741989655415e-06,
      "loss": 1.0028,
      "step": 19
    },
    {
      "epoch": 0.02,
      "grad_norm": 11.838379188476093,
      "learning_rate": 4.999681469995601e-06,
      "loss": 1.0009,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 10.419092295341123,
      "learning_rate": 4.9996145804134735e-06,
      "loss": 1.022,
      "step": 21
    },
    {
      "epoch": 0.02,
      "grad_norm": 9.35336523909701,
      "learning_rate": 4.999541321079486e-06,
      "loss": 0.9748,
      "step": 22
    },
    {
      "epoch": 0.02,
      "grad_norm": 9.198682807281516,
      "learning_rate": 4.999461692180326e-06,
      "loss": 1.0549,
      "step": 23
    },
    {
      "epoch": 0.02,
      "grad_norm": 12.547373233238517,
      "learning_rate": 4.999375693918911e-06,
      "loss": 0.9437,
      "step": 24
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.595672331068808,
      "learning_rate": 4.99928332651439e-06,
      "loss": 1.0163,
      "step": 25
    },
    {
      "epoch": 0.03,
      "grad_norm": 12.993405908580561,
      "learning_rate": 4.999184590202142e-06,
      "loss": 1.0146,
      "step": 26
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.549924685378986,
      "learning_rate": 4.999079485233775e-06,
      "loss": 0.9542,
      "step": 27
    },
    {
      "epoch": 0.03,
      "grad_norm": 10.198320823624972,
      "learning_rate": 4.998968011877129e-06,
      "loss": 0.9641,
      "step": 28
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.2545266805555,
      "learning_rate": 4.99885017041627e-06,
      "loss": 0.9832,
      "step": 29
    },
    {
      "epoch": 0.03,
      "grad_norm": 7.9313407875475646,
      "learning_rate": 4.998725961151493e-06,
      "loss": 0.9685,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.807588698454,
      "learning_rate": 4.998595384399319e-06,
      "loss": 0.9193,
      "step": 31
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.876327124281431,
      "learning_rate": 4.998458440492498e-06,
      "loss": 0.9616,
      "step": 32
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.98501521873747,
      "learning_rate": 4.998315129780001e-06,
      "loss": 1.0099,
      "step": 33
    },
    {
      "epoch": 0.03,
      "grad_norm": 10.044564125219294,
      "learning_rate": 4.998165452627025e-06,
      "loss": 0.9615,
      "step": 34
    },
    {
      "epoch": 0.04,
      "grad_norm": 11.210345590334603,
      "learning_rate": 4.9980094094149945e-06,
      "loss": 1.1204,
      "step": 35
    },
    {
      "epoch": 0.04,
      "grad_norm": 7.886709354787903,
      "learning_rate": 4.997847000541551e-06,
      "loss": 0.9783,
      "step": 36
    },
    {
      "epoch": 0.04,
      "grad_norm": 7.5254666516991335,
      "learning_rate": 4.997678226420561e-06,
      "loss": 0.9161,
      "step": 37
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.272181825775892,
      "learning_rate": 4.99750308748211e-06,
      "loss": 0.9425,
      "step": 38
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.553520865941246,
      "learning_rate": 4.997321584172504e-06,
      "loss": 0.9962,
      "step": 39
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.229506711694187,
      "learning_rate": 4.997133716954266e-06,
      "loss": 0.9347,
      "step": 40
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.567484614549636,
      "learning_rate": 4.996939486306138e-06,
      "loss": 0.9485,
      "step": 41
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.61521839575945,
      "learning_rate": 4.996738892723076e-06,
      "loss": 0.883,
      "step": 42
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.10212907196996,
      "learning_rate": 4.99653193671625e-06,
      "loss": 1.1048,
      "step": 43
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.80462692647593,
      "learning_rate": 4.996318618813046e-06,
      "loss": 0.9816,
      "step": 44
    },
    {
      "epoch": 0.05,
      "grad_norm": 7.097343998505614,
      "learning_rate": 4.996098939557062e-06,
      "loss": 0.9561,
      "step": 45
    },
    {
      "epoch": 0.05,
      "grad_norm": 7.282799226817679,
      "learning_rate": 4.995872899508103e-06,
      "loss": 0.9617,
      "step": 46
    },
    {
      "epoch": 0.05,
      "grad_norm": 12.216537774733089,
      "learning_rate": 4.995640499242187e-06,
      "loss": 0.9243,
      "step": 47
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.359198206461496,
      "learning_rate": 4.995401739351537e-06,
      "loss": 0.9436,
      "step": 48
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.346073589036576,
      "learning_rate": 4.995156620444584e-06,
      "loss": 0.8964,
      "step": 49
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.688812989274403,
      "learning_rate": 4.9949051431459615e-06,
      "loss": 0.9005,
      "step": 50
    },
    {
      "epoch": 0.05,
      "grad_norm": 7.002996068199546,
      "learning_rate": 4.994647308096509e-06,
      "loss": 0.9334,
      "step": 51
    },
    {
      "epoch": 0.05,
      "grad_norm": 7.083307002817586,
      "learning_rate": 4.994383115953266e-06,
      "loss": 0.8988,
      "step": 52
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.468005918770268,
      "learning_rate": 4.9941125673894705e-06,
      "loss": 0.9222,
      "step": 53
    },
    {
      "epoch": 0.05,
      "grad_norm": 6.444789411001062,
      "learning_rate": 4.993835663094562e-06,
      "loss": 0.9338,
      "step": 54
    },
    {
      "epoch": 0.06,
      "grad_norm": 7.644539402695577,
      "learning_rate": 4.9935524037741705e-06,
      "loss": 1.0552,
      "step": 55
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.40049014930346,
      "learning_rate": 4.9932627901501265e-06,
      "loss": 0.9328,
      "step": 56
    },
    {
      "epoch": 0.06,
      "grad_norm": 8.906097856477972,
      "learning_rate": 4.99296682296045e-06,
      "loss": 1.1228,
      "step": 57
    },
    {
      "epoch": 0.06,
      "grad_norm": 7.079585029259878,
      "learning_rate": 4.992664502959351e-06,
      "loss": 1.0413,
      "step": 58
    },
    {
      "epoch": 0.06,
      "grad_norm": 10.34845460315542,
      "learning_rate": 4.992355830917232e-06,
      "loss": 0.9309,
      "step": 59
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.2525622909780765,
      "learning_rate": 4.992040807620678e-06,
      "loss": 0.9742,
      "step": 60
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.750158577512716,
      "learning_rate": 4.991719433872461e-06,
      "loss": 0.9132,
      "step": 61
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.2784065217091145,
      "learning_rate": 4.9913917104915374e-06,
      "loss": 0.9586,
      "step": 62
    },
    {
      "epoch": 0.06,
      "grad_norm": 6.747179104322235,
      "learning_rate": 4.991057638313042e-06,
      "loss": 0.8983,
      "step": 63
    },
    {
      "epoch": 0.06,
      "grad_norm": 13.39546333182503,
      "learning_rate": 4.990717218188286e-06,
      "loss": 0.9282,
      "step": 64
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.225226219601699,
      "learning_rate": 4.990370450984764e-06,
      "loss": 0.8964,
      "step": 65
    },
    {
      "epoch": 0.07,
      "grad_norm": 7.297944379719202,
      "learning_rate": 4.990017337586137e-06,
      "loss": 0.886,
      "step": 66
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.684537834909945,
      "learning_rate": 4.989657878892245e-06,
      "loss": 0.9474,
      "step": 67
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.800644526106279,
      "learning_rate": 4.989292075819091e-06,
      "loss": 0.9222,
      "step": 68
    },
    {
      "epoch": 0.07,
      "grad_norm": 5.935752803831776,
      "learning_rate": 4.988919929298851e-06,
      "loss": 0.9675,
      "step": 69
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.0962518911658385,
      "learning_rate": 4.988541440279862e-06,
      "loss": 0.8945,
      "step": 70
    },
    {
      "epoch": 0.07,
      "grad_norm": 8.739181681630852,
      "learning_rate": 4.988156609726628e-06,
      "loss": 0.9216,
      "step": 71
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.531531611955777,
      "learning_rate": 4.987765438619806e-06,
      "loss": 0.9153,
      "step": 72
    },
    {
      "epoch": 0.07,
      "grad_norm": 11.36936049546851,
      "learning_rate": 4.987367927956218e-06,
      "loss": 1.2286,
      "step": 73
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.073878979737398,
      "learning_rate": 4.9869640787488375e-06,
      "loss": 0.9115,
      "step": 74
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.8272340999466445,
      "learning_rate": 4.98655389202679e-06,
      "loss": 0.9509,
      "step": 75
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.397354616805974,
      "learning_rate": 4.986137368835351e-06,
      "loss": 1.0375,
      "step": 76
    },
    {
      "epoch": 0.08,
      "grad_norm": 7.966255876371102,
      "learning_rate": 4.985714510235945e-06,
      "loss": 1.0293,
      "step": 77
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.185986548775906,
      "learning_rate": 4.985285317306141e-06,
      "loss": 0.9109,
      "step": 78
    },
    {
      "epoch": 0.08,
      "grad_norm": 7.024522011830165,
      "learning_rate": 4.984849791139647e-06,
      "loss": 0.9061,
      "step": 79
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.995863723081675,
      "learning_rate": 4.984407932846311e-06,
      "loss": 0.8925,
      "step": 80
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.590869203505634,
      "learning_rate": 4.983959743552118e-06,
      "loss": 0.9194,
      "step": 81
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.037924790619887,
      "learning_rate": 4.983505224399188e-06,
      "loss": 0.9598,
      "step": 82
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.517206233677988,
      "learning_rate": 4.983044376545767e-06,
      "loss": 0.8645,
      "step": 83
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.230061445243754,
      "learning_rate": 4.982577201166232e-06,
      "loss": 0.9021,
      "step": 84
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.911990663487462,
      "learning_rate": 4.9821036994510816e-06,
      "loss": 0.858,
      "step": 85
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.984021384400987,
      "learning_rate": 4.981623872606938e-06,
      "loss": 0.8723,
      "step": 86
    },
    {
      "epoch": 0.09,
      "grad_norm": 6.703129385119859,
      "learning_rate": 4.981137721856541e-06,
      "loss": 0.9518,
      "step": 87
    },
    {
      "epoch": 0.09,
      "grad_norm": 7.12479649165451,
      "learning_rate": 4.980645248438746e-06,
      "loss": 0.9136,
      "step": 88
    },
    {
      "epoch": 0.09,
      "grad_norm": 11.122916834992573,
      "learning_rate": 4.9801464536085184e-06,
      "loss": 0.9066,
      "step": 89
    },
    {
      "epoch": 0.09,
      "grad_norm": 7.841215482759505,
      "learning_rate": 4.9796413386369344e-06,
      "loss": 1.0641,
      "step": 90
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.958073787403037,
      "learning_rate": 4.979129904811177e-06,
      "loss": 0.9263,
      "step": 91
    },
    {
      "epoch": 0.09,
      "grad_norm": 7.020080058824307,
      "learning_rate": 4.978612153434527e-06,
      "loss": 0.9603,
      "step": 92
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.574162423590218,
      "learning_rate": 4.9780880858263684e-06,
      "loss": 0.8855,
      "step": 93
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.834495325366524,
      "learning_rate": 4.977557703322178e-06,
      "loss": 0.8873,
      "step": 94
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.461734817525809,
      "learning_rate": 4.9770210072735286e-06,
      "loss": 0.8965,
      "step": 95
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.653047237885576,
      "learning_rate": 4.976477999048078e-06,
      "loss": 1.0051,
      "step": 96
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.092048158281413,
      "learning_rate": 4.975928680029571e-06,
      "loss": 0.8779,
      "step": 97
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.730921274763532,
      "learning_rate": 4.975373051617831e-06,
      "loss": 0.8886,
      "step": 98
    },
    {
      "epoch": 0.1,
      "grad_norm": 7.999386874766411,
      "learning_rate": 4.974811115228767e-06,
      "loss": 0.8932,
      "step": 99
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.569281154720038,
      "learning_rate": 4.974242872294354e-06,
      "loss": 0.9093,
      "step": 100
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.243416936742017,
      "learning_rate": 4.973668324262645e-06,
      "loss": 0.8787,
      "step": 101
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.425532736177959,
      "learning_rate": 4.973087472597754e-06,
      "loss": 0.8689,
      "step": 102
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.834945735385297,
      "learning_rate": 4.972500318779864e-06,
      "loss": 1.0439,
      "step": 103
    },
    {
      "epoch": 0.11,
      "grad_norm": 7.010749117953628,
      "learning_rate": 4.971906864305214e-06,
      "loss": 1.0111,
      "step": 104
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.399008383715632,
      "learning_rate": 4.9713071106861e-06,
      "loss": 0.9488,
      "step": 105
    },
    {
      "epoch": 0.11,
      "grad_norm": 6.852009850723007,
      "learning_rate": 4.970701059450872e-06,
      "loss": 0.9165,
      "step": 106
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.490567287919795,
      "learning_rate": 4.970088712143924e-06,
      "loss": 0.93,
      "step": 107
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.466174338824931,
      "learning_rate": 4.969470070325699e-06,
      "loss": 0.8258,
      "step": 108
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.3052755303637325,
      "learning_rate": 4.968845135572678e-06,
      "loss": 0.8643,
      "step": 109
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.79116786116443,
      "learning_rate": 4.968213909477376e-06,
      "loss": 0.9092,
      "step": 110
    },
    {
      "epoch": 0.11,
      "grad_norm": 6.243372776357591,
      "learning_rate": 4.967576393648344e-06,
      "loss": 0.8854,
      "step": 111
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.554172933410867,
      "learning_rate": 4.96693258971016e-06,
      "loss": 0.9373,
      "step": 112
    },
    {
      "epoch": 0.11,
      "grad_norm": 14.37748982425115,
      "learning_rate": 4.9662824993034245e-06,
      "loss": 0.8937,
      "step": 113
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.41048524226914,
      "learning_rate": 4.965626124084759e-06,
      "loss": 0.9385,
      "step": 114
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.3566507966065355,
      "learning_rate": 4.9649634657268e-06,
      "loss": 0.9047,
      "step": 115
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.532323859056279,
      "learning_rate": 4.964294525918196e-06,
      "loss": 0.8841,
      "step": 116
    },
    {
      "epoch": 0.12,
      "grad_norm": 8.816489663062468,
      "learning_rate": 4.963619306363602e-06,
      "loss": 0.8959,
      "step": 117
    },
    {
      "epoch": 0.12,
      "grad_norm": 7.372965529585357,
      "learning_rate": 4.962937808783675e-06,
      "loss": 1.0141,
      "step": 118
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.68129098809545,
      "learning_rate": 4.962250034915072e-06,
      "loss": 0.8776,
      "step": 119
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.138429945353462,
      "learning_rate": 4.961555986510443e-06,
      "loss": 0.9064,
      "step": 120
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.2074388893831625,
      "learning_rate": 4.960855665338424e-06,
      "loss": 0.8984,
      "step": 121
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.423407890211476,
      "learning_rate": 4.960149073183643e-06,
      "loss": 0.8478,
      "step": 122
    },
    {
      "epoch": 0.12,
      "grad_norm": 15.084543572578946,
      "learning_rate": 4.959436211846702e-06,
      "loss": 1.0345,
      "step": 123
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.093406275304441,
      "learning_rate": 4.958717083144182e-06,
      "loss": 0.8912,
      "step": 124
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.398558925299228,
      "learning_rate": 4.9579916889086336e-06,
      "loss": 0.9241,
      "step": 125
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.573650372167155,
      "learning_rate": 4.957260030988575e-06,
      "loss": 0.9476,
      "step": 126
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.430534040644023,
      "learning_rate": 4.956522111248483e-06,
      "loss": 0.9208,
      "step": 127
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.56067970590197,
      "learning_rate": 4.955777931568797e-06,
      "loss": 0.9274,
      "step": 128
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.601548738488328,
      "learning_rate": 4.955027493845903e-06,
      "loss": 0.8426,
      "step": 129
    },
    {
      "epoch": 0.13,
      "grad_norm": 8.927604688533856,
      "learning_rate": 4.954270799992138e-06,
      "loss": 0.8618,
      "step": 130
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.6362861369729895,
      "learning_rate": 4.953507851935779e-06,
      "loss": 0.921,
      "step": 131
    },
    {
      "epoch": 0.13,
      "grad_norm": 7.966230987922584,
      "learning_rate": 4.952738651621043e-06,
      "loss": 0.92,
      "step": 132
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.8283544064228145,
      "learning_rate": 4.9519632010080765e-06,
      "loss": 0.8719,
      "step": 133
    },
    {
      "epoch": 0.14,
      "grad_norm": 6.733524485804168,
      "learning_rate": 4.951181502072957e-06,
      "loss": 0.8664,
      "step": 134
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.93365687921664,
      "learning_rate": 4.950393556807682e-06,
      "loss": 0.9109,
      "step": 135
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.720266452276143,
      "learning_rate": 4.9495993672201675e-06,
      "loss": 1.0141,
      "step": 136
    },
    {
      "epoch": 0.14,
      "grad_norm": 6.577392904905377,
      "learning_rate": 4.948798935334242e-06,
      "loss": 0.8947,
      "step": 137
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.248018611453421,
      "learning_rate": 4.947992263189641e-06,
      "loss": 0.8519,
      "step": 138
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.964178402688073,
      "learning_rate": 4.947179352842001e-06,
      "loss": 0.8732,
      "step": 139
    },
    {
      "epoch": 0.14,
      "grad_norm": 7.727649852322783,
      "learning_rate": 4.946360206362858e-06,
      "loss": 0.9003,
      "step": 140
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.678031094542374,
      "learning_rate": 4.945534825839637e-06,
      "loss": 0.931,
      "step": 141
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.8975198222340195,
      "learning_rate": 4.944703213375648e-06,
      "loss": 0.9022,
      "step": 142
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.2460458320933485,
      "learning_rate": 4.943865371090087e-06,
      "loss": 0.8835,
      "step": 143
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.080524254102179,
      "learning_rate": 4.94302130111802e-06,
      "loss": 0.8957,
      "step": 144
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.614189755845358,
      "learning_rate": 4.942171005610385e-06,
      "loss": 0.8961,
      "step": 145
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.2283236967224,
      "learning_rate": 4.941314486733986e-06,
      "loss": 0.82,
      "step": 146
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.903937090069375,
      "learning_rate": 4.940451746671484e-06,
      "loss": 0.8733,
      "step": 147
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.044592476065338,
      "learning_rate": 4.939582787621394e-06,
      "loss": 0.9423,
      "step": 148
    },
    {
      "epoch": 0.15,
      "grad_norm": 10.766060880843327,
      "learning_rate": 4.938707611798078e-06,
      "loss": 0.8449,
      "step": 149
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.623203037666776,
      "learning_rate": 4.937826221431742e-06,
      "loss": 0.8623,
      "step": 150
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.0474657841525525,
      "learning_rate": 4.936938618768427e-06,
      "loss": 0.9026,
      "step": 151
    },
    {
      "epoch": 0.15,
      "grad_norm": 31.531685747916384,
      "learning_rate": 4.936044806070005e-06,
      "loss": 0.8545,
      "step": 152
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.2640420937536,
      "learning_rate": 4.935144785614173e-06,
      "loss": 0.8793,
      "step": 153
    },
    {
      "epoch": 0.16,
      "grad_norm": 4.645683842200697,
      "learning_rate": 4.934238559694448e-06,
      "loss": 0.8472,
      "step": 154
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.108168033210612,
      "learning_rate": 4.93332613062016e-06,
      "loss": 1.004,
      "step": 155
    },
    {
      "epoch": 0.16,
      "grad_norm": 5.832130176498325,
      "learning_rate": 4.932407500716445e-06,
      "loss": 0.9278,
      "step": 156
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.2388094461996575,
      "learning_rate": 4.9314826723242425e-06,
      "loss": 1.0358,
      "step": 157
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.954370983448125,
      "learning_rate": 4.930551647800287e-06,
      "loss": 0.9919,
      "step": 158
    },
    {
      "epoch": 0.16,
      "grad_norm": 4.997616854364736,
      "learning_rate": 4.9296144295171025e-06,
      "loss": 0.8445,
      "step": 159
    },
    {
      "epoch": 0.16,
      "grad_norm": 7.115062150515696,
      "learning_rate": 4.928671019862995e-06,
      "loss": 0.8546,
      "step": 160
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.146468237609964,
      "learning_rate": 4.927721421242051e-06,
      "loss": 0.8879,
      "step": 161
    },
    {
      "epoch": 0.16,
      "grad_norm": 5.269100479953049,
      "learning_rate": 4.926765636074124e-06,
      "loss": 0.8928,
      "step": 162
    },
    {
      "epoch": 0.16,
      "grad_norm": 4.829206775414763,
      "learning_rate": 4.925803666794839e-06,
      "loss": 0.8912,
      "step": 163
    },
    {
      "epoch": 0.17,
      "grad_norm": 13.500834231735407,
      "learning_rate": 4.924835515855573e-06,
      "loss": 0.89,
      "step": 164
    },
    {
      "epoch": 0.17,
      "grad_norm": 5.876726093761002,
      "learning_rate": 4.923861185723461e-06,
      "loss": 1.0295,
      "step": 165
    },
    {
      "epoch": 0.17,
      "grad_norm": 6.552028600594945,
      "learning_rate": 4.92288067888138e-06,
      "loss": 0.8523,
      "step": 166
    },
    {
      "epoch": 0.17,
      "grad_norm": 9.604721277839626,
      "learning_rate": 4.921893997827951e-06,
      "loss": 0.8793,
      "step": 167
    },
    {
      "epoch": 0.17,
      "grad_norm": 19.05047261104596,
      "learning_rate": 4.920901145077527e-06,
      "loss": 1.0183,
      "step": 168
    },
    {
      "epoch": 0.17,
      "grad_norm": 8.670208792155977,
      "learning_rate": 4.919902123160187e-06,
      "loss": 0.916,
      "step": 169
    },
    {
      "epoch": 0.17,
      "grad_norm": 5.963579222763698,
      "learning_rate": 4.918896934621734e-06,
      "loss": 0.8927,
      "step": 170
    },
    {
      "epoch": 0.17,
      "grad_norm": 15.698308686475166,
      "learning_rate": 4.917885582023683e-06,
      "loss": 0.973,
      "step": 171
    },
    {
      "epoch": 0.17,
      "grad_norm": 15.158791715235347,
      "learning_rate": 4.9168680679432565e-06,
      "loss": 0.8786,
      "step": 172
    },
    {
      "epoch": 0.17,
      "grad_norm": 8.935267549779024,
      "learning_rate": 4.915844394973379e-06,
      "loss": 0.9105,
      "step": 173
    },
    {
      "epoch": 0.18,
      "grad_norm": 7.85639856911323,
      "learning_rate": 4.914814565722671e-06,
      "loss": 0.8709,
      "step": 174
    },
    {
      "epoch": 0.18,
      "grad_norm": 7.730019511721717,
      "learning_rate": 4.913778582815439e-06,
      "loss": 0.9203,
      "step": 175
    },
    {
      "epoch": 0.18,
      "grad_norm": 30.04084618027315,
      "learning_rate": 4.912736448891672e-06,
      "loss": 0.8835,
      "step": 176
    },
    {
      "epoch": 0.18,
      "grad_norm": 11.53316056998077,
      "learning_rate": 4.9116881666070325e-06,
      "loss": 0.8722,
      "step": 177
    },
    {
      "epoch": 0.18,
      "grad_norm": 36.52217384345542,
      "learning_rate": 4.9106337386328524e-06,
      "loss": 0.8615,
      "step": 178
    },
    {
      "epoch": 0.18,
      "grad_norm": 115.42434006092905,
      "learning_rate": 4.9095731676561246e-06,
      "loss": 0.8587,
      "step": 179
    },
    {
      "epoch": 0.18,
      "grad_norm": 15.54123495731584,
      "learning_rate": 4.908506456379493e-06,
      "loss": 0.863,
      "step": 180
    },
    {
      "epoch": 0.18,
      "grad_norm": 85.99384572854959,
      "learning_rate": 4.907433607521252e-06,
      "loss": 0.894,
      "step": 181
    },
    {
      "epoch": 0.18,
      "grad_norm": 36.13462335675736,
      "learning_rate": 4.906354623815336e-06,
      "loss": 0.9055,
      "step": 182
    },
    {
      "epoch": 0.18,
      "grad_norm": 27.02795167506559,
      "learning_rate": 4.905269508011312e-06,
      "loss": 0.9135,
      "step": 183
    },
    {
      "epoch": 0.19,
      "grad_norm": 78.08453850338199,
      "learning_rate": 4.904178262874374e-06,
      "loss": 0.9165,
      "step": 184
    },
    {
      "epoch": 0.19,
      "grad_norm": 36.25089616666813,
      "learning_rate": 4.903080891185335e-06,
      "loss": 0.9778,
      "step": 185
    },
    {
      "epoch": 0.19,
      "grad_norm": 49.253048863400004,
      "learning_rate": 4.9019773957406194e-06,
      "loss": 0.8498,
      "step": 186
    },
    {
      "epoch": 0.19,
      "grad_norm": 32.09689217815892,
      "learning_rate": 4.900867779352258e-06,
      "loss": 0.9056,
      "step": 187
    },
    {
      "epoch": 0.19,
      "grad_norm": 26.828381083665327,
      "learning_rate": 4.899752044847881e-06,
      "loss": 0.8266,
      "step": 188
    },
    {
      "epoch": 0.19,
      "grad_norm": 144.02426662483788,
      "learning_rate": 4.898630195070705e-06,
      "loss": 0.8645,
      "step": 189
    },
    {
      "epoch": 0.19,
      "grad_norm": 47.52439763925523,
      "learning_rate": 4.897502232879533e-06,
      "loss": 0.8619,
      "step": 190
    },
    {
      "epoch": 0.19,
      "grad_norm": 74.68661681875047,
      "learning_rate": 4.896368161148744e-06,
      "loss": 0.8985,
      "step": 191
    },
    {
      "epoch": 0.19,
      "grad_norm": 75.68493630646316,
      "learning_rate": 4.895227982768287e-06,
      "loss": 0.8809,
      "step": 192
    },
    {
      "epoch": 0.19,
      "grad_norm": 29.706946472395224,
      "learning_rate": 4.89408170064367e-06,
      "loss": 0.8396,
      "step": 193
    },
    {
      "epoch": 0.2,
      "grad_norm": 161.48487196225275,
      "learning_rate": 4.892929317695957e-06,
      "loss": 0.8857,
      "step": 194
    },
    {
      "epoch": 0.2,
      "grad_norm": 41.09638985280378,
      "learning_rate": 4.891770836861757e-06,
      "loss": 0.8633,
      "step": 195
    },
    {
      "epoch": 0.2,
      "grad_norm": 69.69119197887811,
      "learning_rate": 4.890606261093221e-06,
      "loss": 1.0396,
      "step": 196
    },
    {
      "epoch": 0.2,
      "grad_norm": 88.82274883602727,
      "learning_rate": 4.889435593358029e-06,
      "loss": 0.8651,
      "step": 197
    },
    {
      "epoch": 0.2,
      "grad_norm": 81.79928830463821,
      "learning_rate": 4.888258836639386e-06,
      "loss": 0.8389,
      "step": 198
    },
    {
      "epoch": 0.2,
      "grad_norm": 29.87578836447596,
      "learning_rate": 4.887075993936014e-06,
      "loss": 0.8775,
      "step": 199
    },
    {
      "epoch": 0.2,
      "grad_norm": 108.15216101207292,
      "learning_rate": 4.885887068262143e-06,
      "loss": 0.8656,
      "step": 200
    },
    {
      "epoch": 0.2,
      "grad_norm": 37.34405700410443,
      "learning_rate": 4.884692062647506e-06,
      "loss": 0.8682,
      "step": 201
    },
    {
      "epoch": 0.2,
      "grad_norm": 18.005378230428164,
      "learning_rate": 4.883490980137327e-06,
      "loss": 0.9104,
      "step": 202
    },
    {
      "epoch": 0.21,
      "grad_norm": 13.182657069815662,
      "learning_rate": 4.8822838237923164e-06,
      "loss": 0.8802,
      "step": 203
    },
    {
      "epoch": 0.21,
      "grad_norm": 20.126124496397114,
      "learning_rate": 4.881070596688664e-06,
      "loss": 0.9229,
      "step": 204
    },
    {
      "epoch": 0.21,
      "grad_norm": 8.025782615380805,
      "learning_rate": 4.8798513019180295e-06,
      "loss": 1.0137,
      "step": 205
    },
    {
      "epoch": 0.21,
      "grad_norm": 11.303199949810612,
      "learning_rate": 4.878625942587531e-06,
      "loss": 0.8844,
      "step": 206
    },
    {
      "epoch": 0.21,
      "grad_norm": 4.929950753578784,
      "learning_rate": 4.8773945218197475e-06,
      "loss": 0.8617,
      "step": 207
    },
    {
      "epoch": 0.21,
      "grad_norm": 10.422846526317487,
      "learning_rate": 4.876157042752698e-06,
      "loss": 0.8628,
      "step": 208
    },
    {
      "epoch": 0.21,
      "grad_norm": 5.406124330714249,
      "learning_rate": 4.874913508539844e-06,
      "loss": 0.8978,
      "step": 209
    },
    {
      "epoch": 0.21,
      "grad_norm": 10.297329933831254,
      "learning_rate": 4.8736639223500734e-06,
      "loss": 0.9213,
      "step": 210
    },
    {
      "epoch": 0.21,
      "grad_norm": 39.92063204593234,
      "learning_rate": 4.872408287367702e-06,
      "loss": 0.9151,
      "step": 211
    },
    {
      "epoch": 0.21,
      "grad_norm": 12.707518091656361,
      "learning_rate": 4.871146606792455e-06,
      "loss": 0.8726,
      "step": 212
    },
    {
      "epoch": 0.22,
      "grad_norm": 12.451656943060126,
      "learning_rate": 4.869878883839464e-06,
      "loss": 0.8831,
      "step": 213
    },
    {
      "epoch": 0.22,
      "grad_norm": 11.149775907376688,
      "learning_rate": 4.868605121739261e-06,
      "loss": 0.7839,
      "step": 214
    },
    {
      "epoch": 0.22,
      "grad_norm": 6.069136050487129,
      "learning_rate": 4.867325323737765e-06,
      "loss": 0.8095,
      "step": 215
    },
    {
      "epoch": 0.22,
      "grad_norm": 33.341330536587115,
      "learning_rate": 4.866039493096276e-06,
      "loss": 0.8652,
      "step": 216
    },
    {
      "epoch": 0.22,
      "grad_norm": 12.327283349644702,
      "learning_rate": 4.86474763309147e-06,
      "loss": 0.9332,
      "step": 217
    },
    {
      "epoch": 0.22,
      "grad_norm": 14.886718679591452,
      "learning_rate": 4.863449747015384e-06,
      "loss": 0.9123,
      "step": 218
    },
    {
      "epoch": 0.22,
      "grad_norm": 18.439054151579196,
      "learning_rate": 4.862145838175413e-06,
      "loss": 0.9952,
      "step": 219
    },
    {
      "epoch": 0.22,
      "grad_norm": 6.450032781643884,
      "learning_rate": 4.8608359098943014e-06,
      "loss": 0.8619,
      "step": 220
    },
    {
      "epoch": 0.22,
      "grad_norm": 5.76591848048908,
      "learning_rate": 4.859519965510129e-06,
      "loss": 0.8904,
      "step": 221
    },
    {
      "epoch": 0.22,
      "grad_norm": 19.230556702347112,
      "learning_rate": 4.858198008376308e-06,
      "loss": 0.8454,
      "step": 222
    },
    {
      "epoch": 0.23,
      "grad_norm": 12.812798200753768,
      "learning_rate": 4.856870041861576e-06,
      "loss": 0.8541,
      "step": 223
    },
    {
      "epoch": 0.23,
      "grad_norm": 20.396035600104717,
      "learning_rate": 4.8555360693499786e-06,
      "loss": 0.8172,
      "step": 224
    },
    {
      "epoch": 0.23,
      "grad_norm": 28.324370260593682,
      "learning_rate": 4.854196094240872e-06,
      "loss": 0.8665,
      "step": 225
    },
    {
      "epoch": 0.23,
      "grad_norm": 24.090190708297367,
      "learning_rate": 4.8528501199489045e-06,
      "loss": 0.8964,
      "step": 226
    },
    {
      "epoch": 0.23,
      "grad_norm": 51.69442396543579,
      "learning_rate": 4.8514981499040146e-06,
      "loss": 1.0263,
      "step": 227
    },
    {
      "epoch": 0.23,
      "grad_norm": 13.682340026316318,
      "learning_rate": 4.850140187551417e-06,
      "loss": 0.8913,
      "step": 228
    },
    {
      "epoch": 0.23,
      "grad_norm": 10.568651019154684,
      "learning_rate": 4.848776236351602e-06,
      "loss": 0.8711,
      "step": 229
    },
    {
      "epoch": 0.23,
      "grad_norm": 17.95282322327484,
      "learning_rate": 4.847406299780316e-06,
      "loss": 0.8534,
      "step": 230
    },
    {
      "epoch": 0.23,
      "grad_norm": 11.086531755411718,
      "learning_rate": 4.846030381328559e-06,
      "loss": 0.8577,
      "step": 231
    },
    {
      "epoch": 0.23,
      "grad_norm": 20.43851904662153,
      "learning_rate": 4.8446484845025754e-06,
      "loss": 0.8711,
      "step": 232
    },
    {
      "epoch": 0.24,
      "grad_norm": 9.019286512020804,
      "learning_rate": 4.843260612823844e-06,
      "loss": 0.8493,
      "step": 233
    },
    {
      "epoch": 0.24,
      "grad_norm": 28.047694724485186,
      "learning_rate": 4.84186676982907e-06,
      "loss": 0.8913,
      "step": 234
    },
    {
      "epoch": 0.24,
      "grad_norm": 12.170251493093959,
      "learning_rate": 4.840466959070174e-06,
      "loss": 0.8561,
      "step": 235
    },
    {
      "epoch": 0.24,
      "grad_norm": 8.931963450098133,
      "learning_rate": 4.839061184114286e-06,
      "loss": 0.831,
      "step": 236
    },
    {
      "epoch": 0.24,
      "grad_norm": 11.80446928845208,
      "learning_rate": 4.837649448543731e-06,
      "loss": 0.8314,
      "step": 237
    },
    {
      "epoch": 0.24,
      "grad_norm": 6.844616516126078,
      "learning_rate": 4.836231755956028e-06,
      "loss": 0.8366,
      "step": 238
    },
    {
      "epoch": 0.24,
      "grad_norm": 36.62950097854142,
      "learning_rate": 4.834808109963873e-06,
      "loss": 0.8674,
      "step": 239
    },
    {
      "epoch": 0.24,
      "grad_norm": 7.123248446431571,
      "learning_rate": 4.8333785141951335e-06,
      "loss": 0.8705,
      "step": 240
    },
    {
      "epoch": 0.24,
      "grad_norm": 53.32802137920739,
      "learning_rate": 4.83194297229284e-06,
      "loss": 0.8973,
      "step": 241
    },
    {
      "epoch": 0.24,
      "grad_norm": 17.134182129435587,
      "learning_rate": 4.830501487915175e-06,
      "loss": 0.8552,
      "step": 242
    },
    {
      "epoch": 0.25,
      "grad_norm": 18.056985775383417,
      "learning_rate": 4.829054064735463e-06,
      "loss": 0.8743,
      "step": 243
    },
    {
      "epoch": 0.25,
      "grad_norm": 23.339383649662626,
      "learning_rate": 4.827600706442164e-06,
      "loss": 0.8401,
      "step": 244
    },
    {
      "epoch": 0.25,
      "grad_norm": 44.31150837853898,
      "learning_rate": 4.826141416738861e-06,
      "loss": 0.8045,
      "step": 245
    },
    {
      "epoch": 0.25,
      "grad_norm": 46.76596172244923,
      "learning_rate": 4.824676199344253e-06,
      "loss": 0.8392,
      "step": 246
    },
    {
      "epoch": 0.25,
      "grad_norm": 19.466255218481894,
      "learning_rate": 4.823205057992145e-06,
      "loss": 0.8422,
      "step": 247
    },
    {
      "epoch": 0.25,
      "grad_norm": 31.8224396652641,
      "learning_rate": 4.821727996431435e-06,
      "loss": 1.0119,
      "step": 248
    },
    {
      "epoch": 0.25,
      "grad_norm": 28.30370717907097,
      "learning_rate": 4.8202450184261114e-06,
      "loss": 0.8858,
      "step": 249
    },
    {
      "epoch": 0.25,
      "grad_norm": 14.967566855603655,
      "learning_rate": 4.8187561277552376e-06,
      "loss": 0.8458,
      "step": 250
    },
    {
      "epoch": 0.25,
      "grad_norm": 40.45152173099994,
      "learning_rate": 4.817261328212942e-06,
      "loss": 0.8414,
      "step": 251
    },
    {
      "epoch": 0.25,
      "grad_norm": 44.04696789627551,
      "learning_rate": 4.815760623608415e-06,
      "loss": 0.8429,
      "step": 252
    },
    {
      "epoch": 0.26,
      "grad_norm": 22.669028244807055,
      "learning_rate": 4.8142540177658925e-06,
      "loss": 0.8377,
      "step": 253
    },
    {
      "epoch": 0.26,
      "grad_norm": 36.679299334418545,
      "learning_rate": 4.812741514524647e-06,
      "loss": 0.8908,
      "step": 254
    },
    {
      "epoch": 0.26,
      "grad_norm": 19.281368535984196,
      "learning_rate": 4.811223117738981e-06,
      "loss": 0.8768,
      "step": 255
    },
    {
      "epoch": 0.26,
      "grad_norm": 44.69042639681561,
      "learning_rate": 4.809698831278217e-06,
      "loss": 0.8919,
      "step": 256
    },
    {
      "epoch": 0.26,
      "grad_norm": 20.917629234011702,
      "learning_rate": 4.808168659026683e-06,
      "loss": 0.8443,
      "step": 257
    },
    {
      "epoch": 0.26,
      "grad_norm": 20.01311483179014,
      "learning_rate": 4.806632604883709e-06,
      "loss": 0.8775,
      "step": 258
    },
    {
      "epoch": 0.26,
      "grad_norm": 68.90691376267046,
      "learning_rate": 4.805090672763609e-06,
      "loss": 0.8913,
      "step": 259
    },
    {
      "epoch": 0.26,
      "grad_norm": 41.87111801337849,
      "learning_rate": 4.803542866595681e-06,
      "loss": 0.823,
      "step": 260
    },
    {
      "epoch": 0.26,
      "grad_norm": 25.071970545031796,
      "learning_rate": 4.801989190324187e-06,
      "loss": 0.8796,
      "step": 261
    },
    {
      "epoch": 0.26,
      "grad_norm": 109.413625000989,
      "learning_rate": 4.800429647908354e-06,
      "loss": 0.858,
      "step": 262
    },
    {
      "epoch": 0.27,
      "grad_norm": 43.38280987527835,
      "learning_rate": 4.7988642433223535e-06,
      "loss": 0.8521,
      "step": 263
    },
    {
      "epoch": 0.27,
      "grad_norm": 32.13753250435139,
      "learning_rate": 4.797292980555293e-06,
      "loss": 0.8757,
      "step": 264
    },
    {
      "epoch": 0.27,
      "grad_norm": 46.78789327684748,
      "learning_rate": 4.795715863611212e-06,
      "loss": 0.895,
      "step": 265
    },
    {
      "epoch": 0.27,
      "grad_norm": 43.281917729979604,
      "learning_rate": 4.7941328965090705e-06,
      "loss": 0.8568,
      "step": 266
    },
    {
      "epoch": 0.27,
      "grad_norm": 41.384686856921995,
      "learning_rate": 4.7925440832827305e-06,
      "loss": 0.9944,
      "step": 267
    },
    {
      "epoch": 0.27,
      "grad_norm": 40.92527648821274,
      "learning_rate": 4.790949427980956e-06,
      "loss": 0.8338,
      "step": 268
    },
    {
      "epoch": 0.27,
      "grad_norm": 82.6200683793308,
      "learning_rate": 4.789348934667397e-06,
      "loss": 0.844,
      "step": 269
    },
    {
      "epoch": 0.27,
      "grad_norm": 46.48434653058032,
      "learning_rate": 4.787742607420579e-06,
      "loss": 0.8408,
      "step": 270
    },
    {
      "epoch": 0.27,
      "grad_norm": 100.35892484768685,
      "learning_rate": 4.786130450333897e-06,
      "loss": 0.8429,
      "step": 271
    },
    {
      "epoch": 0.27,
      "grad_norm": 104.75380555405746,
      "learning_rate": 4.784512467515599e-06,
      "loss": 0.8469,
      "step": 272
    },
    {
      "epoch": 0.28,
      "grad_norm": 62.43051068810147,
      "learning_rate": 4.782888663088781e-06,
      "loss": 0.8564,
      "step": 273
    },
    {
      "epoch": 0.28,
      "grad_norm": 93.44666911645837,
      "learning_rate": 4.7812590411913755e-06,
      "loss": 0.8957,
      "step": 274
    },
    {
      "epoch": 0.28,
      "grad_norm": 19.862765945899945,
      "learning_rate": 4.779623605976135e-06,
      "loss": 1.0236,
      "step": 275
    },
    {
      "epoch": 0.28,
      "grad_norm": 46.26361057838037,
      "learning_rate": 4.777982361610629e-06,
      "loss": 0.9026,
      "step": 276
    },
    {
      "epoch": 0.28,
      "grad_norm": 52.97493760905037,
      "learning_rate": 4.77633531227723e-06,
      "loss": 0.871,
      "step": 277
    },
    {
      "epoch": 0.28,
      "grad_norm": 34.041391067268314,
      "learning_rate": 4.774682462173105e-06,
      "loss": 0.8587,
      "step": 278
    },
    {
      "epoch": 0.28,
      "grad_norm": 83.06466502493622,
      "learning_rate": 4.773023815510199e-06,
      "loss": 0.9954,
      "step": 279
    },
    {
      "epoch": 0.28,
      "grad_norm": 32.778966883694586,
      "learning_rate": 4.771359376515231e-06,
      "loss": 0.8576,
      "step": 280
    },
    {
      "epoch": 0.28,
      "grad_norm": 58.90680833070677,
      "learning_rate": 4.769689149429683e-06,
      "loss": 0.8974,
      "step": 281
    },
    {
      "epoch": 0.28,
      "grad_norm": 18.71277300441414,
      "learning_rate": 4.768013138509781e-06,
      "loss": 0.804,
      "step": 282
    },
    {
      "epoch": 0.29,
      "grad_norm": 107.32301554273414,
      "learning_rate": 4.766331348026493e-06,
      "loss": 0.7948,
      "step": 283
    },
    {
      "epoch": 0.29,
      "grad_norm": 32.37059166820261,
      "learning_rate": 4.764643782265517e-06,
      "loss": 0.7972,
      "step": 284
    },
    {
      "epoch": 0.29,
      "grad_norm": 60.12326441678363,
      "learning_rate": 4.762950445527264e-06,
      "loss": 0.8201,
      "step": 285
    },
    {
      "epoch": 0.29,
      "grad_norm": 53.85163139056427,
      "learning_rate": 4.7612513421268546e-06,
      "loss": 0.8695,
      "step": 286
    },
    {
      "epoch": 0.29,
      "grad_norm": 62.74196467038289,
      "learning_rate": 4.759546476394103e-06,
      "loss": 0.8713,
      "step": 287
    },
    {
      "epoch": 0.29,
      "grad_norm": 39.83211475405053,
      "learning_rate": 4.757835852673506e-06,
      "loss": 0.7924,
      "step": 288
    },
    {
      "epoch": 0.29,
      "grad_norm": 44.36619825791928,
      "learning_rate": 4.756119475324237e-06,
      "loss": 0.9933,
      "step": 289
    },
    {
      "epoch": 0.29,
      "grad_norm": 37.10810986202249,
      "learning_rate": 4.754397348720129e-06,
      "loss": 0.8461,
      "step": 290
    },
    {
      "epoch": 0.29,
      "grad_norm": 168.41339372943395,
      "learning_rate": 4.752669477249666e-06,
      "loss": 1.0045,
      "step": 291
    },
    {
      "epoch": 0.3,
      "grad_norm": 38.375750184514935,
      "learning_rate": 4.750935865315972e-06,
      "loss": 0.8597,
      "step": 292
    },
    {
      "epoch": 0.3,
      "grad_norm": 139.29153601089564,
      "learning_rate": 4.749196517336798e-06,
      "loss": 0.8526,
      "step": 293
    },
    {
      "epoch": 0.3,
      "grad_norm": 84.96134212632315,
      "learning_rate": 4.7474514377445155e-06,
      "loss": 0.8705,
      "step": 294
    },
    {
      "epoch": 0.3,
      "grad_norm": 79.39624824781504,
      "learning_rate": 4.745700630986097e-06,
      "loss": 0.8023,
      "step": 295
    },
    {
      "epoch": 0.3,
      "grad_norm": 20.962475475808905,
      "learning_rate": 4.743944101523116e-06,
      "loss": 0.9936,
      "step": 296
    },
    {
      "epoch": 0.3,
      "grad_norm": 18.755287063878363,
      "learning_rate": 4.742181853831721e-06,
      "loss": 0.822,
      "step": 297
    },
    {
      "epoch": 0.3,
      "grad_norm": 14.452455674915326,
      "learning_rate": 4.740413892402639e-06,
      "loss": 0.8518,
      "step": 298
    },
    {
      "epoch": 0.3,
      "grad_norm": 123.72945221996332,
      "learning_rate": 4.7386402217411555e-06,
      "loss": 0.846,
      "step": 299
    },
    {
      "epoch": 0.3,
      "grad_norm": 114.85788665830623,
      "learning_rate": 4.7368608463671015e-06,
      "loss": 0.8264,
      "step": 300
    },
    {
      "epoch": 0.3,
      "grad_norm": 47.28480009336326,
      "learning_rate": 4.73507577081485e-06,
      "loss": 0.8695,
      "step": 301
    },
    {
      "epoch": 0.31,
      "grad_norm": 41.276139747783475,
      "learning_rate": 4.733284999633297e-06,
      "loss": 0.8486,
      "step": 302
    },
    {
      "epoch": 0.31,
      "grad_norm": 17.925091199690936,
      "learning_rate": 4.731488537385853e-06,
      "loss": 0.8492,
      "step": 303
    },
    {
      "epoch": 0.31,
      "grad_norm": 61.187483963098465,
      "learning_rate": 4.7296863886504315e-06,
      "loss": 0.8728,
      "step": 304
    },
    {
      "epoch": 0.31,
      "grad_norm": 13.651808271590621,
      "learning_rate": 4.7278785580194365e-06,
      "loss": 0.8495,
      "step": 305
    },
    {
      "epoch": 0.31,
      "grad_norm": 27.644885260501276,
      "learning_rate": 4.726065050099752e-06,
      "loss": 0.848,
      "step": 306
    },
    {
      "epoch": 0.31,
      "grad_norm": 53.55797464707186,
      "learning_rate": 4.7242458695127275e-06,
      "loss": 0.894,
      "step": 307
    },
    {
      "epoch": 0.31,
      "grad_norm": 48.26707421432808,
      "learning_rate": 4.72242102089417e-06,
      "loss": 0.8572,
      "step": 308
    },
    {
      "epoch": 0.31,
      "grad_norm": 35.94106221579397,
      "learning_rate": 4.720590508894329e-06,
      "loss": 0.8329,
      "step": 309
    },
    {
      "epoch": 0.31,
      "grad_norm": 118.69134781259584,
      "learning_rate": 4.718754338177887e-06,
      "loss": 0.8335,
      "step": 310
    },
    {
      "epoch": 0.31,
      "grad_norm": 10.673509155086721,
      "learning_rate": 4.7169125134239456e-06,
      "loss": 0.8335,
      "step": 311
    },
    {
      "epoch": 0.32,
      "grad_norm": 20.703878872323784,
      "learning_rate": 4.715065039326015e-06,
      "loss": 0.8741,
      "step": 312
    },
    {
      "epoch": 0.32,
      "grad_norm": 42.51236987142145,
      "learning_rate": 4.713211920592003e-06,
      "loss": 0.8686,
      "step": 313
    },
    {
      "epoch": 0.32,
      "grad_norm": 31.669223786445617,
      "learning_rate": 4.711353161944199e-06,
      "loss": 0.844,
      "step": 314
    },
    {
      "epoch": 0.32,
      "grad_norm": 55.15593720360342,
      "learning_rate": 4.709488768119266e-06,
      "loss": 0.8519,
      "step": 315
    },
    {
      "epoch": 0.32,
      "grad_norm": 95.39927875998099,
      "learning_rate": 4.707618743868226e-06,
      "loss": 0.8131,
      "step": 316
    },
    {
      "epoch": 0.32,
      "grad_norm": 10.654454638290147,
      "learning_rate": 4.705743093956452e-06,
      "loss": 0.9639,
      "step": 317
    },
    {
      "epoch": 0.32,
      "grad_norm": 38.23375131308745,
      "learning_rate": 4.703861823163649e-06,
      "loss": 0.8365,
      "step": 318
    },
    {
      "epoch": 0.32,
      "grad_norm": 33.691960306044486,
      "learning_rate": 4.701974936283848e-06,
      "loss": 0.9664,
      "step": 319
    },
    {
      "epoch": 0.32,
      "grad_norm": 10.611314812796477,
      "learning_rate": 4.700082438125391e-06,
      "loss": 0.8283,
      "step": 320
    },
    {
      "epoch": 0.32,
      "grad_norm": 20.55619262108289,
      "learning_rate": 4.6981843335109176e-06,
      "loss": 0.8272,
      "step": 321
    },
    {
      "epoch": 0.33,
      "grad_norm": 34.03935919948644,
      "learning_rate": 4.696280627277356e-06,
      "loss": 0.8481,
      "step": 322
    },
    {
      "epoch": 0.33,
      "grad_norm": 20.169566617502596,
      "learning_rate": 4.69437132427591e-06,
      "loss": 0.8419,
      "step": 323
    },
    {
      "epoch": 0.33,
      "grad_norm": 40.583282897440746,
      "learning_rate": 4.692456429372044e-06,
      "loss": 0.8535,
      "step": 324
    },
    {
      "epoch": 0.33,
      "grad_norm": 8.056427978564315,
      "learning_rate": 4.690535947445471e-06,
      "loss": 0.851,
      "step": 325
    },
    {
      "epoch": 0.33,
      "grad_norm": 24.00351867462924,
      "learning_rate": 4.688609883390144e-06,
      "loss": 0.8446,
      "step": 326
    },
    {
      "epoch": 0.33,
      "grad_norm": 16.11523409967761,
      "learning_rate": 4.686678242114239e-06,
      "loss": 0.7832,
      "step": 327
    },
    {
      "epoch": 0.33,
      "grad_norm": 8.214685204880942,
      "learning_rate": 4.6847410285401465e-06,
      "loss": 0.839,
      "step": 328
    },
    {
      "epoch": 0.33,
      "grad_norm": 52.51672691072579,
      "learning_rate": 4.682798247604453e-06,
      "loss": 0.8327,
      "step": 329
    },
    {
      "epoch": 0.33,
      "grad_norm": 8.016142819670371,
      "learning_rate": 4.680849904257938e-06,
      "loss": 0.8135,
      "step": 330
    },
    {
      "epoch": 0.33,
      "grad_norm": 46.655885402782395,
      "learning_rate": 4.67889600346555e-06,
      "loss": 0.7983,
      "step": 331
    },
    {
      "epoch": 0.34,
      "grad_norm": 16.138965226785817,
      "learning_rate": 4.676936550206402e-06,
      "loss": 0.8542,
      "step": 332
    },
    {
      "epoch": 0.34,
      "grad_norm": 55.643105621600114,
      "learning_rate": 4.674971549473757e-06,
      "loss": 0.9038,
      "step": 333
    },
    {
      "epoch": 0.34,
      "grad_norm": 16.31989957270255,
      "learning_rate": 4.673001006275013e-06,
      "loss": 0.8399,
      "step": 334
    },
    {
      "epoch": 0.34,
      "grad_norm": 14.138316979026195,
      "learning_rate": 4.671024925631694e-06,
      "loss": 0.7812,
      "step": 335
    },
    {
      "epoch": 0.34,
      "grad_norm": 50.2685770405374,
      "learning_rate": 4.669043312579433e-06,
      "loss": 0.8358,
      "step": 336
    },
    {
      "epoch": 0.34,
      "grad_norm": 9.047670202154327,
      "learning_rate": 4.667056172167962e-06,
      "loss": 0.8361,
      "step": 337
    },
    {
      "epoch": 0.34,
      "grad_norm": 15.329346838170585,
      "learning_rate": 4.665063509461098e-06,
      "loss": 0.8112,
      "step": 338
    },
    {
      "epoch": 0.34,
      "grad_norm": 34.581957147308756,
      "learning_rate": 4.6630653295367286e-06,
      "loss": 0.8819,
      "step": 339
    },
    {
      "epoch": 0.34,
      "grad_norm": 70.00837894867692,
      "learning_rate": 4.6610616374868066e-06,
      "loss": 0.8316,
      "step": 340
    },
    {
      "epoch": 0.34,
      "grad_norm": 19.54707036059632,
      "learning_rate": 4.659052438417326e-06,
      "loss": 0.8171,
      "step": 341
    },
    {
      "epoch": 0.35,
      "grad_norm": 122.51019371942445,
      "learning_rate": 4.6570377374483155e-06,
      "loss": 0.8908,
      "step": 342
    },
    {
      "epoch": 0.35,
      "grad_norm": 24.88314319301423,
      "learning_rate": 4.655017539713826e-06,
      "loss": 0.8623,
      "step": 343
    },
    {
      "epoch": 0.35,
      "grad_norm": 17.959493645537147,
      "learning_rate": 4.652991850361912e-06,
      "loss": 0.8354,
      "step": 344
    },
    {
      "epoch": 0.35,
      "grad_norm": 13.1801020702163,
      "learning_rate": 4.650960674554627e-06,
      "loss": 0.8247,
      "step": 345
    },
    {
      "epoch": 0.35,
      "grad_norm": 14.455355688503463,
      "learning_rate": 4.648924017468003e-06,
      "loss": 0.8286,
      "step": 346
    },
    {
      "epoch": 0.35,
      "grad_norm": 8.731820345524651,
      "learning_rate": 4.64688188429204e-06,
      "loss": 0.8327,
      "step": 347
    },
    {
      "epoch": 0.35,
      "grad_norm": 42.87517105426398,
      "learning_rate": 4.644834280230693e-06,
      "loss": 0.8393,
      "step": 348
    },
    {
      "epoch": 0.35,
      "grad_norm": 50.868804589197836,
      "learning_rate": 4.642781210501858e-06,
      "loss": 0.8598,
      "step": 349
    },
    {
      "epoch": 0.35,
      "grad_norm": 5.919995360552276,
      "learning_rate": 4.640722680337358e-06,
      "loss": 0.962,
      "step": 350
    },
    {
      "epoch": 0.35,
      "grad_norm": 26.64669124982461,
      "learning_rate": 4.638658694982936e-06,
      "loss": 0.798,
      "step": 351
    },
    {
      "epoch": 0.36,
      "grad_norm": 26.1192454778817,
      "learning_rate": 4.6365892596982295e-06,
      "loss": 0.8762,
      "step": 352
    },
    {
      "epoch": 0.36,
      "grad_norm": 18.9062598091514,
      "learning_rate": 4.634514379756769e-06,
      "loss": 0.7937,
      "step": 353
    },
    {
      "epoch": 0.36,
      "grad_norm": 11.482713649160976,
      "learning_rate": 4.632434060445956e-06,
      "loss": 0.8616,
      "step": 354
    },
    {
      "epoch": 0.36,
      "grad_norm": 8.370599147559595,
      "learning_rate": 4.6303483070670574e-06,
      "loss": 0.7956,
      "step": 355
    },
    {
      "epoch": 0.36,
      "grad_norm": 12.925585913640592,
      "learning_rate": 4.628257124935183e-06,
      "loss": 0.8246,
      "step": 356
    },
    {
      "epoch": 0.36,
      "grad_norm": 13.662180432298918,
      "learning_rate": 4.626160519379279e-06,
      "loss": 0.8376,
      "step": 357
    },
    {
      "epoch": 0.36,
      "grad_norm": 30.308232889166852,
      "learning_rate": 4.624058495742115e-06,
      "loss": 0.8591,
      "step": 358
    },
    {
      "epoch": 0.36,
      "grad_norm": 64.26811094645683,
      "learning_rate": 4.621951059380259e-06,
      "loss": 0.8079,
      "step": 359
    },
    {
      "epoch": 0.36,
      "grad_norm": 34.37337832128252,
      "learning_rate": 4.619838215664082e-06,
      "loss": 0.7918,
      "step": 360
    },
    {
      "epoch": 0.36,
      "grad_norm": 15.079822107556895,
      "learning_rate": 4.617719969977729e-06,
      "loss": 0.8811,
      "step": 361
    },
    {
      "epoch": 0.37,
      "grad_norm": 11.388476367649812,
      "learning_rate": 4.615596327719111e-06,
      "loss": 0.8445,
      "step": 362
    },
    {
      "epoch": 0.37,
      "grad_norm": 8.881147580394376,
      "learning_rate": 4.613467294299893e-06,
      "loss": 0.8489,
      "step": 363
    },
    {
      "epoch": 0.37,
      "grad_norm": 7.3102827631277885,
      "learning_rate": 4.611332875145476e-06,
      "loss": 0.8141,
      "step": 364
    },
    {
      "epoch": 0.37,
      "grad_norm": 19.341931669078043,
      "learning_rate": 4.609193075694989e-06,
      "loss": 0.8301,
      "step": 365
    },
    {
      "epoch": 0.37,
      "grad_norm": 16.293526833129757,
      "learning_rate": 4.607047901401267e-06,
      "loss": 0.8472,
      "step": 366
    },
    {
      "epoch": 0.37,
      "grad_norm": 12.701861210256677,
      "learning_rate": 4.604897357730846e-06,
      "loss": 0.8258,
      "step": 367
    },
    {
      "epoch": 0.37,
      "grad_norm": 12.022062250305899,
      "learning_rate": 4.60274145016394e-06,
      "loss": 0.7865,
      "step": 368
    },
    {
      "epoch": 0.37,
      "grad_norm": 8.942347186149718,
      "learning_rate": 4.600580184194436e-06,
      "loss": 0.8538,
      "step": 369
    },
    {
      "epoch": 0.37,
      "grad_norm": 7.084710601938015,
      "learning_rate": 4.598413565329876e-06,
      "loss": 0.8213,
      "step": 370
    },
    {
      "epoch": 0.37,
      "grad_norm": 8.773416021429492,
      "learning_rate": 4.596241599091438e-06,
      "loss": 0.8281,
      "step": 371
    },
    {
      "epoch": 0.38,
      "grad_norm": 11.412681654994278,
      "learning_rate": 4.59406429101393e-06,
      "loss": 0.8073,
      "step": 372
    },
    {
      "epoch": 0.38,
      "grad_norm": 33.38456658772077,
      "learning_rate": 4.591881646645775e-06,
      "loss": 0.8078,
      "step": 373
    },
    {
      "epoch": 0.38,
      "grad_norm": 20.116008377655895,
      "learning_rate": 4.589693671548989e-06,
      "loss": 0.8791,
      "step": 374
    },
    {
      "epoch": 0.38,
      "grad_norm": 25.441358730169238,
      "learning_rate": 4.587500371299176e-06,
      "loss": 0.8096,
      "step": 375
    },
    {
      "epoch": 0.38,
      "grad_norm": 10.632165430252845,
      "learning_rate": 4.585301751485508e-06,
      "loss": 0.8124,
      "step": 376
    },
    {
      "epoch": 0.38,
      "grad_norm": 24.69121638703892,
      "learning_rate": 4.583097817710716e-06,
      "loss": 0.8292,
      "step": 377
    },
    {
      "epoch": 0.38,
      "grad_norm": 42.373835243000144,
      "learning_rate": 4.580888575591068e-06,
      "loss": 0.8124,
      "step": 378
    },
    {
      "epoch": 0.38,
      "grad_norm": 15.688601624623248,
      "learning_rate": 4.578674030756364e-06,
      "loss": 0.834,
      "step": 379
    },
    {
      "epoch": 0.38,
      "grad_norm": 12.872910424438674,
      "learning_rate": 4.5764541888499116e-06,
      "loss": 0.8293,
      "step": 380
    },
    {
      "epoch": 0.38,
      "grad_norm": 11.085141619803203,
      "learning_rate": 4.574229055528522e-06,
      "loss": 0.8419,
      "step": 381
    },
    {
      "epoch": 0.39,
      "grad_norm": 20.128058817237306,
      "learning_rate": 4.571998636462487e-06,
      "loss": 0.8072,
      "step": 382
    },
    {
      "epoch": 0.39,
      "grad_norm": 45.8693129268715,
      "learning_rate": 4.5697629373355694e-06,
      "loss": 0.8516,
      "step": 383
    },
    {
      "epoch": 0.39,
      "grad_norm": 12.540832336612032,
      "learning_rate": 4.567521963844987e-06,
      "loss": 0.7809,
      "step": 384
    },
    {
      "epoch": 0.39,
      "grad_norm": 7.272618009907343,
      "learning_rate": 4.5652757217014e-06,
      "loss": 0.8836,
      "step": 385
    },
    {
      "epoch": 0.39,
      "grad_norm": 25.542789288767313,
      "learning_rate": 4.56302421662889e-06,
      "loss": 0.8266,
      "step": 386
    },
    {
      "epoch": 0.39,
      "grad_norm": 21.60322316347993,
      "learning_rate": 4.560767454364955e-06,
      "loss": 0.8591,
      "step": 387
    },
    {
      "epoch": 0.39,
      "grad_norm": 23.667006186382807,
      "learning_rate": 4.5585054406604865e-06,
      "loss": 0.8391,
      "step": 388
    },
    {
      "epoch": 0.39,
      "grad_norm": 9.485503231699525,
      "learning_rate": 4.556238181279761e-06,
      "loss": 0.8561,
      "step": 389
    },
    {
      "epoch": 0.39,
      "grad_norm": 24.57297216692732,
      "learning_rate": 4.55396568200042e-06,
      "loss": 0.8277,
      "step": 390
    },
    {
      "epoch": 0.4,
      "grad_norm": 13.729895392458525,
      "learning_rate": 4.551687948613459e-06,
      "loss": 0.8032,
      "step": 391
    },
    {
      "epoch": 0.4,
      "grad_norm": 18.865208120754964,
      "learning_rate": 4.549404986923213e-06,
      "loss": 0.863,
      "step": 392
    },
    {
      "epoch": 0.4,
      "grad_norm": 22.190589070773914,
      "learning_rate": 4.547116802747335e-06,
      "loss": 0.8989,
      "step": 393
    },
    {
      "epoch": 0.4,
      "grad_norm": 21.450617982266408,
      "learning_rate": 4.544823401916794e-06,
      "loss": 0.792,
      "step": 394
    },
    {
      "epoch": 0.4,
      "grad_norm": 35.65422229133744,
      "learning_rate": 4.542524790275848e-06,
      "loss": 0.845,
      "step": 395
    },
    {
      "epoch": 0.4,
      "grad_norm": 11.364569307665759,
      "learning_rate": 4.5402209736820325e-06,
      "loss": 0.829,
      "step": 396
    },
    {
      "epoch": 0.4,
      "grad_norm": 21.94205192387575,
      "learning_rate": 4.537911958006149e-06,
      "loss": 0.8234,
      "step": 397
    },
    {
      "epoch": 0.4,
      "grad_norm": 10.63181070102972,
      "learning_rate": 4.5355977491322485e-06,
      "loss": 0.9657,
      "step": 398
    },
    {
      "epoch": 0.4,
      "grad_norm": 98.56328297192321,
      "learning_rate": 4.5332783529576145e-06,
      "loss": 0.851,
      "step": 399
    },
    {
      "epoch": 0.4,
      "grad_norm": 15.486561420545545,
      "learning_rate": 4.530953775392749e-06,
      "loss": 1.0024,
      "step": 400
    },
    {
      "epoch": 0.41,
      "grad_norm": 5.36867796146416,
      "learning_rate": 4.528624022361359e-06,
      "loss": 0.8203,
      "step": 401
    },
    {
      "epoch": 0.41,
      "grad_norm": 23.16373737065163,
      "learning_rate": 4.5262890998003374e-06,
      "loss": 0.8405,
      "step": 402
    },
    {
      "epoch": 0.41,
      "grad_norm": 24.557753611805698,
      "learning_rate": 4.523949013659754e-06,
      "loss": 0.8594,
      "step": 403
    },
    {
      "epoch": 0.41,
      "grad_norm": 239.577821661523,
      "learning_rate": 4.521603769902835e-06,
      "loss": 0.8801,
      "step": 404
    },
    {
      "epoch": 0.41,
      "grad_norm": 17.722681590802623,
      "learning_rate": 4.5192533745059494e-06,
      "loss": 0.7956,
      "step": 405
    },
    {
      "epoch": 0.41,
      "grad_norm": 21.064904857136227,
      "learning_rate": 4.5168978334585955e-06,
      "loss": 0.849,
      "step": 406
    },
    {
      "epoch": 0.41,
      "grad_norm": 29.204434577367632,
|
"learning_rate": 4.514537152763384e-06, |
|
"loss": 0.8081, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 31.677494628738625, |
|
"learning_rate": 4.512171338436022e-06, |
|
"loss": 0.8123, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 28.259106443263146, |
|
"learning_rate": 4.509800396505298e-06, |
|
"loss": 0.7939, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 31.436330986692848, |
|
"learning_rate": 4.507424333013069e-06, |
|
"loss": 0.9276, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 62.15027720090623, |
|
"learning_rate": 4.505043154014243e-06, |
|
"loss": 0.8111, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 30.339486412023945, |
|
"learning_rate": 4.502656865576762e-06, |
|
"loss": 0.8102, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 29.14289188449159, |
|
"learning_rate": 4.500265473781591e-06, |
|
"loss": 0.8526, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 16.33958767689576, |
|
"learning_rate": 4.497868984722698e-06, |
|
"loss": 0.7931, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.946767762788769, |
|
"learning_rate": 4.495467404507039e-06, |
|
"loss": 0.7723, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 16.808686330416283, |
|
"learning_rate": 4.493060739254548e-06, |
|
"loss": 0.7941, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 11.193622425241314, |
|
"learning_rate": 4.4906489950981125e-06, |
|
"loss": 0.8121, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 7.172258662371558, |
|
"learning_rate": 4.4882321781835666e-06, |
|
"loss": 0.8399, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 7.172757454398357, |
|
"learning_rate": 4.485810294669668e-06, |
|
"loss": 0.7951, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 12.912002056626209, |
|
"learning_rate": 4.4833833507280884e-06, |
|
"loss": 0.8724, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 9.5299105957465, |
|
"learning_rate": 4.4809513525433925e-06, |
|
"loss": 0.8688, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.942401217715412, |
|
"learning_rate": 4.478514306313026e-06, |
|
"loss": 0.8349, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 14.70118213793697, |
|
"learning_rate": 4.476072218247297e-06, |
|
"loss": 0.8126, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 35.64062421380103, |
|
"learning_rate": 4.473625094569366e-06, |
|
"loss": 0.8502, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 9.515237433838145, |
|
"learning_rate": 4.471172941515219e-06, |
|
"loss": 0.8318, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.52525000568301, |
|
"learning_rate": 4.4687157653336645e-06, |
|
"loss": 0.836, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 14.93156121415628, |
|
"learning_rate": 4.466253572286308e-06, |
|
"loss": 0.8386, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.403603957950335, |
|
"learning_rate": 4.463786368647541e-06, |
|
"loss": 0.8026, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.101795351603165, |
|
"learning_rate": 4.461314160704521e-06, |
|
"loss": 0.8424, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.657146697628601, |
|
"learning_rate": 4.458836954757161e-06, |
|
"loss": 0.8853, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.8266562887019315, |
|
"learning_rate": 4.456354757118109e-06, |
|
"loss": 0.8311, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.734850079452724, |
|
"learning_rate": 4.453867574112733e-06, |
|
"loss": 0.8143, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 8.241211166354187, |
|
"learning_rate": 4.4513754120791065e-06, |
|
"loss": 0.8309, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.984336730679723, |
|
"learning_rate": 4.448878277367988e-06, |
|
"loss": 0.834, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.435146207156021, |
|
"learning_rate": 4.446376176342812e-06, |
|
"loss": 0.8344, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.354979176494093, |
|
"learning_rate": 4.443869115379667e-06, |
|
"loss": 0.8822, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.740333651131116, |
|
"learning_rate": 4.441357100867278e-06, |
|
"loss": 0.7742, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.7966105580368605, |
|
"learning_rate": 4.438840139206998e-06, |
|
"loss": 0.8741, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.13633031473771, |
|
"learning_rate": 4.436318236812782e-06, |
|
"loss": 0.8654, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.791216706993845, |
|
"learning_rate": 4.433791400111179e-06, |
|
"loss": 0.8262, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 6.234562179308168, |
|
"learning_rate": 4.431259635541312e-06, |
|
"loss": 0.8392, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.150150040963309, |
|
"learning_rate": 4.428722949554858e-06, |
|
"loss": 0.8191, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.563262237056235, |
|
"learning_rate": 4.426181348616038e-06, |
|
"loss": 0.7961, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.409500438314574, |
|
"learning_rate": 4.423634839201601e-06, |
|
"loss": 0.8554, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.103854158767785, |
|
"learning_rate": 4.421083427800795e-06, |
|
"loss": 0.8073, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.956272764605345, |
|
"learning_rate": 4.41852712091537e-06, |
|
"loss": 0.822, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 47.36121108696217, |
|
"learning_rate": 4.415965925059544e-06, |
|
"loss": 0.8117, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 7.6663655272761515, |
|
"learning_rate": 4.413399846759998e-06, |
|
"loss": 0.8469, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 7.344767060016519, |
|
"learning_rate": 4.4108288925558505e-06, |
|
"loss": 0.8716, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.520414583397527, |
|
"learning_rate": 4.40825306899865e-06, |
|
"loss": 0.8154, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.337724239493111, |
|
"learning_rate": 4.405672382652349e-06, |
|
"loss": 0.8204, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.931095236367972, |
|
"learning_rate": 4.403086840093297e-06, |
|
"loss": 0.7721, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 4.283750585511646, |
|
"learning_rate": 4.400496447910212e-06, |
|
"loss": 0.8099, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 4.885379997216326, |
|
"learning_rate": 4.397901212704176e-06, |
|
"loss": 0.7897, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.083991051490237, |
|
"learning_rate": 4.3953011410886105e-06, |
|
"loss": 0.8497, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 4.86609034632746, |
|
"learning_rate": 4.392696239689261e-06, |
|
"loss": 0.8271, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 7.4379794775118615, |
|
"learning_rate": 4.390086515144179e-06, |
|
"loss": 0.8416, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 10.915187189394999, |
|
"learning_rate": 4.387471974103713e-06, |
|
"loss": 0.7873, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.839190871758675, |
|
"learning_rate": 4.384852623230478e-06, |
|
"loss": 0.7898, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.670517697152507, |
|
"learning_rate": 4.38222846919935e-06, |
|
"loss": 0.8431, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.4010524847241115, |
|
"learning_rate": 4.379599518697444e-06, |
|
"loss": 0.8489, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.505346933106831, |
|
"learning_rate": 4.3769657784240975e-06, |
|
"loss": 0.8056, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 4.519181163960888, |
|
"learning_rate": 4.3743272550908545e-06, |
|
"loss": 0.82, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.2720001800167475, |
|
"learning_rate": 4.3716839554214475e-06, |
|
"loss": 0.8261, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.48180004445423, |
|
"learning_rate": 4.369035886151778e-06, |
|
"loss": 0.7666, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 10.355727305381658, |
|
"learning_rate": 4.366383054029907e-06, |
|
"loss": 0.8203, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.185243645713917, |
|
"learning_rate": 4.363725465816028e-06, |
|
"loss": 0.8336, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 4.537447056649908, |
|
"learning_rate": 4.3610631282824556e-06, |
|
"loss": 0.8141, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 10.059335937073103, |
|
"learning_rate": 4.358396048213609e-06, |
|
"loss": 0.9323, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 7.558752810891884, |
|
"learning_rate": 4.355724232405989e-06, |
|
"loss": 0.8052, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 8.901083371299169, |
|
"learning_rate": 4.35304768766817e-06, |
|
"loss": 0.8681, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.749432223564655, |
|
"learning_rate": 4.350366420820771e-06, |
|
"loss": 0.7636, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.467983174988701, |
|
"learning_rate": 4.3476804386964486e-06, |
|
"loss": 0.798, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 14.340080127005345, |
|
"learning_rate": 4.3449897481398735e-06, |
|
"loss": 0.8325, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 10.900246873425738, |
|
"learning_rate": 4.342294356007715e-06, |
|
"loss": 0.8191, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 27.212678856649585, |
|
"learning_rate": 4.339594269168624e-06, |
|
"loss": 0.7983, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 12.291890124401506, |
|
"learning_rate": 4.336889494503215e-06, |
|
"loss": 0.8223, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 64.09126736889749, |
|
"learning_rate": 4.3341800389040465e-06, |
|
"loss": 0.7935, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.384829430193068, |
|
"learning_rate": 4.331465909275608e-06, |
|
"loss": 0.8063, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.980479175502184, |
|
"learning_rate": 4.3287471125342994e-06, |
|
"loss": 0.8264, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 14.891763292361196, |
|
"learning_rate": 4.326023655608412e-06, |
|
"loss": 0.8724, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 47.12998834101147, |
|
"learning_rate": 4.3232955454381126e-06, |
|
"loss": 0.9869, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.471610169915462, |
|
"learning_rate": 4.320562788975429e-06, |
|
"loss": 0.8504, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.687055238382911, |
|
"learning_rate": 4.317825393184226e-06, |
|
"loss": 0.8192, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 6.0149060191653625, |
|
"learning_rate": 4.315083365040193e-06, |
|
"loss": 0.8474, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.497573287322376, |
|
"learning_rate": 4.31233671153082e-06, |
|
"loss": 0.7821, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.877574667166528, |
|
"learning_rate": 4.30958543965539e-06, |
|
"loss": 0.7886, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 8.602706507929737, |
|
"learning_rate": 4.306829556424949e-06, |
|
"loss": 0.922, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.056672401512086, |
|
"learning_rate": 4.3040690688622965e-06, |
|
"loss": 0.824, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 19.382968104583764, |
|
"learning_rate": 4.3013039840019675e-06, |
|
"loss": 0.8193, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 4.880915789014604, |
|
"learning_rate": 4.2985343088902096e-06, |
|
"loss": 0.8291, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 12.940372419412574, |
|
"learning_rate": 4.295760050584966e-06, |
|
"loss": 0.8503, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 5.731004069006167, |
|
"learning_rate": 4.292981216155864e-06, |
|
"loss": 0.7975, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 14.301842614329003, |
|
"learning_rate": 4.2901978126841885e-06, |
|
"loss": 0.8179, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 4.304262507046851, |
|
"learning_rate": 4.287409847262868e-06, |
|
"loss": 0.8559, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 0.8057979345321655, |
|
"eval_runtime": 132.6219, |
|
"eval_samples_per_second": 9.855, |
|
"eval_steps_per_second": 0.279, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 12.900039338569156, |
|
"learning_rate": 4.284617326996458e-06, |
|
"loss": 0.8245, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 27.720291530372055, |
|
"learning_rate": 4.281820259001121e-06, |
|
"loss": 0.8123, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 9.875124909540267, |
|
"learning_rate": 4.2790186504046045e-06, |
|
"loss": 0.8077, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 4.959102865654325, |
|
"learning_rate": 4.276212508346232e-06, |
|
"loss": 0.8102, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 8.445468566132282, |
|
"learning_rate": 4.273401839976877e-06, |
|
"loss": 0.9495, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 7.168758336315809, |
|
"learning_rate": 4.2705866524589475e-06, |
|
"loss": 0.794, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 23.960316842270476, |
|
"learning_rate": 4.267766952966369e-06, |
|
"loss": 0.8234, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 18.37836287213558, |
|
"learning_rate": 4.264942748684563e-06, |
|
"loss": 0.8037, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 5.337874181371052, |
|
"learning_rate": 4.262114046810429e-06, |
|
"loss": 0.8341, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 23.756712822011433, |
|
"learning_rate": 4.2592808545523335e-06, |
|
"loss": 0.7835, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 6.5799458933423365, |
|
"learning_rate": 4.256443179130081e-06, |
|
"loss": 0.823, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 5.340830389502352, |
|
"learning_rate": 4.2536010277748995e-06, |
|
"loss": 0.7958, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 4.274138909771709, |
|
"learning_rate": 4.250754407729428e-06, |
|
"loss": 0.806, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 5.0306349490940985, |
|
"learning_rate": 4.2479033262476885e-06, |
|
"loss": 0.8728, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.644338434000799, |
|
"learning_rate": 4.2450477905950745e-06, |
|
"loss": 0.8304, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.447934903535319, |
|
"learning_rate": 4.242187808048329e-06, |
|
"loss": 0.8294, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.468749772112203, |
|
"learning_rate": 4.239323385895527e-06, |
|
"loss": 0.8134, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.595150327241665, |
|
"learning_rate": 4.236454531436058e-06, |
|
"loss": 0.9786, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 3.7601508175241807, |
|
"learning_rate": 4.233581251980604e-06, |
|
"loss": 0.8269, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.86366757480535, |
|
"learning_rate": 4.230703554851127e-06, |
|
"loss": 0.8258, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.352222075706524, |
|
"learning_rate": 4.227821447380842e-06, |
|
"loss": 0.9652, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 8.73857112989018, |
|
"learning_rate": 4.224934936914206e-06, |
|
"loss": 0.8383, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.044749221503628, |
|
"learning_rate": 4.222044030806894e-06, |
|
"loss": 0.8207, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 4.613463894710261, |
|
"learning_rate": 4.219148736425785e-06, |
|
"loss": 0.8116, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 5.8777142171863686, |
|
"learning_rate": 4.216249061148939e-06, |
|
"loss": 0.7927, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 4.297186700651947, |
|
"learning_rate": 4.2133450123655805e-06, |
|
"loss": 0.8784, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 4.9580457958209765, |
|
"learning_rate": 4.210436597476077e-06, |
|
"loss": 0.8232, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 4.156257841735334, |
|
"learning_rate": 4.207523823891924e-06, |
|
"loss": 0.8446, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 4.364319132512279, |
|
"learning_rate": 4.204606699035724e-06, |
|
"loss": 0.8001, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 4.626437632049346, |
|
"learning_rate": 4.201685230341168e-06, |
|
"loss": 0.8488, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 3.6979503756764536, |
|
"learning_rate": 4.198759425253015e-06, |
|
"loss": 0.8231, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 3.9375406125469117, |
|
"learning_rate": 4.195829291227076e-06, |
|
"loss": 0.8262, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 3.757592718600369, |
|
"learning_rate": 4.192894835730193e-06, |
|
"loss": 0.8325, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 5.166098354237156, |
|
"learning_rate": 4.1899560662402204e-06, |
|
"loss": 0.8207, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 3.5626403449402146, |
|
"learning_rate": 4.187012990246005e-06, |
|
"loss": 0.848, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 4.405523794955747, |
|
"learning_rate": 4.18406561524737e-06, |
|
"loss": 0.8153, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 4.461492151159491, |
|
"learning_rate": 4.18111394875509e-06, |
|
"loss": 0.7961, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 3.9579080031218945, |
|
"learning_rate": 4.178157998290879e-06, |
|
"loss": 0.7961, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 3.4157753139788114, |
|
"learning_rate": 4.175197771387368e-06, |
|
"loss": 0.77, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 3.6556329052007372, |
|
"learning_rate": 4.172233275588082e-06, |
|
"loss": 0.757, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 3.56739734349662, |
|
"learning_rate": 4.169264518447428e-06, |
|
"loss": 0.816, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 3.929236437983629, |
|
"learning_rate": 4.16629150753067e-06, |
|
"loss": 0.7774, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 4.162206048648253, |
|
"learning_rate": 4.163314250413913e-06, |
|
"loss": 0.8242, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 3.5348802562769683, |
|
"learning_rate": 4.160332754684085e-06, |
|
"loss": 0.8292, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 4.026476793505602, |
|
"learning_rate": 4.157347027938907e-06, |
|
"loss": 0.8438, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 3.6419095683671205, |
|
"learning_rate": 4.154357077786892e-06, |
|
"loss": 0.8358, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 4.410825648674678, |
|
"learning_rate": 4.1513629118473095e-06, |
|
"loss": 0.7966, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 3.7192835271993423, |
|
"learning_rate": 4.1483645377501726e-06, |
|
"loss": 0.7922, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 4.2093727504165575, |
|
"learning_rate": 4.145361963136219e-06, |
|
"loss": 0.8179, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 3.81543786003894, |
|
"learning_rate": 4.1423551956568916e-06, |
|
"loss": 0.8072, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 4.0092000330441335, |
|
"learning_rate": 4.139344242974317e-06, |
|
"loss": 0.811, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 3.927721495131119, |
|
"learning_rate": 4.136329112761285e-06, |
|
"loss": 0.8587, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 3.6480183964959076, |
|
"learning_rate": 4.133309812701233e-06, |
|
"loss": 0.7872, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 3.603038562895384, |
|
"learning_rate": 4.130286350488224e-06, |
|
"loss": 0.8211, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 4.502055880585956, |
|
"learning_rate": 4.127258733826929e-06, |
|
"loss": 0.7947, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 4.0596180713400845, |
|
"learning_rate": 4.124226970432602e-06, |
|
"loss": 0.8371, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 5.32641556527492, |
|
"learning_rate": 4.121191068031067e-06, |
|
"loss": 0.9996, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 3.657535832118382, |
|
"learning_rate": 4.118151034358696e-06, |
|
"loss": 0.7846, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 4.542970770312805, |
|
"learning_rate": 4.1151068771623864e-06, |
|
"loss": 0.9695, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 3.4165335352607418, |
|
"learning_rate": 4.112058604199544e-06, |
|
"loss": 0.7936, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 4.38297960645218, |
|
"learning_rate": 4.109006223238064e-06, |
|
"loss": 0.7862, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 4.597014556198219, |
|
"learning_rate": 4.105949742056309e-06, |
|
"loss": 0.9645, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 3.431709372470043, |
|
"learning_rate": 4.102889168443091e-06, |
|
"loss": 0.8077, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 3.787025575201097, |
|
"learning_rate": 4.0998245101976495e-06, |
|
"loss": 0.811, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 4.1176444935534535, |
|
"learning_rate": 4.096755775129634e-06, |
|
"loss": 0.824, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 3.874994826180069, |
|
"learning_rate": 4.093682971059081e-06, |
|
"loss": 0.8207, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 5.615505880427777, |
|
"learning_rate": 4.0906061058164e-06, |
|
"loss": 0.9337, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 4.159261391680892, |
|
"learning_rate": 4.087525187242345e-06, |
|
"loss": 0.7938, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 3.982328427595764, |
|
"learning_rate": 4.084440223188002e-06, |
|
"loss": 0.8353, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 3.501905560670443, |
|
"learning_rate": 4.081351221514765e-06, |
|
"loss": 0.8038, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 4.4798436725132795, |
|
"learning_rate": 4.078258190094318e-06, |
|
"loss": 0.8106, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 4.630604033125049, |
|
"learning_rate": 4.0751611368086115e-06, |
|
"loss": 0.8368, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 4.247722886052707, |
|
"learning_rate": 4.072060069549848e-06, |
|
"loss": 0.7804, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 3.976719242252589, |
|
"learning_rate": 4.068954996220457e-06, |
|
"loss": 0.8517, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 3.7378777926374647, |
|
"learning_rate": 4.065845924733077e-06, |
|
"loss": 0.7879, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 3.377152336223136, |
|
"learning_rate": 4.062732863010534e-06, |
|
"loss": 0.8151, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 3.958932705104774, |
|
"learning_rate": 4.059615818985826e-06, |
|
"loss": 0.8261, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 4.056870191904715, |
|
"learning_rate": 4.056494800602093e-06, |
|
"loss": 0.8036, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 4.276330614300562, |
|
"learning_rate": 4.053369815812608e-06, |
|
"loss": 0.7653, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 3.7171393121481557, |
|
"learning_rate": 4.050240872580749e-06, |
|
"loss": 0.811, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 3.8515114977006384, |
|
"learning_rate": 4.047107978879985e-06, |
|
"loss": 0.837, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 3.990125985702027, |
|
"learning_rate": 4.043971142693845e-06, |
|
"loss": 0.7478, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 5.038946309180022, |
|
"learning_rate": 4.040830372015909e-06, |
|
"loss": 0.8186, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 5.347765467237379, |
|
"learning_rate": 4.0376856748497865e-06, |
|
"loss": 0.8224, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 3.8056078948982957, |
|
"learning_rate": 4.034537059209085e-06, |
|
"loss": 0.7922, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.699749057539843, |
|
"learning_rate": 4.031384533117404e-06, |
|
"loss": 0.8004, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 3.985653883322575, |
|
"learning_rate": 4.0282281046083045e-06, |
|
"loss": 0.8352, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 3.6031461215401115, |
|
"learning_rate": 4.025067781725294e-06, |
|
"loss": 0.8557, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.374834805628173, |
|
"learning_rate": 4.021903572521802e-06, |
|
"loss": 0.8473, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.706489580366399, |
|
"learning_rate": 4.0187354850611634e-06, |
|
"loss": 0.8707, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 3.6196942519877844, |
|
"learning_rate": 4.015563527416596e-06, |
|
"loss": 0.7828, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 4.021419041448392, |
|
"learning_rate": 4.012387707671177e-06, |
|
"loss": 0.8212, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 7.5051269502991484, |
|
"learning_rate": 4.00920803391783e-06, |
|
"loss": 0.8125, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 4.816524818864674, |
|
"learning_rate": 4.006024514259295e-06, |
|
"loss": 0.8458, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.999047814247483, |
|
"learning_rate": 4.002837156808116e-06, |
|
"loss": 0.9794, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 4.053935034209712, |
|
"learning_rate": 3.999645969686616e-06, |
|
"loss": 0.8172, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.4988389719653408, |
|
"learning_rate": 3.996450961026876e-06, |
|
"loss": 0.8358, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.7828610862275447, |
|
"learning_rate": 3.993252138970716e-06, |
|
"loss": 0.8472, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 4.30179599391661, |
|
"learning_rate": 3.990049511669675e-06, |
|
"loss": 0.9382, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.7610184281848373, |
|
"learning_rate": 3.986843087284986e-06, |
|
"loss": 0.8122, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.7656350305584976, |
|
"learning_rate": 3.983632873987562e-06, |
|
"loss": 0.7727, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.760295447041278, |
|
"learning_rate": 3.980418879957967e-06, |
|
"loss": 0.7896, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 3.1049226948000386, |
|
"learning_rate": 3.977201113386402e-06, |
|
"loss": 0.7927, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 11.952856680061187, |
|
"learning_rate": 3.973979582472681e-06, |
|
"loss": 0.82, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.6775385257137136, |
|
"learning_rate": 3.970754295426211e-06, |
|
"loss": 0.7982, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 4.126563331457935, |
|
"learning_rate": 3.96752526046597e-06, |
|
"loss": 0.7973, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.9969733729191637, |
|
"learning_rate": 3.964292485820487e-06, |
|
"loss": 0.7848, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.843494951843216, |
|
"learning_rate": 3.961055979727822e-06, |
|
"loss": 0.7809, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.722737193377863, |
|
"learning_rate": 3.957815750435542e-06, |
|
"loss": 0.8272, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.505878864813977, |
|
"learning_rate": 3.954571806200702e-06, |
|
"loss": 0.7547, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.816576897584943, |
|
"learning_rate": 3.951324155289825e-06, |
|
"loss": 0.8178, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.6494681155339252, |
|
"learning_rate": 3.948072805978879e-06, |
|
"loss": 0.8163, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.31954721595174, |
|
"learning_rate": 3.9448177665532575e-06, |
|
"loss": 0.8036, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.99601790046458, |
|
"learning_rate": 3.941559045307756e-06, |
|
"loss": 0.8052, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.256204345344998, |
|
"learning_rate": 3.938296650546552e-06, |
|
"loss": 0.8062, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.6522897880772778, |
|
"learning_rate": 3.935030590583186e-06, |
|
"loss": 0.8461, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.5724478570414706, |
|
"learning_rate": 3.931760873740539e-06, |
|
"loss": 0.8173, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 4.369010546999307, |
|
"learning_rate": 3.928487508350808e-06, |
|
"loss": 0.8355, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.867196950400295, |
|
"learning_rate": 3.9252105027554885e-06, |
|
"loss": 0.7982, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.757894555280206, |
|
"learning_rate": 3.9219298653053544e-06, |
|
"loss": 0.744, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 4.483773308176931, |
|
"learning_rate": 3.918645604360434e-06, |
|
"loss": 0.8264, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.5936542631360995, |
|
"learning_rate": 3.915357728289985e-06, |
|
"loss": 0.773, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 3.7125514529042447, |
|
"learning_rate": 3.912066245472484e-06, |
|
"loss": 0.789, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.994678394733242, |
|
"learning_rate": 3.908771164295595e-06, |
|
"loss": 0.8054, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.440863065927586, |
|
"learning_rate": 3.9054724931561516e-06, |
|
"loss": 0.7667, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.6354877018750003, |
|
"learning_rate": 3.902170240460137e-06, |
|
"loss": 0.8434, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.6697310301169783, |
|
"learning_rate": 3.898864414622661e-06, |
|
"loss": 0.8277, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.4657019048153543, |
|
"learning_rate": 3.895555024067937e-06, |
|
"loss": 0.8058, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.569249201300816, |
|
"learning_rate": 3.8922420772292645e-06, |
|
"loss": 0.8646, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.6960654260718675, |
|
"learning_rate": 3.888925582549006e-06, |
|
"loss": 0.8439, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.206703534075406, |
|
"learning_rate": 3.8856055484785625e-06, |
|
"loss": 0.958, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.8024335105854963, |
|
"learning_rate": 3.8822819834783556e-06, |
|
"loss": 0.7933, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 3.311892134941326, |
|
"learning_rate": 3.878954896017804e-06, |
|
"loss": 0.7679, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 3.6892320743049383, |
|
"learning_rate": 3.875624294575306e-06, |
|
"loss": 0.8663, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 5.747838560381203, |
|
"learning_rate": 3.872290187638208e-06, |
|
"loss": 0.8028, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 4.130227898119578, |
|
"learning_rate": 3.868952583702798e-06, |
|
"loss": 0.7775, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 3.8622011098862004, |
|
"learning_rate": 3.865611491274267e-06, |
|
"loss": 0.8362, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 4.076018210248722, |
|
"learning_rate": 3.862266918866702e-06, |
|
"loss": 0.9772, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 4.361556277412654, |
|
"learning_rate": 3.858918875003053e-06, |
|
"loss": 0.7957, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 5.386167012643809, |
|
"learning_rate": 3.855567368215122e-06, |
|
"loss": 0.9516, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 3.9439193106997736, |
|
"learning_rate": 3.852212407043528e-06, |
|
"loss": 0.8231, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 4.465518470978496, |
|
"learning_rate": 3.848854000037702e-06, |
|
"loss": 0.8476, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 4.675424607415688, |
|
"learning_rate": 3.845492155755848e-06, |
|
"loss": 0.7827, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 6.031861648386004, |
|
"learning_rate": 3.8421268827649325e-06, |
|
"loss": 0.8044, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 5.884738538466192, |
|
"learning_rate": 3.83875818964066e-06, |
|
"loss": 0.9754, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 6.050577830000915, |
|
"learning_rate": 3.835386084967451e-06, |
|
"loss": 0.7774, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 4.992604354329953, |
|
"learning_rate": 3.832010577338414e-06, |
|
"loss": 0.8663, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 4.137703965401395, |
|
"learning_rate": 3.828631675355338e-06, |
|
"loss": 0.8247, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 3.9250818325319647, |
|
"learning_rate": 3.825249387628654e-06, |
|
"loss": 0.9427, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 5.876925468778531, |
|
"learning_rate": 3.821863722777427e-06, |
|
"loss": 0.7972, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 4.029506273549229, |
|
"learning_rate": 3.818474689429324e-06, |
|
"loss": 0.8085, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 4.095701024225992, |
|
"learning_rate": 3.815082296220596e-06, |
|
"loss": 0.8278, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 4.0688198508439255, |
|
"learning_rate": 3.8116865517960584e-06, |
|
"loss": 0.7869, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 4.679593702074773, |
|
"learning_rate": 3.808287464809063e-06, |
|
"loss": 0.8103, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 3.980055385102619, |
|
"learning_rate": 3.8048850439214844e-06, |
|
"loss": 0.9489, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 3.89441546745002, |
|
"learning_rate": 3.8014792978036874e-06, |
|
"loss": 0.8053, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 3.495335889379114, |
|
"learning_rate": 3.7980702351345146e-06, |
|
"loss": 0.796, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 3.69936931190018, |
|
"learning_rate": 3.7946578646012578e-06, |
|
"loss": 0.8388, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 3.711786581597773, |
|
"learning_rate": 3.7912421948996394e-06, |
|
"loss": 0.772, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 4.3115598706174225, |
|
"learning_rate": 3.787823234733788e-06, |
|
"loss": 0.8086, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 4.206524036573733, |
|
"learning_rate": 3.7844009928162195e-06, |
|
"loss": 0.8422, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 4.581806945598464, |
|
"learning_rate": 3.78097547786781e-06, |
|
"loss": 0.8299, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 3.649063963125477, |
|
"learning_rate": 3.7775466986177763e-06, |
|
"loss": 0.8233, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 3.9874069159693497, |
|
"learning_rate": 3.7741146638036567e-06, |
|
"loss": 0.836, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 5.101352434056481, |
|
"learning_rate": 3.770679382171283e-06, |
|
"loss": 0.8053, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.057518103549027, |
|
"learning_rate": 3.7672408624747598e-06, |
|
"loss": 0.8119, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.134292768219472, |
|
"learning_rate": 3.7637991134764475e-06, |
|
"loss": 0.8441, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.037293558864936, |
|
"learning_rate": 3.7603541439469314e-06, |
|
"loss": 0.8475, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.067939888641445, |
|
"learning_rate": 3.756905962665005e-06, |
|
"loss": 0.8176, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.741871018197297, |
|
"learning_rate": 3.7534545784176486e-06, |
|
"loss": 0.8252, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 3.7687469109452536, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.8222, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.0489323193480855, |
|
"learning_rate": 3.7465422362153416e-06, |
|
"loss": 0.8325, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.049191319739393, |
|
"learning_rate": 3.7430812958750695e-06, |
|
"loss": 0.8391, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 3.601920010559379, |
|
"learning_rate": 3.7396171877986766e-06, |
|
"loss": 0.7752, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.353923235687504, |
|
"learning_rate": 3.736149920813726e-06, |
|
"loss": 0.7507, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.022804558800863, |
|
"learning_rate": 3.7326795037558335e-06, |
|
"loss": 0.8216, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.056771582273963, |
|
"learning_rate": 3.72920594546864e-06, |
|
"loss": 0.7952, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 3.418690092445234, |
|
"learning_rate": 3.7257292548037917e-06, |
|
"loss": 0.7834, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.027419729266051, |
|
"learning_rate": 3.7222494406209174e-06, |
|
"loss": 0.8238, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.557745951925816, |
|
"learning_rate": 3.718766511787606e-06, |
|
"loss": 0.8367, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.16754573699968, |
|
"learning_rate": 3.715280477179382e-06, |
|
"loss": 0.8561, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 4.213296187055132, |
|
"learning_rate": 3.7117913456796855e-06, |
|
"loss": 0.8006, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.895541456046058, |
|
"learning_rate": 3.7082991261798473e-06, |
|
"loss": 0.8019, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 4.225646010065031, |
|
"learning_rate": 3.7048038275790695e-06, |
|
"loss": 0.7799, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.793719931690171, |
|
"learning_rate": 3.701305458784397e-06, |
|
"loss": 0.7877, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.6918538059697505, |
|
"learning_rate": 3.697804028710703e-06, |
|
"loss": 0.8472, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.7943218099807967, |
|
"learning_rate": 3.6942995462806574e-06, |
|
"loss": 0.84, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.603972411456462, |
|
"learning_rate": 3.6907920204247122e-06, |
|
"loss": 0.8205, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.755592793493984, |
|
"learning_rate": 3.6872814600810716e-06, |
|
"loss": 0.8354, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 4.658007950410123, |
|
"learning_rate": 3.6837678741956747e-06, |
|
"loss": 0.8767, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.4964527787354416, |
|
"learning_rate": 3.6802512717221694e-06, |
|
"loss": 0.7814, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 3.434255209389191, |
|
"learning_rate": 3.6767316616218927e-06, |
|
"loss": 0.8071, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 4.272321481007903, |
|
"learning_rate": 3.6732090528638432e-06, |
|
"loss": 0.7787, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 4.221528130430344, |
|
"learning_rate": 3.669683454424663e-06, |
|
"loss": 0.7729, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 3.9597068009542915, |
|
"learning_rate": 3.6661548752886113e-06, |
|
"loss": 0.7689, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 3.5568349195672253, |
|
"learning_rate": 3.6626233244475445e-06, |
|
"loss": 0.9255, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 4.719503943417636, |
|
"learning_rate": 3.6590888109008904e-06, |
|
"loss": 0.8281, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 3.6514574646812568, |
|
"learning_rate": 3.6555513436556285e-06, |
|
"loss": 0.8008, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 4.548664005213854, |
|
"learning_rate": 3.6520109317262624e-06, |
|
"loss": 0.7797, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 3.704773651343669, |
|
"learning_rate": 3.6484675841348024e-06, |
|
"loss": 0.813, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 3.6582715454571657, |
|
"learning_rate": 3.6449213099107376e-06, |
|
"loss": 0.8007, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 4.094141645704761, |
|
"learning_rate": 3.6413721180910165e-06, |
|
"loss": 0.837, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 4.618837723050664, |
|
"learning_rate": 3.6378200177200223e-06, |
|
"loss": 0.8316, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.38898352006268, |
|
"learning_rate": 3.6342650178495488e-06, |
|
"loss": 0.8103, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.7976368474709896, |
|
"learning_rate": 3.6307071275387807e-06, |
|
"loss": 0.8059, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.5189322963766205, |
|
"learning_rate": 3.6271463558542646e-06, |
|
"loss": 0.7959, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.3331229796313027, |
|
"learning_rate": 3.623582711869895e-06, |
|
"loss": 0.7882, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 5.530082944684263, |
|
"learning_rate": 3.6200162046668826e-06, |
|
"loss": 0.79, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.3051251617528568, |
|
"learning_rate": 3.616446843333733e-06, |
|
"loss": 0.7773, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.3812653747476427, |
|
"learning_rate": 3.6128746369662283e-06, |
|
"loss": 0.7819, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.5371020189256805, |
|
"learning_rate": 3.6092995946673996e-06, |
|
"loss": 0.8518, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 3.834778332537793, |
|
"learning_rate": 3.6057217255475034e-06, |
|
"loss": 0.8418, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.415666375807929, |
|
"learning_rate": 3.602141038724001e-06, |
|
"loss": 0.8146, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 3.641170161321054, |
|
"learning_rate": 3.5985575433215345e-06, |
|
"loss": 0.8347, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 3.7635739548814207, |
|
"learning_rate": 3.5949712484719013e-06, |
|
"loss": 0.8207, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.1180161320176225, |
|
"learning_rate": 3.591382163314034e-06, |
|
"loss": 0.9224, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.130810975608334, |
|
"learning_rate": 3.587790296993976e-06, |
|
"loss": 0.843, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.691833335291648, |
|
"learning_rate": 3.5841956586648553e-06, |
|
"loss": 0.9047, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 4.657933022135858, |
|
"learning_rate": 3.5805982574868673e-06, |
|
"loss": 1.1555, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 3.2260069416532207, |
|
"learning_rate": 3.5769981026272477e-06, |
|
"loss": 0.7869, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 3.4291174313472643, |
|
"learning_rate": 3.5733952032602454e-06, |
|
"loss": 0.7723, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 3.919942403046828, |
|
"learning_rate": 3.5697895685671076e-06, |
|
"loss": 0.8133, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 3.9420409702367167, |
|
"learning_rate": 3.5661812077360496e-06, |
|
"loss": 0.8135, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.062763811575889, |
|
"learning_rate": 3.562570129962234e-06, |
|
"loss": 0.8098, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 3.1692597156645306, |
|
"learning_rate": 3.5589563444477477e-06, |
|
"loss": 0.7864, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.529414675977005, |
|
"learning_rate": 3.5553398604015777e-06, |
|
"loss": 0.8194, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 3.6410323820649144, |
|
"learning_rate": 3.5517206870395852e-06, |
|
"loss": 0.8061, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 3.5825284359470673, |
|
"learning_rate": 3.548098833584489e-06, |
|
"loss": 0.8055, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 3.6994250667973314, |
|
"learning_rate": 3.544474309265834e-06, |
|
"loss": 0.8014, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.609457196655879, |
|
"learning_rate": 3.5408471233199713e-06, |
|
"loss": 0.8953, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.357338921046579, |
|
"learning_rate": 3.5372172849900377e-06, |
|
"loss": 0.815, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.534756505244773, |
|
"learning_rate": 3.5335848035259257e-06, |
|
"loss": 0.8281, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 4.46200388603068, |
|
"learning_rate": 3.5299496881842654e-06, |
|
"loss": 0.8979, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 3.8353237250725893, |
|
"learning_rate": 3.526311948228397e-06, |
|
"loss": 0.8392, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 3.513564550196794, |
|
"learning_rate": 3.5226715929283507e-06, |
|
"loss": 0.8276, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 4.840869909676071, |
|
"learning_rate": 3.5190286315608196e-06, |
|
"loss": 0.7808, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 3.6376115911500326, |
|
"learning_rate": 3.5153830734091404e-06, |
|
"loss": 0.7985, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 8.73646238891082, |
|
"learning_rate": 3.511734927763265e-06, |
|
"loss": 0.8393, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 4.215260596616087, |
|
"learning_rate": 3.508084203919739e-06, |
|
"loss": 0.8183, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 4.0886422935222395, |
|
"learning_rate": 3.50443091118168e-06, |
|
"loss": 0.7957, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 3.7885638513567383, |
|
"learning_rate": 3.5007750588587495e-06, |
|
"loss": 0.8112, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 3.9547192279690653, |
|
"learning_rate": 3.497116656267133e-06, |
|
"loss": 0.7731, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 3.86013814393367, |
|
"learning_rate": 3.493455712729514e-06, |
|
"loss": 0.8547, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 3.9264148109718997, |
|
"learning_rate": 3.4897922375750517e-06, |
|
"loss": 0.7858, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 4.5652346553585925, |
|
"learning_rate": 3.4861262401393563e-06, |
|
"loss": 0.785, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 4.1139632776522586, |
|
"learning_rate": 3.4824577297644663e-06, |
|
"loss": 0.9653, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 5.588885807787667, |
|
"learning_rate": 3.478786715798823e-06, |
|
"loss": 0.8312, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 6.383737000046943, |
|
"learning_rate": 3.4751132075972473e-06, |
|
"loss": 0.7787, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 4.038979136777571, |
|
"learning_rate": 3.471437214520917e-06, |
|
"loss": 0.8058, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 4.227693679919142, |
|
"learning_rate": 3.4677587459373417e-06, |
|
"loss": 0.9012, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 3.985607862312997, |
|
"learning_rate": 3.46407781122034e-06, |
|
"loss": 0.819, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 3.922668922254852, |
|
"learning_rate": 3.4603944197500126e-06, |
|
"loss": 0.7354, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.343492283466782, |
|
"learning_rate": 3.4567085809127247e-06, |
|
"loss": 0.786, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.484528470192613, |
|
"learning_rate": 3.4530203041010745e-06, |
|
"loss": 0.7849, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.367273259116789, |
|
"learning_rate": 3.449329598713874e-06, |
|
"loss": 0.7921, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 3.927650734068294, |
|
"learning_rate": 3.4456364741561256e-06, |
|
"loss": 0.7746, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.586041331240587, |
|
"learning_rate": 3.4419409398389937e-06, |
|
"loss": 0.8183, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 3.7164145526774224, |
|
"learning_rate": 3.4382430051797844e-06, |
|
"loss": 0.8202, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.394679226693479, |
|
"learning_rate": 3.434542679601922e-06, |
|
"loss": 0.7859, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.096618466435277, |
|
"learning_rate": 3.430839972534923e-06, |
|
"loss": 0.7583, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.013175572729404, |
|
"learning_rate": 3.4271348934143707e-06, |
|
"loss": 0.8071, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 4.292337945500214, |
|
"learning_rate": 3.423427451681895e-06, |
|
"loss": 0.8085, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 3.9544343369431463, |
|
"learning_rate": 3.4197176567851463e-06, |
|
"loss": 0.8177, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.433418332095438, |
|
"learning_rate": 3.4160055181777714e-06, |
|
"loss": 0.7884, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.956803986907128, |
|
"learning_rate": 3.4122910453193885e-06, |
|
"loss": 0.7959, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 5.450063143284857, |
|
"learning_rate": 3.4085742476755657e-06, |
|
"loss": 0.9269, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 5.196193261350649, |
|
"learning_rate": 3.404855134717795e-06, |
|
"loss": 1.0828, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 8.093995434969097, |
|
"learning_rate": 3.4011337159234674e-06, |
|
"loss": 0.7985, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.575208484444267, |
|
"learning_rate": 3.3974100007758514e-06, |
|
"loss": 0.7914, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.88561287010472, |
|
"learning_rate": 3.3936839987640665e-06, |
|
"loss": 0.8413, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.10258246501254, |
|
"learning_rate": 3.3899557193830585e-06, |
|
"loss": 0.7827, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.134612322280866, |
|
"learning_rate": 3.3862251721335793e-06, |
|
"loss": 0.7846, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.127521869620768, |
|
"learning_rate": 3.382492366522158e-06, |
|
"loss": 0.8187, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.694265995175311, |
|
"learning_rate": 3.3787573120610794e-06, |
|
"loss": 0.9095, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.832053974686459, |
|
"learning_rate": 3.375020018268359e-06, |
|
"loss": 0.8422, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 3.615649772558988, |
|
"learning_rate": 3.371280494667719e-06, |
|
"loss": 0.7797, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.377263570264409, |
|
"learning_rate": 3.367538750788563e-06, |
|
"loss": 0.9017, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 5.6300240178979815, |
|
"learning_rate": 3.3637947961659533e-06, |
|
"loss": 0.8064, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.977615821468784, |
|
"learning_rate": 3.360048640340585e-06, |
|
"loss": 0.7614, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 3.9739872782044285, |
|
"learning_rate": 3.356300292858763e-06, |
|
"loss": 0.8155, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 4.365429767411158, |
|
"learning_rate": 3.3525497632723786e-06, |
|
"loss": 0.7931, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.03510482219512, |
|
"learning_rate": 3.348797061138881e-06, |
|
"loss": 0.7533, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.423899720091732, |
|
"learning_rate": 3.345042196021257e-06, |
|
"loss": 0.849, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.3577483066057745, |
|
"learning_rate": 3.3412851774880066e-06, |
|
"loss": 0.914, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.8429976699063095, |
|
"learning_rate": 3.3375260151131156e-06, |
|
"loss": 0.8208, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 3.846433730762683, |
|
"learning_rate": 3.333764718476032e-06, |
|
"loss": 0.7667, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 7.424150572222642, |
|
"learning_rate": 3.3300012971616467e-06, |
|
"loss": 0.7906, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 3.78110316938379, |
|
"learning_rate": 3.3262357607602596e-06, |
|
"loss": 0.8568, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.484285048821905, |
|
"learning_rate": 3.3224681188675643e-06, |
|
"loss": 0.7874, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.710007453384313, |
|
"learning_rate": 3.318698381084619e-06, |
|
"loss": 0.812, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 4.303370184571771, |
|
"learning_rate": 3.3149265570178215e-06, |
|
"loss": 0.9035, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 4.26213714508879, |
|
"learning_rate": 3.3111526562788864e-06, |
|
"loss": 0.8047, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 3.9733132790695183, |
|
"learning_rate": 3.3073766884848235e-06, |
|
"loss": 0.7961, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 5.156615645995947, |
|
"learning_rate": 3.303598663257904e-06, |
|
"loss": 0.824, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 4.736017510667661, |
|
"learning_rate": 3.2998185902256475e-06, |
|
"loss": 0.8016, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 5.180043733793966, |
|
"learning_rate": 3.2960364790207897e-06, |
|
"loss": 0.7707, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 4.905861811752156, |
|
"learning_rate": 3.2922523392812605e-06, |
|
"loss": 0.7466, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 3.923996642696014, |
|
"learning_rate": 3.2884661806501576e-06, |
|
"loss": 0.8046, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 3.9631467827534426, |
|
"learning_rate": 3.284678012775727e-06, |
|
"loss": 0.9511, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 3.965325234874838, |
|
"learning_rate": 3.2808878453113317e-06, |
|
"loss": 0.8636, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 3.642633499379332, |
|
"learning_rate": 3.2770956879154305e-06, |
|
"loss": 0.7702, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 4.492042123923082, |
|
"learning_rate": 3.273301550251555e-06, |
|
"loss": 0.8537, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 3.8876988328768602, |
|
"learning_rate": 3.269505441988281e-06, |
|
"loss": 0.8605, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 3.766606063581812, |
|
"learning_rate": 3.2657073727992078e-06, |
|
"loss": 0.7985, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 4.267264555929007, |
|
"learning_rate": 3.2619073523629304e-06, |
|
"loss": 0.7971, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 4.919439020397232, |
|
"learning_rate": 3.258105390363016e-06, |
|
"loss": 0.8164, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 4.24583647910275, |
|
"learning_rate": 3.2543014964879814e-06, |
|
"loss": 0.8043, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 3.4364112443855985, |
|
"learning_rate": 3.250495680431264e-06, |
|
"loss": 0.769, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 4.40163318603266, |
|
"learning_rate": 3.246687951891201e-06, |
|
"loss": 0.7878, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 4.273441543771241, |
|
"learning_rate": 3.2428783205710023e-06, |
|
"loss": 0.8331, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 3.7031687463067633, |
|
"learning_rate": 3.2390667961787276e-06, |
|
"loss": 0.7873, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.5284627603373284, |
|
"learning_rate": 3.2352533884272596e-06, |
|
"loss": 0.7826, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.901059309743076, |
|
"learning_rate": 3.2314381070342815e-06, |
|
"loss": 0.9248, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 13.116776373765031, |
|
"learning_rate": 3.2276209617222497e-06, |
|
"loss": 0.8109, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.5825950028846907, |
|
"learning_rate": 3.223801962218372e-06, |
|
"loss": 0.7666, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.500505244324813, |
|
"learning_rate": 3.21998111825458e-06, |
|
"loss": 0.7398, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.496092976488147, |
|
"learning_rate": 3.216158439567506e-06, |
|
"loss": 0.7631, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 4.556135014522858, |
|
"learning_rate": 3.2123339358984573e-06, |
|
"loss": 0.8207, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.7277520517697136, |
|
"learning_rate": 3.208507616993393e-06, |
|
"loss": 0.8516, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 4.423036921270627, |
|
"learning_rate": 3.2046794926028966e-06, |
|
"loss": 0.791, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 3.235611807092007, |
|
"learning_rate": 3.200849572482153e-06, |
|
"loss": 0.7558, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 3.824187344341294, |
|
"learning_rate": 3.1970178663909233e-06, |
|
"loss": 0.7658, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 4.275625768452698, |
|
"learning_rate": 3.19318438409352e-06, |
|
"loss": 0.8375, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 4.566672221414914, |
|
"learning_rate": 3.189349135358781e-06, |
|
"loss": 0.762, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 4.020446057440514, |
|
"learning_rate": 3.1855121299600454e-06, |
|
"loss": 0.8817, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 4.2881734956366095, |
|
"learning_rate": 3.1816733776751313e-06, |
|
"loss": 0.8106, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 4.673485294928623, |
|
"learning_rate": 3.1778328882863058e-06, |
|
"loss": 0.9695, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 7.339180964057777, |
|
"learning_rate": 3.173990671580263e-06, |
|
"loss": 0.7922, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 3.7378322020107713, |
|
"learning_rate": 3.170146737348099e-06, |
|
"loss": 0.8213, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 4.440411381382036, |
|
"learning_rate": 3.166301095385288e-06, |
|
"loss": 0.8058, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 3.645734306429232, |
|
"learning_rate": 3.162453755491655e-06, |
|
"loss": 0.7686, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.646322329035178, |
|
"learning_rate": 3.1586047274713493e-06, |
|
"loss": 0.8386, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.205705304738026, |
|
"learning_rate": 3.154754021132827e-06, |
|
"loss": 0.857, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 49.63215769717914, |
|
"learning_rate": 3.1509016462888175e-06, |
|
"loss": 0.8218, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.780984494435262, |
|
"learning_rate": 3.147047612756302e-06, |
|
"loss": 0.8394, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.58164822302537, |
|
"learning_rate": 3.143191930356491e-06, |
|
"loss": 0.837, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.852494144118555, |
|
"learning_rate": 3.139334608914795e-06, |
|
"loss": 0.9018, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 4.145761545326323, |
|
"learning_rate": 3.135475658260801e-06, |
|
"loss": 0.8081, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 3.7323925690162945, |
|
"learning_rate": 3.1316150882282486e-06, |
|
"loss": 0.7904, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 3.828580217813585, |
|
"learning_rate": 3.1277529086550044e-06, |
|
"loss": 0.8209, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 7.169193160513471, |
|
"learning_rate": 3.1238891293830344e-06, |
|
"loss": 0.8518, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 3.7890433555958705, |
|
"learning_rate": 3.120023760258384e-06, |
|
"loss": 0.81, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 5.4100072300835915, |
|
"learning_rate": 3.1161568111311487e-06, |
|
"loss": 0.8143, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 4.745257896570691, |
|
"learning_rate": 3.112288291855449e-06, |
|
"loss": 0.7812, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 3.989422633795527, |
|
"learning_rate": 3.108418212289408e-06, |
|
"loss": 0.7939, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 3.69468243636268, |
|
"learning_rate": 3.1045465822951265e-06, |
|
"loss": 0.7719, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 3.734646448984784, |
|
"learning_rate": 3.1006734117386517e-06, |
|
"loss": 0.8206, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 4.406895307041761, |
|
"learning_rate": 3.0967987104899623e-06, |
|
"loss": 0.849, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 5.120926190738434, |
|
"learning_rate": 3.092922488422933e-06, |
|
"loss": 0.8067, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 4.395257481206765, |
|
"learning_rate": 3.0890447554153153e-06, |
|
"loss": 0.7899, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 4.381884034429759, |
|
"learning_rate": 3.0851655213487123e-06, |
|
"loss": 0.8286, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 3.4432938469880736, |
|
"learning_rate": 3.0812847961085527e-06, |
|
"loss": 0.7555, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 5.491662807975452, |
|
"learning_rate": 3.077402589584061e-06, |
|
"loss": 0.7953, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 5.109785316548567, |
|
"learning_rate": 3.073518911668241e-06, |
|
"loss": 0.8068, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 4.816255710860229, |
|
"learning_rate": 3.0696337722578444e-06, |
|
"loss": 0.8116, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 4.0550180873058945, |
|
"learning_rate": 3.0657471812533464e-06, |
|
"loss": 0.7445, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 6.428409369009171, |
|
"learning_rate": 3.061859148558922e-06, |
|
"loss": 0.7809, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 7.399300412933422, |
|
"learning_rate": 3.057969684082421e-06, |
|
"loss": 0.9132, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 4.438275269071275, |
|
"learning_rate": 3.05407879773534e-06, |
|
"loss": 0.7677, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 4.029683232488459, |
|
"learning_rate": 3.0501864994328002e-06, |
|
"loss": 0.8277, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 6.985086578254067, |
|
"learning_rate": 3.04629279909352e-06, |
|
"loss": 0.7709, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 3.989233419159114, |
|
"learning_rate": 3.0423977066397913e-06, |
|
"loss": 0.7942, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 5.59818214654168, |
|
"learning_rate": 3.038501231997454e-06, |
|
"loss": 0.7889, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 4.738840603418049, |
|
"learning_rate": 3.0346033850958685e-06, |
|
"loss": 0.8176, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 4.316647635809497, |
|
"learning_rate": 3.0307041758678933e-06, |
|
"loss": 0.771, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 5.364622380247118, |
|
"learning_rate": 3.0268036142498596e-06, |
|
"loss": 0.7959, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 9.785481265487443, |
|
"learning_rate": 3.0229017101815424e-06, |
|
"loss": 0.8661, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 4.766052528284133, |
|
"learning_rate": 3.018998473606139e-06, |
|
"loss": 0.7826, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 4.701099515941216, |
|
"learning_rate": 3.0150939144702425e-06, |
|
"loss": 0.8236, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 9.461997482098507, |
|
"learning_rate": 3.011188042723816e-06, |
|
"loss": 0.7458, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 5.380245649553912, |
|
"learning_rate": 3.007280868320167e-06, |
|
"loss": 0.9145, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 10.163712466924556, |
|
"learning_rate": 3.0033724012159244e-06, |
|
"loss": 0.8479, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.2825214697150455, |
|
"learning_rate": 2.9994626513710085e-06, |
|
"loss": 0.8418, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.133634973081057, |
|
"learning_rate": 2.9955516287486103e-06, |
|
"loss": 0.7698, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.448695379643913, |
|
"learning_rate": 2.9916393433151636e-06, |
|
"loss": 0.8066, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.466338581251602, |
|
"learning_rate": 2.9877258050403214e-06, |
|
"loss": 0.8007, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 10.810936084229239, |
|
"learning_rate": 2.9838110238969266e-06, |
|
"loss": 0.8186, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.803654636244916, |
|
"learning_rate": 2.9798950098609925e-06, |
|
"loss": 0.8928, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.603349966398751, |
|
"learning_rate": 2.975977772911671e-06, |
|
"loss": 0.82, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.1551088640505, |
|
"learning_rate": 2.9720593230312337e-06, |
|
"loss": 0.8218, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 4.340225183652387, |
|
"learning_rate": 2.968139670205041e-06, |
|
"loss": 0.8037, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 4.864128739357516, |
|
"learning_rate": 2.964218824421518e-06, |
|
"loss": 0.7639, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 3.9960489196439664, |
|
"learning_rate": 2.960296795672132e-06, |
|
"loss": 0.7699, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 4.852346612250402, |
|
"learning_rate": 2.9563735939513637e-06, |
|
"loss": 0.8014, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 4.3848451966395325, |
|
"learning_rate": 2.9524492292566824e-06, |
|
"loss": 0.8093, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 8.205235412527301, |
|
"learning_rate": 2.9485237115885223e-06, |
|
"loss": 0.8619, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 6.176360237119004, |
|
"learning_rate": 2.9445970509502547e-06, |
|
"loss": 0.7676, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 4.630007100247436, |
|
"learning_rate": 2.9406692573481634e-06, |
|
"loss": 0.8479, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 4.947598794426371, |
|
"learning_rate": 2.93674034079142e-06, |
|
"loss": 0.8159, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 5.208912980619626, |
|
"learning_rate": 2.932810311292058e-06, |
|
"loss": 0.7876, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 5.419301085882204, |
|
"learning_rate": 2.928879178864946e-06, |
|
"loss": 0.8171, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.453129993869502, |
|
"learning_rate": 2.9249469535277635e-06, |
|
"loss": 0.8192, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 3.978894479574167, |
|
"learning_rate": 2.9210136453009753e-06, |
|
"loss": 0.8177, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.085215406941065, |
|
"learning_rate": 2.9170792642078057e-06, |
|
"loss": 0.7986, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.6198554119473725, |
|
"learning_rate": 2.9131438202742123e-06, |
|
"loss": 0.7632, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.722723234078222, |
|
"learning_rate": 2.9092073235288633e-06, |
|
"loss": 0.8149, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 5.181515177263234, |
|
"learning_rate": 2.9052697840031065e-06, |
|
"loss": 0.8288, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.794572021110422, |
|
"learning_rate": 2.901331211730949e-06, |
|
"loss": 0.7967, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.424733629629019, |
|
"learning_rate": 2.897391616749031e-06, |
|
"loss": 0.7658, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.217478880240176, |
|
"learning_rate": 2.8934510090965943e-06, |
|
"loss": 0.7973, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 8.450381133458173, |
|
"learning_rate": 2.889509398815467e-06, |
|
"loss": 0.8968, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 6.678550127443945, |
|
"learning_rate": 2.885566795950028e-06, |
|
"loss": 0.7672, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 4.847140586125922, |
|
"learning_rate": 2.8816232105471864e-06, |
|
"loss": 0.7939, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 4.027451532927153, |
|
"learning_rate": 2.877678652656358e-06, |
|
"loss": 0.7753, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 5.483294011279615, |
|
"learning_rate": 2.8737331323294315e-06, |
|
"loss": 0.7848, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 4.358769311669604, |
|
"learning_rate": 2.8697866596207524e-06, |
|
"loss": 0.9225, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 3.9764053893031757, |
|
"learning_rate": 2.865839244587093e-06, |
|
"loss": 0.8153, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 4.817588176503937, |
|
"learning_rate": 2.861890897287625e-06, |
|
"loss": 0.8854, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 5.88032298358066, |
|
"learning_rate": 2.8579416277838952e-06, |
|
"loss": 0.8062, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 5.4231075227119225, |
|
"learning_rate": 2.8539914461398043e-06, |
|
"loss": 0.8082, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.255932637689089, |
|
"learning_rate": 2.8500403624215733e-06, |
|
"loss": 0.801, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 3.8397836719406153, |
|
"learning_rate": 2.846088386697723e-06, |
|
"loss": 0.8418, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 3.9932629786289753, |
|
"learning_rate": 2.8421355290390506e-06, |
|
"loss": 0.8162, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.487065147178443, |
|
"learning_rate": 2.838181799518595e-06, |
|
"loss": 0.8014, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 3.914529347080895, |
|
"learning_rate": 2.8342272082116214e-06, |
|
"loss": 0.8356, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.004022127968303, |
|
"learning_rate": 2.83027176519559e-06, |
|
"loss": 0.8389, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 8.828551661954858, |
|
"learning_rate": 2.82631548055013e-06, |
|
"loss": 0.7885, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.492552440308808, |
|
"learning_rate": 2.822358364357015e-06, |
|
"loss": 0.8, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.342294518537859, |
|
"learning_rate": 2.8184004267001427e-06, |
|
"loss": 0.7626, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.14513404654822, |
|
"learning_rate": 2.8144416776654964e-06, |
|
"loss": 0.7403, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 4.207420214999504, |
|
"learning_rate": 2.8104821273411333e-06, |
|
"loss": 0.7719, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.76849216288104, |
|
"learning_rate": 2.8065217858171495e-06, |
|
"loss": 0.772, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.087940784747258, |
|
"learning_rate": 2.802560663185658e-06, |
|
"loss": 0.755, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.804412743927739, |
|
"learning_rate": 2.7985987695407618e-06, |
|
"loss": 0.7937, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 4.3563337300652725, |
|
"learning_rate": 2.7946361149785304e-06, |
|
"loss": 0.7953, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.353344075424233, |
|
"learning_rate": 2.79067270959697e-06, |
|
"loss": 0.7835, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 4.606865353160219, |
|
"learning_rate": 2.786708563496002e-06, |
|
"loss": 0.8217, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.151168827461153, |
|
"learning_rate": 2.7827436867774334e-06, |
|
"loss": 0.7994, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 4.3077719748381815, |
|
"learning_rate": 2.7787780895449353e-06, |
|
"loss": 0.81, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.939054267141848, |
|
"learning_rate": 2.774811781904013e-06, |
|
"loss": 0.8043, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 3.6482393833284537, |
|
"learning_rate": 2.7708447739619833e-06, |
|
"loss": 0.7984, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 4.469643319922747, |
|
"learning_rate": 2.7668770758279473e-06, |
|
"loss": 0.7579, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 4.297430505606569, |
|
"learning_rate": 2.762908697612765e-06, |
|
"loss": 0.7396, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 5.299365762769891, |
|
"learning_rate": 2.7589396494290287e-06, |
|
"loss": 0.9516, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 4.820980097501582, |
|
"learning_rate": 2.7549699413910387e-06, |
|
"loss": 0.7753, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 4.070459691106533, |
|
"learning_rate": 2.750999583614777e-06, |
|
"loss": 0.7792, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 3.9507862249719974, |
|
"learning_rate": 2.7470285862178804e-06, |
|
"loss": 0.7307, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 3.6716263593517255, |
|
"learning_rate": 2.743056959319616e-06, |
|
"loss": 0.7952, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 4.098216751623958, |
|
"learning_rate": 2.739084713040856e-06, |
|
"loss": 0.7741, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 5.192569762900567, |
|
"learning_rate": 2.73511185750405e-06, |
|
"loss": 0.7985, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 4.261338885304019, |
|
"learning_rate": 2.7311384028332e-06, |
|
"loss": 0.7996, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 4.104647555320273, |
|
"learning_rate": 2.7271643591538355e-06, |
|
"loss": 0.7984, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 4.119079048596377, |
|
"learning_rate": 2.723189736592986e-06, |
|
"loss": 0.8057, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 15.046890388512228, |
|
"learning_rate": 2.719214545279158e-06, |
|
"loss": 0.805, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.22626992572264, |
|
"learning_rate": 2.7152387953423047e-06, |
|
"loss": 0.8173, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.450359321360147, |
|
"learning_rate": 2.711262496913805e-06, |
|
"loss": 0.7907, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 4.084842715465518, |
|
"learning_rate": 2.707285660126435e-06, |
|
"loss": 0.7497, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 4.0680641158312865, |
|
"learning_rate": 2.703308295114342e-06, |
|
"loss": 0.8914, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 3.9396881683951595, |
|
"learning_rate": 2.6993304120130197e-06, |
|
"loss": 0.7543, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 8.168996657657983, |
|
"learning_rate": 2.6953520209592827e-06, |
|
"loss": 0.832, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.615116753830786, |
|
"learning_rate": 2.69137313209124e-06, |
|
"loss": 0.7986, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.1733727377866865, |
|
"learning_rate": 2.6873937555482664e-06, |
|
"loss": 0.8034, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 13.47432961457841, |
|
"learning_rate": 2.683413901470984e-06, |
|
"loss": 0.8469, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.634549734894035, |
|
"learning_rate": 2.6794335800012294e-06, |
|
"loss": 0.7886, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 5.128741319695401, |
|
"learning_rate": 2.6754528012820288e-06, |
|
"loss": 0.7701, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.1198392744805545, |
|
"learning_rate": 2.6714715754575757e-06, |
|
"loss": 0.803, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.337325262757384, |
|
"learning_rate": 2.6674899126732045e-06, |
|
"loss": 0.8442, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.143567365031371, |
|
"learning_rate": 2.663507823075358e-06, |
|
"loss": 0.7851, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 5.745829541899124, |
|
"learning_rate": 2.6595253168115707e-06, |
|
"loss": 0.7785, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.933563947271949, |
|
"learning_rate": 2.65554240403044e-06, |
|
"loss": 0.8497, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 3.798457207492107, |
|
"learning_rate": 2.6515590948815934e-06, |
|
"loss": 0.7611, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.660299502636275, |
|
"learning_rate": 2.6475753995156746e-06, |
|
"loss": 0.8, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.930258601091212, |
|
"learning_rate": 2.643591328084309e-06, |
|
"loss": 1.0316, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.17822237801153, |
|
"learning_rate": 2.6396068907400784e-06, |
|
"loss": 0.8839, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.338544848126124, |
|
"learning_rate": 2.635622097636501e-06, |
|
"loss": 0.7836, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.40163809982145, |
|
"learning_rate": 2.631636958928e-06, |
|
"loss": 0.7965, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.411528020705482, |
|
"learning_rate": 2.6276514847698763e-06, |
|
"loss": 0.745, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.007219388292756, |
|
"learning_rate": 2.623665685318291e-06, |
|
"loss": 0.7881, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 3.914836989848749, |
|
"learning_rate": 2.6196795707302304e-06, |
|
"loss": 0.7929, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.063241782135146, |
|
"learning_rate": 2.6156931511634838e-06, |
|
"loss": 0.801, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.213904333151254, |
|
"learning_rate": 2.61170643677662e-06, |
|
"loss": 0.8638, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.285745911664518, |
|
"learning_rate": 2.607719437728957e-06, |
|
"loss": 0.77, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.689303741924254, |
|
"learning_rate": 2.603732164180539e-06, |
|
"loss": 0.7846, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.666129815883496, |
|
"learning_rate": 2.5997446262921105e-06, |
|
"loss": 0.7803, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.477070260564313, |
|
"learning_rate": 2.595756834225089e-06, |
|
"loss": 0.7998, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.184569743823347, |
|
"learning_rate": 2.5917687981415375e-06, |
|
"loss": 0.8212, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.242849593041545, |
|
"learning_rate": 2.5877805282041456e-06, |
|
"loss": 0.7881, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 3.735902947064209, |
|
"learning_rate": 2.583792034576194e-06, |
|
"loss": 0.7876, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 4.46938280982608, |
|
"learning_rate": 2.5798033274215363e-06, |
|
"loss": 0.8288, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.033690870298263, |
|
"learning_rate": 2.575814416904569e-06, |
|
"loss": 0.8134, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.597709987316143, |
|
"learning_rate": 2.5718253131902084e-06, |
|
"loss": 0.823, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.142434190114618, |
|
"learning_rate": 2.5678360264438608e-06, |
|
"loss": 0.797, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.1203821819631115, |
|
"learning_rate": 2.5638465668314006e-06, |
|
"loss": 0.7622, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 10.857001786231981, |
|
"learning_rate": 2.5598569445191418e-06, |
|
"loss": 0.7962, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 5.560092742577767, |
|
"learning_rate": 2.5558671696738145e-06, |
|
"loss": 0.7627, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.847492024501584, |
|
"learning_rate": 2.5518772524625356e-06, |
|
"loss": 0.812, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.635222745766512, |
|
"learning_rate": 2.547887203052786e-06, |
|
"loss": 0.8278, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.436016580970295, |
|
"learning_rate": 2.543897031612382e-06, |
|
"loss": 0.7846, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 3.7357290599674355, |
|
"learning_rate": 2.539906748309454e-06, |
|
"loss": 0.8013, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.288805373285202, |
|
"learning_rate": 2.535916363312414e-06, |
|
"loss": 0.8952, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.097132254798519, |
|
"learning_rate": 2.531925886789935e-06, |
|
"loss": 0.7853, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.933595710527326, |
|
"learning_rate": 2.5279353289109227e-06, |
|
"loss": 0.7475, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 3.626882278982255, |
|
"learning_rate": 2.52394469984449e-06, |
|
"loss": 0.7467, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 4.637647959979263, |
|
"learning_rate": 2.5199540097599316e-06, |
|
"loss": 0.8511, |
|
"step": 989 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 1978,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 989,
  "total_flos": 1585526184345600.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}