|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.779220779220779,
  "eval_steps": 100000,
  "global_step": 368,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.012987012987012988, |
|
"grad_norm": 2.8171603240352896, |
|
"learning_rate": 1e-05, |
|
"loss": 0.5466, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.025974025974025976, |
|
"grad_norm": 2.8001921525321785, |
|
"learning_rate": 9.973958333333335e-06, |
|
"loss": 0.547, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03896103896103896, |
|
"grad_norm": 1.5392157012941012, |
|
"learning_rate": 9.947916666666667e-06, |
|
"loss": 0.459, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.05194805194805195, |
|
"grad_norm": 1.1573857414234903, |
|
"learning_rate": 9.921875e-06, |
|
"loss": 0.4574, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.06493506493506493, |
|
"grad_norm": 0.8995358249149084, |
|
"learning_rate": 9.895833333333334e-06, |
|
"loss": 0.421, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.07792207792207792, |
|
"grad_norm": 0.9904798146163455, |
|
"learning_rate": 9.869791666666667e-06, |
|
"loss": 0.3695, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.09090909090909091, |
|
"grad_norm": 0.9283957441209373, |
|
"learning_rate": 9.84375e-06, |
|
"loss": 0.3627, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.1038961038961039, |
|
"grad_norm": 0.7771365268975639, |
|
"learning_rate": 9.817708333333334e-06, |
|
"loss": 0.3576, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.11688311688311688, |
|
"grad_norm": 0.8022346587811421, |
|
"learning_rate": 9.791666666666666e-06, |
|
"loss": 0.3191, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.12987012987012986, |
|
"grad_norm": 0.819398725227865, |
|
"learning_rate": 9.765625e-06, |
|
"loss": 0.3287, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.14285714285714285, |
|
"grad_norm": 0.6805869638028169, |
|
"learning_rate": 9.739583333333334e-06, |
|
"loss": 0.3184, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.15584415584415584, |
|
"grad_norm": 0.5977289690207203, |
|
"learning_rate": 9.713541666666668e-06, |
|
"loss": 0.3123, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.16883116883116883, |
|
"grad_norm": 0.6566097354029716, |
|
"learning_rate": 9.6875e-06, |
|
"loss": 0.3023, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 0.6239198357102247, |
|
"learning_rate": 9.661458333333334e-06, |
|
"loss": 0.2982, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.19480519480519481, |
|
"grad_norm": 0.5689846226409981, |
|
"learning_rate": 9.635416666666668e-06, |
|
"loss": 0.3, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.2077922077922078, |
|
"grad_norm": 0.5003921754161651, |
|
"learning_rate": 9.609375000000001e-06, |
|
"loss": 0.2806, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.22077922077922077, |
|
"grad_norm": 0.4669430776472446, |
|
"learning_rate": 9.583333333333335e-06, |
|
"loss": 0.2712, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.23376623376623376, |
|
"grad_norm": 0.4464962776593643, |
|
"learning_rate": 9.557291666666667e-06, |
|
"loss": 0.2649, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.24675324675324675, |
|
"grad_norm": 0.4429858692736543, |
|
"learning_rate": 9.531250000000001e-06, |
|
"loss": 0.2709, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.2597402597402597, |
|
"grad_norm": 0.4534349349480874, |
|
"learning_rate": 9.505208333333335e-06, |
|
"loss": 0.2659, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2727272727272727, |
|
"grad_norm": 0.45604692611068276, |
|
"learning_rate": 9.479166666666667e-06, |
|
"loss": 0.274, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2857142857142857, |
|
"grad_norm": 0.42386666170861903, |
|
"learning_rate": 9.453125000000001e-06, |
|
"loss": 0.2432, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2987012987012987, |
|
"grad_norm": 0.4539418581699697, |
|
"learning_rate": 9.427083333333335e-06, |
|
"loss": 0.2479, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.3116883116883117, |
|
"grad_norm": 0.36873765590858, |
|
"learning_rate": 9.401041666666667e-06, |
|
"loss": 0.2533, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.3246753246753247, |
|
"grad_norm": 0.3597660839493326, |
|
"learning_rate": 9.375000000000001e-06, |
|
"loss": 0.2427, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.33766233766233766, |
|
"grad_norm": 0.43380755500127693, |
|
"learning_rate": 9.348958333333335e-06, |
|
"loss": 0.2629, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.35064935064935066, |
|
"grad_norm": 0.4124812851086015, |
|
"learning_rate": 9.322916666666667e-06, |
|
"loss": 0.2555, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 0.3899208758432942, |
|
"learning_rate": 9.296875e-06, |
|
"loss": 0.2385, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.37662337662337664, |
|
"grad_norm": 0.4307637034312358, |
|
"learning_rate": 9.270833333333334e-06, |
|
"loss": 0.2676, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.38961038961038963, |
|
"grad_norm": 0.381920816609879, |
|
"learning_rate": 9.244791666666667e-06, |
|
"loss": 0.2366, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.4025974025974026, |
|
"grad_norm": 0.3816394656089062, |
|
"learning_rate": 9.21875e-06, |
|
"loss": 0.2321, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.4155844155844156, |
|
"grad_norm": 0.42534009456351374, |
|
"learning_rate": 9.192708333333334e-06, |
|
"loss": 0.265, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.42857142857142855, |
|
"grad_norm": 0.37160388698301566, |
|
"learning_rate": 9.166666666666666e-06, |
|
"loss": 0.2373, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.44155844155844154, |
|
"grad_norm": 0.3801489729504126, |
|
"learning_rate": 9.140625e-06, |
|
"loss": 0.2414, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.45454545454545453, |
|
"grad_norm": 0.3913349306860418, |
|
"learning_rate": 9.114583333333334e-06, |
|
"loss": 0.2206, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.4675324675324675, |
|
"grad_norm": 0.3943320977947317, |
|
"learning_rate": 9.088541666666666e-06, |
|
"loss": 0.223, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4805194805194805, |
|
"grad_norm": 0.3702281679651669, |
|
"learning_rate": 9.0625e-06, |
|
"loss": 0.2147, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4935064935064935, |
|
"grad_norm": 0.422823856947034, |
|
"learning_rate": 9.036458333333334e-06, |
|
"loss": 0.2493, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.5064935064935064, |
|
"grad_norm": 0.38040718111635613, |
|
"learning_rate": 9.010416666666668e-06, |
|
"loss": 0.2245, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.5194805194805194, |
|
"grad_norm": 0.3440649156844092, |
|
"learning_rate": 8.984375000000002e-06, |
|
"loss": 0.2216, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5324675324675324, |
|
"grad_norm": 0.3646349376051012, |
|
"learning_rate": 8.958333333333334e-06, |
|
"loss": 0.2127, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 0.34399756355860645, |
|
"learning_rate": 8.932291666666668e-06, |
|
"loss": 0.2148, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.5584415584415584, |
|
"grad_norm": 0.3823461645884333, |
|
"learning_rate": 8.906250000000001e-06, |
|
"loss": 0.2367, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5714285714285714, |
|
"grad_norm": 0.3396475181827773, |
|
"learning_rate": 8.880208333333335e-06, |
|
"loss": 0.2097, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.5844155844155844, |
|
"grad_norm": 0.40580965210317627, |
|
"learning_rate": 8.854166666666667e-06, |
|
"loss": 0.2539, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5974025974025974, |
|
"grad_norm": 0.3720924077531135, |
|
"learning_rate": 8.828125000000001e-06, |
|
"loss": 0.2416, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6103896103896104, |
|
"grad_norm": 0.36386894867601904, |
|
"learning_rate": 8.802083333333335e-06, |
|
"loss": 0.2159, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6233766233766234, |
|
"grad_norm": 0.39963494878899575, |
|
"learning_rate": 8.776041666666667e-06, |
|
"loss": 0.2155, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6363636363636364, |
|
"grad_norm": 0.41491580966165253, |
|
"learning_rate": 8.750000000000001e-06, |
|
"loss": 0.2441, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.6493506493506493, |
|
"grad_norm": 0.413722874540919, |
|
"learning_rate": 8.723958333333335e-06, |
|
"loss": 0.2411, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6623376623376623, |
|
"grad_norm": 0.3627710473819897, |
|
"learning_rate": 8.697916666666667e-06, |
|
"loss": 0.2032, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6753246753246753, |
|
"grad_norm": 0.37183990680520124, |
|
"learning_rate": 8.671875e-06, |
|
"loss": 0.2297, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6883116883116883, |
|
"grad_norm": 0.3447910113593886, |
|
"learning_rate": 8.645833333333335e-06, |
|
"loss": 0.2152, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.7012987012987013, |
|
"grad_norm": 0.3746990091470779, |
|
"learning_rate": 8.619791666666667e-06, |
|
"loss": 0.2196, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 0.38360093812511664, |
|
"learning_rate": 8.59375e-06, |
|
"loss": 0.2351, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.3401850780087419, |
|
"learning_rate": 8.567708333333334e-06, |
|
"loss": 0.1948, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7402597402597403, |
|
"grad_norm": 0.3341353144174157, |
|
"learning_rate": 8.541666666666666e-06, |
|
"loss": 0.1985, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7532467532467533, |
|
"grad_norm": 0.3317473111284822, |
|
"learning_rate": 8.515625e-06, |
|
"loss": 0.2019, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7662337662337663, |
|
"grad_norm": 0.37322280586801393, |
|
"learning_rate": 8.489583333333334e-06, |
|
"loss": 0.2258, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.7792207792207793, |
|
"grad_norm": 0.36560124214432044, |
|
"learning_rate": 8.463541666666666e-06, |
|
"loss": 0.2188, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7922077922077922, |
|
"grad_norm": 0.3372344746230244, |
|
"learning_rate": 8.4375e-06, |
|
"loss": 0.2157, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.8051948051948052, |
|
"grad_norm": 0.3486634994045839, |
|
"learning_rate": 8.411458333333334e-06, |
|
"loss": 0.202, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8181818181818182, |
|
"grad_norm": 0.39190266385589323, |
|
"learning_rate": 8.385416666666668e-06, |
|
"loss": 0.2148, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8311688311688312, |
|
"grad_norm": 0.36845516731632094, |
|
"learning_rate": 8.359375e-06, |
|
"loss": 0.2184, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.8441558441558441, |
|
"grad_norm": 0.35457910296524253, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.2149, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8571428571428571, |
|
"grad_norm": 0.3603241548710575, |
|
"learning_rate": 8.307291666666668e-06, |
|
"loss": 0.2155, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8701298701298701, |
|
"grad_norm": 0.36738480430355874, |
|
"learning_rate": 8.281250000000001e-06, |
|
"loss": 0.2262, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8831168831168831, |
|
"grad_norm": 0.3994418788798323, |
|
"learning_rate": 8.255208333333335e-06, |
|
"loss": 0.2237, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8961038961038961, |
|
"grad_norm": 0.3818265307209604, |
|
"learning_rate": 8.229166666666667e-06, |
|
"loss": 0.2134, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.3498820594838212, |
|
"learning_rate": 8.203125000000001e-06, |
|
"loss": 0.2069, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.922077922077922, |
|
"grad_norm": 0.385917668930501, |
|
"learning_rate": 8.177083333333335e-06, |
|
"loss": 0.2123, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.935064935064935, |
|
"grad_norm": 0.39236894265571665, |
|
"learning_rate": 8.151041666666667e-06, |
|
"loss": 0.2234, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.948051948051948, |
|
"grad_norm": 0.3725234037920641, |
|
"learning_rate": 8.125000000000001e-06, |
|
"loss": 0.2066, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.961038961038961, |
|
"grad_norm": 0.35880219297664784, |
|
"learning_rate": 8.098958333333335e-06, |
|
"loss": 0.1897, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.974025974025974, |
|
"grad_norm": 0.3907765260625405, |
|
"learning_rate": 8.072916666666667e-06, |
|
"loss": 0.2294, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.987012987012987, |
|
"grad_norm": 0.36488776593694006, |
|
"learning_rate": 8.046875e-06, |
|
"loss": 0.1958, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.3787789187575852, |
|
"learning_rate": 8.020833333333335e-06, |
|
"loss": 0.2067, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 1.0129870129870129, |
|
"grad_norm": 0.448465153413621, |
|
"learning_rate": 7.994791666666667e-06, |
|
"loss": 0.1244, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.025974025974026, |
|
"grad_norm": 0.3915974513800719, |
|
"learning_rate": 7.96875e-06, |
|
"loss": 0.1243, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.0389610389610389, |
|
"grad_norm": 0.3963769912247312, |
|
"learning_rate": 7.942708333333334e-06, |
|
"loss": 0.1172, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.051948051948052, |
|
"grad_norm": 0.6014772610010978, |
|
"learning_rate": 7.916666666666667e-06, |
|
"loss": 0.1342, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0649350649350648, |
|
"grad_norm": 0.36513022825436714, |
|
"learning_rate": 7.890625e-06, |
|
"loss": 0.1155, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.077922077922078, |
|
"grad_norm": 0.36495385435491967, |
|
"learning_rate": 7.864583333333334e-06, |
|
"loss": 0.1278, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 0.3817117188167101, |
|
"learning_rate": 7.838541666666666e-06, |
|
"loss": 0.1294, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.103896103896104, |
|
"grad_norm": 0.3466875502600336, |
|
"learning_rate": 7.8125e-06, |
|
"loss": 0.1187, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.1168831168831168, |
|
"grad_norm": 0.38224176701280815, |
|
"learning_rate": 7.786458333333334e-06, |
|
"loss": 0.1163, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.12987012987013, |
|
"grad_norm": 0.387717741867272, |
|
"learning_rate": 7.760416666666666e-06, |
|
"loss": 0.1229, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1428571428571428, |
|
"grad_norm": 0.33922354465727605, |
|
"learning_rate": 7.734375e-06, |
|
"loss": 0.1026, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.155844155844156, |
|
"grad_norm": 0.35409301762559464, |
|
"learning_rate": 7.708333333333334e-06, |
|
"loss": 0.1208, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.1688311688311688, |
|
"grad_norm": 0.4114669310411066, |
|
"learning_rate": 7.682291666666668e-06, |
|
"loss": 0.138, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1818181818181819, |
|
"grad_norm": 0.3522471695287209, |
|
"learning_rate": 7.656250000000001e-06, |
|
"loss": 0.1268, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1948051948051948, |
|
"grad_norm": 0.3494758184949061, |
|
"learning_rate": 7.630208333333334e-06, |
|
"loss": 0.1118, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.2077922077922079, |
|
"grad_norm": 0.35673580541279293, |
|
"learning_rate": 7.6041666666666666e-06, |
|
"loss": 0.111, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2207792207792207, |
|
"grad_norm": 0.2877699742066818, |
|
"learning_rate": 7.578125e-06, |
|
"loss": 0.1024, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.2337662337662338, |
|
"grad_norm": 0.36809061483198696, |
|
"learning_rate": 7.552083333333334e-06, |
|
"loss": 0.125, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2467532467532467, |
|
"grad_norm": 0.3326924572780158, |
|
"learning_rate": 7.526041666666667e-06, |
|
"loss": 0.1165, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2597402597402598, |
|
"grad_norm": 0.3381064907954975, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.1242, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2727272727272727, |
|
"grad_norm": 0.31808537453343166, |
|
"learning_rate": 7.473958333333334e-06, |
|
"loss": 0.1194, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2857142857142856, |
|
"grad_norm": 0.3338293488215449, |
|
"learning_rate": 7.447916666666667e-06, |
|
"loss": 0.1045, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.2987012987012987, |
|
"grad_norm": 0.32638382374328245, |
|
"learning_rate": 7.421875000000001e-06, |
|
"loss": 0.1274, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.3116883116883118, |
|
"grad_norm": 0.3798687835449094, |
|
"learning_rate": 7.395833333333335e-06, |
|
"loss": 0.1216, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3246753246753247, |
|
"grad_norm": 0.42118782006430655, |
|
"learning_rate": 7.369791666666667e-06, |
|
"loss": 0.1116, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3376623376623376, |
|
"grad_norm": 0.31491431320109803, |
|
"learning_rate": 7.343750000000001e-06, |
|
"loss": 0.123, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.3506493506493507, |
|
"grad_norm": 0.31006216567911504, |
|
"learning_rate": 7.3177083333333345e-06, |
|
"loss": 0.1186, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.3636363636363638, |
|
"grad_norm": 0.32515923422393395, |
|
"learning_rate": 7.291666666666667e-06, |
|
"loss": 0.1069, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.3766233766233766, |
|
"grad_norm": 0.3258624400110308, |
|
"learning_rate": 7.265625e-06, |
|
"loss": 0.0925, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3896103896103895, |
|
"grad_norm": 0.3147044898639944, |
|
"learning_rate": 7.239583333333334e-06, |
|
"loss": 0.1114, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.4025974025974026, |
|
"grad_norm": 0.3155522217588101, |
|
"learning_rate": 7.213541666666667e-06, |
|
"loss": 0.1237, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.4155844155844157, |
|
"grad_norm": 0.3144003636320752, |
|
"learning_rate": 7.1875e-06, |
|
"loss": 0.1159, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.4285714285714286, |
|
"grad_norm": 0.32254719268793725, |
|
"learning_rate": 7.161458333333334e-06, |
|
"loss": 0.1196, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4415584415584415, |
|
"grad_norm": 0.315615751768773, |
|
"learning_rate": 7.135416666666667e-06, |
|
"loss": 0.1121, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 0.2995055345846169, |
|
"learning_rate": 7.109375000000001e-06, |
|
"loss": 0.1199, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4675324675324675, |
|
"grad_norm": 0.3457518743206924, |
|
"learning_rate": 7.083333333333335e-06, |
|
"loss": 0.1262, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4805194805194806, |
|
"grad_norm": 0.34216271471145815, |
|
"learning_rate": 7.057291666666667e-06, |
|
"loss": 0.1067, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.4935064935064934, |
|
"grad_norm": 0.3178295536529888, |
|
"learning_rate": 7.031250000000001e-06, |
|
"loss": 0.1202, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.5064935064935066, |
|
"grad_norm": 0.2819573454273538, |
|
"learning_rate": 7.0052083333333345e-06, |
|
"loss": 0.1033, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.5194805194805194, |
|
"grad_norm": 0.32378485295884113, |
|
"learning_rate": 6.979166666666667e-06, |
|
"loss": 0.117, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5324675324675323, |
|
"grad_norm": 0.3154734102111175, |
|
"learning_rate": 6.9531250000000004e-06, |
|
"loss": 0.1089, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5454545454545454, |
|
"grad_norm": 0.3112595772562475, |
|
"learning_rate": 6.927083333333334e-06, |
|
"loss": 0.1131, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.5584415584415585, |
|
"grad_norm": 0.308567184284047, |
|
"learning_rate": 6.901041666666667e-06, |
|
"loss": 0.0981, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5714285714285714, |
|
"grad_norm": 0.2941894268668758, |
|
"learning_rate": 6.875e-06, |
|
"loss": 0.0992, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5844155844155843, |
|
"grad_norm": 0.3244832758495866, |
|
"learning_rate": 6.848958333333334e-06, |
|
"loss": 0.1151, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5974025974025974, |
|
"grad_norm": 0.3486204249378559, |
|
"learning_rate": 6.822916666666667e-06, |
|
"loss": 0.1144, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.6103896103896105, |
|
"grad_norm": 0.3108839874338576, |
|
"learning_rate": 6.796875000000001e-06, |
|
"loss": 0.1166, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6233766233766234, |
|
"grad_norm": 0.3362850203366801, |
|
"learning_rate": 6.770833333333334e-06, |
|
"loss": 0.1166, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.6363636363636362, |
|
"grad_norm": 0.3001509180382928, |
|
"learning_rate": 6.744791666666667e-06, |
|
"loss": 0.1105, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6493506493506493, |
|
"grad_norm": 0.2997092451007753, |
|
"learning_rate": 6.718750000000001e-06, |
|
"loss": 0.1086, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6623376623376624, |
|
"grad_norm": 0.3427204611958163, |
|
"learning_rate": 6.6927083333333345e-06, |
|
"loss": 0.1054, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6753246753246753, |
|
"grad_norm": 0.2994776086256174, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.1154, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6883116883116882, |
|
"grad_norm": 0.31431249708262365, |
|
"learning_rate": 6.6406250000000005e-06, |
|
"loss": 0.1227, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.7012987012987013, |
|
"grad_norm": 0.311578248576694, |
|
"learning_rate": 6.614583333333334e-06, |
|
"loss": 0.1084, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.7142857142857144, |
|
"grad_norm": 0.3316176770331044, |
|
"learning_rate": 6.5885416666666664e-06, |
|
"loss": 0.1104, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.7272727272727273, |
|
"grad_norm": 0.30426933433479547, |
|
"learning_rate": 6.5625e-06, |
|
"loss": 0.1125, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7402597402597402, |
|
"grad_norm": 0.28043555053128094, |
|
"learning_rate": 6.536458333333334e-06, |
|
"loss": 0.0992, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.7532467532467533, |
|
"grad_norm": 0.30386469668813254, |
|
"learning_rate": 6.510416666666667e-06, |
|
"loss": 0.1091, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7662337662337664, |
|
"grad_norm": 0.3224355180252757, |
|
"learning_rate": 6.484375000000001e-06, |
|
"loss": 0.1057, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7792207792207793, |
|
"grad_norm": 0.33241428871080214, |
|
"learning_rate": 6.458333333333334e-06, |
|
"loss": 0.1059, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7922077922077921, |
|
"grad_norm": 0.3147577283191284, |
|
"learning_rate": 6.432291666666667e-06, |
|
"loss": 0.1182, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.8051948051948052, |
|
"grad_norm": 0.3078113700946741, |
|
"learning_rate": 6.406250000000001e-06, |
|
"loss": 0.1041, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.29167494344756056, |
|
"learning_rate": 6.3802083333333345e-06, |
|
"loss": 0.1118, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8311688311688312, |
|
"grad_norm": 0.3185841818138518, |
|
"learning_rate": 6.354166666666667e-06, |
|
"loss": 0.1086, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.844155844155844, |
|
"grad_norm": 0.32641774304326837, |
|
"learning_rate": 6.3281250000000005e-06, |
|
"loss": 0.1109, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8571428571428572, |
|
"grad_norm": 0.3203734291459141, |
|
"learning_rate": 6.302083333333334e-06, |
|
"loss": 0.0955, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.87012987012987, |
|
"grad_norm": 0.2890498272171896, |
|
"learning_rate": 6.2760416666666665e-06, |
|
"loss": 0.101, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.883116883116883, |
|
"grad_norm": 0.29736342466973853, |
|
"learning_rate": 6.25e-06, |
|
"loss": 0.118, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.896103896103896, |
|
"grad_norm": 0.31984473875794944, |
|
"learning_rate": 6.223958333333334e-06, |
|
"loss": 0.1102, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.9090909090909092, |
|
"grad_norm": 0.3172704335356637, |
|
"learning_rate": 6.197916666666667e-06, |
|
"loss": 0.1082, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.922077922077922, |
|
"grad_norm": 0.2973111874066074, |
|
"learning_rate": 6.171875e-06, |
|
"loss": 0.1197, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.935064935064935, |
|
"grad_norm": 0.3095998340575272, |
|
"learning_rate": 6.145833333333334e-06, |
|
"loss": 0.1079, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.948051948051948, |
|
"grad_norm": 0.31787152770234606, |
|
"learning_rate": 6.119791666666667e-06, |
|
"loss": 0.0994, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9610389610389611, |
|
"grad_norm": 0.28985366328378903, |
|
"learning_rate": 6.093750000000001e-06, |
|
"loss": 0.1202, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.974025974025974, |
|
"grad_norm": 0.3052098497087059, |
|
"learning_rate": 6.0677083333333346e-06, |
|
"loss": 0.1137, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.987012987012987, |
|
"grad_norm": 0.3066915552338606, |
|
"learning_rate": 6.041666666666667e-06, |
|
"loss": 0.1172, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.30629249293934785, |
|
"learning_rate": 6.0156250000000005e-06, |
|
"loss": 0.1032, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 2.012987012987013, |
|
"grad_norm": 0.35023782090844824, |
|
"learning_rate": 5.989583333333334e-06, |
|
"loss": 0.0557, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 2.0259740259740258, |
|
"grad_norm": 0.2862415000889261, |
|
"learning_rate": 5.9635416666666665e-06, |
|
"loss": 0.0656, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.038961038961039, |
|
"grad_norm": 0.25460716359572677, |
|
"learning_rate": 5.9375e-06, |
|
"loss": 0.0559, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 2.051948051948052, |
|
"grad_norm": 0.32189640709965023, |
|
"learning_rate": 5.911458333333334e-06, |
|
"loss": 0.0563, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.064935064935065, |
|
"grad_norm": 0.34761763544000046, |
|
"learning_rate": 5.885416666666667e-06, |
|
"loss": 0.0695, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 2.0779220779220777, |
|
"grad_norm": 0.2780054835792156, |
|
"learning_rate": 5.859375e-06, |
|
"loss": 0.0555, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.090909090909091, |
|
"grad_norm": 0.24032473108802174, |
|
"learning_rate": 5.833333333333334e-06, |
|
"loss": 0.0565, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 2.103896103896104, |
|
"grad_norm": 0.2950389512514924, |
|
"learning_rate": 5.807291666666667e-06, |
|
"loss": 0.0566, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.116883116883117, |
|
"grad_norm": 0.22432748520418566, |
|
"learning_rate": 5.781250000000001e-06, |
|
"loss": 0.056, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 2.1298701298701297, |
|
"grad_norm": 0.24784953484105113, |
|
"learning_rate": 5.755208333333335e-06, |
|
"loss": 0.0479, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.142857142857143, |
|
"grad_norm": 0.2702464812816263, |
|
"learning_rate": 5.729166666666667e-06, |
|
"loss": 0.0591, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 2.155844155844156, |
|
"grad_norm": 0.24201294320174244, |
|
"learning_rate": 5.7031250000000006e-06, |
|
"loss": 0.0599, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.168831168831169, |
|
"grad_norm": 0.27746964323966916, |
|
"learning_rate": 5.677083333333334e-06, |
|
"loss": 0.0533, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 2.1818181818181817, |
|
"grad_norm": 0.2897264068192118, |
|
"learning_rate": 5.6510416666666665e-06, |
|
"loss": 0.053, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.1948051948051948, |
|
"grad_norm": 0.27265023383714393, |
|
"learning_rate": 5.625e-06, |
|
"loss": 0.0663, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 2.207792207792208, |
|
"grad_norm": 0.23815823204180192, |
|
"learning_rate": 5.598958333333334e-06, |
|
"loss": 0.0553, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.220779220779221, |
|
"grad_norm": 0.27451200943828685, |
|
"learning_rate": 5.572916666666667e-06, |
|
"loss": 0.0628, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 2.2337662337662336, |
|
"grad_norm": 0.24756583000652174, |
|
"learning_rate": 5.546875e-06, |
|
"loss": 0.0556, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2467532467532467, |
|
"grad_norm": 0.2544315370018155, |
|
"learning_rate": 5.520833333333334e-06, |
|
"loss": 0.0578, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 2.25974025974026, |
|
"grad_norm": 0.23275351662072738, |
|
"learning_rate": 5.494791666666667e-06, |
|
"loss": 0.0538, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.2727272727272725, |
|
"grad_norm": 0.2618251778102254, |
|
"learning_rate": 5.468750000000001e-06, |
|
"loss": 0.0658, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 2.2857142857142856, |
|
"grad_norm": 0.25188249686135555, |
|
"learning_rate": 5.442708333333334e-06, |
|
"loss": 0.0486, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.2987012987012987, |
|
"grad_norm": 0.28055649582376346, |
|
"learning_rate": 5.416666666666667e-06, |
|
"loss": 0.0626, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 2.311688311688312, |
|
"grad_norm": 0.24599632120103626, |
|
"learning_rate": 5.390625000000001e-06, |
|
"loss": 0.0583, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.324675324675325, |
|
"grad_norm": 0.27127338612129825, |
|
"learning_rate": 5.364583333333334e-06, |
|
"loss": 0.0586, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 2.3376623376623376, |
|
"grad_norm": 0.24409520592231887, |
|
"learning_rate": 5.3385416666666666e-06, |
|
"loss": 0.0546, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.3506493506493507, |
|
"grad_norm": 0.2730534578010532, |
|
"learning_rate": 5.3125e-06, |
|
"loss": 0.049, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 2.3636363636363638, |
|
"grad_norm": 0.3063550918757163, |
|
"learning_rate": 5.286458333333334e-06, |
|
"loss": 0.0626, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3766233766233764, |
|
"grad_norm": 0.24974475183252906, |
|
"learning_rate": 5.260416666666666e-06, |
|
"loss": 0.0644, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 2.3896103896103895, |
|
"grad_norm": 0.27100592128125456, |
|
"learning_rate": 5.234375e-06, |
|
"loss": 0.0585, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.4025974025974026, |
|
"grad_norm": 0.2814186690984644, |
|
"learning_rate": 5.208333333333334e-06, |
|
"loss": 0.0582, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 2.4155844155844157, |
|
"grad_norm": 0.25261977006702085, |
|
"learning_rate": 5.182291666666667e-06, |
|
"loss": 0.0518, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.4285714285714284, |
|
"grad_norm": 0.2657725313570488, |
|
"learning_rate": 5.156250000000001e-06, |
|
"loss": 0.0578, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 2.4415584415584415, |
|
"grad_norm": 0.2546037193133823, |
|
"learning_rate": 5.130208333333334e-06, |
|
"loss": 0.0538, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.4545454545454546, |
|
"grad_norm": 0.281253389764468, |
|
"learning_rate": 5.104166666666667e-06, |
|
"loss": 0.0635, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 2.4675324675324677, |
|
"grad_norm": 0.26338794462461806, |
|
"learning_rate": 5.078125000000001e-06, |
|
"loss": 0.0562, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.4805194805194803, |
|
"grad_norm": 0.24941779389697705, |
|
"learning_rate": 5.0520833333333344e-06, |
|
"loss": 0.0556, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 2.4935064935064934, |
|
"grad_norm": 0.24447967903315854, |
|
"learning_rate": 5.026041666666667e-06, |
|
"loss": 0.0583, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.5064935064935066, |
|
"grad_norm": 0.23904209946561095, |
|
"learning_rate": 5e-06, |
|
"loss": 0.0466, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 2.5194805194805197, |
|
"grad_norm": 0.2647184458213586, |
|
"learning_rate": 4.973958333333333e-06, |
|
"loss": 0.0631, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.5324675324675323, |
|
"grad_norm": 0.22821154672037003, |
|
"learning_rate": 4.947916666666667e-06, |
|
"loss": 0.0473, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.5454545454545454, |
|
"grad_norm": 0.2626904902801862, |
|
"learning_rate": 4.921875e-06, |
|
"loss": 0.0506, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.5584415584415585, |
|
"grad_norm": 0.24930087318211805, |
|
"learning_rate": 4.895833333333333e-06, |
|
"loss": 0.0572, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.571428571428571, |
|
"grad_norm": 0.2481390560509281, |
|
"learning_rate": 4.869791666666667e-06, |
|
"loss": 0.0555, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.5844155844155843, |
|
"grad_norm": 0.2877573573303393, |
|
"learning_rate": 4.84375e-06, |
|
"loss": 0.067, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.5974025974025974, |
|
"grad_norm": 0.25217308853248577, |
|
"learning_rate": 4.817708333333334e-06, |
|
"loss": 0.0471, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.6103896103896105, |
|
"grad_norm": 0.24328918721469303, |
|
"learning_rate": 4.791666666666668e-06, |
|
"loss": 0.0576, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.6233766233766236, |
|
"grad_norm": 0.2523930869041385, |
|
"learning_rate": 4.765625000000001e-06, |
|
"loss": 0.0545, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.6363636363636362, |
|
"grad_norm": 0.2535471838454832, |
|
"learning_rate": 4.739583333333334e-06, |
|
"loss": 0.0625, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.6493506493506493, |
|
"grad_norm": 0.2712254471205784, |
|
"learning_rate": 4.7135416666666675e-06, |
|
"loss": 0.061, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.6623376623376624, |
|
"grad_norm": 0.26161221717819355, |
|
"learning_rate": 4.6875000000000004e-06, |
|
"loss": 0.0567, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.675324675324675, |
|
"grad_norm": 0.24838669747413192, |
|
"learning_rate": 4.661458333333333e-06, |
|
"loss": 0.0518, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.688311688311688, |
|
"grad_norm": 0.23966177399361988, |
|
"learning_rate": 4.635416666666667e-06, |
|
"loss": 0.0542, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.7012987012987013, |
|
"grad_norm": 0.24657984039301706, |
|
"learning_rate": 4.609375e-06, |
|
"loss": 0.0666, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.7142857142857144, |
|
"grad_norm": 0.2583103237439079, |
|
"learning_rate": 4.583333333333333e-06, |
|
"loss": 0.0545, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.7272727272727275, |
|
"grad_norm": 0.2562617620737508, |
|
"learning_rate": 4.557291666666667e-06, |
|
"loss": 0.0558, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.74025974025974, |
|
"grad_norm": 0.23308521833875492, |
|
"learning_rate": 4.53125e-06, |
|
"loss": 0.0496, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.7532467532467533, |
|
"grad_norm": 0.24092047046705078, |
|
"learning_rate": 4.505208333333334e-06, |
|
"loss": 0.0596, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.7662337662337664, |
|
"grad_norm": 0.27349855814243884, |
|
"learning_rate": 4.479166666666667e-06, |
|
"loss": 0.0468, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.779220779220779, |
|
"grad_norm": 0.2275758123203626, |
|
"learning_rate": 4.453125000000001e-06, |
|
"loss": 0.056, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.792207792207792, |
|
"grad_norm": 0.266884630074945, |
|
"learning_rate": 4.427083333333334e-06, |
|
"loss": 0.0571, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.8051948051948052, |
|
"grad_norm": 0.25024069366637947, |
|
"learning_rate": 4.4010416666666675e-06, |
|
"loss": 0.0642, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.8181818181818183, |
|
"grad_norm": 0.24857662695800786, |
|
"learning_rate": 4.3750000000000005e-06, |
|
"loss": 0.0486, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.8311688311688314, |
|
"grad_norm": 0.25080858473278267, |
|
"learning_rate": 4.3489583333333334e-06, |
|
"loss": 0.0584, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.844155844155844, |
|
"grad_norm": 0.24530740860883163, |
|
"learning_rate": 4.322916666666667e-06, |
|
"loss": 0.0633, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.857142857142857, |
|
"grad_norm": 0.23913022308407994, |
|
"learning_rate": 4.296875e-06, |
|
"loss": 0.0549, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.87012987012987, |
|
"grad_norm": 0.24149083025156237, |
|
"learning_rate": 4.270833333333333e-06, |
|
"loss": 0.0467, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.883116883116883, |
|
"grad_norm": 0.26965528602346694, |
|
"learning_rate": 4.244791666666667e-06, |
|
"loss": 0.0606, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.896103896103896, |
|
"grad_norm": 0.25365858810429237, |
|
"learning_rate": 4.21875e-06, |
|
"loss": 0.0694, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.909090909090909, |
|
"grad_norm": 0.24179541312218353, |
|
"learning_rate": 4.192708333333334e-06, |
|
"loss": 0.0616, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.9220779220779223, |
|
"grad_norm": 0.2361020434984871, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 0.0476, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.935064935064935, |
|
"grad_norm": 0.27226949918910437, |
|
"learning_rate": 4.140625000000001e-06, |
|
"loss": 0.0642, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.948051948051948, |
|
"grad_norm": 0.2527942476147371, |
|
"learning_rate": 4.114583333333334e-06, |
|
"loss": 0.0619, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.961038961038961, |
|
"grad_norm": 0.24251482806607963, |
|
"learning_rate": 4.0885416666666675e-06, |
|
"loss": 0.0525, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.974025974025974, |
|
"grad_norm": 0.23476269808521869, |
|
"learning_rate": 4.0625000000000005e-06, |
|
"loss": 0.062, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.987012987012987, |
|
"grad_norm": 0.2374808258403478, |
|
"learning_rate": 4.0364583333333335e-06, |
|
"loss": 0.0505, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.24928176045345568, |
|
"learning_rate": 4.010416666666667e-06, |
|
"loss": 0.0626, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 3.012987012987013, |
|
"grad_norm": 0.22194182328876036, |
|
"learning_rate": 3.984375e-06, |
|
"loss": 0.0344, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 3.0259740259740258, |
|
"grad_norm": 0.22872923006570003, |
|
"learning_rate": 3.958333333333333e-06, |
|
"loss": 0.0413, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 3.038961038961039, |
|
"grad_norm": 0.19665583235009848, |
|
"learning_rate": 3.932291666666667e-06, |
|
"loss": 0.0287, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.051948051948052, |
|
"grad_norm": 0.19667069366179346, |
|
"learning_rate": 3.90625e-06, |
|
"loss": 0.0352, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 3.064935064935065, |
|
"grad_norm": 0.215711200723706, |
|
"learning_rate": 3.880208333333333e-06, |
|
"loss": 0.0333, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.0779220779220777, |
|
"grad_norm": 0.2313666120277444, |
|
"learning_rate": 3.854166666666667e-06, |
|
"loss": 0.0284, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 3.090909090909091, |
|
"grad_norm": 0.23091064810625503, |
|
"learning_rate": 3.828125000000001e-06, |
|
"loss": 0.0328, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.103896103896104, |
|
"grad_norm": 0.20452236865758217, |
|
"learning_rate": 3.8020833333333333e-06, |
|
"loss": 0.0319, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 3.116883116883117, |
|
"grad_norm": 0.2130097271956879, |
|
"learning_rate": 3.776041666666667e-06, |
|
"loss": 0.0353, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.1298701298701297, |
|
"grad_norm": 0.17171823929322136, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.0259, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 3.142857142857143, |
|
"grad_norm": 0.18378369851335255, |
|
"learning_rate": 3.7239583333333335e-06, |
|
"loss": 0.0313, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.155844155844156, |
|
"grad_norm": 0.1828525728293844, |
|
"learning_rate": 3.6979166666666673e-06, |
|
"loss": 0.028, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 3.168831168831169, |
|
"grad_norm": 0.19380765006793751, |
|
"learning_rate": 3.6718750000000003e-06, |
|
"loss": 0.0289, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.1818181818181817, |
|
"grad_norm": 0.1990094351044486, |
|
"learning_rate": 3.6458333333333333e-06, |
|
"loss": 0.027, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 3.1948051948051948, |
|
"grad_norm": 0.18792815598678744, |
|
"learning_rate": 3.619791666666667e-06, |
|
"loss": 0.0291, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.207792207792208, |
|
"grad_norm": 0.18192941363864568, |
|
"learning_rate": 3.59375e-06, |
|
"loss": 0.0333, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 3.220779220779221, |
|
"grad_norm": 0.19920093771078823, |
|
"learning_rate": 3.5677083333333335e-06, |
|
"loss": 0.0359, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.2337662337662336, |
|
"grad_norm": 0.20752665916487095, |
|
"learning_rate": 3.5416666666666673e-06, |
|
"loss": 0.0346, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 3.2467532467532467, |
|
"grad_norm": 0.19735194752925486, |
|
"learning_rate": 3.5156250000000003e-06, |
|
"loss": 0.0286, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.25974025974026, |
|
"grad_norm": 0.21107169053286765, |
|
"learning_rate": 3.4895833333333333e-06, |
|
"loss": 0.0302, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 3.2727272727272725, |
|
"grad_norm": 0.19257404946331436, |
|
"learning_rate": 3.463541666666667e-06, |
|
"loss": 0.0266, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.2857142857142856, |
|
"grad_norm": 0.20416526855617004, |
|
"learning_rate": 3.4375e-06, |
|
"loss": 0.0338, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 3.2987012987012987, |
|
"grad_norm": 0.16009297749675383, |
|
"learning_rate": 3.4114583333333335e-06, |
|
"loss": 0.0298, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.311688311688312, |
|
"grad_norm": 0.1865805004238836, |
|
"learning_rate": 3.385416666666667e-06, |
|
"loss": 0.0381, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 3.324675324675325, |
|
"grad_norm": 0.1856518020717426, |
|
"learning_rate": 3.3593750000000003e-06, |
|
"loss": 0.032, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.3376623376623376, |
|
"grad_norm": 0.1643615680531049, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.0261, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 3.3506493506493507, |
|
"grad_norm": 0.20350173056124413, |
|
"learning_rate": 3.307291666666667e-06, |
|
"loss": 0.0289, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.3636363636363638, |
|
"grad_norm": 0.1851001849213331, |
|
"learning_rate": 3.28125e-06, |
|
"loss": 0.0303, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 3.3766233766233764, |
|
"grad_norm": 0.17486718674776422, |
|
"learning_rate": 3.2552083333333335e-06, |
|
"loss": 0.0341, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3896103896103895, |
|
"grad_norm": 0.20208654835258116, |
|
"learning_rate": 3.229166666666667e-06, |
|
"loss": 0.0292, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 3.4025974025974026, |
|
"grad_norm": 0.1879927363412167, |
|
"learning_rate": 3.2031250000000004e-06, |
|
"loss": 0.029, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.4155844155844157, |
|
"grad_norm": 0.2092150414129076, |
|
"learning_rate": 3.1770833333333333e-06, |
|
"loss": 0.0335, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 3.4285714285714284, |
|
"grad_norm": 0.1828398699501031, |
|
"learning_rate": 3.151041666666667e-06, |
|
"loss": 0.0287, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.4415584415584415, |
|
"grad_norm": 0.1646440626911746, |
|
"learning_rate": 3.125e-06, |
|
"loss": 0.0249, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 3.4545454545454546, |
|
"grad_norm": 0.2213658118877642, |
|
"learning_rate": 3.0989583333333336e-06, |
|
"loss": 0.0313, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.4675324675324677, |
|
"grad_norm": 0.19833753553756947, |
|
"learning_rate": 3.072916666666667e-06, |
|
"loss": 0.0305, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 3.4805194805194803, |
|
"grad_norm": 0.18639002939061947, |
|
"learning_rate": 3.0468750000000004e-06, |
|
"loss": 0.0285, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4935064935064934, |
|
"grad_norm": 0.2049138683286163, |
|
"learning_rate": 3.0208333333333334e-06, |
|
"loss": 0.0253, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 3.5064935064935066, |
|
"grad_norm": 0.17884962390491255, |
|
"learning_rate": 2.994791666666667e-06, |
|
"loss": 0.0307, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.5194805194805197, |
|
"grad_norm": 0.1982271960154676, |
|
"learning_rate": 2.96875e-06, |
|
"loss": 0.0269, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 3.5324675324675323, |
|
"grad_norm": 0.1802446997280461, |
|
"learning_rate": 2.9427083333333336e-06, |
|
"loss": 0.0289, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.5454545454545454, |
|
"grad_norm": 0.22296142453499057, |
|
"learning_rate": 2.916666666666667e-06, |
|
"loss": 0.0312, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 3.5584415584415585, |
|
"grad_norm": 0.20453986728427528, |
|
"learning_rate": 2.8906250000000004e-06, |
|
"loss": 0.0314, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.571428571428571, |
|
"grad_norm": 0.18470206760727637, |
|
"learning_rate": 2.8645833333333334e-06, |
|
"loss": 0.0224, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 3.5844155844155843, |
|
"grad_norm": 0.18999613246744235, |
|
"learning_rate": 2.838541666666667e-06, |
|
"loss": 0.0362, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.5974025974025974, |
|
"grad_norm": 0.1680826823967811, |
|
"learning_rate": 2.8125e-06, |
|
"loss": 0.0295, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 3.6103896103896105, |
|
"grad_norm": 0.1750343035924861, |
|
"learning_rate": 2.7864583333333336e-06, |
|
"loss": 0.0252, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.6233766233766236, |
|
"grad_norm": 0.1889879529926468, |
|
"learning_rate": 2.760416666666667e-06, |
|
"loss": 0.0251, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 3.6363636363636362, |
|
"grad_norm": 0.18898957778162337, |
|
"learning_rate": 2.7343750000000004e-06, |
|
"loss": 0.0289, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.6493506493506493, |
|
"grad_norm": 0.178208525512879, |
|
"learning_rate": 2.7083333333333334e-06, |
|
"loss": 0.0256, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 3.6623376623376624, |
|
"grad_norm": 0.18961129083755054, |
|
"learning_rate": 2.682291666666667e-06, |
|
"loss": 0.0338, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.675324675324675, |
|
"grad_norm": 0.19080605075919305, |
|
"learning_rate": 2.65625e-06, |
|
"loss": 0.0267, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 3.688311688311688, |
|
"grad_norm": 0.2147288655205537, |
|
"learning_rate": 2.630208333333333e-06, |
|
"loss": 0.0261, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.7012987012987013, |
|
"grad_norm": 0.17582709254045928, |
|
"learning_rate": 2.604166666666667e-06, |
|
"loss": 0.0223, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.7142857142857144, |
|
"grad_norm": 0.17094890369659926, |
|
"learning_rate": 2.5781250000000004e-06, |
|
"loss": 0.0292, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.7272727272727275, |
|
"grad_norm": 0.1998709196442666, |
|
"learning_rate": 2.5520833333333334e-06, |
|
"loss": 0.0304, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 3.74025974025974, |
|
"grad_norm": 0.17072439958723487, |
|
"learning_rate": 2.5260416666666672e-06, |
|
"loss": 0.0269, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.7532467532467533, |
|
"grad_norm": 0.1903923054426443, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.0296, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 3.7662337662337664, |
|
"grad_norm": 0.18728539960718932, |
|
"learning_rate": 2.4739583333333336e-06, |
|
"loss": 0.0286, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.779220779220779, |
|
"grad_norm": 0.1747917940824676, |
|
"learning_rate": 2.4479166666666666e-06, |
|
"loss": 0.0349, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 3.792207792207792, |
|
"grad_norm": 0.17515549499079433, |
|
"learning_rate": 2.421875e-06, |
|
"loss": 0.027, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.8051948051948052, |
|
"grad_norm": 0.17289038920757535, |
|
"learning_rate": 2.395833333333334e-06, |
|
"loss": 0.0308, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 3.8181818181818183, |
|
"grad_norm": 0.1620641486147313, |
|
"learning_rate": 2.369791666666667e-06, |
|
"loss": 0.0235, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.8311688311688314, |
|
"grad_norm": 0.1800272114039067, |
|
"learning_rate": 2.3437500000000002e-06, |
|
"loss": 0.029, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.844155844155844, |
|
"grad_norm": 0.1714629421802426, |
|
"learning_rate": 2.3177083333333336e-06, |
|
"loss": 0.0325, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.857142857142857, |
|
"grad_norm": 0.19442415161637205, |
|
"learning_rate": 2.2916666666666666e-06, |
|
"loss": 0.0289, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.87012987012987, |
|
"grad_norm": 0.17365311658981006, |
|
"learning_rate": 2.265625e-06, |
|
"loss": 0.0264, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.883116883116883, |
|
"grad_norm": 0.1562256172576751, |
|
"learning_rate": 2.2395833333333334e-06, |
|
"loss": 0.0204, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.896103896103896, |
|
"grad_norm": 0.18527276756636948, |
|
"learning_rate": 2.213541666666667e-06, |
|
"loss": 0.038, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.909090909090909, |
|
"grad_norm": 0.185548190702714, |
|
"learning_rate": 2.1875000000000002e-06, |
|
"loss": 0.0274, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.9220779220779223, |
|
"grad_norm": 0.18710521272243544, |
|
"learning_rate": 2.1614583333333336e-06, |
|
"loss": 0.0282, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.935064935064935, |
|
"grad_norm": 0.19705254815926593, |
|
"learning_rate": 2.1354166666666666e-06, |
|
"loss": 0.029, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.948051948051948, |
|
"grad_norm": 0.18926365228746053, |
|
"learning_rate": 2.109375e-06, |
|
"loss": 0.031, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.961038961038961, |
|
"grad_norm": 0.20932783484563577, |
|
"learning_rate": 2.0833333333333334e-06, |
|
"loss": 0.0347, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.974025974025974, |
|
"grad_norm": 0.1844456439281899, |
|
"learning_rate": 2.057291666666667e-06, |
|
"loss": 0.0297, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.987012987012987, |
|
"grad_norm": 0.17809794425485584, |
|
"learning_rate": 2.0312500000000002e-06, |
|
"loss": 0.0286, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.20096357456266348, |
|
"learning_rate": 2.0052083333333337e-06, |
|
"loss": 0.0258, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 4.012987012987013, |
|
"grad_norm": 0.15779488457929053, |
|
"learning_rate": 1.9791666666666666e-06, |
|
"loss": 0.0197, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 4.025974025974026, |
|
"grad_norm": 0.17635189527472814, |
|
"learning_rate": 1.953125e-06, |
|
"loss": 0.0214, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 4.038961038961039, |
|
"grad_norm": 0.13517784425731946, |
|
"learning_rate": 1.9270833333333334e-06, |
|
"loss": 0.0226, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 4.0519480519480515, |
|
"grad_norm": 0.12354611702984837, |
|
"learning_rate": 1.9010416666666666e-06, |
|
"loss": 0.0211, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 4.064935064935065, |
|
"grad_norm": 0.12929184375596317, |
|
"learning_rate": 1.8750000000000003e-06, |
|
"loss": 0.0184, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 4.077922077922078, |
|
"grad_norm": 0.13540347091068558, |
|
"learning_rate": 1.8489583333333337e-06, |
|
"loss": 0.0216, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 4.090909090909091, |
|
"grad_norm": 0.16456545986510454, |
|
"learning_rate": 1.8229166666666666e-06, |
|
"loss": 0.0275, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 4.103896103896104, |
|
"grad_norm": 0.13103783773070332, |
|
"learning_rate": 1.796875e-06, |
|
"loss": 0.0167, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 4.116883116883117, |
|
"grad_norm": 0.15039425342491727, |
|
"learning_rate": 1.7708333333333337e-06, |
|
"loss": 0.0176, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 4.12987012987013, |
|
"grad_norm": 0.1528874968260015, |
|
"learning_rate": 1.7447916666666667e-06, |
|
"loss": 0.0228, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 4.142857142857143, |
|
"grad_norm": 0.1277979944302909, |
|
"learning_rate": 1.71875e-06, |
|
"loss": 0.0153, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 4.1558441558441555, |
|
"grad_norm": 0.10986972687139382, |
|
"learning_rate": 1.6927083333333335e-06, |
|
"loss": 0.0114, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.1688311688311686, |
|
"grad_norm": 0.12327195530898087, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 0.0247, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 4.181818181818182, |
|
"grad_norm": 0.1303364571800614, |
|
"learning_rate": 1.640625e-06, |
|
"loss": 0.0132, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 4.194805194805195, |
|
"grad_norm": 0.1129046695889519, |
|
"learning_rate": 1.6145833333333335e-06, |
|
"loss": 0.0137, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 4.207792207792208, |
|
"grad_norm": 0.12487716628769142, |
|
"learning_rate": 1.5885416666666667e-06, |
|
"loss": 0.0171, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 4.220779220779221, |
|
"grad_norm": 0.11121142713696242, |
|
"learning_rate": 1.5625e-06, |
|
"loss": 0.0119, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 4.233766233766234, |
|
"grad_norm": 0.12219540809008053, |
|
"learning_rate": 1.5364583333333335e-06, |
|
"loss": 0.02, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 4.246753246753247, |
|
"grad_norm": 0.1231988249512158, |
|
"learning_rate": 1.5104166666666667e-06, |
|
"loss": 0.0167, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 4.259740259740259, |
|
"grad_norm": 0.12591039931221573, |
|
"learning_rate": 1.484375e-06, |
|
"loss": 0.0145, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 4.2727272727272725, |
|
"grad_norm": 0.13528042945474514, |
|
"learning_rate": 1.4583333333333335e-06, |
|
"loss": 0.0164, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 4.285714285714286, |
|
"grad_norm": 0.13318619366466836, |
|
"learning_rate": 1.4322916666666667e-06, |
|
"loss": 0.0178, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.298701298701299, |
|
"grad_norm": 0.1147621867962911, |
|
"learning_rate": 1.40625e-06, |
|
"loss": 0.0146, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 4.311688311688312, |
|
"grad_norm": 0.1171420417701479, |
|
"learning_rate": 1.3802083333333335e-06, |
|
"loss": 0.0155, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 4.324675324675325, |
|
"grad_norm": 0.15069462003471634, |
|
"learning_rate": 1.3541666666666667e-06, |
|
"loss": 0.0169, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 4.337662337662338, |
|
"grad_norm": 0.13369044996596968, |
|
"learning_rate": 1.328125e-06, |
|
"loss": 0.02, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 4.35064935064935, |
|
"grad_norm": 0.1454995879584439, |
|
"learning_rate": 1.3020833333333335e-06, |
|
"loss": 0.0172, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 4.363636363636363, |
|
"grad_norm": 0.10957228964118756, |
|
"learning_rate": 1.2760416666666667e-06, |
|
"loss": 0.0146, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.376623376623376, |
|
"grad_norm": 0.13794250836423072, |
|
"learning_rate": 1.25e-06, |
|
"loss": 0.0186, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 4.3896103896103895, |
|
"grad_norm": 0.13155919219887055, |
|
"learning_rate": 1.2239583333333333e-06, |
|
"loss": 0.02, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 4.402597402597403, |
|
"grad_norm": 0.13527086701859414, |
|
"learning_rate": 1.197916666666667e-06, |
|
"loss": 0.0194, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 4.415584415584416, |
|
"grad_norm": 0.13783015873528512, |
|
"learning_rate": 1.1718750000000001e-06, |
|
"loss": 0.016, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.428571428571429, |
|
"grad_norm": 0.14817876036194735, |
|
"learning_rate": 1.1458333333333333e-06, |
|
"loss": 0.0202, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 4.441558441558442, |
|
"grad_norm": 0.13486669752884248, |
|
"learning_rate": 1.1197916666666667e-06, |
|
"loss": 0.0133, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 4.454545454545454, |
|
"grad_norm": 0.127193126169577, |
|
"learning_rate": 1.0937500000000001e-06, |
|
"loss": 0.0225, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 4.467532467532467, |
|
"grad_norm": 0.12716918562412002, |
|
"learning_rate": 1.0677083333333333e-06, |
|
"loss": 0.0161, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 4.48051948051948, |
|
"grad_norm": 0.12022862958417269, |
|
"learning_rate": 1.0416666666666667e-06, |
|
"loss": 0.0178, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 4.4935064935064934, |
|
"grad_norm": 0.11185223739581632, |
|
"learning_rate": 1.0156250000000001e-06, |
|
"loss": 0.0125, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 4.5064935064935066, |
|
"grad_norm": 0.12306589435982454, |
|
"learning_rate": 9.895833333333333e-07, |
|
"loss": 0.0177, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 4.51948051948052, |
|
"grad_norm": 0.1356833136264956, |
|
"learning_rate": 9.635416666666667e-07, |
|
"loss": 0.0225, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 4.532467532467533, |
|
"grad_norm": 0.12707329769968861, |
|
"learning_rate": 9.375000000000001e-07, |
|
"loss": 0.0144, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 4.545454545454545, |
|
"grad_norm": 0.13157923603380584, |
|
"learning_rate": 9.114583333333333e-07, |
|
"loss": 0.0246, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.558441558441558, |
|
"grad_norm": 0.11570507668897385, |
|
"learning_rate": 8.854166666666668e-07, |
|
"loss": 0.0136, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 4.571428571428571, |
|
"grad_norm": 0.1133212555134258, |
|
"learning_rate": 8.59375e-07, |
|
"loss": 0.016, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 4.584415584415584, |
|
"grad_norm": 0.11403791282962417, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 0.0173, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 4.597402597402597, |
|
"grad_norm": 0.1261694785889651, |
|
"learning_rate": 8.072916666666667e-07, |
|
"loss": 0.0164, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 4.6103896103896105, |
|
"grad_norm": 0.1647802144483293, |
|
"learning_rate": 7.8125e-07, |
|
"loss": 0.0194, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 4.623376623376624, |
|
"grad_norm": 0.12321856169539074, |
|
"learning_rate": 7.552083333333333e-07, |
|
"loss": 0.0154, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 4.636363636363637, |
|
"grad_norm": 0.11947597399789361, |
|
"learning_rate": 7.291666666666667e-07, |
|
"loss": 0.0128, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 4.64935064935065, |
|
"grad_norm": 0.12412634751604261, |
|
"learning_rate": 7.03125e-07, |
|
"loss": 0.0216, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 4.662337662337662, |
|
"grad_norm": 0.10648985459552097, |
|
"learning_rate": 6.770833333333333e-07, |
|
"loss": 0.016, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 4.675324675324675, |
|
"grad_norm": 0.10795065481176093, |
|
"learning_rate": 6.510416666666668e-07, |
|
"loss": 0.0149, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.688311688311688, |
|
"grad_norm": 0.13861642312992048, |
|
"learning_rate": 6.25e-07, |
|
"loss": 0.0197, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 4.701298701298701, |
|
"grad_norm": 0.12770449682753193, |
|
"learning_rate": 5.989583333333335e-07, |
|
"loss": 0.0221, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 4.714285714285714, |
|
"grad_norm": 0.10430014511475291, |
|
"learning_rate": 5.729166666666667e-07, |
|
"loss": 0.0129, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 4.7272727272727275, |
|
"grad_norm": 0.11966755573044428, |
|
"learning_rate": 5.468750000000001e-07, |
|
"loss": 0.0206, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 4.740259740259741, |
|
"grad_norm": 0.1275756019900647, |
|
"learning_rate": 5.208333333333334e-07, |
|
"loss": 0.0171, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 4.753246753246753, |
|
"grad_norm": 0.12808966422886903, |
|
"learning_rate": 4.947916666666667e-07, |
|
"loss": 0.0192, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 4.766233766233766, |
|
"grad_norm": 0.12149697378823344, |
|
"learning_rate": 4.6875000000000006e-07, |
|
"loss": 0.0143, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 4.779220779220779, |
|
"grad_norm": 0.1120503286114493, |
|
"learning_rate": 4.427083333333334e-07, |
|
"loss": 0.013, |
|
"step": 368 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 385,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 16,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 552596339163136.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}