{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.1235955056179776,
  "eval_steps": 500,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 4.5,
      "learning_rate": 0.00019902985447817174,
      "loss": 2.5508,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.5,
      "learning_rate": 0.00019802970445566834,
      "loss": 2.7598,
      "step": 2
    },
    {
      "epoch": 0.02,
      "grad_norm": 5.0625,
      "learning_rate": 0.00019702955443316497,
      "loss": 2.4902,
      "step": 3
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.84375,
      "learning_rate": 0.0001960294044106616,
      "loss": 2.4863,
      "step": 4
    },
    {
      "epoch": 0.03,
      "grad_norm": 8.9375,
      "learning_rate": 0.00019502925438815824,
      "loss": 3.25,
      "step": 5
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.4375,
      "learning_rate": 0.00019402910436565487,
      "loss": 3.0195,
      "step": 6
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.875,
      "learning_rate": 0.0001930289543431515,
      "loss": 3.6758,
      "step": 7
    },
    {
      "epoch": 0.04,
      "grad_norm": 4.34375,
      "learning_rate": 0.0001920288043206481,
      "loss": 2.1699,
      "step": 8
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.09375,
      "learning_rate": 0.00019102865429814473,
      "loss": 2.4805,
      "step": 9
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.625,
      "learning_rate": 0.00019002850427564136,
      "loss": 2.6797,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.59375,
      "learning_rate": 0.000189028354253138,
      "loss": 2.2031,
      "step": 11
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.78125,
      "learning_rate": 0.0001880282042306346,
      "loss": 2.4688,
      "step": 12
    },
    {
      "epoch": 0.07,
      "grad_norm": 5.8125,
      "learning_rate": 0.00018702805420813123,
      "loss": 2.668,
      "step": 13
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.3125,
      "learning_rate": 0.00018602790418562786,
      "loss": 2.8516,
      "step": 14
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.28125,
      "learning_rate": 0.0001850277541631245,
      "loss": 2.3086,
      "step": 15
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.5,
      "learning_rate": 0.0001840276041406211,
      "loss": 2.1504,
      "step": 16
    },
    {
      "epoch": 0.1,
      "grad_norm": 6.0,
      "learning_rate": 0.00018302745411811772,
      "loss": 2.5664,
      "step": 17
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.90625,
      "learning_rate": 0.00018202730409561436,
      "loss": 2.4023,
      "step": 18
    },
    {
      "epoch": 0.11,
      "grad_norm": 6.9375,
      "learning_rate": 0.00018102715407311096,
      "loss": 2.6562,
      "step": 19
    },
    {
      "epoch": 0.11,
      "grad_norm": 5.59375,
      "learning_rate": 0.0001800270040506076,
      "loss": 1.9863,
      "step": 20
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.78125,
      "learning_rate": 0.00017902685402810422,
      "loss": 2.4648,
      "step": 21
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.375,
      "learning_rate": 0.00017802670400560085,
      "loss": 2.5781,
      "step": 22
    },
    {
      "epoch": 0.13,
      "grad_norm": 7.34375,
      "learning_rate": 0.00017702655398309746,
      "loss": 2.2715,
      "step": 23
    },
    {
      "epoch": 0.13,
      "grad_norm": 8.125,
      "learning_rate": 0.00017602640396059409,
      "loss": 2.5195,
      "step": 24
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.90625,
      "learning_rate": 0.00017502625393809074,
      "loss": 2.4141,
      "step": 25
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.34375,
      "learning_rate": 0.00017402610391558735,
      "loss": 2.2051,
      "step": 26
    },
    {
      "epoch": 0.15,
      "grad_norm": 6.65625,
      "learning_rate": 0.00017302595389308398,
      "loss": 2.5781,
      "step": 27
    },
    {
      "epoch": 0.16,
      "grad_norm": 5.59375,
      "learning_rate": 0.0001720258038705806,
      "loss": 2.2676,
      "step": 28
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.34375,
      "learning_rate": 0.00017102565384807724,
      "loss": 2.291,
      "step": 29
    },
    {
      "epoch": 0.17,
      "grad_norm": 5.59375,
      "learning_rate": 0.00017002550382557384,
      "loss": 1.8398,
      "step": 30
    },
    {
      "epoch": 0.17,
      "grad_norm": 7.96875,
      "learning_rate": 0.00016902535380307047,
      "loss": 2.1543,
      "step": 31
    },
    {
      "epoch": 0.18,
      "grad_norm": 7.46875,
      "learning_rate": 0.0001680252037805671,
      "loss": 2.8789,
      "step": 32
    },
    {
      "epoch": 0.19,
      "grad_norm": 5.5,
      "learning_rate": 0.0001670250537580637,
      "loss": 2.1621,
      "step": 33
    },
    {
      "epoch": 0.19,
      "grad_norm": 5.28125,
      "learning_rate": 0.00016602490373556034,
      "loss": 2.2969,
      "step": 34
    },
    {
      "epoch": 0.2,
      "grad_norm": 8.3125,
      "learning_rate": 0.00016502475371305697,
      "loss": 2.4668,
      "step": 35
    },
    {
      "epoch": 0.2,
      "grad_norm": 8.0625,
      "learning_rate": 0.0001640246036905536,
      "loss": 2.7734,
      "step": 36
    },
    {
      "epoch": 0.21,
      "grad_norm": 5.5,
      "learning_rate": 0.0001630244536680502,
      "loss": 1.9102,
      "step": 37
    },
    {
      "epoch": 0.21,
      "grad_norm": 9.75,
      "learning_rate": 0.00016202430364554684,
      "loss": 2.6055,
      "step": 38
    },
    {
      "epoch": 0.22,
      "grad_norm": 7.46875,
      "learning_rate": 0.00016102415362304347,
      "loss": 2.0996,
      "step": 39
    },
    {
      "epoch": 0.22,
      "grad_norm": 7.53125,
      "learning_rate": 0.0001600240036005401,
      "loss": 2.5117,
      "step": 40
    },
    {
      "epoch": 0.23,
      "grad_norm": 8.125,
      "learning_rate": 0.0001590238535780367,
      "loss": 2.3262,
      "step": 41
    },
    {
      "epoch": 0.24,
      "grad_norm": 7.375,
      "learning_rate": 0.00015802370355553333,
      "loss": 2.4004,
      "step": 42
    },
    {
      "epoch": 0.24,
      "grad_norm": 6.75,
      "learning_rate": 0.00015702355353302996,
      "loss": 1.7539,
      "step": 43
    },
    {
      "epoch": 0.25,
      "grad_norm": 6.34375,
      "learning_rate": 0.00015602340351052657,
      "loss": 2.1699,
      "step": 44
    },
    {
      "epoch": 0.25,
      "grad_norm": 6.3125,
      "learning_rate": 0.0001550232534880232,
      "loss": 2.418,
      "step": 45
    },
    {
      "epoch": 0.26,
      "grad_norm": 7.0625,
      "learning_rate": 0.00015402310346551986,
      "loss": 2.1543,
      "step": 46
    },
    {
      "epoch": 0.26,
      "grad_norm": 6.875,
      "learning_rate": 0.00015302295344301646,
      "loss": 2.2812,
      "step": 47
    },
    {
      "epoch": 0.27,
      "grad_norm": 6.90625,
      "learning_rate": 0.0001520228034205131,
      "loss": 2.2422,
      "step": 48
    },
    {
      "epoch": 0.28,
      "grad_norm": 6.28125,
      "learning_rate": 0.00015102265339800972,
      "loss": 2.7344,
      "step": 49
    },
    {
      "epoch": 0.28,
      "grad_norm": 5.90625,
      "learning_rate": 0.00015002250337550635,
      "loss": 2.1504,
      "step": 50
    },
    {
      "epoch": 0.29,
      "grad_norm": 8.25,
      "learning_rate": 0.00014902235335300296,
      "loss": 2.6289,
      "step": 51
    },
    {
      "epoch": 0.29,
      "grad_norm": 5.84375,
      "learning_rate": 0.00014802220333049959,
      "loss": 2.0312,
      "step": 52
    },
    {
      "epoch": 0.3,
      "grad_norm": 9.75,
      "learning_rate": 0.00014702205330799622,
      "loss": 2.1367,
      "step": 53
    },
    {
      "epoch": 0.3,
      "grad_norm": 5.21875,
      "learning_rate": 0.00014602190328549282,
      "loss": 1.7891,
      "step": 54
    },
    {
      "epoch": 0.31,
      "grad_norm": 7.46875,
      "learning_rate": 0.00014502175326298945,
      "loss": 2.2305,
      "step": 55
    },
    {
      "epoch": 0.31,
      "grad_norm": 9.625,
      "learning_rate": 0.00014402160324048608,
      "loss": 2.4395,
      "step": 56
    },
    {
      "epoch": 0.32,
      "grad_norm": 6.4375,
      "learning_rate": 0.0001430214532179827,
      "loss": 2.1445,
      "step": 57
    },
    {
      "epoch": 0.33,
      "grad_norm": 8.6875,
      "learning_rate": 0.00014202130319547932,
      "loss": 2.5586,
      "step": 58
    },
    {
      "epoch": 0.33,
      "grad_norm": 7.71875,
      "learning_rate": 0.00014102115317297595,
      "loss": 2.7031,
      "step": 59
    },
    {
      "epoch": 0.34,
      "grad_norm": 6.65625,
      "learning_rate": 0.00014002100315047258,
      "loss": 2.373,
      "step": 60
    },
    {
      "epoch": 0.34,
      "grad_norm": 8.375,
      "learning_rate": 0.0001390208531279692,
      "loss": 2.582,
      "step": 61
    },
    {
      "epoch": 0.35,
      "grad_norm": 6.75,
      "learning_rate": 0.0001380207031054658,
      "loss": 1.9961,
      "step": 62
    },
    {
      "epoch": 0.35,
      "grad_norm": 7.96875,
      "learning_rate": 0.00013702055308296244,
      "loss": 2.4199,
      "step": 63
    },
    {
      "epoch": 0.36,
      "grad_norm": 8.8125,
      "learning_rate": 0.00013602040306045908,
      "loss": 2.875,
      "step": 64
    },
    {
      "epoch": 0.37,
      "grad_norm": 6.3125,
      "learning_rate": 0.00013502025303795568,
      "loss": 2.5273,
      "step": 65
    },
    {
      "epoch": 0.37,
      "grad_norm": 7.46875,
      "learning_rate": 0.0001340201030154523,
      "loss": 2.2988,
      "step": 66
    },
    {
      "epoch": 0.38,
      "grad_norm": 8.625,
      "learning_rate": 0.00013301995299294897,
      "loss": 2.9023,
      "step": 67
    },
    {
      "epoch": 0.38,
      "grad_norm": 7.1875,
      "learning_rate": 0.00013201980297044557,
      "loss": 2.8047,
      "step": 68
    },
    {
      "epoch": 0.39,
      "grad_norm": 9.625,
      "learning_rate": 0.0001310196529479422,
      "loss": 2.6328,
      "step": 69
    },
    {
      "epoch": 0.39,
      "grad_norm": 6.8125,
      "learning_rate": 0.00013001950292543883,
      "loss": 2.168,
      "step": 70
    },
    {
      "epoch": 0.4,
      "grad_norm": 7.59375,
      "learning_rate": 0.00012901935290293546,
      "loss": 2.5234,
      "step": 71
    },
    {
      "epoch": 0.4,
      "grad_norm": 10.3125,
      "learning_rate": 0.00012801920288043207,
      "loss": 2.8516,
      "step": 72
    },
    {
      "epoch": 0.41,
      "grad_norm": 9.3125,
      "learning_rate": 0.0001270190528579287,
      "loss": 2.5273,
      "step": 73
    },
    {
      "epoch": 0.42,
      "grad_norm": 9.375,
      "learning_rate": 0.00012601890283542533,
      "loss": 3.0469,
      "step": 74
    },
    {
      "epoch": 0.42,
      "grad_norm": 6.875,
      "learning_rate": 0.00012501875281292196,
      "loss": 2.543,
      "step": 75
    },
    {
      "epoch": 0.43,
      "grad_norm": 9.25,
      "learning_rate": 0.00012401860279041856,
      "loss": 3.3125,
      "step": 76
    },
    {
      "epoch": 0.43,
      "grad_norm": 6.96875,
      "learning_rate": 0.0001230184527679152,
      "loss": 2.1328,
      "step": 77
    },
    {
      "epoch": 0.44,
      "grad_norm": 7.875,
      "learning_rate": 0.00012201830274541181,
      "loss": 2.875,
      "step": 78
    },
    {
      "epoch": 0.44,
      "grad_norm": 8.25,
      "learning_rate": 0.00012101815272290844,
      "loss": 2.4727,
      "step": 79
    },
    {
      "epoch": 0.45,
      "grad_norm": 7.15625,
      "learning_rate": 0.00012001800270040506,
      "loss": 2.5703,
      "step": 80
    },
    {
      "epoch": 0.46,
      "grad_norm": 9.8125,
      "learning_rate": 0.00011901785267790169,
      "loss": 3.3555,
      "step": 81
    },
    {
      "epoch": 0.46,
      "grad_norm": 5.8125,
      "learning_rate": 0.00011801770265539831,
      "loss": 2.5352,
      "step": 82
    },
    {
      "epoch": 0.47,
      "grad_norm": 5.03125,
      "learning_rate": 0.00011701755263289494,
      "loss": 2.0664,
      "step": 83
    },
    {
      "epoch": 0.47,
      "grad_norm": 9.5,
      "learning_rate": 0.00011601740261039156,
      "loss": 2.9844,
      "step": 84
    },
    {
      "epoch": 0.48,
      "grad_norm": 7.59375,
      "learning_rate": 0.00011501725258788819,
      "loss": 2.6602,
      "step": 85
    },
    {
      "epoch": 0.48,
      "grad_norm": 6.21875,
      "learning_rate": 0.0001140171025653848,
      "loss": 2.1367,
      "step": 86
    },
    {
      "epoch": 0.49,
      "grad_norm": 6.46875,
      "learning_rate": 0.00011301695254288145,
      "loss": 2.459,
      "step": 87
    },
    {
      "epoch": 0.49,
      "grad_norm": 7.75,
      "learning_rate": 0.00011201680252037807,
      "loss": 2.1484,
      "step": 88
    },
    {
      "epoch": 0.5,
      "grad_norm": 7.90625,
      "learning_rate": 0.0001110166524978747,
      "loss": 2.8867,
      "step": 89
    },
    {
      "epoch": 0.51,
      "grad_norm": 11.3125,
      "learning_rate": 0.00011001650247537131,
      "loss": 3.5586,
      "step": 90
    },
    {
      "epoch": 0.51,
      "grad_norm": 6.375,
      "learning_rate": 0.00010901635245286794,
      "loss": 2.8594,
      "step": 91
    },
    {
      "epoch": 0.52,
      "grad_norm": 9.1875,
      "learning_rate": 0.00010801620243036456,
      "loss": 3.4688,
      "step": 92
    },
    {
      "epoch": 0.52,
      "grad_norm": 8.5,
      "learning_rate": 0.00010701605240786119,
      "loss": 2.7031,
      "step": 93
    },
    {
      "epoch": 0.53,
      "grad_norm": 10.0,
      "learning_rate": 0.00010601590238535781,
      "loss": 3.0859,
      "step": 94
    },
    {
      "epoch": 0.53,
      "grad_norm": 8.5,
      "learning_rate": 0.00010501575236285444,
      "loss": 3.0352,
      "step": 95
    },
    {
      "epoch": 0.54,
      "grad_norm": 5.1875,
      "learning_rate": 0.00010401560234035106,
      "loss": 2.084,
      "step": 96
    },
    {
      "epoch": 0.54,
      "grad_norm": 7.0,
      "learning_rate": 0.00010301545231784769,
      "loss": 2.0508,
      "step": 97
    },
    {
      "epoch": 0.55,
      "grad_norm": 8.1875,
      "learning_rate": 0.0001020153022953443,
      "loss": 2.9805,
      "step": 98
    },
    {
      "epoch": 0.56,
      "grad_norm": 7.5625,
      "learning_rate": 0.00010101515227284092,
      "loss": 2.9766,
      "step": 99
    },
    {
      "epoch": 0.56,
      "grad_norm": 8.125,
      "learning_rate": 0.00010001500225033755,
      "loss": 2.502,
      "step": 100
    },
    {
      "epoch": 0.57,
      "grad_norm": 6.34375,
      "learning_rate": 9.901485222783417e-05,
      "loss": 2.4844,
      "step": 101
    },
    {
      "epoch": 0.57,
      "grad_norm": 9.4375,
      "learning_rate": 9.80147022053308e-05,
      "loss": 2.4492,
      "step": 102
    },
    {
      "epoch": 0.58,
      "grad_norm": 7.8125,
      "learning_rate": 9.701455218282743e-05,
      "loss": 2.5742,
      "step": 103
    },
    {
      "epoch": 0.58,
      "grad_norm": 7.09375,
      "learning_rate": 9.601440216032405e-05,
      "loss": 2.4688,
      "step": 104
    },
    {
      "epoch": 0.59,
      "grad_norm": 7.5625,
      "learning_rate": 9.501425213782068e-05,
      "loss": 2.6211,
      "step": 105
    },
    {
      "epoch": 0.6,
      "grad_norm": 8.9375,
      "learning_rate": 9.40141021153173e-05,
      "loss": 2.8027,
      "step": 106
    },
    {
      "epoch": 0.6,
      "grad_norm": 9.3125,
      "learning_rate": 9.301395209281393e-05,
      "loss": 2.5742,
      "step": 107
    },
    {
      "epoch": 0.61,
      "grad_norm": 5.5625,
      "learning_rate": 9.201380207031055e-05,
      "loss": 2.4258,
      "step": 108
    },
    {
      "epoch": 0.61,
      "grad_norm": 9.25,
      "learning_rate": 9.101365204780718e-05,
      "loss": 2.9141,
      "step": 109
    },
    {
      "epoch": 0.62,
      "grad_norm": 7.78125,
      "learning_rate": 9.00135020253038e-05,
      "loss": 2.2578,
      "step": 110
    },
    {
      "epoch": 0.62,
      "grad_norm": 7.09375,
      "learning_rate": 8.901335200280043e-05,
      "loss": 2.5312,
      "step": 111
    },
    {
      "epoch": 0.63,
      "grad_norm": 6.78125,
      "learning_rate": 8.801320198029704e-05,
      "loss": 2.1133,
      "step": 112
    },
    {
      "epoch": 0.63,
      "grad_norm": 8.625,
      "learning_rate": 8.701305195779367e-05,
      "loss": 2.6523,
      "step": 113
    },
    {
      "epoch": 0.64,
      "grad_norm": 8.0625,
      "learning_rate": 8.60129019352903e-05,
      "loss": 2.9453,
      "step": 114
    },
    {
      "epoch": 0.65,
      "grad_norm": 7.15625,
      "learning_rate": 8.501275191278692e-05,
      "loss": 2.6816,
      "step": 115
    },
    {
      "epoch": 0.65,
      "grad_norm": 7.21875,
      "learning_rate": 8.401260189028355e-05,
      "loss": 2.7227,
      "step": 116
    },
    {
      "epoch": 0.66,
      "grad_norm": 8.3125,
      "learning_rate": 8.301245186778017e-05,
      "loss": 2.6191,
      "step": 117
    },
    {
      "epoch": 0.66,
      "grad_norm": 7.5625,
      "learning_rate": 8.20123018452768e-05,
      "loss": 2.459,
      "step": 118
    },
    {
      "epoch": 0.67,
      "grad_norm": 6.46875,
      "learning_rate": 8.101215182277342e-05,
      "loss": 2.4297,
      "step": 119
    },
    {
      "epoch": 0.67,
      "grad_norm": 8.375,
      "learning_rate": 8.001200180027005e-05,
      "loss": 2.4609,
      "step": 120
    },
    {
      "epoch": 0.68,
      "grad_norm": 7.875,
      "learning_rate": 7.901185177776667e-05,
      "loss": 2.7539,
      "step": 121
    },
    {
      "epoch": 0.69,
      "grad_norm": 6.3125,
      "learning_rate": 7.801170175526328e-05,
      "loss": 2.2148,
      "step": 122
    },
    {
      "epoch": 0.69,
      "grad_norm": 6.75,
      "learning_rate": 7.701155173275993e-05,
      "loss": 2.4512,
      "step": 123
    },
    {
      "epoch": 0.7,
      "grad_norm": 9.1875,
      "learning_rate": 7.601140171025655e-05,
      "loss": 2.5859,
      "step": 124
    },
    {
      "epoch": 0.7,
      "grad_norm": 8.3125,
      "learning_rate": 7.501125168775318e-05,
      "loss": 2.6797,
      "step": 125
    },
    {
      "epoch": 0.71,
      "grad_norm": 9.125,
      "learning_rate": 7.401110166524979e-05,
      "loss": 3.168,
      "step": 126
    },
    {
      "epoch": 0.71,
      "grad_norm": 6.15625,
      "learning_rate": 7.301095164274641e-05,
      "loss": 2.1816,
      "step": 127
    },
    {
      "epoch": 0.72,
      "grad_norm": 7.8125,
      "learning_rate": 7.201080162024304e-05,
      "loss": 2.9766,
      "step": 128
    },
    {
      "epoch": 0.72,
      "grad_norm": 6.625,
      "learning_rate": 7.101065159773966e-05,
      "loss": 2.1836,
      "step": 129
    },
    {
      "epoch": 0.73,
      "grad_norm": 10.5,
      "learning_rate": 7.001050157523629e-05,
      "loss": 2.8086,
      "step": 130
    },
    {
      "epoch": 0.74,
      "grad_norm": 6.15625,
      "learning_rate": 6.90103515527329e-05,
      "loss": 2.4141,
      "step": 131
    },
    {
      "epoch": 0.74,
      "grad_norm": 7.375,
      "learning_rate": 6.801020153022954e-05,
      "loss": 2.5742,
      "step": 132
    },
    {
      "epoch": 0.75,
      "grad_norm": 7.46875,
      "learning_rate": 6.701005150772615e-05,
      "loss": 2.9883,
      "step": 133
    },
    {
      "epoch": 0.75,
      "grad_norm": 8.0625,
      "learning_rate": 6.600990148522279e-05,
      "loss": 2.418,
      "step": 134
    },
    {
      "epoch": 0.76,
      "grad_norm": 7.53125,
      "learning_rate": 6.500975146271942e-05,
      "loss": 2.7578,
      "step": 135
    },
    {
      "epoch": 0.76,
      "grad_norm": 7.90625,
      "learning_rate": 6.400960144021603e-05,
      "loss": 3.0078,
      "step": 136
    },
    {
      "epoch": 0.77,
      "grad_norm": 6.125,
      "learning_rate": 6.300945141771266e-05,
      "loss": 2.2012,
      "step": 137
    },
    {
      "epoch": 0.78,
      "grad_norm": 8.0,
      "learning_rate": 6.200930139520928e-05,
      "loss": 3.0703,
      "step": 138
    },
    {
      "epoch": 0.78,
      "grad_norm": 6.90625,
      "learning_rate": 6.1009151372705906e-05,
      "loss": 2.7031,
      "step": 139
    },
    {
      "epoch": 0.79,
      "grad_norm": 5.15625,
      "learning_rate": 6.000900135020253e-05,
      "loss": 1.9014,
      "step": 140
    },
    {
      "epoch": 0.79,
      "grad_norm": 6.9375,
      "learning_rate": 5.9008851327699154e-05,
      "loss": 2.3809,
      "step": 141
    },
    {
      "epoch": 0.8,
      "grad_norm": 6.8125,
      "learning_rate": 5.800870130519578e-05,
      "loss": 2.5469,
      "step": 142
    },
    {
      "epoch": 0.8,
      "grad_norm": 6.0,
      "learning_rate": 5.70085512826924e-05,
      "loss": 3.0938,
      "step": 143
    },
    {
      "epoch": 0.81,
      "grad_norm": 7.25,
      "learning_rate": 5.600840126018903e-05,
      "loss": 2.4395,
      "step": 144
    },
    {
      "epoch": 0.81,
      "grad_norm": 6.59375,
      "learning_rate": 5.500825123768566e-05,
      "loss": 2.5254,
      "step": 145
    },
    {
      "epoch": 0.82,
      "grad_norm": 6.96875,
      "learning_rate": 5.400810121518228e-05,
      "loss": 2.7773,
      "step": 146
    },
    {
      "epoch": 0.83,
      "grad_norm": 8.0625,
      "learning_rate": 5.3007951192678905e-05,
      "loss": 2.6875,
      "step": 147
    },
    {
      "epoch": 0.83,
      "grad_norm": 7.28125,
      "learning_rate": 5.200780117017553e-05,
      "loss": 3.0898,
      "step": 148
    },
    {
      "epoch": 0.84,
      "grad_norm": 6.5625,
      "learning_rate": 5.100765114767215e-05,
      "loss": 2.3984,
      "step": 149
    },
    {
      "epoch": 0.84,
      "grad_norm": 5.90625,
      "learning_rate": 5.000750112516878e-05,
      "loss": 2.1445,
      "step": 150
    },
    {
      "epoch": 0.85,
      "grad_norm": 7.65625,
      "learning_rate": 4.90073511026654e-05,
      "loss": 2.8125,
      "step": 151
    },
    {
      "epoch": 0.85,
      "grad_norm": 6.59375,
      "learning_rate": 4.8007201080162025e-05,
      "loss": 2.3242,
      "step": 152
    },
    {
      "epoch": 0.86,
      "grad_norm": 7.0625,
      "learning_rate": 4.700705105765865e-05,
      "loss": 2.3965,
      "step": 153
    },
    {
      "epoch": 0.87,
      "grad_norm": 6.3125,
      "learning_rate": 4.6006901035155273e-05,
      "loss": 2.4824,
      "step": 154
    },
    {
      "epoch": 0.87,
      "grad_norm": 8.6875,
      "learning_rate": 4.50067510126519e-05,
      "loss": 2.6445,
      "step": 155
    },
    {
      "epoch": 0.88,
      "grad_norm": 6.71875,
      "learning_rate": 4.400660099014852e-05,
      "loss": 2.1689,
      "step": 156
    },
    {
      "epoch": 0.88,
      "grad_norm": 6.0625,
      "learning_rate": 4.300645096764515e-05,
      "loss": 2.4219,
      "step": 157
    },
    {
      "epoch": 0.89,
      "grad_norm": 7.59375,
      "learning_rate": 4.2006300945141776e-05,
      "loss": 2.7227,
      "step": 158
    },
    {
      "epoch": 0.89,
      "grad_norm": 6.25,
      "learning_rate": 4.10061509226384e-05,
      "loss": 2.7812,
      "step": 159
    },
    {
      "epoch": 0.9,
      "grad_norm": 5.5,
      "learning_rate": 4.0006000900135024e-05,
      "loss": 2.3574,
      "step": 160
    },
    {
      "epoch": 0.9,
      "grad_norm": 6.65625,
      "learning_rate": 3.900585087763164e-05,
      "loss": 2.6016,
      "step": 161
    },
    {
      "epoch": 0.91,
      "grad_norm": 6.5625,
      "learning_rate": 3.800570085512827e-05,
      "loss": 2.7617,
      "step": 162
    },
    {
      "epoch": 0.92,
      "grad_norm": 6.09375,
      "learning_rate": 3.7005550832624897e-05,
      "loss": 2.2285,
      "step": 163
    },
    {
      "epoch": 0.92,
      "grad_norm": 7.75,
      "learning_rate": 3.600540081012152e-05,
      "loss": 3.0,
      "step": 164
    },
    {
      "epoch": 0.93,
      "grad_norm": 5.90625,
      "learning_rate": 3.5005250787618145e-05,
      "loss": 2.6172,
      "step": 165
    },
    {
      "epoch": 0.93,
      "grad_norm": 6.3125,
      "learning_rate": 3.400510076511477e-05,
      "loss": 2.3242,
      "step": 166
    },
    {
      "epoch": 0.94,
      "grad_norm": 6.25,
      "learning_rate": 3.300495074261139e-05,
      "loss": 2.0098,
      "step": 167
    },
    {
      "epoch": 0.94,
      "grad_norm": 7.25,
      "learning_rate": 3.200480072010802e-05,
      "loss": 1.9238,
      "step": 168
    },
    {
      "epoch": 0.95,
      "grad_norm": 5.84375,
      "learning_rate": 3.100465069760464e-05,
      "loss": 2.4082,
      "step": 169
    },
    {
      "epoch": 0.96,
      "grad_norm": 5.0625,
      "learning_rate": 3.0004500675101265e-05,
      "loss": 1.8848,
      "step": 170
    },
    {
      "epoch": 0.96,
      "grad_norm": 5.28125,
      "learning_rate": 2.900435065259789e-05,
      "loss": 2.293,
      "step": 171
    },
    {
      "epoch": 0.97,
      "grad_norm": 8.5,
      "learning_rate": 2.8004200630094516e-05,
      "loss": 3.332,
      "step": 172
    },
    {
      "epoch": 0.97,
      "grad_norm": 5.84375,
      "learning_rate": 2.700405060759114e-05,
      "loss": 2.1543,
      "step": 173
    },
    {
      "epoch": 0.98,
      "grad_norm": 9.0,
      "learning_rate": 2.6003900585087765e-05,
      "loss": 3.0156,
      "step": 174
    },
    {
      "epoch": 0.98,
      "grad_norm": 6.3125,
      "learning_rate": 2.500375056258439e-05,
      "loss": 2.4844,
      "step": 175
    },
    {
      "epoch": 0.99,
      "grad_norm": 7.15625,
      "learning_rate": 2.4003600540081013e-05,
      "loss": 2.4414,
      "step": 176
    },
    {
      "epoch": 0.99,
      "grad_norm": 6.46875,
      "learning_rate": 2.3003450517577637e-05,
      "loss": 2.584,
      "step": 177
    },
    {
      "epoch": 1.0,
      "grad_norm": 8.6875,
      "learning_rate": 2.200330049507426e-05,
      "loss": 2.9062,
      "step": 178
    },
    {
      "epoch": 1.01,
      "grad_norm": 6.6875,
      "learning_rate": 2.1003150472570888e-05,
      "loss": 2.127,
      "step": 179
    },
    {
      "epoch": 1.01,
      "grad_norm": 5.78125,
      "learning_rate": 2.0003000450067512e-05,
      "loss": 2.3066,
      "step": 180
    },
    {
      "epoch": 1.02,
      "grad_norm": 6.75,
      "learning_rate": 1.9002850427564136e-05,
      "loss": 1.6729,
      "step": 181
    },
    {
      "epoch": 1.02,
      "grad_norm": 6.09375,
      "learning_rate": 1.800270040506076e-05,
      "loss": 2.0488,
      "step": 182
    },
    {
      "epoch": 1.03,
      "grad_norm": 5.53125,
      "learning_rate": 1.7002550382557384e-05,
      "loss": 1.5322,
      "step": 183
    },
    {
      "epoch": 1.03,
      "grad_norm": 6.125,
      "learning_rate": 1.600240036005401e-05,
      "loss": 1.4639,
      "step": 184
    },
    {
      "epoch": 1.04,
      "grad_norm": 6.8125,
      "learning_rate": 1.5002250337550632e-05,
      "loss": 2.5938,
      "step": 185
    },
    {
      "epoch": 1.04,
      "grad_norm": 5.6875,
      "learning_rate": 1.4002100315047258e-05,
      "loss": 1.7109,
      "step": 186
    },
    {
      "epoch": 1.05,
      "grad_norm": 5.875,
      "learning_rate": 1.3001950292543882e-05,
      "loss": 1.6211,
      "step": 187
    },
    {
      "epoch": 1.06,
      "grad_norm": 6.25,
      "learning_rate": 1.2001800270040506e-05,
      "loss": 2.0449,
      "step": 188
    },
    {
      "epoch": 1.06,
      "grad_norm": 5.59375,
      "learning_rate": 1.100165024753713e-05,
      "loss": 1.6562,
      "step": 189
    },
    {
      "epoch": 1.07,
      "grad_norm": 7.21875,
      "learning_rate": 1.0001500225033756e-05,
      "loss": 2.041,
      "step": 190
    },
    {
      "epoch": 1.07,
      "grad_norm": 5.5,
      "learning_rate": 9.00135020253038e-06,
      "loss": 1.8105,
      "step": 191
    },
    {
      "epoch": 1.08,
      "grad_norm": 4.875,
      "learning_rate": 8.001200180027004e-06,
      "loss": 1.916,
      "step": 192
    },
    {
      "epoch": 1.08,
      "grad_norm": 5.75,
      "learning_rate": 7.001050157523629e-06,
      "loss": 1.7607,
      "step": 193
    },
    {
      "epoch": 1.09,
      "grad_norm": 5.4375,
      "learning_rate": 6.000900135020253e-06,
      "loss": 1.6367,
      "step": 194
    },
    {
      "epoch": 1.1,
      "grad_norm": 5.0,
      "learning_rate": 5.000750112516878e-06,
      "loss": 2.0332,
      "step": 195
    },
    {
      "epoch": 1.1,
      "grad_norm": 7.21875,
      "learning_rate": 4.000600090013502e-06,
      "loss": 1.7188,
      "step": 196
    },
    {
      "epoch": 1.11,
      "grad_norm": 5.84375,
      "learning_rate": 3.0004500675101266e-06,
      "loss": 1.832,
      "step": 197
    },
    {
      "epoch": 1.11,
      "grad_norm": 6.78125,
      "learning_rate": 2.000300045006751e-06,
      "loss": 2.0049,
      "step": 198
    },
    {
      "epoch": 1.12,
      "grad_norm": 6.15625,
      "learning_rate": 1.0001500225033755e-06,
      "loss": 1.6953,
      "step": 199
    },
    {
      "epoch": 1.12,
      "grad_norm": 6.875,
      "learning_rate": 0.0,
      "loss": 1.8525,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "total_flos": 509004324495360.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}