{
  "best_metric": 2.344815969467163,
  "best_model_checkpoint": "outputs/checkpoint-615",
  "epoch": 1.0,
  "eval_steps": 205,
  "global_step": 818,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0012224938875305623,
      "grad_norm": 8.178321838378906,
      "learning_rate": 0.001,
      "loss": 5.6592,
      "step": 1
    },
    {
      "epoch": 0.0024449877750611247,
      "grad_norm": 6.750397205352783,
      "learning_rate": 0.001,
      "loss": 4.8289,
      "step": 2
    },
    {
      "epoch": 0.003667481662591687,
      "grad_norm": 4.012933731079102,
      "learning_rate": 0.001,
      "loss": 3.9844,
      "step": 3
    },
    {
      "epoch": 0.004889975550122249,
      "grad_norm": 4.729739665985107,
      "learning_rate": 0.001,
      "loss": 3.4222,
      "step": 4
    },
    {
      "epoch": 0.006112469437652812,
      "grad_norm": 9.694947242736816,
      "learning_rate": 0.001,
      "loss": 3.1776,
      "step": 5
    },
    {
      "epoch": 0.007334963325183374,
      "grad_norm": 3.631967306137085,
      "learning_rate": 0.001,
      "loss": 3.0943,
      "step": 6
    },
    {
      "epoch": 0.008557457212713936,
      "grad_norm": 1.8573168516159058,
      "learning_rate": 0.001,
      "loss": 3.0438,
      "step": 7
    },
    {
      "epoch": 0.009779951100244499,
      "grad_norm": 1.1996679306030273,
      "learning_rate": 0.001,
      "loss": 2.9171,
      "step": 8
    },
    {
      "epoch": 0.011002444987775062,
      "grad_norm": 6.059974193572998,
      "learning_rate": 0.001,
      "loss": 2.896,
      "step": 9
    },
    {
      "epoch": 0.012224938875305624,
      "grad_norm": 2.2413384914398193,
      "learning_rate": 0.001,
      "loss": 2.8097,
      "step": 10
    },
    {
      "epoch": 0.013447432762836185,
      "grad_norm": 3.7154123783111572,
      "learning_rate": 0.001,
      "loss": 2.9233,
      "step": 11
    },
    {
      "epoch": 0.014669926650366748,
      "grad_norm": 9.796507835388184,
      "learning_rate": 0.001,
      "loss": 2.9526,
      "step": 12
    },
    {
      "epoch": 0.01589242053789731,
      "grad_norm": 1.3841707706451416,
      "learning_rate": 0.001,
      "loss": 2.9088,
      "step": 13
    },
    {
      "epoch": 0.017114914425427872,
      "grad_norm": 3.4672791957855225,
      "learning_rate": 0.001,
      "loss": 2.7995,
      "step": 14
    },
    {
      "epoch": 0.018337408312958436,
      "grad_norm": 0.9586966037750244,
      "learning_rate": 0.001,
      "loss": 2.7673,
      "step": 15
    },
    {
      "epoch": 0.019559902200488997,
      "grad_norm": 0.747156023979187,
      "learning_rate": 0.001,
      "loss": 2.7673,
      "step": 16
    },
    {
      "epoch": 0.02078239608801956,
      "grad_norm": 0.7908157110214233,
      "learning_rate": 0.001,
      "loss": 2.7809,
      "step": 17
    },
    {
      "epoch": 0.022004889975550123,
      "grad_norm": 0.9333175420761108,
      "learning_rate": 0.001,
      "loss": 2.738,
      "step": 18
    },
    {
      "epoch": 0.023227383863080684,
      "grad_norm": 1.2243553400039673,
      "learning_rate": 0.001,
      "loss": 2.6815,
      "step": 19
    },
    {
      "epoch": 0.02444987775061125,
      "grad_norm": 0.9989001750946045,
      "learning_rate": 0.001,
      "loss": 2.7002,
      "step": 20
    },
    {
      "epoch": 0.02567237163814181,
      "grad_norm": 0.664013147354126,
      "learning_rate": 0.001,
      "loss": 2.6622,
      "step": 21
    },
    {
      "epoch": 0.02689486552567237,
      "grad_norm": 0.8166808485984802,
      "learning_rate": 0.001,
      "loss": 2.6829,
      "step": 22
    },
    {
      "epoch": 0.028117359413202935,
      "grad_norm": 0.8486875295639038,
      "learning_rate": 0.001,
      "loss": 2.7649,
      "step": 23
    },
    {
      "epoch": 0.029339853300733496,
      "grad_norm": 0.672136664390564,
      "learning_rate": 0.001,
      "loss": 2.5397,
      "step": 24
    },
    {
      "epoch": 0.030562347188264057,
      "grad_norm": 0.6852248907089233,
      "learning_rate": 0.001,
      "loss": 2.5688,
      "step": 25
    },
    {
      "epoch": 0.03178484107579462,
      "grad_norm": 0.6197850704193115,
      "learning_rate": 0.001,
      "loss": 2.6037,
      "step": 26
    },
    {
      "epoch": 0.03300733496332518,
      "grad_norm": 0.671241044998169,
      "learning_rate": 0.001,
      "loss": 2.6554,
      "step": 27
    },
    {
      "epoch": 0.034229828850855744,
      "grad_norm": 0.675551176071167,
      "learning_rate": 0.001,
      "loss": 2.5556,
      "step": 28
    },
    {
      "epoch": 0.035452322738386305,
      "grad_norm": 0.5328904390335083,
      "learning_rate": 0.001,
      "loss": 2.5926,
      "step": 29
    },
    {
      "epoch": 0.03667481662591687,
      "grad_norm": 0.6622318029403687,
      "learning_rate": 0.001,
      "loss": 2.5812,
      "step": 30
    },
    {
      "epoch": 0.037897310513447434,
      "grad_norm": 0.7308458685874939,
      "learning_rate": 0.001,
      "loss": 2.5887,
      "step": 31
    },
    {
      "epoch": 0.039119804400977995,
      "grad_norm": 0.6278252601623535,
      "learning_rate": 0.001,
      "loss": 2.6097,
      "step": 32
    },
    {
      "epoch": 0.040342298288508556,
      "grad_norm": 0.7257432341575623,
      "learning_rate": 0.001,
      "loss": 2.6019,
      "step": 33
    },
    {
      "epoch": 0.04156479217603912,
      "grad_norm": 0.5468400120735168,
      "learning_rate": 0.001,
      "loss": 2.4808,
      "step": 34
    },
    {
      "epoch": 0.042787286063569685,
      "grad_norm": 0.5938531160354614,
      "learning_rate": 0.001,
      "loss": 2.6267,
      "step": 35
    },
    {
      "epoch": 0.044009779951100246,
      "grad_norm": 0.6380032896995544,
      "learning_rate": 0.001,
      "loss": 2.4288,
      "step": 36
    },
    {
      "epoch": 0.04523227383863081,
      "grad_norm": 0.5627170205116272,
      "learning_rate": 0.001,
      "loss": 2.5154,
      "step": 37
    },
    {
      "epoch": 0.04645476772616137,
      "grad_norm": 0.638714075088501,
      "learning_rate": 0.001,
      "loss": 2.5662,
      "step": 38
    },
    {
      "epoch": 0.04767726161369193,
      "grad_norm": 0.5756145715713501,
      "learning_rate": 0.001,
      "loss": 2.5508,
      "step": 39
    },
    {
      "epoch": 0.0488997555012225,
      "grad_norm": 0.631705105304718,
      "learning_rate": 0.001,
      "loss": 2.4879,
      "step": 40
    },
    {
      "epoch": 0.05012224938875306,
      "grad_norm": 0.5856027007102966,
      "learning_rate": 0.001,
      "loss": 2.5639,
      "step": 41
    },
    {
      "epoch": 0.05134474327628362,
      "grad_norm": 0.5451909303665161,
      "learning_rate": 0.001,
      "loss": 2.4391,
      "step": 42
    },
    {
      "epoch": 0.05256723716381418,
      "grad_norm": 0.8142032623291016,
      "learning_rate": 0.001,
      "loss": 2.6332,
      "step": 43
    },
    {
      "epoch": 0.05378973105134474,
      "grad_norm": 0.5957739353179932,
      "learning_rate": 0.001,
      "loss": 2.5546,
      "step": 44
    },
    {
      "epoch": 0.0550122249388753,
      "grad_norm": 0.5841150283813477,
      "learning_rate": 0.001,
      "loss": 2.5132,
      "step": 45
    },
    {
      "epoch": 0.05623471882640587,
      "grad_norm": 0.634911298751831,
      "learning_rate": 0.001,
      "loss": 2.4678,
      "step": 46
    },
    {
      "epoch": 0.05745721271393643,
      "grad_norm": 0.6035284399986267,
      "learning_rate": 0.001,
      "loss": 2.5785,
      "step": 47
    },
    {
      "epoch": 0.05867970660146699,
      "grad_norm": 0.5425002574920654,
      "learning_rate": 0.001,
      "loss": 2.5575,
      "step": 48
    },
    {
      "epoch": 0.05990220048899755,
      "grad_norm": 0.6026980876922607,
      "learning_rate": 0.001,
      "loss": 2.5258,
      "step": 49
    },
    {
      "epoch": 0.061124694376528114,
      "grad_norm": 0.5246846079826355,
      "learning_rate": 0.001,
      "loss": 2.4779,
      "step": 50
    },
    {
      "epoch": 0.06234718826405868,
      "grad_norm": 0.5958104729652405,
      "learning_rate": 0.001,
      "loss": 2.5578,
      "step": 51
    },
    {
      "epoch": 0.06356968215158924,
      "grad_norm": 0.5678538084030151,
      "learning_rate": 0.001,
      "loss": 2.4066,
      "step": 52
    },
    {
      "epoch": 0.0647921760391198,
      "grad_norm": 0.5942307114601135,
      "learning_rate": 0.001,
      "loss": 2.4738,
      "step": 53
    },
    {
      "epoch": 0.06601466992665037,
      "grad_norm": 0.5238040089607239,
      "learning_rate": 0.001,
      "loss": 2.4607,
      "step": 54
    },
    {
      "epoch": 0.06723716381418093,
      "grad_norm": 0.511229932308197,
      "learning_rate": 0.001,
      "loss": 2.549,
      "step": 55
    },
    {
      "epoch": 0.06845965770171149,
      "grad_norm": 0.5474979281425476,
      "learning_rate": 0.001,
      "loss": 2.5325,
      "step": 56
    },
    {
      "epoch": 0.06968215158924206,
      "grad_norm": 0.5083491206169128,
      "learning_rate": 0.001,
      "loss": 2.49,
      "step": 57
    },
    {
      "epoch": 0.07090464547677261,
      "grad_norm": 0.4880111813545227,
      "learning_rate": 0.001,
      "loss": 2.4353,
      "step": 58
    },
    {
      "epoch": 0.07212713936430318,
      "grad_norm": 0.5999699831008911,
      "learning_rate": 0.001,
      "loss": 2.4644,
      "step": 59
    },
    {
      "epoch": 0.07334963325183375,
      "grad_norm": 0.5478151440620422,
      "learning_rate": 0.001,
      "loss": 2.62,
      "step": 60
    },
    {
      "epoch": 0.0745721271393643,
      "grad_norm": 0.4989413619041443,
      "learning_rate": 0.001,
      "loss": 2.4704,
      "step": 61
    },
    {
      "epoch": 0.07579462102689487,
      "grad_norm": 0.6229702830314636,
      "learning_rate": 0.001,
      "loss": 2.5979,
      "step": 62
    },
    {
      "epoch": 0.07701711491442542,
      "grad_norm": 0.7081549167633057,
      "learning_rate": 0.001,
      "loss": 2.4865,
      "step": 63
    },
    {
      "epoch": 0.07823960880195599,
      "grad_norm": 0.5195762515068054,
      "learning_rate": 0.001,
      "loss": 2.5042,
      "step": 64
    },
    {
      "epoch": 0.07946210268948656,
      "grad_norm": 0.5830724835395813,
      "learning_rate": 0.001,
      "loss": 2.5001,
      "step": 65
    },
    {
      "epoch": 0.08068459657701711,
      "grad_norm": 0.5564406514167786,
      "learning_rate": 0.001,
      "loss": 2.5957,
      "step": 66
    },
    {
      "epoch": 0.08190709046454768,
      "grad_norm": 0.5549057722091675,
      "learning_rate": 0.001,
      "loss": 2.5493,
      "step": 67
    },
    {
      "epoch": 0.08312958435207823,
      "grad_norm": 0.5666396617889404,
      "learning_rate": 0.001,
      "loss": 2.5333,
      "step": 68
    },
    {
      "epoch": 0.0843520782396088,
      "grad_norm": 0.5428187251091003,
      "learning_rate": 0.001,
      "loss": 2.5426,
      "step": 69
    },
    {
      "epoch": 0.08557457212713937,
      "grad_norm": 0.653217613697052,
      "learning_rate": 0.001,
      "loss": 2.4808,
      "step": 70
    },
    {
      "epoch": 0.08679706601466992,
      "grad_norm": 0.5372606515884399,
      "learning_rate": 0.001,
      "loss": 2.4246,
      "step": 71
    },
    {
      "epoch": 0.08801955990220049,
      "grad_norm": 0.6210645437240601,
      "learning_rate": 0.001,
      "loss": 2.4826,
      "step": 72
    },
    {
      "epoch": 0.08924205378973105,
      "grad_norm": 0.6422852277755737,
      "learning_rate": 0.001,
      "loss": 2.5226,
      "step": 73
    },
    {
      "epoch": 0.09046454767726161,
      "grad_norm": 0.5775130987167358,
      "learning_rate": 0.001,
      "loss": 2.4696,
      "step": 74
    },
    {
      "epoch": 0.09168704156479218,
      "grad_norm": 0.5871543884277344,
      "learning_rate": 0.001,
      "loss": 2.4852,
      "step": 75
    },
    {
      "epoch": 0.09290953545232274,
      "grad_norm": 0.5282509326934814,
      "learning_rate": 0.001,
      "loss": 2.4266,
      "step": 76
    },
    {
      "epoch": 0.0941320293398533,
      "grad_norm": 0.6395830512046814,
      "learning_rate": 0.001,
      "loss": 2.5749,
      "step": 77
    },
    {
      "epoch": 0.09535452322738386,
      "grad_norm": 0.5609179139137268,
      "learning_rate": 0.001,
      "loss": 2.4805,
      "step": 78
    },
    {
      "epoch": 0.09657701711491443,
      "grad_norm": 0.5313189029693604,
      "learning_rate": 0.001,
      "loss": 2.4155,
      "step": 79
    },
    {
      "epoch": 0.097799511002445,
      "grad_norm": 0.5251710414886475,
      "learning_rate": 0.001,
      "loss": 2.4232,
      "step": 80
    },
    {
      "epoch": 0.09902200488997555,
      "grad_norm": 0.5261017084121704,
      "learning_rate": 0.001,
      "loss": 2.4875,
      "step": 81
    },
    {
      "epoch": 0.10024449877750612,
      "grad_norm": 0.5420725345611572,
      "learning_rate": 0.001,
      "loss": 2.4924,
      "step": 82
    },
    {
      "epoch": 0.10146699266503667,
      "grad_norm": 0.5187302827835083,
      "learning_rate": 0.001,
      "loss": 2.3575,
      "step": 83
    },
    {
      "epoch": 0.10268948655256724,
      "grad_norm": 0.5224878787994385,
      "learning_rate": 0.001,
      "loss": 2.6074,
      "step": 84
    },
    {
      "epoch": 0.1039119804400978,
      "grad_norm": 0.5518419146537781,
      "learning_rate": 0.001,
      "loss": 2.4174,
      "step": 85
    },
    {
      "epoch": 0.10513447432762836,
      "grad_norm": 0.4974496066570282,
      "learning_rate": 0.001,
      "loss": 2.425,
      "step": 86
    },
    {
      "epoch": 0.10635696821515893,
      "grad_norm": 0.47749748826026917,
      "learning_rate": 0.001,
      "loss": 2.468,
      "step": 87
    },
    {
      "epoch": 0.10757946210268948,
      "grad_norm": 0.5429526567459106,
      "learning_rate": 0.001,
      "loss": 2.4249,
      "step": 88
    },
    {
      "epoch": 0.10880195599022005,
      "grad_norm": 0.4692726135253906,
      "learning_rate": 0.001,
      "loss": 2.4903,
      "step": 89
    },
    {
      "epoch": 0.1100244498777506,
      "grad_norm": 0.5202171206474304,
      "learning_rate": 0.001,
      "loss": 2.4415,
      "step": 90
    },
    {
      "epoch": 0.11124694376528117,
      "grad_norm": 0.5242786407470703,
      "learning_rate": 0.001,
      "loss": 2.5025,
      "step": 91
    },
    {
      "epoch": 0.11246943765281174,
      "grad_norm": 0.5316799879074097,
      "learning_rate": 0.001,
      "loss": 2.4626,
      "step": 92
    },
    {
      "epoch": 0.1136919315403423,
      "grad_norm": 0.5226518511772156,
      "learning_rate": 0.001,
      "loss": 2.3977,
      "step": 93
    },
    {
      "epoch": 0.11491442542787286,
      "grad_norm": 0.5194711685180664,
      "learning_rate": 0.001,
      "loss": 2.4447,
      "step": 94
    },
    {
      "epoch": 0.11613691931540342,
      "grad_norm": 0.5524601340293884,
      "learning_rate": 0.001,
      "loss": 2.3604,
      "step": 95
    },
    {
      "epoch": 0.11735941320293398,
      "grad_norm": 0.4714466333389282,
      "learning_rate": 0.001,
      "loss": 2.4618,
      "step": 96
    },
    {
      "epoch": 0.11858190709046455,
      "grad_norm": 0.4986712336540222,
      "learning_rate": 0.001,
      "loss": 2.551,
      "step": 97
    },
    {
      "epoch": 0.1198044009779951,
      "grad_norm": 0.5235204100608826,
      "learning_rate": 0.001,
      "loss": 2.5354,
      "step": 98
    },
    {
      "epoch": 0.12102689486552567,
      "grad_norm": 0.5750053524971008,
      "learning_rate": 0.001,
      "loss": 2.4457,
      "step": 99
    },
    {
      "epoch": 0.12224938875305623,
      "grad_norm": 0.5225120782852173,
      "learning_rate": 0.001,
      "loss": 2.4616,
      "step": 100
    },
    {
      "epoch": 0.1234718826405868,
      "grad_norm": 0.5144570469856262,
      "learning_rate": 0.001,
      "loss": 2.4833,
      "step": 101
    },
    {
      "epoch": 0.12469437652811736,
      "grad_norm": 0.5372641086578369,
      "learning_rate": 0.001,
      "loss": 2.504,
      "step": 102
    },
    {
      "epoch": 0.12591687041564792,
      "grad_norm": 0.5242504477500916,
      "learning_rate": 0.001,
      "loss": 2.4074,
      "step": 103
    },
    {
      "epoch": 0.1271393643031785,
      "grad_norm": 0.5856664180755615,
      "learning_rate": 0.001,
      "loss": 2.5,
      "step": 104
    },
    {
      "epoch": 0.12836185819070906,
      "grad_norm": 0.5015215873718262,
      "learning_rate": 0.001,
      "loss": 2.4654,
      "step": 105
    },
    {
      "epoch": 0.1295843520782396,
      "grad_norm": 0.5485916137695312,
      "learning_rate": 0.001,
      "loss": 2.4536,
      "step": 106
    },
    {
      "epoch": 0.13080684596577016,
      "grad_norm": 0.4952264726161957,
      "learning_rate": 0.001,
      "loss": 2.449,
      "step": 107
    },
    {
      "epoch": 0.13202933985330073,
      "grad_norm": 0.5012094378471375,
      "learning_rate": 0.001,
      "loss": 2.5035,
      "step": 108
    },
    {
      "epoch": 0.1332518337408313,
      "grad_norm": 0.5141774415969849,
      "learning_rate": 0.001,
      "loss": 2.403,
      "step": 109
    },
    {
      "epoch": 0.13447432762836187,
      "grad_norm": 0.5255733728408813,
      "learning_rate": 0.001,
      "loss": 2.385,
      "step": 110
    },
    {
      "epoch": 0.1356968215158924,
      "grad_norm": 0.5168445706367493,
      "learning_rate": 0.001,
      "loss": 2.4347,
      "step": 111
    },
    {
      "epoch": 0.13691931540342298,
      "grad_norm": 0.5361398458480835,
      "learning_rate": 0.001,
      "loss": 2.4392,
      "step": 112
    },
    {
      "epoch": 0.13814180929095354,
      "grad_norm": 0.48438146710395813,
      "learning_rate": 0.001,
      "loss": 2.5366,
      "step": 113
    },
    {
      "epoch": 0.1393643031784841,
      "grad_norm": 0.5347726941108704,
      "learning_rate": 0.001,
      "loss": 2.5162,
      "step": 114
    },
    {
      "epoch": 0.14058679706601468,
      "grad_norm": 0.5615862011909485,
      "learning_rate": 0.001,
      "loss": 2.4167,
      "step": 115
    },
    {
      "epoch": 0.14180929095354522,
      "grad_norm": 0.4931885004043579,
      "learning_rate": 0.001,
      "loss": 2.414,
      "step": 116
    },
    {
      "epoch": 0.1430317848410758,
      "grad_norm": 0.5923001766204834,
      "learning_rate": 0.001,
      "loss": 2.568,
      "step": 117
    },
    {
      "epoch": 0.14425427872860636,
      "grad_norm": 0.5969946980476379,
      "learning_rate": 0.001,
      "loss": 2.424,
      "step": 118
    },
    {
      "epoch": 0.14547677261613692,
      "grad_norm": 0.5627629160881042,
      "learning_rate": 0.001,
      "loss": 2.4501,
      "step": 119
    },
    {
      "epoch": 0.1466992665036675,
      "grad_norm": 0.5513372421264648,
      "learning_rate": 0.001,
      "loss": 2.4966,
      "step": 120
    },
    {
      "epoch": 0.14792176039119803,
      "grad_norm": 0.5943652391433716,
      "learning_rate": 0.001,
      "loss": 2.5369,
      "step": 121
    },
    {
      "epoch": 0.1491442542787286,
      "grad_norm": 0.5254310369491577,
      "learning_rate": 0.001,
      "loss": 2.5201,
      "step": 122
    },
    {
      "epoch": 0.15036674816625917,
      "grad_norm": 0.5547128319740295,
      "learning_rate": 0.001,
      "loss": 2.4959,
      "step": 123
    },
    {
      "epoch": 0.15158924205378974,
      "grad_norm": 0.48801299929618835,
      "learning_rate": 0.001,
      "loss": 2.4478,
      "step": 124
    },
    {
      "epoch": 0.1528117359413203,
      "grad_norm": 0.5650225877761841,
      "learning_rate": 0.001,
      "loss": 2.4411,
      "step": 125
    },
    {
      "epoch": 0.15403422982885084,
      "grad_norm": 0.5800032615661621,
      "learning_rate": 0.001,
      "loss": 2.3194,
      "step": 126
    },
    {
      "epoch": 0.1552567237163814,
      "grad_norm": 0.517168402671814,
      "learning_rate": 0.001,
      "loss": 2.4967,
      "step": 127
    },
    {
      "epoch": 0.15647921760391198,
      "grad_norm": 0.5031977295875549,
      "learning_rate": 0.001,
      "loss": 2.5614,
      "step": 128
    },
    {
      "epoch": 0.15770171149144255,
      "grad_norm": 0.5302630066871643,
      "learning_rate": 0.001,
      "loss": 2.4458,
      "step": 129
    },
    {
      "epoch": 0.15892420537897312,
      "grad_norm": 0.47516071796417236,
      "learning_rate": 0.001,
      "loss": 2.4412,
      "step": 130
    },
    {
      "epoch": 0.16014669926650366,
      "grad_norm": 0.4758307635784149,
      "learning_rate": 0.001,
      "loss": 2.4896,
      "step": 131
    },
    {
      "epoch": 0.16136919315403422,
      "grad_norm": 0.5344830751419067,
      "learning_rate": 0.001,
      "loss": 2.4147,
      "step": 132
    },
    {
      "epoch": 0.1625916870415648,
      "grad_norm": 0.528596043586731,
      "learning_rate": 0.001,
      "loss": 2.5358,
      "step": 133
    },
    {
      "epoch": 0.16381418092909536,
      "grad_norm": 0.47966817021369934,
      "learning_rate": 0.001,
      "loss": 2.4143,
      "step": 134
    },
    {
      "epoch": 0.16503667481662593,
      "grad_norm": 0.6001800298690796,
      "learning_rate": 0.001,
      "loss": 2.363,
      "step": 135
    },
    {
      "epoch": 0.16625916870415647,
      "grad_norm": 0.5011780261993408,
      "learning_rate": 0.001,
      "loss": 2.4008,
      "step": 136
    },
    {
      "epoch": 0.16748166259168704,
      "grad_norm": 0.5605891346931458,
      "learning_rate": 0.001,
      "loss": 2.3927,
      "step": 137
    },
    {
      "epoch": 0.1687041564792176,
      "grad_norm": 0.48609134554862976,
      "learning_rate": 0.001,
      "loss": 2.4455,
      "step": 138
    },
    {
      "epoch": 0.16992665036674817,
      "grad_norm": 0.5741631388664246,
      "learning_rate": 0.001,
      "loss": 2.4371,
      "step": 139
    },
    {
      "epoch": 0.17114914425427874,
      "grad_norm": 0.49061235785484314,
      "learning_rate": 0.001,
      "loss": 2.495,
      "step": 140
    },
    {
      "epoch": 0.17237163814180928,
      "grad_norm": 0.48857757449150085,
      "learning_rate": 0.001,
      "loss": 2.4461,
      "step": 141
    },
    {
      "epoch": 0.17359413202933985,
      "grad_norm": 0.6730465292930603,
      "learning_rate": 0.001,
      "loss": 2.5839,
      "step": 142
    },
    {
      "epoch": 0.17481662591687042,
      "grad_norm": 0.5764455795288086,
      "learning_rate": 0.001,
      "loss": 2.4067,
      "step": 143
    },
    {
      "epoch": 0.17603911980440098,
      "grad_norm": 0.5319432020187378,
      "learning_rate": 0.001,
      "loss": 2.4074,
      "step": 144
    },
    {
      "epoch": 0.17726161369193155,
      "grad_norm": 0.5144186019897461,
      "learning_rate": 0.001,
      "loss": 2.3889,
      "step": 145
    },
    {
      "epoch": 0.1784841075794621,
      "grad_norm": 0.6696956157684326,
      "learning_rate": 0.001,
      "loss": 2.4969,
      "step": 146
    },
    {
      "epoch": 0.17970660146699266,
      "grad_norm": 0.5184170007705688,
      "learning_rate": 0.001,
      "loss": 2.3741,
      "step": 147
    },
    {
      "epoch": 0.18092909535452323,
      "grad_norm": 0.4999801218509674,
      "learning_rate": 0.001,
      "loss": 2.3909,
      "step": 148
    },
    {
      "epoch": 0.1821515892420538,
      "grad_norm": 0.5894871950149536,
      "learning_rate": 0.001,
      "loss": 2.4628,
      "step": 149
    },
    {
      "epoch": 0.18337408312958436,
      "grad_norm": 0.5019111633300781,
      "learning_rate": 0.001,
      "loss": 2.5056,
      "step": 150
    },
    {
      "epoch": 0.1845965770171149,
      "grad_norm": 0.636766791343689,
      "learning_rate": 0.001,
      "loss": 2.4687,
      "step": 151
    },
    {
      "epoch": 0.18581907090464547,
      "grad_norm": 0.5625051259994507,
      "learning_rate": 0.001,
      "loss": 2.5754,
      "step": 152
    },
    {
      "epoch": 0.18704156479217604,
      "grad_norm": 0.5557712912559509,
      "learning_rate": 0.001,
      "loss": 2.4302,
      "step": 153
    },
    {
      "epoch": 0.1882640586797066,
      "grad_norm": 0.5352074503898621,
      "learning_rate": 0.001,
      "loss": 2.3118,
      "step": 154
    },
    {
      "epoch": 0.18948655256723718,
      "grad_norm": 0.5917800664901733,
      "learning_rate": 0.001,
      "loss": 2.4828,
      "step": 155
    },
    {
      "epoch": 0.19070904645476772,
      "grad_norm": 0.49687594175338745,
      "learning_rate": 0.001,
      "loss": 2.4071,
      "step": 156
    },
    {
      "epoch": 0.19193154034229828,
      "grad_norm": 0.5303874611854553,
      "learning_rate": 0.001,
      "loss": 2.4907,
      "step": 157
    },
    {
      "epoch": 0.19315403422982885,
      "grad_norm": 0.5479294061660767,
      "learning_rate": 0.001,
      "loss": 2.4308,
      "step": 158
    },
    {
      "epoch": 0.19437652811735942,
      "grad_norm": 0.56096351146698,
      "learning_rate": 0.001,
      "loss": 2.4961,
      "step": 159
    },
    {
      "epoch": 0.19559902200489,
      "grad_norm": 0.5233320593833923,
      "learning_rate": 0.001,
      "loss": 2.4017,
      "step": 160
    },
    {
      "epoch": 0.19682151589242053,
      "grad_norm": 0.540540337562561,
      "learning_rate": 0.001,
      "loss": 2.3942,
      "step": 161
    },
    {
      "epoch": 0.1980440097799511,
      "grad_norm": 0.5374789834022522,
      "learning_rate": 0.001,
      "loss": 2.4976,
      "step": 162
    },
    {
      "epoch": 0.19926650366748166,
      "grad_norm": 0.5012461543083191,
      "learning_rate": 0.001,
      "loss": 2.4017,
      "step": 163
    },
    {
      "epoch": 0.20048899755501223,
      "grad_norm": 0.5607964396476746,
      "learning_rate": 0.001,
      "loss": 2.3732,
      "step": 164
    },
    {
      "epoch": 0.2017114914425428,
      "grad_norm": 0.48764991760253906,
      "learning_rate": 0.001,
      "loss": 2.4003,
      "step": 165
    },
    {
      "epoch": 0.20293398533007334,
      "grad_norm": 0.5440587997436523,
      "learning_rate": 0.001,
      "loss": 2.4653,
      "step": 166
    },
    {
      "epoch": 0.2041564792176039,
      "grad_norm": 0.5021042227745056,
      "learning_rate": 0.001,
      "loss": 2.3921,
      "step": 167
    },
    {
      "epoch": 0.20537897310513448,
      "grad_norm": 0.5661401748657227,
      "learning_rate": 0.001,
      "loss": 2.471,
      "step": 168
    },
    {
      "epoch": 0.20660146699266504,
      "grad_norm": 0.5360035300254822,
      "learning_rate": 0.001,
      "loss": 2.5091,
      "step": 169
    },
    {
      "epoch": 0.2078239608801956,
      "grad_norm": 0.5328943133354187,
      "learning_rate": 0.001,
      "loss": 2.3627,
      "step": 170
    },
    {
      "epoch": 0.20904645476772615,
      "grad_norm": 0.5662642121315002,
      "learning_rate": 0.001,
      "loss": 2.4411,
      "step": 171
    },
    {
      "epoch": 0.21026894865525672,
      "grad_norm": 0.4912776052951813,
      "learning_rate": 0.001,
      "loss": 2.3941,
      "step": 172
    },
    {
      "epoch": 0.2114914425427873,
      "grad_norm": 0.5288503766059875,
      "learning_rate": 0.001,
      "loss": 2.4138,
      "step": 173
    },
    {
      "epoch": 0.21271393643031786,
      "grad_norm": 0.5616488456726074,
      "learning_rate": 0.001,
      "loss": 2.5105,
      "step": 174
    },
    {
      "epoch": 0.2139364303178484,
      "grad_norm": 0.5832340717315674,
      "learning_rate": 0.001,
      "loss": 2.5503,
      "step": 175
    },
    {
      "epoch": 0.21515892420537897,
      "grad_norm": 0.49610635638237,
      "learning_rate": 0.001,
      "loss": 2.4249,
      "step": 176
    },
    {
      "epoch": 0.21638141809290953,
      "grad_norm": 0.5436179637908936,
      "learning_rate": 0.001,
      "loss": 2.3545,
      "step": 177
    },
    {
      "epoch": 0.2176039119804401,
      "grad_norm": 0.586430549621582,
      "learning_rate": 0.001,
      "loss": 2.418,
      "step": 178
    },
    {
      "epoch": 0.21882640586797067,
      "grad_norm": 0.5396440029144287,
      "learning_rate": 0.001,
      "loss": 2.3565,
      "step": 179
    },
    {
      "epoch": 0.2200488997555012,
      "grad_norm": 0.5660167336463928,
      "learning_rate": 0.001,
      "loss": 2.4284,
      "step": 180
    },
    {
      "epoch": 0.22127139364303178,
      "grad_norm": 0.5947809219360352,
      "learning_rate": 0.001,
      "loss": 2.3546,
      "step": 181
    },
    {
      "epoch": 0.22249388753056235,
      "grad_norm": 0.5506652593612671,
      "learning_rate": 0.001,
      "loss": 2.3467,
      "step": 182
    },
    {
      "epoch": 0.2237163814180929,
      "grad_norm": 0.6263591647148132,
      "learning_rate": 0.001,
      "loss": 2.4552,
      "step": 183
    },
    {
      "epoch": 0.22493887530562348,
      "grad_norm": 0.5154738426208496,
      "learning_rate": 0.001,
      "loss": 2.4508,
      "step": 184
    },
    {
      "epoch": 0.22616136919315402,
      "grad_norm": 0.6300072073936462,
      "learning_rate": 0.001,
      "loss": 2.4095,
      "step": 185
    },
    {
      "epoch": 0.2273838630806846,
      "grad_norm": 0.5022745132446289,
      "learning_rate": 0.001,
      "loss": 2.423,
      "step": 186
    },
    {
      "epoch": 0.22860635696821516,
      "grad_norm": 0.599410355091095,
      "learning_rate": 0.001,
      "loss": 2.3954,
      "step": 187
    },
    {
      "epoch": 0.22982885085574573,
      "grad_norm": 0.5577519536018372,
      "learning_rate": 0.001,
      "loss": 2.3529,
      "step": 188
    },
    {
      "epoch": 0.2310513447432763,
      "grad_norm": 0.5671312808990479,
      "learning_rate": 0.001,
      "loss": 2.4023,
      "step": 189
    },
    {
      "epoch": 0.23227383863080683,
      "grad_norm": 0.5201452970504761,
      "learning_rate": 0.001,
      "loss": 2.4185,
      "step": 190
    },
    {
      "epoch": 0.2334963325183374,
      "grad_norm": 0.5384425520896912,
      "learning_rate": 0.001,
      "loss": 2.4863,
      "step": 191
    },
    {
      "epoch": 0.23471882640586797,
      "grad_norm": 0.4962243139743805,
      "learning_rate": 0.001,
      "loss": 2.5203,
      "step": 192
    },
    {
      "epoch": 0.23594132029339854,
      "grad_norm": 0.5636964440345764,
      "learning_rate": 0.001,
      "loss": 2.4688,
      "step": 193
    },
    {
      "epoch": 0.2371638141809291,
      "grad_norm": 0.538328230381012,
      "learning_rate": 0.001,
      "loss": 2.3808,
      "step": 194
    },
    {
      "epoch": 0.23838630806845965,
      "grad_norm": 0.5091468691825867,
      "learning_rate": 0.001,
      "loss": 2.3791,
      "step": 195
    },
    {
      "epoch": 0.2396088019559902,
      "grad_norm": 0.5237581729888916,
      "learning_rate": 0.001,
      "loss": 2.3898,
      "step": 196
    },
    {
      "epoch": 0.24083129584352078,
      "grad_norm": 0.510797381401062,
      "learning_rate": 0.001,
      "loss": 2.4203,
      "step": 197
    },
    {
      "epoch": 0.24205378973105135,
      "grad_norm": 0.5363763570785522,
      "learning_rate": 0.001,
      "loss": 2.4296,
      "step": 198
    },
    {
      "epoch": 0.24327628361858192,
      "grad_norm": 0.5146409869194031,
      "learning_rate": 0.001,
      "loss": 2.5014,
      "step": 199
    },
    {
      "epoch": 0.24449877750611246,
      "grad_norm": 0.4853112995624542,
      "learning_rate": 0.001,
      "loss": 2.4751,
      "step": 200
    },
    {
      "epoch": 0.24572127139364303,
      "grad_norm": 0.5601246356964111,
      "learning_rate": 0.001,
      "loss": 2.4001,
      "step": 201
    },
    {
      "epoch": 0.2469437652811736,
      "grad_norm": 0.5031394362449646,
      "learning_rate": 0.001,
      "loss": 2.3763,
      "step": 202
    },
    {
      "epoch": 0.24816625916870416,
      "grad_norm": 0.5693891644477844,
      "learning_rate": 0.001,
      "loss": 2.3937,
      "step": 203
    },
    {
      "epoch": 0.24938875305623473,
      "grad_norm": 0.5505186319351196,
      "learning_rate": 0.001,
      "loss": 2.3517,
      "step": 204
    },
    {
      "epoch": 0.2506112469437653,
      "grad_norm": 0.5346199870109558,
      "learning_rate": 0.001,
      "loss": 2.4259,
      "step": 205
    },
    {
      "epoch": 0.2506112469437653,
      "eval_loss": 2.4109721183776855,
      "eval_runtime": 44.1933,
      "eval_samples_per_second": 236.801,
      "eval_steps_per_second": 29.62,
      "step": 205
    },
    {
      "epoch": 0.25183374083129584,
      "grad_norm": 0.5326516032218933,
      "learning_rate": 0.001,
      "loss": 2.3483,
      "step": 206
    },
    {
      "epoch": 0.2530562347188264,
      "grad_norm": 0.522074818611145,
      "learning_rate": 0.001,
      "loss": 2.3972,
      "step": 207
    },
    {
      "epoch": 0.254278728606357,
      "grad_norm": 0.5074703097343445,
      "learning_rate": 0.001,
      "loss": 2.4478,
      "step": 208
    },
    {
      "epoch": 0.2555012224938875,
      "grad_norm": 0.5311232805252075,
      "learning_rate": 0.001,
      "loss": 2.5704,
      "step": 209
    },
    {
      "epoch": 0.2567237163814181,
      "grad_norm": 0.6050538420677185,
      "learning_rate": 0.001,
      "loss": 2.4128,
      "step": 210
    },
    {
      "epoch": 0.25794621026894865,
      "grad_norm": 0.5808090567588806,
      "learning_rate": 0.001,
      "loss": 2.4556,
      "step": 211
    },
    {
      "epoch": 0.2591687041564792,
      "grad_norm": 0.6892769932746887,
      "learning_rate": 0.001,
      "loss": 2.4149,
      "step": 212
    },
    {
      "epoch": 0.2603911980440098,
      "grad_norm": 0.5970401167869568,
      "learning_rate": 0.001,
      "loss": 2.4635,
      "step": 213
    },
    {
      "epoch": 0.2616136919315403,
      "grad_norm": 0.5463938117027283,
      "learning_rate": 0.001,
      "loss": 2.4833,
      "step": 214
    },
    {
      "epoch": 0.2628361858190709,
      "grad_norm": 0.5739268660545349,
      "learning_rate": 0.001,
      "loss": 2.3873,
      "step": 215
    },
    {
      "epoch": 0.26405867970660146,
      "grad_norm": 0.5724747776985168,
      "learning_rate": 0.001,
      "loss": 2.4843,
      "step": 216
    },
    {
      "epoch": 0.265281173594132,
      "grad_norm": 0.6063271760940552,
      "learning_rate": 0.001,
      "loss": 2.4753,
      "step": 217
    },
    {
      "epoch": 0.2665036674816626,
      "grad_norm": 0.5265262722969055,
      "learning_rate": 0.001,
      "loss": 2.4289,
      "step": 218
    },
    {
      "epoch": 0.26772616136919314,
      "grad_norm": 0.6319820880889893,
      "learning_rate": 0.001,
      "loss": 2.3428,
      "step": 219
    },
    {
      "epoch": 0.26894865525672373,
      "grad_norm": 0.6463683247566223,
      "learning_rate": 0.001,
      "loss": 2.4524,
      "step": 220
    },
    {
      "epoch": 0.2701711491442543,
      "grad_norm": 0.5888187885284424,
      "learning_rate": 0.001,
      "loss": 2.4852,
      "step": 221
    },
    {
      "epoch": 0.2713936430317848,
      "grad_norm": 0.6894919276237488,
      "learning_rate": 0.001,
      "loss": 2.4537,
      "step": 222
    },
    {
      "epoch": 0.2726161369193154,
      "grad_norm": 0.5876211524009705,
      "learning_rate": 0.001,
      "loss": 2.3938,
      "step": 223
    },
    {
      "epoch": 0.27383863080684595,
      "grad_norm": 0.5431022644042969,
      "learning_rate": 0.001,
      "loss": 2.3515,
      "step": 224
    },
    {
      "epoch": 0.27506112469437655,
      "grad_norm": 0.5700949430465698,
      "learning_rate": 0.001,
      "loss": 2.2915,
      "step": 225
    },
    {
      "epoch": 0.2762836185819071,
      "grad_norm": 0.5603176355361938,
      "learning_rate": 0.001,
      "loss": 2.3743,
      "step": 226
    },
    {
      "epoch": 0.2775061124694376,
      "grad_norm": 0.6375773549079895,
      "learning_rate": 0.001,
      "loss": 2.5753,
      "step": 227
    },
    {
      "epoch": 0.2787286063569682,
      "grad_norm": 0.5627942085266113,
      "learning_rate": 0.001,
      "loss": 2.308,
      "step": 228
    },
    {
      "epoch": 0.27995110024449876,
      "grad_norm": 0.5948708653450012,
      "learning_rate": 0.001,
      "loss": 2.3451,
      "step": 229
    },
    {
      "epoch": 0.28117359413202936,
      "grad_norm": 0.48628291487693787,
      "learning_rate": 0.001,
      "loss": 2.4578,
      "step": 230
    },
    {
      "epoch": 0.2823960880195599,
      "grad_norm": 0.651255190372467,
      "learning_rate": 0.001,
      "loss": 2.4205,
      "step": 231
    },
    {
      "epoch": 0.28361858190709044,
      "grad_norm": 0.5403560400009155,
      "learning_rate": 0.001,
      "loss": 2.4456,
      "step": 232
    },
    {
      "epoch": 0.28484107579462103,
      "grad_norm": 0.5174692869186401,
      "learning_rate": 0.001,
      "loss": 2.3616,
      "step": 233
    },
    {
      "epoch": 0.2860635696821516,
      "grad_norm": 0.6177205443382263,
      "learning_rate": 0.001,
      "loss": 2.5222,
      "step": 234
    },
    {
      "epoch": 0.28728606356968217,
      "grad_norm": 0.5286775231361389,
      "learning_rate": 0.001,
      "loss": 2.4493,
      "step": 235
    },
    {
      "epoch": 0.2885085574572127,
      "grad_norm": 0.5577534437179565,
      "learning_rate": 0.001,
      "loss": 2.4203,
      "step": 236
    },
    {
      "epoch": 0.28973105134474325,
      "grad_norm": 0.5461423397064209,
      "learning_rate": 0.001,
      "loss": 2.4714,
      "step": 237
    },
    {
      "epoch": 0.29095354523227385,
      "grad_norm": 0.5145041346549988,
      "learning_rate": 0.001,
      "loss": 2.4164,
      "step": 238
    },
    {
      "epoch": 0.2921760391198044,
      "grad_norm": 0.5773872137069702,
      "learning_rate": 0.001,
      "loss": 2.4079,
      "step": 239
    },
    {
      "epoch": 0.293398533007335,
      "grad_norm": 0.5076879858970642,
      "learning_rate": 0.001,
      "loss": 2.4656,
      "step": 240
    },
    {
      "epoch": 0.2946210268948655,
      "grad_norm": 0.5377123951911926,
      "learning_rate": 0.001,
      "loss": 2.4747,
      "step": 241
    },
    {
      "epoch": 0.29584352078239606,
      "grad_norm": 0.537397563457489,
      "learning_rate": 0.001,
      "loss": 2.4301,
      "step": 242
    },
    {
      "epoch": 0.29706601466992666,
      "grad_norm": 0.5414688587188721,
      "learning_rate": 0.001,
      "loss": 2.4186,
      "step": 243
    },
    {
      "epoch": 0.2982885085574572,
      "grad_norm": 0.5407987833023071,
      "learning_rate": 0.001,
      "loss": 2.4053,
      "step": 244
    },
    {
      "epoch": 0.2995110024449878,
      "grad_norm": 0.6226968169212341,
      "learning_rate": 0.001,
      "loss": 2.2887,
      "step": 245
    },
    {
      "epoch": 0.30073349633251834,
      "grad_norm": 0.5672168135643005,
      "learning_rate": 0.001,
      "loss": 2.4579,
      "step": 246
    },
    {
      "epoch": 0.3019559902200489,
      "grad_norm": 0.5122395157814026,
      "learning_rate": 0.001,
      "loss": 2.3413,
      "step": 247
    },
    {
      "epoch": 0.30317848410757947,
      "grad_norm": 0.5855796337127686,
      "learning_rate": 0.001,
      "loss": 2.3533,
      "step": 248
    },
    {
      "epoch": 0.30440097799511,
      "grad_norm": 0.5746926069259644,
      "learning_rate": 0.001,
      "loss": 2.4499,
      "step": 249
    },
    {
      "epoch": 0.3056234718826406,
      "grad_norm": 0.5400431752204895,
      "learning_rate": 0.001,
      "loss": 2.3914,
      "step": 250
    },
    {
      "epoch": 0.30684596577017115,
      "grad_norm": 0.571844756603241,
      "learning_rate": 0.001,
      "loss": 2.3883,
      "step": 251
    },
    {
      "epoch": 0.3080684596577017,
      "grad_norm": 0.5428387522697449,
      "learning_rate": 0.001,
      "loss": 2.3792,
      "step": 252
    },
    {
      "epoch": 0.3092909535452323,
      "grad_norm": 0.5149421095848083,
      "learning_rate": 0.001,
      "loss": 2.3698,
      "step": 253
    },
    {
      "epoch": 0.3105134474327628,
      "grad_norm": 0.8051120638847351,
      "learning_rate": 0.001,
      "loss": 2.4721,
      "step": 254
    },
    {
      "epoch": 0.3117359413202934,
      "grad_norm": 0.5592311024665833,
      "learning_rate": 0.001,
      "loss": 2.4215,
      "step": 255
    },
    {
      "epoch": 0.31295843520782396,
      "grad_norm": 0.6142417788505554,
      "learning_rate": 0.001,
      "loss": 2.4274,
      "step": 256
    },
    {
      "epoch": 0.3141809290953545,
      "grad_norm": 0.5631799697875977,
      "learning_rate": 0.001,
      "loss": 2.4201,
      "step": 257
    },
    {
      "epoch": 0.3154034229828851,
      "grad_norm": 0.5501461029052734,
      "learning_rate": 0.001,
      "loss": 2.481,
      "step": 258
    },
    {
      "epoch": 0.31662591687041564,
      "grad_norm": 0.5580623149871826,
      "learning_rate": 0.001,
      "loss": 2.4626,
      "step": 259
    },
    {
      "epoch": 0.31784841075794623,
      "grad_norm": 0.5429579019546509,
      "learning_rate": 0.001,
      "loss": 2.4345,
      "step": 260
    },
    {
      "epoch": 0.31907090464547677,
      "grad_norm": 0.5621469020843506,
      "learning_rate": 0.001,
      "loss": 2.4714,
      "step": 261
    },
    {
      "epoch": 0.3202933985330073,
      "grad_norm": 0.6367106437683105,
      "learning_rate": 0.001,
      "loss": 2.4289,
      "step": 262
    },
    {
      "epoch": 0.3215158924205379,
      "grad_norm": 0.5402144193649292,
      "learning_rate": 0.001,
      "loss": 2.4761,
      "step": 263
    },
    {
      "epoch": 0.32273838630806845,
      "grad_norm": 0.5153480172157288,
      "learning_rate": 0.001,
      "loss": 2.3916,
      "step": 264
    },
    {
      "epoch": 0.32396088019559904,
      "grad_norm": 0.554441511631012,
      "learning_rate": 0.001,
      "loss": 2.4412,
      "step": 265
    },
    {
      "epoch": 0.3251833740831296,
      "grad_norm": 0.5480029582977295,
      "learning_rate": 0.001,
      "loss": 2.3595,
      "step": 266
    },
    {
      "epoch": 0.3264058679706601,
      "grad_norm": 0.6597166657447815,
      "learning_rate": 0.001,
      "loss": 2.434,
      "step": 267
    },
    {
      "epoch": 0.3276283618581907,
      "grad_norm": 0.6232181787490845,
      "learning_rate": 0.001,
      "loss": 2.438,
      "step": 268
    },
    {
      "epoch": 0.32885085574572126,
      "grad_norm": 0.5220692157745361,
      "learning_rate": 0.001,
      "loss": 2.3218,
      "step": 269
    },
    {
      "epoch": 0.33007334963325186,
      "grad_norm": 0.5414378046989441,
      "learning_rate": 0.001,
      "loss": 2.449,
      "step": 270
    },
    {
      "epoch": 0.3312958435207824,
      "grad_norm": 0.5560607314109802,
      "learning_rate": 0.001,
      "loss": 2.4541,
      "step": 271
    },
    {
      "epoch": 0.33251833740831294,
      "grad_norm": 0.6442712545394897,
      "learning_rate": 0.001,
      "loss": 2.4726,
      "step": 272
    },
    {
      "epoch": 0.33374083129584353,
      "grad_norm": 0.5586593151092529,
      "learning_rate": 0.001,
      "loss": 2.4483,
      "step": 273
    },
    {
      "epoch": 0.33496332518337407,
      "grad_norm": 0.5124000906944275,
      "learning_rate": 0.001,
      "loss": 2.3708,
      "step": 274
    },
    {
      "epoch": 0.33618581907090467,
      "grad_norm": 0.5178530812263489,
      "learning_rate": 0.001,
      "loss": 2.4624,
      "step": 275
    },
    {
      "epoch": 0.3374083129584352,
      "grad_norm": 0.5543733835220337,
      "learning_rate": 0.001,
      "loss": 2.4235,
      "step": 276
    },
    {
      "epoch": 0.33863080684596575,
      "grad_norm": 0.4837280213832855,
      "learning_rate": 0.001,
      "loss": 2.3998,
      "step": 277
    },
    {
      "epoch": 0.33985330073349634,
      "grad_norm": 0.5489963293075562,
      "learning_rate": 0.001,
      "loss": 2.3875,
      "step": 278
    },
    {
      "epoch": 0.3410757946210269,
      "grad_norm": 0.4998355209827423,
      "learning_rate": 0.001,
      "loss": 2.3404,
      "step": 279
    },
    {
      "epoch": 0.3422982885085575,
      "grad_norm": 0.5590275526046753,
      "learning_rate": 0.001,
      "loss": 2.4129,
      "step": 280
    },
    {
      "epoch": 0.343520782396088,
      "grad_norm": 0.5230002403259277,
      "learning_rate": 0.001,
      "loss": 2.4659,
      "step": 281
    },
    {
      "epoch": 0.34474327628361856,
      "grad_norm": 0.5316391587257385,
      "learning_rate": 0.001,
      "loss": 2.4093,
      "step": 282
    },
    {
      "epoch": 0.34596577017114916,
      "grad_norm": 0.5131670832633972,
      "learning_rate": 0.001,
      "loss": 2.4951,
      "step": 283
    },
    {
      "epoch": 0.3471882640586797,
      "grad_norm": 0.6025881767272949,
      "learning_rate": 0.001,
      "loss": 2.3711,
      "step": 284
    },
    {
      "epoch": 0.3484107579462103,
      "grad_norm": 0.5566193461418152,
      "learning_rate": 0.001,
      "loss": 2.4284,
      "step": 285
    },
    {
      "epoch": 0.34963325183374083,
      "grad_norm": 0.5089775323867798,
      "learning_rate": 0.001,
      "loss": 2.3437,
      "step": 286
    },
    {
      "epoch": 0.3508557457212714,
      "grad_norm": 0.5217143297195435,
      "learning_rate": 0.001,
      "loss": 2.3794,
      "step": 287
    },
    {
      "epoch": 0.35207823960880197,
      "grad_norm": 0.5089473128318787,
      "learning_rate": 0.001,
      "loss": 2.4371,
      "step": 288
    },
    {
      "epoch": 0.3533007334963325,
      "grad_norm": 0.6327386498451233,
      "learning_rate": 0.001,
      "loss": 2.4685,
      "step": 289
    },
    {
      "epoch": 0.3545232273838631,
      "grad_norm": 0.5239629745483398,
      "learning_rate": 0.001,
      "loss": 2.3723,
      "step": 290
    },
    {
      "epoch": 0.35574572127139364,
      "grad_norm": 0.5306011438369751,
      "learning_rate": 0.001,
      "loss": 2.3622,
      "step": 291
    },
    {
      "epoch": 0.3569682151589242,
      "grad_norm": 0.5014710426330566,
      "learning_rate": 0.001,
      "loss": 2.3371,
      "step": 292
    },
    {
      "epoch": 0.3581907090464548,
      "grad_norm": 0.5310878157615662,
      "learning_rate": 0.001,
      "loss": 2.4087,
      "step": 293
    },
    {
      "epoch": 0.3594132029339853,
      "grad_norm": 0.5552276968955994,
      "learning_rate": 0.001,
      "loss": 2.4021,
      "step": 294
    },
    {
      "epoch": 0.3606356968215159,
      "grad_norm": 0.5808470845222473,
      "learning_rate": 0.001,
      "loss": 2.4378,
      "step": 295
    },
    {
      "epoch": 0.36185819070904646,
      "grad_norm": 0.5333114266395569,
      "learning_rate": 0.001,
      "loss": 2.5238,
      "step": 296
    },
    {
      "epoch": 0.363080684596577,
      "grad_norm": 0.5774756073951721,
      "learning_rate": 0.001,
      "loss": 2.48,
      "step": 297
    },
    {
      "epoch": 0.3643031784841076,
      "grad_norm": 0.5121151804924011,
      "learning_rate": 0.001,
      "loss": 2.3687,
      "step": 298
    },
    {
      "epoch": 0.36552567237163813,
      "grad_norm": 0.5714420676231384,
      "learning_rate": 0.001,
      "loss": 2.4214,
      "step": 299
    },
    {
      "epoch": 0.36674816625916873,
      "grad_norm": 0.5754179358482361,
      "learning_rate": 0.001,
      "loss": 2.4247,
      "step": 300
    },
    {
      "epoch": 0.36797066014669927,
      "grad_norm": 0.7446674704551697,
      "learning_rate": 0.001,
      "loss": 2.4925,
      "step": 301
    },
    {
      "epoch": 0.3691931540342298,
      "grad_norm": 0.5484544038772583,
      "learning_rate": 0.001,
      "loss": 2.3883,
      "step": 302
    },
    {
      "epoch": 0.3704156479217604,
      "grad_norm": 0.5274435877799988,
      "learning_rate": 0.001,
      "loss": 2.4063,
      "step": 303
    },
    {
      "epoch": 0.37163814180929094,
      "grad_norm": 0.5567039847373962,
      "learning_rate": 0.001,
      "loss": 2.4273,
      "step": 304
    },
    {
      "epoch": 0.37286063569682154,
      "grad_norm": 0.5994822978973389,
      "learning_rate": 0.001,
      "loss": 2.5011,
      "step": 305
    },
    {
      "epoch": 0.3740831295843521,
      "grad_norm": 0.5415587425231934,
      "learning_rate": 0.001,
      "loss": 2.3553,
      "step": 306
    },
    {
      "epoch": 0.3753056234718826,
      "grad_norm": 0.6581827998161316,
      "learning_rate": 0.001,
      "loss": 2.4837,
      "step": 307
    },
    {
      "epoch": 0.3765281173594132,
      "grad_norm": 0.6114663481712341,
      "learning_rate": 0.001,
      "loss": 2.4078,
      "step": 308
    },
    {
      "epoch": 0.37775061124694376,
      "grad_norm": 0.5403485894203186,
      "learning_rate": 0.001,
      "loss": 2.3983,
      "step": 309
    },
    {
      "epoch": 0.37897310513447435,
      "grad_norm": 0.5375344157218933,
      "learning_rate": 0.001,
      "loss": 2.392,
      "step": 310
    },
    {
      "epoch": 0.3801955990220049,
      "grad_norm": 0.5954229831695557,
      "learning_rate": 0.001,
      "loss": 2.3351,
      "step": 311
    },
    {
      "epoch": 0.38141809290953543,
      "grad_norm": 0.6141201257705688,
      "learning_rate": 0.001,
      "loss": 2.4802,
      "step": 312
    },
    {
      "epoch": 0.38264058679706603,
      "grad_norm": 0.5843420028686523,
      "learning_rate": 0.001,
      "loss": 2.3837,
      "step": 313
    },
    {
      "epoch": 0.38386308068459657,
      "grad_norm": 0.5854500532150269,
      "learning_rate": 0.001,
      "loss": 2.4691,
      "step": 314
    },
    {
      "epoch": 0.38508557457212717,
      "grad_norm": 0.49812009930610657,
      "learning_rate": 0.001,
      "loss": 2.2868,
      "step": 315
    },
    {
      "epoch": 0.3863080684596577,
      "grad_norm": 0.5608780980110168,
      "learning_rate": 0.001,
      "loss": 2.3501,
      "step": 316
    },
    {
      "epoch": 0.38753056234718825,
      "grad_norm": 0.6470975279808044,
      "learning_rate": 0.001,
      "loss": 2.3845,
      "step": 317
    },
    {
      "epoch": 0.38875305623471884,
      "grad_norm": 0.5103499293327332,
      "learning_rate": 0.001,
      "loss": 2.3544,
      "step": 318
    },
    {
      "epoch": 0.3899755501222494,
      "grad_norm": 0.5402243733406067,
      "learning_rate": 0.001,
      "loss": 2.494,
      "step": 319
    },
    {
      "epoch": 0.39119804400978,
      "grad_norm": 0.5098109245300293,
      "learning_rate": 0.001,
      "loss": 2.3771,
      "step": 320
    },
    {
      "epoch": 0.3924205378973105,
      "grad_norm": 0.5166260600090027,
      "learning_rate": 0.001,
      "loss": 2.4165,
      "step": 321
    },
    {
      "epoch": 0.39364303178484106,
      "grad_norm": 0.47534164786338806,
      "learning_rate": 0.001,
      "loss": 2.376,
      "step": 322
    },
    {
      "epoch": 0.39486552567237165,
      "grad_norm": 0.5538984537124634,
      "learning_rate": 0.001,
      "loss": 2.377,
      "step": 323
    },
    {
      "epoch": 0.3960880195599022,
      "grad_norm": 0.516322910785675,
      "learning_rate": 0.001,
      "loss": 2.4509,
      "step": 324
    },
    {
      "epoch": 0.3973105134474328,
      "grad_norm": 0.6167128682136536,
      "learning_rate": 0.001,
      "loss": 2.3846,
      "step": 325
    },
    {
      "epoch": 0.39853300733496333,
      "grad_norm": 0.5723311305046082,
      "learning_rate": 0.001,
      "loss": 2.3715,
      "step": 326
    },
    {
      "epoch": 0.39975550122249387,
      "grad_norm": 0.5759738087654114,
      "learning_rate": 0.001,
      "loss": 2.3472,
      "step": 327
    },
    {
      "epoch": 0.40097799511002447,
      "grad_norm": 0.6038229465484619,
      "learning_rate": 0.001,
      "loss": 2.4291,
      "step": 328
    },
    {
      "epoch": 0.402200488997555,
      "grad_norm": 0.534037709236145,
      "learning_rate": 0.001,
      "loss": 2.4265,
      "step": 329
    },
    {
      "epoch": 0.4034229828850856,
      "grad_norm": 0.59047931432724,
      "learning_rate": 0.001,
      "loss": 2.421,
      "step": 330
    },
    {
      "epoch": 0.40464547677261614,
      "grad_norm": 0.5716297626495361,
      "learning_rate": 0.001,
      "loss": 2.4336,
      "step": 331
    },
    {
      "epoch": 0.4058679706601467,
      "grad_norm": 0.6059308052062988,
      "learning_rate": 0.001,
      "loss": 2.4722,
      "step": 332
    },
    {
      "epoch": 0.4070904645476773,
      "grad_norm": 0.5313076376914978,
      "learning_rate": 0.001,
      "loss": 2.4054,
      "step": 333
    },
    {
      "epoch": 0.4083129584352078,
      "grad_norm": 0.5419294834136963,
      "learning_rate": 0.001,
      "loss": 2.3978,
      "step": 334
    },
    {
      "epoch": 0.4095354523227384,
      "grad_norm": 0.5434341430664062,
      "learning_rate": 0.001,
      "loss": 2.423,
      "step": 335
    },
    {
      "epoch": 0.41075794621026895,
      "grad_norm": 0.5907472968101501,
      "learning_rate": 0.001,
      "loss": 2.399,
      "step": 336
    },
    {
      "epoch": 0.4119804400977995,
      "grad_norm": 0.5154129266738892,
      "learning_rate": 0.001,
      "loss": 2.4192,
      "step": 337
    },
    {
      "epoch": 0.4132029339853301,
      "grad_norm": 0.5305936336517334,
      "learning_rate": 0.001,
      "loss": 2.3631,
      "step": 338
    },
    {
      "epoch": 0.41442542787286063,
      "grad_norm": 0.5667030215263367,
      "learning_rate": 0.001,
      "loss": 2.4021,
      "step": 339
    },
    {
      "epoch": 0.4156479217603912,
      "grad_norm": 0.602198600769043,
      "learning_rate": 0.001,
      "loss": 2.3771,
      "step": 340
    },
    {
      "epoch": 0.41687041564792177,
      "grad_norm": 0.5672926902770996,
      "learning_rate": 0.001,
      "loss": 2.4006,
      "step": 341
    },
    {
      "epoch": 0.4180929095354523,
      "grad_norm": 0.5844594836235046,
      "learning_rate": 0.001,
      "loss": 2.3617,
      "step": 342
    },
    {
      "epoch": 0.4193154034229829,
      "grad_norm": 0.5684323906898499,
      "learning_rate": 0.001,
      "loss": 2.4159,
      "step": 343
    },
    {
      "epoch": 0.42053789731051344,
      "grad_norm": 0.6692149639129639,
      "learning_rate": 0.001,
      "loss": 2.4121,
      "step": 344
    },
    {
      "epoch": 0.421760391198044,
      "grad_norm": 0.5522905588150024,
      "learning_rate": 0.001,
      "loss": 2.5351,
      "step": 345
    },
    {
      "epoch": 0.4229828850855746,
      "grad_norm": 0.5908128619194031,
      "learning_rate": 0.001,
      "loss": 2.303,
      "step": 346
    },
    {
      "epoch": 0.4242053789731051,
      "grad_norm": 0.6019754409790039,
      "learning_rate": 0.001,
      "loss": 2.4212,
      "step": 347
    },
    {
      "epoch": 0.4254278728606357,
      "grad_norm": 0.5244648456573486,
      "learning_rate": 0.001,
      "loss": 2.4506,
      "step": 348
    },
    {
      "epoch": 0.42665036674816625,
      "grad_norm": 0.6195573806762695,
      "learning_rate": 0.001,
      "loss": 2.4397,
      "step": 349
    },
    {
      "epoch": 0.4278728606356968,
      "grad_norm": 0.5599300265312195,
      "learning_rate": 0.001,
      "loss": 2.4369,
      "step": 350
    },
    {
      "epoch": 0.4290953545232274,
      "grad_norm": 0.5785163640975952,
      "learning_rate": 0.001,
      "loss": 2.2845,
      "step": 351
    },
    {
      "epoch": 0.43031784841075793,
      "grad_norm": 0.5526220202445984,
      "learning_rate": 0.001,
      "loss": 2.3835,
      "step": 352
    },
    {
      "epoch": 0.4315403422982885,
      "grad_norm": 0.520535409450531,
      "learning_rate": 0.001,
      "loss": 2.4043,
      "step": 353
    },
    {
      "epoch": 0.43276283618581907,
      "grad_norm": 0.5346786975860596,
      "learning_rate": 0.001,
      "loss": 2.4634,
      "step": 354
    },
    {
      "epoch": 0.4339853300733496,
      "grad_norm": 0.5633550882339478,
      "learning_rate": 0.001,
      "loss": 2.4424,
      "step": 355
    },
    {
      "epoch": 0.4352078239608802,
      "grad_norm": 0.5584668517112732,
      "learning_rate": 0.001,
      "loss": 2.4128,
      "step": 356
    },
    {
      "epoch": 0.43643031784841074,
      "grad_norm": 0.5257803201675415,
      "learning_rate": 0.001,
      "loss": 2.3762,
      "step": 357
    },
    {
      "epoch": 0.43765281173594134,
      "grad_norm": 0.5566312670707703,
      "learning_rate": 0.001,
      "loss": 2.3918,
      "step": 358
    },
    {
      "epoch": 0.4388753056234719,
      "grad_norm": 0.514863908290863,
      "learning_rate": 0.001,
      "loss": 2.3162,
      "step": 359
    },
    {
      "epoch": 0.4400977995110024,
      "grad_norm": 0.5604429841041565,
      "learning_rate": 0.001,
      "loss": 2.4515,
      "step": 360
    },
    {
      "epoch": 0.441320293398533,
      "grad_norm": 0.5394683480262756,
      "learning_rate": 0.001,
      "loss": 2.3196,
      "step": 361
    },
    {
      "epoch": 0.44254278728606355,
      "grad_norm": 0.6114275455474854,
      "learning_rate": 0.001,
      "loss": 2.3253,
      "step": 362
    },
    {
      "epoch": 0.44376528117359415,
      "grad_norm": 0.55677330493927,
      "learning_rate": 0.001,
      "loss": 2.3898,
      "step": 363
    },
    {
      "epoch": 0.4449877750611247,
      "grad_norm": 0.6122755408287048,
      "learning_rate": 0.001,
      "loss": 2.4332,
      "step": 364
    },
    {
      "epoch": 0.44621026894865523,
      "grad_norm": 0.5454927682876587,
      "learning_rate": 0.001,
      "loss": 2.3705,
      "step": 365
    },
    {
      "epoch": 0.4474327628361858,
      "grad_norm": 0.6068079471588135,
      "learning_rate": 0.001,
      "loss": 2.2946,
      "step": 366
    },
    {
      "epoch": 0.44865525672371637,
      "grad_norm": 0.542259693145752,
      "learning_rate": 0.001,
      "loss": 2.3949,
      "step": 367
    },
    {
      "epoch": 0.44987775061124696,
      "grad_norm": 0.5911988615989685,
      "learning_rate": 0.001,
      "loss": 2.479,
      "step": 368
    },
    {
      "epoch": 0.4511002444987775,
      "grad_norm": 0.5436906814575195,
      "learning_rate": 0.001,
      "loss": 2.4562,
      "step": 369
    },
    {
      "epoch": 0.45232273838630804,
      "grad_norm": 0.6720252633094788,
      "learning_rate": 0.001,
      "loss": 2.3401,
      "step": 370
    },
    {
      "epoch": 0.45354523227383864,
      "grad_norm": 0.594752848148346,
      "learning_rate": 0.001,
      "loss": 2.3364,
      "step": 371
    },
    {
      "epoch": 0.4547677261613692,
      "grad_norm": 0.5525220036506653,
      "learning_rate": 0.001,
      "loss": 2.3593,
      "step": 372
    },
    {
      "epoch": 0.4559902200488998,
      "grad_norm": 0.5798346996307373,
      "learning_rate": 0.001,
      "loss": 2.3677,
      "step": 373
    },
    {
      "epoch": 0.4572127139364303,
      "grad_norm": 0.5627158880233765,
      "learning_rate": 0.001,
      "loss": 2.3556,
      "step": 374
    },
    {
      "epoch": 0.45843520782396086,
      "grad_norm": 0.5486916303634644,
      "learning_rate": 0.001,
      "loss": 2.3581,
      "step": 375
    },
    {
      "epoch": 0.45965770171149145,
      "grad_norm": 0.515575110912323,
      "learning_rate": 0.001,
      "loss": 2.2755,
      "step": 376
    },
    {
      "epoch": 0.460880195599022,
      "grad_norm": 0.5921189785003662,
      "learning_rate": 0.001,
      "loss": 2.3344,
      "step": 377
    },
    {
      "epoch": 0.4621026894865526,
      "grad_norm": 0.5230167508125305,
      "learning_rate": 0.001,
      "loss": 2.3135,
      "step": 378
    },
    {
      "epoch": 0.4633251833740831,
      "grad_norm": 0.5792455673217773,
      "learning_rate": 0.001,
      "loss": 2.3844,
      "step": 379
    },
    {
      "epoch": 0.46454767726161367,
      "grad_norm": 0.5649365186691284,
      "learning_rate": 0.001,
      "loss": 2.4243,
      "step": 380
    },
    {
      "epoch": 0.46577017114914426,
      "grad_norm": 0.5909812450408936,
      "learning_rate": 0.001,
      "loss": 2.4008,
      "step": 381
    },
    {
      "epoch": 0.4669926650366748,
      "grad_norm": 0.6039749383926392,
      "learning_rate": 0.001,
      "loss": 2.4104,
      "step": 382
    },
    {
      "epoch": 0.4682151589242054,
      "grad_norm": 0.6151771545410156,
      "learning_rate": 0.001,
      "loss": 2.4914,
      "step": 383
    },
    {
      "epoch": 0.46943765281173594,
      "grad_norm": 0.6254723072052002,
      "learning_rate": 0.001,
      "loss": 2.3747,
      "step": 384
    },
    {
      "epoch": 0.4706601466992665,
      "grad_norm": 0.5194308161735535,
      "learning_rate": 0.001,
      "loss": 2.3765,
      "step": 385
    },
    {
      "epoch": 0.4718826405867971,
      "grad_norm": 0.5091795325279236,
      "learning_rate": 0.001,
      "loss": 2.4028,
      "step": 386
    },
    {
      "epoch": 0.4731051344743276,
      "grad_norm": 0.5679914951324463,
      "learning_rate": 0.001,
      "loss": 2.4091,
      "step": 387
    },
    {
      "epoch": 0.4743276283618582,
      "grad_norm": 0.6551725268363953,
      "learning_rate": 0.001,
      "loss": 2.4814,
      "step": 388
    },
    {
      "epoch": 0.47555012224938875,
      "grad_norm": 0.5748315453529358,
      "learning_rate": 0.001,
      "loss": 2.4518,
      "step": 389
    },
    {
      "epoch": 0.4767726161369193,
      "grad_norm": 0.5252829194068909,
      "learning_rate": 0.001,
      "loss": 2.5021,
      "step": 390
    },
    {
      "epoch": 0.4779951100244499,
      "grad_norm": 0.6099783182144165,
      "learning_rate": 0.001,
      "loss": 2.4139,
      "step": 391
    },
    {
      "epoch": 0.4792176039119804,
      "grad_norm": 0.5559167265892029,
      "learning_rate": 0.001,
      "loss": 2.3817,
      "step": 392
    },
    {
      "epoch": 0.480440097799511,
      "grad_norm": 0.5587165951728821,
      "learning_rate": 0.001,
      "loss": 2.4002,
      "step": 393
    },
    {
      "epoch": 0.48166259168704156,
      "grad_norm": 0.5515249967575073,
      "learning_rate": 0.001,
      "loss": 2.4173,
      "step": 394
    },
    {
      "epoch": 0.4828850855745721,
      "grad_norm": 0.5275049209594727,
      "learning_rate": 0.001,
      "loss": 2.4235,
      "step": 395
    },
    {
      "epoch": 0.4841075794621027,
      "grad_norm": 0.519877552986145,
      "learning_rate": 0.001,
      "loss": 2.3802,
      "step": 396
    },
    {
      "epoch": 0.48533007334963324,
      "grad_norm": 0.5368367433547974,
      "learning_rate": 0.001,
      "loss": 2.4164,
      "step": 397
    },
    {
      "epoch": 0.48655256723716384,
      "grad_norm": 0.5479859113693237,
      "learning_rate": 0.001,
      "loss": 2.4451,
      "step": 398
    },
    {
      "epoch": 0.4877750611246944,
      "grad_norm": 0.524018406867981,
      "learning_rate": 0.001,
      "loss": 2.2618,
      "step": 399
    },
    {
      "epoch": 0.4889975550122249,
      "grad_norm": 0.5853855609893799,
      "learning_rate": 0.001,
      "loss": 2.4095,
      "step": 400
    },
    {
      "epoch": 0.4902200488997555,
      "grad_norm": 0.5248914957046509,
      "learning_rate": 0.001,
      "loss": 2.3441,
      "step": 401
    },
    {
      "epoch": 0.49144254278728605,
      "grad_norm": 0.5809511542320251,
      "learning_rate": 0.001,
      "loss": 2.4463,
      "step": 402
    },
    {
      "epoch": 0.49266503667481665,
      "grad_norm": 0.5517978668212891,
      "learning_rate": 0.001,
      "loss": 2.4001,
      "step": 403
    },
    {
      "epoch": 0.4938875305623472,
      "grad_norm": 0.5410706400871277,
      "learning_rate": 0.001,
      "loss": 2.3972,
      "step": 404
    },
    {
      "epoch": 0.49511002444987773,
      "grad_norm": 0.5287546515464783,
      "learning_rate": 0.001,
      "loss": 2.3371,
      "step": 405
    },
    {
      "epoch": 0.4963325183374083,
      "grad_norm": 0.5337257981300354,
      "learning_rate": 0.001,
      "loss": 2.3417,
      "step": 406
    },
    {
      "epoch": 0.49755501222493886,
      "grad_norm": 0.5736370086669922,
      "learning_rate": 0.001,
      "loss": 2.3409,
      "step": 407
    },
    {
      "epoch": 0.49877750611246946,
      "grad_norm": 0.5621830224990845,
|
"learning_rate": 0.001, |
|
"loss": 2.3743, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5526315569877625, |
|
"learning_rate": 0.001, |
|
"loss": 2.4956, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.5012224938875306, |
|
"grad_norm": 0.513525128364563, |
|
"learning_rate": 0.001, |
|
"loss": 2.3512, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5012224938875306, |
|
"eval_loss": 2.371687889099121, |
|
"eval_runtime": 44.3741, |
|
"eval_samples_per_second": 235.836, |
|
"eval_steps_per_second": 29.499, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5024449877750611, |
|
"grad_norm": 0.5600430965423584, |
|
"learning_rate": 0.001, |
|
"loss": 2.3617, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.5036674816625917, |
|
"grad_norm": 0.5299474000930786, |
|
"learning_rate": 0.001, |
|
"loss": 2.3518, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.5048899755501223, |
|
"grad_norm": 0.6220374703407288, |
|
"learning_rate": 0.001, |
|
"loss": 2.4092, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.5061124694376528, |
|
"grad_norm": 0.5975641012191772, |
|
"learning_rate": 0.001, |
|
"loss": 2.4272, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.5073349633251834, |
|
"grad_norm": 0.57210773229599, |
|
"learning_rate": 0.001, |
|
"loss": 2.4094, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.508557457212714, |
|
"grad_norm": 0.5716942548751831, |
|
"learning_rate": 0.001, |
|
"loss": 2.3184, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5097799511002445, |
|
"grad_norm": 0.5893986225128174, |
|
"learning_rate": 0.001, |
|
"loss": 2.3627, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.511002444987775, |
|
"grad_norm": 0.5796381235122681, |
|
"learning_rate": 0.001, |
|
"loss": 2.4797, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5122249388753056, |
|
"grad_norm": 0.6783269047737122, |
|
"learning_rate": 0.001, |
|
"loss": 2.4106, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.5134474327628362, |
|
"grad_norm": 0.6270769238471985, |
|
"learning_rate": 0.001, |
|
"loss": 2.4528, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5146699266503667, |
|
"grad_norm": 0.5378878712654114, |
|
"learning_rate": 0.001, |
|
"loss": 2.3958, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5158924205378973, |
|
"grad_norm": 0.6691122651100159, |
|
"learning_rate": 0.001, |
|
"loss": 2.3864, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.5171149144254279, |
|
"grad_norm": 0.6228435039520264, |
|
"learning_rate": 0.001, |
|
"loss": 2.4361, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5183374083129584, |
|
"grad_norm": 0.5937249660491943, |
|
"learning_rate": 0.001, |
|
"loss": 2.403, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.519559902200489, |
|
"grad_norm": 0.7730590105056763, |
|
"learning_rate": 0.001, |
|
"loss": 2.3516, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5207823960880196, |
|
"grad_norm": 0.5874658823013306, |
|
"learning_rate": 0.001, |
|
"loss": 2.3768, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.5220048899755502, |
|
"grad_norm": 0.610345184803009, |
|
"learning_rate": 0.001, |
|
"loss": 2.3741, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.5232273838630807, |
|
"grad_norm": 0.6153433322906494, |
|
"learning_rate": 0.001, |
|
"loss": 2.3821, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.5244498777506112, |
|
"grad_norm": 0.553920567035675, |
|
"learning_rate": 0.001, |
|
"loss": 2.4033, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.5256723716381418, |
|
"grad_norm": 0.6375141143798828, |
|
"learning_rate": 0.001, |
|
"loss": 2.3904, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.5268948655256723, |
|
"grad_norm": 0.6468214988708496, |
|
"learning_rate": 0.001, |
|
"loss": 2.422, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.5281173594132029, |
|
"grad_norm": 0.502112090587616, |
|
"learning_rate": 0.001, |
|
"loss": 2.4082, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.5293398533007335, |
|
"grad_norm": 0.63483726978302, |
|
"learning_rate": 0.001, |
|
"loss": 2.3971, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.530562347188264, |
|
"grad_norm": 0.6903530359268188, |
|
"learning_rate": 0.001, |
|
"loss": 2.5567, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.5317848410757946, |
|
"grad_norm": 0.590471088886261, |
|
"learning_rate": 0.001, |
|
"loss": 2.3714, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.5330073349633252, |
|
"grad_norm": 0.5665633678436279, |
|
"learning_rate": 0.001, |
|
"loss": 2.51, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.5342298288508558, |
|
"grad_norm": 0.5448029637336731, |
|
"learning_rate": 0.001, |
|
"loss": 2.4756, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.5354523227383863, |
|
"grad_norm": 0.5371047258377075, |
|
"learning_rate": 0.001, |
|
"loss": 2.4703, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.5366748166259169, |
|
"grad_norm": 0.6409993171691895, |
|
"learning_rate": 0.001, |
|
"loss": 2.5327, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.5378973105134475, |
|
"grad_norm": 0.5884791612625122, |
|
"learning_rate": 0.001, |
|
"loss": 2.4883, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.539119804400978, |
|
"grad_norm": 0.5778174996376038, |
|
"learning_rate": 0.001, |
|
"loss": 2.3682, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.5403422982885085, |
|
"grad_norm": 0.6152352094650269, |
|
"learning_rate": 0.001, |
|
"loss": 2.5577, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.5415647921760391, |
|
"grad_norm": 0.5729324221611023, |
|
"learning_rate": 0.001, |
|
"loss": 2.4113, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.5427872860635696, |
|
"grad_norm": 0.5118098258972168, |
|
"learning_rate": 0.001, |
|
"loss": 2.3661, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.5440097799511002, |
|
"grad_norm": 0.5968663692474365, |
|
"learning_rate": 0.001, |
|
"loss": 2.3956, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.5452322738386308, |
|
"grad_norm": 0.5854983329772949, |
|
"learning_rate": 0.001, |
|
"loss": 2.3685, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.5464547677261614, |
|
"grad_norm": 0.5025555491447449, |
|
"learning_rate": 0.001, |
|
"loss": 2.4087, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.5476772616136919, |
|
"grad_norm": 0.5501389503479004, |
|
"learning_rate": 0.001, |
|
"loss": 2.464, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.5488997555012225, |
|
"grad_norm": 0.5119859576225281, |
|
"learning_rate": 0.001, |
|
"loss": 2.4211, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.5501222493887531, |
|
"grad_norm": 0.5670396089553833, |
|
"learning_rate": 0.001, |
|
"loss": 2.3828, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.5513447432762836, |
|
"grad_norm": 0.6066712141036987, |
|
"learning_rate": 0.001, |
|
"loss": 2.398, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.5525672371638142, |
|
"grad_norm": 0.6207214593887329, |
|
"learning_rate": 0.001, |
|
"loss": 2.4942, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.5537897310513448, |
|
"grad_norm": 0.5594907402992249, |
|
"learning_rate": 0.001, |
|
"loss": 2.4766, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.5550122249388753, |
|
"grad_norm": 0.5723139643669128, |
|
"learning_rate": 0.001, |
|
"loss": 2.3692, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.5562347188264058, |
|
"grad_norm": 0.5299721956253052, |
|
"learning_rate": 0.001, |
|
"loss": 2.3481, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.5574572127139364, |
|
"grad_norm": 0.5901131629943848, |
|
"learning_rate": 0.001, |
|
"loss": 2.4067, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.558679706601467, |
|
"grad_norm": 0.6540071368217468, |
|
"learning_rate": 0.001, |
|
"loss": 2.4758, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.5599022004889975, |
|
"grad_norm": 0.6255263686180115, |
|
"learning_rate": 0.001, |
|
"loss": 2.4349, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.5611246943765281, |
|
"grad_norm": 0.6452546715736389, |
|
"learning_rate": 0.001, |
|
"loss": 2.3834, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.5623471882640587, |
|
"grad_norm": 0.6318612098693848, |
|
"learning_rate": 0.001, |
|
"loss": 2.38, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5635696821515892, |
|
"grad_norm": 0.518966794013977, |
|
"learning_rate": 0.001, |
|
"loss": 2.3138, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.5647921760391198, |
|
"grad_norm": 0.6043367385864258, |
|
"learning_rate": 0.001, |
|
"loss": 2.512, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.5660146699266504, |
|
"grad_norm": 0.49966487288475037, |
|
"learning_rate": 0.001, |
|
"loss": 2.3268, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.5672371638141809, |
|
"grad_norm": 0.5622513890266418, |
|
"learning_rate": 0.001, |
|
"loss": 2.4228, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.5684596577017115, |
|
"grad_norm": 0.539992094039917, |
|
"learning_rate": 0.001, |
|
"loss": 2.4627, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5696821515892421, |
|
"grad_norm": 0.5305809378623962, |
|
"learning_rate": 0.001, |
|
"loss": 2.3507, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.5709046454767727, |
|
"grad_norm": 0.5940194725990295, |
|
"learning_rate": 0.001, |
|
"loss": 2.3989, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.5721271393643031, |
|
"grad_norm": 0.765998125076294, |
|
"learning_rate": 0.001, |
|
"loss": 2.3981, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.5733496332518337, |
|
"grad_norm": 0.6094310283660889, |
|
"learning_rate": 0.001, |
|
"loss": 2.451, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.5745721271393643, |
|
"grad_norm": 0.523844838142395, |
|
"learning_rate": 0.001, |
|
"loss": 2.3669, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5757946210268948, |
|
"grad_norm": 0.6060677170753479, |
|
"learning_rate": 0.001, |
|
"loss": 2.3594, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.5770171149144254, |
|
"grad_norm": 0.6068428158760071, |
|
"learning_rate": 0.001, |
|
"loss": 2.3889, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.578239608801956, |
|
"grad_norm": 0.6003894805908203, |
|
"learning_rate": 0.001, |
|
"loss": 2.3811, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.5794621026894865, |
|
"grad_norm": 0.6235598921775818, |
|
"learning_rate": 0.001, |
|
"loss": 2.4453, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.5806845965770171, |
|
"grad_norm": 0.6744781732559204, |
|
"learning_rate": 0.001, |
|
"loss": 2.3894, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5819070904645477, |
|
"grad_norm": 0.6238363981246948, |
|
"learning_rate": 0.001, |
|
"loss": 2.3208, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.5831295843520783, |
|
"grad_norm": 0.6921210289001465, |
|
"learning_rate": 0.001, |
|
"loss": 2.4008, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.5843520782396088, |
|
"grad_norm": 0.6112361550331116, |
|
"learning_rate": 0.001, |
|
"loss": 2.5079, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.5855745721271394, |
|
"grad_norm": 0.6528260707855225, |
|
"learning_rate": 0.001, |
|
"loss": 2.4417, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.58679706601467, |
|
"grad_norm": 0.5697323679924011, |
|
"learning_rate": 0.001, |
|
"loss": 2.3566, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5880195599022005, |
|
"grad_norm": 0.5770347714424133, |
|
"learning_rate": 0.001, |
|
"loss": 2.3936, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.589242053789731, |
|
"grad_norm": 0.620617151260376, |
|
"learning_rate": 0.001, |
|
"loss": 2.3786, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.5904645476772616, |
|
"grad_norm": 0.5476077198982239, |
|
"learning_rate": 0.001, |
|
"loss": 2.439, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.5916870415647921, |
|
"grad_norm": 0.5232786536216736, |
|
"learning_rate": 0.001, |
|
"loss": 2.38, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.5929095354523227, |
|
"grad_norm": 0.552232563495636, |
|
"learning_rate": 0.001, |
|
"loss": 2.4613, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.5941320293398533, |
|
"grad_norm": 0.5390328764915466, |
|
"learning_rate": 0.001, |
|
"loss": 2.3949, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.5953545232273839, |
|
"grad_norm": 0.5667278170585632, |
|
"learning_rate": 0.001, |
|
"loss": 2.3934, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.5965770171149144, |
|
"grad_norm": 0.5378979444503784, |
|
"learning_rate": 0.001, |
|
"loss": 2.328, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.597799511002445, |
|
"grad_norm": 0.520291805267334, |
|
"learning_rate": 0.001, |
|
"loss": 2.4101, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.5990220048899756, |
|
"grad_norm": 0.5654510855674744, |
|
"learning_rate": 0.001, |
|
"loss": 2.4033, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.6002444987775061, |
|
"grad_norm": 0.5622351765632629, |
|
"learning_rate": 0.001, |
|
"loss": 2.4365, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.6014669926650367, |
|
"grad_norm": 0.5403612852096558, |
|
"learning_rate": 0.001, |
|
"loss": 2.3842, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.6026894865525673, |
|
"grad_norm": 0.5934503674507141, |
|
"learning_rate": 0.001, |
|
"loss": 2.4396, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.6039119804400978, |
|
"grad_norm": 0.5867724418640137, |
|
"learning_rate": 0.001, |
|
"loss": 2.4173, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.6051344743276283, |
|
"grad_norm": 0.5405483841896057, |
|
"learning_rate": 0.001, |
|
"loss": 2.4576, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.6063569682151589, |
|
"grad_norm": 0.5979167819023132, |
|
"learning_rate": 0.001, |
|
"loss": 2.4444, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.6075794621026895, |
|
"grad_norm": 0.55023193359375, |
|
"learning_rate": 0.001, |
|
"loss": 2.4416, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.60880195599022, |
|
"grad_norm": 0.5640861392021179, |
|
"learning_rate": 0.001, |
|
"loss": 2.4057, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.6100244498777506, |
|
"grad_norm": 0.6123327016830444, |
|
"learning_rate": 0.001, |
|
"loss": 2.3173, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.6112469437652812, |
|
"grad_norm": 0.5508670806884766, |
|
"learning_rate": 0.001, |
|
"loss": 2.3907, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6124694376528117, |
|
"grad_norm": 0.5583838224411011, |
|
"learning_rate": 0.001, |
|
"loss": 2.3548, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.6136919315403423, |
|
"grad_norm": 0.5637821555137634, |
|
"learning_rate": 0.001, |
|
"loss": 2.3176, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.6149144254278729, |
|
"grad_norm": 0.5943612456321716, |
|
"learning_rate": 0.001, |
|
"loss": 2.388, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.6161369193154034, |
|
"grad_norm": 0.717356264591217, |
|
"learning_rate": 0.001, |
|
"loss": 2.3649, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.617359413202934, |
|
"grad_norm": 0.5627100467681885, |
|
"learning_rate": 0.001, |
|
"loss": 2.3938, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.6185819070904646, |
|
"grad_norm": 0.5377146601676941, |
|
"learning_rate": 0.001, |
|
"loss": 2.369, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.6198044009779952, |
|
"grad_norm": 0.5633614659309387, |
|
"learning_rate": 0.001, |
|
"loss": 2.4127, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.6210268948655256, |
|
"grad_norm": 0.5996368527412415, |
|
"learning_rate": 0.001, |
|
"loss": 2.3729, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.6222493887530562, |
|
"grad_norm": 0.6659756898880005, |
|
"learning_rate": 0.001, |
|
"loss": 2.3896, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.6234718826405868, |
|
"grad_norm": 0.4918464124202728, |
|
"learning_rate": 0.001, |
|
"loss": 2.3871, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.6246943765281173, |
|
"grad_norm": 0.5441576838493347, |
|
"learning_rate": 0.001, |
|
"loss": 2.3487, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.6259168704156479, |
|
"grad_norm": 0.5248016715049744, |
|
"learning_rate": 0.001, |
|
"loss": 2.4756, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.6271393643031785, |
|
"grad_norm": 0.5138648152351379, |
|
"learning_rate": 0.001, |
|
"loss": 2.4046, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.628361858190709, |
|
"grad_norm": 0.5615628361701965, |
|
"learning_rate": 0.001, |
|
"loss": 2.434, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.6295843520782396, |
|
"grad_norm": 0.5971861481666565, |
|
"learning_rate": 0.001, |
|
"loss": 2.3772, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.6308068459657702, |
|
"grad_norm": 0.6121475100517273, |
|
"learning_rate": 0.001, |
|
"loss": 2.3834, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.6320293398533008, |
|
"grad_norm": 0.5112194418907166, |
|
"learning_rate": 0.001, |
|
"loss": 2.3088, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.6332518337408313, |
|
"grad_norm": 0.5220752358436584, |
|
"learning_rate": 0.001, |
|
"loss": 2.3559, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.6344743276283619, |
|
"grad_norm": 0.5204265713691711, |
|
"learning_rate": 0.001, |
|
"loss": 2.496, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.6356968215158925, |
|
"grad_norm": 0.5493486523628235, |
|
"learning_rate": 0.001, |
|
"loss": 2.3303, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.636919315403423, |
|
"grad_norm": 0.5837205052375793, |
|
"learning_rate": 0.001, |
|
"loss": 2.3196, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.6381418092909535, |
|
"grad_norm": 0.5629017353057861, |
|
"learning_rate": 0.001, |
|
"loss": 2.458, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.6393643031784841, |
|
"grad_norm": 0.5637010931968689, |
|
"learning_rate": 0.001, |
|
"loss": 2.3734, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.6405867970660146, |
|
"grad_norm": 0.5641871094703674, |
|
"learning_rate": 0.001, |
|
"loss": 2.353, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.6418092909535452, |
|
"grad_norm": 0.5671785473823547, |
|
"learning_rate": 0.001, |
|
"loss": 2.4907, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.6430317848410758, |
|
"grad_norm": 0.5508561730384827, |
|
"learning_rate": 0.001, |
|
"loss": 2.3804, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.6442542787286064, |
|
"grad_norm": 0.5103657245635986, |
|
"learning_rate": 0.001, |
|
"loss": 2.4565, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.6454767726161369, |
|
"grad_norm": 0.5293738842010498, |
|
"learning_rate": 0.001, |
|
"loss": 2.4496, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.6466992665036675, |
|
"grad_norm": 0.5606434941291809, |
|
"learning_rate": 0.001, |
|
"loss": 2.2755, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.6479217603911981, |
|
"grad_norm": 0.5806160569190979, |
|
"learning_rate": 0.001, |
|
"loss": 2.4226, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.6491442542787286, |
|
"grad_norm": 0.6059505343437195, |
|
"learning_rate": 0.001, |
|
"loss": 2.3963, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.6503667481662592, |
|
"grad_norm": 0.6261471509933472, |
|
"learning_rate": 0.001, |
|
"loss": 2.3978, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.6515892420537898, |
|
"grad_norm": 0.5536214113235474, |
|
"learning_rate": 0.001, |
|
"loss": 2.3348, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.6528117359413202, |
|
"grad_norm": 0.5901937484741211, |
|
"learning_rate": 0.001, |
|
"loss": 2.4816, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.6540342298288508, |
|
"grad_norm": 0.5521092414855957, |
|
"learning_rate": 0.001, |
|
"loss": 2.3598, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.6552567237163814, |
|
"grad_norm": 0.526321291923523, |
|
"learning_rate": 0.001, |
|
"loss": 2.358, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.656479217603912, |
|
"grad_norm": 0.5706471800804138, |
|
"learning_rate": 0.001, |
|
"loss": 2.324, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.6577017114914425, |
|
"grad_norm": 0.5586467981338501, |
|
"learning_rate": 0.001, |
|
"loss": 2.3881, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.6589242053789731, |
|
"grad_norm": 0.5118228793144226, |
|
"learning_rate": 0.001, |
|
"loss": 2.3338, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.6601466992665037, |
|
"grad_norm": 0.6145654320716858, |
|
"learning_rate": 0.001, |
|
"loss": 2.3243, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.6613691931540342, |
|
"grad_norm": 0.5887059569358826, |
|
"learning_rate": 0.001, |
|
"loss": 2.335, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.6625916870415648, |
|
"grad_norm": 0.5634593367576599, |
|
"learning_rate": 0.001, |
|
"loss": 2.3915, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.6638141809290954, |
|
"grad_norm": 0.5326385498046875, |
|
"learning_rate": 0.001, |
|
"loss": 2.3112, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.6650366748166259, |
|
"grad_norm": 0.5700005888938904, |
|
"learning_rate": 0.001, |
|
"loss": 2.3378, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.6662591687041565, |
|
"grad_norm": 0.5960122346878052, |
|
"learning_rate": 0.001, |
|
"loss": 2.4213, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.6674816625916871, |
|
"grad_norm": 0.6229601502418518, |
|
"learning_rate": 0.001, |
|
"loss": 2.4157, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.6687041564792175, |
|
"grad_norm": 0.588280975818634, |
|
"learning_rate": 0.001, |
|
"loss": 2.3102, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.6699266503667481, |
|
"grad_norm": 0.509946346282959, |
|
"learning_rate": 0.001, |
|
"loss": 2.4242, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.6711491442542787, |
|
"grad_norm": 0.5596705675125122, |
|
"learning_rate": 0.001, |
|
"loss": 2.4396, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.6723716381418093, |
|
"grad_norm": 0.5317002534866333, |
|
"learning_rate": 0.001, |
|
"loss": 2.3731, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.6735941320293398, |
|
"grad_norm": 0.5407710671424866, |
|
"learning_rate": 0.001, |
|
"loss": 2.3861, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.6748166259168704, |
|
"grad_norm": 0.5366936922073364, |
|
"learning_rate": 0.001, |
|
"loss": 2.3498, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.676039119804401, |
|
"grad_norm": 0.5394434332847595, |
|
"learning_rate": 0.001, |
|
"loss": 2.4139, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.6772616136919315, |
|
"grad_norm": 0.6320561766624451, |
|
"learning_rate": 0.001, |
|
"loss": 2.3204, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.6784841075794621, |
|
"grad_norm": 0.5851796865463257, |
|
"learning_rate": 0.001, |
|
"loss": 2.5296, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.6797066014669927, |
|
"grad_norm": 0.7752942442893982, |
|
"learning_rate": 0.001, |
|
"loss": 2.3896, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.6809290953545232, |
|
"grad_norm": 0.6305093765258789, |
|
"learning_rate": 0.001, |
|
"loss": 2.4496, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.6821515892420538, |
|
"grad_norm": 0.563012957572937, |
|
"learning_rate": 0.001, |
|
"loss": 2.3887, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.6833740831295844, |
|
"grad_norm": 0.676686704158783, |
|
"learning_rate": 0.001, |
|
"loss": 2.4618, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.684596577017115, |
|
"grad_norm": 0.5765560269355774, |
|
"learning_rate": 0.001, |
|
"loss": 2.3942, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.6858190709046454, |
|
"grad_norm": 0.5478991866111755, |
|
"learning_rate": 0.001, |
|
"loss": 2.4521, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.687041564792176, |
|
"grad_norm": 0.6409326791763306, |
|
"learning_rate": 0.001, |
|
"loss": 2.5013, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.6882640586797066, |
|
"grad_norm": 0.49936145544052124, |
|
"learning_rate": 0.001, |
|
"loss": 2.3794, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.6894865525672371, |
|
"grad_norm": 0.5707321763038635, |
|
"learning_rate": 0.001, |
|
"loss": 2.3323, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.6907090464547677, |
|
"grad_norm": 0.5072236657142639, |
|
"learning_rate": 0.001, |
|
"loss": 2.4768, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.6919315403422983, |
|
"grad_norm": 0.5288402438163757, |
|
"learning_rate": 0.001, |
|
"loss": 2.227, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.6931540342298288, |
|
"grad_norm": 0.5510168671607971, |
|
"learning_rate": 0.001, |
|
"loss": 2.4368, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.6943765281173594, |
|
"grad_norm": 0.5543464422225952, |
|
"learning_rate": 0.001, |
|
"loss": 2.5117, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.69559902200489, |
|
"grad_norm": 0.6391542553901672, |
|
"learning_rate": 0.001, |
|
"loss": 2.3455, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.6968215158924206, |
|
"grad_norm": 0.5454332232475281, |
|
"learning_rate": 0.001, |
|
"loss": 2.4398, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.6980440097799511, |
|
"grad_norm": 0.5730705261230469, |
|
"learning_rate": 0.001, |
|
"loss": 2.4693, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.6992665036674817, |
|
"grad_norm": 0.6171644330024719, |
|
"learning_rate": 0.001, |
|
"loss": 2.5105, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.7004889975550123, |
|
"grad_norm": 0.5583361387252808, |
|
"learning_rate": 0.001, |
|
"loss": 2.3169, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.7017114914425427, |
|
"grad_norm": 0.7579666972160339, |
|
"learning_rate": 0.001, |
|
"loss": 2.4683, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.7029339853300733, |
|
"grad_norm": 0.5713133215904236, |
|
"learning_rate": 0.001, |
|
"loss": 2.3683, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.7041564792176039, |
|
"grad_norm": 0.5870834589004517, |
|
"learning_rate": 0.001, |
|
"loss": 2.4675, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.7053789731051344, |
|
"grad_norm": 0.631624162197113, |
|
"learning_rate": 0.001, |
|
"loss": 2.3634, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.706601466992665, |
|
"grad_norm": 0.6153734922409058, |
|
"learning_rate": 0.001, |
|
"loss": 2.3948, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.7078239608801956, |
|
"grad_norm": 0.5715532302856445, |
|
"learning_rate": 0.001, |
|
"loss": 2.4551, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.7090464547677262, |
|
"grad_norm": 0.5857019424438477, |
|
"learning_rate": 0.001, |
|
"loss": 2.4935, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.7102689486552567, |
|
"grad_norm": 0.6221391558647156, |
|
"learning_rate": 0.001, |
|
"loss": 2.4721, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.7114914425427873, |
|
"grad_norm": 0.6044909358024597, |
|
"learning_rate": 0.001, |
|
"loss": 2.2741, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.7127139364303179, |
|
"grad_norm": 0.6020300388336182, |
|
"learning_rate": 0.001, |
|
"loss": 2.3917, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.7139364303178484, |
|
"grad_norm": 0.5730435252189636, |
|
"learning_rate": 0.001, |
|
"loss": 2.4477, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.715158924205379, |
|
"grad_norm": 0.5461198687553406, |
|
"learning_rate": 0.001, |
|
"loss": 2.4284, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.7163814180929096, |
|
"grad_norm": 0.5864646434783936, |
|
"learning_rate": 0.001, |
|
"loss": 2.3605, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.71760391198044, |
|
"grad_norm": 0.6093571782112122, |
|
"learning_rate": 0.001, |
|
"loss": 2.4669, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.7188264058679706, |
|
"grad_norm": 0.6475409865379333, |
|
"learning_rate": 0.001, |
|
"loss": 2.4698, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.7200488997555012, |
|
"grad_norm": 0.6631804704666138, |
|
"learning_rate": 0.001, |
|
"loss": 2.3759, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.7212713936430318, |
|
"grad_norm": 0.5505629777908325, |
|
"learning_rate": 0.001, |
|
"loss": 2.3559, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7224938875305623, |
|
"grad_norm": 0.6232132315635681, |
|
"learning_rate": 0.001, |
|
"loss": 2.3618, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.7237163814180929, |
|
"grad_norm": 0.6157463788986206, |
|
"learning_rate": 0.001, |
|
"loss": 2.3225, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.7249388753056235, |
|
"grad_norm": 0.6107804179191589, |
|
"learning_rate": 0.001, |
|
"loss": 2.4047, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.726161369193154, |
|
"grad_norm": 0.6085322499275208, |
|
"learning_rate": 0.001, |
|
"loss": 2.4066, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.7273838630806846, |
|
"grad_norm": 0.5943775177001953, |
|
"learning_rate": 0.001, |
|
"loss": 2.4326, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.7286063569682152, |
|
"grad_norm": 0.5640223026275635, |
|
"learning_rate": 0.001, |
|
"loss": 2.3126, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.7298288508557457, |
|
"grad_norm": 0.5814293026924133, |
|
"learning_rate": 0.001, |
|
"loss": 2.4832, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.7310513447432763, |
|
"grad_norm": 0.5499405264854431, |
|
"learning_rate": 0.001, |
|
"loss": 2.3934, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.7322738386308069, |
|
"grad_norm": 0.5558736324310303, |
|
"learning_rate": 0.001, |
|
"loss": 2.4986, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.7334963325183375, |
|
"grad_norm": 0.5608446598052979, |
|
"learning_rate": 0.001, |
|
"loss": 2.3605, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.7347188264058679, |
|
"grad_norm": 0.5901703238487244, |
|
"learning_rate": 0.001, |
|
"loss": 2.4288, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.7359413202933985, |
|
"grad_norm": 0.6036423444747925, |
|
"learning_rate": 0.001, |
|
"loss": 2.3991, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.7371638141809291, |
|
"grad_norm": 0.5142916440963745, |
|
"learning_rate": 0.001, |
|
"loss": 2.3906, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.7383863080684596, |
|
"grad_norm": 0.627358078956604, |
|
"learning_rate": 0.001, |
|
"loss": 2.374, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.7396088019559902, |
|
"grad_norm": 0.546642541885376, |
|
"learning_rate": 0.001, |
|
"loss": 2.3263, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.7408312958435208, |
|
"grad_norm": 0.5707128643989563, |
|
"learning_rate": 0.001, |
|
"loss": 2.3437, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.7420537897310513, |
|
"grad_norm": 0.6058228015899658, |
|
"learning_rate": 0.001, |
|
"loss": 2.4895, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.7432762836185819, |
|
"grad_norm": 0.5580143928527832, |
|
"learning_rate": 0.001, |
|
"loss": 2.3298, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.7444987775061125, |
|
"grad_norm": 0.6101015210151672, |
|
"learning_rate": 0.001, |
|
"loss": 2.3968, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.7457212713936431, |
|
"grad_norm": 0.6657527089118958, |
|
"learning_rate": 0.001, |
|
"loss": 2.447, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.7469437652811736, |
|
"grad_norm": 0.5650172829627991, |
|
"learning_rate": 0.001, |
|
"loss": 2.3942, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.7481662591687042, |
|
"grad_norm": 0.6829528212547302, |
|
"learning_rate": 0.001, |
|
"loss": 2.4355, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.7493887530562348, |
|
"grad_norm": 0.646058976650238, |
|
"learning_rate": 0.001, |
|
"loss": 2.3792, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.7506112469437652, |
|
"grad_norm": 0.6063521504402161, |
|
"learning_rate": 0.001, |
|
"loss": 2.3637, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.7518337408312958, |
|
"grad_norm": 0.5286898612976074, |
|
"learning_rate": 0.001, |
|
"loss": 2.4182, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.7518337408312958, |
|
"eval_loss": 2.344815969467163, |
|
"eval_runtime": 104.078, |
|
"eval_samples_per_second": 100.55, |
|
"eval_steps_per_second": 12.577, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.7530562347188264, |
|
"grad_norm": 0.5201755166053772, |
|
"learning_rate": 0.001, |
|
"loss": 2.3792, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.7542787286063569, |
|
"grad_norm": 0.5472811460494995, |
|
"learning_rate": 0.001, |
|
"loss": 2.3532, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.7555012224938875, |
|
"grad_norm": 0.5708293914794922, |
|
"learning_rate": 0.001, |
|
"loss": 2.4103, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.7567237163814181, |
|
"grad_norm": 0.5900188088417053, |
|
"learning_rate": 0.001, |
|
"loss": 2.4371, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.7579462102689487, |
|
"grad_norm": 0.6372197866439819, |
|
"learning_rate": 0.001, |
|
"loss": 2.3237, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.7591687041564792, |
|
"grad_norm": 0.5709636807441711, |
|
"learning_rate": 0.001, |
|
"loss": 2.3432, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.7603911980440098, |
|
"grad_norm": 0.6171260476112366, |
|
"learning_rate": 0.001, |
|
"loss": 2.3663, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.7616136919315404, |
|
"grad_norm": 0.6021854877471924, |
|
"learning_rate": 0.001, |
|
"loss": 2.3367, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.7628361858190709, |
|
"grad_norm": 0.6226273775100708, |
|
"learning_rate": 0.001, |
|
"loss": 2.336, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.7640586797066015, |
|
"grad_norm": 0.6232383251190186, |
|
"learning_rate": 0.001, |
|
"loss": 2.3983, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.7652811735941321, |
|
"grad_norm": 0.5813260674476624, |
|
"learning_rate": 0.001, |
|
"loss": 2.4898, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.7665036674816625, |
|
"grad_norm": 0.620922863483429, |
|
"learning_rate": 0.001, |
|
"loss": 2.4339, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.7677261613691931, |
|
"grad_norm": 0.5710574388504028, |
|
"learning_rate": 0.001, |
|
"loss": 2.3937, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.7689486552567237, |
|
"grad_norm": 0.5268881916999817, |
|
"learning_rate": 0.001, |
|
"loss": 2.4622, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.7701711491442543, |
|
"grad_norm": 0.5241808295249939, |
|
"learning_rate": 0.001, |
|
"loss": 2.3836, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.7713936430317848, |
|
"grad_norm": 0.5768167972564697, |
|
"learning_rate": 0.001, |
|
"loss": 2.3474, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.7726161369193154, |
|
"grad_norm": 0.5535464286804199, |
|
"learning_rate": 0.001, |
|
"loss": 2.4747, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.773838630806846, |
|
"grad_norm": 0.5237547159194946, |
|
"learning_rate": 0.001, |
|
"loss": 2.4178, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.7750611246943765, |
|
"grad_norm": 0.6961969137191772, |
|
"learning_rate": 0.001, |
|
"loss": 2.3371, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.7762836185819071, |
|
"grad_norm": 0.6474157571792603, |
|
"learning_rate": 0.001, |
|
"loss": 2.448, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.7775061124694377, |
|
"grad_norm": 0.5978342294692993, |
|
"learning_rate": 0.001, |
|
"loss": 2.3929, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.7787286063569682, |
|
"grad_norm": 0.5999522805213928, |
|
"learning_rate": 0.001, |
|
"loss": 2.3485, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.7799511002444988, |
|
"grad_norm": 0.5849292278289795, |
|
"learning_rate": 0.001, |
|
"loss": 2.487, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.7811735941320294, |
|
"grad_norm": 0.6249788999557495, |
|
"learning_rate": 0.001, |
|
"loss": 2.4578, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.78239608801956, |
|
"grad_norm": 0.6195868253707886, |
|
"learning_rate": 0.001, |
|
"loss": 2.5804, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.7836185819070904, |
|
"grad_norm": 0.5440888404846191, |
|
"learning_rate": 0.001, |
|
"loss": 2.4577, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.784841075794621, |
|
"grad_norm": 0.6838685870170593, |
|
"learning_rate": 0.001, |
|
"loss": 2.3788, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.7860635696821516, |
|
"grad_norm": 0.6188467741012573, |
|
"learning_rate": 0.001, |
|
"loss": 2.4285, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.7872860635696821, |
|
"grad_norm": 0.5791029930114746, |
|
"learning_rate": 0.001, |
|
"loss": 2.4295, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.7885085574572127, |
|
"grad_norm": 0.5324996709823608, |
|
"learning_rate": 0.001, |
|
"loss": 2.2872, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.7897310513447433, |
|
"grad_norm": 0.5715858936309814, |
|
"learning_rate": 0.001, |
|
"loss": 2.3472, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.7909535452322738, |
|
"grad_norm": 0.5822765231132507, |
|
"learning_rate": 0.001, |
|
"loss": 2.4128, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.7921760391198044, |
|
"grad_norm": 0.6102983951568604, |
|
"learning_rate": 0.001, |
|
"loss": 2.4367, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.793398533007335, |
|
"grad_norm": 0.5699944496154785, |
|
"learning_rate": 0.001, |
|
"loss": 2.4394, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.7946210268948656, |
|
"grad_norm": 0.5703781843185425, |
|
"learning_rate": 0.001, |
|
"loss": 2.3587, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.7958435207823961, |
|
"grad_norm": 0.6027846336364746, |
|
"learning_rate": 0.001, |
|
"loss": 2.4859, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.7970660146699267, |
|
"grad_norm": 0.5607686638832092, |
|
"learning_rate": 0.001, |
|
"loss": 2.3393, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.7982885085574573, |
|
"grad_norm": 0.5783531665802002, |
|
"learning_rate": 0.001, |
|
"loss": 2.4062, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.7995110024449877, |
|
"grad_norm": 0.5493980050086975, |
|
"learning_rate": 0.001, |
|
"loss": 2.3283, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.8007334963325183, |
|
"grad_norm": 0.5421778559684753, |
|
"learning_rate": 0.001, |
|
"loss": 2.4083, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.8019559902200489, |
|
"grad_norm": 0.539010763168335, |
|
"learning_rate": 0.001, |
|
"loss": 2.3952, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.8031784841075794, |
|
"grad_norm": 0.5817722082138062, |
|
"learning_rate": 0.001, |
|
"loss": 2.4124, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.80440097799511, |
|
"grad_norm": 0.5872360467910767, |
|
"learning_rate": 0.001, |
|
"loss": 2.4038, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.8056234718826406, |
|
"grad_norm": 0.6575203537940979, |
|
"learning_rate": 0.001, |
|
"loss": 2.4344, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.8068459657701712, |
|
"grad_norm": 0.5091464519500732, |
|
"learning_rate": 0.001, |
|
"loss": 2.3349, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.8080684596577017, |
|
"grad_norm": 0.5787954330444336, |
|
"learning_rate": 0.001, |
|
"loss": 2.3587, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.8092909535452323, |
|
"grad_norm": 0.6609556078910828, |
|
"learning_rate": 0.001, |
|
"loss": 2.4197, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.8105134474327629, |
|
"grad_norm": 0.5616955161094666, |
|
"learning_rate": 0.001, |
|
"loss": 2.5327, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.8117359413202934, |
|
"grad_norm": 0.5685104727745056, |
|
"learning_rate": 0.001, |
|
"loss": 2.3639, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.812958435207824, |
|
"grad_norm": 0.6164966225624084, |
|
"learning_rate": 0.001, |
|
"loss": 2.3692, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.8141809290953546, |
|
"grad_norm": 0.6559178829193115, |
|
"learning_rate": 0.001, |
|
"loss": 2.3249, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.815403422982885, |
|
"grad_norm": 0.5554947257041931, |
|
"learning_rate": 0.001, |
|
"loss": 2.3248, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.8166259168704156, |
|
"grad_norm": 0.5663809180259705, |
|
"learning_rate": 0.001, |
|
"loss": 2.4098, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.8178484107579462, |
|
"grad_norm": 0.5895231366157532, |
|
"learning_rate": 0.001, |
|
"loss": 2.3251, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.8190709046454768, |
|
"grad_norm": 0.5413554906845093, |
|
"learning_rate": 0.001, |
|
"loss": 2.3619, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.8202933985330073, |
|
"grad_norm": 0.5487458109855652, |
|
"learning_rate": 0.001, |
|
"loss": 2.404, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.8215158924205379, |
|
"grad_norm": 0.5602384805679321, |
|
"learning_rate": 0.001, |
|
"loss": 2.3715, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.8227383863080685, |
|
"grad_norm": 0.5400522947311401, |
|
"learning_rate": 0.001, |
|
"loss": 2.3271, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.823960880195599, |
|
"grad_norm": 0.5108867883682251, |
|
"learning_rate": 0.001, |
|
"loss": 2.4399, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.8251833740831296, |
|
"grad_norm": 0.5453303456306458, |
|
"learning_rate": 0.001, |
|
"loss": 2.4627, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.8264058679706602, |
|
"grad_norm": 0.6444577574729919, |
|
"learning_rate": 0.001, |
|
"loss": 2.3977, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.8276283618581907, |
|
"grad_norm": 0.5792852640151978, |
|
"learning_rate": 0.001, |
|
"loss": 2.4131, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.8288508557457213, |
|
"grad_norm": 0.738950252532959, |
|
"learning_rate": 0.001, |
|
"loss": 2.2458, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.8300733496332519, |
|
"grad_norm": 0.5346181988716125, |
|
"learning_rate": 0.001, |
|
"loss": 2.3125, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.8312958435207825, |
|
"grad_norm": 0.5854019522666931, |
|
"learning_rate": 0.001, |
|
"loss": 2.389, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.8325183374083129, |
|
"grad_norm": 0.6267631649971008, |
|
"learning_rate": 0.001, |
|
"loss": 2.5181, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.8337408312958435, |
|
"grad_norm": 0.6359632015228271, |
|
"learning_rate": 0.001, |
|
"loss": 2.3248, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.8349633251833741, |
|
"grad_norm": 0.5600630640983582, |
|
"learning_rate": 0.001, |
|
"loss": 2.3173, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.8361858190709046, |
|
"grad_norm": 0.5631881952285767, |
|
"learning_rate": 0.001, |
|
"loss": 2.4153, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.8374083129584352, |
|
"grad_norm": 0.5842933654785156, |
|
"learning_rate": 0.001, |
|
"loss": 2.4056, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.8386308068459658, |
|
"grad_norm": 0.5515209436416626, |
|
"learning_rate": 0.001, |
|
"loss": 2.373, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.8398533007334963, |
|
"grad_norm": 0.6496553421020508, |
|
"learning_rate": 0.001, |
|
"loss": 2.3827, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.8410757946210269, |
|
"grad_norm": 0.6773370504379272, |
|
"learning_rate": 0.001, |
|
"loss": 2.4588, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.8422982885085575, |
|
"grad_norm": 0.5586926937103271, |
|
"learning_rate": 0.001, |
|
"loss": 2.3336, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.843520782396088, |
|
"grad_norm": 0.576976478099823, |
|
"learning_rate": 0.001, |
|
"loss": 2.3248, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.8447432762836186, |
|
"grad_norm": 0.5717596411705017, |
|
"learning_rate": 0.001, |
|
"loss": 2.3765, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.8459657701711492, |
|
"grad_norm": 0.878169059753418, |
|
"learning_rate": 0.001, |
|
"loss": 2.3114, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.8471882640586798, |
|
"grad_norm": 0.5785524845123291, |
|
"learning_rate": 0.001, |
|
"loss": 2.4307, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.8484107579462102, |
|
"grad_norm": 0.5417555570602417, |
|
"learning_rate": 0.001, |
|
"loss": 2.3569, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.8496332518337408, |
|
"grad_norm": 0.5829872488975525, |
|
"learning_rate": 0.001, |
|
"loss": 2.3458, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.8508557457212714, |
|
"grad_norm": 0.5894091129302979, |
|
"learning_rate": 0.001, |
|
"loss": 2.2858, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.8520782396088019, |
|
"grad_norm": 0.601750910282135, |
|
"learning_rate": 0.001, |
|
"loss": 2.2999, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.8533007334963325, |
|
"grad_norm": 0.6677522659301758, |
|
"learning_rate": 0.001, |
|
"loss": 2.4061, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.8545232273838631, |
|
"grad_norm": 0.616008460521698, |
|
"learning_rate": 0.001, |
|
"loss": 2.4467, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.8557457212713936, |
|
"grad_norm": 0.6218184232711792, |
|
"learning_rate": 0.001, |
|
"loss": 2.4714, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.8569682151589242, |
|
"grad_norm": 0.6129118204116821, |
|
"learning_rate": 0.001, |
|
"loss": 2.3915, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.8581907090464548, |
|
"grad_norm": 0.5880963802337646, |
|
"learning_rate": 0.001, |
|
"loss": 2.3564, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.8594132029339854, |
|
"grad_norm": 0.5852961540222168, |
|
"learning_rate": 0.001, |
|
"loss": 2.3743, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.8606356968215159, |
|
"grad_norm": 0.595486581325531, |
|
"learning_rate": 0.001, |
|
"loss": 2.2989, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.8618581907090465, |
|
"grad_norm": 0.5296614766120911, |
|
"learning_rate": 0.001, |
|
"loss": 2.3549, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.863080684596577, |
|
"grad_norm": 0.5757498145103455, |
|
"learning_rate": 0.001, |
|
"loss": 2.4028, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.8643031784841075, |
|
"grad_norm": 0.537127673625946, |
|
"learning_rate": 0.001, |
|
"loss": 2.3948, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.8655256723716381, |
|
"grad_norm": 0.5664181113243103, |
|
"learning_rate": 0.001, |
|
"loss": 2.4288, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.8667481662591687, |
|
"grad_norm": 0.5783668160438538, |
|
"learning_rate": 0.001, |
|
"loss": 2.3615, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.8679706601466992, |
|
"grad_norm": 0.5113952159881592, |
|
"learning_rate": 0.001, |
|
"loss": 2.3418, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.8691931540342298, |
|
"grad_norm": 0.7263479232788086, |
|
"learning_rate": 0.001, |
|
"loss": 2.4439, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.8704156479217604, |
|
"grad_norm": 0.5564237833023071, |
|
"learning_rate": 0.001, |
|
"loss": 2.4239, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.871638141809291, |
|
"grad_norm": 0.6223247051239014, |
|
"learning_rate": 0.001, |
|
"loss": 2.3884, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.8728606356968215, |
|
"grad_norm": 0.592124342918396, |
|
"learning_rate": 0.001, |
|
"loss": 2.3621, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.8740831295843521, |
|
"grad_norm": 0.5872472524642944, |
|
"learning_rate": 0.001, |
|
"loss": 2.4129, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.8753056234718827, |
|
"grad_norm": 0.7134682536125183, |
|
"learning_rate": 0.001, |
|
"loss": 2.4038, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.8765281173594132, |
|
"grad_norm": 0.7206189632415771, |
|
"learning_rate": 0.001, |
|
"loss": 2.3734, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.8777506112469438, |
|
"grad_norm": 0.6114242076873779, |
|
"learning_rate": 0.001, |
|
"loss": 2.4213, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.8789731051344744, |
|
"grad_norm": 0.6361877918243408, |
|
"learning_rate": 0.001, |
|
"loss": 2.399, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.8801955990220048, |
|
"grad_norm": 0.6251677870750427, |
|
"learning_rate": 0.001, |
|
"loss": 2.422, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.8814180929095354, |
|
"grad_norm": 0.6224179863929749, |
|
"learning_rate": 0.001, |
|
"loss": 2.4036, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.882640586797066, |
|
"grad_norm": 0.6215149164199829, |
|
"learning_rate": 0.001, |
|
"loss": 2.3502, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.8838630806845966, |
|
"grad_norm": 0.6429134011268616, |
|
"learning_rate": 0.001, |
|
"loss": 2.5216, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.8850855745721271, |
|
"grad_norm": 0.5223509073257446, |
|
"learning_rate": 0.001, |
|
"loss": 2.3913, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.8863080684596577, |
|
"grad_norm": 0.6248764395713806, |
|
"learning_rate": 0.001, |
|
"loss": 2.352, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.8875305623471883, |
|
"grad_norm": 0.5972513556480408, |
|
"learning_rate": 0.001, |
|
"loss": 2.3474, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.8887530562347188, |
|
"grad_norm": 0.5999692678451538, |
|
"learning_rate": 0.001, |
|
"loss": 2.3817, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.8899755501222494, |
|
"grad_norm": 0.6036021709442139, |
|
"learning_rate": 0.001, |
|
"loss": 2.4853, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.89119804400978, |
|
"grad_norm": 0.6136165261268616, |
|
"learning_rate": 0.001, |
|
"loss": 2.4091, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.8924205378973105, |
|
"grad_norm": 0.6014193296432495, |
|
"learning_rate": 0.001, |
|
"loss": 2.3446, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.8936430317848411, |
|
"grad_norm": 0.5973393321037292, |
|
"learning_rate": 0.001, |
|
"loss": 2.2264, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.8948655256723717, |
|
"grad_norm": 0.5201925039291382, |
|
"learning_rate": 0.001, |
|
"loss": 2.3571, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.8960880195599022, |
|
"grad_norm": 0.6981174945831299, |
|
"learning_rate": 0.001, |
|
"loss": 2.3528, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.8973105134474327, |
|
"grad_norm": 0.6088836789131165, |
|
"learning_rate": 0.001, |
|
"loss": 2.3604, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.8985330073349633, |
|
"grad_norm": 0.5979176759719849, |
|
"learning_rate": 0.001, |
|
"loss": 2.3966, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.8997555012224939, |
|
"grad_norm": 0.5857772827148438, |
|
"learning_rate": 0.001, |
|
"loss": 2.4123, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.9009779951100244, |
|
"grad_norm": 0.5723506212234497, |
|
"learning_rate": 0.001, |
|
"loss": 2.3642, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.902200488997555, |
|
"grad_norm": 0.6450780034065247, |
|
"learning_rate": 0.001, |
|
"loss": 2.3892, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.9034229828850856, |
|
"grad_norm": 0.5753511190414429, |
|
"learning_rate": 0.001, |
|
"loss": 2.3683, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.9046454767726161, |
|
"grad_norm": 0.5976255536079407, |
|
"learning_rate": 0.001, |
|
"loss": 2.4115, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.9058679706601467, |
|
"grad_norm": 0.603735625743866, |
|
"learning_rate": 0.001, |
|
"loss": 2.3854, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.9070904645476773, |
|
"grad_norm": 0.5742889642715454, |
|
"learning_rate": 0.001, |
|
"loss": 2.461, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.9083129584352079, |
|
"grad_norm": 0.5430094003677368, |
|
"learning_rate": 0.001, |
|
"loss": 2.4041, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.9095354523227384, |
|
"grad_norm": 0.6404337286949158, |
|
"learning_rate": 0.001, |
|
"loss": 2.498, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.910757946210269, |
|
"grad_norm": 0.6157411932945251, |
|
"learning_rate": 0.001, |
|
"loss": 2.3859, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.9119804400977995, |
|
"grad_norm": 0.5406906008720398, |
|
"learning_rate": 0.001, |
|
"loss": 2.4062, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.91320293398533, |
|
"grad_norm": 0.5866022109985352, |
|
"learning_rate": 0.001, |
|
"loss": 2.5032, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.9144254278728606, |
|
"grad_norm": 0.5461808443069458, |
|
"learning_rate": 0.001, |
|
"loss": 2.3162, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.9156479217603912, |
|
"grad_norm": 0.6297031044960022, |
|
"learning_rate": 0.001, |
|
"loss": 2.4081, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.9168704156479217, |
|
"grad_norm": 0.5485174059867859, |
|
"learning_rate": 0.001, |
|
"loss": 2.3578, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.9180929095354523, |
|
"grad_norm": 0.6516104340553284, |
|
"learning_rate": 0.001, |
|
"loss": 2.4404, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.9193154034229829, |
|
"grad_norm": 0.6003808379173279, |
|
"learning_rate": 0.001, |
|
"loss": 2.3506, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.9205378973105135, |
|
"grad_norm": 0.583228349685669, |
|
"learning_rate": 0.001, |
|
"loss": 2.4624, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.921760391198044, |
|
"grad_norm": 0.6453356742858887, |
|
"learning_rate": 0.001, |
|
"loss": 2.4198, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.9229828850855746, |
|
"grad_norm": 0.5110669136047363, |
|
"learning_rate": 0.001, |
|
"loss": 2.3705, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.9242053789731052, |
|
"grad_norm": 0.5734696984291077, |
|
"learning_rate": 0.001, |
|
"loss": 2.3523, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.9254278728606357, |
|
"grad_norm": 0.5642021894454956, |
|
"learning_rate": 0.001, |
|
"loss": 2.2984, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.9266503667481663, |
|
"grad_norm": 0.5395249724388123, |
|
"learning_rate": 0.001, |
|
"loss": 2.2935, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.9278728606356969, |
|
"grad_norm": 0.5964503288269043, |
|
"learning_rate": 0.001, |
|
"loss": 2.4717, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.9290953545232273, |
|
"grad_norm": 0.530672550201416, |
|
"learning_rate": 0.001, |
|
"loss": 2.4752, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9303178484107579, |
|
"grad_norm": 0.5743510127067566, |
|
"learning_rate": 0.001, |
|
"loss": 2.3707, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.9315403422982885, |
|
"grad_norm": 0.6197736263275146, |
|
"learning_rate": 0.001, |
|
"loss": 2.4328, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.9327628361858191, |
|
"grad_norm": 0.5292767286300659, |
|
"learning_rate": 0.001, |
|
"loss": 2.3821, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.9339853300733496, |
|
"grad_norm": 0.6877421736717224, |
|
"learning_rate": 0.001, |
|
"loss": 2.3904, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.9352078239608802, |
|
"grad_norm": 0.5813961029052734, |
|
"learning_rate": 0.001, |
|
"loss": 2.4042, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.9364303178484108, |
|
"grad_norm": 0.6664645075798035, |
|
"learning_rate": 0.001, |
|
"loss": 2.407, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.9376528117359413, |
|
"grad_norm": 0.6104966402053833, |
|
"learning_rate": 0.001, |
|
"loss": 2.4558, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.9388753056234719, |
|
"grad_norm": 0.7187817692756653, |
|
"learning_rate": 0.001, |
|
"loss": 2.4366, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.9400977995110025, |
|
"grad_norm": 0.7083351612091064, |
|
"learning_rate": 0.001, |
|
"loss": 2.3937, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.941320293398533, |
|
"grad_norm": 0.5205307006835938, |
|
"learning_rate": 0.001, |
|
"loss": 2.333, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.9425427872860636, |
|
"grad_norm": 0.7257646918296814, |
|
"learning_rate": 0.001, |
|
"loss": 2.3803, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.9437652811735942, |
|
"grad_norm": 0.5259858965873718, |
|
"learning_rate": 0.001, |
|
"loss": 2.361, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.9449877750611247, |
|
"grad_norm": 0.7148779630661011, |
|
"learning_rate": 0.001, |
|
"loss": 2.3112, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.9462102689486552, |
|
"grad_norm": 0.644771158695221, |
|
"learning_rate": 0.001, |
|
"loss": 2.3839, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.9474327628361858, |
|
"grad_norm": 0.5165205597877502, |
|
"learning_rate": 0.001, |
|
"loss": 2.4143, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.9486552567237164, |
|
"grad_norm": 0.600974977016449, |
|
"learning_rate": 0.001, |
|
"loss": 2.389, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.9498777506112469, |
|
"grad_norm": 0.6595199108123779, |
|
"learning_rate": 0.001, |
|
"loss": 2.3992, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.9511002444987775, |
|
"grad_norm": 0.556730329990387, |
|
"learning_rate": 0.001, |
|
"loss": 2.4006, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.9523227383863081, |
|
"grad_norm": 0.6529319882392883, |
|
"learning_rate": 0.001, |
|
"loss": 2.3843, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.9535452322738386, |
|
"grad_norm": 0.5654332637786865, |
|
"learning_rate": 0.001, |
|
"loss": 2.3503, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.9547677261613692, |
|
"grad_norm": 0.5724945664405823, |
|
"learning_rate": 0.001, |
|
"loss": 2.4135, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.9559902200488998, |
|
"grad_norm": 0.5892456769943237, |
|
"learning_rate": 0.001, |
|
"loss": 2.5017, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.9572127139364304, |
|
"grad_norm": 0.6245690584182739, |
|
"learning_rate": 0.001, |
|
"loss": 2.3421, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.9584352078239609, |
|
"grad_norm": 0.5814808011054993, |
|
"learning_rate": 0.001, |
|
"loss": 2.4081, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.9596577017114915, |
|
"grad_norm": 0.6698013544082642, |
|
"learning_rate": 0.001, |
|
"loss": 2.4073, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.960880195599022, |
|
"grad_norm": 0.5794418454170227, |
|
"learning_rate": 0.001, |
|
"loss": 2.4799, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.9621026894865525, |
|
"grad_norm": 0.5708101987838745, |
|
"learning_rate": 0.001, |
|
"loss": 2.4533, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.9633251833740831, |
|
"grad_norm": 0.6697220802307129, |
|
"learning_rate": 0.001, |
|
"loss": 2.5029, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.9645476772616137, |
|
"grad_norm": 0.5752573609352112, |
|
"learning_rate": 0.001, |
|
"loss": 2.402, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.9657701711491442, |
|
"grad_norm": 0.5376598834991455, |
|
"learning_rate": 0.001, |
|
"loss": 2.3827, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.9669926650366748, |
|
"grad_norm": 0.6070631146430969, |
|
"learning_rate": 0.001, |
|
"loss": 2.4628, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.9682151589242054, |
|
"grad_norm": 0.5507974624633789, |
|
"learning_rate": 0.001, |
|
"loss": 2.4281, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.969437652811736, |
|
"grad_norm": 0.6238107085227966, |
|
"learning_rate": 0.001, |
|
"loss": 2.403, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.9706601466992665, |
|
"grad_norm": 0.5629689693450928, |
|
"learning_rate": 0.001, |
|
"loss": 2.2755, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.9718826405867971, |
|
"grad_norm": 0.5647727847099304, |
|
"learning_rate": 0.001, |
|
"loss": 2.3897, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.9731051344743277, |
|
"grad_norm": 0.5957530736923218, |
|
"learning_rate": 0.001, |
|
"loss": 2.2812, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.9743276283618582, |
|
"grad_norm": 0.5573234558105469, |
|
"learning_rate": 0.001, |
|
"loss": 2.3699, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.9755501222493888, |
|
"grad_norm": 0.5734822750091553, |
|
"learning_rate": 0.001, |
|
"loss": 2.3682, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.9767726161369193, |
|
"grad_norm": 0.5806570649147034, |
|
"learning_rate": 0.001, |
|
"loss": 2.2825, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.9779951100244498, |
|
"grad_norm": 0.6516204476356506, |
|
"learning_rate": 0.001, |
|
"loss": 2.3894, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.9792176039119804, |
|
"grad_norm": 0.6128764748573303, |
|
"learning_rate": 0.001, |
|
"loss": 2.3822, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.980440097799511, |
|
"grad_norm": 0.5696424841880798, |
|
"learning_rate": 0.001, |
|
"loss": 2.4311, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.9816625916870416, |
|
"grad_norm": 0.5665503740310669, |
|
"learning_rate": 0.001, |
|
"loss": 2.4249, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.9828850855745721, |
|
"grad_norm": 0.6103429198265076, |
|
"learning_rate": 0.001, |
|
"loss": 2.3657, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.9841075794621027, |
|
"grad_norm": 0.6361311078071594, |
|
"learning_rate": 0.001, |
|
"loss": 2.368, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.9853300733496333, |
|
"grad_norm": 0.5717911720275879, |
|
"learning_rate": 0.001, |
|
"loss": 2.3035, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.9865525672371638, |
|
"grad_norm": 0.6038938164710999, |
|
"learning_rate": 0.001, |
|
"loss": 2.4214, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.9877750611246944, |
|
"grad_norm": 0.5755453705787659, |
|
"learning_rate": 0.001, |
|
"loss": 2.3541, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.988997555012225, |
|
"grad_norm": 0.5793865323066711, |
|
"learning_rate": 0.001, |
|
"loss": 2.3962, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.9902200488997555, |
|
"grad_norm": 0.5786888599395752, |
|
"learning_rate": 0.001, |
|
"loss": 2.3091, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.991442542787286, |
|
"grad_norm": 0.5730612277984619, |
|
"learning_rate": 0.001, |
|
"loss": 2.2928, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.9926650366748166, |
|
"grad_norm": 0.5544816255569458, |
|
"learning_rate": 0.001, |
|
"loss": 2.4369, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.9938875305623472, |
|
"grad_norm": 0.6146392226219177, |
|
"learning_rate": 0.001, |
|
"loss": 2.3794, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.9951100244498777, |
|
"grad_norm": 0.5923306941986084, |
|
"learning_rate": 0.001, |
|
"loss": 2.3317, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.9963325183374083, |
|
"grad_norm": 0.531193733215332, |
|
"learning_rate": 0.001, |
|
"loss": 2.3787, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.9975550122249389, |
|
"grad_norm": 0.5377926826477051, |
|
"learning_rate": 0.001, |
|
"loss": 2.3605, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.9987775061124694, |
|
"grad_norm": 0.5309790372848511, |
|
"learning_rate": 0.001, |
|
"loss": 2.386, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.753987193107605, |
|
"learning_rate": 0.001, |
|
"loss": 2.4023, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 818, |
|
"total_flos": 5.575689983066112e+16, |
|
"train_loss": 2.4359203247685888, |
|
"train_runtime": 747.848, |
|
"train_samples_per_second": 139.932, |
|
"train_steps_per_second": 1.094 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 818, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 205, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.575689983066112e+16, |
|
"train_batch_size": 128, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|