|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9985734664764622,
  "eval_steps": 500,
  "global_step": 175,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005706134094151213,
      "grad_norm": 0.19783145189285278,
      "learning_rate": 8e-07,
      "loss": 1.7061,
      "step": 1
    },
    {
      "epoch": 0.011412268188302425,
      "grad_norm": 0.07535335421562195,
      "learning_rate": 1.6e-06,
      "loss": 1.7548,
      "step": 2
    },
    {
      "epoch": 0.017118402282453638,
      "grad_norm": 0.06703274697065353,
      "learning_rate": 2.4e-06,
      "loss": 1.7272,
      "step": 3
    },
    {
      "epoch": 0.02282453637660485,
      "grad_norm": 0.07223883271217346,
      "learning_rate": 3.2e-06,
      "loss": 1.7647,
      "step": 4
    },
    {
      "epoch": 0.028530670470756064,
      "grad_norm": 0.0680837407708168,
      "learning_rate": 4e-06,
      "loss": 1.7567,
      "step": 5
    },
    {
      "epoch": 0.034236804564907276,
      "grad_norm": 0.07516428083181381,
      "learning_rate": 4.8e-06,
      "loss": 1.7508,
      "step": 6
    },
    {
      "epoch": 0.039942938659058486,
      "grad_norm": 0.0698549747467041,
      "learning_rate": 5.6e-06,
      "loss": 1.6912,
      "step": 7
    },
    {
      "epoch": 0.0456490727532097,
      "grad_norm": 0.0676363930106163,
      "learning_rate": 6.4e-06,
      "loss": 1.7945,
      "step": 8
    },
    {
      "epoch": 0.05135520684736091,
      "grad_norm": 0.07524928450584412,
      "learning_rate": 7.2e-06,
      "loss": 1.7098,
      "step": 9
    },
    {
      "epoch": 0.05706134094151213,
      "grad_norm": 2.2318921089172363,
      "learning_rate": 8e-06,
      "loss": 1.753,
      "step": 10
    },
    {
      "epoch": 0.06276747503566334,
      "grad_norm": 0.08612281829118729,
      "learning_rate": 7.999274982094103e-06,
      "loss": 1.7445,
      "step": 11
    },
    {
      "epoch": 0.06847360912981455,
      "grad_norm": 0.10080026090145111,
      "learning_rate": 7.997100191201894e-06,
      "loss": 1.7603,
      "step": 12
    },
    {
      "epoch": 0.07417974322396577,
      "grad_norm": 0.0728079155087471,
      "learning_rate": 7.993476415704541e-06,
      "loss": 1.7145,
      "step": 13
    },
    {
      "epoch": 0.07988587731811697,
      "grad_norm": 0.09083904325962067,
      "learning_rate": 7.988404969253109e-06,
      "loss": 1.7196,
      "step": 14
    },
    {
      "epoch": 0.08559201141226819,
      "grad_norm": 0.11588940024375916,
      "learning_rate": 7.981887690292338e-06,
      "loss": 1.7525,
      "step": 15
    },
    {
      "epoch": 0.0912981455064194,
      "grad_norm": 0.09483379870653152,
      "learning_rate": 7.973926941394201e-06,
      "loss": 1.7095,
      "step": 16
    },
    {
      "epoch": 0.09700427960057062,
      "grad_norm": 0.07794108241796494,
      "learning_rate": 7.964525608401445e-06,
      "loss": 1.7198,
      "step": 17
    },
    {
      "epoch": 0.10271041369472182,
      "grad_norm": 0.08914346247911453,
      "learning_rate": 7.953687099381448e-06,
      "loss": 1.7009,
      "step": 18
    },
    {
      "epoch": 0.10841654778887304,
      "grad_norm": 0.10453473776578903,
      "learning_rate": 7.941415343390771e-06,
      "loss": 1.6877,
      "step": 19
    },
    {
      "epoch": 0.11412268188302425,
      "grad_norm": 0.08587995171546936,
      "learning_rate": 7.927714789050827e-06,
      "loss": 1.7668,
      "step": 20
    },
    {
      "epoch": 0.11982881597717546,
      "grad_norm": 0.14489293098449707,
      "learning_rate": 7.912590402935222e-06,
      "loss": 1.7715,
      "step": 21
    },
    {
      "epoch": 0.12553495007132667,
      "grad_norm": 0.09673446416854858,
      "learning_rate": 7.896047667769334e-06,
      "loss": 1.7606,
      "step": 22
    },
    {
      "epoch": 0.1312410841654779,
      "grad_norm": 0.09498561918735504,
      "learning_rate": 7.878092580442764e-06,
      "loss": 1.6759,
      "step": 23
    },
    {
      "epoch": 0.1369472182596291,
      "grad_norm": 2.831144332885742,
      "learning_rate": 7.858731649835423e-06,
      "loss": 1.7195,
      "step": 24
    },
    {
      "epoch": 0.14265335235378032,
      "grad_norm": 0.10375729948282242,
      "learning_rate": 7.837971894457989e-06,
      "loss": 1.7335,
      "step": 25
    },
    {
      "epoch": 0.14835948644793154,
      "grad_norm": 0.10433638095855713,
      "learning_rate": 7.81582083990765e-06,
      "loss": 1.7296,
      "step": 26
    },
    {
      "epoch": 0.15406562054208273,
      "grad_norm": 0.15234149992465973,
      "learning_rate": 7.792286516139997e-06,
      "loss": 1.7611,
      "step": 27
    },
    {
      "epoch": 0.15977175463623394,
      "grad_norm": 0.10203755646944046,
      "learning_rate": 7.767377454558098e-06,
      "loss": 1.7692,
      "step": 28
    },
    {
      "epoch": 0.16547788873038516,
      "grad_norm": 0.11315208673477173,
      "learning_rate": 7.741102684919786e-06,
      "loss": 1.785,
      "step": 29
    },
    {
      "epoch": 0.17118402282453637,
      "grad_norm": 0.6384971141815186,
      "learning_rate": 7.71347173206429e-06,
      "loss": 1.7158,
      "step": 30
    },
    {
      "epoch": 0.1768901569186876,
      "grad_norm": 0.09646636992692947,
      "learning_rate": 7.6844946124594e-06,
      "loss": 1.7082,
      "step": 31
    },
    {
      "epoch": 0.1825962910128388,
      "grad_norm": 0.10170239955186844,
      "learning_rate": 7.654181830570403e-06,
      "loss": 1.7142,
      "step": 32
    },
    {
      "epoch": 0.18830242510699002,
      "grad_norm": 0.1125100702047348,
      "learning_rate": 7.622544375052123e-06,
      "loss": 1.7012,
      "step": 33
    },
    {
      "epoch": 0.19400855920114124,
      "grad_norm": 0.09590566158294678,
      "learning_rate": 7.589593714765433e-06,
      "loss": 1.6863,
      "step": 34
    },
    {
      "epoch": 0.19971469329529243,
      "grad_norm": 0.12389321625232697,
      "learning_rate": 7.555341794619694e-06,
      "loss": 1.761,
      "step": 35
    },
    {
      "epoch": 0.20542082738944364,
      "grad_norm": 0.1353520005941391,
      "learning_rate": 7.519801031242613e-06,
      "loss": 1.7201,
      "step": 36
    },
    {
      "epoch": 0.21112696148359486,
      "grad_norm": 0.13579188287258148,
      "learning_rate": 7.4829843084791085e-06,
      "loss": 1.6785,
      "step": 37
    },
    {
      "epoch": 0.21683309557774608,
      "grad_norm": 0.10585342347621918,
      "learning_rate": 7.4449049727208025e-06,
      "loss": 1.7489,
      "step": 38
    },
    {
      "epoch": 0.2225392296718973,
      "grad_norm": 0.10238084942102432,
      "learning_rate": 7.405576828067827e-06,
      "loss": 1.6709,
      "step": 39
    },
    {
      "epoch": 0.2282453637660485,
      "grad_norm": 0.15183645486831665,
      "learning_rate": 7.365014131324725e-06,
      "loss": 1.6628,
      "step": 40
    },
    {
      "epoch": 0.23395149786019973,
      "grad_norm": 0.18305522203445435,
      "learning_rate": 7.323231586832218e-06,
      "loss": 1.694,
      "step": 41
    },
    {
      "epoch": 0.2396576319543509,
      "grad_norm": 0.08764177560806274,
      "learning_rate": 7.2802443411367645e-06,
      "loss": 1.7025,
      "step": 42
    },
    {
      "epoch": 0.24536376604850213,
      "grad_norm": 0.11116138845682144,
      "learning_rate": 7.2360679774997895e-06,
      "loss": 1.7481,
      "step": 43
    },
    {
      "epoch": 0.25106990014265335,
      "grad_norm": 0.09760347753763199,
      "learning_rate": 7.190718510248621e-06,
      "loss": 1.7106,
      "step": 44
    },
    {
      "epoch": 0.25677603423680456,
      "grad_norm": 0.1078905463218689,
      "learning_rate": 7.1442123789711495e-06,
      "loss": 1.7066,
      "step": 45
    },
    {
      "epoch": 0.2624821683309558,
      "grad_norm": 0.11371272057294846,
      "learning_rate": 7.09656644255633e-06,
      "loss": 1.7185,
      "step": 46
    },
    {
      "epoch": 0.268188302425107,
      "grad_norm": 0.11446070671081543,
      "learning_rate": 7.047797973082684e-06,
      "loss": 1.6853,
      "step": 47
    },
    {
      "epoch": 0.2738944365192582,
      "grad_norm": 0.09420523792505264,
      "learning_rate": 6.997924649557016e-06,
      "loss": 1.6615,
      "step": 48
    },
    {
      "epoch": 0.2796005706134094,
      "grad_norm": 0.15625618398189545,
      "learning_rate": 6.946964551505619e-06,
      "loss": 1.7103,
      "step": 49
    },
    {
      "epoch": 0.28530670470756064,
      "grad_norm": 0.18687541782855988,
      "learning_rate": 6.89493615242028e-06,
      "loss": 1.6903,
      "step": 50
    },
    {
      "epoch": 0.29101283880171186,
      "grad_norm": 0.1630401462316513,
      "learning_rate": 6.8418583130614755e-06,
      "loss": 1.7384,
      "step": 51
    },
    {
      "epoch": 0.2967189728958631,
      "grad_norm": 0.10917814075946808,
      "learning_rate": 6.7877502746211744e-06,
      "loss": 1.6806,
      "step": 52
    },
    {
      "epoch": 0.3024251069900143,
      "grad_norm": 0.1082145944237709,
      "learning_rate": 6.732631651747738e-06,
      "loss": 1.767,
      "step": 53
    },
    {
      "epoch": 0.30813124108416545,
      "grad_norm": 0.09101668745279312,
      "learning_rate": 6.676522425435432e-06,
      "loss": 1.7022,
      "step": 54
    },
    {
      "epoch": 0.31383737517831667,
      "grad_norm": 0.1459859013557434,
      "learning_rate": 6.619442935781141e-06,
      "loss": 1.6798,
      "step": 55
    },
    {
      "epoch": 0.3195435092724679,
      "grad_norm": 0.17180761694908142,
      "learning_rate": 6.561413874610889e-06,
      "loss": 1.675,
      "step": 56
    },
    {
      "epoch": 0.3252496433666191,
      "grad_norm": 0.1387602835893631,
      "learning_rate": 6.502456277978886e-06,
      "loss": 1.6825,
      "step": 57
    },
    {
      "epoch": 0.3309557774607703,
      "grad_norm": 0.1187388226389885,
      "learning_rate": 6.442591518541753e-06,
      "loss": 1.7759,
      "step": 58
    },
    {
      "epoch": 0.33666191155492153,
      "grad_norm": 0.31210440397262573,
      "learning_rate": 6.381841297810752e-06,
      "loss": 1.647,
      "step": 59
    },
    {
      "epoch": 0.34236804564907275,
      "grad_norm": 0.16231843829154968,
      "learning_rate": 6.3202276382847925e-06,
      "loss": 1.7275,
      "step": 60
    },
    {
      "epoch": 0.34807417974322397,
      "grad_norm": 0.13726073503494263,
      "learning_rate": 6.257772875467077e-06,
      "loss": 1.7107,
      "step": 61
    },
    {
      "epoch": 0.3537803138373752,
      "grad_norm": 0.11954201012849808,
      "learning_rate": 6.1944996497682805e-06,
      "loss": 1.7106,
      "step": 62
    },
    {
      "epoch": 0.3594864479315264,
      "grad_norm": 0.17120261490345,
      "learning_rate": 6.130430898299199e-06,
      "loss": 1.7048,
      "step": 63
    },
    {
      "epoch": 0.3651925820256776,
      "grad_norm": 0.2885434627532959,
      "learning_rate": 6.065589846555847e-06,
      "loss": 1.734,
      "step": 64
    },
    {
      "epoch": 0.37089871611982883,
      "grad_norm": 0.11576446890830994,
      "learning_rate": 6e-06,
      "loss": 1.7462,
      "step": 65
    },
    {
      "epoch": 0.37660485021398005,
      "grad_norm": 0.1531599462032318,
      "learning_rate": 5.933685135538254e-06,
      "loss": 1.7104,
      "step": 66
    },
    {
      "epoch": 0.38231098430813126,
      "grad_norm": 0.12773863971233368,
      "learning_rate": 5.866669292902695e-06,
      "loss": 1.719,
      "step": 67
    },
    {
      "epoch": 0.3880171184022825,
      "grad_norm": 0.0994916781783104,
      "learning_rate": 5.798976765936263e-06,
      "loss": 1.6403,
      "step": 68
    },
    {
      "epoch": 0.39372325249643364,
      "grad_norm": 0.09449557214975357,
      "learning_rate": 5.730632093786033e-06,
      "loss": 1.6675,
      "step": 69
    },
    {
      "epoch": 0.39942938659058486,
      "grad_norm": 0.18067103624343872,
      "learning_rate": 5.661660052007546e-06,
      "loss": 1.7129,
      "step": 70
    },
    {
      "epoch": 0.4051355206847361,
      "grad_norm": 0.0928979218006134,
      "learning_rate": 5.59208564358345e-06,
      "loss": 1.6611,
      "step": 71
    },
    {
      "epoch": 0.4108416547788873,
      "grad_norm": 0.11206196993589401,
      "learning_rate": 5.521934089859692e-06,
      "loss": 1.6997,
      "step": 72
    },
    {
      "epoch": 0.4165477888730385,
      "grad_norm": 0.1811038702726364,
      "learning_rate": 5.451230821402563e-06,
      "loss": 1.7485,
      "step": 73
    },
    {
      "epoch": 0.4222539229671897,
      "grad_norm": 0.24817204475402832,
      "learning_rate": 5.380001468779882e-06,
      "loss": 1.6898,
      "step": 74
    },
    {
      "epoch": 0.42796005706134094,
      "grad_norm": 0.13622242212295532,
      "learning_rate": 5.308271853269687e-06,
      "loss": 1.7142,
      "step": 75
    },
    {
      "epoch": 0.43366619115549215,
      "grad_norm": 0.13679026067256927,
      "learning_rate": 5.236067977499789e-06,
      "loss": 1.743,
      "step": 76
    },
    {
      "epoch": 0.43937232524964337,
      "grad_norm": 0.15019570291042328,
      "learning_rate": 5.163416016021596e-06,
      "loss": 1.6775,
      "step": 77
    },
    {
      "epoch": 0.4450784593437946,
      "grad_norm": 0.10396311432123184,
      "learning_rate": 5.090342305821591e-06,
      "loss": 1.6702,
      "step": 78
    },
    {
      "epoch": 0.4507845934379458,
      "grad_norm": 0.2947867512702942,
      "learning_rate": 5.0168733367739484e-06,
      "loss": 1.7389,
      "step": 79
    },
    {
      "epoch": 0.456490727532097,
      "grad_norm": 0.23940597474575043,
      "learning_rate": 4.943035742037709e-06,
      "loss": 1.7602,
      "step": 80
    },
    {
      "epoch": 0.46219686162624823,
      "grad_norm": 0.17485539615154266,
      "learning_rate": 4.868856288402031e-06,
      "loss": 1.7057,
      "step": 81
    },
    {
      "epoch": 0.46790299572039945,
      "grad_norm": 0.15298104286193848,
      "learning_rate": 4.794361866582981e-06,
      "loss": 1.6806,
      "step": 82
    },
    {
      "epoch": 0.47360912981455067,
      "grad_norm": 0.18328501284122467,
      "learning_rate": 4.719579481475415e-06,
      "loss": 1.6852,
      "step": 83
    },
    {
      "epoch": 0.4793152639087018,
      "grad_norm": 0.13269121944904327,
      "learning_rate": 4.644536242363457e-06,
      "loss": 1.6662,
      "step": 84
    },
    {
      "epoch": 0.48502139800285304,
      "grad_norm": 0.1813397854566574,
      "learning_rate": 4.569259353093141e-06,
      "loss": 1.7516,
      "step": 85
    },
    {
      "epoch": 0.49072753209700426,
      "grad_norm": 0.10082633793354034,
      "learning_rate": 4.493776102210779e-06,
      "loss": 1.7438,
      "step": 86
    },
    {
      "epoch": 0.4964336661911555,
      "grad_norm": 0.10862607508897781,
      "learning_rate": 4.418113853070614e-06,
      "loss": 1.6985,
      "step": 87
    },
    {
      "epoch": 0.5021398002853067,
      "grad_norm": 0.1371043622493744,
      "learning_rate": 4.342300033915359e-06,
      "loss": 1.6684,
      "step": 88
    },
    {
      "epoch": 0.5078459343794579,
      "grad_norm": 0.19980739057064056,
      "learning_rate": 4.266362127933216e-06,
      "loss": 1.6687,
      "step": 89
    },
    {
      "epoch": 0.5135520684736091,
      "grad_norm": 0.1500827819108963,
      "learning_rate": 4.1903276632949695e-06,
      "loss": 1.7018,
      "step": 90
    },
    {
      "epoch": 0.5192582025677603,
      "grad_norm": 0.19625936448574066,
      "learning_rate": 4.1142242031747846e-06,
      "loss": 1.6426,
      "step": 91
    },
    {
      "epoch": 0.5249643366619116,
      "grad_norm": 0.11851513385772705,
      "learning_rate": 4.038079335758307e-06,
      "loss": 1.6565,
      "step": 92
    },
    {
      "epoch": 0.5306704707560628,
      "grad_norm": 0.10231585800647736,
      "learning_rate": 3.961920664241693e-06,
      "loss": 1.6705,
      "step": 93
    },
    {
      "epoch": 0.536376604850214,
      "grad_norm": 0.15432794392108917,
      "learning_rate": 3.885775796825215e-06,
      "loss": 1.6802,
      "step": 94
    },
    {
      "epoch": 0.5420827389443652,
      "grad_norm": 0.10955602675676346,
      "learning_rate": 3.8096723367050306e-06,
      "loss": 1.7885,
      "step": 95
    },
    {
      "epoch": 0.5477888730385164,
      "grad_norm": 0.11839420348405838,
      "learning_rate": 3.733637872066784e-06,
      "loss": 1.7251,
      "step": 96
    },
    {
      "epoch": 0.5534950071326676,
      "grad_norm": 0.19860365986824036,
      "learning_rate": 3.657699966084642e-06,
      "loss": 1.7,
      "step": 97
    },
    {
      "epoch": 0.5592011412268189,
      "grad_norm": 0.09971334040164948,
      "learning_rate": 3.5818861469293865e-06,
      "loss": 1.6763,
      "step": 98
    },
    {
      "epoch": 0.5649072753209701,
      "grad_norm": 0.1208500787615776,
      "learning_rate": 3.506223897789221e-06,
      "loss": 1.6746,
      "step": 99
    },
    {
      "epoch": 0.5706134094151213,
      "grad_norm": 0.3567933142185211,
      "learning_rate": 3.4307406469068596e-06,
      "loss": 1.6917,
      "step": 100
    },
    {
      "epoch": 0.5763195435092725,
      "grad_norm": 0.13815858960151672,
      "learning_rate": 3.355463757636544e-06,
      "loss": 1.6664,
      "step": 101
    },
    {
      "epoch": 0.5820256776034237,
      "grad_norm": 0.14547164738178253,
      "learning_rate": 3.2804205185245845e-06,
      "loss": 1.6911,
      "step": 102
    },
    {
      "epoch": 0.5877318116975749,
      "grad_norm": 0.12211696803569794,
      "learning_rate": 3.205638133417019e-06,
      "loss": 1.7104,
      "step": 103
    },
    {
      "epoch": 0.5934379457917262,
      "grad_norm": 0.09823016822338104,
      "learning_rate": 3.1311437115979695e-06,
      "loss": 1.6655,
      "step": 104
    },
    {
      "epoch": 0.5991440798858774,
      "grad_norm": 0.2253655046224594,
      "learning_rate": 3.0569642579622904e-06,
      "loss": 1.6848,
      "step": 105
    },
    {
      "epoch": 0.6048502139800286,
      "grad_norm": 0.16188812255859375,
      "learning_rate": 2.983126663226053e-06,
      "loss": 1.7024,
      "step": 106
    },
    {
      "epoch": 0.6105563480741797,
      "grad_norm": 0.20521730184555054,
      "learning_rate": 2.909657694178409e-06,
      "loss": 1.6877,
      "step": 107
    },
    {
      "epoch": 0.6162624821683309,
      "grad_norm": 0.11846901476383209,
      "learning_rate": 2.8365839839784036e-06,
      "loss": 1.6708,
      "step": 108
    },
    {
      "epoch": 0.6219686162624821,
      "grad_norm": 0.1269894540309906,
      "learning_rate": 2.7639320225002105e-06,
      "loss": 1.7331,
      "step": 109
    },
    {
      "epoch": 0.6276747503566333,
      "grad_norm": 0.12369225174188614,
      "learning_rate": 2.6917281467303133e-06,
      "loss": 1.6938,
      "step": 110
    },
    {
      "epoch": 0.6333808844507846,
      "grad_norm": 0.2189532369375229,
      "learning_rate": 2.6199985312201182e-06,
      "loss": 1.6623,
      "step": 111
    },
    {
      "epoch": 0.6390870185449358,
      "grad_norm": 0.12445096671581268,
      "learning_rate": 2.5487691785974363e-06,
      "loss": 1.72,
      "step": 112
    },
    {
      "epoch": 0.644793152639087,
      "grad_norm": 0.3778502941131592,
      "learning_rate": 2.478065910140308e-06,
      "loss": 1.7012,
      "step": 113
    },
    {
      "epoch": 0.6504992867332382,
      "grad_norm": 0.12095467001199722,
      "learning_rate": 2.407914356416552e-06,
      "loss": 1.6972,
      "step": 114
    },
    {
      "epoch": 0.6562054208273894,
      "grad_norm": 0.1321328580379486,
      "learning_rate": 2.3383399479924544e-06,
      "loss": 1.702,
      "step": 115
    },
    {
      "epoch": 0.6619115549215406,
      "grad_norm": 0.3205236792564392,
      "learning_rate": 2.269367906213966e-06,
      "loss": 1.6933,
      "step": 116
    },
    {
      "epoch": 0.6676176890156919,
      "grad_norm": 0.24388933181762695,
      "learning_rate": 2.2010232340637375e-06,
      "loss": 1.6686,
      "step": 117
    },
    {
      "epoch": 0.6733238231098431,
      "grad_norm": 0.1540047526359558,
      "learning_rate": 2.133330707097305e-06,
      "loss": 1.699,
      "step": 118
    },
    {
      "epoch": 0.6790299572039943,
      "grad_norm": 0.20498210191726685,
      "learning_rate": 2.066314864461744e-06,
      "loss": 1.7129,
      "step": 119
    },
    {
      "epoch": 0.6847360912981455,
      "grad_norm": 0.13441228866577148,
      "learning_rate": 2.0000000000000008e-06,
      "loss": 1.6787,
      "step": 120
    },
    {
      "epoch": 0.6904422253922967,
      "grad_norm": 0.16007143259048462,
      "learning_rate": 1.9344101534441527e-06,
      "loss": 1.7257,
      "step": 121
    },
    {
      "epoch": 0.6961483594864479,
      "grad_norm": 0.11582008004188538,
      "learning_rate": 1.8695691017008004e-06,
      "loss": 1.7686,
      "step": 122
    },
    {
      "epoch": 0.7018544935805991,
      "grad_norm": 0.15291719138622284,
      "learning_rate": 1.8055003502317212e-06,
      "loss": 1.7115,
      "step": 123
    },
    {
      "epoch": 0.7075606276747504,
      "grad_norm": 0.13494698703289032,
      "learning_rate": 1.742227124532924e-06,
      "loss": 1.6658,
      "step": 124
    },
    {
      "epoch": 0.7132667617689016,
      "grad_norm": 0.1385360211133957,
      "learning_rate": 1.6797723617152077e-06,
      "loss": 1.7218,
      "step": 125
    },
    {
      "epoch": 0.7189728958630528,
      "grad_norm": 0.11533591896295547,
      "learning_rate": 1.6181587021892483e-06,
      "loss": 1.6812,
      "step": 126
    },
    {
      "epoch": 0.724679029957204,
      "grad_norm": 0.14457185566425323,
      "learning_rate": 1.557408481458247e-06,
      "loss": 1.6951,
      "step": 127
    },
    {
      "epoch": 0.7303851640513552,
      "grad_norm": 0.11555743962526321,
      "learning_rate": 1.4975437220211138e-06,
      "loss": 1.7104,
      "step": 128
    },
    {
      "epoch": 0.7360912981455064,
      "grad_norm": 0.23432128131389618,
      "learning_rate": 1.4385861253891109e-06,
      "loss": 1.7321,
      "step": 129
    },
    {
      "epoch": 0.7417974322396577,
      "grad_norm": 0.16473281383514404,
      "learning_rate": 1.38055706421886e-06,
      "loss": 1.6835,
      "step": 130
    },
    {
      "epoch": 0.7475035663338089,
      "grad_norm": 0.13933511078357697,
      "learning_rate": 1.3234775745645683e-06,
      "loss": 1.6737,
      "step": 131
    },
    {
      "epoch": 0.7532097004279601,
      "grad_norm": 0.1289213001728058,
      "learning_rate": 1.2673683482522628e-06,
      "loss": 1.6766,
      "step": 132
    },
    {
      "epoch": 0.7589158345221113,
      "grad_norm": 0.13357162475585938,
      "learning_rate": 1.2122497253788267e-06,
      "loss": 1.6895,
      "step": 133
    },
    {
      "epoch": 0.7646219686162625,
      "grad_norm": 0.24594484269618988,
      "learning_rate": 1.1581416869385252e-06,
      "loss": 1.6795,
      "step": 134
    },
    {
      "epoch": 0.7703281027104137,
      "grad_norm": 0.1374809890985489,
      "learning_rate": 1.1050638475797191e-06,
      "loss": 1.6351,
      "step": 135
    },
    {
      "epoch": 0.776034236804565,
      "grad_norm": 0.12060262262821198,
      "learning_rate": 1.0530354484943798e-06,
      "loss": 1.7299,
      "step": 136
    },
    {
      "epoch": 0.7817403708987162,
      "grad_norm": 0.16807512938976288,
      "learning_rate": 1.0020753504429843e-06,
      "loss": 1.6747,
      "step": 137
    },
    {
      "epoch": 0.7874465049928673,
      "grad_norm": 0.12208617478609085,
      "learning_rate": 9.52202026917317e-07,
      "loss": 1.7106,
      "step": 138
    },
    {
      "epoch": 0.7931526390870185,
      "grad_norm": 0.2812899351119995,
      "learning_rate": 9.0343355744367e-07,
      "loss": 1.7116,
      "step": 139
    },
    {
      "epoch": 0.7988587731811697,
      "grad_norm": 0.15467038750648499,
      "learning_rate": 8.557876210288508e-07,
      "loss": 1.6687,
      "step": 140
    },
    {
      "epoch": 0.8045649072753209,
      "grad_norm": 0.24310462176799774,
      "learning_rate": 8.092814897513789e-07,
      "loss": 1.69,
      "step": 141
    },
    {
      "epoch": 0.8102710413694721,
      "grad_norm": 0.36173245310783386,
      "learning_rate": 7.639320225002106e-07,
      "loss": 1.7107,
      "step": 142
    },
    {
      "epoch": 0.8159771754636234,
      "grad_norm": 0.11579599976539612,
      "learning_rate": 7.197556588632357e-07,
      "loss": 1.7039,
      "step": 143
    },
    {
      "epoch": 0.8216833095577746,
      "grad_norm": 0.1441017985343933,
      "learning_rate": 6.767684131677813e-07,
      "loss": 1.7099,
      "step": 144
    },
    {
      "epoch": 0.8273894436519258,
      "grad_norm": 0.6187875270843506,
      "learning_rate": 6.349858686752747e-07,
      "loss": 1.6932,
      "step": 145
    },
    {
      "epoch": 0.833095577746077,
      "grad_norm": 0.20695286989212036,
      "learning_rate": 5.944231719321724e-07,
      "loss": 1.6552,
      "step": 146
    },
    {
      "epoch": 0.8388017118402282,
      "grad_norm": 0.11275847256183624,
      "learning_rate": 5.550950272791977e-07,
      "loss": 1.67,
      "step": 147
    },
    {
      "epoch": 0.8445078459343794,
      "grad_norm": 0.1475992500782013,
      "learning_rate": 5.170156915208919e-07,
      "loss": 1.6698,
      "step": 148
    },
    {
      "epoch": 0.8502139800285307,
      "grad_norm": 0.12389583885669708,
      "learning_rate": 4.801989687573878e-07,
      "loss": 1.7556,
      "step": 149
    },
    {
      "epoch": 0.8559201141226819,
      "grad_norm": 0.11729484051465988,
      "learning_rate": 4.4465820538030653e-07,
      "loss": 1.6801,
      "step": 150
    },
    {
      "epoch": 0.8616262482168331,
      "grad_norm": 0.7728609442710876,
      "learning_rate": 4.104062852345671e-07,
      "loss": 1.7103,
      "step": 151
    },
    {
      "epoch": 0.8673323823109843,
      "grad_norm": 0.10821712017059326,
      "learning_rate": 3.774556249478764e-07,
      "loss": 1.6808,
      "step": 152
    },
    {
      "epoch": 0.8730385164051355,
      "grad_norm": 0.11125564575195312,
      "learning_rate": 3.458181694295961e-07,
      "loss": 1.6624,
      "step": 153
    },
    {
      "epoch": 0.8787446504992867,
      "grad_norm": 0.32291027903556824,
      "learning_rate": 3.155053875406e-07,
      "loss": 1.6835,
      "step": 154
    },
    {
      "epoch": 0.884450784593438,
      "grad_norm": 0.13799256086349487,
      "learning_rate": 2.865282679357097e-07,
      "loss": 1.6339,
      "step": 155
    },
    {
      "epoch": 0.8901569186875892,
      "grad_norm": 0.13891300559043884,
      "learning_rate": 2.588973150802136e-07,
      "loss": 1.6816,
      "step": 156
    },
    {
      "epoch": 0.8958630527817404,
      "grad_norm": 0.4576370120048523,
      "learning_rate": 2.3262254544190153e-07,
      "loss": 1.6876,
      "step": 157
    },
    {
      "epoch": 0.9015691868758916,
      "grad_norm": 0.10818008333444595,
      "learning_rate": 2.0771348386000233e-07,
      "loss": 1.7076,
      "step": 158
    },
    {
      "epoch": 0.9072753209700428,
      "grad_norm": 0.13714520633220673,
      "learning_rate": 1.8417916009234946e-07,
      "loss": 1.6874,
      "step": 159
    },
    {
      "epoch": 0.912981455064194,
      "grad_norm": 0.2243780493736267,
      "learning_rate": 1.6202810554201097e-07,
      "loss": 1.6801,
      "step": 160
    },
    {
      "epoch": 0.9186875891583453,
      "grad_norm": 0.10469643771648407,
      "learning_rate": 1.4126835016457749e-07,
      "loss": 1.6569,
      "step": 161
    },
    {
      "epoch": 0.9243937232524965,
      "grad_norm": 0.14431771636009216,
      "learning_rate": 1.2190741955723494e-07,
      "loss": 1.7185,
      "step": 162
    },
    {
      "epoch": 0.9300998573466477,
      "grad_norm": 0.15075361728668213,
      "learning_rate": 1.0395233223066613e-07,
      "loss": 1.6624,
      "step": 163
    },
    {
      "epoch": 0.9358059914407989,
      "grad_norm": 0.37998688220977783,
      "learning_rate": 8.740959706477724e-08,
      "loss": 1.7068,
      "step": 164
    },
    {
      "epoch": 0.9415121255349501,
      "grad_norm": 0.4298974275588989,
      "learning_rate": 7.228521094917317e-08,
      "loss": 1.6794,
      "step": 165
    },
    {
      "epoch": 0.9472182596291013,
      "grad_norm": 0.18983392417430878,
      "learning_rate": 5.8584656609228065e-08,
      "loss": 1.6797,
      "step": 166
    },
    {
      "epoch": 0.9529243937232525,
      "grad_norm": 0.1515531986951828,
      "learning_rate": 4.631290061855031e-08,
      "loss": 1.7324,
      "step": 167
    },
    {
      "epoch": 0.9586305278174037,
      "grad_norm": 0.18322157859802246,
      "learning_rate": 3.547439159855559e-08,
      "loss": 1.6838,
      "step": 168
    },
    {
      "epoch": 0.9643366619115549,
      "grad_norm": 0.15800349414348602,
      "learning_rate": 2.6073058605799115e-08,
      "loss": 1.669,
      "step": 169
    },
    {
      "epoch": 0.9700427960057061,
      "grad_norm": 0.17917828261852264,
      "learning_rate": 1.8112309707661643e-08,
      "loss": 1.6658,
      "step": 170
    },
    {
      "epoch": 0.9757489300998573,
      "grad_norm": 0.2858942747116089,
      "learning_rate": 1.1595030746890166e-08,
      "loss": 1.6626,
      "step": 171
    },
    {
      "epoch": 0.9814550641940085,
      "grad_norm": 0.1326640099287033,
      "learning_rate": 6.523584295457407e-09,
      "loss": 1.689,
      "step": 172
    },
    {
      "epoch": 0.9871611982881597,
      "grad_norm": 0.17796051502227783,
      "learning_rate": 2.8998087981055853e-09,
      "loss": 1.7692,
      "step": 173
    },
    {
      "epoch": 0.992867332382311,
      "grad_norm": 0.20098765194416046,
      "learning_rate": 7.25017905896852e-10,
      "loss": 1.7088,
      "step": 174
    },
    {
      "epoch": 0.9985734664764622,
      "grad_norm": 0.16359874606132507,
      "learning_rate": 0.0,
      "loss": 1.692,
      "step": 175
    }
  ],
  "logging_steps": 1,
  "max_steps": 175,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.111205269274624e+19,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}