|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9992985737666586,
  "eval_steps": 500,
  "global_step": 6414,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04676174888940846,
      "grad_norm": 0.414691299200058,
      "learning_rate": 9.923004399748587e-06,
      "loss": 1.3939,
      "step": 100
    },
    {
      "epoch": 0.09352349777881692,
      "grad_norm": 0.373704195022583,
      "learning_rate": 9.765870521684476e-06,
      "loss": 1.2045,
      "step": 200
    },
    {
      "epoch": 0.1402852466682254,
      "grad_norm": 0.4187537431716919,
      "learning_rate": 9.608736643620365e-06,
      "loss": 1.2093,
      "step": 300
    },
    {
      "epoch": 0.18704699555763385,
      "grad_norm": 0.49985188245773315,
      "learning_rate": 9.451602765556255e-06,
      "loss": 1.1465,
      "step": 400
    },
    {
      "epoch": 0.23380874444704233,
      "grad_norm": 0.547389805316925,
      "learning_rate": 9.294468887492145e-06,
      "loss": 1.1502,
      "step": 500
    },
    {
      "epoch": 0.2805704933364508,
      "grad_norm": 0.521124541759491,
      "learning_rate": 9.137335009428033e-06,
      "loss": 1.1455,
      "step": 600
    },
    {
      "epoch": 0.32733224222585927,
      "grad_norm": 0.5321390628814697,
      "learning_rate": 8.980201131363923e-06,
      "loss": 1.1196,
      "step": 700
    },
    {
      "epoch": 0.3740939911152677,
      "grad_norm": 0.5767441987991333,
      "learning_rate": 8.823067253299812e-06,
      "loss": 1.115,
      "step": 800
    },
    {
      "epoch": 0.4208557400046762,
      "grad_norm": 0.6053145527839661,
      "learning_rate": 8.6659333752357e-06,
      "loss": 1.1419,
      "step": 900
    },
    {
      "epoch": 0.46761748889408467,
      "grad_norm": 0.6067389249801636,
      "learning_rate": 8.50879949717159e-06,
      "loss": 1.1077,
      "step": 1000
    },
    {
      "epoch": 0.5143792377834931,
      "grad_norm": 0.6225459575653076,
      "learning_rate": 8.35166561910748e-06,
      "loss": 1.1181,
      "step": 1100
    },
    {
      "epoch": 0.5611409866729016,
      "grad_norm": 0.6895543932914734,
      "learning_rate": 8.19453174104337e-06,
      "loss": 1.1015,
      "step": 1200
    },
    {
      "epoch": 0.60790273556231,
      "grad_norm": 0.6458436846733093,
      "learning_rate": 8.03739786297926e-06,
      "loss": 1.1198,
      "step": 1300
    },
    {
      "epoch": 0.6546644844517185,
      "grad_norm": 0.663877010345459,
      "learning_rate": 7.88026398491515e-06,
      "loss": 1.0982,
      "step": 1400
    },
    {
      "epoch": 0.701426233341127,
      "grad_norm": 0.6076435446739197,
      "learning_rate": 7.723130106851037e-06,
      "loss": 1.0911,
      "step": 1500
    },
    {
      "epoch": 0.7481879822305354,
      "grad_norm": 1.181686282157898,
      "learning_rate": 7.565996228786927e-06,
      "loss": 1.0917,
      "step": 1600
    },
    {
      "epoch": 0.7949497311199439,
      "grad_norm": 0.8685171008110046,
      "learning_rate": 7.408862350722817e-06,
      "loss": 1.0836,
      "step": 1700
    },
    {
      "epoch": 0.8417114800093524,
      "grad_norm": 0.7595590949058533,
      "learning_rate": 7.251728472658706e-06,
      "loss": 1.0937,
      "step": 1800
    },
    {
      "epoch": 0.8884732288987608,
      "grad_norm": 1.043165683746338,
      "learning_rate": 7.0945945945945946e-06,
      "loss": 1.0681,
      "step": 1900
    },
    {
      "epoch": 0.9352349777881693,
      "grad_norm": 0.7252351641654968,
      "learning_rate": 6.937460716530484e-06,
      "loss": 1.0753,
      "step": 2000
    },
    {
      "epoch": 0.9819967266775778,
      "grad_norm": 16.970993041992188,
      "learning_rate": 6.780326838466373e-06,
      "loss": 1.068,
      "step": 2100
    },
    {
      "epoch": 1.0287584755669863,
      "grad_norm": 0.7194657921791077,
      "learning_rate": 6.623192960402264e-06,
      "loss": 1.0816,
      "step": 2200
    },
    {
      "epoch": 1.0755202244563946,
      "grad_norm": 0.8986325860023499,
      "learning_rate": 6.467630421118794e-06,
      "loss": 1.0516,
      "step": 2300
    },
    {
      "epoch": 1.1222819733458032,
      "grad_norm": 0.8846977353096008,
      "learning_rate": 6.3104965430546826e-06,
      "loss": 1.0775,
      "step": 2400
    },
    {
      "epoch": 1.1690437222352117,
      "grad_norm": 0.8062325119972229,
      "learning_rate": 6.153362664990572e-06,
      "loss": 1.0796,
      "step": 2500
    },
    {
      "epoch": 1.21580547112462,
      "grad_norm": 9.224539756774902,
      "learning_rate": 5.996228786926462e-06,
      "loss": 1.0646,
      "step": 2600
    },
    {
      "epoch": 1.2625672200140285,
      "grad_norm": 0.7716740369796753,
      "learning_rate": 5.839094908862352e-06,
      "loss": 1.0699,
      "step": 2700
    },
    {
      "epoch": 1.3093289689034369,
      "grad_norm": 1.1291840076446533,
      "learning_rate": 5.681961030798241e-06,
      "loss": 1.0679,
      "step": 2800
    },
    {
      "epoch": 1.3560907177928454,
      "grad_norm": 1.0691558122634888,
      "learning_rate": 5.5248271527341305e-06,
      "loss": 1.0871,
      "step": 2900
    },
    {
      "epoch": 1.402852466682254,
      "grad_norm": 1.0153058767318726,
      "learning_rate": 5.367693274670019e-06,
      "loss": 1.0418,
      "step": 3000
    },
    {
      "epoch": 1.4496142155716623,
      "grad_norm": 0.8397168517112732,
      "learning_rate": 5.210559396605908e-06,
      "loss": 1.0751,
      "step": 3100
    },
    {
      "epoch": 1.4963759644610708,
      "grad_norm": 0.9355253577232361,
      "learning_rate": 5.053425518541798e-06,
      "loss": 1.0588,
      "step": 3200
    },
    {
      "epoch": 1.5431377133504793,
      "grad_norm": 0.9902454614639282,
      "learning_rate": 4.896291640477687e-06,
      "loss": 1.0847,
      "step": 3300
    },
    {
      "epoch": 1.5898994622398877,
      "grad_norm": 0.9210862517356873,
      "learning_rate": 4.739157762413577e-06,
      "loss": 1.076,
      "step": 3400
    },
    {
      "epoch": 1.6366612111292962,
      "grad_norm": 0.9090029001235962,
      "learning_rate": 4.5820238843494665e-06,
      "loss": 1.0579,
      "step": 3500
    },
    {
      "epoch": 1.6834229600187047,
      "grad_norm": 0.9450795650482178,
      "learning_rate": 4.424890006285355e-06,
      "loss": 1.079,
      "step": 3600
    },
    {
      "epoch": 1.730184708908113,
      "grad_norm": 0.8659577965736389,
      "learning_rate": 4.267756128221245e-06,
      "loss": 1.0583,
      "step": 3700
    },
    {
      "epoch": 1.7769464577975216,
      "grad_norm": 0.9201979041099548,
      "learning_rate": 4.110622250157134e-06,
      "loss": 1.0263,
      "step": 3800
    },
    {
      "epoch": 1.8237082066869301,
      "grad_norm": 1.2461769580841064,
      "learning_rate": 3.953488372093024e-06,
      "loss": 1.0579,
      "step": 3900
    },
    {
      "epoch": 1.8704699555763384,
      "grad_norm": 0.8340646028518677,
      "learning_rate": 3.796354494028913e-06,
      "loss": 1.0781,
      "step": 4000
    },
    {
      "epoch": 1.917231704465747,
      "grad_norm": 1.7858061790466309,
      "learning_rate": 3.6392206159648025e-06,
      "loss": 1.0701,
      "step": 4100
    },
    {
      "epoch": 1.9639934533551555,
      "grad_norm": 1.354982614517212,
      "learning_rate": 3.4820867379006918e-06,
      "loss": 1.0613,
      "step": 4200
    },
    {
      "epoch": 2.010755202244564,
      "grad_norm": 0.9671053886413574,
      "learning_rate": 3.324952859836581e-06,
      "loss": 1.0741,
      "step": 4300
    },
    {
      "epoch": 2.0575169511339726,
      "grad_norm": 0.8994483351707458,
      "learning_rate": 3.1678189817724704e-06,
      "loss": 1.0347,
      "step": 4400
    },
    {
      "epoch": 2.104278700023381,
      "grad_norm": 1.036387324333191,
      "learning_rate": 3.0106851037083594e-06,
      "loss": 1.054,
      "step": 4500
    },
    {
      "epoch": 2.1510404489127892,
      "grad_norm": 1.0670599937438965,
      "learning_rate": 2.8535512256442495e-06,
      "loss": 1.0315,
      "step": 4600
    },
    {
      "epoch": 2.197802197802198,
      "grad_norm": 1.0409014225006104,
      "learning_rate": 2.6964173475801384e-06,
      "loss": 1.0626,
      "step": 4700
    },
    {
      "epoch": 2.2445639466916063,
      "grad_norm": 1.0264023542404175,
      "learning_rate": 2.5392834695160278e-06,
      "loss": 1.0352,
      "step": 4800
    },
    {
      "epoch": 2.2913256955810146,
      "grad_norm": 3.9457595348358154,
      "learning_rate": 2.382149591451917e-06,
      "loss": 1.0428,
      "step": 4900
    },
    {
      "epoch": 2.3380874444704234,
      "grad_norm": 0.966581404209137,
      "learning_rate": 2.225015713387807e-06,
      "loss": 1.0487,
      "step": 5000
    },
    {
      "epoch": 2.3848491933598317,
      "grad_norm": 1.0433825254440308,
      "learning_rate": 2.0678818353236958e-06,
      "loss": 1.0679,
      "step": 5100
    },
    {
      "epoch": 2.43161094224924,
      "grad_norm": 1.005495548248291,
      "learning_rate": 1.910747957259585e-06,
      "loss": 1.0497,
      "step": 5200
    },
    {
      "epoch": 2.4783726911386488,
      "grad_norm": 1.0041530132293701,
      "learning_rate": 1.7536140791954748e-06,
      "loss": 1.05,
      "step": 5300
    },
    {
      "epoch": 2.525134440028057,
      "grad_norm": 1.108376383781433,
      "learning_rate": 1.596480201131364e-06,
      "loss": 1.0618,
      "step": 5400
    },
    {
      "epoch": 2.5718961889174654,
      "grad_norm": 1.0034010410308838,
      "learning_rate": 1.4393463230672533e-06,
      "loss": 1.0486,
      "step": 5500
    },
    {
      "epoch": 2.6186579378068737,
      "grad_norm": 0.9590221047401428,
      "learning_rate": 1.2822124450031428e-06,
      "loss": 1.0514,
      "step": 5600
    },
    {
      "epoch": 2.6654196866962825,
      "grad_norm": 1.027959942817688,
      "learning_rate": 1.1250785669390322e-06,
      "loss": 1.0578,
      "step": 5700
    },
    {
      "epoch": 2.712181435585691,
      "grad_norm": 0.9596933722496033,
      "learning_rate": 9.679446888749215e-07,
      "loss": 1.0659,
      "step": 5800
    },
    {
      "epoch": 2.7589431844750996,
      "grad_norm": 1.142251968383789,
      "learning_rate": 8.108108108108109e-07,
      "loss": 1.0714,
      "step": 5900
    },
    {
      "epoch": 2.805704933364508,
      "grad_norm": 1.2584409713745117,
      "learning_rate": 6.536769327467001e-07,
      "loss": 1.0485,
      "step": 6000
    },
    {
      "epoch": 2.852466682253916,
      "grad_norm": 1.3494170904159546,
      "learning_rate": 4.965430546825896e-07,
      "loss": 1.0321,
      "step": 6100
    },
    {
      "epoch": 2.8992284311433245,
      "grad_norm": 0.9571290612220764,
      "learning_rate": 3.39409176618479e-07,
      "loss": 1.0181,
      "step": 6200
    },
    {
      "epoch": 2.9459901800327333,
      "grad_norm": 1.231972575187683,
      "learning_rate": 1.8227529855436833e-07,
      "loss": 1.0409,
      "step": 6300
    },
    {
      "epoch": 2.9927519289221416,
      "grad_norm": 2.6358070373535156,
      "learning_rate": 2.6712759270898807e-08,
      "loss": 1.048,
      "step": 6400
    }
  ],
  "logging_steps": 100,
  "max_steps": 6414,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 2138,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.481950171724329e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|
|