{
  "best_metric": 1.1598098278045654,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.006944565009809198,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 6.944565009809199e-05,
      "grad_norm": 57.99919509887695,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 8.4917,
      "step": 1
    },
    {
      "epoch": 6.944565009809199e-05,
      "eval_loss": 2.3346364498138428,
      "eval_runtime": 2224.619,
      "eval_samples_per_second": 10.902,
      "eval_steps_per_second": 1.363,
      "step": 1
    },
    {
      "epoch": 0.00013889130019618397,
      "grad_norm": 58.820064544677734,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 7.5263,
      "step": 2
    },
    {
      "epoch": 0.00020833695029427593,
      "grad_norm": 56.04924774169922,
      "learning_rate": 5.000000000000001e-07,
      "loss": 6.9585,
      "step": 3
    },
    {
      "epoch": 0.00027778260039236795,
      "grad_norm": 64.87261199951172,
      "learning_rate": 6.666666666666667e-07,
      "loss": 8.2777,
      "step": 4
    },
    {
      "epoch": 0.0003472282504904599,
      "grad_norm": 55.593360900878906,
      "learning_rate": 8.333333333333333e-07,
      "loss": 8.0919,
      "step": 5
    },
    {
      "epoch": 0.00041667390058855187,
      "grad_norm": 68.94898986816406,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 7.7907,
      "step": 6
    },
    {
      "epoch": 0.0004861195506866439,
      "grad_norm": 62.95187759399414,
      "learning_rate": 1.1666666666666668e-06,
      "loss": 8.0723,
      "step": 7
    },
    {
      "epoch": 0.0005555652007847359,
      "grad_norm": 68.86846923828125,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 9.0299,
      "step": 8
    },
    {
      "epoch": 0.0006250108508828278,
      "grad_norm": 54.460689544677734,
      "learning_rate": 1.5e-06,
      "loss": 7.4533,
      "step": 9
    },
    {
      "epoch": 0.0006944565009809198,
      "grad_norm": 64.97429656982422,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 7.9471,
      "step": 10
    },
    {
      "epoch": 0.0007639021510790118,
      "grad_norm": 62.297733306884766,
      "learning_rate": 1.8333333333333333e-06,
      "loss": 9.2093,
      "step": 11
    },
    {
      "epoch": 0.0008333478011771037,
      "grad_norm": 66.09642791748047,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 8.3644,
      "step": 12
    },
    {
      "epoch": 0.0009027934512751957,
      "grad_norm": 68.8487548828125,
      "learning_rate": 2.166666666666667e-06,
      "loss": 7.6406,
      "step": 13
    },
    {
      "epoch": 0.0009722391013732878,
      "grad_norm": 54.236751556396484,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 7.5545,
      "step": 14
    },
    {
      "epoch": 0.0010416847514713797,
      "grad_norm": 51.343406677246094,
      "learning_rate": 2.5e-06,
      "loss": 7.49,
      "step": 15
    },
    {
      "epoch": 0.0011111304015694718,
      "grad_norm": 52.90509033203125,
      "learning_rate": 2.666666666666667e-06,
      "loss": 7.4607,
      "step": 16
    },
    {
      "epoch": 0.0011805760516675637,
      "grad_norm": 49.121551513671875,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 7.2417,
      "step": 17
    },
    {
      "epoch": 0.0012500217017656556,
      "grad_norm": 45.887962341308594,
      "learning_rate": 3e-06,
      "loss": 6.7642,
      "step": 18
    },
    {
      "epoch": 0.0013194673518637477,
      "grad_norm": 43.50490951538086,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 6.9811,
      "step": 19
    },
    {
      "epoch": 0.0013889130019618396,
      "grad_norm": 45.96742248535156,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 7.1328,
      "step": 20
    },
    {
      "epoch": 0.0014583586520599315,
      "grad_norm": 44.12058639526367,
      "learning_rate": 3.5e-06,
      "loss": 7.4511,
      "step": 21
    },
    {
      "epoch": 0.0015278043021580237,
      "grad_norm": 54.125911712646484,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 6.666,
      "step": 22
    },
    {
      "epoch": 0.0015972499522561156,
      "grad_norm": 52.1979866027832,
      "learning_rate": 3.833333333333334e-06,
      "loss": 5.9474,
      "step": 23
    },
    {
      "epoch": 0.0016666956023542075,
      "grad_norm": 54.66168975830078,
      "learning_rate": 4.000000000000001e-06,
      "loss": 5.7697,
      "step": 24
    },
    {
      "epoch": 0.0017361412524522996,
      "grad_norm": 38.93903350830078,
      "learning_rate": 4.166666666666667e-06,
      "loss": 5.6342,
      "step": 25
    },
    {
      "epoch": 0.0018055869025503915,
      "grad_norm": 31.204254150390625,
      "learning_rate": 4.333333333333334e-06,
      "loss": 6.4815,
      "step": 26
    },
    {
      "epoch": 0.0018750325526484834,
      "grad_norm": 32.05744171142578,
      "learning_rate": 4.5e-06,
      "loss": 5.417,
      "step": 27
    },
    {
      "epoch": 0.0019444782027465755,
      "grad_norm": 35.15018081665039,
      "learning_rate": 4.666666666666667e-06,
      "loss": 5.0666,
      "step": 28
    },
    {
      "epoch": 0.0020139238528446676,
      "grad_norm": 31.198720932006836,
      "learning_rate": 4.833333333333333e-06,
      "loss": 5.6771,
      "step": 29
    },
    {
      "epoch": 0.0020833695029427593,
      "grad_norm": 26.646947860717773,
      "learning_rate": 5e-06,
      "loss": 4.6598,
      "step": 30
    },
    {
      "epoch": 0.0021528151530408515,
      "grad_norm": 24.5843448638916,
      "learning_rate": 4.997482666353287e-06,
      "loss": 5.5282,
      "step": 31
    },
    {
      "epoch": 0.0022222608031389436,
      "grad_norm": 35.84235763549805,
      "learning_rate": 4.989935734988098e-06,
      "loss": 5.7435,
      "step": 32
    },
    {
      "epoch": 0.0022917064532370353,
      "grad_norm": 28.750774383544922,
      "learning_rate": 4.977374404419838e-06,
      "loss": 5.621,
      "step": 33
    },
    {
      "epoch": 0.0023611521033351274,
      "grad_norm": 35.598567962646484,
      "learning_rate": 4.959823971496575e-06,
      "loss": 5.1858,
      "step": 34
    },
    {
      "epoch": 0.0024305977534332195,
      "grad_norm": 24.765888214111328,
      "learning_rate": 4.937319780454559e-06,
      "loss": 5.6566,
      "step": 35
    },
    {
      "epoch": 0.002500043403531311,
      "grad_norm": 23.42424964904785,
      "learning_rate": 4.909907151739634e-06,
      "loss": 4.8593,
      "step": 36
    },
    {
      "epoch": 0.0025694890536294033,
      "grad_norm": 25.198575973510742,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 5.0189,
      "step": 37
    },
    {
      "epoch": 0.0026389347037274954,
      "grad_norm": 31.468351364135742,
      "learning_rate": 4.8405871765993435e-06,
      "loss": 5.8011,
      "step": 38
    },
    {
      "epoch": 0.002708380353825587,
      "grad_norm": 36.017425537109375,
      "learning_rate": 4.7988194313786275e-06,
      "loss": 5.739,
      "step": 39
    },
    {
      "epoch": 0.0027778260039236793,
      "grad_norm": 24.822233200073242,
      "learning_rate": 4.752422169756048e-06,
      "loss": 5.3789,
      "step": 40
    },
    {
      "epoch": 0.0028472716540217714,
      "grad_norm": 23.735197067260742,
      "learning_rate": 4.701488829641845e-06,
      "loss": 5.1168,
      "step": 41
    },
    {
      "epoch": 0.002916717304119863,
      "grad_norm": 31.22846221923828,
      "learning_rate": 4.646121984004666e-06,
      "loss": 5.9325,
      "step": 42
    },
    {
      "epoch": 0.002986162954217955,
      "grad_norm": 23.26053810119629,
      "learning_rate": 4.586433134303257e-06,
      "loss": 4.8404,
      "step": 43
    },
    {
      "epoch": 0.0030556086043160473,
      "grad_norm": 30.344331741333008,
      "learning_rate": 4.522542485937369e-06,
      "loss": 5.4497,
      "step": 44
    },
    {
      "epoch": 0.003125054254414139,
      "grad_norm": 26.371152877807617,
      "learning_rate": 4.454578706170075e-06,
      "loss": 4.7377,
      "step": 45
    },
    {
      "epoch": 0.003194499904512231,
      "grad_norm": 29.86260414123535,
      "learning_rate": 4.382678665009028e-06,
      "loss": 5.9814,
      "step": 46
    },
    {
      "epoch": 0.0032639455546103232,
      "grad_norm": 30.244312286376953,
      "learning_rate": 4.3069871595684795e-06,
      "loss": 5.2366,
      "step": 47
    },
    {
      "epoch": 0.003333391204708415,
      "grad_norm": 24.537878036499023,
      "learning_rate": 4.227656622467162e-06,
      "loss": 5.1262,
      "step": 48
    },
    {
      "epoch": 0.003402836854806507,
      "grad_norm": 35.21415710449219,
      "learning_rate": 4.144846814849282e-06,
      "loss": 5.9274,
      "step": 49
    },
    {
      "epoch": 0.003472282504904599,
      "grad_norm": 38.00114059448242,
      "learning_rate": 4.058724504646834e-06,
      "loss": 6.1489,
      "step": 50
    },
    {
      "epoch": 0.003472282504904599,
      "eval_loss": 1.2252222299575806,
      "eval_runtime": 2231.1566,
      "eval_samples_per_second": 10.87,
      "eval_steps_per_second": 1.359,
      "step": 50
    },
    {
      "epoch": 0.003541728155002691,
      "grad_norm": 26.02019691467285,
      "learning_rate": 3.969463130731183e-06,
      "loss": 4.9606,
      "step": 51
    },
    {
      "epoch": 0.003611173805100783,
      "grad_norm": 27.602750778198242,
      "learning_rate": 3.8772424536302565e-06,
      "loss": 5.0621,
      "step": 52
    },
    {
      "epoch": 0.003680619455198875,
      "grad_norm": 23.657602310180664,
      "learning_rate": 3.782248193514766e-06,
      "loss": 4.5891,
      "step": 53
    },
    {
      "epoch": 0.003750065105296967,
      "grad_norm": 24.33047103881836,
      "learning_rate": 3.684671656182497e-06,
      "loss": 5.011,
      "step": 54
    },
    {
      "epoch": 0.003819510755395059,
      "grad_norm": 20.79547119140625,
      "learning_rate": 3.5847093477938955e-06,
      "loss": 4.2932,
      "step": 55
    },
    {
      "epoch": 0.003888956405493151,
      "grad_norm": 21.947898864746094,
      "learning_rate": 3.4825625791348093e-06,
      "loss": 4.9364,
      "step": 56
    },
    {
      "epoch": 0.003958402055591243,
      "grad_norm": 24.627059936523438,
      "learning_rate": 3.3784370602033572e-06,
      "loss": 4.489,
      "step": 57
    },
    {
      "epoch": 0.004027847705689335,
      "grad_norm": 17.064987182617188,
      "learning_rate": 3.272542485937369e-06,
      "loss": 4.566,
      "step": 58
    },
    {
      "epoch": 0.0040972933557874265,
      "grad_norm": 21.63456153869629,
      "learning_rate": 3.165092113916688e-06,
      "loss": 4.414,
      "step": 59
    },
    {
      "epoch": 0.004166739005885519,
      "grad_norm": 20.844581604003906,
      "learning_rate": 3.056302334890786e-06,
      "loss": 5.1632,
      "step": 60
    },
    {
      "epoch": 0.004236184655983611,
      "grad_norm": 20.495439529418945,
      "learning_rate": 2.946392236996592e-06,
      "loss": 3.9125,
      "step": 61
    },
    {
      "epoch": 0.004305630306081703,
      "grad_norm": 18.964275360107422,
      "learning_rate": 2.835583164544139e-06,
      "loss": 3.8756,
      "step": 62
    },
    {
      "epoch": 0.004375075956179795,
      "grad_norm": 20.54064178466797,
      "learning_rate": 2.724098272258584e-06,
      "loss": 4.4273,
      "step": 63
    },
    {
      "epoch": 0.004444521606277887,
      "grad_norm": 19.636096954345703,
      "learning_rate": 2.6121620758762877e-06,
      "loss": 5.1823,
      "step": 64
    },
    {
      "epoch": 0.004513967256375978,
      "grad_norm": 18.76691246032715,
      "learning_rate": 2.5e-06,
      "loss": 4.5282,
      "step": 65
    },
    {
      "epoch": 0.0045834129064740705,
      "grad_norm": 21.243412017822266,
      "learning_rate": 2.3878379241237136e-06,
      "loss": 4.6716,
      "step": 66
    },
    {
      "epoch": 0.004652858556572163,
      "grad_norm": 19.991161346435547,
      "learning_rate": 2.2759017277414165e-06,
      "loss": 4.9848,
      "step": 67
    },
    {
      "epoch": 0.004722304206670255,
      "grad_norm": 20.23773193359375,
      "learning_rate": 2.1644168354558623e-06,
      "loss": 4.9668,
      "step": 68
    },
    {
      "epoch": 0.004791749856768347,
      "grad_norm": 19.373884201049805,
      "learning_rate": 2.053607763003409e-06,
      "loss": 4.8817,
      "step": 69
    },
    {
      "epoch": 0.004861195506866439,
      "grad_norm": 22.937284469604492,
      "learning_rate": 1.9436976651092143e-06,
      "loss": 4.8936,
      "step": 70
    },
    {
      "epoch": 0.00493064115696453,
      "grad_norm": 21.041000366210938,
      "learning_rate": 1.8349078860833125e-06,
      "loss": 4.7213,
      "step": 71
    },
    {
      "epoch": 0.005000086807062622,
      "grad_norm": 19.949390411376953,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 5.0686,
      "step": 72
    },
    {
      "epoch": 0.0050695324571607145,
      "grad_norm": 19.30895233154297,
      "learning_rate": 1.6215629397966432e-06,
      "loss": 4.4509,
      "step": 73
    },
    {
      "epoch": 0.005138978107258807,
      "grad_norm": 21.176286697387695,
      "learning_rate": 1.5174374208651913e-06,
      "loss": 4.4727,
      "step": 74
    },
    {
      "epoch": 0.005208423757356899,
      "grad_norm": 20.409286499023438,
      "learning_rate": 1.415290652206105e-06,
      "loss": 4.7167,
      "step": 75
    },
    {
      "epoch": 0.005277869407454991,
      "grad_norm": 22.04141616821289,
      "learning_rate": 1.3153283438175036e-06,
      "loss": 5.1919,
      "step": 76
    },
    {
      "epoch": 0.005347315057553082,
      "grad_norm": 19.59886360168457,
      "learning_rate": 1.217751806485235e-06,
      "loss": 4.925,
      "step": 77
    },
    {
      "epoch": 0.005416760707651174,
      "grad_norm": 21.676176071166992,
      "learning_rate": 1.122757546369744e-06,
      "loss": 5.0204,
      "step": 78
    },
    {
      "epoch": 0.005486206357749266,
      "grad_norm": 23.305850982666016,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 4.2981,
      "step": 79
    },
    {
      "epoch": 0.0055556520078473585,
      "grad_norm": 19.65032958984375,
      "learning_rate": 9.412754953531664e-07,
      "loss": 3.8816,
      "step": 80
    },
    {
      "epoch": 0.005625097657945451,
      "grad_norm": 22.556602478027344,
      "learning_rate": 8.551531851507186e-07,
      "loss": 4.8851,
      "step": 81
    },
    {
      "epoch": 0.005694543308043543,
      "grad_norm": 20.84819221496582,
      "learning_rate": 7.723433775328385e-07,
      "loss": 4.7379,
      "step": 82
    },
    {
      "epoch": 0.005763988958141634,
      "grad_norm": 22.757490158081055,
      "learning_rate": 6.930128404315214e-07,
      "loss": 4.4146,
      "step": 83
    },
    {
      "epoch": 0.005833434608239726,
      "grad_norm": 23.608304977416992,
      "learning_rate": 6.17321334990973e-07,
      "loss": 4.6309,
      "step": 84
    },
    {
      "epoch": 0.005902880258337818,
      "grad_norm": 23.365802764892578,
      "learning_rate": 5.454212938299256e-07,
      "loss": 5.3139,
      "step": 85
    },
    {
      "epoch": 0.00597232590843591,
      "grad_norm": 21.471818923950195,
      "learning_rate": 4.774575140626317e-07,
      "loss": 4.0731,
      "step": 86
    },
    {
      "epoch": 0.0060417715585340025,
      "grad_norm": 20.544567108154297,
      "learning_rate": 4.1356686569674344e-07,
      "loss": 4.5634,
      "step": 87
    },
    {
      "epoch": 0.006111217208632095,
      "grad_norm": 20.132556915283203,
      "learning_rate": 3.538780159953348e-07,
      "loss": 4.7335,
      "step": 88
    },
    {
      "epoch": 0.006180662858730186,
      "grad_norm": 24.553869247436523,
      "learning_rate": 2.98511170358155e-07,
      "loss": 4.3985,
      "step": 89
    },
    {
      "epoch": 0.006250108508828278,
      "grad_norm": 23.55926513671875,
      "learning_rate": 2.4757783024395244e-07,
      "loss": 4.853,
      "step": 90
    },
    {
      "epoch": 0.00631955415892637,
      "grad_norm": 22.498533248901367,
      "learning_rate": 2.0118056862137358e-07,
      "loss": 5.0627,
      "step": 91
    },
    {
      "epoch": 0.006388999809024462,
      "grad_norm": 23.05436134338379,
      "learning_rate": 1.59412823400657e-07,
      "loss": 4.4592,
      "step": 92
    },
    {
      "epoch": 0.006458445459122554,
      "grad_norm": 22.92341423034668,
      "learning_rate": 1.223587092621162e-07,
      "loss": 5.0885,
      "step": 93
    },
    {
      "epoch": 0.0065278911092206465,
      "grad_norm": 21.84902000427246,
      "learning_rate": 9.00928482603669e-08,
      "loss": 4.746,
      "step": 94
    },
    {
      "epoch": 0.006597336759318738,
      "grad_norm": 27.136032104492188,
      "learning_rate": 6.268021954544095e-08,
      "loss": 5.4861,
      "step": 95
    },
    {
      "epoch": 0.00666678240941683,
      "grad_norm": 22.971391677856445,
      "learning_rate": 4.017602850342584e-08,
      "loss": 4.7522,
      "step": 96
    },
    {
      "epoch": 0.006736228059514922,
      "grad_norm": 22.49424171447754,
      "learning_rate": 2.262559558016325e-08,
      "loss": 4.975,
      "step": 97
    },
    {
      "epoch": 0.006805673709613014,
      "grad_norm": 29.4676513671875,
      "learning_rate": 1.006426501190233e-08,
      "loss": 5.651,
      "step": 98
    },
    {
      "epoch": 0.006875119359711106,
      "grad_norm": 25.18215560913086,
      "learning_rate": 2.5173336467135266e-09,
      "loss": 4.7655,
      "step": 99
    },
    {
      "epoch": 0.006944565009809198,
      "grad_norm": 39.18954086303711,
      "learning_rate": 0.0,
      "loss": 6.7356,
      "step": 100
    },
    {
      "epoch": 0.006944565009809198,
      "eval_loss": 1.1598098278045654,
      "eval_runtime": 2231.8266,
      "eval_samples_per_second": 10.866,
      "eval_steps_per_second": 1.359,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4381529990325862e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}