{
  "best_metric": 0.5973209738731384,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.635593220338983,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.00423728813559322, "grad_norm": 2.8869433403015137, "learning_rate": 5e-06, "loss": 6.9072, "step": 1 },
    { "epoch": 0.00423728813559322, "eval_loss": 1.7482346296310425, "eval_runtime": 42.7906, "eval_samples_per_second": 9.301, "eval_steps_per_second": 4.651, "step": 1 },
    { "epoch": 0.00847457627118644, "grad_norm": 2.9117305278778076, "learning_rate": 1e-05, "loss": 6.6519, "step": 2 },
    { "epoch": 0.012711864406779662, "grad_norm": 3.042799711227417, "learning_rate": 1.5e-05, "loss": 6.8339, "step": 3 },
    { "epoch": 0.01694915254237288, "grad_norm": 2.9370641708374023, "learning_rate": 2e-05, "loss": 6.8588, "step": 4 },
    { "epoch": 0.0211864406779661, "grad_norm": 2.8842549324035645, "learning_rate": 2.5e-05, "loss": 6.3375, "step": 5 },
    { "epoch": 0.025423728813559324, "grad_norm": 3.039867877960205, "learning_rate": 3e-05, "loss": 6.757, "step": 6 },
    { "epoch": 0.029661016949152543, "grad_norm": 2.889957904815674, "learning_rate": 3.5e-05, "loss": 6.3478, "step": 7 },
    { "epoch": 0.03389830508474576, "grad_norm": 2.849916934967041, "learning_rate": 4e-05, "loss": 5.7138, "step": 8 },
    { "epoch": 0.038135593220338986, "grad_norm": 2.6687538623809814, "learning_rate": 4.5e-05, "loss": 5.1771, "step": 9 },
    { "epoch": 0.0423728813559322, "grad_norm": 2.5558407306671143, "learning_rate": 5e-05, "loss": 5.3018, "step": 10 },
    { "epoch": 0.046610169491525424, "grad_norm": 2.6599059104919434, "learning_rate": 5.500000000000001e-05, "loss": 4.3197, "step": 11 },
    { "epoch": 0.05084745762711865, "grad_norm": 2.4746832847595215, "learning_rate": 6e-05, "loss": 3.7984, "step": 12 },
    { "epoch": 0.05508474576271186, "grad_norm": 2.8662729263305664, "learning_rate": 6.500000000000001e-05, "loss": 3.2409, "step": 13 },
    { "epoch": 0.059322033898305086, "grad_norm": 2.0270683765411377, "learning_rate": 7e-05, "loss": 3.0089, "step": 14 },
    { "epoch": 0.0635593220338983, "grad_norm": 2.611672878265381, "learning_rate": 7.500000000000001e-05, "loss": 2.484, "step": 15 },
    { "epoch": 0.06779661016949153, "grad_norm": 2.9032974243164062, "learning_rate": 8e-05, "loss": 2.8204, "step": 16 },
    { "epoch": 0.07203389830508475, "grad_norm": 2.599370002746582, "learning_rate": 8.5e-05, "loss": 2.7265, "step": 17 },
    { "epoch": 0.07627118644067797, "grad_norm": 1.805683970451355, "learning_rate": 9e-05, "loss": 2.9644, "step": 18 },
    { "epoch": 0.08050847457627118, "grad_norm": 2.769292116165161, "learning_rate": 9.5e-05, "loss": 2.6633, "step": 19 },
    { "epoch": 0.0847457627118644, "grad_norm": 3.1495392322540283, "learning_rate": 0.0001, "loss": 2.9764, "step": 20 },
    { "epoch": 0.08898305084745763, "grad_norm": 2.3471312522888184, "learning_rate": 9.999238475781957e-05, "loss": 2.4665, "step": 21 },
    { "epoch": 0.09322033898305085, "grad_norm": 2.6704769134521484, "learning_rate": 9.99695413509548e-05, "loss": 2.2892, "step": 22 },
    { "epoch": 0.09745762711864407, "grad_norm": 1.0994267463684082, "learning_rate": 9.99314767377287e-05, "loss": 2.344, "step": 23 },
    { "epoch": 0.1016949152542373, "grad_norm": 2.1697537899017334, "learning_rate": 9.987820251299122e-05, "loss": 2.7479, "step": 24 },
    { "epoch": 0.1059322033898305, "grad_norm": 2.7966907024383545, "learning_rate": 9.980973490458728e-05, "loss": 2.5423, "step": 25 },
    { "epoch": 0.11016949152542373, "grad_norm": 1.6926209926605225, "learning_rate": 9.972609476841367e-05, "loss": 2.7024, "step": 26 },
    { "epoch": 0.11440677966101695, "grad_norm": 2.5315053462982178, "learning_rate": 9.962730758206611e-05, "loss": 2.4407, "step": 27 },
    { "epoch": 0.11864406779661017, "grad_norm": 3.6142640113830566, "learning_rate": 9.951340343707852e-05, "loss": 2.2694, "step": 28 },
    { "epoch": 0.1228813559322034, "grad_norm": 1.3599029779434204, "learning_rate": 9.938441702975689e-05, "loss": 2.3543, "step": 29 },
    { "epoch": 0.1271186440677966, "grad_norm": 2.29304575920105, "learning_rate": 9.924038765061042e-05, "loss": 2.5706, "step": 30 },
    { "epoch": 0.13135593220338984, "grad_norm": 1.6786038875579834, "learning_rate": 9.908135917238321e-05, "loss": 2.5246, "step": 31 },
    { "epoch": 0.13559322033898305, "grad_norm": 1.260206699371338, "learning_rate": 9.890738003669029e-05, "loss": 1.9627, "step": 32 },
    { "epoch": 0.13983050847457626, "grad_norm": 1.5896670818328857, "learning_rate": 9.871850323926177e-05, "loss": 2.4204, "step": 33 },
    { "epoch": 0.1440677966101695, "grad_norm": 2.370612144470215, "learning_rate": 9.851478631379982e-05, "loss": 2.8077, "step": 34 },
    { "epoch": 0.1483050847457627, "grad_norm": 1.4520262479782104, "learning_rate": 9.829629131445342e-05, "loss": 2.4863, "step": 35 },
    { "epoch": 0.15254237288135594, "grad_norm": 1.3572436571121216, "learning_rate": 9.806308479691595e-05, "loss": 2.4152, "step": 36 },
    { "epoch": 0.15677966101694915, "grad_norm": 1.1999151706695557, "learning_rate": 9.781523779815179e-05, "loss": 2.3192, "step": 37 },
    { "epoch": 0.16101694915254236, "grad_norm": 2.2570760250091553, "learning_rate": 9.755282581475769e-05, "loss": 2.3025, "step": 38 },
    { "epoch": 0.1652542372881356, "grad_norm": 1.7871544361114502, "learning_rate": 9.727592877996585e-05, "loss": 2.0637, "step": 39 },
    { "epoch": 0.1694915254237288, "grad_norm": 1.6054717302322388, "learning_rate": 9.698463103929542e-05, "loss": 2.1793, "step": 40 },
    { "epoch": 0.17372881355932204, "grad_norm": 1.5231215953826904, "learning_rate": 9.667902132486009e-05, "loss": 2.2197, "step": 41 },
    { "epoch": 0.17796610169491525, "grad_norm": 1.3351085186004639, "learning_rate": 9.635919272833938e-05, "loss": 2.3261, "step": 42 },
    { "epoch": 0.18220338983050846, "grad_norm": 1.2223113775253296, "learning_rate": 9.602524267262203e-05, "loss": 2.1145, "step": 43 },
    { "epoch": 0.1864406779661017, "grad_norm": 1.6961467266082764, "learning_rate": 9.567727288213005e-05, "loss": 2.2667, "step": 44 },
    { "epoch": 0.1906779661016949, "grad_norm": 1.4688829183578491, "learning_rate": 9.53153893518325e-05, "loss": 2.572, "step": 45 },
    { "epoch": 0.19491525423728814, "grad_norm": 1.4838528633117676, "learning_rate": 9.493970231495835e-05, "loss": 2.3653, "step": 46 },
    { "epoch": 0.19915254237288135, "grad_norm": 2.620893955230713, "learning_rate": 9.45503262094184e-05, "loss": 2.1069, "step": 47 },
    { "epoch": 0.2033898305084746, "grad_norm": 1.6659537553787231, "learning_rate": 9.414737964294636e-05, "loss": 2.6601, "step": 48 },
    { "epoch": 0.2076271186440678, "grad_norm": 1.215427279472351, "learning_rate": 9.373098535696979e-05, "loss": 2.7816, "step": 49 },
    { "epoch": 0.211864406779661, "grad_norm": 2.305882453918457, "learning_rate": 9.330127018922194e-05, "loss": 3.6478, "step": 50 },
    { "epoch": 0.211864406779661, "eval_loss": 0.7533118724822998, "eval_runtime": 43.5309, "eval_samples_per_second": 9.143, "eval_steps_per_second": 4.571, "step": 50 },
    { "epoch": 0.21610169491525424, "grad_norm": 10.459978103637695, "learning_rate": 9.285836503510562e-05, "loss": 3.4556, "step": 51 },
    { "epoch": 0.22033898305084745, "grad_norm": 8.529092788696289, "learning_rate": 9.24024048078213e-05, "loss": 3.3025, "step": 52 },
    { "epoch": 0.2245762711864407, "grad_norm": 5.131667137145996, "learning_rate": 9.193352839727121e-05, "loss": 2.5715, "step": 53 },
    { "epoch": 0.2288135593220339, "grad_norm": 1.3166052103042603, "learning_rate": 9.145187862775209e-05, "loss": 2.4219, "step": 54 },
    { "epoch": 0.2330508474576271, "grad_norm": 2.2176835536956787, "learning_rate": 9.09576022144496e-05, "loss": 2.5179, "step": 55 },
    { "epoch": 0.23728813559322035, "grad_norm": 2.267003059387207, "learning_rate": 9.045084971874738e-05, "loss": 2.6671, "step": 56 },
    { "epoch": 0.24152542372881355, "grad_norm": 1.7872462272644043, "learning_rate": 8.993177550236464e-05, "loss": 2.6358, "step": 57 },
    { "epoch": 0.2457627118644068, "grad_norm": 1.1025911569595337, "learning_rate": 8.940053768033609e-05, "loss": 2.4904, "step": 58 },
    { "epoch": 0.25, "grad_norm": 1.2282601594924927, "learning_rate": 8.885729807284856e-05, "loss": 2.2931, "step": 59 },
    { "epoch": 0.2542372881355932, "grad_norm": 2.1205925941467285, "learning_rate": 8.83022221559489e-05, "loss": 2.4453, "step": 60 },
    { "epoch": 0.2584745762711864, "grad_norm": 1.7363345623016357, "learning_rate": 8.773547901113862e-05, "loss": 2.1719, "step": 61 },
    { "epoch": 0.2627118644067797, "grad_norm": 0.8407338857650757, "learning_rate": 8.715724127386972e-05, "loss": 2.2618, "step": 62 },
    { "epoch": 0.2669491525423729, "grad_norm": 1.0721104145050049, "learning_rate": 8.656768508095853e-05, "loss": 2.228, "step": 63 },
    { "epoch": 0.2711864406779661, "grad_norm": 1.6518641710281372, "learning_rate": 8.596699001693255e-05, "loss": 2.6224, "step": 64 },
    { "epoch": 0.2754237288135593, "grad_norm": 1.0283952951431274, "learning_rate": 8.535533905932738e-05, "loss": 2.2026, "step": 65 },
    { "epoch": 0.2796610169491525, "grad_norm": 0.8485298156738281, "learning_rate": 8.473291852294987e-05, "loss": 2.3136, "step": 66 },
    { "epoch": 0.2838983050847458, "grad_norm": 1.244860053062439, "learning_rate": 8.409991800312493e-05, "loss": 2.476, "step": 67 },
    { "epoch": 0.288135593220339, "grad_norm": 1.248189091682434, "learning_rate": 8.345653031794292e-05, "loss": 2.2362, "step": 68 },
    { "epoch": 0.2923728813559322, "grad_norm": 1.0057225227355957, "learning_rate": 8.280295144952536e-05, "loss": 2.514, "step": 69 },
    { "epoch": 0.2966101694915254, "grad_norm": 0.8649203777313232, "learning_rate": 8.213938048432697e-05, "loss": 2.1474, "step": 70 },
    { "epoch": 0.3008474576271186, "grad_norm": 0.601910412311554, "learning_rate": 8.146601955249188e-05, "loss": 2.0126, "step": 71 },
    { "epoch": 0.3050847457627119, "grad_norm": 0.7620223164558411, "learning_rate": 8.07830737662829e-05, "loss": 2.4381, "step": 72 },
    { "epoch": 0.3093220338983051, "grad_norm": 0.8148074746131897, "learning_rate": 8.009075115760243e-05, "loss": 2.1575, "step": 73 },
    { "epoch": 0.3135593220338983, "grad_norm": 0.7290023565292358, "learning_rate": 7.938926261462366e-05, "loss": 2.299, "step": 74 },
    { "epoch": 0.3177966101694915, "grad_norm": 0.8110753297805786, "learning_rate": 7.86788218175523e-05, "loss": 2.1619, "step": 75 },
    { "epoch": 0.3220338983050847, "grad_norm": 0.6752878427505493, "learning_rate": 7.795964517353735e-05, "loss": 2.2223, "step": 76 },
    { "epoch": 0.326271186440678, "grad_norm": 0.8308572173118591, "learning_rate": 7.723195175075136e-05, "loss": 1.9585, "step": 77 },
    { "epoch": 0.3305084745762712, "grad_norm": 0.8173496127128601, "learning_rate": 7.649596321166024e-05, "loss": 2.0674, "step": 78 },
    { "epoch": 0.3347457627118644, "grad_norm": 0.7854796648025513, "learning_rate": 7.575190374550272e-05, "loss": 2.0757, "step": 79 },
    { "epoch": 0.3389830508474576, "grad_norm": 0.6990013718605042, "learning_rate": 7.500000000000001e-05, "loss": 2.5116, "step": 80 },
    { "epoch": 0.3432203389830508, "grad_norm": 1.0307629108428955, "learning_rate": 7.424048101231686e-05, "loss": 2.2457, "step": 81 },
    { "epoch": 0.3474576271186441, "grad_norm": 0.9620252847671509, "learning_rate": 7.347357813929454e-05, "loss": 2.3633, "step": 82 },
    { "epoch": 0.3516949152542373, "grad_norm": 0.6838775277137756, "learning_rate": 7.269952498697734e-05, "loss": 2.5503, "step": 83 },
    { "epoch": 0.3559322033898305, "grad_norm": 0.6633744239807129, "learning_rate": 7.191855733945387e-05, "loss": 2.1999, "step": 84 },
    { "epoch": 0.3601694915254237, "grad_norm": 1.1613788604736328, "learning_rate": 7.113091308703498e-05, "loss": 2.5896, "step": 85 },
    { "epoch": 0.3644067796610169, "grad_norm": 0.7708940505981445, "learning_rate": 7.033683215379002e-05, "loss": 2.6019, "step": 86 },
    { "epoch": 0.3686440677966102, "grad_norm": 1.6062999963760376, "learning_rate": 6.953655642446368e-05, "loss": 2.0793, "step": 87 },
    { "epoch": 0.3728813559322034, "grad_norm": 0.7241120934486389, "learning_rate": 6.873032967079561e-05, "loss": 2.402, "step": 88 },
    { "epoch": 0.3771186440677966, "grad_norm": 1.5668368339538574, "learning_rate": 6.7918397477265e-05, "loss": 2.532, "step": 89 },
    { "epoch": 0.3813559322033898, "grad_norm": 0.7401297092437744, "learning_rate": 6.710100716628344e-05, "loss": 2.2997, "step": 90 },
    { "epoch": 0.3855932203389831, "grad_norm": 0.7686449289321899, "learning_rate": 6.627840772285784e-05, "loss": 2.0515, "step": 91 },
    { "epoch": 0.3898305084745763, "grad_norm": 0.9585121273994446, "learning_rate": 6.545084971874738e-05, "loss": 2.215, "step": 92 },
    { "epoch": 0.3940677966101695, "grad_norm": 0.6215618848800659, "learning_rate": 6.461858523613684e-05, "loss": 2.497, "step": 93 },
    { "epoch": 0.3983050847457627, "grad_norm": 0.5972080826759338, "learning_rate": 6.378186779084995e-05, "loss": 2.0889, "step": 94 },
    { "epoch": 0.4025423728813559, "grad_norm": 0.8943988680839539, "learning_rate": 6.294095225512603e-05, "loss": 2.1595, "step": 95 },
    { "epoch": 0.4067796610169492, "grad_norm": 0.8203272223472595, "learning_rate": 6.209609477998338e-05, "loss": 1.8233, "step": 96 },
    { "epoch": 0.4110169491525424, "grad_norm": 0.5806548595428467, "learning_rate": 6.124755271719325e-05, "loss": 2.298, "step": 97 },
    { "epoch": 0.4152542372881356, "grad_norm": 0.7718837261199951, "learning_rate": 6.0395584540887963e-05, "loss": 2.623, "step": 98 },
    { "epoch": 0.4194915254237288, "grad_norm": 1.1169939041137695, "learning_rate": 5.9540449768827246e-05, "loss": 3.1457, "step": 99 },
    { "epoch": 0.423728813559322, "grad_norm": 1.5888675451278687, "learning_rate": 5.868240888334653e-05, "loss": 3.7138, "step": 100 },
    { "epoch": 0.423728813559322, "eval_loss": 0.6284539103507996, "eval_runtime": 43.533, "eval_samples_per_second": 9.142, "eval_steps_per_second": 4.571, "step": 100 },
    { "epoch": 0.4279661016949153, "grad_norm": 3.528254508972168, "learning_rate": 5.782172325201155e-05, "loss": 2.6844, "step": 101 },
    { "epoch": 0.4322033898305085, "grad_norm": 4.093210697174072, "learning_rate": 5.695865504800327e-05, "loss": 2.7143, "step": 102 },
    { "epoch": 0.4364406779661017, "grad_norm": 3.43405818939209, "learning_rate": 5.6093467170257374e-05, "loss": 2.6711, "step": 103 },
    { "epoch": 0.4406779661016949, "grad_norm": 2.003894567489624, "learning_rate": 5.522642316338268e-05, "loss": 2.3973, "step": 104 },
    { "epoch": 0.4449152542372881, "grad_norm": 1.3836109638214111, "learning_rate": 5.435778713738292e-05, "loss": 2.6954, "step": 105 },
    { "epoch": 0.4491525423728814, "grad_norm": 0.8960191011428833, "learning_rate": 5.348782368720626e-05, "loss": 2.3673, "step": 106 },
    { "epoch": 0.4533898305084746, "grad_norm": 1.0750250816345215, "learning_rate": 5.26167978121472e-05, "loss": 2.3827, "step": 107 },
    { "epoch": 0.4576271186440678, "grad_norm": 1.209937572479248, "learning_rate": 5.174497483512506e-05, "loss": 2.5107, "step": 108 },
    { "epoch": 0.461864406779661, "grad_norm": 1.4963562488555908, "learning_rate": 5.0872620321864185e-05, "loss": 2.6218, "step": 109 },
    { "epoch": 0.4661016949152542, "grad_norm": 1.4280909299850464, "learning_rate": 5e-05, "loss": 2.5591, "step": 110 },
    { "epoch": 0.4703389830508475, "grad_norm": 1.069968819618225, "learning_rate": 4.912737967813583e-05, "loss": 2.238, "step": 111 },
    { "epoch": 0.4745762711864407, "grad_norm": 0.7898651957511902, "learning_rate": 4.825502516487497e-05, "loss": 2.1143, "step": 112 },
    { "epoch": 0.4788135593220339, "grad_norm": 0.6083970069885254, "learning_rate": 4.738320218785281e-05, "loss": 2.3231, "step": 113 },
    { "epoch": 0.4830508474576271, "grad_norm": 1.1110987663269043, "learning_rate": 4.6512176312793736e-05, "loss": 2.3884, "step": 114 },
    { "epoch": 0.4872881355932203, "grad_norm": 0.694742739200592, "learning_rate": 4.564221286261709e-05, "loss": 2.2887, "step": 115 },
    { "epoch": 0.4915254237288136, "grad_norm": 0.9567046165466309, "learning_rate": 4.477357683661734e-05, "loss": 2.2007, "step": 116 },
    { "epoch": 0.4957627118644068, "grad_norm": 0.8701348900794983, "learning_rate": 4.390653282974264e-05, "loss": 1.9919, "step": 117 },
    { "epoch": 0.5, "grad_norm": 0.9415125250816345, "learning_rate": 4.3041344951996746e-05, "loss": 2.2197, "step": 118 },
    { "epoch": 0.5042372881355932, "grad_norm": 0.6797757744789124, "learning_rate": 4.2178276747988446e-05, "loss": 2.1456, "step": 119 },
    { "epoch": 0.5084745762711864, "grad_norm": 0.8693689703941345, "learning_rate": 4.131759111665349e-05, "loss": 2.1877, "step": 120 },
    { "epoch": 0.5127118644067796, "grad_norm": 0.6730765104293823, "learning_rate": 4.045955023117276e-05, "loss": 2.0728, "step": 121 },
    { "epoch": 0.5169491525423728, "grad_norm": 0.8778936266899109, "learning_rate": 3.960441545911204e-05, "loss": 2.304, "step": 122 },
    { "epoch": 0.5211864406779662, "grad_norm": 0.6395032405853271, "learning_rate": 3.875244728280676e-05, "loss": 2.2309, "step": 123 },
    { "epoch": 0.5254237288135594, "grad_norm": 0.7323006391525269, "learning_rate": 3.790390522001662e-05, "loss": 2.4723, "step": 124 },
    { "epoch": 0.5296610169491526, "grad_norm": 0.7824187278747559, "learning_rate": 3.705904774487396e-05, "loss": 2.2452, "step": 125 },
    { "epoch": 0.5338983050847458, "grad_norm": 0.854575514793396, "learning_rate": 3.6218132209150045e-05, "loss": 2.2604, "step": 126 },
    { "epoch": 0.538135593220339, "grad_norm": 1.17578125, "learning_rate": 3.5381414763863166e-05, "loss": 2.7593, "step": 127 },
    { "epoch": 0.5423728813559322, "grad_norm": 0.9400164484977722, "learning_rate": 3.4549150281252636e-05, "loss": 1.8751, "step": 128 },
    { "epoch": 0.5466101694915254, "grad_norm": 0.8660760521888733, "learning_rate": 3.372159227714218e-05, "loss": 2.0508, "step": 129 },
    { "epoch": 0.5508474576271186, "grad_norm": 0.6780417561531067, "learning_rate": 3.289899283371657e-05, "loss": 2.2132, "step": 130 },
    { "epoch": 0.5550847457627118, "grad_norm": 0.8800328969955444, "learning_rate": 3.2081602522734986e-05, "loss": 2.0362, "step": 131 },
    { "epoch": 0.559322033898305, "grad_norm": 0.6752195954322815, "learning_rate": 3.12696703292044e-05, "loss": 2.4794, "step": 132 },
    { "epoch": 0.5635593220338984, "grad_norm": 0.6003003716468811, "learning_rate": 3.046344357553632e-05, "loss": 2.3735, "step": 133 },
    { "epoch": 0.5677966101694916, "grad_norm": 0.9153070449829102, "learning_rate": 2.9663167846209998e-05, "loss": 2.4439, "step": 134 },
    { "epoch": 0.5720338983050848, "grad_norm": 1.3099541664123535, "learning_rate": 2.886908691296504e-05, "loss": 1.9584, "step": 135 },
    { "epoch": 0.576271186440678, "grad_norm": 0.6542685031890869, "learning_rate": 2.8081442660546125e-05, "loss": 2.0345, "step": 136 },
    { "epoch": 0.5805084745762712, "grad_norm": 0.6798728704452515, "learning_rate": 2.7300475013022663e-05, "loss": 2.5846, "step": 137 },
    { "epoch": 0.5847457627118644, "grad_norm": 0.6184638142585754, "learning_rate": 2.6526421860705473e-05, "loss": 1.7818, "step": 138 },
    { "epoch": 0.5889830508474576, "grad_norm": 0.6993879079818726, "learning_rate": 2.575951898768315e-05, "loss": 2.295, "step": 139 },
    { "epoch": 0.5932203389830508, "grad_norm": 0.8818557262420654, "learning_rate": 2.500000000000001e-05, "loss": 2.26, "step": 140 },
    { "epoch": 0.597457627118644, "grad_norm": 0.7364823818206787, "learning_rate": 2.4248096254497288e-05, "loss": 1.9119, "step": 141 },
    { "epoch": 0.6016949152542372, "grad_norm": 0.8221387267112732, "learning_rate": 2.350403678833976e-05, "loss": 1.935, "step": 142 },
    { "epoch": 0.6059322033898306, "grad_norm": 0.719155490398407, "learning_rate": 2.2768048249248648e-05, "loss": 1.978, "step": 143 },
    { "epoch": 0.6101694915254238, "grad_norm": 0.7184621095657349, "learning_rate": 2.2040354826462668e-05, "loss": 1.9602, "step": 144 },
    { "epoch": 0.614406779661017, "grad_norm": 0.7189619541168213, "learning_rate": 2.132117818244771e-05, "loss": 2.6127, "step": 145 },
    { "epoch": 0.6186440677966102, "grad_norm": 0.5349248647689819, "learning_rate": 2.061073738537635e-05, "loss": 1.8459, "step": 146 },
    { "epoch": 0.6228813559322034, "grad_norm": 0.6641550660133362, "learning_rate": 1.9909248842397584e-05, "loss": 2.0695, "step": 147 },
    { "epoch": 0.6271186440677966, "grad_norm": 0.6735299229621887, "learning_rate": 1.9216926233717085e-05, "loss": 1.8445, "step": 148 },
    { "epoch": 0.6313559322033898, "grad_norm": 0.7525951862335205, "learning_rate": 1.8533980447508137e-05, "loss": 2.0764, "step": 149 },
    { "epoch": 0.635593220338983, "grad_norm": 1.8119826316833496, "learning_rate": 1.7860619515673033e-05, "loss": 2.5421, "step": 150 },
    { "epoch": 0.635593220338983, "eval_loss": 0.5973209738731384, "eval_runtime": 43.5624, "eval_samples_per_second": 9.136, "eval_steps_per_second": 4.568, "step": 150 }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.81890674556928e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}