{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.997184174103947,
  "global_step": 62800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 5e-05,
      "loss": 3.2345,
      "step": 500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.959909555958242e-05,
      "loss": 1.994,
      "step": 1000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.919819111916484e-05,
      "loss": 1.6753,
      "step": 1500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8797286678747256e-05,
      "loss": 1.5453,
      "step": 2000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8396382238329674e-05,
      "loss": 1.4013,
      "step": 2500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.799547779791209e-05,
      "loss": 1.373,
      "step": 3000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.759457335749451e-05,
      "loss": 1.2924,
      "step": 3500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.719366891707693e-05,
      "loss": 1.2266,
      "step": 4000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6792764476659346e-05,
      "loss": 1.1925,
      "step": 4500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.6391860036241764e-05,
      "loss": 1.22,
      "step": 5000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.599095559582418e-05,
      "loss": 1.0955,
      "step": 5500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.55900511554066e-05,
      "loss": 1.1189,
      "step": 6000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.518914671498902e-05,
      "loss": 1.0632,
      "step": 6500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4788242274571436e-05,
      "loss": 1.0091,
      "step": 7000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.4387337834153854e-05,
      "loss": 1.0058,
      "step": 7500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.398643339373627e-05,
      "loss": 1.0111,
      "step": 8000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.358552895331869e-05,
      "loss": 0.8988,
      "step": 8500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.318462451290111e-05,
      "loss": 0.9305,
      "step": 9000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2783720072483526e-05,
      "loss": 0.9372,
      "step": 9500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.2382815632065944e-05,
      "loss": 0.9176,
      "step": 10000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.198191119164836e-05,
      "loss": 0.8878,
      "step": 10500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.158100675123078e-05,
      "loss": 0.872,
      "step": 11000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.11801023108132e-05,
      "loss": 0.8525,
      "step": 11500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0779197870395616e-05,
      "loss": 0.8511,
      "step": 12000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.0378293429978034e-05,
      "loss": 0.828,
      "step": 12500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.997738898956045e-05,
      "loss": 0.8198,
      "step": 13000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.957648454914287e-05,
      "loss": 0.8225,
      "step": 13500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.917558010872529e-05,
      "loss": 0.7818,
      "step": 14000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8774675668307706e-05,
      "loss": 0.7939,
      "step": 14500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8373771227890124e-05,
      "loss": 0.7799,
      "step": 15000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7972866787472535e-05,
      "loss": 0.7588,
      "step": 15500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.757196234705495e-05,
      "loss": 0.8004,
      "step": 16000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.717105790663737e-05,
      "loss": 0.7468,
      "step": 16500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.677015346621979e-05,
      "loss": 0.7574,
      "step": 17000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.636924902580221e-05,
      "loss": 0.7193,
      "step": 17500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5968344585384625e-05,
      "loss": 0.7122,
      "step": 18000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.556744014496704e-05,
      "loss": 0.7215,
      "step": 18500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.516653570454946e-05,
      "loss": 0.6629,
      "step": 19000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.476563126413188e-05,
      "loss": 0.7112,
      "step": 19500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.43647268237143e-05,
      "loss": 0.7116,
      "step": 20000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3963822383296715e-05,
      "loss": 0.6947,
      "step": 20500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.356291794287913e-05,
      "loss": 0.6492,
      "step": 21000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.316201350246155e-05,
      "loss": 0.5083,
      "step": 21500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.276110906204397e-05,
      "loss": 0.536,
      "step": 22000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.236020462162639e-05,
      "loss": 0.5196,
      "step": 22500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1959300181208805e-05,
      "loss": 0.5637,
      "step": 23000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.155839574079122e-05,
      "loss": 0.5338,
      "step": 23500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.115749130037364e-05,
      "loss": 0.5052,
      "step": 24000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.075658685995606e-05,
      "loss": 0.5186,
      "step": 24500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0355682419538477e-05,
      "loss": 0.5168,
      "step": 25000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9954777979120895e-05,
      "loss": 0.4931,
      "step": 25500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9553873538703313e-05,
      "loss": 0.4993,
      "step": 26000
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.915296909828573e-05,
      "loss": 0.5286,
      "step": 26500
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.875206465786815e-05,
      "loss": 0.4865,
      "step": 27000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8351160217450567e-05,
      "loss": 0.503,
      "step": 27500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7950255777032985e-05,
      "loss": 0.5093,
      "step": 28000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7549351336615403e-05,
      "loss": 0.4978,
      "step": 28500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.714844689619782e-05,
      "loss": 0.5042,
      "step": 29000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.674754245578024e-05,
      "loss": 0.4909,
      "step": 29500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6346638015362657e-05,
      "loss": 0.4861,
      "step": 30000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5945733574945075e-05,
      "loss": 0.472,
      "step": 30500
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5544829134527493e-05,
      "loss": 0.4673,
      "step": 31000
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.514392469410991e-05,
      "loss": 0.4747,
      "step": 31500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4743020253692332e-05,
      "loss": 0.4396,
      "step": 32000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.434211581327475e-05,
      "loss": 0.4675,
      "step": 32500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3941211372857168e-05,
      "loss": 0.4421,
      "step": 33000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3540306932439586e-05,
      "loss": 0.4189,
      "step": 33500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3139402492022004e-05,
      "loss": 0.4305,
      "step": 34000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.273849805160442e-05,
      "loss": 0.4332,
      "step": 34500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2337593611186837e-05,
      "loss": 0.4314,
      "step": 35000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1936689170769255e-05,
      "loss": 0.4138,
      "step": 35500
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1535784730351673e-05,
      "loss": 0.4179,
      "step": 36000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.113488028993409e-05,
      "loss": 0.4104,
      "step": 36500
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.073397584951651e-05,
      "loss": 0.4078,
      "step": 37000
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0333071409098927e-05,
      "loss": 0.4113,
      "step": 37500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9932166968681345e-05,
      "loss": 0.3828,
      "step": 38000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9531262528263763e-05,
      "loss": 0.4095,
      "step": 38500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.913035808784618e-05,
      "loss": 0.3782,
      "step": 39000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.87294536474286e-05,
      "loss": 0.3494,
      "step": 39500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8328549207011017e-05,
      "loss": 0.3758,
      "step": 40000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7927644766593435e-05,
      "loss": 0.3628,
      "step": 40500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7526740326175853e-05,
      "loss": 0.3751,
      "step": 41000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.712583588575827e-05,
      "loss": 0.3474,
      "step": 41500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.672493144534069e-05,
      "loss": 0.3201,
      "step": 42000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6324027004923107e-05,
      "loss": 0.237,
      "step": 42500
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5923122564505525e-05,
      "loss": 0.2274,
      "step": 43000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5522218124087943e-05,
      "loss": 0.2453,
      "step": 43500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.512131368367036e-05,
      "loss": 0.2502,
      "step": 44000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4720409243252779e-05,
      "loss": 0.2575,
      "step": 44500
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4319504802835197e-05,
      "loss": 0.2533,
      "step": 45000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3918600362417613e-05,
      "loss": 0.2496,
      "step": 45500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3517695922000031e-05,
      "loss": 0.2461,
      "step": 46000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3116791481582449e-05,
      "loss": 0.2509,
      "step": 46500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2715887041164867e-05,
      "loss": 0.2343,
      "step": 47000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2314982600747287e-05,
      "loss": 0.2549,
      "step": 47500
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1914078160329705e-05,
      "loss": 0.2382,
      "step": 48000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1513173719912123e-05,
      "loss": 0.2364,
      "step": 48500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.111226927949454e-05,
      "loss": 0.2334,
      "step": 49000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0711364839076959e-05,
      "loss": 0.2291,
      "step": 49500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0310460398659377e-05,
      "loss": 0.2187,
      "step": 50000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.909555958241795e-06,
      "loss": 0.1919,
      "step": 50500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.50865151782421e-06,
      "loss": 0.2364,
      "step": 51000
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.107747077406629e-06,
      "loss": 0.2045,
      "step": 51500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.706842636989047e-06,
      "loss": 0.2117,
      "step": 52000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.305938196571465e-06,
      "loss": 0.2266,
      "step": 52500
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.905033756153883e-06,
      "loss": 0.2023,
      "step": 53000
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.504129315736301e-06,
      "loss": 0.2032,
      "step": 53500
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.103224875318719e-06,
      "loss": 0.1957,
      "step": 54000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.702320434901137e-06,
      "loss": 0.1869,
      "step": 54500
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.301415994483555e-06,
      "loss": 0.2016,
      "step": 55000
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.9005115540659736e-06,
      "loss": 0.1831,
      "step": 55500
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.499607113648391e-06,
      "loss": 0.1707,
      "step": 56000
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.098702673230809e-06,
      "loss": 0.1902,
      "step": 56500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.697798232813227e-06,
      "loss": 0.1776,
      "step": 57000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.296893792395645e-06,
      "loss": 0.2065,
      "step": 57500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.895989351978063e-06,
      "loss": 0.1955,
      "step": 58000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.4950849115604802e-06,
      "loss": 0.1747,
      "step": 58500
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.0941804711428986e-06,
      "loss": 0.198,
      "step": 59000
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.6932760307253166e-06,
      "loss": 0.1885,
      "step": 59500
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.292371590307734e-06,
      "loss": 0.1537,
      "step": 60000
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.8914671498901522e-06,
      "loss": 0.1657,
      "step": 60500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.4905627094725702e-06,
      "loss": 0.1631,
      "step": 61000
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.0896582690549882e-06,
      "loss": 0.1417,
      "step": 61500
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.88753828637406e-07,
      "loss": 0.157,
      "step": 62000
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.878493882198239e-07,
      "loss": 0.1536,
      "step": 62500
    }
  ],
  "max_steps": 62859,
  "num_train_epochs": 3,
  "total_flos": 4.536353409695568e+16,
  "trial_name": null,
  "trial_params": null
}
|