|
{ |
|
"best_metric": 0.21080316603183746, |
|
"best_model_checkpoint": "./models/bart/bart_balanced_subset_without_title_less_than_1024_double_comment/checkpoint-431935", |
|
"epoch": 8.0, |
|
"global_step": 691096, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.9996141394731464e-05, |
|
"loss": 1.4863, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.9992282789462922e-05, |
|
"loss": 0.2641, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.9988424184194384e-05, |
|
"loss": 0.246, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.9984565578925843e-05, |
|
"loss": 0.2419, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9980706973657305e-05, |
|
"loss": 0.2478, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9976848368388764e-05, |
|
"loss": 0.2509, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9972989763120226e-05, |
|
"loss": 0.2526, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9969131157851684e-05, |
|
"loss": 0.2584, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9965272552583146e-05, |
|
"loss": 0.2449, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9961413947314605e-05, |
|
"loss": 0.2346, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9957555342046067e-05, |
|
"loss": 0.239, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9953696736777526e-05, |
|
"loss": 0.2449, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9949838131508988e-05, |
|
"loss": 0.2428, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9945979526240446e-05, |
|
"loss": 0.2435, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9942120920971908e-05, |
|
"loss": 0.2322, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9938262315703367e-05, |
|
"loss": 0.2374, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.993440371043483e-05, |
|
"loss": 0.2335, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9930545105166287e-05, |
|
"loss": 0.2344, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.992668649989775e-05, |
|
"loss": 0.2241, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9922827894629208e-05, |
|
"loss": 0.2374, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.991896928936067e-05, |
|
"loss": 0.2461, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.991511068409213e-05, |
|
"loss": 0.243, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.991125207882359e-05, |
|
"loss": 0.2309, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.990739347355505e-05, |
|
"loss": 0.2403, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.990353486828651e-05, |
|
"loss": 0.2425, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.989967626301797e-05, |
|
"loss": 0.2204, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9895817657749432e-05, |
|
"loss": 0.237, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.989195905248089e-05, |
|
"loss": 0.2314, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9888100447212353e-05, |
|
"loss": 0.2321, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.988424184194381e-05, |
|
"loss": 0.2366, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9880383236675273e-05, |
|
"loss": 0.2368, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9876524631406732e-05, |
|
"loss": 0.2292, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9872666026138194e-05, |
|
"loss": 0.2334, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9868807420869653e-05, |
|
"loss": 0.237, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9864948815601115e-05, |
|
"loss": 0.237, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9861090210332573e-05, |
|
"loss": 0.2338, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9857231605064035e-05, |
|
"loss": 0.2305, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9853372999795494e-05, |
|
"loss": 0.2334, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9849514394526956e-05, |
|
"loss": 0.2396, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9845655789258415e-05, |
|
"loss": 0.2432, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9841797183989877e-05, |
|
"loss": 0.2294, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9837938578721335e-05, |
|
"loss": 0.2305, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9834079973452797e-05, |
|
"loss": 0.2415, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9830221368184256e-05, |
|
"loss": 0.2412, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9826362762915718e-05, |
|
"loss": 0.2329, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9822504157647176e-05, |
|
"loss": 0.2354, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.981864555237864e-05, |
|
"loss": 0.2372, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9814786947110097e-05, |
|
"loss": 0.2401, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.981092834184156e-05, |
|
"loss": 0.2333, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9807069736573018e-05, |
|
"loss": 0.2301, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.980321113130448e-05, |
|
"loss": 0.2423, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.979935252603594e-05, |
|
"loss": 0.2171, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.97954939207674e-05, |
|
"loss": 0.2347, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.979163531549886e-05, |
|
"loss": 0.2294, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.978777671023032e-05, |
|
"loss": 0.2286, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.978391810496178e-05, |
|
"loss": 0.2355, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9780059499693242e-05, |
|
"loss": 0.2211, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.97762008944247e-05, |
|
"loss": 0.2222, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9772342289156162e-05, |
|
"loss": 0.2264, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.976848368388762e-05, |
|
"loss": 0.2388, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9764625078619083e-05, |
|
"loss": 0.219, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9760766473350545e-05, |
|
"loss": 0.2357, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9756907868082004e-05, |
|
"loss": 0.227, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9753049262813466e-05, |
|
"loss": 0.2228, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9749190657544924e-05, |
|
"loss": 0.2368, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9745332052276386e-05, |
|
"loss": 0.2261, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9741473447007845e-05, |
|
"loss": 0.2299, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9737614841739307e-05, |
|
"loss": 0.2293, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9733756236470766e-05, |
|
"loss": 0.2346, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9729897631202228e-05, |
|
"loss": 0.2277, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9726039025933686e-05, |
|
"loss": 0.2304, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9722180420665148e-05, |
|
"loss": 0.2336, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9718321815396607e-05, |
|
"loss": 0.243, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.971446321012807e-05, |
|
"loss": 0.2327, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9710604604859528e-05, |
|
"loss": 0.2234, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.970674599959099e-05, |
|
"loss": 0.2265, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9702887394322448e-05, |
|
"loss": 0.2273, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.969902878905391e-05, |
|
"loss": 0.2335, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.969517018378537e-05, |
|
"loss": 0.2294, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.969131157851683e-05, |
|
"loss": 0.2265, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.968745297324829e-05, |
|
"loss": 0.2246, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.968359436797975e-05, |
|
"loss": 0.2334, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.967973576271121e-05, |
|
"loss": 0.2305, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9675877157442672e-05, |
|
"loss": 0.2199, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9672018552174134e-05, |
|
"loss": 0.2193, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9668159946905593e-05, |
|
"loss": 0.233, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9664301341637055e-05, |
|
"loss": 0.227, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9660442736368513e-05, |
|
"loss": 0.2221, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9656584131099975e-05, |
|
"loss": 0.2312, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9652725525831434e-05, |
|
"loss": 0.2187, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9648866920562896e-05, |
|
"loss": 0.2156, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9645008315294355e-05, |
|
"loss": 0.2227, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9641149710025817e-05, |
|
"loss": 0.2205, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9637291104757275e-05, |
|
"loss": 0.2324, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9633432499488737e-05, |
|
"loss": 0.2159, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9629573894220196e-05, |
|
"loss": 0.225, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9625715288951658e-05, |
|
"loss": 0.2124, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9621856683683117e-05, |
|
"loss": 0.231, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.961799807841458e-05, |
|
"loss": 0.2264, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9614139473146037e-05, |
|
"loss": 0.2243, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.96102808678775e-05, |
|
"loss": 0.2271, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9606422262608958e-05, |
|
"loss": 0.2227, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.960256365734042e-05, |
|
"loss": 0.234, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.959870505207188e-05, |
|
"loss": 0.2233, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.959484644680334e-05, |
|
"loss": 0.2253, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9590987841534803e-05, |
|
"loss": 0.2229, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.958712923626626e-05, |
|
"loss": 0.2359, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9583270630997723e-05, |
|
"loss": 0.2232, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9579412025729182e-05, |
|
"loss": 0.2238, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.9575553420460644e-05, |
|
"loss": 0.2239, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.9571694815192103e-05, |
|
"loss": 0.2401, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9567836209923565e-05, |
|
"loss": 0.2299, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9563977604655023e-05, |
|
"loss": 0.223, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9560118999386485e-05, |
|
"loss": 0.2191, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9556260394117944e-05, |
|
"loss": 0.2291, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9552401788849406e-05, |
|
"loss": 0.2176, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9548543183580864e-05, |
|
"loss": 0.2133, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9544684578312326e-05, |
|
"loss": 0.218, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.9540825973043785e-05, |
|
"loss": 0.2279, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.9536967367775247e-05, |
|
"loss": 0.2174, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.9533108762506706e-05, |
|
"loss": 0.2236, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9529250157238168e-05, |
|
"loss": 0.2231, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.9525391551969626e-05, |
|
"loss": 0.2193, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.952153294670109e-05, |
|
"loss": 0.213, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.9517674341432547e-05, |
|
"loss": 0.2459, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.951381573616401e-05, |
|
"loss": 0.2233, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9509957130895468e-05, |
|
"loss": 0.2391, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.950609852562693e-05, |
|
"loss": 0.2248, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.950223992035839e-05, |
|
"loss": 0.225, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.949838131508985e-05, |
|
"loss": 0.2222, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.949452270982131e-05, |
|
"loss": 0.2346, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.949066410455277e-05, |
|
"loss": 0.2231, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.948680549928423e-05, |
|
"loss": 0.2254, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.948294689401569e-05, |
|
"loss": 0.228, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.947908828874715e-05, |
|
"loss": 0.2155, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.9475229683478612e-05, |
|
"loss": 0.228, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.9471371078210074e-05, |
|
"loss": 0.2197, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9467512472941533e-05, |
|
"loss": 0.2074, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9463653867672995e-05, |
|
"loss": 0.2183, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9459795262404454e-05, |
|
"loss": 0.2262, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.9455936657135916e-05, |
|
"loss": 0.2234, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.9452078051867374e-05, |
|
"loss": 0.2309, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.9448219446598836e-05, |
|
"loss": 0.2221, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.9444360841330295e-05, |
|
"loss": 0.2099, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.9440502236061757e-05, |
|
"loss": 0.224, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.9436643630793215e-05, |
|
"loss": 0.2205, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.9432785025524678e-05, |
|
"loss": 0.2203, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9428926420256136e-05, |
|
"loss": 0.231, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9425067814987598e-05, |
|
"loss": 0.2247, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9421209209719057e-05, |
|
"loss": 0.2291, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.941735060445052e-05, |
|
"loss": 0.2284, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.9413491999181977e-05, |
|
"loss": 0.2292, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.940963339391344e-05, |
|
"loss": 0.2257, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.9405774788644898e-05, |
|
"loss": 0.2409, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.940191618337636e-05, |
|
"loss": 0.2293, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.939805757810782e-05, |
|
"loss": 0.2214, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.939419897283928e-05, |
|
"loss": 0.2246, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.939034036757074e-05, |
|
"loss": 0.2216, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.93864817623022e-05, |
|
"loss": 0.2226, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.938262315703366e-05, |
|
"loss": 0.2324, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.9378764551765122e-05, |
|
"loss": 0.2224, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.937490594649658e-05, |
|
"loss": 0.2378, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9371047341228043e-05, |
|
"loss": 0.2169, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.93671887359595e-05, |
|
"loss": 0.2206, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9363330130690963e-05, |
|
"loss": 0.2191, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9359471525422422e-05, |
|
"loss": 0.2363, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9355612920153884e-05, |
|
"loss": 0.2175, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9351754314885343e-05, |
|
"loss": 0.21, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9347895709616805e-05, |
|
"loss": 0.2203, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9344037104348263e-05, |
|
"loss": 0.2166, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.9340178499079725e-05, |
|
"loss": 0.211, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.9336319893811184e-05, |
|
"loss": 0.2254, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_bart_score": -6.78, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21441325545310974, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.1571033255826546, |
|
"rouge2": 0.024564791577206757, |
|
"rougeL": 0.12796016101205676, |
|
"rougeLsum": 0.12797893703375637 |
|
}, |
|
"eval_runtime": 5950.6237, |
|
"eval_samples_per_second": 3.629, |
|
"eval_simple_accuracy": 0.74, |
|
"eval_steps_per_second": 1.815, |
|
"step": 86387 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.9332461288542646e-05, |
|
"loss": 0.2202, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.9328602683274105e-05, |
|
"loss": 0.214, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.9324744078005567e-05, |
|
"loss": 0.2221, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.9320885472737025e-05, |
|
"loss": 0.2164, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.9317026867468487e-05, |
|
"loss": 0.2125, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.9313168262199946e-05, |
|
"loss": 0.221, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.9309309656931408e-05, |
|
"loss": 0.1987, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.9305451051662866e-05, |
|
"loss": 0.213, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.930159244639433e-05, |
|
"loss": 0.2199, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.9297733841125787e-05, |
|
"loss": 0.2154, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.929387523585725e-05, |
|
"loss": 0.2205, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.9290016630588708e-05, |
|
"loss": 0.2095, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.928615802532017e-05, |
|
"loss": 0.2091, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.928229942005163e-05, |
|
"loss": 0.2237, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.927844081478309e-05, |
|
"loss": 0.2231, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.927458220951455e-05, |
|
"loss": 0.2099, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.927072360424601e-05, |
|
"loss": 0.2142, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.926686499897747e-05, |
|
"loss": 0.2092, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.926300639370893e-05, |
|
"loss": 0.2152, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.925914778844039e-05, |
|
"loss": 0.2156, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.9255289183171852e-05, |
|
"loss": 0.2097, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.925143057790331e-05, |
|
"loss": 0.2114, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.9247571972634773e-05, |
|
"loss": 0.2254, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.924371336736623e-05, |
|
"loss": 0.2287, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.9239854762097694e-05, |
|
"loss": 0.2145, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.9235996156829152e-05, |
|
"loss": 0.2137, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.9232137551560614e-05, |
|
"loss": 0.213, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.9228278946292073e-05, |
|
"loss": 0.2161, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.9224420341023535e-05, |
|
"loss": 0.2103, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9220561735754994e-05, |
|
"loss": 0.2161, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9216703130486456e-05, |
|
"loss": 0.2086, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.9212844525217914e-05, |
|
"loss": 0.2084, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.9208985919949376e-05, |
|
"loss": 0.2172, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.9205127314680838e-05, |
|
"loss": 0.2099, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.9201268709412297e-05, |
|
"loss": 0.2137, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.919741010414376e-05, |
|
"loss": 0.2189, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.9193551498875217e-05, |
|
"loss": 0.2327, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.918969289360668e-05, |
|
"loss": 0.2216, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.9185834288338138e-05, |
|
"loss": 0.2269, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.91819756830696e-05, |
|
"loss": 0.2005, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.917811707780106e-05, |
|
"loss": 0.2256, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.917425847253252e-05, |
|
"loss": 0.216, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.917039986726398e-05, |
|
"loss": 0.2208, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.916654126199544e-05, |
|
"loss": 0.2083, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.91626826567269e-05, |
|
"loss": 0.2169, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.9158824051458362e-05, |
|
"loss": 0.2078, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.915496544618982e-05, |
|
"loss": 0.2169, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.9151106840921283e-05, |
|
"loss": 0.2129, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.914724823565274e-05, |
|
"loss": 0.2267, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.9143389630384203e-05, |
|
"loss": 0.2084, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.9139531025115662e-05, |
|
"loss": 0.2116, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.9135672419847124e-05, |
|
"loss": 0.2181, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.9131813814578583e-05, |
|
"loss": 0.2233, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.9127955209310045e-05, |
|
"loss": 0.2162, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.9124096604041503e-05, |
|
"loss": 0.2271, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.9120237998772965e-05, |
|
"loss": 0.2173, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.9116379393504424e-05, |
|
"loss": 0.2141, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.9112520788235886e-05, |
|
"loss": 0.2227, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.9108662182967345e-05, |
|
"loss": 0.2234, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.9104803577698807e-05, |
|
"loss": 0.248, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.9100944972430265e-05, |
|
"loss": 0.2096, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.9097086367161727e-05, |
|
"loss": 0.2293, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.9093227761893186e-05, |
|
"loss": 0.2123, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.9089369156624648e-05, |
|
"loss": 0.2301, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.9085510551356107e-05, |
|
"loss": 0.2186, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.908165194608757e-05, |
|
"loss": 0.2147, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.9077793340819027e-05, |
|
"loss": 0.2113, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.907393473555049e-05, |
|
"loss": 0.2166, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.9070076130281948e-05, |
|
"loss": 0.2179, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.906621752501341e-05, |
|
"loss": 0.2195, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.906235891974487e-05, |
|
"loss": 0.2145, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.905850031447633e-05, |
|
"loss": 0.2066, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.905464170920779e-05, |
|
"loss": 0.2126, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.905078310393925e-05, |
|
"loss": 0.213, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.904692449867071e-05, |
|
"loss": 0.2131, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.9043065893402172e-05, |
|
"loss": 0.2212, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.903920728813363e-05, |
|
"loss": 0.2128, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.9035348682865092e-05, |
|
"loss": 0.21, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.9031490077596554e-05, |
|
"loss": 0.2242, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.9027631472328013e-05, |
|
"loss": 0.2108, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.9023772867059475e-05, |
|
"loss": 0.2146, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.9019914261790934e-05, |
|
"loss": 0.2113, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.9016055656522396e-05, |
|
"loss": 0.2182, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.9012197051253854e-05, |
|
"loss": 0.2126, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.9008338445985316e-05, |
|
"loss": 0.2153, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.9004479840716775e-05, |
|
"loss": 0.2256, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.9000621235448237e-05, |
|
"loss": 0.2197, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.8996762630179696e-05, |
|
"loss": 0.2269, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.8992904024911158e-05, |
|
"loss": 0.2277, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.8989045419642616e-05, |
|
"loss": 0.2209, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.8985186814374078e-05, |
|
"loss": 0.2263, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.8981328209105537e-05, |
|
"loss": 0.2171, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.8977469603837e-05, |
|
"loss": 0.2275, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.8973610998568458e-05, |
|
"loss": 0.2185, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.896975239329992e-05, |
|
"loss": 0.2281, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.8965893788031378e-05, |
|
"loss": 0.2125, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.896203518276284e-05, |
|
"loss": 0.2184, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.89581765774943e-05, |
|
"loss": 0.2189, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.895431797222576e-05, |
|
"loss": 0.2203, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.8950459366957223e-05, |
|
"loss": 0.2199, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.894660076168868e-05, |
|
"loss": 0.2251, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.8942742156420144e-05, |
|
"loss": 0.2167, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.8938883551151602e-05, |
|
"loss": 0.2149, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8935024945883064e-05, |
|
"loss": 0.2071, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8931166340614523e-05, |
|
"loss": 0.2185, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8927307735345985e-05, |
|
"loss": 0.2177, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8923449130077443e-05, |
|
"loss": 0.2144, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.8919590524808905e-05, |
|
"loss": 0.2183, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.8915731919540364e-05, |
|
"loss": 0.2252, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.8911873314271826e-05, |
|
"loss": 0.2193, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.8908014709003285e-05, |
|
"loss": 0.2004, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.8904156103734747e-05, |
|
"loss": 0.2087, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.8900297498466205e-05, |
|
"loss": 0.212, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.8896438893197667e-05, |
|
"loss": 0.2102, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.8892580287929126e-05, |
|
"loss": 0.2225, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.8888721682660588e-05, |
|
"loss": 0.2155, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.8884863077392047e-05, |
|
"loss": 0.2233, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.888100447212351e-05, |
|
"loss": 0.2187, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.8877145866854967e-05, |
|
"loss": 0.2213, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.887328726158643e-05, |
|
"loss": 0.2218, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.886942865631789e-05, |
|
"loss": 0.2169, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.886557005104935e-05, |
|
"loss": 0.2116, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.8861711445780812e-05, |
|
"loss": 0.2143, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.885785284051227e-05, |
|
"loss": 0.2143, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.8853994235243733e-05, |
|
"loss": 0.2101, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.885013562997519e-05, |
|
"loss": 0.2144, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.8846277024706653e-05, |
|
"loss": 0.2074, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.8842418419438112e-05, |
|
"loss": 0.2179, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.8838559814169574e-05, |
|
"loss": 0.2129, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.8834701208901033e-05, |
|
"loss": 0.223, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.8830842603632495e-05, |
|
"loss": 0.2211, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8826983998363953e-05, |
|
"loss": 0.2135, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.8823125393095415e-05, |
|
"loss": 0.2194, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.8819266787826874e-05, |
|
"loss": 0.227, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.8815408182558336e-05, |
|
"loss": 0.2168, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.8811549577289795e-05, |
|
"loss": 0.2179, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.8807690972021257e-05, |
|
"loss": 0.2077, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.8803832366752715e-05, |
|
"loss": 0.2145, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.8799973761484177e-05, |
|
"loss": 0.215, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.8796115156215636e-05, |
|
"loss": 0.2131, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.8792256550947098e-05, |
|
"loss": 0.2105, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.8788397945678556e-05, |
|
"loss": 0.2007, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.878453934041002e-05, |
|
"loss": 0.2126, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.8780680735141477e-05, |
|
"loss": 0.2001, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.877682212987294e-05, |
|
"loss": 0.216, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.8772963524604398e-05, |
|
"loss": 0.2141, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.876910491933586e-05, |
|
"loss": 0.2238, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.876524631406732e-05, |
|
"loss": 0.2206, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.876138770879878e-05, |
|
"loss": 0.2142, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.875752910353024e-05, |
|
"loss": 0.2031, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.87536704982617e-05, |
|
"loss": 0.2244, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.874981189299316e-05, |
|
"loss": 0.2138, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.874595328772462e-05, |
|
"loss": 0.2192, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.874209468245608e-05, |
|
"loss": 0.2242, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8738236077187542e-05, |
|
"loss": 0.2166, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8734377471919e-05, |
|
"loss": 0.2148, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8730518866650463e-05, |
|
"loss": 0.2141, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.872666026138192e-05, |
|
"loss": 0.2236, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8722801656113384e-05, |
|
"loss": 0.2218, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8718943050844842e-05, |
|
"loss": 0.2188, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.8715084445576304e-05, |
|
"loss": 0.2179, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.8711225840307763e-05, |
|
"loss": 0.2305, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.8707367235039225e-05, |
|
"loss": 0.2174, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.8703508629770684e-05, |
|
"loss": 0.2067, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.8699650024502146e-05, |
|
"loss": 0.2163, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.8695791419233604e-05, |
|
"loss": 0.2188, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.8691932813965066e-05, |
|
"loss": 0.2166, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.8688074208696525e-05, |
|
"loss": 0.2215, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.8684215603427987e-05, |
|
"loss": 0.2269, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.868035699815945e-05, |
|
"loss": 0.2182, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.8676498392890907e-05, |
|
"loss": 0.2131, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.867263978762237e-05, |
|
"loss": 0.2125, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.8668781182353828e-05, |
|
"loss": 0.2141, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_bart_score": -6.65, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21217894554138184, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.15388393304626252, |
|
"rouge2": 0.024424609903622555, |
|
"rougeL": 0.1256245521025564, |
|
"rougeLsum": 0.125613877504259 |
|
}, |
|
"eval_runtime": 4279.9159, |
|
"eval_samples_per_second": 5.046, |
|
"eval_simple_accuracy": 0.72, |
|
"eval_steps_per_second": 2.523, |
|
"step": 172774 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.866492257708529e-05, |
|
"loss": 0.2107, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.866106397181675e-05, |
|
"loss": 0.2049, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.865720536654821e-05, |
|
"loss": 0.2227, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.865334676127967e-05, |
|
"loss": 0.2124, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.864948815601113e-05, |
|
"loss": 0.209, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.864562955074259e-05, |
|
"loss": 0.2163, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.8641770945474052e-05, |
|
"loss": 0.2165, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.863791234020551e-05, |
|
"loss": 0.2116, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.8634053734936973e-05, |
|
"loss": 0.2007, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.863019512966843e-05, |
|
"loss": 0.202, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.8626336524399893e-05, |
|
"loss": 0.2164, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.8622477919131352e-05, |
|
"loss": 0.2102, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.8618619313862814e-05, |
|
"loss": 0.208, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.8614760708594273e-05, |
|
"loss": 0.2062, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.8610902103325735e-05, |
|
"loss": 0.2099, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.8607043498057193e-05, |
|
"loss": 0.2065, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.8603184892788655e-05, |
|
"loss": 0.211, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.8599326287520114e-05, |
|
"loss": 0.206, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.8595467682251576e-05, |
|
"loss": 0.2096, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.8591609076983035e-05, |
|
"loss": 0.2172, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.8587750471714497e-05, |
|
"loss": 0.2085, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.8583891866445955e-05, |
|
"loss": 0.2056, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.8580033261177417e-05, |
|
"loss": 0.2204, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.8576174655908876e-05, |
|
"loss": 0.2164, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.8572316050640338e-05, |
|
"loss": 0.2, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.8568457445371797e-05, |
|
"loss": 0.2051, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.856459884010326e-05, |
|
"loss": 0.2133, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.8560740234834717e-05, |
|
"loss": 0.2145, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.855688162956618e-05, |
|
"loss": 0.2151, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.8553023024297638e-05, |
|
"loss": 0.2026, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.85491644190291e-05, |
|
"loss": 0.201, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.854530581376056e-05, |
|
"loss": 0.1968, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.854144720849202e-05, |
|
"loss": 0.2079, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.853758860322348e-05, |
|
"loss": 0.2186, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.853372999795494e-05, |
|
"loss": 0.2127, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.85298713926864e-05, |
|
"loss": 0.2043, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.8526012787417862e-05, |
|
"loss": 0.2161, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.852215418214932e-05, |
|
"loss": 0.2066, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.8518295576880782e-05, |
|
"loss": 0.2058, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.851443697161224e-05, |
|
"loss": 0.215, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.8510578366343703e-05, |
|
"loss": 0.2035, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.850671976107516e-05, |
|
"loss": 0.2185, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.8502861155806624e-05, |
|
"loss": 0.2193, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.8499002550538082e-05, |
|
"loss": 0.2254, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.8495143945269544e-05, |
|
"loss": 0.1887, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.8491285340001003e-05, |
|
"loss": 0.215, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.8487426734732465e-05, |
|
"loss": 0.2042, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.8483568129463924e-05, |
|
"loss": 0.2144, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.8479709524195386e-05, |
|
"loss": 0.2181, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.8475850918926844e-05, |
|
"loss": 0.207, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.8471992313658306e-05, |
|
"loss": 0.2116, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.8468133708389765e-05, |
|
"loss": 0.2043, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.8464275103121227e-05, |
|
"loss": 0.2182, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.8460416497852686e-05, |
|
"loss": 0.2135, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.8456557892584148e-05, |
|
"loss": 0.2106, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.8452699287315606e-05, |
|
"loss": 0.2089, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.8448840682047068e-05, |
|
"loss": 0.1977, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.8444982076778527e-05, |
|
"loss": 0.214, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.844112347150999e-05, |
|
"loss": 0.2093, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.8437264866241447e-05, |
|
"loss": 0.2065, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.843340626097291e-05, |
|
"loss": 0.214, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.8429547655704368e-05, |
|
"loss": 0.1995, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.842568905043583e-05, |
|
"loss": 0.2057, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.842183044516729e-05, |
|
"loss": 0.2043, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.841797183989875e-05, |
|
"loss": 0.212, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.8414113234630213e-05, |
|
"loss": 0.2144, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.841025462936167e-05, |
|
"loss": 0.2184, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.8406396024093133e-05, |
|
"loss": 0.2056, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.8402537418824592e-05, |
|
"loss": 0.2134, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.8398678813556054e-05, |
|
"loss": 0.2065, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.8394820208287513e-05, |
|
"loss": 0.2027, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.8390961603018975e-05, |
|
"loss": 0.2082, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8387102997750433e-05, |
|
"loss": 0.1966, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8383244392481895e-05, |
|
"loss": 0.1995, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8379385787213354e-05, |
|
"loss": 0.2144, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8375527181944816e-05, |
|
"loss": 0.2134, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8371668576676275e-05, |
|
"loss": 0.2119, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8367809971407737e-05, |
|
"loss": 0.2095, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.8363951366139195e-05, |
|
"loss": 0.2183, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.8360092760870657e-05, |
|
"loss": 0.2161, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8356234155602116e-05, |
|
"loss": 0.222, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8352375550333578e-05, |
|
"loss": 0.217, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.8348516945065037e-05, |
|
"loss": 0.2002, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.83446583397965e-05, |
|
"loss": 0.2039, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.8340799734527957e-05, |
|
"loss": 0.1983, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.833694112925942e-05, |
|
"loss": 0.2086, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.8333082523990878e-05, |
|
"loss": 0.2134, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.832922391872234e-05, |
|
"loss": 0.2086, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.83253653134538e-05, |
|
"loss": 0.2135, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.832150670818526e-05, |
|
"loss": 0.2074, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.831764810291672e-05, |
|
"loss": 0.2122, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.831378949764818e-05, |
|
"loss": 0.2052, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.830993089237964e-05, |
|
"loss": 0.2106, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.8306072287111102e-05, |
|
"loss": 0.2095, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.8302213681842564e-05, |
|
"loss": 0.2093, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.8298355076574022e-05, |
|
"loss": 0.2108, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.8294496471305484e-05, |
|
"loss": 0.2116, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.8290637866036943e-05, |
|
"loss": 0.2107, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.8286779260768405e-05, |
|
"loss": 0.205, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.8282920655499864e-05, |
|
"loss": 0.2249, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.8279062050231326e-05, |
|
"loss": 0.2167, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.8275203444962784e-05, |
|
"loss": 0.2096, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.8271344839694246e-05, |
|
"loss": 0.2097, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.8267486234425705e-05, |
|
"loss": 0.2161, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.8263627629157167e-05, |
|
"loss": 0.2248, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.8259769023888626e-05, |
|
"loss": 0.2203, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.8255910418620088e-05, |
|
"loss": 0.2082, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.8252051813351546e-05, |
|
"loss": 0.214, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.824819320808301e-05, |
|
"loss": 0.2061, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.8244334602814467e-05, |
|
"loss": 0.2137, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.824047599754593e-05, |
|
"loss": 0.2112, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.8236617392277388e-05, |
|
"loss": 0.2074, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.823275878700885e-05, |
|
"loss": 0.2163, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.8228900181740308e-05, |
|
"loss": 0.2022, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.822504157647177e-05, |
|
"loss": 0.2119, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.8221182971203232e-05, |
|
"loss": 0.2286, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.821732436593469e-05, |
|
"loss": 0.2184, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.8213465760666153e-05, |
|
"loss": 0.2022, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.820960715539761e-05, |
|
"loss": 0.2126, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.8205748550129074e-05, |
|
"loss": 0.2115, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.8201889944860532e-05, |
|
"loss": 0.2203, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.8198031339591994e-05, |
|
"loss": 0.2111, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.8194172734323453e-05, |
|
"loss": 0.2151, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.8190314129054915e-05, |
|
"loss": 0.217, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.8186455523786374e-05, |
|
"loss": 0.2146, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.8182596918517836e-05, |
|
"loss": 0.2045, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.8178738313249294e-05, |
|
"loss": 0.2103, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.8174879707980756e-05, |
|
"loss": 0.2011, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.8171021102712215e-05, |
|
"loss": 0.2086, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 1.8167162497443677e-05, |
|
"loss": 0.215, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.8163303892175135e-05, |
|
"loss": 0.2273, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.8159445286906597e-05, |
|
"loss": 0.2007, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.8155586681638056e-05, |
|
"loss": 0.1992, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.8151728076369518e-05, |
|
"loss": 0.2022, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.8147869471100977e-05, |
|
"loss": 0.2108, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.814401086583244e-05, |
|
"loss": 0.2087, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.81401522605639e-05, |
|
"loss": 0.2147, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.813629365529536e-05, |
|
"loss": 0.2164, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.813243505002682e-05, |
|
"loss": 0.2077, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.812857644475828e-05, |
|
"loss": 0.2195, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.8124717839489742e-05, |
|
"loss": 0.2205, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.81208592342212e-05, |
|
"loss": 0.2114, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8117000628952663e-05, |
|
"loss": 0.2166, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.811314202368412e-05, |
|
"loss": 0.2018, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.8109283418415583e-05, |
|
"loss": 0.2214, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.8105424813147042e-05, |
|
"loss": 0.2076, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.8101566207878504e-05, |
|
"loss": 0.2237, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.8097707602609963e-05, |
|
"loss": 0.2065, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.8093848997341425e-05, |
|
"loss": 0.2041, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.8089990392072883e-05, |
|
"loss": 0.2188, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.8086131786804345e-05, |
|
"loss": 0.2124, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.8082273181535804e-05, |
|
"loss": 0.2011, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.8078414576267266e-05, |
|
"loss": 0.2155, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8074555970998725e-05, |
|
"loss": 0.213, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8070697365730187e-05, |
|
"loss": 0.2052, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.8066838760461645e-05, |
|
"loss": 0.2012, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.8062980155193107e-05, |
|
"loss": 0.2143, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.8059121549924566e-05, |
|
"loss": 0.216, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.8055262944656028e-05, |
|
"loss": 0.2058, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.8051404339387486e-05, |
|
"loss": 0.2036, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.804754573411895e-05, |
|
"loss": 0.207, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.8043687128850407e-05, |
|
"loss": 0.208, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.803982852358187e-05, |
|
"loss": 0.2076, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.8035969918313328e-05, |
|
"loss": 0.2008, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.803211131304479e-05, |
|
"loss": 0.2127, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.802825270777625e-05, |
|
"loss": 0.2069, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.802439410250771e-05, |
|
"loss": 0.2116, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.802053549723917e-05, |
|
"loss": 0.2151, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.801667689197063e-05, |
|
"loss": 0.2058, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.801281828670209e-05, |
|
"loss": 0.2093, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.8008959681433552e-05, |
|
"loss": 0.2013, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.800510107616501e-05, |
|
"loss": 0.2152, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.8001242470896472e-05, |
|
"loss": 0.2219, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_bart_score": -6.71, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.78, |
|
"eval_bertscore_recall": 0.71, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21118199825286865, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.15362210929602754, |
|
"rouge2": 0.02403959138428216, |
|
"rougeL": 0.12578179536151207, |
|
"rougeLsum": 0.1257791367260927 |
|
}, |
|
"eval_runtime": 4356.2183, |
|
"eval_samples_per_second": 4.958, |
|
"eval_simple_accuracy": 0.75, |
|
"eval_steps_per_second": 2.479, |
|
"step": 259161 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.799738386562793e-05, |
|
"loss": 0.1986, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 1.7993525260359393e-05, |
|
"loss": 0.2009, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.798966665509085e-05, |
|
"loss": 0.1981, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.7985808049822314e-05, |
|
"loss": 0.2096, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 1.7981949444553772e-05, |
|
"loss": 0.1995, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 1.7978090839285234e-05, |
|
"loss": 0.201, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.7974232234016693e-05, |
|
"loss": 0.1993, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.7970373628748155e-05, |
|
"loss": 0.2068, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 1.7966515023479614e-05, |
|
"loss": 0.2091, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.7962656418211076e-05, |
|
"loss": 0.2002, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.7958797812942534e-05, |
|
"loss": 0.2058, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.7954939207673996e-05, |
|
"loss": 0.1924, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.7951080602405455e-05, |
|
"loss": 0.1967, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 1.7947221997136917e-05, |
|
"loss": 0.2102, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 1.7943363391868376e-05, |
|
"loss": 0.1996, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 1.7939504786599838e-05, |
|
"loss": 0.195, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.7935646181331296e-05, |
|
"loss": 0.1942, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.7931787576062758e-05, |
|
"loss": 0.1986, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.7927928970794217e-05, |
|
"loss": 0.2123, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.792407036552568e-05, |
|
"loss": 0.1924, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 1.7920211760257137e-05, |
|
"loss": 0.2026, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.79163531549886e-05, |
|
"loss": 0.2038, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.7912494549720058e-05, |
|
"loss": 0.1966, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.790863594445152e-05, |
|
"loss": 0.1984, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.790477733918298e-05, |
|
"loss": 0.1985, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 1.790091873391444e-05, |
|
"loss": 0.2068, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 1.78970601286459e-05, |
|
"loss": 0.2091, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 1.789320152337736e-05, |
|
"loss": 0.2041, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.7889342918108823e-05, |
|
"loss": 0.2076, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.7885484312840282e-05, |
|
"loss": 0.2049, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.7881625707571744e-05, |
|
"loss": 0.2042, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.7877767102303203e-05, |
|
"loss": 0.2046, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.7873908497034665e-05, |
|
"loss": 0.2111, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.7870049891766123e-05, |
|
"loss": 0.2082, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 1.7866191286497585e-05, |
|
"loss": 0.2052, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.7862332681229044e-05, |
|
"loss": 0.1885, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.7858474075960506e-05, |
|
"loss": 0.2165, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.7854615470691965e-05, |
|
"loss": 0.2122, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.7850756865423427e-05, |
|
"loss": 0.2084, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.7846898260154885e-05, |
|
"loss": 0.2104, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.7843039654886347e-05, |
|
"loss": 0.2143, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.7839181049617806e-05, |
|
"loss": 0.207, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.7835322444349268e-05, |
|
"loss": 0.2075, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.7831463839080727e-05, |
|
"loss": 0.2019, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.782760523381219e-05, |
|
"loss": 0.2103, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.7823746628543647e-05, |
|
"loss": 0.2108, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.781988802327511e-05, |
|
"loss": 0.2062, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.7816029418006568e-05, |
|
"loss": 0.1966, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.781217081273803e-05, |
|
"loss": 0.1954, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.780831220746949e-05, |
|
"loss": 0.1995, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.780445360220095e-05, |
|
"loss": 0.2042, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.780059499693241e-05, |
|
"loss": 0.2065, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.779673639166387e-05, |
|
"loss": 0.2019, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 1.779287778639533e-05, |
|
"loss": 0.2114, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.7789019181126792e-05, |
|
"loss": 0.2043, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.778516057585825e-05, |
|
"loss": 0.2149, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.7781301970589712e-05, |
|
"loss": 0.198, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.777744336532117e-05, |
|
"loss": 0.2076, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.7773584760052633e-05, |
|
"loss": 0.2019, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.7769726154784092e-05, |
|
"loss": 0.2132, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 1.7765867549515554e-05, |
|
"loss": 0.2149, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 1.7762008944247012e-05, |
|
"loss": 0.2053, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 1.7758150338978474e-05, |
|
"loss": 0.1947, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.7754291733709933e-05, |
|
"loss": 0.2054, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.7750433128441395e-05, |
|
"loss": 0.2098, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 1.7746574523172854e-05, |
|
"loss": 0.211, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.7742715917904316e-05, |
|
"loss": 0.2129, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.7738857312635774e-05, |
|
"loss": 0.2183, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 1.7734998707367236e-05, |
|
"loss": 0.2111, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 1.7731140102098695e-05, |
|
"loss": 0.2039, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.7727281496830157e-05, |
|
"loss": 0.2056, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 1.7723422891561616e-05, |
|
"loss": 0.2209, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.7719564286293078e-05, |
|
"loss": 0.2174, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 1.7715705681024536e-05, |
|
"loss": 0.2048, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 1.7711847075755998e-05, |
|
"loss": 0.203, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.7707988470487457e-05, |
|
"loss": 0.2019, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.770412986521892e-05, |
|
"loss": 0.2108, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 1.7700271259950378e-05, |
|
"loss": 0.2037, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 1.769641265468184e-05, |
|
"loss": 0.2025, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 1.7692554049413298e-05, |
|
"loss": 0.208, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 1.768869544414476e-05, |
|
"loss": 0.21, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 1.768483683887622e-05, |
|
"loss": 0.2082, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 1.768097823360768e-05, |
|
"loss": 0.2073, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 1.767711962833914e-05, |
|
"loss": 0.2088, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 1.76732610230706e-05, |
|
"loss": 0.221, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 1.766940241780206e-05, |
|
"loss": 0.2059, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 1.7665543812533522e-05, |
|
"loss": 0.2065, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 1.7661685207264984e-05, |
|
"loss": 0.203, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 1.7657826601996443e-05, |
|
"loss": 0.1965, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 1.7653967996727905e-05, |
|
"loss": 0.1983, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 1.7650109391459363e-05, |
|
"loss": 0.2099, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 1.7646250786190825e-05, |
|
"loss": 0.2169, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 1.7642392180922284e-05, |
|
"loss": 0.2161, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 1.7638533575653746e-05, |
|
"loss": 0.199, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 1.7634674970385205e-05, |
|
"loss": 0.2072, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 1.7630816365116667e-05, |
|
"loss": 0.2115, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 1.7626957759848125e-05, |
|
"loss": 0.2051, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 1.7623099154579587e-05, |
|
"loss": 0.2063, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 1.7619240549311046e-05, |
|
"loss": 0.2062, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 1.7615381944042508e-05, |
|
"loss": 0.2107, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 1.7611523338773967e-05, |
|
"loss": 0.1978, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 1.760766473350543e-05, |
|
"loss": 0.2074, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 1.7603806128236887e-05, |
|
"loss": 0.2014, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 1.759994752296835e-05, |
|
"loss": 0.2128, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.7596088917699808e-05, |
|
"loss": 0.211, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.759223031243127e-05, |
|
"loss": 0.2056, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.758837170716273e-05, |
|
"loss": 0.2023, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.758451310189419e-05, |
|
"loss": 0.212, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.7580654496625653e-05, |
|
"loss": 0.2084, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.757679589135711e-05, |
|
"loss": 0.1958, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.7572937286088573e-05, |
|
"loss": 0.1997, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7569078680820032e-05, |
|
"loss": 0.206, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.7565220075551494e-05, |
|
"loss": 0.2068, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.7561361470282953e-05, |
|
"loss": 0.2209, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.7557502865014415e-05, |
|
"loss": 0.2086, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.7553644259745873e-05, |
|
"loss": 0.1992, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.7549785654477335e-05, |
|
"loss": 0.1929, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.7545927049208794e-05, |
|
"loss": 0.2042, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.7542068443940256e-05, |
|
"loss": 0.2031, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.7538209838671714e-05, |
|
"loss": 0.2076, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.7534351233403176e-05, |
|
"loss": 0.2038, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.7530492628134635e-05, |
|
"loss": 0.2113, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.7526634022866097e-05, |
|
"loss": 0.207, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.7522775417597556e-05, |
|
"loss": 0.2133, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.7518916812329018e-05, |
|
"loss": 0.2027, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.7515058207060476e-05, |
|
"loss": 0.2002, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.751119960179194e-05, |
|
"loss": 0.2173, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.7507340996523397e-05, |
|
"loss": 0.2068, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.750348239125486e-05, |
|
"loss": 0.1951, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.749962378598632e-05, |
|
"loss": 0.2117, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.749576518071778e-05, |
|
"loss": 0.199, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.7491906575449242e-05, |
|
"loss": 0.2016, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.74880479701807e-05, |
|
"loss": 0.203, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.7484189364912162e-05, |
|
"loss": 0.204, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.748033075964362e-05, |
|
"loss": 0.2135, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.7476472154375083e-05, |
|
"loss": 0.2115, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.747261354910654e-05, |
|
"loss": 0.2088, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.7468754943838004e-05, |
|
"loss": 0.2045, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.7464896338569462e-05, |
|
"loss": 0.2076, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.7461037733300924e-05, |
|
"loss": 0.2094, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.7457179128032383e-05, |
|
"loss": 0.2053, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.7453320522763845e-05, |
|
"loss": 0.222, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.7449461917495304e-05, |
|
"loss": 0.2011, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.7445603312226766e-05, |
|
"loss": 0.1948, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.7441744706958224e-05, |
|
"loss": 0.2022, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.7437886101689686e-05, |
|
"loss": 0.2084, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.7434027496421145e-05, |
|
"loss": 0.2012, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.7430168891152607e-05, |
|
"loss": 0.2099, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 1.7426310285884066e-05, |
|
"loss": 0.2229, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.7422451680615528e-05, |
|
"loss": 0.2107, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.7418593075346986e-05, |
|
"loss": 0.2112, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.7414734470078448e-05, |
|
"loss": 0.2041, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.7410875864809907e-05, |
|
"loss": 0.198, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.740701725954137e-05, |
|
"loss": 0.2063, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.7403158654272827e-05, |
|
"loss": 0.2216, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.739930004900429e-05, |
|
"loss": 0.2078, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.7395441443735748e-05, |
|
"loss": 0.2013, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.739158283846721e-05, |
|
"loss": 0.2011, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.738772423319867e-05, |
|
"loss": 0.2074, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.738386562793013e-05, |
|
"loss": 0.2103, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.738000702266159e-05, |
|
"loss": 0.2089, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.737614841739305e-05, |
|
"loss": 0.2029, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.737228981212451e-05, |
|
"loss": 0.2057, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.7368431206855972e-05, |
|
"loss": 0.1963, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.736457260158743e-05, |
|
"loss": 0.2057, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.7360713996318893e-05, |
|
"loss": 0.1949, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.7356855391050355e-05, |
|
"loss": 0.1999, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.7352996785781813e-05, |
|
"loss": 0.2147, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 1.7349138180513275e-05, |
|
"loss": 0.2092, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 1.7345279575244734e-05, |
|
"loss": 0.2003, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.7341420969976196e-05, |
|
"loss": 0.2057, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.7337562364707655e-05, |
|
"loss": 0.1958, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 1.7333703759439117e-05, |
|
"loss": 0.201, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_bart_score": -6.65, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.2109740972518921, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.16131107323752153, |
|
"rouge2": 0.025998526352501692, |
|
"rougeL": 0.1308022660230437, |
|
"rougeLsum": 0.13079982676404756 |
|
}, |
|
"eval_runtime": 4284.4815, |
|
"eval_samples_per_second": 5.041, |
|
"eval_simple_accuracy": 0.78, |
|
"eval_steps_per_second": 2.52, |
|
"step": 345548 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.7329845154170575e-05, |
|
"loss": 0.192, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.7325986548902037e-05, |
|
"loss": 0.2071, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.7322127943633496e-05, |
|
"loss": 0.2114, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.7318269338364958e-05, |
|
"loss": 0.1951, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.7314410733096417e-05, |
|
"loss": 0.2019, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.731055212782788e-05, |
|
"loss": 0.1942, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.7306693522559337e-05, |
|
"loss": 0.2016, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.73028349172908e-05, |
|
"loss": 0.2076, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.7298976312022258e-05, |
|
"loss": 0.206, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.729511770675372e-05, |
|
"loss": 0.1963, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.729125910148518e-05, |
|
"loss": 0.1892, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.728740049621664e-05, |
|
"loss": 0.2025, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.72835418909481e-05, |
|
"loss": 0.201, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.727968328567956e-05, |
|
"loss": 0.2041, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.727582468041102e-05, |
|
"loss": 0.2077, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.7271966075142482e-05, |
|
"loss": 0.1999, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.726810746987394e-05, |
|
"loss": 0.1976, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.7264248864605402e-05, |
|
"loss": 0.1915, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.726039025933686e-05, |
|
"loss": 0.1992, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.7256531654068323e-05, |
|
"loss": 0.1924, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.7252673048799782e-05, |
|
"loss": 0.1934, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.7248814443531244e-05, |
|
"loss": 0.2034, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.7244955838262702e-05, |
|
"loss": 0.1922, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.7241097232994164e-05, |
|
"loss": 0.191, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.7237238627725623e-05, |
|
"loss": 0.2009, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.7233380022457085e-05, |
|
"loss": 0.1917, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.7229521417188544e-05, |
|
"loss": 0.1917, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.7225662811920006e-05, |
|
"loss": 0.1958, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.7221804206651464e-05, |
|
"loss": 0.2036, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.7217945601382926e-05, |
|
"loss": 0.2019, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.7214086996114385e-05, |
|
"loss": 0.2069, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.7210228390845847e-05, |
|
"loss": 0.2, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.7206369785577306e-05, |
|
"loss": 0.192, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.7202511180308768e-05, |
|
"loss": 0.2127, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.7198652575040226e-05, |
|
"loss": 0.2039, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.7194793969771688e-05, |
|
"loss": 0.2094, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.7190935364503147e-05, |
|
"loss": 0.2071, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.718707675923461e-05, |
|
"loss": 0.189, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.7183218153966068e-05, |
|
"loss": 0.2053, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.717935954869753e-05, |
|
"loss": 0.2061, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.7175500943428988e-05, |
|
"loss": 0.2076, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.717164233816045e-05, |
|
"loss": 0.2189, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.716778373289191e-05, |
|
"loss": 0.2027, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.716392512762337e-05, |
|
"loss": 0.2017, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.716006652235483e-05, |
|
"loss": 0.1943, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.715620791708629e-05, |
|
"loss": 0.1958, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.715234931181775e-05, |
|
"loss": 0.2038, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.7148490706549212e-05, |
|
"loss": 0.2037, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.714463210128067e-05, |
|
"loss": 0.1953, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.7140773496012133e-05, |
|
"loss": 0.1976, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.713691489074359e-05, |
|
"loss": 0.1929, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.7133056285475053e-05, |
|
"loss": 0.205, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.7129197680206512e-05, |
|
"loss": 0.2011, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.7125339074937974e-05, |
|
"loss": 0.2038, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.7121480469669433e-05, |
|
"loss": 0.2029, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.7117621864400895e-05, |
|
"loss": 0.2002, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 1.7113763259132353e-05, |
|
"loss": 0.1967, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 1.7109904653863815e-05, |
|
"loss": 0.2038, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 1.7106046048595274e-05, |
|
"loss": 0.206, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.7102187443326736e-05, |
|
"loss": 0.207, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.7098328838058198e-05, |
|
"loss": 0.2014, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 1.7094470232789657e-05, |
|
"loss": 0.1977, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 1.709061162752112e-05, |
|
"loss": 0.1958, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 1.7086753022252577e-05, |
|
"loss": 0.1975, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 1.708289441698404e-05, |
|
"loss": 0.2001, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 1.7079035811715498e-05, |
|
"loss": 0.201, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.707517720644696e-05, |
|
"loss": 0.2023, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.707131860117842e-05, |
|
"loss": 0.2018, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.706745999590988e-05, |
|
"loss": 0.2061, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.706360139064134e-05, |
|
"loss": 0.2104, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 1.70597427853728e-05, |
|
"loss": 0.2083, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.705588418010426e-05, |
|
"loss": 0.1992, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.7052025574835722e-05, |
|
"loss": 0.204, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.704816696956718e-05, |
|
"loss": 0.2064, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.7044308364298643e-05, |
|
"loss": 0.1923, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 1.70404497590301e-05, |
|
"loss": 0.1915, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.7036591153761563e-05, |
|
"loss": 0.2041, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.7032732548493022e-05, |
|
"loss": 0.2057, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 1.7028873943224484e-05, |
|
"loss": 0.1899, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 1.7025015337955942e-05, |
|
"loss": 0.1987, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.7021156732687404e-05, |
|
"loss": 0.211, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.7017298127418863e-05, |
|
"loss": 0.2023, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 1.7013439522150325e-05, |
|
"loss": 0.2048, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.7009580916881784e-05, |
|
"loss": 0.2035, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.7005722311613246e-05, |
|
"loss": 0.2011, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.7001863706344704e-05, |
|
"loss": 0.207, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.6998005101076166e-05, |
|
"loss": 0.2022, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.6994146495807625e-05, |
|
"loss": 0.1954, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.6990287890539087e-05, |
|
"loss": 0.2071, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.6986429285270546e-05, |
|
"loss": 0.1948, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.6982570680002008e-05, |
|
"loss": 0.197, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.6978712074733466e-05, |
|
"loss": 0.1984, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.697485346946493e-05, |
|
"loss": 0.2043, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.6970994864196387e-05, |
|
"loss": 0.1983, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.696713625892785e-05, |
|
"loss": 0.1983, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.6963277653659308e-05, |
|
"loss": 0.2012, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.695941904839077e-05, |
|
"loss": 0.2066, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.6955560443122228e-05, |
|
"loss": 0.2046, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.695170183785369e-05, |
|
"loss": 0.1981, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.694784323258515e-05, |
|
"loss": 0.2034, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.694398462731661e-05, |
|
"loss": 0.2048, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.6940126022048073e-05, |
|
"loss": 0.2106, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.693626741677953e-05, |
|
"loss": 0.1976, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.6932408811510994e-05, |
|
"loss": 0.2032, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.6928550206242452e-05, |
|
"loss": 0.2059, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.6924691600973914e-05, |
|
"loss": 0.2039, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.6920832995705373e-05, |
|
"loss": 0.1946, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.6916974390436835e-05, |
|
"loss": 0.2094, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.6913115785168293e-05, |
|
"loss": 0.2111, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.6909257179899755e-05, |
|
"loss": 0.2026, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.6905398574631214e-05, |
|
"loss": 0.2056, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.6901539969362676e-05, |
|
"loss": 0.2042, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.6897681364094135e-05, |
|
"loss": 0.1958, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 1.6893822758825597e-05, |
|
"loss": 0.1907, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.6889964153557055e-05, |
|
"loss": 0.2064, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.6886105548288517e-05, |
|
"loss": 0.2025, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.6882246943019976e-05, |
|
"loss": 0.2003, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.6878388337751438e-05, |
|
"loss": 0.1886, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.6874529732482897e-05, |
|
"loss": 0.1954, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.687067112721436e-05, |
|
"loss": 0.2081, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.6866812521945817e-05, |
|
"loss": 0.2059, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.686295391667728e-05, |
|
"loss": 0.1964, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.6859095311408738e-05, |
|
"loss": 0.2028, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.68552367061402e-05, |
|
"loss": 0.2032, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.6851378100871662e-05, |
|
"loss": 0.2033, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.684751949560312e-05, |
|
"loss": 0.1917, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.6843660890334583e-05, |
|
"loss": 0.2036, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.683980228506604e-05, |
|
"loss": 0.2028, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.6835943679797503e-05, |
|
"loss": 0.2007, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.6832085074528962e-05, |
|
"loss": 0.2072, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.6828226469260424e-05, |
|
"loss": 0.2059, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.6824367863991883e-05, |
|
"loss": 0.1921, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.6820509258723345e-05, |
|
"loss": 0.2001, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.6816650653454803e-05, |
|
"loss": 0.2051, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.6812792048186265e-05, |
|
"loss": 0.2081, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.6808933442917724e-05, |
|
"loss": 0.2087, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.6805074837649186e-05, |
|
"loss": 0.2157, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.6801216232380645e-05, |
|
"loss": 0.2083, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.6797357627112107e-05, |
|
"loss": 0.2008, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.6793499021843565e-05, |
|
"loss": 0.1974, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.6789640416575027e-05, |
|
"loss": 0.2012, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.6785781811306486e-05, |
|
"loss": 0.1853, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.6781923206037948e-05, |
|
"loss": 0.2057, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.6778064600769406e-05, |
|
"loss": 0.2052, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.677420599550087e-05, |
|
"loss": 0.2027, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.677034739023233e-05, |
|
"loss": 0.2042, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.676648878496379e-05, |
|
"loss": 0.1939, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.676263017969525e-05, |
|
"loss": 0.2066, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.675877157442671e-05, |
|
"loss": 0.2091, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.6754912969158172e-05, |
|
"loss": 0.2111, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.675105436388963e-05, |
|
"loss": 0.2057, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.6747195758621092e-05, |
|
"loss": 0.1918, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.674333715335255e-05, |
|
"loss": 0.2003, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.6739478548084013e-05, |
|
"loss": 0.2063, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.673561994281547e-05, |
|
"loss": 0.2117, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.6731761337546934e-05, |
|
"loss": 0.193, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.6727902732278392e-05, |
|
"loss": 0.2022, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.6724044127009854e-05, |
|
"loss": 0.213, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.6720185521741313e-05, |
|
"loss": 0.2084, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.6716326916472775e-05, |
|
"loss": 0.2009, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.6712468311204234e-05, |
|
"loss": 0.198, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.6708609705935696e-05, |
|
"loss": 0.2142, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.6704751100667154e-05, |
|
"loss": 0.1949, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.6700892495398616e-05, |
|
"loss": 0.2135, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.6697033890130075e-05, |
|
"loss": 0.2038, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 1.6693175284861537e-05, |
|
"loss": 0.1992, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.6689316679592996e-05, |
|
"loss": 0.2016, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.6685458074324458e-05, |
|
"loss": 0.2019, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.6681599469055916e-05, |
|
"loss": 0.1995, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.6677740863787378e-05, |
|
"loss": 0.1968, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.6673882258518837e-05, |
|
"loss": 0.1999, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.66700236532503e-05, |
|
"loss": 0.2088, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_bart_score": -6.65, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21080316603183746, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.16481557726494733, |
|
"rouge2": 0.02718331876189925, |
|
"rougeL": 0.13357267291161234, |
|
"rougeLsum": 0.1335407688877468 |
|
}, |
|
"eval_runtime": 4313.7823, |
|
"eval_samples_per_second": 5.007, |
|
"eval_simple_accuracy": 0.81, |
|
"eval_steps_per_second": 2.503, |
|
"step": 431935 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.6666165047981757e-05, |
|
"loss": 0.2031, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 1.666230644271322e-05, |
|
"loss": 0.1897, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 1.6658447837444678e-05, |
|
"loss": 0.1874, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 1.665458923217614e-05, |
|
"loss": 0.188, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 1.66507306269076e-05, |
|
"loss": 0.2001, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 1.664687202163906e-05, |
|
"loss": 0.1905, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 1.664301341637052e-05, |
|
"loss": 0.1895, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 1.663915481110198e-05, |
|
"loss": 0.1921, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 1.663529620583344e-05, |
|
"loss": 0.1893, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 1.6631437600564902e-05, |
|
"loss": 0.1987, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.662757899529636e-05, |
|
"loss": 0.1961, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.6623720390027823e-05, |
|
"loss": 0.2017, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 1.661986178475928e-05, |
|
"loss": 0.1942, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 1.6616003179490743e-05, |
|
"loss": 0.1938, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 1.6612144574222202e-05, |
|
"loss": 0.1873, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.6608285968953664e-05, |
|
"loss": 0.1924, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.6604427363685123e-05, |
|
"loss": 0.1852, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 1.6600568758416585e-05, |
|
"loss": 0.2064, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 1.6596710153148043e-05, |
|
"loss": 0.1928, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 1.6592851547879505e-05, |
|
"loss": 0.2012, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 1.6588992942610964e-05, |
|
"loss": 0.2067, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 1.6585134337342426e-05, |
|
"loss": 0.1902, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 1.6581275732073885e-05, |
|
"loss": 0.1967, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 1.6577417126805347e-05, |
|
"loss": 0.1986, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 1.6573558521536805e-05, |
|
"loss": 0.2, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 1.6569699916268267e-05, |
|
"loss": 0.1906, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 1.656584131099973e-05, |
|
"loss": 0.198, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 1.6561982705731188e-05, |
|
"loss": 0.1839, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 1.655812410046265e-05, |
|
"loss": 0.2, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 1.655426549519411e-05, |
|
"loss": 0.1782, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 1.655040688992557e-05, |
|
"loss": 0.1983, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 1.654654828465703e-05, |
|
"loss": 0.1982, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 1.654268967938849e-05, |
|
"loss": 0.2065, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 1.653883107411995e-05, |
|
"loss": 0.1982, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 1.6534972468851412e-05, |
|
"loss": 0.1976, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 1.653111386358287e-05, |
|
"loss": 0.1969, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.6527255258314332e-05, |
|
"loss": 0.2034, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.652339665304579e-05, |
|
"loss": 0.1919, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 1.6519538047777253e-05, |
|
"loss": 0.1905, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 1.6515679442508712e-05, |
|
"loss": 0.2121, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 1.6511820837240174e-05, |
|
"loss": 0.1934, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 1.6507962231971632e-05, |
|
"loss": 0.2029, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 1.6504103626703094e-05, |
|
"loss": 0.1968, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 1.6500245021434553e-05, |
|
"loss": 0.206, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 1.6496386416166015e-05, |
|
"loss": 0.2029, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 1.6492527810897474e-05, |
|
"loss": 0.1842, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.6488669205628936e-05, |
|
"loss": 0.1959, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.6484810600360394e-05, |
|
"loss": 0.2018, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 1.6480951995091856e-05, |
|
"loss": 0.1991, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 1.6477093389823315e-05, |
|
"loss": 0.1953, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 1.6473234784554777e-05, |
|
"loss": 0.1936, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 1.6469376179286236e-05, |
|
"loss": 0.1967, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 1.6465517574017698e-05, |
|
"loss": 0.1948, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 1.6461658968749156e-05, |
|
"loss": 0.196, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 1.6457800363480618e-05, |
|
"loss": 0.198, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 1.6453941758212077e-05, |
|
"loss": 0.1997, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 1.645008315294354e-05, |
|
"loss": 0.1888, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 1.6446224547674998e-05, |
|
"loss": 0.2048, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 1.644236594240646e-05, |
|
"loss": 0.2001, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 1.6438507337137918e-05, |
|
"loss": 0.1925, |
|
"step": 461500 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 1.643464873186938e-05, |
|
"loss": 0.1994, |
|
"step": 462000 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 1.643079012660084e-05, |
|
"loss": 0.2008, |
|
"step": 462500 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 1.64269315213323e-05, |
|
"loss": 0.2007, |
|
"step": 463000 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 1.642307291606376e-05, |
|
"loss": 0.197, |
|
"step": 463500 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 1.641921431079522e-05, |
|
"loss": 0.1973, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 1.641535570552668e-05, |
|
"loss": 0.2053, |
|
"step": 464500 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 1.6411497100258142e-05, |
|
"loss": 0.1982, |
|
"step": 465000 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 1.64076384949896e-05, |
|
"loss": 0.2059, |
|
"step": 465500 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 1.6403779889721063e-05, |
|
"loss": 0.2058, |
|
"step": 466000 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 1.639992128445252e-05, |
|
"loss": 0.1989, |
|
"step": 466500 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.6396062679183983e-05, |
|
"loss": 0.1961, |
|
"step": 467000 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.6392204073915442e-05, |
|
"loss": 0.1944, |
|
"step": 467500 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 1.6388345468646904e-05, |
|
"loss": 0.1931, |
|
"step": 468000 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 1.6384486863378363e-05, |
|
"loss": 0.2113, |
|
"step": 468500 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 1.6380628258109825e-05, |
|
"loss": 0.2157, |
|
"step": 469000 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 1.6376769652841283e-05, |
|
"loss": 0.2061, |
|
"step": 469500 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 1.6372911047572745e-05, |
|
"loss": 0.1944, |
|
"step": 470000 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 1.6369052442304204e-05, |
|
"loss": 0.1991, |
|
"step": 470500 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 1.6365193837035666e-05, |
|
"loss": 0.1957, |
|
"step": 471000 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 1.6361335231767125e-05, |
|
"loss": 0.1844, |
|
"step": 471500 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 1.6357476626498587e-05, |
|
"loss": 0.1902, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 1.6353618021230045e-05, |
|
"loss": 0.1932, |
|
"step": 472500 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 1.6349759415961507e-05, |
|
"loss": 0.2082, |
|
"step": 473000 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 1.6345900810692966e-05, |
|
"loss": 0.2003, |
|
"step": 473500 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 1.6342042205424428e-05, |
|
"loss": 0.1982, |
|
"step": 474000 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 1.6338183600155887e-05, |
|
"loss": 0.2008, |
|
"step": 474500 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 1.633432499488735e-05, |
|
"loss": 0.1914, |
|
"step": 475000 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 1.6330466389618807e-05, |
|
"loss": 0.1937, |
|
"step": 475500 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 1.632660778435027e-05, |
|
"loss": 0.2024, |
|
"step": 476000 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 1.6322749179081728e-05, |
|
"loss": 0.1999, |
|
"step": 476500 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 1.631889057381319e-05, |
|
"loss": 0.1917, |
|
"step": 477000 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 1.631503196854465e-05, |
|
"loss": 0.1937, |
|
"step": 477500 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 1.631117336327611e-05, |
|
"loss": 0.1951, |
|
"step": 478000 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 1.6307314758007573e-05, |
|
"loss": 0.2056, |
|
"step": 478500 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 1.630345615273903e-05, |
|
"loss": 0.1886, |
|
"step": 479000 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 1.6299597547470493e-05, |
|
"loss": 0.195, |
|
"step": 479500 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 1.6295738942201952e-05, |
|
"loss": 0.202, |
|
"step": 480000 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 1.6291880336933414e-05, |
|
"loss": 0.2004, |
|
"step": 480500 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.6288021731664872e-05, |
|
"loss": 0.1975, |
|
"step": 481000 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.6284163126396334e-05, |
|
"loss": 0.2031, |
|
"step": 481500 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 1.6280304521127793e-05, |
|
"loss": 0.1979, |
|
"step": 482000 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 1.6276445915859255e-05, |
|
"loss": 0.1876, |
|
"step": 482500 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 1.6272587310590714e-05, |
|
"loss": 0.1967, |
|
"step": 483000 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 1.6268728705322176e-05, |
|
"loss": 0.1974, |
|
"step": 483500 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 1.6264870100053634e-05, |
|
"loss": 0.2051, |
|
"step": 484000 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 1.6261011494785096e-05, |
|
"loss": 0.2047, |
|
"step": 484500 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 1.6257152889516555e-05, |
|
"loss": 0.2041, |
|
"step": 485000 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 1.6253294284248017e-05, |
|
"loss": 0.2, |
|
"step": 485500 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 1.6249435678979476e-05, |
|
"loss": 0.2009, |
|
"step": 486000 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 1.6245577073710938e-05, |
|
"loss": 0.2033, |
|
"step": 486500 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 1.6241718468442396e-05, |
|
"loss": 0.1913, |
|
"step": 487000 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 1.623785986317386e-05, |
|
"loss": 0.2027, |
|
"step": 487500 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 1.6234001257905317e-05, |
|
"loss": 0.1927, |
|
"step": 488000 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 1.623014265263678e-05, |
|
"loss": 0.2034, |
|
"step": 488500 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 1.6226284047368238e-05, |
|
"loss": 0.2053, |
|
"step": 489000 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 1.62224254420997e-05, |
|
"loss": 0.2035, |
|
"step": 489500 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 1.6218566836831158e-05, |
|
"loss": 0.1951, |
|
"step": 490000 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 1.621470823156262e-05, |
|
"loss": 0.1912, |
|
"step": 490500 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 1.6210849626294082e-05, |
|
"loss": 0.1976, |
|
"step": 491000 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 1.620699102102554e-05, |
|
"loss": 0.1963, |
|
"step": 491500 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 1.6203132415757003e-05, |
|
"loss": 0.2046, |
|
"step": 492000 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 1.619927381048846e-05, |
|
"loss": 0.2058, |
|
"step": 492500 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 1.6195415205219924e-05, |
|
"loss": 0.2012, |
|
"step": 493000 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 1.6191556599951382e-05, |
|
"loss": 0.2031, |
|
"step": 493500 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 1.6187697994682844e-05, |
|
"loss": 0.1956, |
|
"step": 494000 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 1.6183839389414303e-05, |
|
"loss": 0.187, |
|
"step": 494500 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 1.6179980784145765e-05, |
|
"loss": 0.1974, |
|
"step": 495000 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 1.6176122178877224e-05, |
|
"loss": 0.1987, |
|
"step": 495500 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 1.6172263573608686e-05, |
|
"loss": 0.2047, |
|
"step": 496000 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 1.6168404968340144e-05, |
|
"loss": 0.2042, |
|
"step": 496500 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 1.6164546363071606e-05, |
|
"loss": 0.198, |
|
"step": 497000 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 1.6160687757803065e-05, |
|
"loss": 0.1991, |
|
"step": 497500 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 1.6156829152534527e-05, |
|
"loss": 0.197, |
|
"step": 498000 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 1.6152970547265985e-05, |
|
"loss": 0.1938, |
|
"step": 498500 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 1.6149111941997447e-05, |
|
"loss": 0.2114, |
|
"step": 499000 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 1.6145253336728906e-05, |
|
"loss": 0.1997, |
|
"step": 499500 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 1.6141394731460368e-05, |
|
"loss": 0.1989, |
|
"step": 500000 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 1.6137536126191827e-05, |
|
"loss": 0.2003, |
|
"step": 500500 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 1.613367752092329e-05, |
|
"loss": 0.2036, |
|
"step": 501000 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 1.612981891565475e-05, |
|
"loss": 0.2041, |
|
"step": 501500 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 1.612596031038621e-05, |
|
"loss": 0.2042, |
|
"step": 502000 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 1.612210170511767e-05, |
|
"loss": 0.1908, |
|
"step": 502500 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 1.611824309984913e-05, |
|
"loss": 0.1942, |
|
"step": 503000 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 1.6114384494580592e-05, |
|
"loss": 0.2025, |
|
"step": 503500 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 1.611052588931205e-05, |
|
"loss": 0.1953, |
|
"step": 504000 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 1.6106667284043513e-05, |
|
"loss": 0.2011, |
|
"step": 504500 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 1.610280867877497e-05, |
|
"loss": 0.2009, |
|
"step": 505000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 1.6098950073506433e-05, |
|
"loss": 0.1947, |
|
"step": 505500 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 1.6095091468237892e-05, |
|
"loss": 0.2035, |
|
"step": 506000 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 1.6091232862969354e-05, |
|
"loss": 0.2055, |
|
"step": 506500 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 1.6087374257700813e-05, |
|
"loss": 0.2043, |
|
"step": 507000 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 1.6083515652432275e-05, |
|
"loss": 0.1873, |
|
"step": 507500 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 1.6079657047163733e-05, |
|
"loss": 0.1955, |
|
"step": 508000 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 1.6075798441895195e-05, |
|
"loss": 0.1949, |
|
"step": 508500 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 1.6071939836626654e-05, |
|
"loss": 0.2008, |
|
"step": 509000 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 1.6068081231358116e-05, |
|
"loss": 0.2165, |
|
"step": 509500 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 1.6064222626089575e-05, |
|
"loss": 0.1984, |
|
"step": 510000 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 1.6060364020821037e-05, |
|
"loss": 0.196, |
|
"step": 510500 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 1.6056505415552495e-05, |
|
"loss": 0.2007, |
|
"step": 511000 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 1.6052646810283957e-05, |
|
"loss": 0.1921, |
|
"step": 511500 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 1.6048788205015416e-05, |
|
"loss": 0.1984, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 1.6044929599746878e-05, |
|
"loss": 0.1961, |
|
"step": 512500 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 1.604107099447834e-05, |
|
"loss": 0.1948, |
|
"step": 513000 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 1.60372123892098e-05, |
|
"loss": 0.2038, |
|
"step": 513500 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 1.603335378394126e-05, |
|
"loss": 0.204, |
|
"step": 514000 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 1.602949517867272e-05, |
|
"loss": 0.2067, |
|
"step": 514500 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 1.602563657340418e-05, |
|
"loss": 0.1998, |
|
"step": 515000 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 1.602177796813564e-05, |
|
"loss": 0.1921, |
|
"step": 515500 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 1.6017919362867102e-05, |
|
"loss": 0.2024, |
|
"step": 516000 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 1.601406075759856e-05, |
|
"loss": 0.1984, |
|
"step": 516500 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 1.6010202152330022e-05, |
|
"loss": 0.1948, |
|
"step": 517000 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 1.600634354706148e-05, |
|
"loss": 0.1941, |
|
"step": 517500 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 1.6002484941792943e-05, |
|
"loss": 0.2016, |
|
"step": 518000 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_bart_score": -6.63, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21149182319641113, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.1678843969321887, |
|
"rouge2": 0.028304411045424825, |
|
"rougeL": 0.1355780441996962, |
|
"rougeLsum": 0.13555466262481553 |
|
}, |
|
"eval_runtime": 4217.6861, |
|
"eval_samples_per_second": 5.121, |
|
"eval_simple_accuracy": 0.81, |
|
"eval_steps_per_second": 2.56, |
|
"step": 518322 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 1.5998626336524402e-05, |
|
"loss": 0.1866, |
|
"step": 518500 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.5994767731255864e-05, |
|
"loss": 0.193, |
|
"step": 519000 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.5990909125987322e-05, |
|
"loss": 0.1983, |
|
"step": 519500 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 1.5987050520718784e-05, |
|
"loss": 0.1958, |
|
"step": 520000 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.5983191915450243e-05, |
|
"loss": 0.1906, |
|
"step": 520500 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.5979333310181705e-05, |
|
"loss": 0.1967, |
|
"step": 521000 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 1.5975474704913164e-05, |
|
"loss": 0.1962, |
|
"step": 521500 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 1.5971616099644626e-05, |
|
"loss": 0.1824, |
|
"step": 522000 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.5967757494376084e-05, |
|
"loss": 0.1952, |
|
"step": 522500 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.5963898889107546e-05, |
|
"loss": 0.188, |
|
"step": 523000 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 1.5960040283839005e-05, |
|
"loss": 0.1853, |
|
"step": 523500 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.5956181678570467e-05, |
|
"loss": 0.187, |
|
"step": 524000 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.5952323073301926e-05, |
|
"loss": 0.1902, |
|
"step": 524500 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.5948464468033388e-05, |
|
"loss": 0.183, |
|
"step": 525000 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.5944605862764846e-05, |
|
"loss": 0.1902, |
|
"step": 525500 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.5940747257496308e-05, |
|
"loss": 0.192, |
|
"step": 526000 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.5936888652227767e-05, |
|
"loss": 0.191, |
|
"step": 526500 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 1.593303004695923e-05, |
|
"loss": 0.2009, |
|
"step": 527000 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.5929171441690688e-05, |
|
"loss": 0.1968, |
|
"step": 527500 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.592531283642215e-05, |
|
"loss": 0.1922, |
|
"step": 528000 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 1.5921454231153608e-05, |
|
"loss": 0.1936, |
|
"step": 528500 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 1.591759562588507e-05, |
|
"loss": 0.1852, |
|
"step": 529000 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.591373702061653e-05, |
|
"loss": 0.1889, |
|
"step": 529500 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 1.590987841534799e-05, |
|
"loss": 0.1821, |
|
"step": 530000 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 1.590601981007945e-05, |
|
"loss": 0.1983, |
|
"step": 530500 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.590216120481091e-05, |
|
"loss": 0.1925, |
|
"step": 531000 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.589830259954237e-05, |
|
"loss": 0.1836, |
|
"step": 531500 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 1.5894443994273832e-05, |
|
"loss": 0.1896, |
|
"step": 532000 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 1.589058538900529e-05, |
|
"loss": 0.1905, |
|
"step": 532500 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 1.5886726783736753e-05, |
|
"loss": 0.1969, |
|
"step": 533000 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 1.588286817846821e-05, |
|
"loss": 0.2071, |
|
"step": 533500 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 1.5879009573199673e-05, |
|
"loss": 0.1898, |
|
"step": 534000 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.5875150967931132e-05, |
|
"loss": 0.189, |
|
"step": 534500 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.5871292362662594e-05, |
|
"loss": 0.1952, |
|
"step": 535000 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.5867433757394053e-05, |
|
"loss": 0.1873, |
|
"step": 535500 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.5863575152125515e-05, |
|
"loss": 0.1924, |
|
"step": 536000 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 1.5859716546856973e-05, |
|
"loss": 0.1789, |
|
"step": 536500 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.5855857941588435e-05, |
|
"loss": 0.1957, |
|
"step": 537000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.5851999336319894e-05, |
|
"loss": 0.1944, |
|
"step": 537500 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.5848140731051356e-05, |
|
"loss": 0.1958, |
|
"step": 538000 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.5844282125782815e-05, |
|
"loss": 0.1923, |
|
"step": 538500 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 1.5840423520514277e-05, |
|
"loss": 0.1884, |
|
"step": 539000 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 1.5836564915245735e-05, |
|
"loss": 0.1932, |
|
"step": 539500 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 1.5832706309977197e-05, |
|
"loss": 0.1951, |
|
"step": 540000 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.5828847704708656e-05, |
|
"loss": 0.1958, |
|
"step": 540500 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.5824989099440118e-05, |
|
"loss": 0.1959, |
|
"step": 541000 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 1.5821130494171577e-05, |
|
"loss": 0.2007, |
|
"step": 541500 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 1.581727188890304e-05, |
|
"loss": 0.1944, |
|
"step": 542000 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 1.5813413283634497e-05, |
|
"loss": 0.2011, |
|
"step": 542500 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.580955467836596e-05, |
|
"loss": 0.1946, |
|
"step": 543000 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.5805696073097418e-05, |
|
"loss": 0.1938, |
|
"step": 543500 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 1.580183746782888e-05, |
|
"loss": 0.1892, |
|
"step": 544000 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 1.579797886256034e-05, |
|
"loss": 0.1926, |
|
"step": 544500 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.57941202572918e-05, |
|
"loss": 0.1951, |
|
"step": 545000 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.579026165202326e-05, |
|
"loss": 0.1846, |
|
"step": 545500 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 1.578640304675472e-05, |
|
"loss": 0.199, |
|
"step": 546000 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.578254444148618e-05, |
|
"loss": 0.1988, |
|
"step": 546500 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.5778685836217642e-05, |
|
"loss": 0.202, |
|
"step": 547000 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 1.5774827230949104e-05, |
|
"loss": 0.1919, |
|
"step": 547500 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 1.5770968625680562e-05, |
|
"loss": 0.1961, |
|
"step": 548000 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 1.5767110020412024e-05, |
|
"loss": 0.2074, |
|
"step": 548500 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.5763251415143483e-05, |
|
"loss": 0.1936, |
|
"step": 549000 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.5759392809874945e-05, |
|
"loss": 0.191, |
|
"step": 549500 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.5755534204606404e-05, |
|
"loss": 0.2015, |
|
"step": 550000 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.5751675599337866e-05, |
|
"loss": 0.1939, |
|
"step": 550500 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 1.5747816994069324e-05, |
|
"loss": 0.2, |
|
"step": 551000 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 1.5743958388800786e-05, |
|
"loss": 0.1935, |
|
"step": 551500 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.5740099783532245e-05, |
|
"loss": 0.1864, |
|
"step": 552000 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 1.5736241178263707e-05, |
|
"loss": 0.1847, |
|
"step": 552500 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 1.5732382572995166e-05, |
|
"loss": 0.1929, |
|
"step": 553000 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.5728523967726628e-05, |
|
"loss": 0.1947, |
|
"step": 553500 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.5724665362458086e-05, |
|
"loss": 0.1943, |
|
"step": 554000 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.572080675718955e-05, |
|
"loss": 0.1915, |
|
"step": 554500 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.5716948151921007e-05, |
|
"loss": 0.1857, |
|
"step": 555000 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.571308954665247e-05, |
|
"loss": 0.1947, |
|
"step": 555500 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.5709230941383928e-05, |
|
"loss": 0.1957, |
|
"step": 556000 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.570537233611539e-05, |
|
"loss": 0.1969, |
|
"step": 556500 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.5701513730846848e-05, |
|
"loss": 0.1914, |
|
"step": 557000 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.569765512557831e-05, |
|
"loss": 0.2006, |
|
"step": 557500 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.569379652030977e-05, |
|
"loss": 0.1956, |
|
"step": 558000 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.568993791504123e-05, |
|
"loss": 0.1939, |
|
"step": 558500 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.568607930977269e-05, |
|
"loss": 0.183, |
|
"step": 559000 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.568222070450415e-05, |
|
"loss": 0.1944, |
|
"step": 559500 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.567836209923561e-05, |
|
"loss": 0.1854, |
|
"step": 560000 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.5674503493967072e-05, |
|
"loss": 0.1961, |
|
"step": 560500 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.567064488869853e-05, |
|
"loss": 0.192, |
|
"step": 561000 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 1.5666786283429993e-05, |
|
"loss": 0.1903, |
|
"step": 561500 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.566292767816145e-05, |
|
"loss": 0.1859, |
|
"step": 562000 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.5659069072892914e-05, |
|
"loss": 0.1966, |
|
"step": 562500 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.5655210467624372e-05, |
|
"loss": 0.1866, |
|
"step": 563000 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.5651351862355834e-05, |
|
"loss": 0.2001, |
|
"step": 563500 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.5647493257087293e-05, |
|
"loss": 0.1879, |
|
"step": 564000 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.5643634651818755e-05, |
|
"loss": 0.2075, |
|
"step": 564500 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.5639776046550213e-05, |
|
"loss": 0.207, |
|
"step": 565000 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.5635917441281675e-05, |
|
"loss": 0.208, |
|
"step": 565500 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.5632058836013134e-05, |
|
"loss": 0.2054, |
|
"step": 566000 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.5628200230744596e-05, |
|
"loss": 0.2021, |
|
"step": 566500 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.5624341625476055e-05, |
|
"loss": 0.1884, |
|
"step": 567000 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 1.5620483020207517e-05, |
|
"loss": 0.1921, |
|
"step": 567500 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.5616624414938975e-05, |
|
"loss": 0.1989, |
|
"step": 568000 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.5612765809670437e-05, |
|
"loss": 0.1909, |
|
"step": 568500 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.5608907204401896e-05, |
|
"loss": 0.1976, |
|
"step": 569000 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.5605048599133358e-05, |
|
"loss": 0.1924, |
|
"step": 569500 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.5601189993864817e-05, |
|
"loss": 0.1982, |
|
"step": 570000 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.559733138859628e-05, |
|
"loss": 0.1932, |
|
"step": 570500 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.5593472783327737e-05, |
|
"loss": 0.1959, |
|
"step": 571000 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 1.55896141780592e-05, |
|
"loss": 0.1954, |
|
"step": 571500 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 1.5585755572790658e-05, |
|
"loss": 0.1992, |
|
"step": 572000 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.558189696752212e-05, |
|
"loss": 0.1941, |
|
"step": 572500 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.557803836225358e-05, |
|
"loss": 0.1951, |
|
"step": 573000 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.557417975698504e-05, |
|
"loss": 0.1872, |
|
"step": 573500 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.5570321151716503e-05, |
|
"loss": 0.1997, |
|
"step": 574000 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 1.556646254644796e-05, |
|
"loss": 0.1957, |
|
"step": 574500 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.5562603941179423e-05, |
|
"loss": 0.204, |
|
"step": 575000 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.5558745335910882e-05, |
|
"loss": 0.1953, |
|
"step": 575500 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.5554886730642344e-05, |
|
"loss": 0.1974, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.5551028125373803e-05, |
|
"loss": 0.1967, |
|
"step": 576500 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 1.5547169520105265e-05, |
|
"loss": 0.1971, |
|
"step": 577000 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.5543310914836723e-05, |
|
"loss": 0.1962, |
|
"step": 577500 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.5539452309568185e-05, |
|
"loss": 0.1966, |
|
"step": 578000 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 1.5535593704299644e-05, |
|
"loss": 0.2043, |
|
"step": 578500 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 1.5531735099031106e-05, |
|
"loss": 0.2, |
|
"step": 579000 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.5527876493762564e-05, |
|
"loss": 0.1827, |
|
"step": 579500 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.5524017888494026e-05, |
|
"loss": 0.1981, |
|
"step": 580000 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 1.5520159283225485e-05, |
|
"loss": 0.2016, |
|
"step": 580500 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.5516300677956947e-05, |
|
"loss": 0.194, |
|
"step": 581000 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.5512442072688406e-05, |
|
"loss": 0.1934, |
|
"step": 581500 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 1.5508583467419868e-05, |
|
"loss": 0.1902, |
|
"step": 582000 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 1.5504724862151326e-05, |
|
"loss": 0.1928, |
|
"step": 582500 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.550086625688279e-05, |
|
"loss": 0.2015, |
|
"step": 583000 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.5497007651614247e-05, |
|
"loss": 0.1965, |
|
"step": 583500 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.549314904634571e-05, |
|
"loss": 0.1938, |
|
"step": 584000 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.548929044107717e-05, |
|
"loss": 0.1993, |
|
"step": 584500 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.548543183580863e-05, |
|
"loss": 0.1953, |
|
"step": 585000 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.5481573230540092e-05, |
|
"loss": 0.1873, |
|
"step": 585500 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.547771462527155e-05, |
|
"loss": 0.1873, |
|
"step": 586000 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.5473856020003012e-05, |
|
"loss": 0.1941, |
|
"step": 586500 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.546999741473447e-05, |
|
"loss": 0.2033, |
|
"step": 587000 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.5466138809465933e-05, |
|
"loss": 0.1973, |
|
"step": 587500 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.546228020419739e-05, |
|
"loss": 0.1966, |
|
"step": 588000 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.5458421598928854e-05, |
|
"loss": 0.1892, |
|
"step": 588500 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.5454562993660312e-05, |
|
"loss": 0.1972, |
|
"step": 589000 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.5450704388391774e-05, |
|
"loss": 0.1984, |
|
"step": 589500 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.5446845783123233e-05, |
|
"loss": 0.1986, |
|
"step": 590000 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.5442987177854695e-05, |
|
"loss": 0.198, |
|
"step": 590500 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.5439128572586154e-05, |
|
"loss": 0.1948, |
|
"step": 591000 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.5435269967317616e-05, |
|
"loss": 0.1855, |
|
"step": 591500 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.5431411362049074e-05, |
|
"loss": 0.1955, |
|
"step": 592000 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.5427552756780536e-05, |
|
"loss": 0.1974, |
|
"step": 592500 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.5423694151511995e-05, |
|
"loss": 0.2002, |
|
"step": 593000 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.5419835546243457e-05, |
|
"loss": 0.1919, |
|
"step": 593500 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.5415976940974916e-05, |
|
"loss": 0.2035, |
|
"step": 594000 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.5412118335706378e-05, |
|
"loss": 0.1898, |
|
"step": 594500 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.5408259730437836e-05, |
|
"loss": 0.1971, |
|
"step": 595000 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.5404401125169298e-05, |
|
"loss": 0.1956, |
|
"step": 595500 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.540054251990076e-05, |
|
"loss": 0.2026, |
|
"step": 596000 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.539668391463222e-05, |
|
"loss": 0.1979, |
|
"step": 596500 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 1.539282530936368e-05, |
|
"loss": 0.1952, |
|
"step": 597000 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.538896670409514e-05, |
|
"loss": 0.1899, |
|
"step": 597500 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.53851080988266e-05, |
|
"loss": 0.2, |
|
"step": 598000 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.538124949355806e-05, |
|
"loss": 0.1983, |
|
"step": 598500 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.5377390888289522e-05, |
|
"loss": 0.1974, |
|
"step": 599000 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.537353228302098e-05, |
|
"loss": 0.204, |
|
"step": 599500 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.5369673677752443e-05, |
|
"loss": 0.1951, |
|
"step": 600000 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.53658150724839e-05, |
|
"loss": 0.1988, |
|
"step": 600500 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.5361956467215363e-05, |
|
"loss": 0.1985, |
|
"step": 601000 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.5358097861946822e-05, |
|
"loss": 0.2017, |
|
"step": 601500 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.5354239256678284e-05, |
|
"loss": 0.1949, |
|
"step": 602000 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.5350380651409743e-05, |
|
"loss": 0.204, |
|
"step": 602500 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 1.5346522046141205e-05, |
|
"loss": 0.1993, |
|
"step": 603000 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.5342663440872663e-05, |
|
"loss": 0.1917, |
|
"step": 603500 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.5338804835604125e-05, |
|
"loss": 0.1966, |
|
"step": 604000 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 1.5334946230335584e-05, |
|
"loss": 0.197, |
|
"step": 604500 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_bart_score": -6.65, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.2117389738559723, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.16639981419489805, |
|
"rouge2": 0.02707382439936413, |
|
"rougeL": 0.1347338562164133, |
|
"rougeLsum": 0.13475139274125486 |
|
}, |
|
"eval_runtime": 4160.5348, |
|
"eval_samples_per_second": 5.191, |
|
"eval_simple_accuracy": 0.83, |
|
"eval_steps_per_second": 2.596, |
|
"step": 604709 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 1.5331087625067046e-05, |
|
"loss": 0.18, |
|
"step": 605000 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 1.5327229019798505e-05, |
|
"loss": 0.1817, |
|
"step": 605500 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 1.5323370414529967e-05, |
|
"loss": 0.191, |
|
"step": 606000 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 1.5319511809261425e-05, |
|
"loss": 0.1893, |
|
"step": 606500 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 1.5315653203992887e-05, |
|
"loss": 0.1856, |
|
"step": 607000 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 1.5311794598724346e-05, |
|
"loss": 0.1857, |
|
"step": 607500 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 1.5307935993455808e-05, |
|
"loss": 0.1902, |
|
"step": 608000 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 1.5304077388187267e-05, |
|
"loss": 0.1861, |
|
"step": 608500 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 1.530021878291873e-05, |
|
"loss": 0.1843, |
|
"step": 609000 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 1.5296360177650187e-05, |
|
"loss": 0.1889, |
|
"step": 609500 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 1.529250157238165e-05, |
|
"loss": 0.1835, |
|
"step": 610000 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 1.5288642967113108e-05, |
|
"loss": 0.1799, |
|
"step": 610500 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 1.528478436184457e-05, |
|
"loss": 0.1972, |
|
"step": 611000 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 1.528092575657603e-05, |
|
"loss": 0.1938, |
|
"step": 611500 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 1.527706715130749e-05, |
|
"loss": 0.1962, |
|
"step": 612000 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 1.527320854603895e-05, |
|
"loss": 0.1937, |
|
"step": 612500 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 1.526934994077041e-05, |
|
"loss": 0.1868, |
|
"step": 613000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 1.526549133550187e-05, |
|
"loss": 0.1917, |
|
"step": 613500 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 1.5261632730233332e-05, |
|
"loss": 0.1917, |
|
"step": 614000 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 1.525777412496479e-05, |
|
"loss": 0.1787, |
|
"step": 614500 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 1.5253915519696252e-05, |
|
"loss": 0.1876, |
|
"step": 615000 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 1.5250056914427713e-05, |
|
"loss": 0.1919, |
|
"step": 615500 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 1.5246198309159173e-05, |
|
"loss": 0.1845, |
|
"step": 616000 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 1.5242339703890633e-05, |
|
"loss": 0.1923, |
|
"step": 616500 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 1.5238481098622094e-05, |
|
"loss": 0.1841, |
|
"step": 617000 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 1.5234622493353554e-05, |
|
"loss": 0.1947, |
|
"step": 617500 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 1.5230763888085014e-05, |
|
"loss": 0.1909, |
|
"step": 618000 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 1.5226905282816475e-05, |
|
"loss": 0.1881, |
|
"step": 618500 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 1.5223046677547935e-05, |
|
"loss": 0.1928, |
|
"step": 619000 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 1.5219188072279395e-05, |
|
"loss": 0.1846, |
|
"step": 619500 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 1.5215329467010856e-05, |
|
"loss": 0.1865, |
|
"step": 620000 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 1.5211470861742316e-05, |
|
"loss": 0.1957, |
|
"step": 620500 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 1.5207612256473776e-05, |
|
"loss": 0.1912, |
|
"step": 621000 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 1.5203753651205238e-05, |
|
"loss": 0.1893, |
|
"step": 621500 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 1.5199895045936697e-05, |
|
"loss": 0.1832, |
|
"step": 622000 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 1.5196036440668159e-05, |
|
"loss": 0.1887, |
|
"step": 622500 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 1.5192177835399618e-05, |
|
"loss": 0.2017, |
|
"step": 623000 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 1.518831923013108e-05, |
|
"loss": 0.1936, |
|
"step": 623500 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 1.5184460624862538e-05, |
|
"loss": 0.2018, |
|
"step": 624000 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 1.5180602019594e-05, |
|
"loss": 0.1841, |
|
"step": 624500 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 1.5176743414325459e-05, |
|
"loss": 0.1771, |
|
"step": 625000 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 1.5172884809056921e-05, |
|
"loss": 0.1986, |
|
"step": 625500 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 1.516902620378838e-05, |
|
"loss": 0.2022, |
|
"step": 626000 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 1.5165167598519842e-05, |
|
"loss": 0.1874, |
|
"step": 626500 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 1.51613089932513e-05, |
|
"loss": 0.1973, |
|
"step": 627000 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 1.5157450387982762e-05, |
|
"loss": 0.1984, |
|
"step": 627500 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 1.515359178271422e-05, |
|
"loss": 0.1952, |
|
"step": 628000 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 1.5149733177445683e-05, |
|
"loss": 0.1832, |
|
"step": 628500 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 1.5145874572177141e-05, |
|
"loss": 0.1899, |
|
"step": 629000 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 1.5142015966908603e-05, |
|
"loss": 0.197, |
|
"step": 629500 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 1.5138157361640062e-05, |
|
"loss": 0.1844, |
|
"step": 630000 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 1.5134298756371524e-05, |
|
"loss": 0.1769, |
|
"step": 630500 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 1.5130440151102983e-05, |
|
"loss": 0.1999, |
|
"step": 631000 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 1.5126581545834445e-05, |
|
"loss": 0.1908, |
|
"step": 631500 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 1.5122722940565903e-05, |
|
"loss": 0.1968, |
|
"step": 632000 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 1.5118864335297365e-05, |
|
"loss": 0.1966, |
|
"step": 632500 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 1.5115005730028824e-05, |
|
"loss": 0.1898, |
|
"step": 633000 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 1.5111147124760286e-05, |
|
"loss": 0.1874, |
|
"step": 633500 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 1.5107288519491745e-05, |
|
"loss": 0.2011, |
|
"step": 634000 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 1.5103429914223207e-05, |
|
"loss": 0.1953, |
|
"step": 634500 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 1.5099571308954665e-05, |
|
"loss": 0.2009, |
|
"step": 635000 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 1.5095712703686127e-05, |
|
"loss": 0.1912, |
|
"step": 635500 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 1.5091854098417586e-05, |
|
"loss": 0.1888, |
|
"step": 636000 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 1.5087995493149048e-05, |
|
"loss": 0.1861, |
|
"step": 636500 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 1.5084136887880507e-05, |
|
"loss": 0.184, |
|
"step": 637000 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 1.5080278282611969e-05, |
|
"loss": 0.1973, |
|
"step": 637500 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 1.5076419677343427e-05, |
|
"loss": 0.1823, |
|
"step": 638000 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 1.507256107207489e-05, |
|
"loss": 0.1852, |
|
"step": 638500 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 1.5068702466806348e-05, |
|
"loss": 0.1921, |
|
"step": 639000 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 1.506484386153781e-05, |
|
"loss": 0.196, |
|
"step": 639500 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 1.5060985256269269e-05, |
|
"loss": 0.1894, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 1.505712665100073e-05, |
|
"loss": 0.1873, |
|
"step": 640500 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 1.505326804573219e-05, |
|
"loss": 0.1899, |
|
"step": 641000 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 1.5049409440463651e-05, |
|
"loss": 0.1878, |
|
"step": 641500 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 1.5045550835195112e-05, |
|
"loss": 0.1976, |
|
"step": 642000 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 1.5041692229926572e-05, |
|
"loss": 0.1921, |
|
"step": 642500 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 1.5037833624658032e-05, |
|
"loss": 0.1863, |
|
"step": 643000 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 1.5033975019389493e-05, |
|
"loss": 0.2051, |
|
"step": 643500 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 1.5030116414120953e-05, |
|
"loss": 0.2046, |
|
"step": 644000 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 1.5026257808852413e-05, |
|
"loss": 0.1944, |
|
"step": 644500 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 1.5022399203583873e-05, |
|
"loss": 0.1844, |
|
"step": 645000 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 1.5018540598315334e-05, |
|
"loss": 0.1966, |
|
"step": 645500 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 1.5014681993046794e-05, |
|
"loss": 0.1898, |
|
"step": 646000 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 1.5010823387778254e-05, |
|
"loss": 0.1873, |
|
"step": 646500 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 1.5006964782509715e-05, |
|
"loss": 0.1836, |
|
"step": 647000 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 1.5003106177241175e-05, |
|
"loss": 0.193, |
|
"step": 647500 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 1.4999247571972635e-05, |
|
"loss": 0.193, |
|
"step": 648000 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 1.4995388966704096e-05, |
|
"loss": 0.1854, |
|
"step": 648500 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 1.4991530361435556e-05, |
|
"loss": 0.188, |
|
"step": 649000 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 1.4987671756167016e-05, |
|
"loss": 0.1826, |
|
"step": 649500 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 1.4983813150898478e-05, |
|
"loss": 0.1913, |
|
"step": 650000 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 1.4979954545629937e-05, |
|
"loss": 0.19, |
|
"step": 650500 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 1.4976095940361399e-05, |
|
"loss": 0.1944, |
|
"step": 651000 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 1.4972237335092858e-05, |
|
"loss": 0.2022, |
|
"step": 651500 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 1.496837872982432e-05, |
|
"loss": 0.1879, |
|
"step": 652000 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 1.496452012455578e-05, |
|
"loss": 0.1919, |
|
"step": 652500 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 1.496066151928724e-05, |
|
"loss": 0.1813, |
|
"step": 653000 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 1.49568029140187e-05, |
|
"loss": 0.1867, |
|
"step": 653500 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 1.4952944308750161e-05, |
|
"loss": 0.1967, |
|
"step": 654000 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 1.4949085703481621e-05, |
|
"loss": 0.1859, |
|
"step": 654500 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 1.4945227098213082e-05, |
|
"loss": 0.1932, |
|
"step": 655000 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 1.4941368492944542e-05, |
|
"loss": 0.1917, |
|
"step": 655500 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 1.4937509887676002e-05, |
|
"loss": 0.1967, |
|
"step": 656000 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 1.4933651282407463e-05, |
|
"loss": 0.1926, |
|
"step": 656500 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 1.4929792677138923e-05, |
|
"loss": 0.1886, |
|
"step": 657000 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 1.4925934071870383e-05, |
|
"loss": 0.1816, |
|
"step": 657500 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 1.4922075466601844e-05, |
|
"loss": 0.197, |
|
"step": 658000 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 1.4918216861333304e-05, |
|
"loss": 0.182, |
|
"step": 658500 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 1.4914358256064764e-05, |
|
"loss": 0.1953, |
|
"step": 659000 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 1.4910499650796225e-05, |
|
"loss": 0.1921, |
|
"step": 659500 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 1.4906641045527685e-05, |
|
"loss": 0.188, |
|
"step": 660000 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 1.4902782440259145e-05, |
|
"loss": 0.1961, |
|
"step": 660500 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 1.4898923834990605e-05, |
|
"loss": 0.1937, |
|
"step": 661000 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 1.4895065229722066e-05, |
|
"loss": 0.1965, |
|
"step": 661500 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 1.4891206624453526e-05, |
|
"loss": 0.1877, |
|
"step": 662000 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 1.4887348019184986e-05, |
|
"loss": 0.1879, |
|
"step": 662500 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 1.4883489413916448e-05, |
|
"loss": 0.1936, |
|
"step": 663000 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 1.4879630808647907e-05, |
|
"loss": 0.1832, |
|
"step": 663500 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 1.4875772203379369e-05, |
|
"loss": 0.198, |
|
"step": 664000 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 1.4871913598110828e-05, |
|
"loss": 0.1928, |
|
"step": 664500 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 1.486805499284229e-05, |
|
"loss": 0.2082, |
|
"step": 665000 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 1.4864196387573748e-05, |
|
"loss": 0.1888, |
|
"step": 665500 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 1.486033778230521e-05, |
|
"loss": 0.1912, |
|
"step": 666000 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 1.4856479177036669e-05, |
|
"loss": 0.187, |
|
"step": 666500 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 1.4852620571768131e-05, |
|
"loss": 0.1966, |
|
"step": 667000 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 1.484876196649959e-05, |
|
"loss": 0.1986, |
|
"step": 667500 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 1.4844903361231052e-05, |
|
"loss": 0.1952, |
|
"step": 668000 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 1.484104475596251e-05, |
|
"loss": 0.1888, |
|
"step": 668500 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 1.4837186150693972e-05, |
|
"loss": 0.2047, |
|
"step": 669000 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 1.4833327545425431e-05, |
|
"loss": 0.1944, |
|
"step": 669500 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 1.4829468940156893e-05, |
|
"loss": 0.2042, |
|
"step": 670000 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 1.4825610334888352e-05, |
|
"loss": 0.1934, |
|
"step": 670500 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 1.4821751729619814e-05, |
|
"loss": 0.193, |
|
"step": 671000 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 1.4817893124351272e-05, |
|
"loss": 0.1934, |
|
"step": 671500 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 1.4814034519082734e-05, |
|
"loss": 0.1938, |
|
"step": 672000 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 1.4810175913814193e-05, |
|
"loss": 0.1822, |
|
"step": 672500 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 1.4806317308545655e-05, |
|
"loss": 0.1889, |
|
"step": 673000 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 1.4802458703277114e-05, |
|
"loss": 0.2009, |
|
"step": 673500 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 1.4798600098008576e-05, |
|
"loss": 0.1936, |
|
"step": 674000 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 1.4794741492740034e-05, |
|
"loss": 0.1893, |
|
"step": 674500 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 1.4790882887471496e-05, |
|
"loss": 0.1868, |
|
"step": 675000 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 1.4787024282202955e-05, |
|
"loss": 0.2023, |
|
"step": 675500 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 1.4783165676934417e-05, |
|
"loss": 0.197, |
|
"step": 676000 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 1.4779307071665875e-05, |
|
"loss": 0.1967, |
|
"step": 676500 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 1.4775448466397338e-05, |
|
"loss": 0.1934, |
|
"step": 677000 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 1.4771589861128796e-05, |
|
"loss": 0.1917, |
|
"step": 677500 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 1.4767731255860258e-05, |
|
"loss": 0.1909, |
|
"step": 678000 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 1.4763872650591717e-05, |
|
"loss": 0.1939, |
|
"step": 678500 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 1.4760014045323179e-05, |
|
"loss": 0.1956, |
|
"step": 679000 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 1.4756155440054637e-05, |
|
"loss": 0.1954, |
|
"step": 679500 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 1.47522968347861e-05, |
|
"loss": 0.196, |
|
"step": 680000 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 1.4748438229517558e-05, |
|
"loss": 0.1975, |
|
"step": 680500 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 1.474457962424902e-05, |
|
"loss": 0.1946, |
|
"step": 681000 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 1.4740721018980479e-05, |
|
"loss": 0.1906, |
|
"step": 681500 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 1.473686241371194e-05, |
|
"loss": 0.1843, |
|
"step": 682000 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 1.47330038084434e-05, |
|
"loss": 0.1926, |
|
"step": 682500 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 1.4729145203174861e-05, |
|
"loss": 0.1941, |
|
"step": 683000 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 1.4725286597906323e-05, |
|
"loss": 0.1843, |
|
"step": 683500 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 1.4721427992637782e-05, |
|
"loss": 0.1926, |
|
"step": 684000 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 1.4717569387369244e-05, |
|
"loss": 0.1938, |
|
"step": 684500 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 1.4713710782100703e-05, |
|
"loss": 0.194, |
|
"step": 685000 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 1.4709852176832165e-05, |
|
"loss": 0.198, |
|
"step": 685500 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 1.4705993571563623e-05, |
|
"loss": 0.1968, |
|
"step": 686000 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 1.4702134966295085e-05, |
|
"loss": 0.1886, |
|
"step": 686500 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 1.4698276361026544e-05, |
|
"loss": 0.1875, |
|
"step": 687000 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 1.4694417755758006e-05, |
|
"loss": 0.1858, |
|
"step": 687500 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 1.4690559150489465e-05, |
|
"loss": 0.199, |
|
"step": 688000 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 1.4686700545220927e-05, |
|
"loss": 0.1895, |
|
"step": 688500 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 1.4682841939952385e-05, |
|
"loss": 0.1871, |
|
"step": 689000 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 1.4678983334683847e-05, |
|
"loss": 0.1983, |
|
"step": 689500 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 1.4675124729415306e-05, |
|
"loss": 0.1832, |
|
"step": 690000 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 1.4671266124146768e-05, |
|
"loss": 0.1932, |
|
"step": 690500 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 1.4667407518878227e-05, |
|
"loss": 0.1967, |
|
"step": 691000 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_bart_score": -6.63, |
|
"eval_bertscore_f1": 0.75, |
|
"eval_bertscore_precision": 0.79, |
|
"eval_bertscore_recall": 0.72, |
|
"eval_bleu": 0.0, |
|
"eval_loss": 0.21222023665905, |
|
"eval_meteor": 0.09, |
|
"eval_rogue_score": { |
|
"rouge1": 0.1645204206500075, |
|
"rouge2": 0.02700342773326716, |
|
"rougeL": 0.13397081371478534, |
|
"rougeLsum": 0.13395690421697704 |
|
}, |
|
"eval_runtime": 3876.5924, |
|
"eval_samples_per_second": 5.571, |
|
"eval_simple_accuracy": 0.85, |
|
"eval_steps_per_second": 2.786, |
|
"step": 691096 |
|
} |
|
], |
|
"max_steps": 2591610, |
|
"num_train_epochs": 30, |
|
"total_flos": 8.427728546876621e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|