{
  "best_metric": 0.9642160052049447,
  "best_model_checkpoint": "videomae-base-finetuned-ElderReact-Fear/checkpoint-77",
  "epoch": 4.189473684210526,
  "eval_steps": 500,
  "global_step": 380,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 6.242678642272949,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.5133,
      "step": 10
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.866024971008301,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.4234,
      "step": 20
    },
    {
      "epoch": 0.08,
      "grad_norm": 6.626444339752197,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 0.5058,
      "step": 30
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.6388182640075684,
      "learning_rate": 4.970760233918128e-05,
      "loss": 0.4791,
      "step": 40
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.3752734661102295,
      "learning_rate": 4.824561403508772e-05,
      "loss": 0.3841,
      "step": 50
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.3690688610076904,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.4903,
      "step": 60
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.5726335048675537,
      "learning_rate": 4.5321637426900585e-05,
      "loss": 0.2935,
      "step": 70
    },
    {
      "epoch": 0.2,
      "eval_f1": 0.9642160052049447,
      "eval_loss": 0.25238940119743347,
      "eval_runtime": 1187.3545,
      "eval_samples_per_second": 0.67,
      "eval_steps_per_second": 0.084,
      "step": 77
    },
    {
      "epoch": 1.01,
      "grad_norm": 3.456474542617798,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.4967,
      "step": 80
    },
    {
      "epoch": 1.03,
      "grad_norm": 1.242908239364624,
      "learning_rate": 4.239766081871345e-05,
      "loss": 0.4073,
      "step": 90
    },
    {
      "epoch": 1.06,
      "grad_norm": 3.6170222759246826,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.5547,
      "step": 100
    },
    {
      "epoch": 1.09,
      "grad_norm": 2.8330729007720947,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 0.363,
      "step": 110
    },
    {
      "epoch": 1.11,
      "grad_norm": 2.8961918354034424,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.4056,
      "step": 120
    },
    {
      "epoch": 1.14,
      "grad_norm": 5.695966720581055,
      "learning_rate": 3.654970760233918e-05,
      "loss": 0.3925,
      "step": 130
    },
    {
      "epoch": 1.17,
      "grad_norm": 3.3888511657714844,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.4257,
      "step": 140
    },
    {
      "epoch": 1.19,
      "grad_norm": 7.591073513031006,
      "learning_rate": 3.362573099415205e-05,
      "loss": 0.3966,
      "step": 150
    },
    {
      "epoch": 1.2,
      "eval_f1": 0.9642160052049447,
      "eval_loss": 0.25368985533714294,
      "eval_runtime": 1229.9142,
      "eval_samples_per_second": 0.647,
      "eval_steps_per_second": 0.081,
      "step": 154
    },
    {
      "epoch": 2.02,
      "grad_norm": 3.995485305786133,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.5168,
      "step": 160
    },
    {
      "epoch": 2.04,
      "grad_norm": 2.2910969257354736,
      "learning_rate": 3.0701754385964913e-05,
      "loss": 0.4298,
      "step": 170
    },
    {
      "epoch": 2.07,
      "grad_norm": 5.8856964111328125,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.4855,
      "step": 180
    },
    {
      "epoch": 2.09,
      "grad_norm": 1.2029250860214233,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.2937,
      "step": 190
    },
    {
      "epoch": 2.12,
      "grad_norm": 1.0417933464050293,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.2089,
      "step": 200
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.3655168116092682,
      "learning_rate": 2.485380116959064e-05,
      "loss": 0.5043,
      "step": 210
    },
    {
      "epoch": 2.17,
      "grad_norm": 2.3576948642730713,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.2995,
      "step": 220
    },
    {
      "epoch": 2.2,
      "grad_norm": 2.619518280029297,
      "learning_rate": 2.1929824561403507e-05,
      "loss": 0.3792,
      "step": 230
    },
    {
      "epoch": 2.2,
      "eval_f1": 0.9642160052049447,
      "eval_loss": 0.2470169961452484,
      "eval_runtime": 1353.2793,
      "eval_samples_per_second": 0.588,
      "eval_steps_per_second": 0.074,
      "step": 231
    },
    {
      "epoch": 3.02,
      "grad_norm": 3.8343122005462646,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.3818,
      "step": 240
    },
    {
      "epoch": 3.05,
      "grad_norm": 3.7524924278259277,
      "learning_rate": 1.9005847953216373e-05,
      "loss": 0.3958,
      "step": 250
    },
    {
      "epoch": 3.08,
      "grad_norm": 2.071563720703125,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.3903,
      "step": 260
    },
    {
      "epoch": 3.1,
      "grad_norm": 2.1228742599487305,
      "learning_rate": 1.608187134502924e-05,
      "loss": 0.4532,
      "step": 270
    },
    {
      "epoch": 3.13,
      "grad_norm": 2.22047758102417,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.3909,
      "step": 280
    },
    {
      "epoch": 3.16,
      "grad_norm": 1.4722857475280762,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.44,
      "step": 290
    },
    {
      "epoch": 3.18,
      "grad_norm": 1.8657927513122559,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.3666,
      "step": 300
    },
    {
      "epoch": 3.2,
      "eval_f1": 0.9642160052049447,
      "eval_loss": 0.2569561004638672,
      "eval_runtime": 952.0022,
      "eval_samples_per_second": 0.836,
      "eval_steps_per_second": 0.105,
      "step": 308
    },
    {
      "epoch": 4.01,
      "grad_norm": 6.351089000701904,
      "learning_rate": 1.023391812865497e-05,
      "loss": 0.3108,
      "step": 310
    },
    {
      "epoch": 4.03,
      "grad_norm": 2.317436933517456,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.2831,
      "step": 320
    },
    {
      "epoch": 4.06,
      "grad_norm": 2.794163703918457,
      "learning_rate": 7.3099415204678366e-06,
      "loss": 0.2504,
      "step": 330
    },
    {
      "epoch": 4.08,
      "grad_norm": 2.761640787124634,
      "learning_rate": 5.8479532163742686e-06,
      "loss": 0.4861,
      "step": 340
    },
    {
      "epoch": 4.11,
      "grad_norm": 4.870150089263916,
      "learning_rate": 4.3859649122807014e-06,
      "loss": 0.4329,
      "step": 350
    },
    {
      "epoch": 4.14,
      "grad_norm": 4.540055751800537,
      "learning_rate": 2.9239766081871343e-06,
      "loss": 0.3709,
      "step": 360
    },
    {
      "epoch": 4.16,
      "grad_norm": 4.406496524810791,
      "learning_rate": 1.4619883040935671e-06,
      "loss": 0.2996,
      "step": 370
    },
    {
      "epoch": 4.19,
      "grad_norm": 3.185987949371338,
      "learning_rate": 0.0,
      "loss": 0.2803,
      "step": 380
    },
    {
      "epoch": 4.19,
      "eval_f1": 0.9642160052049447,
      "eval_loss": 0.24840089678764343,
      "eval_runtime": 680.4356,
      "eval_samples_per_second": 1.17,
      "eval_steps_per_second": 0.147,
      "step": 380
    },
    {
      "epoch": 4.19,
      "step": 380,
      "total_flos": 3.783052093636215e+18,
      "train_loss": 0.3995331350125765,
      "train_runtime": 14411.9404,
      "train_samples_per_second": 0.211,
      "train_steps_per_second": 0.026
    },
    {
      "epoch": 4.19,
      "eval_f1": 0.9517819706498952,
      "eval_loss": 0.3077032268047333,
      "eval_runtime": 515.1776,
      "eval_samples_per_second": 1.456,
      "eval_steps_per_second": 0.182,
      "step": 380
    },
    {
      "epoch": 4.19,
      "eval_f1": 0.9517819706498952,
      "eval_loss": 0.3077031970024109,
      "eval_runtime": 529.5496,
      "eval_samples_per_second": 1.416,
      "eval_steps_per_second": 0.178,
      "step": 380
    }
  ],
  "logging_steps": 10,
  "max_steps": 380,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 500,
  "total_flos": 3.783052093636215e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}