Training in progress, step 496, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9d7b34a76ff225d572dfd7adb7761c11795b6aab1d0504fe34294e28072da528
 size 144805440
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0096c211308a0879d16d35e3c150f000bdd220a4f020b00d82b3a62c799ad2e1
 size 74292308
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6949d78c281042fabaa5f047deeac3cfddd3e6c3adb8c7666bda971495ed41e6
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f7493324899312613d8955872fa8217730fa78c9c6543740278014efd292b323
 size 1064
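The four checkpoint files above are Git LFS pointer files: each stores only the LFS spec version, the sha256 object id of the real binary, and its size in bytes, while the weights themselves live in LFS storage. A minimal verification sketch in Python (assuming the blobs have been pulled locally; the paths in the usage comment are illustrative, not part of this commit):

    import hashlib
    import os
    import re

    def verify_lfs_pointer(pointer_text: str, blob_path: str) -> bool:
        """Compare a downloaded blob against the oid/size recorded in an LFS pointer."""
        oid = re.search(r"oid sha256:([0-9a-f]{64})", pointer_text).group(1)
        size = int(re.search(r"size (\d+)", pointer_text).group(1))
        digest = hashlib.sha256()
        with open(blob_path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)
        return digest.hexdigest() == oid and os.path.getsize(blob_path) == size

    # Hypothetical usage, with the pointer text saved to a local file:
    # verify_lfs_pointer(open("adapter_model.pointer").read(),
    #                    "last-checkpoint/adapter_model.safetensors")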
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
+  "epoch": 1.0015128593040847,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 496,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2611,6 +2611,882 @@
       "learning_rate": 1.492855875400534e-05,
       "loss": 0.5699,
       "step": 372
+    },
+    {
+      "epoch": 0.7523953605648008,
+      "grad_norm": 0.490895539522171,
+      "learning_rate": 1.4701260157015822e-05,
+      "loss": 0.5533,
+      "step": 373
+    },
+    {
+      "epoch": 0.7544125063035805,
+      "grad_norm": 0.4228683412075043,
+      "learning_rate": 1.4475406648646195e-05,
+      "loss": 0.5938,
+      "step": 374
+    },
+    {
+      "epoch": 0.75642965204236,
+      "grad_norm": 0.39790162444114685,
+      "learning_rate": 1.4251007475071687e-05,
+      "loss": 0.5319,
+      "step": 375
+    },
+    {
+      "epoch": 0.7584467977811397,
+      "grad_norm": 0.4296700060367584,
+      "learning_rate": 1.402807182292879e-05,
+      "loss": 0.575,
+      "step": 376
+    },
+    {
+      "epoch": 0.7604639435199193,
+      "grad_norm": 0.40378013253211975,
+      "learning_rate": 1.3806608818939203e-05,
+      "loss": 0.597,
+      "step": 377
+    },
+    {
+      "epoch": 0.762481089258699,
+      "grad_norm": 0.46558690071105957,
+      "learning_rate": 1.3586627529536117e-05,
+      "loss": 0.5359,
+      "step": 378
+    },
+    {
+      "epoch": 0.7644982349974786,
+      "grad_norm": 0.46858280897140503,
+      "learning_rate": 1.3368136960493083e-05,
+      "loss": 0.657,
+      "step": 379
+    },
+    {
+      "epoch": 0.7665153807362582,
+      "grad_norm": 0.4781467318534851,
+      "learning_rate": 1.3151146056555319e-05,
+      "loss": 0.6095,
+      "step": 380
+    },
+    {
+      "epoch": 0.7685325264750378,
+      "grad_norm": 0.4994807839393616,
+      "learning_rate": 1.2935663701073586e-05,
+      "loss": 0.5803,
+      "step": 381
+    },
+    {
+      "epoch": 0.7705496722138174,
+      "grad_norm": 0.9686352014541626,
+      "learning_rate": 1.2721698715640351e-05,
+      "loss": 0.7633,
+      "step": 382
+    },
+    {
+      "epoch": 0.7725668179525971,
+      "grad_norm": 0.44720757007598877,
+      "learning_rate": 1.2509259859728862e-05,
+      "loss": 0.5816,
+      "step": 383
+    },
+    {
+      "epoch": 0.7745839636913767,
+      "grad_norm": 0.4461372196674347,
+      "learning_rate": 1.2298355830334346e-05,
+      "loss": 0.5365,
+      "step": 384
+    },
+    {
+      "epoch": 0.7766011094301564,
+      "grad_norm": 0.4846460223197937,
+      "learning_rate": 1.2088995261618108e-05,
+      "loss": 0.6579,
+      "step": 385
+    },
+    {
+      "epoch": 0.7786182551689359,
+      "grad_norm": 0.4917043447494507,
+      "learning_rate": 1.1881186724553966e-05,
+      "loss": 0.6061,
+      "step": 386
+    },
+    {
+      "epoch": 0.7806354009077155,
+      "grad_norm": 0.5250806212425232,
+      "learning_rate": 1.167493872657739e-05,
+      "loss": 0.743,
+      "step": 387
+    },
+    {
+      "epoch": 0.7826525466464952,
+      "grad_norm": 0.48723796010017395,
+      "learning_rate": 1.1470259711237286e-05,
+      "loss": 0.6,
+      "step": 388
+    },
+    {
+      "epoch": 0.7846696923852748,
+      "grad_norm": 0.5268376469612122,
+      "learning_rate": 1.1267158057850175e-05,
+      "loss": 0.5594,
+      "step": 389
+    },
+    {
+      "epoch": 0.7866868381240545,
+      "grad_norm": 0.53201824426651,
+      "learning_rate": 1.106564208115734e-05,
+      "loss": 0.6407,
+      "step": 390
+    },
+    {
+      "epoch": 0.7887039838628341,
+      "grad_norm": 0.5068972706794739,
+      "learning_rate": 1.0865720030984267e-05,
+      "loss": 0.6234,
+      "step": 391
+    },
+    {
+      "epoch": 0.7907211296016137,
+      "grad_norm": 0.5473886728286743,
+      "learning_rate": 1.066740009190304e-05,
+      "loss": 0.6321,
+      "step": 392
+    },
+    {
+      "epoch": 0.7927382753403933,
+      "grad_norm": 0.5613067746162415,
+      "learning_rate": 1.0470690382897163e-05,
+      "loss": 0.6022,
+      "step": 393
+    },
+    {
+      "epoch": 0.794755421079173,
+      "grad_norm": 0.5633993148803711,
+      "learning_rate": 1.0275598957029232e-05,
+      "loss": 0.7131,
+      "step": 394
+    },
+    {
+      "epoch": 0.7967725668179526,
+      "grad_norm": 0.5788494348526001,
+      "learning_rate": 1.0082133801111293e-05,
+      "loss": 0.605,
+      "step": 395
+    },
+    {
+      "epoch": 0.7987897125567323,
+      "grad_norm": 0.574659526348114,
+      "learning_rate": 9.890302835377735e-06,
+      "loss": 0.6206,
+      "step": 396
+    },
+    {
+      "epoch": 0.8008068582955119,
+      "grad_norm": 0.6315601468086243,
+      "learning_rate": 9.70011391316124e-06,
+      "loss": 0.7475,
+      "step": 397
+    },
+    {
+      "epoch": 0.8028240040342914,
+      "grad_norm": 0.5901455879211426,
+      "learning_rate": 9.511574820571095e-06,
+      "loss": 0.6464,
+      "step": 398
+    },
+    {
+      "epoch": 0.8048411497730711,
+      "grad_norm": 0.7051928639411926,
+      "learning_rate": 9.324693276174567e-06,
+      "loss": 0.6834,
+      "step": 399
+    },
+    {
+      "epoch": 0.8068582955118507,
+      "grad_norm": 0.676697313785553,
+      "learning_rate": 9.13947693068083e-06,
+      "loss": 0.6996,
+      "step": 400
+    },
+    {
+      "epoch": 0.8088754412506304,
+      "grad_norm": 0.3838101625442505,
+      "learning_rate": 8.955933366627778e-06,
+      "loss": 0.5033,
+      "step": 401
+    },
+    {
+      "epoch": 0.81089258698941,
+      "grad_norm": 0.4192909896373749,
+      "learning_rate": 8.774070098071668e-06,
+      "loss": 0.5151,
+      "step": 402
+    },
+    {
+      "epoch": 0.8129097327281896,
+      "grad_norm": 0.4482872188091278,
+      "learning_rate": 8.593894570279365e-06,
+      "loss": 0.5454,
+      "step": 403
+    },
+    {
+      "epoch": 0.8149268784669692,
+      "grad_norm": 0.4435177147388458,
+      "learning_rate": 8.415414159423707e-06,
+      "loss": 0.562,
+      "step": 404
+    },
+    {
+      "epoch": 0.8169440242057489,
+      "grad_norm": 0.43251660466194153,
+      "learning_rate": 8.238636172281395e-06,
+      "loss": 0.526,
+      "step": 405
+    },
+    {
+      "epoch": 0.8189611699445285,
+      "grad_norm": 0.425253689289093,
+      "learning_rate": 8.063567845933973e-06,
+      "loss": 0.4933,
+      "step": 406
+    },
+    {
+      "epoch": 0.8209783156833081,
+      "grad_norm": 0.44801023602485657,
+      "learning_rate": 7.890216347471407e-06,
+      "loss": 0.5181,
+      "step": 407
+    },
+    {
+      "epoch": 0.8229954614220878,
+      "grad_norm": 0.5983952879905701,
+      "learning_rate": 7.71858877369887e-06,
+      "loss": 0.5093,
+      "step": 408
+    },
+    {
+      "epoch": 0.8250126071608673,
+      "grad_norm": 0.4884449541568756,
+      "learning_rate": 7.548692150846021e-06,
+      "loss": 0.5683,
+      "step": 409
+    },
+    {
+      "epoch": 0.827029752899647,
+      "grad_norm": 0.4012419581413269,
+      "learning_rate": 7.380533434279457e-06,
+      "loss": 0.5273,
+      "step": 410
+    },
+    {
+      "epoch": 0.8290468986384266,
+      "grad_norm": 0.4229069650173187,
+      "learning_rate": 7.214119508217976e-06,
+      "loss": 0.5103,
+      "step": 411
+    },
+    {
+      "epoch": 0.8310640443772063,
+      "grad_norm": 0.4223465919494629,
+      "learning_rate": 7.049457185450692e-06,
+      "loss": 0.5044,
+      "step": 412
+    },
+    {
+      "epoch": 0.8330811901159859,
+      "grad_norm": 0.4037540853023529,
+      "learning_rate": 6.886553207058149e-06,
+      "loss": 0.4987,
+      "step": 413
+    },
+    {
+      "epoch": 0.8350983358547656,
+      "grad_norm": 0.4209170937538147,
+      "learning_rate": 6.725414242136363e-06,
+      "loss": 0.5184,
+      "step": 414
+    },
+    {
+      "epoch": 0.8371154815935451,
+      "grad_norm": 0.43744194507598877,
+      "learning_rate": 6.5660468875238076e-06,
+      "loss": 0.5205,
+      "step": 415
+    },
+    {
+      "epoch": 0.8391326273323247,
+      "grad_norm": 0.4216747283935547,
+      "learning_rate": 6.408457667531298e-06,
+      "loss": 0.5311,
+      "step": 416
+    },
+    {
+      "epoch": 0.8411497730711044,
+      "grad_norm": 0.43447908759117126,
+      "learning_rate": 6.2526530336749265e-06,
+      "loss": 0.5296,
+      "step": 417
+    },
+    {
+      "epoch": 0.843166918809884,
+      "grad_norm": 0.4566824436187744,
+      "learning_rate": 6.0986393644119735e-06,
+      "loss": 0.5821,
+      "step": 418
+    },
+    {
+      "epoch": 0.8451840645486637,
+      "grad_norm": 0.4413144886493683,
+      "learning_rate": 5.946422964879706e-06,
+      "loss": 0.5257,
+      "step": 419
+    },
+    {
+      "epoch": 0.8472012102874432,
+      "grad_norm": 0.40879538655281067,
+      "learning_rate": 5.796010066637319e-06,
+      "loss": 0.5379,
+      "step": 420
+    },
+    {
+      "epoch": 0.8492183560262229,
+      "grad_norm": 0.4062432050704956,
+      "learning_rate": 5.647406827410795e-06,
+      "loss": 0.5029,
+      "step": 421
+    },
+    {
+      "epoch": 0.8512355017650025,
+      "grad_norm": 0.41666179895401,
+      "learning_rate": 5.500619330840828e-06,
+      "loss": 0.5636,
+      "step": 422
+    },
+    {
+      "epoch": 0.8532526475037822,
+      "grad_norm": 0.4222431182861328,
+      "learning_rate": 5.35565358623375e-06,
+      "loss": 0.5327,
+      "step": 423
+    },
+    {
+      "epoch": 0.8552697932425618,
+      "grad_norm": 0.44431889057159424,
+      "learning_rate": 5.212515528315504e-06,
+      "loss": 0.5549,
+      "step": 424
+    },
+    {
+      "epoch": 0.8572869389813415,
+      "grad_norm": 0.43978357315063477,
+      "learning_rate": 5.0712110169887525e-06,
+      "loss": 0.5428,
+      "step": 425
+    },
+    {
+      "epoch": 0.859304084720121,
+      "grad_norm": 0.4200557768344879,
+      "learning_rate": 4.931745837092888e-06,
+      "loss": 0.5596,
+      "step": 426
+    },
+    {
+      "epoch": 0.8613212304589006,
+      "grad_norm": 0.4626789689064026,
+      "learning_rate": 4.794125698167262e-06,
+      "loss": 0.5136,
+      "step": 427
+    },
+    {
+      "epoch": 0.8633383761976803,
+      "grad_norm": 0.430312842130661,
+      "learning_rate": 4.658356234217437e-06,
+      "loss": 0.5331,
+      "step": 428
+    },
+    {
+      "epoch": 0.8653555219364599,
+      "grad_norm": 0.4193800389766693,
+      "learning_rate": 4.5244430034845465e-06,
+      "loss": 0.5229,
+      "step": 429
+    },
+    {
+      "epoch": 0.8673726676752396,
+      "grad_norm": 0.44044026732444763,
+      "learning_rate": 4.392391488217712e-06,
+      "loss": 0.555,
+      "step": 430
+    },
+    {
+      "epoch": 0.8693898134140191,
+      "grad_norm": 0.4835900068283081,
+      "learning_rate": 4.262207094449633e-06,
+      "loss": 0.5772,
+      "step": 431
+    },
+    {
+      "epoch": 0.8714069591527988,
+      "grad_norm": 0.4724687933921814,
+      "learning_rate": 4.133895151775263e-06,
+      "loss": 0.541,
+      "step": 432
+    },
+    {
+      "epoch": 0.8734241048915784,
+      "grad_norm": 0.4812853932380676,
+      "learning_rate": 4.0074609131336425e-06,
+      "loss": 0.5766,
+      "step": 433
+    },
+    {
+      "epoch": 0.875441250630358,
+      "grad_norm": 0.479755163192749,
+      "learning_rate": 3.8829095545928175e-06,
+      "loss": 0.5014,
+      "step": 434
+    },
+    {
+      "epoch": 0.8774583963691377,
+      "grad_norm": 0.465280145406723,
+      "learning_rate": 3.760246175137938e-06,
+      "loss": 0.667,
+      "step": 435
+    },
+    {
+      "epoch": 0.8794755421079173,
+      "grad_norm": 0.4816247820854187,
+      "learning_rate": 3.639475796462577e-06,
+      "loss": 0.5802,
+      "step": 436
+    },
+    {
+      "epoch": 0.8814926878466969,
+      "grad_norm": 0.4830552041530609,
+      "learning_rate": 3.520603362763014e-06,
+      "loss": 0.6388,
+      "step": 437
+    },
+    {
+      "epoch": 0.8835098335854765,
+      "grad_norm": 0.5149121284484863,
+      "learning_rate": 3.403633740535983e-06,
+      "loss": 0.6332,
+      "step": 438
+    },
+    {
+      "epoch": 0.8855269793242562,
+      "grad_norm": 0.5506238341331482,
+      "learning_rate": 3.2885717183793296e-06,
+      "loss": 0.6772,
+      "step": 439
+    },
+    {
+      "epoch": 0.8875441250630358,
+      "grad_norm": 0.46515923738479614,
+      "learning_rate": 3.1754220067960307e-06,
+      "loss": 0.5341,
+      "step": 440
+    },
+    {
+      "epoch": 0.8895612708018155,
+      "grad_norm": 0.5579814314842224,
+      "learning_rate": 3.0641892380013247e-06,
+      "loss": 0.744,
+      "step": 441
+    },
+    {
+      "epoch": 0.8915784165405951,
+      "grad_norm": 0.4864133596420288,
+      "learning_rate": 2.9548779657330607e-06,
+      "loss": 0.5521,
+      "step": 442
+    },
+    {
+      "epoch": 0.8935955622793746,
+      "grad_norm": 0.5291908383369446,
+      "learning_rate": 2.847492665065349e-06,
+      "loss": 0.5757,
+      "step": 443
+    },
+    {
+      "epoch": 0.8956127080181543,
+      "grad_norm": 0.5644931197166443,
+      "learning_rate": 2.7420377322252276e-06,
+      "loss": 0.601,
+      "step": 444
+    },
+    {
+      "epoch": 0.8976298537569339,
+      "grad_norm": 0.5345064997673035,
+      "learning_rate": 2.6385174844128334e-06,
+      "loss": 0.6898,
+      "step": 445
+    },
+    {
+      "epoch": 0.8996469994957136,
+      "grad_norm": 0.5461397767066956,
+      "learning_rate": 2.5369361596245367e-06,
+      "loss": 0.5881,
+      "step": 446
+    },
+    {
+      "epoch": 0.9016641452344932,
+      "grad_norm": 0.5114607810974121,
+      "learning_rate": 2.4372979164795308e-06,
+      "loss": 0.5368,
+      "step": 447
+    },
+    {
+      "epoch": 0.9036812909732728,
+      "grad_norm": 0.5776088833808899,
+      "learning_rate": 2.339606834049518e-06,
+      "loss": 0.7657,
+      "step": 448
+    },
+    {
+      "epoch": 0.9056984367120524,
+      "grad_norm": 0.6684572100639343,
+      "learning_rate": 2.243866911691761e-06,
+      "loss": 0.6967,
+      "step": 449
+    },
+    {
+      "epoch": 0.9077155824508321,
+      "grad_norm": 0.660133957862854,
+      "learning_rate": 2.1500820688853562e-06,
+      "loss": 0.6859,
+      "step": 450
+    },
+    {
+      "epoch": 0.9097327281896117,
+      "grad_norm": 0.3766048550605774,
+      "learning_rate": 2.0582561450707026e-06,
+      "loss": 0.5048,
+      "step": 451
+    },
+    {
+      "epoch": 0.9117498739283914,
+      "grad_norm": 0.40027275681495667,
+      "learning_rate": 1.9683928994924385e-06,
+      "loss": 0.5051,
+      "step": 452
+    },
+    {
+      "epoch": 0.913767019667171,
+      "grad_norm": 0.40732768177986145,
+      "learning_rate": 1.8804960110454406e-06,
+      "loss": 0.5266,
+      "step": 453
+    },
+    {
+      "epoch": 0.9157841654059505,
+      "grad_norm": 0.4341001808643341,
+      "learning_rate": 1.7945690781242752e-06,
+      "loss": 0.5716,
+      "step": 454
+    },
+    {
+      "epoch": 0.9178013111447302,
+      "grad_norm": 0.45314088463783264,
+      "learning_rate": 1.7106156184758249e-06,
+      "loss": 0.6243,
+      "step": 455
+    },
+    {
+      "epoch": 0.9198184568835098,
+      "grad_norm": 0.46327388286590576,
+      "learning_rate": 1.6286390690553599e-06,
+      "loss": 0.5353,
+      "step": 456
+    },
+    {
+      "epoch": 0.9218356026222895,
+      "grad_norm": 0.47164246439933777,
+      "learning_rate": 1.5486427858857567e-06,
+      "loss": 0.5876,
+      "step": 457
+    },
+    {
+      "epoch": 0.9238527483610691,
+      "grad_norm": 0.48069673776626587,
+      "learning_rate": 1.4706300439201348e-06,
+      "loss": 0.5481,
+      "step": 458
+    },
+    {
+      "epoch": 0.9258698940998488,
+      "grad_norm": 0.44389042258262634,
+      "learning_rate": 1.394604036907804e-06,
+      "loss": 0.5893,
+      "step": 459
+    },
+    {
+      "epoch": 0.9278870398386283,
+      "grad_norm": 0.3924350142478943,
+      "learning_rate": 1.3205678772634789e-06,
+      "loss": 0.5074,
+      "step": 460
+    },
+    {
+      "epoch": 0.929904185577408,
+      "grad_norm": 0.3960682153701782,
+      "learning_rate": 1.2485245959398927e-06,
+      "loss": 0.4862,
+      "step": 461
+    },
+    {
+      "epoch": 0.9319213313161876,
+      "grad_norm": 0.41863298416137695,
+      "learning_rate": 1.17847714230368e-06,
+      "loss": 0.5652,
+      "step": 462
+    },
+    {
+      "epoch": 0.9339384770549672,
+      "grad_norm": 0.40599218010902405,
+      "learning_rate": 1.1104283840146834e-06,
+      "loss": 0.4762,
+      "step": 463
+    },
+    {
+      "epoch": 0.9359556227937469,
+      "grad_norm": 0.39246606826782227,
+      "learning_rate": 1.044381106908493e-06,
+      "loss": 0.5321,
+      "step": 464
+    },
+    {
+      "epoch": 0.9379727685325264,
+      "grad_norm": 0.41904589533805847,
+      "learning_rate": 9.80338014882437e-07,
+      "loss": 0.5905,
+      "step": 465
+    },
+    {
+      "epoch": 0.9399899142713061,
+      "grad_norm": 0.40185776352882385,
+      "learning_rate": 9.183017297849039e-07,
+      "loss": 0.5819,
+      "step": 466
+    },
+    {
+      "epoch": 0.9420070600100857,
+      "grad_norm": 0.4809671640396118,
+      "learning_rate": 8.582747913079448e-07,
+      "loss": 0.5427,
+      "step": 467
+    },
+    {
+      "epoch": 0.9440242057488654,
+      "grad_norm": 0.4338788092136383,
+      "learning_rate": 8.00259656883362e-07,
+      "loss": 0.5202,
+      "step": 468
+    },
+    {
+      "epoch": 0.946041351487645,
+      "grad_norm": 0.43927136063575745,
+      "learning_rate": 7.442587015820734e-07,
+      "loss": 0.5411,
+      "step": 469
+    },
+    {
+      "epoch": 0.9480584972264247,
+      "grad_norm": 0.4315224885940552,
+      "learning_rate": 6.902742180168953e-07,
+      "loss": 0.5392,
+      "step": 470
+    },
+    {
+      "epoch": 0.9500756429652042,
+      "grad_norm": 0.4636887013912201,
+      "learning_rate": 6.38308416248673e-07,
+      "loss": 0.5704,
+      "step": 471
+    },
+    {
+      "epoch": 0.9520927887039838,
+      "grad_norm": 0.42391523718833923,
+      "learning_rate": 5.883634236958091e-07,
+      "loss": 0.5419,
+      "step": 472
+    },
+    {
+      "epoch": 0.9541099344427635,
+      "grad_norm": 0.4391375482082367,
+      "learning_rate": 5.404412850471719e-07,
+      "loss": 0.5528,
+      "step": 473
+    },
+    {
+      "epoch": 0.9561270801815431,
+      "grad_norm": 0.4722242057323456,
+      "learning_rate": 4.945439621783843e-07,
+      "loss": 0.5146,
+      "step": 474
+    },
+    {
+      "epoch": 0.9581442259203228,
+      "grad_norm": 0.455690860748291,
+      "learning_rate": 4.506733340714997e-07,
+      "loss": 0.5337,
+      "step": 475
+    },
+    {
+      "epoch": 0.9601613716591024,
+      "grad_norm": 0.4465389549732208,
+      "learning_rate": 4.08831196738102e-07,
+      "loss": 0.5657,
+      "step": 476
+    },
+    {
+      "epoch": 0.962178517397882,
+      "grad_norm": 0.47597572207450867,
+      "learning_rate": 3.6901926314575894e-07,
+      "loss": 0.6033,
+      "step": 477
+    },
+    {
+      "epoch": 0.9641956631366616,
+      "grad_norm": 0.4470367133617401,
+      "learning_rate": 3.312391631479006e-07,
+      "loss": 0.5517,
+      "step": 478
+    },
+    {
+      "epoch": 0.9662128088754413,
+      "grad_norm": 0.4683283865451813,
+      "learning_rate": 2.9549244341708916e-07,
+      "loss": 0.5747,
+      "step": 479
+    },
+    {
+      "epoch": 0.9682299546142209,
+      "grad_norm": 0.4400279223918915,
+      "learning_rate": 2.617805673817086e-07,
+      "loss": 0.5459,
+      "step": 480
+    },
+    {
+      "epoch": 0.9702471003530005,
+      "grad_norm": 0.48166075348854065,
+      "learning_rate": 2.301049151660628e-07,
+      "loss": 0.5339,
+      "step": 481
+    },
+    {
+      "epoch": 0.9722642460917801,
+      "grad_norm": 0.4849461019039154,
+      "learning_rate": 2.004667835338425e-07,
+      "loss": 0.5633,
+      "step": 482
+    },
+    {
+      "epoch": 0.9742813918305597,
+      "grad_norm": 0.46600455045700073,
+      "learning_rate": 1.7286738583507933e-07,
+      "loss": 0.5857,
+      "step": 483
+    },
+    {
+      "epoch": 0.9762985375693394,
+      "grad_norm": 0.4829322397708893,
+      "learning_rate": 1.4730785195643527e-07,
+      "loss": 0.6381,
+      "step": 484
+    },
+    {
+      "epoch": 0.978315683308119,
+      "grad_norm": 0.5022732019424438,
+      "learning_rate": 1.2378922827496199e-07,
+      "loss": 0.6468,
+      "step": 485
+    },
+    {
+      "epoch": 0.9803328290468987,
+      "grad_norm": 0.5150927901268005,
+      "learning_rate": 1.0231247761528506e-07,
+      "loss": 0.5638,
+      "step": 486
+    },
+    {
+      "epoch": 0.9823499747856783,
+      "grad_norm": 0.5347169041633606,
+      "learning_rate": 8.287847921013559e-08,
+      "loss": 0.6239,
+      "step": 487
+    },
+    {
+      "epoch": 0.9843671205244579,
+      "grad_norm": 0.49782970547676086,
+      "learning_rate": 6.548802866441217e-08,
+      "loss": 0.6322,
+      "step": 488
+    },
+    {
+      "epoch": 0.9863842662632375,
+      "grad_norm": 0.5174301862716675,
+      "learning_rate": 5.014183792256266e-08,
+      "loss": 0.6164,
+      "step": 489
+    },
+    {
+      "epoch": 0.9884014120020171,
+      "grad_norm": 0.5362997651100159,
+      "learning_rate": 3.684053523947406e-08,
+      "loss": 0.6095,
+      "step": 490
+    },
+    {
+      "epoch": 0.9904185577407968,
+      "grad_norm": 0.5228421092033386,
+      "learning_rate": 2.558466515473201e-08,
+      "loss": 0.6369,
+      "step": 491
+    },
+    {
+      "epoch": 0.9924357034795764,
+      "grad_norm": 0.5408637523651123,
+      "learning_rate": 1.6374688470327527e-08,
+      "loss": 0.6323,
+      "step": 492
+    },
+    {
+      "epoch": 0.994452849218356,
+      "grad_norm": 0.6218075752258301,
+      "learning_rate": 9.210982231805387e-09,
+      "loss": 0.5916,
+      "step": 493
+    },
+    {
+      "epoch": 0.9964699949571356,
+      "grad_norm": 0.5772291421890259,
+      "learning_rate": 4.093839712815406e-09,
+      "loss": 0.7449,
+      "step": 494
+    },
+    {
+      "epoch": 0.9984871406959153,
+      "grad_norm": 0.6290249228477478,
+      "learning_rate": 1.0234704031220066e-09,
+      "loss": 0.6259,
+      "step": 495
+    },
+    {
+      "epoch": 0.9984871406959153,
+      "eval_loss": 0.5574879050254822,
+      "eval_runtime": 47.6442,
+      "eval_samples_per_second": 8.773,
+      "eval_steps_per_second": 2.204,
+      "step": 495
+    },
+    {
+      "epoch": 1.0015128593040847,
+      "grad_norm": 1.4135169982910156,
+      "learning_rate": 0.0,
+      "loss": 1.1306,
+      "step": 496
     }
   ],
   "logging_steps": 1,
@@ -2625,12 +3501,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 8.407795184393257e+17,
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null
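For reference, the updated trainer_state.json records the end of the run: global_step 496 at epoch 1.0015128593040847, a final training loss of 1.1306, an eval_loss of 0.5574879050254822 at step 495, and should_training_stop set to true. A small stdlib-only sketch for reading those fields back out of the committed checkpoint directory:

    import json

    # Load the trainer state saved alongside the checkpoint.
    with open("last-checkpoint/trainer_state.json") as f:
        state = json.load(f)

    print("global_step:", state["global_step"])          # 496
    print("epoch:", state["epoch"])                      # ~1.0015
    print("final log entry:", state["log_history"][-1])  # step 496 metrics

    # Most recent evaluation record, if any.
    evals = [e for e in state["log_history"] if "eval_loss" in e]
    if evals:
        print("eval_loss:", evals[-1]["eval_loss"])      # 0.5574879050254822 at step 495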