Training in progress, step 50000, checkpoint
- last-checkpoint/adapter_model.safetensors +1 -1
- last-checkpoint/global_step50000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt +3 -0
- last-checkpoint/global_step50000/mp_rank_00_model_states.pt +3 -0
- last-checkpoint/latest +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/trainer_state.json +1403 -3
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9b6d50b51eaeaf39e5b98cd5da2106095b1eadd36c42f5444608ce74d1a8ca0b
 size 42002584
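Each large file in this commit is stored as a Git LFS pointer: a version line, a sha256 oid, and the payload size in bytes. A minimal sketch of checking a downloaded copy against the pointer values shown above; the local path is an assumption, and only the Python standard library is used:

import hashlib
from pathlib import Path

# Pointer values copied from the adapter_model.safetensors diff above.
path = Path("last-checkpoint/adapter_model.safetensors")   # assumed local path
expected_oid = "9b6d50b51eaeaf39e5b98cd5da2106095b1eadd36c42f5444608ce74d1a8ca0b"
expected_size = 42002584

data = path.read_bytes()
assert len(data) == expected_size, "size does not match the LFS pointer"
assert hashlib.sha256(data).hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("local file matches the LFS pointer")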
last-checkpoint/global_step50000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcf17c5fde91af90da7941726b37394dae64a048f39a02257d8b0f49bf086ec0
+size 251710672
last-checkpoint/global_step50000/mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8f11d211be2ef28ea17002a9cff69b332f736f2857db44afe14dc9ad1d9cf0e0
+size 153747385
last-checkpoint/latest CHANGED
@@ -1 +1 @@
-
+global_step50000
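The latest marker is the tag DeepSpeed uses to locate the engine state for a resume; it now points at the global_step50000 directory that holds the optimizer and model state files added above. A small sketch, assuming the checkpoint was downloaded to a local last-checkpoint/ directory and that training uses the standard transformers Trainer with the same DeepSpeed config:

from pathlib import Path

ckpt_dir = Path("last-checkpoint")                  # assumed local path
tag = (ckpt_dir / "latest").read_text().strip()     # "global_step50000"
print("DeepSpeed engine states:", sorted(p.name for p in (ckpt_dir / tag).iterdir()))

# Resuming is then a matter of pointing the DeepSpeed-configured Trainer at the
# checkpoint directory, e.g. trainer.train(resume_from_checkpoint=str(ckpt_dir)).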
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6adf9f8aec42d3e779c271fa8f3a6228f4d8e88b7de5083bf000a76dfc53c984
 size 14244
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 1.
+  "epoch": 1.4928492520825247,
   "eval_steps": 1000,
-  "global_step":
+  "global_step": 50000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -5614,6 +5614,1406 @@
       "learning_rate": 0.0001885730775604432,
       "loss": 1.3047,
       "step": 40000
+    },
+    {"epoch": 1.1957722509181024, "grad_norm": 4.373313903808594, "learning_rate": 0.00018855879176452438, "loss": 1.366, "step": 40050},
+    {"epoch": 1.1972651001701848, "grad_norm": 4.842396259307861, "learning_rate": 0.00018854450596860554, "loss": 1.3294, "step": 40100},
+    {"epoch": 1.1987579494222673, "grad_norm": 5.024688720703125, "learning_rate": 0.0001885302201726867, "loss": 1.3531, "step": 40150},
+    {"epoch": 1.2002507986743498, "grad_norm": 5.733693599700928, "learning_rate": 0.00018851593437676787, "loss": 1.3177, "step": 40200},
+    {"epoch": 1.2017436479264325, "grad_norm": 4.634098529815674, "learning_rate": 0.00018850164858084904, "loss": 1.3534, "step": 40250},
+    {"epoch": 1.203236497178515, "grad_norm": 5.662175178527832, "learning_rate": 0.0001884873627849302, "loss": 1.2813, "step": 40300},
+    {"epoch": 1.2047293464305975, "grad_norm": 4.25814151763916, "learning_rate": 0.00018847307698901137, "loss": 1.3704, "step": 40350},
+    {"epoch": 1.20622219568268, "grad_norm": 4.733590126037598, "learning_rate": 0.00018845879119309253, "loss": 1.2999, "step": 40400},
+    {"epoch": 1.2077150449347624, "grad_norm": 5.873198509216309, "learning_rate": 0.0001884445053971737, "loss": 1.3697, "step": 40450},
+    {"epoch": 1.209207894186845, "grad_norm": 4.847556114196777, "learning_rate": 0.00018843021960125486, "loss": 1.2706, "step": 40500},
+    {"epoch": 1.2107007434389274, "grad_norm": 4.76710319519043, "learning_rate": 0.00018841593380533605, "loss": 1.3873, "step": 40550},
+    {"epoch": 1.2121935926910101, "grad_norm": 4.693939685821533, "learning_rate": 0.0001884016480094172, "loss": 1.2752, "step": 40600},
+    {"epoch": 1.2136864419430926, "grad_norm": 5.377971649169922, "learning_rate": 0.00018838736221349838, "loss": 1.3083, "step": 40650},
+    {"epoch": 1.215179291195175, "grad_norm": 4.257247447967529, "learning_rate": 0.00018837307641757952, "loss": 1.3318, "step": 40700},
+    {"epoch": 1.2166721404472576, "grad_norm": 4.820394039154053, "learning_rate": 0.0001883587906216607, "loss": 1.3063, "step": 40750},
+    {"epoch": 1.21816498969934, "grad_norm": 3.7783172130584717, "learning_rate": 0.00018834450482574188, "loss": 1.2765, "step": 40800},
+    {"epoch": 1.2196578389514228, "grad_norm": 5.447601795196533, "learning_rate": 0.00018833021902982304, "loss": 1.3067, "step": 40850},
+    {"epoch": 1.2211506882035053, "grad_norm": 4.7725510597229, "learning_rate": 0.0001883159332339042, "loss": 1.3366, "step": 40900},
+    {"epoch": 1.2226435374555877, "grad_norm": 5.6868672370910645, "learning_rate": 0.00018830164743798537, "loss": 1.3441, "step": 40950},
+    {"epoch": 1.2241363867076702, "grad_norm": 8.460538864135742, "learning_rate": 0.00018828736164206653, "loss": 1.3305, "step": 41000},
+    {"epoch": 1.2256292359597527, "grad_norm": 5.594083309173584, "learning_rate": 0.0001882730758461477, "loss": 1.2825, "step": 41050},
+    {"epoch": 1.2271220852118354, "grad_norm": 6.125982284545898, "learning_rate": 0.00018825879005022886, "loss": 1.3253, "step": 41100},
+    {"epoch": 1.228614934463918, "grad_norm": 4.173381328582764, "learning_rate": 0.00018824450425431003, "loss": 1.3363, "step": 41150},
+    {"epoch": 1.2301077837160004, "grad_norm": 4.884373664855957, "learning_rate": 0.0001882302184583912, "loss": 1.3124, "step": 41200},
+    {"epoch": 1.2316006329680829, "grad_norm": 6.418700695037842, "learning_rate": 0.00018821593266247239, "loss": 1.3123, "step": 41250},
+    {"epoch": 1.2330934822201653, "grad_norm": 4.717264175415039, "learning_rate": 0.00018820164686655352, "loss": 1.287, "step": 41300},
+    {"epoch": 1.234586331472248, "grad_norm": 5.55648136138916, "learning_rate": 0.00018818736107063471, "loss": 1.3426, "step": 41350},
+    {"epoch": 1.2360791807243305, "grad_norm": 5.190313339233398, "learning_rate": 0.00018817307527471585, "loss": 1.2945, "step": 41400},
+    {"epoch": 1.237572029976413, "grad_norm": 6.027787208557129, "learning_rate": 0.00018815878947879704, "loss": 1.3313, "step": 41450},
+    {"epoch": 1.2390648792284955, "grad_norm": 4.4704413414001465, "learning_rate": 0.00018814450368287818, "loss": 1.3031, "step": 41500},
+    {"epoch": 1.240557728480578, "grad_norm": 4.408693790435791, "learning_rate": 0.00018813021788695937, "loss": 1.287, "step": 41550},
+    {"epoch": 1.2420505777326605, "grad_norm": 5.230643272399902, "learning_rate": 0.00018811593209104054, "loss": 1.2846, "step": 41600},
+    {"epoch": 1.243543426984743, "grad_norm": 4.442893981933594, "learning_rate": 0.0001881016462951217, "loss": 1.3199, "step": 41650},
+    {"epoch": 1.2450362762368257, "grad_norm": 3.9362857341766357, "learning_rate": 0.00018808736049920287, "loss": 1.3301, "step": 41700},
+    {"epoch": 1.2465291254889082, "grad_norm": 4.765340805053711, "learning_rate": 0.00018807307470328403, "loss": 1.3092, "step": 41750},
+    {"epoch": 1.2480219747409906, "grad_norm": 4.7868146896362305, "learning_rate": 0.0001880587889073652, "loss": 1.3077, "step": 41800},
+    {"epoch": 1.2495148239930731, "grad_norm": 5.724763870239258, "learning_rate": 0.00018804450311144636, "loss": 1.3035, "step": 41850},
+    {"epoch": 1.2510076732451556, "grad_norm": 5.832367897033691, "learning_rate": 0.00018803021731552753, "loss": 1.3703, "step": 41900},
+    {"epoch": 1.2525005224972383, "grad_norm": 5.079206466674805, "learning_rate": 0.0001880159315196087, "loss": 1.3328, "step": 41950},
+    {"epoch": 1.2539933717493208, "grad_norm": 5.704753398895264, "learning_rate": 0.00018800164572368986, "loss": 1.354, "step": 42000},
+    {"epoch": 1.2554862210014033, "grad_norm": 5.396346569061279, "learning_rate": 0.00018798735992777105, "loss": 1.2794, "step": 42050},
+    {"epoch": 1.2569790702534858, "grad_norm": 5.461711406707764, "learning_rate": 0.00018797307413185218, "loss": 1.3335, "step": 42100},
+    {"epoch": 1.2584719195055682, "grad_norm": 3.3528285026550293, "learning_rate": 0.00018795878833593338, "loss": 1.296, "step": 42150},
+    {"epoch": 1.259964768757651, "grad_norm": 4.413696765899658, "learning_rate": 0.00018794450254001451, "loss": 1.2828, "step": 42200},
+    {"epoch": 1.2614576180097334, "grad_norm": 5.430131435394287, "learning_rate": 0.0001879302167440957, "loss": 1.3107, "step": 42250},
+    {"epoch": 1.262950467261816, "grad_norm": 4.623260974884033, "learning_rate": 0.00018791593094817684, "loss": 1.3252, "step": 42300},
+    {"epoch": 1.2644433165138984, "grad_norm": 5.377962112426758, "learning_rate": 0.00018790164515225803, "loss": 1.3073, "step": 42350},
+    {"epoch": 1.265936165765981, "grad_norm": 5.34269905090332, "learning_rate": 0.0001878873593563392, "loss": 1.3705, "step": 42400},
+    {"epoch": 1.2674290150180636, "grad_norm": 5.574904918670654, "learning_rate": 0.00018787307356042036, "loss": 1.3848, "step": 42450},
+    {"epoch": 1.268921864270146, "grad_norm": 4.49995231628418, "learning_rate": 0.00018785878776450153, "loss": 1.3169, "step": 42500},
+    {"epoch": 1.2704147135222286, "grad_norm": 3.9160735607147217, "learning_rate": 0.00018784450196858267, "loss": 1.2994, "step": 42550},
+    {"epoch": 1.271907562774311, "grad_norm": 6.018070697784424, "learning_rate": 0.00018783021617266386, "loss": 1.3213, "step": 42600},
+    {"epoch": 1.2734004120263935, "grad_norm": 5.017271041870117, "learning_rate": 0.000187815930376745, "loss": 1.2986, "step": 42650},
+    {"epoch": 1.274893261278476, "grad_norm": 4.147556304931641, "learning_rate": 0.0001878016445808262, "loss": 1.2738, "step": 42700},
+    {"epoch": 1.2763861105305585, "grad_norm": 4.864907741546631, "learning_rate": 0.00018778735878490735, "loss": 1.3414, "step": 42750},
+    {"epoch": 1.2778789597826412, "grad_norm": 4.564859390258789, "learning_rate": 0.00018777307298898852, "loss": 1.3028, "step": 42800},
+    {"epoch": 1.2793718090347237, "grad_norm": 4.96591854095459, "learning_rate": 0.00018775878719306968, "loss": 1.3197, "step": 42850},
+    {"epoch": 1.2808646582868062, "grad_norm": 4.384543418884277, "learning_rate": 0.00018774450139715085, "loss": 1.2802, "step": 42900},
+    {"epoch": 1.2823575075388887, "grad_norm": 3.956608295440674, "learning_rate": 0.000187730215601232, "loss": 1.2764, "step": 42950},
+    {"epoch": 1.2838503567909711, "grad_norm": 4.815617561340332, "learning_rate": 0.00018771592980531318, "loss": 1.2384, "step": 43000},
+    {"epoch": 1.2853432060430539, "grad_norm": 4.381842136383057, "learning_rate": 0.00018770164400939434, "loss": 1.3016, "step": 43050},
+    {"epoch": 1.2868360552951363, "grad_norm": 6.777477741241455, "learning_rate": 0.0001876873582134755, "loss": 1.2956, "step": 43100},
+    {"epoch": 1.2883289045472188, "grad_norm": 4.342850208282471, "learning_rate": 0.00018767307241755667, "loss": 1.338, "step": 43150},
+    {"epoch": 1.2898217537993013, "grad_norm": 5.081860065460205, "learning_rate": 0.00018765878662163786, "loss": 1.292, "step": 43200},
+    {"epoch": 1.2913146030513838, "grad_norm": 4.7942986488342285, "learning_rate": 0.000187644500825719, "loss": 1.2969, "step": 43250},
+    {"epoch": 1.2928074523034665, "grad_norm": 3.766878604888916, "learning_rate": 0.0001876302150298002, "loss": 1.3561, "step": 43300},
+    {"epoch": 1.294300301555549, "grad_norm": 5.662395000457764, "learning_rate": 0.00018761592923388133, "loss": 1.2993, "step": 43350},
+    {"epoch": 1.2957931508076315, "grad_norm": 4.3839545249938965, "learning_rate": 0.00018760164343796252, "loss": 1.3283, "step": 43400},
+    {"epoch": 1.297286000059714, "grad_norm": 5.862764835357666, "learning_rate": 0.00018758735764204368, "loss": 1.3876, "step": 43450},
+    {"epoch": 1.2987788493117964, "grad_norm": 4.883128643035889, "learning_rate": 0.00018757307184612485, "loss": 1.3456, "step": 43500},
+    {"epoch": 1.3002716985638791, "grad_norm": 5.6903533935546875, "learning_rate": 0.000187558786050206, "loss": 1.351, "step": 43550},
+    {"epoch": 1.3017645478159616, "grad_norm": 3.8554911613464355, "learning_rate": 0.00018754450025428718, "loss": 1.3385, "step": 43600},
+    {"epoch": 1.303257397068044, "grad_norm": 4.693499565124512, "learning_rate": 0.00018753021445836834, "loss": 1.2923, "step": 43650},
+    {"epoch": 1.3047502463201266, "grad_norm": 4.351410388946533, "learning_rate": 0.0001875159286624495, "loss": 1.37, "step": 43700},
+    {"epoch": 1.306243095572209, "grad_norm": 3.915044069290161, "learning_rate": 0.00018750164286653067, "loss": 1.3378, "step": 43750},
+    {"epoch": 1.3077359448242916, "grad_norm": 5.484678745269775, "learning_rate": 0.00018748735707061184, "loss": 1.2753, "step": 43800},
+    {"epoch": 1.309228794076374, "grad_norm": 4.64120626449585, "learning_rate": 0.000187473071274693, "loss": 1.346, "step": 43850},
+    {"epoch": 1.3107216433284568, "grad_norm": 6.287013053894043, "learning_rate": 0.00018745878547877417, "loss": 1.3233, "step": 43900},
+    {"epoch": 1.3122144925805392, "grad_norm": 4.840941905975342, "learning_rate": 0.00018744449968285533, "loss": 1.3651, "step": 43950},
+    {"epoch": 1.3137073418326217, "grad_norm": 4.027961730957031, "learning_rate": 0.00018743021388693652, "loss": 1.3241, "step": 44000},
+    {"epoch": 1.3152001910847042, "grad_norm": 5.4013190269470215, "learning_rate": 0.00018741592809101766, "loss": 1.3048, "step": 44050},
+    {"epoch": 1.3166930403367867, "grad_norm": 5.291625022888184, "learning_rate": 0.00018740164229509885, "loss": 1.3171, "step": 44100},
+    {"epoch": 1.3181858895888694, "grad_norm": 3.7480239868164062, "learning_rate": 0.00018738735649918, "loss": 1.3527, "step": 44150},
+    {"epoch": 1.3196787388409519, "grad_norm": 3.080198287963867, "learning_rate": 0.00018737307070326118, "loss": 1.3782, "step": 44200},
+    {"epoch": 1.3211715880930344, "grad_norm": 5.115420818328857, "learning_rate": 0.00018735878490734235, "loss": 1.3359, "step": 44250},
+    {"epoch": 1.3226644373451169, "grad_norm": 4.522632598876953, "learning_rate": 0.0001873444991114235, "loss": 1.3557, "step": 44300},
+    {"epoch": 1.3241572865971993, "grad_norm": 5.980693817138672, "learning_rate": 0.00018733021331550468, "loss": 1.3356, "step": 44350},
+    {"epoch": 1.325650135849282, "grad_norm": 4.4641947746276855, "learning_rate": 0.00018731592751958584, "loss": 1.3179, "step": 44400},
+    {"epoch": 1.3271429851013645, "grad_norm": 5.057164669036865, "learning_rate": 0.000187301641723667, "loss": 1.2966, "step": 44450},
+    {"epoch": 1.328635834353447, "grad_norm": 4.383616924285889, "learning_rate": 0.00018728735592774817, "loss": 1.317, "step": 44500},
+    {"epoch": 1.3301286836055295, "grad_norm": 5.158847332000732, "learning_rate": 0.00018727307013182933, "loss": 1.3278, "step": 44550},
+    {"epoch": 1.331621532857612, "grad_norm": 4.217287063598633, "learning_rate": 0.0001872587843359105, "loss": 1.2695, "step": 44600},
+    {"epoch": 1.3331143821096947, "grad_norm": 3.9984827041625977, "learning_rate": 0.00018724449853999166, "loss": 1.287, "step": 44650},
+    {"epoch": 1.3346072313617772, "grad_norm": 3.536656379699707, "learning_rate": 0.00018723021274407285, "loss": 1.3528, "step": 44700},
+    {"epoch": 1.3361000806138597, "grad_norm": 8.800326347351074, "learning_rate": 0.000187215926948154, "loss": 1.3632, "step": 44750},
+    {"epoch": 1.3375929298659421, "grad_norm": 5.808169364929199, "learning_rate": 0.00018720164115223518, "loss": 1.3802, "step": 44800},
+    {"epoch": 1.3390857791180246, "grad_norm": 3.631208896636963, "learning_rate": 0.00018718735535631632, "loss": 1.3953, "step": 44850},
+    {"epoch": 1.340578628370107, "grad_norm": 3.4300222396850586, "learning_rate": 0.0001871730695603975, "loss": 1.3154, "step": 44900},
+    {"epoch": 1.3420714776221896, "grad_norm": 4.281446933746338, "learning_rate": 0.00018715878376447865, "loss": 1.3453, "step": 44950},
+    {"epoch": 1.3435643268742723, "grad_norm": 4.406055450439453, "learning_rate": 0.00018714449796855984, "loss": 1.3131, "step": 45000},
+    {"epoch": 1.3450571761263548, "grad_norm": 4.559666156768799, "learning_rate": 0.000187130212172641, "loss": 1.3005, "step": 45050},
+    {"epoch": 1.3465500253784373, "grad_norm": 4.3830342292785645, "learning_rate": 0.00018711592637672217, "loss": 1.2796, "step": 45100},
+    {"epoch": 1.3480428746305197, "grad_norm": 3.840522289276123, "learning_rate": 0.00018710164058080334, "loss": 1.3683, "step": 45150},
+    {"epoch": 1.3495357238826022, "grad_norm": 6.221634387969971, "learning_rate": 0.00018708735478488447, "loss": 1.3393, "step": 45200},
+    {"epoch": 1.351028573134685, "grad_norm": 5.086376190185547, "learning_rate": 0.00018707306898896567, "loss": 1.3007, "step": 45250},
+    {"epoch": 1.3525214223867674, "grad_norm": 5.3263373374938965, "learning_rate": 0.0001870587831930468, "loss": 1.3334, "step": 45300},
+    {"epoch": 1.35401427163885, "grad_norm": 3.790532350540161, "learning_rate": 0.000187044497397128, "loss": 1.3282, "step": 45350},
+    {"epoch": 1.3555071208909324, "grad_norm": 5.929101943969727, "learning_rate": 0.00018703021160120916, "loss": 1.3048, "step": 45400},
+    {"epoch": 1.3569999701430149, "grad_norm": 4.900948524475098, "learning_rate": 0.00018701592580529032, "loss": 1.3282, "step": 45450},
+    {"epoch": 1.3584928193950976, "grad_norm": 4.735415935516357, "learning_rate": 0.0001870016400093715, "loss": 1.3076, "step": 45500},
+    {"epoch": 1.35998566864718, "grad_norm": 3.394000768661499, "learning_rate": 0.00018698735421345265, "loss": 1.3283, "step": 45550},
+    {"epoch": 1.3614785178992626, "grad_norm": 4.902571678161621, "learning_rate": 0.00018697306841753382, "loss": 1.3456, "step": 45600},
+    {"epoch": 1.362971367151345, "grad_norm": 3.773273229598999, "learning_rate": 0.00018695878262161498, "loss": 1.3095, "step": 45650},
+    {"epoch": 1.3644642164034275, "grad_norm": 3.6063497066497803, "learning_rate": 0.00018694449682569615, "loss": 1.3088, "step": 45700},
+    {"epoch": 1.3659570656555102, "grad_norm": 5.380884170532227, "learning_rate": 0.0001869302110297773, "loss": 1.2992, "step": 45750},
+    {"epoch": 1.3674499149075927, "grad_norm": 3.108400821685791, "learning_rate": 0.00018691592523385848, "loss": 1.3638, "step": 45800},
+    {"epoch": 1.3689427641596752, "grad_norm": 5.948930740356445, "learning_rate": 0.00018690163943793967, "loss": 1.4066, "step": 45850},
+    {"epoch": 1.3704356134117577, "grad_norm": 3.5984768867492676, "learning_rate": 0.0001868873536420208, "loss": 1.3167, "step": 45900},
+    {"epoch": 1.3719284626638402, "grad_norm": 4.595841884613037, "learning_rate": 0.000186873067846102, "loss": 1.2317, "step": 45950},
+    {"epoch": 1.3734213119159226, "grad_norm": 4.499626159667969, "learning_rate": 0.00018685878205018314, "loss": 1.3094, "step": 46000},
+    {"epoch": 1.3749141611680051, "grad_norm": 4.876091480255127, "learning_rate": 0.00018684449625426433, "loss": 1.3723, "step": 46050},
+    {"epoch": 1.3764070104200878, "grad_norm": 5.354704856872559, "learning_rate": 0.00018683021045834547, "loss": 1.3672, "step": 46100},
+    {"epoch": 1.3778998596721703, "grad_norm": 4.629252910614014, "learning_rate": 0.00018681592466242666, "loss": 1.3295, "step": 46150},
+    {"epoch": 1.3793927089242528, "grad_norm": 3.6931405067443848, "learning_rate": 0.00018680163886650782, "loss": 1.2989, "step": 46200},
+    {"epoch": 1.3808855581763353, "grad_norm": 6.378023624420166, "learning_rate": 0.00018678735307058899, "loss": 1.3022, "step": 46250},
+    {"epoch": 1.3823784074284178, "grad_norm": 4.780524253845215, "learning_rate": 0.00018677306727467015, "loss": 1.298, "step": 46300},
+    {"epoch": 1.3838712566805005, "grad_norm": 4.4194560050964355, "learning_rate": 0.00018675878147875132, "loss": 1.3396, "step": 46350},
+    {"epoch": 1.385364105932583, "grad_norm": 5.250397205352783, "learning_rate": 0.00018674449568283248, "loss": 1.347, "step": 46400},
+    {"epoch": 1.3868569551846655, "grad_norm": 5.2740607261657715, "learning_rate": 0.00018673020988691364, "loss": 1.3962, "step": 46450},
+    {"epoch": 1.388349804436748, "grad_norm": 4.3260884284973145, "learning_rate": 0.0001867159240909948, "loss": 1.3236, "step": 46500},
+    {"epoch": 1.3898426536888304, "grad_norm": 4.773256778717041, "learning_rate": 0.00018670163829507597, "loss": 1.324, "step": 46550},
+    {"epoch": 1.3913355029409131, "grad_norm": 6.666245460510254, "learning_rate": 0.00018668735249915714, "loss": 1.3242, "step": 46600},
+    {"epoch": 1.3928283521929956, "grad_norm": 7.051351547241211, "learning_rate": 0.00018667306670323833, "loss": 1.3228, "step": 46650},
+    {"epoch": 1.394321201445078, "grad_norm": 7.103764057159424, "learning_rate": 0.00018665878090731947, "loss": 1.3815, "step": 46700},
+    {"epoch": 1.3958140506971606, "grad_norm": 4.093582630157471, "learning_rate": 0.00018664449511140066, "loss": 1.3352, "step": 46750},
+    {"epoch": 1.397306899949243, "grad_norm": 4.276589870452881, "learning_rate": 0.0001866302093154818, "loss": 1.2578, "step": 46800},
+    {"epoch": 1.3987997492013258, "grad_norm": 4.325264930725098, "learning_rate": 0.000186615923519563, "loss": 1.297, "step": 46850},
+    {"epoch": 1.4002925984534083, "grad_norm": 4.978733062744141, "learning_rate": 0.00018660163772364415, "loss": 1.3142, "step": 46900},
+    {"epoch": 1.4017854477054907, "grad_norm": 4.2968854904174805, "learning_rate": 0.00018658735192772532, "loss": 1.3085, "step": 46950},
+    {"epoch": 1.4032782969575732, "grad_norm": 3.587477922439575, "learning_rate": 0.00018657306613180648, "loss": 1.3348, "step": 47000},
+    {"epoch": 1.4047711462096557, "grad_norm": 5.918453693389893, "learning_rate": 0.00018655878033588765, "loss": 1.3163, "step": 47050},
+    {"epoch": 1.4062639954617382, "grad_norm": 4.217932224273682, "learning_rate": 0.0001865444945399688, "loss": 1.358, "step": 47100},
+    {"epoch": 1.4077568447138207, "grad_norm": 4.987464904785156, "learning_rate": 0.00018653020874404998, "loss": 1.2865, "step": 47150},
+    {"epoch": 1.4092496939659034, "grad_norm": 4.03493070602417, "learning_rate": 0.00018651592294813114, "loss": 1.2961, "step": 47200},
+    {"epoch": 1.4107425432179859, "grad_norm": 4.768476486206055, "learning_rate": 0.0001865016371522123, "loss": 1.3301, "step": 47250},
+    {"epoch": 1.4122353924700684, "grad_norm": 4.223608016967773, "learning_rate": 0.00018648735135629347, "loss": 1.358, "step": 47300},
+    {"epoch": 1.4137282417221508, "grad_norm": 5.97542142868042, "learning_rate": 0.00018647306556037464, "loss": 1.3001, "step": 47350},
+    {"epoch": 1.4152210909742333, "grad_norm": 3.4876928329467773, "learning_rate": 0.0001864587797644558, "loss": 1.358, "step": 47400},
+    {"epoch": 1.416713940226316, "grad_norm": 3.8894033432006836, "learning_rate": 0.000186444493968537, "loss": 1.3079, "step": 47450},
+    {"epoch": 1.4182067894783985, "grad_norm": 5.399030685424805, "learning_rate": 0.00018643020817261813, "loss": 1.3059, "step": 47500},
+    {"epoch": 1.419699638730481, "grad_norm": 3.9021189212799072, "learning_rate": 0.00018641592237669932, "loss": 1.2862, "step": 47550},
+    {"epoch": 1.4211924879825635, "grad_norm": 4.103232383728027, "learning_rate": 0.00018640163658078046, "loss": 1.3533, "step": 47600},
+    {"epoch": 1.422685337234646, "grad_norm": 4.994762897491455, "learning_rate": 0.00018638735078486165, "loss": 1.3515, "step": 47650},
+    {"epoch": 1.4241781864867287, "grad_norm": 4.363476276397705, "learning_rate": 0.00018637306498894282, "loss": 1.3524, "step": 47700},
+    {"epoch": 1.4256710357388112, "grad_norm": 4.2915873527526855, "learning_rate": 0.00018635877919302398, "loss": 1.3245, "step": 47750},
+    {"epoch": 1.4271638849908936, "grad_norm": 4.654321670532227, "learning_rate": 0.00018634449339710514, "loss": 1.369, "step": 47800},
+    {"epoch": 1.4286567342429761, "grad_norm": 4.786460876464844, "learning_rate": 0.00018633020760118628, "loss": 1.3485, "step": 47850},
+    {"epoch": 1.4301495834950586, "grad_norm": 4.434101104736328, "learning_rate": 0.00018631592180526747, "loss": 1.3131, "step": 47900},
+    {"epoch": 1.4316424327471413, "grad_norm": 4.622028827667236, "learning_rate": 0.0001863016360093486, "loss": 1.3383, "step": 47950},
+    {"epoch": 1.4331352819992238, "grad_norm": 5.579216480255127, "learning_rate": 0.0001862873502134298, "loss": 1.3214, "step": 48000},
+    {"epoch": 1.4346281312513063, "grad_norm": 5.444978713989258, "learning_rate": 0.00018627306441751097, "loss": 1.3243, "step": 48050},
+    {"epoch": 1.4361209805033888, "grad_norm": 4.104014873504639, "learning_rate": 0.00018625877862159213, "loss": 1.241, "step": 48100},
+    {"epoch": 1.4376138297554713, "grad_norm": 4.517495632171631, "learning_rate": 0.0001862444928256733, "loss": 1.4197, "step": 48150},
+    {"epoch": 1.4391066790075537, "grad_norm": 5.38925838470459, "learning_rate": 0.00018623020702975446, "loss": 1.3125, "step": 48200},
+    {"epoch": 1.4405995282596362, "grad_norm": 6.211074352264404, "learning_rate": 0.00018621592123383563, "loss": 1.2659, "step": 48250},
+    {"epoch": 1.442092377511719, "grad_norm": 5.461385726928711, "learning_rate": 0.0001862016354379168, "loss": 1.3043, "step": 48300},
+    {"epoch": 1.4435852267638014, "grad_norm": 8.296045303344727, "learning_rate": 0.00018618734964199796, "loss": 1.3422, "step": 48350},
+    {"epoch": 1.445078076015884, "grad_norm": 4.640946388244629, "learning_rate": 0.00018617306384607912, "loss": 1.3317, "step": 48400},
+    {"epoch": 1.4465709252679664, "grad_norm": 4.698127269744873, "learning_rate": 0.00018615877805016029, "loss": 1.3241, "step": 48450},
+    {"epoch": 1.4480637745200489, "grad_norm": 4.810885429382324, "learning_rate": 0.00018614449225424148, "loss": 1.3204, "step": 48500},
+    {"epoch": 1.4495566237721316, "grad_norm": 3.57252836227417, "learning_rate": 0.00018613020645832261, "loss": 1.3412, "step": 48550},
+    {"epoch": 1.451049473024214, "grad_norm": 4.79307222366333, "learning_rate": 0.0001861159206624038, "loss": 1.325, "step": 48600},
+    {"epoch": 1.4525423222762965, "grad_norm": 5.495840072631836, "learning_rate": 0.00018610163486648494, "loss": 1.3191, "step": 48650},
+    {"epoch": 1.454035171528379, "grad_norm": 4.352155685424805, "learning_rate": 0.00018608734907056614, "loss": 1.3129, "step": 48700},
+    {"epoch": 1.4555280207804615, "grad_norm": 4.606874942779541, "learning_rate": 0.00018607306327464727, "loss": 1.3212, "step": 48750},
+    {"epoch": 1.4570208700325442, "grad_norm": 5.2679219245910645, "learning_rate": 0.00018605877747872846, "loss": 1.3531, "step": 48800},
+    {"epoch": 1.4585137192846267, "grad_norm": 4.821314811706543, "learning_rate": 0.00018604449168280963, "loss": 1.3195, "step": 48850},
+    {"epoch": 1.4600065685367092, "grad_norm": 4.137834072113037, "learning_rate": 0.0001860302058868908, "loss": 1.2751, "step": 48900},
+    {"epoch": 1.4614994177887917, "grad_norm": 6.577256202697754, "learning_rate": 0.00018601592009097196, "loss": 1.3971, "step": 48950},
+    {"epoch": 1.4629922670408742, "grad_norm": 4.749688148498535, "learning_rate": 0.00018600163429505312, "loss": 1.3557, "step": 49000},
+    {"epoch": 1.4644851162929569, "grad_norm": 5.410492897033691, "learning_rate": 0.0001859873484991343, "loss": 1.2467, "step": 49050},
+    {"epoch": 1.4659779655450393, "grad_norm": 4.303316593170166, "learning_rate": 0.00018597306270321545, "loss": 1.3166, "step": 49100},
+    {"epoch": 1.4674708147971218, "grad_norm": 5.683919906616211, "learning_rate": 0.00018595877690729662, "loss": 1.2932, "step": 49150},
+    {"epoch": 1.4689636640492043, "grad_norm": 5.251864433288574, "learning_rate": 0.00018594449111137778, "loss": 1.3179, "step": 49200},
+    {"epoch": 1.4704565133012868, "grad_norm": 4.565097808837891, "learning_rate": 0.00018593020531545895, "loss": 1.327, "step": 49250},
+    {"epoch": 1.4719493625533693, "grad_norm": 5.532310485839844, "learning_rate": 0.00018591591951954014, "loss": 1.3868, "step": 49300},
+    {"epoch": 1.4734422118054518, "grad_norm": 4.5699782371521, "learning_rate": 0.00018590163372362128, "loss": 1.2583, "step": 49350},
+    {"epoch": 1.4749350610575345, "grad_norm": 4.792230606079102, "learning_rate": 0.00018588734792770247, "loss": 1.3805, "step": 49400},
+    {"epoch": 1.476427910309617, "grad_norm": 4.52764368057251, "learning_rate": 0.0001858730621317836, "loss": 1.3465, "step": 49450},
+    {"epoch": 1.4779207595616994, "grad_norm": 5.21061372756958, "learning_rate": 0.0001858587763358648, "loss": 1.3778, "step": 49500},
+    {"epoch": 1.479413608813782, "grad_norm": 4.855043888092041, "learning_rate": 0.00018584449053994593, "loss": 1.3313, "step": 49550},
+    {"epoch": 1.4809064580658644, "grad_norm": 4.446802616119385, "learning_rate": 0.00018583020474402713, "loss": 1.3224, "step": 49600},
+    {"epoch": 1.4823993073179471, "grad_norm": 5.054227828979492, "learning_rate": 0.0001858159189481083, "loss": 1.3782, "step": 49650},
+    {"epoch": 1.4838921565700296, "grad_norm": 6.877427101135254, "learning_rate": 0.00018580163315218946, "loss": 1.3399, "step": 49700},
+    {"epoch": 1.485385005822112, "grad_norm": 6.227908611297607, "learning_rate": 0.00018578734735627062, "loss": 1.2996, "step": 49750},
+    {"epoch": 1.4868778550741946, "grad_norm": 6.2445759773254395, "learning_rate": 0.00018577306156035179, "loss": 1.3005, "step": 49800},
+    {"epoch": 1.488370704326277, "grad_norm": 6.488719463348389, "learning_rate": 0.00018575877576443295, "loss": 1.3755, "step": 49850},
+    {"epoch": 1.4898635535783598, "grad_norm": 5.3022589683532715, "learning_rate": 0.00018574448996851411, "loss": 1.3402, "step": 49900},
+    {"epoch": 1.4913564028304422, "grad_norm": 4.986827373504639, "learning_rate": 0.00018573020417259528, "loss": 1.3695, "step": 49950},
+    {"epoch": 1.4928492520825247, "grad_norm": 5.669780731201172, "learning_rate": 0.00018571591837667644, "loss": 1.314, "step": 50000}
   ],
   "logging_steps": 50,
@@ -5633,7 +7033,7 @@
       "attributes": {}
     }
   },
-  "total_flos": 1.
+  "total_flos": 1.263739877366694e+18,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
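trainer_state.json is plain JSON, so the curve logged every 50 steps above can be read back directly. A minimal sketch, assuming the checkpoint sits in a local last-checkpoint/ directory; log_history is the field the Hugging Face Trainer writes these records to:

import json

with open("last-checkpoint/trainer_state.json") as f:    # assumed local path
    state = json.load(f)

print(state["epoch"], state["global_step"])               # 1.4928492520825247 50000

# One record per logging step (logging_steps = 50); keep just the loss curve.
curve = [(rec["step"], rec["loss"]) for rec in state["log_history"] if "loss" in rec]
print(curve[-1])                                           # (50000, 1.314)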