Training in progress, step 1546, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d3d3c88a2f6fc76c63b5b413b8836a28d23690119332de6c0d73337282895274
 size 327040
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e3f357ecaae3199fbef6ac291a2514abc5cc0b5efe1cf45b48581741cd310bfc
 size 739578
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8fd3973c26fcbf8a09cd499f584c932fdd87a70c5f82b3348f32cf79ea8c4698
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9fc4f1df5bdd12aa905296360f9b610ca03e005805ce6a6dee3df4e3e8050225
 size 1064
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 0.
 "eval_steps": 387,
-"global_step":
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -8166,6 +8166,2701 @@
 "eval_samples_per_second": 70.203,
 "eval_steps_per_second": 35.105,
 "step": 1161
 }
 ],
 "logging_steps": 1,
@@ -8180,12 +10875,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 2,
 "trial_name": null,
 "trial_params": null
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
+"epoch": 0.076973823423244,
 "eval_steps": 387,
+"global_step": 1546,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -8166,6 +8166,2701 @@
 "eval_samples_per_second": 70.203,
 "eval_steps_per_second": 35.105,
 "step": 1161
8169 |
+
},
|
8170 |
+
{
|
8171 |
+
"epoch": 0.057854840115012636,
|
8172 |
+
"grad_norm": 0.1332004815340042,
|
8173 |
+
"learning_rate": 2.9289321881345254e-05,
|
8174 |
+
"loss": 11.8158,
|
8175 |
+
"step": 1162
|
8176 |
+
},
|
8177 |
+
{
|
8178 |
+
"epoch": 0.05790462913404449,
|
8179 |
+
"grad_norm": 0.14585599303245544,
|
8180 |
+
"learning_rate": 2.914484478789724e-05,
|
8181 |
+
"loss": 11.8238,
|
8182 |
+
"step": 1163
|
8183 |
+
},
|
8184 |
+
{
|
8185 |
+
"epoch": 0.05795441815307634,
|
8186 |
+
"grad_norm": 0.3500841557979584,
|
8187 |
+
"learning_rate": 2.9000664101558773e-05,
|
8188 |
+
"loss": 11.7387,
|
8189 |
+
"step": 1164
|
8190 |
+
},
|
8191 |
+
{
|
8192 |
+
"epoch": 0.05800420717210819,
|
8193 |
+
"grad_norm": 0.21934057772159576,
|
8194 |
+
"learning_rate": 2.8856780425478347e-05,
|
8195 |
+
"loss": 11.7743,
|
8196 |
+
"step": 1165
|
8197 |
+
},
|
8198 |
+
{
|
8199 |
+
"epoch": 0.058053996191140045,
|
8200 |
+
"grad_norm": 0.12822453677654266,
|
8201 |
+
"learning_rate": 2.8713194361562036e-05,
|
8202 |
+
"loss": 11.8167,
|
8203 |
+
"step": 1166
|
8204 |
+
},
|
8205 |
+
{
|
8206 |
+
"epoch": 0.0581037852101719,
|
8207 |
+
"grad_norm": 0.215314581990242,
|
8208 |
+
"learning_rate": 2.8569906510470757e-05,
|
8209 |
+
"loss": 11.7771,
|
8210 |
+
"step": 1167
|
8211 |
+
},
|
8212 |
+
{
|
8213 |
+
"epoch": 0.058153574229203746,
|
8214 |
+
"grad_norm": 0.1931249499320984,
|
8215 |
+
"learning_rate": 2.8426917471618144e-05,
|
8216 |
+
"loss": 11.8149,
|
8217 |
+
"step": 1168
|
8218 |
+
},
|
8219 |
+
{
|
8220 |
+
"epoch": 0.0582033632482356,
|
8221 |
+
"grad_norm": 0.17059537768363953,
|
8222 |
+
"learning_rate": 2.828422784316762e-05,
|
8223 |
+
"loss": 11.7696,
|
8224 |
+
"step": 1169
|
8225 |
+
},
|
8226 |
+
{
|
8227 |
+
"epoch": 0.058253152267267454,
|
8228 |
+
"grad_norm": 0.18207654356956482,
|
8229 |
+
"learning_rate": 2.814183822203019e-05,
|
8230 |
+
"loss": 11.8353,
|
8231 |
+
"step": 1170
|
8232 |
+
},
|
8233 |
+
{
|
8234 |
+
"epoch": 0.05830294128629931,
|
8235 |
+
"grad_norm": 0.17656633257865906,
|
8236 |
+
"learning_rate": 2.7999749203861836e-05,
|
8237 |
+
"loss": 11.8031,
|
8238 |
+
"step": 1171
|
8239 |
+
},
|
8240 |
+
{
|
8241 |
+
"epoch": 0.05835273030533116,
|
8242 |
+
"grad_norm": 0.19579464197158813,
|
8243 |
+
"learning_rate": 2.7857961383061028e-05,
|
8244 |
+
"loss": 11.7895,
|
8245 |
+
"step": 1172
|
8246 |
+
},
|
8247 |
+
{
|
8248 |
+
"epoch": 0.05840251932436301,
|
8249 |
+
"grad_norm": 0.1476120948791504,
|
8250 |
+
"learning_rate": 2.7716475352766146e-05,
|
8251 |
+
"loss": 11.8159,
|
8252 |
+
"step": 1173
|
8253 |
+
},
|
8254 |
+
{
|
8255 |
+
"epoch": 0.05845230834339486,
|
8256 |
+
"grad_norm": 0.22224178910255432,
|
8257 |
+
"learning_rate": 2.7575291704853323e-05,
|
8258 |
+
"loss": 11.7752,
|
8259 |
+
"step": 1174
|
8260 |
+
},
|
8261 |
+
{
|
8262 |
+
"epoch": 0.05850209736242672,
|
8263 |
+
"grad_norm": 0.18499843776226044,
|
8264 |
+
"learning_rate": 2.7434411029933483e-05,
|
8265 |
+
"loss": 11.7876,
|
8266 |
+
"step": 1175
|
8267 |
+
},
|
8268 |
+
{
|
8269 |
+
"epoch": 0.05855188638145857,
|
8270 |
+
"grad_norm": 0.20463870465755463,
|
8271 |
+
"learning_rate": 2.72938339173503e-05,
|
8272 |
+
"loss": 11.8047,
|
8273 |
+
"step": 1176
|
8274 |
+
},
|
8275 |
+
{
|
8276 |
+
"epoch": 0.058601675400490424,
|
8277 |
+
"grad_norm": 0.27578920125961304,
|
8278 |
+
"learning_rate": 2.7153560955177483e-05,
|
8279 |
+
"loss": 11.763,
|
8280 |
+
"step": 1177
|
8281 |
+
},
|
8282 |
+
{
|
8283 |
+
"epoch": 0.05865146441952227,
|
8284 |
+
"grad_norm": 0.20626184344291687,
|
8285 |
+
"learning_rate": 2.7013592730216465e-05,
|
8286 |
+
"loss": 11.8009,
|
8287 |
+
"step": 1178
|
8288 |
+
},
|
8289 |
+
{
|
8290 |
+
"epoch": 0.058701253438554125,
|
8291 |
+
"grad_norm": 0.1729109138250351,
|
8292 |
+
"learning_rate": 2.6873929827993727e-05,
|
8293 |
+
"loss": 11.7838,
|
8294 |
+
"step": 1179
|
8295 |
+
},
|
8296 |
+
{
|
8297 |
+
"epoch": 0.05875104245758598,
|
8298 |
+
"grad_norm": 0.12373920530080795,
|
8299 |
+
"learning_rate": 2.673457283275873e-05,
|
8300 |
+
"loss": 11.8073,
|
8301 |
+
"step": 1180
|
8302 |
+
},
|
8303 |
+
{
|
8304 |
+
"epoch": 0.05880083147661783,
|
8305 |
+
"grad_norm": 0.14452655613422394,
|
8306 |
+
"learning_rate": 2.6595522327481027e-05,
|
8307 |
+
"loss": 11.7844,
|
8308 |
+
"step": 1181
|
8309 |
+
},
|
8310 |
+
{
|
8311 |
+
"epoch": 0.05885062049564969,
|
8312 |
+
"grad_norm": 0.14236567914485931,
|
8313 |
+
"learning_rate": 2.6456778893848144e-05,
|
8314 |
+
"loss": 11.8261,
|
8315 |
+
"step": 1182
|
8316 |
+
},
|
8317 |
+
{
|
8318 |
+
"epoch": 0.058900409514681534,
|
8319 |
+
"grad_norm": 0.1285596340894699,
|
8320 |
+
"learning_rate": 2.6318343112263012e-05,
|
8321 |
+
"loss": 11.8245,
|
8322 |
+
"step": 1183
|
8323 |
+
},
|
8324 |
+
{
|
8325 |
+
"epoch": 0.05895019853371339,
|
8326 |
+
"grad_norm": 0.16046473383903503,
|
8327 |
+
"learning_rate": 2.6180215561841614e-05,
|
8328 |
+
"loss": 11.7825,
|
8329 |
+
"step": 1184
|
8330 |
+
},
|
8331 |
+
{
|
8332 |
+
"epoch": 0.05899998755274524,
|
8333 |
+
"grad_norm": 0.264010488986969,
|
8334 |
+
"learning_rate": 2.6042396820410375e-05,
|
8335 |
+
"loss": 11.6605,
|
8336 |
+
"step": 1185
|
8337 |
+
},
|
8338 |
+
{
|
8339 |
+
"epoch": 0.059049776571777096,
|
8340 |
+
"grad_norm": 0.17687653005123138,
|
8341 |
+
"learning_rate": 2.5904887464504114e-05,
|
8342 |
+
"loss": 11.7912,
|
8343 |
+
"step": 1186
|
8344 |
+
},
|
8345 |
+
{
|
8346 |
+
"epoch": 0.05909956559080895,
|
8347 |
+
"grad_norm": 0.19682057201862335,
|
8348 |
+
"learning_rate": 2.576768806936316e-05,
|
8349 |
+
"loss": 11.7986,
|
8350 |
+
"step": 1187
|
8351 |
+
},
|
8352 |
+
{
|
8353 |
+
"epoch": 0.0591493546098408,
|
8354 |
+
"grad_norm": 0.1468563675880432,
|
8355 |
+
"learning_rate": 2.5630799208931345e-05,
|
8356 |
+
"loss": 11.7849,
|
8357 |
+
"step": 1188
|
8358 |
+
},
|
8359 |
+
{
|
8360 |
+
"epoch": 0.05919914362887265,
|
8361 |
+
"grad_norm": 0.19433733820915222,
|
8362 |
+
"learning_rate": 2.5494221455853408e-05,
|
8363 |
+
"loss": 11.8063,
|
8364 |
+
"step": 1189
|
8365 |
+
},
|
8366 |
+
{
|
8367 |
+
"epoch": 0.059248932647904505,
|
8368 |
+
"grad_norm": 0.16230177879333496,
|
8369 |
+
"learning_rate": 2.535795538147262e-05,
|
8370 |
+
"loss": 11.8176,
|
8371 |
+
"step": 1190
|
8372 |
+
},
|
8373 |
+
{
|
8374 |
+
"epoch": 0.05929872166693636,
|
8375 |
+
"grad_norm": 0.27983057498931885,
|
8376 |
+
"learning_rate": 2.5222001555828434e-05,
|
8377 |
+
"loss": 11.7862,
|
8378 |
+
"step": 1191
|
8379 |
+
},
|
8380 |
+
{
|
8381 |
+
"epoch": 0.05934851068596821,
|
8382 |
+
"grad_norm": 0.2696499526500702,
|
8383 |
+
"learning_rate": 2.5086360547654087e-05,
|
8384 |
+
"loss": 11.7359,
|
8385 |
+
"step": 1192
|
8386 |
+
},
|
8387 |
+
{
|
8388 |
+
"epoch": 0.05939829970500006,
|
8389 |
+
"grad_norm": 0.1899920254945755,
|
8390 |
+
"learning_rate": 2.495103292437415e-05,
|
8391 |
+
"loss": 11.7893,
|
8392 |
+
"step": 1193
|
8393 |
+
},
|
8394 |
+
{
|
8395 |
+
"epoch": 0.059448088724031914,
|
8396 |
+
"grad_norm": 0.14830581843852997,
|
8397 |
+
"learning_rate": 2.4816019252102273e-05,
|
8398 |
+
"loss": 11.8072,
|
8399 |
+
"step": 1194
|
8400 |
+
},
|
8401 |
+
{
|
8402 |
+
"epoch": 0.05949787774306377,
|
8403 |
+
"grad_norm": 0.1520967334508896,
|
8404 |
+
"learning_rate": 2.468132009563876e-05,
|
8405 |
+
"loss": 11.7788,
|
8406 |
+
"step": 1195
|
8407 |
+
},
|
8408 |
+
{
|
8409 |
+
"epoch": 0.05954766676209562,
|
8410 |
+
"grad_norm": 0.22229152917861938,
|
8411 |
+
"learning_rate": 2.454693601846819e-05,
|
8412 |
+
"loss": 11.7905,
|
8413 |
+
"step": 1196
|
8414 |
+
},
|
8415 |
+
{
|
8416 |
+
"epoch": 0.059597455781127476,
|
8417 |
+
"grad_norm": 0.29063358902931213,
|
8418 |
+
"learning_rate": 2.4412867582757083e-05,
|
8419 |
+
"loss": 11.7853,
|
8420 |
+
"step": 1197
|
8421 |
+
},
|
8422 |
+
{
|
8423 |
+
"epoch": 0.05964724480015932,
|
8424 |
+
"grad_norm": 0.2314983606338501,
|
8425 |
+
"learning_rate": 2.4279115349351543e-05,
|
8426 |
+
"loss": 11.6871,
|
8427 |
+
"step": 1198
|
8428 |
+
},
|
8429 |
+
{
|
8430 |
+
"epoch": 0.05969703381919118,
|
8431 |
+
"grad_norm": 0.24789012968540192,
|
8432 |
+
"learning_rate": 2.4145679877774928e-05,
|
8433 |
+
"loss": 11.8307,
|
8434 |
+
"step": 1199
|
8435 |
+
},
|
8436 |
+
{
|
8437 |
+
"epoch": 0.05974682283822303,
|
8438 |
+
"grad_norm": 0.19050073623657227,
|
8439 |
+
"learning_rate": 2.401256172622538e-05,
|
8440 |
+
"loss": 11.7982,
|
8441 |
+
"step": 1200
|
8442 |
+
},
|
8443 |
+
{
|
8444 |
+
"epoch": 0.059796611857254885,
|
8445 |
+
"grad_norm": 0.2415127456188202,
|
8446 |
+
"learning_rate": 2.3879761451573835e-05,
|
8447 |
+
"loss": 11.7125,
|
8448 |
+
"step": 1201
|
8449 |
+
},
|
8450 |
+
{
|
8451 |
+
"epoch": 0.05984640087628673,
|
8452 |
+
"grad_norm": 0.2305862456560135,
|
8453 |
+
"learning_rate": 2.3747279609361196e-05,
|
8454 |
+
"loss": 11.8058,
|
8455 |
+
"step": 1202
|
8456 |
+
},
|
8457 |
+
{
|
8458 |
+
"epoch": 0.059896189895318586,
|
8459 |
+
"grad_norm": 0.1610925942659378,
|
8460 |
+
"learning_rate": 2.361511675379645e-05,
|
8461 |
+
"loss": 11.8253,
|
8462 |
+
"step": 1203
|
8463 |
+
},
|
8464 |
+
{
|
8465 |
+
"epoch": 0.05994597891435044,
|
8466 |
+
"grad_norm": 0.16269992291927338,
|
8467 |
+
"learning_rate": 2.3483273437754107e-05,
|
8468 |
+
"loss": 11.8268,
|
8469 |
+
"step": 1204
|
8470 |
+
},
|
8471 |
+
{
|
8472 |
+
"epoch": 0.059995767933382294,
|
8473 |
+
"grad_norm": 0.18723253905773163,
|
8474 |
+
"learning_rate": 2.3351750212771995e-05,
|
8475 |
+
"loss": 11.795,
|
8476 |
+
"step": 1205
|
8477 |
+
},
|
8478 |
+
{
|
8479 |
+
"epoch": 0.06004555695241415,
|
8480 |
+
"grad_norm": 0.14840541779994965,
|
8481 |
+
"learning_rate": 2.3220547629048796e-05,
|
8482 |
+
"loss": 11.8227,
|
8483 |
+
"step": 1206
|
8484 |
+
},
|
8485 |
+
{
|
8486 |
+
"epoch": 0.060095345971445994,
|
8487 |
+
"grad_norm": 0.17991192638874054,
|
8488 |
+
"learning_rate": 2.3089666235442054e-05,
|
8489 |
+
"loss": 11.8096,
|
8490 |
+
"step": 1207
|
8491 |
+
},
|
8492 |
+
{
|
8493 |
+
"epoch": 0.06014513499047785,
|
8494 |
+
"grad_norm": 0.16019561886787415,
|
8495 |
+
"learning_rate": 2.2959106579465483e-05,
|
8496 |
+
"loss": 11.7857,
|
8497 |
+
"step": 1208
|
8498 |
+
},
|
8499 |
+
{
|
8500 |
+
"epoch": 0.0601949240095097,
|
8501 |
+
"grad_norm": 0.2253340631723404,
|
8502 |
+
"learning_rate": 2.2828869207287007e-05,
|
8503 |
+
"loss": 11.789,
|
8504 |
+
"step": 1209
|
8505 |
+
},
|
8506 |
+
{
|
8507 |
+
"epoch": 0.060244713028541556,
|
8508 |
+
"grad_norm": 0.17055194079875946,
|
8509 |
+
"learning_rate": 2.26989546637263e-05,
|
8510 |
+
"loss": 11.7304,
|
8511 |
+
"step": 1210
|
8512 |
+
},
|
8513 |
+
{
|
8514 |
+
"epoch": 0.06029450204757341,
|
8515 |
+
"grad_norm": 0.1622680276632309,
|
8516 |
+
"learning_rate": 2.2569363492252594e-05,
|
8517 |
+
"loss": 11.7283,
|
8518 |
+
"step": 1211
|
8519 |
+
},
|
8520 |
+
{
|
8521 |
+
"epoch": 0.06034429106660526,
|
8522 |
+
"grad_norm": 0.19046859443187714,
|
8523 |
+
"learning_rate": 2.2440096234982245e-05,
|
8524 |
+
"loss": 11.8123,
|
8525 |
+
"step": 1212
|
8526 |
+
},
|
8527 |
+
{
|
8528 |
+
"epoch": 0.06039408008563711,
|
8529 |
+
"grad_norm": 0.16025525331497192,
|
8530 |
+
"learning_rate": 2.2311153432676768e-05,
|
8531 |
+
"loss": 11.8145,
|
8532 |
+
"step": 1213
|
8533 |
+
},
|
8534 |
+
{
|
8535 |
+
"epoch": 0.060443869104668965,
|
8536 |
+
"grad_norm": 0.16243071854114532,
|
8537 |
+
"learning_rate": 2.218253562474023e-05,
|
8538 |
+
"loss": 11.7793,
|
8539 |
+
"step": 1214
|
8540 |
+
},
|
8541 |
+
{
|
8542 |
+
"epoch": 0.06049365812370082,
|
8543 |
+
"grad_norm": 0.2079620361328125,
|
8544 |
+
"learning_rate": 2.2054243349217217e-05,
|
8545 |
+
"loss": 11.7495,
|
8546 |
+
"step": 1215
|
8547 |
+
},
|
8548 |
+
{
|
8549 |
+
"epoch": 0.06054344714273267,
|
8550 |
+
"grad_norm": 0.24841146171092987,
|
8551 |
+
"learning_rate": 2.1926277142790552e-05,
|
8552 |
+
"loss": 11.7723,
|
8553 |
+
"step": 1216
|
8554 |
+
},
|
8555 |
+
{
|
8556 |
+
"epoch": 0.06059323616176452,
|
8557 |
+
"grad_norm": 0.2690463662147522,
|
8558 |
+
"learning_rate": 2.1798637540779e-05,
|
8559 |
+
"loss": 11.7952,
|
8560 |
+
"step": 1217
|
8561 |
+
},
|
8562 |
+
{
|
8563 |
+
"epoch": 0.060643025180796374,
|
8564 |
+
"grad_norm": 0.15697398781776428,
|
8565 |
+
"learning_rate": 2.1671325077134963e-05,
|
8566 |
+
"loss": 11.7767,
|
8567 |
+
"step": 1218
|
8568 |
+
},
|
8569 |
+
{
|
8570 |
+
"epoch": 0.06069281419982823,
|
8571 |
+
"grad_norm": 0.12324465811252594,
|
8572 |
+
"learning_rate": 2.15443402844425e-05,
|
8573 |
+
"loss": 11.7935,
|
8574 |
+
"step": 1219
|
8575 |
+
},
|
8576 |
+
{
|
8577 |
+
"epoch": 0.06074260321886008,
|
8578 |
+
"grad_norm": 0.1910986602306366,
|
8579 |
+
"learning_rate": 2.1417683693914747e-05,
|
8580 |
+
"loss": 11.7808,
|
8581 |
+
"step": 1220
|
8582 |
+
},
|
8583 |
+
{
|
8584 |
+
"epoch": 0.060792392237891936,
|
8585 |
+
"grad_norm": 0.1951969563961029,
|
8586 |
+
"learning_rate": 2.1291355835392025e-05,
|
8587 |
+
"loss": 11.7967,
|
8588 |
+
"step": 1221
|
8589 |
+
},
|
8590 |
+
{
|
8591 |
+
"epoch": 0.06084218125692378,
|
8592 |
+
"grad_norm": 0.2584574222564697,
|
8593 |
+
"learning_rate": 2.116535723733938e-05,
|
8594 |
+
"loss": 11.7213,
|
8595 |
+
"step": 1222
|
8596 |
+
},
|
8597 |
+
{
|
8598 |
+
"epoch": 0.06089197027595564,
|
8599 |
+
"grad_norm": 0.19004681706428528,
|
8600 |
+
"learning_rate": 2.103968842684452e-05,
|
8601 |
+
"loss": 11.771,
|
8602 |
+
"step": 1223
|
8603 |
+
},
|
8604 |
+
{
|
8605 |
+
"epoch": 0.06094175929498749,
|
8606 |
+
"grad_norm": 0.18857941031455994,
|
8607 |
+
"learning_rate": 2.091434992961555e-05,
|
8608 |
+
"loss": 11.802,
|
8609 |
+
"step": 1224
|
8610 |
+
},
|
8611 |
+
{
|
8612 |
+
"epoch": 0.060991548314019345,
|
8613 |
+
"grad_norm": 0.17620600759983063,
|
8614 |
+
"learning_rate": 2.0789342269978785e-05,
|
8615 |
+
"loss": 11.7605,
|
8616 |
+
"step": 1225
|
8617 |
+
},
|
8618 |
+
{
|
8619 |
+
"epoch": 0.0610413373330512,
|
8620 |
+
"grad_norm": 0.20876444876194,
|
8621 |
+
"learning_rate": 2.0664665970876496e-05,
|
8622 |
+
"loss": 11.7829,
|
8623 |
+
"step": 1226
|
8624 |
+
},
|
8625 |
+
{
|
8626 |
+
"epoch": 0.061091126352083046,
|
8627 |
+
"grad_norm": 0.13984788954257965,
|
8628 |
+
"learning_rate": 2.054032155386486e-05,
|
8629 |
+
"loss": 11.827,
|
8630 |
+
"step": 1227
|
8631 |
+
},
|
8632 |
+
{
|
8633 |
+
"epoch": 0.0611409153711149,
|
8634 |
+
"grad_norm": 0.15672601759433746,
|
8635 |
+
"learning_rate": 2.0416309539111654e-05,
|
8636 |
+
"loss": 11.8305,
|
8637 |
+
"step": 1228
|
8638 |
+
},
|
8639 |
+
{
|
8640 |
+
"epoch": 0.061190704390146754,
|
8641 |
+
"grad_norm": 0.20603567361831665,
|
8642 |
+
"learning_rate": 2.0292630445394133e-05,
|
8643 |
+
"loss": 11.7932,
|
8644 |
+
"step": 1229
|
8645 |
+
},
|
8646 |
+
{
|
8647 |
+
"epoch": 0.06124049340917861,
|
8648 |
+
"grad_norm": 0.23964861035346985,
|
8649 |
+
"learning_rate": 2.0169284790096853e-05,
|
8650 |
+
"loss": 11.7195,
|
8651 |
+
"step": 1230
|
8652 |
+
},
|
8653 |
+
{
|
8654 |
+
"epoch": 0.06129028242821046,
|
8655 |
+
"grad_norm": 0.19552284479141235,
|
8656 |
+
"learning_rate": 2.0046273089209487e-05,
|
8657 |
+
"loss": 11.8088,
|
8658 |
+
"step": 1231
|
8659 |
+
},
|
8660 |
+
{
|
8661 |
+
"epoch": 0.06134007144724231,
|
8662 |
+
"grad_norm": 0.18251019716262817,
|
8663 |
+
"learning_rate": 1.992359585732474e-05,
|
8664 |
+
"loss": 11.808,
|
8665 |
+
"step": 1232
|
8666 |
+
},
|
8667 |
+
{
|
8668 |
+
"epoch": 0.06138986046627416,
|
8669 |
+
"grad_norm": 0.17195625603199005,
|
8670 |
+
"learning_rate": 1.9801253607635996e-05,
|
8671 |
+
"loss": 11.7913,
|
8672 |
+
"step": 1233
|
8673 |
+
},
|
8674 |
+
{
|
8675 |
+
"epoch": 0.06143964948530602,
|
8676 |
+
"grad_norm": 0.23347894847393036,
|
8677 |
+
"learning_rate": 1.967924685193552e-05,
|
8678 |
+
"loss": 11.7888,
|
8679 |
+
"step": 1234
|
8680 |
+
},
|
8681 |
+
{
|
8682 |
+
"epoch": 0.06148943850433787,
|
8683 |
+
"grad_norm": 0.17683707177639008,
|
8684 |
+
"learning_rate": 1.9557576100611917e-05,
|
8685 |
+
"loss": 11.7651,
|
8686 |
+
"step": 1235
|
8687 |
+
},
|
8688 |
+
{
|
8689 |
+
"epoch": 0.06153922752336972,
|
8690 |
+
"grad_norm": 0.21828392148017883,
|
8691 |
+
"learning_rate": 1.943624186264832e-05,
|
8692 |
+
"loss": 11.7847,
|
8693 |
+
"step": 1236
|
8694 |
+
},
|
8695 |
+
{
|
8696 |
+
"epoch": 0.06158901654240157,
|
8697 |
+
"grad_norm": 0.17595314979553223,
|
8698 |
+
"learning_rate": 1.9315244645620067e-05,
|
8699 |
+
"loss": 11.8248,
|
8700 |
+
"step": 1237
|
8701 |
+
},
|
8702 |
+
{
|
8703 |
+
"epoch": 0.061638805561433425,
|
8704 |
+
"grad_norm": 0.14729666709899902,
|
8705 |
+
"learning_rate": 1.9194584955692706e-05,
|
8706 |
+
"loss": 11.8181,
|
8707 |
+
"step": 1238
|
8708 |
+
},
|
8709 |
+
{
|
8710 |
+
"epoch": 0.06168859458046528,
|
8711 |
+
"grad_norm": 0.18449276685714722,
|
8712 |
+
"learning_rate": 1.9074263297619676e-05,
|
8713 |
+
"loss": 11.7976,
|
8714 |
+
"step": 1239
|
8715 |
+
},
|
8716 |
+
{
|
8717 |
+
"epoch": 0.06173838359949713,
|
8718 |
+
"grad_norm": 0.19626711308956146,
|
8719 |
+
"learning_rate": 1.8954280174740537e-05,
|
8720 |
+
"loss": 11.8252,
|
8721 |
+
"step": 1240
|
8722 |
+
},
|
8723 |
+
{
|
8724 |
+
"epoch": 0.06178817261852898,
|
8725 |
+
"grad_norm": 0.14658327400684357,
|
8726 |
+
"learning_rate": 1.8834636088978476e-05,
|
8727 |
+
"loss": 11.796,
|
8728 |
+
"step": 1241
|
8729 |
+
},
|
8730 |
+
{
|
8731 |
+
"epoch": 0.061837961637560834,
|
8732 |
+
"grad_norm": 0.20259447395801544,
|
8733 |
+
"learning_rate": 1.8715331540838487e-05,
|
8734 |
+
"loss": 11.7835,
|
8735 |
+
"step": 1242
|
8736 |
+
},
|
8737 |
+
{
|
8738 |
+
"epoch": 0.06188775065659269,
|
8739 |
+
"grad_norm": 0.15107446908950806,
|
8740 |
+
"learning_rate": 1.859636702940516e-05,
|
8741 |
+
"loss": 11.8077,
|
8742 |
+
"step": 1243
|
8743 |
+
},
|
8744 |
+
{
|
8745 |
+
"epoch": 0.06193753967562454,
|
8746 |
+
"grad_norm": 0.1753118485212326,
|
8747 |
+
"learning_rate": 1.8477743052340668e-05,
|
8748 |
+
"loss": 11.8062,
|
8749 |
+
"step": 1244
|
8750 |
+
},
|
8751 |
+
{
|
8752 |
+
"epoch": 0.061987328694656396,
|
8753 |
+
"grad_norm": 0.2554684579372406,
|
8754 |
+
"learning_rate": 1.8359460105882476e-05,
|
8755 |
+
"loss": 11.7968,
|
8756 |
+
"step": 1245
|
8757 |
+
},
|
8758 |
+
{
|
8759 |
+
"epoch": 0.06203711771368824,
|
8760 |
+
"grad_norm": 0.16749104857444763,
|
8761 |
+
"learning_rate": 1.824151868484164e-05,
|
8762 |
+
"loss": 11.8242,
|
8763 |
+
"step": 1246
|
8764 |
+
},
|
8765 |
+
{
|
8766 |
+
"epoch": 0.0620869067327201,
|
8767 |
+
"grad_norm": 0.20813743770122528,
|
8768 |
+
"learning_rate": 1.8123919282600342e-05,
|
8769 |
+
"loss": 11.7966,
|
8770 |
+
"step": 1247
|
8771 |
+
},
|
8772 |
+
{
|
8773 |
+
"epoch": 0.06213669575175195,
|
8774 |
+
"grad_norm": 0.3146398365497589,
|
8775 |
+
"learning_rate": 1.8006662391110085e-05,
|
8776 |
+
"loss": 11.7039,
|
8777 |
+
"step": 1248
|
8778 |
+
},
|
8779 |
+
{
|
8780 |
+
"epoch": 0.062186484770783805,
|
8781 |
+
"grad_norm": 0.1499229073524475,
|
8782 |
+
"learning_rate": 1.7889748500889536e-05,
|
8783 |
+
"loss": 11.8197,
|
8784 |
+
"step": 1249
|
8785 |
+
},
|
8786 |
+
{
|
8787 |
+
"epoch": 0.06223627378981566,
|
8788 |
+
"grad_norm": 0.28088995814323425,
|
8789 |
+
"learning_rate": 1.7773178101022514e-05,
|
8790 |
+
"loss": 11.7661,
|
8791 |
+
"step": 1250
|
8792 |
+
},
|
8793 |
+
{
|
8794 |
+
"epoch": 0.062286062808847506,
|
8795 |
+
"grad_norm": 0.19208338856697083,
|
8796 |
+
"learning_rate": 1.765695167915582e-05,
|
8797 |
+
"loss": 11.7225,
|
8798 |
+
"step": 1251
|
8799 |
+
},
|
8800 |
+
{
|
8801 |
+
"epoch": 0.06233585182787936,
|
8802 |
+
"grad_norm": 0.1761149913072586,
|
8803 |
+
"learning_rate": 1.7541069721497493e-05,
|
8804 |
+
"loss": 11.8034,
|
8805 |
+
"step": 1252
|
8806 |
+
},
|
8807 |
+
{
|
8808 |
+
"epoch": 0.062385640846911214,
|
8809 |
+
"grad_norm": 0.16740404069423676,
|
8810 |
+
"learning_rate": 1.742553271281436e-05,
|
8811 |
+
"loss": 11.8247,
|
8812 |
+
"step": 1253
|
8813 |
+
},
|
8814 |
+
{
|
8815 |
+
"epoch": 0.06243542986594307,
|
8816 |
+
"grad_norm": 0.12668420374393463,
|
8817 |
+
"learning_rate": 1.7310341136430385e-05,
|
8818 |
+
"loss": 11.7905,
|
8819 |
+
"step": 1254
|
8820 |
+
},
|
8821 |
+
{
|
8822 |
+
"epoch": 0.06248521888497492,
|
8823 |
+
"grad_norm": 0.23680634796619415,
|
8824 |
+
"learning_rate": 1.719549547422443e-05,
|
8825 |
+
"loss": 11.7957,
|
8826 |
+
"step": 1255
|
8827 |
+
},
|
8828 |
+
{
|
8829 |
+
"epoch": 0.06253500790400678,
|
8830 |
+
"grad_norm": 0.312541663646698,
|
8831 |
+
"learning_rate": 1.7080996206628307e-05,
|
8832 |
+
"loss": 11.7737,
|
8833 |
+
"step": 1256
|
8834 |
+
},
|
8835 |
+
{
|
8836 |
+
"epoch": 0.06258479692303863,
|
8837 |
+
"grad_norm": 0.28565460443496704,
|
8838 |
+
"learning_rate": 1.6966843812624754e-05,
|
8839 |
+
"loss": 11.7457,
|
8840 |
+
"step": 1257
|
8841 |
+
},
|
8842 |
+
{
|
8843 |
+
"epoch": 0.06263458594207047,
|
8844 |
+
"grad_norm": 0.142897829413414,
|
8845 |
+
"learning_rate": 1.6853038769745467e-05,
|
8846 |
+
"loss": 11.8205,
|
8847 |
+
"step": 1258
|
8848 |
+
},
|
8849 |
+
{
|
8850 |
+
"epoch": 0.06268437496110232,
|
8851 |
+
"grad_norm": 0.15803474187850952,
|
8852 |
+
"learning_rate": 1.6739581554069072e-05,
|
8853 |
+
"loss": 11.7954,
|
8854 |
+
"step": 1259
|
8855 |
+
},
|
8856 |
+
{
|
8857 |
+
"epoch": 0.06273416398013418,
|
8858 |
+
"grad_norm": 0.22464770078659058,
|
8859 |
+
"learning_rate": 1.6626472640219083e-05,
|
8860 |
+
"loss": 11.7389,
|
8861 |
+
"step": 1260
|
8862 |
+
},
|
8863 |
+
{
|
8864 |
+
"epoch": 0.06278395299916603,
|
8865 |
+
"grad_norm": 0.18673096597194672,
|
8866 |
+
"learning_rate": 1.6513712501362e-05,
|
8867 |
+
"loss": 11.8045,
|
8868 |
+
"step": 1261
|
8869 |
+
},
|
8870 |
+
{
|
8871 |
+
"epoch": 0.06283374201819789,
|
8872 |
+
"grad_norm": 0.1820594072341919,
|
8873 |
+
"learning_rate": 1.6401301609205335e-05,
|
8874 |
+
"loss": 11.8111,
|
8875 |
+
"step": 1262
|
8876 |
+
},
|
8877 |
+
{
|
8878 |
+
"epoch": 0.06288353103722974,
|
8879 |
+
"grad_norm": 0.15530428290367126,
|
8880 |
+
"learning_rate": 1.6289240433995555e-05,
|
8881 |
+
"loss": 11.7924,
|
8882 |
+
"step": 1263
|
8883 |
+
},
|
8884 |
+
{
|
8885 |
+
"epoch": 0.0629333200562616,
|
8886 |
+
"grad_norm": 0.18411067128181458,
|
8887 |
+
"learning_rate": 1.6177529444516194e-05,
|
8888 |
+
"loss": 11.8046,
|
8889 |
+
"step": 1264
|
8890 |
+
},
|
8891 |
+
{
|
8892 |
+
"epoch": 0.06298310907529345,
|
8893 |
+
"grad_norm": 0.2087487280368805,
|
8894 |
+
"learning_rate": 1.606616910808586e-05,
|
8895 |
+
"loss": 11.7875,
|
8896 |
+
"step": 1265
|
8897 |
+
},
|
8898 |
+
{
|
8899 |
+
"epoch": 0.0630328980943253,
|
8900 |
+
"grad_norm": 0.15608364343643188,
|
8901 |
+
"learning_rate": 1.595515989055618e-05,
|
8902 |
+
"loss": 11.7725,
|
8903 |
+
"step": 1266
|
8904 |
+
},
|
8905 |
+
{
|
8906 |
+
"epoch": 0.06308268711335716,
|
8907 |
+
"grad_norm": 0.16233456134796143,
|
8908 |
+
"learning_rate": 1.5844502256310167e-05,
|
8909 |
+
"loss": 11.8137,
|
8910 |
+
"step": 1267
|
8911 |
+
},
|
8912 |
+
{
|
8913 |
+
"epoch": 0.063132476132389,
|
8914 |
+
"grad_norm": 0.23211054503917694,
|
8915 |
+
"learning_rate": 1.573419666825984e-05,
|
8916 |
+
"loss": 11.7599,
|
8917 |
+
"step": 1268
|
8918 |
+
},
|
8919 |
+
{
|
8920 |
+
"epoch": 0.06318226515142085,
|
8921 |
+
"grad_norm": 0.2220914661884308,
|
8922 |
+
"learning_rate": 1.5624243587844622e-05,
|
8923 |
+
"loss": 11.7407,
|
8924 |
+
"step": 1269
|
8925 |
+
},
|
8926 |
+
{
|
8927 |
+
"epoch": 0.0632320541704527,
|
8928 |
+
"grad_norm": 0.1719830334186554,
|
8929 |
+
"learning_rate": 1.551464347502929e-05,
|
8930 |
+
"loss": 11.8137,
|
8931 |
+
"step": 1270
|
8932 |
+
},
|
8933 |
+
{
|
8934 |
+
"epoch": 0.06328184318948456,
|
8935 |
+
"grad_norm": 0.23962591588497162,
|
8936 |
+
"learning_rate": 1.5405396788302064e-05,
|
8937 |
+
"loss": 11.8074,
|
8938 |
+
"step": 1271
|
8939 |
+
},
|
8940 |
+
{
|
8941 |
+
"epoch": 0.06333163220851641,
|
8942 |
+
"grad_norm": 0.20624442398548126,
|
8943 |
+
"learning_rate": 1.5296503984672582e-05,
|
8944 |
+
"loss": 11.7508,
|
8945 |
+
"step": 1272
|
8946 |
+
},
|
8947 |
+
{
|
8948 |
+
"epoch": 0.06338142122754827,
|
8949 |
+
"grad_norm": 0.20032471418380737,
|
8950 |
+
"learning_rate": 1.5187965519670289e-05,
|
8951 |
+
"loss": 11.8079,
|
8952 |
+
"step": 1273
|
8953 |
+
},
|
8954 |
+
{
|
8955 |
+
"epoch": 0.06343121024658012,
|
8956 |
+
"grad_norm": 0.24550995230674744,
|
8957 |
+
"learning_rate": 1.5079781847342123e-05,
|
8958 |
+
"loss": 11.7745,
|
8959 |
+
"step": 1274
|
8960 |
+
},
|
8961 |
+
{
|
8962 |
+
"epoch": 0.06348099926561197,
|
8963 |
+
"grad_norm": 0.25876301527023315,
|
8964 |
+
"learning_rate": 1.497195342025094e-05,
|
8965 |
+
"loss": 11.7274,
|
8966 |
+
"step": 1275
|
8967 |
+
},
|
8968 |
+
{
|
8969 |
+
"epoch": 0.06353078828464383,
|
8970 |
+
"grad_norm": 0.2925122082233429,
|
8971 |
+
"learning_rate": 1.486448068947348e-05,
|
8972 |
+
"loss": 11.7551,
|
8973 |
+
"step": 1276
|
8974 |
+
},
|
8975 |
+
{
|
8976 |
+
"epoch": 0.06358057730367568,
|
8977 |
+
"grad_norm": 0.15747866034507751,
|
8978 |
+
"learning_rate": 1.4757364104598547e-05,
|
8979 |
+
"loss": 11.8141,
|
8980 |
+
"step": 1277
|
8981 |
+
},
|
8982 |
+
{
|
8983 |
+
"epoch": 0.06363036632270752,
|
8984 |
+
"grad_norm": 0.26077190041542053,
|
8985 |
+
"learning_rate": 1.4650604113724953e-05,
|
8986 |
+
"loss": 11.6712,
|
8987 |
+
"step": 1278
|
8988 |
+
},
|
8989 |
+
{
|
8990 |
+
"epoch": 0.06368015534173938,
|
8991 |
+
"grad_norm": 0.175714373588562,
|
8992 |
+
"learning_rate": 1.4544201163459959e-05,
|
8993 |
+
"loss": 11.7996,
|
8994 |
+
"step": 1279
|
8995 |
+
},
|
8996 |
+
{
|
8997 |
+
"epoch": 0.06372994436077123,
|
8998 |
+
"grad_norm": 0.17583681643009186,
|
8999 |
+
"learning_rate": 1.4438155698917077e-05,
|
9000 |
+
"loss": 11.7977,
|
9001 |
+
"step": 1280
|
9002 |
+
},
|
9003 |
+
{
|
9004 |
+
"epoch": 0.06377973337980308,
|
9005 |
+
"grad_norm": 0.17645223438739777,
|
9006 |
+
"learning_rate": 1.433246816371443e-05,
|
9007 |
+
"loss": 11.7973,
|
9008 |
+
"step": 1281
|
9009 |
+
},
|
9010 |
+
{
|
9011 |
+
"epoch": 0.06382952239883494,
|
9012 |
+
"grad_norm": 0.225063756108284,
|
9013 |
+
"learning_rate": 1.42271389999728e-05,
|
9014 |
+
"loss": 11.7713,
|
9015 |
+
"step": 1282
|
9016 |
+
},
|
9017 |
+
{
|
9018 |
+
"epoch": 0.06387931141786679,
|
9019 |
+
"grad_norm": 0.231404110789299,
|
9020 |
+
"learning_rate": 1.4122168648313816e-05,
|
9021 |
+
"loss": 11.7736,
|
9022 |
+
"step": 1283
|
9023 |
+
},
|
9024 |
+
{
|
9025 |
+
"epoch": 0.06392910043689864,
|
9026 |
+
"grad_norm": 0.2934523820877075,
|
9027 |
+
"learning_rate": 1.401755754785804e-05,
|
9028 |
+
"loss": 11.7407,
|
9029 |
+
"step": 1284
|
9030 |
+
},
|
9031 |
+
{
|
9032 |
+
"epoch": 0.0639788894559305,
|
9033 |
+
"grad_norm": 0.2312464565038681,
|
9034 |
+
"learning_rate": 1.3913306136223292e-05,
|
9035 |
+
"loss": 11.7608,
|
9036 |
+
"step": 1285
|
9037 |
+
},
|
9038 |
+
{
|
9039 |
+
"epoch": 0.06402867847496235,
|
9040 |
+
"grad_norm": 0.1840086430311203,
|
9041 |
+
"learning_rate": 1.3809414849522584e-05,
|
9042 |
+
"loss": 11.7666,
|
9043 |
+
"step": 1286
|
9044 |
+
},
|
9045 |
+
{
|
9046 |
+
"epoch": 0.06407846749399419,
|
9047 |
+
"grad_norm": 0.18141189217567444,
|
9048 |
+
"learning_rate": 1.3705884122362512e-05,
|
9049 |
+
"loss": 11.8211,
|
9050 |
+
"step": 1287
|
9051 |
+
},
|
9052 |
+
{
|
9053 |
+
"epoch": 0.06412825651302605,
|
9054 |
+
"grad_norm": 0.20563624799251556,
|
9055 |
+
"learning_rate": 1.360271438784133e-05,
|
9056 |
+
"loss": 11.7455,
|
9057 |
+
"step": 1288
|
9058 |
+
},
|
9059 |
+
{
|
9060 |
+
"epoch": 0.0641780455320579,
|
9061 |
+
"grad_norm": 0.2131551057100296,
|
9062 |
+
"learning_rate": 1.3499906077547143e-05,
|
9063 |
+
"loss": 11.7986,
|
9064 |
+
"step": 1289
|
9065 |
+
},
|
9066 |
+
{
|
9067 |
+
"epoch": 0.06422783455108975,
|
9068 |
+
"grad_norm": 0.15692314505577087,
|
9069 |
+
"learning_rate": 1.339745962155613e-05,
|
9070 |
+
"loss": 11.8078,
|
9071 |
+
"step": 1290
|
9072 |
+
},
|
9073 |
+
{
|
9074 |
+
"epoch": 0.06427762357012161,
|
9075 |
+
"grad_norm": 0.36720526218414307,
|
9076 |
+
"learning_rate": 1.3295375448430724e-05,
|
9077 |
+
"loss": 11.7193,
|
9078 |
+
"step": 1291
|
9079 |
+
},
|
9080 |
+
{
|
9081 |
+
"epoch": 0.06432741258915346,
|
9082 |
+
"grad_norm": 0.15019673109054565,
|
9083 |
+
"learning_rate": 1.3193653985217858e-05,
|
9084 |
+
"loss": 11.8034,
|
9085 |
+
"step": 1292
|
9086 |
+
},
|
9087 |
+
{
|
9088 |
+
"epoch": 0.06437720160818532,
|
9089 |
+
"grad_norm": 0.18476565182209015,
|
9090 |
+
"learning_rate": 1.3092295657447073e-05,
|
9091 |
+
"loss": 11.7708,
|
9092 |
+
"step": 1293
|
9093 |
+
},
|
9094 |
+
{
|
9095 |
+
"epoch": 0.06442699062721717,
|
9096 |
+
"grad_norm": 0.17680063843727112,
|
9097 |
+
"learning_rate": 1.2991300889128866e-05,
|
9098 |
+
"loss": 11.7683,
|
9099 |
+
"step": 1294
|
9100 |
+
},
|
9101 |
+
{
|
9102 |
+
"epoch": 0.06447677964624902,
|
9103 |
+
"grad_norm": 0.2056722640991211,
|
9104 |
+
"learning_rate": 1.2890670102752867e-05,
|
9105 |
+
"loss": 11.7957,
|
9106 |
+
"step": 1295
|
9107 |
+
},
|
9108 |
+
{
|
9109 |
+
"epoch": 0.06452656866528088,
|
9110 |
+
"grad_norm": 0.1903773546218872,
|
9111 |
+
"learning_rate": 1.2790403719286049e-05,
|
9112 |
+
"loss": 11.8159,
|
9113 |
+
"step": 1296
|
9114 |
+
},
|
9115 |
+
{
|
9116 |
+
"epoch": 0.06457635768431272,
|
9117 |
+
"grad_norm": 0.2007601261138916,
|
9118 |
+
"learning_rate": 1.269050215817099e-05,
|
9119 |
+
"loss": 11.8124,
|
9120 |
+
"step": 1297
|
9121 |
+
},
|
9122 |
+
{
|
9123 |
+
"epoch": 0.06462614670334457,
|
9124 |
+
"grad_norm": 0.2411353588104248,
|
9125 |
+
"learning_rate": 1.2590965837324131e-05,
|
9126 |
+
"loss": 11.7436,
|
9127 |
+
"step": 1298
|
9128 |
+
},
|
9129 |
+
{
|
9130 |
+
"epoch": 0.06467593572237643,
|
9131 |
+
"grad_norm": 0.18333886563777924,
|
9132 |
+
"learning_rate": 1.2491795173133935e-05,
|
9133 |
+
"loss": 11.8131,
|
9134 |
+
"step": 1299
|
9135 |
+
},
|
9136 |
+
{
|
9137 |
+
"epoch": 0.06472572474140828,
|
9138 |
+
"grad_norm": 0.19457672536373138,
|
9139 |
+
"learning_rate": 1.2392990580459352e-05,
|
9140 |
+
"loss": 11.7076,
|
9141 |
+
"step": 1300
|
9142 |
+
},
|
9143 |
+
{
|
9144 |
+
"epoch": 0.06477551376044013,
|
9145 |
+
"grad_norm": 0.1295676976442337,
|
9146 |
+
"learning_rate": 1.2294552472627808e-05,
|
9147 |
+
"loss": 11.8213,
|
9148 |
+
"step": 1301
|
9149 |
+
},
|
9150 |
+
{
|
9151 |
+
"epoch": 0.06482530277947199,
|
9152 |
+
"grad_norm": 0.1292513757944107,
|
9153 |
+
"learning_rate": 1.2196481261433735e-05,
|
9154 |
+
"loss": 11.8335,
|
9155 |
+
"step": 1302
|
9156 |
+
},
|
9157 |
+
{
|
9158 |
+
"epoch": 0.06487509179850384,
|
9159 |
+
"grad_norm": 0.1650247722864151,
|
9160 |
+
"learning_rate": 1.2098777357136648e-05,
|
9161 |
+
"loss": 11.7762,
|
9162 |
+
"step": 1303
|
9163 |
+
},
|
9164 |
+
{
|
9165 |
+
"epoch": 0.0649248808175357,
|
9166 |
+
"grad_norm": 0.12315841764211655,
|
9167 |
+
"learning_rate": 1.2001441168459604e-05,
|
9168 |
+
"loss": 11.8228,
|
9169 |
+
"step": 1304
|
9170 |
+
},
|
9171 |
+
{
|
9172 |
+
"epoch": 0.06497466983656755,
|
9173 |
+
"grad_norm": 0.17617648839950562,
|
9174 |
+
"learning_rate": 1.1904473102587266e-05,
|
9175 |
+
"loss": 11.7885,
|
9176 |
+
"step": 1305
|
9177 |
+
},
|
9178 |
+
{
|
9179 |
+
"epoch": 0.0650244588555994,
|
9180 |
+
"grad_norm": 0.21746443212032318,
|
9181 |
+
"learning_rate": 1.1807873565164506e-05,
|
9182 |
+
"loss": 11.7753,
|
9183 |
+
"step": 1306
|
9184 |
+
},
|
9185 |
+
{
|
9186 |
+
"epoch": 0.06507424787463124,
|
9187 |
+
"grad_norm": 0.21231286227703094,
|
9188 |
+
"learning_rate": 1.171164296029439e-05,
|
9189 |
+
"loss": 11.7982,
|
9190 |
+
"step": 1307
|
9191 |
+
},
|
9192 |
+
{
|
9193 |
+
"epoch": 0.0651240368936631,
|
9194 |
+
"grad_norm": 0.19948884844779968,
|
9195 |
+
"learning_rate": 1.161578169053672e-05,
|
9196 |
+
"loss": 11.7973,
|
9197 |
+
"step": 1308
|
9198 |
+
},
|
9199 |
+
{
|
9200 |
+
"epoch": 0.06517382591269495,
|
9201 |
+
"grad_norm": 0.17060606181621552,
|
9202 |
+
"learning_rate": 1.1520290156906221e-05,
|
9203 |
+
"loss": 11.7739,
|
9204 |
+
"step": 1309
|
9205 |
+
},
|
9206 |
+
{
|
9207 |
+
"epoch": 0.0652236149317268,
|
9208 |
+
"grad_norm": 0.16603590548038483,
|
9209 |
+
"learning_rate": 1.1425168758870964e-05,
|
9210 |
+
"loss": 11.7717,
|
9211 |
+
"step": 1310
|
9212 |
+
},
|
9213 |
+
{
|
9214 |
+
"epoch": 0.06527340395075866,
|
9215 |
+
"grad_norm": 0.2605130970478058,
|
9216 |
+
"learning_rate": 1.1330417894350531e-05,
|
9217 |
+
"loss": 11.7492,
|
9218 |
+
"step": 1311
|
9219 |
+
},
|
9220 |
+
{
|
9221 |
+
"epoch": 0.06532319296979051,
|
9222 |
+
"grad_norm": 0.18486784398555756,
|
9223 |
+
"learning_rate": 1.1236037959714618e-05,
|
9224 |
+
"loss": 11.794,
|
9225 |
+
"step": 1312
|
9226 |
+
},
|
9227 |
+
{
|
9228 |
+
"epoch": 0.06537298198882237,
|
9229 |
+
"grad_norm": 0.27326729893684387,
|
9230 |
+
"learning_rate": 1.1142029349781046e-05,
|
9231 |
+
"loss": 11.7349,
|
9232 |
+
"step": 1313
|
9233 |
+
},
|
9234 |
+
{
|
9235 |
+
"epoch": 0.06542277100785422,
|
9236 |
+
"grad_norm": 0.14249469339847565,
|
9237 |
+
"learning_rate": 1.1048392457814405e-05,
|
9238 |
+
"loss": 11.8134,
|
9239 |
+
"step": 1314
|
9240 |
+
},
|
9241 |
+
{
|
9242 |
+
"epoch": 0.06547256002688608,
|
9243 |
+
"grad_norm": 0.15261195600032806,
|
9244 |
+
"learning_rate": 1.0955127675524214e-05,
|
9245 |
+
"loss": 11.8002,
|
9246 |
+
"step": 1315
|
9247 |
+
},
|
9248 |
+
{
|
9249 |
+
"epoch": 0.06552234904591792,
|
9250 |
+
"grad_norm": 0.15059180557727814,
|
9251 |
+
"learning_rate": 1.0862235393063413e-05,
|
9252 |
+
"loss": 11.8358,
|
9253 |
+
"step": 1316
|
9254 |
+
},
|
9255 |
+
{
|
9256 |
+
"epoch": 0.06557213806494977,
|
9257 |
+
"grad_norm": 0.15011687576770782,
|
9258 |
+
"learning_rate": 1.0769715999026564e-05,
|
9259 |
+
"loss": 11.8035,
|
9260 |
+
"step": 1317
|
9261 |
+
},
|
9262 |
+
{
|
9263 |
+
"epoch": 0.06562192708398162,
|
9264 |
+
"grad_norm": 0.24232584238052368,
|
9265 |
+
"learning_rate": 1.067756988044848e-05,
|
9266 |
+
"loss": 11.7555,
|
9267 |
+
"step": 1318
|
9268 |
+
},
|
9269 |
+
{
|
9270 |
+
"epoch": 0.06567171610301348,
|
9271 |
+
"grad_norm": 0.2767235338687897,
|
9272 |
+
"learning_rate": 1.0585797422802335e-05,
|
9273 |
+
"loss": 11.7894,
|
9274 |
+
"step": 1319
|
9275 |
+
},
|
9276 |
+
{
|
9277 |
+
"epoch": 0.06572150512204533,
|
9278 |
+
"grad_norm": 0.13861814141273499,
|
9279 |
+
"learning_rate": 1.0494399009998212e-05,
|
9280 |
+
"loss": 11.8011,
|
9281 |
+
"step": 1320
|
9282 |
+
},
|
9283 |
+
{
|
9284 |
+
"epoch": 0.06577129414107719,
|
9285 |
+
"grad_norm": 0.23359110951423645,
|
9286 |
+
"learning_rate": 1.040337502438149e-05,
|
9287 |
+
"loss": 11.7411,
|
9288 |
+
"step": 1321
|
9289 |
+
},
|
9290 |
+
{
|
9291 |
+
"epoch": 0.06582108316010904,
|
9292 |
+
"grad_norm": 0.20453020930290222,
|
9293 |
+
"learning_rate": 1.0312725846731175e-05,
|
9294 |
+
"loss": 11.8098,
|
9295 |
+
"step": 1322
|
9296 |
+
},
|
9297 |
+
{
|
9298 |
+
"epoch": 0.0658708721791409,
|
9299 |
+
"grad_norm": 0.19233523309230804,
|
9300 |
+
"learning_rate": 1.0222451856258352e-05,
|
9301 |
+
"loss": 11.7981,
|
9302 |
+
"step": 1323
|
9303 |
+
},
|
9304 |
+
{
|
9305 |
+
"epoch": 0.06592066119817275,
|
9306 |
+
"grad_norm": 0.18436777591705322,
|
9307 |
+
"learning_rate": 1.0132553430604608e-05,
|
9308 |
+
"loss": 11.8185,
|
9309 |
+
"step": 1324
|
9310 |
+
},
|
9311 |
+
{
|
9312 |
+
"epoch": 0.0659704502172046,
|
9313 |
+
"grad_norm": 0.19035573303699493,
|
9314 |
+
"learning_rate": 1.0043030945840447e-05,
|
9315 |
+
"loss": 11.7904,
|
9316 |
+
"step": 1325
|
9317 |
+
},
|
9318 |
+
{
|
9319 |
+
"epoch": 0.06602023923623644,
|
9320 |
+
"grad_norm": 0.1984977275133133,
|
9321 |
+
"learning_rate": 9.953884776463652e-06,
|
9322 |
+
"loss": 11.7855,
|
9323 |
+
"step": 1326
|
9324 |
+
},
|
9325 |
+
{
|
9326 |
+
"epoch": 0.0660700282552683,
|
9327 |
+
"grad_norm": 0.18953529000282288,
|
9328 |
+
"learning_rate": 9.865115295397808e-06,
|
9329 |
+
"loss": 11.7463,
|
9330 |
+
"step": 1327
|
9331 |
+
},
|
9332 |
+
{
|
9333 |
+
"epoch": 0.06611981727430015,
|
9334 |
+
"grad_norm": 0.28462833166122437,
|
9335 |
+
"learning_rate": 9.776722873990719e-06,
|
9336 |
+
"loss": 11.777,
|
9337 |
+
"step": 1328
|
9338 |
+
},
|
9339 |
+
{
|
9340 |
+
"epoch": 0.066169606293332,
|
9341 |
+
"grad_norm": 0.17960093915462494,
|
9342 |
+
"learning_rate": 9.688707882012814e-06,
|
9343 |
+
"loss": 11.7865,
|
9344 |
+
"step": 1329
|
9345 |
+
},
|
9346 |
+
{
|
9347 |
+
"epoch": 0.06621939531236386,
|
9348 |
+
"grad_norm": 0.2843165099620819,
|
9349 |
+
"learning_rate": 9.601070687655667e-06,
|
9350 |
+
"loss": 11.7383,
|
9351 |
+
"step": 1330
|
9352 |
+
},
|
9353 |
+
{
|
9354 |
+
"epoch": 0.06626918433139571,
|
9355 |
+
"grad_norm": 0.22249552607536316,
|
9356 |
+
"learning_rate": 9.513811657530391e-06,
|
9357 |
+
"loss": 11.8015,
|
9358 |
+
"step": 1331
|
9359 |
+
},
|
9360 |
+
{
|
9361 |
+
"epoch": 0.06631897335042757,
|
9362 |
+
"grad_norm": 0.18703776597976685,
|
9363 |
+
"learning_rate": 9.426931156666085e-06,
|
9364 |
+
"loss": 11.7874,
|
9365 |
+
"step": 1332
|
9366 |
+
},
|
9367 |
+
{
|
9368 |
+
"epoch": 0.06636876236945942,
|
9369 |
+
"grad_norm": 0.15447834134101868,
|
9370 |
+
"learning_rate": 9.340429548508468e-06,
|
9371 |
+
"loss": 11.7912,
|
9372 |
+
"step": 1333
|
9373 |
+
},
|
9374 |
+
{
|
9375 |
+
"epoch": 0.06641855138849127,
|
9376 |
+
"grad_norm": 0.13673259317874908,
|
9377 |
+
"learning_rate": 9.254307194918144e-06,
|
9378 |
+
"loss": 11.8201,
|
9379 |
+
"step": 1334
|
9380 |
+
},
|
9381 |
+
{
|
9382 |
+
"epoch": 0.06646834040752313,
|
9383 |
+
"grad_norm": 0.2511157989501953,
|
9384 |
+
"learning_rate": 9.168564456169215e-06,
|
9385 |
+
"loss": 11.7807,
|
9386 |
+
"step": 1335
|
9387 |
+
},
|
9388 |
+
{
|
9389 |
+
"epoch": 0.06651812942655497,
|
9390 |
+
"grad_norm": 0.15829138457775116,
|
9391 |
+
"learning_rate": 9.083201690947763e-06,
|
9392 |
+
"loss": 11.7919,
|
9393 |
+
"step": 1336
|
9394 |
+
},
|
9395 |
+
{
|
9396 |
+
"epoch": 0.06656791844558682,
|
9397 |
+
"grad_norm": 0.27086418867111206,
|
9398 |
+
"learning_rate": 8.998219256350349e-06,
|
9399 |
+
"loss": 11.7224,
|
9400 |
+
"step": 1337
|
9401 |
+
},
|
9402 |
+
{
|
9403 |
+
"epoch": 0.06661770746461868,
|
9404 |
+
"grad_norm": 0.18902213871479034,
|
9405 |
+
"learning_rate": 8.91361750788241e-06,
|
9406 |
+
"loss": 11.7965,
|
9407 |
+
"step": 1338
|
9408 |
+
},
|
9409 |
+
{
|
9410 |
+
"epoch": 0.06666749648365053,
|
9411 |
+
"grad_norm": 0.20603874325752258,
|
9412 |
+
"learning_rate": 8.829396799457024e-06,
|
9413 |
+
"loss": 11.8084,
|
9414 |
+
"step": 1339
|
9415 |
+
},
|
9416 |
+
{
|
9417 |
+
"epoch": 0.06671728550268238,
|
9418 |
+
"grad_norm": 0.1588762402534485,
|
9419 |
+
"learning_rate": 8.745557483393118e-06,
|
9420 |
+
"loss": 11.7921,
|
9421 |
+
"step": 1340
|
9422 |
+
},
|
9423 |
+
{
|
9424 |
+
"epoch": 0.06676707452171424,
|
9425 |
+
"grad_norm": 0.16840197145938873,
|
9426 |
+
"learning_rate": 8.662099910414222e-06,
|
9427 |
+
"loss": 11.7359,
|
9428 |
+
"step": 1341
|
9429 |
+
},
|
9430 |
+
{
|
9431 |
+
"epoch": 0.06681686354074609,
|
9432 |
+
"grad_norm": 0.18038836121559143,
|
9433 |
+
"learning_rate": 8.579024429646932e-06,
|
9434 |
+
"loss": 11.783,
|
9435 |
+
"step": 1342
|
9436 |
+
},
|
9437 |
+
{
|
9438 |
+
"epoch": 0.06686665255977794,
|
9439 |
+
"grad_norm": 0.30767059326171875,
|
9440 |
+
"learning_rate": 8.496331388619438e-06,
|
9441 |
+
"loss": 11.6919,
|
9442 |
+
"step": 1343
|
9443 |
+
},
|
9444 |
+
{
|
9445 |
+
"epoch": 0.0669164415788098,
|
9446 |
+
"grad_norm": 0.2664734125137329,
|
9447 |
+
"learning_rate": 8.414021133260007e-06,
|
9448 |
+
"loss": 11.7529,
|
9449 |
+
"step": 1344
|
9450 |
+
},
|
9451 |
+
{
|
9452 |
+
"epoch": 0.06696623059784165,
|
9453 |
+
"grad_norm": 0.16803313791751862,
|
9454 |
+
"learning_rate": 8.332094007895741e-06,
|
9455 |
+
"loss": 11.7672,
|
9456 |
+
"step": 1345
|
9457 |
+
},
|
9458 |
+
{
|
9459 |
+
"epoch": 0.06701601961687349,
|
9460 |
+
"grad_norm": 0.16844692826271057,
|
9461 |
+
"learning_rate": 8.250550355250875e-06,
|
9462 |
+
"loss": 11.8,
|
9463 |
+
"step": 1346
|
9464 |
+
},
|
9465 |
+
{
|
9466 |
+
"epoch": 0.06706580863590535,
|
9467 |
+
"grad_norm": 0.38066744804382324,
|
9468 |
+
"learning_rate": 8.16939051644554e-06,
|
9469 |
+
"loss": 11.7309,
|
9470 |
+
"step": 1347
|
9471 |
+
},
|
9472 |
+
{
|
9473 |
+
"epoch": 0.0671155976549372,
|
9474 |
+
"grad_norm": 0.17246215045452118,
|
9475 |
+
"learning_rate": 8.088614830994223e-06,
|
9476 |
+
"loss": 11.8018,
|
9477 |
+
"step": 1348
|
9478 |
+
},
|
9479 |
+
{
|
9480 |
+
"epoch": 0.06716538667396905,
|
9481 |
+
"grad_norm": 0.24451196193695068,
|
9482 |
+
"learning_rate": 8.008223636804457e-06,
|
9483 |
+
"loss": 11.7835,
|
9484 |
+
"step": 1349
|
9485 |
+
},
|
9486 |
+
{
|
9487 |
+
"epoch": 0.06721517569300091,
|
9488 |
+
"grad_norm": 0.17051436007022858,
|
9489 |
+
"learning_rate": 7.92821727017523e-06,
|
9490 |
+
"loss": 11.8191,
|
9491 |
+
"step": 1350
|
9492 |
+
},
|
9493 |
+
{
|
9494 |
+
"epoch": 0.06726496471203276,
|
9495 |
+
"grad_norm": 0.2110452950000763,
|
9496 |
+
"learning_rate": 7.84859606579582e-06,
|
9497 |
+
"loss": 11.783,
|
9498 |
+
"step": 1351
|
9499 |
+
},
|
9500 |
+
{
|
9501 |
+
"epoch": 0.06731475373106462,
|
9502 |
+
"grad_norm": 0.24877861142158508,
|
9503 |
+
"learning_rate": 7.769360356744137e-06,
|
9504 |
+
"loss": 11.7938,
|
9505 |
+
"step": 1352
|
9506 |
+
},
|
9507 |
+
{
|
9508 |
+
"epoch": 0.06736454275009647,
|
9509 |
+
"grad_norm": 0.20723049342632294,
|
9510 |
+
"learning_rate": 7.690510474485535e-06,
|
9511 |
+
"loss": 11.775,
|
9512 |
+
"step": 1353
|
9513 |
+
},
|
9514 |
+
{
|
9515 |
+
"epoch": 0.06741433176912832,
|
9516 |
+
"grad_norm": 0.32642632722854614,
|
9517 |
+
"learning_rate": 7.612046748871327e-06,
|
9518 |
+
"loss": 11.7432,
|
9519 |
+
"step": 1354
|
9520 |
+
},
|
9521 |
+
{
|
9522 |
+
"epoch": 0.06746412078816016,
|
9523 |
+
"grad_norm": 0.18689784407615662,
|
9524 |
+
"learning_rate": 7.533969508137418e-06,
|
9525 |
+
"loss": 11.7489,
|
9526 |
+
"step": 1355
|
9527 |
+
},
|
9528 |
+
{
|
9529 |
+
"epoch": 0.06751390980719202,
|
9530 |
+
"grad_norm": 0.1531464010477066,
|
9531 |
+
"learning_rate": 7.456279078902928e-06,
|
9532 |
+
"loss": 11.8084,
|
9533 |
+
"step": 1356
|
9534 |
+
},
|
9535 |
+
{
|
9536 |
+
"epoch": 0.06756369882622387,
|
9537 |
+
"grad_norm": 0.15569479763507843,
|
9538 |
+
"learning_rate": 7.378975786168863e-06,
|
9539 |
+
"loss": 11.7955,
|
9540 |
+
"step": 1357
|
9541 |
+
},
|
9542 |
+
{
|
9543 |
+
"epoch": 0.06761348784525573,
|
9544 |
+
"grad_norm": 0.13607852160930634,
|
9545 |
+
"learning_rate": 7.30205995331672e-06,
|
9546 |
+
"loss": 11.7745,
|
9547 |
+
"step": 1358
|
9548 |
+
},
|
9549 |
+
{
|
9550 |
+
"epoch": 0.06766327686428758,
|
9551 |
+
"grad_norm": 0.254788875579834,
|
9552 |
+
"learning_rate": 7.225531902107075e-06,
|
9553 |
+
"loss": 11.7883,
|
9554 |
+
"step": 1359
|
9555 |
+
},
|
9556 |
+
{
|
9557 |
+
"epoch": 0.06771306588331943,
|
9558 |
+
"grad_norm": 0.17215323448181152,
|
9559 |
+
"learning_rate": 7.149391952678452e-06,
|
9560 |
+
"loss": 11.7977,
|
9561 |
+
"step": 1360
|
9562 |
+
},
|
9563 |
+
{
|
9564 |
+
"epoch": 0.06776285490235129,
|
9565 |
+
"grad_norm": 0.18600726127624512,
|
9566 |
+
"learning_rate": 7.073640423545669e-06,
|
9567 |
+
"loss": 11.8324,
|
9568 |
+
"step": 1361
|
9569 |
+
},
|
9570 |
+
{
|
9571 |
+
"epoch": 0.06781264392138314,
|
9572 |
+
"grad_norm": 0.13674423098564148,
|
9573 |
+
"learning_rate": 6.998277631598793e-06,
|
9574 |
+
"loss": 11.7892,
|
9575 |
+
"step": 1362
|
9576 |
+
},
|
9577 |
+
{
|
9578 |
+
"epoch": 0.067862432940415,
|
9579 |
+
"grad_norm": 0.17669561505317688,
|
9580 |
+
"learning_rate": 6.923303892101629e-06,
|
9581 |
+
"loss": 11.8134,
|
9582 |
+
"step": 1363
|
9583 |
+
},
|
9584 |
+
{
|
9585 |
+
"epoch": 0.06791222195944685,
|
9586 |
+
"grad_norm": 0.15605033934116364,
|
9587 |
+
"learning_rate": 6.848719518690505e-06,
|
9588 |
+
"loss": 11.7993,
|
9589 |
+
"step": 1364
|
9590 |
+
},
|
9591 |
+
{
|
9592 |
+
"epoch": 0.06796201097847869,
|
9593 |
+
"grad_norm": 0.14751456677913666,
|
9594 |
+
"learning_rate": 6.774524823372863e-06,
|
9595 |
+
"loss": 11.8109,
|
9596 |
+
"step": 1365
|
9597 |
+
},
|
9598 |
+
{
|
9599 |
+
"epoch": 0.06801179999751054,
|
9600 |
+
"grad_norm": 0.22870990633964539,
|
9601 |
+
"learning_rate": 6.700720116526116e-06,
|
9602 |
+
"loss": 11.7763,
|
9603 |
+
"step": 1366
|
9604 |
+
},
|
9605 |
+
{
|
9606 |
+
"epoch": 0.0680615890165424,
|
9607 |
+
"grad_norm": 0.20633479952812195,
|
9608 |
+
"learning_rate": 6.6273057068961255e-06,
|
9609 |
+
"loss": 11.7761,
|
9610 |
+
"step": 1367
|
9611 |
+
},
|
9612 |
+
{
|
9613 |
+
"epoch": 0.06811137803557425,
|
9614 |
+
"grad_norm": 0.16053776443004608,
|
9615 |
+
"learning_rate": 6.554281901596104e-06,
|
9616 |
+
"loss": 11.8222,
|
9617 |
+
"step": 1368
|
9618 |
+
},
|
9619 |
+
{
|
9620 |
+
"epoch": 0.0681611670546061,
|
9621 |
+
"grad_norm": 0.18129949271678925,
|
9622 |
+
"learning_rate": 6.48164900610524e-06,
|
9623 |
+
"loss": 11.7816,
|
9624 |
+
"step": 1369
|
9625 |
+
},
|
9626 |
+
{
|
9627 |
+
"epoch": 0.06821095607363796,
|
9628 |
+
"grad_norm": 0.17778095602989197,
|
9629 |
+
"learning_rate": 6.409407324267447e-06,
|
9630 |
+
"loss": 11.7602,
|
9631 |
+
"step": 1370
|
9632 |
+
},
|
9633 |
+
{
|
9634 |
+
"epoch": 0.06826074509266981,
|
9635 |
+
"grad_norm": 0.13038229942321777,
|
9636 |
+
"learning_rate": 6.337557158290009e-06,
|
9637 |
+
"loss": 11.8125,
|
9638 |
+
"step": 1371
|
9639 |
+
},
|
9640 |
+
{
|
9641 |
+
"epoch": 0.06831053411170167,
|
9642 |
+
"grad_norm": 0.14508725702762604,
|
9643 |
+
"learning_rate": 6.266098808742516e-06,
|
9644 |
+
"loss": 11.8424,
|
9645 |
+
"step": 1372
|
9646 |
+
},
|
9647 |
+
{
|
9648 |
+
"epoch": 0.06836032313073352,
|
9649 |
+
"grad_norm": 0.17138592898845673,
|
9650 |
+
"learning_rate": 6.1950325745553705e-06,
|
9651 |
+
"loss": 11.8248,
|
9652 |
+
"step": 1373
|
9653 |
+
},
|
9654 |
+
{
|
9655 |
+
"epoch": 0.06841011214976538,
|
9656 |
+
"grad_norm": 0.23954185843467712,
|
9657 |
+
"learning_rate": 6.124358753018689e-06,
|
9658 |
+
"loss": 11.7354,
|
9659 |
+
"step": 1374
|
9660 |
+
},
|
9661 |
+
{
|
9662 |
+
"epoch": 0.06845990116879722,
|
9663 |
+
"grad_norm": 0.20728358626365662,
|
9664 |
+
"learning_rate": 6.0540776397810085e-06,
|
9665 |
+
"loss": 11.8165,
|
9666 |
+
"step": 1375
|
9667 |
+
},
|
9668 |
+
{
|
9669 |
+
"epoch": 0.06850969018782907,
|
9670 |
+
"grad_norm": 0.17900319397449493,
|
9671 |
+
"learning_rate": 5.984189528848095e-06,
|
9672 |
+
"loss": 11.8032,
|
9673 |
+
"step": 1376
|
9674 |
+
},
|
9675 |
+
{
|
9676 |
+
"epoch": 0.06855947920686092,
|
9677 |
+
"grad_norm": 0.1983952671289444,
|
9678 |
+
"learning_rate": 5.91469471258157e-06,
|
9679 |
+
"loss": 11.7399,
|
9680 |
+
"step": 1377
|
9681 |
+
},
|
9682 |
+
{
|
9683 |
+
"epoch": 0.06860926822589278,
|
9684 |
+
"grad_norm": 0.1507929116487503,
|
9685 |
+
"learning_rate": 5.8455934816979305e-06,
|
9686 |
+
"loss": 11.7895,
|
9687 |
+
"step": 1378
|
9688 |
+
},
|
9689 |
+
{
|
9690 |
+
"epoch": 0.06865905724492463,
|
9691 |
+
"grad_norm": 0.20500941574573517,
|
9692 |
+
"learning_rate": 5.776886125267089e-06,
|
9693 |
+
"loss": 11.8209,
|
9694 |
+
"step": 1379
|
9695 |
+
},
|
9696 |
+
{
|
9697 |
+
"epoch": 0.06870884626395649,
|
9698 |
+
"grad_norm": 0.18446022272109985,
|
9699 |
+
"learning_rate": 5.708572930711309e-06,
|
9700 |
+
"loss": 11.7794,
|
9701 |
+
"step": 1380
|
9702 |
+
},
|
9703 |
+
{
|
9704 |
+
"epoch": 0.06875863528298834,
|
9705 |
+
"grad_norm": 0.16411173343658447,
|
9706 |
+
"learning_rate": 5.640654183803962e-06,
|
9707 |
+
"loss": 11.7888,
|
9708 |
+
"step": 1381
|
9709 |
+
},
|
9710 |
+
{
|
9711 |
+
"epoch": 0.0688084243020202,
|
9712 |
+
"grad_norm": 0.2134004533290863,
|
9713 |
+
"learning_rate": 5.57313016866835e-06,
|
9714 |
+
"loss": 11.7693,
|
9715 |
+
"step": 1382
|
9716 |
+
},
|
9717 |
+
{
|
9718 |
+
"epoch": 0.06885821332105205,
|
9719 |
+
"grad_norm": 0.264740526676178,
|
9720 |
+
"learning_rate": 5.506001167776464e-06,
|
9721 |
+
"loss": 11.7674,
|
9722 |
+
"step": 1383
|
9723 |
+
},
|
9724 |
+
{
|
9725 |
+
"epoch": 0.06890800234008389,
|
9726 |
+
"grad_norm": 0.1912824958562851,
|
9727 |
+
"learning_rate": 5.439267461947883e-06,
|
9728 |
+
"loss": 11.7971,
|
9729 |
+
"step": 1384
|
9730 |
+
},
|
9731 |
+
{
|
9732 |
+
"epoch": 0.06895779135911574,
|
9733 |
+
"grad_norm": 0.24043652415275574,
|
9734 |
+
"learning_rate": 5.3729293303484864e-06,
|
9735 |
+
"loss": 11.7721,
|
9736 |
+
"step": 1385
|
9737 |
+
},
|
9738 |
+
{
|
9739 |
+
"epoch": 0.0690075803781476,
|
9740 |
+
"grad_norm": 0.21462978422641754,
|
9741 |
+
"learning_rate": 5.306987050489442e-06,
|
9742 |
+
"loss": 11.7343,
|
9743 |
+
"step": 1386
|
9744 |
+
},
|
9745 |
+
{
|
9746 |
+
"epoch": 0.06905736939717945,
|
9747 |
+
"grad_norm": 0.1838296502828598,
|
9748 |
+
"learning_rate": 5.241440898225891e-06,
|
9749 |
+
"loss": 11.7714,
|
9750 |
+
"step": 1387
|
9751 |
+
},
|
9752 |
+
{
|
9753 |
+
"epoch": 0.0691071584162113,
|
9754 |
+
"grad_norm": 0.18836387991905212,
|
9755 |
+
"learning_rate": 5.1762911477558965e-06,
|
9756 |
+
"loss": 11.7724,
|
9757 |
+
"step": 1388
|
9758 |
+
},
|
9759 |
+
{
|
9760 |
+
"epoch": 0.06915694743524316,
|
9761 |
+
"grad_norm": 0.16222824156284332,
|
9762 |
+
"learning_rate": 5.111538071619249e-06,
|
9763 |
+
"loss": 11.8043,
|
9764 |
+
"step": 1389
|
9765 |
+
},
|
9766 |
+
{
|
9767 |
+
"epoch": 0.06920673645427501,
|
9768 |
+
"grad_norm": 0.23392929136753082,
|
9769 |
+
"learning_rate": 5.047181940696333e-06,
|
9770 |
+
"loss": 11.7533,
|
9771 |
+
"step": 1390
|
9772 |
+
},
|
9773 |
+
{
|
9774 |
+
"epoch": 0.06925652547330687,
|
9775 |
+
"grad_norm": 0.20155908167362213,
|
9776 |
+
"learning_rate": 4.983223024207029e-06,
|
9777 |
+
"loss": 11.7653,
|
9778 |
+
"step": 1391
|
9779 |
+
},
|
9780 |
+
{
|
9781 |
+
"epoch": 0.06930631449233872,
|
9782 |
+
"grad_norm": 0.18231526017189026,
|
9783 |
+
"learning_rate": 4.919661589709479e-06,
|
9784 |
+
"loss": 11.797,
|
9785 |
+
"step": 1392
|
9786 |
+
},
|
9787 |
+
{
|
9788 |
+
"epoch": 0.06935610351137057,
|
9789 |
+
"grad_norm": 0.19696125388145447,
|
9790 |
+
"learning_rate": 4.856497903099166e-06,
|
9791 |
+
"loss": 11.7374,
|
9792 |
+
"step": 1393
|
9793 |
+
},
|
9794 |
+
{
|
9795 |
+
"epoch": 0.06940589253040241,
|
9796 |
+
"grad_norm": 0.1968884915113449,
|
9797 |
+
"learning_rate": 4.7937322286075725e-06,
|
9798 |
+
"loss": 11.7665,
|
9799 |
+
"step": 1394
|
9800 |
+
},
|
9801 |
+
{
|
9802 |
+
"epoch": 0.06945568154943427,
|
9803 |
+
"grad_norm": 0.1573328673839569,
|
9804 |
+
"learning_rate": 4.731364828801243e-06,
|
9805 |
+
"loss": 11.805,
|
9806 |
+
"step": 1395
|
9807 |
+
},
|
9808 |
+
{
|
9809 |
+
"epoch": 0.06950547056846612,
|
9810 |
+
"grad_norm": 0.18190044164657593,
|
9811 |
+
"learning_rate": 4.669395964580614e-06,
|
9812 |
+
"loss": 11.8274,
|
9813 |
+
"step": 1396
|
9814 |
+
},
|
9815 |
+
{
|
9816 |
+
"epoch": 0.06955525958749798,
|
9817 |
+
"grad_norm": 0.16547870635986328,
|
9818 |
+
"learning_rate": 4.607825895178941e-06,
|
9819 |
+
"loss": 11.7654,
|
9820 |
+
"step": 1397
|
9821 |
+
},
|
9822 |
+
{
|
9823 |
+
"epoch": 0.06960504860652983,
|
9824 |
+
"grad_norm": 0.27401962876319885,
|
9825 |
+
"learning_rate": 4.54665487816115e-06,
|
9826 |
+
"loss": 11.71,
|
9827 |
+
"step": 1398
|
9828 |
+
},
|
9829 |
+
{
|
9830 |
+
"epoch": 0.06965483762556168,
|
9831 |
+
"grad_norm": 0.1850835680961609,
|
9832 |
+
"learning_rate": 4.485883169422933e-06,
|
9833 |
+
"loss": 11.7776,
|
9834 |
+
"step": 1399
|
9835 |
+
},
|
9836 |
+
{
|
9837 |
+
"epoch": 0.06970462664459354,
|
9838 |
+
"grad_norm": 0.20128892362117767,
|
9839 |
+
"learning_rate": 4.425511023189455e-06,
|
9840 |
+
"loss": 11.7581,
|
9841 |
+
"step": 1400
|
9842 |
+
},
|
9843 |
+
{
|
9844 |
+
"epoch": 0.06975441566362539,
|
9845 |
+
"grad_norm": 0.29459527134895325,
|
9846 |
+
"learning_rate": 4.365538692014459e-06,
|
9847 |
+
"loss": 11.7471,
|
9848 |
+
"step": 1401
|
9849 |
+
},
|
9850 |
+
{
|
9851 |
+
"epoch": 0.06980420468265724,
|
9852 |
+
"grad_norm": 0.1757139265537262,
|
9853 |
+
"learning_rate": 4.305966426779118e-06,
|
9854 |
+
"loss": 11.7846,
|
9855 |
+
"step": 1402
|
9856 |
+
},
|
9857 |
+
{
|
9858 |
+
"epoch": 0.0698539937016891,
|
9859 |
+
"grad_norm": 0.2456239014863968,
|
9860 |
+
"learning_rate": 4.24679447669104e-06,
|
9861 |
+
"loss": 11.7441,
|
9862 |
+
"step": 1403
|
9863 |
+
},
|
9864 |
+
{
|
9865 |
+
"epoch": 0.06990378272072094,
|
9866 |
+
"grad_norm": 0.13391117751598358,
|
9867 |
+
"learning_rate": 4.188023089283177e-06,
|
9868 |
+
"loss": 11.7944,
|
9869 |
+
"step": 1404
|
9870 |
+
},
|
9871 |
+
{
|
9872 |
+
"epoch": 0.06995357173975279,
|
9873 |
+
"grad_norm": 0.18124625086784363,
|
9874 |
+
"learning_rate": 4.129652510412851e-06,
|
9875 |
+
"loss": 11.7355,
|
9876 |
+
"step": 1405
|
9877 |
+
},
|
9878 |
+
{
|
9879 |
+
"epoch": 0.07000336075878465,
|
9880 |
+
"grad_norm": 0.15496863424777985,
|
9881 |
+
"learning_rate": 4.071682984260638e-06,
|
9882 |
+
"loss": 11.7991,
|
9883 |
+
"step": 1406
|
9884 |
+
},
|
9885 |
+
{
|
9886 |
+
"epoch": 0.0700531497778165,
|
9887 |
+
"grad_norm": 0.2016768902540207,
|
9888 |
+
"learning_rate": 4.014114753329446e-06,
|
9889 |
+
"loss": 11.7922,
|
9890 |
+
"step": 1407
|
9891 |
+
},
|
9892 |
+
{
|
9893 |
+
"epoch": 0.07010293879684835,
|
9894 |
+
"grad_norm": 0.1571883112192154,
|
9895 |
+
"learning_rate": 3.9569480584434216e-06,
|
9896 |
+
"loss": 11.7845,
|
9897 |
+
"step": 1408
|
9898 |
+
},
|
9899 |
+
{
|
9900 |
+
"epoch": 0.07015272781588021,
|
9901 |
+
"grad_norm": 0.20776788890361786,
|
9902 |
+
"learning_rate": 3.900183138746993e-06,
|
9903 |
+
"loss": 11.7574,
|
9904 |
+
"step": 1409
|
9905 |
+
},
|
9906 |
+
{
|
9907 |
+
"epoch": 0.07020251683491206,
|
9908 |
+
"grad_norm": 0.16272762417793274,
|
9909 |
+
"learning_rate": 3.8438202317037986e-06,
|
9910 |
+
"loss": 11.7947,
|
9911 |
+
"step": 1410
|
9912 |
+
},
|
9913 |
+
{
|
9914 |
+
"epoch": 0.07025230585394392,
|
9915 |
+
"grad_norm": 0.18583381175994873,
|
9916 |
+
"learning_rate": 3.787859573095853e-06,
|
9917 |
+
"loss": 11.7632,
|
9918 |
+
"step": 1411
|
9919 |
+
},
|
9920 |
+
{
|
9921 |
+
"epoch": 0.07030209487297577,
|
9922 |
+
"grad_norm": 0.15767168998718262,
|
9923 |
+
"learning_rate": 3.73230139702232e-06,
|
9924 |
+
"loss": 11.7643,
|
9925 |
+
"step": 1412
|
9926 |
+
},
|
9927 |
+
{
|
9928 |
+
"epoch": 0.07035188389200762,
|
9929 |
+
"grad_norm": 0.22298897802829742,
|
9930 |
+
"learning_rate": 3.6771459358987513e-06,
|
9931 |
+
"loss": 11.7749,
|
9932 |
+
"step": 1413
|
9933 |
+
},
|
9934 |
+
{
|
9935 |
+
"epoch": 0.07040167291103946,
|
9936 |
+
"grad_norm": 0.16401736438274384,
|
9937 |
+
"learning_rate": 3.622393420456016e-06,
|
9938 |
+
"loss": 11.7595,
|
9939 |
+
"step": 1414
|
9940 |
+
},
|
9941 |
+
{
|
9942 |
+
"epoch": 0.07045146193007132,
|
9943 |
+
"grad_norm": 0.1506534218788147,
|
9944 |
+
"learning_rate": 3.56804407973933e-06,
|
9945 |
+
"loss": 11.7954,
|
9946 |
+
"step": 1415
|
9947 |
+
},
|
9948 |
+
{
|
9949 |
+
"epoch": 0.07050125094910317,
|
9950 |
+
"grad_norm": 0.16986608505249023,
|
9951 |
+
"learning_rate": 3.514098141107314e-06,
|
9952 |
+
"loss": 11.7684,
|
9953 |
+
"step": 1416
|
9954 |
+
},
|
9955 |
+
{
|
9956 |
+
"epoch": 0.07055103996813503,
|
9957 |
+
"grad_norm": 0.1684313863515854,
|
9958 |
+
"learning_rate": 3.4605558302310715e-06,
|
9959 |
+
"loss": 11.8155,
|
9960 |
+
"step": 1417
|
9961 |
+
},
|
9962 |
+
{
|
9963 |
+
"epoch": 0.07060082898716688,
|
9964 |
+
"grad_norm": 0.16894525289535522,
|
9965 |
+
"learning_rate": 3.40741737109318e-06,
|
9966 |
+
"loss": 11.7789,
|
9967 |
+
"step": 1418
|
9968 |
+
},
|
9969 |
+
{
|
9970 |
+
"epoch": 0.07065061800619873,
|
9971 |
+
"grad_norm": 0.12358859926462173,
|
9972 |
+
"learning_rate": 3.3546829859868123e-06,
|
9973 |
+
"loss": 11.8163,
|
9974 |
+
"step": 1419
|
9975 |
+
},
|
9976 |
+
{
|
9977 |
+
"epoch": 0.07070040702523059,
|
9978 |
+
"grad_norm": 0.13290484249591827,
|
9979 |
+
"learning_rate": 3.302352895514793e-06,
|
9980 |
+
"loss": 11.8037,
|
9981 |
+
"step": 1420
|
9982 |
+
},
|
9983 |
+
{
|
9984 |
+
"epoch": 0.07075019604426244,
|
9985 |
+
"grad_norm": 0.11723480373620987,
|
9986 |
+
"learning_rate": 3.2504273185886447e-06,
|
9987 |
+
"loss": 11.8336,
|
9988 |
+
"step": 1421
|
9989 |
+
},
|
9990 |
+
{
|
9991 |
+
"epoch": 0.0707999850632943,
|
9992 |
+
"grad_norm": 0.2081618458032608,
|
9993 |
+
"learning_rate": 3.198906472427732e-06,
|
9994 |
+
"loss": 11.7767,
|
9995 |
+
"step": 1422
|
9996 |
+
},
|
9997 |
+
{
|
9998 |
+
"epoch": 0.07084977408232614,
|
9999 |
+
"grad_norm": 0.13548432290554047,
|
10000 |
+
"learning_rate": 3.1477905725582623e-06,
|
10001 |
+
"loss": 11.8009,
|
10002 |
+
"step": 1423
|
10003 |
+
},
|
10004 |
+
{
|
10005 |
+
"epoch": 0.07089956310135799,
|
10006 |
+
"grad_norm": 0.1521718055009842,
|
10007 |
+
"learning_rate": 3.0970798328125105e-06,
|
10008 |
+
"loss": 11.7784,
|
10009 |
+
"step": 1424
|
10010 |
+
},
|
10011 |
+
{
|
10012 |
+
"epoch": 0.07094935212038984,
|
10013 |
+
"grad_norm": 0.2850455939769745,
|
10014 |
+
"learning_rate": 3.0467744653277617e-06,
|
10015 |
+
"loss": 11.7356,
|
10016 |
+
"step": 1425
|
10017 |
+
},
|
10018 |
+
{
|
10019 |
+
"epoch": 0.0709991411394217,
|
10020 |
+
"grad_norm": 0.23329710960388184,
|
10021 |
+
"learning_rate": 2.996874680545603e-06,
|
10022 |
+
"loss": 11.7991,
|
10023 |
+
"step": 1426
|
10024 |
+
},
|
10025 |
+
{
|
10026 |
+
"epoch": 0.07104893015845355,
|
10027 |
+
"grad_norm": 0.1841469258069992,
|
10028 |
+
"learning_rate": 2.9473806872108988e-06,
|
10029 |
+
"loss": 11.807,
|
10030 |
+
"step": 1427
|
10031 |
+
},
|
10032 |
+
{
|
10033 |
+
"epoch": 0.0710987191774854,
|
10034 |
+
"grad_norm": 0.17405760288238525,
|
10035 |
+
"learning_rate": 2.8982926923709962e-06,
|
10036 |
+
"loss": 11.8047,
|
10037 |
+
"step": 1428
|
10038 |
+
},
|
10039 |
+
{
|
10040 |
+
"epoch": 0.07114850819651726,
|
10041 |
+
"grad_norm": 0.1519736349582672,
|
10042 |
+
"learning_rate": 2.8496109013748217e-06,
|
10043 |
+
"loss": 11.8019,
|
10044 |
+
"step": 1429
|
10045 |
+
},
|
10046 |
+
{
|
10047 |
+
"epoch": 0.07119829721554911,
|
10048 |
+
"grad_norm": 0.20428180694580078,
|
10049 |
+
"learning_rate": 2.8013355178720612e-06,
|
10050 |
+
"loss": 11.7955,
|
10051 |
+
"step": 1430
|
10052 |
+
},
|
10053 |
+
{
|
10054 |
+
"epoch": 0.07124808623458097,
|
10055 |
+
"grad_norm": 0.16532306373119354,
|
10056 |
+
"learning_rate": 2.7534667438122387e-06,
|
10057 |
+
"loss": 11.7715,
|
10058 |
+
"step": 1431
|
10059 |
+
},
|
10060 |
+
{
|
10061 |
+
"epoch": 0.07129787525361282,
|
10062 |
+
"grad_norm": 0.1306053251028061,
|
10063 |
+
"learning_rate": 2.7060047794439936e-06,
|
10064 |
+
"loss": 11.7838,
|
10065 |
+
"step": 1432
|
10066 |
+
},
|
10067 |
+
{
|
10068 |
+
"epoch": 0.07134766427264466,
|
10069 |
+
"grad_norm": 0.15693004429340363,
|
10070 |
+
"learning_rate": 2.658949823314061e-06,
|
10071 |
+
"loss": 11.797,
|
10072 |
+
"step": 1433
|
10073 |
+
},
|
10074 |
+
{
|
10075 |
+
"epoch": 0.07139745329167652,
|
10076 |
+
"grad_norm": 0.2272719144821167,
|
10077 |
+
"learning_rate": 2.612302072266637e-06,
|
10078 |
+
"loss": 11.7342,
|
10079 |
+
"step": 1434
|
10080 |
+
},
|
10081 |
+
{
|
10082 |
+
"epoch": 0.07144724231070837,
|
10083 |
+
"grad_norm": 0.22072280943393707,
|
10084 |
+
"learning_rate": 2.5660617214424145e-06,
|
10085 |
+
"loss": 11.7282,
|
10086 |
+
"step": 1435
|
10087 |
+
},
|
10088 |
+
{
|
10089 |
+
"epoch": 0.07149703132974022,
|
10090 |
+
"grad_norm": 0.17024008929729462,
|
10091 |
+
"learning_rate": 2.5202289642778375e-06,
|
10092 |
+
"loss": 11.8194,
|
10093 |
+
"step": 1436
|
10094 |
+
},
|
10095 |
+
{
|
10096 |
+
"epoch": 0.07154682034877208,
|
10097 |
+
"grad_norm": 0.22808553278446198,
|
10098 |
+
"learning_rate": 2.474803992504249e-06,
|
10099 |
+
"loss": 11.7873,
|
10100 |
+
"step": 1437
|
10101 |
+
},
|
10102 |
+
{
|
10103 |
+
"epoch": 0.07159660936780393,
|
10104 |
+
"grad_norm": 0.1550351232290268,
|
10105 |
+
"learning_rate": 2.429786996147154e-06,
|
10106 |
+
"loss": 11.7759,
|
10107 |
+
"step": 1438
|
10108 |
+
},
|
10109 |
+
{
|
10110 |
+
"epoch": 0.07164639838683579,
|
10111 |
+
"grad_norm": 0.19467805325984955,
|
10112 |
+
"learning_rate": 2.3851781635253147e-06,
|
10113 |
+
"loss": 11.7909,
|
10114 |
+
"step": 1439
|
10115 |
+
},
|
10116 |
+
{
|
10117 |
+
"epoch": 0.07169618740586764,
|
10118 |
+
"grad_norm": 0.18849214911460876,
|
10119 |
+
"learning_rate": 2.3409776812500894e-06,
|
10120 |
+
"loss": 11.7807,
|
10121 |
+
"step": 1440
|
10122 |
+
},
|
10123 |
+
{
|
10124 |
+
"epoch": 0.0717459764248995,
|
10125 |
+
"grad_norm": 0.21851764619350433,
|
10126 |
+
"learning_rate": 2.2971857342245606e-06,
|
10127 |
+
"loss": 11.7958,
|
10128 |
+
"step": 1441
|
10129 |
+
},
|
10130 |
+
{
|
10131 |
+
"epoch": 0.07179576544393135,
|
10132 |
+
"grad_norm": 0.2308024913072586,
|
10133 |
+
"learning_rate": 2.2538025056428214e-06,
|
10134 |
+
"loss": 11.7636,
|
10135 |
+
"step": 1442
|
10136 |
+
},
|
10137 |
+
{
|
10138 |
+
"epoch": 0.07184555446296319,
|
10139 |
+
"grad_norm": 0.21681872010231018,
|
10140 |
+
"learning_rate": 2.2108281769891217e-06,
|
10141 |
+
"loss": 11.7421,
|
10142 |
+
"step": 1443
|
10143 |
+
},
|
10144 |
+
{
|
10145 |
+
"epoch": 0.07189534348199504,
|
10146 |
+
"grad_norm": 0.16060197353363037,
|
10147 |
+
"learning_rate": 2.1682629280372456e-06,
|
10148 |
+
"loss": 11.8079,
|
10149 |
+
"step": 1444
|
10150 |
+
},
|
10151 |
+
{
|
10152 |
+
"epoch": 0.0719451325010269,
|
10153 |
+
"grad_norm": 0.20774561166763306,
|
10154 |
+
"learning_rate": 2.1261069368496033e-06,
|
10155 |
+
"loss": 11.7746,
|
10156 |
+
"step": 1445
|
10157 |
+
},
|
10158 |
+
{
|
10159 |
+
"epoch": 0.07199492152005875,
|
10160 |
+
"grad_norm": 0.21979907155036926,
|
10161 |
+
"learning_rate": 2.0843603797766287e-06,
|
10162 |
+
"loss": 11.8317,
|
10163 |
+
"step": 1446
|
10164 |
+
},
|
10165 |
+
{
|
10166 |
+
"epoch": 0.0720447105390906,
|
10167 |
+
"grad_norm": 0.12352941185235977,
|
10168 |
+
"learning_rate": 2.0430234314559482e-06,
|
10169 |
+
"loss": 11.8045,
|
10170 |
+
"step": 1447
|
10171 |
+
},
|
10172 |
+
{
|
10173 |
+
"epoch": 0.07209449955812246,
|
10174 |
+
"grad_norm": 0.1338411271572113,
|
10175 |
+
"learning_rate": 2.0020962648116704e-06,
|
10176 |
+
"loss": 11.8227,
|
10177 |
+
"step": 1448
|
10178 |
+
},
|
10179 |
+
{
|
10180 |
+
"epoch": 0.07214428857715431,
|
10181 |
+
"grad_norm": 0.24592214822769165,
|
10182 |
+
"learning_rate": 1.9615790510536967e-06,
|
10183 |
+
"loss": 11.8063,
|
10184 |
+
"step": 1449
|
10185 |
+
},
|
10186 |
+
{
|
10187 |
+
"epoch": 0.07219407759618617,
|
10188 |
+
"grad_norm": 0.16896864771842957,
|
10189 |
+
"learning_rate": 1.921471959676957e-06,
|
10190 |
+
"loss": 11.7331,
|
10191 |
+
"step": 1450
|
10192 |
+
},
|
10193 |
+
{
|
10194 |
+
"epoch": 0.07224386661521802,
|
10195 |
+
"grad_norm": 0.22436468303203583,
|
10196 |
+
"learning_rate": 1.8817751584607413e-06,
|
10197 |
+
"loss": 11.6698,
|
10198 |
+
"step": 1451
|
10199 |
+
},
|
10200 |
+
{
|
10201 |
+
"epoch": 0.07229365563424986,
|
10202 |
+
"grad_norm": 0.15300509333610535,
|
10203 |
+
"learning_rate": 1.8424888134679574e-06,
|
10204 |
+
"loss": 11.7759,
|
10205 |
+
"step": 1452
|
10206 |
+
},
|
10207 |
+
{
|
10208 |
+
"epoch": 0.07234344465328171,
|
10209 |
+
"grad_norm": 0.17076455056667328,
|
10210 |
+
"learning_rate": 1.8036130890444757e-06,
|
10211 |
+
"loss": 11.8276,
|
10212 |
+
"step": 1453
|
10213 |
+
},
|
10214 |
+
{
|
10215 |
+
"epoch": 0.07239323367231357,
|
10216 |
+
"grad_norm": 0.13483761250972748,
|
10217 |
+
"learning_rate": 1.7651481478184296e-06,
|
10218 |
+
"loss": 11.7844,
|
10219 |
+
"step": 1454
|
10220 |
+
},
|
10221 |
+
{
|
10222 |
+
"epoch": 0.07244302269134542,
|
10223 |
+
"grad_norm": 0.21753963828086853,
|
10224 |
+
"learning_rate": 1.7270941506995265e-06,
|
10225 |
+
"loss": 11.7257,
|
10226 |
+
"step": 1455
|
10227 |
+
},
|
10228 |
+
{
|
10229 |
+
"epoch": 0.07249281171037728,
|
10230 |
+
"grad_norm": 0.17371314764022827,
|
10231 |
+
"learning_rate": 1.6894512568783716e-06,
|
10232 |
+
"loss": 11.7394,
|
10233 |
+
"step": 1456
|
10234 |
+
},
|
10235 |
+
{
|
10236 |
+
"epoch": 0.07254260072940913,
|
10237 |
+
"grad_norm": 0.14611701667308807,
|
10238 |
+
"learning_rate": 1.6522196238258126e-06,
|
10239 |
+
"loss": 11.8025,
|
10240 |
+
"step": 1457
|
10241 |
+
},
|
10242 |
+
{
|
10243 |
+
"epoch": 0.07259238974844098,
|
10244 |
+
"grad_norm": 0.2837623655796051,
|
10245 |
+
"learning_rate": 1.6153994072922506e-06,
|
10246 |
+
"loss": 11.7385,
|
10247 |
+
"step": 1458
|
10248 |
+
},
|
10249 |
+
{
|
10250 |
+
"epoch": 0.07264217876747284,
|
10251 |
+
"grad_norm": 0.2298700213432312,
|
10252 |
+
"learning_rate": 1.5789907613070976e-06,
|
10253 |
+
"loss": 11.7627,
|
10254 |
+
"step": 1459
|
10255 |
+
},
|
10256 |
+
{
|
10257 |
+
"epoch": 0.07269196778650469,
|
10258 |
+
"grad_norm": 0.1822548806667328,
|
10259 |
+
"learning_rate": 1.542993838177953e-06,
|
10260 |
+
"loss": 11.795,
|
10261 |
+
"step": 1460
|
10262 |
+
},
|
10263 |
+
{
|
10264 |
+
"epoch": 0.07274175680553654,
|
10265 |
+
"grad_norm": 0.17248661816120148,
|
10266 |
+
"learning_rate": 1.5074087884901056e-06,
|
10267 |
+
"loss": 11.8025,
|
10268 |
+
"step": 1461
|
10269 |
+
},
|
10270 |
+
{
|
10271 |
+
"epoch": 0.07279154582456838,
|
10272 |
+
"grad_norm": 0.17918799817562103,
|
10273 |
+
"learning_rate": 1.472235761105878e-06,
|
10274 |
+
"loss": 11.8325,
|
10275 |
+
"step": 1462
|
10276 |
+
},
|
10277 |
+
{
|
10278 |
+
"epoch": 0.07284133484360024,
|
10279 |
+
"grad_norm": 0.19771920144557953,
|
10280 |
+
"learning_rate": 1.43747490316396e-06,
|
10281 |
+
"loss": 11.7967,
|
10282 |
+
"step": 1463
|
10283 |
+
},
|
10284 |
+
{
|
10285 |
+
"epoch": 0.07289112386263209,
|
10286 |
+
"grad_norm": 0.15453074872493744,
|
10287 |
+
"learning_rate": 1.4031263600788214e-06,
|
10288 |
+
"loss": 11.7959,
|
10289 |
+
"step": 1464
|
10290 |
+
},
|
10291 |
+
{
|
10292 |
+
"epoch": 0.07294091288166395,
|
10293 |
+
"grad_norm": 0.17252416908740997,
|
10294 |
+
"learning_rate": 1.3691902755401442e-06,
|
10295 |
+
"loss": 11.7703,
|
10296 |
+
"step": 1465
|
10297 |
+
},
|
10298 |
+
{
|
10299 |
+
"epoch": 0.0729907019006958,
|
10300 |
+
"grad_norm": 0.20003172755241394,
|
10301 |
+
"learning_rate": 1.3356667915121025e-06,
|
10302 |
+
"loss": 11.748,
|
10303 |
+
"step": 1466
|
10304 |
+
},
|
10305 |
+
{
|
10306 |
+
"epoch": 0.07304049091972765,
|
10307 |
+
"grad_norm": 0.19730688631534576,
|
10308 |
+
"learning_rate": 1.302556048232917e-06,
|
10309 |
+
"loss": 11.8133,
|
10310 |
+
"step": 1467
|
10311 |
+
},
|
10312 |
+
{
|
10313 |
+
"epoch": 0.07309027993875951,
|
10314 |
+
"grad_norm": 0.20058004558086395,
|
10315 |
+
"learning_rate": 1.2698581842141567e-06,
|
10316 |
+
"loss": 11.8004,
|
10317 |
+
"step": 1468
|
10318 |
+
},
|
10319 |
+
{
|
10320 |
+
"epoch": 0.07314006895779136,
|
10321 |
+
"grad_norm": 0.2041475921869278,
|
10322 |
+
"learning_rate": 1.2375733362402387e-06,
|
10323 |
+
"loss": 11.7774,
|
10324 |
+
"step": 1469
|
10325 |
+
},
|
10326 |
+
{
|
10327 |
+
"epoch": 0.07318985797682322,
|
10328 |
+
"grad_norm": 0.2926437258720398,
|
10329 |
+
"learning_rate": 1.2057016393677623e-06,
|
10330 |
+
"loss": 11.725,
|
10331 |
+
"step": 1470
|
10332 |
+
},
|
10333 |
+
{
|
10334 |
+
"epoch": 0.07323964699585507,
|
10335 |
+
"grad_norm": 0.2043275684118271,
|
10336 |
+
"learning_rate": 1.1742432269250536e-06,
|
10337 |
+
"loss": 11.8008,
|
10338 |
+
"step": 1471
|
10339 |
+
},
|
10340 |
+
{
|
10341 |
+
"epoch": 0.07328943601488691,
|
10342 |
+
"grad_norm": 0.1262151598930359,
|
10343 |
+
"learning_rate": 1.1431982305115108e-06,
|
10344 |
+
"loss": 11.8014,
|
10345 |
+
"step": 1472
|
10346 |
+
},
|
10347 |
+
{
|
10348 |
+
"epoch": 0.07333922503391876,
|
10349 |
+
"grad_norm": 0.2351101189851761,
|
10350 |
+
"learning_rate": 1.1125667799971262e-06,
|
10351 |
+
"loss": 11.753,
|
10352 |
+
"step": 1473
|
10353 |
+
},
|
10354 |
+
{
|
10355 |
+
"epoch": 0.07338901405295062,
|
10356 |
+
"grad_norm": 0.12109551578760147,
|
10357 |
+
"learning_rate": 1.0823490035218987e-06,
|
10358 |
+
"loss": 11.8044,
|
10359 |
+
"step": 1474
|
10360 |
+
},
|
10361 |
+
{
|
10362 |
+
"epoch": 0.07343880307198247,
|
10363 |
+
"grad_norm": 0.1387912482023239,
|
10364 |
+
"learning_rate": 1.0525450274953218e-06,
|
10365 |
+
"loss": 11.7976,
|
10366 |
+
"step": 1475
|
10367 |
+
},
|
10368 |
+
{
|
10369 |
+
"epoch": 0.07348859209101433,
|
10370 |
+
"grad_norm": 0.21857623755931854,
|
10371 |
+
"learning_rate": 1.0231549765958192e-06,
|
10372 |
+
"loss": 11.7771,
|
10373 |
+
"step": 1476
|
10374 |
+
},
|
10375 |
+
{
|
10376 |
+
"epoch": 0.07353838111004618,
|
10377 |
+
"grad_norm": 0.19753660261631012,
|
10378 |
+
"learning_rate": 9.94178973770299e-07,
|
10379 |
+
"loss": 11.7841,
|
10380 |
+
"step": 1477
|
10381 |
+
},
|
10382 |
+
{
|
10383 |
+
"epoch": 0.07358817012907803,
|
10384 |
+
"grad_norm": 0.1730002760887146,
|
10385 |
+
"learning_rate": 9.656171402335213e-07,
|
10386 |
+
"loss": 11.8072,
|
10387 |
+
"step": 1478
|
10388 |
+
},
|
10389 |
+
{
|
10390 |
+
"epoch": 0.07363795914810989,
|
10391 |
+
"grad_norm": 0.22716954350471497,
|
10392 |
+
"learning_rate": 9.374695954677104e-07,
|
10393 |
+
"loss": 11.7893,
|
10394 |
+
"step": 1479
|
10395 |
+
},
|
10396 |
+
{
|
10397 |
+
"epoch": 0.07368774816714174,
|
10398 |
+
"grad_norm": 0.19123832881450653,
|
10399 |
+
"learning_rate": 9.09736457221999e-07,
|
10400 |
+
"loss": 11.7598,
|
10401 |
+
"step": 1480
|
10402 |
+
},
|
10403 |
+
{
|
10404 |
+
"epoch": 0.0737375371861736,
|
10405 |
+
"grad_norm": 0.19865520298480988,
|
10406 |
+
"learning_rate": 8.824178415119177e-07,
|
10407 |
+
"loss": 11.7832,
|
10408 |
+
"step": 1481
|
10409 |
+
},
|
10410 |
+
{
|
10411 |
+
"epoch": 0.07378732620520544,
|
10412 |
+
"grad_norm": 0.21398580074310303,
|
10413 |
+
"learning_rate": 8.555138626189618e-07,
|
10414 |
+
"loss": 11.7743,
|
10415 |
+
"step": 1482
|
10416 |
+
},
|
10417 |
+
{
|
10418 |
+
"epoch": 0.07383711522423729,
|
10419 |
+
"grad_norm": 0.15314972400665283,
|
10420 |
+
"learning_rate": 8.290246330900476e-07,
|
10421 |
+
"loss": 11.7966,
|
10422 |
+
"step": 1483
|
10423 |
+
},
|
10424 |
+
{
|
10425 |
+
"epoch": 0.07388690424326914,
|
10426 |
+
"grad_norm": 0.18939357995986938,
|
10427 |
+
"learning_rate": 8.029502637371123e-07,
|
10428 |
+
"loss": 11.7908,
|
10429 |
+
"step": 1484
|
10430 |
+
},
|
10431 |
+
{
|
10432 |
+
"epoch": 0.073936693262301,
|
10433 |
+
"grad_norm": 0.19066807627677917,
|
10434 |
+
"learning_rate": 7.772908636365927e-07,
|
10435 |
+
"loss": 11.7809,
|
10436 |
+
"step": 1485
|
10437 |
+
},
|
10438 |
+
{
|
10439 |
+
"epoch": 0.07398648228133285,
|
10440 |
+
"grad_norm": 0.19176031649112701,
|
10441 |
+
"learning_rate": 7.520465401290033e-07,
|
10442 |
+
"loss": 11.8176,
|
10443 |
+
"step": 1486
|
10444 |
+
},
|
10445 |
+
{
|
10446 |
+
"epoch": 0.0740362713003647,
|
10447 |
+
"grad_norm": 0.23324772715568542,
|
10448 |
+
"learning_rate": 7.272173988184694e-07,
|
10449 |
+
"loss": 11.7523,
|
10450 |
+
"step": 1487
|
10451 |
+
},
|
10452 |
+
{
|
10453 |
+
"epoch": 0.07408606031939656,
|
10454 |
+
"grad_norm": 0.18824033439159393,
|
10455 |
+
"learning_rate": 7.028035435723058e-07,
|
10456 |
+
"loss": 11.8134,
|
10457 |
+
"step": 1488
|
10458 |
+
},
|
10459 |
+
{
|
10460 |
+
"epoch": 0.07413584933842841,
|
10461 |
+
"grad_norm": 0.16926607489585876,
|
10462 |
+
"learning_rate": 6.7880507652055e-07,
|
10463 |
+
"loss": 11.8194,
|
10464 |
+
"step": 1489
|
10465 |
+
},
|
10466 |
+
{
|
10467 |
+
"epoch": 0.07418563835746027,
|
10468 |
+
"grad_norm": 0.24031144380569458,
|
10469 |
+
"learning_rate": 6.552220980555635e-07,
|
10470 |
+
"loss": 11.773,
|
10471 |
+
"step": 1490
|
10472 |
+
},
|
10473 |
+
{
|
10474 |
+
"epoch": 0.07423542737649211,
|
10475 |
+
"grad_norm": 0.1799970269203186,
|
10476 |
+
"learning_rate": 6.320547068315974e-07,
|
10477 |
+
"loss": 11.7776,
|
10478 |
+
"step": 1491
|
10479 |
+
},
|
10480 |
+
{
|
10481 |
+
"epoch": 0.07428521639552396,
|
10482 |
+
"grad_norm": 0.12576720118522644,
|
10483 |
+
"learning_rate": 6.09302999764394e-07,
|
10484 |
+
"loss": 11.8256,
|
10485 |
+
"step": 1492
|
10486 |
+
},
|
10487 |
+
{
|
10488 |
+
"epoch": 0.07433500541455582,
|
10489 |
+
"grad_norm": 0.2028319239616394,
|
10490 |
+
"learning_rate": 5.869670720307641e-07,
|
10491 |
+
"loss": 11.8222,
|
10492 |
+
"step": 1493
|
10493 |
+
},
|
10494 |
+
{
|
10495 |
+
"epoch": 0.07438479443358767,
|
10496 |
+
"grad_norm": 0.17539533972740173,
|
10497 |
+
"learning_rate": 5.650470170681876e-07,
|
10498 |
+
"loss": 11.8153,
|
10499 |
+
"step": 1494
|
10500 |
+
},
|
10501 |
+
{
|
10502 |
+
"epoch": 0.07443458345261952,
|
10503 |
+
"grad_norm": 0.11721708625555038,
|
10504 |
+
"learning_rate": 5.435429265744585e-07,
|
10505 |
+
"loss": 11.8145,
|
10506 |
+
"step": 1495
|
10507 |
+
},
|
10508 |
+
{
|
10509 |
+
"epoch": 0.07448437247165138,
|
10510 |
+
"grad_norm": 0.21665911376476288,
|
10511 |
+
"learning_rate": 5.224548905072402e-07,
|
10512 |
+
"loss": 11.7634,
|
10513 |
+
"step": 1496
|
10514 |
+
},
|
10515 |
+
{
|
10516 |
+
"epoch": 0.07453416149068323,
|
10517 |
+
"grad_norm": 0.16707894206047058,
|
10518 |
+
"learning_rate": 5.017829970837329e-07,
|
10519 |
+
"loss": 11.7865,
|
10520 |
+
"step": 1497
|
10521 |
+
},
|
10522 |
+
{
|
10523 |
+
"epoch": 0.07458395050971509,
|
10524 |
+
"grad_norm": 0.1843748539686203,
|
10525 |
+
"learning_rate": 4.815273327803182e-07,
|
10526 |
+
"loss": 11.7698,
|
10527 |
+
"step": 1498
|
10528 |
+
},
|
10529 |
+
{
|
10530 |
+
"epoch": 0.07463373952874694,
|
10531 |
+
"grad_norm": 0.19585278630256653,
|
10532 |
+
"learning_rate": 4.616879823321374e-07,
|
10533 |
+
"loss": 11.7962,
|
10534 |
+
"step": 1499
|
10535 |
+
},
|
10536 |
+
{
|
10537 |
+
"epoch": 0.0746835285477788,
|
10538 |
+
"grad_norm": 0.2650960683822632,
|
10539 |
+
"learning_rate": 4.422650287328134e-07,
|
10540 |
+
"loss": 11.7915,
|
10541 |
+
"step": 1500
|
10542 |
+
},
|
10543 |
+
{
|
10544 |
+
"epoch": 0.07473331756681063,
|
10545 |
+
"grad_norm": 0.16555967926979065,
|
10546 |
+
"learning_rate": 4.232585532340183e-07,
|
10547 |
+
"loss": 11.7793,
|
10548 |
+
"step": 1501
|
10549 |
+
},
|
10550 |
+
{
|
10551 |
+
"epoch": 0.07478310658584249,
|
10552 |
+
"grad_norm": 0.14748094975948334,
|
10553 |
+
"learning_rate": 4.0466863534522893e-07,
|
10554 |
+
"loss": 11.7864,
|
10555 |
+
"step": 1502
|
10556 |
+
},
|
10557 |
+
{
|
10558 |
+
"epoch": 0.07483289560487434,
|
10559 |
+
"grad_norm": 0.14321599900722504,
|
10560 |
+
"learning_rate": 3.8649535283329376e-07,
|
10561 |
+
"loss": 11.7624,
|
10562 |
+
"step": 1503
|
10563 |
+
},
|
10564 |
+
{
|
10565 |
+
"epoch": 0.0748826846239062,
|
10566 |
+
"grad_norm": 0.17585203051567078,
|
10567 |
+
"learning_rate": 3.687387817221999e-07,
|
10568 |
+
"loss": 11.7895,
|
10569 |
+
"step": 1504
|
10570 |
+
},
|
10571 |
+
{
|
10572 |
+
"epoch": 0.07493247364293805,
|
10573 |
+
"grad_norm": 0.2230001986026764,
|
10574 |
+
"learning_rate": 3.5139899629268445e-07,
|
10575 |
+
"loss": 11.7779,
|
10576 |
+
"step": 1505
|
10577 |
+
},
|
10578 |
+
{
|
10579 |
+
"epoch": 0.0749822626619699,
|
10580 |
+
"grad_norm": 0.20294946432113647,
|
10581 |
+
"learning_rate": 3.3447606908196817e-07,
|
10582 |
+
"loss": 11.8003,
|
10583 |
+
"step": 1506
|
10584 |
+
},
|
10585 |
+
{
|
10586 |
+
"epoch": 0.07503205168100176,
|
10587 |
+
"grad_norm": 0.19467610120773315,
|
10588 |
+
"learning_rate": 3.179700708834332e-07,
|
10589 |
+
"loss": 11.7798,
|
10590 |
+
"step": 1507
|
10591 |
+
},
|
10592 |
+
{
|
10593 |
+
"epoch": 0.07508184070003361,
|
10594 |
+
"grad_norm": 0.1973147690296173,
|
10595 |
+
"learning_rate": 3.0188107074632376e-07,
|
10596 |
+
"loss": 11.8057,
|
10597 |
+
"step": 1508
|
10598 |
+
},
|
10599 |
+
{
|
10600 |
+
"epoch": 0.07513162971906547,
|
10601 |
+
"grad_norm": 0.19669723510742188,
|
10602 |
+
"learning_rate": 2.8620913597545707e-07,
|
10603 |
+
"loss": 11.7372,
|
10604 |
+
"step": 1509
|
10605 |
+
},
|
10606 |
+
{
|
10607 |
+
"epoch": 0.07518141873809732,
|
10608 |
+
"grad_norm": 0.14120781421661377,
|
10609 |
+
"learning_rate": 2.709543321309793e-07,
|
10610 |
+
"loss": 11.812,
|
10611 |
+
"step": 1510
|
10612 |
+
},
|
10613 |
+
{
|
10614 |
+
"epoch": 0.07523120775712916,
|
10615 |
+
"grad_norm": 0.20933443307876587,
|
10616 |
+
"learning_rate": 2.5611672302803257e-07,
|
10617 |
+
"loss": 11.7448,
|
10618 |
+
"step": 1511
|
10619 |
+
},
|
10620 |
+
{
|
10621 |
+
"epoch": 0.07528099677616101,
|
10622 |
+
"grad_norm": 0.23116809129714966,
|
10623 |
+
"learning_rate": 2.416963707365105e-07,
|
10624 |
+
"loss": 11.7646,
|
10625 |
+
"step": 1512
|
10626 |
+
},
|
10627 |
+
{
|
10628 |
+
"epoch": 0.07533078579519287,
|
10629 |
+
"grad_norm": 0.13899879157543182,
|
10630 |
+
"learning_rate": 2.2769333558083638e-07,
|
10631 |
+
"loss": 11.7938,
|
10632 |
+
"step": 1513
|
10633 |
+
},
|
10634 |
+
{
|
10635 |
+
"epoch": 0.07538057481422472,
|
10636 |
+
"grad_norm": 0.2576369643211365,
|
10637 |
+
"learning_rate": 2.141076761396521e-07,
|
10638 |
+
"loss": 11.8102,
|
10639 |
+
"step": 1514
|
10640 |
+
},
|
10641 |
+
{
|
10642 |
+
"epoch": 0.07543036383325658,
|
10643 |
+
"grad_norm": 0.2112661898136139,
|
10644 |
+
"learning_rate": 2.0093944924559626e-07,
|
10645 |
+
"loss": 11.7579,
|
10646 |
+
"step": 1515
|
10647 |
+
},
|
10648 |
+
{
|
10649 |
+
"epoch": 0.07548015285228843,
|
10650 |
+
"grad_norm": 0.13582190871238708,
|
10651 |
+
"learning_rate": 1.8818870998508208e-07,
|
10652 |
+
"loss": 11.8194,
|
10653 |
+
"step": 1516
|
10654 |
+
},
|
10655 |
+
{
|
10656 |
+
"epoch": 0.07552994187132028,
|
10657 |
+
"grad_norm": 0.1776101291179657,
|
10658 |
+
"learning_rate": 1.7585551169805315e-07,
|
10659 |
+
"loss": 11.8226,
|
10660 |
+
"step": 1517
|
10661 |
+
},
|
10662 |
+
{
|
10663 |
+
"epoch": 0.07557973089035214,
|
10664 |
+
"grad_norm": 0.23276415467262268,
|
10665 |
+
"learning_rate": 1.6393990597775022e-07,
|
10666 |
+
"loss": 11.7534,
|
10667 |
+
"step": 1518
|
10668 |
+
},
|
10669 |
+
{
|
10670 |
+
"epoch": 0.07562951990938399,
|
10671 |
+
"grad_norm": 0.2860177159309387,
|
10672 |
+
"learning_rate": 1.524419426705226e-07,
|
10673 |
+
"loss": 11.7577,
|
10674 |
+
"step": 1519
|
10675 |
+
},
|
10676 |
+
{
|
10677 |
+
"epoch": 0.07567930892841583,
|
10678 |
+
"grad_norm": 0.17294132709503174,
|
10679 |
+
"learning_rate": 1.4136166987559485e-07,
|
10680 |
+
"loss": 11.791,
|
10681 |
+
"step": 1520
|
10682 |
+
},
|
10683 |
+
{
|
10684 |
+
"epoch": 0.07572909794744768,
|
10685 |
+
"grad_norm": 0.2505719065666199,
|
10686 |
+
"learning_rate": 1.306991339448782e-07,
|
10687 |
+
"loss": 11.7587,
|
10688 |
+
"step": 1521
|
10689 |
+
},
|
10690 |
+
{
|
10691 |
+
"epoch": 0.07577888696647954,
|
10692 |
+
"grad_norm": 0.17550881206989288,
|
10693 |
+
"learning_rate": 1.204543794827595e-07,
|
10694 |
+
"loss": 11.7905,
|
10695 |
+
"step": 1522
|
10696 |
+
},
|
10697 |
+
{
|
10698 |
+
"epoch": 0.07582867598551139,
|
10699 |
+
"grad_norm": 0.22312860190868378,
|
10700 |
+
"learning_rate": 1.1062744934594582e-07,
|
10701 |
+
"loss": 11.7964,
|
10702 |
+
"step": 1523
|
10703 |
+
},
|
10704 |
+
{
|
10705 |
+
"epoch": 0.07587846500454325,
|
10706 |
+
"grad_norm": 0.15087145566940308,
|
10707 |
+
"learning_rate": 1.012183846432535e-07,
|
10708 |
+
"loss": 11.8222,
|
10709 |
+
"step": 1524
|
10710 |
+
},
|
10711 |
+
{
|
10712 |
+
"epoch": 0.0759282540235751,
|
10713 |
+
"grad_norm": 0.2809426486492157,
|
10714 |
+
"learning_rate": 9.222722473546386e-08,
|
10715 |
+
"loss": 11.776,
|
10716 |
+
"step": 1525
|
10717 |
+
},
|
10718 |
+
{
|
10719 |
+
"epoch": 0.07597804304260695,
|
10720 |
+
"grad_norm": 0.2292870730161667,
|
10721 |
+
"learning_rate": 8.365400723512328e-08,
|
10722 |
+
"loss": 11.7307,
|
10723 |
+
"step": 1526
|
10724 |
+
},
|
10725 |
+
{
|
10726 |
+
"epoch": 0.07602783206163881,
|
10727 |
+
"grad_norm": 0.17887090146541595,
|
10728 |
+
"learning_rate": 7.54987680064323e-08,
|
10729 |
+
"loss": 11.7752,
|
10730 |
+
"step": 1527
|
10731 |
+
},
|
10732 |
+
{
|
10733 |
+
"epoch": 0.07607762108067066,
|
10734 |
+
"grad_norm": 0.15502114593982697,
|
10735 |
+
"learning_rate": 6.776154116504562e-08,
|
10736 |
+
"loss": 11.7883,
|
10737 |
+
"step": 1528
|
10738 |
+
},
|
10739 |
+
{
|
10740 |
+
"epoch": 0.07612741009970252,
|
10741 |
+
"grad_norm": 0.22068634629249573,
|
10742 |
+
"learning_rate": 6.044235907798346e-08,
|
10743 |
+
"loss": 11.7661,
|
10744 |
+
"step": 1529
|
10745 |
+
},
|
10746 |
+
{
|
10747 |
+
"epoch": 0.07617719911873436,
|
10748 |
+
"grad_norm": 0.2096518725156784,
|
10749 |
+
"learning_rate": 5.354125236343155e-08,
|
10750 |
+
"loss": 11.7544,
|
10751 |
+
"step": 1530
|
10752 |
+
},
|
10753 |
+
{
|
10754 |
+
"epoch": 0.07622698813776621,
|
10755 |
+
"grad_norm": 0.15306280553340912,
|
10756 |
+
"learning_rate": 4.7058249890685743e-08,
|
10757 |
+
"loss": 11.7953,
|
10758 |
+
"step": 1531
|
10759 |
+
},
|
10760 |
+
{
|
10761 |
+
"epoch": 0.07627677715679806,
|
10762 |
+
"grad_norm": 0.19555924832820892,
|
10763 |
+
"learning_rate": 4.099337877995213e-08,
|
10764 |
+
"loss": 11.8177,
|
10765 |
+
"step": 1532
|
10766 |
+
},
|
10767 |
+
{
|
10768 |
+
"epoch": 0.07632656617582992,
|
10769 |
+
"grad_norm": 0.19804956018924713,
|
10770 |
+
"learning_rate": 3.534666440232481e-08,
|
10771 |
+
"loss": 11.7908,
|
10772 |
+
"step": 1533
|
10773 |
+
},
|
10774 |
+
{
|
10775 |
+
"epoch": 0.07637635519486177,
|
10776 |
+
"grad_norm": 0.1767924427986145,
|
10777 |
+
"learning_rate": 3.0118130379575005e-08,
|
10778 |
+
"loss": 11.7822,
|
10779 |
+
"step": 1534
|
10780 |
+
},
|
10781 |
+
{
|
10782 |
+
"epoch": 0.07642614421389363,
|
10783 |
+
"grad_norm": 0.35643061995506287,
|
10784 |
+
"learning_rate": 2.53077985841621e-08,
|
10785 |
+
"loss": 11.7708,
|
10786 |
+
"step": 1535
|
10787 |
+
},
|
10788 |
+
{
|
10789 |
+
"epoch": 0.07647593323292548,
|
10790 |
+
"grad_norm": 0.21405452489852905,
|
10791 |
+
"learning_rate": 2.091568913904496e-08,
|
10792 |
+
"loss": 11.779,
|
10793 |
+
"step": 1536
|
10794 |
+
},
|
10795 |
+
{
|
10796 |
+
"epoch": 0.07652572225195733,
|
10797 |
+
"grad_norm": 0.1269250214099884,
|
10798 |
+
"learning_rate": 1.694182041765968e-08,
|
10799 |
+
"loss": 11.8105,
|
10800 |
+
"step": 1537
|
10801 |
+
},
|
10802 |
+
{
|
10803 |
+
"epoch": 0.07657551127098919,
|
10804 |
+
"grad_norm": 0.15415985882282257,
|
10805 |
+
"learning_rate": 1.3386209043819708e-08,
|
10806 |
+
"loss": 11.8159,
|
10807 |
+
"step": 1538
|
10808 |
+
},
|
10809 |
+
{
|
10810 |
+
"epoch": 0.07662530029002104,
|
10811 |
+
"grad_norm": 0.15747161209583282,
|
10812 |
+
"learning_rate": 1.0248869891660295e-08,
|
10813 |
+
"loss": 11.8089,
|
10814 |
+
"step": 1539
|
10815 |
+
},
|
10816 |
+
{
|
10817 |
+
"epoch": 0.07667508930905288,
|
10818 |
+
"grad_norm": 0.16431812942028046,
|
10819 |
+
"learning_rate": 7.529816085549702e-09,
|
10820 |
+
"loss": 11.7756,
|
10821 |
+
"step": 1540
|
10822 |
+
},
|
10823 |
+
{
|
10824 |
+
"epoch": 0.07672487832808474,
|
10825 |
+
"grad_norm": 0.29294076561927795,
|
10826 |
+
"learning_rate": 5.22905900005588e-09,
|
10827 |
+
"loss": 11.7033,
|
10828 |
+
"step": 1541
|
10829 |
+
},
|
10830 |
+
{
|
10831 |
+
"epoch": 0.07677466734711659,
|
10832 |
+
"grad_norm": 0.2334837168455124,
|
10833 |
+
"learning_rate": 3.346608259890971e-09,
|
10834 |
+
"loss": 11.8231,
|
10835 |
+
"step": 1542
|
10836 |
+
},
|
10837 |
+
{
|
10838 |
+
"epoch": 0.07682445636614844,
|
10839 |
+
"grad_norm": 0.22579777240753174,
|
10840 |
+
"learning_rate": 1.882471739889091e-09,
|
10841 |
+
"loss": 11.8099,
|
10842 |
+
"step": 1543
|
10843 |
+
},
|
10844 |
+
{
|
10845 |
+
"epoch": 0.0768742453851803,
|
10846 |
+
"grad_norm": 0.19169573485851288,
|
10847 |
+
"learning_rate": 8.366555649397256e-10,
|
10848 |
+
"loss": 11.8139,
|
10849 |
+
"step": 1544
|
10850 |
+
},
|
10851 |
+
{
|
10852 |
+
"epoch": 0.07692403440421215,
|
10853 |
+
"grad_norm": 0.2151736468076706,
|
10854 |
+
"learning_rate": 2.0916410997662283e-10,
|
10855 |
+
"loss": 11.8025,
|
10856 |
+
"step": 1545
|
10857 |
+
},
|
10858 |
+
{
|
10859 |
+
"epoch": 0.076973823423244,
|
10860 |
+
"grad_norm": 0.1965971291065216,
|
10861 |
+
"learning_rate": 0.0,
|
10862 |
+
"loss": 11.8072,
|
10863 |
+
"step": 1546
|
10864 |
}
|
10865 |
],
|
10866 |
"logging_steps": 1,
|
|
|
10875 |
"should_evaluate": false,
|
10876 |
"should_log": false,
|
10877 |
"should_save": true,
|
10878 |
+
"should_training_stop": true
|
10879 |
},
|
10880 |
"attributes": {}
|
10881 |
}
|
10882 |
},
|
10883 |
+
"total_flos": 210793882189824.0,
|
10884 |
"train_batch_size": 2,
|
10885 |
"trial_name": null,
|
10886 |
"trial_params": null
|