whiteapple8222
committed on
Training in progress, step 1000, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:406006ca6679f6957d9f750eed044348938d2d20560a4ef28864563dfd066c37
 size 41581360
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:852b3306955bc3f134c5047b25cbd07a714454b7f1ad7d110c8e389d99265b8f
 size 21505924
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:01d818999d75f3b26872977d90b422207e2341c8d2be13ef90fc8c01cac01d66
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c7b5bf190dc871967c45091d9f1ab233b2d2ed62baca21fee5dfedb5718ffa5d
 size 1064
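
The four files above are Git LFS pointer files: each one records only the `oid sha256:` digest and `size` of the real binary, which is stored out of band. As a minimal illustration (not part of this commit), the sketch below shows how a downloaded blob could be checked against such a pointer; the file paths are hypothetical.

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Parse a Git LFS pointer file (version / oid / size lines) into fields."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify_blob(pointer_path: str, blob_path: str) -> bool:
    """Check that a downloaded blob matches the pointer's sha256 oid and size."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# Hypothetical local paths; adjust to wherever the checkpoint was downloaded.
print(verify_blob("last-checkpoint/adapter_model.safetensors.pointer",
                  "last-checkpoint/adapter_model.safetensors"))
```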
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
-"epoch": 0.
 "eval_steps": 250,
-"global_step":
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -5289,6 +5289,1764 @@
 "eval_samples_per_second": 32.051,
 "eval_steps_per_second": 16.026,
 "step": 750
 }
 ],
 "logging_steps": 1,
@@ -5303,12 +7061,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 2,
 "trial_name": null,
 "trial_params": null
 {
 "best_metric": null,
 "best_model_checkpoint": null,
+"epoch": 0.4005607850991388,
 "eval_steps": 250,
+"global_step": 1000,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,

 "eval_samples_per_second": 32.051,
 "eval_steps_per_second": 16.026,
 "step": 750
5292 |
+
},
|
5293 |
+
{
|
5294 |
+
"epoch": 0.3008211496094532,
|
5295 |
+
"grad_norm": 1.2960726022720337,
|
5296 |
+
"learning_rate": 2.962670371296996e-05,
|
5297 |
+
"loss": 2.397,
|
5298 |
+
"step": 751
|
5299 |
+
},
|
5300 |
+
{
|
5301 |
+
"epoch": 0.30122171039455237,
|
5302 |
+
"grad_norm": 1.0980877876281738,
|
5303 |
+
"learning_rate": 2.9401604854884357e-05,
|
5304 |
+
"loss": 2.2021,
|
5305 |
+
"step": 752
|
5306 |
+
},
|
5307 |
+
{
|
5308 |
+
"epoch": 0.3016222711796515,
|
5309 |
+
"grad_norm": 1.016575813293457,
|
5310 |
+
"learning_rate": 2.91772169218541e-05,
|
5311 |
+
"loss": 1.7718,
|
5312 |
+
"step": 753
|
5313 |
+
},
|
5314 |
+
{
|
5315 |
+
"epoch": 0.30202283196475066,
|
5316 |
+
"grad_norm": 1.2587579488754272,
|
5317 |
+
"learning_rate": 2.8953542173463133e-05,
|
5318 |
+
"loss": 1.8161,
|
5319 |
+
"step": 754
|
5320 |
+
},
|
5321 |
+
{
|
5322 |
+
"epoch": 0.3024233927498498,
|
5323 |
+
"grad_norm": 1.3101757764816284,
|
5324 |
+
"learning_rate": 2.8730582862113742e-05,
|
5325 |
+
"loss": 1.6577,
|
5326 |
+
"step": 755
|
5327 |
+
},
|
5328 |
+
{
|
5329 |
+
"epoch": 0.30282395353494895,
|
5330 |
+
"grad_norm": 1.2363008260726929,
|
5331 |
+
"learning_rate": 2.8508341233003654e-05,
|
5332 |
+
"loss": 2.0945,
|
5333 |
+
"step": 756
|
5334 |
+
},
|
5335 |
+
{
|
5336 |
+
"epoch": 0.30322451432004804,
|
5337 |
+
"grad_norm": 1.2293903827667236,
|
5338 |
+
"learning_rate": 2.828681952410366e-05,
|
5339 |
+
"loss": 1.8652,
|
5340 |
+
"step": 757
|
5341 |
+
},
|
5342 |
+
{
|
5343 |
+
"epoch": 0.3036250751051472,
|
5344 |
+
"grad_norm": 1.2226698398590088,
|
5345 |
+
"learning_rate": 2.8066019966134904e-05,
|
5346 |
+
"loss": 2.0198,
|
5347 |
+
"step": 758
|
5348 |
+
},
|
5349 |
+
{
|
5350 |
+
"epoch": 0.30402563589024634,
|
5351 |
+
"grad_norm": 1.4481924772262573,
|
5352 |
+
"learning_rate": 2.7845944782546453e-05,
|
5353 |
+
"loss": 1.861,
|
5354 |
+
"step": 759
|
5355 |
+
},
|
5356 |
+
{
|
5357 |
+
"epoch": 0.3044261966753455,
|
5358 |
+
"grad_norm": 1.2004791498184204,
|
5359 |
+
"learning_rate": 2.7626596189492983e-05,
|
5360 |
+
"loss": 2.192,
|
5361 |
+
"step": 760
|
5362 |
+
},
|
5363 |
+
{
|
5364 |
+
"epoch": 0.30482675746044463,
|
5365 |
+
"grad_norm": 1.1224644184112549,
|
5366 |
+
"learning_rate": 2.7407976395812418e-05,
|
5367 |
+
"loss": 2.108,
|
5368 |
+
"step": 761
|
5369 |
+
},
|
5370 |
+
{
|
5371 |
+
"epoch": 0.3052273182455438,
|
5372 |
+
"grad_norm": 1.3192898035049438,
|
5373 |
+
"learning_rate": 2.719008760300359e-05,
|
5374 |
+
"loss": 1.7614,
|
5375 |
+
"step": 762
|
5376 |
+
},
|
5377 |
+
{
|
5378 |
+
"epoch": 0.3056278790306429,
|
5379 |
+
"grad_norm": 1.3838907480239868,
|
5380 |
+
"learning_rate": 2.6972932005204267e-05,
|
5381 |
+
"loss": 1.9876,
|
5382 |
+
"step": 763
|
5383 |
+
},
|
5384 |
+
{
|
5385 |
+
"epoch": 0.306028439815742,
|
5386 |
+
"grad_norm": 1.4712343215942383,
|
5387 |
+
"learning_rate": 2.6756511789168925e-05,
|
5388 |
+
"loss": 2.0119,
|
5389 |
+
"step": 764
|
5390 |
+
},
|
5391 |
+
{
|
5392 |
+
"epoch": 0.30642900060084116,
|
5393 |
+
"grad_norm": 1.0120660066604614,
|
5394 |
+
"learning_rate": 2.654082913424668e-05,
|
5395 |
+
"loss": 2.0213,
|
5396 |
+
"step": 765
|
5397 |
+
},
|
5398 |
+
{
|
5399 |
+
"epoch": 0.3068295613859403,
|
5400 |
+
"grad_norm": 1.2688238620758057,
|
5401 |
+
"learning_rate": 2.6325886212359498e-05,
|
5402 |
+
"loss": 1.6258,
|
5403 |
+
"step": 766
|
5404 |
+
},
|
5405 |
+
{
|
5406 |
+
"epoch": 0.30723012217103945,
|
5407 |
+
"grad_norm": 1.1377239227294922,
|
5408 |
+
"learning_rate": 2.6111685187980262e-05,
|
5409 |
+
"loss": 1.7486,
|
5410 |
+
"step": 767
|
5411 |
+
},
|
5412 |
+
{
|
5413 |
+
"epoch": 0.3076306829561386,
|
5414 |
+
"grad_norm": 1.1769511699676514,
|
5415 |
+
"learning_rate": 2.589822821811083e-05,
|
5416 |
+
"loss": 1.6603,
|
5417 |
+
"step": 768
|
5418 |
+
},
|
5419 |
+
{
|
5420 |
+
"epoch": 0.30803124374123775,
|
5421 |
+
"grad_norm": 0.9931359887123108,
|
5422 |
+
"learning_rate": 2.5685517452260567e-05,
|
5423 |
+
"loss": 2.4301,
|
5424 |
+
"step": 769
|
5425 |
+
},
|
5426 |
+
{
|
5427 |
+
"epoch": 0.3084318045263369,
|
5428 |
+
"grad_norm": 1.1136318445205688,
|
5429 |
+
"learning_rate": 2.5473555032424533e-05,
|
5430 |
+
"loss": 1.8987,
|
5431 |
+
"step": 770
|
5432 |
+
},
|
5433 |
+
{
|
5434 |
+
"epoch": 0.308832365311436,
|
5435 |
+
"grad_norm": 0.8127551674842834,
|
5436 |
+
"learning_rate": 2.5262343093061936e-05,
|
5437 |
+
"loss": 1.9156,
|
5438 |
+
"step": 771
|
5439 |
+
},
|
5440 |
+
{
|
5441 |
+
"epoch": 0.30923292609653513,
|
5442 |
+
"grad_norm": 1.2461220026016235,
|
5443 |
+
"learning_rate": 2.5051883761074614e-05,
|
5444 |
+
"loss": 1.9488,
|
5445 |
+
"step": 772
|
5446 |
+
},
|
5447 |
+
{
|
5448 |
+
"epoch": 0.3096334868816343,
|
5449 |
+
"grad_norm": 1.610859990119934,
|
5450 |
+
"learning_rate": 2.4842179155785737e-05,
|
5451 |
+
"loss": 2.0289,
|
5452 |
+
"step": 773
|
5453 |
+
},
|
5454 |
+
{
|
5455 |
+
"epoch": 0.3100340476667334,
|
5456 |
+
"grad_norm": 1.442642092704773,
|
5457 |
+
"learning_rate": 2.4633231388918378e-05,
|
5458 |
+
"loss": 1.9204,
|
5459 |
+
"step": 774
|
5460 |
+
},
|
5461 |
+
{
|
5462 |
+
"epoch": 0.31043460845183257,
|
5463 |
+
"grad_norm": 0.9093045592308044,
|
5464 |
+
"learning_rate": 2.4425042564574184e-05,
|
5465 |
+
"loss": 1.9993,
|
5466 |
+
"step": 775
|
5467 |
+
},
|
5468 |
+
{
|
5469 |
+
"epoch": 0.3108351692369317,
|
5470 |
+
"grad_norm": 0.9235540628433228,
|
5471 |
+
"learning_rate": 2.4217614779212315e-05,
|
5472 |
+
"loss": 1.7345,
|
5473 |
+
"step": 776
|
5474 |
+
},
|
5475 |
+
{
|
5476 |
+
"epoch": 0.31123573002203087,
|
5477 |
+
"grad_norm": 1.202626347541809,
|
5478 |
+
"learning_rate": 2.4010950121628318e-05,
|
5479 |
+
"loss": 1.8141,
|
5480 |
+
"step": 777
|
5481 |
+
},
|
5482 |
+
{
|
5483 |
+
"epoch": 0.31163629080712996,
|
5484 |
+
"grad_norm": 0.9984288811683655,
|
5485 |
+
"learning_rate": 2.3805050672932928e-05,
|
5486 |
+
"loss": 1.8233,
|
5487 |
+
"step": 778
|
5488 |
+
},
|
5489 |
+
{
|
5490 |
+
"epoch": 0.3120368515922291,
|
5491 |
+
"grad_norm": 1.394755482673645,
|
5492 |
+
"learning_rate": 2.3599918506531337e-05,
|
5493 |
+
"loss": 1.8879,
|
5494 |
+
"step": 779
|
5495 |
+
},
|
5496 |
+
{
|
5497 |
+
"epoch": 0.31243741237732825,
|
5498 |
+
"grad_norm": 1.5760648250579834,
|
5499 |
+
"learning_rate": 2.339555568810221e-05,
|
5500 |
+
"loss": 1.9124,
|
5501 |
+
"step": 780
|
5502 |
+
},
|
5503 |
+
{
|
5504 |
+
"epoch": 0.3128379731624274,
|
5505 |
+
"grad_norm": 0.9450803995132446,
|
5506 |
+
"learning_rate": 2.3191964275576805e-05,
|
5507 |
+
"loss": 2.068,
|
5508 |
+
"step": 781
|
5509 |
+
},
|
5510 |
+
{
|
5511 |
+
"epoch": 0.31323853394752654,
|
5512 |
+
"grad_norm": 1.023253083229065,
|
5513 |
+
"learning_rate": 2.2989146319118425e-05,
|
5514 |
+
"loss": 2.166,
|
5515 |
+
"step": 782
|
5516 |
+
},
|
5517 |
+
{
|
5518 |
+
"epoch": 0.3136390947326257,
|
5519 |
+
"grad_norm": 1.1726493835449219,
|
5520 |
+
"learning_rate": 2.2787103861101655e-05,
|
5521 |
+
"loss": 1.9661,
|
5522 |
+
"step": 783
|
5523 |
+
},
|
5524 |
+
{
|
5525 |
+
"epoch": 0.31403965551772484,
|
5526 |
+
"grad_norm": 1.1509053707122803,
|
5527 |
+
"learning_rate": 2.2585838936091754e-05,
|
5528 |
+
"loss": 2.0738,
|
5529 |
+
"step": 784
|
5530 |
+
},
|
5531 |
+
{
|
5532 |
+
"epoch": 0.3144402163028239,
|
5533 |
+
"grad_norm": 1.0587100982666016,
|
5534 |
+
"learning_rate": 2.2385353570824308e-05,
|
5535 |
+
"loss": 1.8896,
|
5536 |
+
"step": 785
|
5537 |
+
},
|
5538 |
+
{
|
5539 |
+
"epoch": 0.3148407770879231,
|
5540 |
+
"grad_norm": 1.2842707633972168,
|
5541 |
+
"learning_rate": 2.2185649784184746e-05,
|
5542 |
+
"loss": 1.9896,
|
5543 |
+
"step": 786
|
5544 |
+
},
|
5545 |
+
{
|
5546 |
+
"epoch": 0.3152413378730222,
|
5547 |
+
"grad_norm": 1.2250515222549438,
|
5548 |
+
"learning_rate": 2.198672958718796e-05,
|
5549 |
+
"loss": 2.0292,
|
5550 |
+
"step": 787
|
5551 |
+
},
|
5552 |
+
{
|
5553 |
+
"epoch": 0.31564189865812137,
|
5554 |
+
"grad_norm": 0.9199315309524536,
|
5555 |
+
"learning_rate": 2.178859498295809e-05,
|
5556 |
+
"loss": 2.0993,
|
5557 |
+
"step": 788
|
5558 |
+
},
|
5559 |
+
{
|
5560 |
+
"epoch": 0.3160424594432205,
|
5561 |
+
"grad_norm": 1.379782795906067,
|
5562 |
+
"learning_rate": 2.159124796670843e-05,
|
5563 |
+
"loss": 1.9658,
|
5564 |
+
"step": 789
|
5565 |
+
},
|
5566 |
+
{
|
5567 |
+
"epoch": 0.31644302022831966,
|
5568 |
+
"grad_norm": 1.297568440437317,
|
5569 |
+
"learning_rate": 2.139469052572127e-05,
|
5570 |
+
"loss": 1.8385,
|
5571 |
+
"step": 790
|
5572 |
+
},
|
5573 |
+
{
|
5574 |
+
"epoch": 0.3168435810134188,
|
5575 |
+
"grad_norm": 1.2404309511184692,
|
5576 |
+
"learning_rate": 2.119892463932781e-05,
|
5577 |
+
"loss": 2.0786,
|
5578 |
+
"step": 791
|
5579 |
+
},
|
5580 |
+
{
|
5581 |
+
"epoch": 0.3172441417985179,
|
5582 |
+
"grad_norm": 0.8843573331832886,
|
5583 |
+
"learning_rate": 2.1003952278888382e-05,
|
5584 |
+
"loss": 2.226,
|
5585 |
+
"step": 792
|
5586 |
+
},
|
5587 |
+
{
|
5588 |
+
"epoch": 0.31764470258361704,
|
5589 |
+
"grad_norm": 1.3021501302719116,
|
5590 |
+
"learning_rate": 2.0809775407772503e-05,
|
5591 |
+
"loss": 2.0633,
|
5592 |
+
"step": 793
|
5593 |
+
},
|
5594 |
+
{
|
5595 |
+
"epoch": 0.3180452633687162,
|
5596 |
+
"grad_norm": 1.5391861200332642,
|
5597 |
+
"learning_rate": 2.0616395981339075e-05,
|
5598 |
+
"loss": 2.0367,
|
5599 |
+
"step": 794
|
5600 |
+
},
|
5601 |
+
{
|
5602 |
+
"epoch": 0.31844582415381534,
|
5603 |
+
"grad_norm": 1.118991732597351,
|
5604 |
+
"learning_rate": 2.042381594691678e-05,
|
5605 |
+
"loss": 1.94,
|
5606 |
+
"step": 795
|
5607 |
+
},
|
5608 |
+
{
|
5609 |
+
"epoch": 0.3188463849389145,
|
5610 |
+
"grad_norm": 1.1585220098495483,
|
5611 |
+
"learning_rate": 2.0232037243784475e-05,
|
5612 |
+
"loss": 1.8611,
|
5613 |
+
"step": 796
|
5614 |
+
},
|
5615 |
+
{
|
5616 |
+
"epoch": 0.31924694572401363,
|
5617 |
+
"grad_norm": 1.1401135921478271,
|
5618 |
+
"learning_rate": 2.0041061803151508e-05,
|
5619 |
+
"loss": 1.8363,
|
5620 |
+
"step": 797
|
5621 |
+
},
|
5622 |
+
{
|
5623 |
+
"epoch": 0.3196475065091128,
|
5624 |
+
"grad_norm": 1.2198765277862549,
|
5625 |
+
"learning_rate": 1.985089154813846e-05,
|
5626 |
+
"loss": 2.0256,
|
5627 |
+
"step": 798
|
5628 |
+
},
|
5629 |
+
{
|
5630 |
+
"epoch": 0.3200480672942119,
|
5631 |
+
"grad_norm": 1.0418367385864258,
|
5632 |
+
"learning_rate": 1.9661528393757744e-05,
|
5633 |
+
"loss": 1.9346,
|
5634 |
+
"step": 799
|
5635 |
+
},
|
5636 |
+
{
|
5637 |
+
"epoch": 0.320448628079311,
|
5638 |
+
"grad_norm": 1.1914174556732178,
|
5639 |
+
"learning_rate": 1.947297424689414e-05,
|
5640 |
+
"loss": 2.2865,
|
5641 |
+
"step": 800
|
5642 |
+
},
|
5643 |
+
{
|
5644 |
+
"epoch": 0.32084918886441016,
|
5645 |
+
"grad_norm": 1.3711671829223633,
|
5646 |
+
"learning_rate": 1.9285231006285853e-05,
|
5647 |
+
"loss": 2.423,
|
5648 |
+
"step": 801
|
5649 |
+
},
|
5650 |
+
{
|
5651 |
+
"epoch": 0.3212497496495093,
|
5652 |
+
"grad_norm": 1.17362380027771,
|
5653 |
+
"learning_rate": 1.9098300562505266e-05,
|
5654 |
+
"loss": 1.5378,
|
5655 |
+
"step": 802
|
5656 |
+
},
|
5657 |
+
{
|
5658 |
+
"epoch": 0.32165031043460846,
|
5659 |
+
"grad_norm": 0.9338995814323425,
|
5660 |
+
"learning_rate": 1.8912184797939803e-05,
|
5661 |
+
"loss": 2.2084,
|
5662 |
+
"step": 803
|
5663 |
+
},
|
5664 |
+
{
|
5665 |
+
"epoch": 0.3220508712197076,
|
5666 |
+
"grad_norm": 1.059409499168396,
|
5667 |
+
"learning_rate": 1.8726885586773212e-05,
|
5668 |
+
"loss": 2.3115,
|
5669 |
+
"step": 804
|
5670 |
+
},
|
5671 |
+
{
|
5672 |
+
"epoch": 0.32245143200480675,
|
5673 |
+
"grad_norm": 1.0587588548660278,
|
5674 |
+
"learning_rate": 1.854240479496643e-05,
|
5675 |
+
"loss": 1.6772,
|
5676 |
+
"step": 805
|
5677 |
+
},
|
5678 |
+
{
|
5679 |
+
"epoch": 0.3228519927899059,
|
5680 |
+
"grad_norm": 1.5739694833755493,
|
5681 |
+
"learning_rate": 1.835874428023905e-05,
|
5682 |
+
"loss": 1.8971,
|
5683 |
+
"step": 806
|
5684 |
+
},
|
5685 |
+
{
|
5686 |
+
"epoch": 0.323252553575005,
|
5687 |
+
"grad_norm": 1.096549391746521,
|
5688 |
+
"learning_rate": 1.817590589205035e-05,
|
5689 |
+
"loss": 1.9728,
|
5690 |
+
"step": 807
|
5691 |
+
},
|
5692 |
+
{
|
5693 |
+
"epoch": 0.32365311436010413,
|
5694 |
+
"grad_norm": 1.1896045207977295,
|
5695 |
+
"learning_rate": 1.7993891471580893e-05,
|
5696 |
+
"loss": 1.4642,
|
5697 |
+
"step": 808
|
5698 |
+
},
|
5699 |
+
{
|
5700 |
+
"epoch": 0.3240536751452033,
|
5701 |
+
"grad_norm": 1.2677874565124512,
|
5702 |
+
"learning_rate": 1.7812702851713904e-05,
|
5703 |
+
"loss": 1.9893,
|
5704 |
+
"step": 809
|
5705 |
+
},
|
5706 |
+
{
|
5707 |
+
"epoch": 0.3244542359303024,
|
5708 |
+
"grad_norm": 1.2322319746017456,
|
5709 |
+
"learning_rate": 1.763234185701673e-05,
|
5710 |
+
"loss": 1.6648,
|
5711 |
+
"step": 810
|
5712 |
+
},
|
5713 |
+
{
|
5714 |
+
"epoch": 0.3248547967154016,
|
5715 |
+
"grad_norm": 1.4150607585906982,
|
5716 |
+
"learning_rate": 1.74528103037226e-05,
|
5717 |
+
"loss": 1.8747,
|
5718 |
+
"step": 811
|
5719 |
+
},
|
5720 |
+
{
|
5721 |
+
"epoch": 0.3252553575005007,
|
5722 |
+
"grad_norm": 1.1502705812454224,
|
5723 |
+
"learning_rate": 1.7274109999712295e-05,
|
5724 |
+
"loss": 1.8867,
|
5725 |
+
"step": 812
|
5726 |
+
},
|
5727 |
+
{
|
5728 |
+
"epoch": 0.32565591828559987,
|
5729 |
+
"grad_norm": 1.0201531648635864,
|
5730 |
+
"learning_rate": 1.7096242744495837e-05,
|
5731 |
+
"loss": 1.9702,
|
5732 |
+
"step": 813
|
5733 |
+
},
|
5734 |
+
{
|
5735 |
+
"epoch": 0.32605647907069896,
|
5736 |
+
"grad_norm": 1.109731912612915,
|
5737 |
+
"learning_rate": 1.6919210329194533e-05,
|
5738 |
+
"loss": 1.852,
|
5739 |
+
"step": 814
|
5740 |
+
},
|
5741 |
+
{
|
5742 |
+
"epoch": 0.3264570398557981,
|
5743 |
+
"grad_norm": 1.1922898292541504,
|
5744 |
+
"learning_rate": 1.6743014536522873e-05,
|
5745 |
+
"loss": 1.7001,
|
5746 |
+
"step": 815
|
5747 |
+
},
|
5748 |
+
{
|
5749 |
+
"epoch": 0.32685760064089725,
|
5750 |
+
"grad_norm": 0.8632221221923828,
|
5751 |
+
"learning_rate": 1.6567657140770475e-05,
|
5752 |
+
"loss": 1.7701,
|
5753 |
+
"step": 816
|
5754 |
+
},
|
5755 |
+
{
|
5756 |
+
"epoch": 0.3272581614259964,
|
5757 |
+
"grad_norm": 1.106311321258545,
|
5758 |
+
"learning_rate": 1.6393139907784404e-05,
|
5759 |
+
"loss": 2.1824,
|
5760 |
+
"step": 817
|
5761 |
+
},
|
5762 |
+
{
|
5763 |
+
"epoch": 0.32765872221109554,
|
5764 |
+
"grad_norm": 1.2513147592544556,
|
5765 |
+
"learning_rate": 1.621946459495127e-05,
|
5766 |
+
"loss": 2.1743,
|
5767 |
+
"step": 818
|
5768 |
+
},
|
5769 |
+
{
|
5770 |
+
"epoch": 0.3280592829961947,
|
5771 |
+
"grad_norm": 1.183262825012207,
|
5772 |
+
"learning_rate": 1.6046632951179508e-05,
|
5773 |
+
"loss": 1.7933,
|
5774 |
+
"step": 819
|
5775 |
+
},
|
5776 |
+
{
|
5777 |
+
"epoch": 0.32845984378129384,
|
5778 |
+
"grad_norm": 1.4637755155563354,
|
5779 |
+
"learning_rate": 1.587464671688187e-05,
|
5780 |
+
"loss": 1.4244,
|
5781 |
+
"step": 820
|
5782 |
+
},
|
5783 |
+
{
|
5784 |
+
"epoch": 0.3288604045663929,
|
5785 |
+
"grad_norm": 1.0762394666671753,
|
5786 |
+
"learning_rate": 1.5703507623957848e-05,
|
5787 |
+
"loss": 1.9548,
|
5788 |
+
"step": 821
|
5789 |
+
},
|
5790 |
+
{
|
5791 |
+
"epoch": 0.3292609653514921,
|
5792 |
+
"grad_norm": 1.5148048400878906,
|
5793 |
+
"learning_rate": 1.553321739577619e-05,
|
5794 |
+
"loss": 1.8027,
|
5795 |
+
"step": 822
|
5796 |
+
},
|
5797 |
+
{
|
5798 |
+
"epoch": 0.3296615261365912,
|
5799 |
+
"grad_norm": 1.4003595113754272,
|
5800 |
+
"learning_rate": 1.5363777747157572e-05,
|
5801 |
+
"loss": 1.6786,
|
5802 |
+
"step": 823
|
5803 |
+
},
|
5804 |
+
{
|
5805 |
+
"epoch": 0.33006208692169037,
|
5806 |
+
"grad_norm": 1.1532552242279053,
|
5807 |
+
"learning_rate": 1.5195190384357404e-05,
|
5808 |
+
"loss": 2.0791,
|
5809 |
+
"step": 824
|
5810 |
+
},
|
5811 |
+
{
|
5812 |
+
"epoch": 0.3304626477067895,
|
5813 |
+
"grad_norm": 1.2315119504928589,
|
5814 |
+
"learning_rate": 1.5027457005048573e-05,
|
5815 |
+
"loss": 1.8975,
|
5816 |
+
"step": 825
|
5817 |
+
},
|
5818 |
+
{
|
5819 |
+
"epoch": 0.33086320849188866,
|
5820 |
+
"grad_norm": 1.5469486713409424,
|
5821 |
+
"learning_rate": 1.4860579298304312e-05,
|
5822 |
+
"loss": 1.9729,
|
5823 |
+
"step": 826
|
5824 |
+
},
|
5825 |
+
{
|
5826 |
+
"epoch": 0.3312637692769878,
|
5827 |
+
"grad_norm": 1.1972731351852417,
|
5828 |
+
"learning_rate": 1.4694558944581293e-05,
|
5829 |
+
"loss": 1.7436,
|
5830 |
+
"step": 827
|
5831 |
+
},
|
5832 |
+
{
|
5833 |
+
"epoch": 0.3316643300620869,
|
5834 |
+
"grad_norm": 1.2163808345794678,
|
5835 |
+
"learning_rate": 1.4529397615702656e-05,
|
5836 |
+
"loss": 1.9608,
|
5837 |
+
"step": 828
|
5838 |
+
},
|
5839 |
+
{
|
5840 |
+
"epoch": 0.33206489084718604,
|
5841 |
+
"grad_norm": 1.1661227941513062,
|
5842 |
+
"learning_rate": 1.4365096974841108e-05,
|
5843 |
+
"loss": 1.9195,
|
5844 |
+
"step": 829
|
5845 |
+
},
|
5846 |
+
{
|
5847 |
+
"epoch": 0.3324654516322852,
|
5848 |
+
"grad_norm": 1.3404620885849,
|
5849 |
+
"learning_rate": 1.4201658676502294e-05,
|
5850 |
+
"loss": 1.9545,
|
5851 |
+
"step": 830
|
5852 |
+
},
|
5853 |
+
{
|
5854 |
+
"epoch": 0.33286601241738434,
|
5855 |
+
"grad_norm": 1.3196473121643066,
|
5856 |
+
"learning_rate": 1.4039084366508092e-05,
|
5857 |
+
"loss": 1.789,
|
5858 |
+
"step": 831
|
5859 |
+
},
|
5860 |
+
{
|
5861 |
+
"epoch": 0.3332665732024835,
|
5862 |
+
"grad_norm": 1.4525930881500244,
|
5863 |
+
"learning_rate": 1.3877375681979943e-05,
|
5864 |
+
"loss": 1.9036,
|
5865 |
+
"step": 832
|
5866 |
+
},
|
5867 |
+
{
|
5868 |
+
"epoch": 0.33366713398758263,
|
5869 |
+
"grad_norm": 0.9184648990631104,
|
5870 |
+
"learning_rate": 1.3716534251322544e-05,
|
5871 |
+
"loss": 1.9158,
|
5872 |
+
"step": 833
|
5873 |
+
},
|
5874 |
+
{
|
5875 |
+
"epoch": 0.3340676947726818,
|
5876 |
+
"grad_norm": 1.1989598274230957,
|
5877 |
+
"learning_rate": 1.3556561694207338e-05,
|
5878 |
+
"loss": 1.6822,
|
5879 |
+
"step": 834
|
5880 |
+
},
|
5881 |
+
{
|
5882 |
+
"epoch": 0.33446825555778087,
|
5883 |
+
"grad_norm": 0.9898660182952881,
|
5884 |
+
"learning_rate": 1.339745962155613e-05,
|
5885 |
+
"loss": 2.1256,
|
5886 |
+
"step": 835
|
5887 |
+
},
|
5888 |
+
{
|
5889 |
+
"epoch": 0.33486881634288,
|
5890 |
+
"grad_norm": 1.368600606918335,
|
5891 |
+
"learning_rate": 1.3239229635525074e-05,
|
5892 |
+
"loss": 1.592,
|
5893 |
+
"step": 836
|
5894 |
+
},
|
5895 |
+
{
|
5896 |
+
"epoch": 0.33526937712797916,
|
5897 |
+
"grad_norm": 1.3661975860595703,
|
5898 |
+
"learning_rate": 1.3081873329488392e-05,
|
5899 |
+
"loss": 1.865,
|
5900 |
+
"step": 837
|
5901 |
+
},
|
5902 |
+
{
|
5903 |
+
"epoch": 0.3356699379130783,
|
5904 |
+
"grad_norm": 0.9782090187072754,
|
5905 |
+
"learning_rate": 1.2925392288022298e-05,
|
5906 |
+
"loss": 1.8389,
|
5907 |
+
"step": 838
|
5908 |
+
},
|
5909 |
+
{
|
5910 |
+
"epoch": 0.33607049869817746,
|
5911 |
+
"grad_norm": 1.5394399166107178,
|
5912 |
+
"learning_rate": 1.2769788086889134e-05,
|
5913 |
+
"loss": 2.0711,
|
5914 |
+
"step": 839
|
5915 |
+
},
|
5916 |
+
{
|
5917 |
+
"epoch": 0.3364710594832766,
|
5918 |
+
"grad_norm": 1.2607556581497192,
|
5919 |
+
"learning_rate": 1.2615062293021507e-05,
|
5920 |
+
"loss": 2.0338,
|
5921 |
+
"step": 840
|
5922 |
+
},
|
5923 |
+
{
|
5924 |
+
"epoch": 0.33687162026837575,
|
5925 |
+
"grad_norm": 1.4436510801315308,
|
5926 |
+
"learning_rate": 1.2461216464506454e-05,
|
5927 |
+
"loss": 2.074,
|
5928 |
+
"step": 841
|
5929 |
+
},
|
5930 |
+
{
|
5931 |
+
"epoch": 0.33727218105347484,
|
5932 |
+
"grad_norm": 1.4884815216064453,
|
5933 |
+
"learning_rate": 1.230825215056971e-05,
|
5934 |
+
"loss": 2.0801,
|
5935 |
+
"step": 842
|
5936 |
+
},
|
5937 |
+
{
|
5938 |
+
"epoch": 0.337672741838574,
|
5939 |
+
"grad_norm": 0.985197126865387,
|
5940 |
+
"learning_rate": 1.2156170891560258e-05,
|
5941 |
+
"loss": 2.1941,
|
5942 |
+
"step": 843
|
5943 |
+
},
|
5944 |
+
{
|
5945 |
+
"epoch": 0.33807330262367313,
|
5946 |
+
"grad_norm": 1.5094271898269653,
|
5947 |
+
"learning_rate": 1.2004974218934695e-05,
|
5948 |
+
"loss": 2.1544,
|
5949 |
+
"step": 844
|
5950 |
+
},
|
5951 |
+
{
|
5952 |
+
"epoch": 0.3384738634087723,
|
5953 |
+
"grad_norm": 1.4975275993347168,
|
5954 |
+
"learning_rate": 1.1854663655241805e-05,
|
5955 |
+
"loss": 2.3323,
|
5956 |
+
"step": 845
|
5957 |
+
},
|
5958 |
+
{
|
5959 |
+
"epoch": 0.3388744241938714,
|
5960 |
+
"grad_norm": 1.178804636001587,
|
5961 |
+
"learning_rate": 1.1705240714107302e-05,
|
5962 |
+
"loss": 2.0121,
|
5963 |
+
"step": 846
|
5964 |
+
},
|
5965 |
+
{
|
5966 |
+
"epoch": 0.3392749849789706,
|
5967 |
+
"grad_norm": 1.1911643743515015,
|
5968 |
+
"learning_rate": 1.1556706900218572e-05,
|
5969 |
+
"loss": 2.2518,
|
5970 |
+
"step": 847
|
5971 |
+
},
|
5972 |
+
{
|
5973 |
+
"epoch": 0.3396755457640697,
|
5974 |
+
"grad_norm": 1.2257444858551025,
|
5975 |
+
"learning_rate": 1.1409063709309442e-05,
|
5976 |
+
"loss": 1.9825,
|
5977 |
+
"step": 848
|
5978 |
+
},
|
5979 |
+
{
|
5980 |
+
"epoch": 0.3400761065491688,
|
5981 |
+
"grad_norm": 1.2943917512893677,
|
5982 |
+
"learning_rate": 1.126231262814521e-05,
|
5983 |
+
"loss": 1.9012,
|
5984 |
+
"step": 849
|
5985 |
+
},
|
5986 |
+
{
|
5987 |
+
"epoch": 0.34047666733426796,
|
5988 |
+
"grad_norm": 1.4267241954803467,
|
5989 |
+
"learning_rate": 1.1116455134507664e-05,
|
5990 |
+
"loss": 1.9565,
|
5991 |
+
"step": 850
|
5992 |
+
},
|
5993 |
+
{
|
5994 |
+
"epoch": 0.3408772281193671,
|
5995 |
+
"grad_norm": 1.4165103435516357,
|
5996 |
+
"learning_rate": 1.0971492697180096e-05,
|
5997 |
+
"loss": 1.9873,
|
5998 |
+
"step": 851
|
5999 |
+
},
|
6000 |
+
{
|
6001 |
+
"epoch": 0.34127778890446625,
|
6002 |
+
"grad_norm": 0.963735044002533,
|
6003 |
+
"learning_rate": 1.0827426775932658e-05,
|
6004 |
+
"loss": 1.9228,
|
6005 |
+
"step": 852
|
6006 |
+
},
|
6007 |
+
{
|
6008 |
+
"epoch": 0.3416783496895654,
|
6009 |
+
"grad_norm": 1.302394986152649,
|
6010 |
+
"learning_rate": 1.068425882150762e-05,
|
6011 |
+
"loss": 1.8196,
|
6012 |
+
"step": 853
|
6013 |
+
},
|
6014 |
+
{
|
6015 |
+
"epoch": 0.34207891047466454,
|
6016 |
+
"grad_norm": 1.6307578086853027,
|
6017 |
+
"learning_rate": 1.054199027560463e-05,
|
6018 |
+
"loss": 2.2207,
|
6019 |
+
"step": 854
|
6020 |
+
},
|
6021 |
+
{
|
6022 |
+
"epoch": 0.3424794712597637,
|
6023 |
+
"grad_norm": 1.1663156747817993,
|
6024 |
+
"learning_rate": 1.0400622570866425e-05,
|
6025 |
+
"loss": 1.3852,
|
6026 |
+
"step": 855
|
6027 |
+
},
|
6028 |
+
{
|
6029 |
+
"epoch": 0.3428800320448628,
|
6030 |
+
"grad_norm": 1.391180157661438,
|
6031 |
+
"learning_rate": 1.026015713086418e-05,
|
6032 |
+
"loss": 2.0258,
|
6033 |
+
"step": 856
|
6034 |
+
},
|
6035 |
+
{
|
6036 |
+
"epoch": 0.34328059282996193,
|
6037 |
+
"grad_norm": 0.9974930882453918,
|
6038 |
+
"learning_rate": 1.0120595370083318e-05,
|
6039 |
+
"loss": 2.1458,
|
6040 |
+
"step": 857
|
6041 |
+
},
|
6042 |
+
{
|
6043 |
+
"epoch": 0.3436811536150611,
|
6044 |
+
"grad_norm": 1.4504176378250122,
|
6045 |
+
"learning_rate": 9.98193869390922e-06,
|
6046 |
+
"loss": 2.5077,
|
6047 |
+
"step": 858
|
6048 |
+
},
|
6049 |
+
{
|
6050 |
+
"epoch": 0.3440817144001602,
|
6051 |
+
"grad_norm": 1.6607308387756348,
|
6052 |
+
"learning_rate": 9.844188498613116e-06,
|
6053 |
+
"loss": 1.9936,
|
6054 |
+
"step": 859
|
6055 |
+
},
|
6056 |
+
{
|
6057 |
+
"epoch": 0.34448227518525937,
|
6058 |
+
"grad_norm": 1.0695178508758545,
|
6059 |
+
"learning_rate": 9.707346171337894e-06,
|
6060 |
+
"loss": 1.5378,
|
6061 |
+
"step": 860
|
6062 |
+
},
|
6063 |
+
{
|
6064 |
+
"epoch": 0.3448828359703585,
|
6065 |
+
"grad_norm": 1.1689550876617432,
|
6066 |
+
"learning_rate": 9.57141309008428e-06,
|
6067 |
+
"loss": 2.1762,
|
6068 |
+
"step": 861
|
6069 |
+
},
|
6070 |
+
{
|
6071 |
+
"epoch": 0.34528339675545766,
|
6072 |
+
"grad_norm": 1.247942566871643,
|
6073 |
+
"learning_rate": 9.436390623696911e-06,
|
6074 |
+
"loss": 2.2111,
|
6075 |
+
"step": 862
|
6076 |
+
},
|
6077 |
+
{
|
6078 |
+
"epoch": 0.34568395754055675,
|
6079 |
+
"grad_norm": 1.3530837297439575,
|
6080 |
+
"learning_rate": 9.302280131850539e-06,
|
6081 |
+
"loss": 2.2161,
|
6082 |
+
"step": 863
|
6083 |
+
},
|
6084 |
+
{
|
6085 |
+
"epoch": 0.3460845183256559,
|
6086 |
+
"grad_norm": 0.9715630412101746,
|
6087 |
+
"learning_rate": 9.16908296503628e-06,
|
6088 |
+
"loss": 1.8675,
|
6089 |
+
"step": 864
|
6090 |
+
},
|
6091 |
+
{
|
6092 |
+
"epoch": 0.34648507911075505,
|
6093 |
+
"grad_norm": 1.2928553819656372,
|
6094 |
+
"learning_rate": 9.036800464548157e-06,
|
6095 |
+
"loss": 1.98,
|
6096 |
+
"step": 865
|
6097 |
+
},
|
6098 |
+
{
|
6099 |
+
"epoch": 0.3468856398958542,
|
6100 |
+
"grad_norm": 1.3303308486938477,
|
6101 |
+
"learning_rate": 8.905433962469489e-06,
|
6102 |
+
"loss": 2.0134,
|
6103 |
+
"step": 866
|
6104 |
+
},
|
6105 |
+
{
|
6106 |
+
"epoch": 0.34728620068095334,
|
6107 |
+
"grad_norm": 1.1572000980377197,
|
6108 |
+
"learning_rate": 8.774984781659467e-06,
|
6109 |
+
"loss": 1.9953,
|
6110 |
+
"step": 867
|
6111 |
+
},
|
6112 |
+
{
|
6113 |
+
"epoch": 0.3476867614660525,
|
6114 |
+
"grad_norm": 1.1844559907913208,
|
6115 |
+
"learning_rate": 8.645454235739903e-06,
|
6116 |
+
"loss": 2.4239,
|
6117 |
+
"step": 868
|
6118 |
+
},
|
6119 |
+
{
|
6120 |
+
"epoch": 0.34808732225115163,
|
6121 |
+
"grad_norm": 0.9763182401657104,
|
6122 |
+
"learning_rate": 8.516843629081984e-06,
|
6123 |
+
"loss": 2.2392,
|
6124 |
+
"step": 869
|
6125 |
+
},
|
6126 |
+
{
|
6127 |
+
"epoch": 0.3484878830362508,
|
6128 |
+
"grad_norm": 1.408148169517517,
|
6129 |
+
"learning_rate": 8.38915425679304e-06,
|
6130 |
+
"loss": 1.792,
|
6131 |
+
"step": 870
|
6132 |
+
},
|
6133 |
+
{
|
6134 |
+
"epoch": 0.34888844382134987,
|
6135 |
+
"grad_norm": 1.2217282056808472,
|
6136 |
+
"learning_rate": 8.262387404703653e-06,
|
6137 |
+
"loss": 1.5025,
|
6138 |
+
"step": 871
|
6139 |
+
},
|
6140 |
+
{
|
6141 |
+
"epoch": 0.349289004606449,
|
6142 |
+
"grad_norm": 1.0182693004608154,
|
6143 |
+
"learning_rate": 8.13654434935467e-06,
|
6144 |
+
"loss": 1.684,
|
6145 |
+
"step": 872
|
6146 |
+
},
|
6147 |
+
{
|
6148 |
+
"epoch": 0.34968956539154816,
|
6149 |
+
"grad_norm": 1.0316119194030762,
|
6150 |
+
"learning_rate": 8.011626357984181e-06,
|
6151 |
+
"loss": 1.9877,
|
6152 |
+
"step": 873
|
6153 |
+
},
|
6154 |
+
{
|
6155 |
+
"epoch": 0.3500901261766473,
|
6156 |
+
"grad_norm": 1.3248041868209839,
|
6157 |
+
"learning_rate": 7.887634688515e-06,
|
6158 |
+
"loss": 2.0565,
|
6159 |
+
"step": 874
|
6160 |
+
},
|
6161 |
+
{
|
6162 |
+
"epoch": 0.35049068696174646,
|
6163 |
+
"grad_norm": 1.2190947532653809,
|
6164 |
+
"learning_rate": 7.764570589541875e-06,
|
6165 |
+
"loss": 1.9459,
|
6166 |
+
"step": 875
|
6167 |
+
},
|
6168 |
+
{
|
6169 |
+
"epoch": 0.3508912477468456,
|
6170 |
+
"grad_norm": 1.182137131690979,
|
6171 |
+
"learning_rate": 7.642435300318907e-06,
|
6172 |
+
"loss": 2.0345,
|
6173 |
+
"step": 876
|
6174 |
+
},
|
6175 |
+
{
|
6176 |
+
"epoch": 0.35129180853194475,
|
6177 |
+
"grad_norm": 1.1659443378448486,
|
6178 |
+
"learning_rate": 7.521230050747086e-06,
|
6179 |
+
"loss": 2.1023,
|
6180 |
+
"step": 877
|
6181 |
+
},
|
6182 |
+
{
|
6183 |
+
"epoch": 0.35169236931704384,
|
6184 |
+
"grad_norm": 1.0156196355819702,
|
6185 |
+
"learning_rate": 7.400956061361974e-06,
|
6186 |
+
"loss": 1.8653,
|
6187 |
+
"step": 878
|
6188 |
+
},
|
6189 |
+
{
|
6190 |
+
"epoch": 0.352092930102143,
|
6191 |
+
"grad_norm": 1.0992286205291748,
|
6192 |
+
"learning_rate": 7.281614543321269e-06,
|
6193 |
+
"loss": 1.8927,
|
6194 |
+
"step": 879
|
6195 |
+
},
|
6196 |
+
{
|
6197 |
+
"epoch": 0.35249349088724213,
|
6198 |
+
"grad_norm": 1.1435526609420776,
|
6199 |
+
"learning_rate": 7.163206698392744e-06,
|
6200 |
+
"loss": 2.099,
|
6201 |
+
"step": 880
|
6202 |
+
},
|
6203 |
+
{
|
6204 |
+
"epoch": 0.3528940516723413,
|
6205 |
+
"grad_norm": 1.0202122926712036,
|
6206 |
+
"learning_rate": 7.045733718942094e-06,
|
6207 |
+
"loss": 1.8585,
|
6208 |
+
"step": 881
|
6209 |
+
},
|
6210 |
+
{
|
6211 |
+
"epoch": 0.3532946124574404,
|
6212 |
+
"grad_norm": 1.4951303005218506,
|
6213 |
+
"learning_rate": 6.929196787920899e-06,
|
6214 |
+
"loss": 1.695,
|
6215 |
+
"step": 882
|
6216 |
+
},
|
6217 |
+
{
|
6218 |
+
"epoch": 0.3536951732425396,
|
6219 |
+
"grad_norm": 1.1155850887298584,
|
6220 |
+
"learning_rate": 6.813597078854772e-06,
|
6221 |
+
"loss": 1.8532,
|
6222 |
+
"step": 883
|
6223 |
+
},
|
6224 |
+
{
|
6225 |
+
"epoch": 0.3540957340276387,
|
6226 |
+
"grad_norm": 1.260799527168274,
|
6227 |
+
"learning_rate": 6.698935755831492e-06,
|
6228 |
+
"loss": 1.9453,
|
6229 |
+
"step": 884
|
6230 |
+
},
|
6231 |
+
{
|
6232 |
+
"epoch": 0.3544962948127378,
|
6233 |
+
"grad_norm": 1.116297960281372,
|
6234 |
+
"learning_rate": 6.585213973489335e-06,
|
6235 |
+
"loss": 2.0739,
|
6236 |
+
"step": 885
|
6237 |
+
},
|
6238 |
+
{
|
6239 |
+
"epoch": 0.35489685559783696,
|
6240 |
+
"grad_norm": 1.355909824371338,
|
6241 |
+
"learning_rate": 6.472432877005341e-06,
|
6242 |
+
"loss": 2.0201,
|
6243 |
+
"step": 886
|
6244 |
+
},
|
6245 |
+
{
|
6246 |
+
"epoch": 0.3552974163829361,
|
6247 |
+
"grad_norm": 1.2766674757003784,
|
6248 |
+
"learning_rate": 6.360593602083942e-06,
|
6249 |
+
"loss": 1.8345,
|
6250 |
+
"step": 887
|
6251 |
+
},
|
6252 |
+
{
|
6253 |
+
"epoch": 0.35569797716803525,
|
6254 |
+
"grad_norm": 1.0762203931808472,
|
6255 |
+
"learning_rate": 6.2496972749453766e-06,
|
6256 |
+
"loss": 1.8632,
|
6257 |
+
"step": 888
|
6258 |
+
},
|
6259 |
+
{
|
6260 |
+
"epoch": 0.3560985379531344,
|
6261 |
+
"grad_norm": 1.4779655933380127,
|
6262 |
+
"learning_rate": 6.139745012314424e-06,
|
6263 |
+
"loss": 1.8136,
|
6264 |
+
"step": 889
|
6265 |
+
},
|
6266 |
+
{
|
6267 |
+
"epoch": 0.35649909873823354,
|
6268 |
+
"grad_norm": 1.2446835041046143,
|
6269 |
+
"learning_rate": 6.030737921409169e-06,
|
6270 |
+
"loss": 2.1419,
|
6271 |
+
"step": 890
|
6272 |
+
},
|
6273 |
+
{
|
6274 |
+
"epoch": 0.3568996595233327,
|
6275 |
+
"grad_norm": 1.3997869491577148,
|
6276 |
+
"learning_rate": 5.922677099929786e-06,
|
6277 |
+
"loss": 1.5943,
|
6278 |
+
"step": 891
|
6279 |
+
},
|
6280 |
+
{
|
6281 |
+
"epoch": 0.3573002203084318,
|
6282 |
+
"grad_norm": 1.0760163068771362,
|
6283 |
+
"learning_rate": 5.8155636360475385e-06,
|
6284 |
+
"loss": 1.7411,
|
6285 |
+
"step": 892
|
6286 |
+
},
|
6287 |
+
{
|
6288 |
+
"epoch": 0.35770078109353093,
|
6289 |
+
"grad_norm": 1.466942548751831,
|
6290 |
+
"learning_rate": 5.709398608393835e-06,
|
6291 |
+
"loss": 1.5269,
|
6292 |
+
"step": 893
|
6293 |
+
},
|
6294 |
+
{
|
6295 |
+
"epoch": 0.3581013418786301,
|
6296 |
+
"grad_norm": 1.3610737323760986,
|
6297 |
+
"learning_rate": 5.604183086049342e-06,
|
6298 |
+
"loss": 2.1299,
|
6299 |
+
"step": 894
|
6300 |
+
},
|
6301 |
+
{
|
6302 |
+
"epoch": 0.3585019026637292,
|
6303 |
+
"grad_norm": 1.5304806232452393,
|
6304 |
+
"learning_rate": 5.499918128533155e-06,
|
6305 |
+
"loss": 2.0253,
|
6306 |
+
"step": 895
|
6307 |
+
},
|
6308 |
+
{
|
6309 |
+
"epoch": 0.35890246344882837,
|
6310 |
+
"grad_norm": 0.9878894090652466,
|
6311 |
+
"learning_rate": 5.396604785792281e-06,
|
6312 |
+
"loss": 1.9011,
|
6313 |
+
"step": 896
|
6314 |
+
},
|
6315 |
+
{
|
6316 |
+
"epoch": 0.3593030242339275,
|
6317 |
+
"grad_norm": 1.012338638305664,
|
6318 |
+
"learning_rate": 5.294244098190926e-06,
|
6319 |
+
"loss": 1.8002,
|
6320 |
+
"step": 897
|
6321 |
+
},
|
6322 |
+
{
|
6323 |
+
"epoch": 0.35970358501902666,
|
6324 |
+
"grad_norm": 1.3276349306106567,
|
6325 |
+
"learning_rate": 5.192837096500058e-06,
|
6326 |
+
"loss": 2.0386,
|
6327 |
+
"step": 898
|
6328 |
+
},
|
6329 |
+
{
|
6330 |
+
"epoch": 0.36010414580412575,
|
6331 |
+
"grad_norm": 1.223771572113037,
|
6332 |
+
"learning_rate": 5.092384801887074e-06,
|
6333 |
+
"loss": 2.1836,
|
6334 |
+
"step": 899
|
6335 |
+
},
|
6336 |
+
{
|
6337 |
+
"epoch": 0.3605047065892249,
|
6338 |
+
"grad_norm": 0.9753492474555969,
|
6339 |
+
"learning_rate": 4.992888225905468e-06,
|
6340 |
+
"loss": 1.6111,
|
6341 |
+
"step": 900
|
6342 |
+
},
|
6343 |
+
{
|
6344 |
+
"epoch": 0.36090526737432405,
|
6345 |
+
"grad_norm": 0.7910905480384827,
|
6346 |
+
"learning_rate": 4.8943483704846475e-06,
|
6347 |
+
"loss": 1.903,
|
6348 |
+
"step": 901
|
6349 |
+
},
|
6350 |
+
{
|
6351 |
+
"epoch": 0.3613058281594232,
|
6352 |
+
"grad_norm": 1.2467719316482544,
|
6353 |
+
"learning_rate": 4.796766227919857e-06,
|
6354 |
+
"loss": 1.9438,
|
6355 |
+
"step": 902
|
6356 |
+
},
|
6357 |
+
{
|
6358 |
+
"epoch": 0.36170638894452234,
|
6359 |
+
"grad_norm": 1.1500917673110962,
|
6360 |
+
"learning_rate": 4.700142780862205e-06,
|
6361 |
+
"loss": 1.9579,
|
6362 |
+
"step": 903
|
6363 |
+
},
|
6364 |
+
{
|
6365 |
+
"epoch": 0.3621069497296215,
|
6366 |
+
"grad_norm": 1.1933996677398682,
|
6367 |
+
"learning_rate": 4.604479002308737e-06,
|
6368 |
+
"loss": 2.3006,
|
6369 |
+
"step": 904
|
6370 |
+
},
|
6371 |
+
{
|
6372 |
+
"epoch": 0.36250751051472063,
|
6373 |
+
"grad_norm": 1.1259969472885132,
|
6374 |
+
"learning_rate": 4.509775855592613e-06,
|
6375 |
+
"loss": 1.998,
|
6376 |
+
"step": 905
|
6377 |
+
},
|
6378 |
+
{
|
6379 |
+
"epoch": 0.3629080712998197,
|
6380 |
+
"grad_norm": 1.130823016166687,
|
6381 |
+
"learning_rate": 4.416034294373472e-06,
|
6382 |
+
"loss": 1.8769,
|
6383 |
+
"step": 906
|
6384 |
+
},
|
6385 |
+
{
|
6386 |
+
"epoch": 0.36330863208491887,
|
6387 |
+
"grad_norm": 1.02981698513031,
|
6388 |
+
"learning_rate": 4.323255262627846e-06,
|
6389 |
+
"loss": 1.9655,
|
6390 |
+
"step": 907
|
6391 |
+
},
|
6392 |
+
{
|
6393 |
+
"epoch": 0.363709192870018,
|
6394 |
+
"grad_norm": 0.9289757609367371,
|
6395 |
+
"learning_rate": 4.231439694639483e-06,
|
6396 |
+
"loss": 2.0099,
|
6397 |
+
"step": 908
|
6398 |
+
},
|
6399 |
+
{
|
6400 |
+
"epoch": 0.36410975365511716,
|
6401 |
+
"grad_norm": 1.203212857246399,
|
6402 |
+
"learning_rate": 4.140588514990162e-06,
|
6403 |
+
"loss": 1.9931,
|
6404 |
+
"step": 909
|
6405 |
+
},
|
6406 |
+
{
|
6407 |
+
"epoch": 0.3645103144402163,
|
6408 |
+
"grad_norm": 1.1724556684494019,
|
6409 |
+
"learning_rate": 4.050702638550275e-06,
|
6410 |
+
"loss": 1.662,
|
6411 |
+
"step": 910
|
6412 |
+
},
|
6413 |
+
{
|
6414 |
+
"epoch": 0.36491087522531546,
|
6415 |
+
"grad_norm": 1.212730050086975,
|
6416 |
+
"learning_rate": 3.961782970469563e-06,
|
6417 |
+
"loss": 2.0693,
|
6418 |
+
"step": 911
|
6419 |
+
},
|
6420 |
+
{
|
6421 |
+
"epoch": 0.3653114360104146,
|
6422 |
+
"grad_norm": 0.8744038939476013,
|
6423 |
+
"learning_rate": 3.873830406168111e-06,
|
6424 |
+
"loss": 1.9265,
|
6425 |
+
"step": 912
|
6426 |
+
},
|
6427 |
+
{
|
6428 |
+
"epoch": 0.3657119967955137,
|
6429 |
+
"grad_norm": 1.2729175090789795,
|
6430 |
+
"learning_rate": 3.7868458313272904e-06,
|
6431 |
+
"loss": 2.1908,
|
6432 |
+
"step": 913
|
6433 |
+
},
|
6434 |
+
{
|
6435 |
+
"epoch": 0.36611255758061284,
|
6436 |
+
"grad_norm": 1.3766783475875854,
|
6437 |
+
"learning_rate": 3.7008301218807716e-06,
|
6438 |
+
"loss": 1.9221,
|
6439 |
+
"step": 914
|
6440 |
+
},
|
6441 |
+
{
|
6442 |
+
"epoch": 0.366513118365712,
|
6443 |
+
"grad_norm": 1.1976795196533203,
|
6444 |
+
"learning_rate": 3.615784144005796e-06,
|
6445 |
+
"loss": 2.0681,
|
6446 |
+
"step": 915
|
6447 |
+
},
|
6448 |
+
{
|
6449 |
+
"epoch": 0.36691367915081113,
|
6450 |
+
"grad_norm": 1.2433587312698364,
|
6451 |
+
"learning_rate": 3.5317087541144377e-06,
|
6452 |
+
"loss": 1.7831,
|
6453 |
+
"step": 916
|
6454 |
+
},
|
6455 |
+
{
|
6456 |
+
"epoch": 0.3673142399359103,
|
6457 |
+
"grad_norm": 1.328249216079712,
|
6458 |
+
"learning_rate": 3.448604798844912e-06,
|
6459 |
+
"loss": 1.9766,
|
6460 |
+
"step": 917
|
6461 |
+
},
|
6462 |
+
{
|
6463 |
+
"epoch": 0.3677148007210094,
|
6464 |
+
"grad_norm": 0.9622407555580139,
|
6465 |
+
"learning_rate": 3.3664731150531482e-06,
|
6466 |
+
"loss": 1.8986,
|
6467 |
+
"step": 918
|
6468 |
+
},
|
6469 |
+
{
|
6470 |
+
"epoch": 0.3681153615061086,
|
6471 |
+
"grad_norm": 1.2278088331222534,
|
6472 |
+
"learning_rate": 3.2853145298042953e-06,
|
6473 |
+
"loss": 1.9074,
|
6474 |
+
"step": 919
|
6475 |
+
},
|
6476 |
+
{
|
6477 |
+
"epoch": 0.36851592229120766,
|
6478 |
+
"grad_norm": 1.2879400253295898,
|
6479 |
+
"learning_rate": 3.2051298603643753e-06,
|
6480 |
+
"loss": 2.0471,
|
6481 |
+
"step": 920
|
6482 |
+
},
|
6483 |
+
{
|
6484 |
+
"epoch": 0.3689164830763068,
|
6485 |
+
"grad_norm": 1.2228416204452515,
|
6486 |
+
"learning_rate": 3.1259199141921435e-06,
|
6487 |
+
"loss": 1.8044,
|
6488 |
+
"step": 921
|
6489 |
+
},
|
6490 |
+
{
|
6491 |
+
"epoch": 0.36931704386140596,
|
6492 |
+
"grad_norm": 1.045020341873169,
|
6493 |
+
"learning_rate": 3.047685488930874e-06,
|
6494 |
+
"loss": 1.8997,
|
6495 |
+
"step": 922
|
6496 |
+
},
|
6497 |
+
{
|
6498 |
+
"epoch": 0.3697176046465051,
|
6499 |
+
"grad_norm": 0.9093934893608093,
|
6500 |
+
"learning_rate": 2.970427372400353e-06,
|
6501 |
+
"loss": 1.651,
|
6502 |
+
"step": 923
|
6503 |
+
},
|
6504 |
+
{
|
6505 |
+
"epoch": 0.37011816543160425,
|
6506 |
+
"grad_norm": 1.3145874738693237,
|
6507 |
+
"learning_rate": 2.894146342588977e-06,
|
6508 |
+
"loss": 2.1705,
|
6509 |
+
"step": 924
|
6510 |
+
},
|
6511 |
+
{
|
6512 |
+
"epoch": 0.3705187262167034,
|
6513 |
+
"grad_norm": 1.227515697479248,
|
6514 |
+
"learning_rate": 2.818843167645835e-06,
|
6515 |
+
"loss": 2.4005,
|
6516 |
+
"step": 925
|
6517 |
+
},
|
6518 |
+
{
|
6519 |
+
"epoch": 0.37091928700180254,
|
6520 |
+
"grad_norm": 0.9281034469604492,
|
6521 |
+
"learning_rate": 2.744518605873092e-06,
|
6522 |
+
"loss": 2.142,
|
6523 |
+
"step": 926
|
6524 |
+
},
|
6525 |
+
{
|
6526 |
+
"epoch": 0.37131984778690164,
|
6527 |
+
"grad_norm": 1.3893688917160034,
|
6528 |
+
"learning_rate": 2.6711734057182415e-06,
|
6529 |
+
"loss": 1.9674,
|
6530 |
+
"step": 927
|
6531 |
+
},
|
6532 |
+
{
|
6533 |
+
"epoch": 0.3717204085720008,
|
6534 |
+
"grad_norm": 1.1843217611312866,
|
6535 |
+
"learning_rate": 2.5988083057666533e-06,
|
6536 |
+
"loss": 2.0467,
|
6537 |
+
"step": 928
|
6538 |
+
},
|
6539 |
+
{
|
6540 |
+
"epoch": 0.37212096935709993,
|
6541 |
+
"grad_norm": 1.1903239488601685,
|
6542 |
+
"learning_rate": 2.5274240347340717e-06,
|
6543 |
+
"loss": 1.9037,
|
6544 |
+
"step": 929
|
6545 |
+
},
|
6546 |
+
{
|
6547 |
+
"epoch": 0.3725215301421991,
|
6548 |
+
"grad_norm": 1.1729291677474976,
|
6549 |
+
"learning_rate": 2.4570213114592954e-06,
|
6550 |
+
"loss": 1.9407,
|
6551 |
+
"step": 930
|
6552 |
+
},
|
6553 |
+
{
|
6554 |
+
"epoch": 0.3729220909272982,
|
6555 |
+
"grad_norm": 1.4715547561645508,
|
6556 |
+
"learning_rate": 2.3876008448969976e-06,
|
6557 |
+
"loss": 2.1013,
|
6558 |
+
"step": 931
|
6559 |
+
},
|
6560 |
+
{
|
6561 |
+
"epoch": 0.37332265171239737,
|
6562 |
+
"grad_norm": 1.2586127519607544,
|
6563 |
+
"learning_rate": 2.3191633341104856e-06,
|
6564 |
+
"loss": 1.8211,
|
6565 |
+
"step": 932
|
6566 |
+
},
|
6567 |
+
{
|
6568 |
+
"epoch": 0.3737232124974965,
|
6569 |
+
"grad_norm": 1.3188387155532837,
|
6570 |
+
"learning_rate": 2.2517094682647397e-06,
|
6571 |
+
"loss": 1.7527,
|
6572 |
+
"step": 933
|
6573 |
+
},
|
6574 |
+
{
|
6575 |
+
"epoch": 0.3741237732825956,
|
6576 |
+
"grad_norm": 1.0343830585479736,
|
6577 |
+
"learning_rate": 2.1852399266194314e-06,
|
6578 |
+
"loss": 1.4433,
|
6579 |
+
"step": 934
|
6580 |
+
},
|
6581 |
+
{
|
6582 |
+
"epoch": 0.37452433406769475,
|
6583 |
+
"grad_norm": 1.2205039262771606,
|
6584 |
+
"learning_rate": 2.119755378522137e-06,
|
6585 |
+
"loss": 1.6247,
|
6586 |
+
"step": 935
|
6587 |
+
},
|
6588 |
+
{
|
6589 |
+
"epoch": 0.3749248948527939,
|
6590 |
+
"grad_norm": 1.367773175239563,
|
6591 |
+
"learning_rate": 2.05525648340148e-06,
|
6592 |
+
"loss": 1.9733,
|
6593 |
+
"step": 936
|
6594 |
+
},
|
6595 |
+
{
|
6596 |
+
"epoch": 0.37532545563789305,
|
6597 |
+
"grad_norm": 1.1995794773101807,
|
6598 |
+
"learning_rate": 1.9917438907606556e-06,
|
6599 |
+
"loss": 2.2017,
|
6600 |
+
"step": 937
|
6601 |
+
},
|
6602 |
+
{
|
6603 |
+
"epoch": 0.3757260164229922,
|
6604 |
+
"grad_norm": 1.0902953147888184,
|
6605 |
+
"learning_rate": 1.9292182401707603e-06,
|
6606 |
+
"loss": 1.6807,
|
6607 |
+
"step": 938
|
6608 |
+
},
|
6609 |
+
{
|
6610 |
+
"epoch": 0.37612657720809134,
|
6611 |
+
"grad_norm": 1.0587186813354492,
|
6612 |
+
"learning_rate": 1.8676801612643957e-06,
|
6613 |
+
"loss": 1.7358,
|
6614 |
+
"step": 939
|
6615 |
+
},
|
6616 |
+
{
|
6617 |
+
"epoch": 0.3765271379931905,
|
6618 |
+
"grad_norm": 1.0900659561157227,
|
6619 |
+
"learning_rate": 1.8071302737293295e-06,
|
6620 |
+
"loss": 1.9882,
|
6621 |
+
"step": 940
|
6622 |
+
},
|
6623 |
+
{
|
6624 |
+
"epoch": 0.37692769877828963,
|
6625 |
+
"grad_norm": 1.2947819232940674,
|
6626 |
+
"learning_rate": 1.747569187302267e-06,
|
6627 |
+
"loss": 1.8446,
|
6628 |
+
"step": 941
|
6629 |
+
},
|
6630 |
+
{
|
6631 |
+
"epoch": 0.3773282595633887,
|
6632 |
+
"grad_norm": 0.9651957750320435,
|
6633 |
+
"learning_rate": 1.6889975017626903e-06,
|
6634 |
+
"loss": 1.9641,
|
6635 |
+
"step": 942
|
6636 |
+
},
|
6637 |
+
{
|
6638 |
+
"epoch": 0.37772882034848787,
|
6639 |
+
"grad_norm": 1.0325901508331299,
|
6640 |
+
"learning_rate": 1.6314158069267948e-06,
|
6641 |
+
"loss": 1.9663,
|
6642 |
+
"step": 943
|
6643 |
+
},
|
6644 |
+
{
|
6645 |
+
"epoch": 0.378129381133587,
|
6646 |
+
"grad_norm": 1.0935128927230835,
|
6647 |
+
"learning_rate": 1.574824682641629e-06,
|
6648 |
+
"loss": 1.6413,
|
6649 |
+
"step": 944
|
6650 |
+
},
|
6651 |
+
{
|
6652 |
+
"epoch": 0.37852994191868616,
|
6653 |
+
"grad_norm": 1.0227198600769043,
|
6654 |
+
"learning_rate": 1.5192246987791981e-06,
|
6655 |
+
"loss": 1.8915,
|
6656 |
+
"step": 945
|
6657 |
+
},
|
6658 |
+
{
|
6659 |
+
"epoch": 0.3789305027037853,
|
6660 |
+
"grad_norm": 1.019073486328125,
|
6661 |
+
"learning_rate": 1.4646164152307018e-06,
|
6662 |
+
"loss": 1.8609,
|
6663 |
+
"step": 946
|
6664 |
+
},
|
6665 |
+
{
|
6666 |
+
"epoch": 0.37933106348888446,
|
6667 |
+
"grad_norm": 1.2001268863677979,
|
6668 |
+
"learning_rate": 1.411000381900951e-06,
|
6669 |
+
"loss": 1.95,
|
6670 |
+
"step": 947
|
6671 |
+
},
|
6672 |
+
{
|
6673 |
+
"epoch": 0.3797316242739836,
|
6674 |
+
"grad_norm": 1.1757829189300537,
|
6675 |
+
"learning_rate": 1.3583771387028265e-06,
|
6676 |
+
"loss": 1.6127,
|
6677 |
+
"step": 948
|
6678 |
+
},
|
6679 |
+
{
|
6680 |
+
"epoch": 0.3801321850590827,
|
6681 |
+
"grad_norm": 0.9995384216308594,
|
6682 |
+
"learning_rate": 1.3067472155517735e-06,
|
6683 |
+
"loss": 1.7688,
|
6684 |
+
"step": 949
|
6685 |
+
},
|
6686 |
+
{
|
6687 |
+
"epoch": 0.38053274584418184,
|
6688 |
+
"grad_norm": 0.9961532354354858,
|
6689 |
+
"learning_rate": 1.2561111323605712e-06,
|
6690 |
+
"loss": 1.7373,
|
6691 |
+
"step": 950
|
6692 |
+
},
|
6693 |
+
{
|
6694 |
+
"epoch": 0.380933306629281,
|
6695 |
+
"grad_norm": 1.0140630006790161,
|
6696 |
+
"learning_rate": 1.2064693990339936e-06,
|
6697 |
+
"loss": 1.901,
|
6698 |
+
"step": 951
|
6699 |
+
},
|
6700 |
+
{
|
6701 |
+
"epoch": 0.38133386741438013,
|
6702 |
+
"grad_norm": 1.1691038608551025,
|
6703 |
+
"learning_rate": 1.157822515463758e-06,
|
6704 |
+
"loss": 1.8546,
|
6705 |
+
"step": 952
|
6706 |
+
},
|
6707 |
+
{
|
6708 |
+
"epoch": 0.3817344281994793,
|
6709 |
+
"grad_norm": 1.1697040796279907,
|
6710 |
+
"learning_rate": 1.1101709715234386e-06,
|
6711 |
+
"loss": 1.7615,
|
6712 |
+
"step": 953
|
6713 |
+
},
|
6714 |
+
{
|
6715 |
+
"epoch": 0.3821349889845784,
|
6716 |
+
"grad_norm": 1.594868540763855,
|
6717 |
+
"learning_rate": 1.0635152470635512e-06,
|
6718 |
+
"loss": 1.9847,
|
6719 |
+
"step": 954
|
6720 |
+
},
|
6721 |
+
{
|
6722 |
+
"epoch": 0.3825355497696776,
|
6723 |
+
"grad_norm": 1.151361107826233,
|
6724 |
+
"learning_rate": 1.0178558119067315e-06,
|
6725 |
+
"loss": 2.078,
|
6726 |
+
"step": 955
|
6727 |
+
},
|
6728 |
+
{
|
6729 |
+
"epoch": 0.38293611055477667,
|
6730 |
+
"grad_norm": 0.9073551297187805,
|
6731 |
+
"learning_rate": 9.731931258429638e-07,
|
6732 |
+
"loss": 1.9805,
|
6733 |
+
"step": 956
|
6734 |
+
},
|
6735 |
+
{
|
6736 |
+
"epoch": 0.3833366713398758,
|
6737 |
+
"grad_norm": 1.2325596809387207,
|
6738 |
+
"learning_rate": 9.295276386250274e-07,
|
6739 |
+
"loss": 2.0565,
|
6740 |
+
"step": 957
|
6741 |
+
},
|
6742 |
+
{
|
6743 |
+
"epoch": 0.38373723212497496,
|
6744 |
+
"grad_norm": 1.184550166130066,
|
6745 |
+
"learning_rate": 8.868597899638898e-07,
|
6746 |
+
"loss": 2.1695,
|
6747 |
+
"step": 958
|
6748 |
+
},
|
6749 |
+
{
|
6750 |
+
"epoch": 0.3841377929100741,
|
6751 |
+
"grad_norm": 1.1788114309310913,
|
6752 |
+
"learning_rate": 8.451900095242881e-07,
|
6753 |
+
"loss": 2.0578,
|
6754 |
+
"step": 959
|
6755 |
+
},
|
6756 |
+
{
|
6757 |
+
"epoch": 0.38453835369517325,
|
6758 |
+
"grad_norm": 1.1700098514556885,
|
6759 |
+
"learning_rate": 8.04518716920466e-07,
|
6760 |
+
"loss": 1.8347,
|
6761 |
+
"step": 960
|
6762 |
+
},
|
6763 |
+
{
|
6764 |
+
"epoch": 0.3849389144802724,
|
6765 |
+
"grad_norm": 0.935607373714447,
|
6766 |
+
"learning_rate": 7.648463217118984e-07,
|
6767 |
+
"loss": 1.8536,
|
6768 |
+
"step": 961
|
6769 |
+
},
|
6770 |
+
{
|
6771 |
+
"epoch": 0.38533947526537154,
|
6772 |
+
"grad_norm": 0.9701693058013916,
|
6773 |
+
"learning_rate": 7.261732233991513e-07,
|
6774 |
+
"loss": 1.6937,
|
6775 |
+
"step": 962
|
6776 |
+
},
|
6777 |
+
{
|
6778 |
+
"epoch": 0.38574003605047064,
|
6779 |
+
"grad_norm": 1.195351243019104,
|
6780 |
+
"learning_rate": 6.884998114198959e-07,
|
6781 |
+
"loss": 2.1418,
|
6782 |
+
"step": 963
|
6783 |
+
},
|
6784 |
+
{
|
6785 |
+
"epoch": 0.3861405968355698,
|
6786 |
+
"grad_norm": 1.6363762617111206,
|
6787 |
+
"learning_rate": 6.518264651449779e-07,
|
6788 |
+
"loss": 2.2872,
|
6789 |
+
"step": 964
|
6790 |
+
},
|
6791 |
+
{
|
6792 |
+
"epoch": 0.38654115762066893,
|
6793 |
+
"grad_norm": 1.2114075422286987,
|
6794 |
+
"learning_rate": 6.161535538745878e-07,
|
6795 |
+
"loss": 2.1418,
|
6796 |
+
"step": 965
|
6797 |
+
},
|
6798 |
+
{
|
6799 |
+
"epoch": 0.3869417184057681,
|
6800 |
+
"grad_norm": 1.7314434051513672,
|
6801 |
+
"learning_rate": 5.814814368345412e-07,
|
6802 |
+
"loss": 2.0097,
|
6803 |
+
"step": 966
|
6804 |
+
},
|
6805 |
+
{
|
6806 |
+
"epoch": 0.3873422791908672,
|
6807 |
+
"grad_norm": 1.5273650884628296,
|
6808 |
+
"learning_rate": 5.478104631726711e-07,
|
6809 |
+
"loss": 2.0629,
|
6810 |
+
"step": 967
|
6811 |
+
},
|
6812 |
+
{
|
6813 |
+
"epoch": 0.38774283997596637,
|
6814 |
+
"grad_norm": 1.0178067684173584,
|
6815 |
+
"learning_rate": 5.151409719553079e-07,
|
6816 |
+
"loss": 1.5887,
|
6817 |
+
"step": 968
|
6818 |
+
},
|
6819 |
+
{
|
6820 |
+
"epoch": 0.3881434007610655,
|
6821 |
+
"grad_norm": 0.971847653388977,
|
6822 |
+
"learning_rate": 4.834732921638719e-07,
|
6823 |
+
"loss": 1.8772,
|
6824 |
+
"step": 969
|
6825 |
+
},
|
6826 |
+
{
|
6827 |
+
"epoch": 0.3885439615461646,
|
6828 |
+
"grad_norm": 1.0141364336013794,
|
6829 |
+
"learning_rate": 4.5280774269154115e-07,
|
6830 |
+
"loss": 1.7437,
|
6831 |
+
"step": 970
|
6832 |
+
},
|
6833 |
+
{
|
6834 |
+
"epoch": 0.38894452233126375,
|
6835 |
+
"grad_norm": 1.1142873764038086,
|
6836 |
+
"learning_rate": 4.2314463234005565e-07,
|
6837 |
+
"loss": 2.1158,
|
6838 |
+
"step": 971
|
6839 |
+
},
|
6840 |
+
{
|
6841 |
+
"epoch": 0.3893450831163629,
|
6842 |
+
"grad_norm": 0.7133710384368896,
|
6843 |
+
"learning_rate": 3.9448425981661876e-07,
|
6844 |
+
"loss": 1.8973,
|
6845 |
+
"step": 972
|
6846 |
+
},
|
6847 |
+
{
|
6848 |
+
"epoch": 0.38974564390146205,
|
6849 |
+
"grad_norm": 1.245099663734436,
|
6850 |
+
"learning_rate": 3.6682691373086665e-07,
|
6851 |
+
"loss": 1.9652,
|
6852 |
+
"step": 973
|
6853 |
+
},
|
6854 |
+
{
|
6855 |
+
"epoch": 0.3901462046865612,
|
6856 |
+
"grad_norm": 1.2889657020568848,
|
6857 |
+
"learning_rate": 3.401728725919373e-07,
|
6858 |
+
"loss": 1.7035,
|
6859 |
+
"step": 974
|
6860 |
+
},
|
6861 |
+
{
|
6862 |
+
"epoch": 0.39054676547166034,
|
6863 |
+
"grad_norm": 1.1345562934875488,
|
6864 |
+
"learning_rate": 3.145224048057727e-07,
|
6865 |
+
"loss": 1.6353,
|
6866 |
+
"step": 975
|
6867 |
+
},
|
6868 |
+
{
|
6869 |
+
"epoch": 0.3909473262567595,
|
6870 |
+
"grad_norm": 1.1244771480560303,
|
6871 |
+
"learning_rate": 2.898757686722542e-07,
|
6872 |
+
"loss": 1.9095,
|
6873 |
+
"step": 976
|
6874 |
+
},
|
6875 |
+
{
|
6876 |
+
"epoch": 0.3913478870418586,
|
6877 |
+
"grad_norm": 1.1727226972579956,
|
6878 |
+
"learning_rate": 2.6623321238277157e-07,
|
6879 |
+
"loss": 1.8767,
|
6880 |
+
"step": 977
|
6881 |
+
},
|
6882 |
+
{
|
6883 |
+
"epoch": 0.3917484478269577,
|
6884 |
+
"grad_norm": 1.2802989482879639,
|
6885 |
+
"learning_rate": 2.4359497401758024e-07,
|
6886 |
+
"loss": 1.5799,
|
6887 |
+
"step": 978
|
6888 |
+
},
|
6889 |
+
{
|
6890 |
+
"epoch": 0.39214900861205687,
|
6891 |
+
"grad_norm": 1.4607635736465454,
|
6892 |
+
"learning_rate": 2.219612815434924e-07,
|
6893 |
+
"loss": 2.0009,
|
6894 |
+
"step": 979
|
6895 |
+
},
|
6896 |
+
{
|
6897 |
+
"epoch": 0.392549569397156,
|
6898 |
+
"grad_norm": 1.410239815711975,
|
6899 |
+
"learning_rate": 2.0133235281156736e-07,
|
6900 |
+
"loss": 2.0732,
|
6901 |
+
"step": 980
|
6902 |
+
},
|
6903 |
+
{
|
6904 |
+
"epoch": 0.39295013018225516,
|
6905 |
+
"grad_norm": 0.9666495323181152,
|
6906 |
+
"learning_rate": 1.817083955548693e-07,
|
6907 |
+
"loss": 1.8358,
|
6908 |
+
"step": 981
|
6909 |
+
},
|
6910 |
+
{
|
6911 |
+
"epoch": 0.3933506909673543,
|
6912 |
+
"grad_norm": 1.4496511220932007,
|
6913 |
+
"learning_rate": 1.630896073864352e-07,
|
6914 |
+
"loss": 1.8643,
|
6915 |
+
"step": 982
|
6916 |
+
},
|
6917 |
+
{
|
6918 |
+
"epoch": 0.39375125175245346,
|
6919 |
+
"grad_norm": 1.2983746528625488,
|
6920 |
+
"learning_rate": 1.4547617579725449e-07,
|
6921 |
+
"loss": 1.9004,
|
6922 |
+
"step": 983
|
6923 |
+
},
|
6924 |
+
{
|
6925 |
+
"epoch": 0.39415181253755255,
|
6926 |
+
"grad_norm": 1.286615014076233,
|
6927 |
+
"learning_rate": 1.2886827815440372e-07,
|
6928 |
+
"loss": 1.8282,
|
6929 |
+
"step": 984
|
6930 |
+
},
|
6931 |
+
{
|
6932 |
+
"epoch": 0.3945523733226517,
|
6933 |
+
"grad_norm": 1.1125391721725464,
|
6934 |
+
"learning_rate": 1.1326608169920372e-07,
|
6935 |
+
"loss": 1.9587,
|
6936 |
+
"step": 985
|
6937 |
+
},
|
6938 |
+
{
|
6939 |
+
"epoch": 0.39495293410775084,
|
6940 |
+
"grad_norm": 1.1754589080810547,
|
6941 |
+
"learning_rate": 9.866974354560965e-08,
|
6942 |
+
"loss": 1.8011,
|
6943 |
+
"step": 986
|
6944 |
+
},
|
6945 |
+
{
|
6946 |
+
"epoch": 0.39535349489285,
|
6947 |
+
"grad_norm": 1.0687789916992188,
|
6948 |
+
"learning_rate": 8.507941067859016e-08,
|
6949 |
+
"loss": 1.8824,
|
6950 |
+
"step": 987
|
6951 |
+
},
|
6952 |
+
{
|
6953 |
+
"epoch": 0.39575405567794913,
|
6954 |
+
"grad_norm": 1.156052589416504,
|
6955 |
+
"learning_rate": 7.249521995263964e-08,
|
6956 |
+
"loss": 1.8151,
|
6957 |
+
"step": 988
|
6958 |
+
},
|
6959 |
+
{
|
6960 |
+
"epoch": 0.3961546164630483,
|
6961 |
+
"grad_norm": 1.0500197410583496,
|
6962 |
+
"learning_rate": 6.09172980904238e-08,
|
6963 |
+
"loss": 1.6616,
|
6964 |
+
"step": 989
|
6965 |
+
},
|
6966 |
+
{
|
6967 |
+
"epoch": 0.39655517724814743,
|
6968 |
+
"grad_norm": 0.9670491218566895,
|
6969 |
+
"learning_rate": 5.0345761681491746e-08,
|
6970 |
+
"loss": 1.671,
|
6971 |
+
"step": 990
|
6972 |
+
},
|
6973 |
+
{
|
6974 |
+
"epoch": 0.3969557380332465,
|
6975 |
+
"grad_norm": 1.1478677988052368,
|
6976 |
+
"learning_rate": 4.078071718107701e-08,
|
6977 |
+
"loss": 1.779,
|
6978 |
+
"step": 991
|
6979 |
+
},
|
6980 |
+
{
|
6981 |
+
"epoch": 0.39735629881834567,
|
6982 |
+
"grad_norm": 1.2338886260986328,
|
6983 |
+
"learning_rate": 3.2222260909087196e-08,
|
6984 |
+
"loss": 1.8303,
|
6985 |
+
"step": 992
|
6986 |
+
},
|
6987 |
+
{
|
6988 |
+
"epoch": 0.3977568596034448,
|
6989 |
+
"grad_norm": 0.9074128270149231,
|
6990 |
+
"learning_rate": 2.4670479049082597e-08,
|
6991 |
+
"loss": 1.8302,
|
6992 |
+
"step": 993
|
6993 |
+
},
|
6994 |
+
{
|
6995 |
+
"epoch": 0.39815742038854396,
|
6996 |
+
"grad_norm": 1.1111611127853394,
|
6997 |
+
"learning_rate": 1.81254476474213e-08,
|
6998 |
+
"loss": 1.9118,
|
6999 |
+
"step": 994
|
7000 |
+
},
|
7001 |
+
{
|
7002 |
+
"epoch": 0.3985579811736431,
|
7003 |
+
"grad_norm": 1.4943230152130127,
|
7004 |
+
"learning_rate": 1.2587232612493172e-08,
|
7005 |
+
"loss": 2.1329,
|
7006 |
+
"step": 995
|
7007 |
+
},
|
7008 |
+
{
|
7009 |
+
"epoch": 0.39895854195874225,
|
7010 |
+
"grad_norm": 0.8442416191101074,
|
7011 |
+
"learning_rate": 8.055889714064791e-09,
|
7012 |
+
"loss": 1.832,
|
7013 |
+
"step": 996
|
7014 |
+
},
|
7015 |
+
{
|
7016 |
+
"epoch": 0.3993591027438414,
|
7017 |
+
"grad_norm": 1.6363670825958252,
|
7018 |
+
"learning_rate": 4.531464582713252e-09,
|
7019 |
+
"loss": 2.3091,
|
7020 |
+
"step": 997
|
7021 |
+
},
|
7022 |
+
{
|
7023 |
+
"epoch": 0.3997596635289405,
|
7024 |
+
"grad_norm": 1.186084270477295,
|
7025 |
+
"learning_rate": 2.0139927093487664e-09,
|
7026 |
+
"loss": 1.5448,
|
7027 |
+
"step": 998
|
7028 |
+
},
|
7029 |
+
{
|
7030 |
+
"epoch": 0.40016022431403964,
|
7031 |
+
"grad_norm": 1.3950694799423218,
|
7032 |
+
"learning_rate": 5.034994448926967e-10,
|
7033 |
+
"loss": 1.846,
|
7034 |
+
"step": 999
|
7035 |
+
},
|
7036 |
+
{
|
7037 |
+
"epoch": 0.4005607850991388,
|
7038 |
+
"grad_norm": 1.2405130863189697,
|
7039 |
+
"learning_rate": 0.0,
|
7040 |
+
"loss": 1.6719,
|
7041 |
+
"step": 1000
|
7042 |
+
},
|
7043 |
+
{
|
7044 |
+
"epoch": 0.4005607850991388,
|
7045 |
+
"eval_loss": 1.906521201133728,
|
7046 |
+
"eval_runtime": 32.8778,
|
7047 |
+
"eval_samples_per_second": 31.997,
|
7048 |
+
"eval_steps_per_second": 15.999,
|
7049 |
+
"step": 1000
|
 }
 ],
 "logging_steps": 1,

 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
+"should_training_stop": true
 },
 "attributes": {}
 }
 },
+"total_flos": 5.08473960997847e+16,
 "train_batch_size": 2,
 "trial_name": null,
 "trial_params": null
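
The bulk of this commit is the updated trainer_state.json, whose per-step log entries (epoch, grad_norm, learning_rate, loss, step) now run through step 1000 and whose `should_training_stop` flag flips to `true`. A small sketch of inspecting such a file is shown below; it assumes the per-step entries live under the `log_history` key, as in Transformers trainer state files, and uses a hypothetical local path.

```python
import json

# Hypothetical path to the checkpoint pulled from this repository.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print("global_step:", state["global_step"])  # 1000 in this commit
print("epoch:", state["epoch"])              # 0.4005607850991388 in this commit

# Per-step records: training steps carry a "loss" key, evaluations an "eval_loss" key.
history = state.get("log_history", [])
train_logs = [e for e in history if "loss" in e]
eval_logs = [e for e in history if "eval_loss" in e]

if train_logs:
    last = train_logs[-1]
    print(f"last train loss: {last['loss']} at step {last['step']}")
if eval_logs:
    last_eval = eval_logs[-1]
    print(f"last eval loss: {last_eval['eval_loss']} at step {last_eval['step']}")
```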