Training in progress, step 1500, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7cb31e81ea5105cb82dc8f487ec259e028a0188dc8f4a57ac8adb7bcda7c3bc8
 size 28348936
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cb6f65ebb7bddb5caf2941360e00253b2d607c51654b9df116d5404bce91b268
 size 14714324
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:733b5c5c0a3916e70f852e663c0513c05bab3aa0f71a30256e61fa740397a848
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fe4e625cf5525cf235a8c7e4c6984ba05d1b5436f9e969799dcec0119a3a6c2a
 size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.5067781578613961,
   "eval_steps": 375,
-  "global_step":
+  "global_step": 1500,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7914,6 +7914,2639 @@
       "eval_samples_per_second": 20.16,
       "eval_steps_per_second": 10.082,
       "step": 1125
+    },
+    {
+      "epoch": 0.38042147050128805,
+      "grad_norm": 0.2497602105140686,
+      "learning_rate": 2.9513310198183065e-05,
+      "loss": 1.5079,
+      "step": 1126
+    },
+    {
+      "epoch": 0.380759322606529,
+      "grad_norm": 0.2503485083580017,
+      "learning_rate": 2.936390617691097e-05,
+      "loss": 1.5071,
+      "step": 1127
+    },
+    {
+      "epoch": 0.3810971747117699,
+      "grad_norm": 0.2478799968957901,
+      "learning_rate": 2.9214816173045356e-05,
+      "loss": 1.4848,
+      "step": 1128
+    },
+    {
+      "epoch": 0.38143502681701086,
+      "grad_norm": 0.25389614701271057,
+      "learning_rate": 2.906604084937572e-05,
+      "loss": 1.4275,
+      "step": 1129
+    },
+    {
+      "epoch": 0.38177287892225176,
+      "grad_norm": 0.24664315581321716,
+      "learning_rate": 2.8917580867292526e-05,
+      "loss": 1.3777,
+      "step": 1130
+    },
+    {
+      "epoch": 0.3821107310274927,
+      "grad_norm": 0.25654202699661255,
+      "learning_rate": 2.8769436886784408e-05,
+      "loss": 1.4802,
+      "step": 1131
+    },
+    {
+      "epoch": 0.38244858313273367,
+      "grad_norm": 0.25405144691467285,
+      "learning_rate": 2.862160956643517e-05,
+      "loss": 1.3829,
+      "step": 1132
+    },
+    {
+      "epoch": 0.38278643523797456,
+      "grad_norm": 0.2616935968399048,
+      "learning_rate": 2.847409956342092e-05,
+      "loss": 1.4627,
+      "step": 1133
+    },
+    {
+      "epoch": 0.3831242873432155,
+      "grad_norm": 0.2542346119880676,
+      "learning_rate": 2.8326907533507074e-05,
+      "loss": 1.4486,
+      "step": 1134
+    },
+    {
+      "epoch": 0.3834621394484564,
+      "grad_norm": 0.2578616738319397,
+      "learning_rate": 2.8180034131045464e-05,
+      "loss": 1.4032,
+      "step": 1135
+    },
+    {
+      "epoch": 0.3837999915536974,
+      "grad_norm": 0.2529214322566986,
+      "learning_rate": 2.8033480008971546e-05,
+      "loss": 1.4395,
+      "step": 1136
+    },
+    {
+      "epoch": 0.3841378436589383,
+      "grad_norm": 0.263386994600296,
+      "learning_rate": 2.7887245818801277e-05,
+      "loss": 1.4745,
+      "step": 1137
+    },
+    {
+      "epoch": 0.3844756957641792,
+      "grad_norm": 0.2704524099826813,
+      "learning_rate": 2.7741332210628345e-05,
+      "loss": 1.521,
+      "step": 1138
+    },
+    {
+      "epoch": 0.3848135478694202,
+      "grad_norm": 0.2625349164009094,
+      "learning_rate": 2.759573983312138e-05,
+      "loss": 1.327,
+      "step": 1139
+    },
+    {
+      "epoch": 0.3851513999746611,
+      "grad_norm": 0.26310068368911743,
+      "learning_rate": 2.7450469333520855e-05,
+      "loss": 1.4221,
+      "step": 1140
+    },
+    {
+      "epoch": 0.38548925207990203,
+      "grad_norm": 0.26481926441192627,
+      "learning_rate": 2.730552135763632e-05,
+      "loss": 1.471,
+      "step": 1141
+    },
+    {
+      "epoch": 0.38582710418514293,
+      "grad_norm": 0.275761216878891,
+      "learning_rate": 2.7160896549843562e-05,
+      "loss": 1.4951,
+      "step": 1142
+    },
+    {
+      "epoch": 0.3861649562903839,
+      "grad_norm": 0.2722332179546356,
+      "learning_rate": 2.701659555308169e-05,
+      "loss": 1.4825,
+      "step": 1143
+    },
+    {
+      "epoch": 0.38650280839562484,
+      "grad_norm": 0.27185872197151184,
+      "learning_rate": 2.6872619008850274e-05,
+      "loss": 1.4367,
+      "step": 1144
+    },
+    {
+      "epoch": 0.38684066050086574,
+      "grad_norm": 0.2962988317012787,
+      "learning_rate": 2.672896755720654e-05,
+      "loss": 1.4295,
+      "step": 1145
+    },
+    {
+      "epoch": 0.3871785126061067,
+      "grad_norm": 0.28734004497528076,
+      "learning_rate": 2.6585641836762433e-05,
+      "loss": 1.4859,
+      "step": 1146
+    },
+    {
+      "epoch": 0.3875163647113476,
+      "grad_norm": 0.2959744334220886,
+      "learning_rate": 2.6442642484681944e-05,
+      "loss": 1.4594,
+      "step": 1147
+    },
+    {
+      "epoch": 0.38785421681658855,
+      "grad_norm": 0.2969874143600464,
+      "learning_rate": 2.6299970136678077e-05,
+      "loss": 1.3245,
+      "step": 1148
+    },
+    {
+      "epoch": 0.38819206892182945,
+      "grad_norm": 0.33464786410331726,
+      "learning_rate": 2.6157625427010156e-05,
+      "loss": 1.4796,
+      "step": 1149
+    },
+    {
+      "epoch": 0.3885299210270704,
+      "grad_norm": 0.5102173686027527,
+      "learning_rate": 2.6015608988480955e-05,
+      "loss": 1.2026,
+      "step": 1150
+    },
+    {
+      "epoch": 0.38886777313231136,
+      "grad_norm": 0.19551311433315277,
+      "learning_rate": 2.5873921452433915e-05,
+      "loss": 1.507,
+      "step": 1151
+    },
+    {
+      "epoch": 0.38920562523755226,
+      "grad_norm": 0.20793762803077698,
+      "learning_rate": 2.57325634487503e-05,
+      "loss": 1.5617,
+      "step": 1152
+    },
+    {
+      "epoch": 0.3895434773427932,
+      "grad_norm": 0.20898018777370453,
+      "learning_rate": 2.5591535605846383e-05,
+      "loss": 1.5061,
+      "step": 1153
+    },
+    {
+      "epoch": 0.3898813294480341,
+      "grad_norm": 0.21093174815177917,
+      "learning_rate": 2.5450838550670808e-05,
+      "loss": 1.4877,
+      "step": 1154
+    },
+    {
+      "epoch": 0.39021918155327506,
+      "grad_norm": 0.1973397582769394,
+      "learning_rate": 2.5310472908701555e-05,
+      "loss": 1.4436,
+      "step": 1155
+    },
+    {
+      "epoch": 0.39055703365851596,
+      "grad_norm": 0.2074335664510727,
+      "learning_rate": 2.5170439303943294e-05,
+      "loss": 1.4567,
+      "step": 1156
+    },
+    {
+      "epoch": 0.3908948857637569,
+      "grad_norm": 0.21190492808818817,
+      "learning_rate": 2.503073835892471e-05,
+      "loss": 1.5825,
+      "step": 1157
+    },
+    {
+      "epoch": 0.39123273786899787,
+      "grad_norm": 0.215172678232193,
+      "learning_rate": 2.4891370694695517e-05,
+      "loss": 1.4868,
+      "step": 1158
+    },
+    {
+      "epoch": 0.39157058997423877,
+      "grad_norm": 0.2086210399866104,
+      "learning_rate": 2.4752336930823837e-05,
+      "loss": 1.4145,
+      "step": 1159
+    },
+    {
+      "epoch": 0.3919084420794797,
+      "grad_norm": 0.21715684235095978,
+      "learning_rate": 2.4613637685393432e-05,
+      "loss": 1.4079,
+      "step": 1160
+    },
+    {
+      "epoch": 0.3922462941847206,
+      "grad_norm": 0.22638556361198425,
+      "learning_rate": 2.4475273575000936e-05,
+      "loss": 1.4737,
+      "step": 1161
+    },
+    {
+      "epoch": 0.3925841462899616,
+      "grad_norm": 0.21631711721420288,
+      "learning_rate": 2.4337245214753103e-05,
+      "loss": 1.34,
+      "step": 1162
+    },
+    {
+      "epoch": 0.3929219983952025,
+      "grad_norm": 0.2178223878145218,
+      "learning_rate": 2.4199553218264093e-05,
+      "loss": 1.4078,
+      "step": 1163
+    },
+    {
+      "epoch": 0.39325985050044343,
+      "grad_norm": 0.23028664290905,
+      "learning_rate": 2.4062198197652752e-05,
+      "loss": 1.4507,
+      "step": 1164
+    },
+    {
+      "epoch": 0.3935977026056844,
+      "grad_norm": 0.23226363956928253,
+      "learning_rate": 2.3925180763539844e-05,
+      "loss": 1.4514,
+      "step": 1165
+    },
+    {
+      "epoch": 0.3939355547109253,
+      "grad_norm": 0.23551660776138306,
+      "learning_rate": 2.3788501525045438e-05,
+      "loss": 1.4471,
+      "step": 1166
+    },
+    {
+      "epoch": 0.39427340681616624,
+      "grad_norm": 0.23365965485572815,
+      "learning_rate": 2.3652161089786086e-05,
+      "loss": 1.544,
+      "step": 1167
+    },
+    {
+      "epoch": 0.39461125892140714,
+      "grad_norm": 0.22713260352611542,
+      "learning_rate": 2.351616006387214e-05,
+      "loss": 1.4282,
+      "step": 1168
+    },
+    {
+      "epoch": 0.3949491110266481,
+      "grad_norm": 0.2328532189130783,
+      "learning_rate": 2.3380499051905137e-05,
+      "loss": 1.5179,
+      "step": 1169
+    },
+    {
+      "epoch": 0.395286963131889,
+      "grad_norm": 0.2374032437801361,
+      "learning_rate": 2.324517865697501e-05,
+      "loss": 1.3968,
+      "step": 1170
+    },
+    {
+      "epoch": 0.39562481523712995,
+      "grad_norm": 0.24028538167476654,
+      "learning_rate": 2.3110199480657525e-05,
+      "loss": 1.4864,
+      "step": 1171
+    },
+    {
+      "epoch": 0.3959626673423709,
+      "grad_norm": 0.23734930157661438,
+      "learning_rate": 2.2975562123011495e-05,
+      "loss": 1.4716,
+      "step": 1172
+    },
+    {
+      "epoch": 0.3963005194476118,
+      "grad_norm": 0.24631333351135254,
+      "learning_rate": 2.2841267182576143e-05,
+      "loss": 1.3686,
+      "step": 1173
+    },
+    {
+      "epoch": 0.39663837155285275,
+      "grad_norm": 0.2447107881307602,
+      "learning_rate": 2.2707315256368433e-05,
+      "loss": 1.4315,
+      "step": 1174
+    },
+    {
+      "epoch": 0.39697622365809365,
+      "grad_norm": 0.2390620857477188,
+      "learning_rate": 2.2573706939880555e-05,
+      "loss": 1.3778,
+      "step": 1175
+    },
+    {
+      "epoch": 0.3973140757633346,
+      "grad_norm": 0.2474817931652069,
+      "learning_rate": 2.2440442827077045e-05,
+      "loss": 1.4444,
+      "step": 1176
+    },
+    {
+      "epoch": 0.3976519278685755,
+      "grad_norm": 0.24436935782432556,
+      "learning_rate": 2.230752351039228e-05,
+      "loss": 1.4287,
+      "step": 1177
+    },
+    {
+      "epoch": 0.39798977997381646,
+      "grad_norm": 0.2554487884044647,
+      "learning_rate": 2.2174949580727832e-05,
+      "loss": 1.537,
+      "step": 1178
+    },
+    {
+      "epoch": 0.3983276320790574,
+      "grad_norm": 0.2586643099784851,
+      "learning_rate": 2.2042721627449846e-05,
+      "loss": 1.4456,
+      "step": 1179
+    },
+    {
+      "epoch": 0.3986654841842983,
+      "grad_norm": 0.26540178060531616,
+      "learning_rate": 2.1910840238386398e-05,
+      "loss": 1.4151,
+      "step": 1180
+    },
+    {
+      "epoch": 0.39900333628953927,
+      "grad_norm": 0.254321426153183,
+      "learning_rate": 2.1779305999824884e-05,
+      "loss": 1.4471,
+      "step": 1181
+    },
+    {
+      "epoch": 0.39934118839478017,
+      "grad_norm": 0.2584054172039032,
+      "learning_rate": 2.164811949650942e-05,
+      "loss": 1.4683,
+      "step": 1182
+    },
+    {
+      "epoch": 0.3996790405000211,
+      "grad_norm": 0.2601689398288727,
+      "learning_rate": 2.1517281311638217e-05,
+      "loss": 1.3734,
+      "step": 1183
+    },
+    {
+      "epoch": 0.400016892605262,
+      "grad_norm": 0.2716273069381714,
+      "learning_rate": 2.1386792026861103e-05,
+      "loss": 1.489,
+      "step": 1184
+    },
+    {
+      "epoch": 0.400354744710503,
+      "grad_norm": 0.2612789571285248,
+      "learning_rate": 2.125665222227675e-05,
+      "loss": 1.4303,
+      "step": 1185
+    },
+    {
+      "epoch": 0.40069259681574393,
+      "grad_norm": 0.2691882848739624,
+      "learning_rate": 2.112686247643024e-05,
+      "loss": 1.4763,
+      "step": 1186
+    },
+    {
+      "epoch": 0.40103044892098483,
+      "grad_norm": 0.2639162242412567,
+      "learning_rate": 2.09974233663104e-05,
+      "loss": 1.4537,
+      "step": 1187
+    },
+    {
+      "epoch": 0.4013683010262258,
+      "grad_norm": 0.2596285939216614,
+      "learning_rate": 2.0868335467347366e-05,
+      "loss": 1.396,
+      "step": 1188
+    },
+    {
+      "epoch": 0.4017061531314667,
+      "grad_norm": 0.27600008249282837,
+      "learning_rate": 2.073959935340988e-05,
+      "loss": 1.5009,
+      "step": 1189
+    },
+    {
+      "epoch": 0.40204400523670764,
+      "grad_norm": 0.2726534903049469,
+      "learning_rate": 2.06112155968028e-05,
+      "loss": 1.4841,
+      "step": 1190
+    },
+    {
+      "epoch": 0.40238185734194853,
+      "grad_norm": 0.26992374658584595,
+      "learning_rate": 2.0483184768264596e-05,
+      "loss": 1.4832,
+      "step": 1191
+    },
+    {
+      "epoch": 0.4027197094471895,
+      "grad_norm": 0.2781830430030823,
+      "learning_rate": 2.035550743696468e-05,
+      "loss": 1.5346,
+      "step": 1192
+    },
+    {
+      "epoch": 0.40305756155243044,
+      "grad_norm": 0.27425867319107056,
+      "learning_rate": 2.022818417050113e-05,
+      "loss": 1.4853,
+      "step": 1193
+    },
+    {
+      "epoch": 0.40339541365767134,
+      "grad_norm": 0.2790997624397278,
+      "learning_rate": 2.0101215534897855e-05,
+      "loss": 1.4492,
+      "step": 1194
+    },
+    {
+      "epoch": 0.4037332657629123,
+      "grad_norm": 0.28432077169418335,
+      "learning_rate": 1.99746020946023e-05,
+      "loss": 1.4352,
+      "step": 1195
+    },
+    {
+      "epoch": 0.4040711178681532,
+      "grad_norm": 0.2782287895679474,
+      "learning_rate": 1.9848344412482854e-05,
+      "loss": 1.4887,
+      "step": 1196
+    },
+    {
+      "epoch": 0.40440896997339415,
+      "grad_norm": 0.29870307445526123,
+      "learning_rate": 1.9722443049826344e-05,
+      "loss": 1.4134,
+      "step": 1197
+    },
+    {
+      "epoch": 0.4047468220786351,
+      "grad_norm": 0.30817660689353943,
+      "learning_rate": 1.9596898566335576e-05,
+      "loss": 1.4889,
+      "step": 1198
+    },
+    {
+      "epoch": 0.405084674183876,
+      "grad_norm": 0.31997233629226685,
+      "learning_rate": 1.9471711520126824e-05,
+      "loss": 1.3966,
+      "step": 1199
+    },
+    {
+      "epoch": 0.40542252628911696,
+      "grad_norm": 0.42330682277679443,
+      "learning_rate": 1.9346882467727325e-05,
+      "loss": 1.4177,
+      "step": 1200
+    },
+    {
+      "epoch": 0.40576037839435786,
+      "grad_norm": 0.20732523500919342,
+      "learning_rate": 1.9222411964072884e-05,
+      "loss": 1.5109,
+      "step": 1201
+    },
+    {
+      "epoch": 0.4060982304995988,
+      "grad_norm": 0.20898114144802094,
+      "learning_rate": 1.9098300562505266e-05,
+      "loss": 1.5027,
+      "step": 1202
+    },
+    {
+      "epoch": 0.4064360826048397,
+      "grad_norm": 0.20611798763275146,
+      "learning_rate": 1.8974548814769944e-05,
+      "loss": 1.4697,
+      "step": 1203
+    },
+    {
+      "epoch": 0.40677393471008066,
+      "grad_norm": 0.206655353307724,
+      "learning_rate": 1.8851157271013442e-05,
+      "loss": 1.4451,
+      "step": 1204
+    },
+    {
+      "epoch": 0.4071117868153216,
+      "grad_norm": 0.20715847611427307,
+      "learning_rate": 1.872812647978095e-05,
+      "loss": 1.4607,
+      "step": 1205
+    },
+    {
+      "epoch": 0.4074496389205625,
+      "grad_norm": 0.20446132123470306,
+      "learning_rate": 1.8605456988014015e-05,
+      "loss": 1.3845,
+      "step": 1206
+    },
+    {
+      "epoch": 0.4077874910258035,
+      "grad_norm": 0.2049819380044937,
+      "learning_rate": 1.8483149341047923e-05,
+      "loss": 1.4571,
+      "step": 1207
+    },
+    {
+      "epoch": 0.40812534313104437,
+      "grad_norm": 0.21026751399040222,
+      "learning_rate": 1.8361204082609352e-05,
+      "loss": 1.5207,
+      "step": 1208
+    },
+    {
+      "epoch": 0.4084631952362853,
+      "grad_norm": 0.216688334941864,
+      "learning_rate": 1.8239621754813995e-05,
+      "loss": 1.4578,
+      "step": 1209
+    },
+    {
+      "epoch": 0.4088010473415262,
+      "grad_norm": 0.2216232419013977,
+      "learning_rate": 1.811840289816409e-05,
+      "loss": 1.4798,
+      "step": 1210
+    },
+    {
+      "epoch": 0.4091388994467672,
+      "grad_norm": 0.22522899508476257,
+      "learning_rate": 1.799754805154603e-05,
+      "loss": 1.5251,
+      "step": 1211
+    },
+    {
+      "epoch": 0.40947675155200813,
+      "grad_norm": 0.22267042100429535,
+      "learning_rate": 1.787705775222802e-05,
+      "loss": 1.3923,
+      "step": 1212
+    },
+    {
+      "epoch": 0.40981460365724903,
+      "grad_norm": 0.2366584986448288,
+      "learning_rate": 1.775693253585763e-05,
+      "loss": 1.489,
+      "step": 1213
+    },
+    {
+      "epoch": 0.41015245576249,
+      "grad_norm": 0.23010605573654175,
+      "learning_rate": 1.763717293645939e-05,
+      "loss": 1.4637,
+      "step": 1214
+    },
+    {
+      "epoch": 0.4104903078677309,
+      "grad_norm": 0.2245521992444992,
+      "learning_rate": 1.7517779486432495e-05,
+      "loss": 1.3503,
+      "step": 1215
+    },
+    {
+      "epoch": 0.41082815997297184,
+      "grad_norm": 0.2249370515346527,
+      "learning_rate": 1.7398752716548395e-05,
+      "loss": 1.42,
+      "step": 1216
+    },
+    {
+      "epoch": 0.41116601207821274,
+      "grad_norm": 0.23397955298423767,
+      "learning_rate": 1.728009315594843e-05,
+      "loss": 1.4915,
+      "step": 1217
+    },
+    {
+      "epoch": 0.4115038641834537,
+      "grad_norm": 0.2305310070514679,
+      "learning_rate": 1.716180133214149e-05,
+      "loss": 1.4514,
+      "step": 1218
+    },
+    {
+      "epoch": 0.41184171628869465,
+      "grad_norm": 0.22874809801578522,
+      "learning_rate": 1.704387777100165e-05,
+      "loss": 1.4138,
+      "step": 1219
+    },
+    {
+      "epoch": 0.41217956839393555,
+      "grad_norm": 0.23827221989631653,
+      "learning_rate": 1.6926322996765897e-05,
+      "loss": 1.4636,
+      "step": 1220
+    },
+    {
+      "epoch": 0.4125174204991765,
+      "grad_norm": 0.2340448498725891,
+      "learning_rate": 1.6809137532031704e-05,
+      "loss": 1.3643,
+      "step": 1221
+    },
+    {
+      "epoch": 0.4128552726044174,
+      "grad_norm": 0.24293161928653717,
+      "learning_rate": 1.6692321897754758e-05,
+      "loss": 1.4872,
+      "step": 1222
+    },
+    {
+      "epoch": 0.41319312470965835,
+      "grad_norm": 0.23916597664356232,
+      "learning_rate": 1.65758766132467e-05,
+      "loss": 1.4244,
+      "step": 1223
+    },
+    {
+      "epoch": 0.41353097681489925,
+      "grad_norm": 0.24621140956878662,
+      "learning_rate": 1.6459802196172668e-05,
+      "loss": 1.437,
+      "step": 1224
+    },
+    {
+      "epoch": 0.4138688289201402,
+      "grad_norm": 0.2637491524219513,
+      "learning_rate": 1.634409916254914e-05,
+      "loss": 1.5468,
+      "step": 1225
+    },
+    {
+      "epoch": 0.41420668102538116,
+      "grad_norm": 0.24949494004249573,
+      "learning_rate": 1.622876802674158e-05,
+      "loss": 1.4285,
+      "step": 1226
+    },
+    {
+      "epoch": 0.41454453313062206,
+      "grad_norm": 0.2498621940612793,
+      "learning_rate": 1.6113809301462125e-05,
+      "loss": 1.4151,
+      "step": 1227
+    },
+    {
+      "epoch": 0.414882385235863,
+      "grad_norm": 0.25141584873199463,
+      "learning_rate": 1.599922349776738e-05,
+      "loss": 1.4404,
+      "step": 1228
+    },
+    {
+      "epoch": 0.4152202373411039,
+      "grad_norm": 0.2545926868915558,
+      "learning_rate": 1.5885011125056047e-05,
+      "loss": 1.4105,
+      "step": 1229
+    },
+    {
+      "epoch": 0.41555808944634487,
+      "grad_norm": 0.2538551390171051,
+      "learning_rate": 1.5771172691066794e-05,
+      "loss": 1.4131,
+      "step": 1230
+    },
+    {
+      "epoch": 0.41589594155158577,
+      "grad_norm": 0.24894842505455017,
+      "learning_rate": 1.565770870187585e-05,
+      "loss": 1.4303,
+      "step": 1231
+    },
+    {
+      "epoch": 0.4162337936568267,
+      "grad_norm": 0.2575973868370056,
+      "learning_rate": 1.5544619661894864e-05,
+      "loss": 1.4958,
+      "step": 1232
+    },
+    {
+      "epoch": 0.4165716457620677,
+      "grad_norm": 0.26696881651878357,
+      "learning_rate": 1.543190607386861e-05,
+      "loss": 1.479,
+      "step": 1233
+    },
+    {
+      "epoch": 0.4169094978673086,
+      "grad_norm": 0.2578486204147339,
+      "learning_rate": 1.5319568438872745e-05,
+      "loss": 1.4487,
+      "step": 1234
+    },
+    {
+      "epoch": 0.41724734997254953,
+      "grad_norm": 0.2606693506240845,
+      "learning_rate": 1.520760725631164e-05,
+      "loss": 1.4834,
+      "step": 1235
+    },
+    {
+      "epoch": 0.41758520207779043,
+      "grad_norm": 0.2726755440235138,
+      "learning_rate": 1.5096023023916094e-05,
+      "loss": 1.4472,
+      "step": 1236
+    },
+    {
+      "epoch": 0.4179230541830314,
+      "grad_norm": 0.2661513686180115,
+      "learning_rate": 1.498481623774115e-05,
+      "loss": 1.4896,
+      "step": 1237
+    },
+    {
+      "epoch": 0.4182609062882723,
+      "grad_norm": 0.2649241089820862,
+      "learning_rate": 1.4873987392163947e-05,
+      "loss": 1.4332,
+      "step": 1238
+    },
+    {
+      "epoch": 0.41859875839351324,
+      "grad_norm": 0.276518851518631,
+      "learning_rate": 1.4763536979881354e-05,
+      "loss": 1.5728,
+      "step": 1239
+    },
+    {
+      "epoch": 0.4189366104987542,
+      "grad_norm": 0.26126405596733093,
+      "learning_rate": 1.4653465491908003e-05,
+      "loss": 1.3866,
+      "step": 1240
+    },
+    {
+      "epoch": 0.4192744626039951,
+      "grad_norm": 0.27424702048301697,
+      "learning_rate": 1.4543773417573925e-05,
+      "loss": 1.4755,
+      "step": 1241
+    },
+    {
+      "epoch": 0.41961231470923605,
+      "grad_norm": 0.2712365388870239,
+      "learning_rate": 1.4434461244522458e-05,
+      "loss": 1.4201,
+      "step": 1242
+    },
+    {
+      "epoch": 0.41995016681447694,
+      "grad_norm": 0.27947381138801575,
+      "learning_rate": 1.4325529458708065e-05,
+      "loss": 1.4636,
+      "step": 1243
+    },
+    {
+      "epoch": 0.4202880189197179,
+      "grad_norm": 0.27926087379455566,
+      "learning_rate": 1.4216978544394177e-05,
+      "loss": 1.4317,
+      "step": 1244
+    },
+    {
+      "epoch": 0.4206258710249588,
+      "grad_norm": 0.2867196500301361,
+      "learning_rate": 1.4108808984151023e-05,
+      "loss": 1.5503,
+      "step": 1245
+    },
+    {
+      "epoch": 0.42096372313019975,
+      "grad_norm": 0.2843060791492462,
+      "learning_rate": 1.4001021258853509e-05,
+      "loss": 1.4083,
+      "step": 1246
+    },
+    {
+      "epoch": 0.4213015752354407,
+      "grad_norm": 0.2859300673007965,
+      "learning_rate": 1.3893615847679065e-05,
+      "loss": 1.4187,
+      "step": 1247
+    },
+    {
+      "epoch": 0.4216394273406816,
+      "grad_norm": 0.2931751608848572,
+      "learning_rate": 1.3786593228105494e-05,
+      "loss": 1.3945,
+      "step": 1248
+    },
+    {
+      "epoch": 0.42197727944592256,
+      "grad_norm": 0.34488803148269653,
+      "learning_rate": 1.3679953875908957e-05,
+      "loss": 1.4171,
+      "step": 1249
+    },
+    {
+      "epoch": 0.42231513155116346,
+      "grad_norm": 0.4581731855869293,
+      "learning_rate": 1.3573698265161683e-05,
+      "loss": 1.4116,
+      "step": 1250
+    },
+    {
+      "epoch": 0.4226529836564044,
+      "grad_norm": 0.1875457912683487,
+      "learning_rate": 1.3467826868229994e-05,
+      "loss": 1.4063,
+      "step": 1251
+    },
+    {
+      "epoch": 0.42299083576164537,
+      "grad_norm": 0.20211121439933777,
+      "learning_rate": 1.3362340155772146e-05,
+      "loss": 1.469,
+      "step": 1252
+    },
+    {
+      "epoch": 0.42332868786688627,
+      "grad_norm": 0.2031911015510559,
+      "learning_rate": 1.3257238596736266e-05,
+      "loss": 1.401,
+      "step": 1253
+    },
+    {
+      "epoch": 0.4236665399721272,
+      "grad_norm": 0.20159441232681274,
+      "learning_rate": 1.3152522658358245e-05,
+      "loss": 1.4079,
+      "step": 1254
+    },
+    {
+      "epoch": 0.4240043920773681,
+      "grad_norm": 0.2081223428249359,
+      "learning_rate": 1.3048192806159721e-05,
+      "loss": 1.52,
+      "step": 1255
+    },
+    {
+      "epoch": 0.4243422441826091,
+      "grad_norm": 0.20980967581272125,
+      "learning_rate": 1.2944249503945894e-05,
+      "loss": 1.4587,
+      "step": 1256
+    },
+    {
+      "epoch": 0.42468009628785,
+      "grad_norm": 0.20135554671287537,
+      "learning_rate": 1.2840693213803545e-05,
+      "loss": 1.4216,
+      "step": 1257
+    },
+    {
+      "epoch": 0.4250179483930909,
+      "grad_norm": 0.20347854495048523,
+      "learning_rate": 1.2737524396099032e-05,
+      "loss": 1.3872,
+      "step": 1258
+    },
+    {
+      "epoch": 0.4253558004983319,
+      "grad_norm": 0.20733654499053955,
+      "learning_rate": 1.2634743509476088e-05,
+      "loss": 1.3855,
+      "step": 1259
+    },
+    {
+      "epoch": 0.4256936526035728,
+      "grad_norm": 0.2230863869190216,
+      "learning_rate": 1.2532351010853916e-05,
+      "loss": 1.4903,
+      "step": 1260
+    },
+    {
+      "epoch": 0.42603150470881374,
+      "grad_norm": 0.2193530648946762,
+      "learning_rate": 1.243034735542512e-05,
+      "loss": 1.3547,
+      "step": 1261
+    },
+    {
+      "epoch": 0.42636935681405463,
+      "grad_norm": 0.2210264652967453,
+      "learning_rate": 1.2328732996653669e-05,
+      "loss": 1.4023,
+      "step": 1262
+    },
+    {
+      "epoch": 0.4267072089192956,
+      "grad_norm": 0.2219851016998291,
+      "learning_rate": 1.2227508386272878e-05,
+      "loss": 1.4894,
+      "step": 1263
+    },
+    {
+      "epoch": 0.4270450610245365,
+      "grad_norm": 0.22122083604335785,
+      "learning_rate": 1.212667397428342e-05,
+      "loss": 1.3537,
+      "step": 1264
+    },
+    {
+      "epoch": 0.42738291312977744,
+      "grad_norm": 0.21676096320152283,
+      "learning_rate": 1.2026230208951306e-05,
+      "loss": 1.4027,
+      "step": 1265
+    },
+    {
+      "epoch": 0.4277207652350184,
+      "grad_norm": 0.22972509264945984,
+      "learning_rate": 1.1926177536805905e-05,
+      "loss": 1.4587,
+      "step": 1266
+    },
+    {
+      "epoch": 0.4280586173402593,
+      "grad_norm": 0.24429595470428467,
+      "learning_rate": 1.1826516402637989e-05,
+      "loss": 1.5336,
+      "step": 1267
+    },
+    {
+      "epoch": 0.42839646944550025,
+      "grad_norm": 0.22251424193382263,
+      "learning_rate": 1.1727247249497685e-05,
+      "loss": 1.3503,
+      "step": 1268
+    },
+    {
+      "epoch": 0.42873432155074115,
+      "grad_norm": 0.24076072871685028,
+      "learning_rate": 1.1628370518692533e-05,
+      "loss": 1.5081,
+      "step": 1269
+    },
+    {
+      "epoch": 0.4290721736559821,
+      "grad_norm": 0.23927442729473114,
+      "learning_rate": 1.152988664978556e-05,
+      "loss": 1.4237,
+      "step": 1270
+    },
+    {
+      "epoch": 0.429410025761223,
+      "grad_norm": 0.24455800652503967,
+      "learning_rate": 1.1431796080593283e-05,
+      "loss": 1.4313,
+      "step": 1271
+    },
+    {
+      "epoch": 0.42974787786646396,
+      "grad_norm": 0.24002833664417267,
+      "learning_rate": 1.1334099247183783e-05,
+      "loss": 1.4184,
+      "step": 1272
+    },
+    {
+      "epoch": 0.4300857299717049,
+      "grad_norm": 0.24867548048496246,
+      "learning_rate": 1.1236796583874787e-05,
+      "loss": 1.4284,
+      "step": 1273
+    },
+    {
+      "epoch": 0.4304235820769458,
+      "grad_norm": 0.24509286880493164,
+      "learning_rate": 1.1139888523231678e-05,
+      "loss": 1.418,
+      "step": 1274
+    },
+    {
+      "epoch": 0.43076143418218676,
+      "grad_norm": 0.24799369275569916,
+      "learning_rate": 1.1043375496065611e-05,
+      "loss": 1.4701,
+      "step": 1275
+    },
+    {
+      "epoch": 0.43109928628742766,
+      "grad_norm": 0.2530164122581482,
+      "learning_rate": 1.0947257931431642e-05,
+      "loss": 1.4601,
+      "step": 1276
+    },
+    {
+      "epoch": 0.4314371383926686,
+      "grad_norm": 0.25756946206092834,
+      "learning_rate": 1.0851536256626705e-05,
+      "loss": 1.4254,
+      "step": 1277
+    },
+    {
+      "epoch": 0.4317749904979095,
+      "grad_norm": 0.25300857424736023,
+      "learning_rate": 1.0756210897187812e-05,
+      "loss": 1.4425,
+      "step": 1278
+    },
+    {
+      "epoch": 0.43211284260315047,
+      "grad_norm": 0.24420616030693054,
+      "learning_rate": 1.0661282276890127e-05,
+      "loss": 1.387,
+      "step": 1279
+    },
+    {
+      "epoch": 0.4324506947083914,
+      "grad_norm": 0.25003913044929504,
+      "learning_rate": 1.0566750817745074e-05,
+      "loss": 1.4285,
+      "step": 1280
+    },
+    {
+      "epoch": 0.4327885468136323,
+      "grad_norm": 0.2543680667877197,
+      "learning_rate": 1.0472616939998492e-05,
+      "loss": 1.368,
+      "step": 1281
+    },
+    {
+      "epoch": 0.4331263989188733,
+      "grad_norm": 0.26691439747810364,
+      "learning_rate": 1.0378881062128731e-05,
+      "loss": 1.4675,
+      "step": 1282
+    },
+    {
+      "epoch": 0.4334642510241142,
+      "grad_norm": 0.2624582350254059,
+      "learning_rate": 1.0285543600844804e-05,
+      "loss": 1.468,
+      "step": 1283
+    },
+    {
+      "epoch": 0.43380210312935513,
+      "grad_norm": 0.25681832432746887,
+      "learning_rate": 1.019260497108453e-05,
+      "loss": 1.45,
+      "step": 1284
+    },
+    {
+      "epoch": 0.43413995523459603,
+      "grad_norm": 0.2696521282196045,
+      "learning_rate": 1.010006558601274e-05,
+      "loss": 1.5556,
+      "step": 1285
+    },
+    {
+      "epoch": 0.434477807339837,
+      "grad_norm": 0.26296812295913696,
+      "learning_rate": 1.000792585701934e-05,
+      "loss": 1.4541,
+      "step": 1286
+    },
+    {
+      "epoch": 0.43481565944507794,
+      "grad_norm": 0.2661752700805664,
+      "learning_rate": 9.91618619371757e-06,
+      "loss": 1.4211,
+      "step": 1287
+    },
+    {
+      "epoch": 0.43515351155031884,
+      "grad_norm": 0.26171037554740906,
+      "learning_rate": 9.82484700394215e-06,
+      "loss": 1.3575,
+      "step": 1288
+    },
+    {
+      "epoch": 0.4354913636555598,
+      "grad_norm": 0.2706320881843567,
+      "learning_rate": 9.73390869374743e-06,
+      "loss": 1.411,
+      "step": 1289
+    },
+    {
+      "epoch": 0.4358292157608007,
+      "grad_norm": 0.2640978693962097,
+      "learning_rate": 9.643371667405698e-06,
+      "loss": 1.4101,
+      "step": 1290
+    },
+    {
+      "epoch": 0.43616706786604165,
+      "grad_norm": 0.27845901250839233,
+      "learning_rate": 9.553236327405246e-06,
+      "loss": 1.4901,
+      "step": 1291
+    },
+    {
+      "epoch": 0.43650491997128255,
+      "grad_norm": 0.27310383319854736,
+      "learning_rate": 9.463503074448677e-06,
+      "loss": 1.3912,
+      "step": 1292
+    },
+    {
+      "epoch": 0.4368427720765235,
+      "grad_norm": 0.28718850016593933,
+      "learning_rate": 9.374172307451068e-06,
+      "loss": 1.4322,
+      "step": 1293
+    },
+    {
+      "epoch": 0.43718062418176445,
+      "grad_norm": 0.2859232425689697,
+      "learning_rate": 9.285244423538197e-06,
+      "loss": 1.4486,
+      "step": 1294
+    },
+    {
+      "epoch": 0.43751847628700535,
+      "grad_norm": 0.2851870059967041,
+      "learning_rate": 9.196719818044886e-06,
+      "loss": 1.4604,
+      "step": 1295
+    },
+    {
+      "epoch": 0.4378563283922463,
+      "grad_norm": 0.28842806816101074,
+      "learning_rate": 9.108598884513053e-06,
+      "loss": 1.5381,
+      "step": 1296
+    },
+    {
+      "epoch": 0.4381941804974872,
+      "grad_norm": 0.30437952280044556,
+      "learning_rate": 9.020882014690136e-06,
+      "loss": 1.4684,
+      "step": 1297
+    },
+    {
+      "epoch": 0.43853203260272816,
+      "grad_norm": 0.3386310338973999,
+      "learning_rate": 8.933569598527247e-06,
+      "loss": 1.4873,
+      "step": 1298
+    },
+    {
+      "epoch": 0.43886988470796906,
+      "grad_norm": 0.3568730652332306,
+      "learning_rate": 8.846662024177477e-06,
+      "loss": 1.397,
+      "step": 1299
+    },
+    {
+      "epoch": 0.43920773681321,
+      "grad_norm": 0.5049918293952942,
+      "learning_rate": 8.760159677994172e-06,
+      "loss": 1.2596,
+      "step": 1300
+    },
+    {
+      "epoch": 0.43954558891845097,
+      "grad_norm": 0.196341410279274,
+      "learning_rate": 8.674062944529216e-06,
+      "loss": 1.4375,
+      "step": 1301
+    },
+    {
+      "epoch": 0.43988344102369187,
+      "grad_norm": 0.20131823420524597,
+      "learning_rate": 8.588372206531292e-06,
+      "loss": 1.4903,
+      "step": 1302
+    },
+    {
+      "epoch": 0.4402212931289328,
+      "grad_norm": 0.20620301365852356,
+      "learning_rate": 8.503087844944213e-06,
+      "loss": 1.5428,
+      "step": 1303
+    },
+    {
+      "epoch": 0.4405591452341737,
+      "grad_norm": 0.20495443046092987,
+      "learning_rate": 8.418210238905256e-06,
+      "loss": 1.4226,
+      "step": 1304
+    },
+    {
+      "epoch": 0.4408969973394147,
+      "grad_norm": 0.20458513498306274,
+      "learning_rate": 8.333739765743398e-06,
+      "loss": 1.4374,
+      "step": 1305
+    },
+    {
+      "epoch": 0.4412348494446556,
+      "grad_norm": 0.19676244258880615,
+      "learning_rate": 8.249676800977658e-06,
+      "loss": 1.3949,
+      "step": 1306
+    },
+    {
+      "epoch": 0.44157270154989653,
+      "grad_norm": 0.2105901539325714,
+      "learning_rate": 8.16602171831553e-06,
+      "loss": 1.5368,
+      "step": 1307
+    },
+    {
+      "epoch": 0.4419105536551375,
+      "grad_norm": 0.2150135338306427,
+      "learning_rate": 8.082774889651168e-06,
+      "loss": 1.442,
+      "step": 1308
+    },
+    {
+      "epoch": 0.4422484057603784,
+      "grad_norm": 0.21322043240070343,
+      "learning_rate": 7.999936685063835e-06,
+      "loss": 1.3787,
+      "step": 1309
+    },
+    {
+      "epoch": 0.44258625786561934,
+      "grad_norm": 0.21862493455410004,
+      "learning_rate": 7.91750747281621e-06,
+      "loss": 1.4595,
+      "step": 1310
+    },
+    {
+      "epoch": 0.44292410997086024,
+      "grad_norm": 0.2159496247768402,
+      "learning_rate": 7.835487619352811e-06,
+      "loss": 1.4227,
+      "step": 1311
+    },
+    {
+      "epoch": 0.4432619620761012,
+      "grad_norm": 0.22969207167625427,
+      "learning_rate": 7.753877489298244e-06,
+      "loss": 1.4936,
+      "step": 1312
+    },
+    {
+      "epoch": 0.44359981418134214,
+      "grad_norm": 0.23035991191864014,
+      "learning_rate": 7.67267744545579e-06,
+      "loss": 1.459,
+      "step": 1313
+    },
+    {
+      "epoch": 0.44393766628658304,
+      "grad_norm": 0.2281055748462677,
+      "learning_rate": 7.591887848805545e-06,
+      "loss": 1.5041,
+      "step": 1314
+    },
+    {
+      "epoch": 0.444275518391824,
+      "grad_norm": 0.23097485303878784,
+      "learning_rate": 7.5115090585029966e-06,
+      "loss": 1.4415,
+      "step": 1315
+    },
+    {
+      "epoch": 0.4446133704970649,
+      "grad_norm": 0.22556231915950775,
+      "learning_rate": 7.431541431877342e-06,
+      "loss": 1.347,
+      "step": 1316
+    },
+    {
+      "epoch": 0.44495122260230585,
+      "grad_norm": 0.2288346290588379,
+      "learning_rate": 7.351985324429933e-06,
+      "loss": 1.384,
+      "step": 1317
+    },
+    {
+      "epoch": 0.44528907470754675,
+      "grad_norm": 0.22915063798427582,
+      "learning_rate": 7.272841089832694e-06,
+      "loss": 1.4946,
+      "step": 1318
+    },
+    {
+      "epoch": 0.4456269268127877,
+      "grad_norm": 0.2333012819290161,
+      "learning_rate": 7.194109079926514e-06,
+      "loss": 1.4418,
+      "step": 1319
+    },
+    {
+      "epoch": 0.44596477891802866,
+      "grad_norm": 0.237061008810997,
+      "learning_rate": 7.115789644719728e-06,
+      "loss": 1.4299,
+      "step": 1320
+    },
+    {
+      "epoch": 0.44630263102326956,
+      "grad_norm": 0.23179931938648224,
+      "learning_rate": 7.037883132386547e-06,
+      "loss": 1.4275,
+      "step": 1321
+    },
+    {
+      "epoch": 0.4466404831285105,
+      "grad_norm": 0.24053171277046204,
+      "learning_rate": 6.960389889265517e-06,
+      "loss": 1.4357,
+      "step": 1322
+    },
+    {
+      "epoch": 0.4469783352337514,
+      "grad_norm": 0.24306833744049072,
+      "learning_rate": 6.883310259857944e-06,
+      "loss": 1.4001,
+      "step": 1323
+    },
+    {
+      "epoch": 0.44731618733899237,
+      "grad_norm": 0.24736307561397552,
+      "learning_rate": 6.806644586826383e-06,
+      "loss": 1.3993,
+      "step": 1324
+    },
+    {
+      "epoch": 0.44765403944423326,
+      "grad_norm": 0.24501316249370575,
+      "learning_rate": 6.730393210993147e-06,
+      "loss": 1.4323,
+      "step": 1325
+    },
+    {
+      "epoch": 0.4479918915494742,
+      "grad_norm": 0.2442789524793625,
+      "learning_rate": 6.654556471338746e-06,
+      "loss": 1.4619,
+      "step": 1326
+    },
+    {
+      "epoch": 0.4483297436547152,
+      "grad_norm": 0.250758558511734,
+      "learning_rate": 6.579134705000412e-06,
+      "loss": 1.4396,
+      "step": 1327
+    },
+    {
+      "epoch": 0.4486675957599561,
+      "grad_norm": 0.25503236055374146,
+      "learning_rate": 6.504128247270546e-06,
+      "loss": 1.4844,
+      "step": 1328
+    },
+    {
+      "epoch": 0.449005447865197,
+      "grad_norm": 0.24753165245056152,
+      "learning_rate": 6.429537431595312e-06,
+      "loss": 1.3958,
+      "step": 1329
+    },
+    {
+      "epoch": 0.4493432999704379,
+      "grad_norm": 0.24918653070926666,
+      "learning_rate": 6.355362589573077e-06,
+      "loss": 1.4146,
+      "step": 1330
+    },
+    {
+      "epoch": 0.4496811520756789,
+      "grad_norm": 0.250940203666687,
+      "learning_rate": 6.2816040509530165e-06,
+      "loss": 1.4475,
+      "step": 1331
+    },
+    {
+      "epoch": 0.4500190041809198,
+      "grad_norm": 0.2660050392150879,
+      "learning_rate": 6.2082621436335475e-06,
+      "loss": 1.479,
+      "step": 1332
+    },
+    {
+      "epoch": 0.45035685628616073,
+      "grad_norm": 0.2663992643356323,
+      "learning_rate": 6.135337193660962e-06,
+      "loss": 1.4689,
+      "step": 1333
+    },
+    {
+      "epoch": 0.4506947083914017,
+      "grad_norm": 0.2747964859008789,
+      "learning_rate": 6.062829525227909e-06,
+      "loss": 1.5964,
+      "step": 1334
+    },
+    {
+      "epoch": 0.4510325604966426,
+      "grad_norm": 0.2579297721385956,
+      "learning_rate": 5.990739460672024e-06,
+      "loss": 1.4453,
+      "step": 1335
+    },
+    {
+      "epoch": 0.45137041260188354,
+      "grad_norm": 0.25830748677253723,
+      "learning_rate": 5.9190673204744255e-06,
+      "loss": 1.3953,
+      "step": 1336
+    },
+    {
+      "epoch": 0.45170826470712444,
+      "grad_norm": 0.2584705650806427,
+      "learning_rate": 5.84781342325833e-06,
+      "loss": 1.4166,
+      "step": 1337
+    },
+    {
+      "epoch": 0.4520461168123654,
+      "grad_norm": 0.25616222620010376,
+      "learning_rate": 5.77697808578761e-06,
+      "loss": 1.4082,
+      "step": 1338
+    },
+    {
+      "epoch": 0.4523839689176063,
+      "grad_norm": 0.26288050413131714,
+      "learning_rate": 5.706561622965467e-06,
+      "loss": 1.3987,
+      "step": 1339
+    },
+    {
+      "epoch": 0.45272182102284725,
+      "grad_norm": 0.2798880338668823,
+      "learning_rate": 5.636564347832907e-06,
+      "loss": 1.4789,
+      "step": 1340
+    },
+    {
+      "epoch": 0.4530596731280882,
+      "grad_norm": 0.2743123769760132,
+      "learning_rate": 5.566986571567401e-06,
+      "loss": 1.4992,
+      "step": 1341
+    },
+    {
+      "epoch": 0.4533975252333291,
+      "grad_norm": 0.27298104763031006,
+      "learning_rate": 5.497828603481569e-06,
+      "loss": 1.4524,
+      "step": 1342
+    },
+    {
+      "epoch": 0.45373537733857006,
+      "grad_norm": 0.2885231673717499,
+      "learning_rate": 5.429090751021704e-06,
+      "loss": 1.5301,
+      "step": 1343
+    },
+    {
+      "epoch": 0.45407322944381096,
+      "grad_norm": 0.28205472230911255,
+      "learning_rate": 5.3607733197664436e-06,
+      "loss": 1.3804,
+      "step": 1344
+    },
+    {
+      "epoch": 0.4544110815490519,
+      "grad_norm": 0.29323282837867737,
+      "learning_rate": 5.2928766134254345e-06,
+      "loss": 1.4331,
+      "step": 1345
+    },
+    {
+      "epoch": 0.4547489336542928,
+      "grad_norm": 0.303227961063385,
+      "learning_rate": 5.225400933837954e-06,
+      "loss": 1.5122,
+      "step": 1346
+    },
+    {
+      "epoch": 0.45508678575953376,
+      "grad_norm": 0.2984352111816406,
     }
   ],
   "logging_steps": 1,
@@ -7928,12 +10561,12 @@
         "should_evaluate": false,
         "should_log": false,
        "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
|
9468 |
+
"learning_rate": 5.158346580971573e-06,
|
9469 |
+
"loss": 1.4675,
|
9470 |
+
"step": 1347
|
9471 |
+
},
|
9472 |
+
{
|
9473 |
+
"epoch": 0.4554246378647747,
|
9474 |
+
"grad_norm": 0.3196500241756439,
|
9475 |
+
"learning_rate": 5.091713852920854e-06,
|
9476 |
+
"loss": 1.4159,
|
9477 |
+
"step": 1348
|
9478 |
+
},
|
9479 |
+
{
|
9480 |
+
"epoch": 0.4557624899700156,
|
9481 |
+
"grad_norm": 0.34708526730537415,
|
9482 |
+
"learning_rate": 5.025503045905933e-06,
|
9483 |
+
"loss": 1.4533,
|
9484 |
+
"step": 1349
|
9485 |
+
},
|
9486 |
+
{
|
9487 |
+
"epoch": 0.45610034207525657,
|
9488 |
+
"grad_norm": 0.45641905069351196,
|
9489 |
+
"learning_rate": 4.959714454271369e-06,
|
9490 |
+
"loss": 1.1837,
|
9491 |
+
"step": 1350
|
9492 |
+
},
|
9493 |
+
{
|
9494 |
+
"epoch": 0.45643819418049747,
|
9495 |
+
"grad_norm": 0.19005750119686127,
|
9496 |
+
"learning_rate": 4.8943483704846475e-06,
|
9497 |
+
"loss": 1.4357,
|
9498 |
+
"step": 1351
|
9499 |
+
},
|
9500 |
+
{
|
9501 |
+
"epoch": 0.4567760462857384,
|
9502 |
+
"grad_norm": 0.2102600336074829,
|
9503 |
+
"learning_rate": 4.829405085134997e-06,
|
9504 |
+
"loss": 1.628,
|
9505 |
+
"step": 1352
|
9506 |
+
},
|
9507 |
+
{
|
9508 |
+
"epoch": 0.4571138983909793,
|
9509 |
+
"grad_norm": 0.20484864711761475,
|
9510 |
+
"learning_rate": 4.764884886932086e-06,
|
9511 |
+
"loss": 1.5274,
|
9512 |
+
"step": 1353
|
9513 |
+
},
|
9514 |
+
{
|
9515 |
+
"epoch": 0.4574517504962203,
|
9516 |
+
"grad_norm": 0.2083808332681656,
|
9517 |
+
"learning_rate": 4.700788062704687e-06,
|
9518 |
+
"loss": 1.5608,
|
9519 |
+
"step": 1354
|
9520 |
+
},
|
9521 |
+
{
|
9522 |
+
"epoch": 0.45778960260146123,
|
9523 |
+
"grad_norm": 0.20618511736392975,
|
9524 |
+
"learning_rate": 4.6371148973994525e-06,
|
9525 |
+
"loss": 1.3876,
|
9526 |
+
"step": 1355
|
9527 |
+
},
|
9528 |
+
{
|
9529 |
+
"epoch": 0.45812745470670213,
|
9530 |
+
"grad_norm": 0.20985640585422516,
|
9531 |
+
"learning_rate": 4.573865674079625e-06,
|
9532 |
+
"loss": 1.4888,
|
9533 |
+
"step": 1356
|
9534 |
+
},
|
9535 |
+
{
|
9536 |
+
"epoch": 0.4584653068119431,
|
9537 |
+
"grad_norm": 0.21005688607692719,
|
9538 |
+
"learning_rate": 4.511040673923828e-06,
|
9539 |
+
"loss": 1.6587,
|
9540 |
+
"step": 1357
|
9541 |
+
},
|
9542 |
+
{
|
9543 |
+
"epoch": 0.458803158917184,
|
9544 |
+
"grad_norm": 0.21912069618701935,
|
9545 |
+
"learning_rate": 4.448640176224694e-06,
|
9546 |
+
"loss": 1.4263,
|
9547 |
+
"step": 1358
|
9548 |
+
},
|
9549 |
+
{
|
9550 |
+
"epoch": 0.45914101102242494,
|
9551 |
+
"grad_norm": 0.21053703129291534,
|
9552 |
+
"learning_rate": 4.386664458387779e-06,
|
9553 |
+
"loss": 1.4085,
|
9554 |
+
"step": 1359
|
9555 |
+
},
|
9556 |
+
{
|
9557 |
+
"epoch": 0.45947886312766584,
|
9558 |
+
"grad_norm": 0.21588748693466187,
|
9559 |
+
"learning_rate": 4.325113795930203e-06,
|
9560 |
+
"loss": 1.4477,
|
9561 |
+
"step": 1360
|
9562 |
+
},
|
9563 |
+
{
|
9564 |
+
"epoch": 0.4598167152329068,
|
9565 |
+
"grad_norm": 0.2162930965423584,
|
9566 |
+
"learning_rate": 4.263988462479484e-06,
|
9567 |
+
"loss": 1.3273,
|
9568 |
+
"step": 1361
|
9569 |
+
},
|
9570 |
+
{
|
9571 |
+
"epoch": 0.46015456733814775,
|
9572 |
+
"grad_norm": 0.22231332957744598,
|
9573 |
+
"learning_rate": 4.203288729772326e-06,
|
9574 |
+
"loss": 1.4126,
|
9575 |
+
"step": 1362
|
9576 |
+
},
|
9577 |
+
{
|
9578 |
+
"epoch": 0.46049241944338865,
|
9579 |
+
"grad_norm": 0.2243518978357315,
|
9580 |
+
"learning_rate": 4.143014867653383e-06,
|
9581 |
+
"loss": 1.4591,
|
9582 |
+
"step": 1363
|
9583 |
+
},
|
9584 |
+
{
|
9585 |
+
"epoch": 0.4608302715486296,
|
9586 |
+
"grad_norm": 0.23123854398727417,
|
9587 |
+
"learning_rate": 4.083167144074073e-06,
|
9588 |
+
"loss": 1.4422,
|
9589 |
+
"step": 1364
|
9590 |
+
},
|
9591 |
+
{
|
9592 |
+
"epoch": 0.4611681236538705,
|
9593 |
+
"grad_norm": 0.23480401933193207,
|
9594 |
+
"learning_rate": 4.023745825091407e-06,
|
9595 |
+
"loss": 1.5316,
|
9596 |
+
"step": 1365
|
9597 |
+
},
|
9598 |
+
{
|
9599 |
+
"epoch": 0.46150597575911145,
|
9600 |
+
"grad_norm": 0.22668781876564026,
|
9601 |
+
"learning_rate": 3.964751174866765e-06,
|
9602 |
+
"loss": 1.4507,
|
9603 |
+
"step": 1366
|
9604 |
+
},
|
9605 |
+
{
|
9606 |
+
"epoch": 0.46184382786435235,
|
9607 |
+
"grad_norm": 0.2280048280954361,
|
9608 |
+
"learning_rate": 3.906183455664725e-06,
|
9609 |
+
"loss": 1.3738,
|
9610 |
+
"step": 1367
|
9611 |
+
},
|
9612 |
+
{
|
9613 |
+
"epoch": 0.4621816799695933,
|
9614 |
+
"grad_norm": 0.22981899976730347,
|
9615 |
+
"learning_rate": 3.84804292785198e-06,
|
9616 |
+
"loss": 1.3908,
|
9617 |
+
"step": 1368
|
9618 |
+
},
|
9619 |
+
{
|
9620 |
+
"epoch": 0.46251953207483426,
|
9621 |
+
"grad_norm": 0.2282203584909439,
|
9622 |
+
"learning_rate": 3.7903298498960572e-06,
|
9623 |
+
"loss": 1.4036,
|
9624 |
+
"step": 1369
|
9625 |
+
},
|
9626 |
+
{
|
9627 |
+
"epoch": 0.46285738418007516,
|
9628 |
+
"grad_norm": 0.24845145642757416,
|
9629 |
+
"learning_rate": 3.7330444783642338e-06,
|
9630 |
+
"loss": 1.4999,
|
9631 |
+
"step": 1370
|
9632 |
+
},
|
9633 |
+
{
|
9634 |
+
"epoch": 0.4631952362853161,
|
9635 |
+
"grad_norm": 0.239670068025589,
|
9636 |
+
"learning_rate": 3.676187067922421e-06,
|
9637 |
+
"loss": 1.4212,
|
9638 |
+
"step": 1371
|
9639 |
+
},
|
9640 |
+
{
|
9641 |
+
"epoch": 0.463533088390557,
|
9642 |
+
"grad_norm": 0.24668945372104645,
|
9643 |
+
"learning_rate": 3.619757871333973e-06,
|
9644 |
+
"loss": 1.4853,
|
9645 |
+
"step": 1372
|
9646 |
+
},
|
9647 |
+
{
|
9648 |
+
"epoch": 0.46387094049579797,
|
9649 |
+
"grad_norm": 0.24276994168758392,
|
9650 |
+
"learning_rate": 3.563757139458579e-06,
|
9651 |
+
"loss": 1.4339,
|
9652 |
+
"step": 1373
|
9653 |
+
},
|
9654 |
+
{
|
9655 |
+
"epoch": 0.4642087926010389,
|
9656 |
+
"grad_norm": 0.24644066393375397,
|
9657 |
+
"learning_rate": 3.5081851212512175e-06,
|
9658 |
+
"loss": 1.4848,
|
9659 |
+
"step": 1374
|
9660 |
+
},
|
9661 |
+
{
|
9662 |
+
"epoch": 0.4645466447062798,
|
9663 |
+
"grad_norm": 0.24790909886360168,
|
9664 |
+
"learning_rate": 3.4530420637609363e-06,
|
9665 |
+
"loss": 1.4527,
|
9666 |
+
"step": 1375
|
9667 |
+
},
|
9668 |
+
{
|
9669 |
+
"epoch": 0.4648844968115208,
|
9670 |
+
"grad_norm": 0.2538505792617798,
|
9671 |
+
"learning_rate": 3.3983282121298086e-06,
|
9672 |
+
"loss": 1.4475,
|
9673 |
+
"step": 1376
|
9674 |
+
},
|
9675 |
+
{
|
9676 |
+
"epoch": 0.4652223489167617,
|
9677 |
+
"grad_norm": 0.2481241375207901,
|
9678 |
+
"learning_rate": 3.3440438095919126e-06,
|
9679 |
+
"loss": 1.454,
|
9680 |
+
"step": 1377
|
9681 |
+
},
|
9682 |
+
{
|
9683 |
+
"epoch": 0.46556020102200263,
|
9684 |
+
"grad_norm": 0.25160735845565796,
|
9685 |
+
"learning_rate": 3.290189097472096e-06,
|
9686 |
+
"loss": 1.425,
|
9687 |
+
"step": 1378
|
9688 |
+
},
|
9689 |
+
{
|
9690 |
+
"epoch": 0.4658980531272435,
|
9691 |
+
"grad_norm": 0.2557906210422516,
|
9692 |
+
"learning_rate": 3.236764315185037e-06,
|
9693 |
+
"loss": 1.4421,
|
9694 |
+
"step": 1379
|
9695 |
+
},
|
9696 |
+
{
|
9697 |
+
"epoch": 0.4662359052324845,
|
9698 |
+
"grad_norm": 0.24841107428073883,
|
9699 |
+
"learning_rate": 3.1837697002341293e-06,
|
9700 |
+
"loss": 1.4462,
|
9701 |
+
"step": 1380
|
9702 |
+
},
|
9703 |
+
{
|
9704 |
+
"epoch": 0.46657375733772544,
|
9705 |
+
"grad_norm": 0.2620057463645935,
|
9706 |
+
"learning_rate": 3.131205488210409e-06,
|
9707 |
+
"loss": 1.433,
|
9708 |
+
"step": 1381
|
9709 |
+
},
|
9710 |
+
{
|
9711 |
+
"epoch": 0.46691160944296634,
|
9712 |
+
"grad_norm": 0.26040536165237427,
|
9713 |
+
"learning_rate": 3.0790719127915646e-06,
|
9714 |
+
"loss": 1.3973,
|
9715 |
+
"step": 1382
|
9716 |
+
},
|
9717 |
+
{
|
9718 |
+
"epoch": 0.4672494615482073,
|
9719 |
+
"grad_norm": 0.2547360360622406,
|
9720 |
+
"learning_rate": 3.0273692057408265e-06,
|
9721 |
+
"loss": 1.4534,
|
9722 |
+
"step": 1383
|
9723 |
+
},
|
9724 |
+
{
|
9725 |
+
"epoch": 0.4675873136534482,
|
9726 |
+
"grad_norm": 0.2619830071926117,
|
9727 |
+
"learning_rate": 2.976097596905969e-06,
|
9728 |
+
"loss": 1.4949,
|
9729 |
+
"step": 1384
|
9730 |
+
},
|
9731 |
+
{
|
9732 |
+
"epoch": 0.46792516575868914,
|
9733 |
+
"grad_norm": 0.26287898421287537,
|
9734 |
+
"learning_rate": 2.9252573142183326e-06,
|
9735 |
+
"loss": 1.474,
|
9736 |
+
"step": 1385
|
9737 |
+
},
|
9738 |
+
{
|
9739 |
+
"epoch": 0.46826301786393004,
|
9740 |
+
"grad_norm": 0.2676185965538025,
|
9741 |
+
"learning_rate": 2.874848583691714e-06,
|
9742 |
+
"loss": 1.4119,
|
9743 |
+
"step": 1386
|
9744 |
+
},
|
9745 |
+
{
|
9746 |
+
"epoch": 0.468600869969171,
|
9747 |
+
"grad_norm": 0.2610134482383728,
|
9748 |
+
"learning_rate": 2.8248716294214774e-06,
|
9749 |
+
"loss": 1.5111,
|
9750 |
+
"step": 1387
|
9751 |
+
},
|
9752 |
+
{
|
9753 |
+
"epoch": 0.46893872207441195,
|
9754 |
+
"grad_norm": 0.2791317403316498,
|
9755 |
+
"learning_rate": 2.7753266735834338e-06,
|
9756 |
+
"loss": 1.5268,
|
9757 |
+
"step": 1388
|
9758 |
+
},
|
9759 |
+
{
|
9760 |
+
"epoch": 0.46927657417965285,
|
9761 |
+
"grad_norm": 0.2638148367404938,
|
9762 |
+
"learning_rate": 2.7262139364329643e-06,
|
9763 |
+
"loss": 1.4041,
|
9764 |
+
"step": 1389
|
9765 |
+
},
|
9766 |
+
{
|
9767 |
+
"epoch": 0.4696144262848938,
|
9768 |
+
"grad_norm": 0.2758844792842865,
|
9769 |
+
"learning_rate": 2.677533636303964e-06,
|
9770 |
+
"loss": 1.4062,
|
9771 |
+
"step": 1390
|
9772 |
+
},
|
9773 |
+
{
|
9774 |
+
"epoch": 0.4699522783901347,
|
9775 |
+
"grad_norm": 0.26792052388191223,
|
9776 |
+
"learning_rate": 2.6292859896079213e-06,
|
9777 |
+
"loss": 1.4498,
|
9778 |
+
"step": 1391
|
9779 |
+
},
|
9780 |
+
{
|
9781 |
+
"epoch": 0.47029013049537566,
|
9782 |
+
"grad_norm": 0.2740675210952759,
|
9783 |
+
"learning_rate": 2.581471210832931e-06,
|
9784 |
+
"loss": 1.3951,
|
9785 |
+
"step": 1392
|
9786 |
+
},
|
9787 |
+
{
|
9788 |
+
"epoch": 0.47062798260061656,
|
9789 |
+
"grad_norm": 0.27770861983299255,
|
9790 |
+
"learning_rate": 2.5340895125427364e-06,
|
9791 |
+
"loss": 1.4293,
|
9792 |
+
"step": 1393
|
9793 |
+
},
|
9794 |
+
{
|
9795 |
+
"epoch": 0.4709658347058575,
|
9796 |
+
"grad_norm": 0.2883884906768799,
|
9797 |
+
"learning_rate": 2.4871411053757898e-06,
|
9798 |
+
"loss": 1.4896,
|
9799 |
+
"step": 1394
|
9800 |
+
},
|
9801 |
+
{
|
9802 |
+
"epoch": 0.47130368681109847,
|
9803 |
+
"grad_norm": 0.3118283450603485,
|
9804 |
+
"learning_rate": 2.440626198044327e-06,
|
9805 |
+
"loss": 1.4892,
|
9806 |
+
"step": 1395
|
9807 |
+
},
|
9808 |
+
{
|
9809 |
+
"epoch": 0.47164153891633936,
|
9810 |
+
"grad_norm": 0.28643909096717834,
|
9811 |
+
"learning_rate": 2.394544997333437e-06,
|
9812 |
+
"loss": 1.4762,
|
9813 |
+
"step": 1396
|
9814 |
+
},
|
9815 |
+
{
|
9816 |
+
"epoch": 0.4719793910215803,
|
9817 |
+
"grad_norm": 0.2881928086280823,
|
9818 |
+
"learning_rate": 2.3488977081001394e-06,
|
9819 |
+
"loss": 1.4529,
|
9820 |
+
"step": 1397
|
9821 |
+
},
|
9822 |
+
{
|
9823 |
+
"epoch": 0.4723172431268212,
|
9824 |
+
"grad_norm": 0.3157871663570404,
|
9825 |
+
"learning_rate": 2.3036845332724543e-06,
|
9826 |
+
"loss": 1.4082,
|
9827 |
+
"step": 1398
|
9828 |
+
},
|
9829 |
+
{
|
9830 |
+
"epoch": 0.4726550952320622,
|
9831 |
+
"grad_norm": 0.3361440896987915,
|
9832 |
+
"learning_rate": 2.2589056738485324e-06,
|
9833 |
+
"loss": 1.4026,
|
9834 |
+
"step": 1399
|
9835 |
+
},
|
9836 |
+
{
|
9837 |
+
"epoch": 0.47299294733730307,
|
9838 |
+
"grad_norm": 0.48678070306777954,
|
9839 |
+
"learning_rate": 2.2145613288957478e-06,
|
9840 |
+
"loss": 1.3248,
|
9841 |
+
"step": 1400
|
9842 |
+
},
|
9843 |
+
{
|
9844 |
+
"epoch": 0.473330799442544,
|
9845 |
+
"grad_norm": 0.20233768224716187,
|
9846 |
+
"learning_rate": 2.170651695549786e-06,
|
9847 |
+
"loss": 1.5271,
|
9848 |
+
"step": 1401
|
9849 |
+
},
|
9850 |
+
{
|
9851 |
+
"epoch": 0.473668651547785,
|
9852 |
+
"grad_norm": 0.21050798892974854,
|
9853 |
+
"learning_rate": 2.1271769690138332e-06,
|
9854 |
+
"loss": 1.5762,
|
9855 |
+
"step": 1402
|
9856 |
+
},
|
9857 |
+
{
|
9858 |
+
"epoch": 0.4740065036530259,
|
9859 |
+
"grad_norm": 0.2095797210931778,
|
9860 |
+
"learning_rate": 2.084137342557646e-06,
|
9861 |
+
"loss": 1.5743,
|
9862 |
+
"step": 1403
|
9863 |
+
},
|
9864 |
+
{
|
9865 |
+
"epoch": 0.47434435575826683,
|
9866 |
+
"grad_norm": 0.20064674317836761,
|
9867 |
+
"learning_rate": 2.0415330075166937e-06,
|
9868 |
+
"loss": 1.4512,
|
9869 |
+
"step": 1404
|
9870 |
+
},
|
9871 |
+
{
|
9872 |
+
"epoch": 0.47468220786350773,
|
9873 |
+
"grad_norm": 0.20394767820835114,
|
9874 |
+
"learning_rate": 1.9993641532913833e-06,
|
9875 |
+
"loss": 1.4832,
|
9876 |
+
"step": 1405
|
9877 |
+
},
|
9878 |
+
{
|
9879 |
+
"epoch": 0.4750200599687487,
|
9880 |
+
"grad_norm": 0.20908264815807343,
|
9881 |
+
"learning_rate": 1.9576309673461357e-06,
|
9882 |
+
"loss": 1.4248,
|
9883 |
+
"step": 1406
|
9884 |
+
},
|
9885 |
+
{
|
9886 |
+
"epoch": 0.4753579120739896,
|
9887 |
+
"grad_norm": 0.2067255973815918,
|
9888 |
+
"learning_rate": 1.916333635208556e-06,
|
9889 |
+
"loss": 1.5287,
|
9890 |
+
"step": 1407
|
9891 |
+
},
|
9892 |
+
{
|
9893 |
+
"epoch": 0.47569576417923054,
|
9894 |
+
"grad_norm": 0.21107180416584015,
|
9895 |
+
"learning_rate": 1.8754723404686425e-06,
|
9896 |
+
"loss": 1.5148,
|
9897 |
+
"step": 1408
|
9898 |
+
},
|
9899 |
+
{
|
9900 |
+
"epoch": 0.4760336162844715,
|
9901 |
+
"grad_norm": 0.2210325002670288,
|
9902 |
+
"learning_rate": 1.8350472647780116e-06,
|
9903 |
+
"loss": 1.5248,
|
9904 |
+
"step": 1409
|
9905 |
+
},
|
9906 |
+
{
|
9907 |
+
"epoch": 0.4763714683897124,
|
9908 |
+
"grad_norm": 0.2224951684474945,
|
9909 |
+
"learning_rate": 1.7950585878489856e-06,
|
9910 |
+
"loss": 1.4922,
|
9911 |
+
"step": 1410
|
9912 |
+
},
|
9913 |
+
{
|
9914 |
+
"epoch": 0.47670932049495335,
|
9915 |
+
"grad_norm": 0.21566277742385864,
|
9916 |
+
"learning_rate": 1.7555064874538397e-06,
|
9917 |
+
"loss": 1.4383,
|
9918 |
+
"step": 1411
|
9919 |
+
},
|
9920 |
+
{
|
9921 |
+
"epoch": 0.47704717260019425,
|
9922 |
+
"grad_norm": 0.22012431919574738,
|
9923 |
+
"learning_rate": 1.7163911394240672e-06,
|
9924 |
+
"loss": 1.4499,
|
9925 |
+
"step": 1412
|
9926 |
+
},
|
9927 |
+
{
|
9928 |
+
"epoch": 0.4773850247054352,
|
9929 |
+
"grad_norm": 0.21885032951831818,
|
9930 |
+
"learning_rate": 1.6777127176495043e-06,
|
9931 |
+
"loss": 1.4391,
|
9932 |
+
"step": 1413
|
9933 |
+
},
|
9934 |
+
{
|
9935 |
+
"epoch": 0.4777228768106761,
|
9936 |
+
"grad_norm": 0.22390317916870117,
|
9937 |
+
"learning_rate": 1.6394713940776296e-06,
|
9938 |
+
"loss": 1.3692,
|
9939 |
+
"step": 1414
|
9940 |
+
},
|
9941 |
+
{
|
9942 |
+
"epoch": 0.47806072891591705,
|
9943 |
+
"grad_norm": 0.22826632857322693,
|
9944 |
+
"learning_rate": 1.6016673387127646e-06,
|
9945 |
+
"loss": 1.4446,
|
9946 |
+
"step": 1415
|
9947 |
+
},
|
9948 |
+
{
|
9949 |
+
"epoch": 0.478398581021158,
|
9950 |
+
"grad_norm": 0.22638465464115143,
|
9951 |
+
"learning_rate": 1.5643007196153302e-06,
|
9952 |
+
"loss": 1.4462,
|
9953 |
+
"step": 1416
|
9954 |
+
},
|
9955 |
+
{
|
9956 |
+
"epoch": 0.4787364331263989,
|
9957 |
+
"grad_norm": 0.23499830067157745,
|
9958 |
+
"learning_rate": 1.5273717029010925e-06,
|
9959 |
+
"loss": 1.4275,
|
9960 |
+
"step": 1417
|
9961 |
+
},
|
9962 |
+
{
|
9963 |
+
"epoch": 0.47907428523163986,
|
9964 |
+
"grad_norm": 0.236100435256958,
|
9965 |
+
"learning_rate": 1.4908804527404286e-06,
|
9966 |
+
"loss": 1.5108,
|
9967 |
+
"step": 1418
|
9968 |
+
},
|
9969 |
+
{
|
9970 |
+
"epoch": 0.47941213733688076,
|
9971 |
+
"grad_norm": 0.2444576472043991,
|
9972 |
+
"learning_rate": 1.4548271313575835e-06,
|
9973 |
+
"loss": 1.4356,
|
9974 |
+
"step": 1419
|
9975 |
+
},
|
9976 |
+
{
|
9977 |
+
"epoch": 0.4797499894421217,
|
9978 |
+
"grad_norm": 0.23675861954689026,
|
9979 |
+
"learning_rate": 1.4192118990299707e-06,
|
9980 |
+
"loss": 1.4267,
|
9981 |
+
"step": 1420
|
9982 |
+
},
|
9983 |
+
{
|
9984 |
+
"epoch": 0.4800878415473626,
|
9985 |
+
"grad_norm": 0.24428217113018036,
|
9986 |
+
"learning_rate": 1.3840349140874619e-06,
|
9987 |
+
"loss": 1.5211,
|
9988 |
+
"step": 1421
|
9989 |
+
},
|
9990 |
+
{
|
9991 |
+
"epoch": 0.48042569365260357,
|
9992 |
+
"grad_norm": 0.24572604894638062,
|
9993 |
+
"learning_rate": 1.3492963329116537e-06,
|
9994 |
+
"loss": 1.4525,
|
9995 |
+
"step": 1422
|
9996 |
+
},
|
9997 |
+
{
|
9998 |
+
"epoch": 0.4807635457578445,
|
9999 |
+
"grad_norm": 0.2513872981071472,
|
10000 |
+
"learning_rate": 1.3149963099352014e-06,
|
10001 |
+
"loss": 1.4967,
|
10002 |
+
"step": 1423
|
10003 |
+
},
|
10004 |
+
{
|
10005 |
+
"epoch": 0.4811013978630854,
|
10006 |
+
"grad_norm": 0.24693499505519867,
|
10007 |
+
"learning_rate": 1.2811349976411202e-06,
|
10008 |
+
"loss": 1.4472,
|
10009 |
+
"step": 1424
|
10010 |
+
},
|
10011 |
+
{
|
10012 |
+
"epoch": 0.4814392499683264,
|
10013 |
+
"grad_norm": 0.25388646125793457,
|
10014 |
+
"learning_rate": 1.2477125465620853e-06,
|
10015 |
+
"loss": 1.5039,
|
10016 |
+
"step": 1425
|
10017 |
+
},
|
10018 |
+
{
|
10019 |
+
"epoch": 0.4817771020735673,
|
10020 |
+
"grad_norm": 0.25052186846733093,
|
10021 |
+
"learning_rate": 1.2147291052798216e-06,
|
10022 |
+
"loss": 1.4438,
|
10023 |
+
"step": 1426
|
10024 |
+
},
|
10025 |
+
{
|
10026 |
+
"epoch": 0.48211495417880823,
|
10027 |
+
"grad_norm": 0.2511197328567505,
|
10028 |
+
"learning_rate": 1.1821848204243814e-06,
|
10029 |
+
"loss": 1.4077,
|
10030 |
+
"step": 1427
|
10031 |
+
},
|
10032 |
+
{
|
10033 |
+
"epoch": 0.4824528062840492,
|
10034 |
+
"grad_norm": 0.25512370467185974,
|
10035 |
+
"learning_rate": 1.1500798366735233e-06,
|
10036 |
+
"loss": 1.4676,
|
10037 |
+
"step": 1428
|
10038 |
+
},
|
10039 |
+
{
|
10040 |
+
"epoch": 0.4827906583892901,
|
10041 |
+
"grad_norm": 0.2465367168188095,
|
10042 |
+
"learning_rate": 1.1184142967520794e-06,
|
10043 |
+
"loss": 1.3828,
|
10044 |
+
"step": 1429
|
10045 |
+
},
|
10046 |
+
{
|
10047 |
+
"epoch": 0.48312851049453104,
|
10048 |
+
"grad_norm": 0.25415486097335815,
|
10049 |
+
"learning_rate": 1.0871883414312777e-06,
|
10050 |
+
"loss": 1.3957,
|
10051 |
+
"step": 1430
|
10052 |
+
},
|
10053 |
+
{
|
10054 |
+
"epoch": 0.48346636259977194,
|
10055 |
+
"grad_norm": 0.2578563690185547,
|
10056 |
+
"learning_rate": 1.0564021095281652e-06,
|
10057 |
+
"loss": 1.4395,
|
10058 |
+
"step": 1431
|
10059 |
+
},
|
10060 |
+
{
|
10061 |
+
"epoch": 0.4838042147050129,
|
10062 |
+
"grad_norm": 0.2700105607509613,
|
10063 |
+
"learning_rate": 1.0260557379049519e-06,
|
10064 |
+
"loss": 1.4772,
|
10065 |
+
"step": 1432
|
10066 |
+
},
|
10067 |
+
{
|
10068 |
+
"epoch": 0.4841420668102538,
|
10069 |
+
"grad_norm": 0.2679789364337921,
|
10070 |
+
"learning_rate": 9.96149361468457e-07,
|
10071 |
+
"loss": 1.4804,
|
10072 |
+
"step": 1433
|
10073 |
+
},
|
10074 |
+
{
|
10075 |
+
"epoch": 0.48447991891549474,
|
10076 |
+
"grad_norm": 0.2531481683254242,
|
10077 |
+
"learning_rate": 9.66683113169431e-07,
|
10078 |
+
"loss": 1.4105,
|
10079 |
+
"step": 1434
|
10080 |
+
},
|
10081 |
+
{
|
10082 |
+
"epoch": 0.4848177710207357,
|
10083 |
+
"grad_norm": 0.2527173161506653,
|
10084 |
+
"learning_rate": 9.376571240020227e-07,
|
10085 |
+
"loss": 1.4478,
|
10086 |
+
"step": 1435
|
10087 |
+
},
|
10088 |
+
{
|
10089 |
+
"epoch": 0.4851556231259766,
|
10090 |
+
"grad_norm": 0.26817187666893005,
|
10091 |
+
"learning_rate": 9.090715230031688e-07,
|
10092 |
+
"loss": 1.4658,
|
10093 |
+
"step": 1436
|
10094 |
+
},
|
10095 |
+
{
|
10096 |
+
"epoch": 0.48549347523121755,
|
10097 |
+
"grad_norm": 0.2709321677684784,
|
10098 |
+
"learning_rate": 8.809264372520609e-07,
|
10099 |
+
"loss": 1.444,
|
10100 |
+
"step": 1437
|
10101 |
+
},
|
10102 |
+
{
|
10103 |
+
"epoch": 0.48583132733645845,
|
10104 |
+
"grad_norm": 0.2711314260959625,
|
10105 |
+
"learning_rate": 8.532219918695128e-07,
|
10106 |
+
"loss": 1.3911,
|
10107 |
+
"step": 1438
|
10108 |
+
},
|
10109 |
+
{
|
10110 |
+
"epoch": 0.4861691794416994,
|
10111 |
+
"grad_norm": 0.26008379459381104,
|
10112 |
+
"learning_rate": 8.259583100174606e-07,
|
10113 |
+
"loss": 1.3877,
|
10114 |
+
"step": 1439
|
10115 |
+
},
|
10116 |
+
{
|
10117 |
+
"epoch": 0.4865070315469403,
|
10118 |
+
"grad_norm": 0.26714691519737244,
|
10119 |
+
"learning_rate": 7.991355128984079e-07,
|
10120 |
+
"loss": 1.4846,
|
10121 |
+
"step": 1440
|
10122 |
+
},
|
10123 |
+
{
|
10124 |
+
"epoch": 0.48684488365218126,
|
10125 |
+
"grad_norm": 0.27431437373161316,
|
10126 |
+
"learning_rate": 7.727537197548707e-07,
|
10127 |
+
"loss": 1.5039,
|
10128 |
+
"step": 1441
|
10129 |
+
},
|
10130 |
+
{
|
10131 |
+
"epoch": 0.4871827357574222,
|
10132 |
+
"grad_norm": 0.28554314374923706,
|
10133 |
+
"learning_rate": 7.468130478688218e-07,
|
10134 |
+
"loss": 1.4743,
|
10135 |
+
"step": 1442
|
10136 |
+
},
|
10137 |
+
{
|
10138 |
+
"epoch": 0.4875205878626631,
|
10139 |
+
"grad_norm": 0.2806571424007416,
|
10140 |
+
"learning_rate": 7.213136125612586e-07,
|
10141 |
+
"loss": 1.468,
|
10142 |
+
"step": 1443
|
10143 |
+
},
|
10144 |
+
{
|
10145 |
+
"epoch": 0.48785843996790407,
|
10146 |
+
"grad_norm": 0.29014453291893005,
|
10147 |
+
"learning_rate": 6.962555271915805e-07,
|
10148 |
+
"loss": 1.4315,
|
10149 |
+
"step": 1444
|
10150 |
+
},
|
10151 |
+
{
|
10152 |
+
"epoch": 0.48819629207314497,
|
10153 |
+
"grad_norm": 0.2866988480091095,
|
10154 |
+
"learning_rate": 6.716389031571568e-07,
|
10155 |
+
"loss": 1.4511,
|
10156 |
+
"step": 1445
|
10157 |
+
},
|
10158 |
+
{
|
10159 |
+
"epoch": 0.4885341441783859,
|
10160 |
+
"grad_norm": 0.30054572224617004,
|
10161 |
+
"learning_rate": 6.474638498928265e-07,
|
10162 |
+
"loss": 1.4226,
|
10163 |
+
"step": 1446
|
10164 |
+
},
|
10165 |
+
{
|
10166 |
+
"epoch": 0.4888719962836268,
|
10167 |
+
"grad_norm": 0.3014582693576813,
|
10168 |
+
"learning_rate": 6.237304748703543e-07,
|
10169 |
+
"loss": 1.435,
|
10170 |
+
"step": 1447
|
10171 |
+
},
|
10172 |
+
{
|
10173 |
+
"epoch": 0.4892098483888678,
|
10174 |
+
"grad_norm": 0.32816365361213684,
|
10175 |
+
"learning_rate": 6.004388835980423e-07,
|
10176 |
+
"loss": 1.4552,
|
10177 |
+
"step": 1448
|
10178 |
+
},
|
10179 |
+
{
|
10180 |
+
"epoch": 0.48954770049410873,
|
10181 |
+
"grad_norm": 0.36893031001091003,
|
10182 |
+
"learning_rate": 5.77589179620186e-07,
|
10183 |
+
"loss": 1.4099,
|
10184 |
+
"step": 1449
|
10185 |
+
},
|
10186 |
+
{
|
10187 |
+
"epoch": 0.4898855525993496,
|
10188 |
+
"grad_norm": 0.5016908049583435,
|
10189 |
+
"learning_rate": 5.55181464516652e-07,
|
10190 |
+
"loss": 1.2846,
|
10191 |
+
"step": 1450
|
10192 |
+
},
|
10193 |
+
{
|
10194 |
+
"epoch": 0.4902234047045906,
|
10195 |
+
"grad_norm": 0.1944635808467865,
|
10196 |
+
"learning_rate": 5.332158379024122e-07,
|
10197 |
+
"loss": 1.4562,
|
10198 |
+
"step": 1451
|
10199 |
+
},
|
10200 |
+
{
|
10201 |
+
"epoch": 0.4905612568098315,
|
10202 |
+
"grad_norm": 0.20273853838443756,
|
10203 |
+
"learning_rate": 5.116923974270993e-07,
|
10204 |
+
"loss": 1.5634,
|
10205 |
+
"step": 1452
|
10206 |
+
},
|
10207 |
+
{
|
10208 |
+
"epoch": 0.49089910891507244,
|
10209 |
+
"grad_norm": 0.20178069174289703,
|
10210 |
+
"learning_rate": 4.906112387745965e-07,
|
10211 |
+
"loss": 1.4923,
|
10212 |
+
"step": 1453
|
10213 |
+
},
|
10214 |
+
{
|
10215 |
+
"epoch": 0.49123696102031333,
|
10216 |
+
"grad_norm": 0.20075154304504395,
|
10217 |
+
"learning_rate": 4.6997245566257064e-07,
|
10218 |
+
"loss": 1.4629,
|
10219 |
+
"step": 1454
|
10220 |
+
},
|
10221 |
+
{
|
10222 |
+
"epoch": 0.4915748131255543,
|
10223 |
+
"grad_norm": 0.20780137181282043,
|
10224 |
+
"learning_rate": 4.497761398421063e-07,
|
10225 |
+
"loss": 1.4992,
|
10226 |
+
"step": 1455
|
10227 |
+
},
|
10228 |
+
{
|
10229 |
+
"epoch": 0.49191266523079524,
|
10230 |
+
"grad_norm": 0.21015407145023346,
|
10231 |
+
"learning_rate": 4.3002238109723927e-07,
|
10232 |
+
"loss": 1.508,
|
10233 |
+
"step": 1456
|
10234 |
+
},
|
10235 |
+
{
|
10236 |
+
"epoch": 0.49225051733603614,
|
10237 |
+
"grad_norm": 0.2047802358865738,
|
10238 |
+
"learning_rate": 4.107112672446123e-07,
|
10239 |
+
"loss": 1.5003,
|
10240 |
+
"step": 1457
|
10241 |
+
},
|
10242 |
+
{
|
10243 |
+
"epoch": 0.4925883694412771,
|
10244 |
+
"grad_norm": 0.20468640327453613,
|
10245 |
+
"learning_rate": 3.9184288413306456e-07,
|
10246 |
+
"loss": 1.3998,
|
10247 |
+
"step": 1458
|
10248 |
+
},
|
10249 |
+
{
|
10250 |
+
"epoch": 0.492926221546518,
|
10251 |
+
"grad_norm": 0.2083989530801773,
|
10252 |
+
"learning_rate": 3.734173156432208e-07,
|
10253 |
+
"loss": 1.5097,
|
10254 |
+
"step": 1459
|
10255 |
+
},
|
10256 |
+
{
|
10257 |
+
"epoch": 0.49326407365175895,
|
10258 |
+
"grad_norm": 0.20935308933258057,
|
10259 |
+
"learning_rate": 3.554346436871581e-07,
|
10260 |
+
"loss": 1.4561,
|
10261 |
+
"step": 1460
|
10262 |
+
},
|
10263 |
+
{
|
10264 |
+
"epoch": 0.49360192575699985,
|
10265 |
+
"grad_norm": 0.22204406559467316,
|
10266 |
+
"learning_rate": 3.3789494820803957e-07,
|
10267 |
+
"loss": 1.516,
|
10268 |
+
"step": 1461
|
10269 |
+
},
|
10270 |
+
{
|
10271 |
+
"epoch": 0.4939397778622408,
|
10272 |
+
"grad_norm": 0.21647249162197113,
|
10273 |
+
"learning_rate": 3.2079830717972606e-07,
|
10274 |
+
"loss": 1.4333,
|
10275 |
+
"step": 1462
|
10276 |
+
},
|
10277 |
+
{
|
10278 |
+
"epoch": 0.49427762996748176,
|
10279 |
+
"grad_norm": 0.22233545780181885,
|
10280 |
+
"learning_rate": 3.041447966064648e-07,
|
10281 |
+
"loss": 1.4746,
|
10282 |
+
"step": 1463
|
10283 |
+
},
|
10284 |
+
{
|
10285 |
+
"epoch": 0.49461548207272266,
|
10286 |
+
"grad_norm": 0.22757571935653687,
|
10287 |
+
"learning_rate": 2.8793449052254563e-07,
|
10288 |
+
"loss": 1.5024,
|
10289 |
+
"step": 1464
|
10290 |
+
},
|
10291 |
+
{
|
10292 |
+
"epoch": 0.4949533341779636,
|
10293 |
+
"grad_norm": 0.23233623802661896,
|
10294 |
+
"learning_rate": 2.721674609919345e-07,
|
10295 |
+
"loss": 1.4857,
|
10296 |
+
"step": 1465
|
10297 |
+
},
|
10298 |
+
{
|
10299 |
+
"epoch": 0.4952911862832045,
|
10300 |
+
"grad_norm": 0.22455690801143646,
|
10301 |
+
"learning_rate": 2.568437781080069e-07,
|
10302 |
+
"loss": 1.3257,
|
10303 |
+
"step": 1466
|
10304 |
+
},
|
10305 |
+
{
|
10306 |
+
"epoch": 0.49562903838844546,
|
10307 |
+
"grad_norm": 0.2350911796092987,
|
10308 |
+
"learning_rate": 2.4196350999320384e-07,
|
10309 |
+
"loss": 1.4795,
|
10310 |
+
"step": 1467
|
10311 |
+
},
|
10312 |
+
{
|
10313 |
+
"epoch": 0.49596689049368636,
|
10314 |
+
"grad_norm": 0.25203824043273926,
|
10315 |
+
"learning_rate": 2.275267227987321e-07,
|
10316 |
+
"loss": 1.5831,
|
10317 |
+
"step": 1468
|
10318 |
+
},
|
10319 |
+
{
|
10320 |
+
"epoch": 0.4963047425989273,
|
10321 |
+
"grad_norm": 0.2257823795080185,
|
10322 |
+
"learning_rate": 2.135334807042866e-07,
|
10323 |
+
"loss": 1.4099,
|
10324 |
+
"step": 1469
|
10325 |
+
},
|
10326 |
+
{
|
10327 |
+
"epoch": 0.49664259470416827,
|
10328 |
+
"grad_norm": 0.23401281237602234,
|
10329 |
+
"learning_rate": 1.9998384591773944e-07,
|
10330 |
+
"loss": 1.413,
|
10331 |
+
"step": 1470
|
10332 |
+
},
|
10333 |
+
{
|
10334 |
+
"epoch": 0.49698044680940917,
|
10335 |
+
"grad_norm": 0.23719368875026703,
|
10336 |
+
"learning_rate": 1.8687787867489592e-07,
|
10337 |
+
"loss": 1.4406,
|
10338 |
+
"step": 1471
|
10339 |
+
},
|
10340 |
+
{
|
10341 |
+
"epoch": 0.4973182989146501,
|
10342 |
+
"grad_norm": 0.2402154505252838,
|
10343 |
+
"learning_rate": 1.7421563723919454e-07,
|
10344 |
+
"loss": 1.3499,
|
10345 |
+
"step": 1472
|
10346 |
+
},
|
10347 |
+
{
|
10348 |
+
"epoch": 0.497656151019891,
|
10349 |
+
"grad_norm": 0.238765686750412,
|
10350 |
+
"learning_rate": 1.6199717790145174e-07,
|
10351 |
+
"loss": 1.489,
|
10352 |
+
"step": 1473
|
10353 |
+
},
|
10354 |
+
{
|
10355 |
+
"epoch": 0.497994003125132,
|
10356 |
+
"grad_norm": 0.23822593688964844,
|
10357 |
+
"learning_rate": 1.5022255497962879e-07,
|
10358 |
+
"loss": 1.3948,
|
10359 |
+
"step": 1474
|
10360 |
+
},
|
10361 |
+
{
|
10362 |
+
"epoch": 0.4983318552303729,
|
10363 |
+
"grad_norm": 0.24517039954662323,
|
10364 |
+
"learning_rate": 1.3889182081860962e-07,
|
10365 |
+
"loss": 1.401,
|
10366 |
+
"step": 1475
|
10367 |
+
},
|
10368 |
+
{
|
10369 |
+
"epoch": 0.49866970733561383,
|
10370 |
+
"grad_norm": 0.25566139817237854,
|
10371 |
+
"learning_rate": 1.2800502578991235e-07,
|
10372 |
+
"loss": 1.5096,
|
10373 |
+
"step": 1476
|
10374 |
+
},
|
10375 |
+
{
|
10376 |
+
"epoch": 0.4990075594408548,
|
10377 |
+
"grad_norm": 0.24511995911598206,
|
10378 |
+
"learning_rate": 1.1756221829148928e-07,
|
10379 |
+
"loss": 1.4267,
|
10380 |
+
"step": 1477
|
10381 |
+
},
|
10382 |
+
{
|
10383 |
+
"epoch": 0.4993454115460957,
|
10384 |
+
"grad_norm": 0.2521734833717346,
|
10385 |
+
"learning_rate": 1.0756344474753821e-07,
|
10386 |
+
"loss": 1.4738,
|
10387 |
+
"step": 1478
|
10388 |
+
},
|
10389 |
+
{
|
10390 |
+
"epoch": 0.49968326365133664,
|
10391 |
+
"grad_norm": 0.24908527731895447,
|
10392 |
+
"learning_rate": 9.800874960826933e-08,
|
10393 |
+
"loss": 1.443,
|
10394 |
+
"step": 1479
|
10395 |
+
},
|
10396 |
+
{
|
10397 |
+
"epoch": 0.5000211157565776,
|
10398 |
+
"grad_norm": 0.25004932284355164,
|
10399 |
+
"learning_rate": 8.889817534969425e-08,
|
10400 |
+
"loss": 1.4288,
|
10401 |
+
"step": 1480
|
10402 |
+
},
|
10403 |
+
{
|
10404 |
+
"epoch": 0.5003589678618184,
|
10405 |
+
"grad_norm": 0.25815922021865845,
|
10406 |
+
"learning_rate": 8.023176247348163e-08,
|
10407 |
+
"loss": 1.4432,
|
10408 |
+
"step": 1481
|
10409 |
+
},
|
10410 |
+
{
|
10411 |
+
"epoch": 0.5006968199670594,
|
10412 |
+
"grad_norm": 0.25589337944984436,
|
10413 |
+
"learning_rate": 7.200954950673522e-08,
|
10414 |
+
"loss": 1.3795,
|
10415 |
+
"step": 1482
|
10416 |
+
},
|
10417 |
+
{
|
10418 |
+
"epoch": 0.5010346720723003,
|
10419 |
+
"grad_norm": 0.25801023840904236,
|
10420 |
+
"learning_rate": 6.423157300184946e-08,
|
10421 |
+
"loss": 1.4741,
|
10422 |
+
"step": 1483
|
10423 |
+
},
|
10424 |
+
{
|
10425 |
+
"epoch": 0.5013725241775413,
|
10426 |
+
"grad_norm": 0.2608857750892639,
|
10427 |
+
"learning_rate": 5.6897867536331864e-08,
|
10428 |
+
"loss": 1.4923,
|
10429 |
+
"step": 1484
|
10430 |
+
},
|
10431 |
+
{
|
10432 |
+
"epoch": 0.5017103762827823,
|
10433 |
+
"grad_norm": 0.2637975215911865,
|
10434 |
+
"learning_rate": 5.000846571264761e-08,
|
10435 |
+
"loss": 1.4454,
|
10436 |
+
"step": 1485
|
10437 |
+
},
|
10438 |
+
{
|
10439 |
+
"epoch": 0.5020482283880231,
|
10440 |
+
"grad_norm": 0.27491477131843567,
|
10441 |
+
"learning_rate": 4.35633981580974e-08,
|
10442 |
+
"loss": 1.4514,
|
10443 |
+
"step": 1486
|
10444 |
+
},
|
10445 |
+
{
|
10446 |
+
"epoch": 0.502386080493264,
|
10447 |
+
"grad_norm": 0.2614571154117584,
|
10448 |
+
"learning_rate": 3.756269352462871e-08,
|
10449 |
+
"loss": 1.4438,
|
10450 |
+
"step": 1487
|
10451 |
+
},
|
10452 |
+
{
|
10453 |
+
"epoch": 0.502723932598505,
|
10454 |
+
"grad_norm": 0.26092520356178284,
|
10455 |
+
"learning_rate": 3.20063784888025e-08,
|
10456 |
+
"loss": 1.4221,
|
10457 |
+
"step": 1488
|
10458 |
+
},
|
10459 |
+
{
|
10460 |
+
"epoch": 0.503061784703746,
|
10461 |
+
"grad_norm": 0.2608908712863922,
|
10462 |
+
"learning_rate": 2.6894477751548964e-08,
|
10463 |
+
"loss": 1.4017,
|
10464 |
+
"step": 1489
|
10465 |
+
},
|
10466 |
+
{
|
10467 |
+
"epoch": 0.5033996368089869,
|
10468 |
+
"grad_norm": 0.27233171463012695,
|
10469 |
+
"learning_rate": 2.222701403818972e-08,
|
10470 |
+
"loss": 1.4314,
|
10471 |
+
"step": 1490
|
10472 |
+
},
|
10473 |
+
{
|
10474 |
+
"epoch": 0.5037374889142278,
|
10475 |
+
"grad_norm": 0.27723515033721924,
|
10476 |
+
"learning_rate": 1.8004008098226887e-08,
|
10477 |
+
"loss": 1.4268,
|
10478 |
+
"step": 1491
|
10479 |
+
},
|
10480 |
+
{
|
10481 |
+
"epoch": 0.5040753410194687,
|
10482 |
+
"grad_norm": 0.2781209945678711,
|
10483 |
+
"learning_rate": 1.4225478705309769e-08,
|
10484 |
+
"loss": 1.4213,
|
10485 |
+
"step": 1492
|
10486 |
+
},
|
10487 |
+
{
|
10488 |
+
"epoch": 0.5044131931247097,
|
10489 |
+
"grad_norm": 0.26957958936691284,
|
10490 |
+
"learning_rate": 1.0891442657134932e-08,
|
10491 |
+
"loss": 1.4145,
|
10492 |
+
"step": 1493
|
10493 |
+
},
|
10494 |
+
{
|
10495 |
+
"epoch": 0.5047510452299506,
|
10496 |
+
"grad_norm": 0.2852826714515686,
|
10497 |
+
"learning_rate": 8.001914775401798e-09,
|
10498 |
+
"loss": 1.4797,
|
10499 |
+
"step": 1494
|
10500 |
+
},
|
10501 |
+
{
|
10502 |
+
"epoch": 0.5050888973351915,
|
10503 |
+
"grad_norm": 0.28609928488731384,
|
10504 |
+
"learning_rate": 5.5569079056794206e-09,
|
10505 |
+
"loss": 1.4738,
|
10506 |
+
"step": 1495
|
10507 |
+
},
|
10508 |
+
{
|
10509 |
+
"epoch": 0.5054267494404324,
|
10510 |
+
"grad_norm": 0.3007526695728302,
|
10511 |
+
"learning_rate": 3.5564329174064824e-09,
|
10512 |
+
"loss": 1.4873,
|
10513 |
+
"step": 1496
|
10514 |
+
},
|
10515 |
+
{
|
10516 |
+
"epoch": 0.5057646015456734,
|
10517 |
+
"grad_norm": 0.3054019808769226,
|
10518 |
+
"learning_rate": 2.0004987038246824e-09,
|
10519 |
+
"loss": 1.4303,
|
10520 |
+
"step": 1497
|
10521 |
+
},
|
10522 |
+
{
|
10523 |
+
"epoch": 0.5061024536509143,
|
10524 |
+
"grad_norm": 0.3101552724838257,
|
10525 |
+
"learning_rate": 8.891121819565306e-10,
|
10526 |
+
"loss": 1.4619,
|
10527 |
+
"step": 1498
|
10528 |
+
},
|
10529 |
+
{
|
10530 |
+
"epoch": 0.5064403057561553,
|
10531 |
+
"grad_norm": 0.3414049446582794,
|
10532 |
+
"learning_rate": 2.2227829252763344e-10,
|
10533 |
+
"loss": 1.4851,
|
10534 |
+
"step": 1499
|
10535 |
+
},
|
10536 |
+
{
|
10537 |
+
"epoch": 0.5067781578613961,
|
10538 |
+
"grad_norm": 0.4380083680152893,
|
10539 |
+
"learning_rate": 0.0,
|
10540 |
+
"loss": 1.3147,
|
10541 |
+
"step": 1500
|
10542 |
+
},
|
10543 |
+
{
|
10544 |
+
"epoch": 0.5067781578613961,
|
10545 |
+
"eval_loss": 1.448108434677124,
|
10546 |
+
"eval_runtime": 247.4,
|
10547 |
+
"eval_samples_per_second": 20.15,
|
10548 |
+
"eval_steps_per_second": 10.077,
|
10549 |
+
"step": 1500
|
10550 |
}
   ],
   "logging_steps": 1,

         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
+  "total_flos": 5.735797142543401e+17,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null