Training in progress, step 20000, checkpoint
- last-checkpoint/adapter_model.safetensors +1 -1
- last-checkpoint/global_step20000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt +3 -0
- last-checkpoint/global_step20000/mp_rank_00_model_states.pt +3 -0
- last-checkpoint/latest +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/trainer_state.json +1403 -3
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:670b323c30234183d76d69beccc409356cf0f3cf5a7d1a121393f17528156a5b
 size 42002584
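Note: the .safetensors file is tracked through Git LFS, so the diff above only changes the three-line pointer (spec version, sha256 oid, byte size), not the 42 MB adapter weights themselves. A minimal sketch of reading that pointer format; parse_lfs_pointer is a hypothetical helper name, not part of any library:

# Minimal sketch: parse a Git LFS pointer file like the one in the diff above.
# The "version / oid / size" layout follows the Git LFS pointer spec.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return {
        "version": fields["version"],
        "oid": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:670b323c30234183d76d69beccc409356cf0f3cf5a7d1a121393f17528156a5b
size 42002584
"""
print(parse_lfs_pointer(pointer))  # oid and size of the actual adapter weights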
last-checkpoint/global_step20000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9b4dec58967ce19e650ca78fff3c6c1fd9a38a7b596d0ecbe4cb869fa55ef2f4
+size 251710672
last-checkpoint/global_step20000/mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1af1542174ddc650b9872a385e289575ed8714b70f8719703fe43d6c6b31928c
+size 153747385
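The two global_step20000 files are DeepSpeed ZeRO shards: the bf16 optimizer partition for rank 0 plus the rank-0 module states. Assuming the checkpoint was written by DeepSpeed (as this layout suggests), its bundled zero_to_fp32 utility can consolidate such shards into a single fp32 state dict; a sketch, not a verified recipe for this exact repo:

# Sketch: consolidate the ZeRO shards under last-checkpoint/ into one
# fp32 state dict. Assumes DeepSpeed is installed and wrote this layout;
# the loader reads the "latest" tag file to locate global_step20000/.
from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint

state_dict = get_fp32_state_dict_from_zero_checkpoint("last-checkpoint")
print(f"{sum(t.numel() for t in state_dict.values()):,} consolidated parameters")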
last-checkpoint/latest
CHANGED
@@ -1 +1 @@
-
+global_step20000
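The latest file is DeepSpeed's checkpoint tag: a one-line pointer naming the global_step directory to load when resuming. With the HF Trainer, a checkpoint laid out like this one would typically be resumed as sketched below, where trainer stands for an already-configured transformers.Trainer instance (hypothetical here):

# Sketch: resume training from this commit's checkpoint directory.
# DeepSpeed consults last-checkpoint/latest to find global_step20000,
# and rng_state.pth restores the data-order/dropout RNG state.
trainer.train(resume_from_checkpoint="last-checkpoint")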
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e8a20a83af5159ef1b89e6a2a6b6443f1d926224c7e753ec250bdffb3b1a9056
 size 14244
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.5971397008330099,
   "eval_steps": 1000,
-  "global_step":
+  "global_step": 20000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -1414,6 +1414,1406 @@
       "learning_rate": 0.0001971445551117435,
       "loss": 1.4018,
       "step": 10000
+    },
+    {
+      "epoch": 0.30006269966858745,
+      "grad_norm": 4.7913641929626465,
+      "learning_rate": 0.0001971302693158247,
+      "loss": 1.4228,
+      "step": 10050
+    },
+    {
+      "epoch": 0.30155554892067,
+      "grad_norm": 5.312823295593262,
+      "learning_rate": 0.00019711598351990583,
+      "loss": 1.3866,
+      "step": 10100
+    },
+    {
+      "epoch": 0.30304839817275253,
+      "grad_norm": 4.099662780761719,
+      "learning_rate": 0.00019710169772398702,
+      "loss": 1.454,
+      "step": 10150
+    },
+    {
+      "epoch": 0.304541247424835,
+      "grad_norm": 4.254878520965576,
+      "learning_rate": 0.00019708741192806816,
+      "loss": 1.3526,
+      "step": 10200
+    },
+    {
+      "epoch": 0.30603409667691756,
+      "grad_norm": 4.056606292724609,
+      "learning_rate": 0.00019707312613214935,
+      "loss": 1.4167,
+      "step": 10250
+    },
+    {
+      "epoch": 0.3075269459290001,
+      "grad_norm": 3.790809154510498,
+      "learning_rate": 0.0001970588403362305,
+      "loss": 1.3536,
+      "step": 10300
+    },
+    {
+      "epoch": 0.30901979518108263,
+      "grad_norm": 4.46298360824585,
+      "learning_rate": 0.00019704455454031168,
+      "loss": 1.3613,
+      "step": 10350
+    },
+    {
+      "epoch": 0.3105126444331651,
+      "grad_norm": 4.52452278137207,
+      "learning_rate": 0.00019703026874439284,
+      "loss": 1.4591,
+      "step": 10400
+    },
+    {
+      "epoch": 0.31200549368524766,
+      "grad_norm": 4.735177040100098,
+      "learning_rate": 0.000197015982948474,
+      "loss": 1.4617,
+      "step": 10450
+    },
+    {
+      "epoch": 0.3134983429373302,
+      "grad_norm": 4.48261833190918,
+      "learning_rate": 0.00019700169715255517,
+      "loss": 1.4072,
+      "step": 10500
+    },
+    {
+      "epoch": 0.31499119218941274,
+      "grad_norm": 3.3441503047943115,
+      "learning_rate": 0.00019698741135663634,
+      "loss": 1.4082,
+      "step": 10550
+    },
+    {
+      "epoch": 0.3164840414414952,
+      "grad_norm": 3.9771218299865723,
+      "learning_rate": 0.0001969731255607175,
+      "loss": 1.4184,
+      "step": 10600
+    },
+    {
+      "epoch": 0.31797689069357776,
+      "grad_norm": 6.366194725036621,
+      "learning_rate": 0.00019695883976479867,
+      "loss": 1.387,
+      "step": 10650
+    },
+    {
+      "epoch": 0.3194697399456603,
+      "grad_norm": 5.072678089141846,
+      "learning_rate": 0.00019694455396887983,
+      "loss": 1.3996,
+      "step": 10700
+    },
+    {
+      "epoch": 0.3209625891977428,
+      "grad_norm": 3.7204978466033936,
+      "learning_rate": 0.000196930268172961,
+      "loss": 1.3774,
+      "step": 10750
+    },
+    {
+      "epoch": 0.3224554384498253,
+      "grad_norm": 4.47731351852417,
+      "learning_rate": 0.00019691598237704216,
+      "loss": 1.3552,
+      "step": 10800
+    },
+    {
+      "epoch": 0.32394828770190787,
+      "grad_norm": 3.4569220542907715,
+      "learning_rate": 0.00019690169658112335,
+      "loss": 1.3794,
+      "step": 10850
+    },
+    {
+      "epoch": 0.3254411369539904,
+      "grad_norm": 4.344145774841309,
+      "learning_rate": 0.0001968874107852045,
+      "loss": 1.4419,
+      "step": 10900
+    },
+    {
+      "epoch": 0.3269339862060729,
+      "grad_norm": 4.089848041534424,
+      "learning_rate": 0.00019687312498928568,
+      "loss": 1.4116,
+      "step": 10950
+    },
+    {
+      "epoch": 0.32842683545815543,
+      "grad_norm": 3.995945930480957,
+      "learning_rate": 0.00019685883919336682,
+      "loss": 1.3532,
+      "step": 11000
+    },
+    {
+      "epoch": 0.32991968471023797,
+      "grad_norm": 3.8309378623962402,
+      "learning_rate": 0.00019684455339744798,
+      "loss": 1.3957,
+      "step": 11050
+    },
+    {
+      "epoch": 0.3314125339623205,
+      "grad_norm": 4.386235237121582,
+      "learning_rate": 0.00019683026760152915,
+      "loss": 1.3616,
+      "step": 11100
+    },
+    {
+      "epoch": 0.332905383214403,
+      "grad_norm": 5.133239269256592,
+      "learning_rate": 0.00019681598180561031,
+      "loss": 1.3959,
+      "step": 11150
+    },
+    {
+      "epoch": 0.33439823246648553,
+      "grad_norm": 4.216183662414551,
+      "learning_rate": 0.0001968016960096915,
+      "loss": 1.3446,
+      "step": 11200
+    },
+    {
+      "epoch": 0.3358910817185681,
+      "grad_norm": 3.631131172180176,
+      "learning_rate": 0.00019678741021377264,
+      "loss": 1.3779,
+      "step": 11250
+    },
+    {
+      "epoch": 0.33738393097065056,
+      "grad_norm": 4.603448390960693,
+      "learning_rate": 0.00019677312441785384,
+      "loss": 1.3962,
+      "step": 11300
+    },
+    {
+      "epoch": 0.3388767802227331,
+      "grad_norm": 3.6482913494110107,
+      "learning_rate": 0.00019675883862193497,
+      "loss": 1.3931,
+      "step": 11350
+    },
+    {
+      "epoch": 0.34036962947481564,
+      "grad_norm": 5.040388107299805,
+      "learning_rate": 0.00019674455282601616,
+      "loss": 1.3465,
+      "step": 11400
+    },
+    {
+      "epoch": 0.3418624787268982,
+      "grad_norm": 5.762825012207031,
+      "learning_rate": 0.0001967302670300973,
+      "loss": 1.3577,
+      "step": 11450
+    },
+    {
+      "epoch": 0.34335532797898066,
+      "grad_norm": 4.941501617431641,
+      "learning_rate": 0.0001967159812341785,
+      "loss": 1.3676,
+      "step": 11500
+    },
+    {
+      "epoch": 0.3448481772310632,
+      "grad_norm": 5.368370532989502,
+      "learning_rate": 0.00019670169543825966,
+      "loss": 1.4265,
+      "step": 11550
+    },
+    {
+      "epoch": 0.34634102648314574,
+      "grad_norm": 4.931522369384766,
+      "learning_rate": 0.00019668740964234082,
+      "loss": 1.3551,
+      "step": 11600
+    },
+    {
+      "epoch": 0.3478338757352283,
+      "grad_norm": 3.9685990810394287,
+      "learning_rate": 0.000196673123846422,
+      "loss": 1.402,
+      "step": 11650
+    },
+    {
+      "epoch": 0.34932672498731077,
+      "grad_norm": 5.771200656890869,
+      "learning_rate": 0.00019665883805050315,
+      "loss": 1.3596,
+      "step": 11700
+    },
+    {
+      "epoch": 0.3508195742393933,
+      "grad_norm": 5.142852306365967,
+      "learning_rate": 0.00019664455225458432,
+      "loss": 1.385,
+      "step": 11750
+    },
+    {
+      "epoch": 0.35231242349147585,
+      "grad_norm": 3.295628786087036,
+      "learning_rate": 0.00019663026645866548,
+      "loss": 1.3454,
+      "step": 11800
+    },
+    {
+      "epoch": 0.35380527274355833,
+      "grad_norm": 4.34658145904541,
+      "learning_rate": 0.00019661598066274665,
+      "loss": 1.3976,
+      "step": 11850
+    },
+    {
+      "epoch": 0.35529812199564087,
+      "grad_norm": 4.032591819763184,
+      "learning_rate": 0.0001966016948668278,
+      "loss": 1.3571,
+      "step": 11900
+    },
+    {
+      "epoch": 0.3567909712477234,
+      "grad_norm": 3.9286158084869385,
+      "learning_rate": 0.00019658740907090898,
+      "loss": 1.398,
+      "step": 11950
+    },
+    {
+      "epoch": 0.35828382049980595,
+      "grad_norm": 5.184597492218018,
+      "learning_rate": 0.00019657312327499017,
+      "loss": 1.3827,
+      "step": 12000
+    },
+    {
+      "epoch": 0.35977666975188843,
+      "grad_norm": 4.4749226570129395,
+      "learning_rate": 0.0001965588374790713,
+      "loss": 1.373,
+      "step": 12050
+    },
+    {
+      "epoch": 0.361269519003971,
+      "grad_norm": 3.5633764266967773,
+      "learning_rate": 0.0001965445516831525,
+      "loss": 1.3704,
+      "step": 12100
+    },
+    {
+      "epoch": 0.3627623682560535,
+      "grad_norm": 7.570897102355957,
+      "learning_rate": 0.00019653026588723363,
+      "loss": 1.3515,
+      "step": 12150
+    },
+    {
+      "epoch": 0.36425521750813605,
+      "grad_norm": 4.239411354064941,
+      "learning_rate": 0.00019651598009131483,
+      "loss": 1.3813,
+      "step": 12200
+    },
+    {
+      "epoch": 0.36574806676021854,
+      "grad_norm": 3.8941049575805664,
+      "learning_rate": 0.00019650169429539596,
+      "loss": 1.3954,
+      "step": 12250
+    },
+    {
+      "epoch": 0.3672409160123011,
+      "grad_norm": 4.8694586753845215,
+      "learning_rate": 0.00019648740849947716,
+      "loss": 1.3531,
+      "step": 12300
+    },
+    {
+      "epoch": 0.3687337652643836,
+      "grad_norm": 3.914964437484741,
+      "learning_rate": 0.00019647312270355832,
+      "loss": 1.3954,
+      "step": 12350
+    },
+    {
+      "epoch": 0.3702266145164661,
+      "grad_norm": 3.4050538539886475,
+      "learning_rate": 0.00019645883690763948,
+      "loss": 1.3498,
+      "step": 12400
+    },
+    {
+      "epoch": 0.37171946376854864,
+      "grad_norm": 4.436797618865967,
+      "learning_rate": 0.00019644455111172065,
+      "loss": 1.3889,
+      "step": 12450
+    },
+    {
+      "epoch": 0.3732123130206312,
+      "grad_norm": 2.7660670280456543,
+      "learning_rate": 0.00019643026531580181,
+      "loss": 1.3392,
+      "step": 12500
+    },
+    {
+      "epoch": 0.3747051622727137,
+      "grad_norm": 5.364072799682617,
+      "learning_rate": 0.00019641597951988298,
+      "loss": 1.3157,
+      "step": 12550
+    },
+    {
+      "epoch": 0.3761980115247962,
+      "grad_norm": 5.123339653015137,
+      "learning_rate": 0.00019640169372396414,
+      "loss": 1.3523,
+      "step": 12600
+    },
+    {
+      "epoch": 0.37769086077687875,
+      "grad_norm": 3.4495439529418945,
+      "learning_rate": 0.0001963874079280453,
+      "loss": 1.3331,
+      "step": 12650
+    },
+    {
+      "epoch": 0.3791837100289613,
+      "grad_norm": 4.613680362701416,
+      "learning_rate": 0.00019637312213212647,
+      "loss": 1.3707,
+      "step": 12700
+    },
+    {
+      "epoch": 0.3806765592810438,
+      "grad_norm": 3.5819404125213623,
+      "learning_rate": 0.00019635883633620764,
+      "loss": 1.4023,
+      "step": 12750
+    },
+    {
+      "epoch": 0.3821694085331263,
+      "grad_norm": 3.5075576305389404,
+      "learning_rate": 0.00019634455054028883,
+      "loss": 1.3815,
+      "step": 12800
+    },
+    {
+      "epoch": 0.38366225778520885,
+      "grad_norm": 4.425256729125977,
+      "learning_rate": 0.00019633026474436997,
+      "loss": 1.3801,
+      "step": 12850
+    },
+    {
+      "epoch": 0.3851551070372914,
+      "grad_norm": 3.5711112022399902,
+      "learning_rate": 0.00019631597894845116,
+      "loss": 1.3375,
+      "step": 12900
+    },
+    {
+      "epoch": 0.3866479562893739,
+      "grad_norm": 5.728016376495361,
+      "learning_rate": 0.0001963016931525323,
+      "loss": 1.3914,
+      "step": 12950
+    },
+    {
+      "epoch": 0.3881408055414564,
+      "grad_norm": 4.294504642486572,
+      "learning_rate": 0.0001962874073566135,
+      "loss": 1.4561,
+      "step": 13000
+    },
+    {
+      "epoch": 0.38963365479353895,
+      "grad_norm": 5.329941749572754,
+      "learning_rate": 0.00019627312156069465,
+      "loss": 1.4018,
+      "step": 13050
+    },
+    {
+      "epoch": 0.3911265040456215,
+      "grad_norm": 4.166362762451172,
+      "learning_rate": 0.00019625883576477582,
+      "loss": 1.4334,
+      "step": 13100
+    },
+    {
+      "epoch": 0.392619353297704,
+      "grad_norm": 3.3922691345214844,
+      "learning_rate": 0.00019624454996885698,
+      "loss": 1.3956,
+      "step": 13150
+    },
+    {
+      "epoch": 0.3941122025497865,
+      "grad_norm": 4.340898036956787,
+      "learning_rate": 0.00019623026417293815,
+      "loss": 1.3643,
+      "step": 13200
+    },
+    {
+      "epoch": 0.39560505180186906,
+      "grad_norm": 6.623823165893555,
+      "learning_rate": 0.0001962159783770193,
+      "loss": 1.362,
+      "step": 13250
+    },
+    {
+      "epoch": 0.3970979010539516,
+      "grad_norm": 4.490639686584473,
+      "learning_rate": 0.00019620169258110048,
+      "loss": 1.3742,
+      "step": 13300
+    },
+    {
+      "epoch": 0.3985907503060341,
+      "grad_norm": 4.179808139801025,
+      "learning_rate": 0.00019618740678518164,
+      "loss": 1.4191,
+      "step": 13350
+    },
+    {
+      "epoch": 0.4000835995581166,
+      "grad_norm": 5.623187065124512,
+      "learning_rate": 0.0001961731209892628,
+      "loss": 1.3793,
+      "step": 13400
+    },
+    {
+      "epoch": 0.40157644881019916,
+      "grad_norm": 3.9650678634643555,
+      "learning_rate": 0.00019615883519334397,
+      "loss": 1.3917,
+      "step": 13450
+    },
+    {
+      "epoch": 0.40306929806228164,
+      "grad_norm": 5.047702312469482,
+      "learning_rate": 0.00019614454939742516,
+      "loss": 1.4501,
+      "step": 13500
+    },
+    {
+      "epoch": 0.4045621473143642,
+      "grad_norm": 3.46647310256958,
+      "learning_rate": 0.0001961302636015063,
+      "loss": 1.4091,
+      "step": 13550
+    },
+    {
+      "epoch": 0.4060549965664467,
+      "grad_norm": 3.2203481197357178,
+      "learning_rate": 0.0001961159778055875,
+      "loss": 1.4003,
+      "step": 13600
+    },
+    {
+      "epoch": 0.40754784581852926,
+      "grad_norm": 3.727679967880249,
+      "learning_rate": 0.00019610169200966863,
+      "loss": 1.4004,
+      "step": 13650
+    },
+    {
+      "epoch": 0.40904069507061175,
+      "grad_norm": 4.469257831573486,
+      "learning_rate": 0.0001960874062137498,
+      "loss": 1.4061,
+      "step": 13700
+    },
+    {
+      "epoch": 0.4105335443226943,
+      "grad_norm": 4.041538715362549,
+      "learning_rate": 0.00019607312041783096,
+      "loss": 1.411,
+      "step": 13750
+    },
+    {
+      "epoch": 0.41202639357477683,
+      "grad_norm": 5.2691779136657715,
+      "learning_rate": 0.00019605883462191212,
+      "loss": 1.3396,
+      "step": 13800
+    },
+    {
+      "epoch": 0.41351924282685937,
+      "grad_norm": 6.236726760864258,
+      "learning_rate": 0.00019604454882599331,
+      "loss": 1.4295,
+      "step": 13850
+    },
+    {
+      "epoch": 0.41501209207894185,
+      "grad_norm": 6.703745365142822,
+      "learning_rate": 0.00019603026303007445,
+      "loss": 1.4105,
+      "step": 13900
+    },
+    {
+      "epoch": 0.4165049413310244,
+      "grad_norm": 3.997664451599121,
+      "learning_rate": 0.00019601597723415564,
+      "loss": 1.347,
+      "step": 13950
+    },
+    {
+      "epoch": 0.41799779058310693,
+      "grad_norm": 5.311407566070557,
+      "learning_rate": 0.00019600169143823678,
+      "loss": 1.4546,
+      "step": 14000
+    },
+    {
+      "epoch": 0.4194906398351894,
+      "grad_norm": 4.0283098220825195,
+      "learning_rate": 0.00019598740564231797,
+      "loss": 1.4513,
+      "step": 14050
+    },
+    {
+      "epoch": 0.42098348908727196,
+      "grad_norm": 7.345764636993408,
+      "learning_rate": 0.0001959731198463991,
+      "loss": 1.3832,
+      "step": 14100
+    },
+    {
+      "epoch": 0.4224763383393545,
+      "grad_norm": 4.324542045593262,
+      "learning_rate": 0.0001959588340504803,
+      "loss": 1.3751,
+      "step": 14150
+    },
+    {
+      "epoch": 0.42396918759143704,
+      "grad_norm": 3.8322675228118896,
+      "learning_rate": 0.00019594454825456147,
+      "loss": 1.344,
+      "step": 14200
+    },
+    {
+      "epoch": 0.4254620368435195,
+      "grad_norm": 4.62548303604126,
+      "learning_rate": 0.00019593026245864263,
+      "loss": 1.4346,
+      "step": 14250
+    },
+    {
+      "epoch": 0.42695488609560206,
+      "grad_norm": 4.585489273071289,
+      "learning_rate": 0.0001959159766627238,
+      "loss": 1.4145,
+      "step": 14300
+    },
+    {
+      "epoch": 0.4284477353476846,
+      "grad_norm": 3.64227557182312,
+      "learning_rate": 0.00019590169086680496,
+      "loss": 1.367,
+      "step": 14350
+    },
+    {
+      "epoch": 0.42994058459976714,
+      "grad_norm": 4.730580806732178,
+      "learning_rate": 0.00019588740507088613,
+      "loss": 1.3653,
+      "step": 14400
+    },
+    {
+      "epoch": 0.4314334338518496,
+      "grad_norm": 4.4075398445129395,
+      "learning_rate": 0.0001958731192749673,
+      "loss": 1.4081,
+      "step": 14450
+    },
+    {
+      "epoch": 0.43292628310393216,
+      "grad_norm": 3.0498785972595215,
+      "learning_rate": 0.00019585883347904845,
+      "loss": 1.343,
+      "step": 14500
+    },
+    {
+      "epoch": 0.4344191323560147,
+      "grad_norm": 4.179199695587158,
+      "learning_rate": 0.00019584454768312962,
+      "loss": 1.3662,
+      "step": 14550
+    },
+    {
+      "epoch": 0.4359119816080972,
+      "grad_norm": 4.1148786544799805,
+      "learning_rate": 0.00019583026188721078,
+      "loss": 1.3785,
+      "step": 14600
+    },
+    {
+      "epoch": 0.4374048308601797,
+      "grad_norm": 4.012060165405273,
+      "learning_rate": 0.00019581597609129198,
+      "loss": 1.4056,
+      "step": 14650
+    },
+    {
+      "epoch": 0.43889768011226227,
+      "grad_norm": 7.186342716217041,
+      "learning_rate": 0.0001958016902953731,
+      "loss": 1.3943,
+      "step": 14700
+    },
+    {
+      "epoch": 0.4403905293643448,
+      "grad_norm": 3.951267957687378,
+      "learning_rate": 0.0001957874044994543,
+      "loss": 1.3976,
+      "step": 14750
+    },
+    {
+      "epoch": 0.4418833786164273,
+      "grad_norm": 5.276801586151123,
+      "learning_rate": 0.00019577311870353544,
+      "loss": 1.4137,
+      "step": 14800
+    },
+    {
+      "epoch": 0.44337622786850983,
+      "grad_norm": 4.107429504394531,
+      "learning_rate": 0.00019575883290761663,
+      "loss": 1.388,
+      "step": 14850
+    },
+    {
+      "epoch": 0.44486907712059237,
+      "grad_norm": 4.274941444396973,
+      "learning_rate": 0.00019574454711169777,
+      "loss": 1.3446,
+      "step": 14900
+    },
+    {
+      "epoch": 0.4463619263726749,
+      "grad_norm": 4.174200534820557,
+      "learning_rate": 0.00019573026131577896,
+      "loss": 1.3703,
+      "step": 14950
+    },
+    {
+      "epoch": 0.4478547756247574,
+      "grad_norm": 3.867125988006592,
+      "learning_rate": 0.00019571597551986013,
+      "loss": 1.3745,
+      "step": 15000
+    },
+    {
+      "epoch": 0.44934762487683994,
+      "grad_norm": 6.454402923583984,
+      "learning_rate": 0.0001957016897239413,
+      "loss": 1.3081,
+      "step": 15050
+    },
+    {
+      "epoch": 0.4508404741289225,
+      "grad_norm": 3.8726885318756104,
+      "learning_rate": 0.00019568740392802246,
+      "loss": 1.3527,
+      "step": 15100
+    },
+    {
+      "epoch": 0.45233332338100496,
+      "grad_norm": 3.6218361854553223,
+      "learning_rate": 0.00019567311813210362,
+      "loss": 1.4549,
+      "step": 15150
+    },
+    {
+      "epoch": 0.4538261726330875,
+      "grad_norm": 5.2475361824035645,
+      "learning_rate": 0.0001956588323361848,
+      "loss": 1.361,
+      "step": 15200
+    },
+    {
+      "epoch": 0.45531902188517004,
+      "grad_norm": 4.388748645782471,
+      "learning_rate": 0.00019564454654026595,
+      "loss": 1.3164,
+      "step": 15250
+    },
+    {
+      "epoch": 0.4568118711372526,
+      "grad_norm": 4.97973108291626,
+      "learning_rate": 0.00019563026074434712,
+      "loss": 1.3755,
+      "step": 15300
+    },
+    {
+      "epoch": 0.45830472038933506,
+      "grad_norm": 4.538138389587402,
+      "learning_rate": 0.00019561597494842828,
+      "loss": 1.4339,
+      "step": 15350
+    },
+    {
+      "epoch": 0.4597975696414176,
+      "grad_norm": 4.389719009399414,
+      "learning_rate": 0.00019560168915250945,
+      "loss": 1.4163,
+      "step": 15400
+    },
+    {
+      "epoch": 0.46129041889350014,
+      "grad_norm": 4.347919464111328,
+      "learning_rate": 0.00019558740335659064,
+      "loss": 1.3663,
+      "step": 15450
+    },
+    {
+      "epoch": 0.4627832681455827,
+      "grad_norm": 4.820595741271973,
+      "learning_rate": 0.00019557311756067177,
+      "loss": 1.3441,
+      "step": 15500
+    },
+    {
+      "epoch": 0.46427611739766517,
+      "grad_norm": 2.6965413093566895,
+      "learning_rate": 0.00019555883176475297,
+      "loss": 1.3657,
+      "step": 15550
+    },
+    {
+      "epoch": 0.4657689666497477,
+      "grad_norm": 4.741116523742676,
+      "learning_rate": 0.0001955445459688341,
+      "loss": 1.4414,
+      "step": 15600
+    },
+    {
+      "epoch": 0.46726181590183025,
+      "grad_norm": 3.9512829780578613,
+      "learning_rate": 0.0001955302601729153,
+      "loss": 1.354,
+      "step": 15650
+    },
+    {
+      "epoch": 0.46875466515391273,
+      "grad_norm": 7.704863548278809,
+      "learning_rate": 0.00019551597437699646,
+      "loss": 1.4107,
+      "step": 15700
+    },
+    {
+      "epoch": 0.47024751440599527,
+      "grad_norm": 3.502988338470459,
+      "learning_rate": 0.00019550168858107763,
+      "loss": 1.415,
+      "step": 15750
+    },
+    {
+      "epoch": 0.4717403636580778,
+      "grad_norm": 4.246065139770508,
+      "learning_rate": 0.0001954874027851588,
+      "loss": 1.3414,
+      "step": 15800
+    },
+    {
+      "epoch": 0.47323321291016035,
+      "grad_norm": 3.4039735794067383,
+      "learning_rate": 0.00019547311698923995,
+      "loss": 1.3303,
+      "step": 15850
+    },
+    {
+      "epoch": 0.47472606216224283,
+      "grad_norm": 3.279521942138672,
+      "learning_rate": 0.00019545883119332112,
+      "loss": 1.3447,
+      "step": 15900
+    },
+    {
+      "epoch": 0.4762189114143254,
+      "grad_norm": 2.9335134029388428,
+      "learning_rate": 0.00019544454539740228,
+      "loss": 1.3998,
+      "step": 15950
+    },
+    {
+      "epoch": 0.4777117606664079,
+      "grad_norm": 3.317011833190918,
+      "learning_rate": 0.00019543025960148345,
+      "loss": 1.3506,
+      "step": 16000
+    },
+    {
+      "epoch": 0.47920460991849045,
+      "grad_norm": 3.5980935096740723,
+      "learning_rate": 0.0001954159738055646,
+      "loss": 1.3622,
+      "step": 16050
+    },
+    {
+      "epoch": 0.48069745917057294,
+      "grad_norm": 4.726743698120117,
+      "learning_rate": 0.00019540168800964578,
+      "loss": 1.3414,
+      "step": 16100
+    },
+    {
+      "epoch": 0.4821903084226555,
+      "grad_norm": 5.129758358001709,
+      "learning_rate": 0.00019538740221372694,
+      "loss": 1.3873,
+      "step": 16150
+    },
+    {
+      "epoch": 0.483683157674738,
+      "grad_norm": 5.122271537780762,
+      "learning_rate": 0.0001953731164178081,
+      "loss": 1.428,
+      "step": 16200
+    },
+    {
+      "epoch": 0.4851760069268205,
+      "grad_norm": 3.359868049621582,
+      "learning_rate": 0.0001953588306218893,
+      "loss": 1.3111,
+      "step": 16250
+    },
+    {
+      "epoch": 0.48666885617890304,
+      "grad_norm": 5.066514492034912,
+      "learning_rate": 0.00019534454482597044,
+      "loss": 1.2961,
+      "step": 16300
+    },
+    {
+      "epoch": 0.4881617054309856,
+      "grad_norm": 4.902595520019531,
+      "learning_rate": 0.0001953302590300516,
+      "loss": 1.3682,
+      "step": 16350
+    },
+    {
+      "epoch": 0.4896545546830681,
+      "grad_norm": 5.0537028312683105,
+      "learning_rate": 0.00019531597323413277,
+      "loss": 1.315,
+      "step": 16400
+    },
+    {
+      "epoch": 0.4911474039351506,
+      "grad_norm": 3.7002792358398438,
+      "learning_rate": 0.00019530168743821393,
+      "loss": 1.3441,
+      "step": 16450
+    },
+    {
+      "epoch": 0.49264025318723315,
+      "grad_norm": 4.845950603485107,
+      "learning_rate": 0.00019528740164229512,
+      "loss": 1.3887,
+      "step": 16500
+    },
+    {
+      "epoch": 0.4941331024393157,
+      "grad_norm": 4.933434963226318,
+      "learning_rate": 0.00019527311584637626,
+      "loss": 1.3865,
+      "step": 16550
+    },
+    {
+      "epoch": 0.4956259516913982,
+      "grad_norm": 3.8103625774383545,
+      "learning_rate": 0.00019525883005045745,
+      "loss": 1.3757,
+      "step": 16600
+    },
+    {
+      "epoch": 0.4971188009434807,
+      "grad_norm": 4.501999855041504,
+      "learning_rate": 0.0001952445442545386,
+      "loss": 1.4482,
+      "step": 16650
+    },
+    {
+      "epoch": 0.49861165019556325,
+      "grad_norm": 5.600002765655518,
+      "learning_rate": 0.00019523025845861978,
+      "loss": 1.4209,
+      "step": 16700
+    },
+    {
+      "epoch": 0.5001044994476458,
+      "grad_norm": 5.138682842254639,
+      "learning_rate": 0.00019521597266270092,
+      "loss": 1.4287,
+      "step": 16750
+    },
+    {
+      "epoch": 0.5015973486997283,
+      "grad_norm": 5.575449466705322,
+      "learning_rate": 0.0001952016868667821,
+      "loss": 1.3992,
+      "step": 16800
+    },
+    {
+      "epoch": 0.5030901979518109,
+      "grad_norm": 3.6443893909454346,
+      "learning_rate": 0.00019518740107086327,
+      "loss": 1.3661,
+      "step": 16850
+    },
+    {
+      "epoch": 0.5045830472038934,
+      "grad_norm": 3.904905319213867,
+      "learning_rate": 0.00019517311527494444,
+      "loss": 1.4448,
+      "step": 16900
+    },
+    {
+      "epoch": 0.5060758964559758,
+      "grad_norm": 4.380904197692871,
+      "learning_rate": 0.0001951588294790256,
+      "loss": 1.4076,
+      "step": 16950
+    },
+    {
+      "epoch": 0.5075687457080584,
+      "grad_norm": 3.5924415588378906,
+      "learning_rate": 0.00019514454368310677,
+      "loss": 1.3833,
+      "step": 17000
+    },
+    {
+      "epoch": 0.5090615949601409,
+      "grad_norm": 3.7007193565368652,
+      "learning_rate": 0.00019513025788718793,
+      "loss": 1.391,
+      "step": 17050
+    },
+    {
+      "epoch": 0.5105544442122234,
+      "grad_norm": 3.9879095554351807,
+      "learning_rate": 0.0001951159720912691,
+      "loss": 1.3419,
+      "step": 17100
+    },
+    {
+      "epoch": 0.512047293464306,
+      "grad_norm": 5.663998126983643,
+      "learning_rate": 0.00019510168629535026,
+      "loss": 1.3481,
+      "step": 17150
+    },
+    {
+      "epoch": 0.5135401427163885,
+      "grad_norm": 3.9803707599639893,
+      "learning_rate": 0.00019508740049943143,
+      "loss": 1.4034,
+      "step": 17200
+    },
+    {
+      "epoch": 0.5150329919684711,
+      "grad_norm": 3.718477725982666,
+      "learning_rate": 0.0001950731147035126,
+      "loss": 1.406,
+      "step": 17250
+    },
+    {
+      "epoch": 0.5165258412205536,
+      "grad_norm": 4.864751815795898,
+      "learning_rate": 0.00019505882890759378,
+      "loss": 1.375,
+      "step": 17300
+    },
+    {
+      "epoch": 0.518018690472636,
+      "grad_norm": 3.697645664215088,
+      "learning_rate": 0.00019504454311167492,
+      "loss": 1.4283,
+      "step": 17350
+    },
+    {
+      "epoch": 0.5195115397247186,
+      "grad_norm": 4.063074111938477,
+      "learning_rate": 0.0001950302573157561,
+      "loss": 1.378,
+      "step": 17400
+    },
+    {
+      "epoch": 0.5210043889768011,
+      "grad_norm": 4.223004341125488,
+      "learning_rate": 0.00019501597151983725,
+      "loss": 1.3789,
+      "step": 17450
+    },
+    {
+      "epoch": 0.5224972382288836,
+      "grad_norm": 3.329366683959961,
+      "learning_rate": 0.00019500168572391844,
+      "loss": 1.346,
+      "step": 17500
+    },
+    {
+      "epoch": 0.5239900874809662,
+      "grad_norm": 4.774710178375244,
+      "learning_rate": 0.00019498739992799958,
+      "loss": 1.3895,
+      "step": 17550
+    },
+    {
+      "epoch": 0.5254829367330487,
+      "grad_norm": 6.2145490646362305,
+      "learning_rate": 0.00019497311413208077,
+      "loss": 1.3715,
+      "step": 17600
+    },
+    {
+      "epoch": 0.5269757859851312,
+      "grad_norm": 3.9069626331329346,
+      "learning_rate": 0.00019495882833616194,
+      "loss": 1.4572,
+      "step": 17650
+    },
+    {
+      "epoch": 0.5284686352372138,
+      "grad_norm": 3.347576141357422,
+      "learning_rate": 0.0001949445425402431,
+      "loss": 1.359,
+      "step": 17700
+    },
+    {
+      "epoch": 0.5299614844892963,
+      "grad_norm": 5.305202484130859,
+      "learning_rate": 0.00019493025674432427,
+      "loss": 1.4038,
+      "step": 17750
+    },
+    {
+      "epoch": 0.5314543337413788,
+      "grad_norm": 3.865619659423828,
+      "learning_rate": 0.00019491597094840543,
+      "loss": 1.3913,
+      "step": 17800
+    },
+    {
+      "epoch": 0.5329471829934613,
+      "grad_norm": 4.791336536407471,
+      "learning_rate": 0.0001949016851524866,
+      "loss": 1.3852,
+      "step": 17850
+    },
+    {
+      "epoch": 0.5344400322455438,
+      "grad_norm": 3.7827060222625732,
+      "learning_rate": 0.00019488739935656776,
+      "loss": 1.3108,
+      "step": 17900
+    },
+    {
+      "epoch": 0.5359328814976264,
+      "grad_norm": 4.945117473602295,
+      "learning_rate": 0.00019487311356064892,
+      "loss": 1.346,
+      "step": 17950
+    },
+    {
+      "epoch": 0.5374257307497089,
+      "grad_norm": 4.561169147491455,
+      "learning_rate": 0.0001948588277647301,
+      "loss": 1.3904,
+      "step": 18000
+    },
+    {
+      "epoch": 0.5389185800017914,
+      "grad_norm": 4.608798027038574,
+      "learning_rate": 0.00019484454196881125,
+      "loss": 1.4133,
+      "step": 18050
+    },
+    {
+      "epoch": 0.540411429253874,
+      "grad_norm": 4.303143501281738,
+      "learning_rate": 0.00019483025617289245,
+      "loss": 1.3994,
+      "step": 18100
+    },
+    {
+      "epoch": 0.5419042785059565,
+      "grad_norm": 5.815835952758789,
+      "learning_rate": 0.00019481597037697358,
+      "loss": 1.392,
+      "step": 18150
+    },
+    {
+      "epoch": 0.543397127758039,
+      "grad_norm": 5.349491596221924,
+      "learning_rate": 0.00019480168458105477,
+      "loss": 1.418,
+      "step": 18200
+    },
+    {
+      "epoch": 0.5448899770101215,
+      "grad_norm": 3.7407824993133545,
+      "learning_rate": 0.0001947873987851359,
+      "loss": 1.3747,
+      "step": 18250
+    },
+    {
+      "epoch": 0.546382826262204,
+      "grad_norm": 5.2810163497924805,
+      "learning_rate": 0.0001947731129892171,
+      "loss": 1.4023,
+      "step": 18300
+    },
+    {
+      "epoch": 0.5478756755142866,
+      "grad_norm": 4.417948246002197,
+      "learning_rate": 0.00019475882719329824,
+      "loss": 1.4005,
+      "step": 18350
+    },
+    {
+      "epoch": 0.5493685247663691,
+      "grad_norm": 5.287749290466309,
+      "learning_rate": 0.00019474454139737943,
+      "loss": 1.4211,
+      "step": 18400
+    },
+    {
+      "epoch": 0.5508613740184516,
+      "grad_norm": 4.0996809005737305,
+      "learning_rate": 0.0001947302556014606,
+      "loss": 1.3559,
+      "step": 18450
+    },
+    {
+      "epoch": 0.5523542232705342,
+      "grad_norm": 5.229327201843262,
+      "learning_rate": 0.00019471596980554176,
+      "loss": 1.4549,
+      "step": 18500
+    },
+    {
+      "epoch": 0.5538470725226167,
+      "grad_norm": 4.409546852111816,
+      "learning_rate": 0.00019470168400962293,
+      "loss": 1.3683,
+      "step": 18550
+    },
+    {
+      "epoch": 0.5553399217746992,
+      "grad_norm": 5.4077229499816895,
+      "learning_rate": 0.0001946873982137041,
+      "loss": 1.398,
+      "step": 18600
+    },
+    {
+      "epoch": 0.5568327710267817,
+      "grad_norm": 5.208966255187988,
+      "learning_rate": 0.00019467311241778526,
+      "loss": 1.3276,
+      "step": 18650
+    },
+    {
+      "epoch": 0.5583256202788642,
+      "grad_norm": 4.8162617683410645,
+      "learning_rate": 0.00019465882662186642,
+      "loss": 1.3314,
+      "step": 18700
+    },
+    {
+      "epoch": 0.5598184695309467,
+      "grad_norm": 5.04697322845459,
+      "learning_rate": 0.00019464454082594759,
+      "loss": 1.3883,
+      "step": 18750
+    },
+    {
+      "epoch": 0.5613113187830293,
+      "grad_norm": 4.038108825683594,
+      "learning_rate": 0.00019463025503002875,
+      "loss": 1.4377,
+      "step": 18800
+    },
+    {
+      "epoch": 0.5628041680351118,
+      "grad_norm": 4.910576820373535,
+      "learning_rate": 0.00019461596923410992,
+      "loss": 1.3713,
+      "step": 18850
+    },
+    {
+      "epoch": 0.5642970172871944,
+      "grad_norm": 5.3433756828308105,
+      "learning_rate": 0.00019460168343819108,
+      "loss": 1.4359,
+      "step": 18900
+    },
+    {
+      "epoch": 0.5657898665392769,
+      "grad_norm": 3.9515552520751953,
+      "learning_rate": 0.00019458739764227224,
+      "loss": 1.3608,
+      "step": 18950
+    },
+    {
+      "epoch": 0.5672827157913594,
+      "grad_norm": 4.515705108642578,
+      "learning_rate": 0.0001945731118463534,
+      "loss": 1.3274,
+      "step": 19000
+    },
+    {
+      "epoch": 0.568775565043442,
+      "grad_norm": 4.434077262878418,
+      "learning_rate": 0.00019455882605043457,
+      "loss": 1.3681,
+      "step": 19050
+    },
+    {
+      "epoch": 0.5702684142955244,
+      "grad_norm": 4.534008979797363,
+      "learning_rate": 0.00019454454025451574,
+      "loss": 1.3863,
+      "step": 19100
+    },
+    {
+      "epoch": 0.5717612635476069,
+      "grad_norm": 4.200322151184082,
+      "learning_rate": 0.00019453025445859693,
+      "loss": 1.3743,
+      "step": 19150
+    },
+    {
+      "epoch": 0.5732541127996895,
+      "grad_norm": 5.686845779418945,
+      "learning_rate": 0.00019451596866267807,
+      "loss": 1.3225,
+      "step": 19200
+    },
+    {
+      "epoch": 0.574746962051772,
+      "grad_norm": 7.821211814880371,
+      "learning_rate": 0.00019450168286675926,
+      "loss": 1.3775,
+      "step": 19250
+    },
+    {
+      "epoch": 0.5762398113038545,
+      "grad_norm": 5.200834274291992,
+      "learning_rate": 0.0001944873970708404,
+      "loss": 1.3423,
+      "step": 19300
+    },
+    {
+      "epoch": 0.5777326605559371,
+      "grad_norm": 5.26302433013916,
+      "learning_rate": 0.0001944731112749216,
+      "loss": 1.3813,
+      "step": 19350
+    },
+    {
+      "epoch": 0.5792255098080196,
+      "grad_norm": 3.3207719326019287,
+      "learning_rate": 0.00019445882547900273,
+      "loss": 1.3922,
+      "step": 19400
+    },
+    {
+      "epoch": 0.5807183590601022,
+      "grad_norm": 4.619020938873291,
+      "learning_rate": 0.00019444453968308392,
+      "loss": 1.3533,
+      "step": 19450
+    },
+    {
+      "epoch": 0.5822112083121846,
+      "grad_norm": 5.780002593994141,
+      "learning_rate": 0.00019443025388716508,
+      "loss": 1.4035,
+      "step": 19500
+    },
+    {
+      "epoch": 0.5837040575642671,
+      "grad_norm": 4.961215496063232,
+      "learning_rate": 0.00019441596809124625,
+      "loss": 1.3687,
+      "step": 19550
+    },
+    {
+      "epoch": 0.5851969068163497,
+      "grad_norm": 4.50115442276001,
+      "learning_rate": 0.0001944016822953274,
+      "loss": 1.342,
+      "step": 19600
+    },
+    {
+      "epoch": 0.5866897560684322,
+      "grad_norm": 3.9477944374084473,
+      "learning_rate": 0.00019438739649940858,
+      "loss": 1.394,
+      "step": 19650
+    },
+    {
+      "epoch": 0.5881826053205147,
+      "grad_norm": 3.7466814517974854,
+      "learning_rate": 0.00019437311070348974,
+      "loss": 1.3414,
+      "step": 19700
+    },
+    {
+      "epoch": 0.5896754545725973,
+      "grad_norm": 4.382058143615723,
+      "learning_rate": 0.0001943588249075709,
+      "loss": 1.3669,
+      "step": 19750
+    },
+    {
+      "epoch": 0.5911683038246798,
+      "grad_norm": 3.7016665935516357,
+      "learning_rate": 0.00019434453911165207,
+      "loss": 1.4548,
+      "step": 19800
+    },
+    {
+      "epoch": 0.5926611530767623,
+      "grad_norm": 4.4738030433654785,
+      "learning_rate": 0.00019433025331573324,
+      "loss": 1.4273,
+      "step": 19850
+    },
+    {
+      "epoch": 0.5941540023288449,
+      "grad_norm": 5.2445454597473145,
+      "learning_rate": 0.0001943159675198144,
+      "loss": 1.3746,
+      "step": 19900
+    },
+    {
+      "epoch": 0.5956468515809273,
+      "grad_norm": 3.766219139099121,
+      "learning_rate": 0.0001943016817238956,
+      "loss": 1.4391,
+      "step": 19950
+    },
+    {
+      "epoch": 0.5971397008330099,
+      "grad_norm": 6.310808181762695,
+      "learning_rate": 0.00019428739592797673,
+      "loss": 1.3316,
+      "step": 20000
     }
   ],
   "logging_steps": 50,
@@ -1433,7 +2833,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 5.04693160210858e+17,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
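For reference, everything this commit adds to trainer_state.json lives under log_history, so the run's loss curve between steps 10050 and 20000 can be read straight out of the file. A minimal sketch, assuming the path is relative to the repo root:

# Sketch: extract the logged loss curve from the updated trainer_state.json.
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Each log_history record holds epoch, grad_norm, learning_rate, loss, step.
curve = [(rec["step"], rec["loss"]) for rec in state["log_history"] if "loss" in rec]
print(curve[-1])  # (20000, 1.3316) for this commit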