Model save
- README.md +68 -0
- all_results.json +8 -0
- generation_config.json +9 -0
- train_results.json +8 -0
- trainer_state.json +2743 -0
README.md
ADDED
@@ -0,0 +1,68 @@
---
base_model: deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B
library_name: transformers
model_name: Gaussian_0.2_0.2
tags:
- generated_from_trainer
- trl
- grpo
licence: license
---

# Model Card for Gaussian_0.2_0.2

This model is a fine-tuned version of [deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B).
It has been trained using [TRL](https://github.com/huggingface/trl).

## Quick start

```python
from transformers import pipeline

question = "If you had a time machine, but could only go to the past or the future once and never return, which would you choose and why?"
generator = pipeline("text-generation", model="LLucass/Gaussian_0.2_0.2", device="cuda")
output = generator([{"role": "user", "content": question}], max_new_tokens=128, return_full_text=False)[0]
print(output["generated_text"])
```
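The same generation can also be run with the lower-level `AutoModelForCausalLM` API. The sketch below is illustrative and untested; the sampling values mirror this repository's `generation_config.json`, while the prompt, dtype, and device placement are arbitrary choices.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "LLucass/Gaussian_0.2_0.2"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")

# Build the chat-formatted prompt that the pipeline constructs internally.
messages = [{"role": "user", "content": "Which would you choose: the past or the future?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# do_sample / temperature / top_p match the defaults stored in generation_config.json.
output_ids = model.generate(input_ids, max_new_tokens=128, do_sample=True, temperature=0.6, top_p=0.95)
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```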

## Training procedure

[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/lavatorywang-nus/Gaussian/runs/2dg4rntu)

This model was trained with GRPO, a method introduced in [DeepSeekMath: Pushing the Limits of Mathematical Reasoning in Open Language Models](https://huggingface.co/papers/2402.03300).

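The exact training script, dataset, and reward functions behind this run are not included in the repository, so the following is only a minimal TRL GRPO sketch: the dataset is a placeholder prompt dataset from the TRL examples, and the reward is a toy stand-in for the `format_reward` logged in `trainer_state.json`.

```python
from datasets import load_dataset
from trl import GRPOConfig, GRPOTrainer

def format_reward(completions, **kwargs):
    # Toy reward: 1.0 when the completion wraps its reasoning in <think>...</think> tags.
    return [1.0 if "<think>" in c and "</think>" in c else 0.0 for c in completions]

# Placeholder prompt-only dataset; the dataset used for Gaussian_0.2_0.2 is not published here.
dataset = load_dataset("trl-lib/tldr", split="train")

training_args = GRPOConfig(output_dir="Gaussian_0.2_0.2", learning_rate=1e-6)
trainer = GRPOTrainer(
    model="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
    reward_funcs=format_reward,
    args=training_args,
    train_dataset=dataset,
)
trainer.train()
```

GRPO samples several completions per prompt and uses group-relative reward statistics as the advantage signal, so any callable mapping completions to scalar rewards can be passed as `reward_funcs`.
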
### Framework versions

- TRL: 0.18.0
- Transformers: 4.50.0
- Pytorch: 2.5.1
- Datasets: 3.6.0
- Tokenizers: 0.21.1

## Citations

Cite GRPO as:

```bibtex
@article{zhihong2024deepseekmath,
    title = {{DeepSeekMath: Pushing the Limits of Mathematical Reasoning in Open Language Models}},
    author = {Zhihong Shao and Peiyi Wang and Qihao Zhu and Runxin Xu and Junxiao Song and Mingchuan Zhang and Y. K. Li and Y. Wu and Daya Guo},
    year = 2024,
    eprint = {arXiv:2402.03300},
}
```

Cite TRL as:

```bibtex
@misc{vonwerra2022trl,
    title = {{TRL: Transformer Reinforcement Learning}},
    author = {Leandro von Werra and Younes Belkada and Lewis Tunstall and Edward Beeching and Tristan Thrush and Nathan Lambert and Shengyi Huang and Kashif Rasul and Quentin Gallouédec},
    year = 2020,
    journal = {GitHub repository},
    publisher = {GitHub},
    howpublished = {\url{https://github.com/huggingface/trl}}
}
```
all_results.json
ADDED
@@ -0,0 +1,8 @@
{
    "total_flos": 0.0,
    "train_loss": -0.0008640377339906991,
    "train_runtime": 8354.3803,
    "train_samples": 7000,
    "train_samples_per_second": 1.149,
    "train_steps_per_second": 0.012
}
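The two throughput fields are enough to back out the effective number of samples consumed per optimizer step; a small illustrative check (assuming the file is read from the current directory):

```python
import json

# Illustrative sanity check of the reported throughput numbers.
with open("all_results.json") as f:
    results = json.load(f)

samples_per_step = results["train_samples_per_second"] / results["train_steps_per_second"]
print(round(samples_per_step))  # 1.149 / 0.012 ≈ 96 samples per optimizer step
```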
generation_config.json
ADDED
@@ -0,0 +1,9 @@
{
    "_from_model_config": true,
    "bos_token_id": 151646,
    "do_sample": true,
    "eos_token_id": 151643,
    "temperature": 0.6,
    "top_p": 0.95,
    "transformers_version": "4.50.0"
}
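These defaults are applied automatically by `model.generate()`; they can also be loaded and inspected explicitly. A small illustrative snippet:

```python
from transformers import GenerationConfig

# Load the saved sampling defaults for this checkpoint.
gen_config = GenerationConfig.from_pretrained("LLucass/Gaussian_0.2_0.2")
print(gen_config.do_sample, gen_config.temperature, gen_config.top_p)  # True 0.6 0.95

# Individual settings can be overridden per call, e.g.
# model.generate(**inputs, generation_config=gen_config, max_new_tokens=64)
```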
train_results.json
ADDED
@@ -0,0 +1,8 @@
{
    "total_flos": 0.0,
    "train_loss": -0.0008640377339906991,
    "train_runtime": 8354.3803,
    "train_samples": 7000,
    "train_samples_per_second": 1.149,
    "train_steps_per_second": 0.012
}
trainer_state.json
ADDED
@@ -0,0 +1,2743 @@
{
    "best_global_step": null,
    "best_metric": null,
    "best_model_checkpoint": null,
    "epoch": 0.11428571428571428,
    "eval_steps": 500,
    "global_step": 100,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
    "log_history": [
| 12 |
+
{
|
| 13 |
+
"clip_ratio/high_max": 0.0,
|
| 14 |
+
"clip_ratio/high_mean": 0.0,
|
| 15 |
+
"clip_ratio/low_mean": 0.0,
|
| 16 |
+
"clip_ratio/low_min": 0.0,
|
| 17 |
+
"clip_ratio/region_mean": 0.0,
|
| 18 |
+
"completion_length": 2700.4271850585938,
|
| 19 |
+
"cov_mean": -6.0587970438064076e-05,
|
| 20 |
+
"cov_std": 0.35307812318205833,
|
| 21 |
+
"entropy": 0.36474609375,
|
| 22 |
+
"epoch": 0.001142857142857143,
|
| 23 |
+
"grad_norm": 0.45541471242904663,
|
| 24 |
+
"kl": 0.0,
|
| 25 |
+
"learning_rate": 1e-07,
|
| 26 |
+
"loss": -0.0382,
|
| 27 |
+
"reward": 0.7604166893288493,
|
| 28 |
+
"reward_std": 0.4268697127699852,
|
| 29 |
+
"rewards/accuracy_reward": 0.25000001303851604,
|
| 30 |
+
"rewards/format_reward": 0.5104166669771075,
|
| 31 |
+
"step": 1,
|
| 32 |
+
"w_high_ratio": 0.2200421690940857,
|
| 33 |
+
"w_low_ratio": 0.03663695091381669,
|
| 34 |
+
"w_max": 2.1593789756298065,
|
| 35 |
+
"w_mean": 1.4711343348026276,
|
| 36 |
+
"w_min": 6.525355682266089e-35,
|
| 37 |
+
"w_std": 0.2659660503268242
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"clip_ratio/high_max": 0.0,
|
| 41 |
+
"clip_ratio/high_mean": 0.0,
|
| 42 |
+
"clip_ratio/low_mean": 0.0,
|
| 43 |
+
"clip_ratio/low_min": 0.0,
|
| 44 |
+
"clip_ratio/region_mean": 0.0,
|
| 45 |
+
"completion_length": 3127.3958435058594,
|
| 46 |
+
"cov_mean": -2.155053698515985e-05,
|
| 47 |
+
"cov_std": 0.310540571808815,
|
| 48 |
+
"entropy": 0.353515625,
|
| 49 |
+
"epoch": 0.002285714285714286,
|
| 50 |
+
"grad_norm": 0.5143813490867615,
|
| 51 |
+
"kl": 0.0,
|
| 52 |
+
"learning_rate": 2e-07,
|
| 53 |
+
"loss": 0.0049,
|
| 54 |
+
"reward": 0.6458333637565374,
|
| 55 |
+
"reward_std": 0.4249730706214905,
|
| 56 |
+
"rewards/accuracy_reward": 0.2812500102445483,
|
| 57 |
+
"rewards/format_reward": 0.3645833386108279,
|
| 58 |
+
"step": 2,
|
| 59 |
+
"w_high_ratio": 0.05183619633316994,
|
| 60 |
+
"w_low_ratio": 0.036958135198801756,
|
| 61 |
+
"w_max": 1.8325217366218567,
|
| 62 |
+
"w_mean": 1.2113382518291473,
|
| 63 |
+
"w_min": 0.0,
|
| 64 |
+
"w_std": 0.20957503467798233
|
| 65 |
+
},
|
| 66 |
+
{
|
| 67 |
+
"clip_ratio/high_max": 0.0,
|
| 68 |
+
"clip_ratio/high_mean": 0.0,
|
| 69 |
+
"clip_ratio/low_mean": 0.0,
|
| 70 |
+
"clip_ratio/low_min": 0.0,
|
| 71 |
+
"clip_ratio/region_mean": 0.0,
|
| 72 |
+
"completion_length": 3791.375,
|
| 73 |
+
"cov_mean": -3.563215068425052e-05,
|
| 74 |
+
"cov_std": 0.28256653994321823,
|
| 75 |
+
"entropy": 0.4658203125,
|
| 76 |
+
"epoch": 0.0034285714285714284,
|
| 77 |
+
"grad_norm": 0.2701888084411621,
|
| 78 |
+
"kl": 4.756450653076172e-05,
|
| 79 |
+
"learning_rate": 3e-07,
|
| 80 |
+
"loss": 0.0344,
|
| 81 |
+
"reward": 0.16666667349636555,
|
| 82 |
+
"reward_std": 0.3025414012372494,
|
| 83 |
+
"rewards/accuracy_reward": 0.02083333395421505,
|
| 84 |
+
"rewards/format_reward": 0.14583333395421505,
|
| 85 |
+
"step": 3,
|
| 86 |
+
"w_high_ratio": 0.0,
|
| 87 |
+
"w_low_ratio": 0.03658500872552395,
|
| 88 |
+
"w_max": 1.348844289779663,
|
| 89 |
+
"w_mean": 1.0439709424972534,
|
| 90 |
+
"w_min": 0.0,
|
| 91 |
+
"w_std": 0.15747325122356415
|
| 92 |
+
},
|
| 93 |
+
{
|
| 94 |
+
"clip_ratio/high_max": 0.0,
|
| 95 |
+
"clip_ratio/high_mean": 0.0,
|
| 96 |
+
"clip_ratio/low_mean": 0.0,
|
| 97 |
+
"clip_ratio/low_min": 0.0,
|
| 98 |
+
"clip_ratio/region_mean": 0.0,
|
| 99 |
+
"completion_length": 2446.1250610351562,
|
| 100 |
+
"cov_mean": -2.657165350683499e-05,
|
| 101 |
+
"cov_std": 0.47042107582092285,
|
| 102 |
+
"entropy": 0.4052734375,
|
| 103 |
+
"epoch": 0.004571428571428572,
|
| 104 |
+
"grad_norm": 0.6580816507339478,
|
| 105 |
+
"kl": 2.866983413696289e-05,
|
| 106 |
+
"learning_rate": 4e-07,
|
| 107 |
+
"loss": -0.0116,
|
| 108 |
+
"reward": 0.8541666865348816,
|
| 109 |
+
"reward_std": 0.5623367577791214,
|
| 110 |
+
"rewards/accuracy_reward": 0.19791667070239782,
|
| 111 |
+
"rewards/format_reward": 0.6562500074505806,
|
| 112 |
+
"step": 4,
|
| 113 |
+
"w_high_ratio": 0.2048901468515396,
|
| 114 |
+
"w_low_ratio": 0.04687658231705427,
|
| 115 |
+
"w_max": 2.3084834814071655,
|
| 116 |
+
"w_mean": 1.5087227523326874,
|
| 117 |
+
"w_min": 3.5522916070634113e-43,
|
| 118 |
+
"w_std": 0.31092390790581703
|
| 119 |
+
},
|
| 120 |
+
{
|
| 121 |
+
"clip_ratio/high_max": 0.0,
|
| 122 |
+
"clip_ratio/high_mean": 0.0,
|
| 123 |
+
"clip_ratio/low_mean": 0.0,
|
| 124 |
+
"clip_ratio/low_min": 0.0,
|
| 125 |
+
"clip_ratio/region_mean": 0.0,
|
| 126 |
+
"completion_length": 3562.4166870117188,
|
| 127 |
+
"cov_mean": 3.653238718470675e-05,
|
| 128 |
+
"cov_std": 0.539387047290802,
|
| 129 |
+
"entropy": 0.45458984375,
|
| 130 |
+
"epoch": 0.005714285714285714,
|
| 131 |
+
"grad_norm": 0.30956918001174927,
|
| 132 |
+
"kl": 3.820657730102539e-05,
|
| 133 |
+
"learning_rate": 5e-07,
|
| 134 |
+
"loss": 0.0053,
|
| 135 |
+
"reward": 0.4479166939854622,
|
| 136 |
+
"reward_std": 0.5839087814092636,
|
| 137 |
+
"rewards/accuracy_reward": 0.08333333861082792,
|
| 138 |
+
"rewards/format_reward": 0.3645833507180214,
|
| 139 |
+
"step": 5,
|
| 140 |
+
"w_high_ratio": 0.009932879358530045,
|
| 141 |
+
"w_low_ratio": 0.061708422377705574,
|
| 142 |
+
"w_max": 1.4947779774665833,
|
| 143 |
+
"w_mean": 1.13177028298378,
|
| 144 |
+
"w_min": 0.0,
|
| 145 |
+
"w_std": 0.2904536984860897
|
| 146 |
+
},
|
| 147 |
+
{
|
| 148 |
+
"clip_ratio/high_max": 0.0,
|
| 149 |
+
"clip_ratio/high_mean": 0.0,
|
| 150 |
+
"clip_ratio/low_mean": 0.0,
|
| 151 |
+
"clip_ratio/low_min": 0.0,
|
| 152 |
+
"clip_ratio/region_mean": 0.0,
|
| 153 |
+
"completion_length": 3346.166748046875,
|
| 154 |
+
"cov_mean": 2.951182568722288e-05,
|
| 155 |
+
"cov_std": 0.4695303291082382,
|
| 156 |
+
"entropy": 0.474609375,
|
| 157 |
+
"epoch": 0.006857142857142857,
|
| 158 |
+
"grad_norm": 0.4116966426372528,
|
| 159 |
+
"kl": 4.678964614868164e-05,
|
| 160 |
+
"learning_rate": 6e-07,
|
| 161 |
+
"loss": 0.0655,
|
| 162 |
+
"reward": 0.40625001303851604,
|
| 163 |
+
"reward_std": 0.5175340622663498,
|
| 164 |
+
"rewards/accuracy_reward": 0.09375000186264515,
|
| 165 |
+
"rewards/format_reward": 0.31250001303851604,
|
| 166 |
+
"step": 6,
|
| 167 |
+
"w_high_ratio": 0.09942464530467987,
|
| 168 |
+
"w_low_ratio": 0.05820021778345108,
|
| 169 |
+
"w_max": 2.0522369146347046,
|
| 170 |
+
"w_mean": 1.2698509693145752,
|
| 171 |
+
"w_min": 6.311469302795941e-40,
|
| 172 |
+
"w_std": 0.3068386148661375
|
| 173 |
+
},
|
| 174 |
+
{
|
| 175 |
+
"clip_ratio/high_max": 0.0,
|
| 176 |
+
"clip_ratio/high_mean": 0.0,
|
| 177 |
+
"clip_ratio/low_mean": 0.0,
|
| 178 |
+
"clip_ratio/low_min": 0.0,
|
| 179 |
+
"clip_ratio/region_mean": 0.0,
|
| 180 |
+
"completion_length": 3291.197998046875,
|
| 181 |
+
"cov_mean": -2.080401827697642e-06,
|
| 182 |
+
"cov_std": 0.5660274773836136,
|
| 183 |
+
"entropy": 0.38671875,
|
| 184 |
+
"epoch": 0.008,
|
| 185 |
+
"grad_norm": 0.4001730680465698,
|
| 186 |
+
"kl": 2.8431415557861328e-05,
|
| 187 |
+
"learning_rate": 7e-07,
|
| 188 |
+
"loss": -0.0874,
|
| 189 |
+
"reward": 0.9687500298023224,
|
| 190 |
+
"reward_std": 0.639276884496212,
|
| 191 |
+
"rewards/accuracy_reward": 0.2812500074505806,
|
| 192 |
+
"rewards/format_reward": 0.6875000149011612,
|
| 193 |
+
"step": 7,
|
| 194 |
+
"w_high_ratio": 0.04338983818888664,
|
| 195 |
+
"w_low_ratio": 0.05278784967958927,
|
| 196 |
+
"w_max": 1.6053467988967896,
|
| 197 |
+
"w_mean": 1.2385202646255493,
|
| 198 |
+
"w_min": 0.0,
|
| 199 |
+
"w_std": 0.2744893953204155
|
| 200 |
+
},
|
| 201 |
+
{
|
| 202 |
+
"clip_ratio/high_max": 0.0,
|
| 203 |
+
"clip_ratio/high_mean": 0.0,
|
| 204 |
+
"clip_ratio/low_mean": 0.0,
|
| 205 |
+
"clip_ratio/low_min": 0.0,
|
| 206 |
+
"clip_ratio/region_mean": 0.0,
|
| 207 |
+
"completion_length": 2852.4896240234375,
|
| 208 |
+
"cov_mean": -2.6475029699213337e-05,
|
| 209 |
+
"cov_std": 0.24741052091121674,
|
| 210 |
+
"entropy": 0.349365234375,
|
| 211 |
+
"epoch": 0.009142857142857144,
|
| 212 |
+
"grad_norm": 0.28081798553466797,
|
| 213 |
+
"kl": 2.35140323638916e-05,
|
| 214 |
+
"learning_rate": 8e-07,
|
| 215 |
+
"loss": -0.0024,
|
| 216 |
+
"reward": 0.8750000111758709,
|
| 217 |
+
"reward_std": 0.3533418998122215,
|
| 218 |
+
"rewards/accuracy_reward": 0.3854166679084301,
|
| 219 |
+
"rewards/format_reward": 0.48958334885537624,
|
| 220 |
+
"step": 8,
|
| 221 |
+
"w_high_ratio": 0.0625,
|
| 222 |
+
"w_low_ratio": 0.026329820044338703,
|
| 223 |
+
"w_max": 1.744232177734375,
|
| 224 |
+
"w_mean": 1.2852342873811722,
|
| 225 |
+
"w_min": 0.25,
|
| 226 |
+
"w_std": 0.13892405480146408
|
| 227 |
+
},
|
| 228 |
+
{
|
| 229 |
+
"clip_ratio/high_max": 0.0,
|
| 230 |
+
"clip_ratio/high_mean": 0.0,
|
| 231 |
+
"clip_ratio/low_mean": 0.0,
|
| 232 |
+
"clip_ratio/low_min": 0.0,
|
| 233 |
+
"clip_ratio/region_mean": 0.0,
|
| 234 |
+
"completion_length": 3371.2708740234375,
|
| 235 |
+
"cov_mean": -2.0414277514646528e-05,
|
| 236 |
+
"cov_std": 0.3537435829639435,
|
| 237 |
+
"entropy": 0.4619140625,
|
| 238 |
+
"epoch": 0.010285714285714285,
|
| 239 |
+
"grad_norm": 0.3546924591064453,
|
| 240 |
+
"kl": 3.835558891296387e-05,
|
| 241 |
+
"learning_rate": 9e-07,
|
| 242 |
+
"loss": -0.0341,
|
| 243 |
+
"reward": 0.3958333432674408,
|
| 244 |
+
"reward_std": 0.4515319801867008,
|
| 245 |
+
"rewards/accuracy_reward": 0.09375000186264515,
|
| 246 |
+
"rewards/format_reward": 0.3020833358168602,
|
| 247 |
+
"step": 9,
|
| 248 |
+
"w_high_ratio": 0.07049691677093506,
|
| 249 |
+
"w_low_ratio": 0.03988973796367645,
|
| 250 |
+
"w_max": 1.8283900916576385,
|
| 251 |
+
"w_mean": 1.2170793116092682,
|
| 252 |
+
"w_min": 9.553366044251431e-29,
|
| 253 |
+
"w_std": 0.2274811826646328
|
| 254 |
+
},
|
| 255 |
+
{
|
| 256 |
+
"clip_ratio/high_max": 0.0,
|
| 257 |
+
"clip_ratio/high_mean": 0.0,
|
| 258 |
+
"clip_ratio/low_mean": 0.0,
|
| 259 |
+
"clip_ratio/low_min": 0.0,
|
| 260 |
+
"clip_ratio/region_mean": 0.0,
|
| 261 |
+
"completion_length": 2993.8438110351562,
|
| 262 |
+
"cov_mean": -3.597586055548163e-05,
|
| 263 |
+
"cov_std": 0.3790554851293564,
|
| 264 |
+
"entropy": 0.349609375,
|
| 265 |
+
"epoch": 0.011428571428571429,
|
| 266 |
+
"grad_norm": 0.4014468193054199,
|
| 267 |
+
"kl": 3.153085708618164e-05,
|
| 268 |
+
"learning_rate": 1e-06,
|
| 269 |
+
"loss": 0.111,
|
| 270 |
+
"reward": 0.572916679084301,
|
| 271 |
+
"reward_std": 0.5256113260984421,
|
| 272 |
+
"rewards/accuracy_reward": 0.15625000651925802,
|
| 273 |
+
"rewards/format_reward": 0.416666679084301,
|
| 274 |
+
"step": 10,
|
| 275 |
+
"w_high_ratio": 0.17509328201413155,
|
| 276 |
+
"w_low_ratio": 0.04464914742857218,
|
| 277 |
+
"w_max": 2.3221429884433746,
|
| 278 |
+
"w_mean": 1.403680145740509,
|
| 279 |
+
"w_min": 0.0,
|
| 280 |
+
"w_std": 0.2820280008018017
|
| 281 |
+
},
|
| 282 |
+
{
|
| 283 |
+
"clip_ratio/high_max": 0.0,
|
| 284 |
+
"clip_ratio/high_mean": 0.0,
|
| 285 |
+
"clip_ratio/low_mean": 0.0,
|
| 286 |
+
"clip_ratio/low_min": 0.0,
|
| 287 |
+
"clip_ratio/region_mean": 0.0,
|
| 288 |
+
"completion_length": 3645.3126220703125,
|
| 289 |
+
"cov_mean": 1.727970106912835e-05,
|
| 290 |
+
"cov_std": 0.314908966422081,
|
| 291 |
+
"entropy": 0.3701171875,
|
| 292 |
+
"epoch": 0.012571428571428572,
|
| 293 |
+
"grad_norm": 0.40044310688972473,
|
| 294 |
+
"kl": 3.1620264053344727e-05,
|
| 295 |
+
"learning_rate": 9.997258721585931e-07,
|
| 296 |
+
"loss": 0.0585,
|
| 297 |
+
"reward": 0.25000001303851604,
|
| 298 |
+
"reward_std": 0.4806990921497345,
|
| 299 |
+
"rewards/accuracy_reward": 0.09375000279396772,
|
| 300 |
+
"rewards/format_reward": 0.15625000279396772,
|
| 301 |
+
"step": 11,
|
| 302 |
+
"w_high_ratio": 0.0,
|
| 303 |
+
"w_low_ratio": 0.04341120272874832,
|
| 304 |
+
"w_max": 1.4488586485385895,
|
| 305 |
+
"w_mean": 1.097432792186737,
|
| 306 |
+
"w_min": 4.6695499555262094e-38,
|
| 307 |
+
"w_std": 0.2005491964519024
|
| 308 |
+
},
|
| 309 |
+
{
|
| 310 |
+
"clip_ratio/high_max": 0.0,
|
| 311 |
+
"clip_ratio/high_mean": 0.0,
|
| 312 |
+
"clip_ratio/low_mean": 0.0,
|
| 313 |
+
"clip_ratio/low_min": 0.0,
|
| 314 |
+
"clip_ratio/region_mean": 0.0,
|
| 315 |
+
"completion_length": 2548.0729370117188,
|
| 316 |
+
"cov_mean": -8.678332051204052e-05,
|
| 317 |
+
"cov_std": 0.3924334645271301,
|
| 318 |
+
"entropy": 0.3896484375,
|
| 319 |
+
"epoch": 0.013714285714285714,
|
| 320 |
+
"grad_norm": 0.40460628271102905,
|
| 321 |
+
"kl": 3.9458274841308594e-05,
|
| 322 |
+
"learning_rate": 9.989038226169207e-07,
|
| 323 |
+
"loss": 0.0329,
|
| 324 |
+
"reward": 0.9479166939854622,
|
| 325 |
+
"reward_std": 0.4162924438714981,
|
| 326 |
+
"rewards/accuracy_reward": 0.23958334140479565,
|
| 327 |
+
"rewards/format_reward": 0.708333358168602,
|
| 328 |
+
"step": 12,
|
| 329 |
+
"w_high_ratio": 0.14796987175941467,
|
| 330 |
+
"w_low_ratio": 0.038647969253361225,
|
| 331 |
+
"w_max": 2.0233654975891113,
|
| 332 |
+
"w_mean": 1.479979693889618,
|
| 333 |
+
"w_min": 0.0,
|
| 334 |
+
"w_std": 0.2828930839896202
|
| 335 |
+
},
|
| 336 |
+
{
|
| 337 |
+
"clip_ratio/high_max": 0.0,
|
| 338 |
+
"clip_ratio/high_mean": 0.0,
|
| 339 |
+
"clip_ratio/low_mean": 0.0,
|
| 340 |
+
"clip_ratio/low_min": 0.0,
|
| 341 |
+
"clip_ratio/region_mean": 0.0,
|
| 342 |
+
"completion_length": 3180.9896850585938,
|
| 343 |
+
"cov_mean": -3.604557650760398e-05,
|
| 344 |
+
"cov_std": 0.29976917430758476,
|
| 345 |
+
"entropy": 0.39111328125,
|
| 346 |
+
"epoch": 0.014857142857142857,
|
| 347 |
+
"grad_norm": 0.42143014073371887,
|
| 348 |
+
"kl": 2.7954578399658203e-05,
|
| 349 |
+
"learning_rate": 9.975348529157229e-07,
|
| 350 |
+
"loss": 0.0007,
|
| 351 |
+
"reward": 0.5937500298023224,
|
| 352 |
+
"reward_std": 0.39751993864774704,
|
| 353 |
+
"rewards/accuracy_reward": 0.18750000558793545,
|
| 354 |
+
"rewards/format_reward": 0.4062500074505806,
|
| 355 |
+
"step": 13,
|
| 356 |
+
"w_high_ratio": 0.13092797622084618,
|
| 357 |
+
"w_low_ratio": 0.038139537908136845,
|
| 358 |
+
"w_max": 1.9087003767490387,
|
| 359 |
+
"w_mean": 1.2740049362182617,
|
| 360 |
+
"w_min": 0.0,
|
| 361 |
+
"w_std": 0.20897787064313889
|
| 362 |
+
},
|
| 363 |
+
{
|
| 364 |
+
"clip_ratio/high_max": 0.0,
|
| 365 |
+
"clip_ratio/high_mean": 0.0,
|
| 366 |
+
"clip_ratio/low_mean": 0.0,
|
| 367 |
+
"clip_ratio/low_min": 0.0,
|
| 368 |
+
"clip_ratio/region_mean": 0.0,
|
| 369 |
+
"completion_length": 3071.760498046875,
|
| 370 |
+
"cov_mean": -1.3722287803830113e-06,
|
| 371 |
+
"cov_std": 0.3737764284014702,
|
| 372 |
+
"entropy": 0.37744140625,
|
| 373 |
+
"epoch": 0.016,
|
| 374 |
+
"grad_norm": 0.5302906632423401,
|
| 375 |
+
"kl": 2.9087066650390625e-05,
|
| 376 |
+
"learning_rate": 9.956206309337066e-07,
|
| 377 |
+
"loss": 0.0182,
|
| 378 |
+
"reward": 0.541666679084301,
|
| 379 |
+
"reward_std": 0.4254928454756737,
|
| 380 |
+
"rewards/accuracy_reward": 0.15625000279396772,
|
| 381 |
+
"rewards/format_reward": 0.3854166679084301,
|
| 382 |
+
"step": 14,
|
| 383 |
+
"w_high_ratio": 0.13122042268514633,
|
| 384 |
+
"w_low_ratio": 0.04646214470267296,
|
| 385 |
+
"w_max": 2.057934284210205,
|
| 386 |
+
"w_mean": 1.2967519462108612,
|
| 387 |
+
"w_min": 6.977258874336181e-23,
|
| 388 |
+
"w_std": 0.279865525662899
|
| 389 |
+
},
|
| 390 |
+
{
|
| 391 |
+
"clip_ratio/high_max": 0.0,
|
| 392 |
+
"clip_ratio/high_mean": 0.0,
|
| 393 |
+
"clip_ratio/low_mean": 0.0,
|
| 394 |
+
"clip_ratio/low_min": 0.0,
|
| 395 |
+
"clip_ratio/region_mean": 0.0,
|
| 396 |
+
"completion_length": 2915.2396240234375,
|
| 397 |
+
"cov_mean": -2.9782687306578737e-05,
|
| 398 |
+
"cov_std": 0.3060881793498993,
|
| 399 |
+
"entropy": 0.3681640625,
|
| 400 |
+
"epoch": 0.017142857142857144,
|
| 401 |
+
"grad_norm": 0.471722811460495,
|
| 402 |
+
"kl": 3.0308961868286133e-05,
|
| 403 |
+
"learning_rate": 9.931634888554935e-07,
|
| 404 |
+
"loss": 0.0075,
|
| 405 |
+
"reward": 0.6145833432674408,
|
| 406 |
+
"reward_std": 0.3603988029062748,
|
| 407 |
+
"rewards/accuracy_reward": 0.19791666977107525,
|
| 408 |
+
"rewards/format_reward": 0.4166666679084301,
|
| 409 |
+
"step": 15,
|
| 410 |
+
"w_high_ratio": 0.0,
|
| 411 |
+
"w_low_ratio": 0.03737350553274155,
|
| 412 |
+
"w_max": 1.5459995865821838,
|
| 413 |
+
"w_mean": 1.177234023809433,
|
| 414 |
+
"w_min": 0.0,
|
| 415 |
+
"w_std": 0.20952722802758217
|
| 416 |
+
},
|
| 417 |
+
{
|
| 418 |
+
"clip_ratio/high_max": 0.0,
|
| 419 |
+
"clip_ratio/high_mean": 0.0,
|
| 420 |
+
"clip_ratio/low_mean": 0.0,
|
| 421 |
+
"clip_ratio/low_min": 0.0,
|
| 422 |
+
"clip_ratio/region_mean": 0.0,
|
| 423 |
+
"completion_length": 3857.7084350585938,
|
| 424 |
+
"cov_mean": -3.300000025774352e-05,
|
| 425 |
+
"cov_std": 0.18836934491991997,
|
| 426 |
+
"entropy": 0.45751953125,
|
| 427 |
+
"epoch": 0.018285714285714287,
|
| 428 |
+
"grad_norm": 0.31818071007728577,
|
| 429 |
+
"kl": 3.904104232788086e-05,
|
| 430 |
+
"learning_rate": 9.901664203302124e-07,
|
| 431 |
+
"loss": 0.0556,
|
| 432 |
+
"reward": 0.13541666977107525,
|
| 433 |
+
"reward_std": 0.249445378780365,
|
| 434 |
+
"rewards/accuracy_reward": 0.052083334885537624,
|
| 435 |
+
"rewards/format_reward": 0.0833333358168602,
|
| 436 |
+
"step": 16,
|
| 437 |
+
"w_high_ratio": 0.0,
|
| 438 |
+
"w_low_ratio": 0.02626894786953926,
|
| 439 |
+
"w_max": 1.197378009557724,
|
| 440 |
+
"w_mean": 1.0219765603542328,
|
| 441 |
+
"w_min": 0.25,
|
| 442 |
+
"w_std": 0.10555266216397285
|
| 443 |
+
},
|
| 444 |
+
{
|
| 445 |
+
"clip_ratio/high_max": 0.0,
|
| 446 |
+
"clip_ratio/high_mean": 0.0,
|
| 447 |
+
"clip_ratio/low_mean": 0.0,
|
| 448 |
+
"clip_ratio/low_min": 0.0,
|
| 449 |
+
"clip_ratio/region_mean": 0.0,
|
| 450 |
+
"completion_length": 2503.479232788086,
|
| 451 |
+
"cov_mean": 5.234984382695984e-05,
|
| 452 |
+
"cov_std": 0.34602249413728714,
|
| 453 |
+
"entropy": 0.43408203125,
|
| 454 |
+
"epoch": 0.019428571428571427,
|
| 455 |
+
"grad_norm": 0.43525052070617676,
|
| 456 |
+
"kl": 5.2034854888916016e-05,
|
| 457 |
+
"learning_rate": 9.866330768241983e-07,
|
| 458 |
+
"loss": 0.0179,
|
| 459 |
+
"reward": 0.7604167014360428,
|
| 460 |
+
"reward_std": 0.4241996556520462,
|
| 461 |
+
"rewards/accuracy_reward": 0.1770833432674408,
|
| 462 |
+
"rewards/format_reward": 0.5833333432674408,
|
| 463 |
+
"step": 17,
|
| 464 |
+
"w_high_ratio": 0.13831434771418571,
|
| 465 |
+
"w_low_ratio": 0.039523204788565636,
|
| 466 |
+
"w_max": 2.07596218585968,
|
| 467 |
+
"w_mean": 1.3631863296031952,
|
| 468 |
+
"w_min": 0.25,
|
| 469 |
+
"w_std": 0.22783420607447624
|
| 470 |
+
},
|
| 471 |
+
{
|
| 472 |
+
"clip_ratio/high_max": 0.0,
|
| 473 |
+
"clip_ratio/high_mean": 0.0,
|
| 474 |
+
"clip_ratio/low_mean": 0.0,
|
| 475 |
+
"clip_ratio/low_min": 0.0,
|
| 476 |
+
"clip_ratio/region_mean": 0.0,
|
| 477 |
+
"completion_length": 3050.8438110351562,
|
| 478 |
+
"cov_mean": -8.538075144315371e-05,
|
| 479 |
+
"cov_std": 0.34513213485479355,
|
| 480 |
+
"entropy": 0.36328125,
|
| 481 |
+
"epoch": 0.02057142857142857,
|
| 482 |
+
"grad_norm": 0.3079332113265991,
|
| 483 |
+
"kl": 5.075335502624512e-05,
|
| 484 |
+
"learning_rate": 9.825677631722435e-07,
|
| 485 |
+
"loss": 0.005,
|
| 486 |
+
"reward": 0.5833333432674408,
|
| 487 |
+
"reward_std": 0.4453107975423336,
|
| 488 |
+
"rewards/accuracy_reward": 0.1250000037252903,
|
| 489 |
+
"rewards/format_reward": 0.4583333469927311,
|
| 490 |
+
"step": 18,
|
| 491 |
+
"w_high_ratio": 0.0583355538547039,
|
| 492 |
+
"w_low_ratio": 0.038819507928565145,
|
| 493 |
+
"w_max": 1.7474263310432434,
|
| 494 |
+
"w_mean": 1.2030333578586578,
|
| 495 |
+
"w_min": 1.0509738482436128e-45,
|
| 496 |
+
"w_std": 0.20471886917948723
|
| 497 |
+
},
|
| 498 |
+
{
|
| 499 |
+
"clip_ratio/high_max": 0.0,
|
| 500 |
+
"clip_ratio/high_mean": 0.0,
|
| 501 |
+
"clip_ratio/low_mean": 0.0,
|
| 502 |
+
"clip_ratio/low_min": 0.0,
|
| 503 |
+
"clip_ratio/region_mean": 0.0,
|
| 504 |
+
"completion_length": 3198.7500610351562,
|
| 505 |
+
"cov_mean": 6.975242376938695e-05,
|
| 506 |
+
"cov_std": 0.4591265842318535,
|
| 507 |
+
"entropy": 0.3994140625,
|
| 508 |
+
"epoch": 0.021714285714285714,
|
| 509 |
+
"grad_norm": 0.44368991255760193,
|
| 510 |
+
"kl": 5.0961971282958984e-05,
|
| 511 |
+
"learning_rate": 9.779754323328192e-07,
|
| 512 |
+
"loss": -0.0137,
|
| 513 |
+
"reward": 0.8541666865348816,
|
| 514 |
+
"reward_std": 0.6421672403812408,
|
| 515 |
+
"rewards/accuracy_reward": 0.3750000074505806,
|
| 516 |
+
"rewards/format_reward": 0.479166679084301,
|
| 517 |
+
"step": 19,
|
| 518 |
+
"w_high_ratio": 0.07967927679419518,
|
| 519 |
+
"w_low_ratio": 0.055658016353845596,
|
| 520 |
+
"w_max": 1.7943021953105927,
|
| 521 |
+
"w_mean": 1.2339626252651215,
|
| 522 |
+
"w_min": 0.0,
|
| 523 |
+
"w_std": 0.28908008337020874
|
| 524 |
+
},
|
| 525 |
+
{
|
| 526 |
+
"clip_ratio/high_max": 0.0,
|
| 527 |
+
"clip_ratio/high_mean": 0.0,
|
| 528 |
+
"clip_ratio/low_mean": 0.0,
|
| 529 |
+
"clip_ratio/low_min": 0.0,
|
| 530 |
+
"clip_ratio/region_mean": 0.0,
|
| 531 |
+
"completion_length": 2531.0208740234375,
|
| 532 |
+
"cov_mean": 3.258255492255557e-05,
|
| 533 |
+
"cov_std": 0.3959212973713875,
|
| 534 |
+
"entropy": 0.3134765625,
|
| 535 |
+
"epoch": 0.022857142857142857,
|
| 536 |
+
"grad_norm": 0.42710381746292114,
|
| 537 |
+
"kl": 7.474422454833984e-05,
|
| 538 |
+
"learning_rate": 9.728616793536587e-07,
|
| 539 |
+
"loss": -0.0142,
|
| 540 |
+
"reward": 0.9479166865348816,
|
| 541 |
+
"reward_std": 0.4626789018511772,
|
| 542 |
+
"rewards/accuracy_reward": 0.260416679084301,
|
| 543 |
+
"rewards/format_reward": 0.6875000149011612,
|
| 544 |
+
"step": 20,
|
| 545 |
+
"w_high_ratio": 0.20212292298674583,
|
| 546 |
+
"w_low_ratio": 0.040643465239554644,
|
| 547 |
+
"w_max": 2.211318254470825,
|
| 548 |
+
"w_mean": 1.4521130919456482,
|
| 549 |
+
"w_min": 0.0,
|
| 550 |
+
"w_std": 0.25691715627908707
|
| 551 |
+
},
|
| 552 |
+
{
|
| 553 |
+
"clip_ratio/high_max": 0.0,
|
| 554 |
+
"clip_ratio/high_mean": 0.0,
|
| 555 |
+
"clip_ratio/low_mean": 0.0,
|
| 556 |
+
"clip_ratio/low_min": 0.0,
|
| 557 |
+
"clip_ratio/region_mean": 0.0,
|
| 558 |
+
"completion_length": 2890.541778564453,
|
| 559 |
+
"cov_mean": 1.6164100088644773e-05,
|
| 560 |
+
"cov_std": 0.2818361334502697,
|
| 561 |
+
"entropy": 0.421875,
|
| 562 |
+
"epoch": 0.024,
|
| 563 |
+
"grad_norm": 0.8042517900466919,
|
| 564 |
+
"kl": 0.0001793205738067627,
|
| 565 |
+
"learning_rate": 9.672327345550543e-07,
|
| 566 |
+
"loss": 0.073,
|
| 567 |
+
"reward": 0.697916692122817,
|
| 568 |
+
"reward_std": 0.3805258348584175,
|
| 569 |
+
"rewards/accuracy_reward": 0.22916667070239782,
|
| 570 |
+
"rewards/format_reward": 0.4687500102445483,
|
| 571 |
+
"step": 21,
|
| 572 |
+
"w_high_ratio": 0.1653403341770172,
|
| 573 |
+
"w_low_ratio": 0.034426179714500904,
|
| 574 |
+
"w_max": 2.0890542566776276,
|
| 575 |
+
"w_mean": 1.4488303065299988,
|
| 576 |
+
"w_min": 0.25,
|
| 577 |
+
"w_std": 0.26479026675224304
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"clip_ratio/high_max": 0.0,
|
| 581 |
+
"clip_ratio/high_mean": 0.0,
|
| 582 |
+
"clip_ratio/low_mean": 0.0,
|
| 583 |
+
"clip_ratio/low_min": 0.0,
|
| 584 |
+
"clip_ratio/region_mean": 0.0,
|
| 585 |
+
"completion_length": 2023.8750610351562,
|
| 586 |
+
"cov_mean": -3.984599959494517e-05,
|
| 587 |
+
"cov_std": 0.3324627988040447,
|
| 588 |
+
"entropy": 0.41162109375,
|
| 589 |
+
"epoch": 0.025142857142857144,
|
| 590 |
+
"grad_norm": 0.43941012024879456,
|
| 591 |
+
"kl": 0.0001862645149230957,
|
| 592 |
+
"learning_rate": 9.610954559391704e-07,
|
| 593 |
+
"loss": 0.0018,
|
| 594 |
+
"reward": 1.0416666865348816,
|
| 595 |
+
"reward_std": 0.32273583114147186,
|
| 596 |
+
"rewards/accuracy_reward": 0.2708333386108279,
|
| 597 |
+
"rewards/format_reward": 0.7708333730697632,
|
| 598 |
+
"step": 22,
|
| 599 |
+
"w_high_ratio": 0.16147570684552193,
|
| 600 |
+
"w_low_ratio": 0.026547667337581515,
|
| 601 |
+
"w_max": 2.171365201473236,
|
| 602 |
+
"w_mean": 1.5571591556072235,
|
| 603 |
+
"w_min": 2.1019476964872256e-45,
|
| 604 |
+
"w_std": 0.16451371088624
|
| 605 |
+
},
|
| 606 |
+
{
|
| 607 |
+
"clip_ratio/high_max": 0.0,
|
| 608 |
+
"clip_ratio/high_mean": 0.0,
|
| 609 |
+
"clip_ratio/low_mean": 0.0,
|
| 610 |
+
"clip_ratio/low_min": 0.0,
|
| 611 |
+
"clip_ratio/region_mean": 0.0,
|
| 612 |
+
"completion_length": 2749.6146850585938,
|
| 613 |
+
"cov_mean": -4.101629156139097e-05,
|
| 614 |
+
"cov_std": 0.25967343151569366,
|
| 615 |
+
"entropy": 0.37939453125,
|
| 616 |
+
"epoch": 0.026285714285714287,
|
| 617 |
+
"grad_norm": 0.37253376841545105,
|
| 618 |
+
"kl": 0.00014030933380126953,
|
| 619 |
+
"learning_rate": 9.54457320834625e-07,
|
| 620 |
+
"loss": -0.0678,
|
| 621 |
+
"reward": 0.5625000223517418,
|
| 622 |
+
"reward_std": 0.3055335730314255,
|
| 623 |
+
"rewards/accuracy_reward": 0.11458333861082792,
|
| 624 |
+
"rewards/format_reward": 0.4479166679084301,
|
| 625 |
+
"step": 23,
|
| 626 |
+
"w_high_ratio": 0.14202075079083443,
|
| 627 |
+
"w_low_ratio": 0.028185136150568724,
|
| 628 |
+
"w_max": 2.1473127901554108,
|
| 629 |
+
"w_mean": 1.35337632894516,
|
| 630 |
+
"w_min": 1.3966908548442706e-38,
|
| 631 |
+
"w_std": 0.21078352630138397
|
| 632 |
+
},
|
| 633 |
+
{
|
| 634 |
+
"clip_ratio/high_max": 0.0,
|
| 635 |
+
"clip_ratio/high_mean": 0.0,
|
| 636 |
+
"clip_ratio/low_mean": 0.0,
|
| 637 |
+
"clip_ratio/low_min": 0.0,
|
| 638 |
+
"clip_ratio/region_mean": 0.0,
|
| 639 |
+
"completion_length": 3073.7813110351562,
|
| 640 |
+
"cov_mean": -2.623773912091565e-05,
|
| 641 |
+
"cov_std": 0.5731624215841293,
|
| 642 |
+
"entropy": 0.365234375,
|
| 643 |
+
"epoch": 0.027428571428571427,
|
| 644 |
+
"grad_norm": 0.6742011904716492,
|
| 645 |
+
"kl": 0.00015079975128173828,
|
| 646 |
+
"learning_rate": 9.473264167865171e-07,
|
| 647 |
+
"loss": -0.0491,
|
| 648 |
+
"reward": 0.7916666939854622,
|
| 649 |
+
"reward_std": 0.7121171355247498,
|
| 650 |
+
"rewards/accuracy_reward": 0.2812500074505806,
|
| 651 |
+
"rewards/format_reward": 0.510416679084301,
|
| 652 |
+
"step": 24,
|
| 653 |
+
"w_high_ratio": 0.15864675119519234,
|
| 654 |
+
"w_low_ratio": 0.057481554336845875,
|
| 655 |
+
"w_max": 2.1011292338371277,
|
| 656 |
+
"w_mean": 1.4009218215942383,
|
| 657 |
+
"w_min": 0.0,
|
| 658 |
+
"w_std": 0.3654456064105034
|
| 659 |
+
},
|
| 660 |
+
{
|
| 661 |
+
"clip_ratio/high_max": 0.0,
|
| 662 |
+
"clip_ratio/high_mean": 0.0,
|
| 663 |
+
"clip_ratio/low_mean": 0.0,
|
| 664 |
+
"clip_ratio/low_min": 0.0,
|
| 665 |
+
"clip_ratio/region_mean": 0.0,
|
| 666 |
+
"completion_length": 3008.9271850585938,
|
| 667 |
+
"cov_mean": -5.06913784192875e-05,
|
| 668 |
+
"cov_std": 0.360674187541008,
|
| 669 |
+
"entropy": 0.43896484375,
|
| 670 |
+
"epoch": 0.02857142857142857,
|
| 671 |
+
"grad_norm": 0.6551496982574463,
|
| 672 |
+
"kl": 0.00020998716354370117,
|
| 673 |
+
"learning_rate": 9.397114317029974e-07,
|
| 674 |
+
"loss": 0.0466,
|
| 675 |
+
"reward": 0.5208333432674408,
|
| 676 |
+
"reward_std": 0.4276355504989624,
|
| 677 |
+
"rewards/accuracy_reward": 0.1354166679084301,
|
| 678 |
+
"rewards/format_reward": 0.3854166865348816,
|
| 679 |
+
"step": 25,
|
| 680 |
+
"w_high_ratio": 0.10193426162004471,
|
| 681 |
+
"w_low_ratio": 0.04761309362947941,
|
| 682 |
+
"w_max": 2.144305258989334,
|
| 683 |
+
"w_mean": 1.3412529230117798,
|
| 684 |
+
"w_min": 0.25,
|
| 685 |
+
"w_std": 0.2752615138888359
|
| 686 |
+
},
|
| 687 |
+
{
|
| 688 |
+
"clip_ratio/high_max": 0.0,
|
| 689 |
+
"clip_ratio/high_mean": 0.0,
|
| 690 |
+
"clip_ratio/low_mean": 0.0,
|
| 691 |
+
"clip_ratio/low_min": 0.0,
|
| 692 |
+
"clip_ratio/region_mean": 0.0,
|
| 693 |
+
"completion_length": 3288.4896240234375,
|
| 694 |
+
"cov_mean": 1.0494537946215132e-05,
|
| 695 |
+
"cov_std": 0.2694205194711685,
|
| 696 |
+
"entropy": 0.423828125,
|
| 697 |
+
"epoch": 0.029714285714285714,
|
| 698 |
+
"grad_norm": 0.2771300673484802,
|
| 699 |
+
"kl": 4.094839096069336e-05,
|
| 700 |
+
"learning_rate": 9.316216432703916e-07,
|
| 701 |
+
"loss": -0.0308,
|
| 702 |
+
"reward": 0.6875000298023224,
|
| 703 |
+
"reward_std": 0.3060605004429817,
|
| 704 |
+
"rewards/accuracy_reward": 0.2395833358168602,
|
| 705 |
+
"rewards/format_reward": 0.447916679084301,
|
| 706 |
+
"step": 26,
|
| 707 |
+
"w_high_ratio": 0.0,
|
| 708 |
+
"w_low_ratio": 0.026628307532519102,
|
| 709 |
+
"w_max": 1.6005243062973022,
|
| 710 |
+
"w_mean": 1.1837812960147858,
|
| 711 |
+
"w_min": 0.0,
|
| 712 |
+
"w_std": 0.15812482312321663
|
| 713 |
+
},
|
| 714 |
+
{
|
| 715 |
+
"clip_ratio/high_max": 0.0,
|
| 716 |
+
"clip_ratio/high_mean": 0.0,
|
| 717 |
+
"clip_ratio/low_mean": 0.0,
|
| 718 |
+
"clip_ratio/low_min": 0.0,
|
| 719 |
+
"clip_ratio/region_mean": 0.0,
|
| 720 |
+
"completion_length": 3304.0625610351562,
|
| 721 |
+
"cov_mean": 2.6395198347017867e-05,
|
| 722 |
+
"cov_std": 0.47998297959566116,
|
| 723 |
+
"entropy": 0.43408203125,
|
| 724 |
+
"epoch": 0.030857142857142857,
|
| 725 |
+
"grad_norm": 0.3476252257823944,
|
| 726 |
+
"kl": 0.0001595616340637207,
|
| 727 |
+
"learning_rate": 9.230669076497687e-07,
|
| 728 |
+
"loss": -0.0198,
|
| 729 |
+
"reward": 0.614583358168602,
|
| 730 |
+
"reward_std": 0.5712436102330685,
|
| 731 |
+
"rewards/accuracy_reward": 0.1666666679084301,
|
| 732 |
+
"rewards/format_reward": 0.4479166716337204,
|
| 733 |
+
"step": 27,
|
| 734 |
+
"w_high_ratio": 0.12017197906970978,
|
| 735 |
+
"w_low_ratio": 0.05561595968902111,
|
| 736 |
+
"w_max": 1.8565902709960938,
|
| 737 |
+
"w_mean": 1.2779352962970734,
|
| 738 |
+
"w_min": 0.0,
|
| 739 |
+
"w_std": 0.2663590759038925
|
| 740 |
+
},
|
| 741 |
+
{
|
| 742 |
+
"clip_ratio/high_max": 0.0,
|
| 743 |
+
"clip_ratio/high_mean": 0.0,
|
| 744 |
+
"clip_ratio/low_mean": 0.0,
|
| 745 |
+
"clip_ratio/low_min": 0.0,
|
| 746 |
+
"clip_ratio/region_mean": 0.0,
|
| 747 |
+
"completion_length": 3088.8854370117188,
|
| 748 |
+
"cov_mean": -3.223641306249192e-05,
|
| 749 |
+
"cov_std": 0.42785073816776276,
|
| 750 |
+
"entropy": 0.4013671875,
|
| 751 |
+
"epoch": 0.032,
|
| 752 |
+
"grad_norm": 0.33925580978393555,
|
| 753 |
+
"kl": 0.00015425682067871094,
|
| 754 |
+
"learning_rate": 9.140576474687263e-07,
|
| 755 |
+
"loss": 0.0176,
|
| 756 |
+
"reward": 0.739583358168602,
|
| 757 |
+
"reward_std": 0.5647517889738083,
|
| 758 |
+
"rewards/accuracy_reward": 0.29166667722165585,
|
| 759 |
+
"rewards/format_reward": 0.4479166865348816,
|
| 760 |
+
"step": 28,
|
| 761 |
+
"w_high_ratio": 0.049459055066108704,
|
| 762 |
+
"w_low_ratio": 0.046367804519832134,
|
| 763 |
+
"w_max": 1.811535805463791,
|
| 764 |
+
"w_mean": 1.2396393418312073,
|
| 765 |
+
"w_min": 0.0,
|
| 766 |
+
"w_std": 0.264127716422081
|
| 767 |
+
},
|
| 768 |
+
{
|
| 769 |
+
"clip_ratio/high_max": 0.0,
|
| 770 |
+
"clip_ratio/high_mean": 0.0,
|
| 771 |
+
"clip_ratio/low_mean": 0.0,
|
| 772 |
+
"clip_ratio/low_min": 0.0,
|
| 773 |
+
"clip_ratio/region_mean": 0.0,
|
| 774 |
+
"completion_length": 3654.9375610351562,
|
| 775 |
+
"cov_mean": -5.409401546785375e-05,
|
| 776 |
+
"cov_std": 0.37247660756111145,
|
| 777 |
+
"entropy": 0.4443359375,
|
| 778 |
+
"epoch": 0.03314285714285714,
|
| 779 |
+
"grad_norm": 0.3806890547275543,
|
| 780 |
+
"kl": 0.00038933753967285156,
|
| 781 |
+
"learning_rate": 9.046048391230247e-07,
|
| 782 |
+
"loss": 0.0781,
|
| 783 |
+
"reward": 0.28125,
|
| 784 |
+
"reward_std": 0.4390515610575676,
|
| 785 |
+
"rewards/accuracy_reward": 0.09375000279396772,
|
| 786 |
+
"rewards/format_reward": 0.18750000558793545,
|
| 787 |
+
"step": 29,
|
| 788 |
+
"w_high_ratio": 0.0,
|
| 789 |
+
"w_low_ratio": 0.04866650328040123,
|
| 790 |
+
"w_max": 1.3806794583797455,
|
| 791 |
+
"w_mean": 1.0922435522079468,
|
| 792 |
+
"w_min": 0.25,
|
| 793 |
+
"w_std": 0.21911596134305
|
| 794 |
+
},
|
| 795 |
+
{
|
| 796 |
+
"clip_ratio/high_max": 0.0,
|
| 797 |
+
"clip_ratio/high_mean": 0.0,
|
| 798 |
+
"clip_ratio/low_mean": 0.0,
|
| 799 |
+
"clip_ratio/low_min": 0.0,
|
| 800 |
+
"clip_ratio/region_mean": 0.0,
|
| 801 |
+
"completion_length": 3306.5938110351562,
|
| 802 |
+
"cov_mean": -2.3586735551361926e-05,
|
| 803 |
+
"cov_std": 0.4439524784684181,
|
| 804 |
+
"entropy": 0.37841796875,
|
| 805 |
+
"epoch": 0.03428571428571429,
|
| 806 |
+
"grad_norm": 0.49297624826431274,
|
| 807 |
+
"kl": 0.0008985996246337891,
|
| 808 |
+
"learning_rate": 8.9471999940354e-07,
|
| 809 |
+
"loss": -0.0256,
|
| 810 |
+
"reward": 0.6458333544433117,
|
| 811 |
+
"reward_std": 0.5611635595560074,
|
| 812 |
+
"rewards/accuracy_reward": 0.2187500037252903,
|
| 813 |
+
"rewards/format_reward": 0.4270833395421505,
|
| 814 |
+
"step": 30,
|
| 815 |
+
"w_high_ratio": 0.06216667778789997,
|
| 816 |
+
"w_low_ratio": 0.05258181784301996,
|
| 817 |
+
"w_max": 1.9480324983596802,
|
| 818 |
+
"w_mean": 1.2739951610565186,
|
| 819 |
+
"w_min": 4.3700652660890127e-35,
|
| 820 |
+
"w_std": 0.28470994904637337
|
| 821 |
+
},
|
| 822 |
+
{
|
| 823 |
+
"clip_ratio/high_max": 0.0,
|
| 824 |
+
"clip_ratio/high_mean": 0.0,
|
| 825 |
+
"clip_ratio/low_mean": 0.0,
|
| 826 |
+
"clip_ratio/low_min": 0.0,
|
| 827 |
+
"clip_ratio/region_mean": 0.0,
|
| 828 |
+
"completion_length": 3249.072998046875,
|
| 829 |
+
"cov_mean": 7.066663692967268e-05,
|
| 830 |
+
"cov_std": 0.31172432936728,
|
| 831 |
+
"entropy": 0.38232421875,
|
| 832 |
+
"epoch": 0.03542857142857143,
|
| 833 |
+
"grad_norm": 0.41590920090675354,
|
| 834 |
+
"kl": 0.0011307001113891602,
|
| 835 |
+
"learning_rate": 8.844151714648274e-07,
|
| 836 |
+
"loss": -0.009,
|
| 837 |
+
"reward": 0.541666679084301,
|
| 838 |
+
"reward_std": 0.48244282230734825,
|
| 839 |
+
"rewards/accuracy_reward": 0.19791667442768812,
|
| 840 |
+
"rewards/format_reward": 0.34375000558793545,
|
| 841 |
+
"step": 31,
|
| 842 |
+
"w_high_ratio": 0.09930127486586571,
|
| 843 |
+
"w_low_ratio": 0.04017635714262724,
|
| 844 |
+
"w_max": 2.09058153629303,
|
| 845 |
+
"w_mean": 1.2682196497917175,
|
| 846 |
+
"w_min": 0.0,
|
| 847 |
+
"w_std": 0.2415708377957344
|
| 848 |
+
},
|
| 849 |
+
{
|
| 850 |
+
"clip_ratio/high_max": 0.0,
|
| 851 |
+
"clip_ratio/high_mean": 0.0,
|
| 852 |
+
"clip_ratio/low_mean": 0.0,
|
| 853 |
+
"clip_ratio/low_min": 0.0,
|
| 854 |
+
"clip_ratio/region_mean": 0.0,
|
| 855 |
+
"completion_length": 3569.0626220703125,
|
| 856 |
+
"cov_mean": -2.6002062440966256e-06,
|
| 857 |
+
"cov_std": 0.3952501490712166,
|
| 858 |
+
"entropy": 0.43212890625,
|
| 859 |
+
"epoch": 0.036571428571428574,
|
| 860 |
+
"grad_norm": 1.6052252054214478,
|
| 861 |
+
"kl": 0.03867650032043457,
|
| 862 |
+
"learning_rate": 8.737029101523929e-07,
|
| 863 |
+
"loss": -0.0005,
|
| 864 |
+
"reward": 0.6979166865348816,
|
| 865 |
+
"reward_std": 0.5698855072259903,
|
| 866 |
+
"rewards/accuracy_reward": 0.3333333395421505,
|
| 867 |
+
"rewards/format_reward": 0.3645833432674408,
|
| 868 |
+
"step": 32,
|
| 869 |
+
"w_high_ratio": 0.0,
|
| 870 |
+
"w_low_ratio": 0.04428828274831176,
|
| 871 |
+
"w_max": 1.4899851083755493,
|
| 872 |
+
"w_mean": 1.1257199943065643,
|
| 873 |
+
"w_min": 0.0,
|
| 874 |
+
"w_std": 0.2084966115653515
|
| 875 |
+
},
|
| 876 |
+
{
|
| 877 |
+
"clip_ratio/high_max": 0.0,
|
| 878 |
+
"clip_ratio/high_mean": 0.0,
|
| 879 |
+
"clip_ratio/low_mean": 0.0,
|
| 880 |
+
"clip_ratio/low_min": 0.0,
|
| 881 |
+
"clip_ratio/region_mean": 0.0,
|
| 882 |
+
"completion_length": 3602.0313110351562,
|
| 883 |
+
"cov_mean": 3.6438897495827405e-05,
|
| 884 |
+
"cov_std": 0.35359735041856766,
|
| 885 |
+
"entropy": 0.36865234375,
|
| 886 |
+
"epoch": 0.037714285714285714,
|
| 887 |
+
"grad_norm": 0.4356963038444519,
|
| 888 |
+
"kl": 0.006441354751586914,
|
| 889 |
+
"learning_rate": 8.625962667065487e-07,
|
| 890 |
+
"loss": 0.0063,
|
| 891 |
+
"reward": 0.6041666977107525,
|
| 892 |
+
"reward_std": 0.5775354653596878,
|
| 893 |
+
"rewards/accuracy_reward": 0.25000000558793545,
|
| 894 |
+
"rewards/format_reward": 0.35416667722165585,
|
| 895 |
+
"step": 33,
|
| 896 |
+
"w_high_ratio": 0.0,
|
| 897 |
+
"w_low_ratio": 0.04679631860926747,
|
| 898 |
+
"w_max": 1.4279894530773163,
|
| 899 |
+
"w_mean": 1.114120066165924,
|
| 900 |
+
"w_min": 1.083308810307908e-39,
|
| 901 |
+
"w_std": 0.1986728459596634
|
| 902 |
+
},
|
| 903 |
+
{
|
| 904 |
+
"clip_ratio/high_max": 0.0,
|
| 905 |
+
"clip_ratio/high_mean": 0.0,
|
| 906 |
+
"clip_ratio/low_mean": 0.0,
|
| 907 |
+
"clip_ratio/low_min": 0.0,
|
| 908 |
+
"clip_ratio/region_mean": 0.0,
|
| 909 |
+
"completion_length": 2617.0834045410156,
|
| 910 |
+
"cov_mean": 6.101907820266206e-05,
|
| 911 |
+
"cov_std": 0.4060870446264744,
|
| 912 |
+
"entropy": 0.42529296875,
|
| 913 |
+
"epoch": 0.038857142857142854,
|
| 914 |
+
"grad_norm": 0.6856685876846313,
|
| 915 |
+
"kl": 0.0007038116455078125,
|
| 916 |
+
"learning_rate": 8.511087728614862e-07,
|
| 917 |
+
"loss": -0.0313,
|
| 918 |
+
"reward": 0.9583333432674408,
|
| 919 |
+
"reward_std": 0.4463158957660198,
|
| 920 |
+
"rewards/accuracy_reward": 0.40625001303851604,
|
| 921 |
+
"rewards/format_reward": 0.5520833535119891,
|
| 922 |
+
"step": 34,
|
| 923 |
+
"w_high_ratio": 0.24574057757854462,
|
| 924 |
+
"w_low_ratio": 0.03856370970606804,
|
| 925 |
+
"w_max": 2.4257175028324127,
|
| 926 |
+
"w_mean": 1.564720779657364,
|
| 927 |
+
"w_min": 9.308517412847608e-40,
|
| 928 |
+
"w_std": 0.33169806748628616
|
| 929 |
+
},
|
| 930 |
+
{
|
| 931 |
+
"clip_ratio/high_max": 0.0,
|
| 932 |
+
"clip_ratio/high_mean": 0.0,
|
| 933 |
+
"clip_ratio/low_mean": 0.0,
|
| 934 |
+
"clip_ratio/low_min": 0.0,
|
| 935 |
+
"clip_ratio/region_mean": 0.0,
|
| 936 |
+
"completion_length": 3259.072998046875,
|
| 937 |
+
"cov_mean": 6.144623966974905e-06,
|
| 938 |
+
"cov_std": 0.4504075199365616,
|
| 939 |
+
"entropy": 0.41455078125,
|
| 940 |
+
"epoch": 0.04,
|
| 941 |
+
"grad_norm": 0.41887158155441284,
|
| 942 |
+
"kl": 0.0009140968322753906,
|
| 943 |
+
"learning_rate": 8.392544243589427e-07,
|
| 944 |
+
"loss": 0.0587,
|
| 945 |
+
"reward": 0.5312500111758709,
|
| 946 |
+
"reward_std": 0.6110180467367172,
|
| 947 |
+
"rewards/accuracy_reward": 0.1875000037252903,
|
| 948 |
+
"rewards/format_reward": 0.34375001303851604,
|
| 949 |
+
"step": 35,
|
| 950 |
+
"w_high_ratio": 0.0,
|
| 951 |
+
"w_low_ratio": 0.055781897623091936,
|
| 952 |
+
"w_max": 1.4624531269073486,
|
| 953 |
+
"w_mean": 1.1014132499694824,
|
| 954 |
+
"w_min": 5.8279178943564365e-36,
|
| 955 |
+
"w_std": 0.2577386908233166
|
| 956 |
+
},
|
| 957 |
+
{
|
| 958 |
+
"clip_ratio/high_max": 0.0,
|
| 959 |
+
"clip_ratio/high_mean": 0.0,
|
| 960 |
+
"clip_ratio/low_mean": 0.0,
|
| 961 |
+
"clip_ratio/low_min": 0.0,
|
| 962 |
+
"clip_ratio/region_mean": 0.0,
|
| 963 |
+
"completion_length": 3602.3438110351562,
|
| 964 |
+
"cov_mean": 2.4346391001017764e-05,
|
| 965 |
+
"cov_std": 0.21211356669664383,
|
| 966 |
+
"entropy": 0.49267578125,
|
| 967 |
+
"epoch": 0.04114285714285714,
|
| 968 |
+
"grad_norm": 0.5710666179656982,
|
| 969 |
+
"kl": 0.0008481144905090332,
|
| 970 |
+
"learning_rate": 8.270476638965461e-07,
|
| 971 |
+
"loss": -0.0327,
|
| 972 |
+
"reward": 0.19791667442768812,
|
| 973 |
+
"reward_std": 0.16615793853998184,
|
| 974 |
+
"rewards/accuracy_reward": 0.010416666977107525,
|
| 975 |
+
"rewards/format_reward": 0.18750001024454832,
|
| 976 |
+
"step": 36,
|
| 977 |
+
"w_high_ratio": 0.04688615724444389,
|
| 978 |
+
"w_low_ratio": 0.019336079712957144,
|
| 979 |
+
"w_max": 1.5947359800338745,
|
| 980 |
+
"w_mean": 1.155133068561554,
|
| 981 |
+
"w_min": 0.25,
|
| 982 |
+
"w_std": 0.13783840090036392
|
| 983 |
+
},
|
| 984 |
+
{
|
| 985 |
+
"clip_ratio/high_max": 0.0,
|
| 986 |
+
"clip_ratio/high_mean": 0.0,
|
| 987 |
+
"clip_ratio/low_mean": 0.0,
|
| 988 |
+
"clip_ratio/low_min": 0.0,
|
| 989 |
+
"clip_ratio/region_mean": 0.0,
|
| 990 |
+
"completion_length": 3618.604248046875,
|
| 991 |
+
"cov_mean": 3.312654916953761e-05,
|
| 992 |
+
"cov_std": 0.2396765574812889,
|
| 993 |
+
"entropy": 0.41943359375,
|
| 994 |
+
"epoch": 0.04228571428571429,
|
| 995 |
+
"grad_norm": 0.35426977276802063,
|
| 996 |
+
"kl": 0.0006122589111328125,
|
| 997 |
+
"learning_rate": 8.145033635316128e-07,
|
| 998 |
+
"loss": -0.0109,
|
| 999 |
+
"reward": 0.19791667256504297,
|
| 1000 |
+
"reward_std": 0.22218847274780273,
|
| 1001 |
+
"rewards/accuracy_reward": 0.010416666977107525,
|
| 1002 |
+
"rewards/format_reward": 0.18750000558793545,
|
| 1003 |
+
"step": 37,
|
| 1004 |
+
"w_high_ratio": 0.004357387777417898,
|
| 1005 |
+
"w_low_ratio": 0.026776093989610672,
|
| 1006 |
+
"w_max": 1.527068942785263,
|
| 1007 |
+
"w_mean": 1.1089930832386017,
|
| 1008 |
+
"w_min": 0.0,
|
| 1009 |
+
"w_std": 0.156088937073946
|
| 1010 |
+
},
|
| 1011 |
+
{
|
| 1012 |
+
"clip_ratio/high_max": 0.0,
|
| 1013 |
+
"clip_ratio/high_mean": 0.0,
|
| 1014 |
+
"clip_ratio/low_mean": 0.0,
|
| 1015 |
+
"clip_ratio/low_min": 0.0,
|
| 1016 |
+
"clip_ratio/region_mean": 0.0,
|
| 1017 |
+
"completion_length": 3681.0625610351562,
|
| 1018 |
+
"cov_mean": -1.270252869289834e-05,
|
| 1019 |
+
"cov_std": 0.19421957433223724,
|
| 1020 |
+
"entropy": 0.44921875,
|
| 1021 |
+
"epoch": 0.04342857142857143,
|
| 1022 |
+
"grad_norm": 0.24127696454524994,
|
| 1023 |
+
"kl": 0.00042241811752319336,
|
| 1024 |
+
"learning_rate": 8.01636806561836e-07,
|
| 1025 |
+
"loss": 0.0018,
|
| 1026 |
+
"reward": 0.25,
|
| 1027 |
+
"reward_std": 0.287552148103714,
|
| 1028 |
+
"rewards/accuracy_reward": 0.11458333861082792,
|
| 1029 |
+
"rewards/format_reward": 0.13541666977107525,
|
| 1030 |
+
"step": 38,
|
| 1031 |
+
"w_high_ratio": 0.041594721376895905,
|
| 1032 |
+
"w_low_ratio": 0.02351229265332222,
|
| 1033 |
+
"w_max": 1.402332603931427,
|
| 1034 |
+
"w_mean": 1.097372442483902,
|
| 1035 |
+
"w_min": 0.5,
|
| 1036 |
+
"w_std": 0.1229349672794342
|
| 1037 |
+
},
|
| 1038 |
+
{
|
| 1039 |
+
"clip_ratio/high_max": 0.0,
|
| 1040 |
+
"clip_ratio/high_mean": 0.0,
|
| 1041 |
+
"clip_ratio/low_mean": 0.0,
|
| 1042 |
+
"clip_ratio/low_min": 0.0,
|
| 1043 |
+
"clip_ratio/region_mean": 0.0,
|
| 1044 |
+
"completion_length": 3038.5626220703125,
|
| 1045 |
+
"cov_mean": 2.8159271096228622e-05,
|
| 1046 |
+
"cov_std": 0.31503692269325256,
|
| 1047 |
+
"entropy": 0.364013671875,
|
| 1048 |
+
"epoch": 0.044571428571428574,
|
| 1049 |
+
"grad_norm": 0.2635081708431244,
|
| 1050 |
+
"kl": 0.001262664794921875,
|
| 1051 |
+
"learning_rate": 7.884636689049422e-07,
|
| 1052 |
+
"loss": -0.0133,
|
| 1053 |
+
"reward": 0.7291666865348816,
|
| 1054 |
+
"reward_std": 0.26209891587495804,
|
| 1055 |
+
"rewards/accuracy_reward": 0.2291666716337204,
|
| 1056 |
+
"rewards/format_reward": 0.5000000149011612,
|
| 1057 |
+
"step": 39,
|
| 1058 |
+
"w_high_ratio": 0.0,
|
| 1059 |
+
"w_low_ratio": 0.027915622107684612,
|
| 1060 |
+
"w_max": 1.4430480003356934,
|
| 1061 |
+
"w_mean": 1.1478222012519836,
|
| 1062 |
+
"w_min": 0.25,
|
| 1063 |
+
"w_std": 0.14645230770111084
|
| 1064 |
+
},
|
| 1065 |
+
{
|
| 1066 |
+
"clip_ratio/high_max": 0.0,
|
| 1067 |
+
"clip_ratio/high_mean": 0.0,
|
| 1068 |
+
"clip_ratio/low_mean": 0.0,
|
| 1069 |
+
"clip_ratio/low_min": 0.0,
|
| 1070 |
+
"clip_ratio/region_mean": 0.0,
|
| 1071 |
+
"completion_length": 2716.6876220703125,
|
| 1072 |
+
"cov_mean": 1.541716937936144e-05,
|
| 1073 |
+
"cov_std": 0.37162280082702637,
|
| 1074 |
+
"entropy": 0.36669921875,
|
| 1075 |
+
"epoch": 0.045714285714285714,
|
| 1076 |
+
"grad_norm": 0.27566489577293396,
|
| 1077 |
+
"kl": 0.0022230148315429688,
|
| 1078 |
+
"learning_rate": 7.75e-07,
|
| 1079 |
+
"loss": -0.0203,
|
| 1080 |
+
"reward": 0.729166679084301,
|
| 1081 |
+
"reward_std": 0.3497198149561882,
|
| 1082 |
+
"rewards/accuracy_reward": 0.15625000093132257,
|
| 1083 |
+
"rewards/format_reward": 0.5729166716337204,
|
| 1084 |
+
"step": 40,
|
| 1085 |
+
"w_high_ratio": 0.05738469213247299,
|
| 1086 |
+
"w_low_ratio": 0.03804673533886671,
|
| 1087 |
+
"w_max": 1.8830105662345886,
|
| 1088 |
+
"w_mean": 1.3182978928089142,
|
| 1089 |
+
"w_min": 0.0,
|
| 1090 |
+
"w_std": 0.2204515039920807
|
| 1091 |
+
},
|
| 1092 |
+
{
|
| 1093 |
+
"clip_ratio/high_max": 0.0,
|
| 1094 |
+
"clip_ratio/high_mean": 0.0,
|
| 1095 |
+
"clip_ratio/low_mean": 0.0,
|
| 1096 |
+
"clip_ratio/low_min": 0.0,
|
| 1097 |
+
"clip_ratio/region_mean": 0.0,
|
| 1098 |
+
"completion_length": 3265.8334350585938,
|
| 1099 |
+
"cov_mean": 4.474487604966271e-05,
|
| 1100 |
+
"cov_std": 0.3514489606022835,
|
| 1101 |
+
"entropy": 0.38134765625,
|
| 1102 |
+
"epoch": 0.046857142857142854,
|
| 1103 |
+
"grad_norm": 0.25198379158973694,
|
| 1104 |
+
"kl": 0.00047135353088378906,
|
| 1105 |
+
"learning_rate": 7.612622032536507e-07,
|
| 1106 |
+
"loss": -0.0348,
|
| 1107 |
+
"reward": 0.5625000204890966,
|
| 1108 |
+
"reward_std": 0.5001779943704605,
|
| 1109 |
+
"rewards/accuracy_reward": 0.1458333358168602,
|
| 1110 |
+
"rewards/format_reward": 0.41666669212281704,
|
| 1111 |
+
"step": 41,
|
| 1112 |
+
"w_high_ratio": 0.060201918706297874,
|
| 1113 |
+
"w_low_ratio": 0.03864650521427393,
|
| 1114 |
+
"w_max": 1.765150785446167,
|
| 1115 |
+
"w_mean": 1.2053866684436798,
|
| 1116 |
+
"w_min": 1.0444519155498174e-26,
|
| 1117 |
+
"w_std": 0.20880188420414925
|
| 1118 |
+
},
|
| 1119 |
+
{
|
| 1120 |
+
"clip_ratio/high_max": 0.0,
|
| 1121 |
+
"clip_ratio/high_mean": 0.0,
|
| 1122 |
+
"clip_ratio/low_mean": 0.0,
|
| 1123 |
+
"clip_ratio/low_min": 0.0,
|
| 1124 |
+
"clip_ratio/region_mean": 0.0,
|
| 1125 |
+
"completion_length": 3039.260498046875,
|
| 1126 |
+
"cov_mean": 4.992843923901091e-05,
|
| 1127 |
+
"cov_std": 0.12326429784297943,
|
| 1128 |
+
"entropy": 0.4560546875,
|
| 1129 |
+
"epoch": 0.048,
|
| 1130 |
+
"grad_norm": 0.10436006635427475,
|
| 1131 |
+
"kl": 0.0007390975952148438,
|
| 1132 |
+
"learning_rate": 7.472670160550848e-07,
|
| 1133 |
+
"loss": 0.0037,
|
| 1134 |
+
"reward": 0.3333333358168602,
|
| 1135 |
+
"reward_std": 0.15885811299085617,
|
| 1136 |
+
"rewards/accuracy_reward": 0.02083333395421505,
|
| 1137 |
+
"rewards/format_reward": 0.31250000186264515,
|
| 1138 |
+
"step": 42,
|
| 1139 |
+
"w_high_ratio": 0.0625,
|
| 1140 |
+
"w_low_ratio": 0.016482737846672535,
|
| 1141 |
+
"w_max": 1.6183056831359863,
|
| 1142 |
+
"w_mean": 1.1592219173908234,
|
| 1143 |
+
"w_min": 0.5,
|
| 1144 |
+
"w_std": 0.06949653849005699
|
| 1145 |
+
},
|
| 1146 |
+
{
|
| 1147 |
+
"clip_ratio/high_max": 0.0,
|
| 1148 |
+
"clip_ratio/high_mean": 0.0,
|
| 1149 |
+
"clip_ratio/low_mean": 0.0,
|
| 1150 |
+
"clip_ratio/low_min": 0.0,
|
| 1151 |
+
"clip_ratio/region_mean": 0.0,
|
| 1152 |
+
"completion_length": 3322.5208740234375,
|
| 1153 |
+
"cov_mean": 1.5148519764807133e-05,
|
| 1154 |
+
"cov_std": 0.3199145011603832,
|
| 1155 |
+
"entropy": 0.39794921875,
|
| 1156 |
+
"epoch": 0.04914285714285714,
|
| 1157 |
+
"grad_norm": 0.22395218908786774,
|
| 1158 |
+
"kl": 0.0014238357543945312,
|
| 1159 |
+
"learning_rate": 7.330314893841101e-07,
|
| 1160 |
+
"loss": 0.0251,
|
| 1161 |
+
"reward": 0.45833334140479565,
|
| 1162 |
+
"reward_std": 0.34082313999533653,
|
| 1163 |
+
"rewards/accuracy_reward": 0.16666666977107525,
|
| 1164 |
+
"rewards/format_reward": 0.2916666716337204,
|
| 1165 |
+
"step": 43,
|
| 1166 |
+
"w_high_ratio": 0.0,
|
| 1167 |
+
"w_low_ratio": 0.03815819416195154,
|
| 1168 |
+
"w_max": 1.3987224996089935,
|
| 1169 |
+
"w_mean": 1.0974957346916199,
|
| 1170 |
+
"w_min": 0.0,
|
| 1171 |
+
"w_std": 0.18725593388080597
|
| 1172 |
+
},
|
| 1173 |
+
{
|
| 1174 |
+
"clip_ratio/high_max": 0.0,
|
| 1175 |
+
"clip_ratio/high_mean": 0.0,
|
| 1176 |
+
"clip_ratio/low_mean": 0.0,
|
| 1177 |
+
"clip_ratio/low_min": 0.0,
|
| 1178 |
+
"clip_ratio/region_mean": 0.0,
|
| 1179 |
+
"completion_length": 2873.947967529297,
|
| 1180 |
+
"cov_mean": 1.1262121915933676e-05,
|
| 1181 |
+
"cov_std": 0.3456302881240845,
|
| 1182 |
+
"entropy": 0.37548828125,
|
| 1183 |
+
"epoch": 0.05028571428571429,
|
| 1184 |
+
"grad_norm": 0.570351243019104,
|
| 1185 |
+
"kl": 0.0011093616485595703,
|
| 1186 |
+
"learning_rate": 7.185729670371604e-07,
|
| 1187 |
+
"loss": -0.0802,
|
| 1188 |
+
"reward": 0.7500000447034836,
|
| 1189 |
+
"reward_std": 0.31945212185382843,
|
| 1190 |
+
"rewards/accuracy_reward": 0.2812500074505806,
|
| 1191 |
+
"rewards/format_reward": 0.4687500074505806,
|
| 1192 |
+
"step": 44,
|
| 1193 |
+
"w_high_ratio": 0.15513932332396507,
|
| 1194 |
+
"w_low_ratio": 0.028022687416523695,
|
| 1195 |
+
"w_max": 2.1701363921165466,
|
| 1196 |
+
"w_mean": 1.4684883952140808,
|
| 1197 |
+
"w_min": 0.25,
|
| 1198 |
+
"w_std": 0.245870441198349
|
| 1199 |
+
},
|
| 1200 |
+
{
|
| 1201 |
+
"clip_ratio/high_max": 0.0,
|
| 1202 |
+
"clip_ratio/high_mean": 0.0,
|
| 1203 |
+
"clip_ratio/low_mean": 0.0,
|
| 1204 |
+
"clip_ratio/low_min": 0.0,
|
| 1205 |
+
"clip_ratio/region_mean": 0.0,
|
| 1206 |
+
"completion_length": 3659.1355590820312,
|
| 1207 |
+
"cov_mean": -1.3919307093601674e-05,
|
| 1208 |
+
"cov_std": 0.3560608774423599,
|
| 1209 |
+
"entropy": 0.4130859375,
|
| 1210 |
+
"epoch": 0.05142857142857143,
|
| 1211 |
+
"grad_norm": 0.33030757308006287,
|
| 1212 |
+
"kl": 0.0011370182037353516,
|
| 1213 |
+
"learning_rate": 7.039090644965509e-07,
|
| 1214 |
+
"loss": 0.0303,
|
| 1215 |
+
"reward": 0.416666679084301,
|
| 1216 |
+
"reward_std": 0.5311296693980694,
|
| 1217 |
+
"rewards/accuracy_reward": 0.16666667349636555,
|
| 1218 |
+
"rewards/format_reward": 0.25,
|
| 1219 |
+
"step": 45,
|
| 1220 |
+
"w_high_ratio": 0.0,
|
| 1221 |
+
"w_low_ratio": 0.04757110681384802,
|
| 1222 |
+
"w_max": 1.4556901454925537,
|
| 1223 |
+
"w_mean": 1.08749720454216,
|
| 1224 |
+
"w_min": 0.0,
|
| 1225 |
+
"w_std": 0.21380594745278358
|
| 1226 |
+
},
|
| 1227 |
+
{
|
| 1228 |
+
"clip_ratio/high_max": 0.0,
|
| 1229 |
+
"clip_ratio/high_mean": 0.0,
|
| 1230 |
+
"clip_ratio/low_mean": 0.0,
|
| 1231 |
+
"clip_ratio/low_min": 0.0,
|
| 1232 |
+
"clip_ratio/region_mean": 0.0,
|
| 1233 |
+
"completion_length": 3486.7916870117188,
|
| 1234 |
+
"cov_mean": 2.1816805514163207e-05,
|
| 1235 |
+
"cov_std": 0.2602475844323635,
|
| 1236 |
+
"entropy": 0.4794921875,
|
| 1237 |
+
"epoch": 0.052571428571428575,
|
| 1238 |
+
"grad_norm": 0.28776484727859497,
|
| 1239 |
+
"kl": 0.0007176399230957031,
|
| 1240 |
+
"learning_rate": 6.890576474687263e-07,
|
| 1241 |
+
"loss": 0.058,
|
| 1242 |
+
"reward": 0.23958334233611822,
|
| 1243 |
+
"reward_std": 0.2835810258984566,
|
| 1244 |
+
"rewards/accuracy_reward": 0.031250000931322575,
|
| 1245 |
+
"rewards/format_reward": 0.20833333488553762,
|
| 1246 |
+
"step": 46,
|
| 1247 |
+
"w_high_ratio": 0.0,
|
| 1248 |
+
"w_low_ratio": 0.03342714952304959,
|
| 1249 |
+
"w_max": 1.2925868034362793,
|
| 1250 |
+
"w_mean": 1.0403871834278107,
|
| 1251 |
+
"w_min": 1.838248673476915e-29,
|
| 1252 |
+
"w_std": 0.14680924825370312
|
| 1253 |
+
},
|
| 1254 |
+
{
|
| 1255 |
+
"clip_ratio/high_max": 0.0,
|
| 1256 |
+
"clip_ratio/high_mean": 0.0,
|
| 1257 |
+
"clip_ratio/low_mean": 0.0,
|
| 1258 |
+
"clip_ratio/low_min": 0.0,
|
| 1259 |
+
"clip_ratio/region_mean": 0.0,
|
| 1260 |
+
"completion_length": 3093.4479370117188,
|
| 1261 |
+
"cov_mean": -4.72289493700373e-05,
|
| 1262 |
+
"cov_std": 0.39596526324748993,
|
| 1263 |
+
"entropy": 0.3857421875,
|
| 1264 |
+
"epoch": 0.053714285714285714,
|
| 1265 |
+
"grad_norm": 0.3468870520591736,
|
| 1266 |
+
"kl": 0.0010945796966552734,
|
| 1267 |
+
"learning_rate": 6.740368101176495e-07,
|
| 1268 |
+
"loss": -0.064,
|
| 1269 |
+
"reward": 0.84375,
|
| 1270 |
+
"reward_std": 0.5307980924844742,
|
| 1271 |
+
"rewards/accuracy_reward": 0.322916679084301,
|
| 1272 |
+
"rewards/format_reward": 0.5208333432674408,
|
| 1273 |
+
"step": 47,
|
| 1274 |
+
"w_high_ratio": 0.10172786563634872,
|
| 1275 |
+
"w_low_ratio": 0.03984384797513485,
|
| 1276 |
+
"w_max": 1.9562607407569885,
|
| 1277 |
+
"w_mean": 1.385090559720993,
|
| 1278 |
+
"w_min": 0.25,
|
| 1279 |
+
"w_std": 0.24371833354234695
|
| 1280 |
+
},
|
| 1281 |
+
{
|
| 1282 |
+
"clip_ratio/high_max": 0.0,
|
| 1283 |
+
"clip_ratio/high_mean": 0.0,
|
| 1284 |
+
"clip_ratio/low_mean": 0.0,
|
| 1285 |
+
"clip_ratio/low_min": 0.0,
|
| 1286 |
+
"clip_ratio/region_mean": 0.0,
|
| 1287 |
+
"completion_length": 2902.979278564453,
|
| 1288 |
+
"cov_mean": -2.4175317776098382e-05,
|
| 1289 |
+
"cov_std": 0.4257803037762642,
|
| 1290 |
+
"entropy": 0.388671875,
|
| 1291 |
+
"epoch": 0.054857142857142854,
|
| 1292 |
+
"grad_norm": 0.5910794734954834,
|
| 1293 |
+
"kl": 0.002681732177734375,
|
| 1294 |
+
"learning_rate": 6.588648530198504e-07,
|
| 1295 |
+
"loss": -0.0371,
|
| 1296 |
+
"reward": 0.6562500074505806,
|
| 1297 |
+
"reward_std": 0.493436336517334,
|
| 1298 |
+
"rewards/accuracy_reward": 0.22916666697710752,
|
| 1299 |
+
"rewards/format_reward": 0.42708334140479565,
|
| 1300 |
+
"step": 48,
|
| 1301 |
+
"w_high_ratio": 0.05750561133027077,
|
| 1302 |
+
"w_low_ratio": 0.0480266478843987,
|
| 1303 |
+
"w_max": 1.9184067249298096,
|
| 1304 |
+
"w_mean": 1.2389512956142426,
|
| 1305 |
+
"w_min": 9.458764634192515e-45,
|
| 1306 |
+
"w_std": 0.2734759133309126
|
| 1307 |
+
},
|
| 1308 |
+
{
|
| 1309 |
+
"clip_ratio/high_max": 0.0,
|
| 1310 |
+
"clip_ratio/high_mean": 0.0,
|
| 1311 |
+
"clip_ratio/low_mean": 0.0,
|
| 1312 |
+
"clip_ratio/low_min": 0.0,
|
| 1313 |
+
"clip_ratio/region_mean": 0.0,
|
| 1314 |
+
"completion_length": 2592.4271545410156,
|
| 1315 |
+
"cov_mean": -5.175127171241911e-06,
|
| 1316 |
+
"cov_std": 0.3634071573615074,
|
| 1317 |
+
"entropy": 0.36669921875,
|
| 1318 |
+
"epoch": 0.056,
|
| 1319 |
+
"grad_norm": 0.33904796838760376,
|
| 1320 |
+
"kl": 0.006507396697998047,
|
| 1321 |
+
"learning_rate": 6.435602608679916e-07,
|
| 1322 |
+
"loss": 0.0033,
|
| 1323 |
+
"reward": 0.8437500447034836,
|
| 1324 |
+
"reward_std": 0.5799632221460342,
|
| 1325 |
+
"rewards/accuracy_reward": 0.27083333767950535,
|
| 1326 |
+
"rewards/format_reward": 0.5729166865348816,
|
| 1327 |
+
"step": 49,
|
| 1328 |
+
"w_high_ratio": 0.0624999962747097,
|
| 1329 |
+
"w_low_ratio": 0.04148435592651367,
|
| 1330 |
+
"w_max": 1.7326014041900635,
|
| 1331 |
+
"w_mean": 1.239928662776947,
|
| 1332 |
+
"w_min": 4.925564102101732e-43,
|
| 1333 |
+
"w_std": 0.23503416404128075
|
| 1334 |
+
},
|
| 1335 |
+
{
|
| 1336 |
+
"clip_ratio/high_max": 0.0,
|
| 1337 |
+
"clip_ratio/high_mean": 0.0,
|
| 1338 |
+
"clip_ratio/low_mean": 0.0,
|
| 1339 |
+
"clip_ratio/low_min": 0.0,
|
| 1340 |
+
"clip_ratio/region_mean": 0.0,
|
| 1341 |
+
"completion_length": 3253.1355590820312,
|
| 1342 |
+
"cov_mean": -2.146356928278692e-05,
|
| 1343 |
+
"cov_std": 0.2279180847108364,
|
| 1344 |
+
"entropy": 0.35498046875,
|
| 1345 |
+
"epoch": 0.05714285714285714,
|
| 1346 |
+
"grad_norm": 0.15721456706523895,
|
| 1347 |
+
"kl": 0.0008764266967773438,
|
| 1348 |
+
"learning_rate": 6.281416799501187e-07,
|
| 1349 |
+
"loss": -0.0086,
|
| 1350 |
+
"reward": 0.635416679084301,
|
| 1351 |
+
"reward_std": 0.3225880041718483,
|
| 1352 |
+
"rewards/accuracy_reward": 0.2708333386108279,
|
| 1353 |
+
"rewards/format_reward": 0.3645833395421505,
|
| 1354 |
+
"step": 50,
|
| 1355 |
+
"w_high_ratio": 0.0625,
|
| 1356 |
+
"w_low_ratio": 0.028236051555722952,
|
| 1357 |
+
"w_max": 1.5944055318832397,
|
| 1358 |
+
"w_mean": 1.1758275628089905,
|
| 1359 |
+
"w_min": 0.25,
|
| 1360 |
+
"w_std": 0.12848308496177197
|
| 1361 |
+
},
|
| 1362 |
+
{
|
| 1363 |
+
"clip_ratio/high_max": 0.0,
|
| 1364 |
+
"clip_ratio/high_mean": 0.0,
|
| 1365 |
+
"clip_ratio/low_mean": 0.0,
|
| 1366 |
+
"clip_ratio/low_min": 0.0,
|
| 1367 |
+
"clip_ratio/region_mean": 0.0,
|
| 1368 |
+
"completion_length": 2558.1043090820312,
|
| 1369 |
+
"cov_mean": 5.439032975118607e-05,
|
| 1370 |
+
"cov_std": 0.29480236768722534,
|
| 1371 |
+
"entropy": 0.43408203125,
|
| 1372 |
+
"epoch": 0.05828571428571429,
|
| 1373 |
+
"grad_norm": 0.38427427411079407,
|
| 1374 |
+
"kl": 0.00363922119140625,
|
| 1375 |
+
"learning_rate": 6.126278954320294e-07,
|
| 1376 |
+
"loss": 0.0141,
|
| 1377 |
+
"reward": 0.6666666865348816,
|
| 1378 |
+
"reward_std": 0.2830107621848583,
|
| 1379 |
+
"rewards/accuracy_reward": 0.13541666977107525,
|
| 1380 |
+
"rewards/format_reward": 0.53125,
|
| 1381 |
+
"step": 51,
|
| 1382 |
+
"w_high_ratio": 0.0,
|
| 1383 |
+
"w_low_ratio": 0.029488239903002977,
|
| 1384 |
+
"w_max": 1.5800293982028961,
|
| 1385 |
+
"w_mean": 1.1582573056221008,
|
| 1386 |
+
"w_min": 0.25,
|
| 1387 |
+
"w_std": 0.16206533834338188
|
| 1388 |
+
},
|
| 1389 |
+
{
|
| 1390 |
+
"clip_ratio/high_max": 0.0,
|
| 1391 |
+
"clip_ratio/high_mean": 0.0,
|
| 1392 |
+
"clip_ratio/low_mean": 0.0,
|
| 1393 |
+
"clip_ratio/low_min": 0.0,
|
| 1394 |
+
"clip_ratio/region_mean": 0.0,
|
| 1395 |
+
"completion_length": 3121.510467529297,
|
| 1396 |
+
"cov_mean": 4.514171632763464e-05,
|
| 1397 |
+
"cov_std": 0.3024504631757736,
|
| 1398 |
+
"entropy": 0.38525390625,
|
| 1399 |
+
"epoch": 0.05942857142857143,
|
| 1400 |
+
"grad_norm": 0.38342365622520447,
|
| 1401 |
+
"kl": 0.0027284622192382812,
|
| 1402 |
+
"learning_rate": 5.97037808470444e-07,
|
| 1403 |
+
"loss": -0.0158,
|
| 1404 |
+
"reward": 0.6666666772216558,
|
| 1405 |
+
"reward_std": 0.48630647361278534,
|
| 1406 |
+
"rewards/accuracy_reward": 0.2500000074505806,
|
| 1407 |
+
"rewards/format_reward": 0.41666668467223644,
|
| 1408 |
+
"step": 52,
|
| 1409 |
+
"w_high_ratio": 0.0,
|
| 1410 |
+
"w_low_ratio": 0.048233418725430965,
|
| 1411 |
+
"w_max": 1.4871755540370941,
|
| 1412 |
+
"w_mean": 1.1300460696220398,
|
| 1413 |
+
"w_min": 0.0,
|
| 1414 |
+
"w_std": 0.221306212246418
|
| 1415 |
+
},
|
| 1416 |
+
{
|
| 1417 |
+
"clip_ratio/high_max": 0.0,
|
| 1418 |
+
"clip_ratio/high_mean": 0.0,
|
| 1419 |
+
"clip_ratio/low_mean": 0.0,
|
| 1420 |
+
"clip_ratio/low_min": 0.0,
|
| 1421 |
+
"clip_ratio/region_mean": 0.0,
|
| 1422 |
+
"completion_length": 3089.6146850585938,
|
| 1423 |
+
"cov_mean": 1.5540852473350242e-05,
|
| 1424 |
+
"cov_std": 0.4309914745390415,
|
| 1425 |
+
"entropy": 0.42138671875,
|
| 1426 |
+
"epoch": 0.060571428571428575,
|
| 1427 |
+
"grad_norm": 0.32499778270721436,
|
| 1428 |
+
"kl": 0.001051187515258789,
|
| 1429 |
+
"learning_rate": 5.813904131848564e-07,
|
| 1430 |
+
"loss": -0.0334,
|
| 1431 |
+
"reward": 0.9062500447034836,
|
| 1432 |
+
"reward_std": 0.615619845688343,
|
| 1433 |
+
"rewards/accuracy_reward": 0.3020833432674408,
|
| 1434 |
+
"rewards/format_reward": 0.6041666865348816,
|
| 1435 |
+
"step": 53,
|
| 1436 |
+
"w_high_ratio": 0.0,
|
| 1437 |
+
"w_low_ratio": 0.05247905198484659,
|
| 1438 |
+
"w_max": 1.4815186858177185,
|
| 1439 |
+
"w_mean": 1.1437698602676392,
|
| 1440 |
+
"w_min": 0.0,
|
| 1441 |
+
"w_std": 0.2556675784289837
|
| 1442 |
+
},
|
| 1443 |
+
{
|
| 1444 |
+
"clip_ratio/high_max": 0.0,
|
| 1445 |
+
"clip_ratio/high_mean": 0.0,
|
| 1446 |
+
"clip_ratio/low_mean": 0.0,
|
| 1447 |
+
"clip_ratio/low_min": 0.0,
|
| 1448 |
+
"clip_ratio/region_mean": 0.0,
|
| 1449 |
+
"completion_length": 2948.479248046875,
|
| 1450 |
+
"cov_mean": 5.7689636832947144e-05,
|
| 1451 |
+
"cov_std": 0.4967670738697052,
|
| 1452 |
+
"entropy": 0.3662109375,
|
| 1453 |
+
"epoch": 0.061714285714285715,
|
| 1454 |
+
"grad_norm": 0.5084431767463684,
|
| 1455 |
+
"kl": 0.0007784366607666016,
|
| 1456 |
+
"learning_rate": 5.657047735161255e-07,
|
| 1457 |
+
"loss": -0.0313,
|
| 1458 |
+
"reward": 1.0208333730697632,
|
| 1459 |
+
"reward_std": 0.6375530436635017,
|
| 1460 |
+
"rewards/accuracy_reward": 0.4375000149011612,
|
| 1461 |
+
"rewards/format_reward": 0.5833333507180214,
|
| 1462 |
+
"step": 54,
|
| 1463 |
+
"w_high_ratio": 0.12949026003479958,
|
| 1464 |
+
"w_low_ratio": 0.03830569516867399,
|
| 1465 |
+
"w_max": 1.9918950200080872,
|
| 1466 |
+
"w_mean": 1.3933196365833282,
|
| 1467 |
+
"w_min": 0.0,
|
| 1468 |
+
"w_std": 0.25901878997683525
|
| 1469 |
+
},
|
| 1470 |
+
{
|
| 1471 |
+
"clip_ratio/high_max": 0.0,
|
| 1472 |
+
"clip_ratio/high_mean": 0.0,
|
| 1473 |
+
"clip_ratio/low_mean": 0.0,
|
| 1474 |
+
"clip_ratio/low_min": 0.0,
|
| 1475 |
+
"clip_ratio/region_mean": 0.0,
|
| 1476 |
+
"completion_length": 3389.4376220703125,
|
| 1477 |
+
"cov_mean": 1.1409799526518327e-05,
|
| 1478 |
+
"cov_std": 0.2976246848702431,
|
| 1479 |
+
"entropy": 0.40380859375,
|
| 1480 |
+
"epoch": 0.06285714285714286,
|
| 1481 |
+
"grad_norm": 0.2319922149181366,
|
| 1482 |
+
"kl": 0.0006909370422363281,
|
| 1483 |
+
"learning_rate": 5.5e-07,
|
| 1484 |
+
"loss": 0.0036,
|
| 1485 |
+
"reward": 0.614583358168602,
|
| 1486 |
+
"reward_std": 0.4451694190502167,
|
| 1487 |
+
"rewards/accuracy_reward": 0.25000000558793545,
|
| 1488 |
+
"rewards/format_reward": 0.3645833432674408,
|
| 1489 |
+
"step": 55,
|
| 1490 |
+
"w_high_ratio": 0.008611755445599556,
|
| 1491 |
+
"w_low_ratio": 0.038189588114619255,
|
| 1492 |
+
"w_max": 1.4636406004428864,
|
| 1493 |
+
"w_mean": 1.113456517457962,
|
| 1494 |
+
"w_min": 0.25,
|
| 1495 |
+
"w_std": 0.1765221506357193
|
| 1496 |
+
},
|
| 1497 |
+
{
|
| 1498 |
+
"clip_ratio/high_max": 0.0,
|
| 1499 |
+
"clip_ratio/high_mean": 0.0,
|
| 1500 |
+
"clip_ratio/low_mean": 0.0,
|
| 1501 |
+
"clip_ratio/low_min": 0.0,
|
| 1502 |
+
"clip_ratio/region_mean": 0.0,
|
| 1503 |
+
"completion_length": 3142.2188720703125,
|
| 1504 |
+
"cov_mean": -1.977225656446535e-05,
|
| 1505 |
+
"cov_std": 0.4043290466070175,
|
| 1506 |
+
"entropy": 0.36962890625,
|
| 1507 |
+
"epoch": 0.064,
|
| 1508 |
+
"grad_norm": 0.5777710676193237,
|
| 1509 |
+
"kl": 0.003372669219970703,
|
| 1510 |
+
"learning_rate": 5.342952264838747e-07,
|
| 1511 |
+
"loss": -0.0624,
|
| 1512 |
+
"reward": 0.666666679084301,
|
| 1513 |
+
"reward_std": 0.36768075451254845,
|
| 1514 |
+
"rewards/accuracy_reward": 0.2083333432674408,
|
| 1515 |
+
"rewards/format_reward": 0.4583333358168602,
|
| 1516 |
+
"step": 56,
|
| 1517 |
+
"w_high_ratio": 0.02573772892355919,
|
| 1518 |
+
"w_low_ratio": 0.0384283890016377,
|
| 1519 |
+
"w_max": 1.6343314349651337,
|
| 1520 |
+
"w_mean": 1.174754112958908,
|
| 1521 |
+
"w_min": 0.0,
|
| 1522 |
+
"w_std": 0.2166101150214672
|
| 1523 |
+
},
|
| 1524 |
+
{
|
| 1525 |
+
"clip_ratio/high_max": 0.0,
|
| 1526 |
+
"clip_ratio/high_mean": 0.0,
|
| 1527 |
+
"clip_ratio/low_mean": 0.0,
|
| 1528 |
+
"clip_ratio/low_min": 0.0,
|
| 1529 |
+
"clip_ratio/region_mean": 0.0,
|
| 1530 |
+
"completion_length": 3526.041748046875,
|
| 1531 |
+
"cov_mean": 5.059504655946512e-05,
|
| 1532 |
+
"cov_std": 0.19530736654996872,
|
| 1533 |
+
"entropy": 0.3095703125,
|
| 1534 |
+
"epoch": 0.06514285714285714,
|
| 1535 |
+
"grad_norm": 0.14592108130455017,
|
| 1536 |
+
"kl": 0.0003554821014404297,
|
| 1537 |
+
"learning_rate": 5.186095868151436e-07,
|
| 1538 |
+
"loss": 0.0068,
|
| 1539 |
+
"reward": 0.4270833432674408,
|
| 1540 |
+
"reward_std": 0.2942384257912636,
|
| 1541 |
+
"rewards/accuracy_reward": 0.11458333861082792,
|
| 1542 |
+
"rewards/format_reward": 0.3125,
|
| 1543 |
+
"step": 57,
|
| 1544 |
+
"w_high_ratio": 0.0,
|
| 1545 |
+
"w_low_ratio": 0.024141859263181686,
|
| 1546 |
+
"w_max": 1.4007738828659058,
|
| 1547 |
+
"w_mean": 1.1122069656848907,
|
| 1548 |
+
"w_min": 0.25,
|
| 1549 |
+
"w_std": 0.10743825510144234
|
| 1550 |
+
},
|
| 1551 |
+
{
|
| 1552 |
+
"clip_ratio/high_max": 0.0,
|
| 1553 |
+
"clip_ratio/high_mean": 0.0,
|
| 1554 |
+
"clip_ratio/low_mean": 0.0,
|
| 1555 |
+
"clip_ratio/low_min": 0.0,
|
| 1556 |
+
"clip_ratio/region_mean": 0.0,
|
| 1557 |
+
"completion_length": 2448.125030517578,
|
| 1558 |
+
"cov_mean": 2.2156835257192142e-05,
|
| 1559 |
+
"cov_std": 0.41437317430973053,
|
| 1560 |
+
"entropy": 0.3251953125,
|
| 1561 |
+
"epoch": 0.06628571428571428,
|
| 1562 |
+
"grad_norm": 0.31742021441459656,
|
| 1563 |
+
"kl": 0.003879547119140625,
|
| 1564 |
+
"learning_rate": 5.02962191529556e-07,
|
| 1565 |
+
"loss": -0.0288,
|
| 1566 |
+
"reward": 1.0625000223517418,
|
| 1567 |
+
"reward_std": 0.4887799397110939,
|
| 1568 |
+
"rewards/accuracy_reward": 0.3437500037252903,
|
| 1569 |
+
"rewards/format_reward": 0.7187500074505806,
|
| 1570 |
+
"step": 58,
|
| 1571 |
+
"w_high_ratio": 0.0625,
|
| 1572 |
+
"w_low_ratio": 0.03917268430814147,
|
| 1573 |
+
"w_max": 1.9121226966381073,
|
| 1574 |
+
"w_mean": 1.4166812300682068,
|
| 1575 |
+
"w_min": 0.0,
|
| 1576 |
+
"w_std": 0.25635192170739174
|
| 1577 |
+
},
|
| 1578 |
+
{
|
| 1579 |
+
"clip_ratio/high_max": 0.0,
|
| 1580 |
+
"clip_ratio/high_mean": 0.0,
|
| 1581 |
+
"clip_ratio/low_mean": 0.0,
|
| 1582 |
+
"clip_ratio/low_min": 0.0,
|
| 1583 |
+
"clip_ratio/region_mean": 0.0,
|
| 1584 |
+
"completion_length": 3166.6458740234375,
|
| 1585 |
+
"cov_mean": -4.421618541528005e-06,
|
| 1586 |
+
"cov_std": 0.23622582852840424,
|
| 1587 |
+
"entropy": 0.33251953125,
|
| 1588 |
+
"epoch": 0.06742857142857143,
|
| 1589 |
+
"grad_norm": 0.5350203514099121,
|
| 1590 |
+
"kl": 0.0004105567932128906,
|
| 1591 |
+
"learning_rate": 4.873721045679706e-07,
|
| 1592 |
+
"loss": 0.0178,
|
| 1593 |
+
"reward": 0.572916672565043,
|
| 1594 |
+
"reward_std": 0.32521966844797134,
|
| 1595 |
+
"rewards/accuracy_reward": 0.20833333861082792,
|
| 1596 |
+
"rewards/format_reward": 0.36458334140479565,
|
| 1597 |
+
"step": 59,
|
| 1598 |
+
"w_high_ratio": 0.09398643299937248,
|
| 1599 |
+
"w_low_ratio": 0.029231622349470854,
|
| 1600 |
+
"w_max": 1.7523704767227173,
|
| 1601 |
+
"w_mean": 1.1871490776538849,
|
| 1602 |
+
"w_min": 0.0,
|
| 1603 |
+
"w_std": 0.18543793261051178
|
| 1604 |
+
},
|
| 1605 |
+
{
|
| 1606 |
+
"clip_ratio/high_max": 0.0,
|
| 1607 |
+
"clip_ratio/high_mean": 0.0,
|
| 1608 |
+
"clip_ratio/low_mean": 0.0,
|
| 1609 |
+
"clip_ratio/low_min": 0.0,
|
| 1610 |
+
"clip_ratio/region_mean": 0.0,
|
| 1611 |
+
"completion_length": 3290.9688720703125,
|
| 1612 |
+
"cov_mean": 2.4174340069293976e-05,
|
| 1613 |
+
"cov_std": 0.2900906167924404,
|
| 1614 |
+
"entropy": 0.37353515625,
|
| 1615 |
+
"epoch": 0.06857142857142857,
|
| 1616 |
+
"grad_norm": 0.3470577299594879,
|
| 1617 |
+
"kl": 0.002052783966064453,
|
| 1618 |
+
"learning_rate": 4.7185832004988133e-07,
|
| 1619 |
+
"loss": 0.0597,
|
| 1620 |
+
"reward": 0.4895833395421505,
|
| 1621 |
+
"reward_std": 0.39402854442596436,
|
| 1622 |
+
"rewards/accuracy_reward": 0.1041666679084301,
|
| 1623 |
+
"rewards/format_reward": 0.38541667722165585,
|
| 1624 |
+
"step": 60,
|
| 1625 |
+
"w_high_ratio": 0.0,
|
| 1626 |
+
"w_low_ratio": 0.03313248883932829,
|
| 1627 |
+
"w_max": 1.4663892686367035,
|
| 1628 |
+
"w_mean": 1.140129953622818,
|
| 1629 |
+
"w_min": 0.0,
|
| 1630 |
+
"w_std": 0.1620137356221676
|
| 1631 |
+
},
|
| 1632 |
+
{
|
| 1633 |
+
"clip_ratio/high_max": 0.0,
|
| 1634 |
+
"clip_ratio/high_mean": 0.0,
|
| 1635 |
+
"clip_ratio/low_mean": 0.0,
|
| 1636 |
+
"clip_ratio/low_min": 0.0,
|
| 1637 |
+
"clip_ratio/region_mean": 0.0,
|
| 1638 |
+
"completion_length": 3175.916748046875,
|
| 1639 |
+
"cov_mean": 4.250624078849796e-05,
|
| 1640 |
+
"cov_std": 0.4415072202682495,
|
| 1641 |
+
"entropy": 0.3564453125,
|
| 1642 |
+
"epoch": 0.06971428571428571,
|
| 1643 |
+
"grad_norm": 0.3775484263896942,
|
| 1644 |
+
"kl": 0.0009458065032958984,
|
| 1645 |
+
"learning_rate": 4.5643973913200837e-07,
|
| 1646 |
+
"loss": -0.0396,
|
| 1647 |
+
"reward": 0.791666679084301,
|
| 1648 |
+
"reward_std": 0.4833543188869953,
|
| 1649 |
+
"rewards/accuracy_reward": 0.2500000074505806,
|
| 1650 |
+
"rewards/format_reward": 0.541666679084301,
|
| 1651 |
+
"step": 61,
|
| 1652 |
+
"w_high_ratio": 0.04859437793493271,
|
| 1653 |
+
"w_low_ratio": 0.03715647594071925,
|
| 1654 |
+
"w_max": 1.7687608003616333,
|
| 1655 |
+
"w_mean": 1.2712468802928925,
|
| 1656 |
+
"w_min": 0.0,
|
| 1657 |
+
"w_std": 0.24916893057525158
|
| 1658 |
+
},
|
| 1659 |
+
{
|
| 1660 |
+
"clip_ratio/high_max": 0.0,
|
| 1661 |
+
"clip_ratio/high_mean": 0.0,
|
| 1662 |
+
"clip_ratio/low_mean": 0.0,
|
| 1663 |
+
"clip_ratio/low_min": 0.0,
|
| 1664 |
+
"clip_ratio/region_mean": 0.0,
|
| 1665 |
+
"completion_length": 2722.5313110351562,
|
| 1666 |
+
"cov_mean": 3.7332507645260193e-06,
|
| 1667 |
+
"cov_std": 0.3699860963970423,
|
| 1668 |
+
"entropy": 0.30859375,
|
| 1669 |
+
"epoch": 0.07085714285714285,
|
| 1670 |
+
"grad_norm": 1.0045063495635986,
|
| 1671 |
+
"kl": 0.02886199951171875,
|
| 1672 |
+
"learning_rate": 4.4113514698014953e-07,
|
| 1673 |
+
"loss": 0.0303,
|
| 1674 |
+
"reward": 0.8750000298023224,
|
| 1675 |
+
"reward_std": 0.5333737134933472,
|
| 1676 |
+
"rewards/accuracy_reward": 0.29166667722165585,
|
| 1677 |
+
"rewards/format_reward": 0.583333358168602,
|
| 1678 |
+
"step": 62,
|
| 1679 |
+
"w_high_ratio": 0.06600858364254236,
|
| 1680 |
+
"w_low_ratio": 0.04690547380596399,
|
| 1681 |
+
"w_max": 1.9925485253334045,
|
| 1682 |
+
"w_mean": 1.3004825711250305,
|
| 1683 |
+
"w_min": 0.0,
|
| 1684 |
+
"w_std": 0.25962154380977154
|
| 1685 |
+
},
|
| 1686 |
+
{
|
| 1687 |
+
"clip_ratio/high_max": 0.0,
|
| 1688 |
+
"clip_ratio/high_mean": 0.0,
|
| 1689 |
+
"clip_ratio/low_mean": 0.0,
|
| 1690 |
+
"clip_ratio/low_min": 0.0,
|
| 1691 |
+
"clip_ratio/region_mean": 0.0,
|
| 1692 |
+
"completion_length": 2446.8959045410156,
|
| 1693 |
+
"cov_mean": -3.0669682018924505e-05,
|
| 1694 |
+
"cov_std": 0.44857871532440186,
|
| 1695 |
+
"entropy": 0.3876953125,
|
| 1696 |
+
"epoch": 0.072,
|
| 1697 |
+
"grad_norm": 0.4270949065685272,
|
| 1698 |
+
"kl": 0.001964569091796875,
|
| 1699 |
+
"learning_rate": 4.2596318988235037e-07,
|
| 1700 |
+
"loss": -0.0266,
|
| 1701 |
+
"reward": 1.0208333879709244,
|
| 1702 |
+
"reward_std": 0.5118110477924347,
|
| 1703 |
+
"rewards/accuracy_reward": 0.3229166781529784,
|
| 1704 |
+
"rewards/format_reward": 0.6979166716337204,
|
| 1705 |
+
"step": 63,
|
| 1706 |
+
"w_high_ratio": 0.24508000910282135,
|
| 1707 |
+
"w_low_ratio": 0.04229559004306793,
|
| 1708 |
+
"w_max": 2.53433358669281,
|
| 1709 |
+
"w_mean": 1.5855962336063385,
|
| 1710 |
+
"w_min": 3.1441053104750004e-38,
|
| 1711 |
+
"w_std": 0.3284341022372246
|
| 1712 |
+
},
|
| 1713 |
+
{
|
| 1714 |
+
"clip_ratio/high_max": 0.0,
|
| 1715 |
+
"clip_ratio/high_mean": 0.0,
|
| 1716 |
+
"clip_ratio/low_mean": 0.0,
|
| 1717 |
+
"clip_ratio/low_min": 0.0,
|
| 1718 |
+
"clip_ratio/region_mean": 0.0,
|
| 1719 |
+
"completion_length": 3022.9896240234375,
|
| 1720 |
+
"cov_mean": -4.004061929663294e-05,
|
| 1721 |
+
"cov_std": 0.24233128875494003,
|
| 1722 |
+
"entropy": 0.376953125,
|
| 1723 |
+
"epoch": 0.07314285714285715,
|
| 1724 |
+
"grad_norm": 0.22661255300045013,
|
| 1725 |
+
"kl": 0.0009641647338867188,
|
| 1726 |
+
"learning_rate": 4.1094235253127374e-07,
|
| 1727 |
+
"loss": 0.0441,
|
| 1728 |
+
"reward": 0.6666666977107525,
|
| 1729 |
+
"reward_std": 0.42464151978492737,
|
| 1730 |
+
"rewards/accuracy_reward": 0.25000000558793545,
|
| 1731 |
+
"rewards/format_reward": 0.41666668467223644,
|
| 1732 |
+
"step": 64,
|
| 1733 |
+
"w_high_ratio": 0.05784625560045242,
|
| 1734 |
+
"w_low_ratio": 0.024026920087635517,
|
| 1735 |
+
"w_max": 1.9166311621665955,
|
| 1736 |
+
"w_mean": 1.3068864345550537,
|
| 1737 |
+
"w_min": 0.0,
|
| 1738 |
+
"w_std": 0.15533896535634995
|
| 1739 |
+
},
|
| 1740 |
+
{
|
| 1741 |
+
"clip_ratio/high_max": 0.0,
|
| 1742 |
+
"clip_ratio/high_mean": 0.0,
|
| 1743 |
+
"clip_ratio/low_mean": 0.0,
|
| 1744 |
+
"clip_ratio/low_min": 0.0,
|
| 1745 |
+
"clip_ratio/region_mean": 0.0,
|
| 1746 |
+
"completion_length": 2987.9584350585938,
|
| 1747 |
+
"cov_mean": -1.1828518722722947e-05,
|
| 1748 |
+
"cov_std": 0.3229193612933159,
|
| 1749 |
+
"entropy": 0.33544921875,
|
| 1750 |
+
"epoch": 0.07428571428571429,
|
| 1751 |
+
"grad_norm": 0.20347988605499268,
|
| 1752 |
+
"kl": 0.0009045600891113281,
|
| 1753 |
+
"learning_rate": 3.9609093550344907e-07,
|
| 1754 |
+
"loss": 0.0257,
|
| 1755 |
+
"reward": 0.7187500037252903,
|
| 1756 |
+
"reward_std": 0.4016053378582001,
|
| 1757 |
+
"rewards/accuracy_reward": 0.2500000027939677,
|
| 1758 |
+
"rewards/format_reward": 0.46875,
|
| 1759 |
+
"step": 65,
|
| 1760 |
+
"w_high_ratio": 0.0625,
|
| 1761 |
+
"w_low_ratio": 0.04217576887458563,
|
| 1762 |
+
"w_max": 1.5320913791656494,
|
| 1763 |
+
"w_mean": 1.177913784980774,
|
| 1764 |
+
"w_min": 0.0,
|
| 1765 |
+
"w_std": 0.17242734879255295
|
| 1766 |
+
},
|
| 1767 |
+
{
|
| 1768 |
+
"clip_ratio/high_max": 0.0,
|
| 1769 |
+
"clip_ratio/high_mean": 0.0,
|
| 1770 |
+
"clip_ratio/low_mean": 0.0,
|
| 1771 |
+
"clip_ratio/low_min": 0.0,
|
| 1772 |
+
"clip_ratio/region_mean": 0.0,
|
| 1773 |
+
"completion_length": 2277.3542709350586,
|
| 1774 |
+
"cov_mean": 2.5952657381367317e-05,
|
| 1775 |
+
"cov_std": 0.2567654103040695,
|
| 1776 |
+
"entropy": 0.30517578125,
|
| 1777 |
+
"epoch": 0.07542857142857143,
|
| 1778 |
+
"grad_norm": 0.39114055037498474,
|
| 1779 |
+
"kl": 0.0011463165283203125,
|
| 1780 |
+
"learning_rate": 3.8142703296283953e-07,
|
| 1781 |
+
"loss": 0.0283,
|
| 1782 |
+
"reward": 0.8750000111758709,
|
| 1783 |
+
"reward_std": 0.32678278163075447,
|
| 1784 |
+
"rewards/accuracy_reward": 0.35416667349636555,
|
| 1785 |
+
"rewards/format_reward": 0.520833333954215,
|
| 1786 |
+
"step": 66,
|
| 1787 |
+
"w_high_ratio": 0.050385382026433945,
|
| 1788 |
+
"w_low_ratio": 0.036725505255162716,
|
| 1789 |
+
"w_max": 1.6868340969085693,
|
| 1790 |
+
"w_mean": 1.2045796811580658,
|
| 1791 |
+
"w_min": 4.016504513755072e-38,
|
| 1792 |
+
"w_std": 0.18733475357294083
|
| 1793 |
+
},
|
| 1794 |
+
{
|
| 1795 |
+
"clip_ratio/high_max": 0.0,
|
| 1796 |
+
"clip_ratio/high_mean": 0.0,
|
| 1797 |
+
"clip_ratio/low_mean": 0.0,
|
| 1798 |
+
"clip_ratio/low_min": 0.0,
|
| 1799 |
+
"clip_ratio/region_mean": 0.0,
|
| 1800 |
+
"completion_length": 3692.8646240234375,
|
| 1801 |
+
"cov_mean": -1.3762917205895064e-05,
|
| 1802 |
+
"cov_std": 0.1253320723772049,
|
| 1803 |
+
"entropy": 0.3369140625,
|
| 1804 |
+
"epoch": 0.07657142857142857,
|
| 1805 |
+
"grad_norm": 0.09913324564695358,
|
| 1806 |
+
"kl": 0.0007886886596679688,
|
| 1807 |
+
"learning_rate": 3.6696851061588994e-07,
|
| 1808 |
+
"loss": -0.0047,
|
| 1809 |
+
"reward": 0.1979166716337204,
|
| 1810 |
+
"reward_std": 0.14884886890649796,
|
| 1811 |
+
"rewards/accuracy_reward": 0.03125,
|
| 1812 |
+
"rewards/format_reward": 0.1666666716337204,
|
| 1813 |
+
"step": 67,
|
| 1814 |
+
"w_high_ratio": 0.0,
|
| 1815 |
+
"w_low_ratio": 0.014627222903072834,
|
| 1816 |
+
"w_max": 1.3194924890995026,
|
| 1817 |
+
"w_mean": 1.0943252593278885,
|
| 1818 |
+
"w_min": 0.5,
|
| 1819 |
+
"w_std": 0.08329889550805092
|
| 1820 |
+
},
|
| 1821 |
+
{
|
| 1822 |
+
"clip_ratio/high_max": 0.0,
|
| 1823 |
+
"clip_ratio/high_mean": 0.0,
|
| 1824 |
+
"clip_ratio/low_mean": 0.0,
|
| 1825 |
+
"clip_ratio/low_min": 0.0,
|
| 1826 |
+
"clip_ratio/region_mean": 0.0,
|
| 1827 |
+
"completion_length": 2192.4896240234375,
|
| 1828 |
+
"cov_mean": 5.210197195992805e-05,
|
| 1829 |
+
"cov_std": 0.2974618822336197,
|
| 1830 |
+
"entropy": 0.3623046875,
|
| 1831 |
+
"epoch": 0.07771428571428571,
|
| 1832 |
+
"grad_norm": 0.48533204197883606,
|
| 1833 |
+
"kl": 0.004261016845703125,
|
| 1834 |
+
"learning_rate": 3.5273298394491515e-07,
|
| 1835 |
+
"loss": -0.0773,
|
| 1836 |
+
"reward": 0.864583358168602,
|
| 1837 |
+
"reward_std": 0.3760298416018486,
|
| 1838 |
+
"rewards/accuracy_reward": 0.2395833432674408,
|
| 1839 |
+
"rewards/format_reward": 0.625,
|
| 1840 |
+
"step": 68,
|
| 1841 |
+
"w_high_ratio": 0.2266840934753418,
|
| 1842 |
+
"w_low_ratio": 0.028733241837471724,
|
| 1843 |
+
"w_max": 2.714290827512741,
|
| 1844 |
+
"w_mean": 1.5712151527404785,
|
| 1845 |
+
"w_min": 0.25,
|
| 1846 |
+
"w_std": 0.22248514741659164
|
| 1847 |
+
},
|
| 1848 |
+
{
|
| 1849 |
+
"clip_ratio/high_max": 0.0,
|
| 1850 |
+
"clip_ratio/high_mean": 0.0,
|
| 1851 |
+
"clip_ratio/low_mean": 0.0,
|
| 1852 |
+
"clip_ratio/low_min": 0.0,
|
| 1853 |
+
"clip_ratio/region_mean": 0.0,
|
| 1854 |
+
"completion_length": 2785.791748046875,
|
| 1855 |
+
"cov_mean": 5.550627793127205e-05,
|
| 1856 |
+
"cov_std": 0.24297186359763145,
|
| 1857 |
+
"entropy": 0.435546875,
|
| 1858 |
+
"epoch": 0.07885714285714286,
|
| 1859 |
+
"grad_norm": 0.4455502927303314,
|
| 1860 |
+
"kl": 0.003231048583984375,
|
| 1861 |
+
"learning_rate": 3.387377967463493e-07,
|
| 1862 |
+
"loss": -0.0721,
|
| 1863 |
+
"reward": 0.4791666716337204,
|
| 1864 |
+
"reward_std": 0.22700205445289612,
|
| 1865 |
+
"rewards/accuracy_reward": 0.03125,
|
| 1866 |
+
"rewards/format_reward": 0.4479166716337204,
|
| 1867 |
+
"step": 69,
|
| 1868 |
+
"w_high_ratio": 0.18034584820270538,
|
| 1869 |
+
"w_low_ratio": 0.019336777739226818,
|
| 1870 |
+
"w_max": 2.187383383512497,
|
| 1871 |
+
"w_mean": 1.3382967710494995,
|
| 1872 |
+
"w_min": 0.25,
|
| 1873 |
+
"w_std": 0.14556573703885078
|
| 1874 |
+
},
|
| 1875 |
+
{
|
| 1876 |
+
"clip_ratio/high_max": 0.0,
|
| 1877 |
+
"clip_ratio/high_mean": 0.0,
|
| 1878 |
+
"clip_ratio/low_mean": 0.0,
|
| 1879 |
+
"clip_ratio/low_min": 0.0,
|
| 1880 |
+
"clip_ratio/region_mean": 0.0,
|
| 1881 |
+
"completion_length": 3339.760498046875,
|
| 1882 |
+
"cov_mean": 3.0603625077674224e-05,
|
| 1883 |
+
"cov_std": 0.3574352115392685,
|
| 1884 |
+
"entropy": 0.35400390625,
|
| 1885 |
+
"epoch": 0.08,
|
| 1886 |
+
"grad_norm": 0.4004529118537903,
|
| 1887 |
+
"kl": 0.0030617713928222656,
|
| 1888 |
+
"learning_rate": 3.250000000000001e-07,
|
| 1889 |
+
"loss": 0.0435,
|
| 1890 |
+
"reward": 0.48958334513008595,
|
| 1891 |
+
"reward_std": 0.448788546025753,
|
| 1892 |
+
"rewards/accuracy_reward": 0.1145833395421505,
|
| 1893 |
+
"rewards/format_reward": 0.37500001676380634,
|
| 1894 |
+
"step": 70,
|
| 1895 |
+
"w_high_ratio": 0.056590817868709564,
|
| 1896 |
+
"w_low_ratio": 0.04363738652318716,
|
| 1897 |
+
"w_max": 1.621414452791214,
|
| 1898 |
+
"w_mean": 1.1405479907989502,
|
| 1899 |
+
"w_min": 0.0,
|
| 1900 |
+
"w_std": 0.21908994019031525
|
| 1901 |
+
},
|
| 1902 |
+
{
|
| 1903 |
+
"clip_ratio/high_max": 0.0,
|
| 1904 |
+
"clip_ratio/high_mean": 0.0,
|
| 1905 |
+
"clip_ratio/low_mean": 0.0,
|
| 1906 |
+
"clip_ratio/low_min": 0.0,
|
| 1907 |
+
"clip_ratio/region_mean": 0.0,
|
| 1908 |
+
"completion_length": 2994.0834197998047,
|
| 1909 |
+
"cov_mean": 5.412803147919476e-06,
|
| 1910 |
+
"cov_std": 0.24315955862402916,
|
| 1911 |
+
"entropy": 0.39501953125,
|
| 1912 |
+
"epoch": 0.08114285714285714,
|
| 1913 |
+
"grad_norm": 0.43441104888916016,
|
| 1914 |
+
"kl": 0.003234386444091797,
|
| 1915 |
+
"learning_rate": 3.115363310950578e-07,
|
| 1916 |
+
"loss": 0.0297,
|
| 1917 |
+
"reward": 0.4791666669771075,
|
| 1918 |
+
"reward_std": 0.2685965895652771,
|
| 1919 |
+
"rewards/accuracy_reward": 0.1458333395421505,
|
| 1920 |
+
"rewards/format_reward": 0.3333333460614085,
|
| 1921 |
+
"step": 71,
|
| 1922 |
+
"w_high_ratio": 0.05637816712260246,
|
| 1923 |
+
"w_low_ratio": 0.024559800047427416,
|
| 1924 |
+
"w_max": 1.8640311062335968,
|
| 1925 |
+
"w_mean": 1.2403113842010498,
|
| 1926 |
+
"w_min": 0.25,
|
| 1927 |
+
"w_std": 0.18912436068058014
|
| 1928 |
+
},
|
| 1929 |
+
{
|
| 1930 |
+
"clip_ratio/high_max": 0.0,
|
| 1931 |
+
"clip_ratio/high_mean": 0.0,
|
| 1932 |
+
"clip_ratio/low_mean": 0.0,
|
| 1933 |
+
"clip_ratio/low_min": 0.0,
|
| 1934 |
+
"clip_ratio/region_mean": 0.0,
|
| 1935 |
+
"completion_length": 3187.5313110351562,
|
| 1936 |
+
"cov_mean": -5.872446490684524e-05,
|
| 1937 |
+
"cov_std": 0.47990038990974426,
|
| 1938 |
+
"entropy": 0.45556640625,
|
| 1939 |
+
"epoch": 0.08228571428571428,
|
| 1940 |
+
"grad_norm": 0.49201899766921997,
|
| 1941 |
+
"kl": 0.0031557083129882812,
|
| 1942 |
+
"learning_rate": 2.9836319343816397e-07,
|
| 1943 |
+
"loss": -0.0603,
|
| 1944 |
+
"reward": 0.510416679084301,
|
| 1945 |
+
"reward_std": 0.46190567314624786,
|
| 1946 |
+
"rewards/accuracy_reward": 0.07291666697710752,
|
| 1947 |
+
"rewards/format_reward": 0.4375000074505806,
|
| 1948 |
+
"step": 72,
|
| 1949 |
+
"w_high_ratio": 0.13975006341934204,
|
| 1950 |
+
"w_low_ratio": 0.04982540290802717,
|
| 1951 |
+
"w_max": 1.821915477514267,
|
| 1952 |
+
"w_mean": 1.327934205532074,
|
| 1953 |
+
"w_min": 0.0,
|
| 1954 |
+
"w_std": 0.2968912795186043
|
| 1955 |
+
},
|
| 1956 |
+
{
|
| 1957 |
+
"clip_ratio/high_max": 0.0,
|
| 1958 |
+
"clip_ratio/high_mean": 0.0,
|
| 1959 |
+
"clip_ratio/low_mean": 0.0,
|
| 1960 |
+
"clip_ratio/low_min": 0.0,
|
| 1961 |
+
"clip_ratio/region_mean": 0.0,
|
| 1962 |
+
"completion_length": 3788.3125610351562,
|
| 1963 |
+
"cov_mean": -2.8033528451487655e-05,
|
| 1964 |
+
"cov_std": 0.27588948607444763,
|
| 1965 |
+
"entropy": 0.44091796875,
|
| 1966 |
+
"epoch": 0.08342857142857144,
|
| 1967 |
+
"grad_norm": 0.1494845449924469,
|
| 1968 |
+
"kl": 0.00031256675720214844,
|
| 1969 |
+
"learning_rate": 2.854966364683872e-07,
|
| 1970 |
+
"loss": -0.0024,
|
| 1971 |
+
"reward": 0.3541666716337204,
|
| 1972 |
+
"reward_std": 0.38621756434440613,
|
| 1973 |
+
"rewards/accuracy_reward": 0.1354166716337204,
|
| 1974 |
+
"rewards/format_reward": 0.2187500037252903,
|
| 1975 |
+
"step": 73,
|
| 1976 |
+
"w_high_ratio": 0.0,
|
| 1977 |
+
"w_low_ratio": 0.03520650416612625,
|
| 1978 |
+
"w_max": 1.26780503988266,
|
| 1979 |
+
"w_mean": 1.0412998497486115,
|
| 1980 |
+
"w_min": 0.25,
|
| 1981 |
+
"w_std": 0.1442563608288765
|
| 1982 |
+
},
|
| 1983 |
+
{
|
| 1984 |
+
"clip_ratio/high_max": 0.0,
|
| 1985 |
+
"clip_ratio/high_mean": 0.0,
|
| 1986 |
+
"clip_ratio/low_mean": 0.0,
|
| 1987 |
+
"clip_ratio/low_min": 0.0,
|
| 1988 |
+
"clip_ratio/region_mean": 0.0,
|
| 1989 |
+
"completion_length": 3536.3751220703125,
|
| 1990 |
+
"cov_mean": 5.950678314547986e-05,
|
| 1991 |
+
"cov_std": 0.23448628932237625,
|
| 1992 |
+
"entropy": 0.3876953125,
|
| 1993 |
+
"epoch": 0.08457142857142858,
|
| 1994 |
+
"grad_norm": 0.26546525955200195,
|
| 1995 |
+
"kl": 0.0013303756713867188,
|
| 1996 |
+
"learning_rate": 2.729523361034538e-07,
|
| 1997 |
+
"loss": 0.0267,
|
| 1998 |
+
"reward": 0.4791666865348816,
|
| 1999 |
+
"reward_std": 0.3228915072977543,
|
| 2000 |
+
"rewards/accuracy_reward": 0.21875,
|
| 2001 |
+
"rewards/format_reward": 0.2604166716337204,
|
| 2002 |
+
"step": 74,
|
| 2003 |
+
"w_high_ratio": 0.0,
|
| 2004 |
+
"w_low_ratio": 0.030950261279940605,
|
| 2005 |
+
"w_max": 1.3889389336109161,
|
| 2006 |
+
"w_mean": 1.0708437263965607,
|
| 2007 |
+
"w_min": 0.0,
|
| 2008 |
+
"w_std": 0.1411808580160141
|
| 2009 |
+
},
|
| 2010 |
+
{
|
| 2011 |
+
"clip_ratio/high_max": 0.0,
|
| 2012 |
+
"clip_ratio/high_mean": 0.0,
|
| 2013 |
+
"clip_ratio/low_mean": 0.0,
|
| 2014 |
+
"clip_ratio/low_min": 0.0,
|
| 2015 |
+
"clip_ratio/region_mean": 0.0,
|
| 2016 |
+
"completion_length": 3248.729248046875,
|
| 2017 |
+
"cov_mean": -2.3449571926903445e-05,
|
| 2018 |
+
"cov_std": 0.35559114813804626,
|
| 2019 |
+
"entropy": 0.35546875,
|
| 2020 |
+
"epoch": 0.08571428571428572,
|
| 2021 |
+
"grad_norm": 0.23657207190990448,
|
| 2022 |
+
"kl": 0.0014653205871582031,
|
| 2023 |
+
"learning_rate": 2.6074557564105724e-07,
|
| 2024 |
+
"loss": 0.0671,
|
| 2025 |
+
"reward": 0.5625000111758709,
|
| 2026 |
+
"reward_std": 0.37235569953918457,
|
| 2027 |
+
"rewards/accuracy_reward": 0.1875,
|
| 2028 |
+
"rewards/format_reward": 0.3750000037252903,
|
| 2029 |
+
"step": 75,
|
| 2030 |
+
"w_high_ratio": 0.05818156525492668,
|
| 2031 |
+
"w_low_ratio": 0.04433906823396683,
|
| 2032 |
+
"w_max": 1.794895738363266,
|
| 2033 |
+
"w_mean": 1.1899544298648834,
|
| 2034 |
+
"w_min": 0.25,
|
| 2035 |
+
"w_std": 0.21789918839931488
|
| 2036 |
+
},
|
| 2037 |
+
{
|
| 2038 |
+
"clip_ratio/high_max": 0.0,
|
| 2039 |
+
"clip_ratio/high_mean": 0.0,
|
| 2040 |
+
"clip_ratio/low_mean": 0.0,
|
| 2041 |
+
"clip_ratio/low_min": 0.0,
|
| 2042 |
+
"clip_ratio/region_mean": 0.0,
|
| 2043 |
+
"completion_length": 3163.4583740234375,
|
| 2044 |
+
"cov_mean": 1.617619454918895e-05,
|
| 2045 |
+
"cov_std": 0.1999940201640129,
|
| 2046 |
+
"entropy": 0.37890625,
|
| 2047 |
+
"epoch": 0.08685714285714285,
|
| 2048 |
+
"grad_norm": 0.1549980193376541,
|
| 2049 |
+
"kl": 0.00043952465057373047,
|
| 2050 |
+
"learning_rate": 2.488912271385139e-07,
|
| 2051 |
+
"loss": 0.016,
|
| 2052 |
+
"reward": 0.4479166865348816,
|
| 2053 |
+
"reward_std": 0.24646350741386414,
|
| 2054 |
+
"rewards/accuracy_reward": 0.03125,
|
| 2055 |
+
"rewards/format_reward": 0.4166666828095913,
|
| 2056 |
+
"step": 76,
|
| 2057 |
+
"w_high_ratio": 0.0625,
|
| 2058 |
+
"w_low_ratio": 0.027174705173820257,
|
| 2059 |
+
"w_max": 1.7661568522453308,
|
| 2060 |
+
"w_mean": 1.2089157700538635,
|
| 2061 |
+
"w_min": 0.25,
|
| 2062 |
+
"w_std": 0.11429934948682785
|
| 2063 |
+
},
|
| 2064 |
+
{
|
| 2065 |
+
"clip_ratio/high_max": 0.0,
|
| 2066 |
+
"clip_ratio/high_mean": 0.0,
|
| 2067 |
+
"clip_ratio/low_mean": 0.0,
|
| 2068 |
+
"clip_ratio/low_min": 0.0,
|
| 2069 |
+
"clip_ratio/region_mean": 0.0,
|
| 2070 |
+
"completion_length": 3343.1146850585938,
|
| 2071 |
+
"cov_mean": -4.351784809841774e-06,
|
| 2072 |
+
"cov_std": 0.24954789131879807,
|
| 2073 |
+
"entropy": 0.38623046875,
|
| 2074 |
+
"epoch": 0.088,
|
| 2075 |
+
"grad_norm": 0.16160623729228973,
|
| 2076 |
+
"kl": 0.0005016326904296875,
|
| 2077 |
+
"learning_rate": 2.374037332934512e-07,
|
| 2078 |
+
"loss": -0.0028,
|
| 2079 |
+
"reward": 0.5416666977107525,
|
| 2080 |
+
"reward_std": 0.3281986638903618,
|
| 2081 |
+
"rewards/accuracy_reward": 0.15625001024454832,
|
| 2082 |
+
"rewards/format_reward": 0.3854166828095913,
|
| 2083 |
+
"step": 77,
|
| 2084 |
+
"w_high_ratio": 0.0,
|
| 2085 |
+
"w_low_ratio": 0.025709405075758696,
|
| 2086 |
+
"w_max": 1.4693324863910675,
|
| 2087 |
+
"w_mean": 1.1720669269561768,
|
| 2088 |
+
"w_min": 0.25,
|
| 2089 |
+
"w_std": 0.13362640514969826
|
| 2090 |
+
},
|
| 2091 |
+
{
|
| 2092 |
+
"clip_ratio/high_max": 0.0,
|
| 2093 |
+
"clip_ratio/high_mean": 0.0,
|
| 2094 |
+
"clip_ratio/low_mean": 0.0,
|
| 2095 |
+
"clip_ratio/low_min": 0.0,
|
| 2096 |
+
"clip_ratio/region_mean": 0.0,
|
| 2097 |
+
"completion_length": 3459.5625610351562,
|
| 2098 |
+
"cov_mean": 1.626165885681985e-05,
|
| 2099 |
+
"cov_std": 0.48687614500522614,
|
| 2100 |
+
"entropy": 0.37744140625,
|
| 2101 |
+
"epoch": 0.08914285714285715,
|
| 2102 |
+
"grad_norm": 0.24848157167434692,
|
| 2103 |
+
"kl": 0.0005955696105957031,
|
| 2104 |
+
"learning_rate": 2.2629708984760706e-07,
|
| 2105 |
+
"loss": -0.0075,
|
| 2106 |
+
"reward": 0.604166679084301,
|
| 2107 |
+
"reward_std": 0.6587165221571922,
|
| 2108 |
+
"rewards/accuracy_reward": 0.23958334140479565,
|
| 2109 |
+
"rewards/format_reward": 0.3645833395421505,
|
| 2110 |
+
"step": 78,
|
| 2111 |
+
"w_high_ratio": 0.04009601101279259,
|
| 2112 |
+
"w_low_ratio": 0.051254406571388245,
|
| 2113 |
+
"w_max": 1.6542562246322632,
|
| 2114 |
+
"w_mean": 1.174688458442688,
|
| 2115 |
+
"w_min": 0.0,
|
| 2116 |
+
"w_std": 0.27206049114465714
|
| 2117 |
+
},
|
| 2118 |
+
{
|
| 2119 |
+
"clip_ratio/high_max": 0.0,
|
| 2120 |
+
"clip_ratio/high_mean": 0.0,
|
| 2121 |
+
"clip_ratio/low_mean": 0.0,
|
| 2122 |
+
"clip_ratio/low_min": 0.0,
|
| 2123 |
+
"clip_ratio/region_mean": 0.0,
|
| 2124 |
+
"completion_length": 2496.4479370117188,
|
| 2125 |
+
"cov_mean": 4.12471272284165e-05,
|
| 2126 |
+
"cov_std": 0.26665735617280006,
|
| 2127 |
+
"entropy": 0.3173828125,
|
| 2128 |
+
"epoch": 0.09028571428571429,
|
| 2129 |
+
"grad_norm": 0.3005116581916809,
|
| 2130 |
+
"kl": 0.0013518333435058594,
|
| 2131 |
+
"learning_rate": 2.1558482853517253e-07,
|
| 2132 |
+
"loss": -0.0126,
|
| 2133 |
+
"reward": 0.8854166865348816,
|
| 2134 |
+
"reward_std": 0.3675435855984688,
|
| 2135 |
+
"rewards/accuracy_reward": 0.2604166744276881,
|
| 2136 |
+
"rewards/format_reward": 0.6250000149011612,
|
| 2137 |
+
"step": 79,
|
| 2138 |
+
"w_high_ratio": 0.10870501399040222,
|
| 2139 |
+
"w_low_ratio": 0.03222281183116138,
|
| 2140 |
+
"w_max": 1.7319224178791046,
|
| 2141 |
+
"w_mean": 1.2745553255081177,
|
| 2142 |
+
"w_min": 0.0,
|
| 2143 |
+
"w_std": 0.17969628423452377
|
| 2144 |
+
},
|
| 2145 |
+
{
|
| 2146 |
+
"clip_ratio/high_max": 0.0,
|
| 2147 |
+
"clip_ratio/high_mean": 0.0,
|
| 2148 |
+
"clip_ratio/low_mean": 0.0,
|
| 2149 |
+
"clip_ratio/low_min": 0.0,
|
| 2150 |
+
"clip_ratio/region_mean": 0.0,
|
| 2151 |
+
"completion_length": 3547.4063110351562,
|
| 2152 |
+
"cov_mean": -3.7483160667761695e-05,
|
| 2153 |
+
"cov_std": 0.2408691681921482,
|
| 2154 |
+
"entropy": 0.4462890625,
|
| 2155 |
+
"epoch": 0.09142857142857143,
|
| 2156 |
+
"grad_norm": 0.18163201212882996,
|
| 2157 |
+
"kl": 0.001146554946899414,
|
| 2158 |
+
"learning_rate": 2.0528000059645995e-07,
|
| 2159 |
+
"loss": 0.0002,
|
| 2160 |
+
"reward": 0.447916679084301,
|
| 2161 |
+
"reward_std": 0.4143633097410202,
|
| 2162 |
+
"rewards/accuracy_reward": 0.14583333674818277,
|
| 2163 |
+
"rewards/format_reward": 0.30208333395421505,
|
| 2164 |
+
"step": 80,
|
| 2165 |
+
"w_high_ratio": 0.0,
|
| 2166 |
+
"w_low_ratio": 0.028170655015856028,
|
| 2167 |
+
"w_max": 1.4094052910804749,
|
| 2168 |
+
"w_mean": 1.0949373543262482,
|
| 2169 |
+
"w_min": 0.25,
|
| 2170 |
+
"w_std": 0.12403910420835018
|
| 2171 |
+
},
|
| 2172 |
+
{
|
| 2173 |
+
"clip_ratio/high_max": 0.0,
|
| 2174 |
+
"clip_ratio/high_mean": 0.0,
|
| 2175 |
+
"clip_ratio/low_mean": 0.0,
|
| 2176 |
+
"clip_ratio/low_min": 0.0,
|
| 2177 |
+
"clip_ratio/region_mean": 0.0,
|
| 2178 |
+
"completion_length": 3345.6771850585938,
|
| 2179 |
+
"cov_mean": 5.422214962891303e-05,
|
| 2180 |
+
"cov_std": 0.2975510209798813,
|
| 2181 |
+
"entropy": 0.50634765625,
|
| 2182 |
+
"epoch": 0.09257142857142857,
|
| 2183 |
+
"grad_norm": 0.23768211901187897,
|
      "kl": 0.0026841163635253906,
      "learning_rate": 1.9539516087697517e-07,
      "loss": 0.0359,
      "reward": 0.4375,
      "reward_std": 0.29862353205680847,
      "rewards/accuracy_reward": 0.09375000558793545,
      "rewards/format_reward": 0.3437500149011612,
      "step": 81,
      "w_high_ratio": 0.0,
      "w_low_ratio": 0.039988940581679344,
      "w_max": 1.4104009568691254,
      "w_mean": 1.1443687975406647,
      "w_min": 0.25,
      "w_std": 0.15021733939647675
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3128.5313110351562,
      "cov_mean": -4.930557446414241e-05,
      "cov_std": 0.1716586910188198,
      "entropy": 0.40576171875,
      "epoch": 0.09371428571428571,
      "grad_norm": 0.2130168080329895,
      "kl": 0.0011749267578125,
      "learning_rate": 1.8594235253127372e-07,
      "loss": -0.0095,
      "reward": 0.645833358168602,
      "reward_std": 0.28174424916505814,
      "rewards/accuracy_reward": 0.25000000558793545,
      "rewards/format_reward": 0.3958333358168602,
      "step": 82,
      "w_high_ratio": 0.0625,
      "w_low_ratio": 0.020149634685367346,
      "w_max": 1.7989980578422546,
      "w_mean": 1.2575880885124207,
      "w_min": 0.25,
      "w_std": 0.11121102049946785
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3006.2188110351562,
      "cov_mean": -4.109572182642296e-05,
      "cov_std": 0.26048484444618225,
      "entropy": 0.41064453125,
      "epoch": 0.09485714285714286,
      "grad_norm": 0.342207670211792,
      "kl": 0.002028226852416992,
      "learning_rate": 1.7693309235023127e-07,
      "loss": -0.0346,
      "reward": 0.5625000149011612,
      "reward_std": 0.40875787287950516,
      "rewards/accuracy_reward": 0.19791667256504297,
      "rewards/format_reward": 0.3645833469927311,
      "step": 83,
      "w_high_ratio": 0.054204463958740234,
      "w_low_ratio": 0.02545588812790811,
      "w_max": 1.758713960647583,
      "w_mean": 1.165531873703003,
      "w_min": 1.1802768922825628e-26,
      "w_std": 0.13860907219350338
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3249.6770935058594,
      "cov_mean": 1.03536285678274e-06,
      "cov_std": 0.3258565291762352,
      "entropy": 0.40576171875,
      "epoch": 0.096,
      "grad_norm": 0.3563970625400543,
      "kl": 0.0010137557983398438,
      "learning_rate": 1.6837835672960831e-07,
      "loss": -0.0069,
      "reward": 0.6979166865348816,
      "reward_std": 0.44399677217006683,
      "rewards/accuracy_reward": 0.2916666716337204,
      "rewards/format_reward": 0.4062500149011612,
      "step": 84,
      "w_high_ratio": 0.0561169758439064,
      "w_low_ratio": 0.032409061677753925,
      "w_max": 1.6246945261955261,
      "w_mean": 1.1986894607543945,
      "w_min": 0.25,
      "w_std": 0.20476746186614037
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3284.145965576172,
      "cov_mean": -2.360550899993541e-05,
      "cov_std": 0.322089783847332,
      "entropy": 0.313232421875,
      "epoch": 0.09714285714285714,
      "grad_norm": 0.2012510597705841,
      "kl": 0.00048732757568359375,
      "learning_rate": 1.6028856829700258e-07,
      "loss": 0.022,
      "reward": 0.6041666939854622,
      "reward_std": 0.5810144916176796,
      "rewards/accuracy_reward": 0.1875000074505806,
      "rewards/format_reward": 0.4166666753590107,
      "step": 85,
      "w_high_ratio": 0.04291652888059616,
      "w_low_ratio": 0.044021588284522295,
      "w_max": 1.5220162570476532,
      "w_mean": 1.2036064565181732,
      "w_min": 0.0,
      "w_std": 0.2122751884162426
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3284.5208740234375,
      "cov_mean": 7.192745897555142e-05,
      "cov_std": 0.2986150402575731,
      "entropy": 0.4326171875,
      "epoch": 0.09828571428571428,
      "grad_norm": 0.47124311327934265,
      "kl": 0.00186920166015625,
      "learning_rate": 1.5267358321348285e-07,
      "loss": -0.0661,
      "reward": 0.541666679084301,
      "reward_std": 0.26891910284757614,
      "rewards/accuracy_reward": 0.15625000279396772,
      "rewards/format_reward": 0.385416679084301,
      "step": 86,
      "w_high_ratio": 0.14706559106707573,
      "w_low_ratio": 0.02547268127091229,
      "w_max": 2.1021605730056763,
      "w_mean": 1.3188546895980835,
      "w_min": 0.25,
      "w_std": 0.20756208524107933
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 2947.7188110351562,
      "cov_mean": -3.2274874229187844e-05,
      "cov_std": 0.491459421813488,
      "entropy": 0.478515625,
      "epoch": 0.09942857142857142,
      "grad_norm": 0.4571603536605835,
      "kl": 0.00186920166015625,
      "learning_rate": 1.4554267916537495e-07,
      "loss": -0.0175,
      "reward": 0.7604167014360428,
      "reward_std": 0.5320771858096123,
      "rewards/accuracy_reward": 0.1875,
      "rewards/format_reward": 0.5729166939854622,
      "step": 87,
      "w_high_ratio": 0.09944025427103043,
      "w_low_ratio": 0.051899916026741266,
      "w_max": 2.0812322199344635,
      "w_mean": 1.3910081684589386,
      "w_min": 0.0,
      "w_std": 0.28863297775387764
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 2671.010528564453,
      "cov_mean": 8.175054426828865e-05,
      "cov_std": 0.5737064629793167,
      "entropy": 0.42578125,
      "epoch": 0.10057142857142858,
      "grad_norm": 0.8297140598297119,
      "kl": 0.006494998931884766,
      "learning_rate": 1.3890454406082956e-07,
      "loss": -0.0247,
      "reward": 1.052083358168602,
      "reward_std": 0.6991286426782608,
      "rewards/accuracy_reward": 0.385416679084301,
      "rewards/format_reward": 0.6666666865348816,
      "step": 88,
      "w_high_ratio": 0.19003082811832428,
      "w_low_ratio": 0.0572223337367177,
      "w_max": 2.4647006690502167,
      "w_mean": 1.4986785650253296,
      "w_min": 0.0,
      "w_std": 0.39150019735097885
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3554.2291870117188,
      "cov_mean": -3.470801129878964e-05,
      "cov_std": 0.33541250973939896,
      "entropy": 0.4052734375,
      "epoch": 0.10171428571428572,
      "grad_norm": 0.35713109374046326,
      "kl": 0.0019817352294921875,
      "learning_rate": 1.3276726544494571e-07,
      "loss": -0.0037,
      "reward": 0.5625000074505806,
      "reward_std": 0.5584763810038567,
      "rewards/accuracy_reward": 0.2291666716337204,
      "rewards/format_reward": 0.3333333358168602,
      "step": 89,
      "w_high_ratio": 0.0,
      "w_low_ratio": 0.046938784420490265,
      "w_max": 1.4760373830795288,
      "w_mean": 1.1144923567771912,
      "w_min": 4.203895392974451e-45,
      "w_std": 0.22363057732582092
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 2691.072998046875,
      "cov_mean": 3.220158714611898e-05,
      "cov_std": 0.29605602473020554,
      "entropy": 0.5078125,
      "epoch": 0.10285714285714286,
      "grad_norm": 0.3885078728199005,
      "kl": 0.0037512779235839844,
      "learning_rate": 1.2713832064634125e-07,
      "loss": 0.0576,
      "reward": 0.5833333432674408,
      "reward_std": 0.29030610620975494,
      "rewards/accuracy_reward": 0.06250000186264515,
      "rewards/format_reward": 0.5208333535119891,
      "step": 90,
      "w_high_ratio": 0.029189134016633034,
      "w_low_ratio": 0.02723024832084775,
      "w_max": 1.7544832229614258,
      "w_mean": 1.3031582236289978,
      "w_min": 0.25,
      "w_std": 0.1653207652270794
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3429.2813110351562,
      "cov_mean": 4.58851766893531e-06,
      "cov_std": 0.255037359893322,
      "entropy": 0.44970703125,
      "epoch": 0.104,
      "grad_norm": 0.21901822090148926,
      "kl": 0.0016050338745117188,
      "learning_rate": 1.220245676671809e-07,
      "loss": 0.0086,
      "reward": 0.4791666679084301,
      "reward_std": 0.42312975227832794,
      "rewards/accuracy_reward": 0.14583334140479565,
      "rewards/format_reward": 0.33333334140479565,
      "step": 91,
      "w_high_ratio": 0.0,
      "w_low_ratio": 0.03593640075996518,
      "w_max": 1.3688502609729767,
      "w_mean": 1.1163062751293182,
      "w_min": 0.0,
      "w_std": 0.16501911543309689
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 2883.354217529297,
      "cov_mean": 0.00012286239780223696,
      "cov_std": 0.3810538575053215,
      "entropy": 0.35205078125,
      "epoch": 0.10514285714285715,
      "grad_norm": 0.3391018509864807,
      "kl": 0.0031414031982421875,
      "learning_rate": 1.1743223682775649e-07,
      "loss": 0.0203,
      "reward": 0.6979167014360428,
      "reward_std": 0.44955648854374886,
      "rewards/accuracy_reward": 0.18750001024454832,
      "rewards/format_reward": 0.5104166716337204,
      "step": 92,
      "w_high_ratio": 0.04653368145227432,
      "w_low_ratio": 0.04205216746777296,
      "w_max": 1.9255772531032562,
      "w_mean": 1.2982309758663177,
      "w_min": 1.1786321383436036e-41,
      "w_std": 0.2336835414171219
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3868.2708740234375,
      "cov_mean": 6.7976218360854546e-06,
      "cov_std": 0.18175788596272469,
      "entropy": 0.52001953125,
      "epoch": 0.10628571428571429,
      "grad_norm": 0.20475780963897705,
      "kl": 0.0019414424896240234,
      "learning_rate": 1.1336692317580158e-07,
      "loss": 0.0439,
      "reward": 0.07291666883975267,
      "reward_std": 0.16979892551898956,
      "rewards/accuracy_reward": 0.010416666977107525,
      "rewards/format_reward": 0.06250000186264515,
      "step": 93,
      "w_high_ratio": 0.0,
      "w_low_ratio": 0.026896574534475803,
      "w_max": 1.2091633975505829,
      "w_mean": 1.0145649313926697,
      "w_min": 0.25,
      "w_std": 0.10182364657521248
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3365.0313720703125,
      "cov_mean": 5.4338164773071185e-05,
      "cov_std": 0.3005314916372299,
      "entropy": 0.498046875,
      "epoch": 0.10742857142857143,
      "grad_norm": 0.2650594711303711,
      "kl": 0.0022263526916503906,
      "learning_rate": 1.0983357966978745e-07,
      "loss": -0.0027,
      "reward": 0.40625001303851604,
      "reward_std": 0.3088777959346771,
      "rewards/accuracy_reward": 0.13541666977107525,
      "rewards/format_reward": 0.27083333395421505,
      "step": 94,
      "w_high_ratio": 0.0,
      "w_low_ratio": 0.03483147523365915,
      "w_max": 1.5337306559085846,
      "w_mean": 1.0965670943260193,
      "w_min": 3.503246160812043e-46,
      "w_std": 0.16744443587958813
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3801.90625,
      "cov_mean": 3.249865312682232e-05,
      "cov_std": 0.22550634294748306,
      "entropy": 0.39990234375,
      "epoch": 0.10857142857142857,
      "grad_norm": 0.1532547026872635,
      "kl": 0.00028705596923828125,
      "learning_rate": 1.068365111445064e-07,
      "loss": 0.0152,
      "reward": 0.2395833395421505,
      "reward_std": 0.36024561524391174,
      "rewards/accuracy_reward": 0.07291666883975267,
      "rewards/format_reward": 0.16666666977107525,
      "step": 95,
      "w_high_ratio": 0.0,
      "w_low_ratio": 0.027520990930497646,
      "w_max": 1.2406704127788544,
      "w_mean": 1.0429764091968536,
      "w_min": 0.25,
      "w_std": 0.12879234366118908
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 2939.229248046875,
      "cov_mean": -0.00012694882570940536,
      "cov_std": 0.42605962604284286,
      "entropy": 0.35693359375,
      "epoch": 0.10971428571428571,
      "grad_norm": 0.46688124537467957,
      "kl": 0.0020439624786376953,
      "learning_rate": 1.0437936906629334e-07,
      "loss": -0.018,
      "reward": 0.7500000298023224,
      "reward_std": 0.5337796807289124,
      "rewards/accuracy_reward": 0.260416679084301,
      "rewards/format_reward": 0.4895833432674408,
      "step": 96,
      "w_high_ratio": 0.09955519065260887,
      "w_low_ratio": 0.04101596772670746,
      "w_max": 2.1674250662326813,
      "w_mean": 1.3317046463489532,
      "w_min": 0.0,
      "w_std": 0.2793598398566246
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3480.8334350585938,
      "cov_mean": -2.861599477910204e-05,
      "cov_std": 0.2684932127594948,
      "entropy": 0.400390625,
      "epoch": 0.11085714285714286,
      "grad_norm": 0.336283415555954,
      "kl": 0.0008549690246582031,
      "learning_rate": 1.0246514708427701e-07,
      "loss": -0.0136,
      "reward": 0.5416666716337204,
      "reward_std": 0.3185878023505211,
      "rewards/accuracy_reward": 0.21875,
      "rewards/format_reward": 0.3229166716337204,
      "step": 97,
      "w_high_ratio": 0.0,
      "w_low_ratio": 0.03224400524049997,
      "w_max": 1.3480738699436188,
      "w_mean": 1.129571795463562,
      "w_min": 0.25,
      "w_std": 0.1670757606625557
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3187.0418090820312,
      "cov_mean": -3.800686135946307e-05,
      "cov_std": 0.3548770062625408,
      "entropy": 0.37158203125,
      "epoch": 0.112,
      "grad_norm": 0.4635946452617645,
      "kl": 0.0006475448608398438,
      "learning_rate": 1.0109617738307911e-07,
      "loss": -0.0451,
      "reward": 0.5312500149011612,
      "reward_std": 0.2919851318001747,
      "rewards/accuracy_reward": 0.13541666977107525,
      "rewards/format_reward": 0.3958333432674408,
      "step": 98,
      "w_high_ratio": 0.08355391025543213,
      "w_low_ratio": 0.03265182231552899,
      "w_max": 1.7274979948997498,
      "w_mean": 1.2217411994934082,
      "w_min": 0.0,
      "w_std": 0.2073941007256508
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 3087.635467529297,
      "cov_mean": 6.88847137553239e-06,
      "cov_std": 0.1545543149113655,
      "entropy": 0.34375,
      "epoch": 0.11314285714285714,
      "grad_norm": 0.336643785238266,
      "kl": 0.001453399658203125,
      "learning_rate": 1.002741278414069e-07,
      "loss": 0.0319,
      "reward": 0.5000000204890966,
      "reward_std": 0.22891659289598465,
      "rewards/accuracy_reward": 0.19791667442768812,
      "rewards/format_reward": 0.3020833348855376,
      "step": 99,
      "w_high_ratio": 0.06669171899557114,
      "w_low_ratio": 0.019805304240435362,
      "w_max": 1.3878345787525177,
      "w_mean": 1.1877047568559647,
      "w_min": 0.25,
      "w_std": 0.10978816263377666
    },
    {
      "clip_ratio/high_max": 0.0,
      "clip_ratio/high_mean": 0.0,
      "clip_ratio/low_mean": 0.0,
      "clip_ratio/low_min": 0.0,
      "clip_ratio/region_mean": 0.0,
      "completion_length": 2950.5938110351562,
      "cov_mean": 4.911199903290253e-05,
      "cov_std": 0.4131714701652527,
      "entropy": 0.34521484375,
      "epoch": 0.11428571428571428,
      "grad_norm": 0.6010158061981201,
      "kl": 0.0022754669189453125,
      "learning_rate": 1e-07,
      "loss": -0.0995,
      "reward": 0.8750000223517418,
      "reward_std": 0.5578841716051102,
      "rewards/accuracy_reward": 0.3541666865348816,
      "rewards/format_reward": 0.5208333507180214,
      "step": 100,
      "w_high_ratio": 0.19728600606322289,
      "w_low_ratio": 0.03664776147343218,
      "w_max": 2.3017463386058807,
      "w_mean": 1.389022558927536,
      "w_min": 4.2088412759042857e-39,
      "w_std": 0.2764138747006655
    },
    {
      "epoch": 0.11428571428571428,
      "step": 100,
      "total_flos": 0.0,
      "train_loss": -0.0008640377339906991,
      "train_runtime": 8354.3803,
      "train_samples_per_second": 1.149,
      "train_steps_per_second": 0.012
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 6,
  "trial_name": null,
  "trial_params": null
}